@@ -0,0 +1,21 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2016-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

from feedgenerator import Rss201rev2Feed, Atom1Feed
\ No newline at end of file
@@ -0,0 +1,117 b''
# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Django nor the names of its contributors may be used
#    to endorse or promote products derived from this software without
#    specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Python's datetime strftime doesn't handle dates before 1900.
# These classes override date and datetime to support the formatting of a date
# through its full "proleptic Gregorian" date range.
#
# Based on code submitted to comp.lang.python by Andrew Dalke
#
# >>> datetime_safe.date(1850, 8, 2).strftime("%Y/%m/%d was a %A")
# '1850/08/02 was a Friday'

from datetime import date as real_date, datetime as real_datetime
import re
import time

class date(real_date):
    def strftime(self, fmt):
        return strftime(self, fmt)

class datetime(real_datetime):
    def strftime(self, fmt):
        return strftime(self, fmt)

    def combine(self, date, time):
        return datetime(date.year, date.month, date.day, time.hour, time.minute, time.microsecond, time.tzinfo)

    def date(self):
        return date(self.year, self.month, self.day)

def new_date(d):
    "Generate a safe date from a datetime.date object."
    return date(d.year, d.month, d.day)

def new_datetime(d):
    """
    Generate a safe datetime from a datetime.date or datetime.datetime object.
    """
    kw = [d.year, d.month, d.day]
    if isinstance(d, real_datetime):
        kw.extend([d.hour, d.minute, d.second, d.microsecond, d.tzinfo])
    return datetime(*kw)

# This library does not support strftime's "%s" or "%y" format strings.
# Allowed if there's an even number of "%"s because they are escaped.
_illegal_formatting = re.compile(r"((^|[^%])(%%)*%[sy])")

def _findall(text, substr):
    # Also finds overlaps
    sites = []
    i = 0
    while 1:
        j = text.find(substr, i)
        if j == -1:
            break
        sites.append(j)
        i = j + 1
    return sites

def strftime(dt, fmt):
    if dt.year >= 1900:
        return super(type(dt), dt).strftime(fmt)
    illegal_formatting = _illegal_formatting.search(fmt)
    if illegal_formatting:
        raise TypeError("strftime of dates before 1900 does not handle" + illegal_formatting.group(0))

    year = dt.year
    # For every non-leap year century, advance by
    # 6 years to get into the 28-year repeat cycle
    delta = 2000 - year
    off = 6 * (delta // 100 + delta // 400)
    year = year + off

    # Move to around the year 2000
    year = year + ((2000 - year) // 28) * 28
    timetuple = dt.timetuple()
    s1 = time.strftime(fmt, (year,) + timetuple[1:])
    sites1 = _findall(s1, str(year))

    s2 = time.strftime(fmt, (year+28,) + timetuple[1:])
    sites2 = _findall(s2, str(year+28))

    sites = []
    for site in sites1:
        if site in sites2:
            sites.append(site)

    s = s1
    syear = "%04d" % (dt.year,)
    for site in sites:
        s = s[:site] + syear + s[site+4:]
    return s
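
A quick sanity check of what the vendored module above is for (a sketch only; the expected output comes from the module's own doctest comment, and the import path is the one used by feedgenerator.py in the next hunk):

```python
import datetime

# Vendored module added in this changeset.
from rhodecode.lib.feedgenerator import datetime_safe

# Plain date.strftime() historically failed for years < 1900 on Python 2;
# the safe subclass routes such dates through the 28-year cycle trick.
d = datetime_safe.date(1850, 8, 2)
print(d.strftime("%Y/%m/%d was a %A"))  # '1850/08/02 was a Friday'

# new_datetime() upgrades a stdlib datetime to the safe subclass.
safe = datetime_safe.new_datetime(datetime.datetime(1899, 12, 31, 23, 59))
print(safe.strftime("%d %b %Y %H:%M"))  # '31 Dec 1899 23:59'
```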
@@ -0,0 +1,444 b''
# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Django nor the names of its contributors may be used
#    to endorse or promote products derived from this software without
#    specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""
For definitions of the different versions of RSS, see:
http://web.archive.org/web/20110718035220/http://diveintomark.org/archives/2004/02/04/incompatible-rss
"""
from __future__ import unicode_literals

import datetime
from StringIO import StringIO
from six.moves.urllib import parse as urlparse

from rhodecode.lib.feedgenerator import datetime_safe
from rhodecode.lib.feedgenerator.utils import SimplerXMLGenerator, iri_to_uri, force_text


#### The following code comes from ``django.utils.feedgenerator`` ####


def rfc2822_date(date):
    # We can't use strftime() because it produces locale-dependent results, so
    # we have to map english month and day names manually
    months = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec',)
    days = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')
    # Support datetime objects older than 1900
    date = datetime_safe.new_datetime(date)
    # We do this ourselves to be timezone aware, email.Utils is not tz aware.
    dow = days[date.weekday()]
    month = months[date.month - 1]
    time_str = date.strftime('%s, %%d %s %%Y %%H:%%M:%%S ' % (dow, month))

    time_str = time_str.decode('utf-8')
    offset = date.utcoffset()
    # Historically, this function assumes that naive datetimes are in UTC.
    if offset is None:
        return time_str + '-0000'
    else:
        timezone = (offset.days * 24 * 60) + (offset.seconds // 60)
        hour, minute = divmod(timezone, 60)
        return time_str + '%+03d%02d' % (hour, minute)


def rfc3339_date(date):
    # Support datetime objects older than 1900
    date = datetime_safe.new_datetime(date)
    time_str = date.strftime('%Y-%m-%dT%H:%M:%S')

    time_str = time_str.decode('utf-8')
    offset = date.utcoffset()
    # Historically, this function assumes that naive datetimes are in UTC.
    if offset is None:
        return time_str + 'Z'
    else:
        timezone = (offset.days * 24 * 60) + (offset.seconds // 60)
        hour, minute = divmod(timezone, 60)
        return time_str + '%+03d:%02d' % (hour, minute)


def get_tag_uri(url, date):
    """
    Creates a TagURI.

    See http://web.archive.org/web/20110514113830/http://diveintomark.org/archives/2004/05/28/howto-atom-id
    """
    bits = urlparse(url)
    d = ''
    if date is not None:
        d = ',%s' % datetime_safe.new_datetime(date).strftime('%Y-%m-%d')
    return 'tag:%s%s:%s/%s' % (bits.hostname, d, bits.path, bits.fragment)


class SyndicationFeed(object):
    """Base class for all syndication feeds. Subclasses should provide write()"""

    def __init__(self, title, link, description, language=None, author_email=None,
                 author_name=None, author_link=None, subtitle=None, categories=None,
                 feed_url=None, feed_copyright=None, feed_guid=None, ttl=None, **kwargs):
        def to_unicode(s):
            return force_text(s, strings_only=True)
        if categories:
            categories = [force_text(c) for c in categories]
        if ttl is not None:
            # Force ints to unicode
            ttl = force_text(ttl)
        self.feed = {
            'title': to_unicode(title),
            'link': iri_to_uri(link),
            'description': to_unicode(description),
            'language': to_unicode(language),
            'author_email': to_unicode(author_email),
            'author_name': to_unicode(author_name),
            'author_link': iri_to_uri(author_link),
            'subtitle': to_unicode(subtitle),
            'categories': categories or (),
            'feed_url': iri_to_uri(feed_url),
            'feed_copyright': to_unicode(feed_copyright),
            'id': feed_guid or link,
            'ttl': ttl,
        }
        self.feed.update(kwargs)
        self.items = []

    def add_item(self, title, link, description, author_email=None,
                 author_name=None, author_link=None, pubdate=None, comments=None,
                 unique_id=None, unique_id_is_permalink=None, enclosure=None,
                 categories=(), item_copyright=None, ttl=None, updateddate=None,
                 enclosures=None, **kwargs):
        """
        Adds an item to the feed. All args are expected to be Python Unicode
        objects except pubdate and updateddate, which are datetime.datetime
        objects, and enclosures, which is an iterable of instances of the
        Enclosure class.
        """
        def to_unicode(s):
            return force_text(s, strings_only=True)
        if categories:
            categories = [to_unicode(c) for c in categories]
        if ttl is not None:
            # Force ints to unicode
            ttl = force_text(ttl)
        if enclosure is None:
            enclosures = [] if enclosures is None else enclosures

        item = {
            'title': to_unicode(title),
            'link': iri_to_uri(link),
            'description': to_unicode(description),
            'author_email': to_unicode(author_email),
            'author_name': to_unicode(author_name),
            'author_link': iri_to_uri(author_link),
            'pubdate': pubdate,
            'updateddate': updateddate,
            'comments': to_unicode(comments),
            'unique_id': to_unicode(unique_id),
            'unique_id_is_permalink': unique_id_is_permalink,
            'enclosures': enclosures,
            'categories': categories or (),
            'item_copyright': to_unicode(item_copyright),
            'ttl': ttl,
        }
        item.update(kwargs)
        self.items.append(item)

    def num_items(self):
        return len(self.items)

    def root_attributes(self):
        """
        Return extra attributes to place on the root (i.e. feed/channel) element.
        Called from write().
        """
        return {}

    def add_root_elements(self, handler):
        """
        Add elements in the root (i.e. feed/channel) element. Called
        from write().
        """
        pass

    def item_attributes(self, item):
        """
        Return extra attributes to place on each item (i.e. item/entry) element.
        """
        return {}

    def add_item_elements(self, handler, item):
        """
        Add elements on each item (i.e. item/entry) element.
        """
        pass

    def write(self, outfile, encoding):
        """
        Outputs the feed in the given encoding to outfile, which is a file-like
        object. Subclasses should override this.
        """
        raise NotImplementedError('subclasses of SyndicationFeed must provide a write() method')

    def writeString(self, encoding):
        """
        Returns the feed in the given encoding as a string.
        """
        s = StringIO()
        self.write(s, encoding)
        return s.getvalue()

    def latest_post_date(self):
        """
        Returns the latest item's pubdate or updateddate. If no items
        have either of these attributes this returns the current UTC date/time.
        """
        latest_date = None
        date_keys = ('updateddate', 'pubdate')

        for item in self.items:
            for date_key in date_keys:
                item_date = item.get(date_key)
                if item_date:
                    if latest_date is None or item_date > latest_date:
                        latest_date = item_date

        # datetime.now(tz=utc) is slower, as documented in django.utils.timezone.now
        return latest_date or datetime.datetime.utcnow().replace(tzinfo=utc)


class Enclosure(object):
    "Represents an RSS enclosure"
    def __init__(self, url, length, mime_type):
        "All args are expected to be Python Unicode objects"
        self.length, self.mime_type = length, mime_type
        self.url = iri_to_uri(url)


class RssFeed(SyndicationFeed):
    content_type = 'application/rss+xml; charset=utf-8'

    def write(self, outfile, encoding):
        handler = SimplerXMLGenerator(outfile, encoding)
        handler.startDocument()
        handler.startElement("rss", self.rss_attributes())
        handler.startElement("channel", self.root_attributes())
        self.add_root_elements(handler)
        self.write_items(handler)
        self.endChannelElement(handler)
        handler.endElement("rss")

    def rss_attributes(self):
        return {"version": self._version,
                "xmlns:atom": "http://www.w3.org/2005/Atom"}

    def write_items(self, handler):
        for item in self.items:
            handler.startElement('item', self.item_attributes(item))
            self.add_item_elements(handler, item)
            handler.endElement("item")

    def add_root_elements(self, handler):
        handler.addQuickElement("title", self.feed['title'])
        handler.addQuickElement("link", self.feed['link'])
        handler.addQuickElement("description", self.feed['description'])
        if self.feed['feed_url'] is not None:
            handler.addQuickElement("atom:link", None, {"rel": "self", "href": self.feed['feed_url']})
        if self.feed['language'] is not None:
            handler.addQuickElement("language", self.feed['language'])
        for cat in self.feed['categories']:
            handler.addQuickElement("category", cat)
        if self.feed['feed_copyright'] is not None:
            handler.addQuickElement("copyright", self.feed['feed_copyright'])
        handler.addQuickElement("lastBuildDate", rfc2822_date(self.latest_post_date()))
        if self.feed['ttl'] is not None:
            handler.addQuickElement("ttl", self.feed['ttl'])

    def endChannelElement(self, handler):
        handler.endElement("channel")


class RssUserland091Feed(RssFeed):
    _version = "0.91"

    def add_item_elements(self, handler, item):
        handler.addQuickElement("title", item['title'])
        handler.addQuickElement("link", item['link'])
        if item['description'] is not None:
            handler.addQuickElement("description", item['description'])


class Rss201rev2Feed(RssFeed):
    # Spec: http://blogs.law.harvard.edu/tech/rss
    _version = "2.0"

    def add_item_elements(self, handler, item):
        handler.addQuickElement("title", item['title'])
        handler.addQuickElement("link", item['link'])
        if item['description'] is not None:
            handler.addQuickElement("description", item['description'])

        # Author information.
        if item["author_name"] and item["author_email"]:
            handler.addQuickElement("author", "%s (%s)" % (item['author_email'], item['author_name']))
        elif item["author_email"]:
            handler.addQuickElement("author", item["author_email"])
        elif item["author_name"]:
            handler.addQuickElement(
                "dc:creator", item["author_name"], {"xmlns:dc": "http://purl.org/dc/elements/1.1/"}
            )

        if item['pubdate'] is not None:
            handler.addQuickElement("pubDate", rfc2822_date(item['pubdate']))
        if item['comments'] is not None:
            handler.addQuickElement("comments", item['comments'])
        if item['unique_id'] is not None:
            guid_attrs = {}
            if isinstance(item.get('unique_id_is_permalink'), bool):
                guid_attrs['isPermaLink'] = str(item['unique_id_is_permalink']).lower()
            handler.addQuickElement("guid", item['unique_id'], guid_attrs)
        if item['ttl'] is not None:
            handler.addQuickElement("ttl", item['ttl'])

        # Enclosure.
        if item['enclosures']:
            enclosures = list(item['enclosures'])
            if len(enclosures) > 1:
                raise ValueError(
                    "RSS feed items may only have one enclosure, see "
                    "http://www.rssboard.org/rss-profile#element-channel-item-enclosure"
                )
            enclosure = enclosures[0]
            handler.addQuickElement('enclosure', '', {
                'url': enclosure.url,
                'length': enclosure.length,
                'type': enclosure.mime_type,
            })

        # Categories.
        for cat in item['categories']:
            handler.addQuickElement("category", cat)


class Atom1Feed(SyndicationFeed):
    # Spec: https://tools.ietf.org/html/rfc4287
    content_type = 'application/atom+xml; charset=utf-8'
    ns = "http://www.w3.org/2005/Atom"

    def write(self, outfile, encoding):
        handler = SimplerXMLGenerator(outfile, encoding)
        handler.startDocument()
        handler.startElement('feed', self.root_attributes())
        self.add_root_elements(handler)
        self.write_items(handler)
        handler.endElement("feed")

    def root_attributes(self):
        if self.feed['language'] is not None:
            return {"xmlns": self.ns, "xml:lang": self.feed['language']}
        else:
            return {"xmlns": self.ns}

    def add_root_elements(self, handler):
        handler.addQuickElement("title", self.feed['title'])
        handler.addQuickElement("link", "", {"rel": "alternate", "href": self.feed['link']})
        if self.feed['feed_url'] is not None:
            handler.addQuickElement("link", "", {"rel": "self", "href": self.feed['feed_url']})
        handler.addQuickElement("id", self.feed['id'])
        handler.addQuickElement("updated", rfc3339_date(self.latest_post_date()))
        if self.feed['author_name'] is not None:
            handler.startElement("author", {})
            handler.addQuickElement("name", self.feed['author_name'])
            if self.feed['author_email'] is not None:
                handler.addQuickElement("email", self.feed['author_email'])
            if self.feed['author_link'] is not None:
                handler.addQuickElement("uri", self.feed['author_link'])
            handler.endElement("author")
        if self.feed['subtitle'] is not None:
            handler.addQuickElement("subtitle", self.feed['subtitle'])
        for cat in self.feed['categories']:
            handler.addQuickElement("category", "", {"term": cat})
        if self.feed['feed_copyright'] is not None:
            handler.addQuickElement("rights", self.feed['feed_copyright'])

    def write_items(self, handler):
        for item in self.items:
            handler.startElement("entry", self.item_attributes(item))
            self.add_item_elements(handler, item)
            handler.endElement("entry")

    def add_item_elements(self, handler, item):
        handler.addQuickElement("title", item['title'])
        handler.addQuickElement("link", "", {"href": item['link'], "rel": "alternate"})

        if item['pubdate'] is not None:
            handler.addQuickElement('published', rfc3339_date(item['pubdate']))

        if item['updateddate'] is not None:
            handler.addQuickElement('updated', rfc3339_date(item['updateddate']))

        # Author information.
        if item['author_name'] is not None:
            handler.startElement("author", {})
            handler.addQuickElement("name", item['author_name'])
            if item['author_email'] is not None:
                handler.addQuickElement("email", item['author_email'])
            if item['author_link'] is not None:
                handler.addQuickElement("uri", item['author_link'])
            handler.endElement("author")

        # Unique ID.
        if item['unique_id'] is not None:
            unique_id = item['unique_id']
        else:
            unique_id = get_tag_uri(item['link'], item['pubdate'])
        handler.addQuickElement("id", unique_id)

        # Summary.
        if item['description'] is not None:
            handler.addQuickElement("summary", item['description'], {"type": "html"})

        # Enclosures.
        for enclosure in item['enclosures']:
            handler.addQuickElement('link', '', {
                'rel': 'enclosure',
                'href': enclosure.url,
                'length': enclosure.length,
                'type': enclosure.mime_type,
            })

        # Categories.
        for cat in item['categories']:
            handler.addQuickElement("category", "", {"term": cat})

        # Rights.
        if item['item_copyright'] is not None:
            handler.addQuickElement("rights", item['item_copyright'])


# This isolates the decision of what the system default is, so calling code can
# do "feedgenerator.DefaultFeed" instead of "feedgenerator.Rss201rev2Feed".
DefaultFeed = Rss201rev2Feed
\ No newline at end of file
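
As orientation for the journal-view change further down, a minimal sketch of driving the vendored classes above. The feed and item values are invented placeholders; only the constructor, add_item(), writeString() and content_type usage come from the code in this hunk (Python 2-era, matching its StringIO import):

```python
import datetime

# Module path as added by this changeset.
from rhodecode.lib.feedgenerator.feedgenerator import Rss201rev2Feed

# Placeholder metadata, mirroring how the journal view builds its feeds.
feed = Rss201rev2Feed(
    title='example journal',
    link='http://example.invalid/journal',
    description='example journal rss feed',
    language='en-us',
    ttl='5')

feed.add_item(
    unique_id='journal:example-entry',
    title='someuser - pushed example-repo',
    pubdate=datetime.datetime(2019, 1, 1, 12, 0, 0),
    link='http://example.invalid/example-repo/changelog',
    description='<b>1 commit</b> pushed')

# writeString() renders the channel via SimplerXMLGenerator and returns the XML.
xml = feed.writeString('utf-8')

# The feed classes expose the response MIME type as `content_type`
# (the attribute the journal view switches to in the last hunk).
print(feed.content_type)  # 'application/rss+xml; charset=utf-8'
```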
@@ -0,0 +1,57 b''
"""
Utilities for XML generation/parsing.
"""

import six

from xml.sax.saxutils import XMLGenerator, quoteattr
from urllib import quote
from rhodecode.lib.utils import safe_str, safe_unicode


class SimplerXMLGenerator(XMLGenerator):
    def addQuickElement(self, name, contents=None, attrs=None):
        "Convenience method for adding an element with no children"
        if attrs is None:
            attrs = {}
        self.startElement(name, attrs)
        if contents is not None:
            self.characters(contents)
        self.endElement(name)

    def startElement(self, name, attrs):
        self._write('<' + name)
        # sort attributes for consistent output
        for (name, value) in sorted(attrs.items()):
            self._write(' %s=%s' % (name, quoteattr(value)))
        self._write(six.u('>'))


def iri_to_uri(iri):
    """
    Convert an Internationalized Resource Identifier (IRI) portion to a URI
    portion that is suitable for inclusion in a URL.
    This is the algorithm from section 3.1 of RFC 3987. However, since we are
    assuming input is either UTF-8 or unicode already, we can simplify things a
    little from the full method.
    Returns an ASCII string containing the encoded result.
    """
    # The list of safe characters here is constructed from the "reserved" and
    # "unreserved" characters specified in sections 2.2 and 2.3 of RFC 3986:
    #     reserved    = gen-delims / sub-delims
    #     gen-delims  = ":" / "/" / "?" / "#" / "[" / "]" / "@"
    #     sub-delims  = "!" / "$" / "&" / "'" / "(" / ")"
    #                   / "*" / "+" / "," / ";" / "="
    #     unreserved  = ALPHA / DIGIT / "-" / "." / "_" / "~"
    # Of the unreserved characters, urllib.quote already considers all but
    # the ~ safe.
    # The % character is also added to the list of safe characters here, as the
    # end of section 3.1 of RFC 3987 specifically mentions that % must not be
    # converted.
    if iri is None:
        return iri
    return quote(safe_str(iri), safe=b"/#%[]=:;$&()+,!?*@'~")


def force_text(text, strings_only=False):
    return safe_unicode(text)
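
A small sketch of the two helpers above that the feed classes rely on (illustrative only; it assumes safe_str/safe_unicode from rhodecode.lib.utils behave as UTF-8 encode/decode helpers, which this hunk does not show, and the URLs are placeholders):

```python
from StringIO import StringIO

from rhodecode.lib.feedgenerator.utils import SimplerXMLGenerator, iri_to_uri

# iri_to_uri() percent-encodes non-ASCII characters in a link while leaving
# the reserved URL characters listed in the comment above untouched.
print(iri_to_uri(u'http://example.invalid/repo/\u0142'))
# -> 'http://example.invalid/repo/%C5%82'

# SimplerXMLGenerator is a thin wrapper over xml.sax.saxutils.XMLGenerator
# that adds addQuickElement() and writes attributes in sorted order.
out = StringIO()
gen = SimplerXMLGenerator(out, 'utf-8')
gen.startDocument()
gen.addQuickElement('example', 'text', {'b': '2', 'a': '1'})
print(out.getvalue())  # ... <example a="1" b="2">text</example>
```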
@@ -1,388 +1,389 b''
 # -*- coding: utf-8 -*-
 
 # Copyright (C) 2010-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 
 import logging
 import itertools
 
-from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
+
 
 from pyramid.view import view_config
 from pyramid.httpexceptions import HTTPBadRequest
 from pyramid.response import Response
 from pyramid.renderers import render
 
 from rhodecode.apps._base import BaseAppView
 from rhodecode.model.db import (
     or_, joinedload, Repository, UserLog, UserFollowing, User, UserApiKeys)
 from rhodecode.model.meta import Session
 import rhodecode.lib.helpers as h
 from rhodecode.lib.helpers import SqlPage
 from rhodecode.lib.user_log_filter import user_log_filter
 from rhodecode.lib.auth import LoginRequired, NotAnonymous, CSRFRequired, HasRepoPermissionAny
 from rhodecode.lib.utils2 import safe_int, AttributeDict, md5_safe
+from rhodecode.lib.feedgenerator.feedgenerator import Atom1Feed, Rss201rev2Feed
 from rhodecode.model.scm import ScmModel
 
 log = logging.getLogger(__name__)
 
 
 class JournalView(BaseAppView):
 
     def load_default_context(self):
         c = self._get_local_tmpl_context(include_app_defaults=True)
 
         self._load_defaults(c.rhodecode_name)
 
         # TODO(marcink): what is this, why we need a global register ?
         c.search_term = self.request.GET.get('filter') or ''
         return c
 
     def _get_config(self, rhodecode_name):
         import rhodecode
         config = rhodecode.CONFIG
 
         return {
             'language': 'en-us',
             'feed_ttl': '5',  # TTL of feed,
             'feed_items_per_page':
                 safe_int(config.get('rss_items_per_page', 20)),
             'rhodecode_name': rhodecode_name
         }
 
     def _load_defaults(self, rhodecode_name):
         config = self._get_config(rhodecode_name)
         # common values for feeds
         self.language = config["language"]
         self.ttl = config["feed_ttl"]
         self.feed_items_per_page = config['feed_items_per_page']
         self.rhodecode_name = config['rhodecode_name']
 
     def _get_daily_aggregate(self, journal):
         groups = []
         for k, g in itertools.groupby(journal, lambda x: x.action_as_day):
             user_group = []
             # groupby username if it's a present value, else
             # fallback to journal username
             for _, g2 in itertools.groupby(
                     list(g), lambda x: x.user.username if x.user else x.username):
                 l = list(g2)
                 user_group.append((l[0].user, l))
 
             groups.append((k, user_group,))
 
         return groups
 
     def _get_journal_data(self, following_repos, search_term):
         repo_ids = [x.follows_repository.repo_id for x in following_repos
                     if x.follows_repository is not None]
         user_ids = [x.follows_user.user_id for x in following_repos
                     if x.follows_user is not None]
 
         filtering_criterion = None
 
         if repo_ids and user_ids:
             filtering_criterion = or_(UserLog.repository_id.in_(repo_ids),
                                       UserLog.user_id.in_(user_ids))
         if repo_ids and not user_ids:
             filtering_criterion = UserLog.repository_id.in_(repo_ids)
         if not repo_ids and user_ids:
             filtering_criterion = UserLog.user_id.in_(user_ids)
         if filtering_criterion is not None:
             journal = Session().query(UserLog)\
                 .options(joinedload(UserLog.user))\
                 .options(joinedload(UserLog.repository))
             # filter
             try:
                 journal = user_log_filter(journal, search_term)
             except Exception:
                 # we want this to crash for now
                 raise
             journal = journal.filter(filtering_criterion)\
                 .order_by(UserLog.action_date.desc())
         else:
             journal = []
 
         return journal
 
     def feed_uid(self, entry_id):
         return '{}:{}'.format('journal', md5_safe(entry_id))
 
     def _atom_feed(self, repos, search_term, public=True):
         _ = self.request.translate
         journal = self._get_journal_data(repos, search_term)
         if public:
             _link = h.route_url('journal_public_atom')
             _desc = '%s %s %s' % (self.rhodecode_name, _('public journal'),
                                   'atom feed')
         else:
             _link = h.route_url('journal_atom')
             _desc = '%s %s %s' % (self.rhodecode_name, _('journal'), 'atom feed')
 
         feed = Atom1Feed(
             title=_desc, link=_link, description=_desc,
             language=self.language, ttl=self.ttl)
 
         for entry in journal[:self.feed_items_per_page]:
             user = entry.user
             if user is None:
                 # fix deleted users
                 user = AttributeDict({'short_contact': entry.username,
                                       'email': '',
                                       'full_contact': ''})
             action, action_extra, ico = h.action_parser(
                 self.request, entry, feed=True)
             title = "%s - %s %s" % (user.short_contact, action(),
                                     entry.repository.repo_name)
             desc = action_extra()
             _url = h.route_url('home')
             if entry.repository is not None:
                 _url = h.route_url('repo_commits',
                                    repo_name=entry.repository.repo_name)
 
             feed.add_item(
                 unique_id=self.feed_uid(entry.user_log_id),
                 title=title,
                 pubdate=entry.action_date,
                 link=_url,
                 author_email=user.email,
                 author_name=user.full_contact,
                 description=desc)
 
         response = Response(feed.writeString('utf-8'))
-        response.content_type = feed.mime_type
+        response.content_type = feed.content_type
         return response
 
     def _rss_feed(self, repos, search_term, public=True):
         _ = self.request.translate
         journal = self._get_journal_data(repos, search_term)
         if public:
             _link = h.route_url('journal_public_atom')
             _desc = '%s %s %s' % (
                 self.rhodecode_name, _('public journal'), 'rss feed')
         else:
             _link = h.route_url('journal_atom')
             _desc = '%s %s %s' % (
                 self.rhodecode_name, _('journal'), 'rss feed')
 
         feed = Rss201rev2Feed(
             title=_desc, link=_link, description=_desc,
             language=self.language, ttl=self.ttl)
 
         for entry in journal[:self.feed_items_per_page]:
             user = entry.user
             if user is None:
                 # fix deleted users
                 user = AttributeDict({'short_contact': entry.username,
                                       'email': '',
                                       'full_contact': ''})
             action, action_extra, ico = h.action_parser(
                 self.request, entry, feed=True)
             title = "%s - %s %s" % (user.short_contact, action(),
                                     entry.repository.repo_name)
             desc = action_extra()
             _url = h.route_url('home')
             if entry.repository is not None:
                 _url = h.route_url('repo_commits',
                                    repo_name=entry.repository.repo_name)
 
             feed.add_item(
                 unique_id=self.feed_uid(entry.user_log_id),
                 title=title,
                 pubdate=entry.action_date,
                 link=_url,
                 author_email=user.email,
                 author_name=user.full_contact,
                 description=desc)
 
         response = Response(feed.writeString('utf-8'))
-        response.content_type = feed.mime_type
+        response.content_type = feed.content_type
         return response
 
     @LoginRequired()
     @NotAnonymous()
     @view_config(
         route_name='journal', request_method='GET',
         renderer=None)
     def journal(self):
         c = self.load_default_context()
 
         p = safe_int(self.request.GET.get('page', 1), 1)
         c.user = User.get(self._rhodecode_user.user_id)
         following = Session().query(UserFollowing)\
             .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\
             .options(joinedload(UserFollowing.follows_repository))\
             .all()
 
         journal = self._get_journal_data(following, c.search_term)
 
         def url_generator(page_num):
             query_params = {
                 'page': page_num,
                 'filter': c.search_term
             }
             return self.request.current_route_path(_query=query_params)
 
         c.journal_pager = SqlPage(
             journal, page=p, items_per_page=20, url_maker=url_generator)
         c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager)
 
         c.journal_data = render(
             'rhodecode:templates/journal/journal_data.mako',
             self._get_template_context(c), self.request)
 
         if self.request.is_xhr:
             return Response(c.journal_data)
 
         html = render(
             'rhodecode:templates/journal/journal.mako',
             self._get_template_context(c), self.request)
         return Response(html)
 
     @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
     @NotAnonymous()
     @view_config(
         route_name='journal_atom', request_method='GET',
         renderer=None)
     def journal_atom(self):
         """
         Produce an atom-1.0 feed via feedgenerator module
         """
         c = self.load_default_context()
         following_repos = Session().query(UserFollowing)\
             .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\
             .options(joinedload(UserFollowing.follows_repository))\
             .all()
         return self._atom_feed(following_repos, c.search_term, public=False)
 
     @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
     @NotAnonymous()
     @view_config(
         route_name='journal_rss', request_method='GET',
         renderer=None)
     def journal_rss(self):
         """
         Produce an rss feed via feedgenerator module
         """
         c = self.load_default_context()
         following_repos = Session().query(UserFollowing)\
             .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\
             .options(joinedload(UserFollowing.follows_repository))\
             .all()
         return self._rss_feed(following_repos, c.search_term, public=False)
 
     @LoginRequired()
     @NotAnonymous()
     @CSRFRequired()
     @view_config(
         route_name='toggle_following', request_method='POST',
         renderer='json_ext')
     def toggle_following(self):
         user_id = self.request.POST.get('follows_user_id')
         if user_id:
             try:
                 ScmModel().toggle_following_user(user_id, self._rhodecode_user.user_id)
                 Session().commit()
                 return 'ok'
             except Exception:
                 raise HTTPBadRequest()
 
         repo_id = self.request.POST.get('follows_repo_id')
         repo = Repository.get_or_404(repo_id)
         perm_set = ['repository.read', 'repository.write', 'repository.admin']
         has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'RepoWatch check')
         if repo and has_perm:
             try:
                 ScmModel().toggle_following_repo(repo_id, self._rhodecode_user.user_id)
                 Session().commit()
                 return 'ok'
             except Exception:
                 raise HTTPBadRequest()
 
         raise HTTPBadRequest()
 
     @LoginRequired()
     @view_config(
         route_name='journal_public', request_method='GET',
         renderer=None)
     def journal_public(self):
         c = self.load_default_context()
         # Return a rendered template
         p = safe_int(self.request.GET.get('page', 1), 1)
 
         c.following = Session().query(UserFollowing)\
             .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\
             .options(joinedload(UserFollowing.follows_repository))\
             .all()
 
         journal = self._get_journal_data(c.following, c.search_term)
 
         def url_generator(page_num):
             query_params = {
                 'page': page_num
             }
             return self.request.current_route_path(_query=query_params)
 
         c.journal_pager = SqlPage(
             journal, page=p, items_per_page=20, url_maker=url_generator)
         c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager)
 
         c.journal_data = render(
             'rhodecode:templates/journal/journal_data.mako',
             self._get_template_context(c), self.request)
 
         if self.request.is_xhr:
             return Response(c.journal_data)
 
         html = render(
             'rhodecode:templates/journal/public_journal.mako',
             self._get_template_context(c), self.request)
         return Response(html)
 
     @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
     @view_config(
         route_name='journal_public_atom', request_method='GET',
         renderer=None)
     def journal_public_atom(self):
         """
         Produce an atom-1.0 feed via feedgenerator module
         """
         c = self.load_default_context()
         following_repos = Session().query(UserFollowing)\
             .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\
             .options(joinedload(UserFollowing.follows_repository))\
             .all()
 
         return self._atom_feed(following_repos, c.search_term)
 
     @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
     @view_config(
         route_name='journal_public_rss', request_method='GET',
         renderer=None)
     def journal_public_rss(self):
         """
         Produce an rss2 feed via feedgenerator module
         """
         c = self.load_default_context()
         following_repos = Session().query(UserFollowing)\
             .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\
             .options(joinedload(UserFollowing.follows_repository))\
386 | .all() |
|
387 | .all() | |
387 |
|
388 | |||
388 | return self._rss_feed(following_repos, c.search_term) |
|
389 | return self._rss_feed(following_repos, c.search_term) |
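The journal_public_atom and journal_public_rss views above hand the followed repositories off to _atom_feed and _rss_feed helpers that are not part of this hunk. As a rough, non-authoritative sketch of what such a helper could look like when built on the same vendored feedgenerator classes this change introduces (Atom1Feed, add_item, writeString, content_type); the build_journal_atom name, journal_entries argument, and all entry attributes are hypothetical stand-ins, not the project's actual helper:

from rhodecode.lib.feedgenerator import Atom1Feed


def build_journal_atom(journal_entries, feed_url, language='en-us'):
    # Assemble an Atom feed the same way the repo feed view does:
    # construct the feed object, add one item per entry, then serialize.
    feed = Atom1Feed(
        title='public journal',                 # placeholder title
        link=feed_url,
        description='public journal changes',   # placeholder description
        language=language,
    )
    for entry in journal_entries:
        feed.add_item(
            unique_id=str(entry.user_log_id),    # hypothetical attribute
            title=entry.action,                  # hypothetical attribute
            author_name=entry.username,          # hypothetical attribute
            description=entry.action_data or '', # hypothetical attribute
            link=feed_url,
            pubdate=entry.action_date,           # hypothetical attribute
        )
    return feed.content_type, feed.writeString('utf-8')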
@@ -1,137 +1,137 b''
1 | # -*- coding: utf-8 -*-
2 |
3 | # Copyright (C) 2010-2019 RhodeCode GmbH
4 | #
5 | # This program is free software: you can redistribute it and/or modify
6 | # it under the terms of the GNU Affero General Public License, version 3
7 | # (only), as published by the Free Software Foundation.
8 | #
9 | # This program is distributed in the hope that it will be useful,
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | # GNU General Public License for more details.
13 | #
14 | # You should have received a copy of the GNU Affero General Public License
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 | #
17 | # This program is dual-licensed. If you wish to learn more about the
18 | # RhodeCode Enterprise Edition, including its added features, Support services,
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/
20 |
21 | import pytest
22 | from rhodecode.model.auth_token import AuthTokenModel
23 | from rhodecode.tests import TestController
24 |
25 |
26 | def route_path(name, params=None, **kwargs):
27 |     import urllib
28 |
29 |     base_url = {
30 |         'rss_feed_home': '/{repo_name}/feed-rss',
31 |         'atom_feed_home': '/{repo_name}/feed-atom',
32 |         'rss_feed_home_old': '/{repo_name}/feed/rss',
33 |         'atom_feed_home_old': '/{repo_name}/feed/atom',
34 |     }[name].format(**kwargs)
35 |
36 |     if params:
37 |         base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
38 |     return base_url
39 |
40 |
41 | class TestFeedView(TestController):
42 |
43 |     @pytest.mark.parametrize("feed_type,response_types,content_type",[
-44 |         ('rss', ['<rss version="2.0"
+44 |         ('rss', ['<rss version="2.0"'],
45 |          "application/rss+xml"),
46 |         ('atom', ['xmlns="http://www.w3.org/2005/Atom"', 'xml:lang="en-us"'],
47 |          "application/atom+xml"),
48 |     ])
49 |     def test_feed(self, backend, feed_type, response_types, content_type):
50 |         self.log_user()
51 |         response = self.app.get(
52 |             route_path('{}_feed_home'.format(feed_type),
53 |                        repo_name=backend.repo_name))
54 |
55 |         for content in response_types:
56 |             response.mustcontain(content)
57 |
58 |         assert response.content_type == content_type
59 |
60 |     @pytest.mark.parametrize("feed_type, content_type", [
61 |         ('rss', "application/rss+xml"),
62 |         ('atom', "application/atom+xml")
63 |     ])
64 |     def test_feed_with_auth_token(
65 |             self, backend, user_admin, feed_type, content_type):
66 |         auth_token = user_admin.feed_token
67 |         assert auth_token != ''
68 |
69 |         response = self.app.get(
70 |             route_path(
71 |                 '{}_feed_home'.format(feed_type),
72 |                 repo_name=backend.repo_name,
73 |                 params=dict(auth_token=auth_token)),
74 |             status=200)
75 |
76 |         assert response.content_type == content_type
77 |
78 |     @pytest.mark.parametrize("feed_type, content_type", [
79 |         ('rss', "application/rss+xml"),
80 |         ('atom', "application/atom+xml")
81 |     ])
82 |     def test_feed_with_auth_token_by_uid(
83 |             self, backend, user_admin, feed_type, content_type):
84 |         auth_token = user_admin.feed_token
85 |         assert auth_token != ''
86 |
87 |         response = self.app.get(
88 |             route_path(
89 |                 '{}_feed_home'.format(feed_type),
90 |                 repo_name='_{}'.format(backend.repo.repo_id),
91 |                 params=dict(auth_token=auth_token)),
92 |             status=200)
93 |
94 |         assert response.content_type == content_type
95 |
96 |     @pytest.mark.parametrize("feed_type, content_type", [
97 |         ('rss', "application/rss+xml"),
98 |         ('atom', "application/atom+xml")
99 |     ])
100 |     def test_feed_old_urls_with_auth_token(
101 |             self, backend, user_admin, feed_type, content_type):
102 |         auth_token = user_admin.feed_token
103 |         assert auth_token != ''
104 |
105 |         response = self.app.get(
106 |             route_path(
107 |                 '{}_feed_home_old'.format(feed_type),
108 |                 repo_name=backend.repo_name,
109 |                 params=dict(auth_token=auth_token)),
110 |             status=200)
111 |
112 |         assert response.content_type == content_type
113 |
114 |     @pytest.mark.parametrize("feed_type", ['rss', 'atom'])
115 |     def test_feed_with_auth_token_of_wrong_type(
116 |             self, backend, user_util, feed_type):
117 |         user = user_util.create_user()
118 |         auth_token = AuthTokenModel().create(
119 |             user.user_id, u'test-token', -1, AuthTokenModel.cls.ROLE_API)
120 |         auth_token = auth_token.api_key
121 |
122 |         self.app.get(
123 |             route_path(
124 |                 '{}_feed_home'.format(feed_type),
125 |                 repo_name=backend.repo_name,
126 |                 params=dict(auth_token=auth_token)),
127 |             status=302)
128 |
129 |         auth_token = AuthTokenModel().create(
130 |             user.user_id, u'test-token', -1, AuthTokenModel.cls.ROLE_FEED)
131 |         auth_token = auth_token.api_key
132 |         self.app.get(
133 |             route_path(
134 |                 '{}_feed_home'.format(feed_type),
135 |                 repo_name=backend.repo_name,
136 |                 params=dict(auth_token=auth_token)),
137 |             status=200)
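The token-role tests above pin down the external contract of these routes: a feed URL answers 200 with the expected content type only when the auth_token query parameter carries a ROLE_FEED token, and redirects (302) for any other token role. A small client-side illustration of that contract, not part of the change itself; the host, repository name, and token value are placeholders:

import requests

# Placeholders: substitute a real host, repository name and feed token.
FEED_URL = 'https://code.example.com/my-repo/feed-rss'
FEED_TOKEN = 'feed-role-token'

response = requests.get(FEED_URL, params={'auth_token': FEED_TOKEN}, timeout=10)

# Mirrors the assertions in the tests: a ROLE_FEED token yields the feed,
# while a token of any other role would be redirected to the login page.
assert response.status_code == 200
assert response.headers['Content-Type'].startswith('application/rss+xml')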
@@ -1,205 +1,212 b''
1 | # -*- coding: utf-8 -*-
2 |
3 | # Copyright (C) 2017-2019 RhodeCode GmbH
4 | #
5 | # This program is free software: you can redistribute it and/or modify
6 | # it under the terms of the GNU Affero General Public License, version 3
7 | # (only), as published by the Free Software Foundation.
8 | #
9 | # This program is distributed in the hope that it will be useful,
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | # GNU General Public License for more details.
13 | #
14 | # You should have received a copy of the GNU Affero General Public License
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 | #
17 | # This program is dual-licensed. If you wish to learn more about the
18 | # RhodeCode Enterprise Edition, including its added features, Support services,
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/
20 | import pytz
21 | import logging
22 |
23 | from pyramid.view import view_config
24 | from pyramid.response import Response
-25 | from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed
25 |
26 |
27 | from rhodecode.apps._base import RepoAppView
+28 | from rhodecode.lib.feedgenerator import Rss201rev2Feed, Atom1Feed
29 | from rhodecode.lib import audit_logger
30 | from rhodecode.lib import rc_cache
31 | from rhodecode.lib import helpers as h
32 | from rhodecode.lib.auth import (
33 |     LoginRequired, HasRepoPermissionAnyDecorator)
34 | from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer
35 | from rhodecode.lib.utils2 import str2bool, safe_int, md5_safe
36 | from rhodecode.model.db import UserApiKeys, CacheKey
37 |
38 | log = logging.getLogger(__name__)
39 |
40 |
41 | class RepoFeedView(RepoAppView):
42 |     def load_default_context(self):
43 |         c = self._get_local_tmpl_context()
44 |         self._load_defaults()
45 |         return c
46 |
47 |     def _get_config(self):
48 |         import rhodecode
49 |         config = rhodecode.CONFIG
50 |
51 |         return {
52 |             'language': 'en-us',
53 |             'feed_ttl': '5', # TTL of feed,
54 |             'feed_include_diff':
55 |                 str2bool(config.get('rss_include_diff', False)),
56 |             'feed_items_per_page':
57 |                 safe_int(config.get('rss_items_per_page', 20)),
58 |             'feed_diff_limit':
59 |                 # we need to protect from parsing huge diffs here other way
60 |                 # we can kill the server
61 |                 safe_int(config.get('rss_cut_off_limit', 32 * 1024)),
62 |         }
63 |
64 |     def _load_defaults(self):
65 |         _ = self.request.translate
66 |         config = self._get_config()
67 |         # common values for feeds
68 |         self.description = _('Changes on %s repository')
+69 |         self.title = _('%s %s feed') % (self.db_repo_name, '%s')
70 |         self.language = config["language"]
71 |         self.ttl = config["feed_ttl"]
72 |         self.feed_include_diff = config['feed_include_diff']
73 |         self.feed_diff_limit = config['feed_diff_limit']
74 |         self.feed_items_per_page = config['feed_items_per_page']
75 |
76 |     def _changes(self, commit):
77 |         diff_processor = DiffProcessor(
78 |             commit.diff(), diff_limit=self.feed_diff_limit)
79 |         _parsed = diff_processor.prepare(inline_diff=False)
80 |         limited_diff = isinstance(_parsed, LimitedDiffContainer)
81 |
82 |         return diff_processor, _parsed, limited_diff
83 |
84 |     def _get_title(self, commit):
+85 |         return h.chop_at_smart(commit.message, '\n', suffix_if_chopped='...')
86 |
87 |     def _get_description(self, commit):
88 |         _renderer = self.request.get_partial_renderer(
89 |             'rhodecode:templates/feed/atom_feed_entry.mako')
90 |         diff_processor, parsed_diff, limited_diff = self._changes(commit)
91 |         filtered_parsed_diff, has_hidden_changes = self.path_filter.filter_patchset(parsed_diff)
92 |         return _renderer(
93 |             'body',
94 |             commit=commit,
95 |             parsed_diff=filtered_parsed_diff,
96 |             limited_diff=limited_diff,
97 |             feed_include_diff=self.feed_include_diff,
98 |             diff_processor=diff_processor,
99 |             has_hidden_changes=has_hidden_changes
100 |         )
101 |
102 |     def _set_timezone(self, date, tzinfo=pytz.utc):
103 |         if not getattr(date, "tzinfo", None):
104 |             date.replace(tzinfo=tzinfo)
105 |         return date
106 |
107 |     def _get_commits(self):
-107 |         return list(self.rhodecode_vcs_repo[-self.feed_items_per_page:])
+108 |         pre_load = ['author', 'branch', 'date', 'message', 'parents']
+109 |         collection = self.rhodecode_vcs_repo.get_commits(
+110 |             branch_name=None, show_hidden=False, pre_load=pre_load,
+111 |             translate_tags=False)
+112 |
+113 |         return list(collection[-self.feed_items_per_page:])
114 |
115 |     def uid(self, repo_id, commit_id):
116 |         return '{}:{}'.format(md5_safe(repo_id), md5_safe(commit_id))
117 |
118 |     @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
119 |     @HasRepoPermissionAnyDecorator(
120 |         'repository.read', 'repository.write', 'repository.admin')
121 |     @view_config(route_name='atom_feed_home', request_method='GET', renderer=None)
122 |     @view_config(route_name='atom_feed_home_old', request_method='GET', renderer=None)
123 |     def atom(self):
124 |         """
125 |         Produce an atom-1.0 feed via feedgenerator module
126 |         """
127 |         self.load_default_context()
+128 |         force_recache = self.get_recache_flag()
129 |
130 |         cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id)
-124 |         condition = not self.path_filter.is_enabled
+131 |         condition = not (self.path_filter.is_enabled or force_recache)
132 |         region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
133 |
134 |         @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
135 |                                                condition=condition)
-129 |         def generate_atom_feed(repo_id, _repo_name, commit_id, _feed_type):
+136 |         def generate_atom_feed(repo_id, _repo_name, _commit_id, _feed_type):
137 |             feed = Atom1Feed(
+138 |                 title=self.title % 'atom',
+139 |                 link=h.route_url('repo_summary', repo_name=_repo_name),
+140 |                 description=self.description % _repo_name,
+141 |                 language=self.language,
+142 |                 ttl=self.ttl
+143 |             )
+144 |             for commit in reversed(self._get_commits()):
+145 |                 date = self._set_timezone(commit.date)
+146 |                 feed.add_item(
+147 |                     unique_id=self.uid(repo_id, commit.raw_id),
+148 |                     title=self._get_title(commit),
+149 |                     author_name=commit.author,
+150 |                     description=self._get_description(commit),
+151 |                     link=h.route_url(
+152 |                         'repo_commit', repo_name=_repo_name,
+153 |                         commit_id=commit.raw_id),
+154 |                     pubdate=date,)
+155 |
+156 |             return feed.content_type, feed.writeString('utf-8')
+157 |
+158 |         commit_id = self.db_repo.changeset_cache.get('raw_id')
+159 |         content_type, feed = generate_atom_feed(
+160 |             self.db_repo.repo_id, self.db_repo.repo_name, commit_id, 'atom')
+161 |
+162 |         response = Response(feed)
+163 |         response.content_type = content_type
+164 |         return response
+165 |
+166 |     @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
+167 |     @HasRepoPermissionAnyDecorator(
+168 |         'repository.read', 'repository.write', 'repository.admin')
+169 |     @view_config(route_name='rss_feed_home', request_method='GET', renderer=None)
+170 |     @view_config(route_name='rss_feed_home_old', request_method='GET', renderer=None)
+171 |     def rss(self):
+172 |         """
+173 |         Produce an rss2 feed via feedgenerator module
+174 |         """
+175 |         self.load_default_context()
+176 |         force_recache = self.get_recache_flag()
+177 |
+178 |         cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id)
+179 |         condition = not (self.path_filter.is_enabled or force_recache)
+180 |         region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
+181 |
+182 |         @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
+183 |                                                condition=condition)
+184 |         def generate_rss_feed(repo_id, _repo_name, _commit_id, _feed_type):
+185 |             feed = Rss201rev2Feed(
+186 |                 title=self.title % 'rss',
187 |                 link=h.route_url('repo_summary', repo_name=_repo_name),
188 |                 description=self.description % _repo_name,
189 |                 language=self.language,
190 |                 ttl=self.ttl
191 |             )
192 |
193 |             for commit in reversed(self._get_commits()):
194 |                 date = self._set_timezone(commit.date)
195 |                 feed.add_item(
196 |                     unique_id=self.uid(repo_id, commit.raw_id),
197 |                     title=self._get_title(commit),
198 |                     author_name=commit.author,
199 |                     description=self._get_description(commit),
200 |                     link=h.route_url(
201 |                         'repo_commit', repo_name=_repo_name,
202 |                         commit_id=commit.raw_id),
203 |                     pubdate=date,)
+204 |             return feed.content_type, feed.writeString('utf-8')
-150 |             return feed.mime_type, feed.writeString('utf-8')
205 |
206 |         commit_id = self.db_repo.changeset_cache.get('raw_id')
+207 |         content_type, feed = generate_rss_feed(
-154 |             self.db_repo.repo_id, self.db_repo.repo_name, commit_id, 'atom')
-155 |
-156 |         response = Response(feed)
-157 |         response.content_type = mime_type
-158 |         return response
-159 |
-160 |     @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
-161 |     @HasRepoPermissionAnyDecorator(
-162 |         'repository.read', 'repository.write', 'repository.admin')
-163 |     @view_config(route_name='rss_feed_home', request_method='GET', renderer=None)
-164 |     @view_config(route_name='rss_feed_home_old', request_method='GET', renderer=None)
-165 |     def rss(self):
-166 |         """
-167 |         Produce an rss2 feed via feedgenerator module
-168 |         """
-169 |         self.load_default_context()
-170 |
-171 |         cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id)
-172 |         condition = not self.path_filter.is_enabled
-173 |         region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
-174 |
-175 |         @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
-176 |                                                condition=condition)
-177 |         def generate_rss_feed(repo_id, _repo_name, commit_id, _feed_type):
-178 |             feed = Rss201rev2Feed(
-179 |                 title=self.title % _repo_name,
-180 |                 link=h.route_url('repo_summary', repo_name=_repo_name),
-181 |                 description=self.description % _repo_name,
-182 |                 language=self.language,
-183 |                 ttl=self.ttl
-184 |             )
-185 |
-186 |             for commit in reversed(self._get_commits()):
-187 |                 date = self._set_timezone(commit.date)
-188 |                 feed.add_item(
-189 |                     unique_id=self.uid(repo_id, commit.raw_id),
-190 |                     title=self._get_title(commit),
-191 |                     author_name=commit.author,
-192 |                     description=self._get_description(commit),
-193 |                     link=h.route_url(
-194 |                         'repo_commit', repo_name=_repo_name,
-195 |                         commit_id=commit.raw_id),
-196 |                     pubdate=date,)
-197 |             return feed.mime_type, feed.writeString('utf-8')
-198 |
-199 |         commit_id = self.db_repo.changeset_cache.get('raw_id')
-200 |         mime_type, feed = generate_rss_feed(
208 |             self.db_repo.repo_id, self.db_repo.repo_name, commit_id, 'rss')
209 |
210 |         response = Response(feed)
+211 |         response.content_type = content_type
212 |         return response
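A note on the caching pattern introduced above: the generated feed is cached per repository namespace, and the cached function receives the repository's latest commit id as an argument, so pushing new commits changes the cache key and the feed is rebuilt; the condition flag simply bypasses the cache when path filtering is active or a re-cache is forced. The following is a simplified stand-in for that key-by-arguments idea, not rc_cache's actual implementation (which uses dogpile-backed regions, not a dict):

_feed_cache = {}


def cached_feed(repo_id, repo_name, commit_id, feed_type, build, use_cache=True):
    # The latest commit id is part of the key, so a new push naturally
    # invalidates the cached feed without any explicit purge.
    key = (repo_id, commit_id, feed_type)
    if use_cache and key in _feed_cache:
        return _feed_cache[key]
    content_type, body = build(repo_id, repo_name, commit_id, feed_type)
    if use_cache:
        _feed_cache[key] = (content_type, body)
    return content_type, body

Here use_cache=False plays the role of condition evaluating to false in the views above (path filtering enabled or force_recache set), in which case the feed is rebuilt on every request.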