--- a/kallithea/controllers/feed.py
+++ b/kallithea/controllers/feed.py
@@ -1,177 +1,173 @@
 # -*- coding: utf-8 -*-
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 kallithea.controllers.feed
 ~~~~~~~~~~~~~~~~~~~~~~~~~~

 Feed controller for Kallithea

 This file was forked by the Kallithea project in July 2014.
 Original author and date, and relevant copyright and licensing information is below:
 :created_on: Apr 23, 2010
 :author: marcink
 :copyright: (c) 2013 RhodeCode GmbH, and others.
 :license: GPLv3, see LICENSE.md for more details.
 """


 import logging

 from pylons import response, tmpl_context as c
 from pylons.i18n.translation import _

 from beaker.cache import cache_region, region_invalidate
 from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed

+from kallithea import CONFIG
 from kallithea.lib import helpers as h
 from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
 from kallithea.lib.base import BaseRepoController
 from kallithea.lib.diffs import DiffProcessor, LimitedDiffContainer
 from kallithea.model.db import CacheInvalidation
 from kallithea.lib.utils2 import safe_int, str2bool, safe_unicode

 log = logging.getLogger(__name__)


+language = 'en-us'
+ttl = "5"
+
+
 class FeedController(BaseRepoController):

     @LoginRequired(api_access=True)
     @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                    'repository.admin')
     def __before__(self):
         super(FeedController, self).__before__()
-        #common values for feeds
-        self.description = _('Changes on %s repository')
-        self.title = self.title = _('%s %s feed') % (c.site_name, '%s')
-        self.language = 'en-us'
-        self.ttl = "5"
-        import kallithea
-        CONF = kallithea.CONFIG
-        self.include_diff = str2bool(CONF.get('rss_include_diff', False))
-        self.feed_nr = safe_int(CONF.get('rss_items_per_page', 20))
-        # we need to protect from parsing huge diffs here other way
-        # we can kill the server
-        self.feed_diff_limit = safe_int(CONF.get('rss_cut_off_limit', 32 * 1024))

     def _get_title(self, cs):
         return h.shorter(cs.message, 160)

     def __changes(self, cs):
         changes = []
+        rss_cut_off_limit = safe_int(CONFIG.get('rss_cut_off_limit', 32 * 1024))
         diff_processor = DiffProcessor(cs.diff(),
-                                       diff_limit=self.feed_diff_limit)
+                                       diff_limit=rss_cut_off_limit)
         _parsed = diff_processor.prepare(inline_diff=False)
         limited_diff = False
         if isinstance(_parsed, LimitedDiffContainer):
             limited_diff = True

         for st in _parsed:
             st.update({'added': st['stats']['added'],
                        'removed': st['stats']['deleted']})
             changes.append('\n %(operation)s %(filename)s '
                            '(%(added)s lines added, %(removed)s lines removed)'
                            % st)
         if limited_diff:
             changes = changes + ['\n ' +
                                  _('Changeset was too big and was cut off...')]
         return diff_processor, changes

     def __get_desc(self, cs):
         desc_msg = [(_('%s committed on %s')
                      % (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>']
         #branches, tags, bookmarks
         if cs.branch:
             desc_msg.append('branch: %s<br/>' % cs.branch)
         if h.is_hg(c.db_repo_scm_instance):
             for book in cs.bookmarks:
                 desc_msg.append('bookmark: %s<br/>' % book)
         for tag in cs.tags:
             desc_msg.append('tag: %s<br/>' % tag)
         diff_processor, changes = self.__changes(cs)
         # rev link
         _url = h.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
                                revision=cs.raw_id)
         desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))

         desc_msg.append('<pre>')
         desc_msg.append(h.urlify_text(cs.message))
         desc_msg.append('\n')
         desc_msg.extend(changes)
-        if self.include_diff:
+        if str2bool(CONFIG.get('rss_include_diff', False)):
             desc_msg.append('\n\n')
             desc_msg.append(diff_processor.as_raw())
         desc_msg.append('</pre>')
         return map(safe_unicode, desc_msg)

     def atom(self, repo_name):
         """Produce an atom-1.0 feed via feedgenerator module"""

         @cache_region('long_term', '_get_feed_from_cache')
         def _get_feed_from_cache(key, kind):
             feed = Atom1Feed(
-                title=self.title % repo_name,
+                title=_('%s %s feed') % (c.site_name, repo_name),
                 link=h.canonical_url('summary_home', repo_name=repo_name),
-                description=self.description % repo_name,
-                language=self.language,
-                ttl=self.ttl
+                description=_('Changes on %s repository') % repo_name,
+                language=language,
+                ttl=ttl
             )

-            for cs in reversed(list(c.db_repo_scm_instance[-self.feed_nr:])):
+            rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
+            for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
                 feed.add_item(title=self._get_title(cs),
                               link=h.canonical_url('changeset_home', repo_name=repo_name,
                                                    revision=cs.raw_id),
                               author_name=cs.author,
                               description=''.join(self.__get_desc(cs)),
                               pubdate=cs.date,
                               )

             response.content_type = feed.mime_type
             return feed.writeString('utf-8')

         kind = 'ATOM'
         valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
         if not valid:
             region_invalidate(_get_feed_from_cache, None, '_get_feed_from_cache', repo_name, kind)
         return _get_feed_from_cache(repo_name, kind)

     def rss(self, repo_name):
         """Produce an rss2 feed via feedgenerator module"""

         @cache_region('long_term', '_get_feed_from_cache')
         def _get_feed_from_cache(key, kind):
             feed = Rss201rev2Feed(
-                title=self.title % repo_name,
+                title=_('%s %s feed') % (c.site_name, repo_name),
                 link=h.canonical_url('summary_home', repo_name=repo_name),
-                description=self.description % repo_name,
-                language=self.language,
-                ttl=self.ttl
+                description=_('Changes on %s repository') % repo_name,
+                language=language,
+                ttl=ttl
             )

-            for cs in reversed(list(c.db_repo_scm_instance[-self.feed_nr:])):
+            rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
+            for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
                 feed.add_item(title=self._get_title(cs),
                               link=h.canonical_url('changeset_home', repo_name=repo_name,
                                                    revision=cs.raw_id),
                               author_name=cs.author,
                               description=''.join(self.__get_desc(cs)),
                               pubdate=cs.date,
                               )

             response.content_type = feed.mime_type
             return feed.writeString('utf-8')

         kind = 'RSS'
         valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
         if not valid:
             region_invalidate(_get_feed_from_cache, None, '_get_feed_from_cache', repo_name, kind)
         return _get_feed_from_cache(repo_name, kind)
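
Note on the feed.py hunk above: the per-request attributes that __before__ used to cache (self.include_diff, self.feed_nr, self.feed_diff_limit, self.title, self.description) are removed, settings are read from kallithea.CONFIG at the point of use, and language/ttl become module-level constants. A minimal sketch of that lookup pattern, assuming the same setting names and defaults as in the diff (the helper name _feed_settings is hypothetical, for illustration only):

    from kallithea import CONFIG
    from kallithea.lib.utils2 import safe_int, str2bool

    def _feed_settings():
        # Hypothetical helper: read feed-related settings lazily on each request
        # instead of caching them on the controller instance in __before__().
        return {
            'include_diff': str2bool(CONFIG.get('rss_include_diff', False)),
            'items_per_page': safe_int(CONFIG.get('rss_items_per_page', 20)),
            # guard against parsing huge diffs, which could overload the server
            'cut_off_limit': safe_int(CONFIG.get('rss_cut_off_limit', 32 * 1024)),
        }

The change itself simply inlines these CONFIG.get() calls where the values are needed, which keeps the cached _get_feed_from_cache closure free of per-instance state.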
--- a/kallithea/controllers/journal.py
+++ b/kallithea/controllers/journal.py
@@ -1,318 +1,320 @@
 # -*- coding: utf-8 -*-
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 kallithea.controllers.journal
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

 Journal controller

 This file was forked by the Kallithea project in July 2014.
 Original author and date, and relevant copyright and licensing information is below:
 :created_on: Nov 21, 2010
 :author: marcink
 :copyright: (c) 2013 RhodeCode GmbH, and others.
 :license: GPLv3, see LICENSE.md for more details.

 """

 import logging
 import traceback
 from itertools import groupby

 from sqlalchemy import or_
 from sqlalchemy.orm import joinedload
 from sqlalchemy.sql.expression import func

 from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed

 from webob.exc import HTTPBadRequest
 from pylons import request, tmpl_context as c, response
 from pylons.i18n.translation import _

 from kallithea.config.routing import url
 from kallithea.controllers.admin.admin import _journal_filter
 from kallithea.model.db import UserLog, UserFollowing, Repository, User
 from kallithea.model.meta import Session
 from kallithea.model.repo import RepoModel
 import kallithea.lib.helpers as h
 from kallithea.lib.auth import LoginRequired, NotAnonymous
 from kallithea.lib.base import BaseController, render
 from kallithea.lib.compat import json
 from kallithea.lib.page import Page
 from kallithea.lib.utils2 import safe_int, AttributeDict

 log = logging.getLogger(__name__)


+language = 'en-us'
+ttl = "5"
+feed_nr = 20
+
+
 class JournalController(BaseController):

     def __before__(self):
         super(JournalController, self).__before__()
-        self.language = 'en-us'
-        self.ttl = "5"
-        self.feed_nr = 20
         c.search_term = request.GET.get('filter')

     def _get_daily_aggregate(self, journal):
         groups = []
         for k, g in groupby(journal, lambda x: x.action_as_day):
             user_group = []
             #groupby username if it's a present value, else fallback to journal username
             for _unused, g2 in groupby(list(g), lambda x: x.user.username if x.user else x.username):
                 l = list(g2)
                 user_group.append((l[0].user, l))

             groups.append((k, user_group,))

         return groups

     def _get_journal_data(self, following_repos):
         repo_ids = [x.follows_repository_id for x in following_repos
                     if x.follows_repository_id is not None]
         user_ids = [x.follows_user_id for x in following_repos
                     if x.follows_user_id is not None]

         filtering_criterion = None

         if repo_ids and user_ids:
             filtering_criterion = or_(UserLog.repository_id.in_(repo_ids),
                                       UserLog.user_id.in_(user_ids))
         if repo_ids and not user_ids:
             filtering_criterion = UserLog.repository_id.in_(repo_ids)
         if not repo_ids and user_ids:
             filtering_criterion = UserLog.user_id.in_(user_ids)
         if filtering_criterion is not None:
             journal = self.sa.query(UserLog) \
                 .options(joinedload(UserLog.user)) \
                 .options(joinedload(UserLog.repository))
             #filter
             journal = _journal_filter(journal, c.search_term)
             journal = journal.filter(filtering_criterion) \
                 .order_by(UserLog.action_date.desc())
         else:
             journal = []

         return journal

     def _atom_feed(self, repos, public=True):
         journal = self._get_journal_data(repos)
         if public:
             _link = h.canonical_url('public_journal_atom')
             _desc = '%s %s %s' % (c.site_name, _('Public Journal'),
                                   'atom feed')
         else:
             _link = h.canonical_url('journal_atom')
             _desc = '%s %s %s' % (c.site_name, _('Journal'), 'atom feed')

         feed = Atom1Feed(title=_desc,
                          link=_link,
                          description=_desc,
-                         language=self.language,
-                         ttl=self.ttl)
+                         language=language,
+                         ttl=ttl)

-        for entry in journal[:self.feed_nr]:
+        for entry in journal[:feed_nr]:
             user = entry.user
             if user is None:
                 #fix deleted users
                 user = AttributeDict({'short_contact': entry.username,
                                       'email': '',
                                       'full_contact': ''})
             action, action_extra, ico = h.action_parser(entry, feed=True)
             title = "%s - %s %s" % (user.short_contact, action(),
                                     entry.repository.repo_name)
             desc = action_extra()
             _url = None
             if entry.repository is not None:
                 _url = h.canonical_url('changelog_home',
                                        repo_name=entry.repository.repo_name)

             feed.add_item(title=title,
                           pubdate=entry.action_date,
                           link=_url or h.canonical_url(''),
                           author_email=user.email,
                           author_name=user.full_contact,
                           description=desc)

         response.content_type = feed.mime_type
         return feed.writeString('utf-8')

     def _rss_feed(self, repos, public=True):
         journal = self._get_journal_data(repos)
         if public:
             _link = h.canonical_url('public_journal_atom')
             _desc = '%s %s %s' % (c.site_name, _('Public Journal'),
                                   'rss feed')
         else:
             _link = h.canonical_url('journal_atom')
             _desc = '%s %s %s' % (c.site_name, _('Journal'), 'rss feed')

         feed = Rss201rev2Feed(title=_desc,
                               link=_link,
                               description=_desc,
-                              language=self.language,
-                              ttl=self.ttl)
+                              language=language,
+                              ttl=ttl)

-        for entry in journal[:self.feed_nr]:
+        for entry in journal[:feed_nr]:
             user = entry.user
             if user is None:
                 #fix deleted users
                 user = AttributeDict({'short_contact': entry.username,
                                       'email': '',
                                       'full_contact': ''})
             action, action_extra, ico = h.action_parser(entry, feed=True)
             title = "%s - %s %s" % (user.short_contact, action(),
                                     entry.repository.repo_name)
             desc = action_extra()
             _url = None
             if entry.repository is not None:
                 _url = h.canonical_url('changelog_home',
                                        repo_name=entry.repository.repo_name)

             feed.add_item(title=title,
                           pubdate=entry.action_date,
                           link=_url or h.canonical_url(''),
                           author_email=user.email,
                           author_name=user.full_contact,
                           description=desc)

         response.content_type = feed.mime_type
         return feed.writeString('utf-8')

     @LoginRequired()
     @NotAnonymous()
     def index(self):
         # Return a rendered template
         p = safe_int(request.GET.get('page'), 1)
         c.user = User.get(self.authuser.user_id)
         c.following = self.sa.query(UserFollowing) \
             .filter(UserFollowing.user_id == self.authuser.user_id) \
             .options(joinedload(UserFollowing.follows_repository)) \
             .all()

         journal = self._get_journal_data(c.following)

         def url_generator(**kw):
             return url.current(filter=c.search_term, **kw)

         c.journal_pager = Page(journal, page=p, items_per_page=20, url=url_generator)
         c.journal_day_aggregate = self._get_daily_aggregate(c.journal_pager)

         if request.environ.get('HTTP_X_PARTIAL_XHR'):
             return render('journal/journal_data.html')

         repos_list = Repository.query(sorted=True) \
             .filter_by(owner_id=self.authuser.user_id).all()

         repos_data = RepoModel().get_repos_as_dict(repos_list=repos_list,
                                                    admin=True)
         #json used to render the grid
         c.data = json.dumps(repos_data)

         return render('journal/journal.html')

     @LoginRequired(api_access=True)
     @NotAnonymous()
     def journal_atom(self):
         """
         Produce an atom-1.0 feed via feedgenerator module
         """
         following = self.sa.query(UserFollowing) \
             .filter(UserFollowing.user_id == self.authuser.user_id) \
             .options(joinedload(UserFollowing.follows_repository)) \
             .all()
         return self._atom_feed(following, public=False)

     @LoginRequired(api_access=True)
     @NotAnonymous()
     def journal_rss(self):
         """
         Produce an rss feed via feedgenerator module
         """
         following = self.sa.query(UserFollowing) \
             .filter(UserFollowing.user_id == self.authuser.user_id) \
             .options(joinedload(UserFollowing.follows_repository)) \
             .all()
         return self._rss_feed(following, public=False)

     @LoginRequired()
     @NotAnonymous()
     def toggle_following(self):
         user_id = request.POST.get('follows_user_id')
         if user_id:
             try:
                 self.scm_model.toggle_following_user(user_id,
                                                      self.authuser.user_id)
                 Session.commit()
                 return 'ok'
             except Exception:
                 log.error(traceback.format_exc())
                 raise HTTPBadRequest()

         repo_id = request.POST.get('follows_repository_id')
         if repo_id:
             try:
                 self.scm_model.toggle_following_repo(repo_id,
                                                      self.authuser.user_id)
                 Session.commit()
                 return 'ok'
             except Exception:
                 log.error(traceback.format_exc())
                 raise HTTPBadRequest()

         raise HTTPBadRequest()

     @LoginRequired()
     def public_journal(self):
         # Return a rendered template
         p = safe_int(request.GET.get('page'), 1)

         c.following = self.sa.query(UserFollowing) \
             .filter(UserFollowing.user_id == self.authuser.user_id) \
             .options(joinedload(UserFollowing.follows_repository)) \
             .all()

         journal = self._get_journal_data(c.following)

         c.journal_pager = Page(journal, page=p, items_per_page=20)

         c.journal_day_aggregate = self._get_daily_aggregate(c.journal_pager)

         if request.environ.get('HTTP_X_PARTIAL_XHR'):
             return render('journal/journal_data.html')

         return render('journal/public_journal.html')

     @LoginRequired(api_access=True)
     def public_journal_atom(self):
         """
         Produce an atom-1.0 feed via feedgenerator module
         """
         c.following = self.sa.query(UserFollowing) \
             .filter(UserFollowing.user_id == self.authuser.user_id) \
             .options(joinedload(UserFollowing.follows_repository)) \
             .all()

         return self._atom_feed(c.following)

     @LoginRequired(api_access=True)
     def public_journal_rss(self):
         """
         Produce an rss2 feed via feedgenerator module
         """
         c.following = self.sa.query(UserFollowing) \
             .filter(UserFollowing.user_id == self.authuser.user_id) \
             .options(joinedload(UserFollowing.follows_repository)) \
             .all()

         return self._rss_feed(c.following)
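
Both controllers now share the same shape: module-level feed constants (language, ttl, and an item limit) feeding webhelpers' feed generators. A rough usage sketch under those assumptions follows; build_journal_feed and entries are hypothetical names used only for illustration, not part of the change:

    from webhelpers.feedgenerator import Atom1Feed

    language = 'en-us'
    ttl = "5"
    feed_nr = 20

    def build_journal_feed(title, link, entries):
        # Assemble an Atom feed from pre-built entry dicts, capping the number
        # of items at the module-level feed_nr constant.
        feed = Atom1Feed(title=title, link=link, description=title,
                         language=language, ttl=ttl)
        for entry in entries[:feed_nr]:
            feed.add_item(**entry)  # title, link, pubdate, author_name, description
        return feed.writeString('utf-8')

Keeping these values at module level (rather than assigning them in __before__) avoids redundant per-request work and makes them directly visible to nested functions such as the cached feed builders in feed.py.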