.. _changelog:

=========
Changelog
=========

1.6.0 (**2013-XX-XX**)
----------------------

:status: in-progress
:branch: beta

news
++++

fixes
+++++

1.5.3 (**2013-02-12**)
----------------------

news
++++

- IP restrictions now also enabled for IPv6

fixes
+++++

- fixed issues with private checkbox not always working
- fixed #746 UnicodeDecode errors on feed controllers
- fixes issue #756 cleanup repos didn't properly compose paths of repos to be
  cleaned up
- fixed cache invalidation issues together with vcs_full_cache option
- repo scan now skips directories starting with '.'
- fixes for issue #731, update-repoinfo sometimes failed to update data when
  changesets were initial commits
- recursive mode of setting permissions now skips private repositories

1.5.2 (**2013-01-14**)
----------------------

news
++++

- IP restrictions for users. Each user can get a set of whitelist IP+mask
  entries for extra protection. Useful for buildbots etc. (a minimal
  whitelist-check sketch follows this list)
- added full last changeset info to the lightweight dashboard. The
  lightweight dashboard is now a fully functional replacement for the
  original dashboard
- implemented certain API calls for non-admin users
- enabled all Markdown Extra plugins
- implemented #725 Pull Request View - Show origin repo URL
- show comments from pull requests in associated changesets

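The IP whitelist entries above are IP+mask pairs. A minimal sketch of the
kind of containment check this implies, using Python's stdlib ipaddress
module (illustrative only, not RhodeCode's implementation; the helper name
and the entry list are assumptions):

.. code-block:: python

    # Illustrative sketch only -- not RhodeCode's actual check.
    # `allowed` stands in for a user's configured whitelist entries.
    import ipaddress

    def ip_allowed(client_ip, allowed):
        """Return True if client_ip falls inside any whitelisted IP+mask."""
        if not allowed:                 # empty whitelist means no restriction
            return True
        addr = ipaddress.ip_address(client_ip)
        return any(addr in ipaddress.ip_network(net, strict=False)
                   for net in allowed)

    ip_allowed('10.0.8.12', ['10.0.8.0/24', '2001:db8::/32'])  # -> True
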
fixes
+++++

- update-repoinfo script is more failsafe
- fixed #687 Lazy loaded tooltip bug with simultaneous ajax requests
- fixed #691: Notifications for pull requests: move link to top for better
  readability
- fixed #699: fix missing fork docs for API
- fixed #693 Opening changeset from pull request fails
- fixed #710 File view stripping empty lines from beginning and end of file
- fixed issues with getting repos by path on windows, which caused Git hooks
  to fail
- fixed issues with groups paginator on main dashboard
- improved fetch/pull command for git repos, now pulling all refs
- fixed issue #719 Journal revision ID tooltip AJAX query path is incorrect
  when running in a subdir
- fixed issue #702 API methods without arguments fail when "args":null
- set the status of changesets initially on pull request. Fixes issues #690
  and #587

1.5.1 (**2012-12-13**)
----------------------

news
++++

- implements #677: don't allow closing pull requests when they are in
  under-review status
- implemented #670 Implementation of Roles in Pull Request

fixes
+++++

- default permissions can get duplicated after migration
- fixed changeset status labels, they now select radio buttons
- #682 translation difficult for multi-line text
- #683 fixed difference between messages about not mapped repositories
- email: fail nicely when no SMTP server has been configured

1.5.0 (**2012-12-12**)
----------------------

news
++++

- new diff engine rewritten from scratch, 10x faster in edge cases. Handles
  file renames, copies, change flags and binary files
- added lightweight dashboard option, ref #500. New version of the dashboard
  page that doesn't use any VCS data and is super fast to render. Recommended
  for a large number of repositories
- implements #648 script for updating last modification time for the
  lightweight dashboard
- implemented compare engine for git repositories
- LDAP failover, option to specify multiple servers
- added Errormator and Sentry support for monitoring RhodeCode
- implemented #628: pass server URL to rc-extensions hooks
- new tooltip implementation - added lazy loading of changesets from journal
  pages. This can significantly improve page rendering speed
- implements #632, added branch/tag/bookmark info into feeds and a changeset
  link to the body of the message
- implemented #638 permissions overview for groups
- implements #636, lazy loading of history and authors to speed up source
  pages rendering
- implemented #647, option to pass a list of default encodings used to
  encode to/decode from unicode (see the sketch after this list)
- added caching layer into RSS/ATOM feeds
- basic implementation of cherry picking changesets for pull request, ref #575
- implemented #661 Add option to include diff in RSS feed
- implemented file history page for showing detailed changelog for a given file
- implemented #663 Admin/permission: specify default repogroup perms
- implemented #379 defaults settings page for creation of repositories:
  locking, statistics, downloads, repository type
- implemented #210 filtering of admin journal based on Whoosh Query language
- added parents/children links in changeset view, ref #650

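A sketch of the fallback-decoding idea behind #647: try each configured
default encoding in order, then fall back to a lenient decode (illustrative
only; the helper name and the encoding list are assumptions, not RhodeCode's
code):

.. code-block:: python

    # Illustrative sketch of decoding with a list of fallback encodings.
    # The default encoding list here is an assumption, not a RhodeCode value.
    def decode_with_fallbacks(raw, encodings=('utf-8', 'latin-1')):
        for enc in encodings:
            try:
                return raw.decode(enc)
            except UnicodeDecodeError:
                continue
        # last resort: never raise, replace undecodable bytes instead
        return raw.decode('utf-8', 'replace')

    decode_with_fallbacks(b'gr\xfc\xdfe')  # decoded via the latin-1 fallback
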
fixes
+++++

- fixed git version checker
- #586 patched basic auth handler to fix issues with git behind proxy
- #589 search urlgenerator didn't properly escape special characters
- fixed issue #614 Include repo name in delete confirmation dialog
- fixed #623: Lang meta-tag doesn't work with C#/C++
- fixes #612 Double quotes to Single quotes result in bad html in diff
- fixes #630 git statistics do too much work, making them slow
- fixes #625 Git-Tags are not displayed in Shortlog
- fix for issue #602, enforce str when setting the mercurial UI object.
  When used together with mercurial's internal translation system it can
  lead to UnicodeDecodeErrors
- fixes #645 fix git handler when deleting a remote branch
- implements #649 added two separate methods for author and committer to the
  VCS changeset class; switched author for the git backend to be the real
  author, not the committer
- fix issue #504 RhodeCode is showing different versions of README on
  different summary page loads
- implemented #658 Changing username in LDAP-Mode should not be allowed
- fixes #652 switch to a generator approach when doing file annotation to
  prevent huge memory consumption
- fixes #666 move lockkey path location to cache_dir to ensure this path is
  always writable for the rhodecode server
- many more small fixes and improvements
- fixed issues with recursive scans on removed repositories that could take
  a long time on instance start

1.4.4 (**2012-10-08**)
----------------------

news
++++

- obfuscate db password in logs for engine connection string
- #574 Show pull request status also in shortlog (if any)
- remember selected tab in my account page
- Bumped mercurial version to 2.3.2
- #595 rcextension hook for repository delete

fixes
+++++

- Add git version detection to warn users that the Git used on the system is
  too old. Ref #588 - also show git version in system details on the
  settings page
- fixed files quick filter links
- #590 Add GET flag that controls the way diffs are generated; for pull
  requests we want to use non-bundle based diffs, which are far better for
  doing code reviews. The /compare url still uses bundle compare for full
  comparison including the incoming changesets
- Fixed #585, checks for the status of a revision were too strict and made
  opening a pull request with those revisions impossible due to a previously
  set status. Checks are now also made for the repository
- fixes #591 git backend was causing encoding errors when handling binary
  files - added a test case for VCS lib tests
- fixed #597 commits in the future get a negative age
- fixed #598 API docs methods had wrong members parameter as returned data

1.4.3 (**2012-09-28**)
----------------------

news
++++

- #558 Added config file to hooks extra data
- bumped mercurial version to 2.3.1
- #518 added possibility of specifying multiple patterns for issues
- updated codemirror to latest version

fixes
+++++

- fixed #570 explicit user group permissions can overwrite owner permissions
- fixed #578 set proper PATH with current Python for Git hooks to execute
  within the same Python as RhodeCode
- fixed issue with Git bare repos that end with .git in the name

1.4.2 (**2012-09-12**)
----------------------

news
++++

- added option to menu to quickly lock/unlock a repository for users that
  have write access to it
- Implemented permissions for writing to repo groups. Now only write access
  to a group allows creating a repository within that group
- #565 Add support for {netloc} and {scheme} to alternative_gravatar_url
  (see the sketch after this list)
- updated translation for zh_CN

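alternative_gravatar_url is a URL template; #565 adds {netloc} and {scheme}
placeholders to it. A hedged illustration of how such a template expands
(the {email} placeholder and all values are assumptions made for the
example):

.. code-block:: python

    # Illustration of template expansion only; the placeholder set and the
    # values are assumptions, not taken from a real RhodeCode config.
    tmpl = '{scheme}://{netloc}/avatars/{email}'
    url = tmpl.format(scheme='https',
                      netloc='avatars.example.com',
                      email='dev@example.com')
    # -> 'https://avatars.example.com/avatars/dev@example.com'
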
fixes
+++++

- fixed visual permissions check on repos groups inside groups
- fixed issues with non-ascii search terms in search, and indexers
- fixed parsing of page number in GET parameters
- fixed issues with generating pull-request overview for repos with bookmarks
  and tags; also the preview doesn't lose the chosen revision from the select
  dropdown

1.4.1 (**2012-09-07**)
----------------------

news
++++

- always put a comment about code-review status change even if the user
  sends empty data
- modified_on column saves repository update time and is going to be used
  later for the light version of the main page, ref #500
- pull request notifications send much nicer emails with details about the
  pull request
- #551 show breadcrumbs in summary view for repositories inside a group

fixes
+++++

- fixed migrations of permissions that could lead to inconsistency.
  Some users sent feedback that after upgrading from older versions issues
  with updating default permissions occurred. RhodeCode detects that now and
  resets the default user permission to the initial state if there is a need
  for that. It also forces users to set the default value for the new forking
  permission
- #535 improved apache wsgi example configuration in docs
- fixes #550 mercurial repositories comparison failed when the origin repo
  had additional not-common changesets
- fixed status of code-review in preview windows of pull request
- git forks were not initialized as bare repos
- fixes #555 issues with comparing non-related repositories
- fixes #557 follower counter always counts up
- fixed issue #560 require push ssl checkbox wasn't shown when the option
  was enabled
- fixed issue #559 bug in routing that mapped repo names with <name>_<num>
  in the name as if it were a request to a url by repository ID

1.4.0 (**2012-09-03**)
----------------------

news
++++

- new codereview system
- email map, allowing users to have multiple email addresses mapped to
  their accounts
- improved git-hook system. Now all actions for git are logged into the
  journal, including pushed revisions, user and IP address
- changed setup-app into setup-rhodecode and added default options to it
- new git repos are now created as bare by default
- #464 added links to groups in permission box
- #465 mentions autocomplete inside comment boxes
- #469 added --update-only option to whoosh to re-index only a given list
  of repos in the index
- rhodecode-api CLI client
- new git http protocol replaced the buggy dulwich implementation.
  Now based on pygrack & gitweb
- Improved RSS/ATOM feeds. Discoverable by browsers using proper headers, and
  reformatted based on user suggestions. Additional rss/atom feeds for user
  journal
- various i18n improvements
- #478 permissions overview for admin in user edit view
- File view now displays small gravatars of all authors of a given file
- Implemented landing revisions. Each repository gets a landing_rev attribute
  that defines the 'default' revision/branch for generating readme files
- Implemented #509, RhodeCode enforces SSL for push/pull if requested at the
  earliest possible call
- Import remote svn repositories to mercurial using hgsubversion
- Fixed #508 RhodeCode now has an option to explicitly set forking permissions
- RhodeCode can use an alternative server for generating avatar icons
- implemented repository locking. Pull locks, push unlocks. Can also be done
  via API calls
- #538 form for permissions can handle multiple users at once

fixes
+++++

- improved translations
- fixes issue #455 Creating an archive generates an exception on Windows
- fixes #448 Download ZIP archive keeps file in /tmp open and results in
  running out of disk space
- fixes issue #454 Search results under Windows include preceding backslash
- fixed issue #450. RhodeCode will no longer crash when a bad revision is
  present in journal data
- fix for issue #417, git execution was broken on windows for certain
  commands
- fixed #413. Don't disable .git directory for bare repos on deleting
- fixed issue #459. Changed the way of obtaining the logger in the reindex
  task
- fixed #453 added ID field in whoosh SCHEMA that solves the issue of
  reindexing modified files
- fixed #481 rhodecode emails are sent without Date header
- fixed #458 wrong count when no repos are present
- fixed issue #492 missing `\ No newline at end of file` test at the end of
  a new chunk in html diff
- full text search now works also for commit messages

1.3.6 (**2012-05-17**)
----------------------

news
++++

- Chinese traditional translation
- changed setup-app into setup-rhodecode and added arguments for an
  auto-setup mode that doesn't need user interaction

fixes
+++++

- fixed 'no scm found' warning
- fixed __future__ import error in rcextensions
- made simplejson a required lib for a speedup in JSON encoding
- fixes #449 a bad regex could match more than revisions when parsing history
- don't clear DB session when CELERY_EAGER is turned ON

1.3.5 (**2012-05-10**)
----------------------

news
++++

- use ext_json for json module
- unified annotation view with file source view
- notification improvements, better inbox + css
- #419 don't strip passwords for login forms, make rhodecode more compatible
  with LDAP servers
- Added HTTP_X_FORWARDED_FOR as another method of extracting the IP for
  pull/push logs; moved all to the base controller
- #415: Adding comment to changeset causes reload.
  Comments are now added via ajax and don't reload the page
- #374 LDAP config is discarded when LDAP can't be activated
- limited push/pull operations are now logged for git in the journal
- bumped mercurial to 2.2.X series
- added support for displaying submodules in file-browser
- #421 added bookmarks in changelog view

fixes
+++++

- fixed dev-version marker for stable when served from source code
- fixed missing permission checks on show forks page
- #418 cast to unicode fixes in notification objects
- #426 fixed mention extracting regex
- fixed remote-pulling for git remote repositories
- fixed #434: Error when accessing files or changesets of a git repository
  with submodules
- fixed issue with empty APIKEYS for users after registration, ref #438
- fixed issue with getting README files from git repositories

1.3.4 (**2012-03-28**)
----------------------

news
++++

- Whoosh logging is now controlled by the .ini file's logging setup
- added clone-url into edit form on /settings page
- added help text into repo add/edit forms
- created rcextensions module with additional mappings (ref #322) and
  post push/pull/create repo hook callbacks (see the sketch after this list)
- implemented #377 users view of their own permissions on account page
- #399 added inheritance of permissions for user group on repos groups
- #401 repository group is automatically pre-selected when adding repos
  inside a repository group
- added alternative HTTP 403 response when a client failed to authenticate.
  Helps solving issues with Mercurial and LDAP
- #402 removed group prefix from repository name when listing repositories
  inside a group
- added gravatars into permission view and permissions autocomplete
- #347 when running multiple RhodeCode instances, properly invalidates cache
  for all registered servers

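The rcextensions callbacks mentioned above are plain Python callables placed
in the rcextensions module. A minimal sketch of a push callback; the hook
variable name PUSH_HOOK and the keyword arguments shown are assumptions, so
consult the generated rcextensions template for the exact contract:

.. code-block:: python

    # rcextensions/__init__.py -- illustrative sketch only.
    # PUSH_HOOK and the received keyword arguments are assumptions here.
    import logging

    log = logging.getLogger('rcextensions')

    def _push_hook(*args, **kwargs):
        # kwargs might carry e.g. repository name, user and pushed revisions
        log.info('push into %s by %s',
                 kwargs.get('repository'), kwargs.get('username'))
        return 0

    PUSH_HOOK = _push_hook
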
fixes
+++++

- fixed #390 cache invalidation problems on repos inside group
- fixed #385 clone by ID url was losing the proxy prefix in the URL
- fixed some unicode problems with waitress
- fixed issue with escaping < and > in changeset commits
- fixed error occurring during recursive group creation in the API
  create_repo function
- fixed #393 py2.5 fixes for routes url generator
- fixed #397 Private repository groups shows up before login
- fixed #396 problems with revoking users in nested groups
- fixed mysql unicode issues + specified InnoDB as default engine with
  utf8 charset
- #406 trim long branch/tag names in changelog to not break UI

1.3.3 (**2012-03-02**)
----------------------

news
++++

fixes
+++++

- fixed some python2.5 compatibility issues
- fixed issue where removed repos were accidentally added as groups after a
  full rescan of paths
- fixes #376 Cannot edit user (using container auth)
- fixes #378 Invalid image urls on changeset screen with proxy-prefix
  configuration
- fixed initial sorting of repos inside repo group
- fixes issue when a user tried to resubmit the same permission into
  user/user_groups
- bumped beaker version that fixes #375 leap error bug
- fixed raw_changeset for git. It was generated with hg patch headers
- fixed vcs issue with last_changeset for filenodes
- fixed missing commit after hook delete
- fixed #372 issues with git operation detection that caused a security
  issue for git repos

1.3.2 (**2012-02-28**)
----------------------

news
++++

fixes
+++++

- fixed git protocol issues with repos-groups
- fixed git remote repos validator that prevented cloning remote git repos
- fixes #370 ending slashes fixes for repo and groups
- fixes #368 improved git-protocol detection to handle other clients
- fixes #366 When Setting Repository Group To Blank Repo Group Won't Be
  Moved To Root
- fixes #371 fixed issues with beaker/sqlalchemy and non-ascii cache keys
- fixed #373 missing cascade drop on user_group_to_perm table

1.3.1 (**2012-02-27**)
----------------------

news
++++

fixes
+++++

- fixed redirection loop that occurred when remember-me wasn't checked
  during login
- fixes issues with git blob history generation
- don't fetch branch for git in file history dropdown; it caused unneeded
  slowness

1.3.0 (**2012-02-26**)
----------------------

news
++++

- code review, inspired by github code-comments
- #215 rst and markdown README files support
- #252 Container-based and proxy pass-through authentication support
- #44 branch browser. Filtering of changelog by branches
- mercurial bookmarks support
- new hover top menu, optimized to add maximum size for important views
- configurable clone url template with the possibility to specify a protocol
  like ssh:// or http:// and also manually alter other parts of clone_url
- enabled largefiles extension by default
- optimized summary file pages and saved a lot of unused space in them
- #239 option to manually mark repository as fork
- #320 mapping of commit authors to RhodeCode users
- #304 hashes are displayed using monospace font
- diff configuration, toggle white lines and context lines
- #307 configurable diffs, whitespace toggle, increasing context lines
- sorting on branches, tags and bookmarks using YUI datatable
- improved file filter on files page
- implements #330 api method for listing nodes at a particular revision
- #73 added linking issues in commit messages to a chosen issue tracker url
  based on a user-defined regular expression (see the sketch after this list)
- added linking of changesets in commit messages
- new compact changelog with expandable commit messages
- firstname and lastname are optional in user creation
- #348 added post-create repository hook
- #212 global encoding setting is now configurable from .ini files
- #227 added repository groups permissions
- markdown gets codehilite extensions
- new API methods: delete_repositories, grant/revoke permissions for groups
  and repos

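The issue linking in #73 is driven by a user-defined regular expression plus
a URL template. A rough sketch of the substitution step; the pattern, URL
and output markup are example values, not RhodeCode defaults:

.. code-block:: python

    # Example values only -- pattern and url template are assumptions.
    import re

    issue_pat = re.compile(r'#(\d+)')
    issue_url = 'https://issues.example.com/{id}'

    def link_issues(message):
        def _repl(match):
            iid = match.group(1)
            return '<a href="%s">#%s</a>' % (issue_url.format(id=iid), iid)
        return issue_pat.sub(_repl, message)

    link_issues('fixes #559 and #560')  # both references become links
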
fixes
+++++

- rewrote dbsession management for atomic operations, and better error
  handling
- fixed sorting of repo tables
- #326 escape of special html entities in diffs
- normalized user_name => username in api attributes
- fixes #298 ldap-created users with mixed case emails created conflicts
  on saving a form
- fixes issue when the owner of a repo couldn't revoke permissions for users
  and groups
- fixes #271 rare JSON serialization problem with statistics
- fixes #337 missing validation check for conflicting names of a group with
  a repository group
- #340 fixed session problem for mysql and celery tasks
- fixed #331 RhodeCode mangles repository names if a repository group
  contains the "full path" to the repositories
- #355 RhodeCode doesn't store encrypted LDAP passwords

1.2.5 (**2012-01-28**)
----------------------

news
++++

fixes
+++++

- #340 Celery complains about MySQL server gone away, added session cleanup
  for celery tasks
- #341 "scanning for repositories in None" log message during Rescan was
  missing a parameter
- fixed creating archives with subrepos. Some hooks were triggered during
  that operation, leading to a crash
- fixed missing email on account page
- Reverted Mercurial to 2.0.1 for windows due to a bug in Mercurial that
  makes forking on windows impossible

1.2.4 (**2012-01-19**)
----------------------

news
++++

- RhodeCode is bundled with the mercurial 2.0.X series by default, with
  full support for the largefiles extension. Enabled by default in new
  installations
- #329 Ability to Add/Remove Groups to/from a Repository via API
- added requires.txt file with requirements

fixes
+++++

- fixes db session issues with celery when emailing admins
- #331 RhodeCode mangles repository names if a repository group
  contains the "full path" to the repositories
- #298 Conflicting e-mail addresses for LDAP and RhodeCode users
- DB session cleanup after hg protocol operations, fixes issues with
  `mysql has gone away` errors
- #333 doc fixes for get_repo api function
- #271 rare JSON serialization problem with statistics enabled
- #337 Fixes issues with validation of repository name conflicting with
  a group name. A proper message is now displayed
- #292 made ldap_dn in user edit readonly, to get rid of the confusion that
  the field doesn't work
- #316 fixes issues with web description in hgrc files

1.2.3 (**2011-11-02**)
----------------------

news
++++

- added option to manage repos groups for non-admin users
- added the following API methods: get_users, create_user, get_users_groups,
  get_users_group, create_users_group, add_user_to_users_groups, get_repos,
  get_repo, create_repo, add_user_to_repo (a minimal call sketch follows
  this list)
- implements #237 added password confirmation for my account
  and admin edit user
- implements #291 email notifications for global events are now sent to all
  administrator users and the global config email

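The API methods listed above are exposed over a JSON-RPC style HTTP
endpoint. A hedged sketch of calling get_repo from Python; the /_admin/api
path, the payload layout and the argument name are assumptions here, so
check the API documentation for the exact contract:

.. code-block:: python

    # Hedged sketch -- endpoint path, payload layout and argument name are
    # assumptions, not confirmed RhodeCode specifics.
    import json
    import urllib.request

    payload = {
        'id': 1,
        'api_key': 'secret-api-key',   # taken from the user's account page
        'method': 'get_repo',          # one of the methods listed above
        'args': {'repoid': 'myrepo'},  # argument name is an assumption
    }
    req = urllib.request.Request(
        'http://localhost:5000/_admin/api',
        data=json.dumps(payload).encode('utf-8'),
        headers={'Content-Type': 'application/json'})
    with urllib.request.urlopen(req) as resp:
        print(json.load(resp))
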
fixes
+++++

- added option for passing auth method for smtp mailer
- #276 issue with adding a single user with id>10 to usergroups
- #277 fixes windows LDAP settings in which missing values break the ldap
  auth
- #288 fixes managing of repos in a group for non-admin users

599 | 1.2.2 (**2011-10-17**) |
|
599 | 1.2.2 (**2011-10-17**) | |
600 | ---------------------- |
|
600 | ---------------------- | |
601 |
|
601 | |||
602 | news |
|
602 | news | |
603 | ++++ |
|
603 | ++++ | |
604 |
|
604 | |||
605 | - #226 repo groups are available by path instead of numerical id |
|
605 | - #226 repo groups are available by path instead of numerical id | |
606 |
|
606 | |||
607 | fixes |
|
607 | fixes | |
608 | +++++ |
|
608 | +++++ | |
609 |
|
609 | |||
610 | - #259 Groups with the same name but with different parent group |
|
610 | - #259 Groups with the same name but with different parent group | |
611 | - #260 Put repo in group, then move group to another group -> repo becomes unavailable |
|
611 | - #260 Put repo in group, then move group to another group -> repo becomes unavailable | |
612 | - #258 RhodeCode 1.2 assumes egg folder is writable (lockfiles problems) |
|
612 | - #258 RhodeCode 1.2 assumes egg folder is writable (lockfiles problems) | |
613 | - #265 ldap save fails sometimes on converting attributes to booleans, |
|
613 | - #265 ldap save fails sometimes on converting attributes to booleans, | |
614 | added getter and setter into model that will prevent from this on db model level |
|
614 | added getter and setter into model that will prevent from this on db model level | |
615 | - fixed problems with timestamps issues #251 and #213 |
|
615 | - fixed problems with timestamps issues #251 and #213 | |
616 | - fixes #266 RhodeCode allowed creating a repo with the same name and in |
|
616 | - fixes #266 RhodeCode allowed creating a repo with the same name and in | |
617 | the same parent as a group |
|
617 | the same parent as a group | |
618 | - fixes #245 Rescan of the repositories on Windows |
|
618 | - fixes #245 Rescan of the repositories on Windows | |
619 | - fixes #248 cannot edit repos inside a group on windows |
|
619 | - fixes #248 cannot edit repos inside a group on windows | |
620 | - fixes #219 forking problems on windows |
|
620 | - fixes #219 forking problems on windows | |
621 |
|
621 | |||
622 | 1.2.1 (**2011-10-08**) |
|
622 | 1.2.1 (**2011-10-08**) | |
623 | ---------------------- |
|
623 | ---------------------- | |
624 |
|
624 | |||
625 | news |
|
625 | news | |
626 | ++++ |
|
626 | ++++ | |
627 |
|
627 | |||
628 |
|
628 | |||
629 | fixes |
|
629 | fixes | |
630 | +++++ |
|
630 | +++++ | |
631 |
|
631 | |||
632 | - fixed problems with basic auth and push problems |
|
632 | - fixed problems with basic auth and push problems | |
633 | - gui fixes |
|
633 | - gui fixes | |
634 | - fixed logger |
|
634 | - fixed logger | |
635 |
|
635 | |||
636 | 1.2.0 (**2011-10-07**) |
|
636 | 1.2.0 (**2011-10-07**) | |
637 | ---------------------- |
|
637 | ---------------------- | |
638 |
|
638 | |||
639 | news |
|
639 | news | |
640 | ++++ |
|
640 | ++++ | |
641 |
|
641 | |||
642 | - implemented #47 repository groups |
|
642 | - implemented #47 repository groups | |
643 | - implemented #89 Can setup google analytics code from settings menu |
|
643 | - implemented #89 Can setup google analytics code from settings menu | |
644 | - implemented #91 added nicer looking archive urls with more download options |
|
644 | - implemented #91 added nicer looking archive urls with more download options | |
645 | like tags, branches |
|
645 | like tags, branches | |
646 | - implemented #44 into file browsing, and added follow branch option |
|
646 | - implemented #44 into file browsing, and added follow branch option | |
647 | - implemented #84 downloads can be enabled/disabled for each repository |
|
647 | - implemented #84 downloads can be enabled/disabled for each repository | |
648 | - anonymous repository can be cloned without having to pass default:default |
|
648 | - anonymous repository can be cloned without having to pass default:default | |
649 | into clone url |
|
649 | into clone url | |
650 | - fixed #90 whoosh indexer can index chosen repositories passed in command |
|
650 | - fixed #90 whoosh indexer can index chosen repositories passed in command | |
651 | line |
|
651 | line | |
652 | - extended journal with day aggregates and paging |
|
652 | - extended journal with day aggregates and paging | |
653 | - implemented #107 source code lines highlight ranges |
|
653 | - implemented #107 source code lines highlight ranges | |
654 | - implemented #93 customizable changelog on combined revision ranges - |
|
654 | - implemented #93 customizable changelog on combined revision ranges - | |
655 | equivalent of GitHub's compare view |
|
655 | equivalent of GitHub's compare view | |
656 | - implemented #108 extended and more powerful LDAP configuration |
|
656 | - implemented #108 extended and more powerful LDAP configuration | |
657 | - implemented #56 user groups |
|
657 | - implemented #56 user groups | |
658 | - major code rewrites; optimized code for speed and memory usage |
|
658 | - major code rewrites; optimized code for speed and memory usage | |
659 | - raw and diff downloads are now in git format |
|
659 | - raw and diff downloads are now in git format | |
660 | - setup command checks for write access to given path |
|
660 | - setup command checks for write access to given path | |
661 | - fixed many issues with international characters and unicode. It uses utf8 |
|
661 | - fixed many issues with international characters and unicode. It uses utf8 | |
662 | decode with replace to produce fewer errors even with non-utf8 encoded strings |
|
662 | decode with replace to produce fewer errors even with non-utf8 encoded strings | |
663 | - #125 added API KEY access to feeds |
|
663 | - #125 added API KEY access to feeds | |
664 | - #109 Repository can be created from external Mercurial link (aka. remote |
|
664 | - #109 Repository can be created from external Mercurial link (aka. remote | |
665 | repository), and manually updated (via pull) from the admin panel |
|
665 | repository), and manually updated (via pull) from the admin panel | |
666 | - beta git support - push/pull server + basic view for git repos |
|
666 | - beta git support - push/pull server + basic view for git repos | |
667 | - added followers page and forks page |
|
667 | - added followers page and forks page | |
668 | - server side file creation (with binary file upload interface) |
|
668 | - server side file creation (with binary file upload interface) | |
669 | and editing with commits, powered by codemirror |
|
669 | and editing with commits, powered by codemirror | |
670 | - #111 file browser file finder, quick lookup files on whole file tree |
|
670 | - #111 file browser file finder, quick lookup files on whole file tree | |
671 | - added quick login sliding menu into main page |
|
671 | - added quick login sliding menu into main page | |
672 | - changelog uses lazy loading of affected files details, in some scenarios |
|
672 | - changelog uses lazy loading of affected files details, in some scenarios | |
673 | this can improve speed of changelog page dramatically especially for |
|
673 | this can improve speed of changelog page dramatically especially for | |
674 | larger repositories. |
|
674 | larger repositories. | |
675 | - implements #214 added support for downloading subrepos in download menu. |
|
675 | - implements #214 added support for downloading subrepos in download menu. | |
676 | - Added basic API for direct operations on rhodecode via JSON |
|
676 | - Added basic API for direct operations on rhodecode via JSON | |
677 | - Implemented advanced hook management |
|
677 | - Implemented advanced hook management | |
678 |
|
678 | |||
679 | fixes |
|
679 | fixes | |
680 | +++++ |
|
680 | +++++ | |
681 |
|
681 | |||
682 | - fixed file browser bug where, when switching to a given revision, the url was |
|
682 | - fixed file browser bug where, when switching to a given revision, the url was | |
683 | not changing |
|
683 | not changing | |
684 | - fixed propagation to error controller on simplehg and simplegit middlewares |
|
684 | - fixed propagation to error controller on simplehg and simplegit middlewares | |
685 | - fixed error when trying to make a download on empty repository |
|
685 | - fixed error when trying to make a download on empty repository | |
686 | - fixed problem with '[' chars in commit messages in journal |
|
686 | - fixed problem with '[' chars in commit messages in journal | |
687 | - fixed #99 Unicode errors, on file node paths with non utf-8 characters |
|
687 | - fixed #99 Unicode errors, on file node paths with non utf-8 characters | |
688 | - journal fork fixes |
|
688 | - journal fork fixes | |
689 | - removed issue with space inside renamed repository after deletion |
|
689 | - removed issue with space inside renamed repository after deletion | |
690 | - fixed strange issue on formencode imports |
|
690 | - fixed strange issue on formencode imports | |
691 | - fixed #126 Deleting repository on Windows, rename used incompatible chars. |
|
691 | - fixed #126 Deleting repository on Windows, rename used incompatible chars. | |
692 | - #150 fixes for errors on repositories mapped in db but corrupted in |
|
692 | - #150 fixes for errors on repositories mapped in db but corrupted in | |
693 | filesystem |
|
693 | filesystem | |
694 | - fixed problem with accented characters in realm #181 |
|
694 | - fixed problem with accented characters in realm #181 | |
695 | - fixed problem with sqlite file based database connection pool |
|
695 | - fixed problem with sqlite file based database connection pool | |
696 | - whoosh indexer and code stats share the same dynamic extensions map |
|
696 | - whoosh indexer and code stats share the same dynamic extensions map | |
697 | - fixes #188 - relationship delete of repo_to_perm entry on user removal |
|
697 | - fixes #188 - relationship delete of repo_to_perm entry on user removal | |
698 | - fixes issue #189 Trending source files shows "show more" when no more exist |
|
698 | - fixes issue #189 Trending source files shows "show more" when no more exist | |
699 | - fixes issue #197 Relative paths for pidlocks |
|
699 | - fixes issue #197 Relative paths for pidlocks | |
700 | - fixes issue #198 password will require only 3 chars now for login form |
|
700 | - fixes issue #198 password will require only 3 chars now for login form | |
701 | - fixes issue #199 wrong redirection for non admin users after creating a repository |
|
701 | - fixes issue #199 wrong redirection for non admin users after creating a repository | |
702 | - fixes issue #202, bad db constraint made it impossible to attach the same group |
|
702 | - fixes issue #202, bad db constraint made it impossible to attach the same group | |
703 | more than once. Affects only mysql/postgres |
|
703 | more than once. Affects only mysql/postgres | |
704 | - fixes #218 os.kill patch for windows was missing sig param |
|
704 | - fixes #218 os.kill patch for windows was missing sig param | |
705 | - improved rendering of dag (it is not trimmed anymore when the number of |
|
705 | - improved rendering of dag (it is not trimmed anymore when the number of | |
706 | heads exceeds 5) |
|
706 | heads exceeds 5) | |
707 |
|
707 | |||
708 | 1.1.8 (**2011-04-12**) |
|
708 | 1.1.8 (**2011-04-12**) | |
709 | ---------------------- |
|
709 | ---------------------- | |
710 |
|
710 | |||
711 | news |
|
711 | news | |
712 | ++++ |
|
712 | ++++ | |
713 |
|
713 | |||
714 | - improved windows support |
|
714 | - improved windows support | |
715 |
|
715 | |||
716 | fixes |
|
716 | fixes | |
717 | +++++ |
|
717 | +++++ | |
718 |
|
718 | |||
719 | - fixed #140 freeze of python dateutil library, since new version is python2.x |
|
719 | - fixed #140 freeze of python dateutil library, since new version is python2.x | |
720 | incompatible |
|
720 | incompatible | |
721 | - setup-app will check for write permission in given path |
|
721 | - setup-app will check for write permission in given path | |
722 | - cleaned up license info issue #149 |
|
722 | - cleaned up license info issue #149 | |
723 | - fixes for issues #137,#116 and problems with unicode and accented characters. |
|
723 | - fixes for issues #137,#116 and problems with unicode and accented characters. | |
724 | - fixes crashes on gravatar, when passed in email as unicode |
|
724 | - fixes crashes on gravatar, when passed in email as unicode | |
725 | - fixed tooltip flickering problems |
|
725 | - fixed tooltip flickering problems | |
726 | - fixed came_from redirection on windows |
|
726 | - fixed came_from redirection on windows | |
727 | - fixed logging modules, and sql formatters |
|
727 | - fixed logging modules, and sql formatters | |
728 | - windows fixes for os.kill issue #133 |
|
728 | - windows fixes for os.kill issue #133 | |
729 | - fixes path splitting for windows issues #148 |
|
729 | - fixes path splitting for windows issues #148 | |
730 | - fixed issue #143 wrong import on migration to 1.1.X |
|
730 | - fixed issue #143 wrong import on migration to 1.1.X | |
731 | - fixed problems with displaying binary files, thanks to Thomas Waldmann |
|
731 | - fixed problems with displaying binary files, thanks to Thomas Waldmann | |
732 | - removed name from archive files since it's breaking ui for long repo names |
|
732 | - removed name from archive files since it's breaking ui for long repo names | |
733 | - fixed issue with archive headers sent to browser, thanks to Thomas Waldmann |
|
733 | - fixed issue with archive headers sent to browser, thanks to Thomas Waldmann | |
734 | - fixed compatibility for 1024px displays, and larger dpi settings, thanks to |
|
734 | - fixed compatibility for 1024px displays, and larger dpi settings, thanks to | |
735 | Thomas Waldmann |
|
735 | Thomas Waldmann | |
736 | - fixed issue #166 summary pager was skipping 10 revisions on second page |
|
736 | - fixed issue #166 summary pager was skipping 10 revisions on second page | |
737 |
|
737 | |||
738 |
|
738 | |||
739 | 1.1.7 (**2011-03-23**) |
|
739 | 1.1.7 (**2011-03-23**) | |
740 | ---------------------- |
|
740 | ---------------------- | |
741 |
|
741 | |||
742 | news |
|
742 | news | |
743 | ++++ |
|
743 | ++++ | |
744 |
|
744 | |||
745 | fixes |
|
745 | fixes | |
746 | +++++ |
|
746 | +++++ | |
747 |
|
747 | |||
748 | - fixed (again) #136 installation support for FreeBSD |
|
748 | - fixed (again) #136 installation support for FreeBSD | |
749 |
|
749 | |||
750 |
|
750 | |||
751 | 1.1.6 (**2011-03-21**) |
|
751 | 1.1.6 (**2011-03-21**) | |
752 | ---------------------- |
|
752 | ---------------------- | |
753 |
|
753 | |||
754 | news |
|
754 | news | |
755 | ++++ |
|
755 | ++++ | |
756 |
|
756 | |||
757 | fixes |
|
757 | fixes | |
758 | +++++ |
|
758 | +++++ | |
759 |
|
759 | |||
760 | - fixed #136 installation support for FreeBSD |
|
760 | - fixed #136 installation support for FreeBSD | |
761 | - RhodeCode will check for python version during installation |
|
761 | - RhodeCode will check for python version during installation | |
762 |
|
762 | |||
763 | 1.1.5 (**2011-03-17**) |
|
763 | 1.1.5 (**2011-03-17**) | |
764 | ---------------------- |
|
764 | ---------------------- | |
765 |
|
765 | |||
766 | news |
|
766 | news | |
767 | ++++ |
|
767 | ++++ | |
768 |
|
768 | |||
769 | - basic windows support, by exchanging pybcrypt for sha256 on windows only, |
|
769 | - basic windows support, by exchanging pybcrypt for sha256 on windows only, | |
770 | highly inspired by an idea from mantis406 |
|
770 | highly inspired by an idea from mantis406 | |
771 |
|
771 | |||
772 | fixes |
|
772 | fixes | |
773 | +++++ |
|
773 | +++++ | |
774 |
|
774 | |||
775 | - fixed sorting by author in main page |
|
775 | - fixed sorting by author in main page | |
776 | - fixed crashes with diffs on binary files |
|
776 | - fixed crashes with diffs on binary files | |
777 | - fixed #131 problem with boolean values for LDAP |
|
777 | - fixed #131 problem with boolean values for LDAP | |
778 | - fixed #122 mysql problems thanks to striker69 |
|
778 | - fixed #122 mysql problems thanks to striker69 | |
779 | - fixed problem with errors on calling raw/raw_files/annotate functions |
|
779 | - fixed problem with errors on calling raw/raw_files/annotate functions | |
780 | with unknown revisions |
|
780 | with unknown revisions | |
781 | - fixed returned rawfiles attachment names with international characters |
|
781 | - fixed returned rawfiles attachment names with international characters | |
782 | - cleaned out docs, big thanks to Jason Harris |
|
782 | - cleaned out docs, big thanks to Jason Harris | |
783 |
|
783 | |||
784 | 1.1.4 (**2011-02-19**) |
|
784 | 1.1.4 (**2011-02-19**) | |
785 | ---------------------- |
|
785 | ---------------------- | |
786 |
|
786 | |||
787 | news |
|
787 | news | |
788 | ++++ |
|
788 | ++++ | |
789 |
|
789 | |||
790 | fixes |
|
790 | fixes | |
791 | +++++ |
|
791 | +++++ | |
792 |
|
792 | |||
793 | - fixed formencode import problem on settings page that caused a server crash |
|
793 | - fixed formencode import problem on settings page that caused a server crash | |
794 | when that page was accessed first after server start |
|
794 | when that page was accessed first after server start | |
795 | - journal fixes |
|
795 | - journal fixes | |
796 | - fixed option to access repository just by entering http://server/<repo_name> |
|
796 | - fixed option to access repository just by entering http://server/<repo_name> | |
797 |
|
797 | |||
798 | 1.1.3 (**2011-02-16**) |
|
798 | 1.1.3 (**2011-02-16**) | |
799 | ---------------------- |
|
799 | ---------------------- | |
800 |
|
800 | |||
801 | news |
|
801 | news | |
802 | ++++ |
|
802 | ++++ | |
803 |
|
803 | |||
804 | - implemented #102 allowing the '.' character in username |
|
804 | - implemented #102 allowing the '.' character in username | |
805 | - added option to access repository just by entering http://server/<repo_name> |
|
805 | - added option to access repository just by entering http://server/<repo_name> | |
806 | - celery task ignores result for better performance |
|
806 | - celery task ignores result for better performance | |
807 |
|
807 | |||
808 | fixes |
|
808 | fixes | |
809 | +++++ |
|
809 | +++++ | |
810 |
|
810 | |||
811 | - fixed ehlo command and non auth mail servers on smtp_lib. Thanks to |
|
811 | - fixed ehlo command and non auth mail servers on smtp_lib. Thanks to | |
812 | apollo13 and Johan Walles |
|
812 | apollo13 and Johan Walles | |
813 | - small fixes in journal |
|
813 | - small fixes in journal | |
814 | - fixed problems with getting setting for celery from .ini files |
|
814 | - fixed problems with getting setting for celery from .ini files | |
815 | - registration, password reset and login boxes share the same title as main |
|
815 | - registration, password reset and login boxes share the same title as main | |
816 | application now |
|
816 | application now | |
817 | - fixed #113: too high permissions to fork a repository |
|
817 | - fixed #113: too high permissions to fork a repository | |
818 | - fixed problem with '[' chars in commit messages in journal |
|
818 | - fixed problem with '[' chars in commit messages in journal | |
819 | - removed issue with space inside renamed repository after deletion |
|
819 | - removed issue with space inside renamed repository after deletion | |
820 | - db transaction fixes when filesystem repository creation failed |
|
820 | - db transaction fixes when filesystem repository creation failed | |
821 | - fixed #106 relation issues on databases different than sqlite |
|
821 | - fixed #106 relation issues on databases different than sqlite | |
822 | - fixed static files paths links to use of url() method |
|
822 | - fixed static files paths links to use of url() method | |
823 |
|
823 | |||
824 | 1.1.2 (**2011-01-12**) |
|
824 | 1.1.2 (**2011-01-12**) | |
825 | ---------------------- |
|
825 | ---------------------- | |
826 |
|
826 | |||
827 | news |
|
827 | news | |
828 | ++++ |
|
828 | ++++ | |
829 |
|
829 | |||
830 |
|
830 | |||
831 | fixes |
|
831 | fixes | |
832 | +++++ |
|
832 | +++++ | |
833 |
|
833 | |||
834 | - fixes #98 protection against float division of percentage stats |
|
834 | - fixes #98 protection against float division of percentage stats | |
835 | - fixed graph bug |
|
835 | - fixed graph bug | |
836 | - forced webhelpers version since it was making troubles during installation |
|
836 | - forced webhelpers version since it was making troubles during installation | |
837 |
|
837 | |||
838 | 1.1.1 (**2011-01-06**) |
|
838 | 1.1.1 (**2011-01-06**) | |
839 | ---------------------- |
|
839 | ---------------------- | |
840 |
|
840 | |||
841 | news |
|
841 | news | |
842 | ++++ |
|
842 | ++++ | |
843 |
|
843 | |||
844 | - added force https option into ini files for easier https usage (no need to |
|
844 | - added force https option into ini files for easier https usage (no need to | |
845 | set server headers with this option); see the example after this list |
|
845 | set server headers with this option); see the example after this list | |
846 | - small css updates |
|
846 | - small css updates | |
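
A minimal sketch of the corresponding ini setting; the exact option name
(``force_https``) and the ``[app:main]`` section are assumptions based on later
config templates, not taken from this entry::

    [app:main]
    # assumed option name; when enabled, RhodeCode generates https urls and
    # redirects without relying on the front-end server setting extra headers
    force_https = true
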
847 |
|
847 | |||
848 | fixes |
|
848 | fixes | |
849 | +++++ |
|
849 | +++++ | |
850 |
|
850 | |||
851 | - fixed #96 redirect loop on files view on repositories without changesets |
|
851 | - fixed #96 redirect loop on files view on repositories without changesets | |
852 | - fixed #97 unicode string passed into server header in special cases (mod_wsgi) |
|
852 | - fixed #97 unicode string passed into server header in special cases (mod_wsgi) | |
853 | and server crashed with errors |
|
853 | and server crashed with errors | |
854 | - fixed large tooltips problems on main page |
|
854 | - fixed large tooltips problems on main page | |
855 | - fixed #92 whoosh indexer is more error proof |
|
855 | - fixed #92 whoosh indexer is more error proof | |
856 |
|
856 | |||
857 | 1.1.0 (**2010-12-18**) |
|
857 | 1.1.0 (**2010-12-18**) | |
858 | ---------------------- |
|
858 | ---------------------- | |
859 |
|
859 | |||
860 | news |
|
860 | news | |
861 | ++++ |
|
861 | ++++ | |
862 |
|
862 | |||
863 | - rewrite of internals for vcs >=0.1.10 |
|
863 | - rewrite of internals for vcs >=0.1.10 | |
864 | - uses mercurial 1.7 with dotencode disabled for maintaining compatibility |
|
864 | - uses mercurial 1.7 with dotencode disabled for maintaining compatibility | |
865 | with older clients |
|
865 | with older clients | |
866 | - anonymous access, authentication via ldap |
|
866 | - anonymous access, authentication via ldap | |
867 | - performance upgrade for cached repos list - each repository has its own |
|
867 | - performance upgrade for cached repos list - each repository has its own | |
868 | cache that's invalidated when needed. |
|
868 | cache that's invalidated when needed. | |
869 | - performance upgrades on repositories with large amount of commits (20K+) |
|
869 | - performance upgrades on repositories with large amount of commits (20K+) | |
870 | - main page quick filter for filtering repositories |
|
870 | - main page quick filter for filtering repositories | |
871 | - user dashboards with ability to follow chosen repositories actions |
|
871 | - user dashboards with ability to follow chosen repositories actions | |
872 | - sends email to admin on new user registration |
|
872 | - sends email to admin on new user registration | |
873 | - added cache/statistics reset options into repository settings |
|
873 | - added cache/statistics reset options into repository settings | |
874 | - more detailed action logger (based on hooks) with pushed changesets lists |
|
874 | - more detailed action logger (based on hooks) with pushed changesets lists | |
875 | and options to disable those hooks from admin panel |
|
875 | and options to disable those hooks from admin panel | |
876 | - introduced new enhanced changelog for merges that shows more accurate results |
|
876 | - introduced new enhanced changelog for merges that shows more accurate results | |
877 | - new improved and faster code stats (based on pygments lexers mapping tables, |
|
877 | - new improved and faster code stats (based on pygments lexers mapping tables, | |
878 | showing up to 10 trending sources for each repository). Additionally stats |
|
878 | showing up to 10 trending sources for each repository). Additionally stats | |
879 | can be disabled in repository settings. |
|
879 | can be disabled in repository settings. | |
880 | - gui optimizations, fixed application width to 1024px |
|
880 | - gui optimizations, fixed application width to 1024px | |
881 | - added cut off (for large files/changesets) limit into config files |
|
881 | - added cut off (for large files/changesets) limit into config files | |
882 | - whoosh, celeryd, upgrade moved to paster command |
|
882 | - whoosh, celeryd, upgrade moved to paster command | |
883 | - database backends other than sqlite can be used |
|
883 | - database backends other than sqlite can be used | |
884 |
|
884 | |||
885 | fixes |
|
885 | fixes | |
886 | +++++ |
|
886 | +++++ | |
887 |
|
887 | |||
888 | - fixes #61 forked repo was showing only after cache expired |
|
888 | - fixes #61 forked repo was showing only after cache expired | |
889 | - fixes #76 no confirmation on user deletes |
|
889 | - fixes #76 no confirmation on user deletes | |
890 | - fixes #66 Name field misspelled |
|
890 | - fixes #66 Name field misspelled | |
891 | - fixes #72 block user removal when he owns repositories |
|
891 | - fixes #72 block user removal when he owns repositories | |
892 | - fixes #69 added password confirmation fields |
|
892 | - fixes #69 added password confirmation fields | |
893 | - fixes #87 RhodeCode crashes occasionally on updating repository owner |
|
893 | - fixes #87 RhodeCode crashes occasionally on updating repository owner | |
894 | - fixes #82 broken annotations on files with more than 1 blank line at the end |
|
894 | - fixes #82 broken annotations on files with more than 1 blank line at the end | |
895 | - a lot of fixes and tweaks for file browser |
|
895 | - a lot of fixes and tweaks for file browser | |
896 | - fixed detached session issues |
|
896 | - fixed detached session issues | |
897 | - fixed issue where a user with no repos would see all repos listed in my account |
|
897 | - fixed issue where a user with no repos would see all repos listed in my account | |
898 | - fixed ui() instance bug when global hgrc settings were loaded for the server |
|
898 | - fixed ui() instance bug when global hgrc settings were loaded for the server | |
899 | instance and all hgrc options were merged with our db ui() object |
|
899 | instance and all hgrc options were merged with our db ui() object | |
900 | - numerous small bugfixes |
|
900 | - numerous small bugfixes | |
901 |
|
901 | |||
902 | (special thanks to TkSoh for detailed feedback) |
|
902 | (special thanks to TkSoh for detailed feedback) | |
903 |
|
903 | |||
904 |
|
904 | |||
905 | 1.0.2 (**2010-11-12**) |
|
905 | 1.0.2 (**2010-11-12**) | |
906 | ---------------------- |
|
906 | ---------------------- | |
907 |
|
907 | |||
908 | news |
|
908 | news | |
909 | ++++ |
|
909 | ++++ | |
910 |
|
910 | |||
911 | - tested under python2.7 |
|
911 | - tested under python2.7 | |
912 | - bumped sqlalchemy and celery versions |
|
912 | - bumped sqlalchemy and celery versions | |
913 |
|
913 | |||
914 | fixes |
|
914 | fixes | |
915 | +++++ |
|
915 | +++++ | |
916 |
|
916 | |||
917 | - fixed #59 missing graph.js |
|
917 | - fixed #59 missing graph.js | |
918 | - fixed repo_size crash when repository had broken symlinks |
|
918 | - fixed repo_size crash when repository had broken symlinks | |
919 | - fixed python2.5 crashes. |
|
919 | - fixed python2.5 crashes. | |
920 |
|
920 | |||
921 |
|
921 | |||
922 | 1.0.1 (**2010-11-10**) |
|
922 | 1.0.1 (**2010-11-10**) | |
923 | ---------------------- |
|
923 | ---------------------- | |
924 |
|
924 | |||
925 | news |
|
925 | news | |
926 | ++++ |
|
926 | ++++ | |
927 |
|
927 | |||
928 | - small css updates |
|
928 | - small css updates | |
929 |
|
929 | |||
930 | fixes |
|
930 | fixes | |
931 | +++++ |
|
931 | +++++ | |
932 |
|
932 | |||
933 | - fixed #53 python2.5 incompatible enumerate calls |
|
933 | - fixed #53 python2.5 incompatible enumerate calls | |
934 | - fixed #52 disable mercurial extension for web |
|
934 | - fixed #52 disable mercurial extension for web | |
935 | - fixed #51 deleting repositories didn't delete their dependent objects |
|
935 | - fixed #51 deleting repositories didn't delete their dependent objects | |
936 |
|
936 | |||
937 |
|
937 | |||
938 | 1.0.0 (**2010-11-02**) |
|
938 | 1.0.0 (**2010-11-02**) | |
939 | ---------------------- |
|
939 | ---------------------- | |
940 |
|
940 | |||
941 | - security bugfix simplehg wasn't checking for permissions on commands |
|
941 | - security bugfix simplehg wasn't checking for permissions on commands | |
942 | other than pull or push. |
|
942 | other than pull or push. | |
943 | - fixed doubled messages after push or pull in admin journal |
|
943 | - fixed doubled messages after push or pull in admin journal | |
944 | - templating and css corrections, fixed repo switcher on chrome, updated titles |
|
944 | - templating and css corrections, fixed repo switcher on chrome, updated titles | |
945 | - admin menu accessible from options menu on repository view |
|
945 | - admin menu accessible from options menu on repository view | |
946 | - permissions cached queries |
|
946 | - permissions cached queries | |
947 |
|
947 | |||
948 | 1.0.0rc4 (**2010-10-12**) |
|
948 | 1.0.0rc4 (**2010-10-12**) | |
949 | -------------------------- |
|
949 | -------------------------- | |
950 |
|
950 | |||
951 | - fixed python2.5 missing simplejson imports (thanks to Jens Bäckman) |
|
951 | - fixed python2.5 missing simplejson imports (thanks to Jens Bäckman) | |
952 | - removed cache_manager settings from sqlalchemy meta |
|
952 | - removed cache_manager settings from sqlalchemy meta | |
953 | - added sqlalchemy cache settings to ini files |
|
953 | - added sqlalchemy cache settings to ini files | |
954 | - validated password length and added a second try on failure to paster setup-app |
|
954 | - validated password length and added a second try on failure to paster setup-app | |
955 | - fixed setup showing the database destroy prompt even when there was no db |
|
955 | - fixed setup showing the database destroy prompt even when there was no db | |
956 |
|
956 | |||
957 |
|
957 | |||
958 | 1.0.0rc3 (**2010-10-11**) |
|
958 | 1.0.0rc3 (**2010-10-11**) | |
959 | ------------------------- |
|
959 | ------------------------- | |
960 |
|
960 | |||
961 | - fixed i18n during installation. |
|
961 | - fixed i18n during installation. | |
962 |
|
962 | |||
963 | 1.0.0rc2 (**2010-10-11**) |
|
963 | 1.0.0rc2 (**2010-10-11**) | |
964 | ------------------------- |
|
964 | ------------------------- | |
965 |
|
965 | |||
966 | - Disabled dirsize in file browser; it's causing a nasty bug when dir renames |
|
966 | - Disabled dirsize in file browser; it's causing a nasty bug when dir renames | |
967 | occur. After vcs is fixed it'll be put back again. |
|
967 | occur. After vcs is fixed it'll be put back again. | |
968 | - templating/css rewrites, optimized css. |
|
968 | - templating/css rewrites, optimized css. |
@@ -1,648 +1,648 b'' | |||||
1 | """ |
|
1 | """ | |
2 | Routes configuration |
|
2 | Routes configuration | |
3 |
|
3 | |||
4 | The more specific and detailed routes should be defined first so they |
|
4 | The more specific and detailed routes should be defined first so they | |
5 | may take precedent over the more generic routes. For more information |
|
5 | may take precedent over the more generic routes. For more information | |
6 | refer to the routes manual at http://routes.groovie.org/docs/ |
|
6 | refer to the routes manual at http://routes.groovie.org/docs/ | |
7 | """ |
|
7 | """ | |
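# Editor's note -- a minimal, standalone sketch (not part of this module) of the
# ordering rule described in the docstring above: a specific path such as
# '/repos/new' has to be connected before a catch-all pattern like
# '/repos/{repo_name:.*?}', otherwise the pattern would swallow it.
from routes import Mapper

_m = Mapper()
_m.minimization = False
_m.connect('new_repo', '/repos/new', controller='repos', action='new')
_m.connect('repo', '/repos/{repo_name:.*?}', controller='repos', action='show')
_m.create_regs(['repos'])

print _m.match('/repos/new')          # hits the specific route (action='new')
print _m.match('/repos/docs/manual')  # falls through to the generic pattern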
8 | from __future__ import with_statement |
|
8 | from __future__ import with_statement | |
9 | from routes import Mapper |
|
9 | from routes import Mapper | |
10 |
|
10 | |||
11 | # prefix for non repository related links needs to be prefixed with `/` |
|
11 | # prefix for non repository related links needs to be prefixed with `/` | |
12 | ADMIN_PREFIX = '/_admin' |
|
12 | ADMIN_PREFIX = '/_admin' | |
13 |
|
13 | |||
14 |
|
14 | |||
15 | def make_map(config): |
|
15 | def make_map(config): | |
16 | """Create, configure and return the routes Mapper""" |
|
16 | """Create, configure and return the routes Mapper""" | |
17 | rmap = Mapper(directory=config['pylons.paths']['controllers'], |
|
17 | rmap = Mapper(directory=config['pylons.paths']['controllers'], | |
18 | always_scan=config['debug']) |
|
18 | always_scan=config['debug']) | |
19 | rmap.minimization = False |
|
19 | rmap.minimization = False | |
20 | rmap.explicit = False |
|
20 | rmap.explicit = False | |
21 |
|
21 | |||
22 | from rhodecode.lib.utils import is_valid_repo |
|
22 | from rhodecode.lib.utils import is_valid_repo | |
23 | from rhodecode.lib.utils import is_valid_repos_group |
|
23 | from rhodecode.lib.utils import is_valid_repos_group | |
24 |
|
24 | |||
25 | def check_repo(environ, match_dict): |
|
25 | def check_repo(environ, match_dict): | |
26 | """ |
|
26 | """ | |
27 | check for valid repository for proper 404 handling |
|
27 | check for valid repository for proper 404 handling | |
28 |
|
28 | |||
29 | :param environ: |
|
29 | :param environ: | |
30 | :param match_dict: |
|
30 | :param match_dict: | |
31 | """ |
|
31 | """ | |
32 | from rhodecode.model.db import Repository |
|
32 | from rhodecode.model.db import Repository | |
33 | repo_name = match_dict.get('repo_name') |
|
33 | repo_name = match_dict.get('repo_name') | |
34 |
|
34 | |||
35 | if match_dict.get('f_path'): |
|
35 | if match_dict.get('f_path'): | |
36 | #fix for multiple initial slashes that causes errors |
|
36 | #fix for multiple initial slashes that causes errors | |
37 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') |
|
37 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') | |
38 |
|
38 | |||
39 | try: |
|
39 | try: | |
40 | by_id = repo_name.split('_') |
|
40 | by_id = repo_name.split('_') | |
41 | if len(by_id) == 2 and by_id[1].isdigit() and by_id[0] == '': |
|
41 | if len(by_id) == 2 and by_id[1].isdigit() and by_id[0] == '': | |
42 | repo_name = Repository.get(by_id[1]).repo_name |
|
42 | repo_name = Repository.get(by_id[1]).repo_name | |
43 | match_dict['repo_name'] = repo_name |
|
43 | match_dict['repo_name'] = repo_name | |
44 | except: |
|
44 | except: | |
45 | pass |
|
45 | pass | |
46 |
|
46 | |||
47 | return is_valid_repo(repo_name, config['base_path']) |
|
47 | return is_valid_repo(repo_name, config['base_path']) | |
48 |
|
48 | |||
49 | def check_group(environ, match_dict): |
|
49 | def check_group(environ, match_dict): | |
50 | """ |
|
50 | """ | |
51 | check for valid repository group for proper 404 handling |
|
51 | check for valid repository group for proper 404 handling | |
52 |
|
52 | |||
53 | :param environ: |
|
53 | :param environ: | |
54 | :param match_dict: |
|
54 | :param match_dict: | |
55 | """ |
|
55 | """ | |
56 | repos_group_name = match_dict.get('group_name') |
|
56 | repos_group_name = match_dict.get('group_name') | |
57 | return is_valid_repos_group(repos_group_name, config['base_path']) |
|
57 | return is_valid_repos_group(repos_group_name, config['base_path']) | |
58 |
|
58 | |||
59 | def check_int(environ, match_dict): |
|
59 | def check_int(environ, match_dict): | |
60 | return match_dict.get('id').isdigit() |
|
60 | return match_dict.get('id').isdigit() | |
61 |
|
61 | |||
62 | # The ErrorController route (handles 404/500 error pages); it should |
|
62 | # The ErrorController route (handles 404/500 error pages); it should | |
63 | # likely stay at the top, ensuring it can always be resolved |
|
63 | # likely stay at the top, ensuring it can always be resolved | |
64 | rmap.connect('/error/{action}', controller='error') |
|
64 | rmap.connect('/error/{action}', controller='error') | |
65 | rmap.connect('/error/{action}/{id}', controller='error') |
|
65 | rmap.connect('/error/{action}/{id}', controller='error') | |
66 |
|
66 | |||
67 | #========================================================================== |
|
67 | #========================================================================== | |
68 | # CUSTOM ROUTES HERE |
|
68 | # CUSTOM ROUTES HERE | |
69 | #========================================================================== |
|
69 | #========================================================================== | |
70 |
|
70 | |||
71 | #MAIN PAGE |
|
71 | #MAIN PAGE | |
72 | rmap.connect('home', '/', controller='home', action='index') |
|
72 | rmap.connect('home', '/', controller='home', action='index') | |
73 | rmap.connect('repo_switcher', '/repos', controller='home', |
|
73 | rmap.connect('repo_switcher', '/repos', controller='home', | |
74 | action='repo_switcher') |
|
74 | action='repo_switcher') | |
75 | rmap.connect('branch_tag_switcher', '/branches-tags/{repo_name:.*?}', |
|
75 | rmap.connect('branch_tag_switcher', '/branches-tags/{repo_name:.*?}', | |
76 | controller='home', action='branch_tag_switcher') |
|
76 | controller='home', action='branch_tag_switcher') | |
77 | rmap.connect('bugtracker', |
|
77 | rmap.connect('bugtracker', | |
78 | "http://bitbucket.org/marcinkuzminski/rhodecode/issues", |
|
78 | "http://bitbucket.org/marcinkuzminski/rhodecode/issues", | |
79 | _static=True) |
|
79 | _static=True) | |
80 | rmap.connect('rst_help', |
|
80 | rmap.connect('rst_help', | |
81 | "http://docutils.sourceforge.net/docs/user/rst/quickref.html", |
|
81 | "http://docutils.sourceforge.net/docs/user/rst/quickref.html", | |
82 | _static=True) |
|
82 | _static=True) | |
83 | rmap.connect('rhodecode_official', "http://rhodecode.org", _static=True) |
|
83 | rmap.connect('rhodecode_official', "http://rhodecode.org", _static=True) | |
84 |
|
84 | |||
85 | #ADMIN REPOSITORY REST ROUTES |
|
85 | #ADMIN REPOSITORY REST ROUTES | |
86 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
86 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
87 | controller='admin/repos') as m: |
|
87 | controller='admin/repos') as m: | |
88 | m.connect("repos", "/repos", |
|
88 | m.connect("repos", "/repos", | |
89 | action="create", conditions=dict(method=["POST"])) |
|
89 | action="create", conditions=dict(method=["POST"])) | |
90 | m.connect("repos", "/repos", |
|
90 | m.connect("repos", "/repos", | |
91 | action="index", conditions=dict(method=["GET"])) |
|
91 | action="index", conditions=dict(method=["GET"])) | |
92 | m.connect("formatted_repos", "/repos.{format}", |
|
92 | m.connect("formatted_repos", "/repos.{format}", | |
93 | action="index", |
|
93 | action="index", | |
94 | conditions=dict(method=["GET"])) |
|
94 | conditions=dict(method=["GET"])) | |
95 | m.connect("new_repo", "/repos/new", |
|
95 | m.connect("new_repo", "/repos/new", | |
96 | action="new", conditions=dict(method=["GET"])) |
|
96 | action="new", conditions=dict(method=["GET"])) | |
97 | m.connect("formatted_new_repo", "/repos/new.{format}", |
|
97 | m.connect("formatted_new_repo", "/repos/new.{format}", | |
98 | action="new", conditions=dict(method=["GET"])) |
|
98 | action="new", conditions=dict(method=["GET"])) | |
99 | m.connect("/repos/{repo_name:.*?}", |
|
99 | m.connect("/repos/{repo_name:.*?}", | |
100 | action="update", conditions=dict(method=["PUT"], |
|
100 | action="update", conditions=dict(method=["PUT"], | |
101 | function=check_repo)) |
|
101 | function=check_repo)) | |
102 | m.connect("/repos/{repo_name:.*?}", |
|
102 | m.connect("/repos/{repo_name:.*?}", | |
103 | action="delete", conditions=dict(method=["DELETE"], |
|
103 | action="delete", conditions=dict(method=["DELETE"], | |
104 | function=check_repo)) |
|
104 | function=check_repo)) | |
105 | # no longer used: |
|
105 | # no longer used: | |
106 | m.connect("edit_repo_admin", "/repos/{repo_name:.*?}/edit", |
|
106 | m.connect("edit_repo_admin", "/repos/{repo_name:.*?}/edit", | |
107 | action="edit", conditions=dict(method=["GET"], |
|
107 | action="edit", conditions=dict(method=["GET"], | |
108 | function=check_repo)) |
|
108 | function=check_repo)) | |
109 | m.connect("formatted_edit_repo", "/repos/{repo_name:.*?}.{format}/edit", |
|
109 | m.connect("formatted_edit_repo", "/repos/{repo_name:.*?}.{format}/edit", | |
110 | action="edit", conditions=dict(method=["GET"], |
|
110 | action="edit", conditions=dict(method=["GET"], | |
111 | function=check_repo)) |
|
111 | function=check_repo)) | |
112 | m.connect("repo", "/repos/{repo_name:.*?}", |
|
112 | m.connect("repo", "/repos/{repo_name:.*?}", | |
113 | action="show", conditions=dict(method=["GET"], |
|
113 | action="show", conditions=dict(method=["GET"], | |
114 | function=check_repo)) |
|
114 | function=check_repo)) | |
115 | m.connect("formatted_repo", "/repos/{repo_name:.*?}.{format}", |
|
115 | m.connect("formatted_repo", "/repos/{repo_name:.*?}.{format}", | |
116 | action="show", conditions=dict(method=["GET"], |
|
116 | action="show", conditions=dict(method=["GET"], | |
117 | function=check_repo)) |
|
117 | function=check_repo)) | |
118 | #ajax delete repo perm user |
|
118 | #ajax delete repo perm user | |
119 | m.connect('delete_repo_user', "/repos_delete_user/{repo_name:.*?}", |
|
119 | m.connect('delete_repo_user', "/repos_delete_user/{repo_name:.*?}", | |
120 | action="delete_perm_user", |
|
120 | action="delete_perm_user", | |
121 | conditions=dict(method=["DELETE"], function=check_repo)) |
|
121 | conditions=dict(method=["DELETE"], function=check_repo)) | |
122 |
|
122 | |||
123 | #ajax delete repo perm users_group |
|
123 | #ajax delete repo perm users_group | |
124 | m.connect('delete_repo_users_group', |
|
124 | m.connect('delete_repo_users_group', | |
125 | "/repos_delete_users_group/{repo_name:.*?}", |
|
125 | "/repos_delete_users_group/{repo_name:.*?}", | |
126 | action="delete_perm_users_group", |
|
126 | action="delete_perm_users_group", | |
127 | conditions=dict(method=["DELETE"], function=check_repo)) |
|
127 | conditions=dict(method=["DELETE"], function=check_repo)) | |
128 |
|
128 | |||
129 | #settings actions |
|
129 | #settings actions | |
130 | m.connect('repo_stats', "/repos_stats/{repo_name:.*?}", |
|
130 | m.connect('repo_stats', "/repos_stats/{repo_name:.*?}", | |
131 | action="repo_stats", conditions=dict(method=["DELETE"], |
|
131 | action="repo_stats", conditions=dict(method=["DELETE"], | |
132 | function=check_repo)) |
|
132 | function=check_repo)) | |
133 | m.connect('repo_cache', "/repos_cache/{repo_name:.*?}", |
|
133 | m.connect('repo_cache', "/repos_cache/{repo_name:.*?}", | |
134 | action="repo_cache", conditions=dict(method=["DELETE"], |
|
134 | action="repo_cache", conditions=dict(method=["DELETE"], | |
135 | function=check_repo)) |
|
135 | function=check_repo)) | |
136 | m.connect('repo_public_journal', "/repos_public_journal/{repo_name:.*?}", |
|
136 | m.connect('repo_public_journal', "/repos_public_journal/{repo_name:.*?}", | |
137 | action="repo_public_journal", conditions=dict(method=["PUT"], |
|
137 | action="repo_public_journal", conditions=dict(method=["PUT"], | |
138 | function=check_repo)) |
|
138 | function=check_repo)) | |
139 | m.connect('repo_pull', "/repo_pull/{repo_name:.*?}", |
|
139 | m.connect('repo_pull', "/repo_pull/{repo_name:.*?}", | |
140 | action="repo_pull", conditions=dict(method=["PUT"], |
|
140 | action="repo_pull", conditions=dict(method=["PUT"], | |
141 | function=check_repo)) |
|
141 | function=check_repo)) | |
142 | m.connect('repo_as_fork', "/repo_as_fork/{repo_name:.*?}", |
|
142 | m.connect('repo_as_fork', "/repo_as_fork/{repo_name:.*?}", | |
143 | action="repo_as_fork", conditions=dict(method=["PUT"], |
|
143 | action="repo_as_fork", conditions=dict(method=["PUT"], | |
144 | function=check_repo)) |
|
144 | function=check_repo)) | |
145 | m.connect('repo_locking', "/repo_locking/{repo_name:.*?}", |
|
145 | m.connect('repo_locking', "/repo_locking/{repo_name:.*?}", | |
146 | action="repo_locking", conditions=dict(method=["PUT"], |
|
146 | action="repo_locking", conditions=dict(method=["PUT"], | |
147 | function=check_repo)) |
|
147 | function=check_repo)) | |
148 | #repo fields |
|
148 | #repo fields | |
149 | m.connect('create_repo_fields', "/repo_fields/{repo_name:.*?}/new", |
|
149 | m.connect('create_repo_fields', "/repo_fields/{repo_name:.*?}/new", | |
150 | action="create_repo_field", conditions=dict(method=["PUT"], |
|
150 | action="create_repo_field", conditions=dict(method=["PUT"], | |
151 | function=check_repo)) |
|
151 | function=check_repo)) | |
152 |
|
152 | |||
153 | m.connect('delete_repo_fields', "/repo_fields/{repo_name:.*?}/{field_id}", |
|
153 | m.connect('delete_repo_fields', "/repo_fields/{repo_name:.*?}/{field_id}", | |
154 | action="delete_repo_field", conditions=dict(method=["DELETE"], |
|
154 | action="delete_repo_field", conditions=dict(method=["DELETE"], | |
155 | function=check_repo)) |
|
155 | function=check_repo)) | |
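# Editor's note -- a hedged sketch (not part of this module) of how the named
# routes registered above can be used to generate URLs with Routes'
# URLGenerator; the rhodecode.config.routing import path and the minimal
# config stub below are assumptions, not taken from this file.
from routes.util import URLGenerator
from rhodecode.config.routing import make_map

_config = {'pylons.paths': {'controllers': 'rhodecode/controllers'},
           'debug': False,
           'base_path': '/srv/repos'}
_url = URLGenerator(make_map(_config), {})

print _url('new_repo')                  # -> /_admin/repos/new
print _url('repo', repo_name='manual')  # -> /_admin/repos/manual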
156 |
|
156 | |||
157 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
157 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
158 | controller='admin/repos_groups') as m: |
|
158 | controller='admin/repos_groups') as m: | |
159 | m.connect("repos_groups", "/repos_groups", |
|
159 | m.connect("repos_groups", "/repos_groups", | |
160 | action="create", conditions=dict(method=["POST"])) |
|
160 | action="create", conditions=dict(method=["POST"])) | |
161 | m.connect("repos_groups", "/repos_groups", |
|
161 | m.connect("repos_groups", "/repos_groups", | |
162 | action="index", conditions=dict(method=["GET"])) |
|
162 | action="index", conditions=dict(method=["GET"])) | |
163 | m.connect("formatted_repos_groups", "/repos_groups.{format}", |
|
163 | m.connect("formatted_repos_groups", "/repos_groups.{format}", | |
164 | action="index", conditions=dict(method=["GET"])) |
|
164 | action="index", conditions=dict(method=["GET"])) | |
165 | m.connect("new_repos_group", "/repos_groups/new", |
|
165 | m.connect("new_repos_group", "/repos_groups/new", | |
166 | action="new", conditions=dict(method=["GET"])) |
|
166 | action="new", conditions=dict(method=["GET"])) | |
167 | m.connect("formatted_new_repos_group", "/repos_groups/new.{format}", |
|
167 | m.connect("formatted_new_repos_group", "/repos_groups/new.{format}", | |
168 | action="new", conditions=dict(method=["GET"])) |
|
168 | action="new", conditions=dict(method=["GET"])) | |
169 | m.connect("update_repos_group", "/repos_groups/{group_name:.*?}", |
|
169 | m.connect("update_repos_group", "/repos_groups/{group_name:.*?}", | |
170 | action="update", conditions=dict(method=["PUT"], |
|
170 | action="update", conditions=dict(method=["PUT"], | |
171 | function=check_group)) |
|
171 | function=check_group)) | |
172 | m.connect("delete_repos_group", "/repos_groups/{group_name:.*?}", |
|
172 | m.connect("delete_repos_group", "/repos_groups/{group_name:.*?}", | |
173 | action="delete", conditions=dict(method=["DELETE"], |
|
173 | action="delete", conditions=dict(method=["DELETE"], | |
174 | function=check_group)) |
|
174 | function=check_group)) | |
175 | m.connect("edit_repos_group", "/repos_groups/{group_name:.*?}/edit", |
|
175 | m.connect("edit_repos_group", "/repos_groups/{group_name:.*?}/edit", | |
176 | action="edit", conditions=dict(method=["GET"],)) |
|
176 | action="edit", conditions=dict(method=["GET"],)) | |
177 | m.connect("formatted_edit_repos_group", |
|
177 | m.connect("formatted_edit_repos_group", | |
178 | "/repos_groups/{group_name:.*?}.{format}/edit", |
|
178 | "/repos_groups/{group_name:.*?}.{format}/edit", | |
179 | action="edit", conditions=dict(method=["GET"], |
|
179 | action="edit", conditions=dict(method=["GET"], | |
180 | function=check_group)) |
|
180 | function=check_group)) | |
181 | m.connect("repos_group", "/repos_groups/{group_name:.*?}", |
|
181 | m.connect("repos_group", "/repos_groups/{group_name:.*?}", | |
182 | action="show", conditions=dict(method=["GET"], |
|
182 | action="show", conditions=dict(method=["GET"], | |
183 | function=check_group)) |
|
183 | function=check_group)) | |
184 | m.connect("formatted_repos_group", "/repos_groups/{group_name:.*?}.{format}", |
|
184 | m.connect("formatted_repos_group", "/repos_groups/{group_name:.*?}.{format}", | |
185 | action="show", conditions=dict(method=["GET"], |
|
185 | action="show", conditions=dict(method=["GET"], | |
186 | function=check_group)) |
|
186 | function=check_group)) | |
187 | # ajax delete repos group perm user |
|
187 | # ajax delete repos group perm user | |
188 | m.connect('delete_repos_group_user_perm', |
|
188 | m.connect('delete_repos_group_user_perm', | |
189 | "/delete_repos_group_user_perm/{group_name:.*?}", |
|
189 | "/delete_repos_group_user_perm/{group_name:.*?}", | |
190 | action="delete_repos_group_user_perm", |
|
190 | action="delete_repos_group_user_perm", | |
191 | conditions=dict(method=["DELETE"], function=check_group)) |
|
191 | conditions=dict(method=["DELETE"], function=check_group)) | |
192 |
|
192 | |||
193 | # ajax delete repos group perm users_group |
|
193 | # ajax delete repos group perm users_group | |
194 | m.connect('delete_repos_group_users_group_perm', |
|
194 | m.connect('delete_repos_group_users_group_perm', | |
195 | "/delete_repos_group_users_group_perm/{group_name:.*?}", |
|
195 | "/delete_repos_group_users_group_perm/{group_name:.*?}", | |
196 | action="delete_repos_group_users_group_perm", |
|
196 | action="delete_repos_group_users_group_perm", | |
197 | conditions=dict(method=["DELETE"], function=check_group)) |
|
197 | conditions=dict(method=["DELETE"], function=check_group)) | |
198 |
|
198 | |||
199 | #ADMIN USER REST ROUTES |
|
199 | #ADMIN USER REST ROUTES | |
200 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
200 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
201 | controller='admin/users') as m: |
|
201 | controller='admin/users') as m: | |
202 | m.connect("users", "/users", |
|
202 | m.connect("users", "/users", | |
203 | action="create", conditions=dict(method=["POST"])) |
|
203 | action="create", conditions=dict(method=["POST"])) | |
204 | m.connect("users", "/users", |
|
204 | m.connect("users", "/users", | |
205 | action="index", conditions=dict(method=["GET"])) |
|
205 | action="index", conditions=dict(method=["GET"])) | |
206 | m.connect("formatted_users", "/users.{format}", |
|
206 | m.connect("formatted_users", "/users.{format}", | |
207 | action="index", conditions=dict(method=["GET"])) |
|
207 | action="index", conditions=dict(method=["GET"])) | |
208 | m.connect("new_user", "/users/new", |
|
208 | m.connect("new_user", "/users/new", | |
209 | action="new", conditions=dict(method=["GET"])) |
|
209 | action="new", conditions=dict(method=["GET"])) | |
210 | m.connect("formatted_new_user", "/users/new.{format}", |
|
210 | m.connect("formatted_new_user", "/users/new.{format}", | |
211 | action="new", conditions=dict(method=["GET"])) |
|
211 | action="new", conditions=dict(method=["GET"])) | |
212 | m.connect("update_user", "/users/{id}", |
|
212 | m.connect("update_user", "/users/{id}", | |
213 | action="update", conditions=dict(method=["PUT"])) |
|
213 | action="update", conditions=dict(method=["PUT"])) | |
214 | m.connect("delete_user", "/users/{id}", |
|
214 | m.connect("delete_user", "/users/{id}", | |
215 | action="delete", conditions=dict(method=["DELETE"])) |
|
215 | action="delete", conditions=dict(method=["DELETE"])) | |
216 | m.connect("edit_user", "/users/{id}/edit", |
|
216 | m.connect("edit_user", "/users/{id}/edit", | |
217 | action="edit", conditions=dict(method=["GET"])) |
|
217 | action="edit", conditions=dict(method=["GET"])) | |
218 | m.connect("formatted_edit_user", |
|
218 | m.connect("formatted_edit_user", | |
219 | "/users/{id}.{format}/edit", |
|
219 | "/users/{id}.{format}/edit", | |
220 | action="edit", conditions=dict(method=["GET"])) |
|
220 | action="edit", conditions=dict(method=["GET"])) | |
221 | m.connect("user", "/users/{id}", |
|
221 | m.connect("user", "/users/{id}", | |
222 | action="show", conditions=dict(method=["GET"])) |
|
222 | action="show", conditions=dict(method=["GET"])) | |
223 | m.connect("formatted_user", "/users/{id}.{format}", |
|
223 | m.connect("formatted_user", "/users/{id}.{format}", | |
224 | action="show", conditions=dict(method=["GET"])) |
|
224 | action="show", conditions=dict(method=["GET"])) | |
225 |
|
225 | |||
226 | #EXTRAS USER ROUTES |
|
226 | #EXTRAS USER ROUTES | |
227 | m.connect("user_perm", "/users_perm/{id}", |
|
227 | m.connect("user_perm", "/users_perm/{id}", | |
228 | action="update_perm", conditions=dict(method=["PUT"])) |
|
228 | action="update_perm", conditions=dict(method=["PUT"])) | |
229 | m.connect("user_emails", "/users_emails/{id}", |
|
229 | m.connect("user_emails", "/users_emails/{id}", | |
230 | action="add_email", conditions=dict(method=["PUT"])) |
|
230 | action="add_email", conditions=dict(method=["PUT"])) | |
231 | m.connect("user_emails_delete", "/users_emails/{id}", |
|
231 | m.connect("user_emails_delete", "/users_emails/{id}", | |
232 | action="delete_email", conditions=dict(method=["DELETE"])) |
|
232 | action="delete_email", conditions=dict(method=["DELETE"])) | |
233 | m.connect("user_ips", "/users_ips/{id}", |
|
233 | m.connect("user_ips", "/users_ips/{id}", | |
234 | action="add_ip", conditions=dict(method=["PUT"])) |
|
234 | action="add_ip", conditions=dict(method=["PUT"])) | |
235 | m.connect("user_ips_delete", "/users_ips/{id}", |
|
235 | m.connect("user_ips_delete", "/users_ips/{id}", | |
236 | action="delete_ip", conditions=dict(method=["DELETE"])) |
|
236 | action="delete_ip", conditions=dict(method=["DELETE"])) | |
237 |
|
237 | |||
238 | #ADMIN USER GROUPS REST ROUTES |
|
238 | #ADMIN USER GROUPS REST ROUTES | |
239 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
239 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
240 | controller='admin/users_groups') as m: |
|
240 | controller='admin/users_groups') as m: | |
241 | m.connect("users_groups", "/users_groups", |
|
241 | m.connect("users_groups", "/users_groups", | |
242 | action="create", conditions=dict(method=["POST"])) |
|
242 | action="create", conditions=dict(method=["POST"])) | |
243 | m.connect("users_groups", "/users_groups", |
|
243 | m.connect("users_groups", "/users_groups", | |
244 | action="index", conditions=dict(method=["GET"])) |
|
244 | action="index", conditions=dict(method=["GET"])) | |
245 | m.connect("formatted_users_groups", "/users_groups.{format}", |
|
245 | m.connect("formatted_users_groups", "/users_groups.{format}", | |
246 | action="index", conditions=dict(method=["GET"])) |
|
246 | action="index", conditions=dict(method=["GET"])) | |
247 | m.connect("new_users_group", "/users_groups/new", |
|
247 | m.connect("new_users_group", "/users_groups/new", | |
248 | action="new", conditions=dict(method=["GET"])) |
|
                  action="new", conditions=dict(method=["GET"]))
        m.connect("formatted_new_users_group", "/users_groups/new.{format}",
                  action="new", conditions=dict(method=["GET"]))
        m.connect("update_users_group", "/users_groups/{id}",
                  action="update", conditions=dict(method=["PUT"]))
        m.connect("delete_users_group", "/users_groups/{id}",
                  action="delete", conditions=dict(method=["DELETE"]))
        m.connect("edit_users_group", "/users_groups/{id}/edit",
                  action="edit", conditions=dict(method=["GET"]))
        m.connect("formatted_edit_users_group",
                  "/users_groups/{id}.{format}/edit",
                  action="edit", conditions=dict(method=["GET"]))
        m.connect("users_group", "/users_groups/{id}",
                  action="show", conditions=dict(method=["GET"]))
        m.connect("formatted_users_group", "/users_groups/{id}.{format}",
                  action="show", conditions=dict(method=["GET"]))

        #EXTRAS USER ROUTES
        m.connect("users_group_perm", "/users_groups_perm/{id}",
                  action="update_perm", conditions=dict(method=["PUT"]))

    #ADMIN GROUP REST ROUTES
    rmap.resource('group', 'groups',
                  controller='admin/groups', path_prefix=ADMIN_PREFIX)

    #ADMIN PERMISSIONS REST ROUTES
    rmap.resource('permission', 'permissions',
                  controller='admin/permissions', path_prefix=ADMIN_PREFIX)

    #ADMIN DEFAULTS REST ROUTES
    rmap.resource('default', 'defaults',
                  controller='admin/defaults', path_prefix=ADMIN_PREFIX)

    ##ADMIN LDAP SETTINGS
    rmap.connect('ldap_settings', '%s/ldap' % ADMIN_PREFIX,
                 controller='admin/ldap_settings', action='ldap_settings',
                 conditions=dict(method=["POST"]))

    rmap.connect('ldap_home', '%s/ldap' % ADMIN_PREFIX,
                 controller='admin/ldap_settings')

    #ADMIN SETTINGS REST ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/settings') as m:
        m.connect("admin_settings", "/settings",
                  action="create", conditions=dict(method=["POST"]))
        m.connect("admin_settings", "/settings",
                  action="index", conditions=dict(method=["GET"]))
        m.connect("formatted_admin_settings", "/settings.{format}",
                  action="index", conditions=dict(method=["GET"]))
        m.connect("admin_new_setting", "/settings/new",
                  action="new", conditions=dict(method=["GET"]))
        m.connect("formatted_admin_new_setting", "/settings/new.{format}",
                  action="new", conditions=dict(method=["GET"]))
        m.connect("/settings/{setting_id}",
                  action="update", conditions=dict(method=["PUT"]))
        m.connect("/settings/{setting_id}",
                  action="delete", conditions=dict(method=["DELETE"]))
        m.connect("admin_edit_setting", "/settings/{setting_id}/edit",
                  action="edit", conditions=dict(method=["GET"]))
        m.connect("formatted_admin_edit_setting",
                  "/settings/{setting_id}.{format}/edit",
                  action="edit", conditions=dict(method=["GET"]))
        m.connect("admin_setting", "/settings/{setting_id}",
                  action="show", conditions=dict(method=["GET"]))
        m.connect("formatted_admin_setting", "/settings/{setting_id}.{format}",
                  action="show", conditions=dict(method=["GET"]))
        m.connect("admin_settings_my_account", "/my_account",
                  action="my_account", conditions=dict(method=["GET"]))
        m.connect("admin_settings_my_account_update", "/my_account_update",
                  action="my_account_update", conditions=dict(method=["PUT"]))
        m.connect("admin_settings_create_repository", "/create_repository",
                  action="create_repository", conditions=dict(method=["GET"]))
        m.connect("admin_settings_my_repos", "/my_account/repos",
                  action="my_account_my_repos", conditions=dict(method=["GET"]))
        m.connect("admin_settings_my_pullrequests", "/my_account/pull_requests",
                  action="my_account_my_pullrequests", conditions=dict(method=["GET"]))

    #NOTIFICATION REST ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/notifications') as m:
        m.connect("notifications", "/notifications",
                  action="create", conditions=dict(method=["POST"]))
        m.connect("notifications", "/notifications",
                  action="index", conditions=dict(method=["GET"]))
        m.connect("notifications_mark_all_read", "/notifications/mark_all_read",
                  action="mark_all_read", conditions=dict(method=["GET"]))
        m.connect("formatted_notifications", "/notifications.{format}",
                  action="index", conditions=dict(method=["GET"]))
        m.connect("new_notification", "/notifications/new",
                  action="new", conditions=dict(method=["GET"]))
        m.connect("formatted_new_notification", "/notifications/new.{format}",
                  action="new", conditions=dict(method=["GET"]))
        m.connect("/notification/{notification_id}",
                  action="update", conditions=dict(method=["PUT"]))
        m.connect("/notification/{notification_id}",
                  action="delete", conditions=dict(method=["DELETE"]))
        m.connect("edit_notification", "/notification/{notification_id}/edit",
                  action="edit", conditions=dict(method=["GET"]))
        m.connect("formatted_edit_notification",
                  "/notification/{notification_id}.{format}/edit",
                  action="edit", conditions=dict(method=["GET"]))
        m.connect("notification", "/notification/{notification_id}",
                  action="show", conditions=dict(method=["GET"]))
        m.connect("formatted_notification", "/notifications/{notification_id}.{format}",
                  action="show", conditions=dict(method=["GET"]))

    #ADMIN MAIN PAGES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/admin') as m:
        m.connect('admin_home', '', action='index')
        m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
                  action='add_repo')

    #==========================================================================
    # API V2
    #==========================================================================
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='api/api') as m:
        m.connect('api', '/api')

    #USER JOURNAL
    rmap.connect('journal', '%s/journal' % ADMIN_PREFIX,
                 controller='journal', action='index')
    rmap.connect('journal_rss', '%s/journal/rss' % ADMIN_PREFIX,
                 controller='journal', action='journal_rss')
    rmap.connect('journal_atom', '%s/journal/atom' % ADMIN_PREFIX,
                 controller='journal', action='journal_atom')

    rmap.connect('public_journal', '%s/public_journal' % ADMIN_PREFIX,
                 controller='journal', action="public_journal")

    rmap.connect('public_journal_rss', '%s/public_journal/rss' % ADMIN_PREFIX,
                 controller='journal', action="public_journal_rss")

    rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % ADMIN_PREFIX,
                 controller='journal', action="public_journal_rss")

    rmap.connect('public_journal_atom',
                 '%s/public_journal/atom' % ADMIN_PREFIX, controller='journal',
                 action="public_journal_atom")

    rmap.connect('public_journal_atom_old',
                 '%s/public_journal_atom' % ADMIN_PREFIX, controller='journal',
                 action="public_journal_atom")

    rmap.connect('toggle_following', '%s/toggle_following' % ADMIN_PREFIX,
                 controller='journal', action='toggle_following',
                 conditions=dict(method=["POST"]))

    #SEARCH
    rmap.connect('search', '%s/search' % ADMIN_PREFIX, controller='search',)
    rmap.connect('search_repo_admin', '%s/search/{repo_name:.*}' % ADMIN_PREFIX,
                 controller='search',
                 conditions=dict(function=check_repo))
    rmap.connect('search_repo', '/{repo_name:.*?}/search',
                 controller='search',
                 conditions=dict(function=check_repo),
                 )

    #LOGIN/LOGOUT/REGISTER/SIGN IN
    rmap.connect('login_home', '%s/login' % ADMIN_PREFIX, controller='login')
    rmap.connect('logout_home', '%s/logout' % ADMIN_PREFIX, controller='login',
                 action='logout')

    rmap.connect('register', '%s/register' % ADMIN_PREFIX, controller='login',
                 action='register')

    rmap.connect('reset_password', '%s/password_reset' % ADMIN_PREFIX,
                 controller='login', action='password_reset')

    rmap.connect('reset_password_confirmation',
                 '%s/password_reset_confirmation' % ADMIN_PREFIX,
                 controller='login', action='password_reset_confirmation')

    #FEEDS
    rmap.connect('rss_feed_home', '/{repo_name:.*?}/feed/rss',
                 controller='feed', action='rss',
                 conditions=dict(function=check_repo))

    rmap.connect('atom_feed_home', '/{repo_name:.*?}/feed/atom',
                 controller='feed', action='atom',
                 conditions=dict(function=check_repo))

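The ``conditions=dict(function=check_repo)`` and ``check_group`` arguments used by the routes above and below rely on the Routes library's predicate support: a route only matches when the supplied function returns a truthy value, otherwise the mapper falls through to the next candidate route. The real ``check_repo`` used by this codebase is not shown in this listing; the following is only a hypothetical stand-in that illustrates the expected ``(environ, match_dict)`` signature::

    def check_repo_sketch(environ, match_dict):
        # Routes passes the WSGI environ and the dict of matched route
        # variables; returning False rejects this route so a later one
        # (for example repos_group_home) can try to match instead.
        repo_name = match_dict.get('repo_name', '')
        return bool(repo_name)
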
    #==========================================================================
    # REPOSITORY ROUTES
    #==========================================================================
    rmap.connect('summary_home', '/{repo_name:.*?}',
                 controller='summary',
                 conditions=dict(function=check_repo))

    rmap.connect('repo_size', '/{repo_name:.*?}/repo_size',
                 controller='summary', action='repo_size',
                 conditions=dict(function=check_repo))

    rmap.connect('repos_group_home', '/{group_name:.*}',
                 controller='admin/repos_groups', action="show_by_name",
                 conditions=dict(function=check_group))

    rmap.connect('changeset_home', '/{repo_name:.*?}/changeset/{revision}',
                 controller='changeset', revision='tip',
                 conditions=dict(function=check_repo))

    rmap.connect("edit_repo", "/{repo_name:.*?}/edit",
                 controller='admin/repos', action="edit",
                 conditions=dict(method=["GET"], function=check_repo)
                 )

    #still working url for backward compat.
    rmap.connect('raw_changeset_home_depraced',
                 '/{repo_name:.*?}/raw-changeset/{revision}',
                 controller='changeset', action='changeset_raw',
                 revision='tip', conditions=dict(function=check_repo))

    ## new URLs
    rmap.connect('changeset_raw_home',
                 '/{repo_name:.*?}/changeset-diff/{revision}',
                 controller='changeset', action='changeset_raw',
                 revision='tip', conditions=dict(function=check_repo))

    rmap.connect('changeset_patch_home',
                 '/{repo_name:.*?}/changeset-patch/{revision}',
                 controller='changeset', action='changeset_patch',
                 revision='tip', conditions=dict(function=check_repo))

    rmap.connect('changeset_download_home',
                 '/{repo_name:.*?}/changeset-download/{revision}',
                 controller='changeset', action='changeset_download',
                 revision='tip', conditions=dict(function=check_repo))

    rmap.connect('changeset_comment',
                 '/{repo_name:.*?}/changeset/{revision}/comment',
                 controller='changeset', revision='tip', action='comment',
                 conditions=dict(function=check_repo))

    rmap.connect('changeset_comment_delete',
                 '/{repo_name:.*?}/changeset/comment/{comment_id}/delete',
                 controller='changeset', action='delete_comment',
                 conditions=dict(function=check_repo, method=["DELETE"]))

    rmap.connect('changeset_info', '/changeset_info/{repo_name:.*?}/{revision}',
                 controller='changeset', action='changeset_info')

    rmap.connect('compare_url',
                 '/{repo_name:.*?}/compare/{org_ref_type}@{org_ref:.*?}...{other_ref_type}@{other_ref:.*?}',
                 controller='compare', action='index',
                 conditions=dict(function=check_repo),
                 requirements=dict(
                     org_ref_type='(branch|book|tag|rev|__other_ref_type__)',
                     other_ref_type='(branch|book|tag|rev|__org_ref_type__)')
                 )

    rmap.connect('pullrequest_home',
                 '/{repo_name:.*?}/pull-request/new', controller='pullrequests',
                 action='index', conditions=dict(function=check_repo,
                                                 method=["GET"]))

    rmap.connect('pullrequest',
                 '/{repo_name:.*?}/pull-request/new', controller='pullrequests',
                 action='create', conditions=dict(function=check_repo,
                                                  method=["POST"]))

    rmap.connect('pullrequest_show',
                 '/{repo_name:.*?}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='show', conditions=dict(function=check_repo,
                                                method=["GET"]))
    rmap.connect('pullrequest_update',
                 '/{repo_name:.*?}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='update', conditions=dict(function=check_repo,
                                                  method=["PUT"]))
    rmap.connect('pullrequest_delete',
                 '/{repo_name:.*?}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='delete', conditions=dict(function=check_repo,
                                                  method=["DELETE"]))

    rmap.connect('pullrequest_show_all',
                 '/{repo_name:.*?}/pull-request',
                 controller='pullrequests',
                 action='show_all', conditions=dict(function=check_repo,
                                                    method=["GET"]))

    rmap.connect('pullrequest_comment',
                 '/{repo_name:.*?}/pull-request-comment/{pull_request_id}',
                 controller='pullrequests',
                 action='comment', conditions=dict(function=check_repo,
                                                   method=["POST"]))

    rmap.connect('pullrequest_comment_delete',
                 '/{repo_name:.*?}/pull-request-comment/{comment_id}/delete',
                 controller='pullrequests', action='delete_comment',
                 conditions=dict(function=check_repo, method=["DELETE"]))

    rmap.connect('summary_home_summary', '/{repo_name:.*?}/summary',
                 controller='summary', conditions=dict(function=check_repo))

    rmap.connect('shortlog_home', '/{repo_name:.*?}/shortlog',
                 controller='shortlog', conditions=dict(function=check_repo))

    rmap.connect('shortlog_file_home', '/{repo_name:.*?}/shortlog/{revision}/{f_path:.*}',
                 controller='shortlog', f_path=None,
                 conditions=dict(function=check_repo))

    rmap.connect('branches_home', '/{repo_name:.*?}/branches',
                 controller='branches', conditions=dict(function=check_repo))

    rmap.connect('tags_home', '/{repo_name:.*?}/tags',
                 controller='tags', conditions=dict(function=check_repo))

    rmap.connect('bookmarks_home', '/{repo_name:.*?}/bookmarks',
                 controller='bookmarks', conditions=dict(function=check_repo))

    rmap.connect('changelog_home', '/{repo_name:.*?}/changelog',
                 controller='changelog', conditions=dict(function=check_repo))

    rmap.connect('changelog_details', '/{repo_name:.*?}/changelog_details/{cs}',
                 controller='changelog', action='changelog_details',
                 conditions=dict(function=check_repo))

    rmap.connect('files_home', '/{repo_name:.*?}/files/{revision}/{f_path:.*}',
                 controller='files', revision='tip', f_path='',
                 conditions=dict(function=check_repo))

    rmap.connect('files_history_home',
                 '/{repo_name:.*?}/history/{revision}/{f_path:.*}',
                 controller='files', action='history', revision='tip', f_path='',
                 conditions=dict(function=check_repo))

    rmap.connect('files_diff_home', '/{repo_name:.*?}/diff/{f_path:.*}',
                 controller='files', action='diff', revision='tip', f_path='',
                 conditions=dict(function=check_repo))

    rmap.connect('files_rawfile_home',
                 '/{repo_name:.*?}/rawfile/{revision}/{f_path:.*}',
                 controller='files', action='rawfile', revision='tip',
                 f_path='', conditions=dict(function=check_repo))

    rmap.connect('files_raw_home',
                 '/{repo_name:.*?}/raw/{revision}/{f_path:.*}',
                 controller='files', action='raw', revision='tip', f_path='',
                 conditions=dict(function=check_repo))

    rmap.connect('files_annotate_home',
                 '/{repo_name:.*?}/annotate/{revision}/{f_path:.*}',
                 controller='files', action='index', revision='tip',
                 f_path='', annotate=True, conditions=dict(function=check_repo))

    rmap.connect('files_edit_home',
                 '/{repo_name:.*?}/edit/{revision}/{f_path:.*}',
                 controller='files', action='edit', revision='tip',
                 f_path='', conditions=dict(function=check_repo))

    rmap.connect('files_add_home',
                 '/{repo_name:.*?}/add/{revision}/{f_path:.*}',
                 controller='files', action='add', revision='tip',
                 f_path='', conditions=dict(function=check_repo))

    rmap.connect('files_archive_home', '/{repo_name:.*?}/archive/{fname}',
                 controller='files', action='archivefile',
                 conditions=dict(function=check_repo))

    rmap.connect('files_nodelist_home',
                 '/{repo_name:.*?}/nodelist/{revision}/{f_path:.*}',
                 controller='files', action='nodelist',
                 conditions=dict(function=check_repo))

    rmap.connect('repo_settings_delete', '/{repo_name:.*?}/settings',
                 controller='settings', action="delete",
                 conditions=dict(method=["DELETE"], function=check_repo))

    rmap.connect('repo_settings_update', '/{repo_name:.*?}/settings',
                 controller='settings', action="update",
                 conditions=dict(method=["PUT"], function=check_repo))

    rmap.connect('repo_settings_home', '/{repo_name:.*?}/settings',
                 controller='settings', action='index',
                 conditions=dict(function=check_repo))

    rmap.connect('toggle_locking', "/{repo_name:.*?}/locking_toggle",
                 controller='settings', action="toggle_locking",
                 conditions=dict(method=["GET"], function=check_repo))

    rmap.connect('repo_fork_create_home', '/{repo_name:.*?}/fork',
                 controller='forks', action='fork_create',
                 conditions=dict(function=check_repo, method=["POST"]))

    rmap.connect('repo_fork_home', '/{repo_name:.*?}/fork',
                 controller='forks', action='fork',
                 conditions=dict(function=check_repo))

    rmap.connect('repo_forks_home', '/{repo_name:.*?}/forks',
                 controller='forks', action='forks',
                 conditions=dict(function=check_repo))

    rmap.connect('repo_followers_home', '/{repo_name:.*?}/followers',
                 controller='followers', action='followers',
                 conditions=dict(function=check_repo))

    return rmap
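Because each route above is registered under a name, controllers and templates can generate links through the returned mapper instead of hard-coding paths. A minimal sketch, assuming the standard Pylons ``url`` helper inside a request context and a repository called ``myrepo`` (both illustrative, not taken from this file)::

    from pylons import url

    url('summary_home', repo_name='myrepo')
    # -> '/myrepo'
    url('files_home', repo_name='myrepo', revision='tip', f_path='setup.py')
    # -> '/myrepo/files/tip/setup.py'
    url('pullrequest_show_all', repo_name='myrepo')
    # -> '/myrepo/pull-request'
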
@@ -1,2042 +1,2042 b''
# -*- coding: utf-8 -*-
"""
    rhodecode.model.db
    ~~~~~~~~~~~~~~~~~~

    Database Models for RhodeCode

    :created_on: Apr 08, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import os
import logging
import datetime
import traceback
import hashlib
import time
from collections import defaultdict

from sqlalchemy import *
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
from sqlalchemy.exc import DatabaseError
from beaker.cache import cache_region, region_invalidate
from webob.exc import HTTPNotFound

from pylons.i18n.translation import lazy_ugettext as _

from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.utils.helpers import get_scm
from rhodecode.lib.vcs.exceptions import VCSError
from rhodecode.lib.vcs.utils.lazy import LazyProperty

from rhodecode.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
    safe_unicode, remove_suffix, remove_prefix
from rhodecode.lib.compat import json
from rhodecode.lib.caching_query import FromCache

from rhodecode.model.meta import Base, Session

URL_SEP = '/'
log = logging.getLogger(__name__)

#==============================================================================
# BASE CLASSES
#==============================================================================

_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()


class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        l = []
        for k in self._get_keys():
            l.append((k, getattr(self, k),))
        return l

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound
        return res

    @classmethod
    def getAll(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        obj = cls.query().get(id_)
        Session().delete(obj)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            return safe_str(self.__unicode__())
        return '<DB:%s>' % (self.__class__.__name__)

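``BaseModel`` gives every mapped class the same minimal query and serialization surface, so controllers can treat models uniformly. A hedged usage sketch against the ``User`` model defined later in this file (the id value is made up, and a configured database session is assumed)::

    user = User.get(2)                    # primary-key lookup; returns None for falsy ids
    user = User.get_or_404(2)             # raises webob's HTTPNotFound for bad or missing ids
    data = user.get_dict()                # column name -> value, plus __json__() extras if defined
    user.populate_obj({'active': False})  # only keys matching real columns are applied
    everyone = User.getAll()
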
class RhodeCodeSetting(Base, BaseModel):
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)

    def __init__(self, k='', v=''):
        self.app_settings_name = k
        self.app_settings_value = v

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        if self.app_settings_name in ["ldap_active",
                                      "default_repo_enable_statistics",
                                      "default_repo_enable_locking",
                                      "default_repo_private",
                                      "default_repo_enable_downloads"]:
            v = str2bool(v)
        return v

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value
        )

    @classmethod
    def get_by_name(cls, key):
        return cls.query()\
            .filter(cls.app_settings_name == key).scalar()

    @classmethod
    def get_by_name_or_create(cls, key):
        res = cls.get_by_name(key)
        if not res:
            res = cls(key)
        return res

    @classmethod
    def get_app_settings(cls, cache=False):

        ret = cls.query()

        if cache:
            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))

        if not ret:
            raise Exception('Could not get application settings !')
        settings = {}
        for each in ret:
            settings['rhodecode_' + each.app_settings_name] = \
                each.app_settings_value

        return settings

    @classmethod
    def get_ldap_settings(cls, cache=False):
        ret = cls.query()\
            .filter(cls.app_settings_name.startswith('ldap_')).all()
        fd = {}
        for row in ret:
            fd.update({row.app_settings_name: row.app_settings_value})

        return fd

    @classmethod
    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
        ret = cls.query()\
            .filter(cls.app_settings_name.startswith('default_')).all()
        fd = {}
        for row in ret:
            key = row.app_settings_name
            if strip_prefix:
                key = remove_prefix(key, prefix='default_')
            fd.update({key: row.app_settings_value})

        return fd

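``RhodeCodeSetting`` stores one application option per row; the ``app_settings_value`` hybrid property runs the boolean-ish keys (``ldap_active`` and the ``default_repo_*`` flags) through ``str2bool`` on read and coerces every write to unicode. A hedged sketch of how these helpers are typically called (the ``'title'`` key is just an example name, and a configured session is assumed)::

    RhodeCodeSetting.get_by_name_or_create('title')                # fetch a row, or build an unsaved one
    RhodeCodeSetting.get_app_settings()                            # {'rhodecode_<name>': value, ...}
    RhodeCodeSetting.get_ldap_settings()                           # rows whose name starts with 'ldap_'
    RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)  # 'default_' prefix stripped from keys
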
246 | class RhodeCodeUi(Base, BaseModel): |
|
246 | class RhodeCodeUi(Base, BaseModel): | |
247 | __tablename__ = 'rhodecode_ui' |
|
247 | __tablename__ = 'rhodecode_ui' | |
248 | __table_args__ = ( |
|
248 | __table_args__ = ( | |
249 | UniqueConstraint('ui_key'), |
|
249 | UniqueConstraint('ui_key'), | |
250 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
250 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
251 | 'mysql_charset': 'utf8'} |
|
251 | 'mysql_charset': 'utf8'} | |
252 | ) |
|
252 | ) | |
253 |
|
253 | |||
254 | HOOK_UPDATE = 'changegroup.update' |
|
254 | HOOK_UPDATE = 'changegroup.update' | |
255 | HOOK_REPO_SIZE = 'changegroup.repo_size' |
|
255 | HOOK_REPO_SIZE = 'changegroup.repo_size' | |
256 | HOOK_PUSH = 'changegroup.push_logger' |
|
256 | HOOK_PUSH = 'changegroup.push_logger' | |
257 | HOOK_PRE_PUSH = 'prechangegroup.pre_push' |
|
257 | HOOK_PRE_PUSH = 'prechangegroup.pre_push' | |
258 | HOOK_PULL = 'outgoing.pull_logger' |
|
258 | HOOK_PULL = 'outgoing.pull_logger' | |
259 | HOOK_PRE_PULL = 'preoutgoing.pre_pull' |
|
259 | HOOK_PRE_PULL = 'preoutgoing.pre_pull' | |
260 |
|
260 | |||
261 | ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
261 | ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
262 | ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
262 | ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
263 | ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
263 | ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
264 | ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
264 | ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
265 | ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True) |
|
265 | ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True) | |
266 |
|
266 | |||
267 | @classmethod |
|
267 | @classmethod | |
268 | def get_by_key(cls, key): |
|
268 | def get_by_key(cls, key): | |
269 | return cls.query().filter(cls.ui_key == key).scalar() |
|
269 | return cls.query().filter(cls.ui_key == key).scalar() | |
270 |
|
270 | |||
271 | @classmethod |
|
271 | @classmethod | |
272 | def get_builtin_hooks(cls): |
|
272 | def get_builtin_hooks(cls): | |
273 | q = cls.query() |
|
273 | q = cls.query() | |
274 | q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE, |
|
274 | q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE, | |
275 | cls.HOOK_PUSH, cls.HOOK_PRE_PUSH, |
|
275 | cls.HOOK_PUSH, cls.HOOK_PRE_PUSH, | |
276 | cls.HOOK_PULL, cls.HOOK_PRE_PULL])) |
|
276 | cls.HOOK_PULL, cls.HOOK_PRE_PULL])) | |
277 | return q.all() |
|
277 | return q.all() | |
278 |
|
278 | |||
279 | @classmethod |
|
279 | @classmethod | |
280 | def get_custom_hooks(cls): |
|
280 | def get_custom_hooks(cls): | |
281 | q = cls.query() |
|
281 | q = cls.query() | |
282 | q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE, |
|
282 | q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE, | |
283 | cls.HOOK_PUSH, cls.HOOK_PRE_PUSH, |
|
283 | cls.HOOK_PUSH, cls.HOOK_PRE_PUSH, | |
284 | cls.HOOK_PULL, cls.HOOK_PRE_PULL])) |
|
284 | cls.HOOK_PULL, cls.HOOK_PRE_PULL])) | |
285 | q = q.filter(cls.ui_section == 'hooks') |
|
285 | q = q.filter(cls.ui_section == 'hooks') | |
286 | return q.all() |
|
286 | return q.all() | |
287 |
|
287 | |||
288 | @classmethod |
|
288 | @classmethod | |
289 | def get_repos_location(cls): |
|
289 | def get_repos_location(cls): | |
290 | return cls.get_by_key('/').ui_value |
|
290 | return cls.get_by_key('/').ui_value | |
291 |
|
291 | |||
292 | @classmethod |
|
292 | @classmethod | |
293 | def create_or_update_hook(cls, key, val): |
|
293 | def create_or_update_hook(cls, key, val): | |
294 | new_ui = cls.get_by_key(key) or cls() |
|
294 | new_ui = cls.get_by_key(key) or cls() | |
295 | new_ui.ui_section = 'hooks' |
|
295 | new_ui.ui_section = 'hooks' | |
296 | new_ui.ui_active = True |
|
296 | new_ui.ui_active = True | |
297 | new_ui.ui_key = key |
|
297 | new_ui.ui_key = key | |
298 | new_ui.ui_value = val |
|
298 | new_ui.ui_value = val | |
299 |
|
299 | |||
300 | Session().add(new_ui) |
|
300 | Session().add(new_ui) | |
301 |
|
301 | |||
302 | def __repr__(self): |
|
302 | def __repr__(self): | |
303 | return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key, |
|
303 | return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key, | |
304 | self.ui_value) |
|
304 | self.ui_value) | |
305 |
|
305 | |||
306 |
|
306 | |||
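
The RhodeCodeUi class above doubles as the registry for Mercurial-style hook entries in the `hooks` ui section: the HOOK_* constants name the built-in hooks, and everything else in that section counts as a custom hook (get_builtin_hooks vs. get_custom_hooks). A minimal, hedged usage sketch follows; it assumes a configured RhodeCode 1.x environment and that these models live in rhodecode.model.db with the session factory in rhodecode.model.meta (both module paths are assumptions, not shown in this file).

# Hedged sketch: module paths below are assumptions, not part of this file.
from rhodecode.model.db import RhodeCodeUi
from rhodecode.model.meta import Session

# register (or update) a custom hook entry in the [hooks] ui section
RhodeCodeUi.create_or_update_hook('changegroup.notify', 'python:myhooks.notify')
Session().commit()  # create_or_update_hook() only add()s; committing is the caller's job

builtin = [ui_.ui_key for ui_ in RhodeCodeUi.get_builtin_hooks()]
custom = [(ui_.ui_key, ui_.ui_value) for ui_ in RhodeCodeUi.get_custom_hooks()]
# 'changegroup.notify' shows up in `custom`; the HOOK_* keys stay in `builtin`
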
307 | class User(Base, BaseModel): |
|
307 | class User(Base, BaseModel): | |
308 | __tablename__ = 'users' |
|
308 | __tablename__ = 'users' | |
309 | __table_args__ = ( |
|
309 | __table_args__ = ( | |
310 | UniqueConstraint('username'), UniqueConstraint('email'), |
|
310 | UniqueConstraint('username'), UniqueConstraint('email'), | |
311 | Index('u_username_idx', 'username'), |
|
311 | Index('u_username_idx', 'username'), | |
312 | Index('u_email_idx', 'email'), |
|
312 | Index('u_email_idx', 'email'), | |
313 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
313 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
314 | 'mysql_charset': 'utf8'} |
|
314 | 'mysql_charset': 'utf8'} | |
315 | ) |
|
315 | ) | |
316 | DEFAULT_USER = 'default' |
|
316 | DEFAULT_USER = 'default' | |
317 | DEFAULT_PERMISSIONS = [ |
|
317 | DEFAULT_PERMISSIONS = [ | |
318 | 'hg.register.manual_activate', 'hg.create.repository', |
|
318 | 'hg.register.manual_activate', 'hg.create.repository', | |
319 | 'hg.fork.repository', 'repository.read', 'group.read' |
|
319 | 'hg.fork.repository', 'repository.read', 'group.read' | |
320 | ] |
|
320 | ] | |
321 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
321 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
322 | username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
322 | username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
323 | password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
323 | password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
324 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) |
|
324 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) | |
325 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) |
|
325 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) | |
326 | name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
326 | name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
327 | lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
327 | lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
328 | _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
328 | _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
329 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
329 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) | |
330 | ldap_dn = Column("ldap_dn", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
330 | ldap_dn = Column("ldap_dn", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
331 | api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
331 | api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
332 | inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) |
|
332 | inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) | |
333 |
|
333 | |||
334 | user_log = relationship('UserLog') |
|
334 | user_log = relationship('UserLog') | |
335 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') |
|
335 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') | |
336 |
|
336 | |||
337 | repositories = relationship('Repository') |
|
337 | repositories = relationship('Repository') | |
338 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') |
|
338 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') | |
339 | followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') |
|
339 | followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') | |
340 |
|
340 | |||
341 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') |
|
341 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') | |
342 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all') |
|
342 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all') | |
343 |
|
343 | |||
344 | group_member = relationship('UsersGroupMember', cascade='all') |
|
344 | group_member = relationship('UsersGroupMember', cascade='all') | |
345 |
|
345 | |||
346 | notifications = relationship('UserNotification', cascade='all') |
|
346 | notifications = relationship('UserNotification', cascade='all') | |
347 | # notifications assigned to this user |
|
347 | # notifications assigned to this user | |
348 | user_created_notifications = relationship('Notification', cascade='all') |
|
348 | user_created_notifications = relationship('Notification', cascade='all') | |
349 | # comments created by this user |
|
349 | # comments created by this user | |
350 | user_comments = relationship('ChangesetComment', cascade='all') |
|
350 | user_comments = relationship('ChangesetComment', cascade='all') | |
351 | # extra emails for this user |

351 | # extra emails for this user | |
352 | user_emails = relationship('UserEmailMap', cascade='all') |
|
352 | user_emails = relationship('UserEmailMap', cascade='all') | |
353 |
|
353 | |||
354 | @hybrid_property |
|
354 | @hybrid_property | |
355 | def email(self): |
|
355 | def email(self): | |
356 | return self._email |
|
356 | return self._email | |
357 |
|
357 | |||
358 | @email.setter |
|
358 | @email.setter | |
359 | def email(self, val): |
|
359 | def email(self, val): | |
360 | self._email = val.lower() if val else None |
|
360 | self._email = val.lower() if val else None | |
361 |
|
361 | |||
362 | @property |
|
362 | @property | |
363 | def firstname(self): |
|
363 | def firstname(self): | |
364 | # alias for future |
|
364 | # alias for future | |
365 | return self.name |
|
365 | return self.name | |
366 |
|
366 | |||
367 | @property |
|
367 | @property | |
368 | def emails(self): |
|
368 | def emails(self): | |
369 | other = UserEmailMap.query().filter(UserEmailMap.user==self).all() |
|
369 | other = UserEmailMap.query().filter(UserEmailMap.user==self).all() | |
370 | return [self.email] + [x.email for x in other] |
|
370 | return [self.email] + [x.email for x in other] | |
371 |
|
371 | |||
372 | @property |
|
372 | @property | |
373 | def ip_addresses(self): |
|
373 | def ip_addresses(self): | |
374 | ret = UserIpMap.query().filter(UserIpMap.user == self).all() |
|
374 | ret = UserIpMap.query().filter(UserIpMap.user == self).all() | |
375 | return [x.ip_addr for x in ret] |
|
375 | return [x.ip_addr for x in ret] | |
376 |
|
376 | |||
377 | @property |
|
377 | @property | |
378 | def username_and_name(self): |
|
378 | def username_and_name(self): | |
379 | return '%s (%s %s)' % (self.username, self.firstname, self.lastname) |
|
379 | return '%s (%s %s)' % (self.username, self.firstname, self.lastname) | |
380 |
|
380 | |||
381 | @property |
|
381 | @property | |
382 | def full_name(self): |
|
382 | def full_name(self): | |
383 | return '%s %s' % (self.firstname, self.lastname) |
|
383 | return '%s %s' % (self.firstname, self.lastname) | |
384 |
|
384 | |||
385 | @property |
|
385 | @property | |
386 | def full_name_or_username(self): |
|
386 | def full_name_or_username(self): | |
387 | return ('%s %s' % (self.firstname, self.lastname) |
|
387 | return ('%s %s' % (self.firstname, self.lastname) | |
388 | if (self.firstname and self.lastname) else self.username) |
|
388 | if (self.firstname and self.lastname) else self.username) | |
389 |
|
389 | |||
390 | @property |
|
390 | @property | |
391 | def full_contact(self): |
|
391 | def full_contact(self): | |
392 | return '%s %s <%s>' % (self.firstname, self.lastname, self.email) |
|
392 | return '%s %s <%s>' % (self.firstname, self.lastname, self.email) | |
393 |
|
393 | |||
394 | @property |
|
394 | @property | |
395 | def short_contact(self): |
|
395 | def short_contact(self): | |
396 | return '%s %s' % (self.firstname, self.lastname) |
|
396 | return '%s %s' % (self.firstname, self.lastname) | |
397 |
|
397 | |||
398 | @property |
|
398 | @property | |
399 | def is_admin(self): |
|
399 | def is_admin(self): | |
400 | return self.admin |
|
400 | return self.admin | |
401 |
|
401 | |||
402 | @property |
|
402 | @property | |
403 | def AuthUser(self): |
|
403 | def AuthUser(self): | |
404 | """ |
|
404 | """ | |
405 | Returns instance of AuthUser for this user |
|
405 | Returns instance of AuthUser for this user | |
406 | """ |
|
406 | """ | |
407 | from rhodecode.lib.auth import AuthUser |
|
407 | from rhodecode.lib.auth import AuthUser | |
408 | return AuthUser(user_id=self.user_id, api_key=self.api_key, |
|
408 | return AuthUser(user_id=self.user_id, api_key=self.api_key, | |
409 | username=self.username) |
|
409 | username=self.username) | |
410 |
|
410 | |||
411 | def __unicode__(self): |
|
411 | def __unicode__(self): | |
412 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, |
|
412 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, | |
413 | self.user_id, self.username) |
|
413 | self.user_id, self.username) | |
414 |
|
414 | |||
415 | @classmethod |
|
415 | @classmethod | |
416 | def get_by_username(cls, username, case_insensitive=False, cache=False): |
|
416 | def get_by_username(cls, username, case_insensitive=False, cache=False): | |
417 | if case_insensitive: |
|
417 | if case_insensitive: | |
418 | q = cls.query().filter(cls.username.ilike(username)) |
|
418 | q = cls.query().filter(cls.username.ilike(username)) | |
419 | else: |
|
419 | else: | |
420 | q = cls.query().filter(cls.username == username) |
|
420 | q = cls.query().filter(cls.username == username) | |
421 |
|
421 | |||
422 | if cache: |
|
422 | if cache: | |
423 | q = q.options(FromCache( |
|
423 | q = q.options(FromCache( | |
424 | "sql_cache_short", |
|
424 | "sql_cache_short", | |
425 | "get_user_%s" % _hash_key(username) |
|
425 | "get_user_%s" % _hash_key(username) | |
426 | ) |
|
426 | ) | |
427 | ) |
|
427 | ) | |
428 | return q.scalar() |
|
428 | return q.scalar() | |
429 |
|
429 | |||
430 | @classmethod |
|
430 | @classmethod | |
431 | def get_by_api_key(cls, api_key, cache=False): |
|
431 | def get_by_api_key(cls, api_key, cache=False): | |
432 | q = cls.query().filter(cls.api_key == api_key) |
|
432 | q = cls.query().filter(cls.api_key == api_key) | |
433 |
|
433 | |||
434 | if cache: |
|
434 | if cache: | |
435 | q = q.options(FromCache("sql_cache_short", |
|
435 | q = q.options(FromCache("sql_cache_short", | |
436 | "get_api_key_%s" % api_key)) |
|
436 | "get_api_key_%s" % api_key)) | |
437 | return q.scalar() |
|
437 | return q.scalar() | |
438 |
|
438 | |||
439 | @classmethod |
|
439 | @classmethod | |
440 | def get_by_email(cls, email, case_insensitive=False, cache=False): |
|
440 | def get_by_email(cls, email, case_insensitive=False, cache=False): | |
441 | if case_insensitive: |
|
441 | if case_insensitive: | |
442 | q = cls.query().filter(cls.email.ilike(email)) |
|
442 | q = cls.query().filter(cls.email.ilike(email)) | |
443 | else: |
|
443 | else: | |
444 | q = cls.query().filter(cls.email == email) |
|
444 | q = cls.query().filter(cls.email == email) | |
445 |
|
445 | |||
446 | if cache: |
|
446 | if cache: | |
447 | q = q.options(FromCache("sql_cache_short", |
|
447 | q = q.options(FromCache("sql_cache_short", | |
448 | "get_email_key_%s" % email)) |
|
448 | "get_email_key_%s" % email)) | |
449 |
|
449 | |||
450 | ret = q.scalar() |
|
450 | ret = q.scalar() | |
451 | if ret is None: |
|
451 | if ret is None: | |
452 | q = UserEmailMap.query() |
|
452 | q = UserEmailMap.query() | |
453 | # try fetching in alternate email map |
|
453 | # try fetching in alternate email map | |
454 | if case_insensitive: |
|
454 | if case_insensitive: | |
455 | q = q.filter(UserEmailMap.email.ilike(email)) |
|
455 | q = q.filter(UserEmailMap.email.ilike(email)) | |
456 | else: |
|
456 | else: | |
457 | q = q.filter(UserEmailMap.email == email) |
|
457 | q = q.filter(UserEmailMap.email == email) | |
458 | q = q.options(joinedload(UserEmailMap.user)) |
|
458 | q = q.options(joinedload(UserEmailMap.user)) | |
459 | if cache: |
|
459 | if cache: | |
460 | q = q.options(FromCache("sql_cache_short", |
|
460 | q = q.options(FromCache("sql_cache_short", | |
461 | "get_email_map_key_%s" % email)) |
|
461 | "get_email_map_key_%s" % email)) | |
462 | ret = getattr(q.scalar(), 'user', None) |
|
462 | ret = getattr(q.scalar(), 'user', None) | |
463 |
|
463 | |||
464 | return ret |
|
464 | return ret | |
465 |
|
465 | |||
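
get_by_email above does a two-step lookup: it first matches the primary `users.email` column and only then falls back to the UserEmailMap table of extra addresses, so callers can resolve either kind of address with one call. A hedged usage sketch, assuming the rhodecode.model.db import path:

# Hedged sketch: the import path is an assumption.
from rhodecode.model.db import User

# matches the primary users.email column or any extra address in user_email_map;
# case_insensitive=True uses ilike() on both lookups
user = User.get_by_email('Someone@Example.COM', case_insensitive=True)
if user is None:
    # neither table knows this address
    pass
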
466 | @classmethod |
|
466 | @classmethod | |
467 | def get_from_cs_author(cls, author): |
|
467 | def get_from_cs_author(cls, author): | |
468 | """ |
|
468 | """ | |
469 | Tries to get a User object out of a commit author string |

469 | Tries to get a User object out of a commit author string | |
470 |
|
470 | |||
471 | :param author: |
|
471 | :param author: | |
472 | """ |
|
472 | """ | |
473 | from rhodecode.lib.helpers import email, author_name |
|
473 | from rhodecode.lib.helpers import email, author_name | |
474 | # If the passed string contains a valid email, see if it belongs to a user |

474 | # If the passed string contains a valid email, see if it belongs to a user | |
475 | _email = email(author) |
|
475 | _email = email(author) | |
476 | if _email: |
|
476 | if _email: | |
477 | user = cls.get_by_email(_email, case_insensitive=True) |
|
477 | user = cls.get_by_email(_email, case_insensitive=True) | |
478 | if user: |
|
478 | if user: | |
479 | return user |
|
479 | return user | |
480 | # Maybe we can match by username? |
|
480 | # Maybe we can match by username? | |
481 | _author = author_name(author) |
|
481 | _author = author_name(author) | |
482 | user = cls.get_by_username(_author, case_insensitive=True) |
|
482 | user = cls.get_by_username(_author, case_insensitive=True) | |
483 | if user: |
|
483 | if user: | |
484 | return user |
|
484 | return user | |
485 |
|
485 | |||
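
get_from_cs_author resolves a VCS author string by trying the email part first and the plain author name second, falling through (returning None) if neither matches. A hedged sketch of that intent (import path assumed):

# Hedged sketch: the import path is an assumption.
from rhodecode.model.db import User

# 1) 'jane@example.com' is extracted and matched via get_by_email()
# 2) failing that, 'Jane Doe' is matched via get_by_username()
# 3) if both miss, the method returns None
user = User.get_from_cs_author('Jane Doe <jane@example.com>')
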
486 | def update_lastlogin(self): |
|
486 | def update_lastlogin(self): | |
487 | """Update user lastlogin""" |
|
487 | """Update user lastlogin""" | |
488 | self.last_login = datetime.datetime.now() |
|
488 | self.last_login = datetime.datetime.now() | |
489 | Session().add(self) |
|
489 | Session().add(self) | |
490 | log.debug('updated user %s lastlogin' % self.username) |
|
490 | log.debug('updated user %s lastlogin' % self.username) | |
491 |
|
491 | |||
492 | def get_api_data(self): |
|
492 | def get_api_data(self): | |
493 | """ |
|
493 | """ | |
494 | Common function for generating user-related data for the API |

494 | Common function for generating user-related data for the API | |
495 | """ |
|
495 | """ | |
496 | user = self |
|
496 | user = self | |
497 | data = dict( |
|
497 | data = dict( | |
498 | user_id=user.user_id, |
|
498 | user_id=user.user_id, | |
499 | username=user.username, |
|
499 | username=user.username, | |
500 | firstname=user.name, |
|
500 | firstname=user.name, | |
501 | lastname=user.lastname, |
|
501 | lastname=user.lastname, | |
502 | email=user.email, |
|
502 | email=user.email, | |
503 | emails=user.emails, |
|
503 | emails=user.emails, | |
504 | api_key=user.api_key, |
|
504 | api_key=user.api_key, | |
505 | active=user.active, |
|
505 | active=user.active, | |
506 | admin=user.admin, |
|
506 | admin=user.admin, | |
507 | ldap_dn=user.ldap_dn, |
|
507 | ldap_dn=user.ldap_dn, | |
508 | last_login=user.last_login, |
|
508 | last_login=user.last_login, | |
509 | ip_addresses=user.ip_addresses |
|
509 | ip_addresses=user.ip_addresses | |
510 | ) |
|
510 | ) | |
511 | return data |
|
511 | return data | |
512 |
|
512 | |||
513 | def __json__(self): |
|
513 | def __json__(self): | |
514 | data = dict( |
|
514 | data = dict( | |
515 | full_name=self.full_name, |
|
515 | full_name=self.full_name, | |
516 | full_name_or_username=self.full_name_or_username, |
|
516 | full_name_or_username=self.full_name_or_username, | |
517 | short_contact=self.short_contact, |
|
517 | short_contact=self.short_contact, | |
518 | full_contact=self.full_contact |
|
518 | full_contact=self.full_contact | |
519 | ) |
|
519 | ) | |
520 | data.update(self.get_api_data()) |
|
520 | data.update(self.get_api_data()) | |
521 | return data |
|
521 | return data | |
522 |
|
522 | |||
523 |
|
523 | |||
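
__json__ merges the display-oriented properties (full_name, contact variants) with the flat dict from get_api_data, which is presumably what the API layer serializes. A hedged sketch; last_login is a datetime, so a plain json.dumps needs a default= handler, and the import path is again an assumption:

import json

# Hedged sketch: the import path is an assumption.
from rhodecode.model.db import User

user = User.get_by_username('admin')
payload = user.__json__()                  # get_api_data() fields plus full_name & co.
print(json.dumps(payload, default=str))    # default=str copes with the datetime in last_login
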
524 | class UserEmailMap(Base, BaseModel): |
|
524 | class UserEmailMap(Base, BaseModel): | |
525 | __tablename__ = 'user_email_map' |
|
525 | __tablename__ = 'user_email_map' | |
526 | __table_args__ = ( |
|
526 | __table_args__ = ( | |
527 | Index('uem_email_idx', 'email'), |
|
527 | Index('uem_email_idx', 'email'), | |
528 | UniqueConstraint('email'), |
|
528 | UniqueConstraint('email'), | |
529 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
529 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
530 | 'mysql_charset': 'utf8'} |
|
530 | 'mysql_charset': 'utf8'} | |
531 | ) |
|
531 | ) | |
532 | __mapper_args__ = {} |
|
532 | __mapper_args__ = {} | |
533 |
|
533 | |||
534 | email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
534 | email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
535 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
535 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |
536 | _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None) |
|
536 | _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None) | |
537 | user = relationship('User', lazy='joined') |
|
537 | user = relationship('User', lazy='joined') | |
538 |
|
538 | |||
539 | @validates('_email') |
|
539 | @validates('_email') | |
540 | def validate_email(self, key, email): |
|
540 | def validate_email(self, key, email): | |
541 | # check that this email is not already a user's main email |

541 | # check that this email is not already a user's main email | |
542 | main_email = Session().query(User).filter(User.email == email).scalar() |
|
542 | main_email = Session().query(User).filter(User.email == email).scalar() | |
543 | if main_email is not None: |
|
543 | if main_email is not None: | |
544 | raise AttributeError('email %s is already present in the user table' % email) |

544 | raise AttributeError('email %s is already present in the user table' % email) | |
545 | return email |
|
545 | return email | |
546 |
|
546 | |||
547 | @hybrid_property |
|
547 | @hybrid_property | |
548 | def email(self): |
|
548 | def email(self): | |
549 | return self._email |
|
549 | return self._email | |
550 |
|
550 | |||
551 | @email.setter |
|
551 | @email.setter | |
552 | def email(self, val): |
|
552 | def email(self, val): | |
553 | self._email = val.lower() if val else None |
|
553 | self._email = val.lower() if val else None | |
554 |
|
554 | |||
555 |
|
555 | |||
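
The @validates('_email') hook above rejects an extra address that already exists as someone's primary email, so the two tables cannot disagree about who owns an address. A hedged sketch of what a caller sees (import paths are assumptions):

# Hedged sketch: import paths are assumptions.
from rhodecode.model.db import User, UserEmailMap
from rhodecode.model.meta import Session

extra = UserEmailMap()
extra.user = User.get_by_username('admin')
try:
    extra.email = 'admin@example.com'   # the validator fires on assignment
except AttributeError:
    # the address is already a primary email in the users table
    pass
else:
    Session().add(extra)
    Session().commit()
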
556 | class UserIpMap(Base, BaseModel): |
|
556 | class UserIpMap(Base, BaseModel): | |
557 | __tablename__ = 'user_ip_map' |
|
557 | __tablename__ = 'user_ip_map' | |
558 | __table_args__ = ( |
|
558 | __table_args__ = ( | |
559 | UniqueConstraint('user_id', 'ip_addr'), |
|
559 | UniqueConstraint('user_id', 'ip_addr'), | |
560 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
560 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
561 | 'mysql_charset': 'utf8'} |
|
561 | 'mysql_charset': 'utf8'} | |
562 | ) |
|
562 | ) | |
563 | __mapper_args__ = {} |
|
563 | __mapper_args__ = {} | |
564 |
|
564 | |||
565 | ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
565 | ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
566 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
566 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |
567 | ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None) |
|
567 | ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None) | |
568 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) |
|
568 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) | |
569 | user = relationship('User', lazy='joined') |
|
569 | user = relationship('User', lazy='joined') | |
570 |
|
570 | |||
571 | @classmethod |
|
571 | @classmethod | |
572 | def _get_ip_range(cls, ip_addr): |
|
572 | def _get_ip_range(cls, ip_addr): | |
573 | from rhodecode.lib import ipaddr |
|
573 | from rhodecode.lib import ipaddr | |
574 | net = ipaddr.IPNetwork(address=ip_addr) |
|
574 | net = ipaddr.IPNetwork(address=ip_addr) | |
575 | return [str(net.network), str(net.broadcast)] |
|
575 | return [str(net.network), str(net.broadcast)] | |
576 |
|
576 | |||
577 | def __json__(self): |
|
577 | def __json__(self): | |
578 | return dict( |
|
578 | return dict( | |
579 | ip_addr=self.ip_addr, |
|
579 | ip_addr=self.ip_addr, | |
580 | ip_range=self._get_ip_range(self.ip_addr) |
|
580 | ip_range=self._get_ip_range(self.ip_addr) | |
581 | ) |
|
581 | ) | |
582 |
|
582 | |||
583 |
|
583 | |||
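
UserIpMap stores whitelist entries as IP+mask strings, and _get_ip_range expands one into its lowest and highest address using the bundled rhodecode.lib.ipaddr module. The same computation with the standard-library ipaddress module (Python 3, used here only so the sketch runs standalone) looks like this:

import ipaddress

def ip_range(ip_addr):
    # mirrors UserIpMap._get_ip_range(): [network address, broadcast address]
    net = ipaddress.ip_network(ip_addr, strict=False)   # strict=False tolerates host bits
    return [str(net.network_address), str(net.broadcast_address)]

assert ip_range('192.168.1.0/24') == ['192.168.1.0', '192.168.1.255']
assert ip_range('10.0.0.1') == ['10.0.0.1', '10.0.0.1']   # a bare address acts as a /32
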
584 | class UserLog(Base, BaseModel): |
|
584 | class UserLog(Base, BaseModel): | |
585 | __tablename__ = 'user_logs' |
|
585 | __tablename__ = 'user_logs' | |
586 | __table_args__ = ( |
|
586 | __table_args__ = ( | |
587 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
587 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
588 | 'mysql_charset': 'utf8'}, |
|
588 | 'mysql_charset': 'utf8'}, | |
589 | ) |
|
589 | ) | |
590 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
590 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
591 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
591 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |
592 | username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
592 | username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
593 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True) |
|
593 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True) | |
594 | repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
594 | repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
595 | user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
595 | user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
596 | action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
596 | action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
597 | action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
597 | action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) | |
598 |
|
598 | |||
599 | @property |
|
599 | @property | |
600 | def action_as_day(self): |
|
600 | def action_as_day(self): | |
601 | return datetime.date(*self.action_date.timetuple()[:3]) |
|
601 | return datetime.date(*self.action_date.timetuple()[:3]) | |
602 |
|
602 | |||
603 | user = relationship('User') |
|
603 | user = relationship('User') | |
604 | repository = relationship('Repository', cascade='') |
|
604 | repository = relationship('Repository', cascade='') | |
605 |
|
605 | |||
606 |
|
606 | |||
607 | class UsersGroup(Base, BaseModel): |
|
607 | class UsersGroup(Base, BaseModel): | |
608 | __tablename__ = 'users_groups' |
|
608 | __tablename__ = 'users_groups' | |
609 | __table_args__ = ( |
|
609 | __table_args__ = ( | |
610 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
610 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
611 | 'mysql_charset': 'utf8'}, |
|
611 | 'mysql_charset': 'utf8'}, | |
612 | ) |
|
612 | ) | |
613 |
|
613 | |||
614 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
614 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
615 | users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None) |
|
615 | users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None) | |
616 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) |
|
616 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) | |
617 | inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) |
|
617 | inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) | |
618 |
|
618 | |||
619 | members = relationship('UsersGroupMember', cascade="all, delete, delete-orphan", lazy="joined") |
|
619 | members = relationship('UsersGroupMember', cascade="all, delete, delete-orphan", lazy="joined") | |
620 | users_group_to_perm = relationship('UsersGroupToPerm', cascade='all') |
|
620 | users_group_to_perm = relationship('UsersGroupToPerm', cascade='all') | |
621 | users_group_repo_to_perm = relationship('UsersGroupRepoToPerm', cascade='all') |
|
621 | users_group_repo_to_perm = relationship('UsersGroupRepoToPerm', cascade='all') | |
622 |
|
622 | |||
623 | def __unicode__(self): |
|
623 | def __unicode__(self): | |
624 | return u'<userGroup(%s)>' % (self.users_group_name) |
|
624 | return u'<userGroup(%s)>' % (self.users_group_name) | |
625 |
|
625 | |||
626 | @classmethod |
|
626 | @classmethod | |
627 | def get_by_group_name(cls, group_name, cache=False, |
|
627 | def get_by_group_name(cls, group_name, cache=False, | |
628 | case_insensitive=False): |
|
628 | case_insensitive=False): | |
629 | if case_insensitive: |
|
629 | if case_insensitive: | |
630 | q = cls.query().filter(cls.users_group_name.ilike(group_name)) |
|
630 | q = cls.query().filter(cls.users_group_name.ilike(group_name)) | |
631 | else: |
|
631 | else: | |
632 | q = cls.query().filter(cls.users_group_name == group_name) |
|
632 | q = cls.query().filter(cls.users_group_name == group_name) | |
633 | if cache: |
|
633 | if cache: | |
634 | q = q.options(FromCache( |
|
634 | q = q.options(FromCache( | |
635 | "sql_cache_short", |
|
635 | "sql_cache_short", | |
636 | "get_user_%s" % _hash_key(group_name) |
|
636 | "get_user_%s" % _hash_key(group_name) | |
637 | ) |
|
637 | ) | |
638 | ) |
|
638 | ) | |
639 | return q.scalar() |
|
639 | return q.scalar() | |
640 |
|
640 | |||
641 | @classmethod |
|
641 | @classmethod | |
642 | def get(cls, users_group_id, cache=False): |
|
642 | def get(cls, users_group_id, cache=False): | |
643 | users_group = cls.query() |
|
643 | users_group = cls.query() | |
644 | if cache: |
|
644 | if cache: | |
645 | users_group = users_group.options(FromCache("sql_cache_short", |
|
645 | users_group = users_group.options(FromCache("sql_cache_short", | |
646 | "get_users_group_%s" % users_group_id)) |
|
646 | "get_users_group_%s" % users_group_id)) | |
647 | return users_group.get(users_group_id) |
|
647 | return users_group.get(users_group_id) | |
648 |
|
648 | |||
649 | def get_api_data(self): |
|
649 | def get_api_data(self): | |
650 | users_group = self |
|
650 | users_group = self | |
651 |
|
651 | |||
652 | data = dict( |
|
652 | data = dict( | |
653 | users_group_id=users_group.users_group_id, |
|
653 | users_group_id=users_group.users_group_id, | |
654 | group_name=users_group.users_group_name, |
|
654 | group_name=users_group.users_group_name, | |
655 | active=users_group.users_group_active, |
|
655 | active=users_group.users_group_active, | |
656 | ) |
|
656 | ) | |
657 |
|
657 | |||
658 | return data |
|
658 | return data | |
659 |
|
659 | |||
660 |
|
660 | |||
661 | class UsersGroupMember(Base, BaseModel): |
|
661 | class UsersGroupMember(Base, BaseModel): | |
662 | __tablename__ = 'users_groups_members' |
|
662 | __tablename__ = 'users_groups_members' | |
663 | __table_args__ = ( |
|
663 | __table_args__ = ( | |
664 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
664 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
665 | 'mysql_charset': 'utf8'}, |
|
665 | 'mysql_charset': 'utf8'}, | |
666 | ) |
|
666 | ) | |
667 |
|
667 | |||
668 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
668 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
669 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
669 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
670 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
670 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
671 |
|
671 | |||
672 | user = relationship('User', lazy='joined') |
|
672 | user = relationship('User', lazy='joined') | |
673 | users_group = relationship('UsersGroup') |
|
673 | users_group = relationship('UsersGroup') | |
674 |
|
674 | |||
675 | def __init__(self, gr_id='', u_id=''): |
|
675 | def __init__(self, gr_id='', u_id=''): | |
676 | self.users_group_id = gr_id |
|
676 | self.users_group_id = gr_id | |
677 | self.user_id = u_id |
|
677 | self.user_id = u_id | |
678 |
|
678 | |||
679 |
|
679 | |||
680 | class RepositoryField(Base, BaseModel): |
|
680 | class RepositoryField(Base, BaseModel): | |
681 | __tablename__ = 'repositories_fields' |
|
681 | __tablename__ = 'repositories_fields' | |
682 | __table_args__ = ( |
|
682 | __table_args__ = ( | |
683 | UniqueConstraint('repository_id', 'field_key'), # no-multi field |
|
683 | UniqueConstraint('repository_id', 'field_key'), # no-multi field | |
684 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
684 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
685 | 'mysql_charset': 'utf8'}, |
|
685 | 'mysql_charset': 'utf8'}, | |
686 | ) |
|
686 | ) | |
687 | PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields |
|
687 | PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields | |
688 |
|
688 | |||
689 | repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
689 | repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
690 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
690 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) | |
691 | field_key = Column("field_key", String(250, convert_unicode=False, assert_unicode=None)) |
|
691 | field_key = Column("field_key", String(250, convert_unicode=False, assert_unicode=None)) | |
692 | field_label = Column("field_label", String(1024, convert_unicode=False, assert_unicode=None), nullable=False) |
|
692 | field_label = Column("field_label", String(1024, convert_unicode=False, assert_unicode=None), nullable=False) | |
693 | field_value = Column("field_value", String(10000, convert_unicode=False, assert_unicode=None), nullable=False) |
|
693 | field_value = Column("field_value", String(10000, convert_unicode=False, assert_unicode=None), nullable=False) | |
694 | field_desc = Column("field_desc", String(1024, convert_unicode=False, assert_unicode=None), nullable=False) |
|
694 | field_desc = Column("field_desc", String(1024, convert_unicode=False, assert_unicode=None), nullable=False) | |
695 | field_type = Column("field_type", String(256), nullable=False, unique=None) |
|
695 | field_type = Column("field_type", String(256), nullable=False, unique=None) | |
696 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
696 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
697 |
|
697 | |||
698 | repository = relationship('Repository') |
|
698 | repository = relationship('Repository') | |
699 |
|
699 | |||
700 | @property |
|
700 | @property | |
701 | def field_key_prefixed(self): |
|
701 | def field_key_prefixed(self): | |
702 | return '%s%s' % (self.PREFIX, self.field_key) |

702 | return '%s%s' % (self.PREFIX, self.field_key) | |
703 |
|
703 | |||
704 | @classmethod |
|
704 | @classmethod | |
705 | def un_prefix_key(cls, key): |
|
705 | def un_prefix_key(cls, key): | |
706 | if key.startswith(cls.PREFIX): |
|
706 | if key.startswith(cls.PREFIX): | |
707 | return key[len(cls.PREFIX):] |
|
707 | return key[len(cls.PREFIX):] | |
708 | return key |
|
708 | return key | |
709 |
|
709 | |||
710 | @classmethod |
|
710 | @classmethod | |
711 | def get_by_key_name(cls, key, repo): |
|
711 | def get_by_key_name(cls, key, repo): | |
712 | row = cls.query()\ |
|
712 | row = cls.query()\ | |
713 | .filter(cls.repository == repo)\ |
|
713 | .filter(cls.repository == repo)\ | |
714 | .filter(cls.field_key == key).scalar() |
|
714 | .filter(cls.field_key == key).scalar() | |
715 | return row |
|
715 | return row | |
716 |
|
716 | |||
717 |
|
717 | |||
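
RepositoryField keys are exposed to forms with the ex_ prefix (field_key_prefixed) and stripped again with un_prefix_key on the way back, so extra fields cannot collide with the repository form's own field names. A self-contained sketch of that round trip:

PREFIX = 'ex_'   # same constant as RepositoryField.PREFIX

def prefix_key(key):
    # what field_key_prefixed produces for a stored field_key
    return '%s%s' % (PREFIX, key)

def un_prefix_key(key):
    # mirror of RepositoryField.un_prefix_key()
    if key.startswith(PREFIX):
        return key[len(PREFIX):]
    return key

assert prefix_key('ticket_url') == 'ex_ticket_url'
assert un_prefix_key('ex_ticket_url') == 'ticket_url'
assert un_prefix_key('repo_name') == 'repo_name'   # unprefixed keys pass through untouched
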
718 | class Repository(Base, BaseModel): |
|
718 | class Repository(Base, BaseModel): | |
719 | __tablename__ = 'repositories' |
|
719 | __tablename__ = 'repositories' | |
720 | __table_args__ = ( |
|
720 | __table_args__ = ( | |
721 | UniqueConstraint('repo_name'), |
|
721 | UniqueConstraint('repo_name'), | |
722 | Index('r_repo_name_idx', 'repo_name'), |
|
722 | Index('r_repo_name_idx', 'repo_name'), | |
723 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
723 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
724 | 'mysql_charset': 'utf8'}, |
|
724 | 'mysql_charset': 'utf8'}, | |
725 | ) |
|
725 | ) | |
726 |
|
726 | |||
727 | repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
727 | repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
728 | repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None) |
|
728 | repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None) | |
729 | clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None) |
|
729 | clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None) | |
730 | repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None) |
|
730 | repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None) | |
731 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) |
|
731 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) | |
732 | private = Column("private", Boolean(), nullable=True, unique=None, default=None) |
|
732 | private = Column("private", Boolean(), nullable=True, unique=None, default=None) | |
733 | enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True) |
|
733 | enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True) | |
734 | enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True) |
|
734 | enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True) | |
735 | description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
735 | description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
736 | created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
736 | created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) | |
737 | updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
737 | updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) | |
738 | landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None) |
|
738 | landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None) | |
739 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) |
|
739 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) | |
740 | _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None) |
|
740 | _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None) | |
741 | _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data |

741 | _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data | |
742 |
|
742 | |||
743 | fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None) |
|
743 | fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None) | |
744 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None) |
|
744 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None) | |
745 |
|
745 | |||
746 | user = relationship('User') |
|
746 | user = relationship('User') | |
747 | fork = relationship('Repository', remote_side=repo_id) |
|
747 | fork = relationship('Repository', remote_side=repo_id) | |
748 | group = relationship('RepoGroup') |
|
748 | group = relationship('RepoGroup') | |
749 | repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id') |
|
749 | repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id') | |
750 | users_group_to_perm = relationship('UsersGroupRepoToPerm', cascade='all') |
|
750 | users_group_to_perm = relationship('UsersGroupRepoToPerm', cascade='all') | |
751 | stats = relationship('Statistics', cascade='all', uselist=False) |
|
751 | stats = relationship('Statistics', cascade='all', uselist=False) | |
752 |
|
752 | |||
753 | followers = relationship('UserFollowing', |
|
753 | followers = relationship('UserFollowing', | |
754 | primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', |
|
754 | primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', | |
755 | cascade='all') |
|
755 | cascade='all') | |
756 | extra_fields = relationship('RepositoryField', |
|
756 | extra_fields = relationship('RepositoryField', | |
757 | cascade="all, delete, delete-orphan") |
|
757 | cascade="all, delete, delete-orphan") | |
758 |
|
758 | |||
759 | logs = relationship('UserLog') |
|
759 | logs = relationship('UserLog') | |
760 | comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan") |
|
760 | comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan") | |
761 |
|
761 | |||
762 | pull_requests_org = relationship('PullRequest', |
|
762 | pull_requests_org = relationship('PullRequest', | |
763 | primaryjoin='PullRequest.org_repo_id==Repository.repo_id', |
|
763 | primaryjoin='PullRequest.org_repo_id==Repository.repo_id', | |
764 | cascade="all, delete, delete-orphan") |
|
764 | cascade="all, delete, delete-orphan") | |
765 |
|
765 | |||
766 | pull_requests_other = relationship('PullRequest', |
|
766 | pull_requests_other = relationship('PullRequest', | |
767 | primaryjoin='PullRequest.other_repo_id==Repository.repo_id', |
|
767 | primaryjoin='PullRequest.other_repo_id==Repository.repo_id', | |
768 | cascade="all, delete, delete-orphan") |
|
768 | cascade="all, delete, delete-orphan") | |
769 |
|
769 | |||
770 | def __unicode__(self): |
|
770 | def __unicode__(self): | |
771 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, |
|
771 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, | |
772 | self.repo_name) |
|
772 | self.repo_name) | |
773 |
|
773 | |||
774 | @hybrid_property |
|
774 | @hybrid_property | |
775 | def locked(self): |
|
775 | def locked(self): | |
776 | # always should return [user_id, timelocked] |
|
776 | # always should return [user_id, timelocked] | |
777 | if self._locked: |
|
777 | if self._locked: | |
778 | _lock_info = self._locked.split(':') |
|
778 | _lock_info = self._locked.split(':') | |
779 | return [int(_lock_info[0]), _lock_info[1]] |

779 | return [int(_lock_info[0]), _lock_info[1]] | |
780 | return [None, None] |
|
780 | return [None, None] | |
781 |
|
781 | |||
782 | @locked.setter |
|
782 | @locked.setter | |
783 | def locked(self, val): |
|
783 | def locked(self, val): | |
784 | if val and isinstance(val, (list, tuple)): |
|
784 | if val and isinstance(val, (list, tuple)): | |
785 | self._locked = ':'.join(map(str, val)) |
|
785 | self._locked = ':'.join(map(str, val)) | |
786 | else: |
|
786 | else: | |
787 | self._locked = None |
|
787 | self._locked = None | |
788 |
|
788 | |||
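
The locked hybrid property above packs the lock owner and lock time into a single 'user_id:timestamp' string column and unpacks it again on read. A self-contained sketch of that encoding:

import time

def encode_lock(user_id, locked_at=None):
    # what `repo.locked = [user_id, time.time()]` ends up storing in the `locked` column
    return ':'.join(map(str, [user_id, locked_at if locked_at is not None else time.time()]))

def decode_lock(raw):
    # what the getter hands back: [user_id, timelocked], or [None, None] when unlocked
    if raw:
        user_id, locked_at = raw.split(':')
        return [int(user_id), locked_at]
    return [None, None]

raw = encode_lock(4, 1360000000.0)
assert raw == '4:1360000000.0'
assert decode_lock(raw) == [4, '1360000000.0']
assert decode_lock(None) == [None, None]
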
789 | @hybrid_property |
|
789 | @hybrid_property | |
790 | def changeset_cache(self): |
|
790 | def changeset_cache(self): | |
791 | from rhodecode.lib.vcs.backends.base import EmptyChangeset |
|
791 | from rhodecode.lib.vcs.backends.base import EmptyChangeset | |
792 | dummy = EmptyChangeset().__json__() |
|
792 | dummy = EmptyChangeset().__json__() | |
793 | if not self._changeset_cache: |
|
793 | if not self._changeset_cache: | |
794 | return dummy |
|
794 | return dummy | |
795 | try: |
|
795 | try: | |
796 | return json.loads(self._changeset_cache) |
|
796 | return json.loads(self._changeset_cache) | |
797 | except TypeError: |
|
797 | except TypeError: | |
798 | return dummy |
|
798 | return dummy | |
799 |
|
799 | |||
800 | @changeset_cache.setter |
|
800 | @changeset_cache.setter | |
801 | def changeset_cache(self, val): |
|
801 | def changeset_cache(self, val): | |
802 | try: |
|
802 | try: | |
803 | self._changeset_cache = json.dumps(val) |
|
803 | self._changeset_cache = json.dumps(val) | |
804 | except Exception: |

804 | except Exception: | |
805 | log.error(traceback.format_exc()) |
|
805 | log.error(traceback.format_exc()) | |
806 |
|
806 | |||
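
changeset_cache keeps the last-changeset summary as JSON text in a LargeBinary column, and the getter falls back to an empty-changeset dict when the column is unset or unreadable. A self-contained sketch of that defensive parse; the EMPTY placeholder below merely stands in for EmptyChangeset().__json__(), whose exact keys are defined elsewhere and not shown in this file:

import json

# stand-in for EmptyChangeset().__json__(); the real keys live in the vcs backend
EMPTY = {'raw_id': '0' * 40, 'revision': -1, 'short_id': '0' * 12}

def parse_changeset_cache(raw):
    # mirror the getter: anything falsy or unparsable yields the empty-changeset dict
    if not raw:
        return EMPTY
    try:
        return json.loads(raw)
    except (TypeError, ValueError):   # the original only catches TypeError
        return EMPTY

assert parse_changeset_cache(None) == EMPTY
assert parse_changeset_cache('{"revision": 42}') == {'revision': 42}
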
807 | @classmethod |
|
807 | @classmethod | |
808 | def url_sep(cls): |
|
808 | def url_sep(cls): | |
809 | return URL_SEP |
|
809 | return URL_SEP | |
810 |
|
810 | |||
811 | @classmethod |
|
811 | @classmethod | |
812 | def normalize_repo_name(cls, repo_name): |
|
812 | def normalize_repo_name(cls, repo_name): | |
813 | """ |
|
813 | """ | |
814 | Normalizes an OS-specific repo_name to the format stored internally in the |

814 | Normalizes an OS-specific repo_name to the format stored internally in the | |
815 | database, using URL_SEP |

815 | database, using URL_SEP | |
816 |
|
816 | |||
817 | :param cls: |
|
817 | :param cls: | |
818 | :param repo_name: |
|
818 | :param repo_name: | |
819 | """ |
|
819 | """ | |
820 | return cls.url_sep().join(repo_name.split(os.sep)) |
|
820 | return cls.url_sep().join(repo_name.split(os.sep)) | |
821 |
|
821 | |||
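
normalize_repo_name converts an OS-specific path into the internal repository name, which always uses URL_SEP ('/') regardless of platform. A self-contained sketch:

import os

URL_SEP = '/'   # same separator the module-level constant provides

def normalize_repo_name(repo_name):
    # mirror Repository.normalize_repo_name(): replace the OS separator with '/'
    return URL_SEP.join(repo_name.split(os.sep))

# on Windows 'group\\project' becomes 'group/project';
# on POSIX systems the name is already in its stored form
assert normalize_repo_name(os.sep.join(['group', 'project'])) == 'group/project'
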
822 | @classmethod |
|
822 | @classmethod | |
823 | def get_by_repo_name(cls, repo_name): |
|
823 | def get_by_repo_name(cls, repo_name): | |
824 | q = Session().query(cls).filter(cls.repo_name == repo_name) |
|
824 | q = Session().query(cls).filter(cls.repo_name == repo_name) | |
825 | q = q.options(joinedload(Repository.fork))\ |
|
825 | q = q.options(joinedload(Repository.fork))\ | |
826 | .options(joinedload(Repository.user))\ |
|
826 | .options(joinedload(Repository.user))\ | |
827 | .options(joinedload(Repository.group)) |
|
827 | .options(joinedload(Repository.group)) | |
828 | return q.scalar() |
|
828 | return q.scalar() | |
829 |
|
829 | |||
830 | @classmethod |
|
830 | @classmethod | |
831 | def get_by_full_path(cls, repo_full_path): |
|
831 | def get_by_full_path(cls, repo_full_path): | |
832 | repo_name = repo_full_path.split(cls.base_path(), 1)[-1] |
|
832 | repo_name = repo_full_path.split(cls.base_path(), 1)[-1] | |
833 | repo_name = cls.normalize_repo_name(repo_name) |
|
833 | repo_name = cls.normalize_repo_name(repo_name) | |
834 | return cls.get_by_repo_name(repo_name.strip(URL_SEP)) |
|
834 | return cls.get_by_repo_name(repo_name.strip(URL_SEP)) | |
835 |
|
835 | |||
836 | @classmethod |
|
836 | @classmethod | |
837 | def get_repo_forks(cls, repo_id): |
|
837 | def get_repo_forks(cls, repo_id): | |
838 | return cls.query().filter(Repository.fork_id == repo_id) |
|
838 | return cls.query().filter(Repository.fork_id == repo_id) | |
839 |
|
839 | |||
840 | @classmethod |
|
840 | @classmethod | |
841 | def base_path(cls): |
|
841 | def base_path(cls): | |
842 | """ |
|
842 | """ | |
843 | Returns the base path where all repos are stored |

843 | Returns the base path where all repos are stored | |
844 |
|
844 | |||
845 | :param cls: |
|
845 | :param cls: | |
846 | """ |
|
846 | """ | |
847 | q = Session().query(RhodeCodeUi)\ |
|
847 | q = Session().query(RhodeCodeUi)\ | |
848 | .filter(RhodeCodeUi.ui_key == cls.url_sep()) |
|
848 | .filter(RhodeCodeUi.ui_key == cls.url_sep()) | |
849 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
849 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) | |
850 | return q.one().ui_value |
|
850 | return q.one().ui_value | |
851 |
|
851 | |||
852 | @property |
|
852 | @property | |
853 | def forks(self): |
|
853 | def forks(self): | |
854 | """ |
|
854 | """ | |
855 | Return forks of this repo |
|
855 | Return forks of this repo | |
856 | """ |
|
856 | """ | |
857 | return Repository.get_repo_forks(self.repo_id) |
|
857 | return Repository.get_repo_forks(self.repo_id) | |
858 |
|
858 | |||
859 | @property |
|
859 | @property | |
860 | def parent(self): |
|
860 | def parent(self): | |
861 | """ |
|
861 | """ | |
862 | Returns fork parent |
|
862 | Returns fork parent | |
863 | """ |
|
863 | """ | |
864 | return self.fork |
|
864 | return self.fork | |
865 |
|
865 | |||
866 | @property |
|
866 | @property | |
867 | def just_name(self): |
|
867 | def just_name(self): | |
868 | return self.repo_name.split(Repository.url_sep())[-1] |
|
868 | return self.repo_name.split(Repository.url_sep())[-1] | |
869 |
|
869 | |||
870 | @property |
|
870 | @property | |
871 | def groups_with_parents(self): |
|
871 | def groups_with_parents(self): | |
872 | groups = [] |
|
872 | groups = [] | |
873 | if self.group is None: |
|
873 | if self.group is None: | |
874 | return groups |
|
874 | return groups | |
875 |
|
875 | |||
876 | cur_gr = self.group |
|
876 | cur_gr = self.group | |
877 | groups.insert(0, cur_gr) |
|
877 | groups.insert(0, cur_gr) | |
878 | while 1: |
|
878 | while 1: | |
879 | gr = getattr(cur_gr, 'parent_group', None) |
|
879 | gr = getattr(cur_gr, 'parent_group', None) | |
880 | cur_gr = cur_gr.parent_group |
|
880 | cur_gr = cur_gr.parent_group | |
881 | if gr is None: |
|
881 | if gr is None: | |
882 | break |
|
882 | break | |
883 | groups.insert(0, gr) |
|
883 | groups.insert(0, gr) | |
884 |
|
884 | |||
885 | return groups |
|
885 | return groups | |
886 |
|
886 | |||
887 | @property |
|
887 | @property | |
888 | def groups_and_repo(self): |
|
888 | def groups_and_repo(self): | |
889 | return self.groups_with_parents, self.just_name |
|
889 | return self.groups_with_parents, self.just_name | |
890 |
|
890 | |||
891 | @LazyProperty |
|
891 | @LazyProperty | |
892 | def repo_path(self): |
|
892 | def repo_path(self): | |
893 | """ |
|
893 | """ | |
894 | Returns the full base path for this repository, i.e. where it actually |

894 | Returns the full base path for this repository, i.e. where it actually | |
895 | exists on the filesystem |

895 | exists on the filesystem | |
896 | """ |
|
896 | """ | |
897 | q = Session().query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == |
|
897 | q = Session().query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == | |
898 | Repository.url_sep()) |
|
898 | Repository.url_sep()) | |
899 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
899 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) | |
900 | return q.one().ui_value |
|
900 | return q.one().ui_value | |
901 |
|
901 | |||
902 | @property |
|
902 | @property | |
903 | def repo_full_path(self): |
|
903 | def repo_full_path(self): | |
904 | p = [self.repo_path] |
|
904 | p = [self.repo_path] | |
905 | # we need to split the name by / since this is how we store the |
|
905 | # we need to split the name by / since this is how we store the | |
906 | # names in the database, but that eventually needs to be converted |
|
906 | # names in the database, but that eventually needs to be converted | |
907 | # into a valid system path |
|
907 | # into a valid system path | |
908 | p += self.repo_name.split(Repository.url_sep()) |
|
908 | p += self.repo_name.split(Repository.url_sep()) | |
909 | return os.path.join(*p) |
|
909 | return os.path.join(*p) | |
910 |
|
910 | |||
911 | @property |
|
911 | @property | |
912 | def cache_keys(self): |
|
912 | def cache_keys(self): | |
913 | """ |
|
913 | """ | |
914 | Returns associated cache keys for that repo |
|
914 | Returns associated cache keys for that repo | |
915 | """ |
|
915 | """ | |
916 | return CacheInvalidation.query()\ |
|
916 | return CacheInvalidation.query()\ | |
917 | .filter(CacheInvalidation.cache_args == self.repo_name)\ |
|
917 | .filter(CacheInvalidation.cache_args == self.repo_name)\ | |
918 | .order_by(CacheInvalidation.cache_key)\ |
|
918 | .order_by(CacheInvalidation.cache_key)\ | |
919 | .all() |
|
919 | .all() | |
920 |
|
920 | |||
921 | def get_new_name(self, repo_name): |
|
921 | def get_new_name(self, repo_name): | |
922 | """ |
|
922 | """ | |
923 | returns the new full repository name based on the assigned group and new repo_name |

923 | returns the new full repository name based on the assigned group and new repo_name | |
924 |

924 | |||
925 | :param repo_name: |

925 | :param repo_name: | |
926 | """ |
|
926 | """ | |
927 | path_prefix = self.group.full_path_splitted if self.group else [] |
|
927 | path_prefix = self.group.full_path_splitted if self.group else [] | |
928 | return Repository.url_sep().join(path_prefix + [repo_name]) |
|
928 | return Repository.url_sep().join(path_prefix + [repo_name]) | |
929 |
|
929 | |||
930 | @property |
|
930 | @property | |
931 | def _ui(self): |
|
931 | def _ui(self): | |
932 | """ |
|
932 | """ | |
933 | Creates a db-based ui object for this repository |

933 | Creates a db-based ui object for this repository | |
934 | """ |
|
934 | """ | |
935 | from rhodecode.lib.utils import make_ui |
|
935 | from rhodecode.lib.utils import make_ui | |
936 | return make_ui('db', clear_session=False) |
|
936 | return make_ui('db', clear_session=False) | |
937 |
|
937 | |||
938 | @classmethod |
|
938 | @classmethod | |
939 | def inject_ui(cls, repo, extras={}): |
|
939 | def inject_ui(cls, repo, extras={}): | |
940 | from rhodecode.lib.vcs.backends.hg import MercurialRepository |
|
940 | from rhodecode.lib.vcs.backends.hg import MercurialRepository | |
941 | from rhodecode.lib.vcs.backends.git import GitRepository |
|
941 | from rhodecode.lib.vcs.backends.git import GitRepository | |
942 | required = (MercurialRepository, GitRepository) |
|
942 | required = (MercurialRepository, GitRepository) | |
943 | if not isinstance(repo, required): |
|
943 | if not isinstance(repo, required): | |
944 | raise Exception('repo must be an instance of %s' % required) |

944 | raise Exception('repo must be an instance of %s' % required) | |
945 |
|
945 | |||
946 | # inject ui extra param to log this action via push logger |
|
946 | # inject ui extra param to log this action via push logger | |
947 | for k, v in extras.items(): |
|
947 | for k, v in extras.items(): | |
948 | repo._repo.ui.setconfig('rhodecode_extras', k, v) |
|
948 | repo._repo.ui.setconfig('rhodecode_extras', k, v) | |
949 |
|
949 | |||
950 | @classmethod |
|
950 | @classmethod | |
951 | def is_valid(cls, repo_name): |
|
951 | def is_valid(cls, repo_name): | |
952 | """ |
|
952 | """ | |
953 | returns True if given repo name is a valid filesystem repository |
|
953 | returns True if given repo name is a valid filesystem repository | |
954 |
|
954 | |||
955 | :param cls: |
|
955 | :param cls: | |
956 | :param repo_name: |
|
956 | :param repo_name: | |
957 | """ |
|
957 | """ | |
958 | from rhodecode.lib.utils import is_valid_repo |
|
958 | from rhodecode.lib.utils import is_valid_repo | |
959 |
|
959 | |||
960 | return is_valid_repo(repo_name, cls.base_path()) |
|
960 | return is_valid_repo(repo_name, cls.base_path()) | |
961 |
|
961 | |||
962 | def get_api_data(self): |
|
962 | def get_api_data(self): | |
963 | """ |
|
963 | """ | |
964 | Common function for generating repo api data |
|
964 | Common function for generating repo api data | |
965 |
|
965 | |||
966 | """ |
|
966 | """ | |
967 | repo = self |
|
967 | repo = self | |
968 | data = dict( |
|
968 | data = dict( | |
969 | repo_id=repo.repo_id, |
|
969 | repo_id=repo.repo_id, | |
970 | repo_name=repo.repo_name, |
|
970 | repo_name=repo.repo_name, | |
971 | repo_type=repo.repo_type, |
|
971 | repo_type=repo.repo_type, | |
972 | clone_uri=repo.clone_uri, |
|
972 | clone_uri=repo.clone_uri, | |
973 | private=repo.private, |
|
973 | private=repo.private, | |
974 | created_on=repo.created_on, |
|
974 | created_on=repo.created_on, | |
975 | description=repo.description, |
|
975 | description=repo.description, | |
976 | landing_rev=repo.landing_rev, |
|
976 | landing_rev=repo.landing_rev, | |
977 | owner=repo.user.username, |
|
977 | owner=repo.user.username, | |
978 | fork_of=repo.fork.repo_name if repo.fork else None, |
|
978 | fork_of=repo.fork.repo_name if repo.fork else None, | |
979 | enable_statistics=repo.enable_statistics, |
|
979 | enable_statistics=repo.enable_statistics, | |
980 | enable_locking=repo.enable_locking, |
|
980 | enable_locking=repo.enable_locking, | |
981 | enable_downloads=repo.enable_downloads, |
|
981 | enable_downloads=repo.enable_downloads, | |
982 | last_changeset=repo.changeset_cache |
|
982 | last_changeset=repo.changeset_cache | |
983 | ) |
|
983 | ) | |
984 | rc_config = RhodeCodeSetting.get_app_settings() |
|
984 | rc_config = RhodeCodeSetting.get_app_settings() | |
985 | repository_fields = str2bool(rc_config.get('rhodecode_repository_fields')) |
|
985 | repository_fields = str2bool(rc_config.get('rhodecode_repository_fields')) | |
986 | if repository_fields: |
|
986 | if repository_fields: | |
987 | for f in self.extra_fields: |
|
987 | for f in self.extra_fields: | |
988 | data[f.field_key_prefixed] = f.field_value |
|
988 | data[f.field_key_prefixed] = f.field_value | |
989 |
|
989 | |||
990 | return data |
|
990 | return data | |
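    # Added annotation (not part of the original module): get_api_data()
    # collects the core repository attributes into a plain dict for the API
    # layer; the extra custom fields are appended only when the
    # 'rhodecode_repository_fields' setting is enabled, each one keyed by its
    # prefixed field key.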

    @classmethod
    def lock(cls, repo, user_id):
        repo.locked = [user_id, time.time()]
        Session().add(repo)
        Session().commit()

    @classmethod
    def unlock(cls, repo):
        repo.locked = None
        Session().add(repo)
        Session().commit()

    @property
    def last_db_change(self):
        return self.updated_on

    def clone_url(self, **override):
        from pylons import url
        from urlparse import urlparse
        import urllib
        parsed_url = urlparse(url('home', qualified=True))
        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
        args = {
            'user': '',
            'pass': '',
            'scheme': parsed_url.scheme,
            'netloc': parsed_url.netloc,
            'prefix': decoded_path,
            'path': self.repo_name
        }

        args.update(override)
        return default_clone_uri % args
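    # Added annotation (not part of the original module): clone_url() fills
    # the template '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
    # from the application's 'home' URL plus this repository's name; keyword
    # arguments override individual pieces. Because 'pass' is a Python
    # keyword, that key can only be overridden by unpacking a dict, e.g.
    #   repo.clone_url(user='bob@')                     # hypothetical call
    #   repo.clone_url(**{'user': 'bob@', 'pass': ''})  # hypothetical call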

    #==========================================================================
    # SCM PROPERTIES
    #==========================================================================

    def get_changeset(self, rev=None):
        return get_changeset_safe(self.scm_instance, rev)

    def get_landing_changeset(self):
        """
        Returns landing changeset, or if that doesn't exist returns the tip
        """
        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
        return cs

    def update_changeset_cache(self, cs_cache=None):
        """
        Update cache of last changeset for repository, keys should be::

            short_id
            raw_id
            revision
            message
            date
            author

        :param cs_cache:
        """
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        if cs_cache is None:
            cs_cache = self.get_changeset()
        if isinstance(cs_cache, BaseChangeset):
            cs_cache = cs_cache.__json__()

        if (cs_cache != self.changeset_cache
                or not self.last_change
                or not self.changeset_cache):
            _default = datetime.datetime.fromtimestamp(0)
            last_change = cs_cache.get('date') or self.last_change or _default
            log.debug('updated repo %s with new cs cache %s' % (self, cs_cache))
            self.updated_on = last_change
            self.changeset_cache = cs_cache
            Session().add(self)
            Session().commit()
        else:
            log.debug('Skipping repo:%s already with latest changes' % self)
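    # Added annotation (not part of the original module): the changeset cache
    # lets dashboards show last-change data without opening the VCS backend.
    # The method only writes to the database when the supplied (or freshly
    # fetched) changeset differs from the stored cache, falling back to the
    # epoch when no commit date is available.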

    @property
    def tip(self):
        return self.get_changeset('tip')

    @property
    def author(self):
        return self.tip.author

    @property
    def last_change(self):
        return self.scm_instance.last_change

    def get_comments(self, revisions=None):
        """
        Returns comments for this repository grouped by revisions

        :param revisions: filter query by revisions only
        """
        cmts = ChangesetComment.query()\
            .filter(ChangesetComment.repo == self)
        if revisions:
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
        grouped = defaultdict(list)
        for cmt in cmts.all():
            grouped[cmt.revision].append(cmt)
        return grouped

    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        :type revisions: list
        """

        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)
        if revisions:
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
        grouped = {}

        # maybe we have an open pull request without a status yet?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.other_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        for stat in statuses.all():
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.other_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
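    # Added annotation (not part of the original module): the dict returned by
    # statuses() maps revision -> [status, status label, pull request id,
    # pull request repo name]; revisions that are part of a pull request but
    # have no recorded status yet default to "under review".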

    def _repo_size(self):
        from rhodecode.lib import helpers as h
        log.debug('calculating repository size...')
        return h.format_byte_size(self.scm_instance.size)

    #==========================================================================
    # SCM CACHE INSTANCE
    #==========================================================================

    @property
    def invalidate(self):
        return CacheInvalidation.invalidate(self.repo_name)

    def set_invalidate(self):
        """
        set a cache for invalidation for this instance
        """
        CacheInvalidation.set_invalidate(repo_name=self.repo_name)

    @LazyProperty
    def scm_instance_no_cache(self):
        return self.__get_instance()

    @LazyProperty
    def scm_instance(self):
        import rhodecode
        full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        if full_cache:
            return self.scm_instance_cached()
        return self.__get_instance()

    def scm_instance_cached(self, cache_map=None):
        @cache_region('long_term')
        def _c(repo_name):
            return self.__get_instance()
        rn = self.repo_name
        log.debug('Getting cached instance of repo')

        if cache_map:
            # get using prefilled cache_map
            invalidate_repo = cache_map[self.repo_name]
            if invalidate_repo:
                invalidate_repo = (None if invalidate_repo.cache_active
                                   else invalidate_repo)
        else:
            # get from invalidate
            invalidate_repo = self.invalidate

        if invalidate_repo is not None:
            region_invalidate(_c, None, rn)
            # update our cache
            CacheInvalidation.set_valid(invalidate_repo.cache_key)
        return _c(rn)
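    # Added annotation (not part of the original module): scm_instance_cached()
    # keeps the vcs repository object in the 'long_term' cache region keyed by
    # repository name. When the matching CacheInvalidation row reports an
    # inactive cache, the region entry is invalidated, a fresh instance is
    # built on the next call, and the row is marked valid again (see the
    # CacheInvalidation model below).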

    def __get_instance(self):
        repo_full_path = self.repo_full_path
        try:
            alias = get_scm(repo_full_path)[0]
            log.debug('Creating instance of %s repository' % alias)
            backend = get_backend(alias)
        except VCSError:
            log.error(traceback.format_exc())
            log.error('Perhaps this repository is in db and not in '
                      'filesystem; run rescan repositories with the '
                      '"destroy old data" option from the admin panel')
            return

        if alias == 'hg':

            repo = backend(safe_str(repo_full_path), create=False,
                           baseui=self._ui)
            # skip hidden web repository
            if repo._get_hidden():
                return
        else:
            repo = backend(repo_full_path, create=False)

        return repo


class RepoGroup(Base, BaseModel):
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        CheckConstraint('group_id != group_parent_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    __mapper_args__ = {'order_by': 'group_name'}

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UsersGroupRepoGroupToPerm', cascade='all')

    parent_group = relationship('RepoGroup', remote_side=group_id)

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
                                   self.group_name)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        from webhelpers.html import literal as _literal
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [('-1', '-- no parent --')]
        sep = ' » '
        _name = lambda k: _literal(sep.join(k))

        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
                            for x in groups])

        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        if case_insensitive:
            gr = cls.query()\
                .filter(cls.group_name.ilike(group_name))
        else:
            gr = cls.query()\
                .filter(cls.group_name == group_name)
        if cache:
            gr = gr.options(FromCache(
                "sql_cache_short",
                "get_group_%s" % _hash_key(group_name)
            ))
        return gr.scalar()

    @property
    def parents(self):
        parents_recursion_limit = 5
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('group nested more than %s' %
                          parents_recursion_limit)
                break

            groups.insert(0, gr)
        return groups

    @property
    def children(self):
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def recursive_groups_and_repos(self):
        """
        Recursively returns all groups, with the repositories in those groups
        """
        all_ = []

        def _get_members(root_gr):
            for r in root_gr.repositories:
                all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    all_.append(gr)
                    _get_members(gr)

        _get_members(self)
        return [self] + all_
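    # Added annotation (not part of the original module): a hedged sketch of
    # the traversal order. For a tree grp1 containing repo_a and a child group
    # grp2 containing repo_b, grp1.recursive_groups_and_repos() returns
    # [grp1, repo_a, grp2, repo_b] -- the group itself, its repositories, then
    # each child group followed by that child's members.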

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name:
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])


class Permission(Base, BaseModel):
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    PERMS = [
        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('hg.admin', _('RhodeCode Administrator')),
        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),
        ('hg.register.none', _('Register disabled')),
        ('hg.register.manual_activate', _('Register new user with RhodeCode '
                                          'with manual activation')),

        ('hg.register.auto_activate', _('Register new user with RhodeCode '
                                        'with auto activation')),
    ]

    # defines which permissions are more important; the higher the weight,
    # the more important the permission
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }
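    # Added annotation (not part of the original module): the weights only
    # order permissions by strength (higher means more important); permission
    # resolution code can use them to pick the strongest grant when a user
    # receives several permissions on the same object, e.g. 'repository.write'
    # (3) outranks 'repository.read' (1).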

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_perms(cls, default_user_id):
        q = Session().query(UserRepoToPerm, Repository, cls)\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .join((cls, UserRepoToPerm.permission_id == cls.permission_id))\
            .filter(UserRepoToPerm.user_id == default_user_id)

        return q.all()

    @classmethod
    def get_default_group_perms(cls, default_user_id):
        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls)\
            .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
            .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id))\
            .filter(UserRepoGroupToPerm.user_id == default_user_id)

        return q.all()


class UserRepoToPerm(Base, BaseModel):
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository, permission):
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<user:%s => %s >' % (self.user, self.repository)


class UserToPerm(Base, BaseModel):
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission', lazy='joined')


class UsersGroupRepoToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UsersGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    @classmethod
    def create(cls, users_group, repository, permission):
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<userGroup:%s => %s >' % (self.users_group, self.repository)


class UsersGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UsersGroup')
    permission = relationship('Permission')


class UserRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')


class UsersGroupRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UsersGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')


class Statistics(Base, BaseModel):
    __tablename__ = 'statistics'
    __table_args__ = (
        UniqueConstraint('repository_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)


class UserFollowing(Base, BaseModel):
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        return cls.query().filter(cls.follows_repo_id == repo_id)


class CacheInvalidation(Base, BaseModel):
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (self.__class__.__name__,
                                   self.cache_id, self.cache_key)

    @property
    def prefix(self):
        _split = self.cache_key.split(self.cache_args, 1)
        if _split and len(_split) == 2:
            return _split[0]
        return ''

    @classmethod
    def clear_cache(cls):
        cls.query().delete()

    @classmethod
    def _get_key(cls, key):
        """
        Wrapper for generating a key, together with a prefix

        :param key:
        """
        import rhodecode
        prefix = ''
        org_key = key
        iid = rhodecode.CONFIG.get('instance_id')
        if iid:
            prefix = iid

        return "%s%s" % (prefix, key), prefix, org_key
1663 |
|
1663 | |||
1664 | @classmethod |
|
1664 | @classmethod | |
1665 | def get_by_key(cls, key): |
|
1665 | def get_by_key(cls, key): | |
1666 | return cls.query().filter(cls.cache_key == key).scalar() |
|
1666 | return cls.query().filter(cls.cache_key == key).scalar() | |
1667 |
|
1667 | |||
1668 | @classmethod |
|
1668 | @classmethod | |
1669 | def get_by_repo_name(cls, repo_name): |
|
1669 | def get_by_repo_name(cls, repo_name): | |
1670 | return cls.query().filter(cls.cache_args == repo_name).all() |
|
1670 | return cls.query().filter(cls.cache_args == repo_name).all() | |
1671 |
|
1671 | |||
1672 | @classmethod |
|
1672 | @classmethod | |
1673 | def _get_or_create_key(cls, key, repo_name, commit=True): |
|
1673 | def _get_or_create_key(cls, key, repo_name, commit=True): | |
1674 | inv_obj = Session().query(cls).filter(cls.cache_key == key).scalar() |
|
1674 | inv_obj = Session().query(cls).filter(cls.cache_key == key).scalar() | |
1675 | if not inv_obj: |
|
1675 | if not inv_obj: | |
1676 | try: |
|
1676 | try: | |
1677 | inv_obj = CacheInvalidation(key, repo_name) |
|
1677 | inv_obj = CacheInvalidation(key, repo_name) | |
1678 | Session().add(inv_obj) |
|
1678 | Session().add(inv_obj) | |
1679 | if commit: |
|
1679 | if commit: | |
1680 | Session().commit() |
|
1680 | Session().commit() | |
1681 | except Exception: |
|
1681 | except Exception: | |
1682 | log.error(traceback.format_exc()) |
|
1682 | log.error(traceback.format_exc()) | |
1683 | Session().rollback() |
|
1683 | Session().rollback() | |
1684 | return inv_obj |
|
1684 | return inv_obj | |
1685 |
|
1685 | |||
1686 | @classmethod |
|
1686 | @classmethod | |
1687 | def invalidate(cls, key): |
|
1687 | def invalidate(cls, key): | |
1688 | """ |
|
1688 | """ | |
1689 | Returns Invalidation object if this given key should be invalidated |
|
1689 | Returns Invalidation object if this given key should be invalidated | |
1690 | None otherwise. `cache_active = False` means that this cache |
|
1690 | None otherwise. `cache_active = False` means that this cache | |
1691 | state is not valid and needs to be invalidated |
|
1691 | state is not valid and needs to be invalidated | |
1692 |
|
1692 | |||
1693 | :param key: |
|
1693 | :param key: | |
1694 | """ |
|
1694 | """ | |
1695 | repo_name = key |
|
1695 | repo_name = key | |
1696 | repo_name = remove_suffix(repo_name, '_README') |
|
1696 | repo_name = remove_suffix(repo_name, '_README') | |
1697 | repo_name = remove_suffix(repo_name, '_RSS') |
|
1697 | repo_name = remove_suffix(repo_name, '_RSS') | |
1698 | repo_name = remove_suffix(repo_name, '_ATOM') |
|
1698 | repo_name = remove_suffix(repo_name, '_ATOM') | |
1699 |
|
1699 | |||
1700 | # adds instance prefix |
|
1700 | # adds instance prefix | |
1701 | key, _prefix, _org_key = cls._get_key(key) |
|
1701 | key, _prefix, _org_key = cls._get_key(key) | |
1702 | inv = cls._get_or_create_key(key, repo_name) |
|
1702 | inv = cls._get_or_create_key(key, repo_name) | |
1703 |
|
1703 | |||
1704 | if inv and inv.cache_active is False: |
|
1704 | if inv and inv.cache_active is False: | |
1705 | return inv |
|
1705 | return inv | |
1706 |
|
1706 | |||
1707 | @classmethod |
|
1707 | @classmethod | |
1708 | def set_invalidate(cls, key=None, repo_name=None): |
|
1708 | def set_invalidate(cls, key=None, repo_name=None): | |
1709 | """ |
|
1709 | """ | |
1710 | Mark this Cache key for invalidation, either by key or whole |
|
1710 | Mark this Cache key for invalidation, either by key or whole | |
1711 | cache sets based on repo_name |
|
1711 | cache sets based on repo_name | |
1712 |
|
1712 | |||
1713 | :param key: |
|
1713 | :param key: | |
1714 | """ |
|
1714 | """ | |
1715 | invalidated_keys = [] |
|
1715 | invalidated_keys = [] | |
1716 | if key: |
|
1716 | if key: | |
1717 | key, _prefix, _org_key = cls._get_key(key) |
|
1717 | key, _prefix, _org_key = cls._get_key(key) | |
1718 | inv_objs = Session().query(cls).filter(cls.cache_key == key).all() |
|
1718 | inv_objs = Session().query(cls).filter(cls.cache_key == key).all() | |
1719 | elif repo_name: |
|
1719 | elif repo_name: | |
1720 | inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all() |
|
1720 | inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all() | |
1721 |
|
1721 | |||
1722 | try: |
|
1722 | try: | |
1723 | for inv_obj in inv_objs: |
|
1723 | for inv_obj in inv_objs: | |
1724 | inv_obj.cache_active = False |
|
1724 | inv_obj.cache_active = False | |
1725 | log.debug('marking %s key for invalidation based on key=%s,repo_name=%s' |
|
1725 | log.debug('marking %s key for invalidation based on key=%s,repo_name=%s' | |
1726 | % (inv_obj, key, repo_name)) |
|
1726 | % (inv_obj, key, repo_name)) | |
1727 | invalidated_keys.append(inv_obj.cache_key) |
|
1727 | invalidated_keys.append(inv_obj.cache_key) | |
1728 | Session().add(inv_obj) |
|
1728 | Session().add(inv_obj) | |
1729 | Session().commit() |
|
1729 | Session().commit() | |
1730 | except Exception: |
|
1730 | except Exception: | |
1731 | log.error(traceback.format_exc()) |
|
1731 | log.error(traceback.format_exc()) | |
1732 | Session().rollback() |
|
1732 | Session().rollback() | |
1733 | return invalidated_keys |
|
1733 | return invalidated_keys | |
1734 |
|
1734 | |||
1735 | @classmethod |
|
1735 | @classmethod | |
1736 | def set_valid(cls, key): |
|
1736 | def set_valid(cls, key): | |
1737 | """ |
|
1737 | """ | |
1738 | Mark this cache key as active and currently cached |
|
1738 | Mark this cache key as active and currently cached | |
1739 |
|
1739 | |||
1740 | :param key: |
|
1740 | :param key: | |
1741 | """ |
|
1741 | """ | |
1742 | inv_obj = cls.get_by_key(key) |
|
1742 | inv_obj = cls.get_by_key(key) | |
1743 | inv_obj.cache_active = True |
|
1743 | inv_obj.cache_active = True | |
1744 | Session().add(inv_obj) |
|
1744 | Session().add(inv_obj) | |
1745 | Session().commit() |
|
1745 | Session().commit() | |
1746 |
|
1746 | |||
1747 | @classmethod |
|
1747 | @classmethod | |
1748 | def get_cache_map(cls): |
|
1748 | def get_cache_map(cls): | |
1749 |
|
1749 | |||
1750 | class cachemapdict(dict): |
|
1750 | class cachemapdict(dict): | |
1751 |
|
1751 | |||
1752 | def __init__(self, *args, **kwargs): |
|
1752 | def __init__(self, *args, **kwargs): | |
1753 | fixkey = kwargs.get('fixkey') |
|
1753 | fixkey = kwargs.get('fixkey') | |
1754 | if fixkey: |
|
1754 | if fixkey: | |
1755 | del kwargs['fixkey'] |
|
1755 | del kwargs['fixkey'] | |
1756 | self.fixkey = fixkey |
|
1756 | self.fixkey = fixkey | |
1757 | super(cachemapdict, self).__init__(*args, **kwargs) |
|
1757 | super(cachemapdict, self).__init__(*args, **kwargs) | |
1758 |
|
1758 | |||
1759 | def __getattr__(self, name): |
|
1759 | def __getattr__(self, name): | |
1760 | key = name |
|
1760 | key = name | |
1761 | if self.fixkey: |
|
1761 | if self.fixkey: | |
1762 | key, _prefix, _org_key = cls._get_key(key) |
|
1762 | key, _prefix, _org_key = cls._get_key(key) | |
1763 | if key in self.__dict__: |
|
1763 | if key in self.__dict__: | |
1764 | return self.__dict__[key] |
|
1764 | return self.__dict__[key] | |
1765 | else: |
|
1765 | else: | |
1766 | return self[key] |
|
1766 | return self[key] | |
1767 |
|
1767 | |||
1768 | def __getitem__(self, key): |
|
1768 | def __getitem__(self, key): | |
1769 | if self.fixkey: |
|
1769 | if self.fixkey: | |
1770 | key, _prefix, _org_key = cls._get_key(key) |
|
1770 | key, _prefix, _org_key = cls._get_key(key) | |
1771 | try: |
|
1771 | try: | |
1772 | return super(cachemapdict, self).__getitem__(key) |
|
1772 | return super(cachemapdict, self).__getitem__(key) | |
1773 | except KeyError: |
|
1773 | except KeyError: | |
1774 | return |
|
1774 | return | |
1775 |
|
1775 | |||
1776 | cache_map = cachemapdict(fixkey=True) |
|
1776 | cache_map = cachemapdict(fixkey=True) | |
1777 | for obj in cls.query().all(): |
|
1777 | for obj in cls.query().all(): | |
1778 | cache_map[obj.cache_key] = cachemapdict(obj.get_dict()) |
|
1778 | cache_map[obj.cache_key] = cachemapdict(obj.get_dict()) | |
1779 | return cache_map |
|
1779 | return cache_map | |
1780 |
|
1780 | |||
1781 |
|
1781 | |||
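
# NOTE: the helper below is an editor-added sketch, not part of the original
# module. It only illustrates the intended CacheInvalidation protocol
# (invalidate -> recompute -> set_valid); the `compute_value` callable and the
# repository name passed in are hypothetical.
def _example_cache_invalidation_flow(repo_name, compute_value):
    """Recompute a cached value only when its key was marked as stale."""
    inv = CacheInvalidation.invalidate(repo_name)
    if inv is not None:
        # cache_active was False, so the cached data is stale: rebuild it,
        # then flag the (instance-prefixed) key as valid again
        value = compute_value()
        CacheInvalidation.set_valid(inv.cache_key)
        return value
    # cache is still valid; the caller keeps using the cached data
    return None
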
class ChangesetComment(Base, BaseModel):
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText(25000), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
    pull_request = relationship('PullRequest', lazy='joined')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
                .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()


class ChangesetStatus(Base, BaseModel):
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__,
            self.status, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        return ChangesetStatus.get_status_lbl(self.status)


class PullRequest(Base, BaseModel):
    __tablename__ = 'pull_requests'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )

    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
    title = Column('title', Unicode(256), nullable=True)
    description = Column('description', UnicodeText(10240), nullable=True)
    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    _revisions = Column('revisions', UnicodeText(20500)) # 500 revisions max
    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    org_ref = Column('org_ref', Unicode(256), nullable=False)
    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    other_ref = Column('other_ref', Unicode(256), nullable=False)

    @hybrid_property
    def revisions(self):
        return self._revisions.split(':')

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

    @property
    def org_ref_parts(self):
        return self.org_ref.split(':')

    @property
    def other_ref_parts(self):
        return self.other_ref.split(':')

    author = relationship('User', lazy='joined')
    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
    statuses = relationship('ChangesetStatus')
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    @property
    def last_review_status(self):
        return self.statuses[-1].status if self.statuses else ''

    def __json__(self):
        return dict(
            revisions=self.revisions
        )

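
# NOTE: editor-added sketch, not part of the original module. It shows how the
# `revisions` hybrid property round-trips a list of changeset hashes through
# the colon-joined `_revisions` text column; the hash values are made up.
def _example_pull_request_revisions():
    pr = PullRequest()
    pr.revisions = ['aaa111', 'bbb222', 'ccc333']  # stored as u'aaa111:bbb222:ccc333'
    assert pr.revisions == ['aaa111', 'bbb222', 'ccc333']
    return pr.revisions
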
class PullRequestReviewers(Base, BaseModel):
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )

    def __init__(self, user=None, pull_request=None):
        self.user = user
        self.pull_request = pull_request

    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)

    user = relationship('User')
    pull_request = relationship('PullRequest')


class Notification(Base, BaseModel):
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )

    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText(50000), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(256))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")

    @property
    def recipients(self):
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        for u in recipients:
            assoc = UserNotification()
            assoc.notification = notification
            u.notifications.append(assoc)
        Session().add(notification)
        return notification

    @property
    def description(self):
        from rhodecode.model.notification import NotificationModel
        return NotificationModel().make_description(self)

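
# NOTE: editor-added sketch, not part of the original module. A minimal example
# of Notification.create() wiring a message to its recipients; the usernames
# and message text are hypothetical, and the commit is left to the caller.
def _example_send_notification():
    sender = User.get_by_username('admin')
    recipients = [User.get_by_username('demo')]
    notif = Notification.create(created_by=sender, subject=u'hello',
                                body=u'just a test message',
                                recipients=recipients,
                                type_=Notification.TYPE_MESSAGE)
    Session().commit()  # create() only adds the objects to the session
    return notif
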
class UserNotification(Base, BaseModel):
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        self.read = True
        Session().add(self)


class DbMigrateVersion(Base, BaseModel):
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)
@@ -1,426 +1,426 b'' | |||||
# -*- coding: utf-8 -*-
"""
    rhodecode.model.user_group
    ~~~~~~~~~~~~~~~~~~~~~~~~~~

    repo group model for RhodeCode

    :created_on: Jan 25, 2011
    :author: marcink
    :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import os
import logging
import traceback
import shutil
import datetime

from rhodecode.lib.utils2 import LazyProperty

from rhodecode.model import BaseModel
from rhodecode.model.db import RepoGroup, RhodeCodeUi, UserRepoGroupToPerm, \
    User, Permission, UsersGroupRepoGroupToPerm, UsersGroup, Repository

log = logging.getLogger(__name__)


class ReposGroupModel(BaseModel):

    cls = RepoGroup

    def __get_users_group(self, users_group):
        return self._get_instance(UsersGroup, users_group,
                                  callback=UsersGroup.get_by_group_name)

    def _get_repos_group(self, repos_group):
        return self._get_instance(RepoGroup, repos_group,
                                  callback=RepoGroup.get_by_group_name)

    @LazyProperty
    def repos_path(self):
        """
        Get's the repositories root path from database
        """

        q = RhodeCodeUi.get_by_key('/')
        return q.ui_value

    def _create_default_perms(self, new_group):
        # create default permission
        repo_group_to_perm = UserRepoGroupToPerm()
        default_perm = 'group.read'
        for p in User.get_by_username('default').user_perms:
            if p.permission.permission_name.startswith('group.'):
                default_perm = p.permission.permission_name
                break

        repo_group_to_perm.permission_id = self.sa.query(Permission)\
                .filter(Permission.permission_name == default_perm)\
                .one().permission_id

        repo_group_to_perm.group = new_group
        repo_group_to_perm.user_id = User.get_by_username('default').user_id

        self.sa.add(repo_group_to_perm)

    def __create_group(self, group_name):
        """
        makes repository group on filesystem

        :param repo_name:
        :param parent_id:
        """

        create_path = os.path.join(self.repos_path, group_name)
        log.debug('creating new group in %s' % create_path)

        if os.path.isdir(create_path):
            raise Exception('That directory already exists !')

        os.makedirs(create_path)

    def __rename_group(self, old, new):
        """
        Renames a group on filesystem

        :param group_name:
        """

        if old == new:
            log.debug('skipping group rename')
            return

        log.debug('renaming repos group from %s to %s' % (old, new))

        old_path = os.path.join(self.repos_path, old)
        new_path = os.path.join(self.repos_path, new)

        log.debug('renaming repos paths from %s to %s' % (old_path, new_path))

        if os.path.isdir(new_path):
            raise Exception('Was trying to rename to already '
                            'existing dir %s' % new_path)
        shutil.move(old_path, new_path)

    def __delete_group(self, group, force_delete=False):
        """
        Deletes a group from a filesystem

        :param group: instance of group from database
        :param force_delete: use shutil rmtree to remove all objects
        """
        paths = group.full_path.split(RepoGroup.url_sep())
        paths = os.sep.join(paths)

        rm_path = os.path.join(self.repos_path, paths)
        log.info("Removing group %s" % (rm_path))
        # delete only if that path really exists
        if os.path.isdir(rm_path):
            if force_delete:
                shutil.rmtree(rm_path)
            else:
                #archive that group`
                _now = datetime.datetime.now()
                _ms = str(_now.microsecond).rjust(6, '0')
                _d = 'rm__%s_GROUP_%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                                          group.name)
                shutil.move(rm_path, os.path.join(self.repos_path, _d))

    def create(self, group_name, group_description, owner, parent=None, just_db=False):
        try:
            new_repos_group = RepoGroup()
            new_repos_group.group_description = group_description or group_name
            new_repos_group.parent_group = self._get_repos_group(parent)
            new_repos_group.group_name = new_repos_group.get_new_name(group_name)

            self.sa.add(new_repos_group)
            self._create_default_perms(new_repos_group)

            #create an ADMIN permission for owner, later owner should go into
            #the owner field of groups
            self.grant_user_permission(repos_group=new_repos_group,
                                       user=owner, perm='group.admin')

            if not just_db:
                # we need to flush here, in order to check if database won't
                # throw any exceptions, create filesystem dirs at the very end
                self.sa.flush()
                self.__create_group(new_repos_group.group_name)

            return new_repos_group
        except:
            log.error(traceback.format_exc())
            raise

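
    # NOTE: editor-added sketch, not part of the original file. Typical use of
    # create() from calling code; the group name, description and owner are
    # hypothetical, and the final commit stays with the caller:
    #
    #   model = ReposGroupModel()
    #   group = model.create(group_name='projects/web',
    #                        group_description='web related projects',
    #                        owner=User.get_by_username('admin'))
    #   model.sa.commit()  # create() only flushes and makes the dir on disk
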
    def _update_permissions(self, repos_group, perms_new=None,
                            perms_updates=None, recursive=False):
        from rhodecode.model.repo import RepoModel
        if not perms_new:
            perms_new = []
        if not perms_updates:
            perms_updates = []

        def _set_perm_user(obj, user, perm):
            if isinstance(obj, RepoGroup):
                ReposGroupModel().grant_user_permission(
                    repos_group=obj, user=user, perm=perm
                )
            elif isinstance(obj, Repository):
                #we do this ONLY IF repository is non-private
                if obj.private:
                    return

                # we set group permission but we have to switch to repo
                # permission
                perm = perm.replace('group.', 'repository.')
                RepoModel().grant_user_permission(
                    repo=obj, user=user, perm=perm
                )

        def _set_perm_group(obj, users_group, perm):
            if isinstance(obj, RepoGroup):
                ReposGroupModel().grant_users_group_permission(
                    repos_group=obj, group_name=users_group, perm=perm
                )
            elif isinstance(obj, Repository):
                # we set group permission but we have to switch to repo
                # permission
                perm = perm.replace('group.', 'repository.')
                RepoModel().grant_users_group_permission(
                    repo=obj, group_name=users_group, perm=perm
                )
        updates = []
        log.debug('Now updating permissions for %s in recursive mode:%s'
                  % (repos_group, recursive))

        for obj in repos_group.recursive_groups_and_repos():
            #obj is an instance of a group or repositories in that group
            if not recursive:
                obj = repos_group

            # update permissions
            for member, perm, member_type in perms_updates:
                ## set for user
                if member_type == 'user':
                    # this updates also current one if found
                    _set_perm_user(obj, user=member, perm=perm)
                ## set for user group
                else:
                    _set_perm_group(obj, users_group=member, perm=perm)
            # set new permissions
            for member, perm, member_type in perms_new:
                if member_type == 'user':
                    _set_perm_user(obj, user=member, perm=perm)
                else:
                    _set_perm_group(obj, users_group=member, perm=perm)
            updates.append(obj)
            #if it's not recursive call
            # break the loop and don't proceed with other changes
            if not recursive:
                break
        return updates

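
    # NOTE: editor-added sketch, not part of the original file. How update()
    # feeds _update_permissions(); the member names and permissions below are
    # made up, and private repositories are skipped for user permissions:
    #
    #   model = ReposGroupModel()
    #   group = model._get_repos_group('projects/web')
    #   model._update_permissions(group,
    #                             perms_new=[('devs', 'group.read', 'users_group')],
    #                             perms_updates=[('john', 'group.write', 'user')],
    #                             recursive=True)
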
237 | def update(self, repos_group, form_data): |
|
237 | def update(self, repos_group, form_data): | |
238 |
|
238 | |||
239 | try: |
|
239 | try: | |
240 | repos_group = self._get_repos_group(repos_group) |
|
240 | repos_group = self._get_repos_group(repos_group) | |
241 | recursive = form_data['recursive'] |
|
241 | recursive = form_data['recursive'] | |
242 | # iterate over all members(if in recursive mode) of this groups and |
|
242 | # iterate over all members(if in recursive mode) of this groups and | |
243 | # set the permissions ! |
|
243 | # set the permissions ! | |
244 | # this can be potentially heavy operation |
|
244 | # this can be potentially heavy operation | |
245 | self._update_permissions(repos_group, form_data['perms_new'], |
|
245 | self._update_permissions(repos_group, form_data['perms_new'], | |
246 | form_data['perms_updates'], recursive) |
|
246 | form_data['perms_updates'], recursive) | |
247 |
|
247 | |||
248 | old_path = repos_group.full_path |
|
248 | old_path = repos_group.full_path | |
249 |
|
249 | |||
250 | # change properties |
|
250 | # change properties | |
251 | repos_group.group_description = form_data['group_description'] |
|
251 | repos_group.group_description = form_data['group_description'] | |
252 | repos_group.parent_group = RepoGroup.get(form_data['group_parent_id']) |
|
252 | repos_group.parent_group = RepoGroup.get(form_data['group_parent_id']) | |
253 | repos_group.group_parent_id = form_data['group_parent_id'] |
|
253 | repos_group.group_parent_id = form_data['group_parent_id'] | |
254 | repos_group.enable_locking = form_data['enable_locking'] |
|
254 | repos_group.enable_locking = form_data['enable_locking'] | |
255 | repos_group.group_name = repos_group.get_new_name(form_data['group_name']) |
|
255 | repos_group.group_name = repos_group.get_new_name(form_data['group_name']) | |
256 | new_path = repos_group.full_path |
|
256 | new_path = repos_group.full_path | |
257 |
|
257 | |||
258 | self.sa.add(repos_group) |
|
258 | self.sa.add(repos_group) | |
259 |
|
259 | |||
260 | # iterate over all members of this groups and set the locking ! |
|
260 | # iterate over all members of this groups and set the locking ! | |
261 | # this can be potentially heavy operation |
|
261 | # this can be potentially heavy operation | |
262 | for obj in repos_group.recursive_groups_and_repos(): |
|
262 | for obj in repos_group.recursive_groups_and_repos(): | |
263 | #set the value from it's parent |
|
263 | #set the value from it's parent | |
264 | obj.enable_locking = repos_group.enable_locking |
|
264 | obj.enable_locking = repos_group.enable_locking | |
265 | self.sa.add(obj) |
|
265 | self.sa.add(obj) | |
266 |
|
266 | |||
267 | # we need to get all repositories from this new group and |
|
267 | # we need to get all repositories from this new group and | |
268 | # rename them accordingly to new group path |
|
268 | # rename them accordingly to new group path | |
269 | for r in repos_group.repositories: |
|
269 | for r in repos_group.repositories: | |
270 | r.repo_name = r.get_new_name(r.just_name) |
|
270 | r.repo_name = r.get_new_name(r.just_name) | |
271 | self.sa.add(r) |
|
271 | self.sa.add(r) | |
272 |
|
272 | |||
273 | self.__rename_group(old_path, new_path) |
|
273 | self.__rename_group(old_path, new_path) | |
274 |
|
274 | |||
275 | return repos_group |
|
275 | return repos_group | |
276 | except: |
|
276 | except: | |
277 | log.error(traceback.format_exc()) |
|
277 | log.error(traceback.format_exc()) | |
278 | raise |
|
278 | raise | |
279 |
|
279 | |||
280 | def delete(self, repos_group, force_delete=False): |
|
280 | def delete(self, repos_group, force_delete=False): | |
281 | repos_group = self._get_repos_group(repos_group) |
|
281 | repos_group = self._get_repos_group(repos_group) | |
282 | try: |
|
282 | try: | |
283 | self.sa.delete(repos_group) |
|
283 | self.sa.delete(repos_group) | |
284 | self.__delete_group(repos_group, force_delete) |
|
284 | self.__delete_group(repos_group, force_delete) | |
285 | except: |
|
285 | except: | |
286 | log.error('Error removing repos_group %s' % repos_group) |
|
286 | log.error('Error removing repos_group %s' % repos_group) | |
287 | raise |
|
287 | raise | |
288 |
|
288 | |||
289 | def delete_permission(self, repos_group, obj, obj_type, recursive): |
|
289 | def delete_permission(self, repos_group, obj, obj_type, recursive): | |
290 | """ |
|
290 | """ | |
291 | Revokes permission for repos_group for given obj(user or users_group), |
|
291 | Revokes permission for repos_group for given obj(user or users_group), | |
292 | obj_type can be user or user group |
|
292 | obj_type can be user or user group | |
293 |
|
293 | |||
294 | :param repos_group: |
|
294 | :param repos_group: | |
295 | :param obj: user or user group id |
|
295 | :param obj: user or user group id | |
296 | :param obj_type: user or user group type |
|
296 | :param obj_type: user or user group type | |
297 | :param recursive: recurse to all children of group |
|
297 | :param recursive: recurse to all children of group | |
298 | """ |
|
298 | """ | |
299 | from rhodecode.model.repo import RepoModel |
|
299 | from rhodecode.model.repo import RepoModel | |
300 | repos_group = self._get_repos_group(repos_group) |
|
300 | repos_group = self._get_repos_group(repos_group) | |
301 |
|
301 | |||
302 | for el in repos_group.recursive_groups_and_repos(): |
|
302 | for el in repos_group.recursive_groups_and_repos(): | |
303 | if not recursive: |
|
303 | if not recursive: | |
304 | # if we don't recurse set the permission on only the top level |
|
304 | # if we don't recurse set the permission on only the top level | |
305 | # object |
|
305 | # object | |
306 | el = repos_group |
|
306 | el = repos_group | |
307 |
|
307 | |||
308 | if isinstance(el, RepoGroup): |
|
308 | if isinstance(el, RepoGroup): | |
309 | if obj_type == 'user': |
|
309 | if obj_type == 'user': | |
310 | ReposGroupModel().revoke_user_permission(el, user=obj) |
|
310 | ReposGroupModel().revoke_user_permission(el, user=obj) | |
311 | elif obj_type == 'users_group': |
|
311 | elif obj_type == 'users_group': | |
312 | ReposGroupModel().revoke_users_group_permission(el, group_name=obj) |
|
312 | ReposGroupModel().revoke_users_group_permission(el, group_name=obj) | |
313 | else: |
|
313 | else: | |
314 | raise Exception('undefined object type %s' % obj_type) |
|
314 | raise Exception('undefined object type %s' % obj_type) | |
315 | elif isinstance(el, Repository): |
|
315 | elif isinstance(el, Repository): | |
316 | if obj_type == 'user': |
|
316 | if obj_type == 'user': | |
317 | RepoModel().revoke_user_permission(el, user=obj) |
|
317 | RepoModel().revoke_user_permission(el, user=obj) | |
318 | elif obj_type == 'users_group': |
|
318 | elif obj_type == 'users_group': | |
319 | RepoModel().revoke_users_group_permission(el, group_name=obj) |
|
319 | RepoModel().revoke_users_group_permission(el, group_name=obj) | |
320 | else: |
|
320 | else: | |
321 | raise Exception('undefined object type %s' % obj_type) |
|
321 | raise Exception('undefined object type %s' % obj_type) | |
322 |
|
322 | |||
323 | #if it's not recursive call |
|
323 | #if it's not recursive call | |
324 | # break the loop and don't proceed with other changes |
|
324 | # break the loop and don't proceed with other changes | |
325 | if not recursive: |
|
325 | if not recursive: | |
326 | break |
|
326 | break | |
327 |
|
327 | |||
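A minimal usage sketch for delete_permission above. This is illustrative only: the import paths are assumed from the usual RhodeCode layout, the group 'docs', user 'john' and user group 'developers' are hypothetical, and a configured database session is assumed (the caller commits it afterwards).

    from rhodecode.model.repos_group import ReposGroupModel
    from rhodecode.model.meta import Session

    model = ReposGroupModel()
    # revoke 'john' only on the top-level group (recursive=False processes
    # just the group itself and then breaks out of the loop)
    model.delete_permission('docs', obj='john', obj_type='user', recursive=False)
    # strip the 'developers' user group from the group and everything below it
    model.delete_permission('docs', obj='developers', obj_type='users_group',
                            recursive=True)
    Session().commit()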
328 | def grant_user_permission(self, repos_group, user, perm): |
|
328 | def grant_user_permission(self, repos_group, user, perm): | |
329 | """ |
|
329 | """ | |
330 | Grant permission for user on given repository group, or update |
|
330 | Grant permission for user on given repository group, or update | |
331 | existing one if found |
|
331 | existing one if found | |
332 |
|
332 | |||
333 | :param repos_group: Instance of ReposGroup, repositories_group_id, |
|
333 | :param repos_group: Instance of ReposGroup, repositories_group_id, | |
334 | or repositories_group name |
|
334 | or repositories_group name | |
335 | :param user: Instance of User, user_id or username |
|
335 | :param user: Instance of User, user_id or username | |
336 | :param perm: Instance of Permission, or permission_name |
|
336 | :param perm: Instance of Permission, or permission_name | |
337 | """ |
|
337 | """ | |
338 |
|
338 | |||
339 | repos_group = self._get_repos_group(repos_group) |
|
339 | repos_group = self._get_repos_group(repos_group) | |
340 | user = self._get_user(user) |
|
340 | user = self._get_user(user) | |
341 | permission = self._get_perm(perm) |
|
341 | permission = self._get_perm(perm) | |
342 |
|
342 | |||
343 | # check if we have that permission already |
|
343 | # check if we have that permission already | |
344 | obj = self.sa.query(UserRepoGroupToPerm)\ |
|
344 | obj = self.sa.query(UserRepoGroupToPerm)\ | |
345 | .filter(UserRepoGroupToPerm.user == user)\ |
|
345 | .filter(UserRepoGroupToPerm.user == user)\ | |
346 | .filter(UserRepoGroupToPerm.group == repos_group)\ |
|
346 | .filter(UserRepoGroupToPerm.group == repos_group)\ | |
347 | .scalar() |
|
347 | .scalar() | |
348 | if obj is None: |
|
348 | if obj is None: | |
349 | # create new ! |
|
349 | # create new ! | |
350 | obj = UserRepoGroupToPerm() |
|
350 | obj = UserRepoGroupToPerm() | |
351 | obj.group = repos_group |
|
351 | obj.group = repos_group | |
352 | obj.user = user |
|
352 | obj.user = user | |
353 | obj.permission = permission |
|
353 | obj.permission = permission | |
354 | self.sa.add(obj) |
|
354 | self.sa.add(obj) | |
355 | log.debug('Granted perm %s to %s on %s' % (perm, user, repos_group)) |
|
355 | log.debug('Granted perm %s to %s on %s' % (perm, user, repos_group)) | |
356 |
|
356 | |||
357 | def revoke_user_permission(self, repos_group, user): |
|
357 | def revoke_user_permission(self, repos_group, user): | |
358 | """ |
|
358 | """ | |
359 | Revoke permission for user on given repository group |
|
359 | Revoke permission for user on given repository group | |
360 |
|
360 | |||
361 | :param repos_group: Instance of ReposGroup, repositories_group_id, |
|
361 | :param repos_group: Instance of ReposGroup, repositories_group_id, | |
362 | or repositories_group name |
|
362 | or repositories_group name | |
363 | :param user: Instance of User, user_id or username |
|
363 | :param user: Instance of User, user_id or username | |
364 | """ |
|
364 | """ | |
365 |
|
365 | |||
366 | repos_group = self._get_repos_group(repos_group) |
|
366 | repos_group = self._get_repos_group(repos_group) | |
367 | user = self._get_user(user) |
|
367 | user = self._get_user(user) | |
368 |
|
368 | |||
369 | obj = self.sa.query(UserRepoGroupToPerm)\ |
|
369 | obj = self.sa.query(UserRepoGroupToPerm)\ | |
370 | .filter(UserRepoGroupToPerm.user == user)\ |
|
370 | .filter(UserRepoGroupToPerm.user == user)\ | |
371 | .filter(UserRepoGroupToPerm.group == repos_group)\ |
|
371 | .filter(UserRepoGroupToPerm.group == repos_group)\ | |
372 | .scalar() |
|
372 | .scalar() | |
373 | if obj: |
|
373 | if obj: | |
374 | self.sa.delete(obj) |
|
374 | self.sa.delete(obj) | |
375 | log.debug('Revoked perm on %s on %s' % (repos_group, user)) |
|
375 | log.debug('Revoked perm on %s on %s' % (repos_group, user)) | |
376 |
|
376 | |||
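A quick sketch of the grant/revoke pair above, with the same caveats as the previous sketch: 'docs' and 'john' are placeholder names, 'group.write' is one of the group-level permission names used elsewhere in this codebase, and a working database session is assumed.

    model = ReposGroupModel()
    model.grant_user_permission(repos_group='docs', user='john',
                                perm='group.write')
    Session().commit()
    # ... and later take it away again
    model.revoke_user_permission(repos_group='docs', user='john')
    Session().commit()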
377 | def grant_users_group_permission(self, repos_group, group_name, perm): |
|
377 | def grant_users_group_permission(self, repos_group, group_name, perm): | |
378 | """ |
|
378 | """ | |
379 | Grant permission for user group on given repository group, or update |
|
379 | Grant permission for user group on given repository group, or update | |
380 | existing one if found |
|
380 | existing one if found | |
381 |
|
381 | |||
382 | :param repos_group: Instance of ReposGroup, repositories_group_id, |
|
382 | :param repos_group: Instance of ReposGroup, repositories_group_id, | |
383 | or repositories_group name |
|
383 | or repositories_group name | |
384 | :param group_name: Instance of UserGroup, users_group_id, |
|
384 | :param group_name: Instance of UserGroup, users_group_id, | |
385 | or user group name |
|
385 | or user group name | |
386 | :param perm: Instance of Permission, or permission_name |
|
386 | :param perm: Instance of Permission, or permission_name | |
387 | """ |
|
387 | """ | |
388 | repos_group = self._get_repos_group(repos_group) |
|
388 | repos_group = self._get_repos_group(repos_group) | |
389 | group_name = self.__get_users_group(group_name) |
|
389 | group_name = self.__get_users_group(group_name) | |
390 | permission = self._get_perm(perm) |
|
390 | permission = self._get_perm(perm) | |
391 |
|
391 | |||
392 | # check if we have that permission already |
|
392 | # check if we have that permission already | |
393 | obj = self.sa.query(UsersGroupRepoGroupToPerm)\ |
|
393 | obj = self.sa.query(UsersGroupRepoGroupToPerm)\ | |
394 | .filter(UsersGroupRepoGroupToPerm.group == repos_group)\ |
|
394 | .filter(UsersGroupRepoGroupToPerm.group == repos_group)\ | |
395 | .filter(UsersGroupRepoGroupToPerm.users_group == group_name)\ |
|
395 | .filter(UsersGroupRepoGroupToPerm.users_group == group_name)\ | |
396 | .scalar() |
|
396 | .scalar() | |
397 |
|
397 | |||
398 | if obj is None: |
|
398 | if obj is None: | |
399 | # create new |
|
399 | # create new | |
400 | obj = UsersGroupRepoGroupToPerm() |
|
400 | obj = UsersGroupRepoGroupToPerm() | |
401 |
|
401 | |||
402 | obj.group = repos_group |
|
402 | obj.group = repos_group | |
403 | obj.users_group = group_name |
|
403 | obj.users_group = group_name | |
404 | obj.permission = permission |
|
404 | obj.permission = permission | |
405 | self.sa.add(obj) |
|
405 | self.sa.add(obj) | |
406 | log.debug('Granted perm %s to %s on %s' % (perm, group_name, repos_group)) |
|
406 | log.debug('Granted perm %s to %s on %s' % (perm, group_name, repos_group)) | |
407 |
|
407 | |||
408 | def revoke_users_group_permission(self, repos_group, group_name): |
|
408 | def revoke_users_group_permission(self, repos_group, group_name): | |
409 | """ |
|
409 | """ | |
410 | Revoke permission for user group on given repository group |
|
410 | Revoke permission for user group on given repository group | |
411 |
|
411 | |||
412 | :param repos_group: Instance of ReposGroup, repositories_group_id, |
|
412 | :param repos_group: Instance of ReposGroup, repositories_group_id, | |
413 | or repositories_group name |
|
413 | or repositories_group name | |
414 | :param group_name: Instance of UserGroup, users_group_id, |
|
414 | :param group_name: Instance of UserGroup, users_group_id, | |
415 | or user group name |
|
415 | or user group name | |
416 | """ |
|
416 | """ | |
417 | repos_group = self._get_repos_group(repos_group) |
|
417 | repos_group = self._get_repos_group(repos_group) | |
418 | group_name = self.__get_users_group(group_name) |
|
418 | group_name = self.__get_users_group(group_name) | |
419 |
|
419 | |||
420 | obj = self.sa.query(UsersGroupRepoGroupToPerm)\ |
|
420 | obj = self.sa.query(UsersGroupRepoGroupToPerm)\ | |
421 | .filter(UsersGroupRepoGroupToPerm.group == repos_group)\ |
|
421 | .filter(UsersGroupRepoGroupToPerm.group == repos_group)\ | |
422 | .filter(UsersGroupRepoGroupToPerm.users_group == group_name)\ |
|
422 | .filter(UsersGroupRepoGroupToPerm.users_group == group_name)\ | |
423 | .scalar() |
|
423 | .scalar() | |
424 | if obj: |
|
424 | if obj: | |
425 | self.sa.delete(obj) |
|
425 | self.sa.delete(obj) | |
426 | log.debug('Revoked perm to %s on %s' % (repos_group, group_name)) |
|
426 | log.debug('Revoked perm to %s on %s' % (repos_group, group_name)) |
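The user-group variants follow the same pattern; a hedged sketch with placeholder names, a configured session, and 'group.read' assumed to be the usual read-level group permission:

    model = ReposGroupModel()
    model.grant_users_group_permission(repos_group='docs',
                                       group_name='developers',
                                       perm='group.read')
    Session().commit()
    model.revoke_users_group_permission(repos_group='docs',
                                        group_name='developers')
    Session().commit()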
@@ -1,809 +1,809 b'' | |||||
1 | """ |
|
1 | """ | |
2 | Set of generic validators |
|
2 | Set of generic validators | |
3 | """ |
|
3 | """ | |
4 | import os |
|
4 | import os | |
5 | import re |
|
5 | import re | |
6 | import formencode |
|
6 | import formencode | |
7 | import logging |
|
7 | import logging | |
8 | from collections import defaultdict |
|
8 | from collections import defaultdict | |
9 | from pylons.i18n.translation import _ |
|
9 | from pylons.i18n.translation import _ | |
10 | from webhelpers.pylonslib.secure_form import authentication_token |
|
10 | from webhelpers.pylonslib.secure_form import authentication_token | |
11 |
|
11 | |||
12 | from formencode.validators import ( |
|
12 | from formencode.validators import ( | |
13 | UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, |
|
13 | UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, | |
14 | NotEmpty, IPAddress, CIDR |
|
14 | NotEmpty, IPAddress, CIDR | |
15 | ) |
|
15 | ) | |
16 | from rhodecode.lib.compat import OrderedSet |
|
16 | from rhodecode.lib.compat import OrderedSet | |
17 | from rhodecode.lib import ipaddr |
|
17 | from rhodecode.lib import ipaddr | |
18 | from rhodecode.lib.utils import repo_name_slug |
|
18 | from rhodecode.lib.utils import repo_name_slug | |
19 | from rhodecode.model.db import RepoGroup, Repository, UsersGroup, User,\ |
|
19 | from rhodecode.model.db import RepoGroup, Repository, UsersGroup, User,\ | |
20 | ChangesetStatus |
|
20 | ChangesetStatus | |
21 | from rhodecode.lib.exceptions import LdapImportError |
|
21 | from rhodecode.lib.exceptions import LdapImportError | |
22 | from rhodecode.config.routing import ADMIN_PREFIX |
|
22 | from rhodecode.config.routing import ADMIN_PREFIX | |
23 | from rhodecode.lib.auth import HasReposGroupPermissionAny, HasPermissionAny |
|
23 | from rhodecode.lib.auth import HasReposGroupPermissionAny, HasPermissionAny | |
24 |
|
24 | |||
25 | # silence warnings and pylint |
|
25 | # silence warnings and pylint | |
26 | UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \ |
|
26 | UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \ | |
27 | NotEmpty, IPAddress, CIDR |
|
27 | NotEmpty, IPAddress, CIDR | |
28 |
|
28 | |||
29 | log = logging.getLogger(__name__) |
|
29 | log = logging.getLogger(__name__) | |
30 |
|
30 | |||
31 |
|
31 | |||
32 | class UniqueList(formencode.FancyValidator): |
|
32 | class UniqueList(formencode.FancyValidator): | |
33 | """ |
|
33 | """ | |
34 | Unique List ! |
|
34 | Unique List ! | |
35 | """ |
|
35 | """ | |
36 | messages = dict( |
|
36 | messages = dict( | |
37 | empty=_('Value cannot be an empty list'), |
|
37 | empty=_('Value cannot be an empty list'), | |
38 | missing_value=_('Value cannot be an empty list'), |
|
38 | missing_value=_('Value cannot be an empty list'), | |
39 | ) |
|
39 | ) | |
40 |
|
40 | |||
41 | def _to_python(self, value, state): |
|
41 | def _to_python(self, value, state): | |
42 | if isinstance(value, list): |
|
42 | if isinstance(value, list): | |
43 | return value |
|
43 | return value | |
44 | elif isinstance(value, set): |
|
44 | elif isinstance(value, set): | |
45 | return list(value) |
|
45 | return list(value) | |
46 | elif isinstance(value, tuple): |
|
46 | elif isinstance(value, tuple): | |
47 | return list(value) |
|
47 | return list(value) | |
48 | elif value is None: |
|
48 | elif value is None: | |
49 | return [] |
|
49 | return [] | |
50 | else: |
|
50 | else: | |
51 | return [value] |
|
51 | return [value] | |
52 |
|
52 | |||
53 | def empty_value(self, value): |
|
53 | def empty_value(self, value): | |
54 | return [] |
|
54 | return [] | |
55 |
|
55 | |||
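For reference, how UniqueList normalises its input with the formencode API this module targets (to_python() routes through _to_python and empty_value; note the class only coerces the container type, it does not deduplicate here):

    v = UniqueList()
    v.to_python(['a', 'b'])    # -> ['a', 'b']
    v.to_python(('a', 'b'))    # -> ['a', 'b']
    v.to_python('a')           # -> ['a']
    v.to_python(None)          # -> [] (via empty_value)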
56 |
|
56 | |||
57 | class StateObj(object): |
|
57 | class StateObj(object): | |
58 | """ |
|
58 | """ | |
59 | this is needed to translate the messages using _() in validators |
|
59 | this is needed to translate the messages using _() in validators | |
60 | """ |
|
60 | """ | |
61 | _ = staticmethod(_) |
|
61 | _ = staticmethod(_) | |
62 |
|
62 | |||
63 |
|
63 | |||
64 | def M(self, key, state=None, **kwargs): |
|
64 | def M(self, key, state=None, **kwargs): | |
65 | """ |
|
65 | """ | |
66 | returns string from self.message based on given key, |
|
66 | returns string from self.message based on given key, | |
67 | passed kw params are used to substitute %(named)s params inside |
|
67 | passed kw params are used to substitute %(named)s params inside | |
68 | translated strings |
|
68 | translated strings | |
69 |
|
69 | |||
70 | :param msg: |
|
70 | :param msg: | |
71 | :param state: |
|
71 | :param state: | |
72 | """ |
|
72 | """ | |
73 | if state is None: |
|
73 | if state is None: | |
74 | state = StateObj() |
|
74 | state = StateObj() | |
75 | else: |
|
75 | else: | |
76 | state._ = staticmethod(_) |
|
76 | state._ = staticmethod(_) | |
77 | #inject validator into state object |
|
77 | #inject validator into state object | |
78 | return self.message(key, state, **kwargs) |
|
78 | return self.message(key, state, **kwargs) | |
79 |
|
79 | |||
80 |
|
80 | |||
81 | def ValidUsername(edit=False, old_data={}): |
|
81 | def ValidUsername(edit=False, old_data={}): | |
82 | class _validator(formencode.validators.FancyValidator): |
|
82 | class _validator(formencode.validators.FancyValidator): | |
83 | messages = { |
|
83 | messages = { | |
84 | 'username_exists': _(u'Username "%(username)s" already exists'), |
|
84 | 'username_exists': _(u'Username "%(username)s" already exists'), | |
85 | 'system_invalid_username': |
|
85 | 'system_invalid_username': | |
86 | _(u'Username "%(username)s" is forbidden'), |
|
86 | _(u'Username "%(username)s" is forbidden'), | |
87 | 'invalid_username': |
|
87 | 'invalid_username': | |
88 | _(u'Username may only contain alphanumeric characters ' |
|
88 | _(u'Username may only contain alphanumeric characters ' | |
89 | 'underscores, periods or dashes and must begin with ' |
|
89 | 'underscores, periods or dashes and must begin with ' | |
90 | 'alphanumeric character') |
|
90 | 'alphanumeric character') | |
91 | } |
|
91 | } | |
92 |
|
92 | |||
93 | def validate_python(self, value, state): |
|
93 | def validate_python(self, value, state): | |
94 | if value in ['default', 'new_user']: |
|
94 | if value in ['default', 'new_user']: | |
95 | msg = M(self, 'system_invalid_username', state, username=value) |
|
95 | msg = M(self, 'system_invalid_username', state, username=value) | |
96 | raise formencode.Invalid(msg, value, state) |
|
96 | raise formencode.Invalid(msg, value, state) | |
97 | #check if user is unique |
|
97 | #check if user is unique | |
98 | old_un = None |
|
98 | old_un = None | |
99 | if edit: |
|
99 | if edit: | |
100 | old_un = User.get(old_data.get('user_id')).username |
|
100 | old_un = User.get(old_data.get('user_id')).username | |
101 |
|
101 | |||
102 | if old_un != value or not edit: |
|
102 | if old_un != value or not edit: | |
103 | if User.get_by_username(value, case_insensitive=True): |
|
103 | if User.get_by_username(value, case_insensitive=True): | |
104 | msg = M(self, 'username_exists', state, username=value) |
|
104 | msg = M(self, 'username_exists', state, username=value) | |
105 | raise formencode.Invalid(msg, value, state) |
|
105 | raise formencode.Invalid(msg, value, state) | |
106 |
|
106 | |||
107 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]*$', value) is None: |
|
107 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]*$', value) is None: | |
108 | msg = M(self, 'invalid_username', state) |
|
108 | msg = M(self, 'invalid_username', state) | |
109 | raise formencode.Invalid(msg, value, state) |
|
109 | raise formencode.Invalid(msg, value, state) | |
110 | return _validator |
|
110 | return _validator | |
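A hedged sketch of the factory pattern used throughout this module: ValidUsername() returns a validator class which is then instantiated by the form schema. The uniqueness check queries the users table, so a configured database session is assumed; 'john.doe' is a placeholder name.

    checker = ValidUsername()()       # factory returns a class; instantiate it
    checker.to_python(u'john.doe')    # passes the regex and uniqueness checks
    checker.to_python(u'default')     # raises formencode.Invalid (reserved name)
    checker.to_python(u'-john')       # raises formencode.Invalid (bad first char)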
111 |
|
111 | |||
112 |
|
112 | |||
113 | def ValidRepoUser(): |
|
113 | def ValidRepoUser(): | |
114 | class _validator(formencode.validators.FancyValidator): |
|
114 | class _validator(formencode.validators.FancyValidator): | |
115 | messages = { |
|
115 | messages = { | |
116 | 'invalid_username': _(u'Username %(username)s is not valid') |
|
116 | 'invalid_username': _(u'Username %(username)s is not valid') | |
117 | } |
|
117 | } | |
118 |
|
118 | |||
119 | def validate_python(self, value, state): |
|
119 | def validate_python(self, value, state): | |
120 | try: |
|
120 | try: | |
121 | User.query().filter(User.active == True)\ |
|
121 | User.query().filter(User.active == True)\ | |
122 | .filter(User.username == value).one() |
|
122 | .filter(User.username == value).one() | |
123 | except Exception: |
|
123 | except Exception: | |
124 | msg = M(self, 'invalid_username', state, username=value) |
|
124 | msg = M(self, 'invalid_username', state, username=value) | |
125 | raise formencode.Invalid(msg, value, state, |
|
125 | raise formencode.Invalid(msg, value, state, | |
126 | error_dict=dict(username=msg) |
|
126 | error_dict=dict(username=msg) | |
127 | ) |
|
127 | ) | |
128 |
|
128 | |||
129 | return _validator |
|
129 | return _validator | |
130 |
|
130 | |||
131 |
|
131 | |||
132 | def ValidUsersGroup(edit=False, old_data={}): |
|
132 | def ValidUsersGroup(edit=False, old_data={}): | |
133 | class _validator(formencode.validators.FancyValidator): |
|
133 | class _validator(formencode.validators.FancyValidator): | |
134 | messages = { |
|
134 | messages = { | |
135 | 'invalid_group': _(u'Invalid user group name'), |
|
135 | 'invalid_group': _(u'Invalid user group name'), | |
136 | 'group_exist': _(u'User group "%(usersgroup)s" already exists'), |
|
136 | 'group_exist': _(u'User group "%(usersgroup)s" already exists'), | |
137 | 'invalid_usersgroup_name': |
|
137 | 'invalid_usersgroup_name': | |
138 | _(u'user group name may only contain alphanumeric ' |
|
138 | _(u'user group name may only contain alphanumeric ' | |
139 | 'characters underscores, periods or dashes and must begin ' |
|
139 | 'characters underscores, periods or dashes and must begin ' | |
140 | 'with alphanumeric character') |
|
140 | 'with alphanumeric character') | |
141 | } |
|
141 | } | |
142 |
|
142 | |||
143 | def validate_python(self, value, state): |
|
143 | def validate_python(self, value, state): | |
144 | if value in ['default']: |
|
144 | if value in ['default']: | |
145 | msg = M(self, 'invalid_group', state) |
|
145 | msg = M(self, 'invalid_group', state) | |
146 | raise formencode.Invalid(msg, value, state, |
|
146 | raise formencode.Invalid(msg, value, state, | |
147 | error_dict=dict(users_group_name=msg) |
|
147 | error_dict=dict(users_group_name=msg) | |
148 | ) |
|
148 | ) | |
149 | #check if group is unique |
|
149 | #check if group is unique | |
150 | old_ugname = None |
|
150 | old_ugname = None | |
151 | if edit: |
|
151 | if edit: | |
152 | old_id = old_data.get('users_group_id') |
|
152 | old_id = old_data.get('users_group_id') | |
153 | old_ugname = UsersGroup.get(old_id).users_group_name |
|
153 | old_ugname = UsersGroup.get(old_id).users_group_name | |
154 |
|
154 | |||
155 | if old_ugname != value or not edit: |
|
155 | if old_ugname != value or not edit: | |
156 | is_existing_group = UsersGroup.get_by_group_name(value, |
|
156 | is_existing_group = UsersGroup.get_by_group_name(value, | |
157 | case_insensitive=True) |
|
157 | case_insensitive=True) | |
158 | if is_existing_group: |
|
158 | if is_existing_group: | |
159 | msg = M(self, 'group_exist', state, usersgroup=value) |
|
159 | msg = M(self, 'group_exist', state, usersgroup=value) | |
160 | raise formencode.Invalid(msg, value, state, |
|
160 | raise formencode.Invalid(msg, value, state, | |
161 | error_dict=dict(users_group_name=msg) |
|
161 | error_dict=dict(users_group_name=msg) | |
162 | ) |
|
162 | ) | |
163 |
|
163 | |||
164 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None: |
|
164 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None: | |
165 | msg = M(self, 'invalid_usersgroup_name', state) |
|
165 | msg = M(self, 'invalid_usersgroup_name', state) | |
166 | raise formencode.Invalid(msg, value, state, |
|
166 | raise formencode.Invalid(msg, value, state, | |
167 | error_dict=dict(users_group_name=msg) |
|
167 | error_dict=dict(users_group_name=msg) | |
168 | ) |
|
168 | ) | |
169 |
|
169 | |||
170 | return _validator |
|
170 | return _validator | |
171 |
|
171 | |||
172 |
|
172 | |||
173 | def ValidReposGroup(edit=False, old_data={}): |
|
173 | def ValidReposGroup(edit=False, old_data={}): | |
174 | class _validator(formencode.validators.FancyValidator): |
|
174 | class _validator(formencode.validators.FancyValidator): | |
175 | messages = { |
|
175 | messages = { | |
176 | 'group_parent_id': _(u'Cannot assign this group as parent'), |
|
176 | 'group_parent_id': _(u'Cannot assign this group as parent'), | |
177 | 'group_exists': _(u'Group "%(group_name)s" already exists'), |
|
177 | 'group_exists': _(u'Group "%(group_name)s" already exists'), | |
178 | 'repo_exists': |
|
178 | 'repo_exists': | |
179 | _(u'Repository with name "%(group_name)s" already exists') |
|
179 | _(u'Repository with name "%(group_name)s" already exists') | |
180 | } |
|
180 | } | |
181 |
|
181 | |||
182 | def validate_python(self, value, state): |
|
182 | def validate_python(self, value, state): | |
183 | # TODO WRITE VALIDATIONS |
|
183 | # TODO WRITE VALIDATIONS | |
184 | group_name = value.get('group_name') |
|
184 | group_name = value.get('group_name') | |
185 | group_parent_id = value.get('group_parent_id') |
|
185 | group_parent_id = value.get('group_parent_id') | |
186 |
|
186 | |||
187 | # slugify repo group just in case :) |
|
187 | # slugify repo group just in case :) | |
188 | slug = repo_name_slug(group_name) |
|
188 | slug = repo_name_slug(group_name) | |
189 |
|
189 | |||
190 | # check for parent of self |
|
190 | # check for parent of self | |
191 | parent_of_self = lambda: ( |
|
191 | parent_of_self = lambda: ( | |
192 | old_data['group_id'] == int(group_parent_id) |
|
192 | old_data['group_id'] == int(group_parent_id) | |
193 | if group_parent_id else False |
|
193 | if group_parent_id else False | |
194 | ) |
|
194 | ) | |
195 | if edit and parent_of_self(): |
|
195 | if edit and parent_of_self(): | |
196 | msg = M(self, 'group_parent_id', state) |
|
196 | msg = M(self, 'group_parent_id', state) | |
197 | raise formencode.Invalid(msg, value, state, |
|
197 | raise formencode.Invalid(msg, value, state, | |
198 | error_dict=dict(group_parent_id=msg) |
|
198 | error_dict=dict(group_parent_id=msg) | |
199 | ) |
|
199 | ) | |
200 |
|
200 | |||
201 | old_gname = None |
|
201 | old_gname = None | |
202 | if edit: |
|
202 | if edit: | |
203 | old_gname = RepoGroup.get(old_data.get('group_id')).group_name |
|
203 | old_gname = RepoGroup.get(old_data.get('group_id')).group_name | |
204 |
|
204 | |||
205 | if old_gname != group_name or not edit: |
|
205 | if old_gname != group_name or not edit: | |
206 |
|
206 | |||
207 | # check group |
|
207 | # check group | |
208 | gr = RepoGroup.query()\ |
|
208 | gr = RepoGroup.query()\ | |
209 | .filter(RepoGroup.group_name == slug)\ |
|
209 | .filter(RepoGroup.group_name == slug)\ | |
210 | .filter(RepoGroup.group_parent_id == group_parent_id)\ |
|
210 | .filter(RepoGroup.group_parent_id == group_parent_id)\ | |
211 | .scalar() |
|
211 | .scalar() | |
212 |
|
212 | |||
213 | if gr: |
|
213 | if gr: | |
214 | msg = M(self, 'group_exists', state, group_name=slug) |
|
214 | msg = M(self, 'group_exists', state, group_name=slug) | |
215 | raise formencode.Invalid(msg, value, state, |
|
215 | raise formencode.Invalid(msg, value, state, | |
216 | error_dict=dict(group_name=msg) |
|
216 | error_dict=dict(group_name=msg) | |
217 | ) |
|
217 | ) | |
218 |
|
218 | |||
219 | # check for same repo |
|
219 | # check for same repo | |
220 | repo = Repository.query()\ |
|
220 | repo = Repository.query()\ | |
221 | .filter(Repository.repo_name == slug)\ |
|
221 | .filter(Repository.repo_name == slug)\ | |
222 | .scalar() |
|
222 | .scalar() | |
223 |
|
223 | |||
224 | if repo: |
|
224 | if repo: | |
225 | msg = M(self, 'repo_exists', state, group_name=slug) |
|
225 | msg = M(self, 'repo_exists', state, group_name=slug) | |
226 | raise formencode.Invalid(msg, value, state, |
|
226 | raise formencode.Invalid(msg, value, state, | |
227 | error_dict=dict(group_name=msg) |
|
227 | error_dict=dict(group_name=msg) | |
228 | ) |
|
228 | ) | |
229 |
|
229 | |||
230 | return _validator |
|
230 | return _validator | |
231 |
|
231 | |||
232 |
|
232 | |||
233 | def ValidPassword(): |
|
233 | def ValidPassword(): | |
234 | class _validator(formencode.validators.FancyValidator): |
|
234 | class _validator(formencode.validators.FancyValidator): | |
235 | messages = { |
|
235 | messages = { | |
236 | 'invalid_password': |
|
236 | 'invalid_password': | |
237 | _(u'Invalid characters (non-ascii) in password') |
|
237 | _(u'Invalid characters (non-ascii) in password') | |
238 | } |
|
238 | } | |
239 |
|
239 | |||
240 | def validate_python(self, value, state): |
|
240 | def validate_python(self, value, state): | |
241 | try: |
|
241 | try: | |
242 | (value or '').decode('ascii') |
|
242 | (value or '').decode('ascii') | |
243 | except UnicodeError: |
|
243 | except UnicodeError: | |
244 | msg = M(self, 'invalid_password', state) |
|
244 | msg = M(self, 'invalid_password', state) | |
245 | raise formencode.Invalid(msg, value, state,) |
|
245 | raise formencode.Invalid(msg, value, state,) | |
246 | return _validator |
|
246 | return _validator | |
247 |
|
247 | |||
248 |
|
248 | |||
249 | def ValidPasswordsMatch(): |
|
249 | def ValidPasswordsMatch(): | |
250 | class _validator(formencode.validators.FancyValidator): |
|
250 | class _validator(formencode.validators.FancyValidator): | |
251 | messages = { |
|
251 | messages = { | |
252 | 'password_mismatch': _(u'Passwords do not match'), |
|
252 | 'password_mismatch': _(u'Passwords do not match'), | |
253 | } |
|
253 | } | |
254 |
|
254 | |||
255 | def validate_python(self, value, state): |
|
255 | def validate_python(self, value, state): | |
256 |
|
256 | |||
257 | pass_val = value.get('password') or value.get('new_password') |
|
257 | pass_val = value.get('password') or value.get('new_password') | |
258 | if pass_val != value['password_confirmation']: |
|
258 | if pass_val != value['password_confirmation']: | |
259 | msg = M(self, 'password_mismatch', state) |
|
259 | msg = M(self, 'password_mismatch', state) | |
260 | raise formencode.Invalid(msg, value, state, |
|
260 | raise formencode.Invalid(msg, value, state, | |
261 | error_dict=dict(password_confirmation=msg) |
|
261 | error_dict=dict(password_confirmation=msg) | |
262 | ) |
|
262 | ) | |
263 | return _validator |
|
263 | return _validator | |
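ValidPasswordsMatch works purely on the submitted dict, so it can be sketched without a database:

    v = ValidPasswordsMatch()()
    v.to_python({'password': 's3cret', 'password_confirmation': 's3cret'})  # ok
    v.to_python({'password': 's3cret', 'password_confirmation': 'oops'})
    # -> raises formencode.Invalid with error_dict={'password_confirmation': ...}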
264 |
|
264 | |||
265 |
|
265 | |||
266 | def ValidAuth(): |
|
266 | def ValidAuth(): | |
267 | class _validator(formencode.validators.FancyValidator): |
|
267 | class _validator(formencode.validators.FancyValidator): | |
268 | messages = { |
|
268 | messages = { | |
269 | 'invalid_password': _(u'invalid password'), |
|
269 | 'invalid_password': _(u'invalid password'), | |
270 | 'invalid_username': _(u'invalid user name'), |
|
270 | 'invalid_username': _(u'invalid user name'), | |
271 | 'disabled_account': _(u'Your account is disabled') |
|
271 | 'disabled_account': _(u'Your account is disabled') | |
272 | } |
|
272 | } | |
273 |
|
273 | |||
274 | def validate_python(self, value, state): |
|
274 | def validate_python(self, value, state): | |
275 | from rhodecode.lib.auth import authenticate |
|
275 | from rhodecode.lib.auth import authenticate | |
276 |
|
276 | |||
277 | password = value['password'] |
|
277 | password = value['password'] | |
278 | username = value['username'] |
|
278 | username = value['username'] | |
279 |
|
279 | |||
280 | if not authenticate(username, password): |
|
280 | if not authenticate(username, password): | |
281 | user = User.get_by_username(username) |
|
281 | user = User.get_by_username(username) | |
282 | if user and user.active is False: |
|
282 | if user and user.active is False: | |
283 | log.warning('user %s is disabled' % username) |
|
283 | log.warning('user %s is disabled' % username) | |
284 | msg = M(self, 'disabled_account', state) |
|
284 | msg = M(self, 'disabled_account', state) | |
285 | raise formencode.Invalid(msg, value, state, |
|
285 | raise formencode.Invalid(msg, value, state, | |
286 | error_dict=dict(username=msg) |
|
286 | error_dict=dict(username=msg) | |
287 | ) |
|
287 | ) | |
288 | else: |
|
288 | else: | |
289 | log.warning('user %s failed to authenticate' % username) |
|
289 | log.warning('user %s failed to authenticate' % username) | |
290 | msg = M(self, 'invalid_username', state) |
|
290 | msg = M(self, 'invalid_username', state) | |
291 | msg2 = M(self, 'invalid_password', state) |
|
291 | msg2 = M(self, 'invalid_password', state) | |
292 | raise formencode.Invalid(msg, value, state, |
|
292 | raise formencode.Invalid(msg, value, state, | |
293 | error_dict=dict(username=msg, password=msg2) |
|
293 | error_dict=dict(username=msg, password=msg2) | |
294 | ) |
|
294 | ) | |
295 | return _validator |
|
295 | return _validator | |
296 |
|
296 | |||
297 |
|
297 | |||
298 | def ValidAuthToken(): |
|
298 | def ValidAuthToken(): | |
299 | class _validator(formencode.validators.FancyValidator): |
|
299 | class _validator(formencode.validators.FancyValidator): | |
300 | messages = { |
|
300 | messages = { | |
301 | 'invalid_token': _(u'Token mismatch') |
|
301 | 'invalid_token': _(u'Token mismatch') | |
302 | } |
|
302 | } | |
303 |
|
303 | |||
304 | def validate_python(self, value, state): |
|
304 | def validate_python(self, value, state): | |
305 | if value != authentication_token(): |
|
305 | if value != authentication_token(): | |
306 | msg = M(self, 'invalid_token', state) |
|
306 | msg = M(self, 'invalid_token', state) | |
307 | raise formencode.Invalid(msg, value, state) |
|
307 | raise formencode.Invalid(msg, value, state) | |
308 | return _validator |
|
308 | return _validator | |
309 |
|
309 | |||
310 |
|
310 | |||
311 | def ValidRepoName(edit=False, old_data={}): |
|
311 | def ValidRepoName(edit=False, old_data={}): | |
312 | class _validator(formencode.validators.FancyValidator): |
|
312 | class _validator(formencode.validators.FancyValidator): | |
313 | messages = { |
|
313 | messages = { | |
314 | 'invalid_repo_name': |
|
314 | 'invalid_repo_name': | |
315 | _(u'Repository name %(repo)s is disallowed'), |
|
315 | _(u'Repository name %(repo)s is disallowed'), | |
316 | 'repository_exists': |
|
316 | 'repository_exists': | |
317 | _(u'Repository named %(repo)s already exists'), |
|
317 | _(u'Repository named %(repo)s already exists'), | |
318 | 'repository_in_group_exists': _(u'Repository "%(repo)s" already ' |
|
318 | 'repository_in_group_exists': _(u'Repository "%(repo)s" already ' | |
319 | 'exists in group "%(group)s"'), |
|
319 | 'exists in group "%(group)s"'), | |
320 | 'same_group_exists': _(u'Repository group with name "%(repo)s" ' |
|
320 | 'same_group_exists': _(u'Repository group with name "%(repo)s" ' | |
321 | 'already exists') |
|
321 | 'already exists') | |
322 | } |
|
322 | } | |
323 |
|
323 | |||
324 | def _to_python(self, value, state): |
|
324 | def _to_python(self, value, state): | |
325 | repo_name = repo_name_slug(value.get('repo_name', '')) |
|
325 | repo_name = repo_name_slug(value.get('repo_name', '')) | |
326 | repo_group = value.get('repo_group') |
|
326 | repo_group = value.get('repo_group') | |
327 | if repo_group: |
|
327 | if repo_group: | |
328 | gr = RepoGroup.get(repo_group) |
|
328 | gr = RepoGroup.get(repo_group) | |
329 | group_path = gr.full_path |
|
329 | group_path = gr.full_path | |
330 | group_name = gr.group_name |
|
330 | group_name = gr.group_name | |
331 | # value needs to be aware of group name in order to check |
|
331 | # value needs to be aware of group name in order to check | |
332 | # db key This is an actual just the name to store in the |
|
332 | # db key This is an actual just the name to store in the | |
333 | # database |
|
333 | # database | |
334 | repo_name_full = group_path + RepoGroup.url_sep() + repo_name |
|
334 | repo_name_full = group_path + RepoGroup.url_sep() + repo_name | |
335 | else: |
|
335 | else: | |
336 | group_name = group_path = '' |
|
336 | group_name = group_path = '' | |
337 | repo_name_full = repo_name |
|
337 | repo_name_full = repo_name | |
338 |
|
338 | |||
339 | value['repo_name'] = repo_name |
|
339 | value['repo_name'] = repo_name | |
340 | value['repo_name_full'] = repo_name_full |
|
340 | value['repo_name_full'] = repo_name_full | |
341 | value['group_path'] = group_path |
|
341 | value['group_path'] = group_path | |
342 | value['group_name'] = group_name |
|
342 | value['group_name'] = group_name | |
343 | return value |
|
343 | return value | |
344 |
|
344 | |||
345 | def validate_python(self, value, state): |
|
345 | def validate_python(self, value, state): | |
346 |
|
346 | |||
347 | repo_name = value.get('repo_name') |
|
347 | repo_name = value.get('repo_name') | |
348 | repo_name_full = value.get('repo_name_full') |
|
348 | repo_name_full = value.get('repo_name_full') | |
349 | group_path = value.get('group_path') |
|
349 | group_path = value.get('group_path') | |
350 | group_name = value.get('group_name') |
|
350 | group_name = value.get('group_name') | |
351 |
|
351 | |||
352 | if repo_name in [ADMIN_PREFIX, '']: |
|
352 | if repo_name in [ADMIN_PREFIX, '']: | |
353 | msg = M(self, 'invalid_repo_name', state, repo=repo_name) |
|
353 | msg = M(self, 'invalid_repo_name', state, repo=repo_name) | |
354 | raise formencode.Invalid(msg, value, state, |
|
354 | raise formencode.Invalid(msg, value, state, | |
355 | error_dict=dict(repo_name=msg) |
|
355 | error_dict=dict(repo_name=msg) | |
356 | ) |
|
356 | ) | |
357 |
|
357 | |||
358 | rename = old_data.get('repo_name') != repo_name_full |
|
358 | rename = old_data.get('repo_name') != repo_name_full | |
359 | create = not edit |
|
359 | create = not edit | |
360 | if rename or create: |
|
360 | if rename or create: | |
361 |
|
361 | |||
362 | if group_path != '': |
|
362 | if group_path != '': | |
363 | if Repository.get_by_repo_name(repo_name_full): |
|
363 | if Repository.get_by_repo_name(repo_name_full): | |
364 | msg = M(self, 'repository_in_group_exists', state, |
|
364 | msg = M(self, 'repository_in_group_exists', state, | |
365 | repo=repo_name, group=group_name) |
|
365 | repo=repo_name, group=group_name) | |
366 | raise formencode.Invalid(msg, value, state, |
|
366 | raise formencode.Invalid(msg, value, state, | |
367 | error_dict=dict(repo_name=msg) |
|
367 | error_dict=dict(repo_name=msg) | |
368 | ) |
|
368 | ) | |
369 | elif RepoGroup.get_by_group_name(repo_name_full): |
|
369 | elif RepoGroup.get_by_group_name(repo_name_full): | |
370 | msg = M(self, 'same_group_exists', state, |
|
370 | msg = M(self, 'same_group_exists', state, | |
371 | repo=repo_name) |
|
371 | repo=repo_name) | |
372 | raise formencode.Invalid(msg, value, state, |
|
372 | raise formencode.Invalid(msg, value, state, | |
373 | error_dict=dict(repo_name=msg) |
|
373 | error_dict=dict(repo_name=msg) | |
374 | ) |
|
374 | ) | |
375 |
|
375 | |||
376 | elif Repository.get_by_repo_name(repo_name_full): |
|
376 | elif Repository.get_by_repo_name(repo_name_full): | |
377 | msg = M(self, 'repository_exists', state, |
|
377 | msg = M(self, 'repository_exists', state, | |
378 | repo=repo_name) |
|
378 | repo=repo_name) | |
379 | raise formencode.Invalid(msg, value, state, |
|
379 | raise formencode.Invalid(msg, value, state, | |
380 | error_dict=dict(repo_name=msg) |
|
380 | error_dict=dict(repo_name=msg) | |
381 | ) |
|
381 | ) | |
382 | return value |
|
382 | return value | |
383 | return _validator |
|
383 | return _validator | |
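A sketch of what ValidRepoName adds to the form value (hypothetical names; the group lookup and the uniqueness checks in validate_python need a database session):

    v = ValidRepoName()()
    value = v.to_python({'repo_name': u'My Repo', 'repo_group': None})
    # value['repo_name']      -> slug produced by repo_name_slug()
    # value['repo_name_full'] -> same as the slug (no parent group given)
    # value['group_path']     -> ''
    # with 'repo_group' set to an existing group id, repo_name_full becomes
    # <group full_path> + RepoGroup.url_sep() + <slug>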
384 |
|
384 | |||
385 |
|
385 | |||
386 | def ValidForkName(*args, **kwargs): |
|
386 | def ValidForkName(*args, **kwargs): | |
387 | return ValidRepoName(*args, **kwargs) |
|
387 | return ValidRepoName(*args, **kwargs) | |
388 |
|
388 | |||
389 |
|
389 | |||
390 | def SlugifyName(): |
|
390 | def SlugifyName(): | |
391 | class _validator(formencode.validators.FancyValidator): |
|
391 | class _validator(formencode.validators.FancyValidator): | |
392 |
|
392 | |||
393 | def _to_python(self, value, state): |
|
393 | def _to_python(self, value, state): | |
394 | return repo_name_slug(value) |
|
394 | return repo_name_slug(value) | |
395 |
|
395 | |||
396 | def validate_python(self, value, state): |
|
396 | def validate_python(self, value, state): | |
397 | pass |
|
397 | pass | |
398 |
|
398 | |||
399 | return _validator |
|
399 | return _validator | |
400 |
|
400 | |||
401 |
|
401 | |||
402 | def ValidCloneUri(): |
|
402 | def ValidCloneUri(): | |
403 | from rhodecode.lib.utils import make_ui |
|
403 | from rhodecode.lib.utils import make_ui | |
404 |
|
404 | |||
405 | def url_handler(repo_type, url, ui=None): |
|
405 | def url_handler(repo_type, url, ui=None): | |
406 | if repo_type == 'hg': |
|
406 | if repo_type == 'hg': | |
407 | from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository |
|
407 | from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository | |
408 | from mercurial.httppeer import httppeer |
|
408 | from mercurial.httppeer import httppeer | |
409 | if url.startswith('http'): |
|
409 | if url.startswith('http'): | |
410 | ## initially check if it's at least the proper URL |
|
410 | ## initially check if it's at least the proper URL | |
411 | ## or does it pass basic auth |
|
411 | ## or does it pass basic auth | |
412 | MercurialRepository._check_url(url) |
|
412 | MercurialRepository._check_url(url) | |
413 | httppeer(ui, url)._capabilities() |
|
413 | httppeer(ui, url)._capabilities() | |
414 | elif url.startswith('svn+http'): |
|
414 | elif url.startswith('svn+http'): | |
415 | from hgsubversion.svnrepo import svnremoterepo |
|
415 | from hgsubversion.svnrepo import svnremoterepo | |
416 | svnremoterepo(ui, url).capabilities |
|
416 | svnremoterepo(ui, url).capabilities | |
417 | elif url.startswith('git+http'): |
|
417 | elif url.startswith('git+http'): | |
418 | raise NotImplementedError() |
|
418 | raise NotImplementedError() | |
419 |
|
419 | |||
420 | elif repo_type == 'git': |
|
420 | elif repo_type == 'git': | |
421 | from rhodecode.lib.vcs.backends.git.repository import GitRepository |
|
421 | from rhodecode.lib.vcs.backends.git.repository import GitRepository | |
422 | if url.startswith('http'): |
|
422 | if url.startswith('http'): | |
423 | ## initially check if it's at least the proper URL |
|
423 | ## initially check if it's at least the proper URL | |
424 | ## or does it pass basic auth |
|
424 | ## or does it pass basic auth | |
425 | GitRepository._check_url(url) |
|
425 | GitRepository._check_url(url) | |
426 | elif url.startswith('svn+http'): |
|
426 | elif url.startswith('svn+http'): | |
427 | raise NotImplementedError() |
|
427 | raise NotImplementedError() | |
428 | elif url.startswith('hg+http'): |
|
428 | elif url.startswith('hg+http'): | |
429 | raise NotImplementedError() |
|
429 | raise NotImplementedError() | |
430 |
|
430 | |||
431 | class _validator(formencode.validators.FancyValidator): |
|
431 | class _validator(formencode.validators.FancyValidator): | |
432 | messages = { |
|
432 | messages = { | |
433 | 'clone_uri': _(u'invalid clone url'), |
|
433 | 'clone_uri': _(u'invalid clone url'), | |
434 | 'invalid_clone_uri': _(u'Invalid clone url, provide a ' |
|
434 | 'invalid_clone_uri': _(u'Invalid clone url, provide a ' | |
435 | 'valid clone http(s)/svn+http(s) url') |
|
435 | 'valid clone http(s)/svn+http(s) url') | |
436 | } |
|
436 | } | |
437 |
|
437 | |||
438 | def validate_python(self, value, state): |
|
438 | def validate_python(self, value, state): | |
439 | repo_type = value.get('repo_type') |
|
439 | repo_type = value.get('repo_type') | |
440 | url = value.get('clone_uri') |
|
440 | url = value.get('clone_uri') | |
441 |
|
441 | |||
442 | if not url: |
|
442 | if not url: | |
443 | pass |
|
443 | pass | |
444 | else: |
|
444 | else: | |
445 | try: |
|
445 | try: | |
446 | url_handler(repo_type, url, make_ui('db', clear_session=False)) |
|
446 | url_handler(repo_type, url, make_ui('db', clear_session=False)) | |
447 | except Exception: |
|
447 | except Exception: | |
448 | log.exception('Url validation failed') |
|
448 | log.exception('Url validation failed') | |
449 | msg = M(self, 'clone_uri') |
|
449 | msg = M(self, 'clone_uri') | |
450 | raise formencode.Invalid(msg, value, state, |
|
450 | raise formencode.Invalid(msg, value, state, | |
451 | error_dict=dict(clone_uri=msg) |
|
451 | error_dict=dict(clone_uri=msg) | |
452 | ) |
|
452 | ) | |
453 | return _validator |
|
453 | return _validator | |
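ValidCloneUri only acts when clone_uri is non-empty; checking a real URL goes over the network through the handlers above and needs make_ui('db', ...), i.e. a configured database. A hedged sketch with placeholder URLs:

    v = ValidCloneUri()()
    v.to_python({'repo_type': 'hg', 'clone_uri': ''})     # empty URL is allowed
    v.to_python({'repo_type': 'git',
                 'clone_uri': 'http://example.com/repo.git'})
    # unreachable or unsupported URLs raise formencode.Invalid
    # with error_dict={'clone_uri': ...}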
454 |
|
454 | |||
455 |
|
455 | |||
456 | def ValidForkType(old_data={}): |
|
456 | def ValidForkType(old_data={}): | |
457 | class _validator(formencode.validators.FancyValidator): |
|
457 | class _validator(formencode.validators.FancyValidator): | |
458 | messages = { |
|
458 | messages = { | |
459 | 'invalid_fork_type': _(u'Fork has to be the same type as parent') |
|
459 | 'invalid_fork_type': _(u'Fork has to be the same type as parent') | |
460 | } |
|
460 | } | |
461 |
|
461 | |||
462 | def validate_python(self, value, state): |
|
462 | def validate_python(self, value, state): | |
463 | if old_data['repo_type'] != value: |
|
463 | if old_data['repo_type'] != value: | |
464 | msg = M(self, 'invalid_fork_type', state) |
|
464 | msg = M(self, 'invalid_fork_type', state) | |
465 | raise formencode.Invalid(msg, value, state, |
|
465 | raise formencode.Invalid(msg, value, state, | |
466 | error_dict=dict(repo_type=msg) |
|
466 | error_dict=dict(repo_type=msg) | |
467 | ) |
|
467 | ) | |
468 | return _validator |
|
468 | return _validator | |
469 |
|
469 | |||
470 |
|
470 | |||
471 | def CanWriteGroup(): |
|
471 | def CanWriteGroup(): | |
472 | class _validator(formencode.validators.FancyValidator): |
|
472 | class _validator(formencode.validators.FancyValidator): | |
473 | messages = { |
|
473 | messages = { | |
474 | 'permission_denied': _(u"You don't have permissions " |
|
474 | 'permission_denied': _(u"You don't have permissions " | |
475 | "to create repository in this group"), |
|
475 | "to create repository in this group"), | |
476 | 'permission_denied_root': _(u"no permission to create repository " |
|
476 | 'permission_denied_root': _(u"no permission to create repository " | |
477 | "in root location") |
|
477 | "in root location") | |
478 | } |
|
478 | } | |
479 |
|
479 | |||
480 | def _to_python(self, value, state): |
|
480 | def _to_python(self, value, state): | |
481 | #root location |
|
481 | #root location | |
482 | if value in [-1, "-1"]: |
|
482 | if value in [-1, "-1"]: | |
483 | return None |
|
483 | return None | |
484 | return value |
|
484 | return value | |
485 |
|
485 | |||
486 | def validate_python(self, value, state): |
|
486 | def validate_python(self, value, state): | |
487 | gr = RepoGroup.get(value) |
|
487 | gr = RepoGroup.get(value) | |
488 | gr_name = gr.group_name if gr else None # None means ROOT location |
|
488 | gr_name = gr.group_name if gr else None # None means ROOT location | |
489 | val = HasReposGroupPermissionAny('group.write', 'group.admin') |
|
489 | val = HasReposGroupPermissionAny('group.write', 'group.admin') | |
490 | can_create_repos = HasPermissionAny('hg.admin', 'hg.create.repository') |
|
490 | can_create_repos = HasPermissionAny('hg.admin', 'hg.create.repository') | |
491 | forbidden = not val(gr_name, 'can write into group validator') |
|
491 | forbidden = not val(gr_name, 'can write into group validator') | |
492 | #parent group need to be existing |
|
492 | #parent group need to be existing | |
493 | if gr and forbidden: |
|
493 | if gr and forbidden: | |
494 | msg = M(self, 'permission_denied', state) |
|
494 | msg = M(self, 'permission_denied', state) | |
495 | raise formencode.Invalid(msg, value, state, |
|
495 | raise formencode.Invalid(msg, value, state, | |
496 | error_dict=dict(repo_type=msg) |
|
496 | error_dict=dict(repo_type=msg) | |
497 | ) |
|
497 | ) | |
498 | ## check if we can write to root location ! |
|
498 | ## check if we can write to root location ! | |
499 | elif gr is None and can_create_repos() is False: |
|
499 | elif gr is None and can_create_repos() is False: | |
500 | msg = M(self, 'permission_denied_root', state) |
|
500 | msg = M(self, 'permission_denied_root', state) | |
501 | raise formencode.Invalid(msg, value, state, |
|
501 | raise formencode.Invalid(msg, value, state, | |
502 | error_dict=dict(repo_type=msg) |
|
502 | error_dict=dict(repo_type=msg) | |
503 | ) |
|
503 | ) | |
504 |
|
504 | |||
505 | return _validator |
|
505 | return _validator | |
506 |
|
506 | |||
507 |
|
507 | |||
508 | def CanCreateGroup(can_create_in_root=False): |
|
508 | def CanCreateGroup(can_create_in_root=False): | |
509 | class _validator(formencode.validators.FancyValidator): |
|
509 | class _validator(formencode.validators.FancyValidator): | |
510 | messages = { |
|
510 | messages = { | |
511 | 'permission_denied': _(u"You don't have permissions " |
|
511 | 'permission_denied': _(u"You don't have permissions " | |
512 | "to create a group in this location") |
|
512 | "to create a group in this location") | |
513 | } |
|
513 | } | |
514 |
|
514 | |||
515 | def to_python(self, value, state): |
|
515 | def to_python(self, value, state): | |
516 | #root location |
|
516 | #root location | |
517 | if value in [-1, "-1"]: |
|
517 | if value in [-1, "-1"]: | |
518 | return None |
|
518 | return None | |
519 | return value |
|
519 | return value | |
520 |
|
520 | |||
521 | def validate_python(self, value, state): |
|
521 | def validate_python(self, value, state): | |
522 | gr = RepoGroup.get(value) |
|
522 | gr = RepoGroup.get(value) | |
523 | gr_name = gr.group_name if gr else None # None means ROOT location |
|
523 | gr_name = gr.group_name if gr else None # None means ROOT location | |
524 |
|
524 | |||
525 | if can_create_in_root and gr is None: |
|
525 | if can_create_in_root and gr is None: | |
526 | #we can create in root, we're fine no validations required |
|
526 | #we can create in root, we're fine no validations required | |
527 | return |
|
527 | return | |
528 |
|
528 | |||
529 | forbidden_in_root = gr is None and can_create_in_root is False |
|
529 | forbidden_in_root = gr is None and can_create_in_root is False | |
530 | val = HasReposGroupPermissionAny('group.admin') |
|
530 | val = HasReposGroupPermissionAny('group.admin') | |
531 | forbidden = not val(gr_name, 'can create group validator') |
|
531 | forbidden = not val(gr_name, 'can create group validator') | |
532 | if forbidden_in_root or forbidden: |
|
532 | if forbidden_in_root or forbidden: | |
533 | msg = M(self, 'permission_denied', state) |
|
533 | msg = M(self, 'permission_denied', state) | |
534 | raise formencode.Invalid(msg, value, state, |
|
534 | raise formencode.Invalid(msg, value, state, | |
535 | error_dict=dict(group_parent_id=msg) |
|
535 | error_dict=dict(group_parent_id=msg) | |
536 | ) |
|
536 | ) | |
537 |
|
537 | |||
538 | return _validator |
|
538 | return _validator | |
539 |
|
539 | |||
540 |
|
540 | |||
541 | def ValidPerms(type_='repo'): |
|
541 | def ValidPerms(type_='repo'): | |
542 | if type_ == 'group': |
|
542 | if type_ == 'group': | |
543 | EMPTY_PERM = 'group.none' |
|
543 | EMPTY_PERM = 'group.none' | |
544 | elif type_ == 'repo': |
|
544 | elif type_ == 'repo': | |
545 | EMPTY_PERM = 'repository.none' |
|
545 | EMPTY_PERM = 'repository.none' | |
546 |
|
546 | |||
547 | class _validator(formencode.validators.FancyValidator): |
|
547 | class _validator(formencode.validators.FancyValidator): | |
548 | messages = { |
|
548 | messages = { | |
549 | 'perm_new_member_name': |
|
549 | 'perm_new_member_name': | |
550 | _(u'This username or user group name is not valid') |
|
550 | _(u'This username or user group name is not valid') | |
551 | } |
|
551 | } | |
552 |
|
552 | |||
553 | def to_python(self, value, state): |
|
553 | def to_python(self, value, state): | |
554 | perms_update = OrderedSet() |
|
554 | perms_update = OrderedSet() | |
555 | perms_new = OrderedSet() |
|
555 | perms_new = OrderedSet() | |
556 | # build a list of permission to update and new permission to create |
|
556 | # build a list of permission to update and new permission to create | |
557 |
|
557 | |||
558 | #CLEAN OUT ORG VALUE FROM NEW MEMBERS, and group them using |
|
558 | #CLEAN OUT ORG VALUE FROM NEW MEMBERS, and group them using | |
559 | new_perms_group = defaultdict(dict) |
|
559 | new_perms_group = defaultdict(dict) | |
560 | for k, v in value.copy().iteritems(): |
|
560 | for k, v in value.copy().iteritems(): | |
561 | if k.startswith('perm_new_member'): |
|
561 | if k.startswith('perm_new_member'): | |
562 | del value[k] |
|
562 | del value[k] | |
563 | _type, part = k.split('perm_new_member_') |
|
563 | _type, part = k.split('perm_new_member_') | |
564 | args = part.split('_') |
|
564 | args = part.split('_') | |
565 | if len(args) == 1: |
|
565 | if len(args) == 1: | |
566 | new_perms_group[args[0]]['perm'] = v |
|
566 | new_perms_group[args[0]]['perm'] = v | |
567 | elif len(args) == 2: |
|
567 | elif len(args) == 2: | |
568 | _key, pos = args |
|
568 | _key, pos = args | |
569 | new_perms_group[pos][_key] = v |
|
569 | new_perms_group[pos][_key] = v | |
570 |
|
570 | |||
571 | # fill new permissions in order of how they were added |
|
571 | # fill new permissions in order of how they were added | |
572 | for k in sorted(map(int, new_perms_group.keys())): |
|
572 | for k in sorted(map(int, new_perms_group.keys())): | |
573 | perm_dict = new_perms_group[str(k)] |
|
573 | perm_dict = new_perms_group[str(k)] | |
574 | new_member = perm_dict.get('name') |
|
574 | new_member = perm_dict.get('name') | |
575 | new_perm = perm_dict.get('perm') |
|
575 | new_perm = perm_dict.get('perm') | |
576 | new_type = perm_dict.get('type') |
|
576 | new_type = perm_dict.get('type') | |
577 | if new_member and new_perm and new_type: |
|
577 | if new_member and new_perm and new_type: | |
578 | perms_new.add((new_member, new_perm, new_type)) |
|
578 | perms_new.add((new_member, new_perm, new_type)) | |
579 |
|
579 | |||
580 | for k, v in value.iteritems(): |
|
580 | for k, v in value.iteritems(): | |
581 | if k.startswith('u_perm_') or k.startswith('g_perm_'): |
|
581 | if k.startswith('u_perm_') or k.startswith('g_perm_'): | |
582 | member = k[7:] |
|
582 | member = k[7:] | |
583 | t = {'u': 'user', |
|
583 | t = {'u': 'user', | |
584 | 'g': 'users_group' |
|
584 | 'g': 'users_group' | |
585 | }[k[0]] |
|
585 | }[k[0]] | |
586 | if member == 'default': |
|
586 | if member == 'default': | |
587 | if value.get('repo_private'): |
|
587 | if value.get('repo_private'): | |
588 | # set none for default when updating to |
|
588 | # set none for default when updating to | |
589 | # private repo |
|
589 | # private repo | |
590 | v = EMPTY_PERM |
|
590 | v = EMPTY_PERM | |
591 | perms_update.add((member, v, t)) |
|
591 | perms_update.add((member, v, t)) | |
592 | #always set NONE when private flag is set |
|
592 | #always set NONE when private flag is set | |
593 | if value.get('repo_private'): |
|
593 | if value.get('repo_private'): | |
594 | perms_update.add(('default', EMPTY_PERM, 'user')) |
|
594 | perms_update.add(('default', EMPTY_PERM, 'user')) | |
595 |
|
595 | |||
596 | value['perms_updates'] = list(perms_update) |
|
596 | value['perms_updates'] = list(perms_update) | |
597 | value['perms_new'] = list(perms_new) |
|
597 | value['perms_new'] = list(perms_new) | |
598 |
|
598 | |||
599 | # update permissions |
|
599 | # update permissions | |
600 | for k, v, t in perms_new: |
|
600 | for k, v, t in perms_new: | |
601 | try: |
|
601 | try: | |
602 | if t is 'user': |
|
602 | if t is 'user': | |
603 | self.user_db = User.query()\ |
|
603 | self.user_db = User.query()\ | |
604 | .filter(User.active == True)\ |
|
604 | .filter(User.active == True)\ | |
605 | .filter(User.username == k).one() |
|
605 | .filter(User.username == k).one() | |
606 | if t is 'users_group': |
|
606 | if t is 'users_group': | |
607 | self.user_db = UsersGroup.query()\ |
|
607 | self.user_db = UsersGroup.query()\ | |
608 | .filter(UsersGroup.users_group_active == True)\ |
|
608 | .filter(UsersGroup.users_group_active == True)\ | |
609 | .filter(UsersGroup.users_group_name == k).one() |
|
609 | .filter(UsersGroup.users_group_name == k).one() | |
610 |
|
610 | |||
611 | except Exception: |
|
611 | except Exception: | |
612 | log.exception('Updated permission failed') |
|
612 | log.exception('Updated permission failed') | |
613 | msg = M(self, 'perm_new_member_type', state) |
|
613 | msg = M(self, 'perm_new_member_type', state) | |
614 | raise formencode.Invalid(msg, value, state, |
|
614 | raise formencode.Invalid(msg, value, state, | |
615 | error_dict=dict(perm_new_member_name=msg) |
|
615 | error_dict=dict(perm_new_member_name=msg) | |
616 | ) |
|
616 | ) | |
617 | return value |
|
617 | return value | |
618 | return _validator |
|
618 | return _validator | |
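A worked example of how ValidPerms regroups the flat form fields (hypothetical member names; the final existence check on newly added members queries the database, so an active user 'jane' is assumed to exist):

    form_value = {
        'u_perm_john': 'repository.write',        # existing user row in the form
        'g_perm_developers': 'repository.read',   # existing user-group row
        'perm_new_member_name_1': 'jane',         # one newly added member
        'perm_new_member_perm_1': 'repository.admin',
        'perm_new_member_type_1': 'user',
        'repo_private': False,
    }
    result = ValidPerms(type_='repo')().to_python(form_value, None)
    # result['perms_updates'] contains ('john', 'repository.write', 'user')
    #                         and ('developers', 'repository.read', 'users_group')
    # result['perms_new']     == [('jane', 'repository.admin', 'user')]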
619 |
|
619 | |||
620 |
|
620 | |||
621 | def ValidSettings(): |
|
621 | def ValidSettings(): | |
622 | class _validator(formencode.validators.FancyValidator): |
|
622 | class _validator(formencode.validators.FancyValidator): | |
623 | def _to_python(self, value, state): |
|
623 | def _to_python(self, value, state): | |
624 | # settings form for users that are not admin |
|
624 | # settings form for users that are not admin | |
625 | # can't edit certain parameters, it's extra backup if they mangle |
|
625 | # can't edit certain parameters, it's extra backup if they mangle | |
626 | # with forms |
|
626 | # with forms | |
627 |
|
627 | |||
628 | forbidden_params = [ |
|
628 | forbidden_params = [ | |
629 | 'user', 'repo_type', 'repo_enable_locking', |
|
629 | 'user', 'repo_type', 'repo_enable_locking', | |
630 | 'repo_enable_downloads', 'repo_enable_statistics' |
|
630 | 'repo_enable_downloads', 'repo_enable_statistics' | |
631 | ] |
|
631 | ] | |
632 |
|
632 | |||
633 | for param in forbidden_params: |
|
633 | for param in forbidden_params: | |
634 | if param in value: |
|
634 | if param in value: | |
635 | del value[param] |
|
635 | del value[param] | |
636 | return value |
|
636 | return value | |
637 |
|
637 | |||
638 | def validate_python(self, value, state): |
|
638 | def validate_python(self, value, state): | |
639 | pass |
|
639 | pass | |
640 | return _validator |
|
640 | return _validator | |
641 |
|
641 | |||
642 |
|
642 | |||
643 | def ValidPath(): |
|
643 | def ValidPath(): | |
644 | class _validator(formencode.validators.FancyValidator): |
|
644 | class _validator(formencode.validators.FancyValidator): | |
645 | messages = { |
|
645 | messages = { | |
646 | 'invalid_path': _(u'This is not a valid path') |
|
646 | 'invalid_path': _(u'This is not a valid path') | |
647 | } |
|
647 | } | |
648 |
|
648 | |||
649 | def validate_python(self, value, state): |
|
649 | def validate_python(self, value, state): | |
650 | if not os.path.isdir(value): |
|
650 | if not os.path.isdir(value): | |
651 | msg = M(self, 'invalid_path', state) |
|
651 | msg = M(self, 'invalid_path', state) | |
652 | raise formencode.Invalid(msg, value, state, |
|
652 | raise formencode.Invalid(msg, value, state, | |
653 | error_dict=dict(paths_root_path=msg) |
|
653 | error_dict=dict(paths_root_path=msg) | |
654 | ) |
|
654 | ) | |
655 | return _validator |
|
655 | return _validator | |
656 |
|
656 | |||
657 |
|
657 | |||
658 | def UniqSystemEmail(old_data={}): |
|
658 | def UniqSystemEmail(old_data={}): | |
659 | class _validator(formencode.validators.FancyValidator): |
|
659 | class _validator(formencode.validators.FancyValidator): | |
660 | messages = { |
|
660 | messages = { | |
661 | 'email_taken': _(u'This e-mail address is already taken') |
|
661 | 'email_taken': _(u'This e-mail address is already taken') | |
662 | } |
|
662 | } | |
663 |
|
663 | |||
664 | def _to_python(self, value, state): |
|
664 | def _to_python(self, value, state): | |
665 | return value.lower() |
|
665 | return value.lower() | |
666 |
|
666 | |||
667 | def validate_python(self, value, state): |
|
667 | def validate_python(self, value, state): | |
668 | if (old_data.get('email') or '').lower() != value: |
|
668 | if (old_data.get('email') or '').lower() != value: | |
669 | user = User.get_by_email(value, case_insensitive=True) |
|
669 | user = User.get_by_email(value, case_insensitive=True) | |
670 | if user: |
|
670 | if user: | |
671 | msg = M(self, 'email_taken', state) |
|
671 | msg = M(self, 'email_taken', state) | |
672 | raise formencode.Invalid(msg, value, state, |
|
672 | raise formencode.Invalid(msg, value, state, | |
673 | error_dict=dict(email=msg) |
|
673 | error_dict=dict(email=msg) | |
674 | ) |
|
674 | ) | |
675 | return _validator |
|
675 | return _validator | |
676 |
|
676 | |||
677 |
|
677 | |||
678 | def ValidSystemEmail(): |
|
678 | def ValidSystemEmail(): | |
679 | class _validator(formencode.validators.FancyValidator): |
|
679 | class _validator(formencode.validators.FancyValidator): | |
680 | messages = { |
|
680 | messages = { | |
681 | 'non_existing_email': _(u'e-mail "%(email)s" does not exist.') |
|
681 | 'non_existing_email': _(u'e-mail "%(email)s" does not exist.') | |
682 | } |
|
682 | } | |
683 |
|
683 | |||
684 | def _to_python(self, value, state): |
|
684 | def _to_python(self, value, state): | |
685 | return value.lower() |
|
685 | return value.lower() | |
686 |
|
686 | |||
687 | def validate_python(self, value, state): |
|
687 | def validate_python(self, value, state): | |
688 | user = User.get_by_email(value, case_insensitive=True) |
|
688 | user = User.get_by_email(value, case_insensitive=True) | |
689 | if user is None: |
|
689 | if user is None: | |
690 | msg = M(self, 'non_existing_email', state, email=value) |
|
690 | msg = M(self, 'non_existing_email', state, email=value) | |
691 | raise formencode.Invalid(msg, value, state, |
|
691 | raise formencode.Invalid(msg, value, state, | |
692 | error_dict=dict(email=msg) |
|
692 | error_dict=dict(email=msg) | |
693 | ) |
|
693 | ) | |
694 |
|
694 | |||
695 | return _validator |
|
695 | return _validator | |
696 |
|
696 | |||
697 |
|
697 | |||
698 | def LdapLibValidator(): |
|
698 | def LdapLibValidator(): | |
699 | class _validator(formencode.validators.FancyValidator): |
|
699 | class _validator(formencode.validators.FancyValidator): | |
700 | messages = { |
|
700 | messages = { | |
701 |
|
701 | |||
702 | } |
|
702 | } | |
703 |
|
703 | |||
704 | def validate_python(self, value, state): |
|
704 | def validate_python(self, value, state): | |
705 | try: |
|
705 | try: | |
706 | import ldap |
|
706 | import ldap | |
707 | ldap # pyflakes silence ! |
|
707 | ldap # pyflakes silence ! | |
708 | except ImportError: |
|
708 | except ImportError: | |
709 | raise LdapImportError() |
|
709 | raise LdapImportError() | |
710 |
|
710 | |||
711 | return _validator |
|
711 | return _validator | |
712 |
|
712 | |||
713 |
|
713 | |||
714 | def AttrLoginValidator(): |
|
714 | def AttrLoginValidator(): | |
715 | class _validator(formencode.validators.FancyValidator): |
|
715 | class _validator(formencode.validators.FancyValidator): | |
716 | messages = { |
|
716 | messages = { | |
717 | 'invalid_cn': |
|
717 | 'invalid_cn': | |
718 | _(u'The LDAP Login attribute of the CN must be specified - ' |
|
718 | _(u'The LDAP Login attribute of the CN must be specified - ' | |
719 | 'this is the name of the attribute that is equivalent ' |
|
719 | 'this is the name of the attribute that is equivalent ' | |
720 | 'to "username"') |
|
720 | 'to "username"') | |
721 | } |
|
721 | } | |
722 |
|
722 | |||
723 | def validate_python(self, value, state): |
|
723 | def validate_python(self, value, state): | |
724 | if not value or not isinstance(value, (str, unicode)): |
|
724 | if not value or not isinstance(value, (str, unicode)): | |
725 | msg = M(self, 'invalid_cn', state) |
|
725 | msg = M(self, 'invalid_cn', state) | |
726 | raise formencode.Invalid(msg, value, state, |
|
726 | raise formencode.Invalid(msg, value, state, | |
727 | error_dict=dict(ldap_attr_login=msg) |
|
727 | error_dict=dict(ldap_attr_login=msg) | |
728 | ) |
|
728 | ) | |
729 |
|
729 | |||
730 | return _validator |
|
730 | return _validator | |
731 |
|
731 | |||
732 |
|
732 | |||
733 | def NotReviewedRevisions(repo_id): |
|
733 | def NotReviewedRevisions(repo_id): | |
734 | class _validator(formencode.validators.FancyValidator): |
|
734 | class _validator(formencode.validators.FancyValidator): | |
735 | messages = { |
|
735 | messages = { | |
736 | 'rev_already_reviewed': |
|
736 | 'rev_already_reviewed': | |
737 | _(u'Revisions %(revs)s are already part of pull request ' |
|
737 | _(u'Revisions %(revs)s are already part of pull request ' | |
738 | 'or have set status') |
|
738 | 'or have set status') | |
739 | } |
|
739 | } | |
740 |
|
740 | |||
741 | def validate_python(self, value, state): |
|
741 | def validate_python(self, value, state): | |
742 | # check revisions if they are not reviewed, or a part of another |
|
742 | # check revisions if they are not reviewed, or a part of another | |
743 | # pull request |
|
743 | # pull request | |
744 | statuses = ChangesetStatus.query()\ |
|
744 | statuses = ChangesetStatus.query()\ | |
745 | .filter(ChangesetStatus.revision.in_(value))\ |
|
745 | .filter(ChangesetStatus.revision.in_(value))\ | |
746 | .filter(ChangesetStatus.repo_id == repo_id)\ |
|
746 | .filter(ChangesetStatus.repo_id == repo_id)\ | |
747 | .all() |
|
747 | .all() | |
748 |
|
748 | |||
749 | errors = [] |
|
749 | errors = [] | |
750 | for cs in statuses: |
|
750 | for cs in statuses: | |
751 | if cs.pull_request_id: |
|
751 | if cs.pull_request_id: | |
752 | errors.append(['pull_req', cs.revision[:12]]) |
|
752 | errors.append(['pull_req', cs.revision[:12]]) | |
753 | elif cs.status: |
|
753 | elif cs.status: | |
754 | errors.append(['status', cs.revision[:12]]) |
|
754 | errors.append(['status', cs.revision[:12]]) | |
755 |
|
755 | |||
756 | if errors: |
|
756 | if errors: | |
757 | revs = ','.join([x[1] for x in errors]) |
|
757 | revs = ','.join([x[1] for x in errors]) | |
758 | msg = M(self, 'rev_already_reviewed', state, revs=revs) |
|
758 | msg = M(self, 'rev_already_reviewed', state, revs=revs) | |
759 | raise formencode.Invalid(msg, value, state, |
|
759 | raise formencode.Invalid(msg, value, state, | |
760 | error_dict=dict(revisions=revs) |
|
760 | error_dict=dict(revisions=revs) | |
761 | ) |
|
761 | ) | |
762 |
|
762 | |||
763 | return _validator |
|
763 | return _validator | |
764 |
|
764 | |||
765 |
|
765 | |||
766 | def ValidIp(): |
|
766 | def ValidIp(): | |
767 | class _validator(CIDR): |
|
767 | class _validator(CIDR): | |
768 | messages = dict( |
|
768 | messages = dict( | |
769 | badFormat=_('Please enter a valid IPv4 or IPv6 address'), |
|
769 | badFormat=_('Please enter a valid IPv4 or IPv6 address'), | |
770 | illegalBits=_('The network size (bits) must be within the range' |
|
770 | illegalBits=_('The network size (bits) must be within the range' | |
771 | ' of 0-32 (not %(bits)r)')) |
|
771 | ' of 0-32 (not %(bits)r)')) | |
772 |
|
772 | |||
773 | def to_python(self, value, state): |
|
773 | def to_python(self, value, state): | |
774 | v = super(_validator, self).to_python(value, state) |
|
774 | v = super(_validator, self).to_python(value, state) | |
775 | v = v.strip() |
|
775 | v = v.strip() | |
776 | net = ipaddr.IPNetwork(address=v) |
|
776 | net = ipaddr.IPNetwork(address=v) | |
777 | if isinstance(net, ipaddr.IPv4Network): |
|
777 | if isinstance(net, ipaddr.IPv4Network): | |
778 | #if IPv4 doesn't end with a mask, add /32 |
|
778 | #if IPv4 doesn't end with a mask, add /32 | |
779 | if '/' not in value: |
|
779 | if '/' not in value: | |
780 | v += '/32' |
|
780 | v += '/32' | |
781 | if isinstance(net, ipaddr.IPv6Network): |
|
781 | if isinstance(net, ipaddr.IPv6Network): | |
782 | #if IPv6 doesn't end with a mask, add /128 |
|
782 | #if IPv6 doesn't end with a mask, add /128 | |
783 | if '/' not in value: |
|
783 | if '/' not in value: | |
784 | v += '/128' |
|
784 | v += '/128' | |
785 | return v |
|
785 | return v | |
786 |
|
786 | |||
787 | def validate_python(self, value, state): |
|
787 | def validate_python(self, value, state): | |
788 | try: |
|
788 | try: | |
789 | addr = value.strip() |
|
789 | addr = value.strip() | |
790 | #this raises a ValueError if address is not IPv4 or IPv6 |
|
790 | #this raises a ValueError if address is not IPv4 or IPv6 | |
791 | ipaddr.IPNetwork(address=addr) |
|
791 | ipaddr.IPNetwork(address=addr) | |
792 | except ValueError: |
|
792 | except ValueError: | |
793 | raise formencode.Invalid(self.message('badFormat', state), |
|
793 | raise formencode.Invalid(self.message('badFormat', state), | |
794 | value, state) |
|
794 | value, state) | |
795 |
|
795 | |||
796 | return _validator |
|
796 | return _validator | |
797 |
|
797 | |||
798 |
|
798 | |||
799 | def FieldKey(): |
|
799 | def FieldKey(): | |
800 | class _validator(formencode.validators.FancyValidator): |
|
800 | class _validator(formencode.validators.FancyValidator): | |
801 | messages = dict( |
|
801 | messages = dict( | |
802 | badFormat=_('Key name can only consist of letters, ' |
|
802 | badFormat=_('Key name can only consist of letters, ' | |
803 | 'underscore, dash or numbers'),) |
|
803 | 'underscore, dash or numbers'),) | |
804 |
|
804 | |||
805 | def validate_python(self, value, state): |
|
805 | def validate_python(self, value, state): | |
806 | if not re.match('[a-zA-Z0-9_-]+$', value): |
|
806 | if not re.match('[a-zA-Z0-9_-]+$', value): | |
807 | raise formencode.Invalid(self.message('badFormat', state), |
|
807 | raise formencode.Invalid(self.message('badFormat', state), | |
808 | value, state) |
|
808 | value, state) | |
809 | return _validator |
|
809 | return _validator |
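
The validators in the hunk above all follow the same factory pattern: a module-level function defines and returns a local ``_validator`` class, so per-call data such as ``old_data`` or ``repo_id`` stays captured in a closure and the class can later be plugged into a formencode schema. A minimal, self-contained sketch of that pattern, using only stock formencode (the names below are illustrative and are not part of this changeset)::

    import os

    import formencode
    from formencode.validators import FancyValidator


    def ValidPathSketch():
        # The factory returns a class (not an instance), mirroring the code above.
        class _validator(FancyValidator):
            messages = {
                'invalid_path': u'This is not a valid path'
            }

            def validate_python(self, value, state):
                # Reject anything that is not an existing directory on disk.
                if not os.path.isdir(value):
                    msg = self.message('invalid_path', state)
                    raise formencode.Invalid(msg, value, state,
                                             error_dict=dict(paths_root_path=msg))
        return _validator


    # Usage: instantiate the returned class, then run values through to_python().
    checker = ValidPathSketch()()
    checker.to_python('/tmp')             # returns '/tmp' when the directory exists
    try:
        checker.to_python('/no/such/dir')
    except formencode.Invalid as e:
        print(e.error_dict)               # e.g. {'paths_root_path': 'This is not a valid path'}
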
@@ -1,215 +1,215 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="/base/base.html"/> |
|
2 | <%inherit file="/base/base.html"/> | |
3 |
|
3 | |||
4 | <%def name="title()"> |
|
4 | <%def name="title()"> | |
5 | ${_('Permissions administration')} - ${c.rhodecode_name} |
|
5 | ${_('Permissions administration')} - ${c.rhodecode_name} | |
6 | </%def> |
|
6 | </%def> | |
7 |
|
7 | |||
8 | <%def name="breadcrumbs_links()"> |
|
8 | <%def name="breadcrumbs_links()"> | |
9 | ${h.link_to(_('Admin'),h.url('admin_home'))} |
|
9 | ${h.link_to(_('Admin'),h.url('admin_home'))} | |
10 | » |
|
10 | » | |
11 | ${_('permissions')} |
|
11 | ${_('permissions')} | |
12 | </%def> |
|
12 | </%def> | |
13 |
|
13 | |||
14 | <%def name="page_nav()"> |
|
14 | <%def name="page_nav()"> | |
15 | ${self.menu('admin')} |
|
15 | ${self.menu('admin')} | |
16 | </%def> |
|
16 | </%def> | |
17 |
|
17 | |||
18 | <%def name="main()"> |
|
18 | <%def name="main()"> | |
19 | <div class="box box-left"> |
|
19 | <div class="box box-left"> | |
20 | <!-- box / title --> |
|
20 | <!-- box / title --> | |
21 | <div class="title"> |
|
21 | <div class="title"> | |
22 | ${self.breadcrumbs()} |
|
22 | ${self.breadcrumbs()} | |
23 | </div> |
|
23 | </div> | |
24 | <h3>${_('Default permissions')}</h3> |
|
24 | <h3>${_('Default permissions')}</h3> | |
25 | ${h.form(url('permission', id='default'),method='put')} |
|
25 | ${h.form(url('permission', id='default'),method='put')} | |
26 | <div class="form"> |
|
26 | <div class="form"> | |
27 | <!-- fields --> |
|
27 | <!-- fields --> | |
28 | <div class="fields"> |
|
28 | <div class="fields"> | |
29 | <div class="field"> |
|
29 | <div class="field"> | |
30 | <div class="label label-checkbox"> |
|
30 | <div class="label label-checkbox"> | |
31 | <label for="anonymous">${_('Anonymous access')}:</label> |
|
31 | <label for="anonymous">${_('Anonymous access')}:</label> | |
32 | </div> |
|
32 | </div> | |
33 | <div class="checkboxes"> |
|
33 | <div class="checkboxes"> | |
34 | <div class="checkbox"> |
|
34 | <div class="checkbox"> | |
35 | ${h.checkbox('anonymous',True)} |
|
35 | ${h.checkbox('anonymous',True)} | |
36 | </div> |
|
36 | </div> | |
37 | </div> |
|
37 | </div> | |
38 | </div> |
|
38 | </div> | |
39 | <div class="field"> |
|
39 | <div class="field"> | |
40 | <div class="label"> |
|
40 | <div class="label"> | |
41 | <label for="default_repo_perm">${_('Repository')}:</label> |
|
41 | <label for="default_repo_perm">${_('Repository')}:</label> | |
42 | </div> |
|
42 | </div> | |
43 | <div class="select"> |
|
43 | <div class="select"> | |
44 | ${h.select('default_repo_perm','',c.repo_perms_choices)} |
|
44 | ${h.select('default_repo_perm','',c.repo_perms_choices)} | |
45 |
|
45 | |||
46 | ${h.checkbox('overwrite_default_repo','true')} |
|
46 | ${h.checkbox('overwrite_default_repo','true')} | |
47 | <label for="overwrite_default_repo"> |
|
47 | <label for="overwrite_default_repo"> | |
48 | <span class="tooltip" |
|
48 | <span class="tooltip" | |
49 | title="${h.tooltip(_('All default permissions on each repository will be reset to chosen permission, note that all custom default permission on repositories will be lost'))}"> |
|
49 | title="${h.tooltip(_('All default permissions on each repository will be reset to chosen permission, note that all custom default permission on repositories will be lost'))}"> | |
50 | ${_('overwrite existing settings')}</span> </label> |
|
50 | ${_('overwrite existing settings')}</span> </label> | |
51 | </div> |
|
51 | </div> | |
52 | </div> |
|
52 | </div> | |
53 | <div class="field"> |
|
53 | <div class="field"> | |
54 | <div class="label"> |
|
54 | <div class="label"> | |
55 | <label for="default_group_perm">${_('Repository group')}:</label> |
|
55 | <label for="default_group_perm">${_('Repository group')}:</label> | |
56 | </div> |
|
56 | </div> | |
57 | <div class="select"> |
|
57 | <div class="select"> | |
58 | ${h.select('default_group_perm','',c.group_perms_choices)} |
|
58 | ${h.select('default_group_perm','',c.group_perms_choices)} | |
59 | ${h.checkbox('overwrite_default_group','true')} |
|
59 | ${h.checkbox('overwrite_default_group','true')} | |
60 | <label for="overwrite_default_group"> |
|
60 | <label for="overwrite_default_group"> | |
61 | <span class="tooltip" |
|
61 | <span class="tooltip" | |
62 | title="${h.tooltip(_('All default permissions on each repository group will be reset to chosen permission, note that all custom default permission on repository groups will be lost'))}"> |
|
62 | title="${h.tooltip(_('All default permissions on each repository group will be reset to chosen permission, note that all custom default permission on repository groups will be lost'))}"> | |
63 | ${_('overwrite existing settings')}</span> </label> |
|
63 | ${_('overwrite existing settings')}</span> </label> | |
64 |
|
64 | |||
65 | </div> |
|
65 | </div> | |
66 | </div> |
|
66 | </div> | |
67 | <div class="field"> |
|
67 | <div class="field"> | |
68 | <div class="label"> |
|
68 | <div class="label"> | |
69 | <label for="default_register">${_('Registration')}:</label> |
|
69 | <label for="default_register">${_('Registration')}:</label> | |
70 | </div> |
|
70 | </div> | |
71 | <div class="select"> |
|
71 | <div class="select"> | |
72 | ${h.select('default_register','',c.register_choices)} |
|
72 | ${h.select('default_register','',c.register_choices)} | |
73 | </div> |
|
73 | </div> | |
74 | </div> |
|
74 | </div> | |
75 | <div class="field"> |
|
75 | <div class="field"> | |
76 | <div class="label"> |
|
76 | <div class="label"> | |
77 | <label for="default_create">${_('Repository creation')}:</label> |
|
77 | <label for="default_create">${_('Repository creation')}:</label> | |
78 | </div> |
|
78 | </div> | |
79 | <div class="select"> |
|
79 | <div class="select"> | |
80 | ${h.select('default_create','',c.create_choices)} |
|
80 | ${h.select('default_create','',c.create_choices)} | |
81 | </div> |
|
81 | </div> | |
82 | </div> |
|
82 | </div> | |
83 | <div class="field"> |
|
83 | <div class="field"> | |
84 | <div class="label"> |
|
84 | <div class="label"> | |
85 | <label for="default_fork">${_('Repository forking')}:</label> |
|
85 | <label for="default_fork">${_('Repository forking')}:</label> | |
86 | </div> |
|
86 | </div> | |
87 | <div class="select"> |
|
87 | <div class="select"> | |
88 | ${h.select('default_fork','',c.fork_choices)} |
|
88 | ${h.select('default_fork','',c.fork_choices)} | |
89 | </div> |
|
89 | </div> | |
90 | </div> |
|
90 | </div> | |
91 | <div class="buttons"> |
|
91 | <div class="buttons"> | |
92 | ${h.submit('save',_('Save'),class_="ui-btn large")} |
|
92 | ${h.submit('save',_('Save'),class_="ui-btn large")} | |
93 | ${h.reset('reset',_('Reset'),class_="ui-btn large")} |
|
93 | ${h.reset('reset',_('Reset'),class_="ui-btn large")} | |
94 | </div> |
|
94 | </div> | |
95 | </div> |
|
95 | </div> | |
96 | </div> |
|
96 | </div> | |
97 | ${h.end_form()} |
|
97 | ${h.end_form()} | |
98 | </div> |
|
98 | </div> | |
99 |
|
99 | |||
100 | <div style="min-height:780px" class="box box-right"> |
|
100 | <div style="min-height:780px" class="box box-right"> | |
101 | <!-- box / title --> |
|
101 | <!-- box / title --> | |
102 | <div class="title"> |
|
102 | <div class="title"> | |
103 | <h5>${_('Default User Permissions')}</h5> |
|
103 | <h5>${_('Default User Permissions')}</h5> | |
104 | </div> |
|
104 | </div> | |
105 |
|
105 | |||
106 | ## permissions overview |
|
106 | ## permissions overview | |
107 | <div id="perms" class="table"> |
|
107 | <div id="perms" class="table"> | |
108 | %for section in sorted(c.perm_user.permissions.keys()): |
|
108 | %for section in sorted(c.perm_user.permissions.keys()): | |
109 | <div class="perms_section_head">${section.replace("_"," ").capitalize()}</div> |
|
109 | <div class="perms_section_head">${section.replace("_"," ").capitalize()}</div> | |
110 | %if not c.perm_user.permissions[section]: |
|
110 | %if not c.perm_user.permissions[section]: | |
111 | <span class="empty_data">${_('Nothing here yet')}</span> |
|
111 | <span class="empty_data">${_('Nothing here yet')}</span> | |
112 | %else: |
|
112 | %else: | |
113 | <div id='tbl_list_wrap_${section}' class="yui-skin-sam"> |
|
113 | <div id='tbl_list_wrap_${section}' class="yui-skin-sam"> | |
114 | <table id="tbl_list_${section}"> |
|
114 | <table id="tbl_list_${section}"> | |
115 | <thead> |
|
115 | <thead> | |
116 | <tr> |
|
116 | <tr> | |
117 | <th class="left">${_('Name')}</th> |
|
117 | <th class="left">${_('Name')}</th> | |
118 | <th class="left">${_('Permission')}</th> |
|
118 | <th class="left">${_('Permission')}</th> | |
119 | <th class="left">${_('Edit Permission')}</th> |
|
119 | <th class="left">${_('Edit Permission')}</th> | |
120 | </thead> |
|
120 | </thead> | |
121 | <tbody> |
|
121 | <tbody> | |
122 | %for k in c.perm_user.permissions[section]: |
|
122 | %for k in c.perm_user.permissions[section]: | |
123 | <% |
|
123 | <% | |
124 | if section != 'global': |
|
124 | if section != 'global': | |
125 | section_perm = c.perm_user.permissions[section].get(k) |
|
125 | section_perm = c.perm_user.permissions[section].get(k) | |
126 | _perm = section_perm.split('.')[-1] |
|
126 | _perm = section_perm.split('.')[-1] | |
127 | else: |
|
127 | else: | |
128 | _perm = section_perm = None |
|
128 | _perm = section_perm = None | |
129 | %> |
|
129 | %> | |
130 | <tr> |
|
130 | <tr> | |
131 | <td> |
|
131 | <td> | |
132 | %if section == 'repositories': |
|
132 | %if section == 'repositories': | |
133 | <a href="${h.url('summary_home',repo_name=k)}">${k}</a> |
|
133 | <a href="${h.url('summary_home',repo_name=k)}">${k}</a> | |
134 | %elif section == 'repositories_groups': |
|
134 | %elif section == 'repositories_groups': | |
135 | <a href="${h.url('repos_group_home',group_name=k)}">${k}</a> |
|
135 | <a href="${h.url('repos_group_home',group_name=k)}">${k}</a> | |
136 | %else: |
|
136 | %else: | |
137 | ${h.get_permission_name(k)} |
|
137 | ${h.get_permission_name(k)} | |
138 | %endif |
|
138 | %endif | |
139 | </td> |
|
139 | </td> | |
140 | <td> |
|
140 | <td> | |
141 | %if section == 'global': |
|
141 | %if section == 'global': | |
142 | ${h.bool2icon(k.split('.')[-1] != 'none')} |
|
142 | ${h.bool2icon(k.split('.')[-1] != 'none')} | |
143 | %else: |
|
143 | %else: | |
144 | <span class="perm_tag ${_perm}">${section_perm}</span> |
|
144 | <span class="perm_tag ${_perm}">${section_perm}</span> | |
145 | %endif |
|
145 | %endif | |
146 | </td> |
|
146 | </td> | |
147 | <td> |
|
147 | <td> | |
148 | %if section == 'repositories': |
|
148 | %if section == 'repositories': | |
149 | <a href="${h.url('edit_repo',repo_name=k,anchor='permissions_manage')}">${_('edit')}</a> |
|
149 | <a href="${h.url('edit_repo',repo_name=k,anchor='permissions_manage')}">${_('edit')}</a> | |
150 | %elif section == 'repositories_groups': |
|
150 | %elif section == 'repositories_groups': | |
151 | <a href="${h.url('edit_repos_group',group_name=k,anchor='permissions_manage')}">${_('edit')}</a> |
|
151 | <a href="${h.url('edit_repos_group',group_name=k,anchor='permissions_manage')}">${_('edit')}</a> | |
152 | %else: |
|
152 | %else: | |
153 | -- |
|
153 | -- | |
154 | %endif |
|
154 | %endif | |
155 | </td> |
|
155 | </td> | |
156 | </tr> |
|
156 | </tr> | |
157 | %endfor |
|
157 | %endfor | |
158 | </tbody> |
|
158 | </tbody> | |
159 | </table> |
|
159 | </table> | |
160 | </div> |
|
160 | </div> | |
161 | %endif |
|
161 | %endif | |
162 | %endfor |
|
162 | %endfor | |
163 | </div> |
|
163 | </div> | |
164 | </div> |
|
164 | </div> | |
165 | <div class="box box-left" style="clear:left"> |
|
165 | <div class="box box-left" style="clear:left"> | |
166 | <!-- box / title --> |
|
166 | <!-- box / title --> | |
167 | <div class="title"> |
|
167 | <div class="title"> | |
168 | <h5>${_('Allowed IP addresses')}</h5> |
|
168 | <h5>${_('Allowed IP addresses')}</h5> | |
169 | </div> |
|
169 | </div> | |
170 |
|
170 | |||
171 | <div class="ips_wrap"> |
|
171 | <div class="ips_wrap"> | |
172 | <table class="noborder"> |
|
172 | <table class="noborder"> | |
173 | %if c.user_ip_map: |
|
173 | %if c.user_ip_map: | |
174 | %for ip in c.user_ip_map: |
|
174 | %for ip in c.user_ip_map: | |
175 | <tr> |
|
175 | <tr> | |
176 | <td><div class="ip">${ip.ip_addr}</div></td> |
|
176 | <td><div class="ip">${ip.ip_addr}</div></td> | |
177 | <td><div class="ip">${h.ip_range(ip.ip_addr)}</div></td> |
|
177 | <td><div class="ip">${h.ip_range(ip.ip_addr)}</div></td> | |
178 | <td> |
|
178 | <td> | |
179 | ${h.form(url('user_ips_delete', id=c.user.user_id),method='delete')} |
|
179 | ${h.form(url('user_ips_delete', id=c.user.user_id),method='delete')} | |
180 | ${h.hidden('del_ip',ip.ip_id)} |
|
180 | ${h.hidden('del_ip',ip.ip_id)} | |
181 | ${h.hidden('default_user', 'True')} |
|
181 | ${h.hidden('default_user', 'True')} | |
182 | ${h.submit('remove_',_('delete'),id="remove_ip_%s" % ip.ip_id, |
|
182 | ${h.submit('remove_',_('delete'),id="remove_ip_%s" % ip.ip_id, | |
183 | class_="delete_icon action_button", onclick="return confirm('"+_('Confirm to delete this ip: %s') % ip.ip_addr+"');")} |
|
183 | class_="delete_icon action_button", onclick="return confirm('"+_('Confirm to delete this ip: %s') % ip.ip_addr+"');")} | |
184 | ${h.end_form()} |
|
184 | ${h.end_form()} | |
185 | </td> |
|
185 | </td> | |
186 | </tr> |
|
186 | </tr> | |
187 | %endfor |
|
187 | %endfor | |
188 | %else: |
|
188 | %else: | |
189 | <tr><td><div class="ip">${_('All IP addresses are allowed')}</div></td></tr> |
|
189 | <tr><td><div class="ip">${_('All IP addresses are allowed')}</div></td></tr> | |
190 | %endif |
|
190 | %endif | |
191 | </table> |
|
191 | </table> | |
192 | </div> |
|
192 | </div> | |
193 |
|
193 | |||
194 | ${h.form(url('user_ips', id=c.user.user_id),method='put')} |
|
194 | ${h.form(url('user_ips', id=c.user.user_id),method='put')} | |
195 | <div class="form"> |
|
195 | <div class="form"> | |
196 | <!-- fields --> |
|
196 | <!-- fields --> | |
197 | <div class="fields"> |
|
197 | <div class="fields"> | |
198 | <div class="field"> |
|
198 | <div class="field"> | |
199 | <div class="label"> |
|
199 | <div class="label"> | |
200 | <label for="new_ip">${_('New ip address')}:</label> |
|
200 | <label for="new_ip">${_('New ip address')}:</label> | |
201 | </div> |
|
201 | </div> | |
202 | <div class="input"> |
|
202 | <div class="input"> | |
203 | ${h.hidden('default_user', 'True')} |
|
203 | ${h.hidden('default_user', 'True')} | |
204 | ${h.text('new_ip', class_='medium')} |
|
204 | ${h.text('new_ip', class_='medium')} | |
205 | </div> |
|
205 | </div> | |
206 | </div> |
|
206 | </div> | |
207 | <div class="buttons"> |
|
207 | <div class="buttons"> | |
208 | ${h.submit('save',_('Add'),class_="ui-btn large")} |
|
208 | ${h.submit('save',_('Add'),class_="ui-btn large")} | |
209 | ${h.reset('reset',_('Reset'),class_="ui-btn large")} |
|
209 | ${h.reset('reset',_('Reset'),class_="ui-btn large")} | |
210 | </div> |
|
210 | </div> | |
211 | </div> |
|
211 | </div> | |
212 | </div> |
|
212 | </div> | |
213 | ${h.end_form()} |
|
213 | ${h.end_form()} | |
214 | </div> |
|
214 | </div> | |
215 | </%def> |
|
215 | </%def> |
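
The "Allowed IP addresses" block above only lists and deletes whitelist entries; enforcement happens server-side against the stored IP+mask rules. A hedged sketch of how such a check could be written with the same ``ipaddr`` library that ``ValidIp()`` uses (the function name and the rule format are assumptions, not code from this changeset)::

    import ipaddr


    def ip_allowed(client_ip, allowed_rules):
        """Return True if client_ip falls inside any CIDR rule in allowed_rules.

        An empty rule list means no restriction, mirroring the
        'All IP addresses are allowed' row rendered by the template above.
        """
        if not allowed_rules:
            return True
        client = ipaddr.IPAddress(client_ip)
        return any(client in ipaddr.IPNetwork(rule) for rule in allowed_rules)


    print(ip_allowed('192.168.1.7', ['192.168.1.0/24']))   # True
    print(ip_allowed('10.0.0.1', ['192.168.1.0/24']))      # False
    print(ip_allowed('10.0.0.1', []))                      # True (no rules configured)
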
@@ -1,340 +1,340 b'' | |||||
1 | <%page args="parent" /> |
|
1 | <%page args="parent" /> | |
2 | <div class="box"> |
|
2 | <div class="box"> | |
3 | <!-- box / title --> |
|
3 | <!-- box / title --> | |
4 | <div class="title"> |
|
4 | <div class="title"> | |
5 | <h5> |
|
5 | <h5> | |
6 | <input class="q_filter_box" id="q_filter" size="15" type="text" name="filter" value="${_('quick filter...')}"/> ${parent.breadcrumbs()} <span id="repo_count">0</span> ${_('repositories')} |
|
6 | <input class="q_filter_box" id="q_filter" size="15" type="text" name="filter" value="${_('quick filter...')}"/> ${parent.breadcrumbs()} <span id="repo_count">0</span> ${_('repositories')} | |
7 | </h5> |
|
7 | </h5> | |
8 | %if c.rhodecode_user.username != 'default': |
|
8 | %if c.rhodecode_user.username != 'default': | |
9 | <ul class="links"> |
|
9 | <ul class="links"> | |
10 | %if h.HasPermissionAny('hg.admin','hg.create.repository')() or h.HasReposGroupPermissionAny('group.write', 'group.admin')(c.group.group_name if c.group else None): |
|
10 | %if h.HasPermissionAny('hg.admin','hg.create.repository')() or h.HasReposGroupPermissionAny('group.write', 'group.admin')(c.group.group_name if c.group else None): | |
11 | <li> |
|
11 | <li> | |
12 | %if c.group: |
|
12 | %if c.group: | |
13 | <span>${h.link_to(_('Add repository'),h.url('admin_settings_create_repository',parent_group=c.group.group_id))}</span> |
|
13 | <span>${h.link_to(_('Add repository'),h.url('admin_settings_create_repository',parent_group=c.group.group_id))}</span> | |
14 | %if h.HasPermissionAny('hg.admin')() or h.HasReposGroupPermissionAny('group.admin')(c.group.group_name): |
|
14 | %if h.HasPermissionAny('hg.admin')() or h.HasReposGroupPermissionAny('group.admin')(c.group.group_name): | |
15 | <span>${h.link_to(_(u'Add group'),h.url('new_repos_group', parent_group=c.group.group_id))}</span> |
|
15 | <span>${h.link_to(_(u'Add group'),h.url('new_repos_group', parent_group=c.group.group_id))}</span> | |
16 | %endif |
|
16 | %endif | |
17 | %else: |
|
17 | %else: | |
18 | <span>${h.link_to(_('Add repository'),h.url('admin_settings_create_repository'))}</span> |
|
18 | <span>${h.link_to(_('Add repository'),h.url('admin_settings_create_repository'))}</span> | |
19 | %if h.HasPermissionAny('hg.admin')(): |
|
19 | %if h.HasPermissionAny('hg.admin')(): | |
20 | <span>${h.link_to(_(u'Add group'),h.url('new_repos_group'))}</span> |
|
20 | <span>${h.link_to(_(u'Add group'),h.url('new_repos_group'))}</span> | |
21 | %endif |
|
21 | %endif | |
22 | %endif |
|
22 | %endif | |
23 | </li> |
|
23 | </li> | |
24 | %endif |
|
24 | %endif | |
25 | %if c.group and h.HasReposGroupPermissionAny('group.admin')(c.group.group_name): |
|
25 | %if c.group and h.HasReposGroupPermissionAny('group.admin')(c.group.group_name): | |
26 | <li> |
|
26 | <li> | |
27 | <span>${h.link_to(_('Edit group'),h.url('edit_repos_group',group_name=c.group.group_name), title=_('You have admin right to this group, and can edit it'))}</span> |
|
27 | <span>${h.link_to(_('Edit group'),h.url('edit_repos_group',group_name=c.group.group_name), title=_('You have admin right to this group, and can edit it'))}</span> | |
28 | </li> |
|
28 | </li> | |
29 | %endif |
|
29 | %endif | |
30 | </ul> |
|
30 | </ul> | |
31 | %endif |
|
31 | %endif | |
32 | </div> |
|
32 | </div> | |
33 | <!-- end box / title --> |
|
33 | <!-- end box / title --> | |
34 | <div class="table"> |
|
34 | <div class="table"> | |
35 | % if c.groups: |
|
35 | % if c.groups: | |
36 | <div id='groups_list_wrap' class="yui-skin-sam"> |
|
36 | <div id='groups_list_wrap' class="yui-skin-sam"> | |
37 | <table id="groups_list"> |
|
37 | <table id="groups_list"> | |
38 | <thead> |
|
38 | <thead> | |
39 | <tr> |
|
39 | <tr> | |
40 | <th class="left"><a href="#">${_('Group name')}</a></th> |
|
40 | <th class="left"><a href="#">${_('Group name')}</a></th> | |
41 | <th class="left"><a href="#">${_('Description')}</a></th> |
|
41 | <th class="left"><a href="#">${_('Description')}</a></th> | |
42 | ##<th class="left"><a href="#">${_('Number of repositories')}</a></th> |
|
42 | ##<th class="left"><a href="#">${_('Number of repositories')}</a></th> | |
43 | </tr> |
|
43 | </tr> | |
44 | </thead> |
|
44 | </thead> | |
45 |
|
45 | |||
46 | ## REPO GROUPS |
|
46 | ## REPO GROUPS | |
47 | % for gr in c.groups: |
|
47 | % for gr in c.groups: | |
48 | <tr> |
|
48 | <tr> | |
49 | <td> |
|
49 | <td> | |
50 | <div style="white-space: nowrap"> |
|
50 | <div style="white-space: nowrap"> | |
51 | <img class="icon" alt="${_('Repository group')}" src="${h.url('/images/icons/database_link.png')}"/> |
|
51 | <img class="icon" alt="${_('Repository group')}" src="${h.url('/images/icons/database_link.png')}"/> | |
52 | ${h.link_to(gr.name,url('repos_group_home',group_name=gr.group_name))} |
|
52 | ${h.link_to(gr.name,url('repos_group_home',group_name=gr.group_name))} | |
53 | </div> |
|
53 | </div> | |
54 | </td> |
|
54 | </td> | |
55 | %if c.visual.stylify_metatags: |
|
55 | %if c.visual.stylify_metatags: | |
56 | <td>${h.urlify_text(h.desc_stylize(gr.group_description))}</td> |
|
56 | <td>${h.urlify_text(h.desc_stylize(gr.group_description))}</td> | |
57 | %else: |
|
57 | %else: | |
58 | <td>${gr.group_description}</td> |
|
58 | <td>${gr.group_description}</td> | |
59 | %endif |
|
59 | %endif | |
60 | ## this is commented out since for multi nested repos can be HEAVY! |
|
60 | ## this is commented out since for multi nested repos can be HEAVY! | |
61 | ## in number of executed queries during traversing uncomment at will |
|
61 | ## in number of executed queries during traversing uncomment at will | |
62 | ##<td><b>${gr.repositories_recursive_count}</b></td> |
|
62 | ##<td><b>${gr.repositories_recursive_count}</b></td> | |
63 | </tr> |
|
63 | </tr> | |
64 | % endfor |
|
64 | % endfor | |
65 | </table> |
|
65 | </table> | |
66 | </div> |
|
66 | </div> | |
67 | <div id="group-user-paginator" style="padding: 0px 0px 0px 0px"></div> |
|
67 | <div id="group-user-paginator" style="padding: 0px 0px 0px 0px"></div> | |
68 | <div style="height: 20px"></div> |
|
68 | <div style="height: 20px"></div> | |
69 | % endif |
|
69 | % endif | |
70 | <div id="welcome" style="display:none;text-align:center"> |
|
70 | <div id="welcome" style="display:none;text-align:center"> | |
71 | <h1><a href="${h.url('home')}">${c.rhodecode_name} ${c.rhodecode_version}</a></h1> |
|
71 | <h1><a href="${h.url('home')}">${c.rhodecode_name} ${c.rhodecode_version}</a></h1> | |
72 | </div> |
|
72 | </div> | |
73 | <%cnt=0%> |
|
73 | <%cnt=0%> | |
74 | <%namespace name="dt" file="/data_table/_dt_elements.html"/> |
|
74 | <%namespace name="dt" file="/data_table/_dt_elements.html"/> | |
75 | % if c.visual.lightweight_dashboard is False: |
|
75 | % if c.visual.lightweight_dashboard is False: | |
76 | ## old full detailed version |
|
76 | ## old full detailed version | |
77 | <div id='repos_list_wrap' class="yui-skin-sam"> |
|
77 | <div id='repos_list_wrap' class="yui-skin-sam"> | |
78 | <table id="repos_list"> |
|
78 | <table id="repos_list"> | |
79 | <thead> |
|
79 | <thead> | |
80 | <tr> |
|
80 | <tr> | |
81 | <th class="left"></th> |
|
81 | <th class="left"></th> | |
82 | <th class="left">${_('Name')}</th> |
|
82 | <th class="left">${_('Name')}</th> | |
83 | <th class="left">${_('Description')}</th> |
|
83 | <th class="left">${_('Description')}</th> | |
84 | <th class="left">${_('Last change')}</th> |
|
84 | <th class="left">${_('Last change')}</th> | |
85 | <th class="left">${_('Tip')}</th> |
|
85 | <th class="left">${_('Tip')}</th> | |
86 | <th class="left">${_('Owner')}</th> |
|
86 | <th class="left">${_('Owner')}</th> | |
87 | <th class="left">${_('Atom')}</th> |
|
87 | <th class="left">${_('Atom')}</th> | |
88 | </tr> |
|
88 | </tr> | |
89 | </thead> |
|
89 | </thead> | |
90 | <tbody> |
|
90 | <tbody> | |
91 | %for cnt,repo in enumerate(c.repos_list): |
|
91 | %for cnt,repo in enumerate(c.repos_list): | |
92 | <tr class="parity${(cnt+1)%2}"> |
|
92 | <tr class="parity${(cnt+1)%2}"> | |
93 | ##QUICK MENU |
|
93 | ##QUICK MENU | |
94 | <td class="quick_repo_menu"> |
|
94 | <td class="quick_repo_menu"> | |
95 | ${dt.quick_menu(repo['name'])} |
|
95 | ${dt.quick_menu(repo['name'])} | |
96 | </td> |
|
96 | </td> | |
97 | ##REPO NAME AND ICONS |
|
97 | ##REPO NAME AND ICONS | |
98 | <td class="reponame"> |
|
98 | <td class="reponame"> | |
99 | ${dt.repo_name(repo['name'],repo['dbrepo']['repo_type'],repo['dbrepo']['private'],h.AttributeDict(repo['dbrepo_fork']),pageargs.get('short_repo_names'))} |
|
99 | ${dt.repo_name(repo['name'],repo['dbrepo']['repo_type'],repo['dbrepo']['private'],h.AttributeDict(repo['dbrepo_fork']),pageargs.get('short_repo_names'))} | |
100 | </td> |
|
100 | </td> | |
101 | ##DESCRIPTION |
|
101 | ##DESCRIPTION | |
102 | <td><span class="tooltip" title="${h.tooltip(repo['description'])}"> |
|
102 | <td><span class="tooltip" title="${h.tooltip(repo['description'])}"> | |
103 | %if c.visual.stylify_metatags: |
|
103 | %if c.visual.stylify_metatags: | |
104 | ${h.urlify_text(h.desc_stylize(h.truncate(repo['description'],60)))}</span> |
|
104 | ${h.urlify_text(h.desc_stylize(h.truncate(repo['description'],60)))}</span> | |
105 | %else: |
|
105 | %else: | |
106 | ${h.truncate(repo['description'],60)}</span> |
|
106 | ${h.truncate(repo['description'],60)}</span> | |
107 | %endif |
|
107 | %endif | |
108 | </td> |
|
108 | </td> | |
109 | ##LAST CHANGE DATE |
|
109 | ##LAST CHANGE DATE | |
110 | <td> |
|
110 | <td> | |
111 | ${dt.last_change(repo['last_change'])} |
|
111 | ${dt.last_change(repo['last_change'])} | |
112 | </td> |
|
112 | </td> | |
113 | ##LAST REVISION |
|
113 | ##LAST REVISION | |
114 | <td> |
|
114 | <td> | |
115 | ${dt.revision(repo['name'],repo['rev'],repo['tip'],repo['author'],repo['last_msg'])} |
|
115 | ${dt.revision(repo['name'],repo['rev'],repo['tip'],repo['author'],repo['last_msg'])} | |
116 | </td> |
|
116 | </td> | |
117 | ## |
|
117 | ## | |
118 | <td title="${repo['contact']}">${h.person(repo['contact'])}</td> |
|
118 | <td title="${repo['contact']}">${h.person(repo['contact'])}</td> | |
119 | <td> |
|
119 | <td> | |
120 | ${dt.atom(repo['name'])} |
|
120 | ${dt.atom(repo['name'])} | |
121 | </td> |
|
121 | </td> | |
122 | </tr> |
|
122 | </tr> | |
123 | %endfor |
|
123 | %endfor | |
124 | </tbody> |
|
124 | </tbody> | |
125 | </table> |
|
125 | </table> | |
126 | </div> |
|
126 | </div> | |
127 | % else: |
|
127 | % else: | |
128 | ## lightweight version |
|
128 | ## lightweight version | |
129 | <div class="yui-skin-sam" id="repos_list_wrap"></div> |
|
129 | <div class="yui-skin-sam" id="repos_list_wrap"></div> | |
130 | <div id="user-paginator" style="padding: 0px 0px 0px 0px"></div> |
|
130 | <div id="user-paginator" style="padding: 0px 0px 0px 0px"></div> | |
131 | % endif |
|
131 | % endif | |
132 | </div> |
|
132 | </div> | |
133 | </div> |
|
133 | </div> | |
134 | % if c.visual.lightweight_dashboard is False: |
|
134 | % if c.visual.lightweight_dashboard is False: | |
135 | <script> |
|
135 | <script> | |
136 | YUD.get('repo_count').innerHTML = ${cnt+1 if cnt else 0}; |
|
136 | YUD.get('repo_count').innerHTML = ${cnt+1 if cnt else 0}; | |
137 |
|
137 | |||
138 | // groups table sorting |
|
138 | // groups table sorting | |
139 | var myColumnDefs = [ |
|
139 | var myColumnDefs = [ | |
140 | {key:"name",label:"${_('Group name')}",sortable:true, |
|
140 | {key:"name",label:"${_('Group name')}",sortable:true, | |
141 | sortOptions: { sortFunction: groupNameSort }}, |
|
141 | sortOptions: { sortFunction: groupNameSort }}, | |
142 | {key:"desc",label:"${_('Description')}",sortable:true}, |
|
142 | {key:"desc",label:"${_('Description')}",sortable:true}, | |
143 | ]; |
|
143 | ]; | |
144 |
|
144 | |||
145 | var myDataSource = new YAHOO.util.DataSource(YUD.get("groups_list")); |
|
145 | var myDataSource = new YAHOO.util.DataSource(YUD.get("groups_list")); | |
146 |
|
146 | |||
147 | myDataSource.responseType = YAHOO.util.DataSource.TYPE_HTMLTABLE; |
|
147 | myDataSource.responseType = YAHOO.util.DataSource.TYPE_HTMLTABLE; | |
148 | myDataSource.responseSchema = { |
|
148 | myDataSource.responseSchema = { | |
149 | fields: [ |
|
149 | fields: [ | |
150 | {key:"name"}, |
|
150 | {key:"name"}, | |
151 | {key:"desc"}, |
|
151 | {key:"desc"}, | |
152 | ] |
|
152 | ] | |
153 | }; |
|
153 | }; | |
154 |
|
154 | |||
155 | var myDataTable = new YAHOO.widget.DataTable("groups_list_wrap", myColumnDefs, myDataSource,{ |
|
155 | var myDataTable = new YAHOO.widget.DataTable("groups_list_wrap", myColumnDefs, myDataSource,{ | |
156 | sortedBy:{key:"name",dir:"asc"}, |
|
156 | sortedBy:{key:"name",dir:"asc"}, | |
157 | paginator: new YAHOO.widget.Paginator({ |
|
157 | paginator: new YAHOO.widget.Paginator({ | |
158 | rowsPerPage: 50, |
|
158 | rowsPerPage: 50, | |
159 | alwaysVisible: false, |
|
159 | alwaysVisible: false, | |
160 | template : "{PreviousPageLink} {FirstPageLink} {PageLinks} {LastPageLink} {NextPageLink}", |
|
160 | template : "{PreviousPageLink} {FirstPageLink} {PageLinks} {LastPageLink} {NextPageLink}", | |
161 | pageLinks: 5, |
|
161 | pageLinks: 5, | |
162 | containerClass: 'pagination-wh', |
|
162 | containerClass: 'pagination-wh', | |
163 | currentPageClass: 'pager_curpage', |
|
163 | currentPageClass: 'pager_curpage', | |
164 | pageLinkClass: 'pager_link', |
|
164 | pageLinkClass: 'pager_link', | |
165 | nextPageLinkLabel: '>', |
|
165 | nextPageLinkLabel: '>', | |
166 | previousPageLinkLabel: '<', |
|
166 | previousPageLinkLabel: '<', | |
167 | firstPageLinkLabel: '<<', |
|
167 | firstPageLinkLabel: '<<', | |
168 | lastPageLinkLabel: '>>', |
|
168 | lastPageLinkLabel: '>>', | |
169 | containers:['group-user-paginator'] |
|
169 | containers:['group-user-paginator'] | |
170 | }), |
|
170 | }), | |
171 | MSG_SORTASC:"${_('Click to sort ascending')}", |
|
171 | MSG_SORTASC:"${_('Click to sort ascending')}", | |
172 | MSG_SORTDESC:"${_('Click to sort descending')}" |
|
172 | MSG_SORTDESC:"${_('Click to sort descending')}" | |
173 | }); |
|
173 | }); | |
174 |
|
174 | |||
175 | // main table sorting |
|
175 | // main table sorting | |
176 | var myColumnDefs = [ |
|
176 | var myColumnDefs = [ | |
177 | {key:"menu",label:"",sortable:false,className:"quick_repo_menu hidden"}, |
|
177 | {key:"menu",label:"",sortable:false,className:"quick_repo_menu hidden"}, | |
178 | {key:"name",label:"${_('Name')}",sortable:true, |
|
178 | {key:"name",label:"${_('Name')}",sortable:true, | |
179 | sortOptions: { sortFunction: nameSort }}, |
|
179 | sortOptions: { sortFunction: nameSort }}, | |
180 | {key:"desc",label:"${_('Description')}",sortable:true}, |
|
180 | {key:"desc",label:"${_('Description')}",sortable:true}, | |
181 | {key:"last_change",label:"${_('Last Change')}",sortable:true, |
|
181 | {key:"last_change",label:"${_('Last Change')}",sortable:true, | |
182 | sortOptions: { sortFunction: ageSort }}, |
|
182 | sortOptions: { sortFunction: ageSort }}, | |
183 | {key:"tip",label:"${_('Tip')}",sortable:true, |
|
183 | {key:"tip",label:"${_('Tip')}",sortable:true, | |
184 | sortOptions: { sortFunction: revisionSort }}, |
|
184 | sortOptions: { sortFunction: revisionSort }}, | |
185 | {key:"owner",label:"${_('Owner')}",sortable:true}, |
|
185 | {key:"owner",label:"${_('Owner')}",sortable:true}, | |
186 | {key:"atom",label:"",sortable:false}, |
|
186 | {key:"atom",label:"",sortable:false}, | |
187 | ]; |
|
187 | ]; | |
188 |
|
188 | |||
189 | var myDataSource = new YAHOO.util.DataSource(YUD.get("repos_list")); |
|
189 | var myDataSource = new YAHOO.util.DataSource(YUD.get("repos_list")); | |
190 |
|
190 | |||
191 | myDataSource.responseType = YAHOO.util.DataSource.TYPE_HTMLTABLE; |
|
191 | myDataSource.responseType = YAHOO.util.DataSource.TYPE_HTMLTABLE; | |
192 |
|
192 | |||
193 | myDataSource.responseSchema = { |
|
193 | myDataSource.responseSchema = { | |
194 | fields: [ |
|
194 | fields: [ | |
195 | {key:"menu"}, |
|
195 | {key:"menu"}, | |
196 | //{key:"raw_name"}, |
|
196 | //{key:"raw_name"}, | |
197 | {key:"name"}, |
|
197 | {key:"name"}, | |
198 | {key:"desc"}, |
|
198 | {key:"desc"}, | |
199 | {key:"last_change"}, |
|
199 | {key:"last_change"}, | |
200 | {key:"tip"}, |
|
200 | {key:"tip"}, | |
201 | {key:"owner"}, |
|
201 | {key:"owner"}, | |
202 | {key:"atom"}, |
|
202 | {key:"atom"}, | |
203 | ] |
|
203 | ] | |
204 | }; |
|
204 | }; | |
205 |
|
205 | |||
206 | var myDataTable = new YAHOO.widget.DataTable("repos_list_wrap", myColumnDefs, myDataSource, |
|
206 | var myDataTable = new YAHOO.widget.DataTable("repos_list_wrap", myColumnDefs, myDataSource, | |
207 | { |
|
207 | { | |
208 | sortedBy:{key:"name",dir:"asc"}, |
|
208 | sortedBy:{key:"name",dir:"asc"}, | |
209 | MSG_SORTASC:"${_('Click to sort ascending')}", |
|
209 | MSG_SORTASC:"${_('Click to sort ascending')}", | |
210 | MSG_SORTDESC:"${_('Click to sort descending')}", |
|
210 | MSG_SORTDESC:"${_('Click to sort descending')}", | |
211 | MSG_EMPTY:"${_('No records found.')}", |
|
211 | MSG_EMPTY:"${_('No records found.')}", | |
212 | MSG_ERROR:"${_('Data error.')}", |
|
212 | MSG_ERROR:"${_('Data error.')}", | |
213 | MSG_LOADING:"${_('Loading...')}", |
|
213 | MSG_LOADING:"${_('Loading...')}", | |
214 | } |
|
214 | } | |
215 | ); |
|
215 | ); | |
216 | myDataTable.subscribe('postRenderEvent',function(oArgs) { |
|
216 | myDataTable.subscribe('postRenderEvent',function(oArgs) { | |
217 | tooltip_activate(); |
|
217 | tooltip_activate(); | |
218 | quick_repo_menu(); |
|
218 | quick_repo_menu(); | |
219 | var func = function(node){ |
|
219 | var func = function(node){ | |
220 | return node.parentNode.parentNode.parentNode.parentNode; |
|
220 | return node.parentNode.parentNode.parentNode.parentNode; | |
221 | } |
|
221 | } | |
222 | q_filter('q_filter',YUQ('div.table tr td a.repo_name'),func); |
|
222 | q_filter('q_filter',YUQ('div.table tr td a.repo_name'),func); | |
223 | }); |
|
223 | }); | |
224 |
|
224 | |||
225 | </script> |
|
225 | </script> | |
226 | % else: |
|
226 | % else: | |
227 | <script> |
|
227 | <script> | |
228 | var data = ${c.data|n}; |
|
228 | var data = ${c.data|n}; | |
229 | var myDataSource = new YAHOO.util.DataSource(data); |
|
229 | var myDataSource = new YAHOO.util.DataSource(data); | |
230 | myDataSource.responseType = YAHOO.util.DataSource.TYPE_JSON; |
|
230 | myDataSource.responseType = YAHOO.util.DataSource.TYPE_JSON; | |
231 |
|
231 | |||
232 | myDataSource.responseSchema = { |
|
232 | myDataSource.responseSchema = { | |
233 | resultsList: "records", |
|
233 | resultsList: "records", | |
234 | fields: [ |
|
234 | fields: [ | |
235 | {key:"menu"}, |
|
235 | {key:"menu"}, | |
236 | {key:"raw_name"}, |
|
236 | {key:"raw_name"}, | |
237 | {key:"name"}, |
|
237 | {key:"name"}, | |
238 | {key:"desc"}, |
|
238 | {key:"desc"}, | |
239 | {key:"last_change"}, |
|
239 | {key:"last_change"}, | |
240 | {key:"last_changeset"}, |
|
240 | {key:"last_changeset"}, | |
241 | {key:"owner"}, |
|
241 | {key:"owner"}, | |
242 | {key:"atom"}, |
|
242 | {key:"atom"}, | |
243 | ] |
|
243 | ] | |
244 | }; |
|
244 | }; | |
245 | myDataSource.doBeforeCallback = function(req,raw,res,cb) { |
|
245 | myDataSource.doBeforeCallback = function(req,raw,res,cb) { | |
246 | // This is the filter function |
|
246 | // This is the filter function | |
247 | var data = res.results || [], |
|
247 | var data = res.results || [], | |
248 | filtered = [], |
|
248 | filtered = [], | |
249 | i,l; |
|
249 | i,l; | |
250 |
|
250 | |||
251 | if (req) { |
|
251 | if (req) { | |
252 | req = req.toLowerCase(); |
|
252 | req = req.toLowerCase(); | |
253 | for (i = 0; i<data.length; i++) { |
|
253 | for (i = 0; i<data.length; i++) { | |
254 | var pos = data[i].raw_name.toLowerCase().indexOf(req) |
|
254 | var pos = data[i].raw_name.toLowerCase().indexOf(req) | |
255 | if (pos != -1) { |
|
255 | if (pos != -1) { | |
256 | filtered.push(data[i]); |
|
256 | filtered.push(data[i]); | |
257 | } |
|
257 | } | |
258 | } |
|
258 | } | |
259 | res.results = filtered; |
|
259 | res.results = filtered; | |
260 | } |
|
260 | } | |
261 | YUD.get('repo_count').innerHTML = res.results.length; |
|
261 | YUD.get('repo_count').innerHTML = res.results.length; | |
262 | return res; |
|
262 | return res; | |
263 | } |
|
263 | } | |
264 |
|
264 | |||
265 | // main table sorting |
|
265 | // main table sorting | |
266 | var myColumnDefs = [ |
|
266 | var myColumnDefs = [ | |
267 | {key:"menu",label:"",sortable:false,className:"quick_repo_menu hidden"}, |
|
267 | {key:"menu",label:"",sortable:false,className:"quick_repo_menu hidden"}, | |
268 | {key:"name",label:"${_('Name')}",sortable:true, |
|
268 | {key:"name",label:"${_('Name')}",sortable:true, | |
269 | sortOptions: { sortFunction: nameSort }}, |
|
269 | sortOptions: { sortFunction: nameSort }}, | |
270 | {key:"desc",label:"${_('Description')}",sortable:true}, |
|
270 | {key:"desc",label:"${_('Description')}",sortable:true}, | |
271 | {key:"last_change",label:"${_('Last Change')}",sortable:true, |
|
271 | {key:"last_change",label:"${_('Last Change')}",sortable:true, | |
272 | sortOptions: { sortFunction: ageSort }}, |
|
272 | sortOptions: { sortFunction: ageSort }}, | |
273 | {key:"last_changeset",label:"${_('Tip')}",sortable:true, |
|
273 | {key:"last_changeset",label:"${_('Tip')}",sortable:true, | |
274 | sortOptions: { sortFunction: revisionSort }}, |
|
274 | sortOptions: { sortFunction: revisionSort }}, | |
275 | {key:"owner",label:"${_('Owner')}",sortable:true}, |
|
275 | {key:"owner",label:"${_('Owner')}",sortable:true}, | |
276 | {key:"atom",label:"",sortable:false}, |
|
276 | {key:"atom",label:"",sortable:false}, | |
277 | ]; |
|
277 | ]; | |
278 |
|
278 | |||
279 | var myDataTable = new YAHOO.widget.DataTable("repos_list_wrap", myColumnDefs, myDataSource,{ |
|
279 | var myDataTable = new YAHOO.widget.DataTable("repos_list_wrap", myColumnDefs, myDataSource,{ | |
280 | sortedBy:{key:"name",dir:"asc"}, |
|
280 | sortedBy:{key:"name",dir:"asc"}, | |
281 | paginator: new YAHOO.widget.Paginator({ |
|
281 | paginator: new YAHOO.widget.Paginator({ | |
282 | rowsPerPage: ${c.visual.lightweight_dashboard_items}, |
|
282 | rowsPerPage: ${c.visual.lightweight_dashboard_items}, | |
283 | alwaysVisible: false, |
|
283 | alwaysVisible: false, | |
284 | template : "{PreviousPageLink} {FirstPageLink} {PageLinks} {LastPageLink} {NextPageLink}", |
|
284 | template : "{PreviousPageLink} {FirstPageLink} {PageLinks} {LastPageLink} {NextPageLink}", | |
285 | pageLinks: 5, |
|
285 | pageLinks: 5, | |
286 | containerClass: 'pagination-wh', |
|
286 | containerClass: 'pagination-wh', | |
287 | currentPageClass: 'pager_curpage', |
|
287 | currentPageClass: 'pager_curpage', | |
288 | pageLinkClass: 'pager_link', |
|
288 | pageLinkClass: 'pager_link', | |
289 | nextPageLinkLabel: '>', |
|
289 | nextPageLinkLabel: '>', | |
290 | previousPageLinkLabel: '<', |
|
290 | previousPageLinkLabel: '<', | |
291 | firstPageLinkLabel: '<<', |
|
291 | firstPageLinkLabel: '<<', | |
292 | lastPageLinkLabel: '>>', |
|
292 | lastPageLinkLabel: '>>', | |
293 | containers:['user-paginator'] |
|
293 | containers:['user-paginator'] | |
294 | }), |
|
294 | }), | |
295 |
|
295 | |||
296 | MSG_SORTASC:"${_('Click to sort ascending')}", |
|
296 | MSG_SORTASC:"${_('Click to sort ascending')}", | |
297 | MSG_SORTDESC:"${_('Click to sort descending')}", |
|
297 | MSG_SORTDESC:"${_('Click to sort descending')}", | |
298 | MSG_EMPTY:"${_('No records found.')}", |
|
298 | MSG_EMPTY:"${_('No records found.')}", | |
299 | MSG_ERROR:"${_('Data error.')}", |
|
299 | MSG_ERROR:"${_('Data error.')}", | |
300 | MSG_LOADING:"${_('Loading...')}", |
|
300 | MSG_LOADING:"${_('Loading...')}", | |
301 | } |
|
301 | } | |
302 | ); |
|
302 | ); | |
303 | myDataTable.subscribe('postRenderEvent',function(oArgs) { |
|
303 | myDataTable.subscribe('postRenderEvent',function(oArgs) { | |
304 | tooltip_activate(); |
|
304 | tooltip_activate(); | |
305 | quick_repo_menu(); |
|
305 | quick_repo_menu(); | |
306 | }); |
|
306 | }); | |
307 |
|
307 | |||
308 | var filterTimeout = null; |
|
308 | var filterTimeout = null; | |
309 |
|
309 | |||
310 | updateFilter = function () { |
|
310 | updateFilter = function () { | |
311 | // Reset timeout |
|
311 | // Reset timeout | |
312 | filterTimeout = null; |
|
312 | filterTimeout = null; | |
313 |
|
313 | |||
314 | // Reset sort |
|
314 | // Reset sort | |
315 | var state = myDataTable.getState(); |
|
315 | var state = myDataTable.getState(); | |
316 | state.sortedBy = {key:'name', dir:YAHOO.widget.DataTable.CLASS_ASC}; |
|
316 | state.sortedBy = {key:'name', dir:YAHOO.widget.DataTable.CLASS_ASC}; | |
317 |
|
317 | |||
318 | // Get filtered data |
|
318 | // Get filtered data | |
319 | myDataSource.sendRequest(YUD.get('q_filter').value,{ |
|
319 | myDataSource.sendRequest(YUD.get('q_filter').value,{ | |
320 | success : myDataTable.onDataReturnInitializeTable, |
|
320 | success : myDataTable.onDataReturnInitializeTable, | |
321 | failure : myDataTable.onDataReturnInitializeTable, |
|
321 | failure : myDataTable.onDataReturnInitializeTable, | |
322 | scope : myDataTable, |
|
322 | scope : myDataTable, | |
323 | argument: state |
|
323 | argument: state | |
324 | }); |
|
324 | }); | |
325 |
|
325 | |||
326 | }; |
|
326 | }; | |
327 | YUE.on('q_filter','click',function(){ |
|
327 | YUE.on('q_filter','click',function(){ | |
328 | if(!YUD.hasClass('q_filter', 'loaded')){ |
|
328 | if(!YUD.hasClass('q_filter', 'loaded')){ | |
329 | YUD.get('q_filter').value = ''; |
|
329 | YUD.get('q_filter').value = ''; | |
330 | //TODO: load here full list later to do search within groups |
|
330 | //TODO: load here full list later to do search within groups | |
331 | YUD.addClass('q_filter', 'loaded'); |
|
331 | YUD.addClass('q_filter', 'loaded'); | |
332 | } |
|
332 | } | |
333 | }); |
|
333 | }); | |
334 |
|
334 | |||
335 | YUE.on('q_filter','keyup',function (e) { |
|
335 | YUE.on('q_filter','keyup',function (e) { | |
336 | clearTimeout(filterTimeout); |
|
336 | clearTimeout(filterTimeout); | |
337 | filterTimeout = setTimeout(updateFilter,600); |
|
337 | filterTimeout = setTimeout(updateFilter,600); | |
338 | }); |
|
338 | }); | |
339 | </script> |
|
339 | </script> | |
340 | % endif |
|
340 | % endif |
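
When the lightweight dashboard is enabled, the second ``<script>`` block above feeds the DataTable from ``var data = ${c.data|n};`` instead of scraping an HTML table. Judging only from its ``responseSchema`` (``resultsList: "records"`` plus the listed field keys), the controller-side payload would look roughly like the sketch below; the concrete values are illustrative assumptions, not output captured from RhodeCode::

    import json

    # Field names come from the responseSchema above; the values are made up.
    payload = {
        "records": [
            {
                "menu": "...",                      # presumably pre-rendered quick-menu HTML
                "raw_name": "my-repo",              # plain name, used by doBeforeCallback for filtering
                "name": "<a href='/my-repo'>my-repo</a>",
                "desc": "Example repository",
                "last_change": "2013-02-12 10:00:00",
                "last_changeset": "abcdef123456",
                "owner": "admin",
                "atom": "...",                      # presumably a pre-rendered atom-feed link
            },
        ],
    }

    # The Mako template injects this verbatim, so the controller would serialize
    # it to JSON along these lines before assigning it to c.data.
    print(json.dumps(payload, indent=2))
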