@@ -1,18 +1,19 @@
 List of contributors to RhodeCode project:
 Marcin Kuźmiński <marcin@python-works.com>
 Lukasz Balcerzak <lukaszbalcerzak@gmail.com>
 Jason Harris <jason@jasonfharris.com>
 Thayne Harbaugh <thayne@fusionio.com>
 cejones
 Thomas Waldmann <tw-public@gmx.de>
 Lorenzo M. Catucci <lorenzo@sancho.ccd.uniroma2.it>
 Dmitri Kuznetsov
 Jared Bunting <jared.bunting@peachjean.com>
 Steve Romanow <slestak989@gmail.com>
 Augosto Hermann <augusto.herrmann@planejamento.gov.br>
 Ankit Solanki <ankit.solanki@gmail.com>
 Liad Shani <liadff@gmail.com>
 Les Peabody <lpeabody@gmail.com>
 Jonas Oberschweiber <jonas.oberschweiber@d-velop.de>
 Matt Zuba <matt.zuba@goodwillaz.org>
-Aras Pranckevicius <aras@unity3d.com>
\ No newline at end of file
+Aras Pranckevicius <aras@unity3d.com>
+Tony Bussieres <t.bussieres@gmail.com>
@@ -1,516 +1,535 @@
 .. _changelog:
 
 Changelog
 =========
 
 
+1.3.2 (**2012-02-28**)
+----------------------
+
+news
+++++
+
+
+fixes
++++++
+
+- fixed git protocol issues with repos-groups
+- fixed git remote repos validator that prevented from cloning remote git repos
+- fixes #370 ending slashes fixes for repo and groups
+- fixes #368 improved git-protocol detection to handle other clients
+- fixes #366 When Setting Repository Group To Blank Repo Group Wont Be
+  Moved To Root
+- fixes #371 fixed issues with beaker/sqlalchemy and non-ascii cache keys
+- fixed #373 missing cascade drop on user_group_to_perm table
+
 1.3.1 (**2012-02-27**)
 ----------------------
 
 news
 ++++
 
 
 fixes
 +++++
 
 - redirection loop occurs when remember-me wasn't checked during login
 - fixes issues with git blob history generation
 - don't fetch branch for git in file history dropdown. Causes unneeded slowness
 
 1.3.0 (**2012-02-26**)
 ----------------------
 
 news
 ++++
 
 - code review, inspired by github code-comments
 - #215 rst and markdown README files support
 - #252 Container-based and proxy pass-through authentication support
 - #44 branch browser. Filtering of changelog by branches
 - mercurial bookmarks support
 - new hover top menu, optimized to add maximum size for important views
 - configurable clone url template with possibility to specify protocol like
   ssh:// or http:// and also manually alter other parts of clone_url.
 - enabled largefiles extension by default
 - optimized summary file pages and saved a lot of unused space in them
 - #239 option to manually mark repository as fork
 - #320 mapping of commit authors to RhodeCode users
 - #304 hashes are displayed using monospace font
 - diff configuration, toggle white lines and context lines
 - #307 configurable diffs, whitespace toggle, increasing context lines
 - sorting on branches, tags and bookmarks using YUI datatable
 - improved file filter on files page
 - implements #330 api method for listing nodes ar particular revision
 - #73 added linking issues in commit messages to chosen issue tracker url
   based on user defined regular expression
 - added linking of changesets in commit messages
 - new compact changelog with expandable commit messages
 - firstname and lastname are optional in user creation
 - #348 added post-create repository hook
 - #212 global encoding settings is now configurable from .ini files
 - #227 added repository groups permissions
 - markdown gets codehilite extensions
 - new API methods, delete_repositories, grante/revoke permissions for groups
   and repos
 
 
 fixes
 +++++
 
 - rewrote dbsession management for atomic operations, and better error handling
 - fixed sorting of repo tables
 - #326 escape of special html entities in diffs
 - normalized user_name => username in api attributes
 - fixes #298 ldap created users with mixed case emails created conflicts
   on saving a form
 - fixes issue when owner of a repo couldn't revoke permissions for users
   and groups
 - fixes #271 rare JSON serialization problem with statistics
 - fixes #337 missing validation check for conflicting names of a group with a
   repositories group
 - #340 fixed session problem for mysql and celery tasks
 - fixed #331 RhodeCode mangles repository names if the a repository group
   contains the "full path" to the repositories
 - #355 RhodeCode doesn't store encrypted LDAP passwords
 
 1.2.5 (**2012-01-28**)
 ----------------------
 
 news
 ++++
 
 fixes
 +++++
 
 - #340 Celery complains about MySQL server gone away, added session cleanup
   for celery tasks
 - #341 "scanning for repositories in None" log message during Rescan was missing
   a parameter
 - fixed creating archives with subrepos. Some hooks were triggered during that
   operation leading to crash.
 - fixed missing email in account page.
 - Reverted Mercurial to 2.0.1 for windows due to bug in Mercurial that makes
   forking on windows impossible
 
 1.2.4 (**2012-01-19**)
 ----------------------
 
 news
 ++++
 
 - RhodeCode is bundled with mercurial series 2.0.X by default, with
   full support to largefiles extension. Enabled by default in new installations
 - #329 Ability to Add/Remove Groups to/from a Repository via AP
 - added requires.txt file with requirements
 
 fixes
 +++++
 
 - fixes db session issues with celery when emailing admins
 - #331 RhodeCode mangles repository names if the a repository group
   contains the "full path" to the repositories
 - #298 Conflicting e-mail addresses for LDAP and RhodeCode users
 - DB session cleanup after hg protocol operations, fixes issues with
   `mysql has gone away` errors
 - #333 doc fixes for get_repo api function
 - #271 rare JSON serialization problem with statistics enabled
 - #337 Fixes issues with validation of repository name conflicting with
   a group name. A proper message is now displayed.
 - #292 made ldap_dn in user edit readonly, to get rid of confusion that field
   doesn't work
 - #316 fixes issues with web description in hgrc files
 
 1.2.3 (**2011-11-02**)
 ----------------------
 
 news
 ++++
 
 - added option to manage repos group for non admin users
 - added following API methods for get_users, create_user, get_users_groups,
   get_users_group, create_users_group, add_user_to_users_groups, get_repos,
   get_repo, create_repo, add_user_to_repo
 - implements #237 added password confirmation for my account
   and admin edit user.
 - implements #291 email notification for global events are now sent to all
   administrator users, and global config email.
 
 fixes
 +++++
 
 - added option for passing auth method for smtp mailer
 - #276 issue with adding a single user with id>10 to usergroups
 - #277 fixes windows LDAP settings in which missing values breaks the ldap auth
 - #288 fixes managing of repos in a group for non admin user
 
 1.2.2 (**2011-10-17**)
 ----------------------
 
 news
 ++++
 
 - #226 repo groups are available by path instead of numerical id
 
 fixes
 +++++
 
 - #259 Groups with the same name but with different parent group
 - #260 Put repo in group, then move group to another group -> repo becomes unavailable
 - #258 RhodeCode 1.2 assumes egg folder is writable (lockfiles problems)
 - #265 ldap save fails sometimes on converting attributes to booleans,
   added getter and setter into model that will prevent from this on db model level
 - fixed problems with timestamps issues #251 and #213
 - fixes #266 RhodeCode allows to create repo with the same name and in
   the same parent as group
 - fixes #245 Rescan of the repositories on Windows
 - fixes #248 cannot edit repos inside a group on windows
 - fixes #219 forking problems on windows
 
 1.2.1 (**2011-10-08**)
 ----------------------
 
 news
 ++++
 
 
 fixes
 +++++
 
 - fixed problems with basic auth and push problems
 - gui fixes
 - fixed logger
 
 1.2.0 (**2011-10-07**)
 ----------------------
 
 news
 ++++
 
 - implemented #47 repository groups
 - implemented #89 Can setup google analytics code from settings menu
 - implemented #91 added nicer looking archive urls with more download options
   like tags, branches
 - implemented #44 into file browsing, and added follow branch option
 - implemented #84 downloads can be enabled/disabled for each repository
 - anonymous repository can be cloned without having to pass default:default
   into clone url
 - fixed #90 whoosh indexer can index chooses repositories passed in command
   line
 - extended journal with day aggregates and paging
 - implemented #107 source code lines highlight ranges
 - implemented #93 customizable changelog on combined revision ranges -
   equivalent of githubs compare view
 - implemented #108 extended and more powerful LDAP configuration
 - implemented #56 users groups
 - major code rewrites optimized codes for speed and memory usage
 - raw and diff downloads are now in git format
 - setup command checks for write access to given path
 - fixed many issues with international characters and unicode. It uses utf8
   decode with replace to provide less errors even with non utf8 encoded strings
 - #125 added API KEY access to feeds
 - #109 Repository can be created from external Mercurial link (aka. remote
   repository, and manually updated (via pull) from admin panel
 - beta git support - push/pull server + basic view for git repos
 - added followers page and forks page
 - server side file creation (with binary file upload interface)
   and edition with commits powered by codemirror
 - #111 file browser file finder, quick lookup files on whole file tree
 - added quick login sliding menu into main page
 - changelog uses lazy loading of affected files details, in some scenarios
   this can improve speed of changelog page dramatically especially for
   larger repositories.
 - implements #214 added support for downloading subrepos in download menu.
 - Added basic API for direct operations on rhodecode via JSON
 - Implemented advanced hook management
 
 fixes
 +++++
 
 - fixed file browser bug, when switching into given form revision the url was
   not changing
 - fixed propagation to error controller on simplehg and simplegit middlewares
 - fixed error when trying to make a download on empty repository
 - fixed problem with '[' chars in commit messages in journal
 - fixed #99 Unicode errors, on file node paths with non utf-8 characters
 - journal fork fixes
 - removed issue with space inside renamed repository after deletion
 - fixed strange issue on formencode imports
 - fixed #126 Deleting repository on Windows, rename used incompatible chars.
 - #150 fixes for errors on repositories mapped in db but corrupted in
   filesystem
 - fixed problem with ascendant characters in realm #181
 - fixed problem with sqlite file based database connection pool
 - whoosh indexer and code stats share the same dynamic extensions map
 - fixes #188 - relationship delete of repo_to_perm entry on user removal
 - fixes issue #189 Trending source files shows "show more" when no more exist
 - fixes issue #197 Relative paths for pidlocks
 - fixes issue #198 password will require only 3 chars now for login form
 - fixes issue #199 wrong redirection for non admin users after creating a repository
 - fixes issues #202, bad db constraint made impossible to attach same group
   more than one time. Affects only mysql/postgres
 - fixes #218 os.kill patch for windows was missing sig param
 - improved rendering of dag (they are not trimmed anymore when number of
   heads exceeds 5)
 
 1.1.8 (**2011-04-12**)
 ----------------------
 
 news
 ++++
 
 - improved windows support
 
 fixes
 +++++
 
 - fixed #140 freeze of python dateutil library, since new version is python2.x
   incompatible
 - setup-app will check for write permission in given path
 - cleaned up license info issue #149
 - fixes for issues #137,#116 and problems with unicode and accented characters.
 - fixes crashes on gravatar, when passed in email as unicode
 - fixed tooltip flickering problems
 - fixed came_from redirection on windows
 - fixed logging modules, and sql formatters
 - windows fixes for os.kill issue #133
 - fixes path splitting for windows issues #148
 - fixed issue #143 wrong import on migration to 1.1.X
 - fixed problems with displaying binary files, thanks to Thomas Waldmann
 - removed name from archive files since it's breaking ui for long repo names
 - fixed issue with archive headers sent to browser, thanks to Thomas Waldmann
 - fixed compatibility for 1024px displays, and larger dpi settings, thanks to
   Thomas Waldmann
 - fixed issue #166 summary pager was skipping 10 revisions on second page
 
 
 1.1.7 (**2011-03-23**)
 ----------------------
 
 news
 ++++
 
 fixes
 +++++
 
 - fixed (again) #136 installation support for FreeBSD
 
 
 1.1.6 (**2011-03-21**)
 ----------------------
 
 news
 ++++
 
 fixes
 +++++
 
 - fixed #136 installation support for FreeBSD
 - RhodeCode will check for python version during installation
 
 1.1.5 (**2011-03-17**)
 ----------------------
 
 news
 ++++
 
 - basic windows support, by exchanging pybcrypt into sha256 for windows only
   highly inspired by idea of mantis406
 
 fixes
 +++++
 
 - fixed sorting by author in main page
 - fixed crashes with diffs on binary files
 - fixed #131 problem with boolean values for LDAP
 - fixed #122 mysql problems thanks to striker69
 - fixed problem with errors on calling raw/raw_files/annotate functions
   with unknown revisions
 - fixed returned rawfiles attachment names with international character
 - cleaned out docs, big thanks to Jason Harris
 
 1.1.4 (**2011-02-19**)
 ----------------------
 
 news
 ++++
 
 fixes
 +++++
 
 - fixed formencode import problem on settings page, that caused server crash
   when that page was accessed as first after server start
 - journal fixes
 - fixed option to access repository just by entering http://server/<repo_name>
 
 1.1.3 (**2011-02-16**)
 ----------------------
 
 news
 ++++
 
 - implemented #102 allowing the '.' character in username
 - added option to access repository just by entering http://server/<repo_name>
 - celery task ignores result for better performance
 
 fixes
 +++++
 
 - fixed ehlo command and non auth mail servers on smtp_lib. Thanks to
   apollo13 and Johan Walles
 - small fixes in journal
 - fixed problems with getting setting for celery from .ini files
 - registration, password reset and login boxes share the same title as main
   application now
 - fixed #113: to high permissions to fork repository
 - fixed problem with '[' chars in commit messages in journal
 - removed issue with space inside renamed repository after deletion
 - db transaction fixes when filesystem repository creation failed
 - fixed #106 relation issues on databases different than sqlite
 - fixed static files paths links to use of url() method
 
 1.1.2 (**2011-01-12**)
 ----------------------
 
 news
 ++++
 
 
 fixes
 +++++
 
 - fixes #98 protection against float division of percentage stats
 - fixed graph bug
 - forced webhelpers version since it was making troubles during installation
 
 1.1.1 (**2011-01-06**)
 ----------------------
 
 news
 ++++
 
 - added force https option into ini files for easier https usage (no need to
   set server headers with this options)
 - small css updates
 
 fixes
 +++++
 
 - fixed #96 redirect loop on files view on repositories without changesets
 - fixed #97 unicode string passed into server header in special cases (mod_wsgi)
   and server crashed with errors
 - fixed large tooltips problems on main page
 - fixed #92 whoosh indexer is more error proof
 
 1.1.0 (**2010-12-18**)
 ----------------------
 
 news
 ++++
 
 - rewrite of internals for vcs >=0.1.10
 - uses mercurial 1.7 with dotencode disabled for maintaining compatibility
   with older clients
 - anonymous access, authentication via ldap
 - performance upgrade for cached repos list - each repository has its own
   cache that's invalidated when needed.
 - performance upgrades on repositories with large amount of commits (20K+)
 - main page quick filter for filtering repositories
 - user dashboards with ability to follow chosen repositories actions
 - sends email to admin on new user registration
 - added cache/statistics reset options into repository settings
 - more detailed action logger (based on hooks) with pushed changesets lists
   and options to disable those hooks from admin panel
 - introduced new enhanced changelog for merges that shows more accurate results
 - new improved and faster code stats (based on pygments lexers mapping tables,
   showing up to 10 trending sources for each repository. Additionally stats
   can be disabled in repository settings.
 - gui optimizations, fixed application width to 1024px
 - added cut off (for large files/changesets) limit into config files
 - whoosh, celeryd, upgrade moved to paster command
 - other than sqlite database backends can be used
 
 fixes
 +++++
 
 - fixes #61 forked repo was showing only after cache expired
 - fixes #76 no confirmation on user deletes
 - fixes #66 Name field misspelled
 - fixes #72 block user removal when he owns repositories
 - fixes #69 added password confirmation fields
 - fixes #87 RhodeCode crashes occasionally on updating repository owner
 - fixes #82 broken annotations on files with more than 1 blank line at the end
 - a lot of fixes and tweaks for file browser
 - fixed detached session issues
 - fixed when user had no repos he would see all repos listed in my account
 - fixed ui() instance bug when global hgrc settings was loaded for server
   instance and all hgrc options were merged with our db ui() object
 - numerous small bugfixes
 
 (special thanks for TkSoh for detailed feedback)
 
 
 1.0.2 (**2010-11-12**)
 ----------------------
 
 news
 ++++
 
 - tested under python2.7
 - bumped sqlalchemy and celery versions
 
 fixes
 +++++
 
 - fixed #59 missing graph.js
 - fixed repo_size crash when repository had broken symlinks
 - fixed python2.5 crashes.
 
 
 1.0.1 (**2010-11-10**)
 ----------------------
 
 news
 ++++
 
 - small css updated
 
 fixes
 +++++
 
 - fixed #53 python2.5 incompatible enumerate calls
 - fixed #52 disable mercurial extension for web
 - fixed #51 deleting repositories don't delete it's dependent objects
 
 
 1.0.0 (**2010-11-02**)
 ----------------------
 
 - security bugfix simplehg wasn't checking for permissions on commands
   other than pull or push.
 - fixed doubled messages after push or pull in admin journal
 - templating and css corrections, fixed repo switcher on chrome, updated titles
 - admin menu accessible from options menu on repository view
 - permissions cached queries
 
 1.0.0rc4 (**2010-10-12**)
 --------------------------
 
 - fixed python2.5 missing simplejson imports (thanks to Jens Bäckman)
 - removed cache_manager settings from sqlalchemy meta
 - added sqlalchemy cache settings to ini files
 - validated password length and added second try of failure on paster setup-app
 - fixed setup database destroy prompt even when there was no db
 
 
 1.0.0rc3 (**2010-10-11**)
 -------------------------
 
 - fixed i18n during installation.
 
 1.0.0rc2 (**2010-10-11**)
 -------------------------
 
 - Disabled dirsize in file browser, it's causing nasty bug when dir renames
   occure. After vcs is fixed it'll be put back again.
-- templating/css rewrites, optimized css.
\ No newline at end of file
+- templating/css rewrites, optimized css.
@@ -1,18 +1,18 @@
 {% extends "basic/layout.html" %}
 
 {% block sidebarlogo %}
 <h3>Support RhodeCode development.</h3>
 <div style="text-align:center">
 <form action="https://www.paypal.com/cgi-bin/webscr" method="post">
 <input type="hidden" name="cmd" value="_s-xclick">
 <input type="hidden" name="hosted_button_id" value="8U2LLRPLBKWDU">
 <input style="border:0px !important" type="image" src="https://www.paypal.com/en_US/i/btn/btn_donate_SM.gif"
 border="0" name="submit" alt="PayPal - The safer, easier way to pay online!">
 <img alt="" border="0" src="https://www.paypal.com/en_US/i/scr/pixel.gif" width="1" height="1">
 </form>
 <div style="padding:5px">
 <a href="http://flattr.com/thing/167489/RhodeCode" target="_blank">
 <img src="http://api.flattr.com/button/flattr-badge-large.png" alt="Flattr this" title="Flattr this" border="0" /></a>
 </div>
 </div>
 {% endblock %}}
@@ -1,313 +1,318 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | rhodecode.controllers.admin.repos_groups |
|
3 | rhodecode.controllers.admin.repos_groups | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Repositories groups controller for RhodeCode |
|
6 | Repositories groups controller for RhodeCode | |
7 |
|
7 | |||
8 | :created_on: Mar 23, 2010 |
|
8 | :created_on: Mar 23, 2010 | |
9 | :author: marcink |
|
9 | :author: marcink | |
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> | |
11 | :license: GPLv3, see COPYING for more details. |
|
11 | :license: GPLv3, see COPYING for more details. | |
12 | """ |
|
12 | """ | |
13 | # This program is free software: you can redistribute it and/or modify |
|
13 | # This program is free software: you can redistribute it and/or modify | |
14 | # it under the terms of the GNU General Public License as published by |
|
14 | # it under the terms of the GNU General Public License as published by | |
15 | # the Free Software Foundation, either version 3 of the License, or |
|
15 | # the Free Software Foundation, either version 3 of the License, or | |
16 | # (at your option) any later version. |
|
16 | # (at your option) any later version. | |
17 | # |
|
17 | # | |
18 | # This program is distributed in the hope that it will be useful, |
|
18 | # This program is distributed in the hope that it will be useful, | |
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
21 | # GNU General Public License for more details. |
|
21 | # GNU General Public License for more details. | |
22 | # |
|
22 | # | |
23 | # You should have received a copy of the GNU General Public License |
|
23 | # You should have received a copy of the GNU General Public License | |
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
25 |
|
25 | |||
26 | import logging |
|
26 | import logging | |
27 | import traceback |
|
27 | import traceback | |
28 | import formencode |
|
28 | import formencode | |
29 |
|
29 | |||
30 | from formencode import htmlfill |
|
30 | from formencode import htmlfill | |
31 |
|
31 | |||
32 | from pylons import request, tmpl_context as c, url |
|
32 | from pylons import request, tmpl_context as c, url | |
33 | from pylons.controllers.util import redirect |
|
33 | from pylons.controllers.util import redirect | |
34 | from pylons.i18n.translation import _ |
|
34 | from pylons.i18n.translation import _ | |
35 |
|
35 | |||
36 | from sqlalchemy.exc import IntegrityError |
|
36 | from sqlalchemy.exc import IntegrityError | |
37 |
|
37 | |||
38 | from rhodecode.lib import helpers as h |
|
38 | from rhodecode.lib import helpers as h | |
39 | from rhodecode.lib.auth import LoginRequired, HasPermissionAnyDecorator,\ |
|
39 | from rhodecode.lib.auth import LoginRequired, HasPermissionAnyDecorator,\ | |
40 | HasReposGroupPermissionAnyDecorator |
|
40 | HasReposGroupPermissionAnyDecorator | |
41 | from rhodecode.lib.base import BaseController, render |
|
41 | from rhodecode.lib.base import BaseController, render | |
42 | from rhodecode.model.db import RepoGroup |
|
42 | from rhodecode.model.db import RepoGroup | |
43 | from rhodecode.model.repos_group import ReposGroupModel |
|
43 | from rhodecode.model.repos_group import ReposGroupModel | |
44 | from rhodecode.model.forms import ReposGroupForm |
|
44 | from rhodecode.model.forms import ReposGroupForm | |
45 | from rhodecode.model.meta import Session |
|
45 | from rhodecode.model.meta import Session | |
46 | from rhodecode.model.repo import RepoModel |
|
46 | from rhodecode.model.repo import RepoModel | |
47 | from webob.exc import HTTPInternalServerError |
|
47 | from webob.exc import HTTPInternalServerError | |
48 |
|
48 | |||
49 | log = logging.getLogger(__name__) |
|
49 | log = logging.getLogger(__name__) | |
50 |
|
50 | |||
51 |
|
51 | |||
52 | class ReposGroupsController(BaseController): |
|
52 | class ReposGroupsController(BaseController): | |
53 | """REST Controller styled on the Atom Publishing Protocol""" |
|
53 | """REST Controller styled on the Atom Publishing Protocol""" | |
54 | # To properly map this controller, ensure your config/routing.py |
|
54 | # To properly map this controller, ensure your config/routing.py | |
55 | # file has a resource setup: |
|
55 | # file has a resource setup: | |
56 | # map.resource('repos_group', 'repos_groups') |
|
56 | # map.resource('repos_group', 'repos_groups') | |
57 |
|
57 | |||
58 | @LoginRequired() |
|
58 | @LoginRequired() | |
59 | def __before__(self): |
|
59 | def __before__(self): | |
60 | super(ReposGroupsController, self).__before__() |
|
60 | super(ReposGroupsController, self).__before__() | |
61 |
|
61 | |||
62 | def __load_defaults(self): |
|
62 | def __load_defaults(self): | |
63 | c.repo_groups = RepoGroup.groups_choices() |
|
63 | c.repo_groups = RepoGroup.groups_choices() | |
64 | c.repo_groups_choices = map(lambda k: unicode(k[0]), c.repo_groups) |
|
64 | c.repo_groups_choices = map(lambda k: unicode(k[0]), c.repo_groups) | |
65 |
|
65 | |||
66 | repo_model = RepoModel() |
|
66 | repo_model = RepoModel() | |
67 | c.users_array = repo_model.get_users_js() |
|
67 | c.users_array = repo_model.get_users_js() | |
68 | c.users_groups_array = repo_model.get_users_groups_js() |
|
68 | c.users_groups_array = repo_model.get_users_groups_js() | |
69 |
|
69 | |||
70 | def __load_data(self, group_id): |
|
70 | def __load_data(self, group_id): | |
71 | """ |
|
71 | """ | |
72 | Load defaults settings for edit, and update |
|
72 | Load defaults settings for edit, and update | |
73 |
|
73 | |||
74 | :param group_id: |
|
74 | :param group_id: | |
75 | """ |
|
75 | """ | |
76 | self.__load_defaults() |
|
76 | self.__load_defaults() | |
77 |
|
77 | |||
78 | repo_group = RepoGroup.get(group_id) |
|
78 | repo_group = RepoGroup.get(group_id) | |
79 |
|
79 | |||
80 | data = repo_group.get_dict() |
|
80 | data = repo_group.get_dict() | |
81 |
|
81 | |||
82 | data['group_name'] = repo_group.name |
|
82 | data['group_name'] = repo_group.name | |
83 |
|
83 | |||
84 | # fill repository users |
|
84 | # fill repository users | |
85 | for p in repo_group.repo_group_to_perm: |
|
85 | for p in repo_group.repo_group_to_perm: | |
86 | data.update({'u_perm_%s' % p.user.username: |
|
86 | data.update({'u_perm_%s' % p.user.username: | |
87 | p.permission.permission_name}) |
|
87 | p.permission.permission_name}) | |
88 |
|
88 | |||
89 | # fill repository groups |
|
89 | # fill repository groups | |
90 | for p in repo_group.users_group_to_perm: |
|
90 | for p in repo_group.users_group_to_perm: | |
91 | data.update({'g_perm_%s' % p.users_group.users_group_name: |
|
91 | data.update({'g_perm_%s' % p.users_group.users_group_name: | |
92 | p.permission.permission_name}) |
|
92 | p.permission.permission_name}) | |
93 |
|
93 | |||
94 | return data |
|
94 | return data | |
95 |
|
95 | |||
96 | @HasPermissionAnyDecorator('hg.admin') |
|
96 | @HasPermissionAnyDecorator('hg.admin') | |
97 | def index(self, format='html'): |
|
97 | def index(self, format='html'): | |
98 | """GET /repos_groups: All items in the collection""" |
|
98 | """GET /repos_groups: All items in the collection""" | |
99 | # url('repos_groups') |
|
99 | # url('repos_groups') | |
100 | sk = lambda g: g.parents[0].group_name if g.parents else g.group_name |
|
100 | sk = lambda g: g.parents[0].group_name if g.parents else g.group_name | |
101 | c.groups = sorted(RepoGroup.query().all(), key=sk) |
|
101 | c.groups = sorted(RepoGroup.query().all(), key=sk) | |
102 | return render('admin/repos_groups/repos_groups_show.html') |
|
102 | return render('admin/repos_groups/repos_groups_show.html') | |
103 |
|
103 | |||
104 | @HasPermissionAnyDecorator('hg.admin') |
|
104 | @HasPermissionAnyDecorator('hg.admin') | |
105 | def create(self): |
|
105 | def create(self): | |
106 | """POST /repos_groups: Create a new item""" |
|
106 | """POST /repos_groups: Create a new item""" | |
107 | # url('repos_groups') |
|
107 | # url('repos_groups') | |
108 | self.__load_defaults() |
|
108 | self.__load_defaults() | |
109 | repos_group_form = ReposGroupForm(available_groups = |
|
109 | repos_group_form = ReposGroupForm(available_groups = | |
110 | c.repo_groups_choices)() |
|
110 | c.repo_groups_choices)() | |
111 | try: |
|
111 | try: | |
112 | form_result = repos_group_form.to_python(dict(request.POST)) |
|
112 | form_result = repos_group_form.to_python(dict(request.POST)) | |
113 | ReposGroupModel().create( |
|
113 | ReposGroupModel().create( | |
114 | group_name=form_result['group_name'], |
|
114 | group_name=form_result['group_name'], | |
115 | group_description=form_result['group_description'], |
|
115 | group_description=form_result['group_description'], | |
116 | parent=form_result['group_parent_id'] |
|
116 | parent=form_result['group_parent_id'] | |
117 | ) |
|
117 | ) | |
118 | Session.commit() |
|
118 | Session.commit() | |
119 | h.flash(_('created repos group %s') \ |
|
119 | h.flash(_('created repos group %s') \ | |
120 | % form_result['group_name'], category='success') |
|
120 | % form_result['group_name'], category='success') | |
121 | #TODO: in future action_logger(, '', '', '', self.sa)
122 | except formencode.Invalid, errors: |
|
122 | except formencode.Invalid, errors: | |
123 |
|
123 | |||
124 | return htmlfill.render( |
|
124 | return htmlfill.render( | |
125 | render('admin/repos_groups/repos_groups_add.html'), |
|
125 | render('admin/repos_groups/repos_groups_add.html'), | |
126 | defaults=errors.value, |
|
126 | defaults=errors.value, | |
127 | errors=errors.error_dict or {}, |
|
127 | errors=errors.error_dict or {}, | |
128 | prefix_error=False, |
|
128 | prefix_error=False, | |
129 | encoding="UTF-8") |
|
129 | encoding="UTF-8") | |
130 | except Exception: |
|
130 | except Exception: | |
131 | log.error(traceback.format_exc()) |
|
131 | log.error(traceback.format_exc()) | |
132 | h.flash(_('error occurred during creation of repos group %s') \ |
|
132 | h.flash(_('error occurred during creation of repos group %s') \ | |
133 | % request.POST.get('group_name'), category='error') |
|
133 | % request.POST.get('group_name'), category='error') | |
134 |
|
134 | |||
135 | return redirect(url('repos_groups')) |
|
135 | return redirect(url('repos_groups')) | |
136 |
|
136 | |||
137 | @HasPermissionAnyDecorator('hg.admin') |
|
137 | @HasPermissionAnyDecorator('hg.admin') | |
138 | def new(self, format='html'): |
|
138 | def new(self, format='html'): | |
139 | """GET /repos_groups/new: Form to create a new item""" |
|
139 | """GET /repos_groups/new: Form to create a new item""" | |
140 | # url('new_repos_group') |
|
140 | # url('new_repos_group') | |
141 | self.__load_defaults() |
|
141 | self.__load_defaults() | |
142 | return render('admin/repos_groups/repos_groups_add.html') |
|
142 | return render('admin/repos_groups/repos_groups_add.html') | |
143 |
|
143 | |||
144 | @HasPermissionAnyDecorator('hg.admin') |
|
144 | @HasPermissionAnyDecorator('hg.admin') | |
145 | def update(self, id): |
|
145 | def update(self, id): | |
146 | """PUT /repos_groups/id: Update an existing item""" |
|
146 | """PUT /repos_groups/id: Update an existing item""" | |
147 | # Forms posted to this method should contain a hidden field: |
|
147 | # Forms posted to this method should contain a hidden field: | |
148 | # <input type="hidden" name="_method" value="PUT" /> |
|
148 | # <input type="hidden" name="_method" value="PUT" /> | |
149 | # Or using helpers: |
|
149 | # Or using helpers: | |
150 | # h.form(url('repos_group', id=ID), |
|
150 | # h.form(url('repos_group', id=ID), | |
151 | # method='put') |
|
151 | # method='put') | |
152 | # url('repos_group', id=ID) |
|
152 | # url('repos_group', id=ID) | |
153 |
|
153 | |||
154 | self.__load_defaults() |
|
154 | self.__load_defaults() | |
155 | c.repos_group = RepoGroup.get(id) |
|
155 | c.repos_group = RepoGroup.get(id) | |
156 |
|
156 | |||
157 | repos_group_form = ReposGroupForm( |
|
157 | repos_group_form = ReposGroupForm( | |
158 | edit=True, |
|
158 | edit=True, | |
159 | old_data=c.repos_group.get_dict(), |
|
159 | old_data=c.repos_group.get_dict(), | |
160 | available_groups=c.repo_groups_choices |
|
160 | available_groups=c.repo_groups_choices | |
161 | )() |
|
161 | )() | |
162 | try: |
|
162 | try: | |
163 | form_result = repos_group_form.to_python(dict(request.POST)) |
|
163 | form_result = repos_group_form.to_python(dict(request.POST)) | |
164 | ReposGroupModel().update(id, form_result) |
|
164 | ReposGroupModel().update(id, form_result) | |
165 | Session.commit() |
|
165 | Session.commit() | |
166 | h.flash(_('updated repos group %s') \ |
|
166 | h.flash(_('updated repos group %s') \ | |
167 | % form_result['group_name'], category='success') |
|
167 | % form_result['group_name'], category='success') | |
168 | #TODO: in future action_logger(, '', '', '', self.sa)
169 | except formencode.Invalid, errors: |
|
169 | except formencode.Invalid, errors: | |
170 |
|
170 | |||
171 | return htmlfill.render( |
|
171 | return htmlfill.render( | |
172 | render('admin/repos_groups/repos_groups_edit.html'), |
|
172 | render('admin/repos_groups/repos_groups_edit.html'), | |
173 | defaults=errors.value, |
|
173 | defaults=errors.value, | |
174 | errors=errors.error_dict or {}, |
|
174 | errors=errors.error_dict or {}, | |
175 | prefix_error=False, |
|
175 | prefix_error=False, | |
176 | encoding="UTF-8") |
|
176 | encoding="UTF-8") | |
177 | except Exception: |
|
177 | except Exception: | |
178 | log.error(traceback.format_exc()) |
|
178 | log.error(traceback.format_exc()) | |
179 | h.flash(_('error occurred during update of repos group %s') \ |
|
179 | h.flash(_('error occurred during update of repos group %s') \ | |
180 | % request.POST.get('group_name'), category='error') |
|
180 | % request.POST.get('group_name'), category='error') | |
181 |
|
181 | |||
182 | return redirect(url('repos_groups')) |
|
182 | return redirect(url('repos_groups')) | |
183 |
|
183 | |||
184 | @HasPermissionAnyDecorator('hg.admin') |
|
184 | @HasPermissionAnyDecorator('hg.admin') | |
185 | def delete(self, id): |
|
185 | def delete(self, id): | |
186 | """DELETE /repos_groups/id: Delete an existing item""" |
|
186 | """DELETE /repos_groups/id: Delete an existing item""" | |
187 | # Forms posted to this method should contain a hidden field: |
|
187 | # Forms posted to this method should contain a hidden field: | |
188 | # <input type="hidden" name="_method" value="DELETE" /> |
|
188 | # <input type="hidden" name="_method" value="DELETE" /> | |
189 | # Or using helpers: |
|
189 | # Or using helpers: | |
190 | # h.form(url('repos_group', id=ID), |
|
190 | # h.form(url('repos_group', id=ID), | |
191 | # method='delete') |
|
191 | # method='delete') | |
192 | # url('repos_group', id=ID) |
|
192 | # url('repos_group', id=ID) | |
193 |
|
193 | |||
194 | gr = RepoGroup.get(id) |
|
194 | gr = RepoGroup.get(id) | |
195 | repos = gr.repositories.all() |
|
195 | repos = gr.repositories.all() | |
196 | if repos: |
|
196 | if repos: | |
197 | h.flash(_('This group contains %s repositories and cannot be '
198 | 'deleted' % len(repos)),
199 | category='error')
200 | return redirect(url('repos_groups')) |
|
200 | return redirect(url('repos_groups')) | |
201 |
|
201 | |||
202 | try: |
|
202 | try: | |
203 | ReposGroupModel().delete(id) |
|
203 | ReposGroupModel().delete(id) | |
204 | Session.commit() |
|
204 | Session.commit() | |
205 | h.flash(_('removed repos group %s' % gr.group_name), category='success') |
|
205 | h.flash(_('removed repos group %s' % gr.group_name), category='success') | |
206 | #TODO: in future action_logger(, '', '', '', self.sa) |
|
206 | #TODO: in future action_logger(, '', '', '', self.sa) | |
207 | except IntegrityError, e: |
|
207 | except IntegrityError, e: | |
208 | if e.message.find('groups_group_parent_id_fkey') != -1: |
|
208 | if e.message.find('groups_group_parent_id_fkey') != -1: | |
209 | log.error(traceback.format_exc()) |
|
209 | log.error(traceback.format_exc()) | |
210 | h.flash(_('Cannot delete this group, it still contains '
211 | 'subgroups'),
212 | category='warning')
213 | else: |
|
213 | else: | |
214 | log.error(traceback.format_exc()) |
|
214 | log.error(traceback.format_exc()) | |
215 | h.flash(_('error occurred during deletion of repos ' |
|
215 | h.flash(_('error occurred during deletion of repos ' | |
216 | 'group %s' % gr.group_name), category='error') |
|
216 | 'group %s' % gr.group_name), category='error') | |
217 |
|
217 | |||
218 | except Exception: |
|
218 | except Exception: | |
219 | log.error(traceback.format_exc()) |
|
219 | log.error(traceback.format_exc()) | |
220 | h.flash(_('error occurred during deletion of repos ' |
|
220 | h.flash(_('error occurred during deletion of repos ' | |
221 | 'group %s' % gr.group_name), category='error') |
|
221 | 'group %s' % gr.group_name), category='error') | |
222 |
|
222 | |||
223 | return redirect(url('repos_groups')) |
|
223 | return redirect(url('repos_groups')) | |
224 |
|
224 | |||
225 | @HasReposGroupPermissionAnyDecorator('group.admin') |
|
225 | @HasReposGroupPermissionAnyDecorator('group.admin') | |
226 | def delete_repos_group_user_perm(self, group_name): |
|
226 | def delete_repos_group_user_perm(self, group_name): | |
227 | """ |
|
227 | """ | |
228 | DELETE an existing repositories group permission user |
|
228 | DELETE an existing repositories group permission user | |
229 |
|
229 | |||
230 | :param group_name: |
|
230 | :param group_name: | |
231 | """ |
|
231 | """ | |
232 |
|
232 | |||
233 | try: |
|
233 | try: | |
234 | ReposGroupModel().revoke_user_permission( |
|
234 | ReposGroupModel().revoke_user_permission( | |
235 | repos_group=group_name, user=request.POST['user_id'] |
|
235 | repos_group=group_name, user=request.POST['user_id'] | |
236 | ) |
|
236 | ) | |
237 | Session.commit() |
|
237 | Session.commit() | |
238 | except Exception: |
|
238 | except Exception: | |
239 | log.error(traceback.format_exc()) |
|
239 | log.error(traceback.format_exc()) | |
240 | h.flash(_('An error occurred during deletion of group user'), |
|
240 | h.flash(_('An error occurred during deletion of group user'), | |
241 | category='error') |
|
241 | category='error') | |
242 | raise HTTPInternalServerError() |
|
242 | raise HTTPInternalServerError() | |
243 |
|
243 | |||
244 | @HasReposGroupPermissionAnyDecorator('group.admin') |
|
244 | @HasReposGroupPermissionAnyDecorator('group.admin') | |
245 | def delete_repos_group_users_group_perm(self, group_name): |
|
245 | def delete_repos_group_users_group_perm(self, group_name): | |
246 | """ |
|
246 | """ | |
247 | DELETE an existing repositories group permission users group |
|
247 | DELETE an existing repositories group permission users group | |
248 |
|
248 | |||
249 | :param group_name: |
|
249 | :param group_name: | |
250 | """ |
|
250 | """ | |
251 |
|
251 | |||
252 | try: |
|
252 | try: | |
253 | ReposGroupModel().revoke_users_group_permission( |
|
253 | ReposGroupModel().revoke_users_group_permission( | |
254 | repos_group=group_name, |
|
254 | repos_group=group_name, | |
255 | group_name=request.POST['users_group_id'] |
|
255 | group_name=request.POST['users_group_id'] | |
256 | ) |
|
256 | ) | |
257 | Session.commit() |
|
257 | Session.commit() | |
258 | except Exception: |
|
258 | except Exception: | |
259 | log.error(traceback.format_exc()) |
|
259 | log.error(traceback.format_exc()) | |
260 | h.flash(_('An error occurred during deletion of group' |
|
260 | h.flash(_('An error occurred during deletion of group' | |
261 | ' users groups'), |
|
261 | ' users groups'), | |
262 | category='error') |
|
262 | category='error') | |
263 | raise HTTPInternalServerError() |
|
263 | raise HTTPInternalServerError() | |
264 |
|
264 | |||
265 | def show_by_name(self, group_name):
266 | """
267 | This is a proxy that does a lookup group_name -> id, and shows
268 | the group by id view instead
269 | """
270 | group_name = group_name.rstrip('/')
271 | id_ = RepoGroup.get_by_group_name(group_name).group_id
272 | return self.show(id_)
268 |
|
273 | |||
269 | @HasReposGroupPermissionAnyDecorator('group.read', 'group.write', |
|
274 | @HasReposGroupPermissionAnyDecorator('group.read', 'group.write', | |
270 | 'group.admin') |
|
275 | 'group.admin') | |
271 | def show(self, id, format='html'): |
|
276 | def show(self, id, format='html'): | |
272 | """GET /repos_groups/id: Show a specific item""" |
|
277 | """GET /repos_groups/id: Show a specific item""" | |
273 | # url('repos_group', id=ID) |
|
278 | # url('repos_group', id=ID) | |
274 |
|
279 | |||
275 | c.group = RepoGroup.get(id) |
|
280 | c.group = RepoGroup.get(id) | |
276 |
|
281 | |||
277 | if c.group: |
|
282 | if c.group: | |
278 | c.group_repos = c.group.repositories.all() |
|
283 | c.group_repos = c.group.repositories.all() | |
279 | else: |
|
284 | else: | |
280 | return redirect(url('home')) |
|
285 | return redirect(url('home')) | |
281 |
|
286 | |||
282 | #overwrite our cached list with current filter |
|
287 | #overwrite our cached list with current filter | |
283 | gr_filter = c.group_repos |
|
288 | gr_filter = c.group_repos | |
284 | c.cached_repo_list = self.scm_model.get_repos(all_repos=gr_filter) |
|
289 | c.cached_repo_list = self.scm_model.get_repos(all_repos=gr_filter) | |
285 |
|
290 | |||
286 | c.repos_list = c.cached_repo_list |
|
291 | c.repos_list = c.cached_repo_list | |
287 |
|
292 | |||
288 | c.repo_cnt = 0 |
|
293 | c.repo_cnt = 0 | |
289 |
|
294 | |||
290 | c.groups = self.sa.query(RepoGroup).order_by(RepoGroup.group_name)\ |
|
295 | c.groups = self.sa.query(RepoGroup).order_by(RepoGroup.group_name)\ | |
291 | .filter(RepoGroup.group_parent_id == id).all() |
|
296 | .filter(RepoGroup.group_parent_id == id).all() | |
292 |
|
297 | |||
293 | return render('admin/repos_groups/repos_groups.html') |
|
298 | return render('admin/repos_groups/repos_groups.html') | |
294 |
|
299 | |||
295 | @HasPermissionAnyDecorator('hg.admin') |
|
300 | @HasPermissionAnyDecorator('hg.admin') | |
296 | def edit(self, id, format='html'): |
|
301 | def edit(self, id, format='html'): | |
297 | """GET /repos_groups/id/edit: Form to edit an existing item""" |
|
302 | """GET /repos_groups/id/edit: Form to edit an existing item""" | |
298 | # url('edit_repos_group', id=ID) |
|
303 | # url('edit_repos_group', id=ID) | |
299 |
|
304 | |||
300 | id_ = int(id) |
|
305 | id_ = int(id) | |
301 |
|
306 | |||
302 | c.repos_group = RepoGroup.get(id_) |
|
307 | c.repos_group = RepoGroup.get(id_) | |
303 | defaults = self.__load_data(id_) |
|
308 | defaults = self.__load_data(id_) | |
304 |
|
309 | |||
305 | # we need to exclude this group from the group list for editing |
|
310 | # we need to exclude this group from the group list for editing | |
306 | c.repo_groups = filter(lambda x: x[0] != id_, c.repo_groups) |
|
311 | c.repo_groups = filter(lambda x: x[0] != id_, c.repo_groups) | |
307 |
|
312 | |||
308 | return htmlfill.render( |
|
313 | return htmlfill.render( | |
309 | render('admin/repos_groups/repos_groups_edit.html'), |
|
314 | render('admin/repos_groups/repos_groups_edit.html'), | |
310 | defaults=defaults, |
|
315 | defaults=defaults, | |
311 | encoding="UTF-8", |
|
316 | encoding="UTF-8", | |
312 | force_defaults=False |
|
317 | force_defaults=False | |
313 | ) |
|
318 | ) |
@@ -1,226 +1,227 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | rhodecode.controllers.admin.users_groups |
|
3 | rhodecode.controllers.admin.users_groups | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Users Groups crud controller for pylons |
|
6 | Users Groups crud controller for pylons | |
7 |
|
7 | |||
8 | :created_on: Jan 25, 2011 |
|
8 | :created_on: Jan 25, 2011 | |
9 | :author: marcink |
|
9 | :author: marcink | |
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> | |
11 | :license: GPLv3, see COPYING for more details. |
|
11 | :license: GPLv3, see COPYING for more details. | |
12 | """ |
|
12 | """ | |
13 | # This program is free software: you can redistribute it and/or modify |
|
13 | # This program is free software: you can redistribute it and/or modify | |
14 | # it under the terms of the GNU General Public License as published by |
|
14 | # it under the terms of the GNU General Public License as published by | |
15 | # the Free Software Foundation, either version 3 of the License, or |
|
15 | # the Free Software Foundation, either version 3 of the License, or | |
16 | # (at your option) any later version. |
|
16 | # (at your option) any later version. | |
17 | # |
|
17 | # | |
18 | # This program is distributed in the hope that it will be useful, |
|
18 | # This program is distributed in the hope that it will be useful, | |
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
21 | # GNU General Public License for more details. |
|
21 | # GNU General Public License for more details. | |
22 | # |
|
22 | # | |
23 | # You should have received a copy of the GNU General Public License |
|
23 | # You should have received a copy of the GNU General Public License | |
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
25 |
|
25 | |||
26 | import logging |
|
26 | import logging | |
27 | import traceback |
|
27 | import traceback | |
28 | import formencode |
|
28 | import formencode | |
29 |
|
29 | |||
30 | from formencode import htmlfill |
|
30 | from formencode import htmlfill | |
31 | from pylons import request, session, tmpl_context as c, url, config |
|
31 | from pylons import request, session, tmpl_context as c, url, config | |
32 | from pylons.controllers.util import abort, redirect |
|
32 | from pylons.controllers.util import abort, redirect | |
33 | from pylons.i18n.translation import _ |
|
33 | from pylons.i18n.translation import _ | |
34 |
|
34 | |||
35 | from rhodecode.lib.exceptions import UsersGroupsAssignedException |
|
35 | from rhodecode.lib.exceptions import UsersGroupsAssignedException | |
36 | from rhodecode.lib import helpers as h, safe_unicode |
|
36 | from rhodecode.lib import helpers as h, safe_unicode | |
37 | from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator |
|
37 | from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator | |
38 | from rhodecode.lib.base import BaseController, render |
|
38 | from rhodecode.lib.base import BaseController, render | |
39 |
|
39 | |||
40 | from rhodecode.model.users_group import UsersGroupModel |
|
40 | from rhodecode.model.users_group import UsersGroupModel | |
41 |
|
41 | |||
42 | from rhodecode.model.db import User, UsersGroup, Permission, UsersGroupToPerm |
|
42 | from rhodecode.model.db import User, UsersGroup, Permission, UsersGroupToPerm | |
43 | from rhodecode.model.forms import UsersGroupForm |
|
43 | from rhodecode.model.forms import UsersGroupForm | |
44 | from rhodecode.model.meta import Session |
|
44 | from rhodecode.model.meta import Session | |
45 |
|
45 | |||
46 | log = logging.getLogger(__name__) |
|
46 | log = logging.getLogger(__name__) | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | class UsersGroupsController(BaseController): |
|
49 | class UsersGroupsController(BaseController): | |
50 | """REST Controller styled on the Atom Publishing Protocol""" |
|
50 | """REST Controller styled on the Atom Publishing Protocol""" | |
51 | # To properly map this controller, ensure your config/routing.py |
|
51 | # To properly map this controller, ensure your config/routing.py | |
52 | # file has a resource setup: |
|
52 | # file has a resource setup: | |
53 | # map.resource('users_group', 'users_groups') |
|
53 | # map.resource('users_group', 'users_groups') | |
54 |
|
54 | |||
55 | @LoginRequired() |
|
55 | @LoginRequired() | |
56 | @HasPermissionAllDecorator('hg.admin') |
|
56 | @HasPermissionAllDecorator('hg.admin') | |
57 | def __before__(self): |
|
57 | def __before__(self): | |
58 | c.admin_user = session.get('admin_user') |
|
58 | c.admin_user = session.get('admin_user') | |
59 | c.admin_username = session.get('admin_username') |
|
59 | c.admin_username = session.get('admin_username') | |
60 | super(UsersGroupsController, self).__before__() |
|
60 | super(UsersGroupsController, self).__before__() | |
61 | c.available_permissions = config['available_permissions'] |
|
61 | c.available_permissions = config['available_permissions'] | |
62 |
|
62 | |||
63 | def index(self, format='html'): |
|
63 | def index(self, format='html'): | |
64 | """GET /users_groups: All items in the collection""" |
|
64 | """GET /users_groups: All items in the collection""" | |
65 | # url('users_groups') |
|
65 | # url('users_groups') | |
66 | c.users_groups_list = self.sa.query(UsersGroup).all() |
|
66 | c.users_groups_list = self.sa.query(UsersGroup).all() | |
67 | return render('admin/users_groups/users_groups.html') |
|
67 | return render('admin/users_groups/users_groups.html') | |
68 |
|
68 | |||
69 | def create(self): |
|
69 | def create(self): | |
70 | """POST /users_groups: Create a new item""" |
|
70 | """POST /users_groups: Create a new item""" | |
71 | # url('users_groups') |
|
71 | # url('users_groups') | |
72 |
|
72 | |||
73 | users_group_form = UsersGroupForm()() |
|
73 | users_group_form = UsersGroupForm()() | |
74 | try: |
|
74 | try: | |
75 | form_result = users_group_form.to_python(dict(request.POST)) |
|
75 | form_result = users_group_form.to_python(dict(request.POST)) | |
76 | UsersGroupModel().create(name=form_result['users_group_name'], |
|
76 | UsersGroupModel().create(name=form_result['users_group_name'], | |
77 | active=form_result['users_group_active']) |
|
77 | active=form_result['users_group_active']) | |
78 | h.flash(_('created users group %s') \ |
|
78 | h.flash(_('created users group %s') \ | |
79 | % form_result['users_group_name'], category='success') |
|
79 | % form_result['users_group_name'], category='success') | |
80 | #action_logger(self.rhodecode_user, 'new_user', '', '', self.sa) |
|
80 | #action_logger(self.rhodecode_user, 'new_user', '', '', self.sa) | |
81 | Session.commit() |
|
81 | Session.commit() | |
82 | except formencode.Invalid, errors: |
|
82 | except formencode.Invalid, errors: | |
83 | return htmlfill.render( |
|
83 | return htmlfill.render( | |
84 | render('admin/users_groups/users_group_add.html'), |
|
84 | render('admin/users_groups/users_group_add.html'), | |
85 | defaults=errors.value, |
|
85 | defaults=errors.value, | |
86 | errors=errors.error_dict or {}, |
|
86 | errors=errors.error_dict or {}, | |
87 | prefix_error=False, |
|
87 | prefix_error=False, | |
88 | encoding="UTF-8") |
|
88 | encoding="UTF-8") | |
89 | except Exception: |
|
89 | except Exception: | |
90 | log.error(traceback.format_exc()) |
|
90 | log.error(traceback.format_exc()) | |
91 | h.flash(_('error occurred during creation of users group %s') \ |
|
91 | h.flash(_('error occurred during creation of users group %s') \ | |
92 | % request.POST.get('users_group_name'), category='error') |
|
92 | % request.POST.get('users_group_name'), category='error') | |
93 |
|
93 | |||
94 | return redirect(url('users_groups')) |
|
94 | return redirect(url('users_groups')) | |
95 |
|
95 | |||
96 | def new(self, format='html'): |
|
96 | def new(self, format='html'): | |
97 | """GET /users_groups/new: Form to create a new item""" |
|
97 | """GET /users_groups/new: Form to create a new item""" | |
98 | # url('new_users_group') |
|
98 | # url('new_users_group') | |
99 | return render('admin/users_groups/users_group_add.html') |
|
99 | return render('admin/users_groups/users_group_add.html') | |
100 |
|
100 | |||
101 | def update(self, id): |
|
101 | def update(self, id): | |
102 | """PUT /users_groups/id: Update an existing item""" |
|
102 | """PUT /users_groups/id: Update an existing item""" | |
103 | # Forms posted to this method should contain a hidden field: |
|
103 | # Forms posted to this method should contain a hidden field: | |
104 | # <input type="hidden" name="_method" value="PUT" /> |
|
104 | # <input type="hidden" name="_method" value="PUT" /> | |
105 | # Or using helpers: |
|
105 | # Or using helpers: | |
106 | # h.form(url('users_group', id=ID), |
|
106 | # h.form(url('users_group', id=ID), | |
107 | # method='put') |
|
107 | # method='put') | |
108 | # url('users_group', id=ID) |
|
108 | # url('users_group', id=ID) | |
109 |
|
109 | |||
110 | c.users_group = UsersGroup.get(id) |
|
110 | c.users_group = UsersGroup.get(id) | |
111 | c.group_members_obj = [x.user for x in c.users_group.members] |
|
111 | c.group_members_obj = [x.user for x in c.users_group.members] | |
112 | c.group_members = [(x.user_id, x.username) for x in |
|
112 | c.group_members = [(x.user_id, x.username) for x in | |
113 | c.group_members_obj] |
|
113 | c.group_members_obj] | |
114 |
|
114 | |||
115 | c.available_members = [(x.user_id, x.username) for x in |
|
115 | c.available_members = [(x.user_id, x.username) for x in | |
116 | self.sa.query(User).all()] |
|
116 | self.sa.query(User).all()] | |
117 |
|
117 | |||
118 | available_members = [safe_unicode(x[0]) for x in c.available_members] |
|
118 | available_members = [safe_unicode(x[0]) for x in c.available_members] | |
119 |
|
119 | |||
120 | users_group_form = UsersGroupForm(edit=True, |
|
120 | users_group_form = UsersGroupForm(edit=True, | |
121 | old_data=c.users_group.get_dict(), |
|
121 | old_data=c.users_group.get_dict(), | |
122 | available_members=available_members)() |
|
122 | available_members=available_members)() | |
123 |
|
123 | |||
124 | try: |
|
124 | try: | |
125 | form_result = users_group_form.to_python(request.POST) |
|
125 | form_result = users_group_form.to_python(request.POST) | |
126 | UsersGroupModel().update(c.users_group, form_result) |
|
126 | UsersGroupModel().update(c.users_group, form_result) | |
127 | h.flash(_('updated users group %s') \ |
|
127 | h.flash(_('updated users group %s') \ | |
128 | % form_result['users_group_name'], |
|
128 | % form_result['users_group_name'], | |
129 | category='success') |
|
129 | category='success') | |
130 | #action_logger(self.rhodecode_user, 'new_user', '', '', self.sa) |
|
130 | #action_logger(self.rhodecode_user, 'new_user', '', '', self.sa) | |
131 | Session.commit() |
|
131 | Session.commit() | |
132 | except formencode.Invalid, errors: |
|
132 | except formencode.Invalid, errors: | |
133 | e = errors.error_dict or {} |
|
133 | e = errors.error_dict or {} | |
134 |
|
134 | |||
135 | perm = Permission.get_by_key('hg.create.repository') |
|
135 | perm = Permission.get_by_key('hg.create.repository') | |
136 | e.update({'create_repo_perm': |
|
136 | e.update({'create_repo_perm': | |
137 | UsersGroupModel().has_perm(id, perm)}) |
|
137 | UsersGroupModel().has_perm(id, perm)}) | |
138 |
|
138 | |||
139 | return htmlfill.render( |
|
139 | return htmlfill.render( | |
140 | render('admin/users_groups/users_group_edit.html'), |
|
140 | render('admin/users_groups/users_group_edit.html'), | |
141 | defaults=errors.value, |
|
141 | defaults=errors.value, | |
142 | errors=e, |
|
142 | errors=e, | |
143 | prefix_error=False, |
|
143 | prefix_error=False, | |
144 | encoding="UTF-8") |
|
144 | encoding="UTF-8") | |
145 | except Exception: |
|
145 | except Exception: | |
146 | log.error(traceback.format_exc()) |
|
146 | log.error(traceback.format_exc()) | |
147 | h.flash(_('error occurred during update of users group %s') \ |
|
147 | h.flash(_('error occurred during update of users group %s') \ | |
148 | % request.POST.get('users_group_name'), category='error') |
|
148 | % request.POST.get('users_group_name'), category='error') | |
149 |
|
149 | |||
150 | return redirect(url('users_groups')) |
|
150 | return redirect(url('users_groups')) | |
151 |
|
151 | |||
152 | def delete(self, id): |
|
152 | def delete(self, id): | |
153 | """DELETE /users_groups/id: Delete an existing item""" |
|
153 | """DELETE /users_groups/id: Delete an existing item""" | |
154 | # Forms posted to this method should contain a hidden field: |
|
154 | # Forms posted to this method should contain a hidden field: | |
155 | # <input type="hidden" name="_method" value="DELETE" /> |
|
155 | # <input type="hidden" name="_method" value="DELETE" /> | |
156 | # Or using helpers: |
|
156 | # Or using helpers: | |
157 | # h.form(url('users_group', id=ID), |
|
157 | # h.form(url('users_group', id=ID), | |
158 | # method='delete') |
|
158 | # method='delete') | |
159 | # url('users_group', id=ID) |
|
159 | # url('users_group', id=ID) | |
160 |
|
160 | |||
161 | try:
162 | UsersGroupModel().delete(id)
163 | Session.commit()
164 | h.flash(_('successfully deleted users group'), category='success')
165 | except UsersGroupsAssignedException, e:
166 | h.flash(e, category='error')
167 | except Exception:
168 | log.error(traceback.format_exc())
169 | h.flash(_('An error occurred during deletion of users group'),
170 | category='error')
171 | return redirect(url('users_groups'))
171 |
|
172 | |||
172 | def show(self, id, format='html'): |
|
173 | def show(self, id, format='html'): | |
173 | """GET /users_groups/id: Show a specific item""" |
|
174 | """GET /users_groups/id: Show a specific item""" | |
174 | # url('users_group', id=ID) |
|
175 | # url('users_group', id=ID) | |
175 |
|
176 | |||
176 | def edit(self, id, format='html'): |
|
177 | def edit(self, id, format='html'): | |
177 | """GET /users_groups/id/edit: Form to edit an existing item""" |
|
178 | """GET /users_groups/id/edit: Form to edit an existing item""" | |
178 | # url('edit_users_group', id=ID) |
|
179 | # url('edit_users_group', id=ID) | |
179 |
|
180 | |||
180 | c.users_group = self.sa.query(UsersGroup).get(id) |
|
181 | c.users_group = self.sa.query(UsersGroup).get(id) | |
181 | if not c.users_group: |
|
182 | if not c.users_group: | |
182 | return redirect(url('users_groups')) |
|
183 | return redirect(url('users_groups')) | |
183 |
|
184 | |||
184 | c.users_group.permissions = {} |
|
185 | c.users_group.permissions = {} | |
185 | c.group_members_obj = [x.user for x in c.users_group.members] |
|
186 | c.group_members_obj = [x.user for x in c.users_group.members] | |
186 | c.group_members = [(x.user_id, x.username) for x in |
|
187 | c.group_members = [(x.user_id, x.username) for x in | |
187 | c.group_members_obj] |
|
188 | c.group_members_obj] | |
188 | c.available_members = [(x.user_id, x.username) for x in |
|
189 | c.available_members = [(x.user_id, x.username) for x in | |
189 | self.sa.query(User).all()] |
|
190 | self.sa.query(User).all()] | |
190 | defaults = c.users_group.get_dict() |
|
191 | defaults = c.users_group.get_dict() | |
191 | perm = Permission.get_by_key('hg.create.repository') |
|
192 | perm = Permission.get_by_key('hg.create.repository') | |
192 | defaults.update({'create_repo_perm': |
|
193 | defaults.update({'create_repo_perm': | |
193 | UsersGroupModel().has_perm(c.users_group, perm)}) |
|
194 | UsersGroupModel().has_perm(c.users_group, perm)}) | |
194 | return htmlfill.render( |
|
195 | return htmlfill.render( | |
195 | render('admin/users_groups/users_group_edit.html'), |
|
196 | render('admin/users_groups/users_group_edit.html'), | |
196 | defaults=defaults, |
|
197 | defaults=defaults, | |
197 | encoding="UTF-8", |
|
198 | encoding="UTF-8", | |
198 | force_defaults=False |
|
199 | force_defaults=False | |
199 | ) |
|
200 | ) | |
200 |
|
201 | |||
201 | def update_perm(self, id): |
|
202 | def update_perm(self, id): | |
202 | """PUT /users_perm/id: Update an existing item""" |
|
203 | """PUT /users_perm/id: Update an existing item""" | |
203 | # url('users_group_perm', id=ID, method='put') |
|
204 | # url('users_group_perm', id=ID, method='put') | |
204 |
|
205 | |||
205 | grant_perm = request.POST.get('create_repo_perm', False) |
|
206 | grant_perm = request.POST.get('create_repo_perm', False) | |
206 |
|
207 | |||
207 | if grant_perm: |
|
208 | if grant_perm: | |
208 | perm = Permission.get_by_key('hg.create.none') |
|
209 | perm = Permission.get_by_key('hg.create.none') | |
209 | UsersGroupModel().revoke_perm(id, perm) |
|
210 | UsersGroupModel().revoke_perm(id, perm) | |
210 |
|
211 | |||
211 | perm = Permission.get_by_key('hg.create.repository') |
|
212 | perm = Permission.get_by_key('hg.create.repository') | |
212 | UsersGroupModel().grant_perm(id, perm) |
|
213 | UsersGroupModel().grant_perm(id, perm) | |
213 | h.flash(_("Granted 'repository create' permission to user"), |
|
214 | h.flash(_("Granted 'repository create' permission to user"), | |
214 | category='success') |
|
215 | category='success') | |
215 |
|
216 | |||
216 | Session.commit() |
|
217 | Session.commit() | |
217 | else: |
|
218 | else: | |
218 | perm = Permission.get_by_key('hg.create.repository') |
|
219 | perm = Permission.get_by_key('hg.create.repository') | |
219 | UsersGroupModel().revoke_perm(id, perm) |
|
220 | UsersGroupModel().revoke_perm(id, perm) | |
220 |
|
221 | |||
221 | perm = Permission.get_by_key('hg.create.none') |
|
222 | perm = Permission.get_by_key('hg.create.none') | |
222 | UsersGroupModel().grant_perm(id, perm) |
|
223 | UsersGroupModel().grant_perm(id, perm) | |
224 | h.flash(_("Revoked 'repository create' permission from user"),
224 | category='success') |
|
225 | category='success') | |
225 | Session.commit() |
|
226 | Session.commit() | |
226 | return redirect(url('edit_users_group', id=id)) |
|
227 | return redirect(url('edit_users_group', id=id)) |
@@ -1,465 +1,465 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | rhodecode.lib.__init__ |
|
3 | rhodecode.lib.__init__ | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Some simple helper functions |
|
6 | Some simple helper functions | |
7 |
|
7 | |||
8 | :created_on: Jan 5, 2011 |
|
8 | :created_on: Jan 5, 2011 | |
9 | :author: marcink |
|
9 | :author: marcink | |
10 | :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com> |
|
10 | :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com> | |
11 | :license: GPLv3, see COPYING for more details. |
|
11 | :license: GPLv3, see COPYING for more details. | |
12 | """ |
|
12 | """ | |
13 | # This program is free software: you can redistribute it and/or modify |
|
13 | # This program is free software: you can redistribute it and/or modify | |
14 | # it under the terms of the GNU General Public License as published by |
|
14 | # it under the terms of the GNU General Public License as published by | |
15 | # the Free Software Foundation, either version 3 of the License, or |
|
15 | # the Free Software Foundation, either version 3 of the License, or | |
16 | # (at your option) any later version. |
|
16 | # (at your option) any later version. | |
17 | # |
|
17 | # | |
18 | # This program is distributed in the hope that it will be useful, |
|
18 | # This program is distributed in the hope that it will be useful, | |
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
21 | # GNU General Public License for more details. |
|
21 | # GNU General Public License for more details. | |
22 | # |
|
22 | # | |
23 | # You should have received a copy of the GNU General Public License |
|
23 | # You should have received a copy of the GNU General Public License | |
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
25 |
|
25 | |||
26 | import os |
|
26 | import os | |
27 | import re |
|
27 | import re | |
28 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
28 | from rhodecode.lib.vcs.utils.lazy import LazyProperty | |
29 |
|
29 | |||
30 |
|
30 | |||
31 | def __get_lem(): |
|
31 | def __get_lem(): | |
32 | from pygments import lexers |
|
32 | from pygments import lexers | |
33 | from string import lower |
|
33 | from string import lower | |
34 | from collections import defaultdict |
|
34 | from collections import defaultdict | |
35 |
|
35 | |||
36 | d = defaultdict(lambda: []) |
|
36 | d = defaultdict(lambda: []) | |
37 |
|
37 | |||
38 | def __clean(s): |
|
38 | def __clean(s): | |
39 | s = s.lstrip('*') |
|
39 | s = s.lstrip('*') | |
40 | s = s.lstrip('.') |
|
40 | s = s.lstrip('.') | |
41 |
|
41 | |||
42 | if s.find('[') != -1: |
|
42 | if s.find('[') != -1: | |
43 | exts = [] |
|
43 | exts = [] | |
44 | start, stop = s.find('['), s.find(']') |
|
44 | start, stop = s.find('['), s.find(']') | |
45 |
|
45 | |||
46 | for suffix in s[start + 1:stop]: |
|
46 | for suffix in s[start + 1:stop]: | |
47 | exts.append(s[:s.find('[')] + suffix) |
|
47 | exts.append(s[:s.find('[')] + suffix) | |
48 | return map(lower, exts) |
|
48 | return map(lower, exts) | |
49 | else: |
|
49 | else: | |
50 | return map(lower, [s]) |
|
50 | return map(lower, [s]) | |
51 |
|
51 | |||
52 | for lx, t in sorted(lexers.LEXERS.items()): |
|
52 | for lx, t in sorted(lexers.LEXERS.items()): | |
53 | m = map(__clean, t[-2]) |
|
53 | m = map(__clean, t[-2]) | |
54 | if m: |
|
54 | if m: | |
55 | m = reduce(lambda x, y: x + y, m) |
|
55 | m = reduce(lambda x, y: x + y, m) | |
56 | for ext in m: |
|
56 | for ext in m: | |
57 | desc = lx.replace('Lexer', '') |
|
57 | desc = lx.replace('Lexer', '') | |
58 | d[ext].append(desc) |
|
58 | d[ext].append(desc) | |
59 |
|
59 | |||
60 | return dict(d) |
|
60 | return dict(d) | |
61 |
|
61 | |||
62 | # language map is also used by whoosh indexer, which for those specified |
|
62 | # language map is also used by whoosh indexer, which for those specified | |
63 | # extensions will index its content
64 | LANGUAGES_EXTENSIONS_MAP = __get_lem() |
|
64 | LANGUAGES_EXTENSIONS_MAP = __get_lem() | |
65 |
|
65 | |||
66 | # Additional mappings that are not present in the pygments lexers |
|
66 | # Additional mappings that are not present in the pygments lexers | |
67 | # NOTE: that this will overide any mappings in LANGUAGES_EXTENSIONS_MAP |
|
67 | # NOTE: that this will overide any mappings in LANGUAGES_EXTENSIONS_MAP | |
68 | ADDITIONAL_MAPPINGS = {'xaml': 'XAML'} |
|
68 | ADDITIONAL_MAPPINGS = {'xaml': 'XAML'} | |
69 |
|
69 | |||
70 | LANGUAGES_EXTENSIONS_MAP.update(ADDITIONAL_MAPPINGS) |
|
70 | LANGUAGES_EXTENSIONS_MAP.update(ADDITIONAL_MAPPINGS) | |
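For orientation, the combined map is keyed by lower-cased file extensions and is queried like a plain dict; a hedged sketch (the exact lexer lists depend on the installed Pygments version):

    >>> LANGUAGES_EXTENSIONS_MAP.get('py')    # e.g. ['Python'], straight from the Pygments lexer table
    >>> LANGUAGES_EXTENSIONS_MAP.get('xaml')  # 'XAML', forced by ADDITIONAL_MAPPINGS above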
71 |
|
71 | |||
72 | # list of readme files to search in file tree and display in summary |
|
72 | # list of readme files to search in file tree and display in summary | |
73 | # attached weights define the search order; lower is first
74 | ALL_READMES = [ |
|
74 | ALL_READMES = [ | |
75 | ('readme', 0), ('README', 0), ('Readme', 0), |
|
75 | ('readme', 0), ('README', 0), ('Readme', 0), | |
76 | ('doc/readme', 1), ('doc/README', 1), ('doc/Readme', 1), |
|
76 | ('doc/readme', 1), ('doc/README', 1), ('doc/Readme', 1), | |
77 | ('Docs/readme', 2), ('Docs/README', 2), ('Docs/Readme', 2), |
|
77 | ('Docs/readme', 2), ('Docs/README', 2), ('Docs/Readme', 2), | |
78 | ('DOCS/readme', 2), ('DOCS/README', 2), ('DOCS/Readme', 2), |
|
78 | ('DOCS/readme', 2), ('DOCS/README', 2), ('DOCS/Readme', 2), | |
79 | ('docs/readme', 2), ('docs/README', 2), ('docs/Readme', 2), |
|
79 | ('docs/readme', 2), ('docs/README', 2), ('docs/Readme', 2), | |
80 | ] |
|
80 | ] | |
81 |
|
81 | |||
82 | # extensions together with weights to search; lower is first
83 | RST_EXTS = [ |
|
83 | RST_EXTS = [ | |
84 | ('', 0), ('.rst', 1), ('.rest', 1), |
|
84 | ('', 0), ('.rst', 1), ('.rest', 1), | |
85 | ('.RST', 2), ('.REST', 2), |
|
85 | ('.RST', 2), ('.REST', 2), | |
86 | ('.txt', 3), ('.TXT', 3) |
|
86 | ('.txt', 3), ('.TXT', 3) | |
87 | ] |
|
87 | ] | |
88 |
|
88 | |||
89 | MARKDOWN_EXTS = [ |
|
89 | MARKDOWN_EXTS = [ | |
90 | ('.md', 1), ('.MD', 1), |
|
90 | ('.md', 1), ('.MD', 1), | |
91 | ('.mkdn', 2), ('.MKDN', 2), |
|
91 | ('.mkdn', 2), ('.MKDN', 2), | |
92 | ('.mdown', 3), ('.MDOWN', 3), |
|
92 | ('.mdown', 3), ('.MDOWN', 3), | |
93 | ('.markdown', 4), ('.MARKDOWN', 4) |
|
93 | ('.markdown', 4), ('.MARKDOWN', 4) | |
94 | ] |
|
94 | ] | |
95 |
|
95 | |||
96 | PLAIN_EXTS = [('.text', 2), ('.TEXT', 2)] |
|
96 | PLAIN_EXTS = [('.text', 2), ('.TEXT', 2)] | |
97 |
|
97 | |||
98 | ALL_EXTS = MARKDOWN_EXTS + RST_EXTS + PLAIN_EXTS |
|
98 | ALL_EXTS = MARKDOWN_EXTS + RST_EXTS + PLAIN_EXTS | |
99 |
|
99 | |||
100 |
|
100 | |||
101 | def str2bool(_str): |
|
101 | def str2bool(_str): | |
102 | """ |
|
102 | """ | |
103 | returns a True/False value from the given string; it tries to translate the
104 | string into a boolean
105 |
|
105 | |||
106 | :param _str: string value to translate into boolean |
|
106 | :param _str: string value to translate into boolean | |
107 | :rtype: boolean |
|
107 | :rtype: boolean | |
108 | :returns: boolean from given string |
|
108 | :returns: boolean from given string | |
109 | """ |
|
109 | """ | |
110 | if _str is None: |
|
110 | if _str is None: | |
111 | return False |
|
111 | return False | |
112 | if _str in (True, False): |
|
112 | if _str in (True, False): | |
113 | return _str |
|
113 | return _str | |
114 | _str = str(_str).strip().lower() |
|
114 | _str = str(_str).strip().lower() | |
115 | return _str in ('t', 'true', 'y', 'yes', 'on', '1') |
|
115 | return _str in ('t', 'true', 'y', 'yes', 'on', '1') | |
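A quick hedged sketch of how str2bool behaves; any value outside the listed true-words (and None) comes back as False:

    >>> str2bool('yes')
    True
    >>> str2bool('0')
    False
    >>> str2bool(None)
    False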
116 |
|
116 | |||
117 |
|
117 | |||
118 | def convert_line_endings(line, mode): |
|
118 | def convert_line_endings(line, mode): | |
119 | """ |
|
119 | """ | |
120 | Converts a given line's "line end" according to the given mode
121 |
|
121 | |||
122 | Available modes are:: |
|
122 | Available modes are:: | |
123 | 0 - Unix |
|
123 | 0 - Unix | |
124 | 1 - Mac |
|
124 | 1 - Mac | |
125 | 2 - DOS |
|
125 | 2 - DOS | |
126 |
|
126 | |||
127 | :param line: given line to convert |
|
127 | :param line: given line to convert | |
128 | :param mode: mode to convert to |
|
128 | :param mode: mode to convert to | |
129 | :rtype: str |
|
129 | :rtype: str | |
130 | :return: converted line according to mode |
|
130 | :return: converted line according to mode | |
131 | """ |
|
131 | """ | |
132 | from string import replace |
|
132 | from string import replace | |
133 |
|
133 | |||
134 | if mode == 0: |
|
134 | if mode == 0: | |
135 | line = replace(line, '\r\n', '\n') |
|
135 | line = replace(line, '\r\n', '\n') | |
136 | line = replace(line, '\r', '\n') |
|
136 | line = replace(line, '\r', '\n') | |
137 | elif mode == 1: |
|
137 | elif mode == 1: | |
138 | line = replace(line, '\r\n', '\r') |
|
138 | line = replace(line, '\r\n', '\r') | |
139 | line = replace(line, '\n', '\r') |
|
139 | line = replace(line, '\n', '\r') | |
140 | elif mode == 2: |
|
140 | elif mode == 2: | |
141 | line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line) |
|
141 | line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line) | |
142 | return line |
|
142 | return line | |
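A short illustration with made-up sample strings; mode 0 normalises to Unix endings, mode 2 to DOS:

    >>> convert_line_endings('foo\r\nbar\r\n', 0)
    'foo\nbar\n'
    >>> convert_line_endings('foo\nbar\n', 2)
    'foo\r\nbar\r\n'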
143 |
|
143 | |||
144 |
|
144 | |||
145 | def detect_mode(line, default): |
|
145 | def detect_mode(line, default): | |
146 | """ |
|
146 | """ | |
147 | Detects the line break for a given line; if no line break can be found,
148 | the given default value is returned
149 |
|
149 | |||
150 | :param line: str line |
|
150 | :param line: str line | |
151 | :param default: default |
|
151 | :param default: default | |
152 | :rtype: int |
|
152 | :rtype: int | |
153 | :return: value of line end, one of 0 - Unix, 1 - Mac, 2 - DOS
154 | """ |
|
154 | """ | |
155 | if line.endswith('\r\n'): |
|
155 | if line.endswith('\r\n'): | |
156 | return 2 |
|
156 | return 2 | |
157 | elif line.endswith('\n'): |
|
157 | elif line.endswith('\n'): | |
158 | return 0 |
|
158 | return 0 | |
159 | elif line.endswith('\r'): |
|
159 | elif line.endswith('\r'): | |
160 | return 1 |
|
160 | return 1 | |
161 | else: |
|
161 | else: | |
162 | return default |
|
162 | return default | |
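A hedged sketch of detect_mode on sample lines; the second argument is only used when no line break is present:

    >>> detect_mode('foo\r\n', 0)
    2
    >>> detect_mode('foo\r', 0)
    1
    >>> detect_mode('foo', 0)
    0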
163 |
|
163 | |||
164 |
|
164 | |||
165 | def generate_api_key(username, salt=None): |
|
165 | def generate_api_key(username, salt=None): | |
166 | """ |
|
166 | """ | |
167 | Generates unique API key for given username, if salt is not given |
|
167 | Generates unique API key for given username, if salt is not given | |
168 | it'll be generated from some random string |
|
168 | it'll be generated from some random string | |
169 |
|
169 | |||
170 | :param username: username as string |
|
170 | :param username: username as string | |
171 | :param salt: salt to hash generate KEY |
|
171 | :param salt: salt to hash generate KEY | |
172 | :rtype: str |
|
172 | :rtype: str | |
173 | :returns: sha1 hash from username+salt |
|
173 | :returns: sha1 hash from username+salt | |
174 | """ |
|
174 | """ | |
175 | from tempfile import _RandomNameSequence |
|
175 | from tempfile import _RandomNameSequence | |
176 | import hashlib |
|
176 | import hashlib | |
177 |
|
177 | |||
178 | if salt is None: |
|
178 | if salt is None: | |
179 | salt = _RandomNameSequence().next() |
|
179 | salt = _RandomNameSequence().next() | |
180 |
|
180 | |||
181 | return hashlib.sha1(username + salt).hexdigest() |
|
181 | return hashlib.sha1(username + salt).hexdigest() | |
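As a rough illustration (the username and salt below are placeholders), the result is a 40-character SHA1 hex digest and is deterministic for a fixed salt:

    >>> len(generate_api_key('example_user'))
    40
    >>> generate_api_key('example_user', salt='s3cr3t') == generate_api_key('example_user', salt='s3cr3t')
    True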
182 |
|
182 | |||
183 |
|
183 | |||
184 | def safe_unicode(str_, from_encoding=None): |
|
184 | def safe_unicode(str_, from_encoding=None): | |
185 | """ |
|
185 | """ | |
186 | safe unicode function. Does a few tricks to turn str_ into unicode
187 |
|
187 | |||
188 | In case of UnicodeDecodeError we try to return it with the encoding detected
189 | by the chardet library; if that fails, fall back to unicode with errors replaced
190 |
|
190 | |||
191 | :param str_: string to decode |
|
191 | :param str_: string to decode | |
192 | :rtype: unicode |
|
192 | :rtype: unicode | |
193 | :returns: unicode object |
|
193 | :returns: unicode object | |
194 | """ |
|
194 | """ | |
195 | if isinstance(str_, unicode): |
|
195 | if isinstance(str_, unicode): | |
196 | return str_ |
|
196 | return str_ | |
197 |
|
197 | |||
198 | if not from_encoding: |
|
198 | if not from_encoding: | |
199 | import rhodecode |
|
199 | import rhodecode | |
200 | DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8') |
|
200 | DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8') | |
201 | from_encoding = DEFAULT_ENCODING |
|
201 | from_encoding = DEFAULT_ENCODING | |
202 |
|
202 | |||
203 | try: |
|
203 | try: | |
204 | return unicode(str_) |
|
204 | return unicode(str_) | |
205 | except UnicodeDecodeError: |
|
205 | except UnicodeDecodeError: | |
206 | pass |
|
206 | pass | |
207 |
|
207 | |||
208 | try: |
|
208 | try: | |
209 | return unicode(str_, from_encoding) |
|
209 | return unicode(str_, from_encoding) | |
210 | except UnicodeDecodeError: |
|
210 | except UnicodeDecodeError: | |
211 | pass |
|
211 | pass | |
212 |
|
212 | |||
213 | try: |
|
213 | try: | |
214 | import chardet |
|
214 | import chardet | |
215 | encoding = chardet.detect(str_)['encoding'] |
|
215 | encoding = chardet.detect(str_)['encoding'] | |
216 | if encoding is None: |
|
216 | if encoding is None: | |
217 | raise Exception() |
|
217 | raise Exception() | |
218 | return str_.decode(encoding) |
|
218 | return str_.decode(encoding) | |
219 | except (ImportError, UnicodeDecodeError, Exception): |
|
219 | except (ImportError, UnicodeDecodeError, Exception): | |
220 | return unicode(str_, from_encoding, 'replace') |
|
220 | return unicode(str_, from_encoding, 'replace') | |
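A minimal sketch of the fallback order, assuming the default utf8 configuration (the byte string is just an example):

    >>> safe_unicode(u'already unicode')
    u'already unicode'
    >>> safe_unicode('\xc5\x82')   # utf8 bytes decoded with the configured default encoding
    u'\u0142'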
221 |
|
221 | |||
222 |
|
222 | |||
223 | def safe_str(unicode_, to_encoding=None): |
|
223 | def safe_str(unicode_, to_encoding=None): | |
224 | """ |
|
224 | """ | |
225 | safe str function. Does a few tricks to turn unicode_ into string
226 |
|
226 | |||
227 | In case of UnicodeEncodeError we try to return it with the encoding detected
228 | by the chardet library; if that fails, fall back to str with errors replaced
229 |
|
229 | |||
230 | :param unicode_: unicode to encode |
|
230 | :param unicode_: unicode to encode | |
231 | :rtype: str |
|
231 | :rtype: str | |
232 | :returns: str object |
|
232 | :returns: str object | |
233 | """ |
|
233 | """ | |
234 |
|
234 | |||
235 | # if it's not basestr cast to str |
|
235 | # if it's not basestr cast to str | |
236 | if not isinstance(unicode_, basestring): |
|
236 | if not isinstance(unicode_, basestring): | |
237 | return str(unicode_) |
|
237 | return str(unicode_) | |
238 |
|
238 | |||
239 | if isinstance(unicode_, str): |
|
239 | if isinstance(unicode_, str): | |
240 | return unicode_ |
|
240 | return unicode_ | |
241 |
|
241 | |||
242 | if not to_encoding: |
|
242 | if not to_encoding: | |
243 | import rhodecode |
|
243 | import rhodecode | |
244 | DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8') |
|
244 | DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8') | |
245 | to_encoding = DEFAULT_ENCODING |
|
245 | to_encoding = DEFAULT_ENCODING | |
246 |
|
246 | |||
247 | try: |
|
247 | try: | |
248 | return unicode_.encode(to_encoding) |
|
248 | return unicode_.encode(to_encoding) | |
249 | except UnicodeEncodeError: |
|
249 | except UnicodeEncodeError: | |
250 | pass |
|
250 | pass | |
251 |
|
251 | |||
252 | try:
253 | import chardet
254 | encoding = chardet.detect(unicode_)['encoding']
255 | if encoding is None:
256 | raise Exception()
257 |
258 | return unicode_.encode(encoding)
259 | except (ImportError, UnicodeEncodeError, Exception):
260 | return unicode_.encode(to_encoding, 'replace')
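Roughly the inverse of safe_unicode; a sketch assuming the default utf8 encoding (note that non-string input is first coerced with str()):

    >>> safe_str(u'\u0142')
    '\xc5\x82'
    >>> safe_str(123)
    '123'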
264 |
|
264 | |||
265 |
|
265 | |||
266 | def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): |
|
266 | def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): | |
267 | """ |
|
267 | """ | |
268 | Custom engine_from_config function that makes sure we use NullPool for
269 | file based sqlite databases. This prevents errors on sqlite. This only |
|
269 | file based sqlite databases. This prevents errors on sqlite. This only | |
270 | applies to sqlalchemy versions < 0.7.0 |
|
270 | applies to sqlalchemy versions < 0.7.0 | |
271 |
|
271 | |||
272 | """ |
|
272 | """ | |
273 | import sqlalchemy |
|
273 | import sqlalchemy | |
274 | from sqlalchemy import engine_from_config as efc |
|
274 | from sqlalchemy import engine_from_config as efc | |
275 | import logging |
|
275 | import logging | |
276 |
|
276 | |||
277 | if int(sqlalchemy.__version__.split('.')[1]) < 7: |
|
277 | if int(sqlalchemy.__version__.split('.')[1]) < 7: | |
278 |
|
278 | |||
279 | # This solution should work for sqlalchemy < 0.7.0, and should use |
|
279 | # This solution should work for sqlalchemy < 0.7.0, and should use | |
280 | # proxy=TimerProxy() for execution time profiling |
|
280 | # proxy=TimerProxy() for execution time profiling | |
281 |
|
281 | |||
282 | from sqlalchemy.pool import NullPool |
|
282 | from sqlalchemy.pool import NullPool | |
283 | url = configuration[prefix + 'url'] |
|
283 | url = configuration[prefix + 'url'] | |
284 |
|
284 | |||
285 | if url.startswith('sqlite'): |
|
285 | if url.startswith('sqlite'): | |
286 | kwargs.update({'poolclass': NullPool}) |
|
286 | kwargs.update({'poolclass': NullPool}) | |
287 | return efc(configuration, prefix, **kwargs) |
|
287 | return efc(configuration, prefix, **kwargs) | |
288 | else: |
|
288 | else: | |
289 | import time |
|
289 | import time | |
290 | from sqlalchemy import event |
|
290 | from sqlalchemy import event | |
291 | from sqlalchemy.engine import Engine |
|
291 | from sqlalchemy.engine import Engine | |
292 |
|
292 | |||
293 | log = logging.getLogger('sqlalchemy.engine') |
|
293 | log = logging.getLogger('sqlalchemy.engine') | |
294 | BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38) |
|
294 | BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38) | |
295 | engine = efc(configuration, prefix, **kwargs) |
|
295 | engine = efc(configuration, prefix, **kwargs) | |
296 |
|
296 | |||
297 | def color_sql(sql): |
|
297 | def color_sql(sql): | |
298 | COLOR_SEQ = "\033[1;%dm" |
|
298 | COLOR_SEQ = "\033[1;%dm" | |
299 | COLOR_SQL = YELLOW |
|
299 | COLOR_SQL = YELLOW | |
300 | normal = '\x1b[0m' |
|
300 | normal = '\x1b[0m' | |
301 | return ''.join([COLOR_SEQ % COLOR_SQL, sql, normal]) |
|
301 | return ''.join([COLOR_SEQ % COLOR_SQL, sql, normal]) | |
302 |
|
302 | |||
303 | if configuration['debug']: |
|
303 | if configuration['debug']: | |
304 | #attach events only for debug configuration |
|
304 | #attach events only for debug configuration | |
305 |
|
305 | |||
306 | def before_cursor_execute(conn, cursor, statement, |
|
306 | def before_cursor_execute(conn, cursor, statement, | |
307 | parameters, context, executemany): |
|
307 | parameters, context, executemany): | |
308 | context._query_start_time = time.time() |
|
308 | context._query_start_time = time.time() | |
309 | log.info(color_sql(">>>>> STARTING QUERY >>>>>")) |
|
309 | log.info(color_sql(">>>>> STARTING QUERY >>>>>")) | |
310 |
|
310 | |||
311 |
|
311 | |||
312 | def after_cursor_execute(conn, cursor, statement, |
|
312 | def after_cursor_execute(conn, cursor, statement, | |
313 | parameters, context, executemany): |
|
313 | parameters, context, executemany): | |
314 | total = time.time() - context._query_start_time |
|
314 | total = time.time() - context._query_start_time | |
315 | log.info(color_sql("<<<<< TOTAL TIME: %f <<<<<" % total)) |
|
315 | log.info(color_sql("<<<<< TOTAL TIME: %f <<<<<" % total)) | |
316 |
|
316 | |||
317 | event.listen(engine, "before_cursor_execute", |
|
317 | event.listen(engine, "before_cursor_execute", | |
318 | before_cursor_execute) |
|
318 | before_cursor_execute) | |
319 | event.listen(engine, "after_cursor_execute", |
|
319 | event.listen(engine, "after_cursor_execute", | |
320 | after_cursor_execute) |
|
320 | after_cursor_execute) | |
321 |
|
321 | |||
322 | return engine |
|
322 | return engine | |
323 |
|
323 | |||
324 |
|
324 | |||
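The wrapper above falls back to ``NullPool`` for sqlite on sqlalchemy older than 0.7, and on 0.7+ attaches the timing listeners only when ``debug`` is enabled. A standalone sketch of the same timing trick, assuming sqlalchemy >= 0.7 and an in-memory sqlite engine (names here are illustrative, not part of RhodeCode)::

    import time
    import logging
    from sqlalchemy import create_engine, event

    log = logging.getLogger('sqlalchemy.engine')
    engine = create_engine('sqlite://')

    @event.listens_for(engine, 'before_cursor_execute')
    def before_cursor_execute(conn, cursor, statement, parameters,
                              context, executemany):
        # remember when the statement started
        context._query_start_time = time.time()

    @event.listens_for(engine, 'after_cursor_execute')
    def after_cursor_execute(conn, cursor, statement, parameters,
                             context, executemany):
        # log how long the statement took
        total = time.time() - context._query_start_time
        log.info('query took %f seconds', total)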
325 | def age(curdate): |
|
325 | def age(curdate): | |
326 | """ |
|
326 | """ | |
327 | turns a datetime into an age string. |
|
327 | turns a datetime into an age string. | |
328 |
|
328 | |||
329 | :param curdate: datetime object |
|
329 | :param curdate: datetime object | |
330 | :rtype: unicode |
|
330 | :rtype: unicode | |
331 | :returns: unicode words describing age |
|
331 | :returns: unicode words describing age | |
332 | """ |
|
332 | """ | |
333 |
|
333 | |||
334 | from datetime import datetime |
|
334 | from datetime import datetime | |
335 | from webhelpers.date import time_ago_in_words |
|
335 | from webhelpers.date import time_ago_in_words | |
336 |
|
336 | |||
337 | _ = lambda s: s |
|
337 | _ = lambda s: s | |
338 |
|
338 | |||
339 | if not curdate: |
|
339 | if not curdate: | |
340 | return '' |
|
340 | return '' | |
341 |
|
341 | |||
342 | agescales = [(_(u"year"), 3600 * 24 * 365), |
|
342 | agescales = [(_(u"year"), 3600 * 24 * 365), | |
343 | (_(u"month"), 3600 * 24 * 30), |
|
343 | (_(u"month"), 3600 * 24 * 30), | |
344 | (_(u"day"), 3600 * 24), |
|
344 | (_(u"day"), 3600 * 24), | |
345 | (_(u"hour"), 3600), |
|
345 | (_(u"hour"), 3600), | |
346 | (_(u"minute"), 60), |
|
346 | (_(u"minute"), 60), | |
347 | (_(u"second"), 1), ] |
|
347 | (_(u"second"), 1), ] | |
348 |
|
348 | |||
349 | age = datetime.now() - curdate |
|
349 | age = datetime.now() - curdate | |
350 | age_seconds = (age.days * agescales[2][1]) + age.seconds |
|
350 | age_seconds = (age.days * agescales[2][1]) + age.seconds | |
351 | pos = 1 |
|
351 | pos = 1 | |
352 | for scale in agescales: |
|
352 | for scale in agescales: | |
353 | if scale[1] <= age_seconds: |
|
353 | if scale[1] <= age_seconds: | |
354 | if pos == 6: |
|
354 | if pos == 6: | |
355 | pos = 5 |
|
355 | pos = 5 | |
356 | return '%s %s' % (time_ago_in_words(curdate, |
|
356 | return '%s %s' % (time_ago_in_words(curdate, | |
357 | agescales[pos][0]), _('ago')) |
|
357 | agescales[pos][0]), _('ago')) | |
358 | pos += 1 |
|
358 | pos += 1 | |
359 |
|
359 | |||
360 | return _(u'just now') |
|
360 | return _(u'just now') | |
361 |
|
361 | |||
362 |
|
362 | |||
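``age()`` above only picks the right time scale from ``agescales``; the wording itself comes from ``webhelpers.date.time_ago_in_words``. A minimal sketch of the intended behaviour, assuming the helper is importable as ``rhodecode.lib.age`` (outputs are approximate)::

    from datetime import datetime, timedelta
    from rhodecode.lib import age

    print age(datetime.now() - timedelta(hours=5))    # something like u'5 hours ago'
    print age(datetime.now() - timedelta(seconds=3))  # something like u'3 seconds ago'
    print age(None)                                   # '' when no date is given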
363 | def uri_filter(uri): |
|
363 | def uri_filter(uri): | |
364 | """ |
|
364 | """ | |
365 | Removes user:password from given url string |
|
365 | Removes user:password from given url string | |
366 |
|
366 | |||
367 | :param uri: |
|
367 | :param uri: | |
368 | :rtype: unicode |
|
368 | :rtype: unicode | |
369 | :returns: filtered list of strings |
|
369 | :returns: filtered list of strings | |
370 | """ |
|
370 | """ | |
371 | if not uri: |
|
371 | if not uri: | |
372 | return '' |
|
372 | return '' | |
373 |
|
373 | |||
374 | proto = '' |
|
374 | proto = '' | |
375 |
|
375 | |||
376 | for pat in ('https://', 'http://'): |
|
376 | for pat in ('https://', 'http://'): | |
377 | if uri.startswith(pat): |
|
377 | if uri.startswith(pat): | |
378 | uri = uri[len(pat):] |
|
378 | uri = uri[len(pat):] | |
379 | proto = pat |
|
379 | proto = pat | |
380 | break |
|
380 | break | |
381 |
|
381 | |||
382 | # remove passwords and username |
|
382 | # remove passwords and username | |
383 | uri = uri[uri.find('@') + 1:] |
|
383 | uri = uri[uri.find('@') + 1:] | |
384 |
|
384 | |||
385 | # get the port |
|
385 | # get the port | |
386 | cred_pos = uri.find(':') |
|
386 | cred_pos = uri.find(':') | |
387 | if cred_pos == -1: |
|
387 | if cred_pos == -1: | |
388 | host, port = uri, None |
|
388 | host, port = uri, None | |
389 | else: |
|
389 | else: | |
390 | host, port = uri[:cred_pos], uri[cred_pos + 1:] |
|
390 | host, port = uri[:cred_pos], uri[cred_pos + 1:] | |
391 |
|
391 | |||
392 | return filter(None, [proto, host, port]) |
|
392 | return filter(None, [proto, host, port]) | |
393 |
|
393 | |||
394 |
|
394 | |||
395 | def credentials_filter(uri): |
|
395 | def credentials_filter(uri): | |
396 | """ |
|
396 | """ | |
397 | Returns a url with removed credentials |
|
397 | Returns a url with removed credentials | |
398 |
|
398 | |||
399 | :param uri: |
|
399 | :param uri: | |
400 | """ |
|
400 | """ | |
401 |
|
401 | |||
402 | uri = uri_filter(uri) |
|
402 | uri = uri_filter(uri) | |
403 | #check if we have port |
|
403 | #check if we have port | |
404 | if len(uri) > 2 and uri[2]: |
|
404 | if len(uri) > 2 and uri[2]: | |
405 | uri[2] = ':' + uri[2] |
|
405 | uri[2] = ':' + uri[2] | |
406 |
|
406 | |||
407 | return ''.join(uri) |
|
407 | return ''.join(uri) | |
408 |
|
408 | |||
409 |
|
409 | |||
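``uri_filter`` and ``credentials_filter`` above exist so clone URLs can be displayed without the ``user:password`` part. A small illustration with a made-up URL, assuming both helpers are importable from ``rhodecode.lib``::

    from rhodecode.lib import credentials_filter

    url = 'https://joe:secret@example.com:8080/myrepo'
    print credentials_filter(url)
    # -> 'https://example.com:8080/myrepo'
    # the credentials are gone; scheme, host, port and path survive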
410 | def get_changeset_safe(repo, rev): |
|
410 | def get_changeset_safe(repo, rev): | |
411 | """ |
|
411 | """ | |
412 | Safe version of get_changeset; if this changeset doesn't exist for a |
|
412 | Safe version of get_changeset; if this changeset doesn't exist for a | |
413 | repo it returns a Dummy one instead |
|
413 | repo it returns a Dummy one instead | |
414 |
|
414 | |||
415 | :param repo: |
|
415 | :param repo: | |
416 | :param rev: |
|
416 | :param rev: | |
417 | """ |
|
417 | """ | |
418 | from rhodecode.lib.vcs.backends.base import BaseRepository |
|
418 | from rhodecode.lib.vcs.backends.base import BaseRepository | |
419 | from rhodecode.lib.vcs.exceptions import RepositoryError |
|
419 | from rhodecode.lib.vcs.exceptions import RepositoryError | |
420 | if not isinstance(repo, BaseRepository): |
|
420 | if not isinstance(repo, BaseRepository): | |
421 | raise Exception('You must pass an Repository ' |
|
421 | raise Exception('You must pass an Repository ' | |
422 | 'object as first argument got %s', type(repo)) |
|
422 | 'object as first argument got %s', type(repo)) | |
423 |
|
423 | |||
424 | try: |
|
424 | try: | |
425 | cs = repo.get_changeset(rev) |
|
425 | cs = repo.get_changeset(rev) | |
426 | except RepositoryError: |
|
426 | except RepositoryError: | |
427 | from rhodecode.lib.utils import EmptyChangeset |
|
427 | from rhodecode.lib.utils import EmptyChangeset | |
428 | cs = EmptyChangeset(requested_revision=rev) |
|
428 | cs = EmptyChangeset(requested_revision=rev) | |
429 | return cs |
|
429 | return cs | |
430 |
|
430 | |||
431 |
|
431 | |||
432 | def get_current_revision(quiet=False): |
|
432 | def get_current_revision(quiet=False): | |
433 | """ |
|
433 | """ | |
434 | Returns tuple of (number, id) from repository containing this package |
|
434 | Returns tuple of (number, id) from repository containing this package | |
435 | or None if repository could not be found. |
|
435 | or None if repository could not be found. | |
436 |
|
436 | |||
437 | :param quiet: prints error for fetching revision if True |
|
437 | :param quiet: prints error for fetching revision if True | |
438 | """ |
|
438 | """ | |
439 |
|
439 | |||
440 | try: |
|
440 | try: | |
441 | from rhodecode.lib.vcs import get_repo |
|
441 | from rhodecode.lib.vcs import get_repo | |
442 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
442 | from rhodecode.lib.vcs.utils.helpers import get_scm | |
443 | repopath = os.path.join(os.path.dirname(__file__), '..', '..') |
|
443 | repopath = os.path.join(os.path.dirname(__file__), '..', '..') | |
444 | scm = get_scm(repopath)[0] |
|
444 | scm = get_scm(repopath)[0] | |
445 | repo = get_repo(path=repopath, alias=scm) |
|
445 | repo = get_repo(path=repopath, alias=scm) | |
446 | tip = repo.get_changeset() |
|
446 | tip = repo.get_changeset() | |
447 | return (tip.revision, tip.short_id) |
|
447 | return (tip.revision, tip.short_id) | |
448 | except Exception, err: |
|
448 | except Exception, err: | |
449 | if not quiet: |
|
449 | if not quiet: | |
450 | print ("Cannot retrieve rhodecode's revision. Original error " |
|
450 | print ("Cannot retrieve rhodecode's revision. Original error " | |
451 | "was: %s" % err) |
|
451 | "was: %s" % err) | |
452 | return None |
|
452 | return None | |
453 |
|
453 | |||
454 |
|
454 | |||
455 | def extract_mentioned_users(s): |
|
455 | def extract_mentioned_users(s): | |
456 | """ |
|
456 | """ | |
457 | Returns unique usernames from given string s that have @mention |
|
457 | Returns unique usernames from given string s that have @mention | |
458 |
|
458 | |||
459 | :param s: string to get mentions |
|
459 | :param s: string to get mentions | |
460 | """ |
|
460 | """ | |
461 | usrs = {} |
|
461 | usrs = {} | |
462 | for username in re.findall(r'(?:^@|\s@)(\w+)', s): |
|
462 | for username in re.findall(r'(?:^@|\s@)(\w+)', s): | |
463 | usrs[username] = username |
|
463 | usrs[username] = username | |
464 |
|
464 | |||
465 | return sorted(usrs.keys()) |
|
465 | return sorted(usrs.keys()) |
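``extract_mentioned_users`` is a plain regular-expression scan that de-duplicates and sorts its hits; the same expression can be exercised on its own (the comment text below is made up)::

    import re

    text = '@marcink please review, cc @lukasz and @marcink'
    found = sorted(set(re.findall(r'(?:^@|\s@)(\w+)', text)))
    print found   # -> ['lukasz', 'marcink'], duplicates collapsed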
@@ -1,299 +1,301 b'' | |||||
1 | """caching_query.py |
|
1 | """caching_query.py | |
2 |
|
2 | |||
3 | Represent persistence structures which allow the usage of |
|
3 | Represent persistence structures which allow the usage of | |
4 | Beaker caching with SQLAlchemy. |
|
4 | Beaker caching with SQLAlchemy. | |
5 |
|
5 | |||
6 | The three new concepts introduced here are: |
|
6 | The three new concepts introduced here are: | |
7 |
|
7 | |||
8 | * CachingQuery - a Query subclass that caches and |
|
8 | * CachingQuery - a Query subclass that caches and | |
9 | retrieves results in/from Beaker. |
|
9 | retrieves results in/from Beaker. | |
10 | * FromCache - a query option that establishes caching |
|
10 | * FromCache - a query option that establishes caching | |
11 | parameters on a Query |
|
11 | parameters on a Query | |
12 | * RelationshipCache - a variant of FromCache which is specific |
|
12 | * RelationshipCache - a variant of FromCache which is specific | |
13 | to a query invoked during a lazy load. |
|
13 | to a query invoked during a lazy load. | |
14 | * _params_from_query - extracts value parameters from |
|
14 | * _params_from_query - extracts value parameters from | |
15 | a Query. |
|
15 | a Query. | |
16 |
|
16 | |||
17 | The rest of what's here are standard SQLAlchemy and |
|
17 | The rest of what's here are standard SQLAlchemy and | |
18 | Beaker constructs. |
|
18 | Beaker constructs. | |
19 |
|
19 | |||
20 | """ |
|
20 | """ | |
21 | import beaker |
|
21 | import beaker | |
22 | from beaker.exceptions import BeakerException |
|
22 | from beaker.exceptions import BeakerException | |
23 |
|
23 | |||
24 | from sqlalchemy.orm.interfaces import MapperOption |
|
24 | from sqlalchemy.orm.interfaces import MapperOption | |
25 | from sqlalchemy.orm.query import Query |
|
25 | from sqlalchemy.orm.query import Query | |
26 | from sqlalchemy.sql import visitors |
|
26 | from sqlalchemy.sql import visitors | |
|
27 | from rhodecode.lib import safe_str | |||
27 |
|
28 | |||
28 |
|
29 | |||
29 | class CachingQuery(Query): |
|
30 | class CachingQuery(Query): | |
30 | """A Query subclass which optionally loads full results from a Beaker |
|
31 | """A Query subclass which optionally loads full results from a Beaker | |
31 | cache region. |
|
32 | cache region. | |
32 |
|
33 | |||
33 | The CachingQuery stores additional state that allows it to consult |
|
34 | The CachingQuery stores additional state that allows it to consult | |
34 | a Beaker cache before accessing the database: |
|
35 | a Beaker cache before accessing the database: | |
35 |
|
36 | |||
36 | * A "region", which is a cache region argument passed to a |
|
37 | * A "region", which is a cache region argument passed to a | |
37 | Beaker CacheManager, specifies a particular cache configuration |
|
38 | Beaker CacheManager, specifies a particular cache configuration | |
38 | (including backend implementation, expiration times, etc.) |
|
39 | (including backend implementation, expiration times, etc.) | |
39 | * A "namespace", which is a qualifying name that identifies a |
|
40 | * A "namespace", which is a qualifying name that identifies a | |
40 | group of keys within the cache. A query that filters on a name |
|
41 | group of keys within the cache. A query that filters on a name | |
41 | might use the name "by_name", a query that filters on a date range |
|
42 | might use the name "by_name", a query that filters on a date range | |
42 | to a joined table might use the name "related_date_range". |
|
43 | to a joined table might use the name "related_date_range". | |
43 |
|
44 | |||
44 | When the above state is present, a Beaker cache is retrieved. |
|
45 | When the above state is present, a Beaker cache is retrieved. | |
45 |
|
46 | |||
46 | The "namespace" name is first concatenated with |
|
47 | The "namespace" name is first concatenated with | |
47 | a string composed of the individual entities and columns the Query |
|
48 | a string composed of the individual entities and columns the Query | |
48 | requests, i.e. such as ``Query(User.id, User.name)``. |
|
49 | requests, i.e. such as ``Query(User.id, User.name)``. | |
49 |
|
50 | |||
50 | The Beaker cache is then loaded from the cache manager based |
|
51 | The Beaker cache is then loaded from the cache manager based | |
51 | on the region and composed namespace. The key within the cache |
|
52 | on the region and composed namespace. The key within the cache | |
52 | itself is then constructed against the bind parameters specified |
|
53 | itself is then constructed against the bind parameters specified | |
53 | by this query, which are usually literals defined in the |
|
54 | by this query, which are usually literals defined in the | |
54 | WHERE clause. |
|
55 | WHERE clause. | |
55 |
|
56 | |||
56 | The FromCache and RelationshipCache mapper options below represent |
|
57 | The FromCache and RelationshipCache mapper options below represent | |
57 | the "public" method of configuring this state upon the CachingQuery. |
|
58 | the "public" method of configuring this state upon the CachingQuery. | |
58 |
|
59 | |||
59 | """ |
|
60 | """ | |
60 |
|
61 | |||
61 | def __init__(self, manager, *args, **kw): |
|
62 | def __init__(self, manager, *args, **kw): | |
62 | self.cache_manager = manager |
|
63 | self.cache_manager = manager | |
63 | Query.__init__(self, *args, **kw) |
|
64 | Query.__init__(self, *args, **kw) | |
64 |
|
65 | |||
65 | def __iter__(self): |
|
66 | def __iter__(self): | |
66 | """override __iter__ to pull results from Beaker |
|
67 | """override __iter__ to pull results from Beaker | |
67 | if particular attributes have been configured. |
|
68 | if particular attributes have been configured. | |
68 |
|
69 | |||
69 | Note that this approach does *not* detach the loaded objects from |
|
70 | Note that this approach does *not* detach the loaded objects from | |
70 | the current session. If the cache backend is an in-process cache |
|
71 | the current session. If the cache backend is an in-process cache | |
71 | (like "memory") and lives beyond the scope of the current session's |
|
72 | (like "memory") and lives beyond the scope of the current session's | |
72 | transaction, those objects may be expired. The method here can be |
|
73 | transaction, those objects may be expired. The method here can be | |
73 | modified to first expunge() each loaded item from the current |
|
74 | modified to first expunge() each loaded item from the current | |
74 | session before returning the list of items, so that the items |
|
75 | session before returning the list of items, so that the items | |
75 | in the cache are not the same ones in the current Session. |
|
76 | in the cache are not the same ones in the current Session. | |
76 |
|
77 | |||
77 | """ |
|
78 | """ | |
78 | if hasattr(self, '_cache_parameters'): |
|
79 | if hasattr(self, '_cache_parameters'): | |
79 | return self.get_value(createfunc=lambda: |
|
80 | return self.get_value(createfunc=lambda: | |
80 | list(Query.__iter__(self))) |
|
81 | list(Query.__iter__(self))) | |
81 | else: |
|
82 | else: | |
82 | return Query.__iter__(self) |
|
83 | return Query.__iter__(self) | |
83 |
|
84 | |||
84 | def invalidate(self): |
|
85 | def invalidate(self): | |
85 | """Invalidate the value represented by this Query.""" |
|
86 | """Invalidate the value represented by this Query.""" | |
86 |
|
87 | |||
87 | cache, cache_key = _get_cache_parameters(self) |
|
88 | cache, cache_key = _get_cache_parameters(self) | |
88 | cache.remove(cache_key) |
|
89 | cache.remove(cache_key) | |
89 |
|
90 | |||
90 | def get_value(self, merge=True, createfunc=None): |
|
91 | def get_value(self, merge=True, createfunc=None): | |
91 | """Return the value from the cache for this query. |
|
92 | """Return the value from the cache for this query. | |
92 |
|
93 | |||
93 | Raise KeyError if no value present and no |
|
94 | Raise KeyError if no value present and no | |
94 | createfunc specified. |
|
95 | createfunc specified. | |
95 |
|
96 | |||
96 | """ |
|
97 | """ | |
97 | cache, cache_key = _get_cache_parameters(self) |
|
98 | cache, cache_key = _get_cache_parameters(self) | |
98 | ret = cache.get_value(cache_key, createfunc=createfunc) |
|
99 | ret = cache.get_value(cache_key, createfunc=createfunc) | |
99 | if merge: |
|
100 | if merge: | |
100 | ret = self.merge_result(ret, load=False) |
|
101 | ret = self.merge_result(ret, load=False) | |
101 | return ret |
|
102 | return ret | |
102 |
|
103 | |||
103 | def set_value(self, value): |
|
104 | def set_value(self, value): | |
104 | """Set the value in the cache for this query.""" |
|
105 | """Set the value in the cache for this query.""" | |
105 |
|
106 | |||
106 | cache, cache_key = _get_cache_parameters(self) |
|
107 | cache, cache_key = _get_cache_parameters(self) | |
107 | cache.put(cache_key, value) |
|
108 | cache.put(cache_key, value) | |
108 |
|
109 | |||
109 |
|
110 | |||
110 | def query_callable(manager, query_cls=CachingQuery): |
|
111 | def query_callable(manager, query_cls=CachingQuery): | |
111 | def query(*arg, **kw): |
|
112 | def query(*arg, **kw): | |
112 | return query_cls(manager, *arg, **kw) |
|
113 | return query_cls(manager, *arg, **kw) | |
113 | return query |
|
114 | return query | |
114 |
|
115 | |||
115 |
|
116 | |||
116 | def get_cache_region(name, region): |
|
117 | def get_cache_region(name, region): | |
117 | if region not in beaker.cache.cache_regions: |
|
118 | if region not in beaker.cache.cache_regions: | |
118 | raise BeakerException('Cache region `%s` not configured ' |
|
119 | raise BeakerException('Cache region `%s` not configured ' | |
119 | 'Check if proper cache settings are in the .ini files' % region) |
|
120 | 'Check if proper cache settings are in the .ini files' % region) | |
120 | kw = beaker.cache.cache_regions[region] |
|
121 | kw = beaker.cache.cache_regions[region] | |
121 | return beaker.cache.Cache._get_cache(name, kw) |
|
122 | return beaker.cache.Cache._get_cache(name, kw) | |
122 |
|
123 | |||
123 |
|
124 | |||
124 | def _get_cache_parameters(query): |
|
125 | def _get_cache_parameters(query): | |
125 | """For a query with cache_region and cache_namespace configured, |
|
126 | """For a query with cache_region and cache_namespace configured, | |
126 | return the corresponding Cache instance and cache key, based |
|
127 | return the corresponding Cache instance and cache key, based | |
127 | on this query's current criterion and parameter values. |
|
128 | on this query's current criterion and parameter values. | |
128 |
|
129 | |||
129 | """ |
|
130 | """ | |
130 | if not hasattr(query, '_cache_parameters'): |
|
131 | if not hasattr(query, '_cache_parameters'): | |
131 | raise ValueError("This Query does not have caching " |
|
132 | raise ValueError("This Query does not have caching " | |
132 | "parameters configured.") |
|
133 | "parameters configured.") | |
133 |
|
134 | |||
134 | region, namespace, cache_key = query._cache_parameters |
|
135 | region, namespace, cache_key = query._cache_parameters | |
135 |
|
136 | |||
136 | namespace = _namespace_from_query(namespace, query) |
|
137 | namespace = _namespace_from_query(namespace, query) | |
137 |
|
138 | |||
138 | if cache_key is None: |
|
139 | if cache_key is None: | |
139 | # cache key - the value arguments from this query's parameters. |
|
140 | # cache key - the value arguments from this query's parameters. | |
140 | args = [str(x) for x in _params_from_query(query)] |
|
141 | args = [safe_str(x) for x in _params_from_query(query)] | |
141 | args.extend(filter(lambda k:k not in ['None', None, u'None'], |
|
142 | args.extend(filter(lambda k: k not in ['None', None, u'None'], | |
142 | [str(query._limit), str(query._offset)])) |
|
143 | [str(query._limit), str(query._offset)])) | |
|
144 | ||||
143 | cache_key = " ".join(args) |
|
145 | cache_key = " ".join(args) | |
144 |
|
146 | |||
145 | if cache_key is None: |
|
147 | if cache_key is None: | |
146 | raise Exception('Cache key cannot be None') |
|
148 | raise Exception('Cache key cannot be None') | |
147 |
|
149 | |||
148 | # get cache |
|
150 | # get cache | |
149 | #cache = query.cache_manager.get_cache_region(namespace, region) |
|
151 | #cache = query.cache_manager.get_cache_region(namespace, region) | |
150 | cache = get_cache_region(namespace, region) |
|
152 | cache = get_cache_region(namespace, region) | |
151 | # optional - hash the cache_key too for consistent length |
|
153 | # optional - hash the cache_key too for consistent length | |
152 | # import uuid |
|
154 | # import uuid | |
153 | # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key)) |
|
155 | # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key)) | |
154 |
|
156 | |||
155 | return cache, cache_key |
|
157 | return cache, cache_key | |
156 |
|
158 | |||
157 |
|
159 | |||
158 | def _namespace_from_query(namespace, query): |
|
160 | def _namespace_from_query(namespace, query): | |
159 | # cache namespace - the token handed in by the |
|
161 | # cache namespace - the token handed in by the | |
160 | # option + class we're querying against |
|
162 | # option + class we're querying against | |
161 | namespace = " ".join([namespace] + [str(x) for x in query._entities]) |
|
163 | namespace = " ".join([namespace] + [str(x) for x in query._entities]) | |
162 |
|
164 | |||
163 | # memcached wants this |
|
165 | # memcached wants this | |
164 | namespace = namespace.replace(' ', '_') |
|
166 | namespace = namespace.replace(' ', '_') | |
165 |
|
167 | |||
166 | return namespace |
|
168 | return namespace | |
167 |
|
169 | |||
168 |
|
170 | |||
169 | def _set_cache_parameters(query, region, namespace, cache_key): |
|
171 | def _set_cache_parameters(query, region, namespace, cache_key): | |
170 |
|
172 | |||
171 | if hasattr(query, '_cache_parameters'): |
|
173 | if hasattr(query, '_cache_parameters'): | |
172 | region, namespace, cache_key = query._cache_parameters |
|
174 | region, namespace, cache_key = query._cache_parameters | |
173 | raise ValueError("This query is already configured " |
|
175 | raise ValueError("This query is already configured " | |
174 | "for region %r namespace %r" % |
|
176 | "for region %r namespace %r" % | |
175 | (region, namespace) |
|
177 | (region, namespace) | |
176 | ) |
|
178 | ) | |
177 | query._cache_parameters = region, namespace, cache_key |
|
179 | query._cache_parameters = region, namespace, cache_key | |
178 |
|
180 | |||
179 |
|
181 | |||
180 | class FromCache(MapperOption): |
|
182 | class FromCache(MapperOption): | |
181 | """Specifies that a Query should load results from a cache.""" |
|
183 | """Specifies that a Query should load results from a cache.""" | |
182 |
|
184 | |||
183 | propagate_to_loaders = False |
|
185 | propagate_to_loaders = False | |
184 |
|
186 | |||
185 | def __init__(self, region, namespace, cache_key=None): |
|
187 | def __init__(self, region, namespace, cache_key=None): | |
186 | """Construct a new FromCache. |
|
188 | """Construct a new FromCache. | |
187 |
|
189 | |||
188 | :param region: the cache region. Should be a |
|
190 | :param region: the cache region. Should be a | |
189 | region configured in the Beaker CacheManager. |
|
191 | region configured in the Beaker CacheManager. | |
190 |
|
192 | |||
191 | :param namespace: the cache namespace. Should |
|
193 | :param namespace: the cache namespace. Should | |
192 | be a name uniquely describing the target Query's |
|
194 | be a name uniquely describing the target Query's | |
193 | lexical structure. |
|
195 | lexical structure. | |
194 |
|
196 | |||
195 | :param cache_key: optional. A string cache key |
|
197 | :param cache_key: optional. A string cache key | |
196 | that will serve as the key to the query. Use this |
|
198 | that will serve as the key to the query. Use this | |
197 | if your query has a huge amount of parameters (such |
|
199 | if your query has a huge amount of parameters (such | |
198 | as when using in_()) which correspond more simply to |
|
200 | as when using in_()) which correspond more simply to | |
199 | some other identifier. |
|
201 | some other identifier. | |
200 |
|
202 | |||
201 | """ |
|
203 | """ | |
202 | self.region = region |
|
204 | self.region = region | |
203 | self.namespace = namespace |
|
205 | self.namespace = namespace | |
204 | self.cache_key = cache_key |
|
206 | self.cache_key = cache_key | |
205 |
|
207 | |||
206 | def process_query(self, query): |
|
208 | def process_query(self, query): | |
207 | """Process a Query during normal loading operation.""" |
|
209 | """Process a Query during normal loading operation.""" | |
208 |
|
210 | |||
209 | _set_cache_parameters(query, self.region, self.namespace, |
|
211 | _set_cache_parameters(query, self.region, self.namespace, | |
210 | self.cache_key) |
|
212 | self.cache_key) | |
211 |
|
213 | |||
212 |
|
214 | |||
213 | class RelationshipCache(MapperOption): |
|
215 | class RelationshipCache(MapperOption): | |
214 | """Specifies that a Query as called within a "lazy load" |
|
216 | """Specifies that a Query as called within a "lazy load" | |
215 | should load results from a cache.""" |
|
217 | should load results from a cache.""" | |
216 |
|
218 | |||
217 | propagate_to_loaders = True |
|
219 | propagate_to_loaders = True | |
218 |
|
220 | |||
219 | def __init__(self, region, namespace, attribute): |
|
221 | def __init__(self, region, namespace, attribute): | |
220 | """Construct a new RelationshipCache. |
|
222 | """Construct a new RelationshipCache. | |
221 |
|
223 | |||
222 | :param region: the cache region. Should be a |
|
224 | :param region: the cache region. Should be a | |
223 | region configured in the Beaker CacheManager. |
|
225 | region configured in the Beaker CacheManager. | |
224 |
|
226 | |||
225 | :param namespace: the cache namespace. Should |
|
227 | :param namespace: the cache namespace. Should | |
226 | be a name uniquely describing the target Query's |
|
228 | be a name uniquely describing the target Query's | |
227 | lexical structure. |
|
229 | lexical structure. | |
228 |
|
230 | |||
229 | :param attribute: A Class.attribute which |
|
231 | :param attribute: A Class.attribute which | |
230 | indicates a particular class relationship() whose |
|
232 | indicates a particular class relationship() whose | |
231 | lazy loader should be pulled from the cache. |
|
233 | lazy loader should be pulled from the cache. | |
232 |
|
234 | |||
233 | """ |
|
235 | """ | |
234 | self.region = region |
|
236 | self.region = region | |
235 | self.namespace = namespace |
|
237 | self.namespace = namespace | |
236 | self._relationship_options = { |
|
238 | self._relationship_options = { | |
237 | (attribute.property.parent.class_, attribute.property.key): self |
|
239 | (attribute.property.parent.class_, attribute.property.key): self | |
238 | } |
|
240 | } | |
239 |
|
241 | |||
240 | def process_query_conditionally(self, query): |
|
242 | def process_query_conditionally(self, query): | |
241 | """Process a Query that is used within a lazy loader. |
|
243 | """Process a Query that is used within a lazy loader. | |
242 |
|
244 | |||
243 | (the process_query_conditionally() method is a SQLAlchemy |
|
245 | (the process_query_conditionally() method is a SQLAlchemy | |
244 | hook invoked only within lazyload.) |
|
246 | hook invoked only within lazyload.) | |
245 |
|
247 | |||
246 | """ |
|
248 | """ | |
247 | if query._current_path: |
|
249 | if query._current_path: | |
248 | mapper, key = query._current_path[-2:] |
|
250 | mapper, key = query._current_path[-2:] | |
249 |
|
251 | |||
250 | for cls in mapper.class_.__mro__: |
|
252 | for cls in mapper.class_.__mro__: | |
251 | if (cls, key) in self._relationship_options: |
|
253 | if (cls, key) in self._relationship_options: | |
252 | relationship_option = \ |
|
254 | relationship_option = \ | |
253 | self._relationship_options[(cls, key)] |
|
255 | self._relationship_options[(cls, key)] | |
254 | _set_cache_parameters( |
|
256 | _set_cache_parameters( | |
255 | query, |
|
257 | query, | |
256 | relationship_option.region, |
|
258 | relationship_option.region, | |
257 | relationship_option.namespace, |
|
259 | relationship_option.namespace, | |
258 | None) |
|
260 | None) | |
259 |
|
261 | |||
260 | def and_(self, option): |
|
262 | def and_(self, option): | |
261 | """Chain another RelationshipCache option to this one. |
|
263 | """Chain another RelationshipCache option to this one. | |
262 |
|
264 | |||
263 | While many RelationshipCache objects can be specified on a single |
|
265 | While many RelationshipCache objects can be specified on a single | |
264 | Query separately, chaining them together allows for a more efficient |
|
266 | Query separately, chaining them together allows for a more efficient | |
265 | lookup during load. |
|
267 | lookup during load. | |
266 |
|
268 | |||
267 | """ |
|
269 | """ | |
268 | self._relationship_options.update(option._relationship_options) |
|
270 | self._relationship_options.update(option._relationship_options) | |
269 | return self |
|
271 | return self | |
270 |
|
272 | |||
271 |
|
273 | |||
272 | def _params_from_query(query): |
|
274 | def _params_from_query(query): | |
273 | """Pull the bind parameter values from a query. |
|
275 | """Pull the bind parameter values from a query. | |
274 |
|
276 | |||
275 | This takes into account any scalar attribute bindparam set up. |
|
277 | This takes into account any scalar attribute bindparam set up. | |
276 |
|
278 | |||
277 | E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7))) |
|
279 | E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7))) | |
278 | would return [5, 7]. |
|
280 | would return [5, 7]. | |
279 |
|
281 | |||
280 | """ |
|
282 | """ | |
281 | v = [] |
|
283 | v = [] | |
282 | def visit_bindparam(bind): |
|
284 | def visit_bindparam(bind): | |
283 |
|
285 | |||
284 | if bind.key in query._params: |
|
286 | if bind.key in query._params: | |
285 | value = query._params[bind.key] |
|
287 | value = query._params[bind.key] | |
286 | elif bind.callable: |
|
288 | elif bind.callable: | |
287 | # lazyloader may dig a callable in here, intended |
|
289 | # lazyloader may dig a callable in here, intended | |
288 | # to late-evaluate params after autoflush is called. |
|
290 | # to late-evaluate params after autoflush is called. | |
289 | # convert to a scalar value. |
|
291 | # convert to a scalar value. | |
290 | value = bind.callable() |
|
292 | value = bind.callable() | |
291 | else: |
|
293 | else: | |
292 | value = bind.value |
|
294 | value = bind.value | |
293 |
|
295 | |||
294 | v.append(value) |
|
296 | v.append(value) | |
295 | if query._criterion is not None: |
|
297 | if query._criterion is not None: | |
296 | visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam}) |
|
298 | visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam}) | |
297 | for f in query._from_obj: |
|
299 | for f in query._from_obj: | |
298 | visitors.traverse(f, {}, {'bindparam':visit_bindparam}) |
|
300 | visitors.traverse(f, {}, {'bindparam':visit_bindparam}) | |
299 | return v |
|
301 | return v |
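The pieces above plug into ordinary SQLAlchemy queries through ``Query.options()``. A sketch of the intended usage - the region name ``sql_cache_short``, the cache key and the query itself are examples only, and the session must have been created with ``query_callable()`` for ``CachingQuery`` to be active::

    from rhodecode.model.meta import Session   # assumed to use query_cls=query_callable(...)
    from rhodecode.model.db import User
    from rhodecode.lib.caching_query import FromCache

    q = Session.query(User)\
        .filter(User.username == 'marcink')\
        .options(FromCache('sql_cache_short', 'get_user_marcink'))

    user = q.scalar()   # first call fills the Beaker region
    user = q.scalar()   # second call is answered from the cache
    q.invalidate()      # explicitly drop the cached value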
@@ -1,54 +1,62 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | rhodecode.lib.middleware.https_fixup |
|
3 | rhodecode.lib.middleware.https_fixup | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | middleware to handle https correctly |
|
6 | middleware to handle https correctly | |
7 |
|
7 | |||
8 | :created_on: May 23, 2010 |
|
8 | :created_on: May 23, 2010 | |
9 | :author: marcink |
|
9 | :author: marcink | |
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> | |
11 | :license: GPLv3, see COPYING for more details. |
|
11 | :license: GPLv3, see COPYING for more details. | |
12 | """ |
|
12 | """ | |
13 | # This program is free software: you can redistribute it and/or modify |
|
13 | # This program is free software: you can redistribute it and/or modify | |
14 | # it under the terms of the GNU General Public License as published by |
|
14 | # it under the terms of the GNU General Public License as published by | |
15 | # the Free Software Foundation, either version 3 of the License, or |
|
15 | # the Free Software Foundation, either version 3 of the License, or | |
16 | # (at your option) any later version. |
|
16 | # (at your option) any later version. | |
17 | # |
|
17 | # | |
18 | # This program is distributed in the hope that it will be useful, |
|
18 | # This program is distributed in the hope that it will be useful, | |
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
21 | # GNU General Public License for more details. |
|
21 | # GNU General Public License for more details. | |
22 | # |
|
22 | # | |
23 | # You should have received a copy of the GNU General Public License |
|
23 | # You should have received a copy of the GNU General Public License | |
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
25 |
|
25 | |||
26 | from rhodecode.lib import str2bool |
|
26 | from rhodecode.lib import str2bool | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | class HttpsFixup(object): |
|
29 | class HttpsFixup(object): | |
30 |
|
30 | |||
31 | def __init__(self, app, config): |
|
31 | def __init__(self, app, config): | |
32 | self.application = app |
|
32 | self.application = app | |
33 | self.config = config |
|
33 | self.config = config | |
34 |
|
34 | |||
35 | def __call__(self, environ, start_response): |
|
35 | def __call__(self, environ, start_response): | |
36 | self.__fixup(environ) |
|
36 | self.__fixup(environ) | |
37 | return self.application(environ, start_response) |
|
37 | return self.application(environ, start_response) | |
38 |
|
38 | |||
39 | def __fixup(self, environ): |
|
39 | def __fixup(self, environ): | |
40 | """ |
|
40 | """ | |
41 | Function to fixup the environ as needed. In order to use this |
|
41 | Function to fixup the environ as needed. In order to use this | |
42 | middleware you should set this header inside your |
|
42 | middleware you should set this header inside your | |
43 | proxy ie. nginx, apache etc. |
|
43 | proxy ie. nginx, apache etc. | |
44 | """ |
|
44 | """ | |
45 | proto = environ.get('HTTP_X_URL_SCHEME') |
|
|||
46 |
|
45 | |||
47 | if str2bool(self.config.get('force_https')): |
|
46 | if str2bool(self.config.get('force_https')): | |
48 | proto = 'https' |
|
47 | proto = 'https' | |
49 |
|
48 | else: | ||
|
49 | if 'HTTP_X_URL_SCHEME' in environ: | |||
|
50 | proto = environ.get('HTTP_X_URL_SCHEME') | |||
|
51 | elif 'HTTP_X_FORWARDED_SCHEME' in environ: | |||
|
52 | proto = environ.get('HTTP_X_FORWARDED_SCHEME') | |||
|
53 | elif 'HTTP_X_FORWARDED_PROTO' in environ: | |||
|
54 | proto = environ.get('HTTP_X_FORWARDED_PROTO') | |||
|
55 | else: | |||
|
56 | proto = 'http' | |||
50 | if proto == 'https': |
|
57 | if proto == 'https': | |
51 | environ['wsgi.url_scheme'] = proto |
|
58 | environ['wsgi.url_scheme'] = proto | |
52 | else: |
|
59 | else: | |
53 | environ['wsgi.url_scheme'] = 'http' |
|
60 | environ['wsgi.url_scheme'] = 'http' | |
|
61 | ||||
54 | return None |
|
62 | return None |
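With the change above the middleware honours ``force_https`` first and otherwise trusts whichever of ``X-Url-Scheme``, ``X-Forwarded-Scheme`` or ``X-Forwarded-Proto`` the proxy passes through, falling back to plain http. A minimal wiring sketch - the inner app and the config values are placeholders::

    from rhodecode.lib.middleware.https_fixup import HttpsFixup

    def app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        # behind a TLS-terminating proxy that sets X-Forwarded-Proto
        # this will be 'https', so generated URLs get the right scheme
        return [environ['wsgi.url_scheme']]

    wrapped = HttpsFixup(app, {'force_https': 'false'})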
@@ -1,248 +1,247 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | rhodecode.lib.middleware.simplegit |
|
3 | rhodecode.lib.middleware.simplegit | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | SimpleGit middleware for handling git protocol request (push/clone etc.) |
|
6 | SimpleGit middleware for handling git protocol request (push/clone etc.) | |
7 | It's implemented with basic auth function |
|
7 | It's implemented with basic auth function | |
8 |
|
8 | |||
9 | :created_on: Apr 28, 2010 |
|
9 | :created_on: Apr 28, 2010 | |
10 | :author: marcink |
|
10 | :author: marcink | |
11 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> | |
12 | :license: GPLv3, see COPYING for more details. |
|
12 | :license: GPLv3, see COPYING for more details. | |
13 | """ |
|
13 | """ | |
14 | # This program is free software: you can redistribute it and/or modify |
|
14 | # This program is free software: you can redistribute it and/or modify | |
15 | # it under the terms of the GNU General Public License as published by |
|
15 | # it under the terms of the GNU General Public License as published by | |
16 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | # the Free Software Foundation, either version 3 of the License, or | |
17 | # (at your option) any later version. |
|
17 | # (at your option) any later version. | |
18 | # |
|
18 | # | |
19 | # This program is distributed in the hope that it will be useful, |
|
19 | # This program is distributed in the hope that it will be useful, | |
20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
22 | # GNU General Public License for more details. |
|
22 | # GNU General Public License for more details. | |
23 | # |
|
23 | # | |
24 | # You should have received a copy of the GNU General Public License |
|
24 | # You should have received a copy of the GNU General Public License | |
25 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
26 |
|
26 | |||
27 | import os |
|
27 | import os | |
|
28 | import re | |||
28 | import logging |
|
29 | import logging | |
29 | import traceback |
|
30 | import traceback | |
30 |
|
31 | |||
31 | from dulwich import server as dulserver |
|
32 | from dulwich import server as dulserver | |
32 |
|
33 | |||
33 |
|
34 | |||
34 | class SimpleGitUploadPackHandler(dulserver.UploadPackHandler): |
|
35 | class SimpleGitUploadPackHandler(dulserver.UploadPackHandler): | |
35 |
|
36 | |||
36 | def handle(self): |
|
37 | def handle(self): | |
37 | write = lambda x: self.proto.write_sideband(1, x) |
|
38 | write = lambda x: self.proto.write_sideband(1, x) | |
38 |
|
39 | |||
39 | graph_walker = dulserver.ProtocolGraphWalker(self, |
|
40 | graph_walker = dulserver.ProtocolGraphWalker(self, | |
40 | self.repo.object_store, |
|
41 | self.repo.object_store, | |
41 | self.repo.get_peeled) |
|
42 | self.repo.get_peeled) | |
42 | objects_iter = self.repo.fetch_objects( |
|
43 | objects_iter = self.repo.fetch_objects( | |
43 | graph_walker.determine_wants, graph_walker, self.progress, |
|
44 | graph_walker.determine_wants, graph_walker, self.progress, | |
44 | get_tagged=self.get_tagged) |
|
45 | get_tagged=self.get_tagged) | |
45 |
|
46 | |||
46 | # Do they want any objects? |
|
47 | # Do they want any objects? | |
47 | if objects_iter is None or len(objects_iter) == 0: |
|
48 | if objects_iter is None or len(objects_iter) == 0: | |
48 | return |
|
49 | return | |
49 |
|
50 | |||
50 | self.progress("counting objects: %d, done.\n" % len(objects_iter)) |
|
51 | self.progress("counting objects: %d, done.\n" % len(objects_iter)) | |
51 | dulserver.write_pack_objects(dulserver.ProtocolFile(None, write), |
|
52 | dulserver.write_pack_objects(dulserver.ProtocolFile(None, write), | |
52 | objects_iter, len(objects_iter)) |
|
53 | objects_iter, len(objects_iter)) | |
53 | messages = [] |
|
54 | messages = [] | |
54 | messages.append('thank you for using rhodecode') |
|
55 | messages.append('thank you for using rhodecode') | |
55 |
|
56 | |||
56 | for msg in messages: |
|
57 | for msg in messages: | |
57 | self.progress(msg + "\n") |
|
58 | self.progress(msg + "\n") | |
58 | # we are done |
|
59 | # we are done | |
59 | self.proto.write("0000") |
|
60 | self.proto.write("0000") | |
60 |
|
61 | |||
61 | dulserver.DEFAULT_HANDLERS = { |
|
62 | dulserver.DEFAULT_HANDLERS = { | |
62 | 'git-upload-pack': SimpleGitUploadPackHandler, |
|
63 | 'git-upload-pack': SimpleGitUploadPackHandler, | |
63 | 'git-receive-pack': dulserver.ReceivePackHandler, |
|
64 | 'git-receive-pack': dulserver.ReceivePackHandler, | |
64 | } |
|
65 | } | |
65 |
|
66 | |||
66 | from dulwich.repo import Repo |
|
67 | from dulwich.repo import Repo | |
67 | from dulwich.web import HTTPGitApplication |
|
68 | from dulwich.web import HTTPGitApplication | |
68 |
|
69 | |||
69 | from paste.httpheaders import REMOTE_USER, AUTH_TYPE |
|
70 | from paste.httpheaders import REMOTE_USER, AUTH_TYPE | |
70 |
|
71 | |||
71 | from rhodecode.lib import safe_str |
|
72 | from rhodecode.lib import safe_str | |
72 | from rhodecode.lib.base import BaseVCSController |
|
73 | from rhodecode.lib.base import BaseVCSController | |
73 | from rhodecode.lib.auth import get_container_username |
|
74 | from rhodecode.lib.auth import get_container_username | |
74 | from rhodecode.lib.utils import is_valid_repo |
|
75 | from rhodecode.lib.utils import is_valid_repo | |
75 | from rhodecode.model.db import User |
|
76 | from rhodecode.model.db import User | |
76 |
|
77 | |||
77 | from webob.exc import HTTPNotFound, HTTPForbidden, HTTPInternalServerError |
|
78 | from webob.exc import HTTPNotFound, HTTPForbidden, HTTPInternalServerError | |
78 |
|
79 | |||
79 | log = logging.getLogger(__name__) |
|
80 | log = logging.getLogger(__name__) | |
80 |
|
81 | |||
81 |
|
82 | |||
82 | def is_git(environ): |
|
83 | GIT_PROTO_PAT = re.compile(r'^/(.+)/(info/refs|git-upload-pack|git-receive-pack)') | |
83 | """Returns True if request's target is git server. |
|
84 | ||
84 | ``HTTP_USER_AGENT`` would then have git client version given. |
|
|||
85 |
|
85 | |||
86 | :param environ: |
|
86 | def is_git(environ): | |
87 | """ |
|
87 | path_info = environ['PATH_INFO'] | |
88 | http_user_agent = environ.get('HTTP_USER_AGENT') |
|
88 | isgit_path = GIT_PROTO_PAT.match(path_info) | |
89 | if http_user_agent and http_user_agent.startswith('git'): |
|
89 | log.debug('is a git path %s pathinfo : %s' % (isgit_path, path_info)) | |
90 | return True |
|
90 | return isgit_path | |
91 | return False |
|
|||
92 |
|
91 | |||
93 |
|
92 | |||
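The path-based detection above no longer depends on the client's ``User-Agent`` header; any git smart-HTTP request is recognised from its URL alone. A quick check of the same pattern::

    import re

    GIT_PROTO_PAT = re.compile(r'^/(.+)/(info/refs|git-upload-pack|git-receive-pack)')

    print bool(GIT_PROTO_PAT.match('/group/myrepo/info/refs'))     # True
    print bool(GIT_PROTO_PAT.match('/myrepo/git-upload-pack'))     # True
    print bool(GIT_PROTO_PAT.match('/myrepo/changelog'))           # False, regular web UI
    print GIT_PROTO_PAT.match('/group/myrepo/info/refs').group(1)  # 'group/myrepo'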
94 | class SimpleGit(BaseVCSController): |
|
93 | class SimpleGit(BaseVCSController): | |
95 |
|
94 | |||
96 | def _handle_request(self, environ, start_response): |
|
95 | def _handle_request(self, environ, start_response): | |
|
96 | ||||
97 | if not is_git(environ): |
|
97 | if not is_git(environ): | |
98 | return self.application(environ, start_response) |
|
98 | return self.application(environ, start_response) | |
99 |
|
99 | |||
100 | proxy_key = 'HTTP_X_REAL_IP' |
|
100 | proxy_key = 'HTTP_X_REAL_IP' | |
101 | def_key = 'REMOTE_ADDR' |
|
101 | def_key = 'REMOTE_ADDR' | |
102 | ipaddr = environ.get(proxy_key, environ.get(def_key, '0.0.0.0')) |
|
102 | ipaddr = environ.get(proxy_key, environ.get(def_key, '0.0.0.0')) | |
103 | username = None |
|
103 | username = None | |
104 | # skip passing error to error controller |
|
104 | # skip passing error to error controller | |
105 | environ['pylons.status_code_redirect'] = True |
|
105 | environ['pylons.status_code_redirect'] = True | |
106 |
|
106 | |||
107 | #====================================================================== |
|
107 | #====================================================================== | |
108 | # EXTRACT REPOSITORY NAME FROM ENV |
|
108 | # EXTRACT REPOSITORY NAME FROM ENV | |
109 | #====================================================================== |
|
109 | #====================================================================== | |
110 | try: |
|
110 | try: | |
111 | repo_name = self.__get_repository(environ) |
|
111 | repo_name = self.__get_repository(environ) | |
112 | log.debug('Extracted repo name is %s' % repo_name) |
|
112 | log.debug('Extracted repo name is %s' % repo_name) | |
113 | except: |
|
113 | except: | |
114 | return HTTPInternalServerError()(environ, start_response) |
|
114 | return HTTPInternalServerError()(environ, start_response) | |
115 |
|
115 | |||
116 | #====================================================================== |
|
116 | #====================================================================== | |
117 | # GET ACTION PULL or PUSH |
|
117 | # GET ACTION PULL or PUSH | |
118 | #====================================================================== |
|
118 | #====================================================================== | |
119 | action = self.__get_action(environ) |
|
119 | action = self.__get_action(environ) | |
120 |
|
120 | |||
121 | #====================================================================== |
|
121 | #====================================================================== | |
122 | # CHECK ANONYMOUS PERMISSION |
|
122 | # CHECK ANONYMOUS PERMISSION | |
123 | #====================================================================== |
|
123 | #====================================================================== | |
124 | if action in ['pull', 'push']: |
|
124 | if action in ['pull', 'push']: | |
125 | anonymous_user = self.__get_user('default') |
|
125 | anonymous_user = self.__get_user('default') | |
126 | username = anonymous_user.username |
|
126 | username = anonymous_user.username | |
127 | anonymous_perm = self._check_permission(action, anonymous_user, |
|
127 | anonymous_perm = self._check_permission(action, anonymous_user, | |
128 | repo_name) |
|
128 | repo_name) | |
129 |
|
129 | |||
130 | if anonymous_perm is not True or anonymous_user.active is False: |
|
130 | if anonymous_perm is not True or anonymous_user.active is False: | |
131 | if anonymous_perm is not True: |
|
131 | if anonymous_perm is not True: | |
132 | log.debug('Not enough credentials to access this ' |
|
132 | log.debug('Not enough credentials to access this ' | |
133 | 'repository as anonymous user') |
|
133 | 'repository as anonymous user') | |
134 | if anonymous_user.active is False: |
|
134 | if anonymous_user.active is False: | |
135 | log.debug('Anonymous access is disabled, running ' |
|
135 | log.debug('Anonymous access is disabled, running ' | |
136 | 'authentication') |
|
136 | 'authentication') | |
137 | #============================================================== |
|
137 | #============================================================== | |
138 | # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE |
|
138 | # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE | |
139 | # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS |
|
139 | # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS | |
140 | #============================================================== |
|
140 | #============================================================== | |
141 |
|
141 | |||
142 | # Attempting to retrieve username from the container |
|
142 | # Attempting to retrieve username from the container | |
143 | username = get_container_username(environ, self.config) |
|
143 | username = get_container_username(environ, self.config) | |
144 |
|
144 | |||
145 | # If not authenticated by the container, running basic auth |
|
145 | # If not authenticated by the container, running basic auth | |
146 | if not username: |
|
146 | if not username: | |
147 | self.authenticate.realm = \ |
|
147 | self.authenticate.realm = \ | |
148 | safe_str(self.config['rhodecode_realm']) |
|
148 | safe_str(self.config['rhodecode_realm']) | |
149 | result = self.authenticate(environ) |
|
149 | result = self.authenticate(environ) | |
150 | if isinstance(result, str): |
|
150 | if isinstance(result, str): | |
151 | AUTH_TYPE.update(environ, 'basic') |
|
151 | AUTH_TYPE.update(environ, 'basic') | |
152 | REMOTE_USER.update(environ, result) |
|
152 | REMOTE_USER.update(environ, result) | |
153 | username = result |
|
153 | username = result | |
154 | else: |
|
154 | else: | |
155 | return result.wsgi_application(environ, start_response) |
|
155 | return result.wsgi_application(environ, start_response) | |
156 |
|
156 | |||
157 | #============================================================== |
|
157 | #============================================================== | |
158 | # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME |
|
158 | # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME | |
159 | #============================================================== |
|
159 | #============================================================== | |
160 | if action in ['pull', 'push']: |
|
160 | if action in ['pull', 'push']: | |
161 | try: |
|
161 | try: | |
162 | user = self.__get_user(username) |
|
162 | user = self.__get_user(username) | |
163 | if user is None or not user.active: |
|
163 | if user is None or not user.active: | |
164 | return HTTPForbidden()(environ, start_response) |
|
164 | return HTTPForbidden()(environ, start_response) | |
165 | username = user.username |
|
165 | username = user.username | |
166 | except: |
|
166 | except: | |
167 | log.error(traceback.format_exc()) |
|
167 | log.error(traceback.format_exc()) | |
168 | return HTTPInternalServerError()(environ, |
|
168 | return HTTPInternalServerError()(environ, | |
169 | start_response) |
|
169 | start_response) | |
170 |
|
170 | |||
171 | #check permissions for this repository |
|
171 | #check permissions for this repository | |
172 | perm = self._check_permission(action, user, |
|
172 | perm = self._check_permission(action, user, | |
173 | repo_name) |
|
173 | repo_name) | |
174 | if perm is not True: |
|
174 | if perm is not True: | |
175 | return HTTPForbidden()(environ, start_response) |
|
175 | return HTTPForbidden()(environ, start_response) | |
176 |
|
176 | |||
177 | #=================================================================== |
|
177 | #=================================================================== | |
178 | # GIT REQUEST HANDLING |
|
178 | # GIT REQUEST HANDLING | |
179 | #=================================================================== |
|
179 | #=================================================================== | |
180 |
|
180 | |||
181 | repo_path = safe_str(os.path.join(self.basepath, repo_name)) |
|
181 | repo_path = safe_str(os.path.join(self.basepath, repo_name)) | |
182 | log.debug('Repository path is %s' % repo_path) |
|
182 | log.debug('Repository path is %s' % repo_path) | |
183 |
|
183 | |||
184 | # quick check if that dir exists... |
|
184 | # quick check if that dir exists... | |
185 | if is_valid_repo(repo_name, self.basepath) is False: |
|
185 | if is_valid_repo(repo_name, self.basepath) is False: | |
186 | return HTTPNotFound()(environ, start_response) |
|
186 | return HTTPNotFound()(environ, start_response) | |
187 |
|
187 | |||
188 | try: |
|
188 | try: | |
189 | #invalidate cache on push |
|
189 | #invalidate cache on push | |
190 | if action == 'push': |
|
190 | if action == 'push': | |
191 | self._invalidate_cache(repo_name) |
|
191 | self._invalidate_cache(repo_name) | |
192 | log.info('%s action on GIT repo "%s"' % (action, repo_name)) |
|
192 | log.info('%s action on GIT repo "%s"' % (action, repo_name)) | |
193 | app = self.__make_app(repo_name, repo_path) |
|
193 | app = self.__make_app(repo_name, repo_path) | |
194 | return app(environ, start_response) |
|
194 | return app(environ, start_response) | |
195 | except Exception: |
|
195 | except Exception: | |
196 | log.error(traceback.format_exc()) |
|
196 | log.error(traceback.format_exc()) | |
197 | return HTTPInternalServerError()(environ, start_response) |
|
197 | return HTTPInternalServerError()(environ, start_response) | |
198 |
|
198 | |||
199 | def __make_app(self, repo_name, repo_path): |
|
199 | def __make_app(self, repo_name, repo_path): | |
200 | """ |
|
200 | """ | |
201 | Make a WSGI application using dulserver |
|
201 | Make a WSGI application using dulserver | |
202 |
|
202 | |||
203 | :param repo_name: name of the repository |
|
203 | :param repo_name: name of the repository | |
204 | :param repo_path: full path to the repository |
|
204 | :param repo_path: full path to the repository | |
205 | """ |
|
205 | """ | |
206 |
|
206 | |||
207 | _d = {'/' + repo_name: Repo(repo_path)} |
|
207 | _d = {'/' + repo_name: Repo(repo_path)} | |
208 | backend = dulserver.DictBackend(_d) |
|
208 | backend = dulserver.DictBackend(_d) | |
209 | gitserve = HTTPGitApplication(backend) |
|
209 | gitserve = HTTPGitApplication(backend) | |
210 |
|
210 | |||
211 | return gitserve |
|
211 | return gitserve | |
212 |
|
212 | |||
213 | def __get_repository(self, environ): |
|
213 | def __get_repository(self, environ): | |
214 | """ |
|
214 | """ | |
215 | Gets repository name out of the PATH_INFO header |
|
215 | Gets repository name out of the PATH_INFO header | |
216 |
|
216 | |||
217 | :param environ: environ where PATH_INFO is stored |
|
217 | :param environ: environ where PATH_INFO is stored | |
218 | """ |
|
218 | """ | |
219 | try: |
|
219 | try: | |
220 | environ['PATH_INFO'] = self._get_by_id(environ['PATH_INFO']) |
|
220 | environ['PATH_INFO'] = self._get_by_id(environ['PATH_INFO']) | |
221 | repo_name = |
|
221 | repo_name = GIT_PROTO_PAT.match(environ['PATH_INFO']).group(1) | |
222 | if repo_name.endswith('/'): |
|
|||
223 | repo_name = repo_name.rstrip('/') |
|
|||
224 | except: |
|
222 | except: | |
225 | log.error(traceback.format_exc()) |
|
223 | log.error(traceback.format_exc()) | |
226 | raise |
|
224 | raise | |
227 | repo_name = repo_name.split('/')[0] |
|
225 | ||
228 | return repo_name |
|
226 | return repo_name | |
229 |
|
227 | |||
230 | def __get_user(self, username): |
|
228 | def __get_user(self, username): | |
231 | return User.get_by_username(username) |
|
229 | return User.get_by_username(username) | |
232 |
|
230 | |||
233 | def __get_action(self, environ): |
|
231 | def __get_action(self, environ): | |
234 | """Maps git request commands into a pull or push command. |
|
232 | """Maps git request commands into a pull or push command. | |
235 |
|
233 | |||
236 | :param environ: |
|
234 | :param environ: | |
237 | """ |
|
235 | """ | |
238 | service = environ['QUERY_STRING'].split('=') |
|
236 | service = environ['QUERY_STRING'].split('=') | |
239 | if len(service) > 1: |
|
237 | if len(service) > 1: | |
240 | service_cmd = service[1] |
|
238 | service_cmd = service[1] | |
241 | mapping = { |
|
239 | mapping = { |
242 |
|
240 | 'git-receive-pack': 'push', |
|
241 | 'git-upload-pack': 'pull', |
243 | } |
|
242 | } |
244 |
|
243 | |||
245 | return mapping.get(service_cmd, |
|
244 | return mapping.get(service_cmd, | |
246 | service_cmd if service_cmd else 'other') |
|
245 | service_cmd if service_cmd else 'other') | |
247 | else: |
|
246 | else: | |
248 | return 'other' |
|
247 | return 'other' |
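``__get_action`` reduces the git smart-HTTP service name in the query string to a RhodeCode-level action, which is what the permission checks earlier in ``_handle_request`` operate on. Roughly, as a standalone sketch (not the method itself)::

    mapping = {'git-receive-pack': 'push',
               'git-upload-pack': 'pull'}

    def action_for(query_string):
        # e.g. QUERY_STRING == 'service=git-upload-pack' during clone/fetch
        service = query_string.split('=')
        if len(service) > 1:
            return mapping.get(service[1], service[1] or 'other')
        return 'other'

    print action_for('service=git-upload-pack')    # 'pull'
    print action_for('service=git-receive-pack')   # 'push'
    print action_for('')                           # 'other'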
@@ -1,616 +1,622 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | rhodecode.lib.utils |
|
3 | rhodecode.lib.utils | |
4 | ~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Utilities library for RhodeCode |
|
6 | Utilities library for RhodeCode | |
7 |
|
7 | |||
8 | :created_on: Apr 18, 2010 |
|
8 | :created_on: Apr 18, 2010 | |
9 | :author: marcink |
|
9 | :author: marcink | |
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> | |
11 | :license: GPLv3, see COPYING for more details. |
|
11 | :license: GPLv3, see COPYING for more details. | |
12 | """ |
|
12 | """ | |
13 | # This program is free software: you can redistribute it and/or modify |
|
13 | # This program is free software: you can redistribute it and/or modify | |
14 | # it under the terms of the GNU General Public License as published by |
|
14 | # it under the terms of the GNU General Public License as published by | |
15 | # the Free Software Foundation, either version 3 of the License, or |
|
15 | # the Free Software Foundation, either version 3 of the License, or | |
16 | # (at your option) any later version. |
|
16 | # (at your option) any later version. | |
17 | # |
|
17 | # | |
18 | # This program is distributed in the hope that it will be useful, |
|
18 | # This program is distributed in the hope that it will be useful, | |
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
21 | # GNU General Public License for more details. |
|
21 | # GNU General Public License for more details. | |
22 | # |
|
22 | # | |
23 | # You should have received a copy of the GNU General Public License |
|
23 | # You should have received a copy of the GNU General Public License | |
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
25 |
|
25 | |||
26 | import os |
|
26 | import os | |
27 | import logging |
|
27 | import logging | |
28 | import datetime |
|
28 | import datetime | |
29 | import traceback |
|
29 | import traceback | |
30 | import paste |
|
30 | import paste | |
31 | import beaker |
|
31 | import beaker | |
32 | import tarfile |
|
32 | import tarfile | |
33 | import shutil |
|
33 | import shutil | |
34 | from os.path import abspath |
|
34 | from os.path import abspath | |
35 | from os.path import dirname as dn, join as jn |
|
35 | from os.path import dirname as dn, join as jn | |
36 |
|
36 | |||
37 | from paste.script.command import Command, BadCommand |
|
37 | from paste.script.command import Command, BadCommand | |
38 |
|
38 | |||
39 | from mercurial import ui, config |
|
39 | from mercurial import ui, config | |
40 |
|
40 | |||
41 | from webhelpers.text import collapse, remove_formatting, strip_tags |
|
41 | from webhelpers.text import collapse, remove_formatting, strip_tags | |
42 |
|
42 | |||
43 | from rhodecode.lib.vcs import get_backend |
|
43 | from rhodecode.lib.vcs import get_backend | |
44 | from rhodecode.lib.vcs.backends.base import BaseChangeset |
|
44 | from rhodecode.lib.vcs.backends.base import BaseChangeset | |
45 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
45 | from rhodecode.lib.vcs.utils.lazy import LazyProperty | |
46 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
46 | from rhodecode.lib.vcs.utils.helpers import get_scm | |
47 | from rhodecode.lib.vcs.exceptions import VCSError |
|
47 | from rhodecode.lib.vcs.exceptions import VCSError | |
48 |
|
48 | |||
49 | from rhodecode.lib.caching_query import FromCache |
|
49 | from rhodecode.lib.caching_query import FromCache | |
50 |
|
50 | |||
51 | from rhodecode.model import meta |
|
51 | from rhodecode.model import meta | |
52 | from rhodecode.model.db import Repository, User, RhodeCodeUi, \ |
|
52 | from rhodecode.model.db import Repository, User, RhodeCodeUi, \ | |
53 | UserLog, RepoGroup, RhodeCodeSetting, UserRepoGroupToPerm |
|
53 | UserLog, RepoGroup, RhodeCodeSetting, UserRepoGroupToPerm | |
54 | from rhodecode.model.meta import Session |
|
54 | from rhodecode.model.meta import Session | |
55 | from rhodecode.model.repos_group import ReposGroupModel |
|
55 | from rhodecode.model.repos_group import ReposGroupModel | |
56 |
|
56 | |||
57 | log = logging.getLogger(__name__) |
|
57 | log = logging.getLogger(__name__) | |
58 |
|
58 | |||
59 |
|
59 | |||
60 | def recursive_replace(str_, replace=' '): |
|
60 | def recursive_replace(str_, replace=' '): | |
61 | """Recursive replace of given sign to just one instance |
|
61 | """Recursive replace of given sign to just one instance | |
62 |
|
62 | |||
63 | :param str_: given string |
|
63 | :param str_: given string | |
64 | :param replace: char to find and replace multiple instances |
|
64 | :param replace: char to find and replace multiple instances | |
65 |
|
65 | |||
66 | Examples:: |
|
66 | Examples:: | |
67 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') |
|
67 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') | |
68 | 'Mighty-Mighty-Bo-sstones' |
|
68 | 'Mighty-Mighty-Bo-sstones' | |
69 | """ |
|
69 | """ | |
70 |
|
70 | |||
71 | if str_.find(replace * 2) == -1: |
|
71 | if str_.find(replace * 2) == -1: | |
72 | return str_ |
|
72 | return str_ | |
73 | else: |
|
73 | else: | |
74 | str_ = str_.replace(replace * 2, replace) |
|
74 | str_ = str_.replace(replace * 2, replace) | |
75 | return recursive_replace(str_, replace) |
|
75 | return recursive_replace(str_, replace) | |
76 |
|
76 | |||
77 |
|
77 | |||
78 | def repo_name_slug(value): |
|
78 | def repo_name_slug(value): | |
79 | """Return slug of name of repository |
|
79 | """Return slug of name of repository | |
80 | This function is called on each creation/modification |
|
80 | This function is called on each creation/modification | |
81 | of repository to prevent bad names in repo |
|
81 | of repository to prevent bad names in repo | |
82 | """ |
|
82 | """ | |
83 |
|
83 | |||
84 | slug = remove_formatting(value) |
|
84 | slug = remove_formatting(value) | |
85 | slug = strip_tags(slug) |
|
85 | slug = strip_tags(slug) | |
86 |
|
86 | |||
87 | for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """: |
|
87 | for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """: | |
88 | slug = slug.replace(c, '-') |
|
88 | slug = slug.replace(c, '-') | |
89 | slug = recursive_replace(slug, '-') |
|
89 | slug = recursive_replace(slug, '-') | |
90 | slug = collapse(slug, '-') |
|
90 | slug = collapse(slug, '-') | |
91 | return slug |
|
91 | return slug | |
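The slug helper above strips formatting and tags, replaces each character from the listed set with '-', and collapses repeated dashes. Roughly, with illustrative inputs (not values taken from the test suite):

    >>> repo_name_slug('My Repo')
    'My-Repo'
    >>> repo_name_slug('nested/group')   # '/' is in the replaced character set
    'nested-group'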
92 |
|
92 | |||
93 |
|
93 | |||
94 | def get_repo_slug(request): |
|
94 | def get_repo_slug(request): | |
95 |
|
|
95 | _repo = request.environ['pylons.routes_dict'].get('repo_name') | |
|
96 | if _repo: | |||
|
97 | _repo = _repo.rstrip('/') | |||
|
98 | return _repo | |||
96 |
|
99 | |||
97 |
|
100 | |||
98 | def get_repos_group_slug(request): |
|
101 | def get_repos_group_slug(request): | |
99 |
|
|
102 | _group = request.environ['pylons.routes_dict'].get('group_name') | |
|
103 | if _group: | |||
|
104 | _group = _group.rstrip('/') | |||
|
105 | return _group | |||
100 |
|
106 | |||
101 |
|
107 | |||
102 | def action_logger(user, action, repo, ipaddr='', sa=None, commit=False): |
|
108 | def action_logger(user, action, repo, ipaddr='', sa=None, commit=False): | |
103 | """ |
|
109 | """ | |
104 | Action logger for various actions made by users |
|
110 | Action logger for various actions made by users | |
105 |
|
111 | |||
106 | :param user: user that made this action, can be a unique username string or |
|
112 | :param user: user that made this action, can be a unique username string or | |
107 | object containing user_id attribute |
|
113 | object containing user_id attribute | |
108 | :param action: action to log, should be one of predefined unique actions for |
|
114 | :param action: action to log, should be one of predefined unique actions for | |
109 | easy translations |
|
115 | easy translations | |
110 | :param repo: string name of repository or object containing repo_id, |
|
116 | :param repo: string name of repository or object containing repo_id, | |
111 | that action was made on |
|
117 | that action was made on | |
112 | :param ipaddr: optional ip address from which the action was made |
|
118 | :param ipaddr: optional ip address from which the action was made | |
113 | :param sa: optional sqlalchemy session |
|
119 | :param sa: optional sqlalchemy session | |
114 |
|
120 | |||
115 | """ |
|
121 | """ | |
116 |
|
122 | |||
117 | if not sa: |
|
123 | if not sa: | |
118 | sa = meta.Session |
|
124 | sa = meta.Session | |
119 |
|
125 | |||
120 | try: |
|
126 | try: | |
121 | if hasattr(user, 'user_id'): |
|
127 | if hasattr(user, 'user_id'): | |
122 | user_obj = user |
|
128 | user_obj = user | |
123 | elif isinstance(user, basestring): |
|
129 | elif isinstance(user, basestring): | |
124 | user_obj = User.get_by_username(user) |
|
130 | user_obj = User.get_by_username(user) | |
125 | else: |
|
131 | else: | |
126 | raise Exception('You have to provide user object or username') |
|
132 | raise Exception('You have to provide user object or username') | |
127 |
|
133 | |||
128 | if hasattr(repo, 'repo_id'): |
|
134 | if hasattr(repo, 'repo_id'): | |
129 | repo_obj = Repository.get(repo.repo_id) |
|
135 | repo_obj = Repository.get(repo.repo_id) | |
130 | repo_name = repo_obj.repo_name |
|
136 | repo_name = repo_obj.repo_name | |
131 | elif isinstance(repo, basestring): |
|
137 | elif isinstance(repo, basestring): | |
132 | repo_name = repo.lstrip('/') |
|
138 | repo_name = repo.lstrip('/') | |
133 | repo_obj = Repository.get_by_repo_name(repo_name) |
|
139 | repo_obj = Repository.get_by_repo_name(repo_name) | |
134 | else: |
|
140 | else: | |
135 | raise Exception('You have to provide repository to action logger') |
|
141 | raise Exception('You have to provide repository to action logger') | |
136 |
|
142 | |||
137 | user_log = UserLog() |
|
143 | user_log = UserLog() | |
138 | user_log.user_id = user_obj.user_id |
|
144 | user_log.user_id = user_obj.user_id | |
139 | user_log.action = action |
|
145 | user_log.action = action | |
140 |
|
146 | |||
141 | user_log.repository_id = repo_obj.repo_id |
|
147 | user_log.repository_id = repo_obj.repo_id | |
142 | user_log.repository_name = repo_name |
|
148 | user_log.repository_name = repo_name | |
143 |
|
149 | |||
144 | user_log.action_date = datetime.datetime.now() |
|
150 | user_log.action_date = datetime.datetime.now() | |
145 | user_log.user_ip = ipaddr |
|
151 | user_log.user_ip = ipaddr | |
146 | sa.add(user_log) |
|
152 | sa.add(user_log) | |
147 |
|
153 | |||
148 | log.info('Adding user %s, action %s on %s' % (user_obj, action, repo)) |
|
154 | log.info('Adding user %s, action %s on %s' % (user_obj, action, repo)) | |
149 | if commit: |
|
155 | if commit: | |
150 | sa.commit() |
|
156 | sa.commit() | |
151 | except: |
|
157 | except: | |
152 | log.error(traceback.format_exc()) |
|
158 | log.error(traceback.format_exc()) | |
153 | raise |
|
159 | raise | |
154 |
|
160 | |||
155 |
|
161 | |||
156 | def get_repos(path, recursive=False): |
|
162 | def get_repos(path, recursive=False): | |
157 | """ |
|
163 | """ | |
158 | Scans given path for repos and returns (name, (type, path)) tuples |
|
164 | Scans given path for repos and returns (name, (type, path)) tuples | |
159 |
|
165 | |||
160 | :param path: path to scan for repositories |
|
166 | :param path: path to scan for repositories | |
161 | :param recursive: recursive search and return names with subdirs in front |
|
167 | :param recursive: recursive search and return names with subdirs in front | |
162 | """ |
|
168 | """ | |
163 |
|
169 | |||
164 | # remove ending slash for better results |
|
170 | # remove ending slash for better results | |
165 | path = path.rstrip(os.sep) |
|
171 | path = path.rstrip(os.sep) | |
166 |
|
172 | |||
167 | def _get_repos(p): |
|
173 | def _get_repos(p): | |
168 | if not os.access(p, os.W_OK): |
|
174 | if not os.access(p, os.W_OK): | |
169 | return |
|
175 | return | |
170 | for dirpath in os.listdir(p): |
|
176 | for dirpath in os.listdir(p): | |
171 | if os.path.isfile(os.path.join(p, dirpath)): |
|
177 | if os.path.isfile(os.path.join(p, dirpath)): | |
172 | continue |
|
178 | continue | |
173 | cur_path = os.path.join(p, dirpath) |
|
179 | cur_path = os.path.join(p, dirpath) | |
174 | try: |
|
180 | try: | |
175 | scm_info = get_scm(cur_path) |
|
181 | scm_info = get_scm(cur_path) | |
176 | yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info |
|
182 | yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info | |
177 | except VCSError: |
|
183 | except VCSError: | |
178 | if not recursive: |
|
184 | if not recursive: | |
179 | continue |
|
185 | continue | |
180 | #check if this dir contains other repos for recursive scan |
|
186 | #check if this dir contains other repos for recursive scan | |
181 | rec_path = os.path.join(p, dirpath) |
|
187 | rec_path = os.path.join(p, dirpath) | |
182 | if os.path.isdir(rec_path): |
|
188 | if os.path.isdir(rec_path): | |
183 | for inner_scm in _get_repos(rec_path): |
|
189 | for inner_scm in _get_repos(rec_path): | |
184 | yield inner_scm |
|
190 | yield inner_scm | |
185 |
|
191 | |||
186 | return _get_repos(path) |
|
192 | return _get_repos(path) | |
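For orientation, get_repos yields pairs of the repository name relative to the scanned path and the (type, path) tuple returned by get_scm. A hedged usage sketch with a hypothetical repository root:

    # '/srv/repos' is an example path, not a RhodeCode default
    for name, (scm_type, scm_path) in get_repos('/srv/repos', recursive=True):
        print name, scm_type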
187 |
|
193 | |||
188 |
|
194 | |||
189 | def is_valid_repo(repo_name, base_path): |
|
195 | def is_valid_repo(repo_name, base_path): | |
190 | """ |
|
196 | """ | |
191 | Returns True if given path is a valid repository, False otherwise |
|
197 | Returns True if given path is a valid repository, False otherwise | |
192 | :param repo_name: |
|
198 | :param repo_name: | |
193 | :param base_path: |
|
199 | :param base_path: | |
194 |
|
200 | |||
195 | :return True: if given path is a valid repository |
|
201 | :return True: if given path is a valid repository | |
196 | """ |
|
202 | """ | |
197 | full_path = os.path.join(base_path, repo_name) |
|
203 | full_path = os.path.join(base_path, repo_name) | |
198 |
|
204 | |||
199 | try: |
|
205 | try: | |
200 | get_scm(full_path) |
|
206 | get_scm(full_path) | |
201 | return True |
|
207 | return True | |
202 | except VCSError: |
|
208 | except VCSError: | |
203 | return False |
|
209 | return False | |
204 |
|
210 | |||
205 |
|
211 | |||
206 | def is_valid_repos_group(repos_group_name, base_path): |
|
212 | def is_valid_repos_group(repos_group_name, base_path): | |
207 | """ |
|
213 | """ | |
208 | Returns True if given path is a repos group, False otherwise |
|
214 | Returns True if given path is a repos group, False otherwise | |
209 |
|
215 | |||
210 | :param repo_name: |
|
216 | :param repo_name: | |
211 | :param base_path: |
|
217 | :param base_path: | |
212 | """ |
|
218 | """ | |
213 | full_path = os.path.join(base_path, repos_group_name) |
|
219 | full_path = os.path.join(base_path, repos_group_name) | |
214 |
|
220 | |||
215 | # check if it's not a repo |
|
221 | # check if it's not a repo | |
216 | if is_valid_repo(repos_group_name, base_path): |
|
222 | if is_valid_repo(repos_group_name, base_path): | |
217 | return False |
|
223 | return False | |
218 |
|
224 | |||
219 | # check if it's a valid path |
|
225 | # check if it's a valid path | |
220 | if os.path.isdir(full_path): |
|
226 | if os.path.isdir(full_path): | |
221 | return True |
|
227 | return True | |
222 |
|
228 | |||
223 | return False |
|
229 | return False | |
224 |
|
230 | |||
225 |
|
231 | |||
226 | def ask_ok(prompt, retries=4, complaint='Yes or no, please!'): |
|
232 | def ask_ok(prompt, retries=4, complaint='Yes or no, please!'): | |
227 | while True: |
|
233 | while True: | |
228 | ok = raw_input(prompt) |
|
234 | ok = raw_input(prompt) | |
229 | if ok in ('y', 'ye', 'yes'): |
|
235 | if ok in ('y', 'ye', 'yes'): | |
230 | return True |
|
236 | return True | |
231 | if ok in ('n', 'no', 'nop', 'nope'): |
|
237 | if ok in ('n', 'no', 'nop', 'nope'): | |
232 | return False |
|
238 | return False | |
233 | retries = retries - 1 |
|
239 | retries = retries - 1 | |
234 | if retries < 0: |
|
240 | if retries < 0: | |
235 | raise IOError |
|
241 | raise IOError | |
236 | print complaint |
|
242 | print complaint | |
237 |
|
243 | |||
238 | #propagated from mercurial documentation |
|
244 | #propagated from mercurial documentation | |
239 | ui_sections = ['alias', 'auth', |
|
245 | ui_sections = ['alias', 'auth', | |
240 | 'decode/encode', 'defaults', |
|
246 | 'decode/encode', 'defaults', | |
241 | 'diff', 'email', |
|
247 | 'diff', 'email', | |
242 | 'extensions', 'format', |
|
248 | 'extensions', 'format', | |
243 | 'merge-patterns', 'merge-tools', |
|
249 | 'merge-patterns', 'merge-tools', | |
244 | 'hooks', 'http_proxy', |
|
250 | 'hooks', 'http_proxy', | |
245 | 'smtp', 'patch', |
|
251 | 'smtp', 'patch', | |
246 | 'paths', 'profiling', |
|
252 | 'paths', 'profiling', | |
247 | 'server', 'trusted', |
|
253 | 'server', 'trusted', | |
248 | 'ui', 'web', ] |
|
254 | 'ui', 'web', ] | |
249 |
|
255 | |||
250 |
|
256 | |||
251 | def make_ui(read_from='file', path=None, checkpaths=True): |
|
257 | def make_ui(read_from='file', path=None, checkpaths=True): | |
252 | """A function that will read python rc files or database |
|
258 | """A function that will read python rc files or database | |
253 | and make a mercurial ui object from the read options |
|
259 | and make a mercurial ui object from the read options | |
254 |
|
260 | |||
255 | :param path: path to mercurial config file |
|
261 | :param path: path to mercurial config file | |
256 | :param checkpaths: check the path |
|
262 | :param checkpaths: check the path | |
257 | :param read_from: read from 'file' or 'db' |
|
263 | :param read_from: read from 'file' or 'db' | |
258 | """ |
|
264 | """ | |
259 |
|
265 | |||
260 | baseui = ui.ui() |
|
266 | baseui = ui.ui() | |
261 |
|
267 | |||
262 | # clean the baseui object |
|
268 | # clean the baseui object | |
263 | baseui._ocfg = config.config() |
|
269 | baseui._ocfg = config.config() | |
264 | baseui._ucfg = config.config() |
|
270 | baseui._ucfg = config.config() | |
265 | baseui._tcfg = config.config() |
|
271 | baseui._tcfg = config.config() | |
266 |
|
272 | |||
267 | if read_from == 'file': |
|
273 | if read_from == 'file': | |
268 | if not os.path.isfile(path): |
|
274 | if not os.path.isfile(path): | |
269 | log.debug('hgrc file is not present at %s skipping...' % path) |
|
275 | log.debug('hgrc file is not present at %s skipping...' % path) | |
270 | return False |
|
276 | return False | |
271 | log.debug('reading hgrc from %s' % path) |
|
277 | log.debug('reading hgrc from %s' % path) | |
272 | cfg = config.config() |
|
278 | cfg = config.config() | |
273 | cfg.read(path) |
|
279 | cfg.read(path) | |
274 | for section in ui_sections: |
|
280 | for section in ui_sections: | |
275 | for k, v in cfg.items(section): |
|
281 | for k, v in cfg.items(section): | |
276 | log.debug('settings ui from file[%s]%s:%s' % (section, k, v)) |
|
282 | log.debug('settings ui from file[%s]%s:%s' % (section, k, v)) | |
277 | baseui.setconfig(section, k, v) |
|
283 | baseui.setconfig(section, k, v) | |
278 |
|
284 | |||
279 | elif read_from == 'db': |
|
285 | elif read_from == 'db': | |
280 | sa = meta.Session |
|
286 | sa = meta.Session | |
281 | ret = sa.query(RhodeCodeUi)\ |
|
287 | ret = sa.query(RhodeCodeUi)\ | |
282 | .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\ |
|
288 | .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\ | |
283 | .all() |
|
289 | .all() | |
284 |
|
290 | |||
285 | hg_ui = ret |
|
291 | hg_ui = ret | |
286 | for ui_ in hg_ui: |
|
292 | for ui_ in hg_ui: | |
287 | if ui_.ui_active: |
|
293 | if ui_.ui_active: | |
288 | log.debug('settings ui from db[%s]%s:%s', ui_.ui_section, |
|
294 | log.debug('settings ui from db[%s]%s:%s', ui_.ui_section, | |
289 | ui_.ui_key, ui_.ui_value) |
|
295 | ui_.ui_key, ui_.ui_value) | |
290 | baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value) |
|
296 | baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value) | |
291 |
|
297 | |||
292 | meta.Session.remove() |
|
298 | meta.Session.remove() | |
293 | return baseui |
|
299 | return baseui | |
294 |
|
300 | |||
295 |
|
301 | |||
296 | def set_rhodecode_config(config): |
|
302 | def set_rhodecode_config(config): | |
297 | """ |
|
303 | """ | |
298 | Updates pylons config with new settings from database |
|
304 | Updates pylons config with new settings from database | |
299 |
|
305 | |||
300 | :param config: |
|
306 | :param config: | |
301 | """ |
|
307 | """ | |
302 | hgsettings = RhodeCodeSetting.get_app_settings() |
|
308 | hgsettings = RhodeCodeSetting.get_app_settings() | |
303 |
|
309 | |||
304 | for k, v in hgsettings.items(): |
|
310 | for k, v in hgsettings.items(): | |
305 | config[k] = v |
|
311 | config[k] = v | |
306 |
|
312 | |||
307 |
|
313 | |||
308 | def invalidate_cache(cache_key, *args): |
|
314 | def invalidate_cache(cache_key, *args): | |
309 | """ |
|
315 | """ | |
310 | Puts cache invalidation task into db for |
|
316 | Puts cache invalidation task into db for | |
311 | further global cache invalidation |
|
317 | further global cache invalidation | |
312 | """ |
|
318 | """ | |
313 |
|
319 | |||
314 | from rhodecode.model.scm import ScmModel |
|
320 | from rhodecode.model.scm import ScmModel | |
315 |
|
321 | |||
316 | if cache_key.startswith('get_repo_cached_'): |
|
322 | if cache_key.startswith('get_repo_cached_'): | |
317 | name = cache_key.split('get_repo_cached_')[-1] |
|
323 | name = cache_key.split('get_repo_cached_')[-1] | |
318 | ScmModel().mark_for_invalidation(name) |
|
324 | ScmModel().mark_for_invalidation(name) | |
319 |
|
325 | |||
320 |
|
326 | |||
321 | class EmptyChangeset(BaseChangeset): |
|
327 | class EmptyChangeset(BaseChangeset): | |
322 | """ |
|
328 | """ | |
323 | A dummy empty changeset. It's possible to pass a hash when creating |
|
329 | A dummy empty changeset. It's possible to pass a hash when creating | |
324 | an EmptyChangeset |
|
330 | an EmptyChangeset | |
325 | """ |
|
331 | """ | |
326 |
|
332 | |||
327 | def __init__(self, cs='0' * 40, repo=None, requested_revision=None, |
|
333 | def __init__(self, cs='0' * 40, repo=None, requested_revision=None, | |
328 | alias=None): |
|
334 | alias=None): | |
329 | self._empty_cs = cs |
|
335 | self._empty_cs = cs | |
330 | self.revision = -1 |
|
336 | self.revision = -1 | |
331 | self.message = '' |
|
337 | self.message = '' | |
332 | self.author = '' |
|
338 | self.author = '' | |
333 | self.date = '' |
|
339 | self.date = '' | |
334 | self.repository = repo |
|
340 | self.repository = repo | |
335 | self.requested_revision = requested_revision |
|
341 | self.requested_revision = requested_revision | |
336 | self.alias = alias |
|
342 | self.alias = alias | |
337 |
|
343 | |||
338 | @LazyProperty |
|
344 | @LazyProperty | |
339 | def raw_id(self): |
|
345 | def raw_id(self): | |
340 | """ |
|
346 | """ | |
341 | Returns raw string identifying this changeset, useful for web |
|
347 | Returns raw string identifying this changeset, useful for web | |
342 | representation. |
|
348 | representation. | |
343 | """ |
|
349 | """ | |
344 |
|
350 | |||
345 | return self._empty_cs |
|
351 | return self._empty_cs | |
346 |
|
352 | |||
347 | @LazyProperty |
|
353 | @LazyProperty | |
348 | def branch(self): |
|
354 | def branch(self): | |
349 | return get_backend(self.alias).DEFAULT_BRANCH_NAME |
|
355 | return get_backend(self.alias).DEFAULT_BRANCH_NAME | |
350 |
|
356 | |||
351 | @LazyProperty |
|
357 | @LazyProperty | |
352 | def short_id(self): |
|
358 | def short_id(self): | |
353 | return self.raw_id[:12] |
|
359 | return self.raw_id[:12] | |
354 |
|
360 | |||
355 | def get_file_changeset(self, path): |
|
361 | def get_file_changeset(self, path): | |
356 | return self |
|
362 | return self | |
357 |
|
363 | |||
358 | def get_file_content(self, path): |
|
364 | def get_file_content(self, path): | |
359 | return u'' |
|
365 | return u'' | |
360 |
|
366 | |||
361 | def get_file_size(self, path): |
|
367 | def get_file_size(self, path): | |
362 | return 0 |
|
368 | return 0 | |
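A short usage note on EmptyChangeset: with the default cs of forty zeros its lazy properties resolve to the null revision, for example:

    >>> cs = EmptyChangeset(alias='hg')
    >>> cs.raw_id
    '0000000000000000000000000000000000000000'
    >>> cs.short_id
    '000000000000'
    >>> cs.revision
    -1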
363 |
|
369 | |||
364 |
|
370 | |||
365 | def map_groups(groups): |
|
371 | def map_groups(groups): | |
366 | """ |
|
372 | """ | |
367 | Checks for groups existence, and creates groups structures. |
|
373 | Checks for groups existence, and creates groups structures. | |
368 | It returns last group in structure |
|
374 | It returns last group in structure | |
369 |
|
375 | |||
370 | :param groups: list of groups structure |
|
376 | :param groups: list of groups structure | |
371 | """ |
|
377 | """ | |
372 | sa = meta.Session |
|
378 | sa = meta.Session | |
373 |
|
379 | |||
374 | parent = None |
|
380 | parent = None | |
375 | group = None |
|
381 | group = None | |
376 |
|
382 | |||
377 | # last element is repo in nested groups structure |
|
383 | # last element is repo in nested groups structure | |
378 | groups = groups[:-1] |
|
384 | groups = groups[:-1] | |
379 | rgm = ReposGroupModel(sa) |
|
385 | rgm = ReposGroupModel(sa) | |
380 | for lvl, group_name in enumerate(groups): |
|
386 | for lvl, group_name in enumerate(groups): | |
381 | group_name = '/'.join(groups[:lvl] + [group_name]) |
|
387 | group_name = '/'.join(groups[:lvl] + [group_name]) | |
382 | group = RepoGroup.get_by_group_name(group_name) |
|
388 | group = RepoGroup.get_by_group_name(group_name) | |
383 | desc = '%s group' % group_name |
|
389 | desc = '%s group' % group_name | |
384 |
|
390 | |||
385 | # # WTF that doesn't work !? |
|
391 | # # WTF that doesn't work !? | |
386 | # if group is None: |
|
392 | # if group is None: | |
387 | # group = rgm.create(group_name, desc, parent, just_db=True) |
|
393 | # group = rgm.create(group_name, desc, parent, just_db=True) | |
388 | # sa.commit() |
|
394 | # sa.commit() | |
389 |
|
395 | |||
390 | if group is None: |
|
396 | if group is None: | |
391 | log.debug('creating group level: %s group_name: %s' % (lvl, group_name)) |
|
397 | log.debug('creating group level: %s group_name: %s' % (lvl, group_name)) | |
392 | group = RepoGroup(group_name, parent) |
|
398 | group = RepoGroup(group_name, parent) | |
393 | group.group_description = desc |
|
399 | group.group_description = desc | |
394 | sa.add(group) |
|
400 | sa.add(group) | |
395 | rgm._create_default_perms(group) |
|
401 | rgm._create_default_perms(group) | |
396 | sa.commit() |
|
402 | sa.commit() | |
397 | parent = group |
|
403 | parent = group | |
398 | return group |
|
404 | return group | |
399 |
|
405 | |||
400 |
|
406 | |||
401 | def repo2db_mapper(initial_repo_list, remove_obsolete=False): |
|
407 | def repo2db_mapper(initial_repo_list, remove_obsolete=False): | |
402 | """ |
|
408 | """ | |
403 | maps all repos given in initial_repo_list, non-existing repositories |
|
409 | maps all repos given in initial_repo_list, non-existing repositories | |
404 | are created, if remove_obsolete is True it also checks for db entries |
|
410 | are created, if remove_obsolete is True it also checks for db entries | |
405 | that are not in initial_repo_list and removes them. |
|
411 | that are not in initial_repo_list and removes them. | |
406 |
|
412 | |||
407 | :param initial_repo_list: list of repositories found by scanning methods |
|
413 | :param initial_repo_list: list of repositories found by scanning methods | |
408 | :param remove_obsolete: check for obsolete entries in database |
|
414 | :param remove_obsolete: check for obsolete entries in database | |
409 | """ |
|
415 | """ | |
410 | from rhodecode.model.repo import RepoModel |
|
416 | from rhodecode.model.repo import RepoModel | |
411 | sa = meta.Session |
|
417 | sa = meta.Session | |
412 | rm = RepoModel() |
|
418 | rm = RepoModel() | |
413 | user = sa.query(User).filter(User.admin == True).first() |
|
419 | user = sa.query(User).filter(User.admin == True).first() | |
414 | if user is None: |
|
420 | if user is None: | |
415 | raise Exception('Missing administrative account !') |
|
421 | raise Exception('Missing administrative account !') | |
416 | added = [] |
|
422 | added = [] | |
417 |
|
423 | |||
418 | for name, repo in initial_repo_list.items(): |
|
424 | for name, repo in initial_repo_list.items(): | |
419 | group = map_groups(name.split(Repository.url_sep())) |
|
425 | group = map_groups(name.split(Repository.url_sep())) | |
420 | if not rm.get_by_repo_name(name, cache=False): |
|
426 | if not rm.get_by_repo_name(name, cache=False): | |
421 | log.info('repository %s not found creating default' % name) |
|
427 | log.info('repository %s not found creating default' % name) | |
422 | added.append(name) |
|
428 | added.append(name) | |
423 | form_data = { |
|
429 | form_data = { | |
424 | 'repo_name': name, |
|
430 | 'repo_name': name, | |
425 | 'repo_name_full': name, |
|
431 | 'repo_name_full': name, | |
426 | 'repo_type': repo.alias, |
|
432 | 'repo_type': repo.alias, | |
427 | 'description': repo.description \ |
|
433 | 'description': repo.description \ | |
428 | if repo.description != 'unknown' else '%s repository' % name, |
|
434 | if repo.description != 'unknown' else '%s repository' % name, | |
429 | 'private': False, |
|
435 | 'private': False, | |
430 | 'group_id': getattr(group, 'group_id', None) |
|
436 | 'group_id': getattr(group, 'group_id', None) | |
431 | } |
|
437 | } | |
432 | rm.create(form_data, user, just_db=True) |
|
438 | rm.create(form_data, user, just_db=True) | |
433 | sa.commit() |
|
439 | sa.commit() | |
434 | removed = [] |
|
440 | removed = [] | |
435 | if remove_obsolete: |
|
441 | if remove_obsolete: | |
436 | #remove from database those repositories that are not in the filesystem |
|
442 | #remove from database those repositories that are not in the filesystem | |
437 | for repo in sa.query(Repository).all(): |
|
443 | for repo in sa.query(Repository).all(): | |
438 | if repo.repo_name not in initial_repo_list.keys(): |
|
444 | if repo.repo_name not in initial_repo_list.keys(): | |
439 | removed.append(repo.repo_name) |
|
445 | removed.append(repo.repo_name) | |
440 | sa.delete(repo) |
|
446 | sa.delete(repo) | |
441 | sa.commit() |
|
447 | sa.commit() | |
442 |
|
448 | |||
443 | return added, removed |
|
449 | return added, removed | |
444 |
|
450 | |||
445 |
|
451 | |||
446 | # set cache regions for beaker so celery can utilise it |
|
452 | # set cache regions for beaker so celery can utilise it | |
447 | def add_cache(settings): |
|
453 | def add_cache(settings): | |
448 | cache_settings = {'regions': None} |
|
454 | cache_settings = {'regions': None} | |
449 | for key in settings.keys(): |
|
455 | for key in settings.keys(): | |
450 | for prefix in ['beaker.cache.', 'cache.']: |
|
456 | for prefix in ['beaker.cache.', 'cache.']: | |
451 | if key.startswith(prefix): |
|
457 | if key.startswith(prefix): | |
452 | name = key.split(prefix)[1].strip() |
|
458 | name = key.split(prefix)[1].strip() | |
453 | cache_settings[name] = settings[key].strip() |
|
459 | cache_settings[name] = settings[key].strip() | |
454 | if cache_settings['regions']: |
|
460 | if cache_settings['regions']: | |
455 | for region in cache_settings['regions'].split(','): |
|
461 | for region in cache_settings['regions'].split(','): | |
456 | region = region.strip() |
|
462 | region = region.strip() | |
457 | region_settings = {} |
|
463 | region_settings = {} | |
458 | for key, value in cache_settings.items(): |
|
464 | for key, value in cache_settings.items(): | |
459 | if key.startswith(region): |
|
465 | if key.startswith(region): | |
460 | region_settings[key.split('.')[1]] = value |
|
466 | region_settings[key.split('.')[1]] = value | |
461 | region_settings['expire'] = int(region_settings.get('expire', |
|
467 | region_settings['expire'] = int(region_settings.get('expire', | |
462 | 60)) |
|
468 | 60)) | |
463 | region_settings.setdefault('lock_dir', |
|
469 | region_settings.setdefault('lock_dir', | |
464 | cache_settings.get('lock_dir')) |
|
470 | cache_settings.get('lock_dir')) | |
465 | region_settings.setdefault('data_dir', |
|
471 | region_settings.setdefault('data_dir', | |
466 | cache_settings.get('data_dir')) |
|
472 | cache_settings.get('data_dir')) | |
467 |
|
473 | |||
468 | if 'type' not in region_settings: |
|
474 | if 'type' not in region_settings: | |
469 | region_settings['type'] = cache_settings.get('type', |
|
475 | region_settings['type'] = cache_settings.get('type', | |
470 | 'memory') |
|
476 | 'memory') | |
471 | beaker.cache.cache_regions[region] = region_settings |
|
477 | beaker.cache.cache_regions[region] = region_settings | |
472 |
|
478 | |||
473 |
|
479 | |||
474 | #============================================================================== |
|
480 | #============================================================================== | |
475 | # TEST FUNCTIONS AND CREATORS |
|
481 | # TEST FUNCTIONS AND CREATORS | |
476 | #============================================================================== |
|
482 | #============================================================================== | |
477 | def create_test_index(repo_location, config, full_index): |
|
483 | def create_test_index(repo_location, config, full_index): | |
478 | """ |
|
484 | """ | |
479 | Makes default test index |
|
485 | Makes default test index | |
480 |
|
486 | |||
481 | :param config: test config |
|
487 | :param config: test config | |
482 | :param full_index: |
|
488 | :param full_index: | |
483 | """ |
|
489 | """ | |
484 |
|
490 | |||
485 | from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon |
|
491 | from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon | |
486 | from rhodecode.lib.pidlock import DaemonLock, LockHeld |
|
492 | from rhodecode.lib.pidlock import DaemonLock, LockHeld | |
487 |
|
493 | |||
488 | repo_location = repo_location |
|
494 | repo_location = repo_location | |
489 |
|
495 | |||
490 | index_location = os.path.join(config['app_conf']['index_dir']) |
|
496 | index_location = os.path.join(config['app_conf']['index_dir']) | |
491 | if not os.path.exists(index_location): |
|
497 | if not os.path.exists(index_location): | |
492 | os.makedirs(index_location) |
|
498 | os.makedirs(index_location) | |
493 |
|
499 | |||
494 | try: |
|
500 | try: | |
495 | l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock')) |
|
501 | l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock')) | |
496 | WhooshIndexingDaemon(index_location=index_location, |
|
502 | WhooshIndexingDaemon(index_location=index_location, | |
497 | repo_location=repo_location)\ |
|
503 | repo_location=repo_location)\ | |
498 | .run(full_index=full_index) |
|
504 | .run(full_index=full_index) | |
499 | l.release() |
|
505 | l.release() | |
500 | except LockHeld: |
|
506 | except LockHeld: | |
501 | pass |
|
507 | pass | |
502 |
|
508 | |||
503 |
|
509 | |||
504 | def create_test_env(repos_test_path, config): |
|
510 | def create_test_env(repos_test_path, config): | |
505 | """ |
|
511 | """ | |
506 | Makes a fresh database and |
|
512 | Makes a fresh database and | |
507 | install test repository into tmp dir |
|
513 | install test repository into tmp dir | |
508 | """ |
|
514 | """ | |
509 | from rhodecode.lib.db_manage import DbManage |
|
515 | from rhodecode.lib.db_manage import DbManage | |
510 | from rhodecode.tests import HG_REPO, TESTS_TMP_PATH |
|
516 | from rhodecode.tests import HG_REPO, TESTS_TMP_PATH | |
511 |
|
517 | |||
512 | # PART ONE create db |
|
518 | # PART ONE create db | |
513 | dbconf = config['sqlalchemy.db1.url'] |
|
519 | dbconf = config['sqlalchemy.db1.url'] | |
514 | log.debug('making test db %s' % dbconf) |
|
520 | log.debug('making test db %s' % dbconf) | |
515 |
|
521 | |||
516 | # create test dir if it doesn't exist |
|
522 | # create test dir if it doesn't exist | |
517 | if not os.path.isdir(repos_test_path): |
|
523 | if not os.path.isdir(repos_test_path): | |
518 | log.debug('Creating testdir %s' % repos_test_path) |
|
524 | log.debug('Creating testdir %s' % repos_test_path) | |
519 | os.makedirs(repos_test_path) |
|
525 | os.makedirs(repos_test_path) | |
520 |
|
526 | |||
521 | dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'], |
|
527 | dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'], | |
522 | tests=True) |
|
528 | tests=True) | |
523 | dbmanage.create_tables(override=True) |
|
529 | dbmanage.create_tables(override=True) | |
524 | dbmanage.create_settings(dbmanage.config_prompt(repos_test_path)) |
|
530 | dbmanage.create_settings(dbmanage.config_prompt(repos_test_path)) | |
525 | dbmanage.create_default_user() |
|
531 | dbmanage.create_default_user() | |
526 | dbmanage.admin_prompt() |
|
532 | dbmanage.admin_prompt() | |
527 | dbmanage.create_permissions() |
|
533 | dbmanage.create_permissions() | |
528 | dbmanage.populate_default_permissions() |
|
534 | dbmanage.populate_default_permissions() | |
529 | Session.commit() |
|
535 | Session.commit() | |
530 | # PART TWO make test repo |
|
536 | # PART TWO make test repo | |
531 | log.debug('making test vcs repositories') |
|
537 | log.debug('making test vcs repositories') | |
532 |
|
538 | |||
533 | idx_path = config['app_conf']['index_dir'] |
|
539 | idx_path = config['app_conf']['index_dir'] | |
534 | data_path = config['app_conf']['cache_dir'] |
|
540 | data_path = config['app_conf']['cache_dir'] | |
535 |
|
541 | |||
536 | #clean index and data |
|
542 | #clean index and data | |
537 | if idx_path and os.path.exists(idx_path): |
|
543 | if idx_path and os.path.exists(idx_path): | |
538 | log.debug('remove %s' % idx_path) |
|
544 | log.debug('remove %s' % idx_path) | |
539 | shutil.rmtree(idx_path) |
|
545 | shutil.rmtree(idx_path) | |
540 |
|
546 | |||
541 | if data_path and os.path.exists(data_path): |
|
547 | if data_path and os.path.exists(data_path): | |
542 | log.debug('remove %s' % data_path) |
|
548 | log.debug('remove %s' % data_path) | |
543 | shutil.rmtree(data_path) |
|
549 | shutil.rmtree(data_path) | |
544 |
|
550 | |||
545 | #CREATE DEFAULT HG REPOSITORY |
|
551 | #CREATE DEFAULT HG REPOSITORY | |
546 | cur_dir = dn(dn(abspath(__file__))) |
|
552 | cur_dir = dn(dn(abspath(__file__))) | |
547 | tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz")) |
|
553 | tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz")) | |
548 | tar.extractall(jn(TESTS_TMP_PATH, HG_REPO)) |
|
554 | tar.extractall(jn(TESTS_TMP_PATH, HG_REPO)) | |
549 | tar.close() |
|
555 | tar.close() | |
550 |
|
556 | |||
551 |
|
557 | |||
552 | #============================================================================== |
|
558 | #============================================================================== | |
553 | # PASTER COMMANDS |
|
559 | # PASTER COMMANDS | |
554 | #============================================================================== |
|
560 | #============================================================================== | |
555 | class BasePasterCommand(Command): |
|
561 | class BasePasterCommand(Command): | |
556 | """ |
|
562 | """ | |
557 | Abstract Base Class for paster commands. |
|
563 | Abstract Base Class for paster commands. | |
558 |
|
564 | |||
559 | The celery commands are somewhat aggressive about loading |
|
565 | The celery commands are somewhat aggressive about loading | |
560 | celery.conf, and since our module sets the `CELERY_LOADER` |
|
566 | celery.conf, and since our module sets the `CELERY_LOADER` | |
561 | environment variable to our loader, we have to bootstrap a bit and |
|
567 | environment variable to our loader, we have to bootstrap a bit and | |
562 | make sure we've had a chance to load the pylons config off of the |
|
568 | make sure we've had a chance to load the pylons config off of the | |
563 | command line, otherwise everything fails. |
|
569 | command line, otherwise everything fails. | |
564 | """ |
|
570 | """ | |
565 | min_args = 1 |
|
571 | min_args = 1 | |
566 | min_args_error = "Please provide a paster config file as an argument." |
|
572 | min_args_error = "Please provide a paster config file as an argument." | |
567 | takes_config_file = 1 |
|
573 | takes_config_file = 1 | |
568 | requires_config_file = True |
|
574 | requires_config_file = True | |
569 |
|
575 | |||
570 | def notify_msg(self, msg, log=False): |
|
576 | def notify_msg(self, msg, log=False): | |
571 | """Make a notification to user, additionally if logger is passed |
|
577 | """Make a notification to user, additionally if logger is passed | |
572 | it logs this action using given logger |
|
578 | it logs this action using given logger | |
573 |
|
579 | |||
574 | :param msg: message that will be printed to user |
|
580 | :param msg: message that will be printed to user | |
575 | :param log: logging instance, to use to additionally log this message |
|
581 | :param log: logging instance, to use to additionally log this message | |
576 |
|
582 | |||
577 | """ |
|
583 | """ | |
578 | if log and isinstance(log, logging): |
|
584 | if log and isinstance(log, logging): | |
579 | log(msg) |
|
585 | log(msg) | |
580 |
|
586 | |||
581 | def run(self, args): |
|
587 | def run(self, args): | |
582 | """ |
|
588 | """ | |
583 | Overrides Command.run |
|
589 | Overrides Command.run | |
584 |
|
590 | |||
585 | Checks for a config file argument and loads it. |
|
591 | Checks for a config file argument and loads it. | |
586 | """ |
|
592 | """ | |
587 | if len(args) < self.min_args: |
|
593 | if len(args) < self.min_args: | |
588 | raise BadCommand( |
|
594 | raise BadCommand( | |
589 | self.min_args_error % {'min_args': self.min_args, |
|
595 | self.min_args_error % {'min_args': self.min_args, | |
590 | 'actual_args': len(args)}) |
|
596 | 'actual_args': len(args)}) | |
591 |
|
597 | |||
592 | # Decrement because we're going to lob off the first argument. |
|
598 | # Decrement because we're going to lob off the first argument. | |
593 | # @@ This is hacky |
|
599 | # @@ This is hacky | |
594 | self.min_args -= 1 |
|
600 | self.min_args -= 1 | |
595 | self.bootstrap_config(args[0]) |
|
601 | self.bootstrap_config(args[0]) | |
596 | self.update_parser() |
|
602 | self.update_parser() | |
597 | return super(BasePasterCommand, self).run(args[1:]) |
|
603 | return super(BasePasterCommand, self).run(args[1:]) | |
598 |
|
604 | |||
599 | def update_parser(self): |
|
605 | def update_parser(self): | |
600 | """ |
|
606 | """ | |
601 | Abstract method. Allows for the class's parser to be updated |
|
607 | Abstract method. Allows for the class's parser to be updated | |
602 | before the superclass's `run` method is called. Necessary to |
|
608 | before the superclass's `run` method is called. Necessary to | |
603 | allow options/arguments to be passed through to the underlying |
|
609 | allow options/arguments to be passed through to the underlying | |
604 | celery command. |
|
610 | celery command. | |
605 | """ |
|
611 | """ | |
606 | raise NotImplementedError("Abstract Method.") |
|
612 | raise NotImplementedError("Abstract Method.") | |
607 |
|
613 | |||
608 | def bootstrap_config(self, conf): |
|
614 | def bootstrap_config(self, conf): | |
609 | """ |
|
615 | """ | |
610 | Loads the pylons configuration. |
|
616 | Loads the pylons configuration. | |
611 | """ |
|
617 | """ | |
612 | from pylons import config as pylonsconfig |
|
618 | from pylons import config as pylonsconfig | |
613 |
|
619 | |||
614 | path_to_ini_file = os.path.realpath(conf) |
|
620 | path_to_ini_file = os.path.realpath(conf) | |
615 | conf = paste.deploy.appconfig('config:' + path_to_ini_file) |
|
621 | conf = paste.deploy.appconfig('config:' + path_to_ini_file) | |
616 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) |
|
622 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) |
@@ -1,1203 +1,1216 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | rhodecode.model.db |
|
3 | rhodecode.model.db | |
4 | ~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Database Models for RhodeCode |
|
6 | Database Models for RhodeCode | |
7 |
|
7 | |||
8 | :created_on: Apr 08, 2010 |
|
8 | :created_on: Apr 08, 2010 | |
9 | :author: marcink |
|
9 | :author: marcink | |
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> | |
11 | :license: GPLv3, see COPYING for more details. |
|
11 | :license: GPLv3, see COPYING for more details. | |
12 | """ |
|
12 | """ | |
13 | # This program is free software: you can redistribute it and/or modify |
|
13 | # This program is free software: you can redistribute it and/or modify | |
14 | # it under the terms of the GNU General Public License as published by |
|
14 | # it under the terms of the GNU General Public License as published by | |
15 | # the Free Software Foundation, either version 3 of the License, or |
|
15 | # the Free Software Foundation, either version 3 of the License, or | |
16 | # (at your option) any later version. |
|
16 | # (at your option) any later version. | |
17 | # |
|
17 | # | |
18 | # This program is distributed in the hope that it will be useful, |
|
18 | # This program is distributed in the hope that it will be useful, | |
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
21 | # GNU General Public License for more details. |
|
21 | # GNU General Public License for more details. | |
22 | # |
|
22 | # | |
23 | # You should have received a copy of the GNU General Public License |
|
23 | # You should have received a copy of the GNU General Public License | |
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
25 |
|
25 | |||
26 | import os |
|
26 | import os | |
27 | import logging |
|
27 | import logging | |
28 | import datetime |
|
28 | import datetime | |
29 | import traceback |
|
29 | import traceback | |
30 | from collections import defaultdict |
|
30 | from collections import defaultdict | |
31 |
|
31 | |||
32 | from sqlalchemy import * |
|
32 | from sqlalchemy import * | |
33 | from sqlalchemy.ext.hybrid import hybrid_property |
|
33 | from sqlalchemy.ext.hybrid import hybrid_property | |
34 | from sqlalchemy.orm import relationship, joinedload, class_mapper, validates |
|
34 | from sqlalchemy.orm import relationship, joinedload, class_mapper, validates | |
35 | from beaker.cache import cache_region, region_invalidate |
|
35 | from beaker.cache import cache_region, region_invalidate | |
36 |
|
36 | |||
37 | from rhodecode.lib.vcs import get_backend |
|
37 | from rhodecode.lib.vcs import get_backend | |
38 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
38 | from rhodecode.lib.vcs.utils.helpers import get_scm | |
39 | from rhodecode.lib.vcs.exceptions import VCSError |
|
39 | from rhodecode.lib.vcs.exceptions import VCSError | |
40 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
40 | from rhodecode.lib.vcs.utils.lazy import LazyProperty | |
41 |
|
41 | |||
42 | from rhodecode.lib import str2bool, safe_str, get_changeset_safe, safe_unicode |
|
42 | from rhodecode.lib import str2bool, safe_str, get_changeset_safe, safe_unicode | |
43 | from rhodecode.lib.compat import json |
|
43 | from rhodecode.lib.compat import json | |
44 | from rhodecode.lib.caching_query import FromCache |
|
44 | from rhodecode.lib.caching_query import FromCache | |
45 |
|
45 | |||
46 | from rhodecode.model.meta import Base, Session |
|
46 | from rhodecode.model.meta import Base, Session | |
|
47 | import hashlib | |||
47 |
|
48 | |||
48 |
|
49 | |||
49 | log = logging.getLogger(__name__) |
|
50 | log = logging.getLogger(__name__) | |
50 |
|
51 | |||
51 | #============================================================================== |
|
52 | #============================================================================== | |
52 | # BASE CLASSES |
|
53 | # BASE CLASSES | |
53 | #============================================================================== |
|
54 | #============================================================================== | |
54 |
|
55 | |||
|
56 | _hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest() | |||
|
57 | ||||
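The _hash_key helper added here reduces an arbitrary (possibly unicode) key to a fixed-length ascii md5 hex digest. A standalone sketch, with safe_str approximated by a plain utf-8 encode (the real helper lives in rhodecode.lib):

    import hashlib

    def safe_str(k):  # simplified stand-in for rhodecode.lib.safe_str
        return k.encode('utf-8') if isinstance(k, unicode) else k

    _hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()

    print _hash_key(u'grupa/zażółć')  # 32-character hex string, usable where ascii-only keys are required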
55 |
|
58 | |||
56 | class ModelSerializer(json.JSONEncoder): |
|
59 | class ModelSerializer(json.JSONEncoder): | |
57 | """ |
|
60 | """ | |
58 | Simple Serializer for JSON, |
|
61 | Simple Serializer for JSON, | |
59 |
|
62 | |||
60 | usage:: |
|
63 | usage:: | |
61 |
|
64 | |||
62 | to make object customized for serialization implement a __json__ |
|
65 | to make object customized for serialization implement a __json__ | |
63 | method that will return a dict for serialization into json |
|
66 | method that will return a dict for serialization into json | |
64 |
|
67 | |||
65 | example:: |
|
68 | example:: | |
66 |
|
69 | |||
67 | class Task(object): |
|
70 | class Task(object): | |
68 |
|
71 | |||
69 | def __init__(self, name, value): |
|
72 | def __init__(self, name, value): | |
70 | self.name = name |
|
73 | self.name = name | |
71 | self.value = value |
|
74 | self.value = value | |
72 |
|
75 | |||
73 | def __json__(self): |
|
76 | def __json__(self): | |
74 | return dict(name=self.name, |
|
77 | return dict(name=self.name, | |
75 | value=self.value) |
|
78 | value=self.value) | |
76 |
|
79 | |||
77 | """ |
|
80 | """ | |
78 |
|
81 | |||
79 | def default(self, obj): |
|
82 | def default(self, obj): | |
80 |
|
83 | |||
81 | if hasattr(obj, '__json__'): |
|
84 | if hasattr(obj, '__json__'): | |
82 | return obj.__json__() |
|
85 | return obj.__json__() | |
83 | else: |
|
86 | else: | |
84 | return json.JSONEncoder.default(self, obj) |
|
87 | return json.JSONEncoder.default(self, obj) | |
85 |
|
88 | |||
86 |
|
89 | |||
87 | class BaseModel(object): |
|
90 | class BaseModel(object): | |
88 | """ |
|
91 | """ | |
89 | Base Model for all classes |
|
92 | Base Model for all classes | |
90 | """ |
|
93 | """ | |
91 |
|
94 | |||
92 | @classmethod |
|
95 | @classmethod | |
93 | def _get_keys(cls): |
|
96 | def _get_keys(cls): | |
94 | """return column names for this model """ |
|
97 | """return column names for this model """ | |
95 | return class_mapper(cls).c.keys() |
|
98 | return class_mapper(cls).c.keys() | |
96 |
|
99 | |||
97 | def get_dict(self): |
|
100 | def get_dict(self): | |
98 | """ |
|
101 | """ | |
99 | return dict with keys and values corresponding |
|
102 | return dict with keys and values corresponding | |
100 | to this model data """ |
|
103 | to this model data """ | |
101 |
|
104 | |||
102 | d = {} |
|
105 | d = {} | |
103 | for k in self._get_keys(): |
|
106 | for k in self._get_keys(): | |
104 | d[k] = getattr(self, k) |
|
107 | d[k] = getattr(self, k) | |
105 |
|
108 | |||
106 | # also use __json__() if present to get additional fields |
|
109 | # also use __json__() if present to get additional fields | |
107 | for k, val in getattr(self, '__json__', lambda: {})().iteritems(): |
|
110 | for k, val in getattr(self, '__json__', lambda: {})().iteritems(): | |
108 | d[k] = val |
|
111 | d[k] = val | |
109 | return d |
|
112 | return d | |
110 |
|
113 | |||
111 | def get_appstruct(self): |
|
114 | def get_appstruct(self): | |
112 | """return list with keys and values tupples corresponding |
|
115 | """return list with keys and values tupples corresponding | |
113 | to this model data """ |
|
116 | to this model data """ | |
114 |
|
117 | |||
115 | l = [] |
|
118 | l = [] | |
116 | for k in self._get_keys(): |
|
119 | for k in self._get_keys(): | |
117 | l.append((k, getattr(self, k),)) |
|
120 | l.append((k, getattr(self, k),)) | |
118 | return l |
|
121 | return l | |
119 |
|
122 | |||
120 | def populate_obj(self, populate_dict): |
|
123 | def populate_obj(self, populate_dict): | |
121 | """populate model with data from given populate_dict""" |
|
124 | """populate model with data from given populate_dict""" | |
122 |
|
125 | |||
123 | for k in self._get_keys(): |
|
126 | for k in self._get_keys(): | |
124 | if k in populate_dict: |
|
127 | if k in populate_dict: | |
125 | setattr(self, k, populate_dict[k]) |
|
128 | setattr(self, k, populate_dict[k]) | |
126 |
|
129 | |||
127 | @classmethod |
|
130 | @classmethod | |
128 | def query(cls): |
|
131 | def query(cls): | |
129 | return Session.query(cls) |
|
132 | return Session.query(cls) | |
130 |
|
133 | |||
131 | @classmethod |
|
134 | @classmethod | |
132 | def get(cls, id_): |
|
135 | def get(cls, id_): | |
133 | if id_: |
|
136 | if id_: | |
134 | return cls.query().get(id_) |
|
137 | return cls.query().get(id_) | |
135 |
|
138 | |||
136 | @classmethod |
|
139 | @classmethod | |
137 | def getAll(cls): |
|
140 | def getAll(cls): | |
138 | return cls.query().all() |
|
141 | return cls.query().all() | |
139 |
|
142 | |||
140 | @classmethod |
|
143 | @classmethod | |
141 | def delete(cls, id_): |
|
144 | def delete(cls, id_): | |
142 | obj = cls.query().get(id_) |
|
145 | obj = cls.query().get(id_) | |
143 | Session.delete(obj) |
|
146 | Session.delete(obj) | |
144 |
|
147 | |||
145 |
|
148 | |||
146 | class RhodeCodeSetting(Base, BaseModel): |
|
149 | class RhodeCodeSetting(Base, BaseModel): | |
147 | __tablename__ = 'rhodecode_settings' |
|
150 | __tablename__ = 'rhodecode_settings' | |
148 | __table_args__ = ( |
|
151 | __table_args__ = ( | |
149 | UniqueConstraint('app_settings_name'), |
|
152 | UniqueConstraint('app_settings_name'), | |
150 | {'extend_existing': True} |
|
153 | {'extend_existing': True} | |
151 | ) |
|
154 | ) | |
152 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
155 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
153 | app_settings_name = Column("app_settings_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
156 | app_settings_name = Column("app_settings_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
154 | _app_settings_value = Column("app_settings_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
157 | _app_settings_value = Column("app_settings_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
155 |
|
158 | |||
156 | def __init__(self, k='', v=''): |
|
159 | def __init__(self, k='', v=''): | |
157 | self.app_settings_name = k |
|
160 | self.app_settings_name = k | |
158 | self.app_settings_value = v |
|
161 | self.app_settings_value = v | |
159 |
|
162 | |||
160 | @validates('_app_settings_value') |
|
163 | @validates('_app_settings_value') | |
161 | def validate_settings_value(self, key, val): |
|
164 | def validate_settings_value(self, key, val): | |
162 | assert type(val) == unicode |
|
165 | assert type(val) == unicode | |
163 | return val |
|
166 | return val | |
164 |
|
167 | |||
165 | @hybrid_property |
|
168 | @hybrid_property | |
166 | def app_settings_value(self): |
|
169 | def app_settings_value(self): | |
167 | v = self._app_settings_value |
|
170 | v = self._app_settings_value | |
168 | if self.app_settings_name == 'ldap_active': |
|
171 | if self.app_settings_name == 'ldap_active': | |
169 | v = str2bool(v) |
|
172 | v = str2bool(v) | |
170 | return v |
|
173 | return v | |
171 |
|
174 | |||
172 | @app_settings_value.setter |
|
175 | @app_settings_value.setter | |
173 | def app_settings_value(self, val): |
|
176 | def app_settings_value(self, val): | |
174 | """ |
|
177 | """ | |
175 | Setter that will always make sure we use unicode in app_settings_value |
|
178 | Setter that will always make sure we use unicode in app_settings_value | |
176 |
|
179 | |||
177 | :param val: |
|
180 | :param val: | |
178 | """ |
|
181 | """ | |
179 | self._app_settings_value = safe_unicode(val) |
|
182 | self._app_settings_value = safe_unicode(val) | |
180 |
|
183 | |||
181 | def __repr__(self): |
|
184 | def __repr__(self): | |
182 | return "<%s('%s:%s')>" % ( |
|
185 | return "<%s('%s:%s')>" % ( | |
183 | self.__class__.__name__, |
|
186 | self.__class__.__name__, | |
184 | self.app_settings_name, self.app_settings_value |
|
187 | self.app_settings_name, self.app_settings_value | |
185 | ) |
|
188 | ) | |
186 |
|
189 | |||
187 | @classmethod |
|
190 | @classmethod | |
188 | def get_by_name(cls, ldap_key): |
|
191 | def get_by_name(cls, ldap_key): | |
189 | return cls.query()\ |
|
192 | return cls.query()\ | |
190 | .filter(cls.app_settings_name == ldap_key).scalar() |
|
193 | .filter(cls.app_settings_name == ldap_key).scalar() | |
191 |
|
194 | |||
192 | @classmethod |
|
195 | @classmethod | |
193 | def get_app_settings(cls, cache=False): |
|
196 | def get_app_settings(cls, cache=False): | |
194 |
|
197 | |||
195 | ret = cls.query() |
|
198 | ret = cls.query() | |
196 |
|
199 | |||
197 | if cache: |
|
200 | if cache: | |
198 | ret = ret.options(FromCache("sql_cache_short", "get_hg_settings")) |
|
201 | ret = ret.options(FromCache("sql_cache_short", "get_hg_settings")) | |
199 |
|
202 | |||
200 | if not ret: |
|
203 | if not ret: | |
201 | raise Exception('Could not get application settings !') |
|
204 | raise Exception('Could not get application settings !') | |
202 | settings = {} |
|
205 | settings = {} | |
203 | for each in ret: |
|
206 | for each in ret: | |
204 | settings['rhodecode_' + each.app_settings_name] = \ |
|
207 | settings['rhodecode_' + each.app_settings_name] = \ | |
205 | each.app_settings_value |
|
208 | each.app_settings_value | |
206 |
|
209 | |||
207 | return settings |
|
210 | return settings | |
208 |
|
211 | |||
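A minimal usage sketch of get_app_settings above (editor's illustration, not part of the changeset); it only restates the prefixing logic visible in the loop and assumes a configured SQLAlchemy session::

    # every stored row comes back under a 'rhodecode_' + name key,
    # e.g. a row named 'title' appears as 'rhodecode_title'
    settings = RhodeCodeSetting.get_app_settings(cache=True)
    title = settings.get('rhodecode_title')  # value of the 'title' row, or None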
209 | @classmethod |
|
212 | @classmethod | |
210 | def get_ldap_settings(cls, cache=False): |
|
213 | def get_ldap_settings(cls, cache=False): | |
211 | ret = cls.query()\ |
|
214 | ret = cls.query()\ | |
212 | .filter(cls.app_settings_name.startswith('ldap_')).all() |
|
215 | .filter(cls.app_settings_name.startswith('ldap_')).all() | |
213 | fd = {} |
|
216 | fd = {} | |
214 | for row in ret: |
|
217 | for row in ret: | |
215 | fd.update({row.app_settings_name:row.app_settings_value}) |
|
218 | fd.update({row.app_settings_name:row.app_settings_value}) | |
216 |
|
219 | |||
217 | return fd |
|
220 | return fd | |
218 |
|
221 | |||
219 |
|
222 | |||
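A short sketch of the app_settings_value hybrid property (editor's illustration, not part of the changeset), assuming the safe_unicode and str2bool helpers behave as their names suggest::

    s = RhodeCodeSetting('ldap_active', 'true')
    assert isinstance(s._app_settings_value, unicode)   # safe_unicode applied by the setter
    assert s.app_settings_value is True                 # str2bool applied on read for 'ldap_active'
    assert RhodeCodeSetting('title', 'RhodeCode').app_settings_value == u'RhodeCode'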
220 | class RhodeCodeUi(Base, BaseModel): |
|
223 | class RhodeCodeUi(Base, BaseModel): | |
221 | __tablename__ = 'rhodecode_ui' |
|
224 | __tablename__ = 'rhodecode_ui' | |
222 | __table_args__ = ( |
|
225 | __table_args__ = ( | |
223 | UniqueConstraint('ui_key'), |
|
226 | UniqueConstraint('ui_key'), | |
224 | {'extend_existing': True} |
|
227 | {'extend_existing': True} | |
225 | ) |
|
228 | ) | |
226 |
|
229 | |||
227 | HOOK_UPDATE = 'changegroup.update' |
|
230 | HOOK_UPDATE = 'changegroup.update' | |
228 | HOOK_REPO_SIZE = 'changegroup.repo_size' |
|
231 | HOOK_REPO_SIZE = 'changegroup.repo_size' | |
229 | HOOK_PUSH = 'pretxnchangegroup.push_logger' |
|
232 | HOOK_PUSH = 'pretxnchangegroup.push_logger' | |
230 | HOOK_PULL = 'preoutgoing.pull_logger' |
|
233 | HOOK_PULL = 'preoutgoing.pull_logger' | |
231 |
|
234 | |||
232 | ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
235 | ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
233 | ui_section = Column("ui_section", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
236 | ui_section = Column("ui_section", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
234 | ui_key = Column("ui_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
237 | ui_key = Column("ui_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
235 | ui_value = Column("ui_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
238 | ui_value = Column("ui_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
236 | ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True) |
|
239 | ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True) | |
237 |
|
240 | |||
238 | @classmethod |
|
241 | @classmethod | |
239 | def get_by_key(cls, key): |
|
242 | def get_by_key(cls, key): | |
240 | return cls.query().filter(cls.ui_key == key) |
|
243 | return cls.query().filter(cls.ui_key == key) | |
241 |
|
244 | |||
242 | @classmethod |
|
245 | @classmethod | |
243 | def get_builtin_hooks(cls): |
|
246 | def get_builtin_hooks(cls): | |
244 | q = cls.query() |
|
247 | q = cls.query() | |
245 | q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, |
|
248 | q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, | |
246 | cls.HOOK_REPO_SIZE, |
|
249 | cls.HOOK_REPO_SIZE, | |
247 | cls.HOOK_PUSH, cls.HOOK_PULL])) |
|
250 | cls.HOOK_PUSH, cls.HOOK_PULL])) | |
248 | return q.all() |
|
251 | return q.all() | |
249 |
|
252 | |||
250 | @classmethod |
|
253 | @classmethod | |
251 | def get_custom_hooks(cls): |
|
254 | def get_custom_hooks(cls): | |
252 | q = cls.query() |
|
255 | q = cls.query() | |
253 | q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, |
|
256 | q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, | |
254 | cls.HOOK_REPO_SIZE, |
|
257 | cls.HOOK_REPO_SIZE, | |
255 | cls.HOOK_PUSH, cls.HOOK_PULL])) |
|
258 | cls.HOOK_PUSH, cls.HOOK_PULL])) | |
256 | q = q.filter(cls.ui_section == 'hooks') |
|
259 | q = q.filter(cls.ui_section == 'hooks') | |
257 | return q.all() |
|
260 | return q.all() | |
258 |
|
261 | |||
259 | @classmethod |
|
262 | @classmethod | |
260 | def create_or_update_hook(cls, key, val): |
|
263 | def create_or_update_hook(cls, key, val): | |
261 | new_ui = cls.get_by_key(key).scalar() or cls() |
|
264 | new_ui = cls.get_by_key(key).scalar() or cls() | |
262 | new_ui.ui_section = 'hooks' |
|
265 | new_ui.ui_section = 'hooks' | |
263 | new_ui.ui_active = True |
|
266 | new_ui.ui_active = True | |
264 | new_ui.ui_key = key |
|
267 | new_ui.ui_key = key | |
265 | new_ui.ui_value = val |
|
268 | new_ui.ui_value = val | |
266 |
|
269 | |||
267 | Session.add(new_ui) |
|
270 | Session.add(new_ui) | |
268 |
|
271 | |||
269 |
|
272 | |||
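A hedged usage sketch for the hook helpers above (editor's illustration, not part of the changeset); the hook value and the explicit commit are assumptions of this example, not something the model enforces::

    # upsert a row in the 'hooks' ui section; create_or_update_hook only
    # add()s it to the session, so the caller is expected to commit
    RhodeCodeUi.create_or_update_hook('changegroup.update', 'hg update >&2')
    Session.commit()
    builtin = RhodeCodeUi.get_builtin_hooks()   # rows for the four HOOK_* keys
    custom = RhodeCodeUi.get_custom_hooks()     # any other rows in the 'hooks' section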
270 | class User(Base, BaseModel): |
|
273 | class User(Base, BaseModel): | |
271 | __tablename__ = 'users' |
|
274 | __tablename__ = 'users' | |
272 | __table_args__ = ( |
|
275 | __table_args__ = ( | |
273 | UniqueConstraint('username'), UniqueConstraint('email'), |
|
276 | UniqueConstraint('username'), UniqueConstraint('email'), | |
274 | {'extend_existing': True} |
|
277 | {'extend_existing': True} | |
275 | ) |
|
278 | ) | |
276 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
279 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
277 | username = Column("username", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
280 | username = Column("username", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
278 | password = Column("password", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
281 | password = Column("password", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
279 | active = Column("active", Boolean(), nullable=True, unique=None, default=None) |
|
282 | active = Column("active", Boolean(), nullable=True, unique=None, default=None) | |
280 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) |
|
283 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) | |
281 | name = Column("name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
284 | name = Column("name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
282 | lastname = Column("lastname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
285 | lastname = Column("lastname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
283 | _email = Column("email", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
286 | _email = Column("email", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
284 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
287 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) | |
285 | ldap_dn = Column("ldap_dn", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
288 | ldap_dn = Column("ldap_dn", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
286 | api_key = Column("api_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
289 | api_key = Column("api_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
287 |
|
290 | |||
288 | user_log = relationship('UserLog', cascade='all') |
|
291 | user_log = relationship('UserLog', cascade='all') | |
289 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') |
|
292 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') | |
290 |
|
293 | |||
291 | repositories = relationship('Repository') |
|
294 | repositories = relationship('Repository') | |
292 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') |
|
295 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') | |
293 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') |
|
296 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') | |
294 |
|
297 | |||
295 | group_member = relationship('UsersGroupMember', cascade='all') |
|
298 | group_member = relationship('UsersGroupMember', cascade='all') | |
296 |
|
299 | |||
297 | notifications = relationship('UserNotification',) |
|
300 | notifications = relationship('UserNotification',) | |
298 |
|
301 | |||
299 | @hybrid_property |
|
302 | @hybrid_property | |
300 | def email(self): |
|
303 | def email(self): | |
301 | return self._email |
|
304 | return self._email | |
302 |
|
305 | |||
303 | @email.setter |
|
306 | @email.setter | |
304 | def email(self, val): |
|
307 | def email(self, val): | |
305 | self._email = val.lower() if val else None |
|
308 | self._email = val.lower() if val else None | |
306 |
|
309 | |||
307 | @property |
|
310 | @property | |
308 | def full_name(self): |
|
311 | def full_name(self): | |
309 | return '%s %s' % (self.name, self.lastname) |
|
312 | return '%s %s' % (self.name, self.lastname) | |
310 |
|
313 | |||
311 | @property |
|
314 | @property | |
312 | def full_name_or_username(self): |
|
315 | def full_name_or_username(self): | |
313 | return ('%s %s' % (self.name, self.lastname) |
|
316 | return ('%s %s' % (self.name, self.lastname) | |
314 | if (self.name and self.lastname) else self.username) |
|
317 | if (self.name and self.lastname) else self.username) | |
315 |
|
318 | |||
316 | @property |
|
319 | @property | |
317 | def full_contact(self): |
|
320 | def full_contact(self): | |
318 | return '%s %s <%s>' % (self.name, self.lastname, self.email) |
|
321 | return '%s %s <%s>' % (self.name, self.lastname, self.email) | |
319 |
|
322 | |||
320 | @property |
|
323 | @property | |
321 | def short_contact(self): |
|
324 | def short_contact(self): | |
322 | return '%s %s' % (self.name, self.lastname) |
|
325 | return '%s %s' % (self.name, self.lastname) | |
323 |
|
326 | |||
324 | @property |
|
327 | @property | |
325 | def is_admin(self): |
|
328 | def is_admin(self): | |
326 | return self.admin |
|
329 | return self.admin | |
327 |
|
330 | |||
328 | def __repr__(self): |
|
331 | def __repr__(self): | |
329 | return "<%s('id:%s:%s')>" % (self.__class__.__name__, |
|
332 | return "<%s('id:%s:%s')>" % (self.__class__.__name__, | |
330 | self.user_id, self.username) |
|
333 | self.user_id, self.username) | |
331 |
|
334 | |||
332 | @classmethod |
|
335 | @classmethod | |
333 | def get_by_username(cls, username, case_insensitive=False, cache=False): |
|
336 | def get_by_username(cls, username, case_insensitive=False, cache=False): | |
334 | if case_insensitive: |
|
337 | if case_insensitive: | |
335 | q = cls.query().filter(cls.username.ilike(username)) |
|
338 | q = cls.query().filter(cls.username.ilike(username)) | |
336 | else: |
|
339 | else: | |
337 | q = cls.query().filter(cls.username == username) |
|
340 | q = cls.query().filter(cls.username == username) | |
338 |
|
341 | |||
339 | if cache: |
|
342 | if cache: | |
340 | q = q.options(FromCache( |
|
343 | q = q.options(FromCache( | |
341 | "get_user_%s" % username)) |
|
344 | "sql_cache_short", | |
|
345 | "get_user_%s" % _hash_key(username) | |||
|
346 | ) | |||
|
347 | ) | |||
342 | return q.scalar() |
|
348 | return q.scalar() | |
343 |
|
349 | |||
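A brief sketch of the cached lookup after this change (editor's illustration, not part of the changeset); _hash_key is the module helper used in the new lines above, presumably so unusual or non-ascii usernames still yield safe beaker cache keys::

    # cached, case-sensitive lookup; the entry lives in the
    # "sql_cache_short" region under "get_user_<hashed username>"
    user = User.get_by_username(u'marcin', cache=True)
    # case-insensitive variant (uses ilike, as shown above)
    user = User.get_by_username(u'MARCIN', case_insensitive=True)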
344 | @classmethod |
|
350 | @classmethod | |
345 | def get_by_api_key(cls, api_key, cache=False): |
|
351 | def get_by_api_key(cls, api_key, cache=False): | |
346 | q = cls.query().filter(cls.api_key == api_key) |
|
352 | q = cls.query().filter(cls.api_key == api_key) | |
347 |
|
353 | |||
348 | if cache: |
|
354 | if cache: | |
349 | q = q.options(FromCache("sql_cache_short", |
|
355 | q = q.options(FromCache("sql_cache_short", | |
350 | "get_api_key_%s" % api_key)) |
|
356 | "get_api_key_%s" % api_key)) | |
351 | return q.scalar() |
|
357 | return q.scalar() | |
352 |
|
358 | |||
353 | @classmethod |
|
359 | @classmethod | |
354 | def get_by_email(cls, email, case_insensitive=False, cache=False): |
|
360 | def get_by_email(cls, email, case_insensitive=False, cache=False): | |
355 | if case_insensitive: |
|
361 | if case_insensitive: | |
356 | q = cls.query().filter(cls.email.ilike(email)) |
|
362 | q = cls.query().filter(cls.email.ilike(email)) | |
357 | else: |
|
363 | else: | |
358 | q = cls.query().filter(cls.email == email) |
|
364 | q = cls.query().filter(cls.email == email) | |
359 |
|
365 | |||
360 | if cache: |
|
366 | if cache: | |
361 | q = q.options(FromCache("sql_cache_short", |
|
367 | q = q.options(FromCache("sql_cache_short", | |
362 | "get_api_key_%s" % email)) |
|
368 | "get_api_key_%s" % email)) | |
363 | return q.scalar() |
|
369 | return q.scalar() | |
364 |
|
370 | |||
365 | def update_lastlogin(self): |
|
371 | def update_lastlogin(self): | |
366 | """Update user lastlogin""" |
|
372 | """Update user lastlogin""" | |
367 | self.last_login = datetime.datetime.now() |
|
373 | self.last_login = datetime.datetime.now() | |
368 | Session.add(self) |
|
374 | Session.add(self) | |
369 | log.debug('updated user %s lastlogin' % self.username) |
|
375 | log.debug('updated user %s lastlogin' % self.username) | |
370 |
|
376 | |||
371 | def __json__(self): |
|
377 | def __json__(self): | |
372 | return dict( |
|
378 | return dict( | |
373 | email=self.email, |
|
379 | email=self.email, | |
374 | full_name=self.full_name, |
|
380 | full_name=self.full_name, | |
375 | full_name_or_username=self.full_name_or_username, |
|
381 | full_name_or_username=self.full_name_or_username, | |
376 | short_contact=self.short_contact, |
|
382 | short_contact=self.short_contact, | |
377 | full_contact=self.full_contact |
|
383 | full_contact=self.full_contact | |
378 | ) |
|
384 | ) | |
379 |
|
385 | |||
380 |
|
386 | |||
381 | class UserLog(Base, BaseModel): |
|
387 | class UserLog(Base, BaseModel): | |
382 | __tablename__ = 'user_logs' |
|
388 | __tablename__ = 'user_logs' | |
383 | __table_args__ = {'extend_existing': True} |
|
389 | __table_args__ = {'extend_existing': True} | |
384 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
390 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
385 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
391 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
386 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True) |
|
392 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True) | |
387 | repository_name = Column("repository_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
393 | repository_name = Column("repository_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
388 | user_ip = Column("user_ip", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
394 | user_ip = Column("user_ip", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
389 | action = Column("action", UnicodeText(length=1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
395 | action = Column("action", UnicodeText(length=1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
390 | action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
396 | action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) | |
391 |
|
397 | |||
392 | @property |
|
398 | @property | |
393 | def action_as_day(self): |
|
399 | def action_as_day(self): | |
394 | return datetime.date(*self.action_date.timetuple()[:3]) |
|
400 | return datetime.date(*self.action_date.timetuple()[:3]) | |
395 |
|
401 | |||
396 | user = relationship('User') |
|
402 | user = relationship('User') | |
397 | repository = relationship('Repository',cascade='') |
|
403 | repository = relationship('Repository', cascade='') | |
398 |
|
404 | |||
399 |
|
405 | |||
400 | class UsersGroup(Base, BaseModel): |
|
406 | class UsersGroup(Base, BaseModel): | |
401 | __tablename__ = 'users_groups' |
|
407 | __tablename__ = 'users_groups' | |
402 | __table_args__ = {'extend_existing': True} |
|
408 | __table_args__ = {'extend_existing': True} | |
403 |
|
409 | |||
404 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
410 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
405 | users_group_name = Column("users_group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None) |
|
411 | users_group_name = Column("users_group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None) | |
406 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) |
|
412 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) | |
407 |
|
413 | |||
408 | members = relationship('UsersGroupMember', cascade="all, delete, delete-orphan", lazy="joined") |
|
414 | members = relationship('UsersGroupMember', cascade="all, delete, delete-orphan", lazy="joined") | |
|
415 | users_group_to_perm = relationship('UsersGroupToPerm', cascade='all') | |||
409 |
|
416 | |||
410 | def __repr__(self): |
|
417 | def __repr__(self): | |
411 | return '<userGroup(%s)>' % (self.users_group_name) |
|
418 | return '<userGroup(%s)>' % (self.users_group_name) | |
412 |
|
419 | |||
413 | @classmethod |
|
420 | @classmethod | |
414 | def get_by_group_name(cls, group_name, cache=False, |
|
421 | def get_by_group_name(cls, group_name, cache=False, | |
415 | case_insensitive=False): |
|
422 | case_insensitive=False): | |
416 | if case_insensitive: |
|
423 | if case_insensitive: | |
417 | q = cls.query().filter(cls.users_group_name.ilike(group_name)) |
|
424 | q = cls.query().filter(cls.users_group_name.ilike(group_name)) | |
418 | else: |
|
425 | else: | |
419 | q = cls.query().filter(cls.users_group_name == group_name) |
|
426 | q = cls.query().filter(cls.users_group_name == group_name) | |
420 | if cache: |
|
427 | if cache: | |
421 | q = q.options(FromCache( |
|
428 | q = q.options(FromCache( | |
422 | "get_user_%s" % group_name)) |
|
429 | "sql_cache_short", | |
|
430 | "get_user_%s" % _hash_key(group_name) | |||
|
431 | ) | |||
|
432 | ) | |||
423 | return q.scalar() |
|
433 | return q.scalar() | |
424 |
|
434 | |||
425 | @classmethod |
|
435 | @classmethod | |
426 | def get(cls, users_group_id, cache=False): |
|
436 | def get(cls, users_group_id, cache=False): | |
427 | users_group = cls.query() |
|
437 | users_group = cls.query() | |
428 | if cache: |
|
438 | if cache: | |
429 | users_group = users_group.options(FromCache("sql_cache_short", |
|
439 | users_group = users_group.options(FromCache("sql_cache_short", | |
430 | "get_users_group_%s" % users_group_id)) |
|
440 | "get_users_group_%s" % users_group_id)) | |
431 | return users_group.get(users_group_id) |
|
441 | return users_group.get(users_group_id) | |
432 |
|
442 | |||
433 |
|
443 | |||
434 | class UsersGroupMember(Base, BaseModel): |
|
444 | class UsersGroupMember(Base, BaseModel): | |
435 | __tablename__ = 'users_groups_members' |
|
445 | __tablename__ = 'users_groups_members' | |
436 | __table_args__ = {'extend_existing': True} |
|
446 | __table_args__ = {'extend_existing': True} | |
437 |
|
447 | |||
438 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
448 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
439 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
449 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
440 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
450 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
441 |
|
451 | |||
442 | user = relationship('User', lazy='joined') |
|
452 | user = relationship('User', lazy='joined') | |
443 | users_group = relationship('UsersGroup') |
|
453 | users_group = relationship('UsersGroup') | |
444 |
|
454 | |||
445 | def __init__(self, gr_id='', u_id=''): |
|
455 | def __init__(self, gr_id='', u_id=''): | |
446 | self.users_group_id = gr_id |
|
456 | self.users_group_id = gr_id | |
447 | self.user_id = u_id |
|
457 | self.user_id = u_id | |
448 |
|
458 | |||
449 |
|
459 | |||
450 | class Repository(Base, BaseModel): |
|
460 | class Repository(Base, BaseModel): | |
451 | __tablename__ = 'repositories' |
|
461 | __tablename__ = 'repositories' | |
452 | __table_args__ = ( |
|
462 | __table_args__ = ( | |
453 | UniqueConstraint('repo_name'), |
|
463 | UniqueConstraint('repo_name'), | |
454 | {'extend_existing': True}, |
|
464 | {'extend_existing': True}, | |
455 | ) |
|
465 | ) | |
456 |
|
466 | |||
457 | repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
467 | repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
458 | repo_name = Column("repo_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None) |
|
468 | repo_name = Column("repo_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None) | |
459 | clone_uri = Column("clone_uri", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None) |
|
469 | clone_uri = Column("clone_uri", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None) | |
460 | repo_type = Column("repo_type", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default='hg') |
|
470 | repo_type = Column("repo_type", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default='hg') | |
461 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) |
|
471 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) | |
462 | private = Column("private", Boolean(), nullable=True, unique=None, default=None) |
|
472 | private = Column("private", Boolean(), nullable=True, unique=None, default=None) | |
463 | enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True) |
|
473 | enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True) | |
464 | enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True) |
|
474 | enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True) | |
465 | description = Column("description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
475 | description = Column("description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
466 | created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
476 | created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) | |
467 |
|
477 | |||
468 | fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None) |
|
478 | fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None) | |
469 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None) |
|
479 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None) | |
470 |
|
480 | |||
471 | user = relationship('User') |
|
481 | user = relationship('User') | |
472 | fork = relationship('Repository', remote_side=repo_id) |
|
482 | fork = relationship('Repository', remote_side=repo_id) | |
473 | group = relationship('RepoGroup') |
|
483 | group = relationship('RepoGroup') | |
474 | repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id') |
|
484 | repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id') | |
475 | users_group_to_perm = relationship('UsersGroupRepoToPerm', cascade='all') |
|
485 | users_group_to_perm = relationship('UsersGroupRepoToPerm', cascade='all') | |
476 | stats = relationship('Statistics', cascade='all', uselist=False) |
|
486 | stats = relationship('Statistics', cascade='all', uselist=False) | |
477 |
|
487 | |||
478 | followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all') |
|
488 | followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all') | |
479 |
|
489 | |||
480 | logs = relationship('UserLog') |
|
490 | logs = relationship('UserLog') | |
481 |
|
491 | |||
482 | def __repr__(self): |
|
492 | def __repr__(self): | |
483 | return "<%s('%s:%s')>" % (self.__class__.__name__, |
|
493 | return "<%s('%s:%s')>" % (self.__class__.__name__, | |
484 | self.repo_id, self.repo_name) |
|
494 | self.repo_id, self.repo_name) | |
485 |
|
495 | |||
486 | @classmethod |
|
496 | @classmethod | |
487 | def url_sep(cls): |
|
497 | def url_sep(cls): | |
488 | return '/' |
|
498 | return '/' | |
489 |
|
499 | |||
490 | @classmethod |
|
500 | @classmethod | |
491 | def get_by_repo_name(cls, repo_name): |
|
501 | def get_by_repo_name(cls, repo_name): | |
492 | q = Session.query(cls).filter(cls.repo_name == repo_name) |
|
502 | q = Session.query(cls).filter(cls.repo_name == repo_name) | |
493 | q = q.options(joinedload(Repository.fork))\ |
|
503 | q = q.options(joinedload(Repository.fork))\ | |
494 | .options(joinedload(Repository.user))\ |
|
504 | .options(joinedload(Repository.user))\ | |
495 | .options(joinedload(Repository.group)) |
|
505 | .options(joinedload(Repository.group)) | |
496 | return q.scalar() |
|
506 | return q.scalar() | |
497 |
|
507 | |||
498 | @classmethod |
|
508 | @classmethod | |
499 | def get_repo_forks(cls, repo_id): |
|
509 | def get_repo_forks(cls, repo_id): | |
500 | return cls.query().filter(Repository.fork_id == repo_id) |
|
510 | return cls.query().filter(Repository.fork_id == repo_id) | |
501 |
|
511 | |||
502 | @classmethod |
|
512 | @classmethod | |
503 | def base_path(cls): |
|
513 | def base_path(cls): | |
504 | """ |
|
514 | """ | |
505 | Returns the base path where all repos are stored |
|
515 | Returns the base path where all repos are stored |
506 |
|
516 | |||
507 | :param cls: |
|
517 | :param cls: | |
508 | """ |
|
518 | """ | |
509 | q = Session.query(RhodeCodeUi)\ |
|
519 | q = Session.query(RhodeCodeUi)\ | |
510 | .filter(RhodeCodeUi.ui_key == cls.url_sep()) |
|
520 | .filter(RhodeCodeUi.ui_key == cls.url_sep()) | |
511 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
521 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) | |
512 | return q.one().ui_value |
|
522 | return q.one().ui_value | |
513 |
|
523 | |||
514 | @property |
|
524 | @property | |
515 | def just_name(self): |
|
525 | def just_name(self): | |
516 | return self.repo_name.split(Repository.url_sep())[-1] |
|
526 | return self.repo_name.split(Repository.url_sep())[-1] | |
517 |
|
527 | |||
518 | @property |
|
528 | @property | |
519 | def groups_with_parents(self): |
|
529 | def groups_with_parents(self): | |
520 | groups = [] |
|
530 | groups = [] | |
521 | if self.group is None: |
|
531 | if self.group is None: | |
522 | return groups |
|
532 | return groups | |
523 |
|
533 | |||
524 | cur_gr = self.group |
|
534 | cur_gr = self.group | |
525 | groups.insert(0, cur_gr) |
|
535 | groups.insert(0, cur_gr) | |
526 | while 1: |
|
536 | while 1: | |
527 | gr = getattr(cur_gr, 'parent_group', None) |
|
537 | gr = getattr(cur_gr, 'parent_group', None) | |
528 | cur_gr = cur_gr.parent_group |
|
538 | cur_gr = cur_gr.parent_group | |
529 | if gr is None: |
|
539 | if gr is None: | |
530 | break |
|
540 | break | |
531 | groups.insert(0, gr) |
|
541 | groups.insert(0, gr) | |
532 |
|
542 | |||
533 | return groups |
|
543 | return groups | |
534 |
|
544 | |||
535 | @property |
|
545 | @property | |
536 | def groups_and_repo(self): |
|
546 | def groups_and_repo(self): | |
537 | return self.groups_with_parents, self.just_name |
|
547 | return self.groups_with_parents, self.just_name | |
538 |
|
548 | |||
539 | @LazyProperty |
|
549 | @LazyProperty | |
540 | def repo_path(self): |
|
550 | def repo_path(self): | |
541 | """ |
|
551 | """ | |
542 | Returns the full base path for this repository, i.e. where it actually |
|
552 | Returns the full base path for this repository, i.e. where it actually |
543 | exists on the filesystem |
|
553 | exists on the filesystem |
544 | """ |
|
554 | """ | |
545 | q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == |
|
555 | q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == | |
546 | Repository.url_sep()) |
|
556 | Repository.url_sep()) | |
547 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
557 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) | |
548 | return q.one().ui_value |
|
558 | return q.one().ui_value | |
549 |
|
559 | |||
550 | @property |
|
560 | @property | |
551 | def repo_full_path(self): |
|
561 | def repo_full_path(self): | |
552 | p = [self.repo_path] |
|
562 | p = [self.repo_path] | |
553 | # we need to split the name by / since this is how we store the |
|
563 | # we need to split the name by / since this is how we store the | |
554 | # names in the database, but that eventually needs to be converted |
|
564 | # names in the database, but that eventually needs to be converted | |
555 | # into a valid system path |
|
565 | # into a valid system path | |
556 | p += self.repo_name.split(Repository.url_sep()) |
|
566 | p += self.repo_name.split(Repository.url_sep()) | |
557 | return os.path.join(*p) |
|
567 | return os.path.join(*p) | |
558 |
|
568 | |||
559 | def get_new_name(self, repo_name): |
|
569 | def get_new_name(self, repo_name): | |
560 | """ |
|
570 | """ | |
561 | returns the new full repository name based on the assigned group and the new repo name |
|
571 | returns the new full repository name based on the assigned group and the new repo name |
562 |
|
572 | |||
563 | :param repo_name: |
|
573 | :param repo_name: |
564 | """ |
|
574 | """ | |
565 | path_prefix = self.group.full_path_splitted if self.group else [] |
|
575 | path_prefix = self.group.full_path_splitted if self.group else [] | |
566 | return Repository.url_sep().join(path_prefix + [repo_name]) |
|
576 | return Repository.url_sep().join(path_prefix + [repo_name]) | |
567 |
|
577 | |||
568 | @property |
|
578 | @property | |
569 | def _ui(self): |
|
579 | def _ui(self): | |
570 | """ |
|
580 | """ | |
571 | Creates a db-based ui object for this repository |
|
581 | Creates a db-based ui object for this repository |
572 | """ |
|
582 | """ | |
573 | from mercurial import ui |
|
583 | from mercurial import ui | |
574 | from mercurial import config |
|
584 | from mercurial import config | |
575 | baseui = ui.ui() |
|
585 | baseui = ui.ui() | |
576 |
|
586 | |||
577 | #clean the baseui object |
|
587 | #clean the baseui object | |
578 | baseui._ocfg = config.config() |
|
588 | baseui._ocfg = config.config() | |
579 | baseui._ucfg = config.config() |
|
589 | baseui._ucfg = config.config() | |
580 | baseui._tcfg = config.config() |
|
590 | baseui._tcfg = config.config() | |
581 |
|
591 | |||
582 | ret = RhodeCodeUi.query()\ |
|
592 | ret = RhodeCodeUi.query()\ | |
583 | .options(FromCache("sql_cache_short", "repository_repo_ui")).all() |
|
593 | .options(FromCache("sql_cache_short", "repository_repo_ui")).all() | |
584 |
|
594 | |||
585 | hg_ui = ret |
|
595 | hg_ui = ret | |
586 | for ui_ in hg_ui: |
|
596 | for ui_ in hg_ui: | |
587 | if ui_.ui_active: |
|
597 | if ui_.ui_active: | |
588 | log.debug('settings ui from db[%s]%s:%s', ui_.ui_section, |
|
598 | log.debug('settings ui from db[%s]%s:%s', ui_.ui_section, | |
589 | ui_.ui_key, ui_.ui_value) |
|
599 | ui_.ui_key, ui_.ui_value) | |
590 | baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value) |
|
600 | baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value) | |
591 |
|
601 | |||
592 | return baseui |
|
602 | return baseui | |
593 |
|
603 | |||
594 | @classmethod |
|
604 | @classmethod | |
595 | def is_valid(cls, repo_name): |
|
605 | def is_valid(cls, repo_name): | |
596 | """ |
|
606 | """ | |
597 | returns True if given repo name is a valid filesystem repository |
|
607 | returns True if given repo name is a valid filesystem repository | |
598 |
|
608 | |||
599 | :param cls: |
|
609 | :param cls: | |
600 | :param repo_name: |
|
610 | :param repo_name: | |
601 | """ |
|
611 | """ | |
602 | from rhodecode.lib.utils import is_valid_repo |
|
612 | from rhodecode.lib.utils import is_valid_repo | |
603 |
|
613 | |||
604 | return is_valid_repo(repo_name, cls.base_path()) |
|
614 | return is_valid_repo(repo_name, cls.base_path()) | |
605 |
|
615 | |||
606 | #========================================================================== |
|
616 | #========================================================================== | |
607 | # SCM PROPERTIES |
|
617 | # SCM PROPERTIES | |
608 | #========================================================================== |
|
618 | #========================================================================== | |
609 |
|
619 | |||
610 | def get_changeset(self, rev): |
|
620 | def get_changeset(self, rev): | |
611 | return get_changeset_safe(self.scm_instance, rev) |
|
621 | return get_changeset_safe(self.scm_instance, rev) | |
612 |
|
622 | |||
613 | @property |
|
623 | @property | |
614 | def tip(self): |
|
624 | def tip(self): | |
615 | return self.get_changeset('tip') |
|
625 | return self.get_changeset('tip') | |
616 |
|
626 | |||
617 | @property |
|
627 | @property | |
618 | def author(self): |
|
628 | def author(self): | |
619 | return self.tip.author |
|
629 | return self.tip.author | |
620 |
|
630 | |||
621 | @property |
|
631 | @property | |
622 | def last_change(self): |
|
632 | def last_change(self): | |
623 | return self.scm_instance.last_change |
|
633 | return self.scm_instance.last_change | |
624 |
|
634 | |||
625 | def comments(self, revisions=None): |
|
635 | def comments(self, revisions=None): | |
626 | """ |
|
636 | """ | |
627 | Returns comments for this repository grouped by revisions |
|
637 | Returns comments for this repository grouped by revisions | |
628 |
|
638 | |||
629 | :param revisions: filter query by revisions only |
|
639 | :param revisions: filter query by revisions only | |
630 | """ |
|
640 | """ | |
631 | cmts = ChangesetComment.query()\ |
|
641 | cmts = ChangesetComment.query()\ | |
632 | .filter(ChangesetComment.repo == self) |
|
642 | .filter(ChangesetComment.repo == self) | |
633 | if revisions: |
|
643 | if revisions: | |
634 | cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) |
|
644 | cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) | |
635 | grouped = defaultdict(list) |
|
645 | grouped = defaultdict(list) | |
636 | for cmt in cmts.all(): |
|
646 | for cmt in cmts.all(): | |
637 | grouped[cmt.revision].append(cmt) |
|
647 | grouped[cmt.revision].append(cmt) | |
638 | return grouped |
|
648 | return grouped | |
639 |
|
649 | |||
640 | #========================================================================== |
|
650 | #========================================================================== | |
641 | # SCM CACHE INSTANCE |
|
651 | # SCM CACHE INSTANCE | |
642 | #========================================================================== |
|
652 | #========================================================================== | |
643 |
|
653 | |||
644 | @property |
|
654 | @property | |
645 | def invalidate(self): |
|
655 | def invalidate(self): | |
646 | return CacheInvalidation.invalidate(self.repo_name) |
|
656 | return CacheInvalidation.invalidate(self.repo_name) | |
647 |
|
657 | |||
648 | def set_invalidate(self): |
|
658 | def set_invalidate(self): | |
649 | """ |
|
659 | """ | |
650 | marks the cache of this instance for invalidation |
|
660 | marks the cache of this instance for invalidation |
651 | """ |
|
661 | """ | |
652 | CacheInvalidation.set_invalidate(self.repo_name) |
|
662 | CacheInvalidation.set_invalidate(self.repo_name) | |
653 |
|
663 | |||
654 | @LazyProperty |
|
664 | @LazyProperty | |
655 | def scm_instance(self): |
|
665 | def scm_instance(self): | |
656 | return self.__get_instance() |
|
666 | return self.__get_instance() | |
657 |
|
667 | |||
658 | @property |
|
668 | @property | |
659 | def scm_instance_cached(self): |
|
669 | def scm_instance_cached(self): | |
660 | @cache_region('long_term') |
|
670 | @cache_region('long_term') | |
661 | def _c(repo_name): |
|
671 | def _c(repo_name): | |
662 | return self.__get_instance() |
|
672 | return self.__get_instance() | |
663 | rn = self.repo_name |
|
673 | rn = self.repo_name | |
664 | log.debug('Getting cached instance of repo') |
|
674 | log.debug('Getting cached instance of repo') | |
665 | inv = self.invalidate |
|
675 | inv = self.invalidate | |
666 | if inv is not None: |
|
676 | if inv is not None: | |
667 | region_invalidate(_c, None, rn) |
|
677 | region_invalidate(_c, None, rn) | |
668 | # update our cache |
|
678 | # update our cache | |
669 | CacheInvalidation.set_valid(inv.cache_key) |
|
679 | CacheInvalidation.set_valid(inv.cache_key) | |
670 | return _c(rn) |
|
680 | return _c(rn) | |
671 |
|
681 | |||
672 | def __get_instance(self): |
|
682 | def __get_instance(self): | |
673 | repo_full_path = self.repo_full_path |
|
683 | repo_full_path = self.repo_full_path | |
674 | try: |
|
684 | try: | |
675 | alias = get_scm(repo_full_path)[0] |
|
685 | alias = get_scm(repo_full_path)[0] | |
676 | log.debug('Creating instance of %s repository' % alias) |
|
686 | log.debug('Creating instance of %s repository' % alias) | |
677 | backend = get_backend(alias) |
|
687 | backend = get_backend(alias) | |
678 | except VCSError: |
|
688 | except VCSError: | |
679 | log.error(traceback.format_exc()) |
|
689 | log.error(traceback.format_exc()) | |
680 | log.error('Perhaps this repository is in the db but not on the ' |
|
690 | log.error('Perhaps this repository is in the db but not on the ' |
681 | 'filesystem; run "rescan repositories" with the ' |
|
691 | 'filesystem; run "rescan repositories" with the ' |
682 | '"destroy old data" option from the admin panel') |
|
692 | '"destroy old data" option from the admin panel') |
683 | return |
|
693 | return | |
684 |
|
694 | |||
685 | if alias == 'hg': |
|
695 | if alias == 'hg': | |
686 |
|
696 | |||
687 | repo = backend(safe_str(repo_full_path), create=False, |
|
697 | repo = backend(safe_str(repo_full_path), create=False, | |
688 | baseui=self._ui) |
|
698 | baseui=self._ui) | |
689 | # skip hidden web repository |
|
699 | # skip hidden web repository | |
690 | if repo._get_hidden(): |
|
700 | if repo._get_hidden(): | |
691 | return |
|
701 | return | |
692 | else: |
|
702 | else: | |
693 | repo = backend(repo_full_path, create=False) |
|
703 | repo = backend(repo_full_path, create=False) | |
694 |
|
704 | |||
695 | return repo |
|
705 | return repo | |
696 |
|
706 | |||
697 |
|
707 | |||
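A rough sketch of how the cached scm instance above is meant to be refreshed (editor's illustration, not part of the changeset); the repository name is an arbitrary example value::

    repo = Repository.get_by_repo_name('rhodecode')
    vcs_repo = repo.scm_instance_cached   # served from the beaker 'long_term' region
    repo.set_invalidate()                 # record a pending CacheInvalidation entry
    vcs_repo = repo.scm_instance_cached   # entry detected: rebuilt, then key marked valid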
698 | class RepoGroup(Base, BaseModel): |
|
708 | class RepoGroup(Base, BaseModel): | |
699 | __tablename__ = 'groups' |
|
709 | __tablename__ = 'groups' | |
700 | __table_args__ = ( |
|
710 | __table_args__ = ( | |
701 | UniqueConstraint('group_name', 'group_parent_id'), |
|
711 | UniqueConstraint('group_name', 'group_parent_id'), | |
702 | CheckConstraint('group_id != group_parent_id'), |
|
712 | CheckConstraint('group_id != group_parent_id'), | |
703 | {'extend_existing': True}, |
|
713 | {'extend_existing': True}, | |
704 | ) |
|
714 | ) | |
705 | __mapper_args__ = {'order_by': 'group_name'} |
|
715 | __mapper_args__ = {'order_by': 'group_name'} | |
706 |
|
716 | |||
707 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
717 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
708 | group_name = Column("group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None) |
|
718 | group_name = Column("group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None) | |
709 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) |
|
719 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) | |
710 | group_description = Column("group_description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
720 | group_description = Column("group_description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
711 |
|
721 | |||
712 | repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') |
|
722 | repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') | |
713 | users_group_to_perm = relationship('UsersGroupRepoGroupToPerm', cascade='all') |
|
723 | users_group_to_perm = relationship('UsersGroupRepoGroupToPerm', cascade='all') | |
714 |
|
724 | |||
715 | parent_group = relationship('RepoGroup', remote_side=group_id) |
|
725 | parent_group = relationship('RepoGroup', remote_side=group_id) | |
716 |
|
726 | |||
717 | def __init__(self, group_name='', parent_group=None): |
|
727 | def __init__(self, group_name='', parent_group=None): | |
718 | self.group_name = group_name |
|
728 | self.group_name = group_name | |
719 | self.parent_group = parent_group |
|
729 | self.parent_group = parent_group | |
720 |
|
730 | |||
721 | def __repr__(self): |
|
731 | def __repr__(self): | |
722 | return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id, |
|
732 | return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id, | |
723 | self.group_name) |
|
733 | self.group_name) | |
724 |
|
734 | |||
725 | @classmethod |
|
735 | @classmethod | |
726 | def groups_choices(cls): |
|
736 | def groups_choices(cls): | |
727 | from webhelpers.html import literal as _literal |
|
737 | from webhelpers.html import literal as _literal | |
728 | repo_groups = [('', '')] |
|
738 | repo_groups = [('', '')] | |
729 | sep = ' » ' |
|
739 | sep = ' » ' | |
730 | _name = lambda k: _literal(sep.join(k)) |
|
740 | _name = lambda k: _literal(sep.join(k)) | |
731 |
|
741 | |||
732 | repo_groups.extend([(x.group_id, _name(x.full_path_splitted)) |
|
742 | repo_groups.extend([(x.group_id, _name(x.full_path_splitted)) | |
733 | for x in cls.query().all()]) |
|
743 | for x in cls.query().all()]) | |
734 |
|
744 | |||
735 | repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0]) |
|
745 | repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0]) | |
736 | return repo_groups |
|
746 | return repo_groups | |
737 |
|
747 | |||
738 | @classmethod |
|
748 | @classmethod | |
739 | def url_sep(cls): |
|
749 | def url_sep(cls): | |
740 | return '/' |
|
750 | return '/' | |
741 |
|
751 | |||
742 | @classmethod |
|
752 | @classmethod | |
743 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): |
|
753 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): | |
744 | if case_insensitive: |
|
754 | if case_insensitive: | |
745 | gr = cls.query()\ |
|
755 | gr = cls.query()\ | |
746 | .filter(cls.group_name.ilike(group_name)) |
|
756 | .filter(cls.group_name.ilike(group_name)) | |
747 | else: |
|
757 | else: | |
748 | gr = cls.query()\ |
|
758 | gr = cls.query()\ | |
749 | .filter(cls.group_name == group_name) |
|
759 | .filter(cls.group_name == group_name) | |
750 | if cache: |
|
760 | if cache: | |
751 | gr = gr.options(FromCache( |
|
761 | gr = gr.options(FromCache( | |
752 | "get_group_%s" % group_name)) |
|
762 | "sql_cache_short", | |
|
763 | "get_group_%s" % _hash_key(group_name) | |||
|
764 | ) | |||
|
765 | ) | |||
753 | return gr.scalar() |
|
766 | return gr.scalar() | |
754 |
|
767 | |||
755 | @property |
|
768 | @property | |
756 | def parents(self): |
|
769 | def parents(self): | |
757 | parents_recursion_limit = 5 |
|
770 | parents_recursion_limit = 5 | |
758 | groups = [] |
|
771 | groups = [] | |
759 | if self.parent_group is None: |
|
772 | if self.parent_group is None: | |
760 | return groups |
|
773 | return groups | |
761 | cur_gr = self.parent_group |
|
774 | cur_gr = self.parent_group | |
762 | groups.insert(0, cur_gr) |
|
775 | groups.insert(0, cur_gr) | |
763 | cnt = 0 |
|
776 | cnt = 0 | |
764 | while 1: |
|
777 | while 1: | |
765 | cnt += 1 |
|
778 | cnt += 1 | |
766 | gr = getattr(cur_gr, 'parent_group', None) |
|
779 | gr = getattr(cur_gr, 'parent_group', None) | |
767 | cur_gr = cur_gr.parent_group |
|
780 | cur_gr = cur_gr.parent_group | |
768 | if gr is None: |
|
781 | if gr is None: | |
769 | break |
|
782 | break | |
770 | if cnt == parents_recursion_limit: |
|
783 | if cnt == parents_recursion_limit: | |
771 | # this will prevent accidental infinite loops |
|
784 | # this will prevent accidental infinite loops |
772 | log.error('group nested more than %s' % |
|
785 | log.error('group nested more than %s' % | |
773 | parents_recursion_limit) |
|
786 | parents_recursion_limit) | |
774 | break |
|
787 | break | |
775 |
|
788 | |||
776 | groups.insert(0, gr) |
|
789 | groups.insert(0, gr) | |
777 | return groups |
|
790 | return groups | |
778 |
|
791 | |||
779 | @property |
|
792 | @property | |
780 | def children(self): |
|
793 | def children(self): | |
781 | return RepoGroup.query().filter(RepoGroup.parent_group == self) |
|
794 | return RepoGroup.query().filter(RepoGroup.parent_group == self) | |
782 |
|
795 | |||
783 | @property |
|
796 | @property | |
784 | def name(self): |
|
797 | def name(self): | |
785 | return self.group_name.split(RepoGroup.url_sep())[-1] |
|
798 | return self.group_name.split(RepoGroup.url_sep())[-1] | |
786 |
|
799 | |||
787 | @property |
|
800 | @property | |
788 | def full_path(self): |
|
801 | def full_path(self): | |
789 | return self.group_name |
|
802 | return self.group_name | |
790 |
|
803 | |||
791 | @property |
|
804 | @property | |
792 | def full_path_splitted(self): |
|
805 | def full_path_splitted(self): | |
793 | return self.group_name.split(RepoGroup.url_sep()) |
|
806 | return self.group_name.split(RepoGroup.url_sep()) | |
794 |
|
807 | |||
795 | @property |
|
808 | @property | |
796 | def repositories(self): |
|
809 | def repositories(self): | |
797 | return Repository.query().filter(Repository.group == self) |
|
810 | return Repository.query().filter(Repository.group == self) | |
798 |
|
811 | |||
799 | @property |
|
812 | @property | |
800 | def repositories_recursive_count(self): |
|
813 | def repositories_recursive_count(self): | |
801 | cnt = self.repositories.count() |
|
814 | cnt = self.repositories.count() | |
802 |
|
815 | |||
803 | def children_count(group): |
|
816 | def children_count(group): | |
804 | cnt = 0 |
|
817 | cnt = 0 | |
805 | for child in group.children: |
|
818 | for child in group.children: | |
806 | cnt += child.repositories.count() |
|
819 | cnt += child.repositories.count() | |
807 | cnt += children_count(child) |
|
820 | cnt += children_count(child) | |
808 | return cnt |
|
821 | return cnt | |
809 |
|
822 | |||
810 | return cnt + children_count(self) |
|
823 | return cnt + children_count(self) | |
811 |
|
824 | |||
812 | def get_new_name(self, group_name): |
|
825 | def get_new_name(self, group_name): | |
813 | """ |
|
826 | """ | |
814 | returns new full group name based on parent and new name |
|
827 | returns new full group name based on parent and new name | |
815 |
|
828 | |||
816 | :param group_name: |
|
829 | :param group_name: | |
817 | """ |
|
830 | """ | |
818 | path_prefix = (self.parent_group.full_path_splitted if |
|
831 | path_prefix = (self.parent_group.full_path_splitted if | |
819 | self.parent_group else []) |
|
832 | self.parent_group else []) | |
820 | return RepoGroup.url_sep().join(path_prefix + [group_name]) |
|
833 | return RepoGroup.url_sep().join(path_prefix + [group_name]) | |
821 |
|
834 | |||
822 |
|
835 | |||
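A small sketch of the group path helpers above (editor's illustration, not part of the changeset), assuming a nested group 'projects/web' whose parent group is 'projects'::

    grp = RepoGroup.get_by_group_name('projects/web')
    grp.name                      # 'web'
    grp.full_path_splitted        # ['projects', 'web']
    grp.get_new_name('frontend')  # 'projects/frontend' -- the parent prefix is kept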
823 | class Permission(Base, BaseModel): |
|
836 | class Permission(Base, BaseModel): | |
824 | __tablename__ = 'permissions' |
|
837 | __tablename__ = 'permissions' | |
825 | __table_args__ = {'extend_existing': True} |
|
838 | __table_args__ = {'extend_existing': True} | |
826 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
839 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
827 | permission_name = Column("permission_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
840 | permission_name = Column("permission_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
828 | permission_longname = Column("permission_longname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
841 | permission_longname = Column("permission_longname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
829 |
|
842 | |||
830 | def __repr__(self): |
|
843 | def __repr__(self): | |
831 | return "<%s('%s:%s')>" % ( |
|
844 | return "<%s('%s:%s')>" % ( | |
832 | self.__class__.__name__, self.permission_id, self.permission_name |
|
845 | self.__class__.__name__, self.permission_id, self.permission_name | |
833 | ) |
|
846 | ) | |
834 |
|
847 | |||
835 | @classmethod |
|
848 | @classmethod | |
836 | def get_by_key(cls, key): |
|
849 | def get_by_key(cls, key): | |
837 | return cls.query().filter(cls.permission_name == key).scalar() |
|
850 | return cls.query().filter(cls.permission_name == key).scalar() | |
838 |
|
851 | |||
839 | @classmethod |
|
852 | @classmethod | |
840 | def get_default_perms(cls, default_user_id): |
|
853 | def get_default_perms(cls, default_user_id): | |
841 | q = Session.query(UserRepoToPerm, Repository, cls)\ |
|
854 | q = Session.query(UserRepoToPerm, Repository, cls)\ | |
842 | .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ |
|
855 | .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ | |
843 | .join((cls, UserRepoToPerm.permission_id == cls.permission_id))\ |
|
856 | .join((cls, UserRepoToPerm.permission_id == cls.permission_id))\ | |
844 | .filter(UserRepoToPerm.user_id == default_user_id) |
|
857 | .filter(UserRepoToPerm.user_id == default_user_id) | |
845 |
|
858 | |||
846 | return q.all() |
|
859 | return q.all() | |
847 |
|
860 | |||
848 | @classmethod |
|
861 | @classmethod | |
849 | def get_default_group_perms(cls, default_user_id): |
|
862 | def get_default_group_perms(cls, default_user_id): | |
850 | q = Session.query(UserRepoGroupToPerm, RepoGroup, cls)\ |
|
863 | q = Session.query(UserRepoGroupToPerm, RepoGroup, cls)\ | |
851 | .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\ |
|
864 | .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\ | |
852 | .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id))\ |
|
865 | .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id))\ | |
853 | .filter(UserRepoGroupToPerm.user_id == default_user_id) |
|
866 | .filter(UserRepoGroupToPerm.user_id == default_user_id) | |
854 |
|
867 | |||
855 | return q.all() |
|
868 | return q.all() | |
856 |
|
869 | |||
857 |
|
870 | |||
858 | class UserRepoToPerm(Base, BaseModel): |
|
871 | class UserRepoToPerm(Base, BaseModel): | |
859 | __tablename__ = 'repo_to_perm' |
|
872 | __tablename__ = 'repo_to_perm' | |
860 | __table_args__ = ( |
|
873 | __table_args__ = ( | |
861 | UniqueConstraint('user_id', 'repository_id', 'permission_id'), |
|
874 | UniqueConstraint('user_id', 'repository_id', 'permission_id'), | |
862 | {'extend_existing': True} |
|
875 | {'extend_existing': True} | |
863 | ) |
|
876 | ) | |
864 | repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
877 | repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
865 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
878 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
866 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
879 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
867 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
880 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) | |
868 |
|
881 | |||
869 | user = relationship('User') |
|
882 | user = relationship('User') | |
870 | repository = relationship('Repository') |
|
883 | repository = relationship('Repository') | |
871 | permission = relationship('Permission') |
|
884 | permission = relationship('Permission') | |
872 |
|
885 | |||
873 | @classmethod |
|
886 | @classmethod | |
874 | def create(cls, user, repository, permission): |
|
887 | def create(cls, user, repository, permission): | |
875 | n = cls() |
|
888 | n = cls() | |
876 | n.user = user |
|
889 | n.user = user | |
877 | n.repository = repository |
|
890 | n.repository = repository | |
878 | n.permission = permission |
|
891 | n.permission = permission | |
879 | Session.add(n) |
|
892 | Session.add(n) | |
880 | return n |
|
893 | return n | |
881 |
|
894 | |||
882 | def __repr__(self): |
|
895 | def __repr__(self): | |
883 | return '<user:%s => %s >' % (self.user, self.repository) |
|
896 | return '<user:%s => %s >' % (self.user, self.repository) | |
884 |
|
897 | |||
885 |
|
898 | |||
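
For illustration, a minimal sketch of granting a repository permission through the create() helper above, assuming existing records and a configured session; create() only adds the row, so the commit stays with the caller:

    from rhodecode.model.meta import Session
    from rhodecode.model.db import User, Repository, Permission, UserRepoToPerm

    user = User.get_by_username('example_user')          # hypothetical account
    repo = Repository.get_by_repo_name('example_repo')   # hypothetical repository
    perm = Permission.query()\
        .filter(Permission.permission_name == 'repository.write').scalar()

    UserRepoToPerm.create(user, repo, perm)
    Session.commit()
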
886 | class UserToPerm(Base, BaseModel): |
|
899 | class UserToPerm(Base, BaseModel): | |
887 | __tablename__ = 'user_to_perm' |
|
900 | __tablename__ = 'user_to_perm' | |
888 | __table_args__ = ( |
|
901 | __table_args__ = ( | |
889 | UniqueConstraint('user_id', 'permission_id'), |
|
902 | UniqueConstraint('user_id', 'permission_id'), | |
890 | {'extend_existing': True} |
|
903 | {'extend_existing': True} | |
891 | ) |
|
904 | ) | |
892 | user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
905 | user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
893 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
906 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
894 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
907 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
895 |
|
908 | |||
896 | user = relationship('User') |
|
909 | user = relationship('User') | |
897 | permission = relationship('Permission', lazy='joined') |
|
910 | permission = relationship('Permission', lazy='joined') | |
898 |
|
911 | |||
899 |
|
912 | |||
900 | class UsersGroupRepoToPerm(Base, BaseModel): |
|
913 | class UsersGroupRepoToPerm(Base, BaseModel): | |
901 | __tablename__ = 'users_group_repo_to_perm' |
|
914 | __tablename__ = 'users_group_repo_to_perm' | |
902 | __table_args__ = ( |
|
915 | __table_args__ = ( | |
903 | UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), |
|
916 | UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), | |
904 | {'extend_existing': True} |
|
917 | {'extend_existing': True} | |
905 | ) |
|
918 | ) | |
906 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
919 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
907 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
920 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
908 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
921 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
909 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
922 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) | |
910 |
|
923 | |||
911 | users_group = relationship('UsersGroup') |
|
924 | users_group = relationship('UsersGroup') | |
912 | permission = relationship('Permission') |
|
925 | permission = relationship('Permission') | |
913 | repository = relationship('Repository') |
|
926 | repository = relationship('Repository') | |
914 |
|
927 | |||
915 | @classmethod |
|
928 | @classmethod | |
916 | def create(cls, users_group, repository, permission): |
|
929 | def create(cls, users_group, repository, permission): | |
917 | n = cls() |
|
930 | n = cls() | |
918 | n.users_group = users_group |
|
931 | n.users_group = users_group | |
919 | n.repository = repository |
|
932 | n.repository = repository | |
920 | n.permission = permission |
|
933 | n.permission = permission | |
921 | Session.add(n) |
|
934 | Session.add(n) | |
922 | return n |
|
935 | return n | |
923 |
|
936 | |||
924 | def __repr__(self): |
|
937 | def __repr__(self): | |
925 | return '<userGroup:%s => %s >' % (self.users_group, self.repository) |
|
938 | return '<userGroup:%s => %s >' % (self.users_group, self.repository) | |
926 |
|
939 | |||
927 |
|
940 | |||
928 | class UsersGroupToPerm(Base, BaseModel): |
|
941 | class UsersGroupToPerm(Base, BaseModel): | |
929 | __tablename__ = 'users_group_to_perm' |
|
942 | __tablename__ = 'users_group_to_perm' | |
930 | __table_args__ = ( |
|
943 | __table_args__ = ( | |
931 | UniqueConstraint('users_group_id', 'permission_id',), |
|
944 | UniqueConstraint('users_group_id', 'permission_id',), | |
932 | {'extend_existing': True} |
|
945 | {'extend_existing': True} | |
933 | ) |
|
946 | ) | |
934 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
947 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
935 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
948 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
936 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
949 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
937 |
|
950 | |||
938 | users_group = relationship('UsersGroup') |
|
951 | users_group = relationship('UsersGroup') | |
939 | permission = relationship('Permission') |
|
952 | permission = relationship('Permission') | |
940 |
|
953 | |||
941 |
|
954 | |||
942 | class UserRepoGroupToPerm(Base, BaseModel): |
|
955 | class UserRepoGroupToPerm(Base, BaseModel): | |
943 | __tablename__ = 'user_repo_group_to_perm' |
|
956 | __tablename__ = 'user_repo_group_to_perm' | |
944 | __table_args__ = ( |
|
957 | __table_args__ = ( | |
945 | UniqueConstraint('user_id', 'group_id', 'permission_id'), |
|
958 | UniqueConstraint('user_id', 'group_id', 'permission_id'), | |
946 | {'extend_existing': True} |
|
959 | {'extend_existing': True} | |
947 | ) |
|
960 | ) | |
948 |
|
961 | |||
949 | group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
962 | group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
950 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
963 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
951 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) |
|
964 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) | |
952 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
965 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
953 |
|
966 | |||
954 | user = relationship('User') |
|
967 | user = relationship('User') | |
955 | group = relationship('RepoGroup') |
|
968 | group = relationship('RepoGroup') | |
956 | permission = relationship('Permission') |
|
969 | permission = relationship('Permission') | |
957 |
|
970 | |||
958 |
|
971 | |||
959 | class UsersGroupRepoGroupToPerm(Base, BaseModel): |
|
972 | class UsersGroupRepoGroupToPerm(Base, BaseModel): | |
960 | __tablename__ = 'users_group_repo_group_to_perm' |
|
973 | __tablename__ = 'users_group_repo_group_to_perm' | |
961 | __table_args__ = ( |
|
974 | __table_args__ = ( | |
962 | UniqueConstraint('users_group_id', 'group_id'), |
|
975 | UniqueConstraint('users_group_id', 'group_id'), | |
963 | {'extend_existing': True} |
|
976 | {'extend_existing': True} | |
964 | ) |
|
977 | ) | |
965 |
|
978 | |||
966 | users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
979 | users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
967 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
980 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
968 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) |
|
981 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) | |
969 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
982 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
970 |
|
983 | |||
971 | users_group = relationship('UsersGroup') |
|
984 | users_group = relationship('UsersGroup') | |
972 | permission = relationship('Permission') |
|
985 | permission = relationship('Permission') | |
973 | group = relationship('RepoGroup') |
|
986 | group = relationship('RepoGroup') | |
974 |
|
987 | |||
975 |
|
988 | |||
976 | class Statistics(Base, BaseModel): |
|
989 | class Statistics(Base, BaseModel): | |
977 | __tablename__ = 'statistics' |
|
990 | __tablename__ = 'statistics' | |
978 | __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing': True}) |
|
991 | __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing': True}) | |
979 | stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
992 | stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
980 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None) |
|
993 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None) | |
981 | stat_on_revision = Column("stat_on_revision", Integer(), nullable=False) |
|
994 | stat_on_revision = Column("stat_on_revision", Integer(), nullable=False) | |
982 | commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data |
|
995 | commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data | |
983 | commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data |
|
996 | commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data | |
984 | languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data |
|
997 | languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data | |
985 |
|
998 | |||
986 | repository = relationship('Repository', single_parent=True) |
|
999 | repository = relationship('Repository', single_parent=True) | |
987 |
|
1000 | |||
988 |
|
1001 | |||
989 | class UserFollowing(Base, BaseModel): |
|
1002 | class UserFollowing(Base, BaseModel): | |
990 | __tablename__ = 'user_followings' |
|
1003 | __tablename__ = 'user_followings' | |
991 | __table_args__ = ( |
|
1004 | __table_args__ = ( | |
992 | UniqueConstraint('user_id', 'follows_repository_id'), |
|
1005 | UniqueConstraint('user_id', 'follows_repository_id'), | |
993 | UniqueConstraint('user_id', 'follows_user_id'), |
|
1006 | UniqueConstraint('user_id', 'follows_user_id'), | |
994 | {'extend_existing': True} |
|
1007 | {'extend_existing': True} | |
995 | ) |
|
1008 | ) | |
996 |
|
1009 | |||
997 | user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1010 | user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
998 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
1011 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
999 | follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None) |
|
1012 | follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None) | |
1000 | follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
1013 | follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |
1001 | follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
1014 | follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) | |
1002 |
|
1015 | |||
1003 | user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id') |
|
1016 | user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id') | |
1004 |
|
1017 | |||
1005 | follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id') |
|
1018 | follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id') | |
1006 | follows_repository = relationship('Repository', order_by='Repository.repo_name') |
|
1019 | follows_repository = relationship('Repository', order_by='Repository.repo_name') | |
1007 |
|
1020 | |||
1008 | @classmethod |
|
1021 | @classmethod | |
1009 | def get_repo_followers(cls, repo_id): |
|
1022 | def get_repo_followers(cls, repo_id): | |
1010 | return cls.query().filter(cls.follows_repo_id == repo_id) |
|
1023 | return cls.query().filter(cls.follows_repo_id == repo_id) | |
1011 |
|
1024 | |||
1012 |
|
1025 | |||
1013 | class CacheInvalidation(Base, BaseModel): |
|
1026 | class CacheInvalidation(Base, BaseModel): | |
1014 | __tablename__ = 'cache_invalidation' |
|
1027 | __tablename__ = 'cache_invalidation' | |
1015 | __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing': True}) |
|
1028 | __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing': True}) | |
1016 | cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1029 | cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
1017 | cache_key = Column("cache_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
1030 | cache_key = Column("cache_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
1018 | cache_args = Column("cache_args", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
1031 | cache_args = Column("cache_args", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) | |
1019 | cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) |
|
1032 | cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) | |
1020 |
|
1033 | |||
1021 | def __init__(self, cache_key, cache_args=''): |
|
1034 | def __init__(self, cache_key, cache_args=''): | |
1022 | self.cache_key = cache_key |
|
1035 | self.cache_key = cache_key | |
1023 | self.cache_args = cache_args |
|
1036 | self.cache_args = cache_args | |
1024 | self.cache_active = False |
|
1037 | self.cache_active = False | |
1025 |
|
1038 | |||
1026 | def __repr__(self): |
|
1039 | def __repr__(self): | |
1027 | return "<%s('%s:%s')>" % (self.__class__.__name__, |
|
1040 | return "<%s('%s:%s')>" % (self.__class__.__name__, | |
1028 | self.cache_id, self.cache_key) |
|
1041 | self.cache_id, self.cache_key) | |
1029 |
|
1042 | |||
1030 | @classmethod |
|
1043 | @classmethod | |
1031 | def _get_key(cls, key): |
|
1044 | def _get_key(cls, key): | |
1032 | """ |
|
1045 | """ | |
1033 | Wrapper for generating a key |
|
1046 | Wrapper for generating a key | |
1034 |
|
1047 | |||
1035 | :param key: |
|
1048 | :param key: | |
1036 | """ |
|
1049 | """ | |
1037 | import rhodecode |
|
1050 | import rhodecode | |
1038 | prefix = '' |
|
1051 | prefix = '' | |
1039 | iid = rhodecode.CONFIG.get('instance_id') |
|
1052 | iid = rhodecode.CONFIG.get('instance_id') | |
1040 | if iid: |
|
1053 | if iid: | |
1041 | prefix = iid |
|
1054 | prefix = iid | |
1042 | return "%s%s" % (prefix, key) |
|
1055 | return "%s%s" % (prefix, key) | |
1043 |
|
1056 | |||
1044 | @classmethod |
|
1057 | @classmethod | |
1045 | def get_by_key(cls, key): |
|
1058 | def get_by_key(cls, key): | |
1046 | return cls.query().filter(cls.cache_key == key).scalar() |
|
1059 | return cls.query().filter(cls.cache_key == key).scalar() | |
1047 |
|
1060 | |||
1048 | @classmethod |
|
1061 | @classmethod | |
1049 | def invalidate(cls, key): |
|
1062 | def invalidate(cls, key): | |
1050 | """ |
|
1063 | """ | |
1051 | Returns Invalidation object if this given key should be invalidated |
|
1064 | Returns Invalidation object if this given key should be invalidated | |
1052 | None otherwise. `cache_active = False` means that this cache |
|
1065 | None otherwise. `cache_active = False` means that this cache | |
1053 | state is not valid and needs to be invalidated |
|
1066 | state is not valid and needs to be invalidated | |
1054 |
|
1067 | |||
1055 | :param key: |
|
1068 | :param key: | |
1056 | """ |
|
1069 | """ | |
1057 | return cls.query()\ |
|
1070 | return cls.query()\ | |
1058 | .filter(CacheInvalidation.cache_key == key)\ |
|
1071 | .filter(CacheInvalidation.cache_key == key)\ | |
1059 | .filter(CacheInvalidation.cache_active == False)\ |
|
1072 | .filter(CacheInvalidation.cache_active == False)\ | |
1060 | .scalar() |
|
1073 | .scalar() | |
1061 |
|
1074 | |||
1062 | @classmethod |
|
1075 | @classmethod | |
1063 | def set_invalidate(cls, key): |
|
1076 | def set_invalidate(cls, key): | |
1064 | """ |
|
1077 | """ | |
1065 | Mark this Cache key for invalidation |
|
1078 | Mark this Cache key for invalidation | |
1066 |
|
1079 | |||
1067 | :param key: |
|
1080 | :param key: | |
1068 | """ |
|
1081 | """ | |
1069 |
|
1082 | |||
1070 | log.debug('marking %s for invalidation' % key) |
|
1083 | log.debug('marking %s for invalidation' % key) | |
1071 | inv_obj = Session.query(cls)\ |
|
1084 | inv_obj = Session.query(cls)\ | |
1072 | .filter(cls.cache_key == key).scalar() |
|
1085 | .filter(cls.cache_key == key).scalar() | |
1073 | if inv_obj: |
|
1086 | if inv_obj: | |
1074 | inv_obj.cache_active = False |
|
1087 | inv_obj.cache_active = False | |
1075 | else: |
|
1088 | else: | |
1076 | log.debug('cache key not found in invalidation db -> creating one') |
|
1089 | log.debug('cache key not found in invalidation db -> creating one') | |
1077 | inv_obj = CacheInvalidation(key) |
|
1090 | inv_obj = CacheInvalidation(key) | |
1078 |
|
1091 | |||
1079 | try: |
|
1092 | try: | |
1080 | Session.add(inv_obj) |
|
1093 | Session.add(inv_obj) | |
1081 | Session.commit() |
|
1094 | Session.commit() | |
1082 | except Exception: |
|
1095 | except Exception: | |
1083 | log.error(traceback.format_exc()) |
|
1096 | log.error(traceback.format_exc()) | |
1084 | Session.rollback() |
|
1097 | Session.rollback() | |
1085 |
|
1098 | |||
1086 | @classmethod |
|
1099 | @classmethod | |
1087 | def set_valid(cls, key): |
|
1100 | def set_valid(cls, key): | |
1088 | """ |
|
1101 | """ | |
1089 | Mark this cache key as active and currently cached |
|
1102 | Mark this cache key as active and currently cached | |
1090 |
|
1103 | |||
1091 | :param key: |
|
1104 | :param key: | |
1092 | """ |
|
1105 | """ | |
1093 | inv_obj = cls.get_by_key(key) |
|
1106 | inv_obj = cls.get_by_key(key) | |
1094 | inv_obj.cache_active = True |
|
1107 | inv_obj.cache_active = True | |
1095 | Session.add(inv_obj) |
|
1108 | Session.add(inv_obj) | |
1096 | Session.commit() |
|
1109 | Session.commit() | |
1097 |
|
1110 | |||
1098 |
|
1111 | |||
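
For illustration, a sketch of the invalidation cycle described in the docstrings above, assuming a configured RhodeCode environment; a real key would normally go through _get_key() so the optional instance_id prefix is applied:

    from rhodecode.model.db import CacheInvalidation

    key = 'example_repo'                          # hypothetical cache key
    if CacheInvalidation.invalidate(key):
        # a row exists with cache_active=False, so the cached data is stale:
        # rebuild whatever was cached under this key, then flag it as fresh
        CacheInvalidation.set_valid(key)

    # later, when the underlying data changes again
    CacheInvalidation.set_invalidate(key)
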
1099 | class ChangesetComment(Base, BaseModel): |
|
1112 | class ChangesetComment(Base, BaseModel): | |
1100 | __tablename__ = 'changeset_comments' |
|
1113 | __tablename__ = 'changeset_comments' | |
1101 | __table_args__ = ({'extend_existing': True},) |
|
1114 | __table_args__ = ({'extend_existing': True},) | |
1102 | comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True) |
|
1115 | comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True) | |
1103 | repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
1116 | repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) | |
1104 | revision = Column('revision', String(40), nullable=False) |
|
1117 | revision = Column('revision', String(40), nullable=False) | |
1105 | line_no = Column('line_no', Unicode(10), nullable=True) |
|
1118 | line_no = Column('line_no', Unicode(10), nullable=True) | |
1106 | f_path = Column('f_path', Unicode(1000), nullable=True) |
|
1119 | f_path = Column('f_path', Unicode(1000), nullable=True) | |
1107 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False) |
|
1120 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False) | |
1108 | text = Column('text', Unicode(25000), nullable=False) |
|
1121 | text = Column('text', Unicode(25000), nullable=False) | |
1109 | modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now) |
|
1122 | modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now) | |
1110 |
|
1123 | |||
1111 | author = relationship('User', lazy='joined') |
|
1124 | author = relationship('User', lazy='joined') | |
1112 | repo = relationship('Repository') |
|
1125 | repo = relationship('Repository') | |
1113 |
|
1126 | |||
1114 | @classmethod |
|
1127 | @classmethod | |
1115 | def get_users(cls, revision): |
|
1128 | def get_users(cls, revision): | |
1116 | """ |
|
1129 | """ | |
1117 | Returns user associated with this changesetComment. ie those |
|
1130 | Returns user associated with this changesetComment. ie those | |
1118 | who actually commented |
|
1131 | who actually commented | |
1119 |
|
1132 | |||
1120 | :param cls: |
|
1133 | :param cls: | |
1121 | :param revision: |
|
1134 | :param revision: | |
1122 | """ |
|
1135 | """ | |
1123 | return Session.query(User)\ |
|
1136 | return Session.query(User)\ | |
1124 | .filter(cls.revision == revision)\ |
|
1137 | .filter(cls.revision == revision)\ | |
1125 | .join(ChangesetComment.author).all() |
|
1138 | .join(ChangesetComment.author).all() | |
1126 |
|
1139 | |||
1127 |
|
1140 | |||
1128 | class Notification(Base, BaseModel): |
|
1141 | class Notification(Base, BaseModel): | |
1129 | __tablename__ = 'notifications' |
|
1142 | __tablename__ = 'notifications' | |
1130 | __table_args__ = ({'extend_existing': True},) |
|
1143 | __table_args__ = ({'extend_existing': True},) | |
1131 |
|
1144 | |||
1132 | TYPE_CHANGESET_COMMENT = u'cs_comment' |
|
1145 | TYPE_CHANGESET_COMMENT = u'cs_comment' | |
1133 | TYPE_MESSAGE = u'message' |
|
1146 | TYPE_MESSAGE = u'message' | |
1134 | TYPE_MENTION = u'mention' |
|
1147 | TYPE_MENTION = u'mention' | |
1135 | TYPE_REGISTRATION = u'registration' |
|
1148 | TYPE_REGISTRATION = u'registration' | |
1136 |
|
1149 | |||
1137 | notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True) |
|
1150 | notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True) | |
1138 | subject = Column('subject', Unicode(512), nullable=True) |
|
1151 | subject = Column('subject', Unicode(512), nullable=True) | |
1139 | body = Column('body', Unicode(50000), nullable=True) |
|
1152 | body = Column('body', Unicode(50000), nullable=True) | |
1140 | created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True) |
|
1153 | created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True) | |
1141 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1154 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
1142 | type_ = Column('type', Unicode(256)) |
|
1155 | type_ = Column('type', Unicode(256)) | |
1143 |
|
1156 | |||
1144 | created_by_user = relationship('User') |
|
1157 | created_by_user = relationship('User') | |
1145 | notifications_to_users = relationship('UserNotification', lazy='joined', |
|
1158 | notifications_to_users = relationship('UserNotification', lazy='joined', | |
1146 | cascade="all, delete, delete-orphan") |
|
1159 | cascade="all, delete, delete-orphan") | |
1147 |
|
1160 | |||
1148 | @property |
|
1161 | @property | |
1149 | def recipients(self): |
|
1162 | def recipients(self): | |
1150 | return [x.user for x in UserNotification.query()\ |
|
1163 | return [x.user for x in UserNotification.query()\ | |
1151 | .filter(UserNotification.notification == self).all()] |
|
1164 | .filter(UserNotification.notification == self).all()] | |
1152 |
|
1165 | |||
1153 | @classmethod |
|
1166 | @classmethod | |
1154 | def create(cls, created_by, subject, body, recipients, type_=None): |
|
1167 | def create(cls, created_by, subject, body, recipients, type_=None): | |
1155 | if type_ is None: |
|
1168 | if type_ is None: | |
1156 | type_ = Notification.TYPE_MESSAGE |
|
1169 | type_ = Notification.TYPE_MESSAGE | |
1157 |
|
1170 | |||
1158 | notification = cls() |
|
1171 | notification = cls() | |
1159 | notification.created_by_user = created_by |
|
1172 | notification.created_by_user = created_by | |
1160 | notification.subject = subject |
|
1173 | notification.subject = subject | |
1161 | notification.body = body |
|
1174 | notification.body = body | |
1162 | notification.type_ = type_ |
|
1175 | notification.type_ = type_ | |
1163 | notification.created_on = datetime.datetime.now() |
|
1176 | notification.created_on = datetime.datetime.now() | |
1164 |
|
1177 | |||
1165 | for u in recipients: |
|
1178 | for u in recipients: | |
1166 | assoc = UserNotification() |
|
1179 | assoc = UserNotification() | |
1167 | assoc.notification = notification |
|
1180 | assoc.notification = notification | |
1168 | u.notifications.append(assoc) |
|
1181 | u.notifications.append(assoc) | |
1169 | Session.add(notification) |
|
1182 | Session.add(notification) | |
1170 | return notification |
|
1183 | return notification | |
1171 |
|
1184 | |||
1172 | @property |
|
1185 | @property | |
1173 | def description(self): |
|
1186 | def description(self): | |
1174 | from rhodecode.model.notification import NotificationModel |
|
1187 | from rhodecode.model.notification import NotificationModel | |
1175 | return NotificationModel().make_description(self) |
|
1188 | return NotificationModel().make_description(self) | |
1176 |
|
1189 | |||
1177 |
|
1190 | |||
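
For illustration, a sketch of the fan-out implemented by Notification.create() above and the UserNotification association below, assuming existing users and a configured session (create() adds the rows, the commit stays with the caller):

    from rhodecode.model.meta import Session
    from rhodecode.model.db import User, Notification

    admin = User.get_by_username('example_admin')          # hypothetical users
    recipient = User.get_by_username('example_user')
    notif = Notification.create(created_by=admin,
                                subject=u'repository created',
                                body=u'a new repository was just created',
                                recipients=[recipient],
                                type_=Notification.TYPE_MESSAGE)
    Session.commit()
    print [u.username for u in notif.recipients]           # users who will see it
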
1178 | class UserNotification(Base, BaseModel): |
|
1191 | class UserNotification(Base, BaseModel): | |
1179 | __tablename__ = 'user_to_notification' |
|
1192 | __tablename__ = 'user_to_notification' | |
1180 | __table_args__ = ( |
|
1193 | __table_args__ = ( | |
1181 | UniqueConstraint('user_id', 'notification_id'), |
|
1194 | UniqueConstraint('user_id', 'notification_id'), | |
1182 | {'extend_existing': True} |
|
1195 | {'extend_existing': True} | |
1183 | ) |
|
1196 | ) | |
1184 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True) |
|
1197 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True) | |
1185 | notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True) |
|
1198 | notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True) | |
1186 | read = Column('read', Boolean, default=False) |
|
1199 | read = Column('read', Boolean, default=False) | |
1187 | sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None) |
|
1200 | sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None) | |
1188 |
|
1201 | |||
1189 | user = relationship('User', lazy="joined") |
|
1202 | user = relationship('User', lazy="joined") | |
1190 | notification = relationship('Notification', lazy="joined", |
|
1203 | notification = relationship('Notification', lazy="joined", | |
1191 | order_by=lambda: Notification.created_on.desc(),) |
|
1204 | order_by=lambda: Notification.created_on.desc(),) | |
1192 |
|
1205 | |||
1193 | def mark_as_read(self): |
|
1206 | def mark_as_read(self): | |
1194 | self.read = True |
|
1207 | self.read = True | |
1195 | Session.add(self) |
|
1208 | Session.add(self) | |
1196 |
|
1209 | |||
1197 |
|
1210 | |||
1198 | class DbMigrateVersion(Base, BaseModel): |
|
1211 | class DbMigrateVersion(Base, BaseModel): | |
1199 | __tablename__ = 'db_migrate_version' |
|
1212 | __tablename__ = 'db_migrate_version' | |
1200 | __table_args__ = {'extend_existing': True} |
|
1213 | __table_args__ = {'extend_existing': True} | |
1201 | repository_id = Column('repository_id', String(250), primary_key=True) |
|
1214 | repository_id = Column('repository_id', String(250), primary_key=True) | |
1202 | repository_path = Column('repository_path', Text) |
|
1215 | repository_path = Column('repository_path', Text) | |
1203 | version = Column('version', Integer) |
|
1216 | version = Column('version', Integer) |
@@ -1,758 +1,773 b'' | |||||
1 | """ this is forms validation classes |
|
1 | """ this is forms validation classes | |
2 | http://formencode.org/module-formencode.validators.html |
|
2 | http://formencode.org/module-formencode.validators.html | |
3 | for a list of all available validators |
|
3 | for a list of all available validators | |
4 |
|
4 | |||
5 | we can create our own validators |
|
5 | we can create our own validators | |
6 |
|
6 | |||
7 | The table below outlines the options which can be used in a schema in addition to the validators themselves |
|
7 | The table below outlines the options which can be used in a schema in addition to the validators themselves | |
8 | pre_validators [] These validators will be applied before the schema |
|
8 | pre_validators [] These validators will be applied before the schema | |
9 | chained_validators [] These validators will be applied after the schema |
|
9 | chained_validators [] These validators will be applied after the schema | |
10 | allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present |
|
10 | allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present | |
11 | filter_extra_fields False If True, then keys that aren't associated with a validator are removed |
|
11 | filter_extra_fields False If True, then keys that aren't associated with a validator are removed | |
12 | if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default values has been specified and therefore missing keys shouldn't take a default value. |
|
12 | if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default values has been specified and therefore missing keys shouldn't take a default value. | |
13 | ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already |
|
13 | ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already | |
14 |
|
14 | |||
15 |
|
15 | |||
16 | <name> = formencode.validators.<name of validator> |
|
16 | <name> = formencode.validators.<name of validator> | |
17 | <name> must equal form name |
|
17 | <name> must equal form name | |
18 | list=[1,2,3,4,5] |
|
18 | list=[1,2,3,4,5] | |
19 | for SELECT use formencode.All(OneOf(list), Int()) |
|
19 | for SELECT use formencode.All(OneOf(list), Int()) | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
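
To make the options above concrete, a small self-contained FormEncode sketch, illustrative only, combining allow_extra_fields, filter_extra_fields and the All(OneOf(list), Int()) idiom suggested for SELECT fields:

    import formencode
    from formencode import validators

    class _ExampleForm(formencode.Schema):
        # unknown keys are accepted but dropped from the result
        allow_extra_fields = True
        filter_extra_fields = True
        # the attribute name must equal the form field name
        username = validators.UnicodeString(not_empty=True, strip=True)
        # SELECT-style field, per the note above: value must be one of the
        # listed choices once Int() has converted it
        lifetime = formencode.All(validators.OneOf([1, 2, 3, 4, 5]),
                                  validators.Int())

    result = _ExampleForm().to_python({'username': u'marcin',
                                       'lifetime': '3',
                                       'ignored': 'dropped'})
    # result == {'username': u'marcin', 'lifetime': 3}
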
22 | import os |
|
22 | import os | |
23 | import re |
|
23 | import re | |
24 | import logging |
|
24 | import logging | |
25 | import traceback |
|
25 | import traceback | |
26 |
|
26 | |||
27 | import formencode |
|
27 | import formencode | |
28 | from formencode import All |
|
28 | from formencode import All | |
29 | from formencode.validators import UnicodeString, OneOf, Int, Number, Regex, \ |
|
29 | from formencode.validators import UnicodeString, OneOf, Int, Number, Regex, \ | |
30 | Email, Bool, StringBoolean, Set |
|
30 | Email, Bool, StringBoolean, Set | |
31 |
|
31 | |||
32 | from pylons.i18n.translation import _ |
|
32 | from pylons.i18n.translation import _ | |
33 | from webhelpers.pylonslib.secure_form import authentication_token |
|
33 | from webhelpers.pylonslib.secure_form import authentication_token | |
34 |
|
34 | |||
35 | from rhodecode.config.routing import ADMIN_PREFIX |
|
35 | from rhodecode.config.routing import ADMIN_PREFIX | |
36 | from rhodecode.lib.utils import repo_name_slug |
|
36 | from rhodecode.lib.utils import repo_name_slug | |
37 | from rhodecode.lib.auth import authenticate, get_crypt_password |
|
37 | from rhodecode.lib.auth import authenticate, get_crypt_password | |
38 | from rhodecode.lib.exceptions import LdapImportError |
|
38 | from rhodecode.lib.exceptions import LdapImportError | |
39 | from rhodecode.model.db import User, UsersGroup, RepoGroup, Repository |
|
39 | from rhodecode.model.db import User, UsersGroup, RepoGroup, Repository | |
40 | from rhodecode import BACKENDS |
|
40 | from rhodecode import BACKENDS | |
41 |
|
41 | |||
42 | log = logging.getLogger(__name__) |
|
42 | log = logging.getLogger(__name__) | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | #this is needed to translate the messages using _() in validators |
|
45 | #this is needed to translate the messages using _() in validators | |
46 | class State_obj(object): |
|
46 | class State_obj(object): | |
47 | _ = staticmethod(_) |
|
47 | _ = staticmethod(_) | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | #============================================================================== |
|
50 | #============================================================================== | |
51 | # VALIDATORS |
|
51 | # VALIDATORS | |
52 | #============================================================================== |
|
52 | #============================================================================== | |
53 | class ValidAuthToken(formencode.validators.FancyValidator): |
|
53 | class ValidAuthToken(formencode.validators.FancyValidator): | |
54 | messages = {'invalid_token': _('Token mismatch')} |
|
54 | messages = {'invalid_token': _('Token mismatch')} | |
55 |
|
55 | |||
56 | def validate_python(self, value, state): |
|
56 | def validate_python(self, value, state): | |
57 |
|
57 | |||
58 | if value != authentication_token(): |
|
58 | if value != authentication_token(): | |
59 | raise formencode.Invalid( |
|
59 | raise formencode.Invalid( | |
60 | self.message('invalid_token', |
|
60 | self.message('invalid_token', | |
61 | state, search_number=value), |
|
61 | state, search_number=value), | |
62 | value, |
|
62 | value, | |
63 | state |
|
63 | state | |
64 | ) |
|
64 | ) | |
65 |
|
65 | |||
66 |
|
66 | |||
67 | def ValidUsername(edit, old_data): |
|
67 | def ValidUsername(edit, old_data): | |
68 | class _ValidUsername(formencode.validators.FancyValidator): |
|
68 | class _ValidUsername(formencode.validators.FancyValidator): | |
69 |
|
69 | |||
70 | def validate_python(self, value, state): |
|
70 | def validate_python(self, value, state): | |
71 | if value in ['default', 'new_user']: |
|
71 | if value in ['default', 'new_user']: | |
72 | raise formencode.Invalid(_('Invalid username'), value, state) |
|
72 | raise formencode.Invalid(_('Invalid username'), value, state) | |
73 | #check if user is unique |
|
73 | #check if user is unique | |
74 | old_un = None |
|
74 | old_un = None | |
75 | if edit: |
|
75 | if edit: | |
76 | old_un = User.get(old_data.get('user_id')).username |
|
76 | old_un = User.get(old_data.get('user_id')).username | |
77 |
|
77 | |||
78 | if old_un != value or not edit: |
|
78 | if old_un != value or not edit: | |
79 | if User.get_by_username(value, case_insensitive=True): |
|
79 | if User.get_by_username(value, case_insensitive=True): | |
80 | raise formencode.Invalid(_('This username already ' |
|
80 | raise formencode.Invalid(_('This username already ' | |
81 | 'exists') , value, state) |
|
81 | 'exists') , value, state) | |
82 |
|
82 | |||
83 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None: |
|
83 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None: | |
84 | raise formencode.Invalid( |
|
84 | raise formencode.Invalid( | |
85 | _('Username may only contain alphanumeric characters ' |
|
85 | _('Username may only contain alphanumeric characters ' | |
86 | 'underscores, periods or dashes and must begin with ' |
|
86 | 'underscores, periods or dashes and must begin with ' | |
87 | 'alphanumeric character'), |
|
87 | 'alphanumeric character'), | |
88 | value, |
|
88 | value, | |
89 | state |
|
89 | state | |
90 | ) |
|
90 | ) | |
91 |
|
91 | |||
92 | return _ValidUsername |
|
92 | return _ValidUsername | |
93 |
|
93 | |||
94 |
|
94 | |||
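
As a standalone illustration of the username rule enforced above (the same regex, outside of FormEncode):

    import re

    USERNAME_RE = re.compile(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$')

    # must start with a letter or digit; dots, dashes and underscores may follow
    assert USERNAME_RE.match('john.doe')
    assert USERNAME_RE.match('user-42')
    assert not USERNAME_RE.match('_leading_underscore')
    assert not USERNAME_RE.match('a')    # needs at least two characters
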
95 | def ValidUsersGroup(edit, old_data): |
|
95 | def ValidUsersGroup(edit, old_data): | |
96 |
|
96 | |||
97 | class _ValidUsersGroup(formencode.validators.FancyValidator): |
|
97 | class _ValidUsersGroup(formencode.validators.FancyValidator): | |
98 |
|
98 | |||
99 | def validate_python(self, value, state): |
|
99 | def validate_python(self, value, state): | |
100 | if value in ['default']: |
|
100 | if value in ['default']: | |
101 | raise formencode.Invalid(_('Invalid group name'), value, state) |
|
101 | raise formencode.Invalid(_('Invalid group name'), value, state) | |
102 | #check if group is unique |
|
102 | #check if group is unique | |
103 | old_ugname = None |
|
103 | old_ugname = None | |
104 | if edit: |
|
104 | if edit: | |
105 | old_ugname = UsersGroup.get( |
|
105 | old_ugname = UsersGroup.get( | |
106 | old_data.get('users_group_id')).users_group_name |
|
106 | old_data.get('users_group_id')).users_group_name | |
107 |
|
107 | |||
108 | if old_ugname != value or not edit: |
|
108 | if old_ugname != value or not edit: | |
109 | if UsersGroup.get_by_group_name(value, cache=False, |
|
109 | if UsersGroup.get_by_group_name(value, cache=False, | |
110 | case_insensitive=True): |
|
110 | case_insensitive=True): | |
111 | raise formencode.Invalid(_('This users group ' |
|
111 | raise formencode.Invalid(_('This users group ' | |
112 | 'already exists'), value, |
|
112 | 'already exists'), value, | |
113 | state) |
|
113 | state) | |
114 |
|
114 | |||
115 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None: |
|
115 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None: | |
116 | raise formencode.Invalid( |
|
116 | raise formencode.Invalid( | |
117 | _('RepoGroup name may only contain alphanumeric characters ' |
|
117 | _('RepoGroup name may only contain alphanumeric characters ' | |
118 | 'underscores, periods or dashes and must begin with ' |
|
118 | 'underscores, periods or dashes and must begin with ' | |
119 | 'alphanumeric character'), |
|
119 | 'alphanumeric character'), | |
120 | value, |
|
120 | value, | |
121 | state |
|
121 | state | |
122 | ) |
|
122 | ) | |
123 |
|
123 | |||
124 | return _ValidUsersGroup |
|
124 | return _ValidUsersGroup | |
125 |
|
125 | |||
126 |
|
126 | |||
127 | def ValidReposGroup(edit, old_data): |
|
127 | def ValidReposGroup(edit, old_data): | |
128 | class _ValidReposGroup(formencode.validators.FancyValidator): |
|
128 | class _ValidReposGroup(formencode.validators.FancyValidator): | |
129 |
|
129 | |||
130 | def validate_python(self, value, state): |
|
130 | def validate_python(self, value, state): | |
131 | # TODO WRITE VALIDATIONS |
|
131 | # TODO WRITE VALIDATIONS | |
132 | group_name = value.get('group_name') |
|
132 | group_name = value.get('group_name') | |
133 | group_parent_id = value.get('group_parent_id') |
|
133 | group_parent_id = value.get('group_parent_id') | |
134 |
|
134 | |||
135 | # slugify repo group just in case :) |
|
135 | # slugify repo group just in case :) | |
136 | slug = repo_name_slug(group_name) |
|
136 | slug = repo_name_slug(group_name) | |
137 |
|
137 | |||
138 | # check for parent of self |
|
138 | # check for parent of self | |
139 | parent_of_self = lambda: ( |
|
139 | parent_of_self = lambda: ( | |
140 | old_data['group_id'] == int(group_parent_id) |
|
140 | old_data['group_id'] == int(group_parent_id) | |
141 | if group_parent_id else False |
|
141 | if group_parent_id else False | |
142 | ) |
|
142 | ) | |
143 | if edit and parent_of_self(): |
|
143 | if edit and parent_of_self(): | |
144 | e_dict = { |
|
144 | e_dict = { | |
145 | 'group_parent_id': _('Cannot assign this group as parent') |
|
145 | 'group_parent_id': _('Cannot assign this group as parent') | |
146 | } |
|
146 | } | |
147 | raise formencode.Invalid('', value, state, |
|
147 | raise formencode.Invalid('', value, state, | |
148 | error_dict=e_dict) |
|
148 | error_dict=e_dict) | |
149 |
|
149 | |||
150 | old_gname = None |
|
150 | old_gname = None | |
151 | if edit: |
|
151 | if edit: | |
152 | old_gname = RepoGroup.get(old_data.get('group_id')).group_name |
|
152 | old_gname = RepoGroup.get(old_data.get('group_id')).group_name | |
153 |
|
153 | |||
154 | if old_gname != group_name or not edit: |
|
154 | if old_gname != group_name or not edit: | |
155 |
|
155 | |||
156 | # check group |
|
156 | # check group | |
157 | gr = RepoGroup.query()\ |
|
157 | gr = RepoGroup.query()\ | |
158 | .filter(RepoGroup.group_name == slug)\ |
|
158 | .filter(RepoGroup.group_name == slug)\ | |
159 | .filter(RepoGroup.group_parent_id == group_parent_id)\ |
|
159 | .filter(RepoGroup.group_parent_id == group_parent_id)\ | |
160 | .scalar() |
|
160 | .scalar() | |
161 |
|
161 | |||
162 | if gr: |
|
162 | if gr: | |
163 | e_dict = { |
|
163 | e_dict = { | |
164 | 'group_name': _('This group already exists') |
|
164 | 'group_name': _('This group already exists') | |
165 | } |
|
165 | } | |
166 | raise formencode.Invalid('', value, state, |
|
166 | raise formencode.Invalid('', value, state, | |
167 | error_dict=e_dict) |
|
167 | error_dict=e_dict) | |
168 |
|
168 | |||
169 | # check for same repo |
|
169 | # check for same repo | |
170 | repo = Repository.query()\ |
|
170 | repo = Repository.query()\ | |
171 | .filter(Repository.repo_name == slug)\ |
|
171 | .filter(Repository.repo_name == slug)\ | |
172 | .scalar() |
|
172 | .scalar() | |
173 |
|
173 | |||
174 | if repo: |
|
174 | if repo: | |
175 | e_dict = { |
|
175 | e_dict = { | |
176 | 'group_name': _('Repository with this name already exists') |
|
176 | 'group_name': _('Repository with this name already exists') | |
177 | } |
|
177 | } | |
178 | raise formencode.Invalid('', value, state, |
|
178 | raise formencode.Invalid('', value, state, | |
179 | error_dict=e_dict) |
|
179 | error_dict=e_dict) | |
180 |
|
180 | |||
181 | return _ValidReposGroup |
|
181 | return _ValidReposGroup | |
182 |
|
182 | |||
183 |
|
183 | |||
184 | class ValidPassword(formencode.validators.FancyValidator): |
|
184 | class ValidPassword(formencode.validators.FancyValidator): | |
185 |
|
185 | |||
186 | def to_python(self, value, state): |
|
186 | def to_python(self, value, state): | |
187 |
|
187 | |||
188 | if not value: |
|
188 | if not value: | |
189 | return |
|
189 | return | |
190 |
|
190 | |||
191 | if value.get('password'): |
|
191 | if value.get('password'): | |
192 | try: |
|
192 | try: | |
193 | value['password'] = get_crypt_password(value['password']) |
|
193 | value['password'] = get_crypt_password(value['password']) | |
194 | except UnicodeEncodeError: |
|
194 | except UnicodeEncodeError: | |
195 | e_dict = {'password': _('Invalid characters in password')} |
|
195 | e_dict = {'password': _('Invalid characters in password')} | |
196 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
196 | raise formencode.Invalid('', value, state, error_dict=e_dict) | |
197 |
|
197 | |||
198 | if value.get('password_confirmation'): |
|
198 | if value.get('password_confirmation'): | |
199 | try: |
|
199 | try: | |
200 | value['password_confirmation'] = \ |
|
200 | value['password_confirmation'] = \ | |
201 | get_crypt_password(value['password_confirmation']) |
|
201 | get_crypt_password(value['password_confirmation']) | |
202 | except UnicodeEncodeError: |
|
202 | except UnicodeEncodeError: | |
203 | e_dict = { |
|
203 | e_dict = { | |
204 | 'password_confirmation': _('Invalid characters in password') |
|
204 | 'password_confirmation': _('Invalid characters in password') | |
205 | } |
|
205 | } | |
206 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
206 | raise formencode.Invalid('', value, state, error_dict=e_dict) | |
207 |
|
207 | |||
208 | if value.get('new_password'): |
|
208 | if value.get('new_password'): | |
209 | try: |
|
209 | try: | |
210 | value['new_password'] = \ |
|
210 | value['new_password'] = \ | |
211 | get_crypt_password(value['new_password']) |
|
211 | get_crypt_password(value['new_password']) | |
212 | except UnicodeEncodeError: |
|
212 | except UnicodeEncodeError: | |
213 | e_dict = {'new_password': _('Invalid characters in password')} |
|
213 | e_dict = {'new_password': _('Invalid characters in password')} | |
214 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
214 | raise formencode.Invalid('', value, state, error_dict=e_dict) | |
215 |
|
215 | |||
216 | return value |
|
216 | return value | |
217 |
|
217 | |||
218 |
|
218 | |||
219 | class ValidPasswordsMatch(formencode.validators.FancyValidator): |
|
219 | class ValidPasswordsMatch(formencode.validators.FancyValidator): | |
220 |
|
220 | |||
221 | def validate_python(self, value, state): |
|
221 | def validate_python(self, value, state): | |
222 |
|
222 | |||
223 | pass_val = value.get('password') or value.get('new_password') |
|
223 | pass_val = value.get('password') or value.get('new_password') | |
224 | if pass_val != value['password_confirmation']: |
|
224 | if pass_val != value['password_confirmation']: | |
225 | e_dict = {'password_confirmation': |
|
225 | e_dict = {'password_confirmation': | |
226 | _('Passwords do not match')} |
|
226 | _('Passwords do not match')} | |
227 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
227 | raise formencode.Invalid('', value, state, error_dict=e_dict) | |
228 |
|
228 | |||
229 |
|
229 | |||
230 | class ValidAuth(formencode.validators.FancyValidator): |
|
230 | class ValidAuth(formencode.validators.FancyValidator): | |
231 | messages = { |
|
231 | messages = { | |
232 | 'invalid_password':_('invalid password'), |
|
232 | 'invalid_password':_('invalid password'), | |
233 | 'invalid_login':_('invalid user name'), |
|
233 | 'invalid_login':_('invalid user name'), | |
234 | 'disabled_account':_('Your account is disabled') |
|
234 | 'disabled_account':_('Your account is disabled') | |
235 | } |
|
235 | } | |
236 |
|
236 | |||
237 | # error mapping |
|
237 | # error mapping | |
238 | e_dict = {'username': messages['invalid_login'], |
|
238 | e_dict = {'username': messages['invalid_login'], | |
239 | 'password': messages['invalid_password']} |
|
239 | 'password': messages['invalid_password']} | |
240 | e_dict_disable = {'username': messages['disabled_account']} |
|
240 | e_dict_disable = {'username': messages['disabled_account']} | |
241 |
|
241 | |||
242 | def validate_python(self, value, state): |
|
242 | def validate_python(self, value, state): | |
243 | password = value['password'] |
|
243 | password = value['password'] | |
244 | username = value['username'] |
|
244 | username = value['username'] | |
245 | user = User.get_by_username(username) |
|
245 | user = User.get_by_username(username) | |
246 |
|
246 | |||
247 | if authenticate(username, password): |
|
247 | if authenticate(username, password): | |
248 | return value |
|
248 | return value | |
249 | else: |
|
249 | else: | |
250 | if user and user.active is False: |
|
250 | if user and user.active is False: | |
251 | log.warning('user %s is disabled' % username) |
|
251 | log.warning('user %s is disabled' % username) | |
252 | raise formencode.Invalid( |
|
252 | raise formencode.Invalid( | |
253 | self.message('disabled_account', |
|
253 | self.message('disabled_account', | |
254 | state=State_obj), |
|
254 | state=State_obj), | |
255 | value, state, |
|
255 | value, state, | |
256 | error_dict=self.e_dict_disable |
|
256 | error_dict=self.e_dict_disable | |
257 | ) |
|
257 | ) | |
258 | else: |
|
258 | else: | |
259 | log.warning('user %s failed to authenticate' % username) |
|
259 | log.warning('user %s failed to authenticate' % username) | |
260 | raise formencode.Invalid( |
|
260 | raise formencode.Invalid( | |
261 | self.message('invalid_password', |
|
261 | self.message('invalid_password', | |
262 | state=State_obj), value, state, |
|
262 | state=State_obj), value, state, | |
263 | error_dict=self.e_dict |
|
263 | error_dict=self.e_dict | |
264 | ) |
|
264 | ) | |
265 |
|
265 | |||
266 |
|
266 | |||
267 | class ValidRepoUser(formencode.validators.FancyValidator): |
|
267 | class ValidRepoUser(formencode.validators.FancyValidator): | |
268 |
|
268 | |||
269 | def to_python(self, value, state): |
|
269 | def to_python(self, value, state): | |
270 | try: |
|
270 | try: | |
271 | User.query().filter(User.active == True)\ |
|
271 | User.query().filter(User.active == True)\ | |
272 | .filter(User.username == value).one() |
|
272 | .filter(User.username == value).one() | |
273 | except Exception: |
|
273 | except Exception: | |
274 | raise formencode.Invalid(_('This username is not valid'), |
|
274 | raise formencode.Invalid(_('This username is not valid'), | |
275 | value, state) |
|
275 | value, state) | |
276 | return value |
|
276 | return value | |
277 |
|
277 | |||
278 |
|
278 | |||
279 | def ValidRepoName(edit, old_data): |
|
279 | def ValidRepoName(edit, old_data): | |
280 | class _ValidRepoName(formencode.validators.FancyValidator): |
|
280 | class _ValidRepoName(formencode.validators.FancyValidator): | |
281 | def to_python(self, value, state): |
|
281 | def to_python(self, value, state): | |
282 |
|
282 | |||
283 | repo_name = value.get('repo_name') |
|
283 | repo_name = value.get('repo_name') | |
284 |
|
284 | |||
285 | slug = repo_name_slug(repo_name) |
|
285 | slug = repo_name_slug(repo_name) | |
286 | if slug in [ADMIN_PREFIX, '']: |
|
286 | if slug in [ADMIN_PREFIX, '']: | |
287 | e_dict = {'repo_name': _('This repository name is disallowed')} |
|
287 | e_dict = {'repo_name': _('This repository name is disallowed')} | |
288 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
288 | raise formencode.Invalid('', value, state, error_dict=e_dict) | |
289 |
|
289 | |||
290 | if value.get('repo_group'): |
|
290 | if value.get('repo_group'): | |
291 | gr = RepoGroup.get(value.get('repo_group')) |
|
291 | gr = RepoGroup.get(value.get('repo_group')) | |
292 | group_path = gr.full_path |
|
292 | group_path = gr.full_path | |
293 | # value needs to be aware of group name in order to check |
|
293 | # value needs to be aware of group name in order to check | |
294 | # db key This is an actual just the name to store in the |
|
294 | # db key This is an actual just the name to store in the | |
295 | # database |
|
295 | # database | |
296 | repo_name_full = group_path + RepoGroup.url_sep() + repo_name |
|
296 | repo_name_full = group_path + RepoGroup.url_sep() + repo_name | |
297 |
|
297 | |||
298 | else: |
|
298 | else: | |
299 | group_path = '' |
|
299 | group_path = '' | |
300 | repo_name_full = repo_name |
|
300 | repo_name_full = repo_name | |
301 |
|
301 | |||
302 | value['repo_name_full'] = repo_name_full |
|
302 | value['repo_name_full'] = repo_name_full | |
303 | rename = old_data.get('repo_name') != repo_name_full |
|
303 | rename = old_data.get('repo_name') != repo_name_full | |
304 | create = not edit |
|
304 | create = not edit | |
305 | if rename or create: |
|
305 | if rename or create: | |
306 |
|
306 | |||
307 | if group_path != '': |
|
307 | if group_path != '': | |
308 | if Repository.get_by_repo_name(repo_name_full): |
|
308 | if Repository.get_by_repo_name(repo_name_full): | |
309 | e_dict = { |
|
309 | e_dict = { | |
310 | 'repo_name': _('This repository already exists in ' |
|
310 | 'repo_name': _('This repository already exists in ' | |
311 | 'a group "%s"') % gr.group_name |
|
311 | 'a group "%s"') % gr.group_name | |
312 | } |
|
312 | } | |
313 | raise formencode.Invalid('', value, state, |
|
313 | raise formencode.Invalid('', value, state, | |
314 | error_dict=e_dict) |
|
314 | error_dict=e_dict) | |
315 | elif RepoGroup.get_by_group_name(repo_name_full): |
|
315 | elif RepoGroup.get_by_group_name(repo_name_full): | |
316 | e_dict = { |
|
316 | e_dict = { | |
317 | 'repo_name': _('There is a group with this name ' |
|
317 | 'repo_name': _('There is a group with this name ' | |
318 | 'already "%s"') % repo_name_full |
|
318 | 'already "%s"') % repo_name_full | |
319 | } |
|
319 | } | |
320 | raise formencode.Invalid('', value, state, |
|
320 | raise formencode.Invalid('', value, state, | |
321 | error_dict=e_dict) |
|
321 | error_dict=e_dict) | |
322 |
|
322 | |||
323 | elif Repository.get_by_repo_name(repo_name_full): |
|
323 | elif Repository.get_by_repo_name(repo_name_full): | |
324 | e_dict = {'repo_name': _('This repository ' |
|
324 | e_dict = {'repo_name': _('This repository ' | |
325 | 'already exists')} |
|
325 | 'already exists')} | |
326 | raise formencode.Invalid('', value, state, |
|
326 | raise formencode.Invalid('', value, state, | |
327 | error_dict=e_dict) |
|
327 | error_dict=e_dict) | |
328 |
|
328 | |||
329 | return value |
|
329 | return value | |
330 |
|
330 | |||
331 | return _ValidRepoName |
|
331 | return _ValidRepoName | |
332 |
|
332 | |||
333 |
|
333 | |||
334 | def ValidForkName(*args, **kwargs): |
|
334 | def ValidForkName(*args, **kwargs): | |
335 | return ValidRepoName(*args, **kwargs) |
|
335 | return ValidRepoName(*args, **kwargs) | |
336 |
|
336 | |||
337 |
|
337 | |||
338 | def SlugifyName(): |
|
338 | def SlugifyName(): | |
339 | class _SlugifyName(formencode.validators.FancyValidator): |
|
339 | class _SlugifyName(formencode.validators.FancyValidator): | |
340 |
|
340 | |||
341 | def to_python(self, value, state): |
|
341 | def to_python(self, value, state): | |
342 | return repo_name_slug(value) |
|
342 | return repo_name_slug(value) | |
343 |
|
343 | |||
344 | return _SlugifyName |
|
344 | return _SlugifyName | |
345 |
|
345 | |||
346 |
|
346 | |||
 def ValidCloneUri():
-    from mercurial.httprepo import httprepository, httpsrepository
     from rhodecode.lib.utils import make_ui
 
+    def url_handler(repo_type, url, proto, ui=None):
+        if repo_type == 'hg':
+            from mercurial.httprepo import httprepository, httpsrepository
+            if proto == 'https':
+                httpsrepository(make_ui('db'), url).capabilities
+            elif proto == 'http':
+                httprepository(make_ui('db'), url).capabilities
+        elif repo_type == 'git':
+            #TODO: write a git url validator
+            pass
+
     class _ValidCloneUri(formencode.validators.FancyValidator):
 
         def to_python(self, value, state):
-            if not value:
+
+            repo_type = value.get('repo_type')
+            url = value.get('clone_uri')
+            e_dict = {'clone_uri': _('invalid clone url')}
+
+            if not url:
                 pass
-            elif value.startswith('https'):
+            elif url.startswith('https'):
                 try:
-                    httpsrepository(make_ui('db'), value).capabilities
+                    url_handler(repo_type, url, 'https', make_ui('db'))
                 except Exception:
                     log.error(traceback.format_exc())
-                    raise formencode.Invalid(_('invalid clone url'), value,
-                                             state)
-            elif value.startswith('http'):
+                    raise formencode.Invalid('', value, state, error_dict=e_dict)
+            elif url.startswith('http'):
                 try:
-                    httprepository(make_ui('db'), value).capabilities
+                    url_handler(repo_type, url, 'http', make_ui('db'))
                 except Exception:
                     log.error(traceback.format_exc())
-                    raise formencode.Invalid(_('invalid clone url'), value,
-                                             state)
+                    raise formencode.Invalid('', value, state, error_dict=e_dict)
             else:
-                raise formencode.Invalid(_('Invalid clone url, provide a '
-                                           'valid clone http\s url'), value,
-                                         state)
+                e_dict = {'clone_uri': _('Invalid clone url, provide a '
+                                         'valid clone http\s url')}
+                raise formencode.Invalid('', value, state, error_dict=e_dict)
+
             return value
 
     return _ValidCloneUri
377 |
|
391 | |||
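Note: the git branch of url_handler above is still a TODO. A minimal sketch of the kind of check it could grow into, purely illustrative and not part of this changeset (the helper name and the probed endpoint are assumptions):

# Illustrative sketch only -- not RhodeCode code. Probes a git smart-HTTP
# remote by requesting its ref advertisement; any URL or HTTP error is
# treated as an invalid clone url.
try:
    from urllib2 import urlopen, URLError      # Python 2, the era of this code
except ImportError:
    from urllib.request import urlopen         # Python 3 fallback
    from urllib.error import URLError

def naive_git_url_check(url, timeout=10):
    probe = url.rstrip('/') + '/info/refs?service=git-upload-pack'
    try:
        urlopen(probe, timeout=timeout)
        return True
    except (URLError, ValueError):
        return False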
378 |
|
392 | |||
379 | def ValidForkType(old_data): |
|
393 | def ValidForkType(old_data): | |
380 | class _ValidForkType(formencode.validators.FancyValidator): |
|
394 | class _ValidForkType(formencode.validators.FancyValidator): | |
381 |
|
395 | |||
382 | def to_python(self, value, state): |
|
396 | def to_python(self, value, state): | |
383 | if old_data['repo_type'] != value: |
|
397 | if old_data['repo_type'] != value: | |
384 | raise formencode.Invalid(_('Fork has to be the same ' |
|
398 | raise formencode.Invalid(_('Fork has to be the same ' | |
385 | 'type as the original'), value, state) |
|
399 | 'type as the original'), value, state) | |
386 |
|
400 | |||
387 | return value |
|
401 | return value | |
388 | return _ValidForkType |
|
402 | return _ValidForkType | |
389 |
|
403 | |||
390 |
|
404 | |||
391 | def ValidPerms(type_='repo'): |
|
405 | def ValidPerms(type_='repo'): | |
392 | if type_ == 'group': |
|
406 | if type_ == 'group': | |
393 | EMPTY_PERM = 'group.none' |
|
407 | EMPTY_PERM = 'group.none' | |
394 | elif type_ == 'repo': |
|
408 | elif type_ == 'repo': | |
395 | EMPTY_PERM = 'repository.none' |
|
409 | EMPTY_PERM = 'repository.none' | |
396 |
|
410 | |||
397 | class _ValidPerms(formencode.validators.FancyValidator): |
|
411 | class _ValidPerms(formencode.validators.FancyValidator): | |
398 | messages = { |
|
412 | messages = { | |
399 | 'perm_new_member_name': |
|
413 | 'perm_new_member_name': | |
400 | _('This username or users group name is not valid') |
|
414 | _('This username or users group name is not valid') | |
401 | } |
|
415 | } | |
402 |
|
416 | |||
403 | def to_python(self, value, state): |
|
417 | def to_python(self, value, state): | |
404 | perms_update = [] |
|
418 | perms_update = [] | |
405 | perms_new = [] |
|
419 | perms_new = [] | |
406 | # build a list of permission to update and new permission to create |
|
420 | # build a list of permission to update and new permission to create | |
407 | for k, v in value.items(): |
|
421 | for k, v in value.items(): | |
408 | # means new added member to permissions |
|
422 | # means new added member to permissions | |
409 | if k.startswith('perm_new_member'): |
|
423 | if k.startswith('perm_new_member'): | |
410 | new_perm = value.get('perm_new_member', False) |
|
424 | new_perm = value.get('perm_new_member', False) | |
411 | new_member = value.get('perm_new_member_name', False) |
|
425 | new_member = value.get('perm_new_member_name', False) | |
412 | new_type = value.get('perm_new_member_type') |
|
426 | new_type = value.get('perm_new_member_type') | |
413 |
|
427 | |||
414 | if new_member and new_perm: |
|
428 | if new_member and new_perm: | |
415 | if (new_member, new_perm, new_type) not in perms_new: |
|
429 | if (new_member, new_perm, new_type) not in perms_new: | |
416 | perms_new.append((new_member, new_perm, new_type)) |
|
430 | perms_new.append((new_member, new_perm, new_type)) | |
417 | elif k.startswith('u_perm_') or k.startswith('g_perm_'): |
|
431 | elif k.startswith('u_perm_') or k.startswith('g_perm_'): | |
418 | member = k[7:] |
|
432 | member = k[7:] | |
419 | t = {'u': 'user', |
|
433 | t = {'u': 'user', | |
420 | 'g': 'users_group' |
|
434 | 'g': 'users_group' | |
421 | }[k[0]] |
|
435 | }[k[0]] | |
422 | if member == 'default': |
|
436 | if member == 'default': | |
423 | if value.get('private'): |
|
437 | if value.get('private'): | |
424 | # set none for default when updating to private repo |
|
438 | # set none for default when updating to private repo | |
425 | v = EMPTY_PERM |
|
439 | v = EMPTY_PERM | |
426 | perms_update.append((member, v, t)) |
|
440 | perms_update.append((member, v, t)) | |
427 |
|
441 | |||
428 | value['perms_updates'] = perms_update |
|
442 | value['perms_updates'] = perms_update | |
429 | value['perms_new'] = perms_new |
|
443 | value['perms_new'] = perms_new | |
430 |
|
444 | |||
431 | # update permissions |
|
445 | # update permissions | |
432 | for k, v, t in perms_new: |
|
446 | for k, v, t in perms_new: | |
433 | try: |
|
447 | try: | |
434 | if t == 'user': |
|
448 | if t == 'user': | |
435 | self.user_db = User.query()\ |
|
449 | self.user_db = User.query()\ | |
436 | .filter(User.active == True)\ |
|
450 | .filter(User.active == True)\ | |
437 | .filter(User.username == k).one() |
|
451 | .filter(User.username == k).one() | |
438 | if t == 'users_group': |
|
452 | if t == 'users_group': | |
439 | self.user_db = UsersGroup.query()\ |
|
453 | self.user_db = UsersGroup.query()\ | |
440 | .filter(UsersGroup.users_group_active == True)\ |
|
454 | .filter(UsersGroup.users_group_active == True)\ | |
441 | .filter(UsersGroup.users_group_name == k).one() |
|
455 | .filter(UsersGroup.users_group_name == k).one() | |
442 |
|
456 | |||
443 | except Exception: |
|
457 | except Exception: | |
444 | msg = self.message('perm_new_member_name', |
|
458 | msg = self.message('perm_new_member_name', | |
445 | state=State_obj) |
|
459 | state=State_obj) | |
446 | raise formencode.Invalid( |
|
460 | raise formencode.Invalid( | |
447 | msg, value, state, error_dict={'perm_new_member_name': msg} |
|
461 | msg, value, state, error_dict={'perm_new_member_name': msg} | |
448 | ) |
|
462 | ) | |
449 | return value |
|
463 | return value | |
450 | return _ValidPerms |
|
464 | return _ValidPerms | |
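For orientation, _ValidPerms above flattens checkbox-style form fields into two lists. A hedged example of the payload it expects and what it adds (the member names below are invented; the key prefixes match the parsing logic above):

# Example input dict for _ValidPerms.to_python(); names are illustrative.
form_value = {
    'perm_new_member': 'repository.write',   # permission for the new member
    'perm_new_member_name': 'john',          # hypothetical username
    'perm_new_member_type': 'user',
    'u_perm_default': 'repository.read',     # existing user permission row
    'g_perm_devs': 'repository.write',       # existing users-group row
    'private': False,
}
# After validation the dict additionally carries:
#   value['perms_new']     -> [('john', 'repository.write', 'user')]
#   value['perms_updates'] -> ('default', 'repository.read', 'user') and
#                             ('devs', 'repository.write', 'users_group')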
451 |
|
465 | |||
452 |
|
466 | |||
453 | class ValidSettings(formencode.validators.FancyValidator): |
|
467 | class ValidSettings(formencode.validators.FancyValidator): | |
454 |
|
468 | |||
455 | def to_python(self, value, state): |
|
469 | def to_python(self, value, state): | |
456 | # settings form can't edit user |
|
470 | # settings form can't edit user | |
457 | if 'user' in value: |
|
471 | if 'user' in value: | |
458 | del['value']['user'] |
|
472 | del['value']['user'] | |
459 | return value |
|
473 | return value | |
460 |
|
474 | |||
461 |
|
475 | |||
462 | class ValidPath(formencode.validators.FancyValidator): |
|
476 | class ValidPath(formencode.validators.FancyValidator): | |
463 | def to_python(self, value, state): |
|
477 | def to_python(self, value, state): | |
464 |
|
478 | |||
465 | if not os.path.isdir(value): |
|
479 | if not os.path.isdir(value): | |
466 | msg = _('This is not a valid path') |
|
480 | msg = _('This is not a valid path') | |
467 | raise formencode.Invalid(msg, value, state, |
|
481 | raise formencode.Invalid(msg, value, state, | |
468 | error_dict={'paths_root_path': msg}) |
|
482 | error_dict={'paths_root_path': msg}) | |
469 | return value |
|
483 | return value | |
470 |
|
484 | |||
471 |
|
485 | |||
472 | def UniqSystemEmail(old_data): |
|
486 | def UniqSystemEmail(old_data): | |
473 | class _UniqSystemEmail(formencode.validators.FancyValidator): |
|
487 | class _UniqSystemEmail(formencode.validators.FancyValidator): | |
474 | def to_python(self, value, state): |
|
488 | def to_python(self, value, state): | |
475 | value = value.lower() |
|
489 | value = value.lower() | |
476 | if old_data.get('email', '').lower() != value: |
|
490 | if old_data.get('email', '').lower() != value: | |
477 | user = User.get_by_email(value, case_insensitive=True) |
|
491 | user = User.get_by_email(value, case_insensitive=True) | |
478 | if user: |
|
492 | if user: | |
479 | raise formencode.Invalid( |
|
493 | raise formencode.Invalid( | |
480 | _("This e-mail address is already taken"), value, state |
|
494 | _("This e-mail address is already taken"), value, state | |
481 | ) |
|
495 | ) | |
482 | return value |
|
496 | return value | |
483 |
|
497 | |||
484 | return _UniqSystemEmail |
|
498 | return _UniqSystemEmail | |
485 |
|
499 | |||
486 |
|
500 | |||
487 | class ValidSystemEmail(formencode.validators.FancyValidator): |
|
501 | class ValidSystemEmail(formencode.validators.FancyValidator): | |
488 | def to_python(self, value, state): |
|
502 | def to_python(self, value, state): | |
489 | value = value.lower() |
|
503 | value = value.lower() | |
490 | user = User.get_by_email(value, case_insensitive=True) |
|
504 | user = User.get_by_email(value, case_insensitive=True) | |
491 | if user is None: |
|
505 | if user is None: | |
492 | raise formencode.Invalid( |
|
506 | raise formencode.Invalid( | |
493 | _("This e-mail address doesn't exist."), value, state |
|
507 | _("This e-mail address doesn't exist."), value, state | |
494 | ) |
|
508 | ) | |
495 |
|
509 | |||
496 | return value |
|
510 | return value | |
497 |
|
511 | |||
498 |
|
512 | |||
499 | class LdapLibValidator(formencode.validators.FancyValidator): |
|
513 | class LdapLibValidator(formencode.validators.FancyValidator): | |
500 |
|
514 | |||
501 | def to_python(self, value, state): |
|
515 | def to_python(self, value, state): | |
502 |
|
516 | |||
503 | try: |
|
517 | try: | |
504 | import ldap |
|
518 | import ldap | |
505 | except ImportError: |
|
519 | except ImportError: | |
506 | raise LdapImportError |
|
520 | raise LdapImportError | |
507 | return value |
|
521 | return value | |
508 |
|
522 | |||
509 |
|
523 | |||
510 | class AttrLoginValidator(formencode.validators.FancyValidator): |
|
524 | class AttrLoginValidator(formencode.validators.FancyValidator): | |
511 |
|
525 | |||
512 | def to_python(self, value, state): |
|
526 | def to_python(self, value, state): | |
513 |
|
527 | |||
514 | if not value or not isinstance(value, (str, unicode)): |
|
528 | if not value or not isinstance(value, (str, unicode)): | |
515 | raise formencode.Invalid( |
|
529 | raise formencode.Invalid( | |
516 | _("The LDAP Login attribute of the CN must be specified - " |
|
530 | _("The LDAP Login attribute of the CN must be specified - " | |
517 | "this is the name of the attribute that is equivalent " |
|
531 | "this is the name of the attribute that is equivalent " | |
518 | "to 'username'"), value, state |
|
532 | "to 'username'"), value, state | |
519 | ) |
|
533 | ) | |
520 |
|
534 | |||
521 | return value |
|
535 | return value | |
522 |
|
536 | |||
523 |
|
537 | |||
524 | #============================================================================== |
|
538 | #============================================================================== | |
525 | # FORMS |
|
539 | # FORMS | |
526 | #============================================================================== |
|
540 | #============================================================================== | |
527 | class LoginForm(formencode.Schema): |
|
541 | class LoginForm(formencode.Schema): | |
528 | allow_extra_fields = True |
|
542 | allow_extra_fields = True | |
529 | filter_extra_fields = True |
|
543 | filter_extra_fields = True | |
530 | username = UnicodeString( |
|
544 | username = UnicodeString( | |
531 | strip=True, |
|
545 | strip=True, | |
532 | min=1, |
|
546 | min=1, | |
533 | not_empty=True, |
|
547 | not_empty=True, | |
534 | messages={ |
|
548 | messages={ | |
535 | 'empty': _('Please enter a login'), |
|
549 | 'empty': _('Please enter a login'), | |
536 | 'tooShort': _('Enter a value %(min)i characters long or more')} |
|
550 | 'tooShort': _('Enter a value %(min)i characters long or more')} | |
537 | ) |
|
551 | ) | |
538 |
|
552 | |||
539 | password = UnicodeString( |
|
553 | password = UnicodeString( | |
540 | strip=True, |
|
554 | strip=True, | |
541 | min=3, |
|
555 | min=3, | |
542 | not_empty=True, |
|
556 | not_empty=True, | |
543 | messages={ |
|
557 | messages={ | |
544 | 'empty': _('Please enter a password'), |
|
558 | 'empty': _('Please enter a password'), | |
545 | 'tooShort': _('Enter %(min)i characters or more')} |
|
559 | 'tooShort': _('Enter %(min)i characters or more')} | |
546 | ) |
|
560 | ) | |
547 |
|
561 | |||
548 | remember = StringBoolean(if_missing=False) |
|
562 | remember = StringBoolean(if_missing=False) | |
549 |
|
563 | |||
550 | chained_validators = [ValidAuth] |
|
564 | chained_validators = [ValidAuth] | |
551 |
|
565 | |||
552 |
|
566 | |||
553 | def UserForm(edit=False, old_data={}): |
|
567 | def UserForm(edit=False, old_data={}): | |
554 | class _UserForm(formencode.Schema): |
|
568 | class _UserForm(formencode.Schema): | |
555 | allow_extra_fields = True |
|
569 | allow_extra_fields = True | |
556 | filter_extra_fields = True |
|
570 | filter_extra_fields = True | |
557 | username = All(UnicodeString(strip=True, min=1, not_empty=True), |
|
571 | username = All(UnicodeString(strip=True, min=1, not_empty=True), | |
558 | ValidUsername(edit, old_data)) |
|
572 | ValidUsername(edit, old_data)) | |
559 | if edit: |
|
573 | if edit: | |
560 | new_password = All(UnicodeString(strip=True, min=6, not_empty=False)) |
|
574 | new_password = All(UnicodeString(strip=True, min=6, not_empty=False)) | |
561 | password_confirmation = All(UnicodeString(strip=True, min=6, |
|
575 | password_confirmation = All(UnicodeString(strip=True, min=6, | |
562 | not_empty=False)) |
|
576 | not_empty=False)) | |
563 | admin = StringBoolean(if_missing=False) |
|
577 | admin = StringBoolean(if_missing=False) | |
564 | else: |
|
578 | else: | |
565 | password = All(UnicodeString(strip=True, min=6, not_empty=True)) |
|
579 | password = All(UnicodeString(strip=True, min=6, not_empty=True)) | |
566 | password_confirmation = All(UnicodeString(strip=True, min=6, |
|
580 | password_confirmation = All(UnicodeString(strip=True, min=6, | |
567 | not_empty=False)) |
|
581 | not_empty=False)) | |
568 |
|
582 | |||
569 | active = StringBoolean(if_missing=False) |
|
583 | active = StringBoolean(if_missing=False) | |
570 | name = UnicodeString(strip=True, min=1, not_empty=False) |
|
584 | name = UnicodeString(strip=True, min=1, not_empty=False) | |
571 | lastname = UnicodeString(strip=True, min=1, not_empty=False) |
|
585 | lastname = UnicodeString(strip=True, min=1, not_empty=False) | |
572 | email = All(Email(not_empty=True), UniqSystemEmail(old_data)) |
|
586 | email = All(Email(not_empty=True), UniqSystemEmail(old_data)) | |
573 |
|
587 | |||
574 | chained_validators = [ValidPasswordsMatch, ValidPassword] |
|
588 | chained_validators = [ValidPasswordsMatch, ValidPassword] | |
575 |
|
589 | |||
576 | return _UserForm |
|
590 | return _UserForm | |
577 |
|
591 | |||
578 |
|
592 | |||
579 | def UsersGroupForm(edit=False, old_data={}, available_members=[]): |
|
593 | def UsersGroupForm(edit=False, old_data={}, available_members=[]): | |
580 | class _UsersGroupForm(formencode.Schema): |
|
594 | class _UsersGroupForm(formencode.Schema): | |
581 | allow_extra_fields = True |
|
595 | allow_extra_fields = True | |
582 | filter_extra_fields = True |
|
596 | filter_extra_fields = True | |
583 |
|
597 | |||
584 | users_group_name = All(UnicodeString(strip=True, min=1, not_empty=True), |
|
598 | users_group_name = All(UnicodeString(strip=True, min=1, not_empty=True), | |
585 | ValidUsersGroup(edit, old_data)) |
|
599 | ValidUsersGroup(edit, old_data)) | |
586 |
|
600 | |||
587 | users_group_active = StringBoolean(if_missing=False) |
|
601 | users_group_active = StringBoolean(if_missing=False) | |
588 |
|
602 | |||
589 | if edit: |
|
603 | if edit: | |
590 | users_group_members = OneOf(available_members, hideList=False, |
|
604 | users_group_members = OneOf(available_members, hideList=False, | |
591 | testValueList=True, |
|
605 | testValueList=True, | |
592 | if_missing=None, not_empty=False) |
|
606 | if_missing=None, not_empty=False) | |
593 |
|
607 | |||
594 | return _UsersGroupForm |
|
608 | return _UsersGroupForm | |
595 |
|
609 | |||
596 |
|
610 | |||
597 | def ReposGroupForm(edit=False, old_data={}, available_groups=[]): |
|
611 | def ReposGroupForm(edit=False, old_data={}, available_groups=[]): | |
598 | class _ReposGroupForm(formencode.Schema): |
|
612 | class _ReposGroupForm(formencode.Schema): | |
599 | allow_extra_fields = True |
|
613 | allow_extra_fields = True | |
600 | filter_extra_fields = False |
|
614 | filter_extra_fields = False | |
601 |
|
615 | |||
602 | group_name = All(UnicodeString(strip=True, min=1, not_empty=True), |
|
616 | group_name = All(UnicodeString(strip=True, min=1, not_empty=True), | |
603 | SlugifyName()) |
|
617 | SlugifyName()) | |
604 | group_description = UnicodeString(strip=True, min=1, |
|
618 | group_description = UnicodeString(strip=True, min=1, | |
605 | not_empty=True) |
|
619 | not_empty=True) | |
606 | group_parent_id = OneOf(available_groups, hideList=False, |
|
620 | group_parent_id = OneOf(available_groups, hideList=False, | |
607 | testValueList=True, |
|
621 | testValueList=True, | |
608 | if_missing=None, not_empty=False) |
|
622 | if_missing=None, not_empty=False) | |
609 |
|
623 | |||
610 | chained_validators = [ValidReposGroup(edit, old_data), ValidPerms('group')] |
|
624 | chained_validators = [ValidReposGroup(edit, old_data), ValidPerms('group')] | |
611 |
|
625 | |||
612 | return _ReposGroupForm |
|
626 | return _ReposGroupForm | |
613 |
|
627 | |||
614 |
|
628 | |||
615 | def RegisterForm(edit=False, old_data={}): |
|
629 | def RegisterForm(edit=False, old_data={}): | |
616 | class _RegisterForm(formencode.Schema): |
|
630 | class _RegisterForm(formencode.Schema): | |
617 | allow_extra_fields = True |
|
631 | allow_extra_fields = True | |
618 | filter_extra_fields = True |
|
632 | filter_extra_fields = True | |
619 | username = All(ValidUsername(edit, old_data), |
|
633 | username = All(ValidUsername(edit, old_data), | |
620 | UnicodeString(strip=True, min=1, not_empty=True)) |
|
634 | UnicodeString(strip=True, min=1, not_empty=True)) | |
621 | password = All(UnicodeString(strip=True, min=6, not_empty=True)) |
|
635 | password = All(UnicodeString(strip=True, min=6, not_empty=True)) | |
622 | password_confirmation = All(UnicodeString(strip=True, min=6, not_empty=True)) |
|
636 | password_confirmation = All(UnicodeString(strip=True, min=6, not_empty=True)) | |
623 | active = StringBoolean(if_missing=False) |
|
637 | active = StringBoolean(if_missing=False) | |
624 | name = UnicodeString(strip=True, min=1, not_empty=False) |
|
638 | name = UnicodeString(strip=True, min=1, not_empty=False) | |
625 | lastname = UnicodeString(strip=True, min=1, not_empty=False) |
|
639 | lastname = UnicodeString(strip=True, min=1, not_empty=False) | |
626 | email = All(Email(not_empty=True), UniqSystemEmail(old_data)) |
|
640 | email = All(Email(not_empty=True), UniqSystemEmail(old_data)) | |
627 |
|
641 | |||
628 | chained_validators = [ValidPasswordsMatch, ValidPassword] |
|
642 | chained_validators = [ValidPasswordsMatch, ValidPassword] | |
629 |
|
643 | |||
630 | return _RegisterForm |
|
644 | return _RegisterForm | |
631 |
|
645 | |||
632 |
|
646 | |||
633 | def PasswordResetForm(): |
|
647 | def PasswordResetForm(): | |
634 | class _PasswordResetForm(formencode.Schema): |
|
648 | class _PasswordResetForm(formencode.Schema): | |
635 | allow_extra_fields = True |
|
649 | allow_extra_fields = True | |
636 | filter_extra_fields = True |
|
650 | filter_extra_fields = True | |
637 | email = All(ValidSystemEmail(), Email(not_empty=True)) |
|
651 | email = All(ValidSystemEmail(), Email(not_empty=True)) | |
638 | return _PasswordResetForm |
|
652 | return _PasswordResetForm | |
639 |
|
653 | |||
640 |
|
654 | |||
641 | def RepoForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(), |
|
655 | def RepoForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(), | |
642 | repo_groups=[]): |
|
656 | repo_groups=[]): | |
643 | class _RepoForm(formencode.Schema): |
|
657 | class _RepoForm(formencode.Schema): | |
644 | allow_extra_fields = True |
|
658 | allow_extra_fields = True | |
645 | filter_extra_fields = False |
|
659 | filter_extra_fields = False | |
646 | repo_name = All(UnicodeString(strip=True, min=1, not_empty=True), |
|
660 | repo_name = All(UnicodeString(strip=True, min=1, not_empty=True), | |
647 | SlugifyName()) |
|
661 | SlugifyName()) | |
-        clone_uri = All(UnicodeString(strip=True, min=1, not_empty=False),
-                        ValidCloneUri()())
+        clone_uri = All(UnicodeString(strip=True, min=1, not_empty=False))
650 | repo_group = OneOf(repo_groups, hideList=True) |
|
663 | repo_group = OneOf(repo_groups, hideList=True) | |
651 | repo_type = OneOf(supported_backends) |
|
664 | repo_type = OneOf(supported_backends) | |
652 | description = UnicodeString(strip=True, min=1, not_empty=True) |
|
665 | description = UnicodeString(strip=True, min=1, not_empty=True) | |
653 | private = StringBoolean(if_missing=False) |
|
666 | private = StringBoolean(if_missing=False) | |
654 | enable_statistics = StringBoolean(if_missing=False) |
|
667 | enable_statistics = StringBoolean(if_missing=False) | |
655 | enable_downloads = StringBoolean(if_missing=False) |
|
668 | enable_downloads = StringBoolean(if_missing=False) | |
656 |
|
669 | |||
657 | if edit: |
|
670 | if edit: | |
658 | #this is repo owner |
|
671 | #this is repo owner | |
659 | user = All(UnicodeString(not_empty=True), ValidRepoUser) |
|
672 | user = All(UnicodeString(not_empty=True), ValidRepoUser) | |
660 |
|
673 | |||
-        chained_validators = [ValidRepoName(edit, old_data), ValidPerms()]
+        chained_validators = [ValidCloneUri()(),
+                              ValidRepoName(edit, old_data),
+                              ValidPerms()]
662 | return _RepoForm |
|
677 | return _RepoForm | |
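The move of ValidCloneUri() from the clone_uri field into chained_validators above matters because a field validator only ever sees the clone_uri string, while a chained validator runs after the whole schema and receives the full form dict, so repo_type and clone_uri can be checked together (which the new url_handler needs). A minimal formencode illustration, with invented field names and values:

# Illustrative only -- not RhodeCode code: chained validators get the whole
# converted form dict, enabling cross-field checks.
import formencode
from formencode import validators

class _WholeFormCheck(formencode.validators.FancyValidator):
    def to_python(self, value, state):
        # value is the complete dict here,
        # e.g. {'repo_type': 'hg', 'clone_uri': 'http://example.com/repo'}
        if value.get('clone_uri') and value.get('repo_type') not in ('hg', 'git'):
            raise formencode.Invalid('unknown repo type', value, state)
        return value

class _ExampleRepoForm(formencode.Schema):
    allow_extra_fields = True
    filter_extra_fields = False
    repo_type = validators.UnicodeString(not_empty=True)
    clone_uri = validators.UnicodeString(not_empty=False, if_missing='')
    chained_validators = [_WholeFormCheck()]

# _ExampleRepoForm().to_python({'repo_type': 'hg', 'clone_uri': ''}) passes;
# an unknown repo_type with a clone_uri set raises formencode.Invalid.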
663 |
|
678 | |||
664 |
|
679 | |||
665 | def RepoForkForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(), |
|
680 | def RepoForkForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(), | |
666 | repo_groups=[]): |
|
681 | repo_groups=[]): | |
667 | class _RepoForkForm(formencode.Schema): |
|
682 | class _RepoForkForm(formencode.Schema): | |
668 | allow_extra_fields = True |
|
683 | allow_extra_fields = True | |
669 | filter_extra_fields = False |
|
684 | filter_extra_fields = False | |
670 | repo_name = All(UnicodeString(strip=True, min=1, not_empty=True), |
|
685 | repo_name = All(UnicodeString(strip=True, min=1, not_empty=True), | |
671 | SlugifyName()) |
|
686 | SlugifyName()) | |
672 | repo_group = OneOf(repo_groups, hideList=True) |
|
687 | repo_group = OneOf(repo_groups, hideList=True) | |
673 | repo_type = All(ValidForkType(old_data), OneOf(supported_backends)) |
|
688 | repo_type = All(ValidForkType(old_data), OneOf(supported_backends)) | |
674 | description = UnicodeString(strip=True, min=1, not_empty=True) |
|
689 | description = UnicodeString(strip=True, min=1, not_empty=True) | |
675 | private = StringBoolean(if_missing=False) |
|
690 | private = StringBoolean(if_missing=False) | |
676 | copy_permissions = StringBoolean(if_missing=False) |
|
691 | copy_permissions = StringBoolean(if_missing=False) | |
677 | update_after_clone = StringBoolean(if_missing=False) |
|
692 | update_after_clone = StringBoolean(if_missing=False) | |
678 | fork_parent_id = UnicodeString() |
|
693 | fork_parent_id = UnicodeString() | |
679 | chained_validators = [ValidForkName(edit, old_data)] |
|
694 | chained_validators = [ValidForkName(edit, old_data)] | |
680 |
|
695 | |||
681 | return _RepoForkForm |
|
696 | return _RepoForkForm | |
682 |
|
697 | |||
683 |
|
698 | |||
684 | def RepoSettingsForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(), |
|
699 | def RepoSettingsForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(), | |
685 | repo_groups=[]): |
|
700 | repo_groups=[]): | |
686 | class _RepoForm(formencode.Schema): |
|
701 | class _RepoForm(formencode.Schema): | |
687 | allow_extra_fields = True |
|
702 | allow_extra_fields = True | |
688 | filter_extra_fields = False |
|
703 | filter_extra_fields = False | |
689 | repo_name = All(UnicodeString(strip=True, min=1, not_empty=True), |
|
704 | repo_name = All(UnicodeString(strip=True, min=1, not_empty=True), | |
690 | SlugifyName()) |
|
705 | SlugifyName()) | |
691 | description = UnicodeString(strip=True, min=1, not_empty=True) |
|
706 | description = UnicodeString(strip=True, min=1, not_empty=True) | |
692 | repo_group = OneOf(repo_groups, hideList=True) |
|
707 | repo_group = OneOf(repo_groups, hideList=True) | |
693 | private = StringBoolean(if_missing=False) |
|
708 | private = StringBoolean(if_missing=False) | |
694 |
|
709 | |||
695 | chained_validators = [ValidRepoName(edit, old_data), ValidPerms(), |
|
710 | chained_validators = [ValidRepoName(edit, old_data), ValidPerms(), | |
696 | ValidSettings] |
|
711 | ValidSettings] | |
697 | return _RepoForm |
|
712 | return _RepoForm | |
698 |
|
713 | |||
699 |
|
714 | |||
700 | def ApplicationSettingsForm(): |
|
715 | def ApplicationSettingsForm(): | |
701 | class _ApplicationSettingsForm(formencode.Schema): |
|
716 | class _ApplicationSettingsForm(formencode.Schema): | |
702 | allow_extra_fields = True |
|
717 | allow_extra_fields = True | |
703 | filter_extra_fields = False |
|
718 | filter_extra_fields = False | |
704 | rhodecode_title = UnicodeString(strip=True, min=1, not_empty=True) |
|
719 | rhodecode_title = UnicodeString(strip=True, min=1, not_empty=True) | |
705 | rhodecode_realm = UnicodeString(strip=True, min=1, not_empty=True) |
|
720 | rhodecode_realm = UnicodeString(strip=True, min=1, not_empty=True) | |
706 | rhodecode_ga_code = UnicodeString(strip=True, min=1, not_empty=False) |
|
721 | rhodecode_ga_code = UnicodeString(strip=True, min=1, not_empty=False) | |
707 |
|
722 | |||
708 | return _ApplicationSettingsForm |
|
723 | return _ApplicationSettingsForm | |
709 |
|
724 | |||
710 |
|
725 | |||
711 | def ApplicationUiSettingsForm(): |
|
726 | def ApplicationUiSettingsForm(): | |
712 | class _ApplicationUiSettingsForm(formencode.Schema): |
|
727 | class _ApplicationUiSettingsForm(formencode.Schema): | |
713 | allow_extra_fields = True |
|
728 | allow_extra_fields = True | |
714 | filter_extra_fields = False |
|
729 | filter_extra_fields = False | |
715 | web_push_ssl = OneOf(['true', 'false'], if_missing='false') |
|
730 | web_push_ssl = OneOf(['true', 'false'], if_missing='false') | |
716 | paths_root_path = All(ValidPath(), UnicodeString(strip=True, min=1, not_empty=True)) |
|
731 | paths_root_path = All(ValidPath(), UnicodeString(strip=True, min=1, not_empty=True)) | |
717 | hooks_changegroup_update = OneOf(['True', 'False'], if_missing=False) |
|
732 | hooks_changegroup_update = OneOf(['True', 'False'], if_missing=False) | |
718 | hooks_changegroup_repo_size = OneOf(['True', 'False'], if_missing=False) |
|
733 | hooks_changegroup_repo_size = OneOf(['True', 'False'], if_missing=False) | |
719 | hooks_pretxnchangegroup_push_logger = OneOf(['True', 'False'], if_missing=False) |
|
734 | hooks_pretxnchangegroup_push_logger = OneOf(['True', 'False'], if_missing=False) | |
720 | hooks_preoutgoing_pull_logger = OneOf(['True', 'False'], if_missing=False) |
|
735 | hooks_preoutgoing_pull_logger = OneOf(['True', 'False'], if_missing=False) | |
721 |
|
736 | |||
722 | return _ApplicationUiSettingsForm |
|
737 | return _ApplicationUiSettingsForm | |
723 |
|
738 | |||
724 |
|
739 | |||
725 | def DefaultPermissionsForm(perms_choices, register_choices, create_choices): |
|
740 | def DefaultPermissionsForm(perms_choices, register_choices, create_choices): | |
726 | class _DefaultPermissionsForm(formencode.Schema): |
|
741 | class _DefaultPermissionsForm(formencode.Schema): | |
727 | allow_extra_fields = True |
|
742 | allow_extra_fields = True | |
728 | filter_extra_fields = True |
|
743 | filter_extra_fields = True | |
729 | overwrite_default = StringBoolean(if_missing=False) |
|
744 | overwrite_default = StringBoolean(if_missing=False) | |
730 | anonymous = OneOf(['True', 'False'], if_missing=False) |
|
745 | anonymous = OneOf(['True', 'False'], if_missing=False) | |
731 | default_perm = OneOf(perms_choices) |
|
746 | default_perm = OneOf(perms_choices) | |
732 | default_register = OneOf(register_choices) |
|
747 | default_register = OneOf(register_choices) | |
733 | default_create = OneOf(create_choices) |
|
748 | default_create = OneOf(create_choices) | |
734 |
|
749 | |||
735 | return _DefaultPermissionsForm |
|
750 | return _DefaultPermissionsForm | |
736 |
|
751 | |||
737 |
|
752 | |||
738 | def LdapSettingsForm(tls_reqcert_choices, search_scope_choices, tls_kind_choices): |
|
753 | def LdapSettingsForm(tls_reqcert_choices, search_scope_choices, tls_kind_choices): | |
739 | class _LdapSettingsForm(formencode.Schema): |
|
754 | class _LdapSettingsForm(formencode.Schema): | |
740 | allow_extra_fields = True |
|
755 | allow_extra_fields = True | |
741 | filter_extra_fields = True |
|
756 | filter_extra_fields = True | |
742 | pre_validators = [LdapLibValidator] |
|
757 | pre_validators = [LdapLibValidator] | |
743 | ldap_active = StringBoolean(if_missing=False) |
|
758 | ldap_active = StringBoolean(if_missing=False) | |
744 | ldap_host = UnicodeString(strip=True,) |
|
759 | ldap_host = UnicodeString(strip=True,) | |
745 | ldap_port = Number(strip=True,) |
|
760 | ldap_port = Number(strip=True,) | |
746 | ldap_tls_kind = OneOf(tls_kind_choices) |
|
761 | ldap_tls_kind = OneOf(tls_kind_choices) | |
747 | ldap_tls_reqcert = OneOf(tls_reqcert_choices) |
|
762 | ldap_tls_reqcert = OneOf(tls_reqcert_choices) | |
748 | ldap_dn_user = UnicodeString(strip=True,) |
|
763 | ldap_dn_user = UnicodeString(strip=True,) | |
749 | ldap_dn_pass = UnicodeString(strip=True,) |
|
764 | ldap_dn_pass = UnicodeString(strip=True,) | |
750 | ldap_base_dn = UnicodeString(strip=True,) |
|
765 | ldap_base_dn = UnicodeString(strip=True,) | |
751 | ldap_filter = UnicodeString(strip=True,) |
|
766 | ldap_filter = UnicodeString(strip=True,) | |
752 | ldap_search_scope = OneOf(search_scope_choices) |
|
767 | ldap_search_scope = OneOf(search_scope_choices) | |
753 | ldap_attr_login = All(AttrLoginValidator, UnicodeString(strip=True,)) |
|
768 | ldap_attr_login = All(AttrLoginValidator, UnicodeString(strip=True,)) | |
754 | ldap_attr_firstname = UnicodeString(strip=True,) |
|
769 | ldap_attr_firstname = UnicodeString(strip=True,) | |
755 | ldap_attr_lastname = UnicodeString(strip=True,) |
|
770 | ldap_attr_lastname = UnicodeString(strip=True,) | |
756 | ldap_attr_email = UnicodeString(strip=True,) |
|
771 | ldap_attr_email = UnicodeString(strip=True,) | |
757 |
|
772 | |||
758 | return _LdapSettingsForm |
|
773 | return _LdapSettingsForm |
@@ -1,310 +1,310 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | rhodecode.model.user_group |
|
3 | rhodecode.model.user_group | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | users groups model for RhodeCode |
|
6 | users groups model for RhodeCode | |
7 |
|
7 | |||
8 | :created_on: Jan 25, 2011 |
|
8 | :created_on: Jan 25, 2011 | |
9 | :author: marcink |
|
9 | :author: marcink | |
10 | :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com> |
|
10 | :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com> | |
11 | :license: GPLv3, see COPYING for more details. |
|
11 | :license: GPLv3, see COPYING for more details. | |
12 | """ |
|
12 | """ | |
13 | # This program is free software: you can redistribute it and/or modify |
|
13 | # This program is free software: you can redistribute it and/or modify | |
14 | # it under the terms of the GNU General Public License as published by |
|
14 | # it under the terms of the GNU General Public License as published by | |
15 | # the Free Software Foundation, either version 3 of the License, or |
|
15 | # the Free Software Foundation, either version 3 of the License, or | |
16 | # (at your option) any later version. |
|
16 | # (at your option) any later version. | |
17 | # |
|
17 | # | |
18 | # This program is distributed in the hope that it will be useful, |
|
18 | # This program is distributed in the hope that it will be useful, | |
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
21 | # GNU General Public License for more details. |
|
21 | # GNU General Public License for more details. | |
22 | # |
|
22 | # | |
23 | # You should have received a copy of the GNU General Public License |
|
23 | # You should have received a copy of the GNU General Public License | |
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
25 |
|
25 | |||
26 | import os |
|
26 | import os | |
27 | import logging |
|
27 | import logging | |
28 | import traceback |
|
28 | import traceback | |
29 | import shutil |
|
29 | import shutil | |
30 |
|
30 | |||
31 | from rhodecode.lib import LazyProperty |
|
31 | from rhodecode.lib import LazyProperty | |
32 |
|
32 | |||
33 | from rhodecode.model import BaseModel |
|
33 | from rhodecode.model import BaseModel | |
34 | from rhodecode.model.db import RepoGroup, RhodeCodeUi, UserRepoGroupToPerm, \ |
|
34 | from rhodecode.model.db import RepoGroup, RhodeCodeUi, UserRepoGroupToPerm, \ | |
35 | User, Permission, UsersGroupRepoGroupToPerm, UsersGroup |
|
35 | User, Permission, UsersGroupRepoGroupToPerm, UsersGroup | |
36 |
|
36 | |||
37 | log = logging.getLogger(__name__) |
|
37 | log = logging.getLogger(__name__) | |
38 |
|
38 | |||
39 |
|
39 | |||
40 | class ReposGroupModel(BaseModel): |
|
40 | class ReposGroupModel(BaseModel): | |
41 |
|
41 | |||
42 | def __get_user(self, user): |
|
42 | def __get_user(self, user): | |
43 | return self._get_instance(User, user, callback=User.get_by_username) |
|
43 | return self._get_instance(User, user, callback=User.get_by_username) | |
44 |
|
44 | |||
45 | def __get_users_group(self, users_group): |
|
45 | def __get_users_group(self, users_group): | |
46 | return self._get_instance(UsersGroup, users_group, |
|
46 | return self._get_instance(UsersGroup, users_group, | |
47 | callback=UsersGroup.get_by_group_name) |
|
47 | callback=UsersGroup.get_by_group_name) | |
48 |
|
48 | |||
49 | def __get_repos_group(self, repos_group): |
|
49 | def __get_repos_group(self, repos_group): | |
50 | return self._get_instance(RepoGroup, repos_group, |
|
50 | return self._get_instance(RepoGroup, repos_group, | |
51 | callback=RepoGroup.get_by_group_name) |
|
51 | callback=RepoGroup.get_by_group_name) | |
52 |
|
52 | |||
53 | def __get_perm(self, permission): |
|
53 | def __get_perm(self, permission): | |
54 | return self._get_instance(Permission, permission, |
|
54 | return self._get_instance(Permission, permission, | |
55 | callback=Permission.get_by_key) |
|
55 | callback=Permission.get_by_key) | |
56 |
|
56 | |||
57 | @LazyProperty |
|
57 | @LazyProperty | |
58 | def repos_path(self): |
|
58 | def repos_path(self): | |
59 | """ |
|
59 | """ | |
60 | Gets the repositories root path from the database |
|
60 | Gets the repositories root path from the database | |
61 | """ |
|
61 | """ | |
62 |
|
62 | |||
63 | q = RhodeCodeUi.get_by_key('/').one() |
|
63 | q = RhodeCodeUi.get_by_key('/').one() | |
64 | return q.ui_value |
|
64 | return q.ui_value | |
65 |
|
65 | |||
66 | def _create_default_perms(self, new_group): |
|
66 | def _create_default_perms(self, new_group): | |
67 | # create default permission |
|
67 | # create default permission | |
68 | repo_group_to_perm = UserRepoGroupToPerm() |
|
68 | repo_group_to_perm = UserRepoGroupToPerm() | |
69 | default_perm = 'group.read' |
|
69 | default_perm = 'group.read' | |
70 | for p in User.get_by_username('default').user_perms: |
|
70 | for p in User.get_by_username('default').user_perms: | |
71 | if p.permission.permission_name.startswith('group.'): |
|
71 | if p.permission.permission_name.startswith('group.'): | |
72 | default_perm = p.permission.permission_name |
|
72 | default_perm = p.permission.permission_name | |
73 | break |
|
73 | break | |
74 |
|
74 | |||
75 | repo_group_to_perm.permission_id = self.sa.query(Permission)\ |
|
75 | repo_group_to_perm.permission_id = self.sa.query(Permission)\ | |
76 | .filter(Permission.permission_name == default_perm)\ |
|
76 | .filter(Permission.permission_name == default_perm)\ | |
77 | .one().permission_id |
|
77 | .one().permission_id | |
78 |
|
78 | |||
79 | repo_group_to_perm.group = new_group |
|
79 | repo_group_to_perm.group = new_group | |
80 | repo_group_to_perm.user_id = User.get_by_username('default').user_id |
|
80 | repo_group_to_perm.user_id = User.get_by_username('default').user_id | |
81 |
|
81 | |||
82 | self.sa.add(repo_group_to_perm) |
|
82 | self.sa.add(repo_group_to_perm) | |
83 |
|
83 | |||
84 | def __create_group(self, group_name): |
|
84 | def __create_group(self, group_name): | |
85 | """ |
|
85 | """ | |
86 | makes repositories group on filesystem |
|
86 | makes repositories group on filesystem | |
87 |
|
87 | |||
88 | :param repo_name: |
|
88 | :param repo_name: | |
89 | :param parent_id: |
|
89 | :param parent_id: | |
90 | """ |
|
90 | """ | |
91 |
|
91 | |||
92 | create_path = os.path.join(self.repos_path, group_name) |
|
92 | create_path = os.path.join(self.repos_path, group_name) | |
93 | log.debug('creating new group in %s' % create_path) |
|
93 | log.debug('creating new group in %s' % create_path) | |
94 |
|
94 | |||
95 | if os.path.isdir(create_path): |
|
95 | if os.path.isdir(create_path): | |
96 | raise Exception('That directory already exists !') |
|
96 | raise Exception('That directory already exists !') | |
97 |
|
97 | |||
98 | os.makedirs(create_path) |
|
98 | os.makedirs(create_path) | |
99 |
|
99 | |||
100 | def __rename_group(self, old, new): |
|
100 | def __rename_group(self, old, new): | |
101 | """ |
|
101 | """ | |
102 | Renames a group on filesystem |
|
102 | Renames a group on filesystem | |
103 |
|
103 | |||
104 | :param group_name: |
|
104 | :param group_name: | |
105 | """ |
|
105 | """ | |
106 |
|
106 | |||
107 | if old == new: |
|
107 | if old == new: | |
108 | log.debug('skipping group rename') |
|
108 | log.debug('skipping group rename') | |
109 | return |
|
109 | return | |
110 |
|
110 | |||
111 | log.debug('renaming repos group from %s to %s' % (old, new)) |
|
111 | log.debug('renaming repos group from %s to %s' % (old, new)) | |
112 |
|
112 | |||
113 | old_path = os.path.join(self.repos_path, old) |
|
113 | old_path = os.path.join(self.repos_path, old) | |
114 | new_path = os.path.join(self.repos_path, new) |
|
114 | new_path = os.path.join(self.repos_path, new) | |
115 |
|
115 | |||
116 | log.debug('renaming repos paths from %s to %s' % (old_path, new_path)) |
|
116 | log.debug('renaming repos paths from %s to %s' % (old_path, new_path)) | |
117 |
|
117 | |||
118 | if os.path.isdir(new_path): |
|
118 | if os.path.isdir(new_path): | |
119 | raise Exception('Was trying to rename to already ' |
|
119 | raise Exception('Was trying to rename to already ' | |
120 | 'existing dir %s' % new_path) |
|
120 | 'existing dir %s' % new_path) | |
121 | shutil.move(old_path, new_path) |
|
121 | shutil.move(old_path, new_path) | |
122 |
|
122 | |||
123 | def __delete_group(self, group): |
|
123 | def __delete_group(self, group): | |
124 | """ |
|
124 | """ | |
125 | Deletes a group from a filesystem |
|
125 | Deletes a group from a filesystem | |
126 |
|
126 | |||
127 | :param group: instance of group from database |
|
127 | :param group: instance of group from database | |
128 | """ |
|
128 | """ | |
129 | paths = group.full_path.split(RepoGroup.url_sep()) |
|
129 | paths = group.full_path.split(RepoGroup.url_sep()) | |
130 | paths = os.sep.join(paths) |
|
130 | paths = os.sep.join(paths) | |
131 |
|
131 | |||
132 | rm_path = os.path.join(self.repos_path, paths) |
|
132 | rm_path = os.path.join(self.repos_path, paths) | |
133 | if os.path.isdir(rm_path): |
|
133 | if os.path.isdir(rm_path): | |
134 | # delete only if that path really exists |
|
134 | # delete only if that path really exists | |
135 | os.rmdir(rm_path) |
|
135 | os.rmdir(rm_path) | |
136 |
|
136 | |||
137 | def create(self, group_name, group_description, parent, just_db=False): |
|
137 | def create(self, group_name, group_description, parent, just_db=False): | |
138 | try: |
|
138 | try: | |
139 | new_repos_group = RepoGroup() |
|
139 | new_repos_group = RepoGroup() | |
140 | new_repos_group.group_description = group_description |
|
140 | new_repos_group.group_description = group_description | |
141 | new_repos_group.parent_group = self.__get_repos_group(parent) |
|
141 | new_repos_group.parent_group = self.__get_repos_group(parent) | |
142 | new_repos_group.group_name = new_repos_group.get_new_name(group_name) |
|
142 | new_repos_group.group_name = new_repos_group.get_new_name(group_name) | |
143 |
|
143 | |||
144 | self.sa.add(new_repos_group) |
|
144 | self.sa.add(new_repos_group) | |
145 | self._create_default_perms(new_repos_group) |
|
145 | self._create_default_perms(new_repos_group) | |
146 |
|
146 | |||
147 | if not just_db: |
|
147 | if not just_db: | |
148 | # we need to flush here, in order to check if database won't |
|
148 | # we need to flush here, in order to check if database won't | |
149 | # throw any exceptions, create filesystem dirs at the very end |
|
149 | # throw any exceptions, create filesystem dirs at the very end | |
150 | self.sa.flush() |
|
150 | self.sa.flush() | |
151 | self.__create_group(new_repos_group.group_name) |
|
151 | self.__create_group(new_repos_group.group_name) | |
152 |
|
152 | |||
153 | return new_repos_group |
|
153 | return new_repos_group | |
154 | except: |
|
154 | except: | |
155 | log.error(traceback.format_exc()) |
|
155 | log.error(traceback.format_exc()) | |
156 | raise |
|
156 | raise | |
157 |
|
157 | |||
158 | def update(self, repos_group_id, form_data): |
|
158 | def update(self, repos_group_id, form_data): | |
159 |
|
159 | |||
160 | try: |
|
160 | try: | |
161 | repos_group = RepoGroup.get(repos_group_id) |
|
161 | repos_group = RepoGroup.get(repos_group_id) | |
162 |
|
162 | |||
163 | # update permissions |
|
163 | # update permissions | |
164 | for member, perm, member_type in form_data['perms_updates']: |
|
164 | for member, perm, member_type in form_data['perms_updates']: | |
165 | if member_type == 'user': |
|
165 | if member_type == 'user': | |
166 | # this updates also current one if found |
|
166 | # this updates also current one if found | |
167 | ReposGroupModel().grant_user_permission( |
|
167 | ReposGroupModel().grant_user_permission( | |
168 | repos_group=repos_group, user=member, perm=perm |
|
168 | repos_group=repos_group, user=member, perm=perm | |
169 | ) |
|
169 | ) | |
170 | else: |
|
170 | else: | |
171 | ReposGroupModel().grant_users_group_permission( |
|
171 | ReposGroupModel().grant_users_group_permission( | |
172 | repos_group=repos_group, group_name=member, perm=perm |
|
172 | repos_group=repos_group, group_name=member, perm=perm | |
173 | ) |
|
173 | ) | |
174 | # set new permissions |
|
174 | # set new permissions | |
175 | for member, perm, member_type in form_data['perms_new']: |
|
175 | for member, perm, member_type in form_data['perms_new']: | |
176 | if member_type == 'user': |
|
176 | if member_type == 'user': | |
177 | ReposGroupModel().grant_user_permission( |
|
177 | ReposGroupModel().grant_user_permission( | |
178 | repos_group=repos_group, user=member, perm=perm |
|
178 | repos_group=repos_group, user=member, perm=perm | |
179 | ) |
|
179 | ) | |
180 | else: |
|
180 | else: | |
181 | ReposGroupModel().grant_users_group_permission( |
|
181 | ReposGroupModel().grant_users_group_permission( | |
182 | repos_group=repos_group, group_name=member, perm=perm |
|
182 | repos_group=repos_group, group_name=member, perm=perm | |
183 | ) |
|
183 | ) | |
184 |
|
184 | |||
185 | old_path = repos_group.full_path |
|
185 | old_path = repos_group.full_path | |
186 |
|
186 | |||
187 | # change properties |
|
187 | # change properties | |
188 | repos_group.group_description = form_data['group_description'] |
|
188 | repos_group.group_description = form_data['group_description'] | |
             repos_group.parent_group = RepoGroup.get(form_data['group_parent_id'])
+            repos_group.group_parent_id = form_data['group_parent_id']
             repos_group.group_name = repos_group.get_new_name(form_data['group_name'])
-
             new_path = repos_group.full_path

             self.sa.add(repos_group)

-            self.__rename_group(old_path, new_path)
-
             # we need to get all repositories from this new group and
             # rename them accordingly to new group path
             for r in repos_group.repositories:
                 r.repo_name = r.get_new_name(r.just_name)
                 self.sa.add(r)

+            self.__rename_group(old_path, new_path)
+
204 | return repos_group |
|
204 | return repos_group | |
205 | except: |
|
205 | except: | |
206 | log.error(traceback.format_exc()) |
|
206 | log.error(traceback.format_exc()) | |
207 | raise |
|
207 | raise | |
208 |
|
208 | |||
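The update() hunk above is effectively an ordering fix: the contained repositories get their new names in the database before the group directory is moved on disk, so a failure during the session work leaves the filesystem untouched for the except/raise to handle. A schematic of that ordering (a plain sketch, not RhodeCode code; the helper names mirror the model above):

# Schematic only: database renames first, filesystem move last, so an
# exception raised before shutil.move() leaves the on-disk tree as it was.
import shutil

def update_group_paths(session, group, old_path, new_path):
    session.add(group)
    for repo in group.repositories:                 # 1. fix child repo names in DB
        repo.repo_name = repo.get_new_name(repo.just_name)
        session.add(repo)
    if old_path != new_path:                        # 2. move the directory last
        shutil.move(old_path, new_path)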
209 | def delete(self, users_group_id): |
|
209 | def delete(self, users_group_id): | |
210 | try: |
|
210 | try: | |
211 | users_group = RepoGroup.get(users_group_id) |
|
211 | users_group = RepoGroup.get(users_group_id) | |
212 | self.sa.delete(users_group) |
|
212 | self.sa.delete(users_group) | |
213 | self.__delete_group(users_group) |
|
213 | self.__delete_group(users_group) | |
214 | except: |
|
214 | except: | |
215 | log.error(traceback.format_exc()) |
|
215 | log.error(traceback.format_exc()) | |
216 | raise |
|
216 | raise | |
217 |
|
217 | |||
218 | def grant_user_permission(self, repos_group, user, perm): |
|
218 | def grant_user_permission(self, repos_group, user, perm): | |
219 | """ |
|
219 | """ | |
220 | Grant permission for user on given repositories group, or update |
|
220 | Grant permission for user on given repositories group, or update | |
221 | existing one if found |
|
221 | existing one if found | |
222 |
|
222 | |||
223 | :param repos_group: Instance of ReposGroup, repositories_group_id, |
|
223 | :param repos_group: Instance of ReposGroup, repositories_group_id, | |
224 | or repositories_group name |
|
224 | or repositories_group name | |
225 | :param user: Instance of User, user_id or username |
|
225 | :param user: Instance of User, user_id or username | |
226 | :param perm: Instance of Permission, or permission_name |
|
226 | :param perm: Instance of Permission, or permission_name | |
227 | """ |
|
227 | """ | |
228 |
|
228 | |||
229 | repos_group = self.__get_repos_group(repos_group) |
|
229 | repos_group = self.__get_repos_group(repos_group) | |
230 | user = self.__get_user(user) |
|
230 | user = self.__get_user(user) | |
231 | permission = self.__get_perm(perm) |
|
231 | permission = self.__get_perm(perm) | |
232 |
|
232 | |||
233 | # check if we have that permission already |
|
233 | # check if we have that permission already | |
234 | obj = self.sa.query(UserRepoGroupToPerm)\ |
|
234 | obj = self.sa.query(UserRepoGroupToPerm)\ | |
235 | .filter(UserRepoGroupToPerm.user == user)\ |
|
235 | .filter(UserRepoGroupToPerm.user == user)\ | |
236 | .filter(UserRepoGroupToPerm.group == repos_group)\ |
|
236 | .filter(UserRepoGroupToPerm.group == repos_group)\ | |
237 | .scalar() |
|
237 | .scalar() | |
238 | if obj is None: |
|
238 | if obj is None: | |
239 | # create new ! |
|
239 | # create new ! | |
240 | obj = UserRepoGroupToPerm() |
|
240 | obj = UserRepoGroupToPerm() | |
241 | obj.group = repos_group |
|
241 | obj.group = repos_group | |
242 | obj.user = user |
|
242 | obj.user = user | |
243 | obj.permission = permission |
|
243 | obj.permission = permission | |
244 | self.sa.add(obj) |
|
244 | self.sa.add(obj) | |
245 |
|
245 | |||
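grant_user_permission above is upsert-style: it updates an existing UserRepoGroupToPerm row when one is found and creates it otherwise. A hedged usage sketch (the group and user names are invented; the permission key 'group.read' appears earlier in this module):

# Usage sketch only; identifiers come from the model above.
model = ReposGroupModel()
model.grant_user_permission(repos_group='some_group',  # name, id or RepoGroup instance
                            user='john',               # hypothetical username
                            perm='group.read')
model.revoke_user_permission(repos_group='some_group', user='john')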
246 | def revoke_user_permission(self, repos_group, user): |
|
246 | def revoke_user_permission(self, repos_group, user): | |
247 | """ |
|
247 | """ | |
248 | Revoke permission for user on given repositories group |
|
248 | Revoke permission for user on given repositories group | |
249 |
|
249 | |||
250 | :param repos_group: Instance of ReposGroup, repositories_group_id, |
|
250 | :param repos_group: Instance of ReposGroup, repositories_group_id, | |
251 | or repositories_group name |
|
251 | or repositories_group name | |
252 | :param user: Instance of User, user_id or username |
|
252 | :param user: Instance of User, user_id or username | |
253 | """ |
|
253 | """ | |
254 |
|
254 | |||
255 | repos_group = self.__get_repos_group(repos_group) |
|
255 | repos_group = self.__get_repos_group(repos_group) | |
256 | user = self.__get_user(user) |
|
256 | user = self.__get_user(user) | |
257 |
|
257 | |||
258 | obj = self.sa.query(UserRepoGroupToPerm)\ |
|
258 | obj = self.sa.query(UserRepoGroupToPerm)\ | |
259 | .filter(UserRepoGroupToPerm.user == user)\ |
|
259 | .filter(UserRepoGroupToPerm.user == user)\ | |
260 | .filter(UserRepoGroupToPerm.group == repos_group)\ |
|
260 | .filter(UserRepoGroupToPerm.group == repos_group)\ | |
261 | .one() |
|
261 | .one() | |
262 | self.sa.delete(obj) |
|
262 | self.sa.delete(obj) | |
263 |
|
263 | |||
264 | def grant_users_group_permission(self, repos_group, group_name, perm): |
|
264 | def grant_users_group_permission(self, repos_group, group_name, perm): | |
265 | """ |
|
265 | """ | |
266 | Grant permission for users group on given repositories group, or update |
|
266 | Grant permission for users group on given repositories group, or update | |
267 | existing one if found |
|
267 | existing one if found | |
268 |
|
268 | |||
269 | :param repos_group: Instance of ReposGroup, repositories_group_id, |
|
269 | :param repos_group: Instance of ReposGroup, repositories_group_id, | |
270 | or repositories_group name |
|
270 | or repositories_group name | |
271 | :param group_name: Instance of UserGroup, users_group_id, |
|
271 | :param group_name: Instance of UserGroup, users_group_id, | |
272 | or users group name |
|
272 | or users group name | |
273 | :param perm: Instance of Permission, or permission_name |
|
273 | :param perm: Instance of Permission, or permission_name | |
274 | """ |
|
274 | """ | |
275 | repos_group = self.__get_repos_group(repos_group) |
|
275 | repos_group = self.__get_repos_group(repos_group) | |
276 | group_name = self.__get_users_group(group_name) |
|
276 | group_name = self.__get_users_group(group_name) | |
277 | permission = self.__get_perm(perm) |
|
277 | permission = self.__get_perm(perm) | |
278 |
|
278 | |||
279 | # check if we have that permission already |
|
279 | # check if we have that permission already | |
280 | obj = self.sa.query(UsersGroupRepoGroupToPerm)\ |
|
280 | obj = self.sa.query(UsersGroupRepoGroupToPerm)\ | |
281 | .filter(UsersGroupRepoGroupToPerm.group == repos_group)\ |
|
281 | .filter(UsersGroupRepoGroupToPerm.group == repos_group)\ | |
282 | .filter(UsersGroupRepoGroupToPerm.users_group == group_name)\ |
|
282 | .filter(UsersGroupRepoGroupToPerm.users_group == group_name)\ | |
283 | .scalar() |
|
283 | .scalar() | |
284 |
|
284 | |||
285 | if obj is None: |
|
285 | if obj is None: | |
286 | # create new |
|
286 | # create new | |
287 | obj = UsersGroupRepoGroupToPerm() |
|
287 | obj = UsersGroupRepoGroupToPerm() | |
288 |
|
288 | |||
289 | obj.group = repos_group |
|
289 | obj.group = repos_group | |
290 | obj.users_group = group_name |
|
290 | obj.users_group = group_name | |
291 | obj.permission = permission |
|
291 | obj.permission = permission | |
292 | self.sa.add(obj) |
|
292 | self.sa.add(obj) | |
293 |
|
293 | |||
294 | def revoke_users_group_permission(self, repos_group, group_name): |
|
294 | def revoke_users_group_permission(self, repos_group, group_name): | |
295 | """ |
|
295 | """ | |
296 | Revoke permission for users group on given repositories group |
|
296 | Revoke permission for users group on given repositories group | |
297 |
|
297 | |||
298 | :param repos_group: Instance of ReposGroup, repositories_group_id, |
|
298 | :param repos_group: Instance of ReposGroup, repositories_group_id, | |
299 | or repositories_group name |
|
299 | or repositories_group name | |
300 | :param group_name: Instance of UserGroup, users_group_id, |
|
300 | :param group_name: Instance of UserGroup, users_group_id, | |
301 | or users group name |
|
301 | or users group name | |
302 | """ |
|
302 | """ | |
303 | repos_group = self.__get_repos_group(repos_group) |
|
303 | repos_group = self.__get_repos_group(repos_group) | |
304 | group_name = self.__get_users_group(group_name) |
|
304 | group_name = self.__get_users_group(group_name) | |
305 |
|
305 | |||
306 | obj = self.sa.query(UsersGroupRepoGroupToPerm)\ |
|
306 | obj = self.sa.query(UsersGroupRepoGroupToPerm)\ | |
307 | .filter(UsersGroupRepoGroupToPerm.group == repos_group)\ |
|
307 | .filter(UsersGroupRepoGroupToPerm.group == repos_group)\ | |
308 | .filter(UsersGroupRepoGroupToPerm.users_group == group_name)\ |
|
308 | .filter(UsersGroupRepoGroupToPerm.users_group == group_name)\ | |
309 | .one() |
|
309 | .one() | |
310 | self.sa.delete(obj) |
|
310 | self.sa.delete(obj) |
@@ -1,87 +1,134 b'' | |||||
1 | from rhodecode.tests import * |
|
1 | from rhodecode.tests import * | |
2 | from rhodecode.model.db import UsersGroup |
|
2 | from rhodecode.model.db import UsersGroup, UsersGroupToPerm, Permission | |
3 |
|
3 | |||
4 | TEST_USERS_GROUP = 'admins_test' |
|
4 | TEST_USERS_GROUP = 'admins_test' | |
5 |
|
5 | |||
|
6 | ||||
6 | class TestAdminUsersGroupsController(TestController): |
|
7 | class TestAdminUsersGroupsController(TestController): | |
7 |
|
8 | |||
8 | def test_index(self): |
|
9 | def test_index(self): | |
9 | response = self.app.get(url('users_groups')) |
|
10 | response = self.app.get(url('users_groups')) | |
10 | # Test response... |
|
11 | # Test response... | |
11 |
|
12 | |||
12 | def test_index_as_xml(self): |
|
13 | def test_index_as_xml(self): | |
13 | response = self.app.get(url('formatted_users_groups', format='xml')) |
|
14 | response = self.app.get(url('formatted_users_groups', format='xml')) | |
14 |
|
15 | |||
15 | def test_create(self): |
|
16 | def test_create(self): | |
16 | self.log_user() |
|
17 | self.log_user() | |
17 | users_group_name = TEST_USERS_GROUP |
|
18 | users_group_name = TEST_USERS_GROUP | |
18 | response = self.app.post(url('users_groups'), |
|
19 | response = self.app.post(url('users_groups'), | |
19 | {'users_group_name':users_group_name, |
|
20 | {'users_group_name': users_group_name, | |
20 | 'active':True}) |
|
21 | 'active':True}) | |
21 | response.follow() |
|
22 | response.follow() | |
22 |
|
23 | |||
23 | self.checkSessionFlash(response, |
|
24 | self.checkSessionFlash(response, | |
24 | 'created users group %s' % TEST_USERS_GROUP) |
|
25 | 'created users group %s' % TEST_USERS_GROUP) | |
25 |
|
26 | |||
26 | def test_new(self): |
|
27 | def test_new(self): | |
27 | response = self.app.get(url('new_users_group')) |
|
28 | response = self.app.get(url('new_users_group')) | |
28 |
|
29 | |||
29 | def test_new_as_xml(self): |
|
30 | def test_new_as_xml(self): | |
30 | response = self.app.get(url('formatted_new_users_group', format='xml')) |
|
31 | response = self.app.get(url('formatted_new_users_group', format='xml')) | |
31 |
|
32 | |||
32 | def test_update(self): |
|
33 | def test_update(self): | |
33 | response = self.app.put(url('users_group', id=1)) |
|
34 | response = self.app.put(url('users_group', id=1)) | |
34 |
|
35 | |||
35 | def test_update_browser_fakeout(self): |
|
36 | def test_update_browser_fakeout(self): | |
36 | response = self.app.post(url('users_group', id=1), |
|
37 | response = self.app.post(url('users_group', id=1), | |
37 | params=dict(_method='put')) |
|
38 | params=dict(_method='put')) | |
38 |
|
39 | |||
39 | def test_delete(self): |
|
40 | def test_delete(self): | |
40 | self.log_user() |
|
41 | self.log_user() | |
41 | users_group_name = TEST_USERS_GROUP + 'another' |
|
42 | users_group_name = TEST_USERS_GROUP + 'another' | |
42 | response = self.app.post(url('users_groups'), |
|
43 | response = self.app.post(url('users_groups'), | |
43 | {'users_group_name':users_group_name, |
|
44 | {'users_group_name':users_group_name, | |
44 | 'active':True}) |
|
45 | 'active':True}) | |
45 | response.follow() |
|
46 | response.follow() | |
46 |
|
47 | |||
47 | self.checkSessionFlash(response, |
|
48 | self.checkSessionFlash(response, | |
48 | 'created users group %s' % users_group_name) |
|
49 | 'created users group %s' % users_group_name) | |
49 |
|
50 | |||
50 |
|
||||
51 | gr = self.Session.query(UsersGroup)\ |
|
51 | gr = self.Session.query(UsersGroup)\ | |
52 | .filter(UsersGroup.users_group_name == |
|
52 | .filter(UsersGroup.users_group_name == | |
53 | users_group_name).one() |
|
53 | users_group_name).one() | |
54 |
|
54 | |||
55 | response = self.app.delete(url('users_group', id=gr.users_group_id)) |
|
55 | response = self.app.delete(url('users_group', id=gr.users_group_id)) | |
56 |
|
56 | |||
57 | gr = self.Session.query(UsersGroup)\ |
|
57 | gr = self.Session.query(UsersGroup)\ | |
58 | .filter(UsersGroup.users_group_name == |
|
58 | .filter(UsersGroup.users_group_name == | |
59 | users_group_name).scalar() |
|
59 | users_group_name).scalar() | |
60 |
|
60 | |||
61 | self.assertEqual(gr, None) |
|
61 | self.assertEqual(gr, None) | |
62 |
|
62 | |||
|
63 | def test_enable_repository_read_on_group(self): | |||
|
64 | self.log_user() | |||
|
65 | users_group_name = TEST_USERS_GROUP + 'another2' | |||
|
66 | response = self.app.post(url('users_groups'), | |||
|
67 | {'users_group_name': users_group_name, | |||
|
68 | 'active':True}) | |||
|
69 | response.follow() | |||
|
70 | ||||
|
71 | ug = UsersGroup.get_by_group_name(users_group_name) | |||
|
72 | self.checkSessionFlash(response, | |||
|
73 | 'created users group %s' % users_group_name) | |||
|
74 | ||||
|
75 | response = self.app.put(url('users_group_perm', id=ug.users_group_id), | |||
|
76 | {'create_repo_perm': True}) | |||
|
77 | ||||
|
78 | response.follow() | |||
|
79 | ug = UsersGroup.get_by_group_name(users_group_name) | |||
|
80 | p = Permission.get_by_key('hg.create.repository') | |||
|
81 | # check if user has this perm | |||
|
82 | perms = UsersGroupToPerm.query()\ | |||
|
83 | .filter(UsersGroupToPerm.users_group == ug).all() | |||
|
84 | perms = [[x.__dict__['users_group_id'], | |||
|
85 | x.__dict__['permission_id'],] for x in perms] | |||
|
86 | self.assertEqual( | |||
|
87 | perms, | |||
|
88 | [[ug.users_group_id, p.permission_id]] | |||
|
89 | ) | |||
|
90 | ||||
|
91 | # DELETE ! | |||
|
92 | ug = UsersGroup.get_by_group_name(users_group_name) | |||
|
93 | ugid = ug.users_group_id | |||
|
94 | response = self.app.delete(url('users_group', id=ug.users_group_id)) | |||
|
95 | response = response.follow() | |||
|
96 | gr = self.Session.query(UsersGroup)\ | |||
|
97 | .filter(UsersGroup.users_group_name == | |||
|
98 | users_group_name).scalar() | |||
|
99 | ||||
|
100 | self.assertEqual(gr, None) | |||
|
101 | p = Permission.get_by_key('hg.create.repository') | |||
|
102 | perms = UsersGroupToPerm.query()\ | |||
|
103 | .filter(UsersGroupToPerm.users_group_id == ugid).all() | |||
|
104 | perms = [[x.__dict__['users_group_id'], | |||
|
105 | x.__dict__['permission_id'],] for x in perms] | |||
|
106 | self.assertEqual( | |||
|
107 | perms, | |||
|
108 | [] | |||
|
109 | ) | |||
63 |
|
110 | |||
64 | def test_delete_browser_fakeout(self): |
|
111 | def test_delete_browser_fakeout(self): | |
65 | response = self.app.post(url('users_group', id=1), |
|
112 | response = self.app.post(url('users_group', id=1), | |
66 | params=dict(_method='delete')) |
|
113 | params=dict(_method='delete')) | |
67 |
|
114 | |||
68 | def test_show(self): |
|
115 | def test_show(self): | |
69 | response = self.app.get(url('users_group', id=1)) |
|
116 | response = self.app.get(url('users_group', id=1)) | |
70 |
|
117 | |||
71 | def test_show_as_xml(self): |
|
118 | def test_show_as_xml(self): | |
72 | response = self.app.get(url('formatted_users_group', id=1, format='xml')) |
|
119 | response = self.app.get(url('formatted_users_group', id=1, format='xml')) | |
73 |
|
120 | |||
74 | def test_edit(self): |
|
121 | def test_edit(self): | |
75 | response = self.app.get(url('edit_users_group', id=1)) |
|
122 | response = self.app.get(url('edit_users_group', id=1)) | |
76 |
|
123 | |||
77 | def test_edit_as_xml(self): |
|
124 | def test_edit_as_xml(self): | |
78 | response = self.app.get(url('formatted_edit_users_group', id=1, format='xml')) |
|
125 | response = self.app.get(url('formatted_edit_users_group', id=1, format='xml')) | |
79 |
|
126 | |||
80 | def test_assign_members(self): |
|
127 | def test_assign_members(self): | |
81 | pass |
|
128 | pass | |
82 |
|
129 | |||
83 | def test_add_create_permission(self): |
|
130 | def test_add_create_permission(self): | |
84 | pass |
|
131 | pass | |
85 |
|
132 | |||
86 | def test_revoke_members(self): |
|
133 | def test_revoke_members(self): | |
87 | pass |
|
134 | pass |
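The new test_enable_repository_read_on_group above verifies a users-group permission by querying UsersGroupToPerm directly. As a quick reference, here is that check in isolation, a sketch using only calls that appear in this changeset; the group name is an example value, and plain attribute access stands in for the test's __dict__ lookups:

    from rhodecode.model.db import UsersGroup, UsersGroupToPerm, Permission

    ug = UsersGroup.get_by_group_name('admins_test')    # example group
    p = Permission.get_by_key('hg.create.repository')
    rows = UsersGroupToPerm.query()\
        .filter(UsersGroupToPerm.users_group == ug).all()
    # the group holds the permission if any bound row references it
    has_create_repo = any(r.permission_id == p.permission_id for r in rows)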
@@ -1,555 +1,580 b'' | |||||
1 | import os |
|
1 | import os | |
2 | import unittest |
|
2 | import unittest | |
3 | from rhodecode.tests import * |
|
3 | from rhodecode.tests import * | |
4 |
|
4 | |||
5 | from rhodecode.model.repos_group import ReposGroupModel |
|
5 | from rhodecode.model.repos_group import ReposGroupModel | |
6 | from rhodecode.model.repo import RepoModel |
|
6 | from rhodecode.model.repo import RepoModel | |
7 | from rhodecode.model.db import RepoGroup, User, Notification, UserNotification, \ |
|
7 | from rhodecode.model.db import RepoGroup, User, Notification, UserNotification, \ | |
8 | UsersGroup, UsersGroupMember, Permission |
|
8 | UsersGroup, UsersGroupMember, Permission | |
9 | from sqlalchemy.exc import IntegrityError |
|
9 | from sqlalchemy.exc import IntegrityError | |
10 | from rhodecode.model.user import UserModel |
|
10 | from rhodecode.model.user import UserModel | |
11 |
|
11 | |||
12 | from rhodecode.model.meta import Session |
|
12 | from rhodecode.model.meta import Session | |
13 | from rhodecode.model.notification import NotificationModel |
|
13 | from rhodecode.model.notification import NotificationModel | |
14 | from rhodecode.model.users_group import UsersGroupModel |
|
14 | from rhodecode.model.users_group import UsersGroupModel | |
15 | from rhodecode.lib.auth import AuthUser |
|
15 | from rhodecode.lib.auth import AuthUser | |
16 |
|
16 | |||
17 |
|
17 | |||
18 | def _make_group(path, desc='desc', parent_id=None, |
|
18 | def _make_group(path, desc='desc', parent_id=None, | |
19 | skip_if_exists=False): |
|
19 | skip_if_exists=False): | |
20 |
|
20 | |||
21 | gr = RepoGroup.get_by_group_name(path) |
|
21 | gr = RepoGroup.get_by_group_name(path) | |
22 | if gr and skip_if_exists: |
|
22 | if gr and skip_if_exists: | |
23 | return gr |
|
23 | return gr | |
24 |
|
24 | |||
25 | gr = ReposGroupModel().create(path, desc, parent_id) |
|
25 | gr = ReposGroupModel().create(path, desc, parent_id) | |
26 | Session.commit() |
|
|||
27 | return gr |
|
26 | return gr | |
28 |
|
27 | |||
29 |
|
28 | |||
30 | class TestReposGroups(unittest.TestCase): |
|
29 | class TestReposGroups(unittest.TestCase): | |
31 |
|
30 | |||
32 | def setUp(self): |
|
31 | def setUp(self): | |
33 | self.g1 = _make_group('test1', skip_if_exists=True) |
|
32 | self.g1 = _make_group('test1', skip_if_exists=True) | |
|
33 | Session.commit() | |||
34 | self.g2 = _make_group('test2', skip_if_exists=True) |
|
34 | self.g2 = _make_group('test2', skip_if_exists=True) | |
|
35 | Session.commit() | |||
35 | self.g3 = _make_group('test3', skip_if_exists=True) |
|
36 | self.g3 = _make_group('test3', skip_if_exists=True) | |
|
37 | Session.commit() | |||
36 |
|
38 | |||
37 | def tearDown(self): |
|
39 | def tearDown(self): | |
38 | print 'out' |
|
40 | print 'out' | |
39 |
|
41 | |||
40 | def __check_path(self, *path): |
|
42 | def __check_path(self, *path): | |
|
43 | """ | |||
|
44 | Checks the path for existance ! | |||
|
45 | """ | |||
41 | path = [TESTS_TMP_PATH] + list(path) |
|
46 | path = [TESTS_TMP_PATH] + list(path) | |
42 | path = os.path.join(*path) |
|
47 | path = os.path.join(*path) | |
43 | return os.path.isdir(path) |
|
48 | return os.path.isdir(path) | |
44 |
|
49 | |||
45 | def _check_folders(self): |
|
50 | def _check_folders(self): | |
46 | print os.listdir(TESTS_TMP_PATH) |
|
51 | print os.listdir(TESTS_TMP_PATH) | |
47 |
|
52 | |||
48 | def __delete_group(self, id_): |
|
53 | def __delete_group(self, id_): | |
49 | ReposGroupModel().delete(id_) |
|
54 | ReposGroupModel().delete(id_) | |
50 |
|
55 | |||
51 | def __update_group(self, id_, path, desc='desc', parent_id=None): |
|
56 | def __update_group(self, id_, path, desc='desc', parent_id=None): | |
52 | form_data = dict( |
 |
57 | form_data = dict( | |
 |
58 | group_name=path, | |
53 | group_description=desc, |
 |
59 | group_description=desc, | |
54 | group_parent_id=parent_id, |
 |
60 | group_parent_id=parent_id, | |
55 | perms_updates=[], |
 |
61 | perms_updates=[], | |
56 | perms_new=[] |
 |
62 | perms_new=[] | |
57 | ) |
 |
63 | ) | |
58 | gr = ReposGroupModel().update(id_, form_data) |
|
64 | gr = ReposGroupModel().update(id_, form_data) | |
59 | return gr |
|
65 | return gr | |
60 |
|
66 | |||
61 | def test_create_group(self): |
|
67 | def test_create_group(self): | |
62 | g = _make_group('newGroup') |
|
68 | g = _make_group('newGroup') | |
63 | self.assertEqual(g.full_path, 'newGroup') |
|
69 | self.assertEqual(g.full_path, 'newGroup') | |
64 |
|
70 | |||
65 | self.assertTrue(self.__check_path('newGroup')) |
|
71 | self.assertTrue(self.__check_path('newGroup')) | |
66 |
|
72 | |||
67 | def test_create_same_name_group(self): |
|
73 | def test_create_same_name_group(self): | |
68 | self.assertRaises(IntegrityError, lambda:_make_group('newGroup')) |
|
74 | self.assertRaises(IntegrityError, lambda:_make_group('newGroup')) | |
69 | Session.rollback() |
|
75 | Session.rollback() | |
70 |
|
76 | |||
71 | def test_same_subgroup(self): |
|
77 | def test_same_subgroup(self): | |
72 | sg1 = _make_group('sub1', parent_id=self.g1.group_id) |
|
78 | sg1 = _make_group('sub1', parent_id=self.g1.group_id) | |
73 | self.assertEqual(sg1.parent_group, self.g1) |
|
79 | self.assertEqual(sg1.parent_group, self.g1) | |
74 | self.assertEqual(sg1.full_path, 'test1/sub1') |
|
80 | self.assertEqual(sg1.full_path, 'test1/sub1') | |
75 | self.assertTrue(self.__check_path('test1', 'sub1')) |
|
81 | self.assertTrue(self.__check_path('test1', 'sub1')) | |
76 |
|
82 | |||
77 | ssg1 = _make_group('subsub1', parent_id=sg1.group_id) |
|
83 | ssg1 = _make_group('subsub1', parent_id=sg1.group_id) | |
78 | self.assertEqual(ssg1.parent_group, sg1) |
|
84 | self.assertEqual(ssg1.parent_group, sg1) | |
79 | self.assertEqual(ssg1.full_path, 'test1/sub1/subsub1') |
|
85 | self.assertEqual(ssg1.full_path, 'test1/sub1/subsub1') | |
80 | self.assertTrue(self.__check_path('test1', 'sub1', 'subsub1')) |
|
86 | self.assertTrue(self.__check_path('test1', 'sub1', 'subsub1')) | |
81 |
|
87 | |||
82 | def test_remove_group(self): |
|
88 | def test_remove_group(self): | |
83 | sg1 = _make_group('deleteme') |
|
89 | sg1 = _make_group('deleteme') | |
84 | self.__delete_group(sg1.group_id) |
|
90 | self.__delete_group(sg1.group_id) | |
85 |
|
91 | |||
86 | self.assertEqual(RepoGroup.get(sg1.group_id), None) |
|
92 | self.assertEqual(RepoGroup.get(sg1.group_id), None) | |
87 | self.assertFalse(self.__check_path('deteteme')) |
|
93 | self.assertFalse(self.__check_path('deteteme')) | |
88 |
|
94 | |||
89 | sg1 = _make_group('deleteme', parent_id=self.g1.group_id) |
|
95 | sg1 = _make_group('deleteme', parent_id=self.g1.group_id) | |
90 | self.__delete_group(sg1.group_id) |
|
96 | self.__delete_group(sg1.group_id) | |
91 |
|
97 | |||
92 | self.assertEqual(RepoGroup.get(sg1.group_id), None) |
|
98 | self.assertEqual(RepoGroup.get(sg1.group_id), None) | |
93 | self.assertFalse(self.__check_path('test1', 'deteteme')) |
|
99 | self.assertFalse(self.__check_path('test1', 'deteteme')) | |
94 |
|
100 | |||
95 | def test_rename_single_group(self): |
|
101 | def test_rename_single_group(self): | |
96 | sg1 = _make_group('initial') |
|
102 | sg1 = _make_group('initial') | |
97 |
|
103 | |||
98 | new_sg1 = self.__update_group(sg1.group_id, 'after') |
|
104 | new_sg1 = self.__update_group(sg1.group_id, 'after') | |
99 | self.assertTrue(self.__check_path('after')) |
|
105 | self.assertTrue(self.__check_path('after')) | |
100 | self.assertEqual(RepoGroup.get_by_group_name('initial'), None) |
|
106 | self.assertEqual(RepoGroup.get_by_group_name('initial'), None) | |
101 |
|
107 | |||
102 | def test_update_group_parent(self): |
|
108 | def test_update_group_parent(self): | |
103 |
|
109 | |||
104 | sg1 = _make_group('initial', parent_id=self.g1.group_id) |
|
110 | sg1 = _make_group('initial', parent_id=self.g1.group_id) | |
105 |
|
111 | |||
106 | new_sg1 = self.__update_group(sg1.group_id, 'after', parent_id=self.g1.group_id) |
|
112 | new_sg1 = self.__update_group(sg1.group_id, 'after', parent_id=self.g1.group_id) | |
107 | self.assertTrue(self.__check_path('test1', 'after')) |
|
113 | self.assertTrue(self.__check_path('test1', 'after')) | |
108 | self.assertEqual(RepoGroup.get_by_group_name('test1/initial'), None) |
|
114 | self.assertEqual(RepoGroup.get_by_group_name('test1/initial'), None) | |
109 |
|
115 | |||
110 | new_sg1 = self.__update_group(sg1.group_id, 'after', parent_id=self.g3.group_id) |
|
116 | new_sg1 = self.__update_group(sg1.group_id, 'after', parent_id=self.g3.group_id) | |
111 | self.assertTrue(self.__check_path('test3', 'after')) |
|
117 | self.assertTrue(self.__check_path('test3', 'after')) | |
112 | self.assertEqual(RepoGroup.get_by_group_name('test3/initial'), None) |
|
118 | self.assertEqual(RepoGroup.get_by_group_name('test3/initial'), None) | |
113 |
|
119 | |||
114 | new_sg1 = self.__update_group(sg1.group_id, 'hello') |
|
120 | new_sg1 = self.__update_group(sg1.group_id, 'hello') | |
115 | self.assertTrue(self.__check_path('hello')) |
|
121 | self.assertTrue(self.__check_path('hello')) | |
116 |
|
122 | |||
117 | self.assertEqual(RepoGroup.get_by_group_name('hello'), new_sg1) |
|
123 | self.assertEqual(RepoGroup.get_by_group_name('hello'), new_sg1) | |
118 |
|
124 | |||
119 | def test_subgrouping_with_repo(self): |
|
125 | def test_subgrouping_with_repo(self): | |
120 |
|
126 | |||
121 | g1 = _make_group('g1') |
|
127 | g1 = _make_group('g1') | |
122 | g2 = _make_group('g2') |
|
128 | g2 = _make_group('g2') | |
123 |
|
129 | |||
124 | # create new repo |
|
130 | # create new repo | |
125 | form_data = dict(repo_name='john', |
|
131 | form_data = dict(repo_name='john', | |
126 | repo_name_full='john', |
|
132 | repo_name_full='john', | |
127 | fork_name=None, |
|
133 | fork_name=None, | |
128 | description=None, |
|
134 | description=None, | |
129 | repo_group=None, |
|
135 | repo_group=None, | |
130 | private=False, |
|
136 | private=False, | |
131 | repo_type='hg', |
|
137 | repo_type='hg', | |
132 | clone_uri=None) |
|
138 | clone_uri=None) | |
133 | cur_user = User.get_by_username(TEST_USER_ADMIN_LOGIN) |
|
139 | cur_user = User.get_by_username(TEST_USER_ADMIN_LOGIN) | |
134 | r = RepoModel().create(form_data, cur_user) |
|
140 | r = RepoModel().create(form_data, cur_user) | |
135 |
|
141 | |||
136 | self.assertEqual(r.repo_name, 'john') |
|
142 | self.assertEqual(r.repo_name, 'john') | |
137 |
|
143 | |||
138 | # put repo into group |
|
144 | # put repo into group | |
139 | form_data = form_data |
|
145 | form_data = form_data | |
140 | form_data['repo_group'] = g1.group_id |
|
146 | form_data['repo_group'] = g1.group_id | |
141 | form_data['perms_new'] = [] |
|
147 | form_data['perms_new'] = [] | |
142 | form_data['perms_updates'] = [] |
|
148 | form_data['perms_updates'] = [] | |
143 | RepoModel().update(r.repo_name, form_data) |
|
149 | RepoModel().update(r.repo_name, form_data) | |
144 | self.assertEqual(r.repo_name, 'g1/john') |
|
150 | self.assertEqual(r.repo_name, 'g1/john') | |
145 |
|
151 | |||
146 | self.__update_group(g1.group_id, 'g1', parent_id=g2.group_id) |
|
152 | self.__update_group(g1.group_id, 'g1', parent_id=g2.group_id) | |
147 | self.assertTrue(self.__check_path('g2', 'g1')) |
|
153 | self.assertTrue(self.__check_path('g2', 'g1')) | |
148 |
|
154 | |||
149 | # test repo |
|
155 | # test repo | |
150 | self.assertEqual(r.repo_name, os.path.join('g2', 'g1', r.just_name)) |
|
156 | self.assertEqual(r.repo_name, os.path.join('g2', 'g1', r.just_name)) | |
151 |
|
157 | |||
152 |
|
158 | |||
|
159 | def test_move_to_root(self): | |||
|
160 | g1 = _make_group('t11') | |||
|
161 | Session.commit() | |||
|
162 | g2 = _make_group('t22',parent_id=g1.group_id) | |||
|
163 | Session.commit() | |||
|
164 | ||||
|
165 | self.assertEqual(g2.full_path,'t11/t22') | |||
|
166 | self.assertTrue(self.__check_path('t11', 't22')) | |||
|
167 | ||||
|
168 | g2 = self.__update_group(g2.group_id, 'g22', parent_id=None) | |||
|
169 | Session.commit() | |||
|
170 | ||||
|
171 | self.assertEqual(g2.group_name,'g22') | |||
|
172 | # we moved out group from t1 to '' so it's full path should be 'g2' | |||
|
173 | self.assertEqual(g2.full_path,'g22') | |||
|
174 | self.assertFalse(self.__check_path('t11', 't22')) | |||
|
175 | self.assertTrue(self.__check_path('g22')) | |||
|
176 | ||||
|
177 | ||||
153 | class TestUser(unittest.TestCase): |
|
178 | class TestUser(unittest.TestCase): | |
154 | def __init__(self, methodName='runTest'): |
|
179 | def __init__(self, methodName='runTest'): | |
155 | Session.remove() |
|
180 | Session.remove() | |
156 | super(TestUser, self).__init__(methodName=methodName) |
|
181 | super(TestUser, self).__init__(methodName=methodName) | |
157 |
|
182 | |||
158 | def test_create_and_remove(self): |
|
183 | def test_create_and_remove(self): | |
159 | usr = UserModel().create_or_update(username=u'test_user', password=u'qweqwe', |
|
184 | usr = UserModel().create_or_update(username=u'test_user', password=u'qweqwe', | |
160 | email=u'u232@rhodecode.org', |
|
185 | email=u'u232@rhodecode.org', | |
161 | name=u'u1', lastname=u'u1') |
|
186 | name=u'u1', lastname=u'u1') | |
162 | Session.commit() |
|
187 | Session.commit() | |
163 | self.assertEqual(User.get_by_username(u'test_user'), usr) |
|
188 | self.assertEqual(User.get_by_username(u'test_user'), usr) | |
164 |
|
189 | |||
165 | # make users group |
|
190 | # make users group | |
166 | users_group = UsersGroupModel().create('some_example_group') |
|
191 | users_group = UsersGroupModel().create('some_example_group') | |
167 | Session.commit() |
|
192 | Session.commit() | |
168 |
|
193 | |||
169 | UsersGroupModel().add_user_to_group(users_group, usr) |
|
194 | UsersGroupModel().add_user_to_group(users_group, usr) | |
170 | Session.commit() |
|
195 | Session.commit() | |
171 |
|
196 | |||
172 | self.assertEqual(UsersGroup.get(users_group.users_group_id), users_group) |
|
197 | self.assertEqual(UsersGroup.get(users_group.users_group_id), users_group) | |
173 | self.assertEqual(UsersGroupMember.query().count(), 1) |
|
198 | self.assertEqual(UsersGroupMember.query().count(), 1) | |
174 | UserModel().delete(usr.user_id) |
|
199 | UserModel().delete(usr.user_id) | |
175 | Session.commit() |
|
200 | Session.commit() | |
176 |
|
201 | |||
177 | self.assertEqual(UsersGroupMember.query().all(), []) |
|
202 | self.assertEqual(UsersGroupMember.query().all(), []) | |
178 |
|
203 | |||
179 |
|
204 | |||
180 | class TestNotifications(unittest.TestCase): |
|
205 | class TestNotifications(unittest.TestCase): | |
181 |
|
206 | |||
182 | def __init__(self, methodName='runTest'): |
|
207 | def __init__(self, methodName='runTest'): | |
183 | Session.remove() |
|
208 | Session.remove() | |
184 | self.u1 = UserModel().create_or_update(username=u'u1', |
|
209 | self.u1 = UserModel().create_or_update(username=u'u1', | |
185 | password=u'qweqwe', |
|
210 | password=u'qweqwe', | |
186 | email=u'u1@rhodecode.org', |
|
211 | email=u'u1@rhodecode.org', | |
187 | name=u'u1', lastname=u'u1') |
|
212 | name=u'u1', lastname=u'u1') | |
188 | Session.commit() |
|
213 | Session.commit() | |
189 | self.u1 = self.u1.user_id |
|
214 | self.u1 = self.u1.user_id | |
190 |
|
215 | |||
191 | self.u2 = UserModel().create_or_update(username=u'u2', |
|
216 | self.u2 = UserModel().create_or_update(username=u'u2', | |
192 | password=u'qweqwe', |
|
217 | password=u'qweqwe', | |
193 | email=u'u2@rhodecode.org', |
|
218 | email=u'u2@rhodecode.org', | |
194 | name=u'u2', lastname=u'u3') |
|
219 | name=u'u2', lastname=u'u3') | |
195 | Session.commit() |
|
220 | Session.commit() | |
196 | self.u2 = self.u2.user_id |
|
221 | self.u2 = self.u2.user_id | |
197 |
|
222 | |||
198 | self.u3 = UserModel().create_or_update(username=u'u3', |
|
223 | self.u3 = UserModel().create_or_update(username=u'u3', | |
199 | password=u'qweqwe', |
|
224 | password=u'qweqwe', | |
200 | email=u'u3@rhodecode.org', |
|
225 | email=u'u3@rhodecode.org', | |
201 | name=u'u3', lastname=u'u3') |
|
226 | name=u'u3', lastname=u'u3') | |
202 | Session.commit() |
|
227 | Session.commit() | |
203 | self.u3 = self.u3.user_id |
|
228 | self.u3 = self.u3.user_id | |
204 |
|
229 | |||
205 | super(TestNotifications, self).__init__(methodName=methodName) |
|
230 | super(TestNotifications, self).__init__(methodName=methodName) | |
206 |
|
231 | |||
207 | def _clean_notifications(self): |
|
232 | def _clean_notifications(self): | |
208 | for n in Notification.query().all(): |
|
233 | for n in Notification.query().all(): | |
209 | Session.delete(n) |
|
234 | Session.delete(n) | |
210 |
|
235 | |||
211 | Session.commit() |
|
236 | Session.commit() | |
212 | self.assertEqual(Notification.query().all(), []) |
|
237 | self.assertEqual(Notification.query().all(), []) | |
213 |
|
238 | |||
214 | def tearDown(self): |
|
239 | def tearDown(self): | |
215 | self._clean_notifications() |
|
240 | self._clean_notifications() | |
216 |
|
241 | |||
217 | def test_create_notification(self): |
|
242 | def test_create_notification(self): | |
218 | self.assertEqual([], Notification.query().all()) |
|
243 | self.assertEqual([], Notification.query().all()) | |
219 | self.assertEqual([], UserNotification.query().all()) |
|
244 | self.assertEqual([], UserNotification.query().all()) | |
220 |
|
245 | |||
221 | usrs = [self.u1, self.u2] |
|
246 | usrs = [self.u1, self.u2] | |
222 | notification = NotificationModel().create(created_by=self.u1, |
|
247 | notification = NotificationModel().create(created_by=self.u1, | |
223 | subject=u'subj', body=u'hi there', |
|
248 | subject=u'subj', body=u'hi there', | |
224 | recipients=usrs) |
|
249 | recipients=usrs) | |
225 | Session.commit() |
|
250 | Session.commit() | |
226 | u1 = User.get(self.u1) |
|
251 | u1 = User.get(self.u1) | |
227 | u2 = User.get(self.u2) |
|
252 | u2 = User.get(self.u2) | |
228 | u3 = User.get(self.u3) |
|
253 | u3 = User.get(self.u3) | |
229 | notifications = Notification.query().all() |
|
254 | notifications = Notification.query().all() | |
230 | self.assertEqual(len(notifications), 1) |
|
255 | self.assertEqual(len(notifications), 1) | |
231 |
|
256 | |||
232 | unotification = UserNotification.query()\ |
|
257 | unotification = UserNotification.query()\ | |
233 | .filter(UserNotification.notification == notification).all() |
|
258 | .filter(UserNotification.notification == notification).all() | |
234 |
|
259 | |||
235 | self.assertEqual(notifications[0].recipients, [u1, u2]) |
|
260 | self.assertEqual(notifications[0].recipients, [u1, u2]) | |
236 | self.assertEqual(notification.notification_id, |
|
261 | self.assertEqual(notification.notification_id, | |
237 | notifications[0].notification_id) |
|
262 | notifications[0].notification_id) | |
238 | self.assertEqual(len(unotification), len(usrs)) |
|
263 | self.assertEqual(len(unotification), len(usrs)) | |
239 | self.assertEqual([x.user.user_id for x in unotification], usrs) |
|
264 | self.assertEqual([x.user.user_id for x in unotification], usrs) | |
240 |
|
265 | |||
241 | def test_user_notifications(self): |
|
266 | def test_user_notifications(self): | |
242 | self.assertEqual([], Notification.query().all()) |
|
267 | self.assertEqual([], Notification.query().all()) | |
243 | self.assertEqual([], UserNotification.query().all()) |
|
268 | self.assertEqual([], UserNotification.query().all()) | |
244 |
|
269 | |||
245 | notification1 = NotificationModel().create(created_by=self.u1, |
|
270 | notification1 = NotificationModel().create(created_by=self.u1, | |
246 | subject=u'subj', body=u'hi there1', |
|
271 | subject=u'subj', body=u'hi there1', | |
247 | recipients=[self.u3]) |
|
272 | recipients=[self.u3]) | |
248 | Session.commit() |
|
273 | Session.commit() | |
249 | notification2 = NotificationModel().create(created_by=self.u1, |
|
274 | notification2 = NotificationModel().create(created_by=self.u1, | |
250 | subject=u'subj', body=u'hi there2', |
|
275 | subject=u'subj', body=u'hi there2', | |
251 | recipients=[self.u3]) |
|
276 | recipients=[self.u3]) | |
252 | Session.commit() |
|
277 | Session.commit() | |
253 | u3 = Session.query(User).get(self.u3) |
|
278 | u3 = Session.query(User).get(self.u3) | |
254 |
|
279 | |||
255 | self.assertEqual(sorted([x.notification for x in u3.notifications]), |
|
280 | self.assertEqual(sorted([x.notification for x in u3.notifications]), | |
256 | sorted([notification2, notification1])) |
|
281 | sorted([notification2, notification1])) | |
257 |
|
282 | |||
258 | def test_delete_notifications(self): |
|
283 | def test_delete_notifications(self): | |
259 | self.assertEqual([], Notification.query().all()) |
|
284 | self.assertEqual([], Notification.query().all()) | |
260 | self.assertEqual([], UserNotification.query().all()) |
|
285 | self.assertEqual([], UserNotification.query().all()) | |
261 |
|
286 | |||
262 | notification = NotificationModel().create(created_by=self.u1, |
|
287 | notification = NotificationModel().create(created_by=self.u1, | |
263 | subject=u'title', body=u'hi there3', |
|
288 | subject=u'title', body=u'hi there3', | |
264 | recipients=[self.u3, self.u1, self.u2]) |
|
289 | recipients=[self.u3, self.u1, self.u2]) | |
265 | Session.commit() |
|
290 | Session.commit() | |
266 | notifications = Notification.query().all() |
|
291 | notifications = Notification.query().all() | |
267 | self.assertTrue(notification in notifications) |
|
292 | self.assertTrue(notification in notifications) | |
268 |
|
293 | |||
269 | Notification.delete(notification.notification_id) |
|
294 | Notification.delete(notification.notification_id) | |
270 | Session.commit() |
|
295 | Session.commit() | |
271 |
|
296 | |||
272 | notifications = Notification.query().all() |
|
297 | notifications = Notification.query().all() | |
273 | self.assertFalse(notification in notifications) |
|
298 | self.assertFalse(notification in notifications) | |
274 |
|
299 | |||
275 | un = UserNotification.query().filter(UserNotification.notification |
|
300 | un = UserNotification.query().filter(UserNotification.notification | |
276 | == notification).all() |
|
301 | == notification).all() | |
277 | self.assertEqual(un, []) |
|
302 | self.assertEqual(un, []) | |
278 |
|
303 | |||
279 | def test_delete_association(self): |
|
304 | def test_delete_association(self): | |
280 |
|
305 | |||
281 | self.assertEqual([], Notification.query().all()) |
|
306 | self.assertEqual([], Notification.query().all()) | |
282 | self.assertEqual([], UserNotification.query().all()) |
|
307 | self.assertEqual([], UserNotification.query().all()) | |
283 |
|
308 | |||
284 | notification = NotificationModel().create(created_by=self.u1, |
|
309 | notification = NotificationModel().create(created_by=self.u1, | |
285 | subject=u'title', body=u'hi there3', |
|
310 | subject=u'title', body=u'hi there3', | |
286 | recipients=[self.u3, self.u1, self.u2]) |
|
311 | recipients=[self.u3, self.u1, self.u2]) | |
287 | Session.commit() |
|
312 | Session.commit() | |
288 |
|
313 | |||
289 | unotification = UserNotification.query()\ |
|
314 | unotification = UserNotification.query()\ | |
290 | .filter(UserNotification.notification == |
|
315 | .filter(UserNotification.notification == | |
291 | notification)\ |
|
316 | notification)\ | |
292 | .filter(UserNotification.user_id == self.u3)\ |
|
317 | .filter(UserNotification.user_id == self.u3)\ | |
293 | .scalar() |
|
318 | .scalar() | |
294 |
|
319 | |||
295 | self.assertEqual(unotification.user_id, self.u3) |
|
320 | self.assertEqual(unotification.user_id, self.u3) | |
296 |
|
321 | |||
297 | NotificationModel().delete(self.u3, |
|
322 | NotificationModel().delete(self.u3, | |
298 | notification.notification_id) |
|
323 | notification.notification_id) | |
299 | Session.commit() |
|
324 | Session.commit() | |
300 |
|
325 | |||
301 | u3notification = UserNotification.query()\ |
|
326 | u3notification = UserNotification.query()\ | |
302 | .filter(UserNotification.notification == |
|
327 | .filter(UserNotification.notification == | |
303 | notification)\ |
|
328 | notification)\ | |
304 | .filter(UserNotification.user_id == self.u3)\ |
|
329 | .filter(UserNotification.user_id == self.u3)\ | |
305 | .scalar() |
|
330 | .scalar() | |
306 |
|
331 | |||
307 | self.assertEqual(u3notification, None) |
|
332 | self.assertEqual(u3notification, None) | |
308 |
|
333 | |||
309 | # notification object is still there |
|
334 | # notification object is still there | |
310 | self.assertEqual(Notification.query().all(), [notification]) |
|
335 | self.assertEqual(Notification.query().all(), [notification]) | |
311 |
|
336 | |||
312 | #u1 and u2 still have assignments |
|
337 | #u1 and u2 still have assignments | |
313 | u1notification = UserNotification.query()\ |
|
338 | u1notification = UserNotification.query()\ | |
314 | .filter(UserNotification.notification == |
|
339 | .filter(UserNotification.notification == | |
315 | notification)\ |
|
340 | notification)\ | |
316 | .filter(UserNotification.user_id == self.u1)\ |
|
341 | .filter(UserNotification.user_id == self.u1)\ | |
317 | .scalar() |
|
342 | .scalar() | |
318 | self.assertNotEqual(u1notification, None) |
|
343 | self.assertNotEqual(u1notification, None) | |
319 | u2notification = UserNotification.query()\ |
|
344 | u2notification = UserNotification.query()\ | |
320 | .filter(UserNotification.notification == |
|
345 | .filter(UserNotification.notification == | |
321 | notification)\ |
|
346 | notification)\ | |
322 | .filter(UserNotification.user_id == self.u2)\ |
|
347 | .filter(UserNotification.user_id == self.u2)\ | |
323 | .scalar() |
|
348 | .scalar() | |
324 | self.assertNotEqual(u2notification, None) |
|
349 | self.assertNotEqual(u2notification, None) | |
325 |
|
350 | |||
326 | def test_notification_counter(self): |
|
351 | def test_notification_counter(self): | |
327 | self._clean_notifications() |
|
352 | self._clean_notifications() | |
328 | self.assertEqual([], Notification.query().all()) |
|
353 | self.assertEqual([], Notification.query().all()) | |
329 | self.assertEqual([], UserNotification.query().all()) |
|
354 | self.assertEqual([], UserNotification.query().all()) | |
330 |
|
355 | |||
331 | NotificationModel().create(created_by=self.u1, |
|
356 | NotificationModel().create(created_by=self.u1, | |
332 | subject=u'title', body=u'hi there_delete', |
|
357 | subject=u'title', body=u'hi there_delete', | |
333 | recipients=[self.u3, self.u1]) |
|
358 | recipients=[self.u3, self.u1]) | |
334 | Session.commit() |
|
359 | Session.commit() | |
335 |
|
360 | |||
336 | self.assertEqual(NotificationModel() |
|
361 | self.assertEqual(NotificationModel() | |
337 | .get_unread_cnt_for_user(self.u1), 1) |
|
362 | .get_unread_cnt_for_user(self.u1), 1) | |
338 | self.assertEqual(NotificationModel() |
|
363 | self.assertEqual(NotificationModel() | |
339 | .get_unread_cnt_for_user(self.u2), 0) |
|
364 | .get_unread_cnt_for_user(self.u2), 0) | |
340 | self.assertEqual(NotificationModel() |
|
365 | self.assertEqual(NotificationModel() | |
341 | .get_unread_cnt_for_user(self.u3), 1) |
|
366 | .get_unread_cnt_for_user(self.u3), 1) | |
342 |
|
367 | |||
343 | notification = NotificationModel().create(created_by=self.u1, |
|
368 | notification = NotificationModel().create(created_by=self.u1, | |
344 | subject=u'title', body=u'hi there3', |
|
369 | subject=u'title', body=u'hi there3', | |
345 | recipients=[self.u3, self.u1, self.u2]) |
|
370 | recipients=[self.u3, self.u1, self.u2]) | |
346 | Session.commit() |
|
371 | Session.commit() | |
347 |
|
372 | |||
348 | self.assertEqual(NotificationModel() |
|
373 | self.assertEqual(NotificationModel() | |
349 | .get_unread_cnt_for_user(self.u1), 2) |
|
374 | .get_unread_cnt_for_user(self.u1), 2) | |
350 | self.assertEqual(NotificationModel() |
|
375 | self.assertEqual(NotificationModel() | |
351 | .get_unread_cnt_for_user(self.u2), 1) |
|
376 | .get_unread_cnt_for_user(self.u2), 1) | |
352 | self.assertEqual(NotificationModel() |
|
377 | self.assertEqual(NotificationModel() | |
353 | .get_unread_cnt_for_user(self.u3), 2) |
|
378 | .get_unread_cnt_for_user(self.u3), 2) | |
354 |
|
379 | |||
355 |
|
380 | |||
356 | class TestUsers(unittest.TestCase): |
|
381 | class TestUsers(unittest.TestCase): | |
357 |
|
382 | |||
358 | def __init__(self, methodName='runTest'): |
|
383 | def __init__(self, methodName='runTest'): | |
359 | super(TestUsers, self).__init__(methodName=methodName) |
|
384 | super(TestUsers, self).__init__(methodName=methodName) | |
360 |
|
385 | |||
361 | def setUp(self): |
|
386 | def setUp(self): | |
362 | self.u1 = UserModel().create_or_update(username=u'u1', |
|
387 | self.u1 = UserModel().create_or_update(username=u'u1', | |
363 | password=u'qweqwe', |
|
388 | password=u'qweqwe', | |
364 | email=u'u1@rhodecode.org', |
|
389 | email=u'u1@rhodecode.org', | |
365 | name=u'u1', lastname=u'u1') |
|
390 | name=u'u1', lastname=u'u1') | |
366 |
|
391 | |||
367 | def tearDown(self): |
|
392 | def tearDown(self): | |
368 | perm = Permission.query().all() |
|
393 | perm = Permission.query().all() | |
369 | for p in perm: |
|
394 | for p in perm: | |
370 | UserModel().revoke_perm(self.u1, p) |
|
395 | UserModel().revoke_perm(self.u1, p) | |
371 |
|
396 | |||
372 | UserModel().delete(self.u1) |
|
397 | UserModel().delete(self.u1) | |
373 | Session.commit() |
|
398 | Session.commit() | |
374 |
|
399 | |||
375 | def test_add_perm(self): |
|
400 | def test_add_perm(self): | |
376 | perm = Permission.query().all()[0] |
|
401 | perm = Permission.query().all()[0] | |
377 | UserModel().grant_perm(self.u1, perm) |
|
402 | UserModel().grant_perm(self.u1, perm) | |
378 | Session.commit() |
|
403 | Session.commit() | |
379 | self.assertEqual(UserModel().has_perm(self.u1, perm), True) |
|
404 | self.assertEqual(UserModel().has_perm(self.u1, perm), True) | |
380 |
|
405 | |||
381 | def test_has_perm(self): |
|
406 | def test_has_perm(self): | |
382 | perm = Permission.query().all() |
|
407 | perm = Permission.query().all() | |
383 | for p in perm: |
|
408 | for p in perm: | |
384 | has_p = UserModel().has_perm(self.u1, p) |
|
409 | has_p = UserModel().has_perm(self.u1, p) | |
385 | self.assertEqual(False, has_p) |
|
410 | self.assertEqual(False, has_p) | |
386 |
|
411 | |||
387 | def test_revoke_perm(self): |
|
412 | def test_revoke_perm(self): | |
388 | perm = Permission.query().all()[0] |
|
413 | perm = Permission.query().all()[0] | |
389 | UserModel().grant_perm(self.u1, perm) |
|
414 | UserModel().grant_perm(self.u1, perm) | |
390 | Session.commit() |
|
415 | Session.commit() | |
391 | self.assertEqual(UserModel().has_perm(self.u1, perm), True) |
|
416 | self.assertEqual(UserModel().has_perm(self.u1, perm), True) | |
392 |
|
417 | |||
393 | #revoke |
|
418 | #revoke | |
394 | UserModel().revoke_perm(self.u1, perm) |
|
419 | UserModel().revoke_perm(self.u1, perm) | |
395 | Session.commit() |
|
420 | Session.commit() | |
396 | self.assertEqual(UserModel().has_perm(self.u1, perm), False) |
|
421 | self.assertEqual(UserModel().has_perm(self.u1, perm), False) | |
397 |
|
422 | |||
398 |
|
423 | |||
399 | class TestPermissions(unittest.TestCase): |
|
424 | class TestPermissions(unittest.TestCase): | |
400 | def __init__(self, methodName='runTest'): |
|
425 | def __init__(self, methodName='runTest'): | |
401 | super(TestPermissions, self).__init__(methodName=methodName) |
|
426 | super(TestPermissions, self).__init__(methodName=methodName) | |
402 |
|
427 | |||
403 | def setUp(self): |
|
428 | def setUp(self): | |
404 | self.u1 = UserModel().create_or_update( |
|
429 | self.u1 = UserModel().create_or_update( | |
405 | username=u'u1', password=u'qweqwe', |
|
430 | username=u'u1', password=u'qweqwe', | |
406 | email=u'u1@rhodecode.org', name=u'u1', lastname=u'u1' |
|
431 | email=u'u1@rhodecode.org', name=u'u1', lastname=u'u1' | |
407 | ) |
|
432 | ) | |
408 | self.a1 = UserModel().create_or_update( |
|
433 | self.a1 = UserModel().create_or_update( | |
409 | username=u'a1', password=u'qweqwe', |
|
434 | username=u'a1', password=u'qweqwe', | |
410 | email=u'a1@rhodecode.org', name=u'a1', lastname=u'a1', admin=True |
|
435 | email=u'a1@rhodecode.org', name=u'a1', lastname=u'a1', admin=True | |
411 | ) |
|
436 | ) | |
412 | Session.commit() |
|
437 | Session.commit() | |
413 |
|
438 | |||
414 | def tearDown(self): |
|
439 | def tearDown(self): | |
415 | UserModel().delete(self.u1) |
|
440 | UserModel().delete(self.u1) | |
416 | UserModel().delete(self.a1) |
|
441 | UserModel().delete(self.a1) | |
417 | if hasattr(self, 'g1'): |
|
442 | if hasattr(self, 'g1'): | |
418 | ReposGroupModel().delete(self.g1.group_id) |
|
443 | ReposGroupModel().delete(self.g1.group_id) | |
419 | if hasattr(self, 'g2'): |
|
444 | if hasattr(self, 'g2'): | |
420 | ReposGroupModel().delete(self.g2.group_id) |
|
445 | ReposGroupModel().delete(self.g2.group_id) | |
421 |
|
446 | |||
422 | if hasattr(self, 'ug1'): |
|
447 | if hasattr(self, 'ug1'): | |
423 | UsersGroupModel().delete(self.ug1, force=True) |
|
448 | UsersGroupModel().delete(self.ug1, force=True) | |
424 |
|
449 | |||
425 | Session.commit() |
|
450 | Session.commit() | |
426 |
|
451 | |||
427 | def test_default_perms_set(self): |
|
452 | def test_default_perms_set(self): | |
428 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
453 | u1_auth = AuthUser(user_id=self.u1.user_id) | |
429 | perms = { |
|
454 | perms = { | |
430 | 'repositories_groups': {}, |
|
455 | 'repositories_groups': {}, | |
431 | 'global': set([u'hg.create.repository', u'repository.read', |
|
456 | 'global': set([u'hg.create.repository', u'repository.read', | |
432 | u'hg.register.manual_activate']), |
|
457 | u'hg.register.manual_activate']), | |
433 | 'repositories': {u'vcs_test_hg': u'repository.read'} |
|
458 | 'repositories': {u'vcs_test_hg': u'repository.read'} | |
434 | } |
|
459 | } | |
435 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
460 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], | |
436 | perms['repositories'][HG_REPO]) |
|
461 | perms['repositories'][HG_REPO]) | |
437 | new_perm = 'repository.write' |
|
462 | new_perm = 'repository.write' | |
438 | RepoModel().grant_user_permission(repo=HG_REPO, user=self.u1, perm=new_perm) |
|
463 | RepoModel().grant_user_permission(repo=HG_REPO, user=self.u1, perm=new_perm) | |
439 | Session.commit() |
|
464 | Session.commit() | |
440 |
|
465 | |||
441 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
466 | u1_auth = AuthUser(user_id=self.u1.user_id) | |
442 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], new_perm) |
|
467 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], new_perm) | |
443 |
|
468 | |||
444 | def test_default_admin_perms_set(self): |
|
469 | def test_default_admin_perms_set(self): | |
445 | a1_auth = AuthUser(user_id=self.a1.user_id) |
|
470 | a1_auth = AuthUser(user_id=self.a1.user_id) | |
446 | perms = { |
|
471 | perms = { | |
447 | 'repositories_groups': {}, |
|
472 | 'repositories_groups': {}, | |
448 | 'global': set([u'hg.admin']), |
|
473 | 'global': set([u'hg.admin']), | |
449 | 'repositories': {u'vcs_test_hg': u'repository.admin'} |
|
474 | 'repositories': {u'vcs_test_hg': u'repository.admin'} | |
450 | } |
|
475 | } | |
451 | self.assertEqual(a1_auth.permissions['repositories'][HG_REPO], |
|
476 | self.assertEqual(a1_auth.permissions['repositories'][HG_REPO], | |
452 | perms['repositories'][HG_REPO]) |
|
477 | perms['repositories'][HG_REPO]) | |
453 | new_perm = 'repository.write' |
|
478 | new_perm = 'repository.write' | |
454 | RepoModel().grant_user_permission(repo=HG_REPO, user=self.a1, perm=new_perm) |
|
479 | RepoModel().grant_user_permission(repo=HG_REPO, user=self.a1, perm=new_perm) | |
455 | Session.commit() |
|
480 | Session.commit() | |
456 | # cannot really downgrade admins permissions !? they still get's set as |
|
481 | # cannot really downgrade admins permissions !? they still get's set as | |
457 | # admin ! |
|
482 | # admin ! | |
458 | u1_auth = AuthUser(user_id=self.a1.user_id) |
|
483 | u1_auth = AuthUser(user_id=self.a1.user_id) | |
459 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
484 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], | |
460 | perms['repositories'][HG_REPO]) |
|
485 | perms['repositories'][HG_REPO]) | |
461 |
|
486 | |||
462 | def test_default_group_perms(self): |
|
487 | def test_default_group_perms(self): | |
463 | self.g1 = _make_group('test1', skip_if_exists=True) |
|
488 | self.g1 = _make_group('test1', skip_if_exists=True) | |
464 | self.g2 = _make_group('test2', skip_if_exists=True) |
|
489 | self.g2 = _make_group('test2', skip_if_exists=True) | |
465 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
490 | u1_auth = AuthUser(user_id=self.u1.user_id) | |
466 | perms = { |
|
491 | perms = { | |
467 | 'repositories_groups': {u'test1': 'group.read', u'test2': 'group.read'}, |
|
492 | 'repositories_groups': {u'test1': 'group.read', u'test2': 'group.read'}, | |
468 | 'global': set([u'hg.create.repository', u'repository.read', u'hg.register.manual_activate']), |
|
493 | 'global': set([u'hg.create.repository', u'repository.read', u'hg.register.manual_activate']), | |
469 | 'repositories': {u'vcs_test_hg': u'repository.read'} |
|
494 | 'repositories': {u'vcs_test_hg': u'repository.read'} | |
470 | } |
|
495 | } | |
471 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
496 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], | |
472 | perms['repositories'][HG_REPO]) |
|
497 | perms['repositories'][HG_REPO]) | |
473 | self.assertEqual(u1_auth.permissions['repositories_groups'], |
|
498 | self.assertEqual(u1_auth.permissions['repositories_groups'], | |
474 | perms['repositories_groups']) |
|
499 | perms['repositories_groups']) | |
475 |
|
500 | |||
476 | def test_default_admin_group_perms(self): |
|
501 | def test_default_admin_group_perms(self): | |
477 | self.g1 = _make_group('test1', skip_if_exists=True) |
|
502 | self.g1 = _make_group('test1', skip_if_exists=True) | |
478 | self.g2 = _make_group('test2', skip_if_exists=True) |
|
503 | self.g2 = _make_group('test2', skip_if_exists=True) | |
479 | a1_auth = AuthUser(user_id=self.a1.user_id) |
|
504 | a1_auth = AuthUser(user_id=self.a1.user_id) | |
480 | perms = { |
|
505 | perms = { | |
481 | 'repositories_groups': {u'test1': 'group.admin', u'test2': 'group.admin'}, |
|
506 | 'repositories_groups': {u'test1': 'group.admin', u'test2': 'group.admin'}, | |
482 | 'global': set(['hg.admin']), |
|
507 | 'global': set(['hg.admin']), | |
483 | 'repositories': {u'vcs_test_hg': 'repository.admin'} |
|
508 | 'repositories': {u'vcs_test_hg': 'repository.admin'} | |
484 | } |
|
509 | } | |
485 |
|
510 | |||
486 | self.assertEqual(a1_auth.permissions['repositories'][HG_REPO], |
|
511 | self.assertEqual(a1_auth.permissions['repositories'][HG_REPO], | |
487 | perms['repositories'][HG_REPO]) |
|
512 | perms['repositories'][HG_REPO]) | |
488 | self.assertEqual(a1_auth.permissions['repositories_groups'], |
|
513 | self.assertEqual(a1_auth.permissions['repositories_groups'], | |
489 | perms['repositories_groups']) |
|
514 | perms['repositories_groups']) | |
490 |
|
515 | |||
491 | def test_propagated_permission_from_users_group(self): |
|
516 | def test_propagated_permission_from_users_group(self): | |
492 | # make group |
|
517 | # make group | |
493 | self.ug1 = UsersGroupModel().create('G1') |
|
518 | self.ug1 = UsersGroupModel().create('G1') | |
494 | # add user to group |
|
519 | # add user to group | |
495 | UsersGroupModel().add_user_to_group(self.ug1, self.u1) |
|
520 | UsersGroupModel().add_user_to_group(self.ug1, self.u1) | |
496 |
|
521 | |||
497 | # set permission to lower |
|
522 | # set permission to lower | |
498 | new_perm = 'repository.none' |
|
523 | new_perm = 'repository.none' | |
499 | RepoModel().grant_user_permission(repo=HG_REPO, user=self.u1, perm=new_perm) |
|
524 | RepoModel().grant_user_permission(repo=HG_REPO, user=self.u1, perm=new_perm) | |
500 | Session.commit() |
|
525 | Session.commit() | |
501 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
526 | u1_auth = AuthUser(user_id=self.u1.user_id) | |
502 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
527 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], | |
503 | new_perm) |
|
528 | new_perm) | |
504 |
|
529 | |||
505 | # grant perm for group this should override permission from user |
|
530 | # grant perm for group this should override permission from user | |
506 | new_perm = 'repository.write' |
|
531 | new_perm = 'repository.write' | |
507 | RepoModel().grant_users_group_permission(repo=HG_REPO, |
|
532 | RepoModel().grant_users_group_permission(repo=HG_REPO, | |
508 | group_name=self.ug1, |
|
533 | group_name=self.ug1, | |
509 | perm=new_perm) |
|
534 | perm=new_perm) | |
510 | # check perms |
|
535 | # check perms | |
511 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
536 | u1_auth = AuthUser(user_id=self.u1.user_id) | |
512 | perms = { |
|
537 | perms = { | |
513 | 'repositories_groups': {}, |
|
538 | 'repositories_groups': {}, | |
514 | 'global': set([u'hg.create.repository', u'repository.read', |
|
539 | 'global': set([u'hg.create.repository', u'repository.read', | |
515 | u'hg.register.manual_activate']), |
|
540 | u'hg.register.manual_activate']), | |
516 | 'repositories': {u'vcs_test_hg': u'repository.read'} |
|
541 | 'repositories': {u'vcs_test_hg': u'repository.read'} | |
517 | } |
|
542 | } | |
518 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
543 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], | |
519 | new_perm) |
|
544 | new_perm) | |
520 | self.assertEqual(u1_auth.permissions['repositories_groups'], |
|
545 | self.assertEqual(u1_auth.permissions['repositories_groups'], | |
521 | perms['repositories_groups']) |
|
546 | perms['repositories_groups']) | |
522 |
|
547 | |||
523 | def test_propagated_permission_from_users_group_lower_weight(self): |
|
548 | def test_propagated_permission_from_users_group_lower_weight(self): | |
524 | # make group |
|
549 | # make group | |
525 | self.ug1 = UsersGroupModel().create('G1') |
|
550 | self.ug1 = UsersGroupModel().create('G1') | |
526 | # add user to group |
|
551 | # add user to group | |
527 | UsersGroupModel().add_user_to_group(self.ug1, self.u1) |
|
552 | UsersGroupModel().add_user_to_group(self.ug1, self.u1) | |
528 |
|
553 | |||
529 | # set permission to lower |
|
554 | # set permission to lower | |
530 | new_perm_h = 'repository.write' |
|
555 | new_perm_h = 'repository.write' | |
531 | RepoModel().grant_user_permission(repo=HG_REPO, user=self.u1, |
|
556 | RepoModel().grant_user_permission(repo=HG_REPO, user=self.u1, | |
532 | perm=new_perm_h) |
|
557 | perm=new_perm_h) | |
533 | Session.commit() |
|
558 | Session.commit() | |
534 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
559 | u1_auth = AuthUser(user_id=self.u1.user_id) | |
535 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
560 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], | |
536 | new_perm_h) |
|
561 | new_perm_h) | |
537 |
|
562 | |||
538 | # grant perm for group this should NOT override permission from user |
|
563 | # grant perm for group this should NOT override permission from user | |
539 | # since it's lower than granted |
|
564 | # since it's lower than granted | |
540 | new_perm_l = 'repository.read' |
|
565 | new_perm_l = 'repository.read' | |
541 | RepoModel().grant_users_group_permission(repo=HG_REPO, |
|
566 | RepoModel().grant_users_group_permission(repo=HG_REPO, | |
542 | group_name=self.ug1, |
|
567 | group_name=self.ug1, | |
543 | perm=new_perm_l) |
|
568 | perm=new_perm_l) | |
544 | # check perms |
|
569 | # check perms | |
545 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
570 | u1_auth = AuthUser(user_id=self.u1.user_id) | |
546 | perms = { |
|
571 | perms = { | |
547 | 'repositories_groups': {}, |
|
572 | 'repositories_groups': {}, | |
548 | 'global': set([u'hg.create.repository', u'repository.read', |
|
573 | 'global': set([u'hg.create.repository', u'repository.read', | |
549 | u'hg.register.manual_activate']), |
|
574 | u'hg.register.manual_activate']), | |
550 | 'repositories': {u'vcs_test_hg': u'repository.write'} |
|
575 | 'repositories': {u'vcs_test_hg': u'repository.write'} | |
551 | } |
|
576 | } | |
552 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
577 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], | |
553 | new_perm_h) |
|
578 | new_perm_h) | |
554 | self.assertEqual(u1_auth.permissions['repositories_groups'], |
|
579 | self.assertEqual(u1_auth.permissions['repositories_groups'], | |
555 | perms['repositories_groups']) |
|
580 | perms['repositories_groups']) |
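As a compact summary of what TestPermissions asserts against, the permissions mapping on AuthUser has three keys; the example values in the comments below are copied from the perms dictionaries in the tests above, while user_id is a placeholder for any of the test users:

    from rhodecode.lib.auth import AuthUser

    auth = AuthUser(user_id=user_id)         # user_id: placeholder
    auth.permissions['global']               # e.g. set([u'hg.admin'])
    auth.permissions['repositories']         # e.g. {u'vcs_test_hg': u'repository.read'}
    auth.permissions['repositories_groups']  # e.g. {u'test1': u'group.read'}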