Show More
@@ -1,18 +1,19 b'' | |||
|
1 | 1 | List of contributors to RhodeCode project: |
|
2 | 2 | Marcin Kuźmiński <marcin@python-works.com> |
|
3 | 3 | Lukasz Balcerzak <lukaszbalcerzak@gmail.com> |
|
4 | 4 | Jason Harris <jason@jasonfharris.com> |
|
5 | 5 | Thayne Harbaugh <thayne@fusionio.com> |
|
6 | 6 | cejones |
|
7 | 7 | Thomas Waldmann <tw-public@gmx.de> |
|
8 | 8 | Lorenzo M. Catucci <lorenzo@sancho.ccd.uniroma2.it> |
|
9 | 9 | Dmitri Kuznetsov |
|
10 | 10 | Jared Bunting <jared.bunting@peachjean.com> |
|
11 | 11 | Steve Romanow <slestak989@gmail.com> |
|
12 | 12 | Augusto Herrmann <augusto.herrmann@planejamento.gov.br> |
|
13 | 13 | Ankit Solanki <ankit.solanki@gmail.com> |
|
14 | 14 | Liad Shani <liadff@gmail.com> |
|
15 | 15 | Les Peabody <lpeabody@gmail.com> |
|
16 | 16 | Jonas Oberschweiber <jonas.oberschweiber@d-velop.de> |
|
17 | 17 | Matt Zuba <matt.zuba@goodwillaz.org> |
|
18 | Aras Pranckevicius <aras@unity3d.com> No newline at end of file | |
|
18 | Aras Pranckevicius <aras@unity3d.com> | |
|
19 | Tony Bussieres <t.bussieres@gmail.com> |
@@ -1,516 +1,535 b'' | |||
|
1 | 1 | .. _changelog: |
|
2 | 2 | |
|
3 | 3 | Changelog |
|
4 | 4 | ========= |
|
5 | 5 | |
|
6 | 6 | |
|
7 | 1.3.2 (**2012-02-28**) | |
|
8 | ---------------------- | |
|
9 | ||
|
10 | news | |
|
11 | ++++ | |
|
12 | ||
|
13 | ||
|
14 | fixes | |
|
15 | +++++ | |
|
16 | ||
|
17 | - fixed git protocol issues with repos-groups | |
|
18 | - fixed git remote repos validator that prevented from cloning remote git repos | |
|
19 | - fixes #370 ending slashes fixes for repo and groups | |
|
20 | - fixes #368 improved git-protocol detection to handle other clients | |
|
21 | - fixes #366 When Setting Repository Group To Blank Repo Group Wont Be | |
|
22 | Moved To Root | |
|
23 | - fixes #371 fixed issues with beaker/sqlalchemy and non-ascii cache keys | |
|
24 | - fixed #373 missing cascade drop on user_group_to_perm table | |
|
25 | ||
|
7 | 26 | 1.3.1 (**2012-02-27**) |
|
8 | 27 | ---------------------- |
|
9 | 28 | |
|
10 | 29 | news |
|
11 | 30 | ++++ |
|
12 | 31 | |
|
13 | 32 | |
|
14 | 33 | fixes |
|
15 | 34 | +++++ |
|
16 | 35 | |
|
17 | 36 | - redirection loop occurs when remember-me wasn't checked during login |
|
18 | 37 | - fixes issues with git blob history generation |
|
19 | 38 | - don't fetch branch for git in file history dropdown. Causes unneeded slowness |
|
20 | 39 | |
|
21 | 40 | 1.3.0 (**2012-02-26**) |
|
22 | 41 | ---------------------- |
|
23 | 42 | |
|
24 | 43 | news |
|
25 | 44 | ++++ |
|
26 | 45 | |
|
27 | 46 | - code review, inspired by github code-comments |
|
28 | 47 | - #215 rst and markdown README files support |
|
29 | 48 | - #252 Container-based and proxy pass-through authentication support |
|
30 | 49 | - #44 branch browser. Filtering of changelog by branches |
|
31 | 50 | - mercurial bookmarks support |
|
32 | 51 | - new hover top menu, optimized to add maximum size for important views |
|
33 | 52 | - configurable clone url template with possibility to specify protocol like |
|
34 | 53 | ssh:// or http:// and also manually alter other parts of clone_url. |
|
35 | 54 | - enabled largefiles extension by default |
|
36 | 55 | - optimized summary file pages and saved a lot of unused space in them |
|
37 | 56 | - #239 option to manually mark repository as fork |
|
38 | 57 | - #320 mapping of commit authors to RhodeCode users |
|
39 | 58 | - #304 hashes are displayed using monospace font |
|
40 | 59 | - diff configuration, toggle white lines and context lines |
|
41 | 60 | - #307 configurable diffs, whitespace toggle, increasing context lines |
|
42 | 61 | - sorting on branches, tags and bookmarks using YUI datatable |
|
43 | 62 | - improved file filter on files page |
|
44 | 63 | - implements #330 api method for listing nodes at particular revision |
|
45 | 64 | - #73 added linking issues in commit messages to chosen issue tracker url |
|
46 | 65 | based on user defined regular expression |
|
47 | 66 | - added linking of changesets in commit messages |
|
48 | 67 | - new compact changelog with expandable commit messages |
|
49 | 68 | - firstname and lastname are optional in user creation |
|
50 | 69 | - #348 added post-create repository hook |
|
51 | 70 | - #212 global encoding settings is now configurable from .ini files |
|
52 | 71 | - #227 added repository groups permissions |
|
53 | 72 | - markdown gets codehilite extensions |
|
54 | 73 | - new API methods, delete_repositories, grant/revoke permissions for groups |
|
55 | 74 | and repos |
|
56 | 75 | |
|
57 | 76 | |
|
58 | 77 | fixes |
|
59 | 78 | +++++ |
|
60 | 79 | |
|
61 | 80 | - rewrote dbsession management for atomic operations, and better error handling |
|
62 | 81 | - fixed sorting of repo tables |
|
63 | 82 | - #326 escape of special html entities in diffs |
|
64 | 83 | - normalized user_name => username in api attributes |
|
65 | 84 | - fixes #298 ldap created users with mixed case emails created conflicts |
|
66 | 85 | on saving a form |
|
67 | 86 | - fixes issue when owner of a repo couldn't revoke permissions for users |
|
68 | 87 | and groups |
|
69 | 88 | - fixes #271 rare JSON serialization problem with statistics |
|
70 | 89 | - fixes #337 missing validation check for conflicting names of a group with a |
|
71 | 90 | repositories group |
|
72 | 91 | - #340 fixed session problem for mysql and celery tasks |
|
73 | 92 | - fixed #331 RhodeCode mangles repository names if a repository group |
|
74 | 93 | contains the "full path" to the repositories |
|
75 | 94 | - #355 RhodeCode doesn't store encrypted LDAP passwords |
|
76 | 95 | |
|
77 | 96 | 1.2.5 (**2012-01-28**) |
|
78 | 97 | ---------------------- |
|
79 | 98 | |
|
80 | 99 | news |
|
81 | 100 | ++++ |
|
82 | 101 | |
|
83 | 102 | fixes |
|
84 | 103 | +++++ |
|
85 | 104 | |
|
86 | 105 | - #340 Celery complains about MySQL server gone away, added session cleanup |
|
87 | 106 | for celery tasks |
|
88 | 107 | - #341 "scanning for repositories in None" log message during Rescan was missing |
|
89 | 108 | a parameter |
|
90 | 109 | - fixed creating archives with subrepos. Some hooks were triggered during that |
|
91 | 110 | operation leading to crash. |
|
92 | 111 | - fixed missing email in account page. |
|
93 | 112 | - Reverted Mercurial to 2.0.1 for windows due to bug in Mercurial that makes |
|
94 | 113 | forking on windows impossible |
|
95 | 114 | |
|
96 | 115 | 1.2.4 (**2012-01-19**) |
|
97 | 116 | ---------------------- |
|
98 | 117 | |
|
99 | 118 | news |
|
100 | 119 | ++++ |
|
101 | 120 | |
|
102 | 121 | - RhodeCode is bundled with mercurial series 2.0.X by default, with |
|
103 | 122 | full support to largefiles extension. Enabled by default in new installations |
|
104 | 123 | - #329 Ability to Add/Remove Groups to/from a Repository via API |
|
105 | 124 | - added requires.txt file with requirements |
|
106 | 125 | |
|
107 | 126 | fixes |
|
108 | 127 | +++++ |
|
109 | 128 | |
|
110 | 129 | - fixes db session issues with celery when emailing admins |
|
111 | 130 | - #331 RhodeCode mangles repository names if a repository group |
|
112 | 131 | contains the "full path" to the repositories |
|
113 | 132 | - #298 Conflicting e-mail addresses for LDAP and RhodeCode users |
|
114 | 133 | - DB session cleanup after hg protocol operations, fixes issues with |
|
115 | 134 | `mysql has gone away` errors |
|
116 | 135 | - #333 doc fixes for get_repo api function |
|
117 | 136 | - #271 rare JSON serialization problem with statistics enabled |
|
118 | 137 | - #337 Fixes issues with validation of repository name conflicting with |
|
119 | 138 | a group name. A proper message is now displayed. |
|
120 | 139 | - #292 made ldap_dn in user edit readonly, to get rid of confusion that field |
|
121 | 140 | doesn't work |
|
122 | 141 | - #316 fixes issues with web description in hgrc files |
|
123 | 142 | |
|
124 | 143 | 1.2.3 (**2011-11-02**) |
|
125 | 144 | ---------------------- |
|
126 | 145 | |
|
127 | 146 | news |
|
128 | 147 | ++++ |
|
129 | 148 | |
|
130 | 149 | - added option to manage repos group for non admin users |
|
131 | 150 | - added following API methods for get_users, create_user, get_users_groups, |
|
132 | 151 | get_users_group, create_users_group, add_user_to_users_groups, get_repos, |
|
133 | 152 | get_repo, create_repo, add_user_to_repo |
|
134 | 153 | - implements #237 added password confirmation for my account |
|
135 | 154 | and admin edit user. |
|
136 | 155 | - implements #291 email notification for global events are now sent to all |
|
137 | 156 | administrator users, and global config email. |
|
138 | 157 | |
|
139 | 158 | fixes |
|
140 | 159 | +++++ |
|
141 | 160 | |
|
142 | 161 | - added option for passing auth method for smtp mailer |
|
143 | 162 | - #276 issue with adding a single user with id>10 to usergroups |
|
144 | 163 | - #277 fixes windows LDAP settings in which missing values breaks the ldap auth |
|
145 | 164 | - #288 fixes managing of repos in a group for non admin user |
|
146 | 165 | |
|
147 | 166 | 1.2.2 (**2011-10-17**) |
|
148 | 167 | ---------------------- |
|
149 | 168 | |
|
150 | 169 | news |
|
151 | 170 | ++++ |
|
152 | 171 | |
|
153 | 172 | - #226 repo groups are available by path instead of numerical id |
|
154 | 173 | |
|
155 | 174 | fixes |
|
156 | 175 | +++++ |
|
157 | 176 | |
|
158 | 177 | - #259 Groups with the same name but with different parent group |
|
159 | 178 | - #260 Put repo in group, then move group to another group -> repo becomes unavailable |
|
160 | 179 | - #258 RhodeCode 1.2 assumes egg folder is writable (lockfiles problems) |
|
161 | 180 | - #265 ldap save fails sometimes on converting attributes to booleans, |
|
162 | 181 | added getter and setter into model that will prevent from this on db model level |
|
163 | 182 | - fixed problems with timestamps issues #251 and #213 |
|
164 | 183 | - fixes #266 RhodeCode allows to create repo with the same name and in |
|
165 | 184 | the same parent as group |
|
166 | 185 | - fixes #245 Rescan of the repositories on Windows |
|
167 | 186 | - fixes #248 cannot edit repos inside a group on windows |
|
168 | 187 | - fixes #219 forking problems on windows |
|
169 | 188 | |
|
170 | 189 | 1.2.1 (**2011-10-08**) |
|
171 | 190 | ---------------------- |
|
172 | 191 | |
|
173 | 192 | news |
|
174 | 193 | ++++ |
|
175 | 194 | |
|
176 | 195 | |
|
177 | 196 | fixes |
|
178 | 197 | +++++ |
|
179 | 198 | |
|
180 | 199 | - fixed problems with basic auth and push problems |
|
181 | 200 | - gui fixes |
|
182 | 201 | - fixed logger |
|
183 | 202 | |
|
184 | 203 | 1.2.0 (**2011-10-07**) |
|
185 | 204 | ---------------------- |
|
186 | 205 | |
|
187 | 206 | news |
|
188 | 207 | ++++ |
|
189 | 208 | |
|
190 | 209 | - implemented #47 repository groups |
|
191 | 210 | - implemented #89 Can setup google analytics code from settings menu |
|
192 | 211 | - implemented #91 added nicer looking archive urls with more download options |
|
193 | 212 | like tags, branches |
|
194 | 213 | - implemented #44 into file browsing, and added follow branch option |
|
195 | 214 | - implemented #84 downloads can be enabled/disabled for each repository |
|
196 | 215 | - anonymous repository can be cloned without having to pass default:default |
|
197 | 216 | into clone url |
|
198 | 217 | - fixed #90 whoosh indexer can index chosen repositories passed in command |
|
199 | 218 | line |
|
200 | 219 | - extended journal with day aggregates and paging |
|
201 | 220 | - implemented #107 source code lines highlight ranges |
|
202 | 221 | - implemented #93 customizable changelog on combined revision ranges - |
|
203 | 222 | equivalent of githubs compare view |
|
204 | 223 | - implemented #108 extended and more powerful LDAP configuration |
|
205 | 224 | - implemented #56 users groups |
|
206 | 225 | - major code rewrites optimized codes for speed and memory usage |
|
207 | 226 | - raw and diff downloads are now in git format |
|
208 | 227 | - setup command checks for write access to given path |
|
209 | 228 | - fixed many issues with international characters and unicode. It uses utf8 |
|
210 | 229 | decode with replace to provide less errors even with non utf8 encoded strings |
|
211 | 230 | - #125 added API KEY access to feeds |
|
212 | 231 | - #109 Repository can be created from external Mercurial link (aka. remote |
|
213 | 232 | repository, and manually updated (via pull) from admin panel |
|
214 | 233 | - beta git support - push/pull server + basic view for git repos |
|
215 | 234 | - added followers page and forks page |
|
216 | 235 | - server side file creation (with binary file upload interface) |
|
217 | 236 | and edition with commits powered by codemirror |
|
218 | 237 | - #111 file browser file finder, quick lookup files on whole file tree |
|
219 | 238 | - added quick login sliding menu into main page |
|
220 | 239 | - changelog uses lazy loading of affected files details, in some scenarios |
|
221 | 240 | this can improve speed of changelog page dramatically especially for |
|
222 | 241 | larger repositories. |
|
223 | 242 | - implements #214 added support for downloading subrepos in download menu. |
|
224 | 243 | - Added basic API for direct operations on rhodecode via JSON |
|
225 | 244 | - Implemented advanced hook management |
|
226 | 245 | |
|
227 | 246 | fixes |
|
228 | 247 | +++++ |
|
229 | 248 | |
|
230 | 249 | - fixed file browser bug, when switching into given form revision the url was |
|
231 | 250 | not changing |
|
232 | 251 | - fixed propagation to error controller on simplehg and simplegit middlewares |
|
233 | 252 | - fixed error when trying to make a download on empty repository |
|
234 | 253 | - fixed problem with '[' chars in commit messages in journal |
|
235 | 254 | - fixed #99 Unicode errors, on file node paths with non utf-8 characters |
|
236 | 255 | - journal fork fixes |
|
237 | 256 | - removed issue with space inside renamed repository after deletion |
|
238 | 257 | - fixed strange issue on formencode imports |
|
239 | 258 | - fixed #126 Deleting repository on Windows, rename used incompatible chars. |
|
240 | 259 | - #150 fixes for errors on repositories mapped in db but corrupted in |
|
241 | 260 | filesystem |
|
242 | 261 | - fixed problem with ascendant characters in realm #181 |
|
243 | 262 | - fixed problem with sqlite file based database connection pool |
|
244 | 263 | - whoosh indexer and code stats share the same dynamic extensions map |
|
245 | 264 | - fixes #188 - relationship delete of repo_to_perm entry on user removal |
|
246 | 265 | - fixes issue #189 Trending source files shows "show more" when no more exist |
|
247 | 266 | - fixes issue #197 Relative paths for pidlocks |
|
248 | 267 | - fixes issue #198 password will require only 3 chars now for login form |
|
249 | 268 | - fixes issue #199 wrong redirection for non admin users after creating a repository |
|
250 | 269 | - fixes issues #202, bad db constraint made impossible to attach same group |
|
251 | 270 | more than one time. Affects only mysql/postgres |
|
252 | 271 | - fixes #218 os.kill patch for windows was missing sig param |
|
253 | 272 | - improved rendering of dag (they are not trimmed anymore when number of |
|
254 | 273 | heads exceeds 5) |
|
255 | 274 | |
|
256 | 275 | 1.1.8 (**2011-04-12**) |
|
257 | 276 | ---------------------- |
|
258 | 277 | |
|
259 | 278 | news |
|
260 | 279 | ++++ |
|
261 | 280 | |
|
262 | 281 | - improved windows support |
|
263 | 282 | |
|
264 | 283 | fixes |
|
265 | 284 | +++++ |
|
266 | 285 | |
|
267 | 286 | - fixed #140 freeze of python dateutil library, since new version is python2.x |
|
268 | 287 | incompatible |
|
269 | 288 | - setup-app will check for write permission in given path |
|
270 | 289 | - cleaned up license info issue #149 |
|
271 | 290 | - fixes for issues #137,#116 and problems with unicode and accented characters. |
|
272 | 291 | - fixes crashes on gravatar, when passed in email as unicode |
|
273 | 292 | - fixed tooltip flickering problems |
|
274 | 293 | - fixed came_from redirection on windows |
|
275 | 294 | - fixed logging modules, and sql formatters |
|
276 | 295 | - windows fixes for os.kill issue #133 |
|
277 | 296 | - fixes path splitting for windows issues #148 |
|
278 | 297 | - fixed issue #143 wrong import on migration to 1.1.X |
|
279 | 298 | - fixed problems with displaying binary files, thanks to Thomas Waldmann |
|
280 | 299 | - removed name from archive files since it's breaking ui for long repo names |
|
281 | 300 | - fixed issue with archive headers sent to browser, thanks to Thomas Waldmann |
|
282 | 301 | - fixed compatibility for 1024px displays, and larger dpi settings, thanks to |
|
283 | 302 | Thomas Waldmann |
|
284 | 303 | - fixed issue #166 summary pager was skipping 10 revisions on second page |
|
285 | 304 | |
|
286 | 305 | |
|
287 | 306 | 1.1.7 (**2011-03-23**) |
|
288 | 307 | ---------------------- |
|
289 | 308 | |
|
290 | 309 | news |
|
291 | 310 | ++++ |
|
292 | 311 | |
|
293 | 312 | fixes |
|
294 | 313 | +++++ |
|
295 | 314 | |
|
296 | 315 | - fixed (again) #136 installation support for FreeBSD |
|
297 | 316 | |
|
298 | 317 | |
|
299 | 318 | 1.1.6 (**2011-03-21**) |
|
300 | 319 | ---------------------- |
|
301 | 320 | |
|
302 | 321 | news |
|
303 | 322 | ++++ |
|
304 | 323 | |
|
305 | 324 | fixes |
|
306 | 325 | +++++ |
|
307 | 326 | |
|
308 | 327 | - fixed #136 installation support for FreeBSD |
|
309 | 328 | - RhodeCode will check for python version during installation |
|
310 | 329 | |
|
311 | 330 | 1.1.5 (**2011-03-17**) |
|
312 | 331 | ---------------------- |
|
313 | 332 | |
|
314 | 333 | news |
|
315 | 334 | ++++ |
|
316 | 335 | |
|
317 | 336 | - basic windows support, by exchanging pybcrypt into sha256 for windows only |
|
318 | 337 | highly inspired by idea of mantis406 |
|
319 | 338 | |
|
320 | 339 | fixes |
|
321 | 340 | +++++ |
|
322 | 341 | |
|
323 | 342 | - fixed sorting by author in main page |
|
324 | 343 | - fixed crashes with diffs on binary files |
|
325 | 344 | - fixed #131 problem with boolean values for LDAP |
|
326 | 345 | - fixed #122 mysql problems thanks to striker69 |
|
327 | 346 | - fixed problem with errors on calling raw/raw_files/annotate functions |
|
328 | 347 | with unknown revisions |
|
329 | 348 | - fixed returned rawfiles attachment names with international character |
|
330 | 349 | - cleaned out docs, big thanks to Jason Harris |
|
331 | 350 | |
|
332 | 351 | 1.1.4 (**2011-02-19**) |
|
333 | 352 | ---------------------- |
|
334 | 353 | |
|
335 | 354 | news |
|
336 | 355 | ++++ |
|
337 | 356 | |
|
338 | 357 | fixes |
|
339 | 358 | +++++ |
|
340 | 359 | |
|
341 | 360 | - fixed formencode import problem on settings page, that caused server crash |
|
342 | 361 | when that page was accessed as first after server start |
|
343 | 362 | - journal fixes |
|
344 | 363 | - fixed option to access repository just by entering http://server/<repo_name> |
|
345 | 364 | |
|
346 | 365 | 1.1.3 (**2011-02-16**) |
|
347 | 366 | ---------------------- |
|
348 | 367 | |
|
349 | 368 | news |
|
350 | 369 | ++++ |
|
351 | 370 | |
|
352 | 371 | - implemented #102 allowing the '.' character in username |
|
353 | 372 | - added option to access repository just by entering http://server/<repo_name> |
|
354 | 373 | - celery task ignores result for better performance |
|
355 | 374 | |
|
356 | 375 | fixes |
|
357 | 376 | +++++ |
|
358 | 377 | |
|
359 | 378 | - fixed ehlo command and non auth mail servers on smtp_lib. Thanks to |
|
360 | 379 | apollo13 and Johan Walles |
|
361 | 380 | - small fixes in journal |
|
362 | 381 | - fixed problems with getting setting for celery from .ini files |
|
363 | 382 | - registration, password reset and login boxes share the same title as main |
|
364 | 383 | application now |
|
365 | 384 | - fixed #113: too high permissions to fork repository |
|
366 | 385 | - fixed problem with '[' chars in commit messages in journal |
|
367 | 386 | - removed issue with space inside renamed repository after deletion |
|
368 | 387 | - db transaction fixes when filesystem repository creation failed |
|
369 | 388 | - fixed #106 relation issues on databases different than sqlite |
|
370 | 389 | - fixed static files paths links to use of url() method |
|
371 | 390 | |
|
372 | 391 | 1.1.2 (**2011-01-12**) |
|
373 | 392 | ---------------------- |
|
374 | 393 | |
|
375 | 394 | news |
|
376 | 395 | ++++ |
|
377 | 396 | |
|
378 | 397 | |
|
379 | 398 | fixes |
|
380 | 399 | +++++ |
|
381 | 400 | |
|
382 | 401 | - fixes #98 protection against float division of percentage stats |
|
383 | 402 | - fixed graph bug |
|
384 | 403 | - forced webhelpers version since it was making troubles during installation |
|
385 | 404 | |
|
386 | 405 | 1.1.1 (**2011-01-06**) |
|
387 | 406 | ---------------------- |
|
388 | 407 | |
|
389 | 408 | news |
|
390 | 409 | ++++ |
|
391 | 410 | |
|
392 | 411 | - added force https option into ini files for easier https usage (no need to |
|
393 | 412 | set server headers with this option) |
|
394 | 413 | - small css updates |
|
395 | 414 | |
|
396 | 415 | fixes |
|
397 | 416 | +++++ |
|
398 | 417 | |
|
399 | 418 | - fixed #96 redirect loop on files view on repositories without changesets |
|
400 | 419 | - fixed #97 unicode string passed into server header in special cases (mod_wsgi) |
|
401 | 420 | and server crashed with errors |
|
402 | 421 | - fixed large tooltips problems on main page |
|
403 | 422 | - fixed #92 whoosh indexer is more error proof |
|
404 | 423 | |
|
405 | 424 | 1.1.0 (**2010-12-18**) |
|
406 | 425 | ---------------------- |
|
407 | 426 | |
|
408 | 427 | news |
|
409 | 428 | ++++ |
|
410 | 429 | |
|
411 | 430 | - rewrite of internals for vcs >=0.1.10 |
|
412 | 431 | - uses mercurial 1.7 with dotencode disabled for maintaining compatibility |
|
413 | 432 | with older clients |
|
414 | 433 | - anonymous access, authentication via ldap |
|
415 | 434 | - performance upgrade for cached repos list - each repository has its own |
|
416 | 435 | cache that's invalidated when needed. |
|
417 | 436 | - performance upgrades on repositories with large amount of commits (20K+) |
|
418 | 437 | - main page quick filter for filtering repositories |
|
419 | 438 | - user dashboards with ability to follow chosen repositories actions |
|
420 | 439 | - sends email to admin on new user registration |
|
421 | 440 | - added cache/statistics reset options into repository settings |
|
422 | 441 | - more detailed action logger (based on hooks) with pushed changesets lists |
|
423 | 442 | and options to disable those hooks from admin panel |
|
424 | 443 | - introduced new enhanced changelog for merges that shows more accurate results |
|
425 | 444 | - new improved and faster code stats (based on pygments lexers mapping tables, |
|
426 | 445 | showing up to 10 trending sources for each repository. Additionally stats |
|
427 | 446 | can be disabled in repository settings. |
|
428 | 447 | - gui optimizations, fixed application width to 1024px |
|
429 | 448 | - added cut off (for large files/changesets) limit into config files |
|
430 | 449 | - whoosh, celeryd, upgrade moved to paster command |
|
431 | 450 | - other than sqlite database backends can be used |
|
432 | 451 | |
|
433 | 452 | fixes |
|
434 | 453 | +++++ |
|
435 | 454 | |
|
436 | 455 | - fixes #61 forked repo was showing only after cache expired |
|
437 | 456 | - fixes #76 no confirmation on user deletes |
|
438 | 457 | - fixes #66 Name field misspelled |
|
439 | 458 | - fixes #72 block user removal when he owns repositories |
|
440 | 459 | - fixes #69 added password confirmation fields |
|
441 | 460 | - fixes #87 RhodeCode crashes occasionally on updating repository owner |
|
442 | 461 | - fixes #82 broken annotations on files with more than 1 blank line at the end |
|
443 | 462 | - a lot of fixes and tweaks for file browser |
|
444 | 463 | - fixed detached session issues |
|
445 | 464 | - fixed when user had no repos he would see all repos listed in my account |
|
446 | 465 | - fixed ui() instance bug when global hgrc settings was loaded for server |
|
447 | 466 | instance and all hgrc options were merged with our db ui() object |
|
448 | 467 | - numerous small bugfixes |
|
449 | 468 | |
|
450 | 469 | (special thanks for TkSoh for detailed feedback) |
|
451 | 470 | |
|
452 | 471 | |
|
453 | 472 | 1.0.2 (**2010-11-12**) |
|
454 | 473 | ---------------------- |
|
455 | 474 | |
|
456 | 475 | news |
|
457 | 476 | ++++ |
|
458 | 477 | |
|
459 | 478 | - tested under python2.7 |
|
460 | 479 | - bumped sqlalchemy and celery versions |
|
461 | 480 | |
|
462 | 481 | fixes |
|
463 | 482 | +++++ |
|
464 | 483 | |
|
465 | 484 | - fixed #59 missing graph.js |
|
466 | 485 | - fixed repo_size crash when repository had broken symlinks |
|
467 | 486 | - fixed python2.5 crashes. |
|
468 | 487 | |
|
469 | 488 | |
|
470 | 489 | 1.0.1 (**2010-11-10**) |
|
471 | 490 | ---------------------- |
|
472 | 491 | |
|
473 | 492 | news |
|
474 | 493 | ++++ |
|
475 | 494 | |
|
476 | 495 | - small css updated |
|
477 | 496 | |
|
478 | 497 | fixes |
|
479 | 498 | +++++ |
|
480 | 499 | |
|
481 | 500 | - fixed #53 python2.5 incompatible enumerate calls |
|
482 | 501 | - fixed #52 disable mercurial extension for web |
|
483 | 502 | - fixed #51 deleting repositories doesn't delete its dependent objects |
|
484 | 503 | |
|
485 | 504 | |
|
486 | 505 | 1.0.0 (**2010-11-02**) |
|
487 | 506 | ---------------------- |
|
488 | 507 | |
|
489 | 508 | - security bugfix simplehg wasn't checking for permissions on commands |
|
490 | 509 | other than pull or push. |
|
491 | 510 | - fixed doubled messages after push or pull in admin journal |
|
492 | 511 | - templating and css corrections, fixed repo switcher on chrome, updated titles |
|
493 | 512 | - admin menu accessible from options menu on repository view |
|
494 | 513 | - permissions cached queries |
|
495 | 514 | |
|
496 | 515 | 1.0.0rc4 (**2010-10-12**) |
|
497 | 516 | -------------------------- |
|
498 | 517 | |
|
499 | 518 | - fixed python2.5 missing simplejson imports (thanks to Jens Bäckman) |
|
500 | 519 | - removed cache_manager settings from sqlalchemy meta |
|
501 | 520 | - added sqlalchemy cache settings to ini files |
|
502 | 521 | - validated password length and added second try of failure on paster setup-app |
|
503 | 522 | - fixed setup database destroy prompt even when there was no db |
|
504 | 523 | |
|
505 | 524 | |
|
506 | 525 | 1.0.0rc3 (**2010-10-11**) |
|
507 | 526 | ------------------------- |
|
508 | 527 | |
|
509 | 528 | - fixed i18n during installation. |
|
510 | 529 | |
|
511 | 530 | 1.0.0rc2 (**2010-10-11**) |
|
512 | 531 | ------------------------- |
|
513 | 532 | |
|
514 | 533 | - Disabled dirsize in file browser, it's causing nasty bug when dir renames |
|
515 | 534 | occur. After vcs is fixed it'll be put back again. |
|
516 | 535 | - templating/css rewrites, optimized css. No newline at end of file |
@@ -1,313 +1,318 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.controllers.admin.repos_groups |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | Repositories groups controller for RhodeCode |
|
7 | 7 | |
|
8 | 8 | :created_on: Mar 23, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | |
|
26 | 26 | import logging |
|
27 | 27 | import traceback |
|
28 | 28 | import formencode |
|
29 | 29 | |
|
30 | 30 | from formencode import htmlfill |
|
31 | 31 | |
|
32 | 32 | from pylons import request, tmpl_context as c, url |
|
33 | 33 | from pylons.controllers.util import redirect |
|
34 | 34 | from pylons.i18n.translation import _ |
|
35 | 35 | |
|
36 | 36 | from sqlalchemy.exc import IntegrityError |
|
37 | 37 | |
|
38 | 38 | from rhodecode.lib import helpers as h |
|
39 | 39 | from rhodecode.lib.auth import LoginRequired, HasPermissionAnyDecorator,\ |
|
40 | 40 | HasReposGroupPermissionAnyDecorator |
|
41 | 41 | from rhodecode.lib.base import BaseController, render |
|
42 | 42 | from rhodecode.model.db import RepoGroup |
|
43 | 43 | from rhodecode.model.repos_group import ReposGroupModel |
|
44 | 44 | from rhodecode.model.forms import ReposGroupForm |
|
45 | 45 | from rhodecode.model.meta import Session |
|
46 | 46 | from rhodecode.model.repo import RepoModel |
|
47 | 47 | from webob.exc import HTTPInternalServerError |
|
48 | 48 | |
|
49 | 49 | log = logging.getLogger(__name__) |
|
50 | 50 | |
|
51 | 51 | |
|
52 | 52 | class ReposGroupsController(BaseController): |
|
53 | 53 | """REST Controller styled on the Atom Publishing Protocol""" |
|
54 | 54 | # To properly map this controller, ensure your config/routing.py |
|
55 | 55 | # file has a resource setup: |
|
56 | 56 | # map.resource('repos_group', 'repos_groups') |
|
57 | 57 | |
|
58 | 58 | @LoginRequired() |
|
59 | 59 | def __before__(self): |
|
60 | 60 | super(ReposGroupsController, self).__before__() |
|
61 | 61 | |
|
62 | 62 | def __load_defaults(self): |
|
63 | 63 | c.repo_groups = RepoGroup.groups_choices() |
|
64 | 64 | c.repo_groups_choices = map(lambda k: unicode(k[0]), c.repo_groups) |
|
65 | 65 | |
|
66 | 66 | repo_model = RepoModel() |
|
67 | 67 | c.users_array = repo_model.get_users_js() |
|
68 | 68 | c.users_groups_array = repo_model.get_users_groups_js() |
|
69 | 69 | |
|
70 | 70 | def __load_data(self, group_id): |
|
71 | 71 | """ |
|
72 | 72 | Load defaults settings for edit, and update |
|
73 | 73 | |
|
74 | 74 | :param group_id: |
|
75 | 75 | """ |
|
76 | 76 | self.__load_defaults() |
|
77 | 77 | |
|
78 | 78 | repo_group = RepoGroup.get(group_id) |
|
79 | 79 | |
|
80 | 80 | data = repo_group.get_dict() |
|
81 | 81 | |
|
82 | 82 | data['group_name'] = repo_group.name |
|
83 | 83 | |
|
84 | 84 | # fill repository users |
|
85 | 85 | for p in repo_group.repo_group_to_perm: |
|
86 | 86 | data.update({'u_perm_%s' % p.user.username: |
|
87 | 87 | p.permission.permission_name}) |
|
88 | 88 | |
|
89 | 89 | # fill repository groups |
|
90 | 90 | for p in repo_group.users_group_to_perm: |
|
91 | 91 | data.update({'g_perm_%s' % p.users_group.users_group_name: |
|
92 | 92 | p.permission.permission_name}) |
|
93 | 93 | |
|
94 | 94 | return data |
|
95 | 95 | |
|
96 | 96 | @HasPermissionAnyDecorator('hg.admin') |
|
97 | 97 | def index(self, format='html'): |
|
98 | 98 | """GET /repos_groups: All items in the collection""" |
|
99 | 99 | # url('repos_groups') |
|
100 | 100 | sk = lambda g: g.parents[0].group_name if g.parents else g.group_name |
|
101 | 101 | c.groups = sorted(RepoGroup.query().all(), key=sk) |
|
102 | 102 | return render('admin/repos_groups/repos_groups_show.html') |
|
103 | 103 | |
|
104 | 104 | @HasPermissionAnyDecorator('hg.admin') |
|
105 | 105 | def create(self): |
|
106 | 106 | """POST /repos_groups: Create a new item""" |
|
107 | 107 | # url('repos_groups') |
|
108 | 108 | self.__load_defaults() |
|
109 | 109 | repos_group_form = ReposGroupForm(available_groups = |
|
110 | 110 | c.repo_groups_choices)() |
|
111 | 111 | try: |
|
112 | 112 | form_result = repos_group_form.to_python(dict(request.POST)) |
|
113 | 113 | ReposGroupModel().create( |
|
114 | 114 | group_name=form_result['group_name'], |
|
115 | 115 | group_description=form_result['group_description'], |
|
116 | 116 | parent=form_result['group_parent_id'] |
|
117 | 117 | ) |
|
118 | 118 | Session.commit() |
|
119 | 119 | h.flash(_('created repos group %s') \ |
|
120 | 120 | % form_result['group_name'], category='success') |
|
121 | 121 | #TODO: in futureaction_logger(, '', '', '', self.sa) |
|
122 | 122 | except formencode.Invalid, errors: |
|
123 | 123 | |
|
124 | 124 | return htmlfill.render( |
|
125 | 125 | render('admin/repos_groups/repos_groups_add.html'), |
|
126 | 126 | defaults=errors.value, |
|
127 | 127 | errors=errors.error_dict or {}, |
|
128 | 128 | prefix_error=False, |
|
129 | 129 | encoding="UTF-8") |
|
130 | 130 | except Exception: |
|
131 | 131 | log.error(traceback.format_exc()) |
|
132 | 132 | h.flash(_('error occurred during creation of repos group %s') \ |
|
133 | 133 | % request.POST.get('group_name'), category='error') |
|
134 | 134 | |
|
135 | 135 | return redirect(url('repos_groups')) |
|
136 | 136 | |
|
137 | 137 | @HasPermissionAnyDecorator('hg.admin') |
|
138 | 138 | def new(self, format='html'): |
|
139 | 139 | """GET /repos_groups/new: Form to create a new item""" |
|
140 | 140 | # url('new_repos_group') |
|
141 | 141 | self.__load_defaults() |
|
142 | 142 | return render('admin/repos_groups/repos_groups_add.html') |
|
143 | 143 | |
|
144 | 144 | @HasPermissionAnyDecorator('hg.admin') |
|
145 | 145 | def update(self, id): |
|
146 | 146 | """PUT /repos_groups/id: Update an existing item""" |
|
147 | 147 | # Forms posted to this method should contain a hidden field: |
|
148 | 148 | # <input type="hidden" name="_method" value="PUT" /> |
|
149 | 149 | # Or using helpers: |
|
150 | 150 | # h.form(url('repos_group', id=ID), |
|
151 | 151 | # method='put') |
|
152 | 152 | # url('repos_group', id=ID) |
|
153 | 153 | |
|
154 | 154 | self.__load_defaults() |
|
155 | 155 | c.repos_group = RepoGroup.get(id) |
|
156 | 156 | |
|
157 | 157 | repos_group_form = ReposGroupForm( |
|
158 | 158 | edit=True, |
|
159 | 159 | old_data=c.repos_group.get_dict(), |
|
160 | 160 | available_groups=c.repo_groups_choices |
|
161 | 161 | )() |
|
162 | 162 | try: |
|
163 | 163 | form_result = repos_group_form.to_python(dict(request.POST)) |
|
164 | 164 | ReposGroupModel().update(id, form_result) |
|
165 | 165 | Session.commit() |
|
166 | 166 | h.flash(_('updated repos group %s') \ |
|
167 | 167 | % form_result['group_name'], category='success') |
|
168 | 168 | #TODO: in futureaction_logger(, '', '', '', self.sa) |
|
169 | 169 | except formencode.Invalid, errors: |
|
170 | 170 | |
|
171 | 171 | return htmlfill.render( |
|
172 | 172 | render('admin/repos_groups/repos_groups_edit.html'), |
|
173 | 173 | defaults=errors.value, |
|
174 | 174 | errors=errors.error_dict or {}, |
|
175 | 175 | prefix_error=False, |
|
176 | 176 | encoding="UTF-8") |
|
177 | 177 | except Exception: |
|
178 | 178 | log.error(traceback.format_exc()) |
|
179 | 179 | h.flash(_('error occurred during update of repos group %s') \ |
|
180 | 180 | % request.POST.get('group_name'), category='error') |
|
181 | 181 | |
|
182 | 182 | return redirect(url('repos_groups')) |
|
183 | 183 | |
|
184 | 184 | @HasPermissionAnyDecorator('hg.admin') |
|
185 | 185 | def delete(self, id): |
|
186 | 186 | """DELETE /repos_groups/id: Delete an existing item""" |
|
187 | 187 | # Forms posted to this method should contain a hidden field: |
|
188 | 188 | # <input type="hidden" name="_method" value="DELETE" /> |
|
189 | 189 | # Or using helpers: |
|
190 | 190 | # h.form(url('repos_group', id=ID), |
|
191 | 191 | # method='delete') |
|
192 | 192 | # url('repos_group', id=ID) |
|
193 | 193 | |
|
194 | 194 | gr = RepoGroup.get(id) |
|
195 | 195 | repos = gr.repositories.all() |
|
196 | 196 | if repos: |
|
197 | 197 | h.flash(_('This group contains %s repositores and cannot be ' |
|
198 | 198 | 'deleted' % len(repos)), |
|
199 | 199 | category='error') |
|
200 | 200 | return redirect(url('repos_groups')) |
|
201 | 201 | |
|
202 | 202 | try: |
|
203 | 203 | ReposGroupModel().delete(id) |
|
204 | 204 | Session.commit() |
|
205 | 205 | h.flash(_('removed repos group %s' % gr.group_name), category='success') |
|
206 | 206 | #TODO: in future action_logger(, '', '', '', self.sa) |
|
207 | 207 | except IntegrityError, e: |
|
208 | 208 | if e.message.find('groups_group_parent_id_fkey') != -1: |
|
209 | 209 | log.error(traceback.format_exc()) |
|
210 | 210 | h.flash(_('Cannot delete this group it still contains ' |
|
211 | 211 | 'subgroups'), |
|
212 | 212 | category='warning') |
|
213 | 213 | else: |
|
214 | 214 | log.error(traceback.format_exc()) |
|
215 | 215 | h.flash(_('error occurred during deletion of repos ' |
|
216 | 216 | 'group %s' % gr.group_name), category='error') |
|
217 | 217 | |
|
218 | 218 | except Exception: |
|
219 | 219 | log.error(traceback.format_exc()) |
|
220 | 220 | h.flash(_('error occurred during deletion of repos ' |
|
221 | 221 | 'group %s' % gr.group_name), category='error') |
|
222 | 222 | |
|
223 | 223 | return redirect(url('repos_groups')) |
|
224 | 224 | |
|
225 | 225 | @HasReposGroupPermissionAnyDecorator('group.admin') |
|
226 | 226 | def delete_repos_group_user_perm(self, group_name): |
|
227 | 227 | """ |
|
228 | 228 | DELETE an existing repositories group permission user |
|
229 | 229 | |
|
230 | 230 | :param group_name: |
|
231 | 231 | """ |
|
232 | 232 | |
|
233 | 233 | try: |
|
234 | 234 | ReposGroupModel().revoke_user_permission( |
|
235 | 235 | repos_group=group_name, user=request.POST['user_id'] |
|
236 | 236 | ) |
|
237 | 237 | Session.commit() |
|
238 | 238 | except Exception: |
|
239 | 239 | log.error(traceback.format_exc()) |
|
240 | 240 | h.flash(_('An error occurred during deletion of group user'), |
|
241 | 241 | category='error') |
|
242 | 242 | raise HTTPInternalServerError() |
|
243 | 243 | |
|
244 | 244 | @HasReposGroupPermissionAnyDecorator('group.admin') |
|
245 | 245 | def delete_repos_group_users_group_perm(self, group_name): |
|
246 | 246 | """ |
|
247 | 247 | DELETE an existing repositories group permission users group |
|
248 | 248 | |
|
249 | 249 | :param group_name: |
|
250 | 250 | """ |
|
251 | 251 | |
|
252 | 252 | try: |
|
253 | 253 | ReposGroupModel().revoke_users_group_permission( |
|
254 | 254 | repos_group=group_name, |
|
255 | 255 | group_name=request.POST['users_group_id'] |
|
256 | 256 | ) |
|
257 | 257 | Session.commit() |
|
258 | 258 | except Exception: |
|
259 | 259 | log.error(traceback.format_exc()) |
|
260 | 260 | h.flash(_('An error occurred during deletion of group' |
|
261 | 261 | ' users groups'), |
|
262 | 262 | category='error') |
|
263 | 263 | raise HTTPInternalServerError() |
|
264 | 264 | |
|
265 | 265 | def show_by_name(self, group_name): |
|
266 | """ | |
|
267 | This is a proxy that does a lookup group_name -> id, and shows | |
|
268 | the group by id view instead | |
|
269 | """ | |
|
270 | group_name = group_name.rstrip('/') | |
|
266 | 271 | id_ = RepoGroup.get_by_group_name(group_name).group_id |
|
267 | 272 | return self.show(id_) |
|
268 | 273 | |
|
269 | 274 | @HasReposGroupPermissionAnyDecorator('group.read', 'group.write', |
|
270 | 275 | 'group.admin') |
|
271 | 276 | def show(self, id, format='html'): |
|
272 | 277 | """GET /repos_groups/id: Show a specific item""" |
|
273 | 278 | # url('repos_group', id=ID) |
|
274 | 279 | |
|
275 | 280 | c.group = RepoGroup.get(id) |
|
276 | 281 | |
|
277 | 282 | if c.group: |
|
278 | 283 | c.group_repos = c.group.repositories.all() |
|
279 | 284 | else: |
|
280 | 285 | return redirect(url('home')) |
|
281 | 286 | |
|
282 | 287 | #overwrite our cached list with current filter |
|
283 | 288 | gr_filter = c.group_repos |
|
284 | 289 | c.cached_repo_list = self.scm_model.get_repos(all_repos=gr_filter) |
|
285 | 290 | |
|
286 | 291 | c.repos_list = c.cached_repo_list |
|
287 | 292 | |
|
288 | 293 | c.repo_cnt = 0 |
|
289 | 294 | |
|
290 | 295 | c.groups = self.sa.query(RepoGroup).order_by(RepoGroup.group_name)\ |
|
291 | 296 | .filter(RepoGroup.group_parent_id == id).all() |
|
292 | 297 | |
|
293 | 298 | return render('admin/repos_groups/repos_groups.html') |
|
294 | 299 | |
|
295 | 300 | @HasPermissionAnyDecorator('hg.admin') |
|
296 | 301 | def edit(self, id, format='html'): |
|
297 | 302 | """GET /repos_groups/id/edit: Form to edit an existing item""" |
|
298 | 303 | # url('edit_repos_group', id=ID) |
|
299 | 304 | |
|
300 | 305 | id_ = int(id) |
|
301 | 306 | |
|
302 | 307 | c.repos_group = RepoGroup.get(id_) |
|
303 | 308 | defaults = self.__load_data(id_) |
|
304 | 309 | |
|
305 | 310 | # we need to exclude this group from the group list for editing |
|
306 | 311 | c.repo_groups = filter(lambda x: x[0] != id_, c.repo_groups) |
|
307 | 312 | |
|
308 | 313 | return htmlfill.render( |
|
309 | 314 | render('admin/repos_groups/repos_groups_edit.html'), |
|
310 | 315 | defaults=defaults, |
|
311 | 316 | encoding="UTF-8", |
|
312 | 317 | force_defaults=False |
|
313 | 318 | ) |
@@ -1,226 +1,227 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.controllers.admin.users_groups |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | Users Groups crud controller for pylons |
|
7 | 7 | |
|
8 | 8 | :created_on: Jan 25, 2011 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | |
|
26 | 26 | import logging |
|
27 | 27 | import traceback |
|
28 | 28 | import formencode |
|
29 | 29 | |
|
30 | 30 | from formencode import htmlfill |
|
31 | 31 | from pylons import request, session, tmpl_context as c, url, config |
|
32 | 32 | from pylons.controllers.util import abort, redirect |
|
33 | 33 | from pylons.i18n.translation import _ |
|
34 | 34 | |
|
35 | 35 | from rhodecode.lib.exceptions import UsersGroupsAssignedException |
|
36 | 36 | from rhodecode.lib import helpers as h, safe_unicode |
|
37 | 37 | from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator |
|
38 | 38 | from rhodecode.lib.base import BaseController, render |
|
39 | 39 | |
|
40 | 40 | from rhodecode.model.users_group import UsersGroupModel |
|
41 | 41 | |
|
42 | 42 | from rhodecode.model.db import User, UsersGroup, Permission, UsersGroupToPerm |
|
43 | 43 | from rhodecode.model.forms import UsersGroupForm |
|
44 | 44 | from rhodecode.model.meta import Session |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger(__name__) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class UsersGroupsController(BaseController): |
|
50 | 50 | """REST Controller styled on the Atom Publishing Protocol""" |
|
51 | 51 | # To properly map this controller, ensure your config/routing.py |
|
52 | 52 | # file has a resource setup: |
|
53 | 53 | # map.resource('users_group', 'users_groups') |
|
54 | 54 | |
|
55 | 55 | @LoginRequired() |
|
56 | 56 | @HasPermissionAllDecorator('hg.admin') |
|
57 | 57 | def __before__(self): |
|
58 | 58 | c.admin_user = session.get('admin_user') |
|
59 | 59 | c.admin_username = session.get('admin_username') |
|
60 | 60 | super(UsersGroupsController, self).__before__() |
|
61 | 61 | c.available_permissions = config['available_permissions'] |
|
62 | 62 | |
|
63 | 63 | def index(self, format='html'): |
|
64 | 64 | """GET /users_groups: All items in the collection""" |
|
65 | 65 | # url('users_groups') |
|
66 | 66 | c.users_groups_list = self.sa.query(UsersGroup).all() |
|
67 | 67 | return render('admin/users_groups/users_groups.html') |
|
68 | 68 | |
|
69 | 69 | def create(self): |
|
70 | 70 | """POST /users_groups: Create a new item""" |
|
71 | 71 | # url('users_groups') |
|
72 | 72 | |
|
73 | 73 | users_group_form = UsersGroupForm()() |
|
74 | 74 | try: |
|
75 | 75 | form_result = users_group_form.to_python(dict(request.POST)) |
|
76 | 76 | UsersGroupModel().create(name=form_result['users_group_name'], |
|
77 | 77 | active=form_result['users_group_active']) |
|
78 | 78 | h.flash(_('created users group %s') \ |
|
79 | 79 | % form_result['users_group_name'], category='success') |
|
80 | 80 | #action_logger(self.rhodecode_user, 'new_user', '', '', self.sa) |
|
81 | 81 | Session.commit() |
|
82 | 82 | except formencode.Invalid, errors: |
|
83 | 83 | return htmlfill.render( |
|
84 | 84 | render('admin/users_groups/users_group_add.html'), |
|
85 | 85 | defaults=errors.value, |
|
86 | 86 | errors=errors.error_dict or {}, |
|
87 | 87 | prefix_error=False, |
|
88 | 88 | encoding="UTF-8") |
|
89 | 89 | except Exception: |
|
90 | 90 | log.error(traceback.format_exc()) |
|
91 | 91 | h.flash(_('error occurred during creation of users group %s') \ |
|
92 | 92 | % request.POST.get('users_group_name'), category='error') |
|
93 | 93 | |
|
94 | 94 | return redirect(url('users_groups')) |
|
95 | 95 | |
|
96 | 96 | def new(self, format='html'): |
|
97 | 97 | """GET /users_groups/new: Form to create a new item""" |
|
98 | 98 | # url('new_users_group') |
|
99 | 99 | return render('admin/users_groups/users_group_add.html') |
|
100 | 100 | |
|
101 | 101 | def update(self, id): |
|
102 | 102 | """PUT /users_groups/id: Update an existing item""" |
|
103 | 103 | # Forms posted to this method should contain a hidden field: |
|
104 | 104 | # <input type="hidden" name="_method" value="PUT" /> |
|
105 | 105 | # Or using helpers: |
|
106 | 106 | # h.form(url('users_group', id=ID), |
|
107 | 107 | # method='put') |
|
108 | 108 | # url('users_group', id=ID) |
|
109 | 109 | |
|
110 | 110 | c.users_group = UsersGroup.get(id) |
|
111 | 111 | c.group_members_obj = [x.user for x in c.users_group.members] |
|
112 | 112 | c.group_members = [(x.user_id, x.username) for x in |
|
113 | 113 | c.group_members_obj] |
|
114 | 114 | |
|
115 | 115 | c.available_members = [(x.user_id, x.username) for x in |
|
116 | 116 | self.sa.query(User).all()] |
|
117 | 117 | |
|
118 | 118 | available_members = [safe_unicode(x[0]) for x in c.available_members] |
|
119 | 119 | |
|
120 | 120 | users_group_form = UsersGroupForm(edit=True, |
|
121 | 121 | old_data=c.users_group.get_dict(), |
|
122 | 122 | available_members=available_members)() |
|
123 | 123 | |
|
124 | 124 | try: |
|
125 | 125 | form_result = users_group_form.to_python(request.POST) |
|
126 | 126 | UsersGroupModel().update(c.users_group, form_result) |
|
127 | 127 | h.flash(_('updated users group %s') \ |
|
128 | 128 | % form_result['users_group_name'], |
|
129 | 129 | category='success') |
|
130 | 130 | #action_logger(self.rhodecode_user, 'new_user', '', '', self.sa) |
|
131 | 131 | Session.commit() |
|
132 | 132 | except formencode.Invalid, errors: |
|
133 | 133 | e = errors.error_dict or {} |
|
134 | 134 | |
|
135 | 135 | perm = Permission.get_by_key('hg.create.repository') |
|
136 | 136 | e.update({'create_repo_perm': |
|
137 | 137 | UsersGroupModel().has_perm(id, perm)}) |
|
138 | 138 | |
|
139 | 139 | return htmlfill.render( |
|
140 | 140 | render('admin/users_groups/users_group_edit.html'), |
|
141 | 141 | defaults=errors.value, |
|
142 | 142 | errors=e, |
|
143 | 143 | prefix_error=False, |
|
144 | 144 | encoding="UTF-8") |
|
145 | 145 | except Exception: |
|
146 | 146 | log.error(traceback.format_exc()) |
|
147 | 147 | h.flash(_('error occurred during update of users group %s') \ |
|
148 | 148 | % request.POST.get('users_group_name'), category='error') |
|
149 | 149 | |
|
150 | 150 | return redirect(url('users_groups')) |
|
151 | 151 | |
|
152 | 152 | def delete(self, id): |
|
153 | 153 | """DELETE /users_groups/id: Delete an existing item""" |
|
154 | 154 | # Forms posted to this method should contain a hidden field: |
|
155 | 155 | # <input type="hidden" name="_method" value="DELETE" /> |
|
156 | 156 | # Or using helpers: |
|
157 | 157 | # h.form(url('users_group', id=ID), |
|
158 | 158 | # method='delete') |
|
159 | 159 | # url('users_group', id=ID) |
|
160 | 160 | |
|
161 | 161 | try: |
|
162 | 162 | UsersGroupModel().delete(id) |
|
163 | Session.commit() | |
|
163 | 164 | h.flash(_('successfully deleted users group'), category='success') |
|
164 | Session.commit() | |
|
165 | 165 | except UsersGroupsAssignedException, e: |
|
166 | 166 | h.flash(e, category='error') |
|
167 | 167 | except Exception: |
|
168 | log.error(traceback.format_exc()) | |
|
168 | 169 | h.flash(_('An error occurred during deletion of users group'), |
|
169 | 170 | category='error') |
|
170 | 171 | return redirect(url('users_groups')) |
|
171 | 172 | |
|
172 | 173 | def show(self, id, format='html'): |
|
173 | 174 | """GET /users_groups/id: Show a specific item""" |
|
174 | 175 | # url('users_group', id=ID) |
|
175 | 176 | |
|
176 | 177 | def edit(self, id, format='html'): |
|
177 | 178 | """GET /users_groups/id/edit: Form to edit an existing item""" |
|
178 | 179 | # url('edit_users_group', id=ID) |
|
179 | 180 | |
|
180 | 181 | c.users_group = self.sa.query(UsersGroup).get(id) |
|
181 | 182 | if not c.users_group: |
|
182 | 183 | return redirect(url('users_groups')) |
|
183 | 184 | |
|
184 | 185 | c.users_group.permissions = {} |
|
185 | 186 | c.group_members_obj = [x.user for x in c.users_group.members] |
|
186 | 187 | c.group_members = [(x.user_id, x.username) for x in |
|
187 | 188 | c.group_members_obj] |
|
188 | 189 | c.available_members = [(x.user_id, x.username) for x in |
|
189 | 190 | self.sa.query(User).all()] |
|
190 | 191 | defaults = c.users_group.get_dict() |
|
191 | 192 | perm = Permission.get_by_key('hg.create.repository') |
|
192 | 193 | defaults.update({'create_repo_perm': |
|
193 | 194 | UsersGroupModel().has_perm(c.users_group, perm)}) |
|
194 | 195 | return htmlfill.render( |
|
195 | 196 | render('admin/users_groups/users_group_edit.html'), |
|
196 | 197 | defaults=defaults, |
|
197 | 198 | encoding="UTF-8", |
|
198 | 199 | force_defaults=False |
|
199 | 200 | ) |
|
200 | 201 | |
|
201 | 202 | def update_perm(self, id): |
|
202 | 203 | """PUT /users_perm/id: Update an existing item""" |
|
203 | 204 | # url('users_group_perm', id=ID, method='put') |
|
204 | 205 | |
|
205 | 206 | grant_perm = request.POST.get('create_repo_perm', False) |
|
206 | 207 | |
|
207 | 208 | if grant_perm: |
|
208 | 209 | perm = Permission.get_by_key('hg.create.none') |
|
209 | 210 | UsersGroupModel().revoke_perm(id, perm) |
|
210 | 211 | |
|
211 | 212 | perm = Permission.get_by_key('hg.create.repository') |
|
212 | 213 | UsersGroupModel().grant_perm(id, perm) |
|
213 | 214 | h.flash(_("Granted 'repository create' permission to user"), |
|
214 | 215 | category='success') |
|
215 | 216 | |
|
216 | 217 | Session.commit() |
|
217 | 218 | else: |
|
218 | 219 | perm = Permission.get_by_key('hg.create.repository') |
|
219 | 220 | UsersGroupModel().revoke_perm(id, perm) |
|
220 | 221 | |
|
221 | 222 | perm = Permission.get_by_key('hg.create.none') |
|
222 | 223 | UsersGroupModel().grant_perm(id, perm) |
|
223 | 224 | h.flash(_("Revoked 'repository create' permission to user"), |
|
224 | 225 | category='success') |
|
225 | 226 | Session.commit() |
|
226 | 227 | return redirect(url('edit_users_group', id=id)) |
@@ -1,299 +1,301 b'' | |||
|
1 | 1 | """caching_query.py |
|
2 | 2 | |
|
3 | 3 | Represent persistence structures which allow the usage of |
|
4 | 4 | Beaker caching with SQLAlchemy. |
|
5 | 5 | |
|
6 | 6 | The three new concepts introduced here are: |
|
7 | 7 | |
|
8 | 8 | * CachingQuery - a Query subclass that caches and |
|
9 | 9 | retrieves results in/from Beaker. |
|
10 | 10 | * FromCache - a query option that establishes caching |
|
11 | 11 | parameters on a Query |
|
12 | 12 | * RelationshipCache - a variant of FromCache which is specific |
|
13 | 13 | to a query invoked during a lazy load. |
|
14 | 14 | * _params_from_query - extracts value parameters from |
|
15 | 15 | a Query. |
|
16 | 16 | |
|
17 | 17 | The rest of what's here are standard SQLAlchemy and |
|
18 | 18 | Beaker constructs. |
|
19 | 19 | |
|
20 | 20 | """ |
|
21 | 21 | import beaker |
|
22 | 22 | from beaker.exceptions import BeakerException |
|
23 | 23 | |
|
24 | 24 | from sqlalchemy.orm.interfaces import MapperOption |
|
25 | 25 | from sqlalchemy.orm.query import Query |
|
26 | 26 | from sqlalchemy.sql import visitors |
|
27 | from rhodecode.lib import safe_str | |
|
27 | 28 | |
|
28 | 29 | |
|
29 | 30 | class CachingQuery(Query): |
|
30 | 31 | """A Query subclass which optionally loads full results from a Beaker |
|
31 | 32 | cache region. |
|
32 | 33 | |
|
33 | 34 | The CachingQuery stores additional state that allows it to consult |
|
34 | 35 | a Beaker cache before accessing the database: |
|
35 | 36 | |
|
36 | 37 | * A "region", which is a cache region argument passed to a |
|
37 | 38 | Beaker CacheManager, specifies a particular cache configuration |
|
38 | 39 | (including backend implementation, expiration times, etc.) |
|
39 | 40 | * A "namespace", which is a qualifying name that identifies a |
|
40 | 41 | group of keys within the cache. A query that filters on a name |
|
41 | 42 | might use the name "by_name", a query that filters on a date range |
|
42 | 43 | to a joined table might use the name "related_date_range". |
|
43 | 44 | |
|
44 | 45 | When the above state is present, a Beaker cache is retrieved. |
|
45 | 46 | |
|
46 | 47 | The "namespace" name is first concatenated with |
|
47 | 48 | a string composed of the individual entities and columns the Query |
|
48 | 49 | requests, i.e. such as ``Query(User.id, User.name)``. |
|
49 | 50 | |
|
50 | 51 | The Beaker cache is then loaded from the cache manager based |
|
51 | 52 | on the region and composed namespace. The key within the cache |
|
52 | 53 | itself is then constructed against the bind parameters specified |
|
53 | 54 | by this query, which are usually literals defined in the |
|
54 | 55 | WHERE clause. |
|
55 | 56 | |
|
56 | 57 | The FromCache and RelationshipCache mapper options below represent |
|
57 | 58 | the "public" method of configuring this state upon the CachingQuery. |
|
58 | 59 | |
|
59 | 60 | """ |
|
60 | 61 | |
|
61 | 62 | def __init__(self, manager, *args, **kw): |
|
62 | 63 | self.cache_manager = manager |
|
63 | 64 | Query.__init__(self, *args, **kw) |
|
64 | 65 | |
|
65 | 66 | def __iter__(self): |
|
66 | 67 | """override __iter__ to pull results from Beaker |
|
67 | 68 | if particular attributes have been configured. |
|
68 | 69 | |
|
69 | 70 | Note that this approach does *not* detach the loaded objects from |
|
70 | 71 | the current session. If the cache backend is an in-process cache |
|
71 | 72 | (like "memory") and lives beyond the scope of the current session's |
|
72 | 73 | transaction, those objects may be expired. The method here can be |
|
73 | 74 | modified to first expunge() each loaded item from the current |
|
74 | 75 | session before returning the list of items, so that the items |
|
75 | 76 | in the cache are not the same ones in the current Session. |
|
76 | 77 | |
|
77 | 78 | """ |
|
78 | 79 | if hasattr(self, '_cache_parameters'): |
|
79 | 80 | return self.get_value(createfunc=lambda: |
|
80 | 81 | list(Query.__iter__(self))) |
|
81 | 82 | else: |
|
82 | 83 | return Query.__iter__(self) |
|
83 | 84 | |
|
84 | 85 | def invalidate(self): |
|
85 | 86 | """Invalidate the value represented by this Query.""" |
|
86 | 87 | |
|
87 | 88 | cache, cache_key = _get_cache_parameters(self) |
|
88 | 89 | cache.remove(cache_key) |
|
89 | 90 | |
|
90 | 91 | def get_value(self, merge=True, createfunc=None): |
|
91 | 92 | """Return the value from the cache for this query. |
|
92 | 93 | |
|
93 | 94 | Raise KeyError if no value present and no |
|
94 | 95 | createfunc specified. |
|
95 | 96 | |
|
96 | 97 | """ |
|
97 | 98 | cache, cache_key = _get_cache_parameters(self) |
|
98 | 99 | ret = cache.get_value(cache_key, createfunc=createfunc) |
|
99 | 100 | if merge: |
|
100 | 101 | ret = self.merge_result(ret, load=False) |
|
101 | 102 | return ret |
|
102 | 103 | |
|
103 | 104 | def set_value(self, value): |
|
104 | 105 | """Set the value in the cache for this query.""" |
|
105 | 106 | |
|
106 | 107 | cache, cache_key = _get_cache_parameters(self) |
|
107 | 108 | cache.put(cache_key, value) |
|
108 | 109 | |
|
109 | 110 | |
|
110 | 111 | def query_callable(manager, query_cls=CachingQuery): |
|
111 | 112 | def query(*arg, **kw): |
|
112 | 113 | return query_cls(manager, *arg, **kw) |
|
113 | 114 | return query |
|
114 | 115 | |
|
115 | 116 | |
|
116 | 117 | def get_cache_region(name, region): |
|
117 | 118 | if region not in beaker.cache.cache_regions: |
|
118 | 119 | raise BeakerException('Cache region `%s` not configured ' |
|
119 | 120 | 'Check if proper cache settings are in the .ini files' % region) |
|
120 | 121 | kw = beaker.cache.cache_regions[region] |
|
121 | 122 | return beaker.cache.Cache._get_cache(name, kw) |
|
122 | 123 | |
|
123 | 124 | |
|
124 | 125 | def _get_cache_parameters(query): |
|
125 | 126 | """For a query with cache_region and cache_namespace configured, |
|
126 | 127 | return the correspoinding Cache instance and cache key, based |
|
127 | 128 | on this query's current criterion and parameter values. |
|
128 | 129 | |
|
129 | 130 | """ |
|
130 | 131 | if not hasattr(query, '_cache_parameters'): |
|
131 | 132 | raise ValueError("This Query does not have caching " |
|
132 | 133 | "parameters configured.") |
|
133 | 134 | |
|
134 | 135 | region, namespace, cache_key = query._cache_parameters |
|
135 | 136 | |
|
136 | 137 | namespace = _namespace_from_query(namespace, query) |
|
137 | 138 | |
|
138 | 139 | if cache_key is None: |
|
139 | 140 | # cache key - the value arguments from this query's parameters. |
|
140 | args = [str(x) for x in _params_from_query(query)] | |
|
141 | args = [safe_str(x) for x in _params_from_query(query)] | |
|
141 | 142 | args.extend(filter(lambda k:k not in ['None', None, u'None'], |
|
142 | 143 | [str(query._limit), str(query._offset)])) |
|
144 | ||
|
143 | 145 | cache_key = " ".join(args) |
|
144 | 146 | |
|
145 | 147 | if cache_key is None: |
|
146 | 148 | raise Exception('Cache key cannot be None') |
|
147 | 149 | |
|
148 | 150 | # get cache |
|
149 | 151 | #cache = query.cache_manager.get_cache_region(namespace, region) |
|
150 | 152 | cache = get_cache_region(namespace, region) |
|
151 | 153 | # optional - hash the cache_key too for consistent length |
|
152 | 154 | # import uuid |
|
153 | 155 | # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key)) |
|
154 | 156 | |
|
155 | 157 | return cache, cache_key |
|
156 | 158 | |
|
157 | 159 | |
|
158 | 160 | def _namespace_from_query(namespace, query): |
|
159 | 161 | # cache namespace - the token handed in by the |
|
160 | 162 | # option + class we're querying against |
|
161 | 163 | namespace = " ".join([namespace] + [str(x) for x in query._entities]) |
|
162 | 164 | |
|
163 | 165 | # memcached wants this |
|
164 | 166 | namespace = namespace.replace(' ', '_') |
|
165 | 167 | |
|
166 | 168 | return namespace |
|
167 | 169 | |
|
168 | 170 | |
|
169 | 171 | def _set_cache_parameters(query, region, namespace, cache_key): |
|
170 | 172 | |
|
171 | 173 | if hasattr(query, '_cache_parameters'): |
|
172 | 174 | region, namespace, cache_key = query._cache_parameters |
|
173 | 175 | raise ValueError("This query is already configured " |
|
174 | 176 | "for region %r namespace %r" % |
|
175 | 177 | (region, namespace) |
|
176 | 178 | ) |
|
177 | 179 | query._cache_parameters = region, namespace, cache_key |
|
178 | 180 | |
|
179 | 181 | |
|
180 | 182 | class FromCache(MapperOption): |
|
181 | 183 | """Specifies that a Query should load results from a cache.""" |
|
182 | 184 | |
|
183 | 185 | propagate_to_loaders = False |
|
184 | 186 | |
|
185 | 187 | def __init__(self, region, namespace, cache_key=None): |
|
186 | 188 | """Construct a new FromCache. |
|
187 | 189 | |
|
188 | 190 | :param region: the cache region. Should be a |
|
189 | 191 | region configured in the Beaker CacheManager. |
|
190 | 192 | |
|
191 | 193 | :param namespace: the cache namespace. Should |
|
192 | 194 | be a name uniquely describing the target Query's |
|
193 | 195 | lexical structure. |
|
194 | 196 | |
|
195 | 197 | :param cache_key: optional. A string cache key |
|
196 | 198 | that will serve as the key to the query. Use this |
|
197 | 199 | if your query has a huge amount of parameters (such |
|
198 | 200 | as when using in_()) which correspond more simply to |
|
199 | 201 | some other identifier. |
|
200 | 202 | |
|
201 | 203 | """ |
|
202 | 204 | self.region = region |
|
203 | 205 | self.namespace = namespace |
|
204 | 206 | self.cache_key = cache_key |
|
205 | 207 | |
|
206 | 208 | def process_query(self, query): |
|
207 | 209 | """Process a Query during normal loading operation.""" |
|
208 | 210 | |
|
209 | 211 | _set_cache_parameters(query, self.region, self.namespace, |
|
210 | 212 | self.cache_key) |
|
211 | 213 | |
|
212 | 214 | |
|
213 | 215 | class RelationshipCache(MapperOption): |
|
214 | 216 | """Specifies that a Query as called within a "lazy load" |
|
215 | 217 | should load results from a cache.""" |
|
216 | 218 | |
|
217 | 219 | propagate_to_loaders = True |
|
218 | 220 | |
|
219 | 221 | def __init__(self, region, namespace, attribute): |
|
220 | 222 | """Construct a new RelationshipCache. |
|
221 | 223 | |
|
222 | 224 | :param region: the cache region. Should be a |
|
223 | 225 | region configured in the Beaker CacheManager. |
|
224 | 226 | |
|
225 | 227 | :param namespace: the cache namespace. Should |
|
226 | 228 | be a name uniquely describing the target Query's |
|
227 | 229 | lexical structure. |
|
228 | 230 | |
|
229 | 231 | :param attribute: A Class.attribute which |
|
230 | 232 | indicates a particular class relationship() whose |
|
231 | 233 | lazy loader should be pulled from the cache. |
|
232 | 234 | |
|
233 | 235 | """ |
|
234 | 236 | self.region = region |
|
235 | 237 | self.namespace = namespace |
|
236 | 238 | self._relationship_options = { |
|
237 | 239 | (attribute.property.parent.class_, attribute.property.key): self |
|
238 | 240 | } |
|
239 | 241 | |
|
240 | 242 | def process_query_conditionally(self, query): |
|
241 | 243 | """Process a Query that is used within a lazy loader. |
|
242 | 244 | |
|
243 | 245 | (the process_query_conditionally() method is a SQLAlchemy |
|
244 | 246 | hook invoked only within lazyload.) |
|
245 | 247 | |
|
246 | 248 | """ |
|
247 | 249 | if query._current_path: |
|
248 | 250 | mapper, key = query._current_path[-2:] |
|
249 | 251 | |
|
250 | 252 | for cls in mapper.class_.__mro__: |
|
251 | 253 | if (cls, key) in self._relationship_options: |
|
252 | 254 | relationship_option = \ |
|
253 | 255 | self._relationship_options[(cls, key)] |
|
254 | 256 | _set_cache_parameters( |
|
255 | 257 | query, |
|
256 | 258 | relationship_option.region, |
|
257 | 259 | relationship_option.namespace, |
|
258 | 260 | None) |
|
259 | 261 | |
|
260 | 262 | def and_(self, option): |
|
261 | 263 | """Chain another RelationshipCache option to this one. |
|
262 | 264 | |
|
263 | 265 | While many RelationshipCache objects can be specified on a single |
|
264 | 266 | Query separately, chaining them together allows for a more efficient |
|
265 | 267 | lookup during load. |
|
266 | 268 | |
|
267 | 269 | """ |
|
268 | 270 | self._relationship_options.update(option._relationship_options) |
|
269 | 271 | return self |
|
270 | 272 | |
|
271 | 273 | |
|
272 | 274 | def _params_from_query(query): |
|
273 | 275 | """Pull the bind parameter values from a query. |
|
274 | 276 | |
|
275 | 277 | This takes into account any scalar attribute bindparam set up. |
|
276 | 278 | |
|
277 | 279 | E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7))) |
|
278 | 280 | would return [5, 7]. |
|
279 | 281 | |
|
280 | 282 | """ |
|
281 | 283 | v = [] |
|
282 | 284 | def visit_bindparam(bind): |
|
283 | 285 | |
|
284 | 286 | if bind.key in query._params: |
|
285 | 287 | value = query._params[bind.key] |
|
286 | 288 | elif bind.callable: |
|
287 | 289 | # lazyloader may dig a callable in here, intended |
|
288 | 290 | # to late-evaluate params after autoflush is called. |
|
289 | 291 | # convert to a scalar value. |
|
290 | 292 | value = bind.callable() |
|
291 | 293 | else: |
|
292 | 294 | value = bind.value |
|
293 | 295 | |
|
294 | 296 | v.append(value) |
|
295 | 297 | if query._criterion is not None: |
|
296 | 298 | visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam}) |
|
297 | 299 | for f in query._from_obj: |
|
298 | 300 | visitors.traverse(f, {}, {'bindparam':visit_bindparam}) |
|
299 | 301 | return v |
@@ -1,54 +1,62 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.lib.middleware.https_fixup |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | middleware to handle https correctly |
|
7 | 7 | |
|
8 | 8 | :created_on: May 23, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | |
|
26 | 26 | from rhodecode.lib import str2bool |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | class HttpsFixup(object): |
|
30 | 30 | |
|
31 | 31 | def __init__(self, app, config): |
|
32 | 32 | self.application = app |
|
33 | 33 | self.config = config |
|
34 | 34 | |
|
35 | 35 | def __call__(self, environ, start_response): |
|
36 | 36 | self.__fixup(environ) |
|
37 | 37 | return self.application(environ, start_response) |
|
38 | 38 | |
|
39 | 39 | def __fixup(self, environ): |
|
40 | 40 | """ |
|
41 | 41 | Function to fixup the environ as needed. In order to use this |
|
42 | 42 | middleware you should set this header inside your |
|
43 | 43 | proxy ie. nginx, apache etc. |
|
44 | 44 | """ |
|
45 | proto = environ.get('HTTP_X_URL_SCHEME') | |
|
46 | 45 | |
|
47 | 46 | if str2bool(self.config.get('force_https')): |
|
48 | 47 | proto = 'https' |
|
49 | ||
|
48 | else: | |
|
49 | if 'HTTP_X_URL_SCHEME' in environ: | |
|
50 | proto = environ.get('HTTP_X_URL_SCHEME') | |
|
51 | elif 'HTTP_X_FORWARDED_SCHEME' in environ: | |
|
52 | proto = environ.get('HTTP_X_FORWARDED_SCHEME') | |
|
53 | elif 'HTTP_X_FORWARDED_PROTO' in environ: | |
|
54 | proto = environ.get('HTTP_X_FORWARDED_PROTO') | |
|
55 | else: | |
|
56 | proto = 'http' | |
|
50 | 57 | if proto == 'https': |
|
51 | 58 | environ['wsgi.url_scheme'] = proto |
|
52 | 59 | else: |
|
53 | 60 | environ['wsgi.url_scheme'] = 'http' |
|
61 | ||
|
54 | 62 | return None |
@@ -1,248 +1,247 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.lib.middleware.simplegit |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | SimpleGit middleware for handling git protocol request (push/clone etc.) |
|
7 | 7 | It's implemented with basic auth function |
|
8 | 8 | |
|
9 | 9 | :created_on: Apr 28, 2010 |
|
10 | 10 | :author: marcink |
|
11 | 11 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
12 | 12 | :license: GPLv3, see COPYING for more details. |
|
13 | 13 | """ |
|
14 | 14 | # This program is free software: you can redistribute it and/or modify |
|
15 | 15 | # it under the terms of the GNU General Public License as published by |
|
16 | 16 | # the Free Software Foundation, either version 3 of the License, or |
|
17 | 17 | # (at your option) any later version. |
|
18 | 18 | # |
|
19 | 19 | # This program is distributed in the hope that it will be useful, |
|
20 | 20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
21 | 21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
22 | 22 | # GNU General Public License for more details. |
|
23 | 23 | # |
|
24 | 24 | # You should have received a copy of the GNU General Public License |
|
25 | 25 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
26 | 26 | |
|
27 | 27 | import os |
|
28 | import re | |
|
28 | 29 | import logging |
|
29 | 30 | import traceback |
|
30 | 31 | |
|
31 | 32 | from dulwich import server as dulserver |
|
32 | 33 | |
|
33 | 34 | |
|
34 | 35 | class SimpleGitUploadPackHandler(dulserver.UploadPackHandler): |
|
35 | 36 | |
|
36 | 37 | def handle(self): |
|
37 | 38 | write = lambda x: self.proto.write_sideband(1, x) |
|
38 | 39 | |
|
39 | 40 | graph_walker = dulserver.ProtocolGraphWalker(self, |
|
40 | 41 | self.repo.object_store, |
|
41 | 42 | self.repo.get_peeled) |
|
42 | 43 | objects_iter = self.repo.fetch_objects( |
|
43 | 44 | graph_walker.determine_wants, graph_walker, self.progress, |
|
44 | 45 | get_tagged=self.get_tagged) |
|
45 | 46 | |
|
46 | 47 | # Do they want any objects? |
|
47 | 48 | if objects_iter is None or len(objects_iter) == 0: |
|
48 | 49 | return |
|
49 | 50 | |
|
50 | 51 | self.progress("counting objects: %d, done.\n" % len(objects_iter)) |
|
51 | 52 | dulserver.write_pack_objects(dulserver.ProtocolFile(None, write), |
|
52 | 53 | objects_iter, len(objects_iter)) |
|
53 | 54 | messages = [] |
|
54 | 55 | messages.append('thank you for using rhodecode') |
|
55 | 56 | |
|
56 | 57 | for msg in messages: |
|
57 | 58 | self.progress(msg + "\n") |
|
58 | 59 | # we are done |
|
59 | 60 | self.proto.write("0000") |
|
60 | 61 | |
|
61 | 62 | dulserver.DEFAULT_HANDLERS = { |
|
62 | 63 | 'git-upload-pack': SimpleGitUploadPackHandler, |
|
63 | 64 | 'git-receive-pack': dulserver.ReceivePackHandler, |
|
64 | 65 | } |
|
65 | 66 | |
|
66 | 67 | from dulwich.repo import Repo |
|
67 | 68 | from dulwich.web import HTTPGitApplication |
|
68 | 69 | |
|
69 | 70 | from paste.httpheaders import REMOTE_USER, AUTH_TYPE |
|
70 | 71 | |
|
71 | 72 | from rhodecode.lib import safe_str |
|
72 | 73 | from rhodecode.lib.base import BaseVCSController |
|
73 | 74 | from rhodecode.lib.auth import get_container_username |
|
74 | 75 | from rhodecode.lib.utils import is_valid_repo |
|
75 | 76 | from rhodecode.model.db import User |
|
76 | 77 | |
|
77 | 78 | from webob.exc import HTTPNotFound, HTTPForbidden, HTTPInternalServerError |
|
78 | 79 | |
|
79 | 80 | log = logging.getLogger(__name__) |
|
80 | 81 | |
|
81 | 82 | |
|
82 | def is_git(environ): | |
|
83 | """Returns True if request's target is git server. | |
|
84 | ``HTTP_USER_AGENT`` would then have git client version given. | |
|
83 | GIT_PROTO_PAT = re.compile(r'^/(.+)/(info/refs|git-upload-pack|git-receive-pack)') | |
|
84 | ||
|
85 | 85 | |
|
86 | :param environ: | |
|
87 | """ | |
|
88 | http_user_agent = environ.get('HTTP_USER_AGENT') | |
|
89 | if http_user_agent and http_user_agent.startswith('git'): | |
|
90 | return True | |
|
91 | return False | |
|
86 | def is_git(environ): | |
|
87 | path_info = environ['PATH_INFO'] | |
|
88 | isgit_path = GIT_PROTO_PAT.match(path_info) | |
|
89 | log.debug('is a git path %s pathinfo : %s' % (isgit_path, path_info)) | |
|
90 | return isgit_path | |
|
92 | 91 | |
|
93 | 92 | |
|
94 | 93 | class SimpleGit(BaseVCSController): |
|
95 | 94 | |
|
96 | 95 | def _handle_request(self, environ, start_response): |
|
96 | ||
|
97 | 97 | if not is_git(environ): |
|
98 | 98 | return self.application(environ, start_response) |
|
99 | 99 | |
|
100 | 100 | proxy_key = 'HTTP_X_REAL_IP' |
|
101 | 101 | def_key = 'REMOTE_ADDR' |
|
102 | 102 | ipaddr = environ.get(proxy_key, environ.get(def_key, '0.0.0.0')) |
|
103 | 103 | username = None |
|
104 | 104 | # skip passing error to error controller |
|
105 | 105 | environ['pylons.status_code_redirect'] = True |
|
106 | 106 | |
|
107 | 107 | #====================================================================== |
|
108 | 108 | # EXTRACT REPOSITORY NAME FROM ENV |
|
109 | 109 | #====================================================================== |
|
110 | 110 | try: |
|
111 | 111 | repo_name = self.__get_repository(environ) |
|
112 | 112 | log.debug('Extracted repo name is %s' % repo_name) |
|
113 | 113 | except: |
|
114 | 114 | return HTTPInternalServerError()(environ, start_response) |
|
115 | 115 | |
|
116 | 116 | #====================================================================== |
|
117 | 117 | # GET ACTION PULL or PUSH |
|
118 | 118 | #====================================================================== |
|
119 | 119 | action = self.__get_action(environ) |
|
120 | 120 | |
|
121 | 121 | #====================================================================== |
|
122 | 122 | # CHECK ANONYMOUS PERMISSION |
|
123 | 123 | #====================================================================== |
|
124 | 124 | if action in ['pull', 'push']: |
|
125 | 125 | anonymous_user = self.__get_user('default') |
|
126 | 126 | username = anonymous_user.username |
|
127 | 127 | anonymous_perm = self._check_permission(action, anonymous_user, |
|
128 | 128 | repo_name) |
|
129 | 129 | |
|
130 | 130 | if anonymous_perm is not True or anonymous_user.active is False: |
|
131 | 131 | if anonymous_perm is not True: |
|
132 | 132 | log.debug('Not enough credentials to access this ' |
|
133 | 133 | 'repository as anonymous user') |
|
134 | 134 | if anonymous_user.active is False: |
|
135 | 135 | log.debug('Anonymous access is disabled, running ' |
|
136 | 136 | 'authentication') |
|
137 | 137 | #============================================================== |
|
138 | 138 | # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE |
|
139 | 139 | # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS |
|
140 | 140 | #============================================================== |
|
141 | 141 | |
|
142 | 142 | # Attempting to retrieve username from the container |
|
143 | 143 | username = get_container_username(environ, self.config) |
|
144 | 144 | |
|
145 | 145 | # If not authenticated by the container, running basic auth |
|
146 | 146 | if not username: |
|
147 | 147 | self.authenticate.realm = \ |
|
148 | 148 | safe_str(self.config['rhodecode_realm']) |
|
149 | 149 | result = self.authenticate(environ) |
|
150 | 150 | if isinstance(result, str): |
|
151 | 151 | AUTH_TYPE.update(environ, 'basic') |
|
152 | 152 | REMOTE_USER.update(environ, result) |
|
153 | 153 | username = result |
|
154 | 154 | else: |
|
155 | 155 | return result.wsgi_application(environ, start_response) |
|
156 | 156 | |
|
157 | 157 | #============================================================== |
|
158 | 158 | # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME |
|
159 | 159 | #============================================================== |
|
160 | 160 | if action in ['pull', 'push']: |
|
161 | 161 | try: |
|
162 | 162 | user = self.__get_user(username) |
|
163 | 163 | if user is None or not user.active: |
|
164 | 164 | return HTTPForbidden()(environ, start_response) |
|
165 | 165 | username = user.username |
|
166 | 166 | except: |
|
167 | 167 | log.error(traceback.format_exc()) |
|
168 | 168 | return HTTPInternalServerError()(environ, |
|
169 | 169 | start_response) |
|
170 | 170 | |
|
171 | 171 | #check permissions for this repository |
|
172 | 172 | perm = self._check_permission(action, user, |
|
173 | 173 | repo_name) |
|
174 | 174 | if perm is not True: |
|
175 | 175 | return HTTPForbidden()(environ, start_response) |
|
176 | 176 | |
|
177 | 177 | #=================================================================== |
|
178 | 178 | # GIT REQUEST HANDLING |
|
179 | 179 | #=================================================================== |
|
180 | 180 | |
|
181 | 181 | repo_path = safe_str(os.path.join(self.basepath, repo_name)) |
|
182 | 182 | log.debug('Repository path is %s' % repo_path) |
|
183 | 183 | |
|
184 | 184 | # quick check if that dir exists... |
|
185 | 185 | if is_valid_repo(repo_name, self.basepath) is False: |
|
186 | 186 | return HTTPNotFound()(environ, start_response) |
|
187 | 187 | |
|
188 | 188 | try: |
|
189 | 189 | #invalidate cache on push |
|
190 | 190 | if action == 'push': |
|
191 | 191 | self._invalidate_cache(repo_name) |
|
192 | 192 | log.info('%s action on GIT repo "%s"' % (action, repo_name)) |
|
193 | 193 | app = self.__make_app(repo_name, repo_path) |
|
194 | 194 | return app(environ, start_response) |
|
195 | 195 | except Exception: |
|
196 | 196 | log.error(traceback.format_exc()) |
|
197 | 197 | return HTTPInternalServerError()(environ, start_response) |
|
198 | 198 | |
|
199 | 199 | def __make_app(self, repo_name, repo_path): |
|
200 | 200 | """ |
|
201 | 201 | Make an wsgi application using dulserver |
|
202 | 202 | |
|
203 | 203 | :param repo_name: name of the repository |
|
204 | 204 | :param repo_path: full path to the repository |
|
205 | 205 | """ |
|
206 | 206 | |
|
207 | 207 | _d = {'/' + repo_name: Repo(repo_path)} |
|
208 | 208 | backend = dulserver.DictBackend(_d) |
|
209 | 209 | gitserve = HTTPGitApplication(backend) |
|
210 | 210 | |
|
211 | 211 | return gitserve |
|
212 | 212 | |
|
213 | 213 | def __get_repository(self, environ): |
|
214 | 214 | """ |
|
215 | 215 | Get's repository name out of PATH_INFO header |
|
216 | 216 | |
|
217 | 217 | :param environ: environ where PATH_INFO is stored |
|
218 | 218 | """ |
|
219 | 219 | try: |
|
220 | 220 | environ['PATH_INFO'] = self._get_by_id(environ['PATH_INFO']) |
|
221 |
repo_name = |
|
|
222 | if repo_name.endswith('/'): | |
|
223 | repo_name = repo_name.rstrip('/') | |
|
221 | repo_name = GIT_PROTO_PAT.match(environ['PATH_INFO']).group(1) | |
|
224 | 222 | except: |
|
225 | 223 | log.error(traceback.format_exc()) |
|
226 | 224 | raise |
|
227 | repo_name = repo_name.split('/')[0] | |
|
225 | ||
|
228 | 226 | return repo_name |
|
229 | 227 | |
|
230 | 228 | def __get_user(self, username): |
|
231 | 229 | return User.get_by_username(username) |
|
232 | 230 | |
|
233 | 231 | def __get_action(self, environ): |
|
234 | 232 | """Maps git request commands into a pull or push command. |
|
235 | 233 | |
|
236 | 234 | :param environ: |
|
237 | 235 | """ |
|
238 | 236 | service = environ['QUERY_STRING'].split('=') |
|
239 | 237 | if len(service) > 1: |
|
240 | 238 | service_cmd = service[1] |
|
241 |
mapping = { |
|
|
239 | mapping = { | |
|
240 | 'git-receive-pack': 'push', | |
|
242 | 241 |
|
|
243 | 242 |
|
|
244 | 243 | |
|
245 | 244 | return mapping.get(service_cmd, |
|
246 | 245 | service_cmd if service_cmd else 'other') |
|
247 | 246 | else: |
|
248 | 247 | return 'other' |
@@ -1,616 +1,622 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.lib.utils |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | Utilities library for RhodeCode |
|
7 | 7 | |
|
8 | 8 | :created_on: Apr 18, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import logging |
|
28 | 28 | import datetime |
|
29 | 29 | import traceback |
|
30 | 30 | import paste |
|
31 | 31 | import beaker |
|
32 | 32 | import tarfile |
|
33 | 33 | import shutil |
|
34 | 34 | from os.path import abspath |
|
35 | 35 | from os.path import dirname as dn, join as jn |
|
36 | 36 | |
|
37 | 37 | from paste.script.command import Command, BadCommand |
|
38 | 38 | |
|
39 | 39 | from mercurial import ui, config |
|
40 | 40 | |
|
41 | 41 | from webhelpers.text import collapse, remove_formatting, strip_tags |
|
42 | 42 | |
|
43 | 43 | from rhodecode.lib.vcs import get_backend |
|
44 | 44 | from rhodecode.lib.vcs.backends.base import BaseChangeset |
|
45 | 45 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
46 | 46 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
47 | 47 | from rhodecode.lib.vcs.exceptions import VCSError |
|
48 | 48 | |
|
49 | 49 | from rhodecode.lib.caching_query import FromCache |
|
50 | 50 | |
|
51 | 51 | from rhodecode.model import meta |
|
52 | 52 | from rhodecode.model.db import Repository, User, RhodeCodeUi, \ |
|
53 | 53 | UserLog, RepoGroup, RhodeCodeSetting, UserRepoGroupToPerm |
|
54 | 54 | from rhodecode.model.meta import Session |
|
55 | 55 | from rhodecode.model.repos_group import ReposGroupModel |
|
56 | 56 | |
|
57 | 57 | log = logging.getLogger(__name__) |
|
58 | 58 | |
|
59 | 59 | |
|
60 | 60 | def recursive_replace(str_, replace=' '): |
|
61 | 61 | """Recursive replace of given sign to just one instance |
|
62 | 62 | |
|
63 | 63 | :param str_: given string |
|
64 | 64 | :param replace: char to find and replace multiple instances |
|
65 | 65 | |
|
66 | 66 | Examples:: |
|
67 | 67 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') |
|
68 | 68 | 'Mighty-Mighty-Bo-sstones' |
|
69 | 69 | """ |
|
70 | 70 | |
|
71 | 71 | if str_.find(replace * 2) == -1: |
|
72 | 72 | return str_ |
|
73 | 73 | else: |
|
74 | 74 | str_ = str_.replace(replace * 2, replace) |
|
75 | 75 | return recursive_replace(str_, replace) |
|
76 | 76 | |
|
77 | 77 | |
|
78 | 78 | def repo_name_slug(value): |
|
79 | 79 | """Return slug of name of repository |
|
80 | 80 | This function is called on each creation/modification |
|
81 | 81 | of repository to prevent bad names in repo |
|
82 | 82 | """ |
|
83 | 83 | |
|
84 | 84 | slug = remove_formatting(value) |
|
85 | 85 | slug = strip_tags(slug) |
|
86 | 86 | |
|
87 | 87 | for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """: |
|
88 | 88 | slug = slug.replace(c, '-') |
|
89 | 89 | slug = recursive_replace(slug, '-') |
|
90 | 90 | slug = collapse(slug, '-') |
|
91 | 91 | return slug |
|
92 | 92 | |
|
93 | 93 | |
|
94 | 94 | def get_repo_slug(request): |
|
95 |
|
|
|
95 | _repo = request.environ['pylons.routes_dict'].get('repo_name') | |
|
96 | if _repo: | |
|
97 | _repo = _repo.rstrip('/') | |
|
98 | return _repo | |
|
96 | 99 | |
|
97 | 100 | |
|
98 | 101 | def get_repos_group_slug(request): |
|
99 |
|
|
|
102 | _group = request.environ['pylons.routes_dict'].get('group_name') | |
|
103 | if _group: | |
|
104 | _group = _group.rstrip('/') | |
|
105 | return _group | |
|
100 | 106 | |
|
101 | 107 | |
|
102 | 108 | def action_logger(user, action, repo, ipaddr='', sa=None, commit=False): |
|
103 | 109 | """ |
|
104 | 110 | Action logger for various actions made by users |
|
105 | 111 | |
|
106 | 112 | :param user: user that made this action, can be a unique username string or |
|
107 | 113 | object containing user_id attribute |
|
108 | 114 | :param action: action to log, should be on of predefined unique actions for |
|
109 | 115 | easy translations |
|
110 | 116 | :param repo: string name of repository or object containing repo_id, |
|
111 | 117 | that action was made on |
|
112 | 118 | :param ipaddr: optional ip address from what the action was made |
|
113 | 119 | :param sa: optional sqlalchemy session |
|
114 | 120 | |
|
115 | 121 | """ |
|
116 | 122 | |
|
117 | 123 | if not sa: |
|
118 | 124 | sa = meta.Session |
|
119 | 125 | |
|
120 | 126 | try: |
|
121 | 127 | if hasattr(user, 'user_id'): |
|
122 | 128 | user_obj = user |
|
123 | 129 | elif isinstance(user, basestring): |
|
124 | 130 | user_obj = User.get_by_username(user) |
|
125 | 131 | else: |
|
126 | 132 | raise Exception('You have to provide user object or username') |
|
127 | 133 | |
|
128 | 134 | if hasattr(repo, 'repo_id'): |
|
129 | 135 | repo_obj = Repository.get(repo.repo_id) |
|
130 | 136 | repo_name = repo_obj.repo_name |
|
131 | 137 | elif isinstance(repo, basestring): |
|
132 | 138 | repo_name = repo.lstrip('/') |
|
133 | 139 | repo_obj = Repository.get_by_repo_name(repo_name) |
|
134 | 140 | else: |
|
135 | 141 | raise Exception('You have to provide repository to action logger') |
|
136 | 142 | |
|
137 | 143 | user_log = UserLog() |
|
138 | 144 | user_log.user_id = user_obj.user_id |
|
139 | 145 | user_log.action = action |
|
140 | 146 | |
|
141 | 147 | user_log.repository_id = repo_obj.repo_id |
|
142 | 148 | user_log.repository_name = repo_name |
|
143 | 149 | |
|
144 | 150 | user_log.action_date = datetime.datetime.now() |
|
145 | 151 | user_log.user_ip = ipaddr |
|
146 | 152 | sa.add(user_log) |
|
147 | 153 | |
|
148 | 154 | log.info('Adding user %s, action %s on %s' % (user_obj, action, repo)) |
|
149 | 155 | if commit: |
|
150 | 156 | sa.commit() |
|
151 | 157 | except: |
|
152 | 158 | log.error(traceback.format_exc()) |
|
153 | 159 | raise |
|
154 | 160 | |
|
155 | 161 | |
|
156 | 162 | def get_repos(path, recursive=False): |
|
157 | 163 | """ |
|
158 | 164 | Scans given path for repos and return (name,(type,path)) tuple |
|
159 | 165 | |
|
160 | 166 | :param path: path to scan for repositories |
|
161 | 167 | :param recursive: recursive search and return names with subdirs in front |
|
162 | 168 | """ |
|
163 | 169 | |
|
164 | 170 | # remove ending slash for better results |
|
165 | 171 | path = path.rstrip(os.sep) |
|
166 | 172 | |
|
167 | 173 | def _get_repos(p): |
|
168 | 174 | if not os.access(p, os.W_OK): |
|
169 | 175 | return |
|
170 | 176 | for dirpath in os.listdir(p): |
|
171 | 177 | if os.path.isfile(os.path.join(p, dirpath)): |
|
172 | 178 | continue |
|
173 | 179 | cur_path = os.path.join(p, dirpath) |
|
174 | 180 | try: |
|
175 | 181 | scm_info = get_scm(cur_path) |
|
176 | 182 | yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info |
|
177 | 183 | except VCSError: |
|
178 | 184 | if not recursive: |
|
179 | 185 | continue |
|
180 | 186 | #check if this dir containts other repos for recursive scan |
|
181 | 187 | rec_path = os.path.join(p, dirpath) |
|
182 | 188 | if os.path.isdir(rec_path): |
|
183 | 189 | for inner_scm in _get_repos(rec_path): |
|
184 | 190 | yield inner_scm |
|
185 | 191 | |
|
186 | 192 | return _get_repos(path) |
|
187 | 193 | |
|
188 | 194 | |
|
189 | 195 | def is_valid_repo(repo_name, base_path): |
|
190 | 196 | """ |
|
191 | 197 | Returns True if given path is a valid repository False otherwise |
|
192 | 198 | :param repo_name: |
|
193 | 199 | :param base_path: |
|
194 | 200 | |
|
195 | 201 | :return True: if given path is a valid repository |
|
196 | 202 | """ |
|
197 | 203 | full_path = os.path.join(base_path, repo_name) |
|
198 | 204 | |
|
199 | 205 | try: |
|
200 | 206 | get_scm(full_path) |
|
201 | 207 | return True |
|
202 | 208 | except VCSError: |
|
203 | 209 | return False |
|
204 | 210 | |
|
205 | 211 | |
|
206 | 212 | def is_valid_repos_group(repos_group_name, base_path): |
|
207 | 213 | """ |
|
208 | 214 | Returns True if given path is a repos group False otherwise |
|
209 | 215 | |
|
210 | 216 | :param repo_name: |
|
211 | 217 | :param base_path: |
|
212 | 218 | """ |
|
213 | 219 | full_path = os.path.join(base_path, repos_group_name) |
|
214 | 220 | |
|
215 | 221 | # check if it's not a repo |
|
216 | 222 | if is_valid_repo(repos_group_name, base_path): |
|
217 | 223 | return False |
|
218 | 224 | |
|
219 | 225 | # check if it's a valid path |
|
220 | 226 | if os.path.isdir(full_path): |
|
221 | 227 | return True |
|
222 | 228 | |
|
223 | 229 | return False |
|
224 | 230 | |
|
225 | 231 | |
|
226 | 232 | def ask_ok(prompt, retries=4, complaint='Yes or no, please!'): |
|
227 | 233 | while True: |
|
228 | 234 | ok = raw_input(prompt) |
|
229 | 235 | if ok in ('y', 'ye', 'yes'): |
|
230 | 236 | return True |
|
231 | 237 | if ok in ('n', 'no', 'nop', 'nope'): |
|
232 | 238 | return False |
|
233 | 239 | retries = retries - 1 |
|
234 | 240 | if retries < 0: |
|
235 | 241 | raise IOError |
|
236 | 242 | print complaint |
|
237 | 243 | |
|
238 | 244 | #propagated from mercurial documentation |
|
239 | 245 | ui_sections = ['alias', 'auth', |
|
240 | 246 | 'decode/encode', 'defaults', |
|
241 | 247 | 'diff', 'email', |
|
242 | 248 | 'extensions', 'format', |
|
243 | 249 | 'merge-patterns', 'merge-tools', |
|
244 | 250 | 'hooks', 'http_proxy', |
|
245 | 251 | 'smtp', 'patch', |
|
246 | 252 | 'paths', 'profiling', |
|
247 | 253 | 'server', 'trusted', |
|
248 | 254 | 'ui', 'web', ] |
|
249 | 255 | |
|
250 | 256 | |
|
251 | 257 | def make_ui(read_from='file', path=None, checkpaths=True): |
|
252 | 258 | """A function that will read python rc files or database |
|
253 | 259 | and make an mercurial ui object from read options |
|
254 | 260 | |
|
255 | 261 | :param path: path to mercurial config file |
|
256 | 262 | :param checkpaths: check the path |
|
257 | 263 | :param read_from: read from 'file' or 'db' |
|
258 | 264 | """ |
|
259 | 265 | |
|
260 | 266 | baseui = ui.ui() |
|
261 | 267 | |
|
262 | 268 | # clean the baseui object |
|
263 | 269 | baseui._ocfg = config.config() |
|
264 | 270 | baseui._ucfg = config.config() |
|
265 | 271 | baseui._tcfg = config.config() |
|
266 | 272 | |
|
267 | 273 | if read_from == 'file': |
|
268 | 274 | if not os.path.isfile(path): |
|
269 | 275 | log.debug('hgrc file is not present at %s skipping...' % path) |
|
270 | 276 | return False |
|
271 | 277 | log.debug('reading hgrc from %s' % path) |
|
272 | 278 | cfg = config.config() |
|
273 | 279 | cfg.read(path) |
|
274 | 280 | for section in ui_sections: |
|
275 | 281 | for k, v in cfg.items(section): |
|
276 | 282 | log.debug('settings ui from file[%s]%s:%s' % (section, k, v)) |
|
277 | 283 | baseui.setconfig(section, k, v) |
|
278 | 284 | |
|
279 | 285 | elif read_from == 'db': |
|
280 | 286 | sa = meta.Session |
|
281 | 287 | ret = sa.query(RhodeCodeUi)\ |
|
282 | 288 | .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\ |
|
283 | 289 | .all() |
|
284 | 290 | |
|
285 | 291 | hg_ui = ret |
|
286 | 292 | for ui_ in hg_ui: |
|
287 | 293 | if ui_.ui_active: |
|
288 | 294 | log.debug('settings ui from db[%s]%s:%s', ui_.ui_section, |
|
289 | 295 | ui_.ui_key, ui_.ui_value) |
|
290 | 296 | baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value) |
|
291 | 297 | |
|
292 | 298 | meta.Session.remove() |
|
293 | 299 | return baseui |
|
294 | 300 | |
|
295 | 301 | |
|
296 | 302 | def set_rhodecode_config(config): |
|
297 | 303 | """ |
|
298 | 304 | Updates pylons config with new settings from database |
|
299 | 305 | |
|
300 | 306 | :param config: |
|
301 | 307 | """ |
|
302 | 308 | hgsettings = RhodeCodeSetting.get_app_settings() |
|
303 | 309 | |
|
304 | 310 | for k, v in hgsettings.items(): |
|
305 | 311 | config[k] = v |
|
306 | 312 | |
|
307 | 313 | |
|
308 | 314 | def invalidate_cache(cache_key, *args): |
|
309 | 315 | """ |
|
310 | 316 | Puts cache invalidation task into db for |
|
311 | 317 | further global cache invalidation |
|
312 | 318 | """ |
|
313 | 319 | |
|
314 | 320 | from rhodecode.model.scm import ScmModel |
|
315 | 321 | |
|
316 | 322 | if cache_key.startswith('get_repo_cached_'): |
|
317 | 323 | name = cache_key.split('get_repo_cached_')[-1] |
|
318 | 324 | ScmModel().mark_for_invalidation(name) |
|
319 | 325 | |
|
320 | 326 | |
|
321 | 327 | class EmptyChangeset(BaseChangeset): |
|
322 | 328 | """ |
|
323 | 329 | An dummy empty changeset. It's possible to pass hash when creating |
|
324 | 330 | an EmptyChangeset |
|
325 | 331 | """ |
|
326 | 332 | |
|
327 | 333 | def __init__(self, cs='0' * 40, repo=None, requested_revision=None, |
|
328 | 334 | alias=None): |
|
329 | 335 | self._empty_cs = cs |
|
330 | 336 | self.revision = -1 |
|
331 | 337 | self.message = '' |
|
332 | 338 | self.author = '' |
|
333 | 339 | self.date = '' |
|
334 | 340 | self.repository = repo |
|
335 | 341 | self.requested_revision = requested_revision |
|
336 | 342 | self.alias = alias |
|
337 | 343 | |
|
338 | 344 | @LazyProperty |
|
339 | 345 | def raw_id(self): |
|
340 | 346 | """ |
|
341 | 347 | Returns raw string identifying this changeset, useful for web |
|
342 | 348 | representation. |
|
343 | 349 | """ |
|
344 | 350 | |
|
345 | 351 | return self._empty_cs |
|
346 | 352 | |
|
347 | 353 | @LazyProperty |
|
348 | 354 | def branch(self): |
|
349 | 355 | return get_backend(self.alias).DEFAULT_BRANCH_NAME |
|
350 | 356 | |
|
351 | 357 | @LazyProperty |
|
352 | 358 | def short_id(self): |
|
353 | 359 | return self.raw_id[:12] |
|
354 | 360 | |
|
355 | 361 | def get_file_changeset(self, path): |
|
356 | 362 | return self |
|
357 | 363 | |
|
358 | 364 | def get_file_content(self, path): |
|
359 | 365 | return u'' |
|
360 | 366 | |
|
361 | 367 | def get_file_size(self, path): |
|
362 | 368 | return 0 |
|
363 | 369 | |
|
364 | 370 | |
|
365 | 371 | def map_groups(groups): |
|
366 | 372 | """ |
|
367 | 373 | Checks for groups existence, and creates groups structures. |
|
368 | 374 | It returns last group in structure |
|
369 | 375 | |
|
370 | 376 | :param groups: list of groups structure |
|
371 | 377 | """ |
|
372 | 378 | sa = meta.Session |
|
373 | 379 | |
|
374 | 380 | parent = None |
|
375 | 381 | group = None |
|
376 | 382 | |
|
377 | 383 | # last element is repo in nested groups structure |
|
378 | 384 | groups = groups[:-1] |
|
379 | 385 | rgm = ReposGroupModel(sa) |
|
380 | 386 | for lvl, group_name in enumerate(groups): |
|
381 | 387 | group_name = '/'.join(groups[:lvl] + [group_name]) |
|
382 | 388 | group = RepoGroup.get_by_group_name(group_name) |
|
383 | 389 | desc = '%s group' % group_name |
|
384 | 390 | |
|
385 | 391 | # # WTF that doesn't work !? |
|
386 | 392 | # if group is None: |
|
387 | 393 | # group = rgm.create(group_name, desc, parent, just_db=True) |
|
388 | 394 | # sa.commit() |
|
389 | 395 | |
|
390 | 396 | if group is None: |
|
391 | 397 | log.debug('creating group level: %s group_name: %s' % (lvl, group_name)) |
|
392 | 398 | group = RepoGroup(group_name, parent) |
|
393 | 399 | group.group_description = desc |
|
394 | 400 | sa.add(group) |
|
395 | 401 | rgm._create_default_perms(group) |
|
396 | 402 | sa.commit() |
|
397 | 403 | parent = group |
|
398 | 404 | return group |
|
399 | 405 | |
|
400 | 406 | |
|
401 | 407 | def repo2db_mapper(initial_repo_list, remove_obsolete=False): |
|
402 | 408 | """ |
|
403 | 409 | maps all repos given in initial_repo_list, non existing repositories |
|
404 | 410 | are created, if remove_obsolete is True it also check for db entries |
|
405 | 411 | that are not in initial_repo_list and removes them. |
|
406 | 412 | |
|
407 | 413 | :param initial_repo_list: list of repositories found by scanning methods |
|
408 | 414 | :param remove_obsolete: check for obsolete entries in database |
|
409 | 415 | """ |
|
410 | 416 | from rhodecode.model.repo import RepoModel |
|
411 | 417 | sa = meta.Session |
|
412 | 418 | rm = RepoModel() |
|
413 | 419 | user = sa.query(User).filter(User.admin == True).first() |
|
414 | 420 | if user is None: |
|
415 | 421 | raise Exception('Missing administrative account !') |
|
416 | 422 | added = [] |
|
417 | 423 | |
|
418 | 424 | for name, repo in initial_repo_list.items(): |
|
419 | 425 | group = map_groups(name.split(Repository.url_sep())) |
|
420 | 426 | if not rm.get_by_repo_name(name, cache=False): |
|
421 | 427 | log.info('repository %s not found creating default' % name) |
|
422 | 428 | added.append(name) |
|
423 | 429 | form_data = { |
|
424 | 430 | 'repo_name': name, |
|
425 | 431 | 'repo_name_full': name, |
|
426 | 432 | 'repo_type': repo.alias, |
|
427 | 433 | 'description': repo.description \ |
|
428 | 434 | if repo.description != 'unknown' else '%s repository' % name, |
|
429 | 435 | 'private': False, |
|
430 | 436 | 'group_id': getattr(group, 'group_id', None) |
|
431 | 437 | } |
|
432 | 438 | rm.create(form_data, user, just_db=True) |
|
433 | 439 | sa.commit() |
|
434 | 440 | removed = [] |
|
435 | 441 | if remove_obsolete: |
|
436 | 442 | #remove from database those repositories that are not in the filesystem |
|
437 | 443 | for repo in sa.query(Repository).all(): |
|
438 | 444 | if repo.repo_name not in initial_repo_list.keys(): |
|
439 | 445 | removed.append(repo.repo_name) |
|
440 | 446 | sa.delete(repo) |
|
441 | 447 | sa.commit() |
|
442 | 448 | |
|
443 | 449 | return added, removed |
|
444 | 450 | |
|
445 | 451 | |
|
446 | 452 | # set cache regions for beaker so celery can utilise it |
|
447 | 453 | def add_cache(settings): |
|
448 | 454 | cache_settings = {'regions': None} |
|
449 | 455 | for key in settings.keys(): |
|
450 | 456 | for prefix in ['beaker.cache.', 'cache.']: |
|
451 | 457 | if key.startswith(prefix): |
|
452 | 458 | name = key.split(prefix)[1].strip() |
|
453 | 459 | cache_settings[name] = settings[key].strip() |
|
454 | 460 | if cache_settings['regions']: |
|
455 | 461 | for region in cache_settings['regions'].split(','): |
|
456 | 462 | region = region.strip() |
|
457 | 463 | region_settings = {} |
|
458 | 464 | for key, value in cache_settings.items(): |
|
459 | 465 | if key.startswith(region): |
|
460 | 466 | region_settings[key.split('.')[1]] = value |
|
461 | 467 | region_settings['expire'] = int(region_settings.get('expire', |
|
462 | 468 | 60)) |
|
463 | 469 | region_settings.setdefault('lock_dir', |
|
464 | 470 | cache_settings.get('lock_dir')) |
|
465 | 471 | region_settings.setdefault('data_dir', |
|
466 | 472 | cache_settings.get('data_dir')) |
|
467 | 473 | |
|
468 | 474 | if 'type' not in region_settings: |
|
469 | 475 | region_settings['type'] = cache_settings.get('type', |
|
470 | 476 | 'memory') |
|
471 | 477 | beaker.cache.cache_regions[region] = region_settings |
|
472 | 478 | |
|
473 | 479 | |
|
474 | 480 | #============================================================================== |
|
475 | 481 | # TEST FUNCTIONS AND CREATORS |
|
476 | 482 | #============================================================================== |
|
477 | 483 | def create_test_index(repo_location, config, full_index): |
|
478 | 484 | """ |
|
479 | 485 | Makes default test index |
|
480 | 486 | |
|
481 | 487 | :param config: test config |
|
482 | 488 | :param full_index: |
|
483 | 489 | """ |
|
484 | 490 | |
|
485 | 491 | from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon |
|
486 | 492 | from rhodecode.lib.pidlock import DaemonLock, LockHeld |
|
487 | 493 | |
|
488 | 494 | repo_location = repo_location |
|
489 | 495 | |
|
490 | 496 | index_location = os.path.join(config['app_conf']['index_dir']) |
|
491 | 497 | if not os.path.exists(index_location): |
|
492 | 498 | os.makedirs(index_location) |
|
493 | 499 | |
|
494 | 500 | try: |
|
495 | 501 | l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock')) |
|
496 | 502 | WhooshIndexingDaemon(index_location=index_location, |
|
497 | 503 | repo_location=repo_location)\ |
|
498 | 504 | .run(full_index=full_index) |
|
499 | 505 | l.release() |
|
500 | 506 | except LockHeld: |
|
501 | 507 | pass |
|
502 | 508 | |
|
503 | 509 | |
|
504 | 510 | def create_test_env(repos_test_path, config): |
|
505 | 511 | """ |
|
506 | 512 | Makes a fresh database and |
|
507 | 513 | install test repository into tmp dir |
|
508 | 514 | """ |
|
509 | 515 | from rhodecode.lib.db_manage import DbManage |
|
510 | 516 | from rhodecode.tests import HG_REPO, TESTS_TMP_PATH |
|
511 | 517 | |
|
512 | 518 | # PART ONE create db |
|
513 | 519 | dbconf = config['sqlalchemy.db1.url'] |
|
514 | 520 | log.debug('making test db %s' % dbconf) |
|
515 | 521 | |
|
516 | 522 | # create test dir if it doesn't exist |
|
517 | 523 | if not os.path.isdir(repos_test_path): |
|
518 | 524 | log.debug('Creating testdir %s' % repos_test_path) |
|
519 | 525 | os.makedirs(repos_test_path) |
|
520 | 526 | |
|
521 | 527 | dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'], |
|
522 | 528 | tests=True) |
|
523 | 529 | dbmanage.create_tables(override=True) |
|
524 | 530 | dbmanage.create_settings(dbmanage.config_prompt(repos_test_path)) |
|
525 | 531 | dbmanage.create_default_user() |
|
526 | 532 | dbmanage.admin_prompt() |
|
527 | 533 | dbmanage.create_permissions() |
|
528 | 534 | dbmanage.populate_default_permissions() |
|
529 | 535 | Session.commit() |
|
530 | 536 | # PART TWO make test repo |
|
531 | 537 | log.debug('making test vcs repositories') |
|
532 | 538 | |
|
533 | 539 | idx_path = config['app_conf']['index_dir'] |
|
534 | 540 | data_path = config['app_conf']['cache_dir'] |
|
535 | 541 | |
|
536 | 542 | #clean index and data |
|
537 | 543 | if idx_path and os.path.exists(idx_path): |
|
538 | 544 | log.debug('remove %s' % idx_path) |
|
539 | 545 | shutil.rmtree(idx_path) |
|
540 | 546 | |
|
541 | 547 | if data_path and os.path.exists(data_path): |
|
542 | 548 | log.debug('remove %s' % data_path) |
|
543 | 549 | shutil.rmtree(data_path) |
|
544 | 550 | |
|
545 | 551 | #CREATE DEFAULT HG REPOSITORY |
|
546 | 552 | cur_dir = dn(dn(abspath(__file__))) |
|
547 | 553 | tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz")) |
|
548 | 554 | tar.extractall(jn(TESTS_TMP_PATH, HG_REPO)) |
|
549 | 555 | tar.close() |
|
550 | 556 | |
|
551 | 557 | |
|
552 | 558 | #============================================================================== |
|
553 | 559 | # PASTER COMMANDS |
|
554 | 560 | #============================================================================== |
|
555 | 561 | class BasePasterCommand(Command): |
|
556 | 562 | """ |
|
557 | 563 | Abstract Base Class for paster commands. |
|
558 | 564 | |
|
559 | 565 | The celery commands are somewhat aggressive about loading |
|
560 | 566 | celery.conf, and since our module sets the `CELERY_LOADER` |
|
561 | 567 | environment variable to our loader, we have to bootstrap a bit and |
|
562 | 568 | make sure we've had a chance to load the pylons config off of the |
|
563 | 569 | command line, otherwise everything fails. |
|
564 | 570 | """ |
|
565 | 571 | min_args = 1 |
|
566 | 572 | min_args_error = "Please provide a paster config file as an argument." |
|
567 | 573 | takes_config_file = 1 |
|
568 | 574 | requires_config_file = True |
|
569 | 575 | |
|
570 | 576 | def notify_msg(self, msg, log=False): |
|
571 | 577 | """Make a notification to user, additionally if logger is passed |
|
572 | 578 | it logs this action using given logger |
|
573 | 579 | |
|
574 | 580 | :param msg: message that will be printed to user |
|
575 | 581 | :param log: logging instance, to use to additionally log this message |
|
576 | 582 | |
|
577 | 583 | """ |
|
578 | 584 | if log and isinstance(log, logging): |
|
579 | 585 | log(msg) |
|
580 | 586 | |
|
581 | 587 | def run(self, args): |
|
582 | 588 | """ |
|
583 | 589 | Overrides Command.run |
|
584 | 590 | |
|
585 | 591 | Checks for a config file argument and loads it. |
|
586 | 592 | """ |
|
587 | 593 | if len(args) < self.min_args: |
|
588 | 594 | raise BadCommand( |
|
589 | 595 | self.min_args_error % {'min_args': self.min_args, |
|
590 | 596 | 'actual_args': len(args)}) |
|
591 | 597 | |
|
592 | 598 | # Decrement because we're going to lob off the first argument. |
|
593 | 599 | # @@ This is hacky |
|
594 | 600 | self.min_args -= 1 |
|
595 | 601 | self.bootstrap_config(args[0]) |
|
596 | 602 | self.update_parser() |
|
597 | 603 | return super(BasePasterCommand, self).run(args[1:]) |
|
598 | 604 | |
|
599 | 605 | def update_parser(self): |
|
600 | 606 | """ |
|
601 | 607 | Abstract method. Allows for the class's parser to be updated |
|
602 | 608 | before the superclass's `run` method is called. Necessary to |
|
603 | 609 | allow options/arguments to be passed through to the underlying |
|
604 | 610 | celery command. |
|
605 | 611 | """ |
|
606 | 612 | raise NotImplementedError("Abstract Method.") |
|
607 | 613 | |
|
608 | 614 | def bootstrap_config(self, conf): |
|
609 | 615 | """ |
|
610 | 616 | Loads the pylons configuration. |
|
611 | 617 | """ |
|
612 | 618 | from pylons import config as pylonsconfig |
|
613 | 619 | |
|
614 | 620 | path_to_ini_file = os.path.realpath(conf) |
|
615 | 621 | conf = paste.deploy.appconfig('config:' + path_to_ini_file) |
|
616 | 622 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) |
@@ -1,1203 +1,1216 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.model.db |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | Database Models for RhodeCode |
|
7 | 7 | |
|
8 | 8 | :created_on: Apr 08, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import logging |
|
28 | 28 | import datetime |
|
29 | 29 | import traceback |
|
30 | 30 | from collections import defaultdict |
|
31 | 31 | |
|
32 | 32 | from sqlalchemy import * |
|
33 | 33 | from sqlalchemy.ext.hybrid import hybrid_property |
|
34 | 34 | from sqlalchemy.orm import relationship, joinedload, class_mapper, validates |
|
35 | 35 | from beaker.cache import cache_region, region_invalidate |
|
36 | 36 | |
|
37 | 37 | from rhodecode.lib.vcs import get_backend |
|
38 | 38 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
39 | 39 | from rhodecode.lib.vcs.exceptions import VCSError |
|
40 | 40 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
41 | 41 | |
|
42 | 42 | from rhodecode.lib import str2bool, safe_str, get_changeset_safe, safe_unicode |
|
43 | 43 | from rhodecode.lib.compat import json |
|
44 | 44 | from rhodecode.lib.caching_query import FromCache |
|
45 | 45 | |
|
46 | 46 | from rhodecode.model.meta import Base, Session |
|
47 | import hashlib | |
|
47 | 48 | |
|
48 | 49 | |
|
49 | 50 | log = logging.getLogger(__name__) |
|
50 | 51 | |
|
51 | 52 | #============================================================================== |
|
52 | 53 | # BASE CLASSES |
|
53 | 54 | #============================================================================== |
|
54 | 55 | |
|
56 | _hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest() | |
|
57 | ||
|
55 | 58 | |
|
56 | 59 | class ModelSerializer(json.JSONEncoder): |
|
57 | 60 | """ |
|
58 | 61 | Simple Serializer for JSON, |
|
59 | 62 | |
|
60 | 63 | usage:: |
|
61 | 64 | |
|
62 | 65 | to make object customized for serialization implement a __json__ |
|
63 | 66 | method that will return a dict for serialization into json |
|
64 | 67 | |
|
65 | 68 | example:: |
|
66 | 69 | |
|
67 | 70 | class Task(object): |
|
68 | 71 | |
|
69 | 72 | def __init__(self, name, value): |
|
70 | 73 | self.name = name |
|
71 | 74 | self.value = value |
|
72 | 75 | |
|
73 | 76 | def __json__(self): |
|
74 | 77 | return dict(name=self.name, |
|
75 | 78 | value=self.value) |
|
76 | 79 | |
|
77 | 80 | """ |
|
78 | 81 | |
|
79 | 82 | def default(self, obj): |
|
80 | 83 | |
|
81 | 84 | if hasattr(obj, '__json__'): |
|
82 | 85 | return obj.__json__() |
|
83 | 86 | else: |
|
84 | 87 | return json.JSONEncoder.default(self, obj) |
|
85 | 88 | |
|
86 | 89 | |
|
87 | 90 | class BaseModel(object): |
|
88 | 91 | """ |
|
89 | 92 | Base Model for all classess |
|
90 | 93 | """ |
|
91 | 94 | |
|
92 | 95 | @classmethod |
|
93 | 96 | def _get_keys(cls): |
|
94 | 97 | """return column names for this model """ |
|
95 | 98 | return class_mapper(cls).c.keys() |
|
96 | 99 | |
|
97 | 100 | def get_dict(self): |
|
98 | 101 | """ |
|
99 | 102 | return dict with keys and values corresponding |
|
100 | 103 | to this model data """ |
|
101 | 104 | |
|
102 | 105 | d = {} |
|
103 | 106 | for k in self._get_keys(): |
|
104 | 107 | d[k] = getattr(self, k) |
|
105 | 108 | |
|
106 | 109 | # also use __json__() if present to get additional fields |
|
107 | 110 | for k, val in getattr(self, '__json__', lambda: {})().iteritems(): |
|
108 | 111 | d[k] = val |
|
109 | 112 | return d |
|
110 | 113 | |
|
111 | 114 | def get_appstruct(self): |
|
112 | 115 | """return list with keys and values tupples corresponding |
|
113 | 116 | to this model data """ |
|
114 | 117 | |
|
115 | 118 | l = [] |
|
116 | 119 | for k in self._get_keys(): |
|
117 | 120 | l.append((k, getattr(self, k),)) |
|
118 | 121 | return l |
|
119 | 122 | |
|
120 | 123 | def populate_obj(self, populate_dict): |
|
121 | 124 | """populate model with data from given populate_dict""" |
|
122 | 125 | |
|
123 | 126 | for k in self._get_keys(): |
|
124 | 127 | if k in populate_dict: |
|
125 | 128 | setattr(self, k, populate_dict[k]) |
|
126 | 129 | |
|
127 | 130 | @classmethod |
|
128 | 131 | def query(cls): |
|
129 | 132 | return Session.query(cls) |
|
130 | 133 | |
|
131 | 134 | @classmethod |
|
132 | 135 | def get(cls, id_): |
|
133 | 136 | if id_: |
|
134 | 137 | return cls.query().get(id_) |
|
135 | 138 | |
|
136 | 139 | @classmethod |
|
137 | 140 | def getAll(cls): |
|
138 | 141 | return cls.query().all() |
|
139 | 142 | |
|
140 | 143 | @classmethod |
|
141 | 144 | def delete(cls, id_): |
|
142 | 145 | obj = cls.query().get(id_) |
|
143 | 146 | Session.delete(obj) |
|
144 | 147 | |
|
145 | 148 | |
|
146 | 149 | class RhodeCodeSetting(Base, BaseModel): |
|
147 | 150 | __tablename__ = 'rhodecode_settings' |
|
148 | 151 | __table_args__ = ( |
|
149 | 152 | UniqueConstraint('app_settings_name'), |
|
150 | 153 | {'extend_existing': True} |
|
151 | 154 | ) |
|
152 | 155 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
153 | 156 | app_settings_name = Column("app_settings_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
154 | 157 | _app_settings_value = Column("app_settings_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
155 | 158 | |
|
156 | 159 | def __init__(self, k='', v=''): |
|
157 | 160 | self.app_settings_name = k |
|
158 | 161 | self.app_settings_value = v |
|
159 | 162 | |
|
160 | 163 | @validates('_app_settings_value') |
|
161 | 164 | def validate_settings_value(self, key, val): |
|
162 | 165 | assert type(val) == unicode |
|
163 | 166 | return val |
|
164 | 167 | |
|
165 | 168 | @hybrid_property |
|
166 | 169 | def app_settings_value(self): |
|
167 | 170 | v = self._app_settings_value |
|
168 | 171 | if self.app_settings_name == 'ldap_active': |
|
169 | 172 | v = str2bool(v) |
|
170 | 173 | return v |
|
171 | 174 | |
|
172 | 175 | @app_settings_value.setter |
|
173 | 176 | def app_settings_value(self, val): |
|
174 | 177 | """ |
|
175 | 178 | Setter that will always make sure we use unicode in app_settings_value |
|
176 | 179 | |
|
177 | 180 | :param val: |
|
178 | 181 | """ |
|
179 | 182 | self._app_settings_value = safe_unicode(val) |
|
180 | 183 | |
|
181 | 184 | def __repr__(self): |
|
182 | 185 | return "<%s('%s:%s')>" % ( |
|
183 | 186 | self.__class__.__name__, |
|
184 | 187 | self.app_settings_name, self.app_settings_value |
|
185 | 188 | ) |
|
186 | 189 | |
|
187 | 190 | @classmethod |
|
188 | 191 | def get_by_name(cls, ldap_key): |
|
189 | 192 | return cls.query()\ |
|
190 | 193 | .filter(cls.app_settings_name == ldap_key).scalar() |
|
191 | 194 | |
|
192 | 195 | @classmethod |
|
193 | 196 | def get_app_settings(cls, cache=False): |
|
194 | 197 | |
|
195 | 198 | ret = cls.query() |
|
196 | 199 | |
|
197 | 200 | if cache: |
|
198 | 201 | ret = ret.options(FromCache("sql_cache_short", "get_hg_settings")) |
|
199 | 202 | |
|
200 | 203 | if not ret: |
|
201 | 204 | raise Exception('Could not get application settings !') |
|
202 | 205 | settings = {} |
|
203 | 206 | for each in ret: |
|
204 | 207 | settings['rhodecode_' + each.app_settings_name] = \ |
|
205 | 208 | each.app_settings_value |
|
206 | 209 | |
|
207 | 210 | return settings |
|
208 | 211 | |
|
209 | 212 | @classmethod |
|
210 | 213 | def get_ldap_settings(cls, cache=False): |
|
211 | 214 | ret = cls.query()\ |
|
212 | 215 | .filter(cls.app_settings_name.startswith('ldap_')).all() |
|
213 | 216 | fd = {} |
|
214 | 217 | for row in ret: |
|
215 | 218 | fd.update({row.app_settings_name:row.app_settings_value}) |
|
216 | 219 | |
|
217 | 220 | return fd |
|
218 | 221 | |
|
219 | 222 | |
|
220 | 223 | class RhodeCodeUi(Base, BaseModel): |
|
221 | 224 | __tablename__ = 'rhodecode_ui' |
|
222 | 225 | __table_args__ = ( |
|
223 | 226 | UniqueConstraint('ui_key'), |
|
224 | 227 | {'extend_existing': True} |
|
225 | 228 | ) |
|
226 | 229 | |
|
227 | 230 | HOOK_UPDATE = 'changegroup.update' |
|
228 | 231 | HOOK_REPO_SIZE = 'changegroup.repo_size' |
|
229 | 232 | HOOK_PUSH = 'pretxnchangegroup.push_logger' |
|
230 | 233 | HOOK_PULL = 'preoutgoing.pull_logger' |
|
231 | 234 | |
|
232 | 235 | ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
233 | 236 | ui_section = Column("ui_section", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
234 | 237 | ui_key = Column("ui_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
235 | 238 | ui_value = Column("ui_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
236 | 239 | ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True) |
|
237 | 240 | |
|
238 | 241 | @classmethod |
|
239 | 242 | def get_by_key(cls, key): |
|
240 | 243 | return cls.query().filter(cls.ui_key == key) |
|
241 | 244 | |
|
242 | 245 | @classmethod |
|
243 | 246 | def get_builtin_hooks(cls): |
|
244 | 247 | q = cls.query() |
|
245 | 248 | q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, |
|
246 | 249 | cls.HOOK_REPO_SIZE, |
|
247 | 250 | cls.HOOK_PUSH, cls.HOOK_PULL])) |
|
248 | 251 | return q.all() |
|
249 | 252 | |
|
250 | 253 | @classmethod |
|
251 | 254 | def get_custom_hooks(cls): |
|
252 | 255 | q = cls.query() |
|
253 | 256 | q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, |
|
254 | 257 | cls.HOOK_REPO_SIZE, |
|
255 | 258 | cls.HOOK_PUSH, cls.HOOK_PULL])) |
|
256 | 259 | q = q.filter(cls.ui_section == 'hooks') |
|
257 | 260 | return q.all() |
|
258 | 261 | |
|
259 | 262 | @classmethod |
|
260 | 263 | def create_or_update_hook(cls, key, val): |
|
261 | 264 | new_ui = cls.get_by_key(key).scalar() or cls() |
|
262 | 265 | new_ui.ui_section = 'hooks' |
|
263 | 266 | new_ui.ui_active = True |
|
264 | 267 | new_ui.ui_key = key |
|
265 | 268 | new_ui.ui_value = val |
|
266 | 269 | |
|
267 | 270 | Session.add(new_ui) |
|
268 | 271 | |
|
269 | 272 | |
|
270 | 273 | class User(Base, BaseModel): |
|
271 | 274 | __tablename__ = 'users' |
|
272 | 275 | __table_args__ = ( |
|
273 | 276 | UniqueConstraint('username'), UniqueConstraint('email'), |
|
274 | 277 | {'extend_existing': True} |
|
275 | 278 | ) |
|
276 | 279 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
277 | 280 | username = Column("username", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
278 | 281 | password = Column("password", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
279 | 282 | active = Column("active", Boolean(), nullable=True, unique=None, default=None) |
|
280 | 283 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) |
|
281 | 284 | name = Column("name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
282 | 285 | lastname = Column("lastname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
283 | 286 | _email = Column("email", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
284 | 287 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
285 | 288 | ldap_dn = Column("ldap_dn", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
286 | 289 | api_key = Column("api_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
287 | 290 | |
|
288 | 291 | user_log = relationship('UserLog', cascade='all') |
|
289 | 292 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') |
|
290 | 293 | |
|
291 | 294 | repositories = relationship('Repository') |
|
292 | 295 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') |
|
293 | 296 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') |
|
294 | 297 | |
|
295 | 298 | group_member = relationship('UsersGroupMember', cascade='all') |
|
296 | 299 | |
|
297 | 300 | notifications = relationship('UserNotification',) |
|
298 | 301 | |
|
299 | 302 | @hybrid_property |
|
300 | 303 | def email(self): |
|
301 | 304 | return self._email |
|
302 | 305 | |
|
303 | 306 | @email.setter |
|
304 | 307 | def email(self, val): |
|
305 | 308 | self._email = val.lower() if val else None |
|
306 | 309 | |
|
307 | 310 | @property |
|
308 | 311 | def full_name(self): |
|
309 | 312 | return '%s %s' % (self.name, self.lastname) |
|
310 | 313 | |
|
311 | 314 | @property |
|
312 | 315 | def full_name_or_username(self): |
|
313 | 316 | return ('%s %s' % (self.name, self.lastname) |
|
314 | 317 | if (self.name and self.lastname) else self.username) |
|
315 | 318 | |
|
316 | 319 | @property |
|
317 | 320 | def full_contact(self): |
|
318 | 321 | return '%s %s <%s>' % (self.name, self.lastname, self.email) |
|
319 | 322 | |
|
320 | 323 | @property |
|
321 | 324 | def short_contact(self): |
|
322 | 325 | return '%s %s' % (self.name, self.lastname) |
|
323 | 326 | |
|
324 | 327 | @property |
|
325 | 328 | def is_admin(self): |
|
326 | 329 | return self.admin |
|
327 | 330 | |
|
328 | 331 | def __repr__(self): |
|
329 | 332 | return "<%s('id:%s:%s')>" % (self.__class__.__name__, |
|
330 | 333 | self.user_id, self.username) |
|
331 | 334 | |
|
332 | 335 | @classmethod |
|
333 | 336 | def get_by_username(cls, username, case_insensitive=False, cache=False): |
|
334 | 337 | if case_insensitive: |
|
335 | 338 | q = cls.query().filter(cls.username.ilike(username)) |
|
336 | 339 | else: |
|
337 | 340 | q = cls.query().filter(cls.username == username) |
|
338 | 341 | |
|
339 | 342 | if cache: |
|
340 |
q = q.options(FromCache( |
|
|
341 | "get_user_%s" % username)) | |
|
343 | q = q.options(FromCache( | |
|
344 | "sql_cache_short", | |
|
345 | "get_user_%s" % _hash_key(username) | |
|
346 | ) | |
|
347 | ) | |
|
342 | 348 | return q.scalar() |
|
343 | 349 | |
|
344 | 350 | @classmethod |
|
345 | 351 | def get_by_api_key(cls, api_key, cache=False): |
|
346 | 352 | q = cls.query().filter(cls.api_key == api_key) |
|
347 | 353 | |
|
348 | 354 | if cache: |
|
349 | 355 | q = q.options(FromCache("sql_cache_short", |
|
350 | 356 | "get_api_key_%s" % api_key)) |
|
351 | 357 | return q.scalar() |
|
352 | 358 | |
|
353 | 359 | @classmethod |
|
354 | 360 | def get_by_email(cls, email, case_insensitive=False, cache=False): |
|
355 | 361 | if case_insensitive: |
|
356 | 362 | q = cls.query().filter(cls.email.ilike(email)) |
|
357 | 363 | else: |
|
358 | 364 | q = cls.query().filter(cls.email == email) |
|
359 | 365 | |
|
360 | 366 | if cache: |
|
361 | 367 | q = q.options(FromCache("sql_cache_short", |
|
362 | 368 | "get_api_key_%s" % email)) |
|
363 | 369 | return q.scalar() |
|
364 | 370 | |
|
365 | 371 | def update_lastlogin(self): |
|
366 | 372 | """Update user lastlogin""" |
|
367 | 373 | self.last_login = datetime.datetime.now() |
|
368 | 374 | Session.add(self) |
|
369 | 375 | log.debug('updated user %s lastlogin' % self.username) |
|
370 | 376 | |
|
371 | 377 | def __json__(self): |
|
372 | 378 | return dict( |
|
373 | 379 | email=self.email, |
|
374 | 380 | full_name=self.full_name, |
|
375 | 381 | full_name_or_username=self.full_name_or_username, |
|
376 | 382 | short_contact=self.short_contact, |
|
377 | 383 | full_contact=self.full_contact |
|
378 | 384 | ) |
|
379 | 385 | |
|
380 | 386 | |
|
class UserLog(Base, BaseModel):
    """Journal row: one action a user performed, optionally against a repo."""

    __tablename__ = 'user_logs'
    __table_args__ = {'extend_existing': True}

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
    repository_name = Column("repository_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    action = Column("action", UnicodeText(length=1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository', cascade='')

    @property
    def action_as_day(self):
        """Calendar day (a ``datetime.date``) on which the action happened."""
        return datetime.date(*self.action_date.timetuple()[:3])
|
398 | 404 | |
|
399 | 405 | |
|
class UsersGroup(Base, BaseModel):
    """Named group of users, used to assign permissions in bulk."""

    __tablename__ = 'users_groups'
    __table_args__ = {'extend_existing': True}

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)

    members = relationship('UsersGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UsersGroupToPerm', cascade='all')

    def __repr__(self):
        return '<userGroup(%s)>' % (self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Fetch a users group by name, or None.

        :param group_name: group name to search for
        :param case_insensitive: when True, match with ILIKE
        :param cache: when True, serve the row from the short-lived SQL cache
        """
        if case_insensitive:
            q = cls.query().filter(cls.users_group_name.ilike(group_name))
        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            # BUGFIX: the key prefix used to be "get_user_%s", the very same
            # namespace User.get_by_username uses in "sql_cache_short"; a
            # group named like an existing username would collide and return
            # a cached User row here. Use a group-specific prefix.
            q = q.options(FromCache(
                "sql_cache_short",
                "get_users_group_name_%s" % _hash_key(group_name)
            ))
        return q.scalar()

    @classmethod
    def get(cls, users_group_id, cache=False):
        """Fetch a users group by primary key.

        :param users_group_id: primary key of the group
        :param cache: when True, serve the row from the short-lived SQL cache
        """
        users_group = cls.query()
        if cache:
            users_group = users_group.options(FromCache("sql_cache_short",
                                    "get_users_group_%s" % users_group_id))
        return users_group.get(users_group_id)
|
432 | 442 | |
|
433 | 443 | |
|
class UsersGroupMember(Base, BaseModel):
    """Association row linking one user into one users group."""

    __tablename__ = 'users_groups_members'
    __table_args__ = {'extend_existing': True}

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UsersGroup')

    def __init__(self, gr_id='', u_id=''):
        """Bind a membership row to a group id and a user id."""
        self.users_group_id = gr_id
        self.user_id = u_id
|
448 | 458 | |
|
449 | 459 | |
|
class Repository(Base, BaseModel):
    """A repository known to RhodeCode plus helpers to reach its vcs backend."""

    __tablename__ = 'repositories'
    __table_args__ = (
        UniqueConstraint('repo_name'),
        {'extend_existing': True},
    )

    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repo_name = Column("repo_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
    clone_uri = Column("clone_uri", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
    repo_type = Column("repo_type", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default='hg')
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)

    user = relationship('User')
    fork = relationship('Repository', remote_side=repo_id)
    group = relationship('RepoGroup')
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UsersGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all')

    logs = relationship('UserLog')

    def __repr__(self):
        return "<%s('%s:%s')>" % (self.__class__.__name__,
                                  self.repo_id, self.repo_name)

    @classmethod
    def url_sep(cls):
        """Separator used inside stored repository names."""
        return '/'

    @classmethod
    def get_by_repo_name(cls, repo_name):
        """Fetch a repository by full name, eager-loading fork, user and group."""
        query = Session.query(cls).filter(cls.repo_name == repo_name)
        for rel in (Repository.fork, Repository.user, Repository.group):
            query = query.options(joinedload(rel))
        return query.scalar()

    @classmethod
    def get_repo_forks(cls, repo_id):
        """Query of repositories forked from *repo_id*."""
        return cls.query().filter(Repository.fork_id == repo_id)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all repos are stored

        :param cls:
        """
        query = Session.query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == cls.url_sep())
        query = query.options(FromCache("sql_cache_short", "repository_repo_path"))
        return query.one().ui_value

    @property
    def just_name(self):
        """Last path segment of the repository name."""
        return self.repo_name.split(Repository.url_sep())[-1]

    @property
    def groups_with_parents(self):
        """Ancestor repo groups of this repository, outermost first."""
        chain = []
        node = self.group
        while node is not None:
            chain.insert(0, node)
            node = getattr(node, 'parent_group', None)
        return chain

    @property
    def groups_and_repo(self):
        """Tuple of (ancestor groups, short repository name)."""
        return self.groups_with_parents, self.just_name

    @LazyProperty
    def repo_path(self):
        """
        Returns base full path for that repository means where it actually
        exists on a filesystem
        """
        query = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
                                                  Repository.url_sep())
        query = query.options(FromCache("sql_cache_short", "repository_repo_path"))
        return query.one().ui_value

    @property
    def repo_full_path(self):
        """Absolute filesystem location of this repository."""
        # repo_name is stored with '/' separators regardless of OS; translate
        # each segment into a component of a native filesystem path
        segments = [self.repo_path]
        segments += self.repo_name.split(Repository.url_sep())
        return os.path.join(*segments)

    def get_new_name(self, repo_name):
        """
        returns new full repository name based on assigned group and new new

        :param group_name:
        """
        prefix = self.group.full_path_splitted if self.group else []
        return Repository.url_sep().join(prefix + [repo_name])

    @property
    def _ui(self):
        """
        Creates an db based ui object for this repository
        """
        from mercurial import ui
        from mercurial import config
        baseui = ui.ui()

        #clean the baseui object
        baseui._ocfg = config.config()
        baseui._ucfg = config.config()
        baseui._tcfg = config.config()

        db_ui = RhodeCodeUi.query()\
            .options(FromCache("sql_cache_short", "repository_repo_ui")).all()

        for ui_ in db_ui:
            if ui_.ui_active:
                log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)

        return baseui

    @classmethod
    def is_valid(cls, repo_name):
        """
        returns True if given repo name is a valid filesystem repository

        :param cls:
        :param repo_name:
        """
        from rhodecode.lib.utils import is_valid_repo
        return is_valid_repo(repo_name, cls.base_path())

    #==========================================================================
    # SCM PROPERTIES
    #==========================================================================

    def get_changeset(self, rev):
        """Safely resolve *rev* to a changeset of the backing scm instance."""
        return get_changeset_safe(self.scm_instance, rev)

    @property
    def tip(self):
        """Tip changeset of the repository."""
        return self.get_changeset('tip')

    @property
    def author(self):
        """Author of the tip changeset."""
        return self.tip.author

    @property
    def last_change(self):
        """Timestamp of the most recent change, as reported by the backend."""
        return self.scm_instance.last_change

    def comments(self, revisions=None):
        """
        Returns comments for this repository grouped by revisions

        :param revisions: filter query by revisions only
        """
        query = ChangesetComment.query()\
            .filter(ChangesetComment.repo == self)
        if revisions:
            query = query.filter(ChangesetComment.revision.in_(revisions))
        grouped = defaultdict(list)
        for comment in query.all():
            grouped[comment.revision].append(comment)
        return grouped

    #==========================================================================
    # SCM CACHE INSTANCE
    #==========================================================================

    @property
    def invalidate(self):
        """Pending CacheInvalidation row for this repo, or None."""
        return CacheInvalidation.invalidate(self.repo_name)

    def set_invalidate(self):
        """
        set a cache for invalidation for this instance
        """
        CacheInvalidation.set_invalidate(self.repo_name)

    @LazyProperty
    def scm_instance(self):
        """Lazily-built (per object lifetime) vcs backend instance."""
        return self.__get_instance()

    @property
    def scm_instance_cached(self):
        """Backend instance served from the long-term beaker cache region."""
        @cache_region('long_term')
        def _c(repo_name):
            return self.__get_instance()
        rn = self.repo_name
        log.debug('Getting cached instance of repo')
        inv = self.invalidate
        if inv is not None:
            # a pending invalidation exists: drop the cached entry and mark
            # the invalidation row as handled before re-reading
            region_invalidate(_c, None, rn)
            CacheInvalidation.set_valid(inv.cache_key)
        return _c(rn)

    def __get_instance(self):
        """Instantiate the vcs backend for this repo; None when unreachable."""
        repo_full_path = self.repo_full_path
        try:
            alias = get_scm(repo_full_path)[0]
            log.debug('Creating instance of %s repository' % alias)
            backend = get_backend(alias)
        except VCSError:
            log.error(traceback.format_exc())
            log.error('Perhaps this repository is in db and not in '
                      'filesystem run rescan repositories with '
                      '"destroy old data " option from admin panel')
            return

        if alias == 'hg':
            repo = backend(safe_str(repo_full_path), create=False,
                           baseui=self._ui)
            # skip hidden web repository
            if repo._get_hidden():
                return
        else:
            repo = backend(repo_full_path, create=False)

        return repo
|
696 | 706 | |
|
697 | 707 | |
|
class RepoGroup(Base, BaseModel):
    """Hierarchical group of repositories (stored in the 'groups' table)."""

    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        CheckConstraint('group_id != group_parent_id'),
        {'extend_existing': True},
    )
    __mapper_args__ = {'order_by': 'group_name'}

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    group_name = Column("group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UsersGroupRepoGroupToPerm', cascade='all')

    parent_group = relationship('RepoGroup', remote_side=group_id)

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __repr__(self):
        return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
                                  self.group_name)

    @classmethod
    def groups_choices(cls):
        """(group_id, pretty full path) choices for a select box, blank first."""
        from webhelpers.html import literal as _literal
        sep = ' » '

        def _name(parts):
            return _literal(sep.join(parts))

        choices = [('', '')]
        choices.extend((gr.group_id, _name(gr.full_path_splitted))
                       for gr in cls.query().all())
        # order by the top-level segment of the displayed path
        return sorted(choices, key=lambda t: t[1].split(sep)[0])

    @classmethod
    def url_sep(cls):
        """Separator used inside stored group names."""
        return '/'

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Fetch a repo group by full name, or None.

        :param group_name: group name to search for
        :param case_insensitive: when True, match with ILIKE
        :param cache: when True, serve the row from the short-lived SQL cache
        """
        if case_insensitive:
            gr = cls.query()\
                .filter(cls.group_name.ilike(group_name))
        else:
            gr = cls.query()\
                .filter(cls.group_name == group_name)
        if cache:
            gr = gr.options(FromCache(
                "sql_cache_short",
                "get_group_%s" % _hash_key(group_name)
            ))
        return gr.scalar()

    @property
    def parents(self):
        """Ancestor groups, outermost first; traversal capped at depth 5."""
        parents_recursion_limit = 5
        ancestors = []
        if self.parent_group is None:
            return ancestors
        current = self.parent_group
        ancestors.insert(0, current)
        steps = 0
        while 1:
            steps += 1
            nxt = getattr(current, 'parent_group', None)
            current = current.parent_group
            if nxt is None:
                break
            if steps == parents_recursion_limit:
                # this will prevent accidental infinit loops
                log.error('group nested more than %s' %
                          parents_recursion_limit)
                break
            ancestors.insert(0, nxt)
        return ancestors

    @property
    def children(self):
        """Query of groups whose direct parent is this group."""
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        """Last path segment of the group name."""
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        """Full stored group name (slash-separated)."""
        return self.group_name

    @property
    def full_path_splitted(self):
        """Full group name split into its path segments."""
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        """Query of repositories placed directly in this group."""
        return Repository.query().filter(Repository.group == self)

    @property
    def repositories_recursive_count(self):
        """Number of repositories in this group and all its descendants."""
        def _count(group):
            total = group.repositories.count()
            for child in group.children:
                total += _count(child)
            return total
        return _count(self)

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name:
        """
        prefix = (self.parent_group.full_path_splitted if
                  self.parent_group else [])
        return RepoGroup.url_sep().join(prefix + [group_name])
|
821 | 834 | |
|
822 | 835 | |
|
class Permission(Base, BaseModel):
    """A named permission plus helpers to read the default user's grants."""

    __tablename__ = 'permissions'
    __table_args__ = {'extend_existing': True}
    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)

    def __repr__(self):
        return "<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        """Fetch a permission by its short name, or None."""
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_perms(cls, default_user_id):
        """All (UserRepoToPerm, Repository, Permission) rows of the default user."""
        query = Session.query(UserRepoToPerm, Repository, cls)\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .join((cls, UserRepoToPerm.permission_id == cls.permission_id))\
            .filter(UserRepoToPerm.user_id == default_user_id)

        return query.all()

    @classmethod
    def get_default_group_perms(cls, default_user_id):
        """All (UserRepoGroupToPerm, RepoGroup, Permission) rows of the default user."""
        query = Session.query(UserRepoGroupToPerm, RepoGroup, cls)\
            .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
            .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id))\
            .filter(UserRepoGroupToPerm.user_id == default_user_id)

        return query.all()
|
856 | 869 | |
|
857 | 870 | |
|
class UserRepoToPerm(Base, BaseModel):
    """Grant of one permission to one user on one repository."""

    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        {'extend_existing': True}
    )
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository, permission):
        """Build a new grant, add it to the session (not committed), return it."""
        grant = cls()
        grant.user = user
        grant.repository = repository
        grant.permission = permission
        Session.add(grant)
        return grant

    def __repr__(self):
        return '<user:%s => %s >' % (self.user, self.repository)
|
884 | 897 | |
|
885 | 898 | |
|
class UserToPerm(Base, BaseModel):
    """Grant of one global permission to one user."""

    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        {'extend_existing': True}
    )
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission', lazy='joined')
|
898 | 911 | |
|
899 | 912 | |
|
class UsersGroupRepoToPerm(Base, BaseModel):
    """Grant of one permission to one users group on one repository."""

    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        {'extend_existing': True}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UsersGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Build a new grant, add it to the session (not committed), return it."""
        grant = cls()
        grant.users_group = users_group
        grant.repository = repository
        grant.permission = permission
        Session.add(grant)
        return grant

    def __repr__(self):
        return '<userGroup:%s => %s >' % (self.users_group, self.repository)
|
926 | 939 | |
|
927 | 940 | |
|
class UsersGroupToPerm(Base, BaseModel):
    """Grant of one global permission to one users group."""

    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        {'extend_existing': True}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UsersGroup')
    permission = relationship('Permission')
|
940 | 953 | |
|
941 | 954 | |
|
class UserRepoGroupToPerm(Base, BaseModel):
    """Grant of one permission to one user on one repository group."""

    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        {'extend_existing': True}
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')
|
957 | 970 | |
|
958 | 971 | |
|
class UsersGroupRepoGroupToPerm(Base, BaseModel):
    """Grant of one permission to one users group on one repository group."""

    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        {'extend_existing': True}
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UsersGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')
|
974 | 987 | |
|
975 | 988 | |
|
class Statistics(Base, BaseModel):
    """Per-repository commit/language statistics blobs (JSON payloads)."""

    __tablename__ = 'statistics'
    __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing': True})
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # revision the statistics were last computed against
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)
|
987 | 1000 | |
|
988 | 1001 | |
|
class UserFollowing(Base, BaseModel):
    """A user following either another user or a repository."""

    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        {'extend_existing': True}
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Query of following rows that target the given repository."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
|
1011 | 1024 | |
|
1012 | 1025 | |
|
1013 | 1026 | class CacheInvalidation(Base, BaseModel): |
|
1014 | 1027 | __tablename__ = 'cache_invalidation' |
|
1015 | 1028 | __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing': True}) |
|
1016 | 1029 | cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1017 | 1030 | cache_key = Column("cache_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
1018 | 1031 | cache_args = Column("cache_args", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
1019 | 1032 | cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) |
|
1020 | 1033 | |
|
1021 | 1034 | def __init__(self, cache_key, cache_args=''): |
|
1022 | 1035 | self.cache_key = cache_key |
|
1023 | 1036 | self.cache_args = cache_args |
|
1024 | 1037 | self.cache_active = False |
|
1025 | 1038 | |
|
1026 | 1039 | def __repr__(self): |
|
1027 | 1040 | return "<%s('%s:%s')>" % (self.__class__.__name__, |
|
1028 | 1041 | self.cache_id, self.cache_key) |
|
1029 | 1042 | |
|
1030 | 1043 | @classmethod |
|
1031 | 1044 | def _get_key(cls, key): |
|
1032 | 1045 | """ |
|
1033 | 1046 | Wrapper for generating a key |
|
1034 | 1047 | |
|
1035 | 1048 | :param key: |
|
1036 | 1049 | """ |
|
1037 | 1050 | import rhodecode |
|
1038 | 1051 | prefix = '' |
|
1039 | 1052 | iid = rhodecode.CONFIG.get('instance_id') |
|
1040 | 1053 | if iid: |
|
1041 | 1054 |
prefix = iid |
|
1042 | 1055 | return "%s%s" % (prefix, key) |
|
1043 | 1056 | |
|
1044 | 1057 | @classmethod |
|
1045 | 1058 | def get_by_key(cls, key): |
|
1046 | 1059 | return cls.query().filter(cls.cache_key == key).scalar() |
|
1047 | 1060 | |
|
1048 | 1061 | @classmethod |
|
1049 | 1062 | def invalidate(cls, key): |
|
1050 | 1063 | """ |
|
1051 | 1064 | Returns Invalidation object if this given key should be invalidated |
|
1052 | 1065 | None otherwise. `cache_active = False` means that this cache |
|
1053 | 1066 | state is not valid and needs to be invalidated |
|
1054 | 1067 | |
|
1055 | 1068 | :param key: |
|
1056 | 1069 | """ |
|
1057 | 1070 | return cls.query()\ |
|
1058 | 1071 | .filter(CacheInvalidation.cache_key == key)\ |
|
1059 | 1072 | .filter(CacheInvalidation.cache_active == False)\ |
|
1060 | 1073 | .scalar() |
|
1061 | 1074 | |
|
1062 | 1075 | @classmethod |
|
1063 | 1076 | def set_invalidate(cls, key): |
|
1064 | 1077 | """ |
|
1065 | 1078 | Mark this Cache key for invalidation |
|
1066 | 1079 | |
|
1067 | 1080 | :param key: |
|
1068 | 1081 | """ |
|
1069 | 1082 | |
|
1070 | 1083 | log.debug('marking %s for invalidation' % key) |
|
1071 | 1084 | inv_obj = Session.query(cls)\ |
|
1072 | 1085 | .filter(cls.cache_key == key).scalar() |
|
1073 | 1086 | if inv_obj: |
|
1074 | 1087 | inv_obj.cache_active = False |
|
1075 | 1088 | else: |
|
1076 | 1089 | log.debug('cache key not found in invalidation db -> creating one') |
|
1077 | 1090 | inv_obj = CacheInvalidation(key) |
|
1078 | 1091 | |
|
1079 | 1092 | try: |
|
1080 | 1093 | Session.add(inv_obj) |
|
1081 | 1094 | Session.commit() |
|
1082 | 1095 | except Exception: |
|
1083 | 1096 | log.error(traceback.format_exc()) |
|
1084 | 1097 | Session.rollback() |
|
1085 | 1098 | |
|
1086 | 1099 | @classmethod |
|
1087 | 1100 | def set_valid(cls, key): |
|
1088 | 1101 | """ |
|
1089 | 1102 | Mark this cache key as active and currently cached |
|
1090 | 1103 | |
|
1091 | 1104 | :param key: |
|
1092 | 1105 | """ |
|
1093 | 1106 | inv_obj = cls.get_by_key(key) |
|
1094 | 1107 | inv_obj.cache_active = True |
|
1095 | 1108 | Session.add(inv_obj) |
|
1096 | 1109 | Session.commit() |
|
1097 | 1110 | |
|
1098 | 1111 | |
|
1099 | 1112 | class ChangesetComment(Base, BaseModel): |
|
1100 | 1113 | __tablename__ = 'changeset_comments' |
|
1101 | 1114 | __table_args__ = ({'extend_existing': True},) |
|
1102 | 1115 | comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True) |
|
1103 | 1116 | repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
1104 | 1117 | revision = Column('revision', String(40), nullable=False) |
|
1105 | 1118 | line_no = Column('line_no', Unicode(10), nullable=True) |
|
1106 | 1119 | f_path = Column('f_path', Unicode(1000), nullable=True) |
|
1107 | 1120 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False) |
|
1108 | 1121 | text = Column('text', Unicode(25000), nullable=False) |
|
1109 | 1122 | modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now) |
|
1110 | 1123 | |
|
1111 | 1124 | author = relationship('User', lazy='joined') |
|
1112 | 1125 | repo = relationship('Repository') |
|
1113 | 1126 | |
|
1114 | 1127 | @classmethod |
|
1115 | 1128 | def get_users(cls, revision): |
|
1116 | 1129 | """ |
|
1117 | 1130 | Returns user associated with this changesetComment. ie those |
|
1118 | 1131 | who actually commented |
|
1119 | 1132 | |
|
1120 | 1133 | :param cls: |
|
1121 | 1134 | :param revision: |
|
1122 | 1135 | """ |
|
1123 | 1136 | return Session.query(User)\ |
|
1124 | 1137 | .filter(cls.revision == revision)\ |
|
1125 | 1138 | .join(ChangesetComment.author).all() |
|
1126 | 1139 | |
|
1127 | 1140 | |
|
1128 | 1141 | class Notification(Base, BaseModel): |
|
1129 | 1142 | __tablename__ = 'notifications' |
|
1130 | 1143 | __table_args__ = ({'extend_existing': True},) |
|
1131 | 1144 | |
|
1132 | 1145 | TYPE_CHANGESET_COMMENT = u'cs_comment' |
|
1133 | 1146 | TYPE_MESSAGE = u'message' |
|
1134 | 1147 | TYPE_MENTION = u'mention' |
|
1135 | 1148 | TYPE_REGISTRATION = u'registration' |
|
1136 | 1149 | |
|
1137 | 1150 | notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True) |
|
1138 | 1151 | subject = Column('subject', Unicode(512), nullable=True) |
|
1139 | 1152 | body = Column('body', Unicode(50000), nullable=True) |
|
1140 | 1153 | created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True) |
|
1141 | 1154 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1142 | 1155 | type_ = Column('type', Unicode(256)) |
|
1143 | 1156 | |
|
1144 | 1157 | created_by_user = relationship('User') |
|
1145 | 1158 | notifications_to_users = relationship('UserNotification', lazy='joined', |
|
1146 | 1159 | cascade="all, delete, delete-orphan") |
|
1147 | 1160 | |
|
1148 | 1161 | @property |
|
1149 | 1162 | def recipients(self): |
|
1150 | 1163 | return [x.user for x in UserNotification.query()\ |
|
1151 | 1164 | .filter(UserNotification.notification == self).all()] |
|
1152 | 1165 | |
|
1153 | 1166 | @classmethod |
|
1154 | 1167 | def create(cls, created_by, subject, body, recipients, type_=None): |
|
1155 | 1168 | if type_ is None: |
|
1156 | 1169 | type_ = Notification.TYPE_MESSAGE |
|
1157 | 1170 | |
|
1158 | 1171 | notification = cls() |
|
1159 | 1172 | notification.created_by_user = created_by |
|
1160 | 1173 | notification.subject = subject |
|
1161 | 1174 | notification.body = body |
|
1162 | 1175 | notification.type_ = type_ |
|
1163 | 1176 | notification.created_on = datetime.datetime.now() |
|
1164 | 1177 | |
|
1165 | 1178 | for u in recipients: |
|
1166 | 1179 | assoc = UserNotification() |
|
1167 | 1180 | assoc.notification = notification |
|
1168 | 1181 | u.notifications.append(assoc) |
|
1169 | 1182 | Session.add(notification) |
|
1170 | 1183 | return notification |
|
1171 | 1184 | |
|
1172 | 1185 | @property |
|
1173 | 1186 | def description(self): |
|
1174 | 1187 | from rhodecode.model.notification import NotificationModel |
|
1175 | 1188 | return NotificationModel().make_description(self) |
|
1176 | 1189 | |
|
1177 | 1190 | |
|
1178 | 1191 | class UserNotification(Base, BaseModel): |
|
1179 | 1192 | __tablename__ = 'user_to_notification' |
|
1180 | 1193 | __table_args__ = ( |
|
1181 | 1194 | UniqueConstraint('user_id', 'notification_id'), |
|
1182 | 1195 | {'extend_existing': True} |
|
1183 | 1196 | ) |
|
1184 | 1197 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True) |
|
1185 | 1198 | notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True) |
|
1186 | 1199 | read = Column('read', Boolean, default=False) |
|
1187 | 1200 | sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None) |
|
1188 | 1201 | |
|
1189 | 1202 | user = relationship('User', lazy="joined") |
|
1190 | 1203 | notification = relationship('Notification', lazy="joined", |
|
1191 | 1204 | order_by=lambda: Notification.created_on.desc(),) |
|
1192 | 1205 | |
|
1193 | 1206 | def mark_as_read(self): |
|
1194 | 1207 | self.read = True |
|
1195 | 1208 | Session.add(self) |
|
1196 | 1209 | |
|
1197 | 1210 | |
|
1198 | 1211 | class DbMigrateVersion(Base, BaseModel): |
|
1199 | 1212 | __tablename__ = 'db_migrate_version' |
|
1200 | 1213 | __table_args__ = {'extend_existing': True} |
|
1201 | 1214 | repository_id = Column('repository_id', String(250), primary_key=True) |
|
1202 | 1215 | repository_path = Column('repository_path', Text) |
|
1203 | 1216 | version = Column('version', Integer) |
@@ -1,758 +1,773 b'' | |||
|
1 | 1 | """ this is forms validation classes |
|
2 | 2 | http://formencode.org/module-formencode.validators.html |
|
3 | 3 | for list off all availible validators |
|
4 | 4 | |
|
5 | 5 | we can create our own validators |
|
6 | 6 | |
|
7 | 7 | The table below outlines the options which can be used in a schema in addition to the validators themselves |
|
8 | 8 | pre_validators [] These validators will be applied before the schema |
|
9 | 9 | chained_validators [] These validators will be applied after the schema |
|
10 | 10 | allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present |
|
11 | 11 | filter_extra_fields False If True, then keys that aren't associated with a validator are removed |
|
12 | 12 | if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
|
13 | 13 | ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already |
|
14 | 14 | |
|
15 | 15 | |
|
16 | 16 | <name> = formencode.validators.<name of validator> |
|
17 | 17 | <name> must equal form name |
|
18 | 18 | list=[1,2,3,4,5] |
|
19 | 19 | for SELECT use formencode.All(OneOf(list), Int()) |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | import os |
|
23 | 23 | import re |
|
24 | 24 | import logging |
|
25 | 25 | import traceback |
|
26 | 26 | |
|
27 | 27 | import formencode |
|
28 | 28 | from formencode import All |
|
29 | 29 | from formencode.validators import UnicodeString, OneOf, Int, Number, Regex, \ |
|
30 | 30 | Email, Bool, StringBoolean, Set |
|
31 | 31 | |
|
32 | 32 | from pylons.i18n.translation import _ |
|
33 | 33 | from webhelpers.pylonslib.secure_form import authentication_token |
|
34 | 34 | |
|
35 | 35 | from rhodecode.config.routing import ADMIN_PREFIX |
|
36 | 36 | from rhodecode.lib.utils import repo_name_slug |
|
37 | 37 | from rhodecode.lib.auth import authenticate, get_crypt_password |
|
38 | 38 | from rhodecode.lib.exceptions import LdapImportError |
|
39 | 39 | from rhodecode.model.db import User, UsersGroup, RepoGroup, Repository |
|
40 | 40 | from rhodecode import BACKENDS |
|
41 | 41 | |
|
42 | 42 | log = logging.getLogger(__name__) |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | #this is needed to translate the messages using _() in validators |
|
46 | 46 | class State_obj(object): |
|
47 | 47 | _ = staticmethod(_) |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | #============================================================================== |
|
51 | 51 | # VALIDATORS |
|
52 | 52 | #============================================================================== |
|
53 | 53 | class ValidAuthToken(formencode.validators.FancyValidator): |
|
54 | 54 | messages = {'invalid_token': _('Token mismatch')} |
|
55 | 55 | |
|
56 | 56 | def validate_python(self, value, state): |
|
57 | 57 | |
|
58 | 58 | if value != authentication_token(): |
|
59 | 59 | raise formencode.Invalid( |
|
60 | 60 | self.message('invalid_token', |
|
61 | 61 | state, search_number=value), |
|
62 | 62 | value, |
|
63 | 63 | state |
|
64 | 64 | ) |
|
65 | 65 | |
|
66 | 66 | |
|
67 | 67 | def ValidUsername(edit, old_data): |
|
68 | 68 | class _ValidUsername(formencode.validators.FancyValidator): |
|
69 | 69 | |
|
70 | 70 | def validate_python(self, value, state): |
|
71 | 71 | if value in ['default', 'new_user']: |
|
72 | 72 | raise formencode.Invalid(_('Invalid username'), value, state) |
|
73 | 73 | #check if user is unique |
|
74 | 74 | old_un = None |
|
75 | 75 | if edit: |
|
76 | 76 | old_un = User.get(old_data.get('user_id')).username |
|
77 | 77 | |
|
78 | 78 | if old_un != value or not edit: |
|
79 | 79 | if User.get_by_username(value, case_insensitive=True): |
|
80 | 80 | raise formencode.Invalid(_('This username already ' |
|
81 | 81 | 'exists') , value, state) |
|
82 | 82 | |
|
83 | 83 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None: |
|
84 | 84 | raise formencode.Invalid( |
|
85 | 85 | _('Username may only contain alphanumeric characters ' |
|
86 | 86 | 'underscores, periods or dashes and must begin with ' |
|
87 | 87 | 'alphanumeric character'), |
|
88 | 88 | value, |
|
89 | 89 | state |
|
90 | 90 | ) |
|
91 | 91 | |
|
92 | 92 | return _ValidUsername |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | def ValidUsersGroup(edit, old_data): |
|
96 | 96 | |
|
97 | 97 | class _ValidUsersGroup(formencode.validators.FancyValidator): |
|
98 | 98 | |
|
99 | 99 | def validate_python(self, value, state): |
|
100 | 100 | if value in ['default']: |
|
101 | 101 | raise formencode.Invalid(_('Invalid group name'), value, state) |
|
102 | 102 | #check if group is unique |
|
103 | 103 | old_ugname = None |
|
104 | 104 | if edit: |
|
105 | 105 | old_ugname = UsersGroup.get( |
|
106 | 106 | old_data.get('users_group_id')).users_group_name |
|
107 | 107 | |
|
108 | 108 | if old_ugname != value or not edit: |
|
109 | 109 | if UsersGroup.get_by_group_name(value, cache=False, |
|
110 | 110 | case_insensitive=True): |
|
111 | 111 | raise formencode.Invalid(_('This users group ' |
|
112 | 112 | 'already exists'), value, |
|
113 | 113 | state) |
|
114 | 114 | |
|
115 | 115 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None: |
|
116 | 116 | raise formencode.Invalid( |
|
117 | 117 | _('RepoGroup name may only contain alphanumeric characters ' |
|
118 | 118 | 'underscores, periods or dashes and must begin with ' |
|
119 | 119 | 'alphanumeric character'), |
|
120 | 120 | value, |
|
121 | 121 | state |
|
122 | 122 | ) |
|
123 | 123 | |
|
124 | 124 | return _ValidUsersGroup |
|
125 | 125 | |
|
126 | 126 | |
|
127 | 127 | def ValidReposGroup(edit, old_data): |
|
128 | 128 | class _ValidReposGroup(formencode.validators.FancyValidator): |
|
129 | 129 | |
|
130 | 130 | def validate_python(self, value, state): |
|
131 | 131 | # TODO WRITE VALIDATIONS |
|
132 | 132 | group_name = value.get('group_name') |
|
133 | 133 | group_parent_id = value.get('group_parent_id') |
|
134 | 134 | |
|
135 | 135 | # slugify repo group just in case :) |
|
136 | 136 | slug = repo_name_slug(group_name) |
|
137 | 137 | |
|
138 | 138 | # check for parent of self |
|
139 | 139 | parent_of_self = lambda: ( |
|
140 | 140 | old_data['group_id'] == int(group_parent_id) |
|
141 | 141 | if group_parent_id else False |
|
142 | 142 | ) |
|
143 | 143 | if edit and parent_of_self(): |
|
144 | 144 | e_dict = { |
|
145 | 145 | 'group_parent_id': _('Cannot assign this group as parent') |
|
146 | 146 | } |
|
147 | 147 | raise formencode.Invalid('', value, state, |
|
148 | 148 | error_dict=e_dict) |
|
149 | 149 | |
|
150 | 150 | old_gname = None |
|
151 | 151 | if edit: |
|
152 | 152 | old_gname = RepoGroup.get(old_data.get('group_id')).group_name |
|
153 | 153 | |
|
154 | 154 | if old_gname != group_name or not edit: |
|
155 | 155 | |
|
156 | 156 | # check group |
|
157 | 157 | gr = RepoGroup.query()\ |
|
158 | 158 | .filter(RepoGroup.group_name == slug)\ |
|
159 | 159 | .filter(RepoGroup.group_parent_id == group_parent_id)\ |
|
160 | 160 | .scalar() |
|
161 | 161 | |
|
162 | 162 | if gr: |
|
163 | 163 | e_dict = { |
|
164 | 164 | 'group_name': _('This group already exists') |
|
165 | 165 | } |
|
166 | 166 | raise formencode.Invalid('', value, state, |
|
167 | 167 | error_dict=e_dict) |
|
168 | 168 | |
|
169 | 169 | # check for same repo |
|
170 | 170 | repo = Repository.query()\ |
|
171 | 171 | .filter(Repository.repo_name == slug)\ |
|
172 | 172 | .scalar() |
|
173 | 173 | |
|
174 | 174 | if repo: |
|
175 | 175 | e_dict = { |
|
176 | 176 | 'group_name': _('Repository with this name already exists') |
|
177 | 177 | } |
|
178 | 178 | raise formencode.Invalid('', value, state, |
|
179 | 179 | error_dict=e_dict) |
|
180 | 180 | |
|
181 | 181 | return _ValidReposGroup |
|
182 | 182 | |
|
183 | 183 | |
|
184 | 184 | class ValidPassword(formencode.validators.FancyValidator): |
|
185 | 185 | |
|
186 | 186 | def to_python(self, value, state): |
|
187 | 187 | |
|
188 | 188 | if not value: |
|
189 | 189 | return |
|
190 | 190 | |
|
191 | 191 | if value.get('password'): |
|
192 | 192 | try: |
|
193 | 193 | value['password'] = get_crypt_password(value['password']) |
|
194 | 194 | except UnicodeEncodeError: |
|
195 | 195 | e_dict = {'password': _('Invalid characters in password')} |
|
196 | 196 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
197 | 197 | |
|
198 | 198 | if value.get('password_confirmation'): |
|
199 | 199 | try: |
|
200 | 200 | value['password_confirmation'] = \ |
|
201 | 201 | get_crypt_password(value['password_confirmation']) |
|
202 | 202 | except UnicodeEncodeError: |
|
203 | 203 | e_dict = { |
|
204 | 204 | 'password_confirmation': _('Invalid characters in password') |
|
205 | 205 | } |
|
206 | 206 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
207 | 207 | |
|
208 | 208 | if value.get('new_password'): |
|
209 | 209 | try: |
|
210 | 210 | value['new_password'] = \ |
|
211 | 211 | get_crypt_password(value['new_password']) |
|
212 | 212 | except UnicodeEncodeError: |
|
213 | 213 | e_dict = {'new_password': _('Invalid characters in password')} |
|
214 | 214 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
215 | 215 | |
|
216 | 216 | return value |
|
217 | 217 | |
|
218 | 218 | |
|
219 | 219 | class ValidPasswordsMatch(formencode.validators.FancyValidator): |
|
220 | 220 | |
|
221 | 221 | def validate_python(self, value, state): |
|
222 | 222 | |
|
223 | 223 | pass_val = value.get('password') or value.get('new_password') |
|
224 | 224 | if pass_val != value['password_confirmation']: |
|
225 | 225 | e_dict = {'password_confirmation': |
|
226 | 226 | _('Passwords do not match')} |
|
227 | 227 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
228 | 228 | |
|
229 | 229 | |
|
230 | 230 | class ValidAuth(formencode.validators.FancyValidator): |
|
231 | 231 | messages = { |
|
232 | 232 | 'invalid_password':_('invalid password'), |
|
233 | 233 | 'invalid_login':_('invalid user name'), |
|
234 | 234 | 'disabled_account':_('Your account is disabled') |
|
235 | 235 | } |
|
236 | 236 | |
|
237 | 237 | # error mapping |
|
238 | 238 | e_dict = {'username': messages['invalid_login'], |
|
239 | 239 | 'password': messages['invalid_password']} |
|
240 | 240 | e_dict_disable = {'username': messages['disabled_account']} |
|
241 | 241 | |
|
242 | 242 | def validate_python(self, value, state): |
|
243 | 243 | password = value['password'] |
|
244 | 244 | username = value['username'] |
|
245 | 245 | user = User.get_by_username(username) |
|
246 | 246 | |
|
247 | 247 | if authenticate(username, password): |
|
248 | 248 | return value |
|
249 | 249 | else: |
|
250 | 250 | if user and user.active is False: |
|
251 | 251 | log.warning('user %s is disabled' % username) |
|
252 | 252 | raise formencode.Invalid( |
|
253 | 253 | self.message('disabled_account', |
|
254 | 254 | state=State_obj), |
|
255 | 255 | value, state, |
|
256 | 256 | error_dict=self.e_dict_disable |
|
257 | 257 | ) |
|
258 | 258 | else: |
|
259 | 259 | log.warning('user %s failed to authenticate' % username) |
|
260 | 260 | raise formencode.Invalid( |
|
261 | 261 | self.message('invalid_password', |
|
262 | 262 | state=State_obj), value, state, |
|
263 | 263 | error_dict=self.e_dict |
|
264 | 264 | ) |
|
265 | 265 | |
|
266 | 266 | |
|
267 | 267 | class ValidRepoUser(formencode.validators.FancyValidator): |
|
268 | 268 | |
|
269 | 269 | def to_python(self, value, state): |
|
270 | 270 | try: |
|
271 | 271 | User.query().filter(User.active == True)\ |
|
272 | 272 | .filter(User.username == value).one() |
|
273 | 273 | except Exception: |
|
274 | 274 | raise formencode.Invalid(_('This username is not valid'), |
|
275 | 275 | value, state) |
|
276 | 276 | return value |
|
277 | 277 | |
|
278 | 278 | |
|
279 | 279 | def ValidRepoName(edit, old_data): |
|
280 | 280 | class _ValidRepoName(formencode.validators.FancyValidator): |
|
281 | 281 | def to_python(self, value, state): |
|
282 | 282 | |
|
283 | 283 | repo_name = value.get('repo_name') |
|
284 | 284 | |
|
285 | 285 | slug = repo_name_slug(repo_name) |
|
286 | 286 | if slug in [ADMIN_PREFIX, '']: |
|
287 | 287 | e_dict = {'repo_name': _('This repository name is disallowed')} |
|
288 | 288 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
289 | 289 | |
|
290 | 290 | if value.get('repo_group'): |
|
291 | 291 | gr = RepoGroup.get(value.get('repo_group')) |
|
292 | 292 | group_path = gr.full_path |
|
293 | 293 | # value needs to be aware of group name in order to check |
|
294 | 294 | # db key. This is actually just the name to store in the
|
295 | 295 | # database |
|
296 | 296 | repo_name_full = group_path + RepoGroup.url_sep() + repo_name |
|
297 | 297 | |
|
298 | 298 | else: |
|
299 | 299 | group_path = '' |
|
300 | 300 | repo_name_full = repo_name |
|
301 | 301 | |
|
302 | 302 | value['repo_name_full'] = repo_name_full |
|
303 | 303 | rename = old_data.get('repo_name') != repo_name_full |
|
304 | 304 | create = not edit |
|
305 | 305 | if rename or create: |
|
306 | 306 | |
|
307 | 307 | if group_path != '': |
|
308 | 308 | if Repository.get_by_repo_name(repo_name_full): |
|
309 | 309 | e_dict = { |
|
310 | 310 | 'repo_name': _('This repository already exists in ' |
|
311 | 311 | 'a group "%s"') % gr.group_name |
|
312 | 312 | } |
|
313 | 313 | raise formencode.Invalid('', value, state, |
|
314 | 314 | error_dict=e_dict) |
|
315 | 315 | elif RepoGroup.get_by_group_name(repo_name_full): |
|
316 | 316 | e_dict = { |
|
317 | 317 | 'repo_name': _('There is a group with this name ' |
|
318 | 318 | 'already "%s"') % repo_name_full |
|
319 | 319 | } |
|
320 | 320 | raise formencode.Invalid('', value, state, |
|
321 | 321 | error_dict=e_dict) |
|
322 | 322 | |
|
323 | 323 | elif Repository.get_by_repo_name(repo_name_full): |
|
324 | 324 | e_dict = {'repo_name': _('This repository ' |
|
325 | 325 | 'already exists')} |
|
326 | 326 | raise formencode.Invalid('', value, state, |
|
327 | 327 | error_dict=e_dict) |
|
328 | 328 | |
|
329 | 329 | return value |
|
330 | 330 | |
|
331 | 331 | return _ValidRepoName |
|
332 | 332 | |
|
333 | 333 | |
|
334 | 334 | def ValidForkName(*args, **kwargs): |
|
335 | 335 | return ValidRepoName(*args, **kwargs) |
|
336 | 336 | |
|
337 | 337 | |
|
338 | 338 | def SlugifyName(): |
|
339 | 339 | class _SlugifyName(formencode.validators.FancyValidator): |
|
340 | 340 | |
|
341 | 341 | def to_python(self, value, state): |
|
342 | 342 | return repo_name_slug(value) |
|
343 | 343 | |
|
344 | 344 | return _SlugifyName |
|
345 | 345 | |
|
346 | 346 | |
|
347 | 347 | def ValidCloneUri(): |
|
348 | from rhodecode.lib.utils import make_ui | |
|
349 | ||
|
350 | def url_handler(repo_type, url, proto, ui=None): | |
|
351 | if repo_type == 'hg': | |
|
348 | 352 | from mercurial.httprepo import httprepository, httpsrepository |
|
349 | from rhodecode.lib.utils import make_ui | |
|
353 | if proto == 'https': | |
|
354 | httpsrepository(make_ui('db'), url).capabilities | |
|
355 | elif proto == 'http': | |
|
356 | httprepository(make_ui('db'), url).capabilities | |
|
357 | elif repo_type == 'git': | |
|
358 | #TODO: write a git url validator | |
|
359 | pass | |
|
350 | 360 | |
|
351 | 361 | class _ValidCloneUri(formencode.validators.FancyValidator): |
|
352 | 362 | |
|
353 | 363 | def to_python(self, value, state): |
|
354 | if not value: | |
|
364 | ||
|
365 | repo_type = value.get('repo_type') | |
|
366 | url = value.get('clone_uri') | |
|
367 | e_dict = {'clone_uri': _('invalid clone url')} | |
|
368 | ||
|
369 | if not url: | |
|
355 | 370 | pass |
|
356 |
elif |
|
|
371 | elif url.startswith('https'): | |
|
357 | 372 | try: |
|
358 | httpsrepository(make_ui('db'), value).capabilities | |
|
373 | url_handler(repo_type, url, 'https', make_ui('db')) | |
|
359 | 374 | except Exception: |
|
360 | 375 | log.error(traceback.format_exc()) |
|
361 |
raise formencode.Invalid( |
|
|
362 | state) | |
|
363 | elif value.startswith('http'): | |
|
376 | raise formencode.Invalid('', value, state, error_dict=e_dict) | |
|
377 | elif url.startswith('http'): | |
|
364 | 378 | try: |
|
365 | httprepository(make_ui('db'), value).capabilities | |
|
379 | url_handler(repo_type, url, 'http', make_ui('db')) | |
|
366 | 380 | except Exception: |
|
367 | 381 | log.error(traceback.format_exc()) |
|
368 |
raise formencode.Invalid( |
|
|
369 | state) | |
|
382 | raise formencode.Invalid('', value, state, error_dict=e_dict) | |
|
370 | 383 | else: |
|
371 |
|
|
|
372 |
|
|
|
373 | state) | |
|
384 | e_dict = {'clone_uri': _('Invalid clone url, provide a ' | |
|
385 | 'valid clone http\s url')} | |
|
386 | raise formencode.Invalid('', value, state, error_dict=e_dict) | |
|
387 | ||
|
374 | 388 | return value |
|
375 | 389 | |
|
376 | 390 | return _ValidCloneUri |
|
377 | 391 | |
|
378 | 392 | |
|
379 | 393 | def ValidForkType(old_data): |
|
380 | 394 | class _ValidForkType(formencode.validators.FancyValidator): |
|
381 | 395 | |
|
382 | 396 | def to_python(self, value, state): |
|
383 | 397 | if old_data['repo_type'] != value: |
|
384 | 398 | raise formencode.Invalid(_('Fork have to be the same ' |
|
385 | 399 | 'type as original'), value, state) |
|
386 | 400 | |
|
387 | 401 | return value |
|
388 | 402 | return _ValidForkType |
|
389 | 403 | |
|
390 | 404 | |
|
391 | 405 | def ValidPerms(type_='repo'): |
|
392 | 406 | if type_ == 'group': |
|
393 | 407 | EMPTY_PERM = 'group.none' |
|
394 | 408 | elif type_ == 'repo': |
|
395 | 409 | EMPTY_PERM = 'repository.none' |
|
396 | 410 | |
|
397 | 411 | class _ValidPerms(formencode.validators.FancyValidator): |
|
398 | 412 | messages = { |
|
399 | 413 | 'perm_new_member_name': |
|
400 | 414 | _('This username or users group name is not valid') |
|
401 | 415 | } |
|
402 | 416 | |
|
403 | 417 | def to_python(self, value, state): |
|
404 | 418 | perms_update = [] |
|
405 | 419 | perms_new = [] |
|
406 | 420 | # build a list of permission to update and new permission to create |
|
407 | 421 | for k, v in value.items(): |
|
408 | 422 | # means new added member to permissions |
|
409 | 423 | if k.startswith('perm_new_member'): |
|
410 | 424 | new_perm = value.get('perm_new_member', False) |
|
411 | 425 | new_member = value.get('perm_new_member_name', False) |
|
412 | 426 | new_type = value.get('perm_new_member_type') |
|
413 | 427 | |
|
414 | 428 | if new_member and new_perm: |
|
415 | 429 | if (new_member, new_perm, new_type) not in perms_new: |
|
416 | 430 | perms_new.append((new_member, new_perm, new_type)) |
|
417 | 431 | elif k.startswith('u_perm_') or k.startswith('g_perm_'): |
|
418 | 432 | member = k[7:] |
|
419 | 433 | t = {'u': 'user', |
|
420 | 434 | 'g': 'users_group' |
|
421 | 435 | }[k[0]] |
|
422 | 436 | if member == 'default': |
|
423 | 437 | if value.get('private'): |
|
424 | 438 | # set none for default when updating to private repo |
|
425 | 439 | v = EMPTY_PERM |
|
426 | 440 | perms_update.append((member, v, t)) |
|
427 | 441 | |
|
428 | 442 | value['perms_updates'] = perms_update |
|
429 | 443 | value['perms_new'] = perms_new |
|
430 | 444 | |
|
431 | 445 | # update permissions |
|
432 | 446 | for k, v, t in perms_new: |
|
433 | 447 | try: |
|
434 | 448 | if t is 'user': |
|
435 | 449 | self.user_db = User.query()\ |
|
436 | 450 | .filter(User.active == True)\ |
|
437 | 451 | .filter(User.username == k).one() |
|
438 | 452 | if t is 'users_group': |
|
439 | 453 | self.user_db = UsersGroup.query()\ |
|
440 | 454 | .filter(UsersGroup.users_group_active == True)\ |
|
441 | 455 | .filter(UsersGroup.users_group_name == k).one() |
|
442 | 456 | |
|
443 | 457 | except Exception: |
|
444 | 458 | msg = self.message('perm_new_member_name', |
|
445 | 459 | state=State_obj) |
|
446 | 460 | raise formencode.Invalid( |
|
447 | 461 | msg, value, state, error_dict={'perm_new_member_name': msg} |
|
448 | 462 | ) |
|
449 | 463 | return value |
|
450 | 464 | return _ValidPerms |
|
451 | 465 | |
|
452 | 466 | |
|
453 | 467 | class ValidSettings(formencode.validators.FancyValidator): |
|
454 | 468 | |
|
455 | 469 | def to_python(self, value, state): |
|
456 | 470 | # settings form can't edit user |
|
457 | 471 | if 'user' in value: |
|
458 | 472 | del['value']['user'] |
|
459 | 473 | return value |
|
460 | 474 | |
|
461 | 475 | |
|
462 | 476 | class ValidPath(formencode.validators.FancyValidator): |
|
463 | 477 | def to_python(self, value, state): |
|
464 | 478 | |
|
465 | 479 | if not os.path.isdir(value): |
|
466 | 480 | msg = _('This is not a valid path') |
|
467 | 481 | raise formencode.Invalid(msg, value, state, |
|
468 | 482 | error_dict={'paths_root_path': msg}) |
|
469 | 483 | return value |
|
470 | 484 | |
|
471 | 485 | |
|
472 | 486 | def UniqSystemEmail(old_data): |
|
473 | 487 | class _UniqSystemEmail(formencode.validators.FancyValidator): |
|
474 | 488 | def to_python(self, value, state): |
|
475 | 489 | value = value.lower() |
|
476 | 490 | if old_data.get('email', '').lower() != value: |
|
477 | 491 | user = User.get_by_email(value, case_insensitive=True) |
|
478 | 492 | if user: |
|
479 | 493 | raise formencode.Invalid( |
|
480 | 494 | _("This e-mail address is already taken"), value, state |
|
481 | 495 | ) |
|
482 | 496 | return value |
|
483 | 497 | |
|
484 | 498 | return _UniqSystemEmail |
|
485 | 499 | |
|
486 | 500 | |
|
487 | 501 | class ValidSystemEmail(formencode.validators.FancyValidator): |
|
488 | 502 | def to_python(self, value, state): |
|
489 | 503 | value = value.lower() |
|
490 | 504 | user = User.get_by_email(value, case_insensitive=True) |
|
491 | 505 | if user is None: |
|
492 | 506 | raise formencode.Invalid( |
|
493 | 507 | _("This e-mail address doesn't exist."), value, state |
|
494 | 508 | ) |
|
495 | 509 | |
|
496 | 510 | return value |
|
497 | 511 | |
|
498 | 512 | |
|
499 | 513 | class LdapLibValidator(formencode.validators.FancyValidator): |
|
500 | 514 | |
|
501 | 515 | def to_python(self, value, state): |
|
502 | 516 | |
|
503 | 517 | try: |
|
504 | 518 | import ldap |
|
505 | 519 | except ImportError: |
|
506 | 520 | raise LdapImportError |
|
507 | 521 | return value |
|
508 | 522 | |
|
509 | 523 | |
|
510 | 524 | class AttrLoginValidator(formencode.validators.FancyValidator): |
|
511 | 525 | |
|
512 | 526 | def to_python(self, value, state): |
|
513 | 527 | |
|
514 | 528 | if not value or not isinstance(value, (str, unicode)): |
|
515 | 529 | raise formencode.Invalid( |
|
516 | 530 | _("The LDAP Login attribute of the CN must be specified - " |
|
517 | 531 | "this is the name of the attribute that is equivalent " |
|
518 | 532 | "to 'username'"), value, state |
|
519 | 533 | ) |
|
520 | 534 | |
|
521 | 535 | return value |
|
522 | 536 | |
|
523 | 537 | |
|
524 | 538 | #============================================================================== |
|
525 | 539 | # FORMS |
|
526 | 540 | #============================================================================== |
|
527 | 541 | class LoginForm(formencode.Schema): |
|
528 | 542 | allow_extra_fields = True |
|
529 | 543 | filter_extra_fields = True |
|
530 | 544 | username = UnicodeString( |
|
531 | 545 | strip=True, |
|
532 | 546 | min=1, |
|
533 | 547 | not_empty=True, |
|
534 | 548 | messages={ |
|
535 | 549 | 'empty': _('Please enter a login'), |
|
536 | 550 | 'tooShort': _('Enter a value %(min)i characters long or more')} |
|
537 | 551 | ) |
|
538 | 552 | |
|
539 | 553 | password = UnicodeString( |
|
540 | 554 | strip=True, |
|
541 | 555 | min=3, |
|
542 | 556 | not_empty=True, |
|
543 | 557 | messages={ |
|
544 | 558 | 'empty': _('Please enter a password'), |
|
545 | 559 | 'tooShort': _('Enter %(min)i characters or more')} |
|
546 | 560 | ) |
|
547 | 561 | |
|
548 | 562 | remember = StringBoolean(if_missing=False) |
|
549 | 563 | |
|
550 | 564 | chained_validators = [ValidAuth] |
|
551 | 565 | |
|
552 | 566 | |
|
553 | 567 | def UserForm(edit=False, old_data={}): |
|
554 | 568 | class _UserForm(formencode.Schema): |
|
555 | 569 | allow_extra_fields = True |
|
556 | 570 | filter_extra_fields = True |
|
557 | 571 | username = All(UnicodeString(strip=True, min=1, not_empty=True), |
|
558 | 572 | ValidUsername(edit, old_data)) |
|
559 | 573 | if edit: |
|
560 | 574 | new_password = All(UnicodeString(strip=True, min=6, not_empty=False)) |
|
561 | 575 | password_confirmation = All(UnicodeString(strip=True, min=6, |
|
562 | 576 | not_empty=False)) |
|
563 | 577 | admin = StringBoolean(if_missing=False) |
|
564 | 578 | else: |
|
565 | 579 | password = All(UnicodeString(strip=True, min=6, not_empty=True)) |
|
566 | 580 | password_confirmation = All(UnicodeString(strip=True, min=6, |
|
567 | 581 | not_empty=False)) |
|
568 | 582 | |
|
569 | 583 | active = StringBoolean(if_missing=False) |
|
570 | 584 | name = UnicodeString(strip=True, min=1, not_empty=False) |
|
571 | 585 | lastname = UnicodeString(strip=True, min=1, not_empty=False) |
|
572 | 586 | email = All(Email(not_empty=True), UniqSystemEmail(old_data)) |
|
573 | 587 | |
|
574 | 588 | chained_validators = [ValidPasswordsMatch, ValidPassword] |
|
575 | 589 | |
|
576 | 590 | return _UserForm |
|
577 | 591 | |
|
578 | 592 | |
|
579 | 593 | def UsersGroupForm(edit=False, old_data={}, available_members=[]): |
|
580 | 594 | class _UsersGroupForm(formencode.Schema): |
|
581 | 595 | allow_extra_fields = True |
|
582 | 596 | filter_extra_fields = True |
|
583 | 597 | |
|
584 | 598 | users_group_name = All(UnicodeString(strip=True, min=1, not_empty=True), |
|
585 | 599 | ValidUsersGroup(edit, old_data)) |
|
586 | 600 | |
|
587 | 601 | users_group_active = StringBoolean(if_missing=False) |
|
588 | 602 | |
|
589 | 603 | if edit: |
|
590 | 604 | users_group_members = OneOf(available_members, hideList=False, |
|
591 | 605 | testValueList=True, |
|
592 | 606 | if_missing=None, not_empty=False) |
|
593 | 607 | |
|
594 | 608 | return _UsersGroupForm |
|
595 | 609 | |
|
596 | 610 | |
|
597 | 611 | def ReposGroupForm(edit=False, old_data={}, available_groups=[]): |
|
598 | 612 | class _ReposGroupForm(formencode.Schema): |
|
599 | 613 | allow_extra_fields = True |
|
600 | 614 | filter_extra_fields = False |
|
601 | 615 | |
|
602 | 616 | group_name = All(UnicodeString(strip=True, min=1, not_empty=True), |
|
603 | 617 | SlugifyName()) |
|
604 | 618 | group_description = UnicodeString(strip=True, min=1, |
|
605 | 619 | not_empty=True) |
|
606 | 620 | group_parent_id = OneOf(available_groups, hideList=False, |
|
607 | 621 | testValueList=True, |
|
608 | 622 | if_missing=None, not_empty=False) |
|
609 | 623 | |
|
610 | 624 | chained_validators = [ValidReposGroup(edit, old_data), ValidPerms('group')] |
|
611 | 625 | |
|
612 | 626 | return _ReposGroupForm |
|
613 | 627 | |
|
614 | 628 | |
|
615 | 629 | def RegisterForm(edit=False, old_data={}): |
|
616 | 630 | class _RegisterForm(formencode.Schema): |
|
617 | 631 | allow_extra_fields = True |
|
618 | 632 | filter_extra_fields = True |
|
619 | 633 | username = All(ValidUsername(edit, old_data), |
|
620 | 634 | UnicodeString(strip=True, min=1, not_empty=True)) |
|
621 | 635 | password = All(UnicodeString(strip=True, min=6, not_empty=True)) |
|
622 | 636 | password_confirmation = All(UnicodeString(strip=True, min=6, not_empty=True)) |
|
623 | 637 | active = StringBoolean(if_missing=False) |
|
624 | 638 | name = UnicodeString(strip=True, min=1, not_empty=False) |
|
625 | 639 | lastname = UnicodeString(strip=True, min=1, not_empty=False) |
|
626 | 640 | email = All(Email(not_empty=True), UniqSystemEmail(old_data)) |
|
627 | 641 | |
|
628 | 642 | chained_validators = [ValidPasswordsMatch, ValidPassword] |
|
629 | 643 | |
|
630 | 644 | return _RegisterForm |
|
631 | 645 | |
|
632 | 646 | |
|
633 | 647 | def PasswordResetForm(): |
|
634 | 648 | class _PasswordResetForm(formencode.Schema): |
|
635 | 649 | allow_extra_fields = True |
|
636 | 650 | filter_extra_fields = True |
|
637 | 651 | email = All(ValidSystemEmail(), Email(not_empty=True)) |
|
638 | 652 | return _PasswordResetForm |
|
639 | 653 | |
|
640 | 654 | |
|
641 | 655 | def RepoForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(), |
|
642 | 656 | repo_groups=[]): |
|
643 | 657 | class _RepoForm(formencode.Schema): |
|
644 | 658 | allow_extra_fields = True |
|
645 | 659 | filter_extra_fields = False |
|
646 | 660 | repo_name = All(UnicodeString(strip=True, min=1, not_empty=True), |
|
647 | 661 | SlugifyName()) |
|
648 |
clone_uri = All(UnicodeString(strip=True, min=1, not_empty=False) |
|
|
649 | ValidCloneUri()()) | |
|
662 | clone_uri = All(UnicodeString(strip=True, min=1, not_empty=False)) | |
|
650 | 663 | repo_group = OneOf(repo_groups, hideList=True) |
|
651 | 664 | repo_type = OneOf(supported_backends) |
|
652 | 665 | description = UnicodeString(strip=True, min=1, not_empty=True) |
|
653 | 666 | private = StringBoolean(if_missing=False) |
|
654 | 667 | enable_statistics = StringBoolean(if_missing=False) |
|
655 | 668 | enable_downloads = StringBoolean(if_missing=False) |
|
656 | 669 | |
|
657 | 670 | if edit: |
|
658 | 671 | #this is repo owner |
|
659 | 672 | user = All(UnicodeString(not_empty=True), ValidRepoUser) |
|
660 | 673 | |
|
661 |
chained_validators = [Valid |
|
|
674 | chained_validators = [ValidCloneUri()(), | |
|
675 | ValidRepoName(edit, old_data), | |
|
676 | ValidPerms()] | |
|
662 | 677 | return _RepoForm |
|
663 | 678 | |
|
664 | 679 | |
|
665 | 680 | def RepoForkForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(), |
|
666 | 681 | repo_groups=[]): |
|
667 | 682 | class _RepoForkForm(formencode.Schema): |
|
668 | 683 | allow_extra_fields = True |
|
669 | 684 | filter_extra_fields = False |
|
670 | 685 | repo_name = All(UnicodeString(strip=True, min=1, not_empty=True), |
|
671 | 686 | SlugifyName()) |
|
672 | 687 | repo_group = OneOf(repo_groups, hideList=True) |
|
673 | 688 | repo_type = All(ValidForkType(old_data), OneOf(supported_backends)) |
|
674 | 689 | description = UnicodeString(strip=True, min=1, not_empty=True) |
|
675 | 690 | private = StringBoolean(if_missing=False) |
|
676 | 691 | copy_permissions = StringBoolean(if_missing=False) |
|
677 | 692 | update_after_clone = StringBoolean(if_missing=False) |
|
678 | 693 | fork_parent_id = UnicodeString() |
|
679 | 694 | chained_validators = [ValidForkName(edit, old_data)] |
|
680 | 695 | |
|
681 | 696 | return _RepoForkForm |
|
682 | 697 | |
|
683 | 698 | |
|
684 | 699 | def RepoSettingsForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(), |
|
685 | 700 | repo_groups=[]): |
|
686 | 701 | class _RepoForm(formencode.Schema): |
|
687 | 702 | allow_extra_fields = True |
|
688 | 703 | filter_extra_fields = False |
|
689 | 704 | repo_name = All(UnicodeString(strip=True, min=1, not_empty=True), |
|
690 | 705 | SlugifyName()) |
|
691 | 706 | description = UnicodeString(strip=True, min=1, not_empty=True) |
|
692 | 707 | repo_group = OneOf(repo_groups, hideList=True) |
|
693 | 708 | private = StringBoolean(if_missing=False) |
|
694 | 709 | |
|
695 | 710 | chained_validators = [ValidRepoName(edit, old_data), ValidPerms(), |
|
696 | 711 | ValidSettings] |
|
697 | 712 | return _RepoForm |
|
698 | 713 | |
|
699 | 714 | |
|
700 | 715 | def ApplicationSettingsForm(): |
|
701 | 716 | class _ApplicationSettingsForm(formencode.Schema): |
|
702 | 717 | allow_extra_fields = True |
|
703 | 718 | filter_extra_fields = False |
|
704 | 719 | rhodecode_title = UnicodeString(strip=True, min=1, not_empty=True) |
|
705 | 720 | rhodecode_realm = UnicodeString(strip=True, min=1, not_empty=True) |
|
706 | 721 | rhodecode_ga_code = UnicodeString(strip=True, min=1, not_empty=False) |
|
707 | 722 | |
|
708 | 723 | return _ApplicationSettingsForm |
|
709 | 724 | |
|
710 | 725 | |
|
711 | 726 | def ApplicationUiSettingsForm(): |
|
712 | 727 | class _ApplicationUiSettingsForm(formencode.Schema): |
|
713 | 728 | allow_extra_fields = True |
|
714 | 729 | filter_extra_fields = False |
|
715 | 730 | web_push_ssl = OneOf(['true', 'false'], if_missing='false') |
|
716 | 731 | paths_root_path = All(ValidPath(), UnicodeString(strip=True, min=1, not_empty=True)) |
|
717 | 732 | hooks_changegroup_update = OneOf(['True', 'False'], if_missing=False) |
|
718 | 733 | hooks_changegroup_repo_size = OneOf(['True', 'False'], if_missing=False) |
|
719 | 734 | hooks_pretxnchangegroup_push_logger = OneOf(['True', 'False'], if_missing=False) |
|
720 | 735 | hooks_preoutgoing_pull_logger = OneOf(['True', 'False'], if_missing=False) |
|
721 | 736 | |
|
722 | 737 | return _ApplicationUiSettingsForm |
|
723 | 738 | |
|
724 | 739 | |
|
725 | 740 | def DefaultPermissionsForm(perms_choices, register_choices, create_choices): |
|
726 | 741 | class _DefaultPermissionsForm(formencode.Schema): |
|
727 | 742 | allow_extra_fields = True |
|
728 | 743 | filter_extra_fields = True |
|
729 | 744 | overwrite_default = StringBoolean(if_missing=False) |
|
730 | 745 | anonymous = OneOf(['True', 'False'], if_missing=False) |
|
731 | 746 | default_perm = OneOf(perms_choices) |
|
732 | 747 | default_register = OneOf(register_choices) |
|
733 | 748 | default_create = OneOf(create_choices) |
|
734 | 749 | |
|
735 | 750 | return _DefaultPermissionsForm |
|
736 | 751 | |
|
737 | 752 | |
|
738 | 753 | def LdapSettingsForm(tls_reqcert_choices, search_scope_choices, tls_kind_choices): |
|
739 | 754 | class _LdapSettingsForm(formencode.Schema): |
|
740 | 755 | allow_extra_fields = True |
|
741 | 756 | filter_extra_fields = True |
|
742 | 757 | pre_validators = [LdapLibValidator] |
|
743 | 758 | ldap_active = StringBoolean(if_missing=False) |
|
744 | 759 | ldap_host = UnicodeString(strip=True,) |
|
745 | 760 | ldap_port = Number(strip=True,) |
|
746 | 761 | ldap_tls_kind = OneOf(tls_kind_choices) |
|
747 | 762 | ldap_tls_reqcert = OneOf(tls_reqcert_choices) |
|
748 | 763 | ldap_dn_user = UnicodeString(strip=True,) |
|
749 | 764 | ldap_dn_pass = UnicodeString(strip=True,) |
|
750 | 765 | ldap_base_dn = UnicodeString(strip=True,) |
|
751 | 766 | ldap_filter = UnicodeString(strip=True,) |
|
752 | 767 | ldap_search_scope = OneOf(search_scope_choices) |
|
753 | 768 | ldap_attr_login = All(AttrLoginValidator, UnicodeString(strip=True,)) |
|
754 | 769 | ldap_attr_firstname = UnicodeString(strip=True,) |
|
755 | 770 | ldap_attr_lastname = UnicodeString(strip=True,) |
|
756 | 771 | ldap_attr_email = UnicodeString(strip=True,) |
|
757 | 772 | |
|
758 | 773 | return _LdapSettingsForm |
@@ -1,310 +1,310 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.model.user_group |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | users groups model for RhodeCode |
|
7 | 7 | |
|
8 | 8 | :created_on: Jan 25, 2011 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import logging |
|
28 | 28 | import traceback |
|
29 | 29 | import shutil |
|
30 | 30 | |
|
31 | 31 | from rhodecode.lib import LazyProperty |
|
32 | 32 | |
|
33 | 33 | from rhodecode.model import BaseModel |
|
34 | 34 | from rhodecode.model.db import RepoGroup, RhodeCodeUi, UserRepoGroupToPerm, \ |
|
35 | 35 | User, Permission, UsersGroupRepoGroupToPerm, UsersGroup |
|
36 | 36 | |
|
37 | 37 | log = logging.getLogger(__name__) |
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | class ReposGroupModel(BaseModel): |
|
41 | 41 | |
|
42 | 42 | def __get_user(self, user): |
|
43 | 43 | return self._get_instance(User, user, callback=User.get_by_username) |
|
44 | 44 | |
|
45 | 45 | def __get_users_group(self, users_group): |
|
46 | 46 | return self._get_instance(UsersGroup, users_group, |
|
47 | 47 | callback=UsersGroup.get_by_group_name) |
|
48 | 48 | |
|
49 | 49 | def __get_repos_group(self, repos_group): |
|
50 | 50 | return self._get_instance(RepoGroup, repos_group, |
|
51 | 51 | callback=RepoGroup.get_by_group_name) |
|
52 | 52 | |
|
53 | 53 | def __get_perm(self, permission): |
|
54 | 54 | return self._get_instance(Permission, permission, |
|
55 | 55 | callback=Permission.get_by_key) |
|
56 | 56 | |
|
57 | 57 | @LazyProperty |
|
58 | 58 | def repos_path(self): |
|
59 | 59 | """ |
|
60 | 60 | Get's the repositories root path from database |
|
61 | 61 | """ |
|
62 | 62 | |
|
63 | 63 | q = RhodeCodeUi.get_by_key('/').one() |
|
64 | 64 | return q.ui_value |
|
65 | 65 | |
|
66 | 66 | def _create_default_perms(self, new_group): |
|
67 | 67 | # create default permission |
|
68 | 68 | repo_group_to_perm = UserRepoGroupToPerm() |
|
69 | 69 | default_perm = 'group.read' |
|
70 | 70 | for p in User.get_by_username('default').user_perms: |
|
71 | 71 | if p.permission.permission_name.startswith('group.'): |
|
72 | 72 | default_perm = p.permission.permission_name |
|
73 | 73 | break |
|
74 | 74 | |
|
75 | 75 | repo_group_to_perm.permission_id = self.sa.query(Permission)\ |
|
76 | 76 | .filter(Permission.permission_name == default_perm)\ |
|
77 | 77 | .one().permission_id |
|
78 | 78 | |
|
79 | 79 | repo_group_to_perm.group = new_group |
|
80 | 80 | repo_group_to_perm.user_id = User.get_by_username('default').user_id |
|
81 | 81 | |
|
82 | 82 | self.sa.add(repo_group_to_perm) |
|
83 | 83 | |
|
84 | 84 | def __create_group(self, group_name): |
|
85 | 85 | """ |
|
86 | 86 | makes repositories group on filesystem |
|
87 | 87 | |
|
88 | 88 | :param repo_name: |
|
89 | 89 | :param parent_id: |
|
90 | 90 | """ |
|
91 | 91 | |
|
92 | 92 | create_path = os.path.join(self.repos_path, group_name) |
|
93 | 93 | log.debug('creating new group in %s' % create_path) |
|
94 | 94 | |
|
95 | 95 | if os.path.isdir(create_path): |
|
96 | 96 | raise Exception('That directory already exists !') |
|
97 | 97 | |
|
98 | 98 | os.makedirs(create_path) |
|
99 | 99 | |
|
100 | 100 | def __rename_group(self, old, new): |
|
101 | 101 | """ |
|
102 | 102 | Renames a group on filesystem |
|
103 | 103 | |
|
104 | 104 | :param group_name: |
|
105 | 105 | """ |
|
106 | 106 | |
|
107 | 107 | if old == new: |
|
108 | 108 | log.debug('skipping group rename') |
|
109 | 109 | return |
|
110 | 110 | |
|
111 | 111 | log.debug('renaming repos group from %s to %s' % (old, new)) |
|
112 | 112 | |
|
113 | 113 | old_path = os.path.join(self.repos_path, old) |
|
114 | 114 | new_path = os.path.join(self.repos_path, new) |
|
115 | 115 | |
|
116 | 116 | log.debug('renaming repos paths from %s to %s' % (old_path, new_path)) |
|
117 | 117 | |
|
118 | 118 | if os.path.isdir(new_path): |
|
119 | 119 | raise Exception('Was trying to rename to already ' |
|
120 | 120 | 'existing dir %s' % new_path) |
|
121 | 121 | shutil.move(old_path, new_path) |
|
122 | 122 | |
|
123 | 123 | def __delete_group(self, group): |
|
124 | 124 | """ |
|
125 | 125 | Deletes a group from a filesystem |
|
126 | 126 | |
|
127 | 127 | :param group: instance of group from database |
|
128 | 128 | """ |
|
129 | 129 | paths = group.full_path.split(RepoGroup.url_sep()) |
|
130 | 130 | paths = os.sep.join(paths) |
|
131 | 131 | |
|
132 | 132 | rm_path = os.path.join(self.repos_path, paths) |
|
133 | 133 | if os.path.isdir(rm_path): |
|
134 | 134 | # delete only if that path really exists |
|
135 | 135 | os.rmdir(rm_path) |
|
136 | 136 | |
|
137 | 137 | def create(self, group_name, group_description, parent, just_db=False): |
|
138 | 138 | try: |
|
139 | 139 | new_repos_group = RepoGroup() |
|
140 | 140 | new_repos_group.group_description = group_description |
|
141 | 141 | new_repos_group.parent_group = self.__get_repos_group(parent) |
|
142 | 142 | new_repos_group.group_name = new_repos_group.get_new_name(group_name) |
|
143 | 143 | |
|
144 | 144 | self.sa.add(new_repos_group) |
|
145 | 145 | self._create_default_perms(new_repos_group) |
|
146 | 146 | |
|
147 | 147 | if not just_db: |
|
148 | 148 | # we need to flush here, in order to check if database won't |
|
149 | 149 | # throw any exceptions, create filesystem dirs at the very end |
|
150 | 150 | self.sa.flush() |
|
151 | 151 | self.__create_group(new_repos_group.group_name) |
|
152 | 152 | |
|
153 | 153 | return new_repos_group |
|
154 | 154 | except: |
|
155 | 155 | log.error(traceback.format_exc()) |
|
156 | 156 | raise |
|
157 | 157 | |
|
158 | 158 | def update(self, repos_group_id, form_data): |
|
159 | 159 | |
|
160 | 160 | try: |
|
161 | 161 | repos_group = RepoGroup.get(repos_group_id) |
|
162 | 162 | |
|
163 | 163 | # update permissions |
|
164 | 164 | for member, perm, member_type in form_data['perms_updates']: |
|
165 | 165 | if member_type == 'user': |
|
166 | 166 | # this updates also current one if found |
|
167 | 167 | ReposGroupModel().grant_user_permission( |
|
168 | 168 | repos_group=repos_group, user=member, perm=perm |
|
169 | 169 | ) |
|
170 | 170 | else: |
|
171 | 171 | ReposGroupModel().grant_users_group_permission( |
|
172 | 172 | repos_group=repos_group, group_name=member, perm=perm |
|
173 | 173 | ) |
|
174 | 174 | # set new permissions |
|
175 | 175 | for member, perm, member_type in form_data['perms_new']: |
|
176 | 176 | if member_type == 'user': |
|
177 | 177 | ReposGroupModel().grant_user_permission( |
|
178 | 178 | repos_group=repos_group, user=member, perm=perm |
|
179 | 179 | ) |
|
180 | 180 | else: |
|
181 | 181 | ReposGroupModel().grant_users_group_permission( |
|
182 | 182 | repos_group=repos_group, group_name=member, perm=perm |
|
183 | 183 | ) |
|
184 | 184 | |
|
185 | 185 | old_path = repos_group.full_path |
|
186 | 186 | |
|
187 | 187 | # change properties |
|
188 | 188 | repos_group.group_description = form_data['group_description'] |
|
189 | 189 | repos_group.parent_group = RepoGroup.get(form_data['group_parent_id']) |
|
190 | repos_group.group_parent_id = form_data['group_parent_id'] | |
|
190 | 191 | repos_group.group_name = repos_group.get_new_name(form_data['group_name']) |
|
191 | ||
|
192 | 192 | new_path = repos_group.full_path |
|
193 | 193 | |
|
194 | 194 | self.sa.add(repos_group) |
|
195 | 195 | |
|
196 | self.__rename_group(old_path, new_path) | |
|
197 | ||
|
198 | 196 | # we need to get all repositories from this new group and |
|
199 | 197 | # rename them accordingly to new group path |
|
200 | 198 | for r in repos_group.repositories: |
|
201 | 199 | r.repo_name = r.get_new_name(r.just_name) |
|
202 | 200 | self.sa.add(r) |
|
203 | 201 | |
|
202 | self.__rename_group(old_path, new_path) | |
|
203 | ||
|
204 | 204 | return repos_group |
|
205 | 205 | except: |
|
206 | 206 | log.error(traceback.format_exc()) |
|
207 | 207 | raise |
|
208 | 208 | |
|
209 | 209 | def delete(self, users_group_id): |
|
210 | 210 | try: |
|
211 | 211 | users_group = RepoGroup.get(users_group_id) |
|
212 | 212 | self.sa.delete(users_group) |
|
213 | 213 | self.__delete_group(users_group) |
|
214 | 214 | except: |
|
215 | 215 | log.error(traceback.format_exc()) |
|
216 | 216 | raise |
|
217 | 217 | |
|
218 | 218 | def grant_user_permission(self, repos_group, user, perm): |
|
219 | 219 | """ |
|
220 | 220 | Grant permission for user on given repositories group, or update |
|
221 | 221 | existing one if found |
|
222 | 222 | |
|
223 | 223 | :param repos_group: Instance of ReposGroup, repositories_group_id, |
|
224 | 224 | or repositories_group name |
|
225 | 225 | :param user: Instance of User, user_id or username |
|
226 | 226 | :param perm: Instance of Permission, or permission_name |
|
227 | 227 | """ |
|
228 | 228 | |
|
229 | 229 | repos_group = self.__get_repos_group(repos_group) |
|
230 | 230 | user = self.__get_user(user) |
|
231 | 231 | permission = self.__get_perm(perm) |
|
232 | 232 | |
|
233 | 233 | # check if we have that permission already |
|
234 | 234 | obj = self.sa.query(UserRepoGroupToPerm)\ |
|
235 | 235 | .filter(UserRepoGroupToPerm.user == user)\ |
|
236 | 236 | .filter(UserRepoGroupToPerm.group == repos_group)\ |
|
237 | 237 | .scalar() |
|
238 | 238 | if obj is None: |
|
239 | 239 | # create new ! |
|
240 | 240 | obj = UserRepoGroupToPerm() |
|
241 | 241 | obj.group = repos_group |
|
242 | 242 | obj.user = user |
|
243 | 243 | obj.permission = permission |
|
244 | 244 | self.sa.add(obj) |
|
245 | 245 | |
|
246 | 246 | def revoke_user_permission(self, repos_group, user): |
|
247 | 247 | """ |
|
248 | 248 | Revoke permission for user on given repositories group |
|
249 | 249 | |
|
250 | 250 | :param repos_group: Instance of ReposGroup, repositories_group_id, |
|
251 | 251 | or repositories_group name |
|
252 | 252 | :param user: Instance of User, user_id or username |
|
253 | 253 | """ |
|
254 | 254 | |
|
255 | 255 | repos_group = self.__get_repos_group(repos_group) |
|
256 | 256 | user = self.__get_user(user) |
|
257 | 257 | |
|
258 | 258 | obj = self.sa.query(UserRepoGroupToPerm)\ |
|
259 | 259 | .filter(UserRepoGroupToPerm.user == user)\ |
|
260 | 260 | .filter(UserRepoGroupToPerm.group == repos_group)\ |
|
261 | 261 | .one() |
|
262 | 262 | self.sa.delete(obj) |
|
263 | 263 | |
|
264 | 264 | def grant_users_group_permission(self, repos_group, group_name, perm): |
|
265 | 265 | """ |
|
266 | 266 | Grant permission for users group on given repositories group, or update |
|
267 | 267 | existing one if found |
|
268 | 268 | |
|
269 | 269 | :param repos_group: Instance of ReposGroup, repositories_group_id, |
|
270 | 270 | or repositories_group name |
|
271 | 271 | :param group_name: Instance of UserGroup, users_group_id, |
|
272 | 272 | or users group name |
|
273 | 273 | :param perm: Instance of Permission, or permission_name |
|
274 | 274 | """ |
|
275 | 275 | repos_group = self.__get_repos_group(repos_group) |
|
276 | 276 | group_name = self.__get_users_group(group_name) |
|
277 | 277 | permission = self.__get_perm(perm) |
|
278 | 278 | |
|
279 | 279 | # check if we have that permission already |
|
280 | 280 | obj = self.sa.query(UsersGroupRepoGroupToPerm)\ |
|
281 | 281 | .filter(UsersGroupRepoGroupToPerm.group == repos_group)\ |
|
282 | 282 | .filter(UsersGroupRepoGroupToPerm.users_group == group_name)\ |
|
283 | 283 | .scalar() |
|
284 | 284 | |
|
285 | 285 | if obj is None: |
|
286 | 286 | # create new |
|
287 | 287 | obj = UsersGroupRepoGroupToPerm() |
|
288 | 288 | |
|
289 | 289 | obj.group = repos_group |
|
290 | 290 | obj.users_group = group_name |
|
291 | 291 | obj.permission = permission |
|
292 | 292 | self.sa.add(obj) |
|
293 | 293 | |
|
294 | 294 | def revoke_users_group_permission(self, repos_group, group_name): |
|
295 | 295 | """ |
|
296 | 296 | Revoke permission for users group on given repositories group |
|
297 | 297 | |
|
298 | 298 | :param repos_group: Instance of ReposGroup, repositories_group_id, |
|
299 | 299 | or repositories_group name |
|
300 | 300 | :param group_name: Instance of UserGroup, users_group_id, |
|
301 | 301 | or users group name |
|
302 | 302 | """ |
|
303 | 303 | repos_group = self.__get_repos_group(repos_group) |
|
304 | 304 | group_name = self.__get_users_group(group_name) |
|
305 | 305 | |
|
306 | 306 | obj = self.sa.query(UsersGroupRepoGroupToPerm)\ |
|
307 | 307 | .filter(UsersGroupRepoGroupToPerm.group == repos_group)\ |
|
308 | 308 | .filter(UsersGroupRepoGroupToPerm.users_group == group_name)\ |
|
309 | 309 | .one() |
|
310 | 310 | self.sa.delete(obj) |
@@ -1,87 +1,134 b'' | |||
|
1 | 1 | from rhodecode.tests import * |
|
2 | from rhodecode.model.db import UsersGroup | |
|
2 | from rhodecode.model.db import UsersGroup, UsersGroupToPerm, Permission | |
|
3 | 3 | |
|
4 | 4 | TEST_USERS_GROUP = 'admins_test' |
|
5 | 5 | |
|
6 | ||
|
6 | 7 | class TestAdminUsersGroupsController(TestController): |
|
7 | 8 | |
|
8 | 9 | def test_index(self): |
|
9 | 10 | response = self.app.get(url('users_groups')) |
|
10 | 11 | # Test response... |
|
11 | 12 | |
|
12 | 13 | def test_index_as_xml(self): |
|
13 | 14 | response = self.app.get(url('formatted_users_groups', format='xml')) |
|
14 | 15 | |
|
15 | 16 | def test_create(self): |
|
16 | 17 | self.log_user() |
|
17 | 18 | users_group_name = TEST_USERS_GROUP |
|
18 | 19 | response = self.app.post(url('users_groups'), |
|
19 | 20 | {'users_group_name':users_group_name, |
|
20 | 21 | 'active':True}) |
|
21 | 22 | response.follow() |
|
22 | 23 | |
|
23 | 24 | self.checkSessionFlash(response, |
|
24 | 25 | 'created users group %s' % TEST_USERS_GROUP) |
|
25 | 26 | |
|
26 | 27 | def test_new(self): |
|
27 | 28 | response = self.app.get(url('new_users_group')) |
|
28 | 29 | |
|
29 | 30 | def test_new_as_xml(self): |
|
30 | 31 | response = self.app.get(url('formatted_new_users_group', format='xml')) |
|
31 | 32 | |
|
32 | 33 | def test_update(self): |
|
33 | 34 | response = self.app.put(url('users_group', id=1)) |
|
34 | 35 | |
|
35 | 36 | def test_update_browser_fakeout(self): |
|
36 | 37 | response = self.app.post(url('users_group', id=1), |
|
37 | 38 | params=dict(_method='put')) |
|
38 | 39 | |
|
39 | 40 | def test_delete(self): |
|
40 | 41 | self.log_user() |
|
41 | 42 | users_group_name = TEST_USERS_GROUP + 'another' |
|
42 | 43 | response = self.app.post(url('users_groups'), |
|
43 | 44 | {'users_group_name':users_group_name, |
|
44 | 45 | 'active':True}) |
|
45 | 46 | response.follow() |
|
46 | 47 | |
|
47 | 48 | self.checkSessionFlash(response, |
|
48 | 49 | 'created users group %s' % users_group_name) |
|
49 | 50 | |
|
50 | ||
|
51 | 51 | gr = self.Session.query(UsersGroup)\ |
|
52 | 52 | .filter(UsersGroup.users_group_name == |
|
53 | 53 | users_group_name).one() |
|
54 | 54 | |
|
55 | 55 | response = self.app.delete(url('users_group', id=gr.users_group_id)) |
|
56 | 56 | |
|
57 | 57 | gr = self.Session.query(UsersGroup)\ |
|
58 | 58 | .filter(UsersGroup.users_group_name == |
|
59 | 59 | users_group_name).scalar() |
|
60 | 60 | |
|
61 | 61 | self.assertEqual(gr, None) |
|
62 | 62 | |
|
63 | def test_enable_repository_read_on_group(self): | |
|
64 | self.log_user() | |
|
65 | users_group_name = TEST_USERS_GROUP + 'another2' | |
|
66 | response = self.app.post(url('users_groups'), | |
|
67 | {'users_group_name': users_group_name, | |
|
68 | 'active':True}) | |
|
69 | response.follow() | |
|
70 | ||
|
71 | ug = UsersGroup.get_by_group_name(users_group_name) | |
|
72 | self.checkSessionFlash(response, | |
|
73 | 'created users group %s' % users_group_name) | |
|
74 | ||
|
75 | response = self.app.put(url('users_group_perm', id=ug.users_group_id), | |
|
76 | {'create_repo_perm': True}) | |
|
77 | ||
|
78 | response.follow() | |
|
79 | ug = UsersGroup.get_by_group_name(users_group_name) | |
|
80 | p = Permission.get_by_key('hg.create.repository') | |
|
81 | # check if user has this perm | |
|
82 | perms = UsersGroupToPerm.query()\ | |
|
83 | .filter(UsersGroupToPerm.users_group == ug).all() | |
|
84 | perms = [[x.__dict__['users_group_id'], | |
|
85 | x.__dict__['permission_id'],] for x in perms] | |
|
86 | self.assertEqual( | |
|
87 | perms, | |
|
88 | [[ug.users_group_id, p.permission_id]] | |
|
89 | ) | |
|
90 | ||
|
91 | # DELETE ! | |
|
92 | ug = UsersGroup.get_by_group_name(users_group_name) | |
|
93 | ugid = ug.users_group_id | |
|
94 | response = self.app.delete(url('users_group', id=ug.users_group_id)) | |
|
95 | response = response.follow() | |
|
96 | gr = self.Session.query(UsersGroup)\ | |
|
97 | .filter(UsersGroup.users_group_name == | |
|
98 | users_group_name).scalar() | |
|
99 | ||
|
100 | self.assertEqual(gr, None) | |
|
101 | p = Permission.get_by_key('hg.create.repository') | |
|
102 | perms = UsersGroupToPerm.query()\ | |
|
103 | .filter(UsersGroupToPerm.users_group_id == ugid).all() | |
|
104 | perms = [[x.__dict__['users_group_id'], | |
|
105 | x.__dict__['permission_id'],] for x in perms] | |
|
106 | self.assertEqual( | |
|
107 | perms, | |
|
108 | [] | |
|
109 | ) | |
|
63 | 110 | |
|
64 | 111 | def test_delete_browser_fakeout(self): |
|
65 | 112 | response = self.app.post(url('users_group', id=1), |
|
66 | 113 | params=dict(_method='delete')) |
|
67 | 114 | |
|
68 | 115 | def test_show(self): |
|
69 | 116 | response = self.app.get(url('users_group', id=1)) |
|
70 | 117 | |
|
71 | 118 | def test_show_as_xml(self): |
|
72 | 119 | response = self.app.get(url('formatted_users_group', id=1, format='xml')) |
|
73 | 120 | |
|
74 | 121 | def test_edit(self): |
|
75 | 122 | response = self.app.get(url('edit_users_group', id=1)) |
|
76 | 123 | |
|
77 | 124 | def test_edit_as_xml(self): |
|
78 | 125 | response = self.app.get(url('formatted_edit_users_group', id=1, format='xml')) |
|
79 | 126 | |
|
80 | 127 | def test_assign_members(self): |
|
81 | 128 | pass |
|
82 | 129 | |
|
83 | 130 | def test_add_create_permission(self): |
|
84 | 131 | pass |
|
85 | 132 | |
|
86 | 133 | def test_revoke_members(self): |
|
87 | 134 | pass |
@@ -1,555 +1,580 b'' | |||
|
1 | 1 | import os |
|
2 | 2 | import unittest |
|
3 | 3 | from rhodecode.tests import * |
|
4 | 4 | |
|
5 | 5 | from rhodecode.model.repos_group import ReposGroupModel |
|
6 | 6 | from rhodecode.model.repo import RepoModel |
|
7 | 7 | from rhodecode.model.db import RepoGroup, User, Notification, UserNotification, \ |
|
8 | 8 | UsersGroup, UsersGroupMember, Permission |
|
9 | 9 | from sqlalchemy.exc import IntegrityError |
|
10 | 10 | from rhodecode.model.user import UserModel |
|
11 | 11 | |
|
12 | 12 | from rhodecode.model.meta import Session |
|
13 | 13 | from rhodecode.model.notification import NotificationModel |
|
14 | 14 | from rhodecode.model.users_group import UsersGroupModel |
|
15 | 15 | from rhodecode.lib.auth import AuthUser |
|
16 | 16 | |
|
17 | 17 | |
|
18 | 18 | def _make_group(path, desc='desc', parent_id=None, |
|
19 | 19 | skip_if_exists=False): |
|
20 | 20 | |
|
21 | 21 | gr = RepoGroup.get_by_group_name(path) |
|
22 | 22 | if gr and skip_if_exists: |
|
23 | 23 | return gr |
|
24 | 24 | |
|
25 | 25 | gr = ReposGroupModel().create(path, desc, parent_id) |
|
26 | Session.commit() | |
|
27 | 26 | return gr |
|
28 | 27 | |
|
29 | 28 | |
|
30 | 29 | class TestReposGroups(unittest.TestCase): |
|
31 | 30 | |
|
32 | 31 | def setUp(self): |
|
33 | 32 | self.g1 = _make_group('test1', skip_if_exists=True) |
|
33 | Session.commit() | |
|
34 | 34 | self.g2 = _make_group('test2', skip_if_exists=True) |
|
35 | Session.commit() | |
|
35 | 36 | self.g3 = _make_group('test3', skip_if_exists=True) |
|
37 | Session.commit() | |
|
36 | 38 | |
|
37 | 39 | def tearDown(self): |
|
38 | 40 | print 'out' |
|
39 | 41 | |
|
40 | 42 | def __check_path(self, *path): |
|
43 | """ | |
|
44 | Checks the path for existance ! | |
|
45 | """ | |
|
41 | 46 | path = [TESTS_TMP_PATH] + list(path) |
|
42 | 47 | path = os.path.join(*path) |
|
43 | 48 | return os.path.isdir(path) |
|
44 | 49 | |
|
45 | 50 | def _check_folders(self): |
|
46 | 51 | print os.listdir(TESTS_TMP_PATH) |
|
47 | 52 | |
|
48 | 53 | def __delete_group(self, id_): |
|
49 | 54 | ReposGroupModel().delete(id_) |
|
50 | 55 | |
|
51 | 56 | def __update_group(self, id_, path, desc='desc', parent_id=None): |
|
52 |
form_data = dict( |
|
|
57 | form_data = dict( | |
|
58 | group_name=path, | |
|
53 | 59 |
|
|
54 | 60 |
|
|
55 | 61 |
|
|
56 |
|
|
|
57 | ||
|
62 | perms_new=[] | |
|
63 | ) | |
|
58 | 64 | gr = ReposGroupModel().update(id_, form_data) |
|
59 | 65 | return gr |
|
60 | 66 | |
|
61 | 67 | def test_create_group(self): |
|
62 | 68 | g = _make_group('newGroup') |
|
63 | 69 | self.assertEqual(g.full_path, 'newGroup') |
|
64 | 70 | |
|
65 | 71 | self.assertTrue(self.__check_path('newGroup')) |
|
66 | 72 | |
|
67 | 73 | def test_create_same_name_group(self): |
|
68 | 74 | self.assertRaises(IntegrityError, lambda:_make_group('newGroup')) |
|
69 | 75 | Session.rollback() |
|
70 | 76 | |
|
71 | 77 | def test_same_subgroup(self): |
|
72 | 78 | sg1 = _make_group('sub1', parent_id=self.g1.group_id) |
|
73 | 79 | self.assertEqual(sg1.parent_group, self.g1) |
|
74 | 80 | self.assertEqual(sg1.full_path, 'test1/sub1') |
|
75 | 81 | self.assertTrue(self.__check_path('test1', 'sub1')) |
|
76 | 82 | |
|
77 | 83 | ssg1 = _make_group('subsub1', parent_id=sg1.group_id) |
|
78 | 84 | self.assertEqual(ssg1.parent_group, sg1) |
|
79 | 85 | self.assertEqual(ssg1.full_path, 'test1/sub1/subsub1') |
|
80 | 86 | self.assertTrue(self.__check_path('test1', 'sub1', 'subsub1')) |
|
81 | 87 | |
|
82 | 88 | def test_remove_group(self): |
|
83 | 89 | sg1 = _make_group('deleteme') |
|
84 | 90 | self.__delete_group(sg1.group_id) |
|
85 | 91 | |
|
86 | 92 | self.assertEqual(RepoGroup.get(sg1.group_id), None) |
|
87 | 93 | self.assertFalse(self.__check_path('deteteme')) |
|
88 | 94 | |
|
89 | 95 | sg1 = _make_group('deleteme', parent_id=self.g1.group_id) |
|
90 | 96 | self.__delete_group(sg1.group_id) |
|
91 | 97 | |
|
92 | 98 | self.assertEqual(RepoGroup.get(sg1.group_id), None) |
|
93 | 99 | self.assertFalse(self.__check_path('test1', 'deteteme')) |
|
94 | 100 | |
|
95 | 101 | def test_rename_single_group(self): |
|
96 | 102 | sg1 = _make_group('initial') |
|
97 | 103 | |
|
98 | 104 | new_sg1 = self.__update_group(sg1.group_id, 'after') |
|
99 | 105 | self.assertTrue(self.__check_path('after')) |
|
100 | 106 | self.assertEqual(RepoGroup.get_by_group_name('initial'), None) |
|
101 | 107 | |
|
102 | 108 | def test_update_group_parent(self): |
|
103 | 109 | |
|
104 | 110 | sg1 = _make_group('initial', parent_id=self.g1.group_id) |
|
105 | 111 | |
|
106 | 112 | new_sg1 = self.__update_group(sg1.group_id, 'after', parent_id=self.g1.group_id) |
|
107 | 113 | self.assertTrue(self.__check_path('test1', 'after')) |
|
108 | 114 | self.assertEqual(RepoGroup.get_by_group_name('test1/initial'), None) |
|
109 | 115 | |
|
110 | 116 | new_sg1 = self.__update_group(sg1.group_id, 'after', parent_id=self.g3.group_id) |
|
111 | 117 | self.assertTrue(self.__check_path('test3', 'after')) |
|
112 | 118 | self.assertEqual(RepoGroup.get_by_group_name('test3/initial'), None) |
|
113 | 119 | |
|
114 | 120 | new_sg1 = self.__update_group(sg1.group_id, 'hello') |
|
115 | 121 | self.assertTrue(self.__check_path('hello')) |
|
116 | 122 | |
|
117 | 123 | self.assertEqual(RepoGroup.get_by_group_name('hello'), new_sg1) |
|
118 | 124 | |
|
119 | 125 | def test_subgrouping_with_repo(self): |
|
120 | 126 | |
|
121 | 127 | g1 = _make_group('g1') |
|
122 | 128 | g2 = _make_group('g2') |
|
123 | 129 | |
|
124 | 130 | # create new repo |
|
125 | 131 | form_data = dict(repo_name='john', |
|
126 | 132 | repo_name_full='john', |
|
127 | 133 | fork_name=None, |
|
128 | 134 | description=None, |
|
129 | 135 | repo_group=None, |
|
130 | 136 | private=False, |
|
131 | 137 | repo_type='hg', |
|
132 | 138 | clone_uri=None) |
|
133 | 139 | cur_user = User.get_by_username(TEST_USER_ADMIN_LOGIN) |
|
134 | 140 | r = RepoModel().create(form_data, cur_user) |
|
135 | 141 | |
|
136 | 142 | self.assertEqual(r.repo_name, 'john') |
|
137 | 143 | |
|
138 | 144 | # put repo into group |
|
139 | 145 | form_data = form_data |
|
140 | 146 | form_data['repo_group'] = g1.group_id |
|
141 | 147 | form_data['perms_new'] = [] |
|
142 | 148 | form_data['perms_updates'] = [] |
|
143 | 149 | RepoModel().update(r.repo_name, form_data) |
|
144 | 150 | self.assertEqual(r.repo_name, 'g1/john') |
|
145 | 151 | |
|
146 | 152 | self.__update_group(g1.group_id, 'g1', parent_id=g2.group_id) |
|
147 | 153 | self.assertTrue(self.__check_path('g2', 'g1')) |
|
148 | 154 | |
|
149 | 155 | # test repo |
|
150 | 156 | self.assertEqual(r.repo_name, os.path.join('g2', 'g1', r.just_name)) |
|
151 | 157 | |
|
152 | 158 | |
|
159 | def test_move_to_root(self): | |
|
160 | g1 = _make_group('t11') | |
|
161 | Session.commit() | |
|
162 | g2 = _make_group('t22',parent_id=g1.group_id) | |
|
163 | Session.commit() | |
|
164 | ||
|
165 | self.assertEqual(g2.full_path,'t11/t22') | |
|
166 | self.assertTrue(self.__check_path('t11', 't22')) | |
|
167 | ||
|
168 | g2 = self.__update_group(g2.group_id, 'g22', parent_id=None) | |
|
169 | Session.commit() | |
|
170 | ||
|
171 | self.assertEqual(g2.group_name,'g22') | |
|
172 | # we moved out group from t1 to '' so it's full path should be 'g2' | |
|
173 | self.assertEqual(g2.full_path,'g22') | |
|
174 | self.assertFalse(self.__check_path('t11', 't22')) | |
|
175 | self.assertTrue(self.__check_path('g22')) | |
|
176 | ||
|
177 | ||
|
153 | 178 | class TestUser(unittest.TestCase): |
|
154 | 179 | def __init__(self, methodName='runTest'): |
|
155 | 180 | Session.remove() |
|
156 | 181 | super(TestUser, self).__init__(methodName=methodName) |
|
157 | 182 | |
|
158 | 183 | def test_create_and_remove(self): |
|
159 | 184 | usr = UserModel().create_or_update(username=u'test_user', password=u'qweqwe', |
|
160 | 185 | email=u'u232@rhodecode.org', |
|
161 | 186 | name=u'u1', lastname=u'u1') |
|
162 | 187 | Session.commit() |
|
163 | 188 | self.assertEqual(User.get_by_username(u'test_user'), usr) |
|
164 | 189 | |
|
165 | 190 | # make users group |
|
166 | 191 | users_group = UsersGroupModel().create('some_example_group') |
|
167 | 192 | Session.commit() |
|
168 | 193 | |
|
169 | 194 | UsersGroupModel().add_user_to_group(users_group, usr) |
|
170 | 195 | Session.commit() |
|
171 | 196 | |
|
172 | 197 | self.assertEqual(UsersGroup.get(users_group.users_group_id), users_group) |
|
173 | 198 | self.assertEqual(UsersGroupMember.query().count(), 1) |
|
174 | 199 | UserModel().delete(usr.user_id) |
|
175 | 200 | Session.commit() |
|
176 | 201 | |
|
177 | 202 | self.assertEqual(UsersGroupMember.query().all(), []) |
|
178 | 203 | |
|
179 | 204 | |
|
180 | 205 | class TestNotifications(unittest.TestCase): |
|
181 | 206 | |
|
182 | 207 | def __init__(self, methodName='runTest'): |
|
183 | 208 | Session.remove() |
|
184 | 209 | self.u1 = UserModel().create_or_update(username=u'u1', |
|
185 | 210 | password=u'qweqwe', |
|
186 | 211 | email=u'u1@rhodecode.org', |
|
187 | 212 | name=u'u1', lastname=u'u1') |
|
188 | 213 | Session.commit() |
|
189 | 214 | self.u1 = self.u1.user_id |
|
190 | 215 | |
|
191 | 216 | self.u2 = UserModel().create_or_update(username=u'u2', |
|
192 | 217 | password=u'qweqwe', |
|
193 | 218 | email=u'u2@rhodecode.org', |
|
194 | 219 | name=u'u2', lastname=u'u3') |
|
195 | 220 | Session.commit() |
|
196 | 221 | self.u2 = self.u2.user_id |
|
197 | 222 | |
|
198 | 223 | self.u3 = UserModel().create_or_update(username=u'u3', |
|
199 | 224 | password=u'qweqwe', |
|
200 | 225 | email=u'u3@rhodecode.org', |
|
201 | 226 | name=u'u3', lastname=u'u3') |
|
202 | 227 | Session.commit() |
|
203 | 228 | self.u3 = self.u3.user_id |
|
204 | 229 | |
|
205 | 230 | super(TestNotifications, self).__init__(methodName=methodName) |
|
206 | 231 | |
|
207 | 232 | def _clean_notifications(self): |
|
208 | 233 | for n in Notification.query().all(): |
|
209 | 234 | Session.delete(n) |
|
210 | 235 | |
|
211 | 236 | Session.commit() |
|
212 | 237 | self.assertEqual(Notification.query().all(), []) |
|
213 | 238 | |
|
214 | 239 | def tearDown(self): |
|
215 | 240 | self._clean_notifications() |
|
216 | 241 | |
|
217 | 242 | def test_create_notification(self): |
|
218 | 243 | self.assertEqual([], Notification.query().all()) |
|
219 | 244 | self.assertEqual([], UserNotification.query().all()) |
|
220 | 245 | |
|
221 | 246 | usrs = [self.u1, self.u2] |
|
222 | 247 | notification = NotificationModel().create(created_by=self.u1, |
|
223 | 248 | subject=u'subj', body=u'hi there', |
|
224 | 249 | recipients=usrs) |
|
225 | 250 | Session.commit() |
|
226 | 251 | u1 = User.get(self.u1) |
|
227 | 252 | u2 = User.get(self.u2) |
|
228 | 253 | u3 = User.get(self.u3) |
|
229 | 254 | notifications = Notification.query().all() |
|
230 | 255 | self.assertEqual(len(notifications), 1) |
|
231 | 256 | |
|
232 | 257 | unotification = UserNotification.query()\ |
|
233 | 258 | .filter(UserNotification.notification == notification).all() |
|
234 | 259 | |
|
235 | 260 | self.assertEqual(notifications[0].recipients, [u1, u2]) |
|
236 | 261 | self.assertEqual(notification.notification_id, |
|
237 | 262 | notifications[0].notification_id) |
|
238 | 263 | self.assertEqual(len(unotification), len(usrs)) |
|
239 | 264 | self.assertEqual([x.user.user_id for x in unotification], usrs) |
|
240 | 265 | |
|
241 | 266 | def test_user_notifications(self): |
|
242 | 267 | self.assertEqual([], Notification.query().all()) |
|
243 | 268 | self.assertEqual([], UserNotification.query().all()) |
|
244 | 269 | |
|
245 | 270 | notification1 = NotificationModel().create(created_by=self.u1, |
|
246 | 271 | subject=u'subj', body=u'hi there1', |
|
247 | 272 | recipients=[self.u3]) |
|
248 | 273 | Session.commit() |
|
249 | 274 | notification2 = NotificationModel().create(created_by=self.u1, |
|
250 | 275 | subject=u'subj', body=u'hi there2', |
|
251 | 276 | recipients=[self.u3]) |
|
252 | 277 | Session.commit() |
|
253 | 278 | u3 = Session.query(User).get(self.u3) |
|
254 | 279 | |
|
255 | 280 | self.assertEqual(sorted([x.notification for x in u3.notifications]), |
|
256 | 281 | sorted([notification2, notification1])) |
|
257 | 282 | |
|
258 | 283 | def test_delete_notifications(self): |
|
259 | 284 | self.assertEqual([], Notification.query().all()) |
|
260 | 285 | self.assertEqual([], UserNotification.query().all()) |
|
261 | 286 | |
|
262 | 287 | notification = NotificationModel().create(created_by=self.u1, |
|
263 | 288 | subject=u'title', body=u'hi there3', |
|
264 | 289 | recipients=[self.u3, self.u1, self.u2]) |
|
265 | 290 | Session.commit() |
|
266 | 291 | notifications = Notification.query().all() |
|
267 | 292 | self.assertTrue(notification in notifications) |
|
268 | 293 | |
|
269 | 294 | Notification.delete(notification.notification_id) |
|
270 | 295 | Session.commit() |
|
271 | 296 | |
|
272 | 297 | notifications = Notification.query().all() |
|
273 | 298 | self.assertFalse(notification in notifications) |
|
274 | 299 | |
|
275 | 300 | un = UserNotification.query().filter(UserNotification.notification |
|
276 | 301 | == notification).all() |
|
277 | 302 | self.assertEqual(un, []) |
|
278 | 303 | |
|
279 | 304 | def test_delete_association(self): |
|
280 | 305 | |
|
281 | 306 | self.assertEqual([], Notification.query().all()) |
|
282 | 307 | self.assertEqual([], UserNotification.query().all()) |
|
283 | 308 | |
|
284 | 309 | notification = NotificationModel().create(created_by=self.u1, |
|
285 | 310 | subject=u'title', body=u'hi there3', |
|
286 | 311 | recipients=[self.u3, self.u1, self.u2]) |
|
287 | 312 | Session.commit() |
|
288 | 313 | |
|
289 | 314 | unotification = UserNotification.query()\ |
|
290 | 315 | .filter(UserNotification.notification == |
|
291 | 316 | notification)\ |
|
292 | 317 | .filter(UserNotification.user_id == self.u3)\ |
|
293 | 318 | .scalar() |
|
294 | 319 | |
|
295 | 320 | self.assertEqual(unotification.user_id, self.u3) |
|
296 | 321 | |
|
297 | 322 | NotificationModel().delete(self.u3, |
|
298 | 323 | notification.notification_id) |
|
299 | 324 | Session.commit() |
|
300 | 325 | |
|
301 | 326 | u3notification = UserNotification.query()\ |
|
302 | 327 | .filter(UserNotification.notification == |
|
303 | 328 | notification)\ |
|
304 | 329 | .filter(UserNotification.user_id == self.u3)\ |
|
305 | 330 | .scalar() |
|
306 | 331 | |
|
307 | 332 | self.assertEqual(u3notification, None) |
|
308 | 333 | |
|
309 | 334 | # notification object is still there |
|
310 | 335 | self.assertEqual(Notification.query().all(), [notification]) |
|
311 | 336 | |
|
312 | 337 | #u1 and u2 still have assignments |
|
313 | 338 | u1notification = UserNotification.query()\ |
|
314 | 339 | .filter(UserNotification.notification == |
|
315 | 340 | notification)\ |
|
316 | 341 | .filter(UserNotification.user_id == self.u1)\ |
|
317 | 342 | .scalar() |
|
318 | 343 | self.assertNotEqual(u1notification, None) |
|
319 | 344 | u2notification = UserNotification.query()\ |
|
320 | 345 | .filter(UserNotification.notification == |
|
321 | 346 | notification)\ |
|
322 | 347 | .filter(UserNotification.user_id == self.u2)\ |
|
323 | 348 | .scalar() |
|
324 | 349 | self.assertNotEqual(u2notification, None) |
|
325 | 350 | |
|
326 | 351 | def test_notification_counter(self): |
|
327 | 352 | self._clean_notifications() |
|
328 | 353 | self.assertEqual([], Notification.query().all()) |
|
329 | 354 | self.assertEqual([], UserNotification.query().all()) |
|
330 | 355 | |
|
331 | 356 | NotificationModel().create(created_by=self.u1, |
|
332 | 357 | subject=u'title', body=u'hi there_delete', |
|
333 | 358 | recipients=[self.u3, self.u1]) |
|
334 | 359 | Session.commit() |
|
335 | 360 | |
|
336 | 361 | self.assertEqual(NotificationModel() |
|
337 | 362 | .get_unread_cnt_for_user(self.u1), 1) |
|
338 | 363 | self.assertEqual(NotificationModel() |
|
339 | 364 | .get_unread_cnt_for_user(self.u2), 0) |
|
340 | 365 | self.assertEqual(NotificationModel() |
|
341 | 366 | .get_unread_cnt_for_user(self.u3), 1) |
|
342 | 367 | |
|
343 | 368 | notification = NotificationModel().create(created_by=self.u1, |
|
344 | 369 | subject=u'title', body=u'hi there3', |
|
345 | 370 | recipients=[self.u3, self.u1, self.u2]) |
|
346 | 371 | Session.commit() |
|
347 | 372 | |
|
348 | 373 | self.assertEqual(NotificationModel() |
|
349 | 374 | .get_unread_cnt_for_user(self.u1), 2) |
|
350 | 375 | self.assertEqual(NotificationModel() |
|
351 | 376 | .get_unread_cnt_for_user(self.u2), 1) |
|
352 | 377 | self.assertEqual(NotificationModel() |
|
353 | 378 | .get_unread_cnt_for_user(self.u3), 2) |
|
354 | 379 | |
|
355 | 380 | |
|
356 | 381 | class TestUsers(unittest.TestCase): |
|
357 | 382 | |
|
358 | 383 | def __init__(self, methodName='runTest'): |
|
359 | 384 | super(TestUsers, self).__init__(methodName=methodName) |
|
360 | 385 | |
|
361 | 386 | def setUp(self): |
|
362 | 387 | self.u1 = UserModel().create_or_update(username=u'u1', |
|
363 | 388 | password=u'qweqwe', |
|
364 | 389 | email=u'u1@rhodecode.org', |
|
365 | 390 | name=u'u1', lastname=u'u1') |
|
366 | 391 | |
|
367 | 392 | def tearDown(self): |
|
368 | 393 | perm = Permission.query().all() |
|
369 | 394 | for p in perm: |
|
370 | 395 | UserModel().revoke_perm(self.u1, p) |
|
371 | 396 | |
|
372 | 397 | UserModel().delete(self.u1) |
|
373 | 398 | Session.commit() |
|
374 | 399 | |
|
375 | 400 | def test_add_perm(self): |
|
376 | 401 | perm = Permission.query().all()[0] |
|
377 | 402 | UserModel().grant_perm(self.u1, perm) |
|
378 | 403 | Session.commit() |
|
379 | 404 | self.assertEqual(UserModel().has_perm(self.u1, perm), True) |
|
380 | 405 | |
|
381 | 406 | def test_has_perm(self): |
|
382 | 407 | perm = Permission.query().all() |
|
383 | 408 | for p in perm: |
|
384 | 409 | has_p = UserModel().has_perm(self.u1, p) |
|
385 | 410 | self.assertEqual(False, has_p) |
|
386 | 411 | |
|
387 | 412 | def test_revoke_perm(self): |
|
388 | 413 | perm = Permission.query().all()[0] |
|
389 | 414 | UserModel().grant_perm(self.u1, perm) |
|
390 | 415 | Session.commit() |
|
391 | 416 | self.assertEqual(UserModel().has_perm(self.u1, perm), True) |
|
392 | 417 | |
|
393 | 418 | #revoke |
|
394 | 419 | UserModel().revoke_perm(self.u1, perm) |
|
395 | 420 | Session.commit() |
|
396 | 421 | self.assertEqual(UserModel().has_perm(self.u1, perm), False) |
|
397 | 422 | |
|
398 | 423 | |
|
399 | 424 | class TestPermissions(unittest.TestCase): |
|
400 | 425 | def __init__(self, methodName='runTest'): |
|
401 | 426 | super(TestPermissions, self).__init__(methodName=methodName) |
|
402 | 427 | |
|
403 | 428 | def setUp(self): |
|
404 | 429 | self.u1 = UserModel().create_or_update( |
|
405 | 430 | username=u'u1', password=u'qweqwe', |
|
406 | 431 | email=u'u1@rhodecode.org', name=u'u1', lastname=u'u1' |
|
407 | 432 | ) |
|
408 | 433 | self.a1 = UserModel().create_or_update( |
|
409 | 434 | username=u'a1', password=u'qweqwe', |
|
410 | 435 | email=u'a1@rhodecode.org', name=u'a1', lastname=u'a1', admin=True |
|
411 | 436 | ) |
|
412 | 437 | Session.commit() |
|
413 | 438 | |
|
414 | 439 | def tearDown(self): |
|
415 | 440 | UserModel().delete(self.u1) |
|
416 | 441 | UserModel().delete(self.a1) |
|
417 | 442 | if hasattr(self, 'g1'): |
|
418 | 443 | ReposGroupModel().delete(self.g1.group_id) |
|
419 | 444 | if hasattr(self, 'g2'): |
|
420 | 445 | ReposGroupModel().delete(self.g2.group_id) |
|
421 | 446 | |
|
422 | 447 | if hasattr(self, 'ug1'): |
|
423 | 448 | UsersGroupModel().delete(self.ug1, force=True) |
|
424 | 449 | |
|
425 | 450 | Session.commit() |
|
426 | 451 | |
|
427 | 452 | def test_default_perms_set(self): |
|
428 | 453 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
429 | 454 | perms = { |
|
430 | 455 | 'repositories_groups': {}, |
|
431 | 456 | 'global': set([u'hg.create.repository', u'repository.read', |
|
432 | 457 | u'hg.register.manual_activate']), |
|
433 | 458 | 'repositories': {u'vcs_test_hg': u'repository.read'} |
|
434 | 459 | } |
|
435 | 460 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
436 | 461 | perms['repositories'][HG_REPO]) |
|
437 | 462 | new_perm = 'repository.write' |
|
438 | 463 | RepoModel().grant_user_permission(repo=HG_REPO, user=self.u1, perm=new_perm) |
|
439 | 464 | Session.commit() |
|
440 | 465 | |
|
441 | 466 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
442 | 467 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], new_perm) |
|
443 | 468 | |
|
444 | 469 | def test_default_admin_perms_set(self): |
|
445 | 470 | a1_auth = AuthUser(user_id=self.a1.user_id) |
|
446 | 471 | perms = { |
|
447 | 472 | 'repositories_groups': {}, |
|
448 | 473 | 'global': set([u'hg.admin']), |
|
449 | 474 | 'repositories': {u'vcs_test_hg': u'repository.admin'} |
|
450 | 475 | } |
|
451 | 476 | self.assertEqual(a1_auth.permissions['repositories'][HG_REPO], |
|
452 | 477 | perms['repositories'][HG_REPO]) |
|
453 | 478 | new_perm = 'repository.write' |
|
454 | 479 | RepoModel().grant_user_permission(repo=HG_REPO, user=self.a1, perm=new_perm) |
|
455 | 480 | Session.commit() |
|
456 | 481 | # cannot really downgrade admins permissions !? they still get's set as |
|
457 | 482 | # admin ! |
|
458 | 483 | u1_auth = AuthUser(user_id=self.a1.user_id) |
|
459 | 484 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
460 | 485 | perms['repositories'][HG_REPO]) |
|
461 | 486 | |
|
462 | 487 | def test_default_group_perms(self): |
|
463 | 488 | self.g1 = _make_group('test1', skip_if_exists=True) |
|
464 | 489 | self.g2 = _make_group('test2', skip_if_exists=True) |
|
465 | 490 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
466 | 491 | perms = { |
|
467 | 492 | 'repositories_groups': {u'test1': 'group.read', u'test2': 'group.read'}, |
|
468 | 493 | 'global': set([u'hg.create.repository', u'repository.read', u'hg.register.manual_activate']), |
|
469 | 494 | 'repositories': {u'vcs_test_hg': u'repository.read'} |
|
470 | 495 | } |
|
471 | 496 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
472 | 497 | perms['repositories'][HG_REPO]) |
|
473 | 498 | self.assertEqual(u1_auth.permissions['repositories_groups'], |
|
474 | 499 | perms['repositories_groups']) |
|
475 | 500 | |
|
476 | 501 | def test_default_admin_group_perms(self): |
|
477 | 502 | self.g1 = _make_group('test1', skip_if_exists=True) |
|
478 | 503 | self.g2 = _make_group('test2', skip_if_exists=True) |
|
479 | 504 | a1_auth = AuthUser(user_id=self.a1.user_id) |
|
480 | 505 | perms = { |
|
481 | 506 | 'repositories_groups': {u'test1': 'group.admin', u'test2': 'group.admin'}, |
|
482 | 507 | 'global': set(['hg.admin']), |
|
483 | 508 | 'repositories': {u'vcs_test_hg': 'repository.admin'} |
|
484 | 509 | } |
|
485 | 510 | |
|
486 | 511 | self.assertEqual(a1_auth.permissions['repositories'][HG_REPO], |
|
487 | 512 | perms['repositories'][HG_REPO]) |
|
488 | 513 | self.assertEqual(a1_auth.permissions['repositories_groups'], |
|
489 | 514 | perms['repositories_groups']) |
|
490 | 515 | |
|
491 | 516 | def test_propagated_permission_from_users_group(self): |
|
492 | 517 | # make group |
|
493 | 518 | self.ug1 = UsersGroupModel().create('G1') |
|
494 | 519 | # add user to group |
|
495 | 520 | UsersGroupModel().add_user_to_group(self.ug1, self.u1) |
|
496 | 521 | |
|
497 | 522 | # set permission to lower |
|
498 | 523 | new_perm = 'repository.none' |
|
499 | 524 | RepoModel().grant_user_permission(repo=HG_REPO, user=self.u1, perm=new_perm) |
|
500 | 525 | Session.commit() |
|
501 | 526 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
502 | 527 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
503 | 528 | new_perm) |
|
504 | 529 | |
|
505 | 530 | # grant perm for group this should override permission from user |
|
506 | 531 | new_perm = 'repository.write' |
|
507 | 532 | RepoModel().grant_users_group_permission(repo=HG_REPO, |
|
508 | 533 | group_name=self.ug1, |
|
509 | 534 | perm=new_perm) |
|
510 | 535 | # check perms |
|
511 | 536 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
512 | 537 | perms = { |
|
513 | 538 | 'repositories_groups': {}, |
|
514 | 539 | 'global': set([u'hg.create.repository', u'repository.read', |
|
515 | 540 | u'hg.register.manual_activate']), |
|
516 | 541 | 'repositories': {u'vcs_test_hg': u'repository.read'} |
|
517 | 542 | } |
|
518 | 543 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
519 | 544 | new_perm) |
|
520 | 545 | self.assertEqual(u1_auth.permissions['repositories_groups'], |
|
521 | 546 | perms['repositories_groups']) |
|
522 | 547 | |
|
523 | 548 | def test_propagated_permission_from_users_group_lower_weight(self): |
|
524 | 549 | # make group |
|
525 | 550 | self.ug1 = UsersGroupModel().create('G1') |
|
526 | 551 | # add user to group |
|
527 | 552 | UsersGroupModel().add_user_to_group(self.ug1, self.u1) |
|
528 | 553 | |
|
529 | 554 | # set permission to lower |
|
530 | 555 | new_perm_h = 'repository.write' |
|
531 | 556 | RepoModel().grant_user_permission(repo=HG_REPO, user=self.u1, |
|
532 | 557 | perm=new_perm_h) |
|
533 | 558 | Session.commit() |
|
534 | 559 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
535 | 560 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
536 | 561 | new_perm_h) |
|
537 | 562 | |
|
538 | 563 | # grant perm for group this should NOT override permission from user |
|
539 | 564 | # since it's lower than granted |
|
540 | 565 | new_perm_l = 'repository.read' |
|
541 | 566 | RepoModel().grant_users_group_permission(repo=HG_REPO, |
|
542 | 567 | group_name=self.ug1, |
|
543 | 568 | perm=new_perm_l) |
|
544 | 569 | # check perms |
|
545 | 570 | u1_auth = AuthUser(user_id=self.u1.user_id) |
|
546 | 571 | perms = { |
|
547 | 572 | 'repositories_groups': {}, |
|
548 | 573 | 'global': set([u'hg.create.repository', u'repository.read', |
|
549 | 574 | u'hg.register.manual_activate']), |
|
550 | 575 | 'repositories': {u'vcs_test_hg': u'repository.write'} |
|
551 | 576 | } |
|
552 | 577 | self.assertEqual(u1_auth.permissions['repositories'][HG_REPO], |
|
553 | 578 | new_perm_h) |
|
554 | 579 | self.assertEqual(u1_auth.permissions['repositories_groups'], |
|
555 | 580 | perms['repositories_groups']) |
General Comments 0
You need to be logged in to leave comments.
Login now