@@ -0,0 +1,31 @@
|
1 | # -*- coding: utf-8 -*- | |
|
2 | # This program is free software: you can redistribute it and/or modify | |
|
3 | # it under the terms of the GNU General Public License as published by | |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
|
5 | # (at your option) any later version. | |
|
6 | # | |
|
7 | # This program is distributed in the hope that it will be useful, | |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
10 | # GNU General Public License for more details. | |
|
11 | # | |
|
12 | # You should have received a copy of the GNU General Public License | |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
14 | from tgext.routes import RoutedController | |
|
15 | from kallithea.lib.base import BaseController | |
|
16 | from kallithea.controllers.error import ErrorController | |
|
17 | ||
|
18 | ||
|
19 | # With TurboGears, the RootController is the controller from which all routing | |
|
20 | # starts. It is 'magically' found based on the fact that a controller | |
|
21 | # 'foo' is expected to have a class name FooController, located in a file | |
|
22 | # foo.py, inside config['paths']['controllers']. The name 'root' for the root | |
|
23 | # controller is the default name. The dictionary config['paths'] determines the | |
|
24 | # directories where templates, static files and controllers are found. It is | |
|
25 | # set up in tg.AppConfig based on AppConfig['package'] ('kallithea') and the | |
|
26 | # respective defaults 'templates', 'public' and 'controllers'. | |
|
27 | # Inherit from RoutedController to allow Kallithea to use regex-based routing. | |
|
28 | class RootController(RoutedController, BaseController): | |
|
29 | ||
|
30 | # the following assignment hooks in error handling | |
|
31 | error = ErrorController() |
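To make the naming convention described above concrete, here is a minimal hypothetical controller that would be discovered the same way; the module path and class are illustrative only and are not part of this changeset:

    # kallithea/controllers/foo.py (hypothetical)
    # The URL segment 'foo' resolves to class FooController in foo.py,
    # per the convention explained in the comment above.
    from kallithea.lib.base import BaseController

    class FooController(BaseController):

        def index(self):
            return 'served from /foo'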
@@ -1,8 +1,9 @@
|
1 | 1 | babel >= 0.9.6, < 2.4 |
|
2 | 2 | waitress >= 0.8.8, < 1.0 |
|
3 | 3 | pytest ~= 3.0 |
|
4 | 4 | pytest-runner |
|
5 | 5 | pytest-sugar>=0.7.0 |
|
6 | 6 | pytest-catchlog |
|
7 | 7 | mock |
|
8 | 8 | sphinx |
|
9 | webtest < 3 |
@@ -1,610 +1,616 @@
|
1 | 1 | ################################################################################ |
|
2 | 2 | ################################################################################ |
|
3 | 3 | # Kallithea - Development config: # |
|
4 | 4 | # listening on *:5000 # |
|
5 | 5 | # sqlite and kallithea.db # |
|
6 | 6 | # initial_repo_scan = true # |
|
7 | 7 | # set debug = true # |
|
8 | 8 | # verbose and colorful logging # |
|
9 | 9 | # # |
|
10 | 10 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
11 | 11 | ################################################################################ |
|
12 | 12 | ################################################################################ |
|
13 | 13 | |
|
14 | 14 | [DEFAULT] |
|
15 | 15 | debug = true |
|
16 | 16 | pdebug = false |
|
17 | 17 | |
|
18 | 18 | ################################################################################ |
|
19 | 19 | ## Email settings ## |
|
20 | 20 | ## ## |
|
21 | 21 | ## Refer to the documentation ("Email settings") for more details. ## |
|
22 | 22 | ## ## |
|
23 | 23 | ## It is recommended to use a valid sender address that passes access ## |
|
24 | 24 | ## validation and spam filtering in mail servers. ## |
|
25 | 25 | ################################################################################ |
|
26 | 26 | |
|
27 | 27 | ## 'From' header for application emails. You can optionally add a name. |
|
28 | 28 | ## Default: |
|
29 | 29 | #app_email_from = Kallithea |
|
30 | 30 | ## Examples: |
|
31 | 31 | #app_email_from = Kallithea <kallithea-noreply@example.com> |
|
32 | 32 | #app_email_from = kallithea-noreply@example.com |
|
33 | 33 | |
|
34 | 34 | ## Subject prefix for application emails. |
|
35 | 35 | ## A space between this prefix and the real subject is automatically added. |
|
36 | 36 | ## Default: |
|
37 | 37 | #email_prefix = |
|
38 | 38 | ## Example: |
|
39 | 39 | #email_prefix = [Kallithea] |
|
40 | 40 | |
|
41 | 41 | ## Recipients for error emails and fallback recipients of application mails. |
|
42 | 42 | ## Multiple addresses can be specified, space-separated. |
|
43 | 43 | ## Only addresses are allowed, do not add any name part. |
|
44 | 44 | ## Default: |
|
45 | 45 | #email_to = |
|
46 | 46 | ## Examples: |
|
47 | 47 | #email_to = admin@example.com |
|
48 | 48 | #email_to = admin@example.com another_admin@example.com |
|
49 | 49 | |
|
50 | 50 | ## 'From' header for error emails. You can optionally add a name. |
|
51 | 51 | ## Default: |
|
52 | 52 | #error_email_from = pylons@yourapp.com |
|
53 | 53 | ## Examples: |
|
54 | 54 | #error_email_from = Kallithea Errors <kallithea-noreply@example.com> |
|
55 | 55 | #error_email_from = paste_error@example.com |
|
56 | 56 | |
|
57 | 57 | ## SMTP server settings |
|
58 | 58 | ## If specifying credentials, make sure to use secure connections. |
|
59 | 59 | ## Default: Send unencrypted unauthenticated mails to the specified smtp_server. |
|
60 | 60 | ## For "SSL", use smtp_use_ssl = true and smtp_port = 465. |
|
61 | 61 | ## For "STARTTLS", use smtp_use_tls = true and smtp_port = 587. |
|
62 | 62 | #smtp_server = smtp.example.com |
|
63 | 63 | #smtp_username = |
|
64 | 64 | #smtp_password = |
|
65 | 65 | #smtp_port = 25 |
|
66 | 66 | #smtp_use_ssl = false |
|
67 | 67 | #smtp_use_tls = false |
|
68 | 68 | |
|
69 | 69 | [server:main] |
|
70 | 70 | ## Gearbox default web server ## |
|
71 | 71 | #use = egg:gearbox#wsgiref |
|
72 | 72 | ## nr of worker threads to spawn |
|
73 | 73 | #threadpool_workers = 1 |
|
74 | 74 | ## max request before thread respawn |
|
75 | 75 | #threadpool_max_requests = 100 |
|
76 | 76 | ## option to use threads instead of processes
|
77 | 77 | #use_threadpool = true |
|
78 | 78 | |
|
79 | 79 | ## Gearbox gevent web server ## |
|
80 | 80 | #use = egg:gearbox#gevent |
|
81 | 81 | |
|
82 | 82 | ## WAITRESS ## |
|
83 | 83 | use = egg:waitress#main |
|
84 | 84 | ## number of worker threads |
|
85 | 85 | threads = 1 |
|
86 | 86 | ## max request body size: 100 GiB (100 * 1024**3 = 107374182400 bytes)
|
87 | 87 | max_request_body_size = 107374182400 |
|
88 | 88 | ## use poll instead of select, fixes fd limits, may not work on old |
|
89 | 89 | ## windows systems. |
|
90 | 90 | #asyncore_use_poll = True |
|
91 | 91 | |
|
92 | 92 | ## GUNICORN ## |
|
93 | 93 | #use = egg:gunicorn#main |
|
94 | 94 | ## number of process workers. You must set `instance_id = *` when this option |
|
95 | 95 | ## is set to more than one worker |
|
96 | 96 | #workers = 1 |
|
97 | 97 | ## process name |
|
98 | 98 | #proc_name = kallithea |
|
99 | 99 | ## type of worker class, one of sync, eventlet, gevent, tornado |
|
100 | 100 | ## for bigger setups, using a worker class other than sync is recommended
|
101 | 101 | #worker_class = sync |
|
102 | 102 | #max_requests = 1000 |
|
103 | 103 | ## amount of time a worker can handle a request before it gets killed and
|
104 | 104 | ## restarted |
|
105 | 105 | #timeout = 3600 |
|
106 | 106 | |
|
107 | 107 | ## UWSGI ## |
|
108 | 108 | ## run with uwsgi --ini-paste-logged <inifile.ini> |
|
109 | 109 | #[uwsgi] |
|
110 | 110 | #socket = /tmp/uwsgi.sock |
|
111 | 111 | #master = true |
|
112 | 112 | #http = 127.0.0.1:5000 |
|
113 | 113 | |
|
114 | 114 | ## run as daemon and redirect all output to a file
|
115 | 115 | #daemonize = ./uwsgi_kallithea.log |
|
116 | 116 | |
|
117 | 117 | ## master process PID |
|
118 | 118 | #pidfile = ./uwsgi_kallithea.pid |
|
119 | 119 | |
|
120 | 120 | ## stats server with workers statistics, use uwsgitop |
|
121 | 121 | ## for monitoring, `uwsgitop 127.0.0.1:1717` |
|
122 | 122 | #stats = 127.0.0.1:1717 |
|
123 | 123 | #memory-report = true |
|
124 | 124 | |
|
125 | 125 | ## log 5XX errors |
|
126 | 126 | #log-5xx = true |
|
127 | 127 | |
|
128 | 128 | ## Set the socket listen queue size. |
|
129 | 129 | #listen = 256 |
|
130 | 130 | |
|
131 | 131 | ## Gracefully reload workers after the specified number of handled requests
|
132 | 132 | ## (avoid memory leaks). |
|
133 | 133 | #max-requests = 1000 |
|
134 | 134 | |
|
135 | 135 | ## enable large buffers |
|
136 | 136 | #buffer-size = 65535 |
|
137 | 137 | |
|
138 | 138 | ## socket and http timeouts ## |
|
139 | 139 | #http-timeout = 3600 |
|
140 | 140 | #socket-timeout = 3600 |
|
141 | 141 | |
|
142 | 142 | ## Log requests slower than the specified number of milliseconds. |
|
143 | 143 | #log-slow = 10 |
|
144 | 144 | |
|
145 | 145 | ## Exit if no app can be loaded. |
|
146 | 146 | #need-app = true |
|
147 | 147 | |
|
148 | 148 | ## Set lazy mode (load apps in workers instead of master). |
|
149 | 149 | #lazy = true |
|
150 | 150 | |
|
151 | 151 | ## scaling ## |
|
152 | 152 | ## set the cheaper algorithm to use; if not set, the default will be used
|
153 | 153 | #cheaper-algo = spare |
|
154 | 154 | |
|
155 | 155 | ## minimum number of workers to keep at all times |
|
156 | 156 | #cheaper = 1 |
|
157 | 157 | |
|
158 | 158 | ## number of workers to spawn at startup |
|
159 | 159 | #cheaper-initial = 1 |
|
160 | 160 | |
|
161 | 161 | ## maximum number of workers that can be spawned |
|
162 | 162 | #workers = 4 |
|
163 | 163 | |
|
164 | 164 | ## how many workers should be spawned at a time |
|
165 | 165 | #cheaper-step = 1 |
|
166 | 166 | |
|
167 | 167 | ## COMMON ## |
|
168 | 168 | #host = 127.0.0.1 |
|
169 | 169 | host = 0.0.0.0 |
|
170 | 170 | port = 5000 |
|
171 | 171 | |
|
172 | 172 | ## middleware for hosting the WSGI application under a URL prefix |
|
173 | 173 | #[filter:proxy-prefix] |
|
174 | 174 | #use = egg:PasteDeploy#prefix |
|
175 | 175 | #prefix = /<your-prefix> |
|
176 | 176 | |
|
177 | 177 | [app:main] |
|
178 | 178 | use = egg:kallithea |
|
179 | 179 | ## enable proxy prefix middleware |
|
180 | 180 | #filter-with = proxy-prefix |
|
181 | 181 | |
|
182 | 182 | full_stack = true |
|
183 | 183 | static_files = true |
|
184 | 184 | ## Available Languages: |
|
185 | 185 | ## cs de fr hu ja nl_BE pl pt_BR ru sk zh_CN zh_TW |
|
186 | 186 | lang = |
|
187 | 187 | cache_dir = %(here)s/data |
|
188 | 188 | index_dir = %(here)s/data/index |
|
189 | 189 | |
|
190 | 190 | ## perform a full repository scan on each server start; this should be
|
191 | 191 | ## set to false after first startup, to allow faster server restarts. |
|
192 | 192 | #initial_repo_scan = false |
|
193 | 193 | initial_repo_scan = true |
|
194 | 194 | |
|
195 | 195 | ## uncomment and set this path to use archive download cache |
|
196 | 196 | archive_cache_dir = %(here)s/tarballcache |
|
197 | 197 | |
|
198 | 198 | ## change this to unique ID for security |
|
199 | 199 | app_instance_uuid = development-not-secret |
|
200 | 200 | |
|
201 | 201 | ## cut off limit for large diffs (size in bytes) |
|
202 | 202 | cut_off_limit = 256000 |
|
203 | 203 | |
|
204 | 204 | ## force https in Kallithea, fixes https redirects, assumes it's always https |
|
205 | 205 | force_https = false |
|
206 | 206 | |
|
207 | 207 | ## use Strict-Transport-Security headers |
|
208 | 208 | use_htsts = false |
|
209 | 209 | |
|
210 | 210 | ## number of commits stats will parse on each iteration |
|
211 | 211 | commit_parse_limit = 25 |
|
212 | 212 | |
|
213 | 213 | ## path to git executable |
|
214 | 214 | git_path = git |
|
215 | 215 | |
|
216 | 216 | ## git rev filter option, --all is the default filter, if you need to |
|
217 | 217 | ## hide all refs in changelog switch this to --branches --tags |
|
218 | 218 | #git_rev_filter = --branches --tags |
|
219 | 219 | |
|
220 | 220 | ## RSS feed options |
|
221 | 221 | rss_cut_off_limit = 256000 |
|
222 | 222 | rss_items_per_page = 10 |
|
223 | 223 | rss_include_diff = false |
|
224 | 224 | |
|
225 | 225 | ## options for showing and identifying changesets |
|
226 | 226 | show_sha_length = 12 |
|
227 | 227 | show_revision_number = false |
|
228 | 228 | |
|
229 | 229 | ## Canonical URL to use when creating full URLs in UI and texts. |
|
230 | 230 | ## Useful when the site is available under different names or protocols. |
|
231 | 231 | ## Defaults to what is provided in the WSGI environment. |
|
232 | 232 | #canonical_url = https://kallithea.example.com/repos |
|
233 | 233 | |
|
234 | 234 | ## gist URL alias, used to create nicer URLs for gists. This should be a
|
235 | 235 | ## URL that rewrites to _admin/gists/<gistid>.
|
236 | 236 | ## example: http://gist.example.com/{gistid}. Empty means use the internal
|
237 | 237 | ## Kallithea URL, i.e. http[s]://kallithea.example.com/_admin/gists/<gistid>
|
238 | 238 | gist_alias_url = |
|
239 | 239 | |
|
240 | 240 | ## whitelist of API enabled controllers. This allows adding a list of
|
241 | 241 | ## controllers to which access will be enabled by api_key. E.g.: to enable
|
242 | 242 | ## api access to raw_files put `FilesController:raw`, to enable access to patches |
|
243 | 243 | ## add `ChangesetController:changeset_patch`. This list should be "," separated |
|
244 | 244 | ## Syntax is <ControllerClass>:<function>. Check debug logs for generated names |
|
245 | 245 | ## Recommended settings below are commented out: |
|
246 | 246 | api_access_controllers_whitelist = |
|
247 | 247 | # ChangesetController:changeset_patch, |
|
248 | 248 | # ChangesetController:changeset_raw, |
|
249 | 249 | # FilesController:raw, |
|
250 | 250 | # FilesController:archivefile |
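As a sketch of what this whitelist enables: with FilesController:raw whitelisted, a raw file can be fetched with an API key instead of a browser session. The URL, repository name and the api_key GET parameter name below are assumptions for illustration; check the debug logs mentioned above for the generated controller names.

    # hypothetical API-key access to a whitelisted controller
    import urllib2

    url = ('https://kallithea.example.com/myrepo/raw/tip/setup.py'
           '?api_key=SECRET_API_KEY')
    print(urllib2.urlopen(url).read())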
|
251 | 251 | |
|
252 | 252 | ## default encoding used to convert from and to unicode |
|
253 | 253 | ## can also be a comma separated list of encodings in case of mixed encodings
|
254 | 254 | default_encoding = utf8 |
|
255 | 255 | |
|
256 | 256 | ## issue tracker for Kallithea (leave blank to disable, absent for default) |
|
257 | 257 | #bugtracker = https://bitbucket.org/conservancy/kallithea/issues |
|
258 | 258 | |
|
259 | 259 | ## issue tracking mapping for commits messages |
|
260 | 260 | ## comment out issue_pat, issue_server, issue_prefix to enable |
|
261 | 261 | |
|
262 | 262 | ## pattern to get the issues from commit messages |
|
263 | 263 | ## the default used here is #<number> with a non-capturing regex group for `#`
|
264 | 264 | ## {id} will be all groups matched from this pattern |
|
265 | 265 | |
|
266 | 266 | issue_pat = (?:\s*#)(\d+) |
|
267 | 267 | |
|
268 | 268 | ## server URL of the issue; each {id} will be replaced with the match
|
269 | 269 | ## fetched from the regex, {repo} is replaced with the full repository name
|
270 | 270 | ## including groups, and {repo_name} is replaced with just the repository name
|
271 | 271 | |
|
272 | 272 | issue_server_link = https://issues.example.com/{repo}/issue/{id} |
|
273 | 273 | |
|
274 | 274 | ## prefix to add to the link to indicate it's a URL
|
275 | 275 | ## #314 will be replaced by <issue_prefix><id> |
|
276 | 276 | |
|
277 | 277 | issue_prefix = # |
|
278 | 278 | |
|
279 | 279 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify |
|
280 | 280 | ## multiple patterns, pointing to other issue servers, wikis or others;
|
281 | 281 | ## below is an example of how to create a wiki pattern
|
282 | 282 | # wiki-some-id -> https://wiki.example.com/some-id |
|
283 | 283 | |
|
284 | 284 | #issue_pat_wiki = (?:wiki-)(.+) |
|
285 | 285 | #issue_server_link_wiki = https://wiki.example.com/{id} |
|
286 | 286 | #issue_prefix_wiki = WIKI- |
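A minimal Python sketch of the substitution these settings drive (it approximates Kallithea's real helper, which additionally renders <issue_prefix><id> as the link text):

    import re

    issue_pat = r'(?:\s*#)(\d+)'
    issue_server_link = 'https://issues.example.com/{repo}/issue/{id}'

    def expand_issue_links(message, repo='mygroup/myrepo'):
        def _link(match):
            # {repo} and {id} are replaced as described above
            url = issue_server_link.replace('{repo}', repo)
            return ' ' + url.replace('{id}', match.group(1))
        return re.sub(issue_pat, _link, message)

    print(expand_issue_links('fix bug #314'))
    # fix bug https://issues.example.com/mygroup/myrepo/issue/314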
|
287 | 287 | |
|
288 | 288 | ## alternative HTTP response code for failed authentication. The default HTTP
|
289 | 289 | ## response is 401 HTTPUnauthorized. Currently Mercurial clients have trouble with |
|
290 | 290 | ## handling that. Set this variable to 403 to return HTTPForbidden |
|
291 | 291 | auth_ret_code = |
|
292 | 292 | |
|
293 | 293 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
294 | 294 | ## codes don't break the transactions while 4XX codes do |
|
295 | 295 | lock_ret_code = 423 |
|
296 | 296 | |
|
297 | 297 | ## allow changing the repository location in the settings page
|
298 | 298 | allow_repo_location_change = True |
|
299 | 299 | |
|
300 | 300 | ## allow setting up custom hooks in the settings page
|
301 | 301 | allow_custom_hooks_settings = True |
|
302 | 302 | |
|
303 | 303 | ## extra extensions for indexing, space separated and without the leading '.'. |
|
304 | 304 | # index.extensions = |
|
305 | 305 | # gemfile |
|
306 | 306 | # lock |
|
307 | 307 | |
|
308 | 308 | ## extra filenames for indexing, space separated |
|
309 | 309 | # index.filenames = |
|
310 | 310 | # .dockerignore |
|
311 | 311 | # .editorconfig |
|
312 | 312 | # INSTALL |
|
313 | 313 | # CHANGELOG |
|
314 | 314 | |
|
315 | 315 | #################################### |
|
316 | 316 | ### CELERY CONFIG #### |
|
317 | 317 | #################################### |
|
318 | 318 | |
|
319 | 319 | use_celery = false |
|
320 | 320 | |
|
321 | 321 | ## Example: connect to the virtual host 'rabbitmqhost' on localhost as rabbitmq: |
|
322 | 322 | broker.url = amqp://rabbitmq:qewqew@localhost:5672/rabbitmqhost |
|
323 | 323 | |
|
324 | 324 | celery.imports = kallithea.lib.celerylib.tasks |
|
325 | 325 | celery.accept.content = pickle |
|
326 | 326 | celery.result.backend = amqp |
|
327 | 327 | celery.result.dburi = amqp:// |
|
328 | 328 | celery.result.serializer = json
|
329 | 329 | |
|
330 | 330 | #celery.send.task.error.emails = true |
|
331 | 331 | #celery.amqp.task.result.expires = 18000 |
|
332 | 332 | |
|
333 | 333 | celeryd.concurrency = 2 |
|
334 | 334 | celeryd.max.tasks.per.child = 1 |
|
335 | 335 | |
|
336 | 336 | ## If true, tasks will never be sent to the queue, but executed locally instead. |
|
337 | 337 | celery.always.eager = false |
|
338 | 338 | |
|
339 | 339 | #################################### |
|
340 | 340 | ### BEAKER CACHE #### |
|
341 | 341 | #################################### |
|
342 | 342 | |
|
343 | 343 | beaker.cache.data_dir = %(here)s/data/cache/data |
|
344 | 344 | beaker.cache.lock_dir = %(here)s/data/cache/lock |
|
345 | 345 | |
|
346 | 346 | beaker.cache.regions = short_term,long_term,sql_cache_short |
|
347 | 347 | |
|
348 | 348 | beaker.cache.short_term.type = memory |
|
349 | 349 | beaker.cache.short_term.expire = 60 |
|
350 | 350 | beaker.cache.short_term.key_length = 256 |
|
351 | 351 | |
|
352 | 352 | beaker.cache.long_term.type = memory |
|
353 | 353 | beaker.cache.long_term.expire = 36000 |
|
354 | 354 | beaker.cache.long_term.key_length = 256 |
|
355 | 355 | |
|
356 | 356 | beaker.cache.sql_cache_short.type = memory |
|
357 | 357 | beaker.cache.sql_cache_short.expire = 10 |
|
358 | 358 | beaker.cache.sql_cache_short.key_length = 256 |
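For reference, a short runnable sketch of how application code consumes such a region through the standard Beaker API; the region dict mirrors the sql_cache_short settings above, and heavy_query is a made-up function:

    import beaker.cache
    from beaker.cache import cache_region

    # configure the region in code the way the ini section above does
    beaker.cache.cache_regions.update({
        'sql_cache_short': {'type': 'memory', 'expire': 10, 'key_length': 256},
    })

    @cache_region('sql_cache_short')
    def heavy_query(key):
        # recomputed at most every 10 seconds per key
        return key.upper()  # stand-in for an expensive lookup

    print(heavy_query('x'))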
|
359 | 359 | |
|
360 | 360 | #################################### |
|
361 | 361 | ### BEAKER SESSION #### |
|
362 | 362 | #################################### |
|
363 | 363 | |
|
364 | 364 | ## Name of session cookie. Should be unique for a given host and path, even when running |
|
365 | 365 | ## on different ports. Otherwise, cookie sessions will be shared and messed up. |
|
366 | 366 | beaker.session.key = kallithea |
|
367 | 367 | ## Sessions should always only be accessible by the browser, not directly by JavaScript. |
|
368 | 368 | beaker.session.httponly = true |
|
369 | 369 | ## Session lifetime. 2592000 seconds is 30 days. |
|
370 | 370 | beaker.session.timeout = 2592000 |
|
371 | 371 | |
|
372 | 372 | ## Server secret used with HMAC to ensure integrity of cookies. |
|
373 | 373 | beaker.session.secret = development-not-secret |
|
374 | 374 | ## Further, encrypt the data with AES. |
|
375 | 375 | #beaker.session.encrypt_key = <key_for_encryption> |
|
376 | 376 | #beaker.session.validate_key = <validation_key> |
|
377 | 377 | |
|
378 | 378 | ## Type of storage used for the session, current types are |
|
379 | 379 | ## dbm, file, memcached, database, and memory. |
|
380 | 380 | |
|
381 | 381 | ## File system storage of session data. (default) |
|
382 | 382 | #beaker.session.type = file |
|
383 | 383 | |
|
384 | 384 | ## Cookie only, store all session data inside the cookie. Requires secure secrets. |
|
385 | 385 | #beaker.session.type = cookie |
|
386 | 386 | |
|
387 | 387 | ## Database storage of session data. |
|
388 | 388 | #beaker.session.type = ext:database |
|
389 | 389 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/kallithea |
|
390 | 390 | #beaker.session.table_name = db_session |
|
391 | 391 | |
|
392 | 392 | ############################ |
|
393 | 393 | ## ERROR HANDLING SYSTEMS ## |
|
394 | 394 | ############################ |
|
395 | 395 | |
|
396 | 396 | #################### |
|
397 | 397 | ### [appenlight] ### |
|
398 | 398 | #################### |
|
399 | 399 | |
|
400 | 400 | ## AppEnlight is tailored to work with Kallithea, see |
|
401 | 401 | ## http://appenlight.com for details on how to obtain an account;
|
402 | 402 | ## you must install the python package `appenlight_client` to make it work
|
403 | 403 | |
|
404 | 404 | ## appenlight enabled |
|
405 | 405 | appenlight = false |
|
406 | 406 | |
|
407 | 407 | appenlight.server_url = https://api.appenlight.com |
|
408 | 408 | appenlight.api_key = YOUR_API_KEY |
|
409 | 409 | |
|
410 | 410 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
411 | 411 | |
|
412 | 412 | ## enables 404 error logging (default False) |
|
413 | 413 | appenlight.report_404 = false |
|
414 | 414 | |
|
415 | 415 | ## time in seconds after which a request is considered slow (default 1)
|
416 | 416 | appenlight.slow_request_time = 1 |
|
417 | 417 | |
|
418 | 418 | ## record slow requests in application |
|
419 | 419 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
420 | 420 | appenlight.slow_requests = true |
|
421 | 421 | |
|
422 | 422 | ## enable hooking to application loggers |
|
423 | 423 | #appenlight.logging = true |
|
424 | 424 | |
|
425 | 425 | ## minimum log level for log capture |
|
426 | 426 | #appenlight.logging.level = WARNING |
|
427 | 427 | |
|
428 | 428 | ## send logs only from erroneous/slow requests |
|
429 | 429 | ## (saves API quota for intensive logging) |
|
430 | 430 | appenlight.logging_on_error = false |
|
431 | 431 | |
|
432 | 432 | ## list of additional keywords that should be grabbed from environ object |
|
433 | 433 | ## can be string with comma separated list of words in lowercase |
|
434 | 434 | ## (by default the client will always send the following info:
|
435 | 435 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
|
436 | 436 | ## start with HTTP*); this list can be extended with additional keywords here
|
437 | 437 | appenlight.environ_keys_whitelist = |
|
438 | 438 | |
|
439 | 439 | ## list of keywords that should be blanked from request object |
|
440 | 440 | ## can be string with comma separated list of words in lowercase |
|
441 | 441 | ## (by default the client will always blank keys that contain the following words:
|
442 | 442 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf';
|
443 | 443 | ## this list can be extended with additional keywords set here)
|
444 | 444 | appenlight.request_keys_blacklist = |
|
445 | 445 | |
|
446 | 446 | ## list of namespaces that should be ignored when gathering log entries
|
447 | 447 | ## can be string with comma separated list of namespaces |
|
448 | 448 | ## (by default the client ignores its own entries: appenlight_client.client)
|
449 | 449 | appenlight.log_namespace_blacklist = |
|
450 | 450 | |
|
451 | 451 | ################ |
|
452 | 452 | ### [sentry] ### |
|
453 | 453 | ################ |
|
454 | 454 | |
|
455 | 455 | ## sentry is an alternative open source error aggregator;
|
456 | 456 | ## you must install the python packages `sentry` and `raven` to enable it
|
457 | 457 | |
|
458 | 458 | sentry.dsn = YOUR_DSN
|
459 | 459 | sentry.servers = |
|
460 | 460 | sentry.name = |
|
461 | 461 | sentry.key = |
|
462 | 462 | sentry.public_key = |
|
463 | 463 | sentry.secret_key = |
|
464 | 464 | sentry.project = |
|
465 | 465 | sentry.site = |
|
466 | 466 | sentry.include_paths = |
|
467 | 467 | sentry.exclude_paths = |
|
468 | 468 | |
|
469 | 469 | ################################################################################ |
|
470 | 470 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
471 | 471 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
472 | 472 | ## execute malicious code after an exception is raised. ## |
|
473 | 473 | ################################################################################ |
|
474 | 474 | #set debug = false |
|
475 | 475 | set debug = true |
|
476 | 476 | |
|
477 | 477 | ################################## |
|
478 | 478 | ### LOGVIEW CONFIG ### |
|
479 | 479 | ################################## |
|
480 | 480 | |
|
481 | 481 | logview.sqlalchemy = #faa |
|
482 | 482 | logview.pylons.templating = #bfb |
|
483 | 483 | logview.pylons.util = #eee |
|
484 | 484 | |
|
485 | 485 | ######################################################### |
|
486 | 486 | ### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG ###
|
487 | 487 | ######################################################### |
|
488 | 488 | |
|
489 | 489 | # SQLITE [default] |
|
490 | 490 | sqlalchemy.url = sqlite:///%(here)s/kallithea.db?timeout=60 |
|
491 | 491 | |
|
492 | 492 | # POSTGRESQL |
|
493 | 493 | #sqlalchemy.url = postgresql://user:pass@localhost/kallithea |
|
494 | 494 | |
|
495 | 495 | # MySQL |
|
496 | 496 | #sqlalchemy.url = mysql://user:pass@localhost/kallithea?charset=utf8 |
|
497 | 497 | |
|
498 | 498 | # see sqlalchemy docs for others |
|
499 | 499 | |
|
500 | 500 | sqlalchemy.echo = false |
|
501 | 501 | sqlalchemy.pool_recycle = 3600 |
|
502 | 502 | |
|
503 | 503 | ################################ |
|
504 | 504 | ### ALEMBIC CONFIGURATION #### |
|
505 | 505 | ################################ |
|
506 | 506 | |
|
507 | 507 | [alembic] |
|
508 | 508 | script_location = kallithea:alembic |
|
509 | 509 | |
|
510 | 510 | ################################ |
|
511 | 511 | ### LOGGING CONFIGURATION #### |
|
512 | 512 | ################################ |
|
513 | 513 | |
|
514 | 514 | [loggers] |
|
515 | keys = root, routes, kallithea, sqlalchemy, gearbox, beaker, templates, whoosh_indexer | |
|
515 | keys = root, routes, kallithea, sqlalchemy, tg, gearbox, beaker, templates, whoosh_indexer | |
|
516 | 516 | |
|
517 | 517 | [handlers] |
|
518 | 518 | keys = console, console_sql |
|
519 | 519 | |
|
520 | 520 | [formatters] |
|
521 | 521 | keys = generic, color_formatter, color_formatter_sql |
|
522 | 522 | |
|
523 | 523 | ############# |
|
524 | 524 | ## LOGGERS ## |
|
525 | 525 | ############# |
|
526 | 526 | |
|
527 | 527 | [logger_root] |
|
528 | 528 | level = NOTSET |
|
529 | 529 | handlers = console |
|
530 | 530 | |
|
531 | 531 | [logger_routes] |
|
532 | 532 | level = DEBUG |
|
533 | 533 | handlers = |
|
534 | 534 | qualname = routes.middleware |
|
535 | 535 | ## "level = DEBUG" logs the route matched and routing variables. |
|
536 | 536 | propagate = 1 |
|
537 | 537 | |
|
538 | 538 | [logger_beaker] |
|
539 | 539 | level = DEBUG |
|
540 | 540 | handlers = |
|
541 | 541 | qualname = beaker.container |
|
542 | 542 | propagate = 1 |
|
543 | 543 | |
|
544 | 544 | [logger_templates] |
|
545 | 545 | level = INFO |
|
546 | 546 | handlers = |
|
547 | 547 | qualname = pylons.templating |
|
548 | 548 | propagate = 1 |
|
549 | 549 | |
|
550 | 550 | [logger_kallithea] |
|
551 | 551 | level = DEBUG |
|
552 | 552 | handlers = |
|
553 | 553 | qualname = kallithea |
|
554 | 554 | propagate = 1 |
|
555 | 555 | |
|
556 | [logger_tg] | |
|
557 | level = DEBUG | |
|
558 | handlers = | |
|
559 | qualname = tg | |
|
560 | propagate = 1 | |
|
561 | ||
|
556 | 562 | [logger_gearbox] |
|
557 | 563 | level = DEBUG |
|
558 | 564 | handlers = |
|
559 | 565 | qualname = gearbox |
|
560 | 566 | propagate = 1 |
|
561 | 567 | |
|
562 | 568 | [logger_sqlalchemy] |
|
563 | 569 | level = INFO |
|
564 | 570 | handlers = console_sql |
|
565 | 571 | qualname = sqlalchemy.engine |
|
566 | 572 | propagate = 0 |
|
567 | 573 | |
|
568 | 574 | [logger_whoosh_indexer] |
|
569 | 575 | level = DEBUG |
|
570 | 576 | handlers = |
|
571 | 577 | qualname = whoosh_indexer |
|
572 | 578 | propagate = 1 |
|
573 | 579 | |
|
574 | 580 | ############## |
|
575 | 581 | ## HANDLERS ## |
|
576 | 582 | ############## |
|
577 | 583 | |
|
578 | 584 | [handler_console] |
|
579 | 585 | class = StreamHandler |
|
580 | 586 | args = (sys.stderr,) |
|
581 | 587 | #level = INFO |
|
582 | 588 | level = DEBUG |
|
583 | 589 | #formatter = generic |
|
584 | 590 | formatter = color_formatter |
|
585 | 591 | |
|
586 | 592 | [handler_console_sql] |
|
587 | 593 | class = StreamHandler |
|
588 | 594 | args = (sys.stderr,) |
|
589 | 595 | #level = WARN |
|
590 | 596 | level = DEBUG |
|
591 | 597 | #formatter = generic |
|
592 | 598 | formatter = color_formatter_sql |
|
593 | 599 | |
|
594 | 600 | ################ |
|
595 | 601 | ## FORMATTERS ## |
|
596 | 602 | ################ |
|
597 | 603 | |
|
598 | 604 | [formatter_generic] |
|
599 | 605 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
600 | 606 | datefmt = %Y-%m-%d %H:%M:%S |
|
601 | 607 | |
|
602 | 608 | [formatter_color_formatter] |
|
603 | 609 | class = kallithea.lib.colored_formatter.ColorFormatter |
|
604 | 610 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
605 | 611 | datefmt = %Y-%m-%d %H:%M:%S |
|
606 | 612 | |
|
607 | 613 | [formatter_color_formatter_sql] |
|
608 | 614 | class = kallithea.lib.colored_formatter.ColorFormatterSql |
|
609 | 615 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
610 | 616 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,67 +1,63 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea |
|
16 | 16 | ~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Kallithea, a web based repository management system. |
|
19 | 19 | |
|
20 | 20 | Versioning implementation: http://www.python.org/dev/peps/pep-0386/ |
|
21 | 21 | |
|
22 | 22 | This file was forked by the Kallithea project in July 2014. |
|
23 | 23 | Original author and date, and relevant copyright and licensing information is below: |
|
24 | 24 | :created_on: Apr 9, 2010 |
|
25 | 25 | :author: marcink |
|
26 | 26 | :copyright: (c) 2013 RhodeCode GmbH, (C) 2014 Bradley M. Kuhn, and others. |
|
27 | 27 | :license: GPLv3, see LICENSE.md for more details. |
|
28 | 28 | """ |
|
29 | 29 | |
|
30 | 30 | import sys |
|
31 | 31 | import platform |
|
32 | 32 | |
|
33 | # temporary aliasing to allow early introduction of imports like 'from tg import request' | |
|
34 | import pylons | |
|
35 | sys.modules['tg'] = pylons | |
|
36 | ||
|
37 | 33 | VERSION = (0, 3, 99) |
|
38 | 34 | BACKENDS = { |
|
39 | 35 | 'hg': 'Mercurial repository', |
|
40 | 36 | 'git': 'Git repository', |
|
41 | 37 | } |
|
42 | 38 | |
|
43 | 39 | CELERY_ON = False |
|
44 | 40 | CELERY_EAGER = False |
|
45 | 41 | |
|
46 | 42 | CONFIG = {} |
|
47 | 43 | |
|
48 | 44 | # Linked module for extensions |
|
49 | 45 | EXTENSIONS = {} |
|
50 | 46 | |
|
51 | 47 | try: |
|
52 | 48 | import kallithea.brand |
|
53 | 49 | except ImportError: |
|
54 | 50 | pass |
|
55 | 51 | else: |
|
56 | 52 | assert False, 'Database rebranding is no longer supported; see README.' |
|
57 | 53 | |
|
58 | 54 | |
|
59 | 55 | __version__ = '.'.join(str(each) for each in VERSION) |
|
60 | 56 | __platform__ = platform.system() |
|
61 | 57 | __license__ = 'GPLv3' |
|
62 | 58 | __py_version__ = sys.version_info |
|
63 | 59 | __author__ = "Various Authors" |
|
64 | 60 | __url__ = 'https://kallithea-scm.org/' |
|
65 | 61 | |
|
66 | 62 | is_windows = __platform__ in ['Windows'] |
|
67 | 63 | is_unix = not is_windows |
@@ -1,185 +1,175 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | """ | |
|
15 | Global configuration file for TurboGears2 specific settings in Kallithea. | |
|
14 | 16 |
|
|
15 | import os | |
|
16 | import kallithea | |
|
17 | This file complements the .ini file. | |
|
18 | """ | |
|
19 | ||
|
17 | 20 | import platform |
|
18 | ||
|
19 | import pylons | |
|
20 | import mako.lookup | |
|
21 | import formencode | |
|
21 | import os, sys | |
|
22 | 22 | |
|
23 | import kallithea.lib.app_globals as app_globals | |
|
24 | ||
|
25 | from kallithea.config.routing import make_map | |
|
|
23 | import tg | |
|
24 | from tg import hooks | |
|
25 | from tg.configuration import AppConfig | |
|
26 | from tg.support.converters import asbool | |
|
26 | 27 | |
|
27 | from kallithea.lib import helpers | |
|
|
28 | from kallithea.lib.middleware.https_fixup import HttpsFixup | |
|
29 | from kallithea.lib.middleware.simplegit import SimpleGit | |
|
30 | from kallithea.lib.middleware.simplehg import SimpleHg | |
|
31 | from kallithea.config.routing import make_map | |
|
28 | 32 | from kallithea.lib.auth import set_available_permissions |
|
29 | from kallithea.lib.utils import repo2db_mapper, make_ui, set_app_settings, \ | |
|
30 | load_rcextensions, check_git_version, set_vcs_config, set_indexer_config | |
|
31 | from kallithea.lib.utils2 import engine_from_config, str2bool | |
|
32 | from kallithea.model import init_model | |
|
|
33 | from kallithea.lib.db_manage import DbManage | |
|
34 | from kallithea.lib.utils import load_rcextensions, make_ui, set_app_settings, set_vcs_config, \ | |
|
35 | set_indexer_config, check_git_version, repo2db_mapper | |
|
36 | from kallithea.lib.utils2 import str2bool | |
|
33 | 37 | from kallithea.model.scm import ScmModel |
|
34 | 38 | |
|
35 | from routes.middleware import RoutesMiddleware | |
|
36 | from paste.cascade import Cascade | |
|
37 | from paste.registry import RegistryManager | |
|
38 | from paste.urlparser import StaticURLParser | |
|
39 | from paste.deploy.converters import asbool | |
|
39 | import formencode | |
|
40 | import kallithea | |
|
41 | ||
|
42 | ||
|
43 | class KallitheaAppConfig(AppConfig): | |
|
44 | # Note: AppConfig has a misleading name, as it's not the application | |
|
45 | # configuration, but the application configurator. The AppConfig values are | |
|
46 | # used as a template to create the actual configuration, which might | |
|
47 | # overwrite or extend the one provided by the configurator template. | |
|
48 | ||
|
49 | # To make it clear, AppConfig creates the config and sets into it the same | |
|
50 | # values that AppConfig itself has. Then the values from the config file and | |
|
51 | # gearbox options are loaded and merged into the configuration. Then an | |
|
52 | # after_init_config(conf) method of AppConfig is called for any change that | |
|
53 | # might depend on options provided by configuration files. | |
|
54 | ||
|
55 | def __init__(self): | |
|
56 | super(KallitheaAppConfig, self).__init__() | |
|
57 | ||
|
58 | self['package'] = kallithea | |
|
59 | ||
|
60 | self['prefer_toscawidgets2'] = False | |
|
61 | self['use_toscawidgets'] = False | |
|
62 | ||
|
63 | self['renderers'] = [] | |
|
64 | ||
|
65 | # Enable json in expose | |
|
66 | self['renderers'].append('json') | |
|
40 | 67 | |
|
41 | from pylons.middleware import ErrorHandler, StatusCodeRedirect | |
|
42 | from pylons.wsgiapp import PylonsApp | |
|
68 | # Configure template rendering | |
|
69 | self['renderers'].append('mako') | |
|
70 | self['default_renderer'] = 'mako' | |
|
71 | self['use_dotted_templatenames'] = False | |
|
72 | ||
|
73 | # Configure Sessions, store data as JSON to avoid pickle security issues | |
|
74 | self['session.enabled'] = True | |
|
75 | self['session.data_serializer'] = 'json' | |
|
76 | ||
|
77 | # Configure the base SQLALchemy Setup | |
|
78 | self['use_sqlalchemy'] = True | |
|
79 | self['model'] = kallithea.model.base | |
|
80 | self['DBSession'] = kallithea.model.meta.Session | |
|
81 | ||
|
82 | # Configure App without an authentication backend. | |
|
83 | self['auth_backend'] = None | |
|
43 | 84 | |
|
44 | from kallithea.lib.middleware.simplehg import SimpleHg | |
|
45 | from kallithea.lib.middleware.simplegit import SimpleGit | |
|
46 | from kallithea.lib.middleware.https_fixup import HttpsFixup | |
|
47 | from kallithea.lib.middleware.sessionmiddleware import SecureSessionMiddleware | |
|
48 | from kallithea.lib.middleware.wrapper import RequestWrapper | |
|
85 | # Use custom error page for these errors. By default, Turbogears2 does not add | |
|
86 | # 400 in this list. | |
|
87 | # Explicitly listing all is considered more robust than appending to defaults, | |
|
88 | # in light of possible future framework changes. | |
|
89 | self['errorpage.status_codes'] = [400, 401, 403, 404] | |
|
49 | 90 | |
|
50 | def setup_configuration(config, paths, app_conf, test_env, test_index): | |
|
91 | # Disable transaction manager -- currently Kallithea takes care of transactions itself | |
|
92 | self['tm.enabled'] = False | |
|
93 | ||
|
94 | base_config = KallitheaAppConfig() | |
|
95 | ||
|
96 | # TODO still needed as long as we use pylonslib | |
|
97 | sys.modules['pylons'] = tg | |
|
98 | ||
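The aliasing trick above can be shown in isolation: after the assignment, any later 'import pylons' yields the tg module object, so legacy pylons-importing code keeps working.

    import sys
    import tg

    sys.modules['pylons'] = tg  # future imports of 'pylons' resolve to tg
    import pylons
    assert pylons is tg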
|
99 | def setup_configuration(app): | |
|
100 | config = app.config | |
|
51 | 101 | |
|
52 | 102 | # store some globals into kallithea |
|
53 | 103 | kallithea.CELERY_ON = str2bool(config['app_conf'].get('use_celery')) |
|
54 | 104 | kallithea.CELERY_EAGER = str2bool(config['app_conf'].get('celery.always.eager')) |
|
105 | kallithea.CONFIG = config | |
|
55 | 106 | |
|
56 | config['routes.map'] = make_map(config) | |
|
57 | config['pylons.app_globals'] = app_globals.Globals(config) | |
|
58 | config['pylons.h'] = helpers | |
|
59 | kallithea.CONFIG = config | |
|
107 | # Provide routes mapper to the RoutedController | |
|
108 | root_controller = app.find_controller('root') | |
|
109 | root_controller.mapper = config['routes.map'] = make_map(config) | |
|
60 | 110 | |
|
61 | 111 | load_rcextensions(root_path=config['here']) |
|
62 | 112 | |
|
63 | # Setup cache object as early as possible | |
|
64 | pylons.cache._push_object(config['pylons.app_globals'].cache) | |
|
65 | ||
|
66 | # Create the Mako TemplateLookup, with the default auto-escaping | |
|
67 | config['pylons.app_globals'].mako_lookup = mako.lookup.TemplateLookup( | |
|
68 | directories=paths['templates'], | |
|
69 | strict_undefined=True, | |
|
70 | module_directory=os.path.join(app_conf['cache_dir'], 'templates'), | |
|
71 | input_encoding='utf-8', default_filters=['escape'], | |
|
72 | imports=['from webhelpers.html import escape']) | |
|
73 | ||
|
74 | # make accessing non-existing attributes of c raise an error | |
|
75 | config['pylons.strict_tmpl_context'] = True | |
|
113 | # FIXME move test setup code out of here | |
|
76 | 114 | test = os.path.split(config['__file__'])[-1] == 'test.ini' |
|
77 | 115 | if test: |
|
78 | if test_env is None: | |
|
79 | test_env = not int(os.environ.get('KALLITHEA_NO_TMP_PATH', 0)) | |
|
|
|
80 | if test_index is None: | |
|
81 | test_index = not int(os.environ.get('KALLITHEA_WHOOSH_TEST_DISABLE', 0)) | |
|
116 | test_env = not int(os.environ.get('KALLITHEA_NO_TMP_PATH', 0)) | |
|
117 | test_index = not int(os.environ.get('KALLITHEA_WHOOSH_TEST_DISABLE', 0)) | |
|
82 | 118 | if os.environ.get('TEST_DB'): |
|
83 | # swap config if we pass enviroment variable | |
|
119 | # swap config if we pass environment variable | |
|
84 | 120 | config['sqlalchemy.url'] = os.environ.get('TEST_DB') |
|
85 | 121 | |
|
86 | 122 | from kallithea.tests.fixture import create_test_env, create_test_index |
|
87 | 123 | from kallithea.tests.base import TESTS_TMP_PATH |
|
88 | 124 | #set KALLITHEA_NO_TMP_PATH=1 to disable re-creating the database and |
|
89 | 125 | #test repos |
|
90 | 126 | if test_env: |
|
91 | 127 | create_test_env(TESTS_TMP_PATH, config) |
|
92 | 128 | #set KALLITHEA_WHOOSH_TEST_DISABLE=1 to disable whoosh index during tests |
|
93 | 129 | if test_index: |
|
94 | 130 | create_test_index(TESTS_TMP_PATH, config, True) |
|
95 | 131 | |
|
96 | # MULTIPLE DB configs | |
|
97 | # Setup the SQLAlchemy database engine | |
|
98 | sa_engine = engine_from_config(config, 'sqlalchemy.') | |
|
99 | init_model(sa_engine) | |
|
100 | ||
|
101 | 132 | set_available_permissions(config) |
|
102 | 133 | repos_path = make_ui('db').configitems('paths')[0][1] |
|
103 | 134 | config['base_path'] = repos_path |
|
104 | 135 | set_app_settings(config) |
|
105 | 136 | |
|
106 | 137 | instance_id = kallithea.CONFIG.get('instance_id', '*') |
|
107 | 138 | if instance_id == '*': |
|
108 | 139 | instance_id = '%s-%s' % (platform.uname()[1], os.getpid()) |
|
109 | 140 | kallithea.CONFIG['instance_id'] = instance_id |
|
110 | 141 | |
|
111 | # CONFIGURATION OPTIONS HERE (note: all config options will override | |
|
112 | # any Pylons config options) | |
|
142 | # update kallithea.CONFIG with the meanwhile changed 'config' | |
|
143 | kallithea.CONFIG.update(config) | |
|
113 | 144 | |
|
114 | # store config reference into our module to skip import magic of | |
|
115 | # pylons | |
|
116 | kallithea.CONFIG.update(config) | |
|
|
145 | # configure vcs and indexer libraries (they are supposed to be independent | |
|
146 | # as much as possible and thus avoid importing tg.config or | |
|
147 | # kallithea.CONFIG). | |
|
117 | 148 | set_vcs_config(kallithea.CONFIG) |
|
118 | 149 | set_indexer_config(kallithea.CONFIG) |
|
119 | 150 | |
|
120 | #check git version | |
|
121 | 151 | check_git_version() |
|
122 | 152 | |
|
123 | 153 | if str2bool(config.get('initial_repo_scan', True)): |
|
124 | 154 | repo2db_mapper(ScmModel().repo_scan(repos_path), |
|
125 | 155 | remove_obsolete=False, install_git_hooks=False) |
|
156 | ||
|
126 | 157 | formencode.api.set_stdtranslation(languages=[config.get('lang')]) |
|
127 | 158 | |
|
128 | return config | |
|
129 | ||
|
130 | def setup_application(config, global_conf, full_stack, static_files): | |
|
159 | hooks.register('configure_new_app', setup_configuration) | |
|
131 | 160 | |
|
132 | # The Pylons WSGI app | |
|
133 | app = PylonsApp(config=config) | |
|
134 | ||
|
135 | # Routing/Session/Cache Middleware | |
|
136 | app = RoutesMiddleware(app, config['routes.map'], use_method_override=False) | |
|
137 | app = SecureSessionMiddleware(app, config) | |
|
138 | 161 | |
|
139 | # CUSTOM MIDDLEWARE HERE (filtered by error handling middlewares) | |
|
140 | if asbool(config['pdebug']): | |
|
141 | from kallithea.lib.profiler import ProfilingMiddleware | |
|
142 | app = ProfilingMiddleware(app) | |
|
143 | ||
|
144 | if asbool(full_stack): | |
|
145 | ||
|
146 | from kallithea.lib.middleware.sentry import Sentry | |
|
147 | from kallithea.lib.middleware.appenlight import AppEnlight | |
|
148 | if AppEnlight and asbool(config['app_conf'].get('appenlight')): | |
|
149 | app = AppEnlight(app, config) | |
|
150 | elif Sentry: | |
|
151 | app = Sentry(app, config) | |
|
152 | ||
|
153 | # Handle Python exceptions | |
|
154 | app = ErrorHandler(app, global_conf, **config['pylons.errorware']) | |
|
162 | def setup_application(app): | |
|
163 | config = app.config | |
|
155 | 164 | |
|
156 | # Display error documents for 401, 403, 404 status codes (and | |
|
157 | # 500 when debug is disabled) | |
|
158 | # Note: will buffer the output in memory! | |
|
159 | if asbool(config['debug']): | |
|
160 | app = StatusCodeRedirect(app) | |
|
161 | else: | |
|
162 | app = StatusCodeRedirect(app, [400, 401, 403, 404, 500]) | |
|
163 | ||
|
164 | # we want our low level middleware to get to the request ASAP. We don't | |
|
165 | # need any pylons stack middleware in them - especially no StatusCodeRedirect buffering | |
|
166 | app = SimpleHg(app, config) | |
|
167 | app = SimpleGit(app, config) | |
|
165 | # we want our low level middleware to get to the request ASAP. We don't | |
|
166 | # need any stack middleware in them - especially no StatusCodeRedirect buffering | |
|
167 | app = SimpleHg(app, config) | |
|
168 | app = SimpleGit(app, config) | |
|
168 | 169 | |
|
169 | # Enable https redirects based on HTTP_X_URL_SCHEME set by proxy | |
|
|
|
170 | if any(asbool(config.get(x)) for x in ['https_fixup', 'force_https', 'use_htsts']): | |
|
|
|
171 | app = HttpsFixup(app, config) | |
|
|
|
172 | ||
|
173 | app = RequestWrapper(app, config) # logging | |
|
174 | ||
|
175 | # Establish the Registry for this application | |
|
176 | app = RegistryManager(app) # thread / request-local module globals / variables | |
|
170 | # Enable https redirects based on HTTP_X_URL_SCHEME set by proxy | |
|
171 | if any(asbool(config.get(x)) for x in ['https_fixup', 'force_https', 'use_htsts']): | |
|
172 | app = HttpsFixup(app, config) | |
|
173 | return app | |
|
177 | 174 | |
|
178 | if asbool(static_files): | |
|
179 | # Serve static files | |
|
180 | static_app = StaticURLParser(config['pylons.paths']['static_files']) | |
|
181 | app = Cascade([static_app, app]) | |
|
182 | ||
|
183 | app.config = config | |
|
184 | ||
|
185 | return app | |
|
175 | hooks.register('before_config', setup_application) |
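Both registrations use the TurboGears2 hook registry, which calls each listener at the named phase of the application lifecycle. A minimal sketch of the same API ('startup' is a standard TG2 hook name, used here purely for illustration):

    from tg import hooks

    def announce_startup():
        print('application starting')

    # invoked by TurboGears2 when the application starts
    hooks.register('startup', announce_startup)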
@@ -1,44 +1,21 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | """ | |
|
15 | Pylons environment configuration | |
|
16 | """ | |
|
14 | """WSGI environment setup for Kallithea.""" | |
|
17 | 15 | |
|
18 | import os | |
|
19 | import kallithea | |
|
20 | import pylons | |
|
21 | ||
|
22 | from kallithea.config.app_cfg import setup_configuration | |
|
16 | from kallithea.config.app_cfg import base_config | |
|
23 | 17 | |
|
24 | def load_environment(global_conf, app_conf, | |
|
25 | test_env=None, test_index=None): | |
|
26 | """ | |
|
27 | Configure the Pylons environment via the ``pylons.config`` | |
|
28 | object | |
|
29 | """ | |
|
30 | config = pylons.configuration.PylonsConfig() | |
|
18 | __all__ = ['load_environment'] | |
|
31 | 19 | |
|
32 | # Pylons paths | |
|
33 | root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) | |
|
34 | paths = dict( | |
|
35 | root=root, | |
|
36 | controllers=os.path.join(root, 'controllers'), | |
|
37 | static_files=os.path.join(root, 'public'), | |
|
38 | templates=[os.path.join(root, 'templates')] | |
|
39 | ) | |
|
40 | ||
|
41 | # Initialize config with the basic options | |
|
42 | config.init_app(global_conf, app_conf, package='kallithea', paths=paths) | |
|
43 | ||
|
44 | return setup_configuration(config, paths, app_conf, test_env, test_index) | |
|
20 | # Use base_config to setup the environment loader function | |
|
21 | load_environment = base_config.make_load_environment() |
@@ -1,43 +1,45 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | """ | |
|
15 | Pylons middleware initialization | |
|
16 | """ | |
|
14 | """WSGI middleware initialization for the Kallithea application.""" | |
|
17 | 15 | |
|
18 | from kallithea.config.app_cfg import setup_application | |
|
|
16 | from kallithea.config.app_cfg import base_config | |
|
19 | 17 | from kallithea.config.environment import load_environment |
|
20 | 18 | |
|
21 | def make_app(global_conf, full_stack=True, static_files=True, **app_conf): | |
|
22 | """Create a Pylons WSGI application and return it | |
|
19 | __all__ = ['make_app'] | |
|
20 | ||
|
21 | # Use base_config to setup the necessary PasteDeploy application factory. | |
|
22 | # make_base_app will wrap the TurboGears2 app with all the middleware it needs. | |
|
23 | make_base_app = base_config.setup_tg_wsgi_app(load_environment) | |
|
23 | 24 | |
|
24 | ``global_conf`` | |
|
25 | The inherited configuration for this application. Normally from | |
|
26 | the [DEFAULT] section of the Paste ini file. | |
|
25 | ||
|
26 | def make_app(global_conf, full_stack=True, **app_conf): | |
|
27 | """ | |
|
28 | Set up Kallithea with the settings found in the PasteDeploy configuration | |
|
29 | file used. | |
|
27 | 30 | |
|
28 | ``full_stack`` | |
|
29 | Whether or not this application provides a full WSGI stack (by | |
|
30 | default, meaning it handles its own exceptions and errors). | |
|
31 | Disable full_stack when this application is "managed" by | |
|
32 | another WSGI middleware. | |
|
31 | :param global_conf: The global settings for Kallithea (those | |
|
32 | defined under the ``[DEFAULT]`` section). | |
|
33 | :type global_conf: dict | |
|
34 | :param full_stack: Should the whole TurboGears2 stack be set up? | |
|
35 | :type full_stack: str or bool | |
|
36 | :return: The Kallithea application with all the relevant middleware | |
|
37 | loaded. | |
|
33 | 38 | |
|
34 | ``app_conf`` | |
|
35 | The application's local configuration. Normally specified in | |
|
36 | the [app:<name>] section of the Paste ini file (where <name> | |
|
37 | defaults to main). | |
|
39 | This is the PasteDeploy factory for the Kallithea application. | |
|
38 | 40 | |
|
41 | ``app_conf`` contains all the application-specific settings (those defined | |
|
42 | under ``[app:main]``). | |
|
39 | 43 | """ |
|
40 | # Configure the Pylons environment | |
|
41 | config = load_environment(global_conf, app_conf) | |
|
42 | ||
|
43 | return setup_application(config, global_conf, full_stack, static_files) | |
|
44 | app = make_base_app(global_conf, full_stack=full_stack, **app_conf) | |
|
45 | return app |
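A hedged usage sketch of this factory: PasteDeploy resolves use = egg:kallithea in the ini file to make_app and calls it, which can also be exercised programmatically (the ini filename is an assumption):

    from paste.deploy import loadapp

    # builds the full Kallithea WSGI app via make_app above
    app = loadapp('config:development.ini', relative_to='.')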
@@ -1,827 +1,822 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | Routes configuration |
|
16 | 16 | |
|
17 | 17 | The more specific and detailed routes should be defined first so they |
|
18 | 18 | may take precedent over the more generic routes. For more information |
|
19 | 19 | refer to the routes manual at http://routes.groovie.org/docs/ |
|
20 | 20 | """ |
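A hedged illustration of the precedence rule from this docstring: with a plain routes Mapper, the first matching route wins, so specific routes must be connected before generic ones (the routes below are made up):

    from routes import Mapper

    m = Mapper()
    m.connect('/repos/new', controller='repos', action='new')       # specific first
    m.connect('/repos/{repo_name}', controller='repos', action='show')
    print(m.match('/repos/new'))
    # {'controller': 'repos', 'action': 'new'}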
|
21 | 21 | |
|
22 | 22 | from tg import request |
|
23 | 23 | from routes import Mapper |
|
24 | 24 | |
|
25 | 25 | # prefix for non-repository-related links; it must start with `/`
|
26 | 26 | ADMIN_PREFIX = '/_admin' |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | def make_map(config): |
|
30 | 30 | """Create, configure and return the routes Mapper""" |
|
31 | rmap = Mapper(directory=config['pylons.paths']['controllers'], | |
|
|
31 | rmap = Mapper(directory=config['paths']['controllers'], | |
|
32 | 32 | always_scan=config['debug']) |
|
33 | 33 | rmap.minimization = False |
|
34 | 34 | rmap.explicit = False |
|
35 | 35 | |
|
36 | 36 | from kallithea.lib.utils import (is_valid_repo, is_valid_repo_group, |
|
37 | 37 | get_repo_by_id) |
|
38 | 38 | |
|
39 | 39 | def check_repo(environ, match_dict): |
|
40 | 40 | """ |
|
41 | 41 | check for valid repository for proper 404 handling |
|
42 | 42 | |
|
43 | 43 | :param environ: |
|
44 | 44 | :param match_dict: |
|
45 | 45 | """ |
|
46 | 46 | repo_name = match_dict.get('repo_name') |
|
47 | 47 | |
|
48 | 48 | if match_dict.get('f_path'): |
|
49 | #fix for multiple initial slashes that causes errors | |
|
49 | # fix for multiple initial slashes that cause errors | |
|
50 | 50 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') |
|
51 | 51 | |
|
52 | 52 | by_id_match = get_repo_by_id(repo_name) |
|
53 | 53 | if by_id_match: |
|
54 | 54 | repo_name = by_id_match |
|
55 | 55 | match_dict['repo_name'] = repo_name |
|
56 | 56 | |
|
57 | 57 | return is_valid_repo(repo_name, config['base_path']) |
|
58 | 58 | |
|
59 | 59 | def check_group(environ, match_dict): |
|
60 | 60 | """ |
|
61 | 61 | check for valid repository group for proper 404 handling |
|
62 | 62 | |
|
63 | 63 | :param environ: |
|
64 | 64 | :param match_dict: |
|
65 | 65 | """ |
|
66 | 66 | repo_group_name = match_dict.get('group_name') |
|
67 | 67 | return is_valid_repo_group(repo_group_name, config['base_path']) |
|
68 | 68 | |
|
69 | 69 | def check_group_skip_path(environ, match_dict): |
|
70 | 70 | """ |
|
71 | 71 | check for valid repository group for proper 404 handling, but skips |
|
72 | 72 | verification of existing path |
|
73 | 73 | |
|
74 | 74 | :param environ: |
|
75 | 75 | :param match_dict: |
|
76 | 76 | """ |
|
77 | 77 | repo_group_name = match_dict.get('group_name') |
|
78 | 78 | return is_valid_repo_group(repo_group_name, config['base_path'], |
|
79 | 79 | skip_path_check=True) |
|
80 | 80 | |
|
81 | 81 | def check_user_group(environ, match_dict): |
|
82 | 82 | """ |
|
83 | 83 | check for valid user group for proper 404 handling |
|
84 | 84 | |
|
85 | 85 | :param environ: |
|
86 | 86 | :param match_dict: |
|
87 | 87 | """ |
|
88 | 88 | return True |
|
89 | 89 | |
|
90 | 90 | def check_int(environ, match_dict): |
|
91 | 91 | return match_dict.get('id').isdigit() |
|
92 | 92 | |
|
93 | # The ErrorController route (handles 404/500 error pages); it should | |
|
94 | # likely stay at the top, ensuring it can always be resolved | |
|
95 | rmap.connect('/error/{action}', controller='error') | |
|
96 | rmap.connect('/error/{action}/{id}', controller='error') | |
|
97 | ||
|
98 | 93 | #========================================================================== |
|
99 | 94 | # CUSTOM ROUTES HERE |
|
100 | 95 | #========================================================================== |
|
101 | 96 | |
|
102 | 97 | #MAIN PAGE |
|
103 | 98 | rmap.connect('home', '/', controller='home', action='index') |
|
104 | 99 | rmap.connect('about', '/about', controller='home', action='about') |
|
105 | 100 | rmap.connect('repo_switcher_data', '/_repos', controller='home', |
|
106 | 101 | action='repo_switcher_data') |
|
107 | 102 | |
|
108 | 103 | rmap.connect('rst_help', |
|
109 | 104 | "http://docutils.sourceforge.net/docs/user/rst/quickref.html", |
|
110 | 105 | _static=True) |
|
111 | 106 | rmap.connect('kallithea_project_url', "https://kallithea-scm.org/", _static=True) |
|
112 | 107 | rmap.connect('issues_url', 'https://bitbucket.org/conservancy/kallithea/issues', _static=True) |
|
113 | 108 | |
|
114 | 109 | #ADMIN REPOSITORY ROUTES |
|
115 | 110 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
116 | 111 | controller='admin/repos') as m: |
|
117 | 112 | m.connect("repos", "/repos", |
|
118 | 113 | action="create", conditions=dict(method=["POST"])) |
|
119 | 114 | m.connect("repos", "/repos", |
|
120 | 115 | action="index", conditions=dict(method=["GET"])) |
|
121 | 116 | m.connect("new_repo", "/create_repository", |
|
122 | 117 | action="create_repository", conditions=dict(method=["GET"])) |
|
123 | 118 | m.connect("update_repo", "/repos/{repo_name:.*?}", |
|
124 | 119 | action="update", conditions=dict(method=["POST"], |
|
125 | 120 | function=check_repo)) |
|
126 | 121 | m.connect("delete_repo", "/repos/{repo_name:.*?}/delete", |
|
127 | 122 | action="delete", conditions=dict(method=["POST"])) |
|
128 | 123 | |
|
129 | 124 | #ADMIN REPOSITORY GROUPS ROUTES |
|
130 | 125 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
131 | 126 | controller='admin/repo_groups') as m: |
|
132 | 127 | m.connect("repos_groups", "/repo_groups", |
|
133 | 128 | action="create", conditions=dict(method=["POST"])) |
|
134 | 129 | m.connect("repos_groups", "/repo_groups", |
|
135 | 130 | action="index", conditions=dict(method=["GET"])) |
|
136 | 131 | m.connect("new_repos_group", "/repo_groups/new", |
|
137 | 132 | action="new", conditions=dict(method=["GET"])) |
|
138 | 133 | m.connect("update_repos_group", "/repo_groups/{group_name:.*?}", |
|
139 | 134 | action="update", conditions=dict(method=["POST"], |
|
140 | 135 | function=check_group)) |
|
141 | 136 | |
|
142 | 137 | m.connect("repos_group", "/repo_groups/{group_name:.*?}", |
|
143 | 138 | action="show", conditions=dict(method=["GET"], |
|
144 | 139 | function=check_group)) |
|
145 | 140 | |
|
146 | 141 | #EXTRAS REPO GROUP ROUTES |
|
147 | 142 | m.connect("edit_repo_group", "/repo_groups/{group_name:.*?}/edit", |
|
148 | 143 | action="edit", |
|
149 | 144 | conditions=dict(method=["GET"], function=check_group)) |
|
150 | 145 | |
|
151 | 146 | m.connect("edit_repo_group_advanced", "/repo_groups/{group_name:.*?}/edit/advanced", |
|
152 | 147 | action="edit_repo_group_advanced", |
|
153 | 148 | conditions=dict(method=["GET"], function=check_group)) |
|
154 | 149 | |
|
155 | 150 | m.connect("edit_repo_group_perms", "/repo_groups/{group_name:.*?}/edit/permissions", |
|
156 | 151 | action="edit_repo_group_perms", |
|
157 | 152 | conditions=dict(method=["GET"], function=check_group)) |
|
158 | 153 | m.connect("edit_repo_group_perms_update", "/repo_groups/{group_name:.*?}/edit/permissions", |
|
159 | 154 | action="update_perms", |
|
160 | 155 | conditions=dict(method=["POST"], function=check_group)) |
|
161 | 156 | m.connect("edit_repo_group_perms_delete", "/repo_groups/{group_name:.*?}/edit/permissions/delete", |
|
162 | 157 | action="delete_perms", |
|
163 | 158 | conditions=dict(method=["POST"], function=check_group)) |
|
164 | 159 | |
|
165 | 160 | m.connect("delete_repo_group", "/repo_groups/{group_name:.*?}/delete", |
|
166 | 161 | action="delete", conditions=dict(method=["POST"], |
|
167 | 162 | function=check_group_skip_path)) |
|
168 | 163 | |
|
169 | 164 | |
|
170 | 165 | #ADMIN USER ROUTES |
|
171 | 166 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
172 | 167 | controller='admin/users') as m: |
|
173 | 168 | m.connect("new_user", "/users/new", |
|
174 | 169 | action="create", conditions=dict(method=["POST"])) |
|
175 | 170 | m.connect("users", "/users", |
|
176 | 171 | action="index", conditions=dict(method=["GET"])) |
|
177 | 172 | m.connect("formatted_users", "/users.{format}", |
|
178 | 173 | action="index", conditions=dict(method=["GET"])) |
|
179 | 174 | m.connect("new_user", "/users/new", |
|
180 | 175 | action="new", conditions=dict(method=["GET"])) |
|
181 | 176 | m.connect("update_user", "/users/{id}", |
|
182 | 177 | action="update", conditions=dict(method=["POST"])) |
|
183 | 178 | m.connect("delete_user", "/users/{id}/delete", |
|
184 | 179 | action="delete", conditions=dict(method=["POST"])) |
|
185 | 180 | m.connect("edit_user", "/users/{id}/edit", |
|
186 | 181 | action="edit", conditions=dict(method=["GET"])) |
|
187 | 182 | |
|
188 | 183 | #EXTRAS USER ROUTES |
|
189 | 184 | m.connect("edit_user_advanced", "/users/{id}/edit/advanced", |
|
190 | 185 | action="edit_advanced", conditions=dict(method=["GET"])) |
|
191 | 186 | |
|
192 | 187 | m.connect("edit_user_api_keys", "/users/{id}/edit/api_keys", |
|
193 | 188 | action="edit_api_keys", conditions=dict(method=["GET"])) |
|
194 | 189 | m.connect("edit_user_api_keys_update", "/users/{id}/edit/api_keys", |
|
195 | 190 | action="add_api_key", conditions=dict(method=["POST"])) |
|
196 | 191 | m.connect("edit_user_api_keys_delete", "/users/{id}/edit/api_keys/delete", |
|
197 | 192 | action="delete_api_key", conditions=dict(method=["POST"])) |
|
198 | 193 | |
|
199 | 194 | m.connect("edit_user_perms", "/users/{id}/edit/permissions", |
|
200 | 195 | action="edit_perms", conditions=dict(method=["GET"])) |
|
201 | 196 | m.connect("edit_user_perms_update", "/users/{id}/edit/permissions", |
|
202 | 197 | action="update_perms", conditions=dict(method=["POST"])) |
|
203 | 198 | |
|
204 | 199 | m.connect("edit_user_emails", "/users/{id}/edit/emails", |
|
205 | 200 | action="edit_emails", conditions=dict(method=["GET"])) |
|
206 | 201 | m.connect("edit_user_emails_update", "/users/{id}/edit/emails", |
|
207 | 202 | action="add_email", conditions=dict(method=["POST"])) |
|
208 | 203 | m.connect("edit_user_emails_delete", "/users/{id}/edit/emails/delete", |
|
209 | 204 | action="delete_email", conditions=dict(method=["POST"])) |
|
210 | 205 | |
|
211 | 206 | m.connect("edit_user_ips", "/users/{id}/edit/ips", |
|
212 | 207 | action="edit_ips", conditions=dict(method=["GET"])) |
|
213 | 208 | m.connect("edit_user_ips_update", "/users/{id}/edit/ips", |
|
214 | 209 | action="add_ip", conditions=dict(method=["POST"])) |
|
215 | 210 | m.connect("edit_user_ips_delete", "/users/{id}/edit/ips/delete", |
|
216 | 211 | action="delete_ip", conditions=dict(method=["POST"])) |
|
217 | 212 | |
|
218 | 213 | #ADMIN USER GROUPS REST ROUTES |
|
219 | 214 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
220 | 215 | controller='admin/user_groups') as m: |
|
221 | 216 | m.connect("users_groups", "/user_groups", |
|
222 | 217 | action="create", conditions=dict(method=["POST"])) |
|
223 | 218 | m.connect("users_groups", "/user_groups", |
|
224 | 219 | action="index", conditions=dict(method=["GET"])) |
|
225 | 220 | m.connect("new_users_group", "/user_groups/new", |
|
226 | 221 | action="new", conditions=dict(method=["GET"])) |
|
227 | 222 | m.connect("update_users_group", "/user_groups/{id}", |
|
228 | 223 | action="update", conditions=dict(method=["POST"])) |
|
229 | 224 | m.connect("delete_users_group", "/user_groups/{id}/delete", |
|
230 | 225 | action="delete", conditions=dict(method=["POST"])) |
|
231 | 226 | m.connect("edit_users_group", "/user_groups/{id}/edit", |
|
232 | 227 | action="edit", conditions=dict(method=["GET"]), |
|
233 | 228 | function=check_user_group) |
|
234 | 229 | |
|
235 | 230 | #EXTRAS USER GROUP ROUTES |
|
236 | 231 | m.connect("edit_user_group_default_perms", "/user_groups/{id}/edit/default_perms", |
|
237 | 232 | action="edit_default_perms", conditions=dict(method=["GET"])) |
|
238 | 233 | m.connect("edit_user_group_default_perms_update", "/user_groups/{id}/edit/default_perms", |
|
239 | 234 | action="update_default_perms", conditions=dict(method=["POST"])) |
|
240 | 235 | |
|
241 | 236 | |
|
242 | 237 | m.connect("edit_user_group_perms", "/user_groups/{id}/edit/perms", |
|
243 | 238 | action="edit_perms", conditions=dict(method=["GET"])) |
|
244 | 239 | m.connect("edit_user_group_perms_update", "/user_groups/{id}/edit/perms", |
|
245 | 240 | action="update_perms", conditions=dict(method=["POST"])) |
|
246 | 241 | m.connect("edit_user_group_perms_delete", "/user_groups/{id}/edit/perms/delete", |
|
247 | 242 | action="delete_perms", conditions=dict(method=["POST"])) |
|
248 | 243 | |
|
249 | 244 | m.connect("edit_user_group_advanced", "/user_groups/{id}/edit/advanced", |
|
250 | 245 | action="edit_advanced", conditions=dict(method=["GET"])) |
|
251 | 246 | |
|
252 | 247 | m.connect("edit_user_group_members", "/user_groups/{id}/edit/members", |
|
253 | 248 | action="edit_members", conditions=dict(method=["GET"])) |
|
254 | 249 | |
|
255 | 250 | |
|
256 | 251 | |
|
257 | 252 | #ADMIN PERMISSIONS ROUTES |
|
258 | 253 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
259 | 254 | controller='admin/permissions') as m: |
|
260 | 255 | m.connect("admin_permissions", "/permissions", |
|
261 | 256 | action="permission_globals", conditions=dict(method=["POST"])) |
|
262 | 257 | m.connect("admin_permissions", "/permissions", |
|
263 | 258 | action="permission_globals", conditions=dict(method=["GET"])) |
|
264 | 259 | |
|
265 | 260 | m.connect("admin_permissions_ips", "/permissions/ips", |
|
266 | 261 | action="permission_ips", conditions=dict(method=["GET"])) |
|
267 | 262 | |
|
268 | 263 | m.connect("admin_permissions_perms", "/permissions/perms", |
|
269 | 264 | action="permission_perms", conditions=dict(method=["GET"])) |
|
270 | 265 | |
|
271 | 266 | |
|
272 | 267 | #ADMIN DEFAULTS ROUTES |
|
273 | 268 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
274 | 269 | controller='admin/defaults') as m: |
|
275 | 270 | m.connect('defaults', 'defaults', |
|
276 | 271 | action="index") |
|
277 | 272 | m.connect('defaults_update', 'defaults/{id}/update', |
|
278 | 273 | action="update", conditions=dict(method=["POST"])) |
|
279 | 274 | |
|
280 | 275 | #ADMIN AUTH SETTINGS |
|
281 | 276 | rmap.connect('auth_settings', '%s/auth' % ADMIN_PREFIX, |
|
282 | 277 | controller='admin/auth_settings', action='auth_settings', |
|
283 | 278 | conditions=dict(method=["POST"])) |
|
284 | 279 | rmap.connect('auth_home', '%s/auth' % ADMIN_PREFIX, |
|
285 | 280 | controller='admin/auth_settings') |
|
286 | 281 | |
|
287 | 282 | #ADMIN SETTINGS ROUTES |
|
288 | 283 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
289 | 284 | controller='admin/settings') as m: |
|
290 | 285 | m.connect("admin_settings", "/settings", |
|
291 | 286 | action="settings_vcs", conditions=dict(method=["POST"])) |
|
292 | 287 | m.connect("admin_settings", "/settings", |
|
293 | 288 | action="settings_vcs", conditions=dict(method=["GET"])) |
|
294 | 289 | |
|
295 | 290 | m.connect("admin_settings_mapping", "/settings/mapping", |
|
296 | 291 | action="settings_mapping", conditions=dict(method=["POST"])) |
|
297 | 292 | m.connect("admin_settings_mapping", "/settings/mapping", |
|
298 | 293 | action="settings_mapping", conditions=dict(method=["GET"])) |
|
299 | 294 | |
|
300 | 295 | m.connect("admin_settings_global", "/settings/global", |
|
301 | 296 | action="settings_global", conditions=dict(method=["POST"])) |
|
302 | 297 | m.connect("admin_settings_global", "/settings/global", |
|
303 | 298 | action="settings_global", conditions=dict(method=["GET"])) |
|
304 | 299 | |
|
305 | 300 | m.connect("admin_settings_visual", "/settings/visual", |
|
306 | 301 | action="settings_visual", conditions=dict(method=["POST"])) |
|
307 | 302 | m.connect("admin_settings_visual", "/settings/visual", |
|
308 | 303 | action="settings_visual", conditions=dict(method=["GET"])) |
|
309 | 304 | |
|
310 | 305 | m.connect("admin_settings_email", "/settings/email", |
|
311 | 306 | action="settings_email", conditions=dict(method=["POST"])) |
|
312 | 307 | m.connect("admin_settings_email", "/settings/email", |
|
313 | 308 | action="settings_email", conditions=dict(method=["GET"])) |
|
314 | 309 | |
|
315 | 310 | m.connect("admin_settings_hooks", "/settings/hooks", |
|
316 | 311 | action="settings_hooks", conditions=dict(method=["POST"])) |
|
317 | 312 | m.connect("admin_settings_hooks_delete", "/settings/hooks/delete", |
|
318 | 313 | action="settings_hooks", conditions=dict(method=["POST"])) |
|
319 | 314 | m.connect("admin_settings_hooks", "/settings/hooks", |
|
320 | 315 | action="settings_hooks", conditions=dict(method=["GET"])) |
|
321 | 316 | |
|
322 | 317 | m.connect("admin_settings_search", "/settings/search", |
|
323 | 318 | action="settings_search", conditions=dict(method=["POST"])) |
|
324 | 319 | m.connect("admin_settings_search", "/settings/search", |
|
325 | 320 | action="settings_search", conditions=dict(method=["GET"])) |
|
326 | 321 | |
|
327 | 322 | m.connect("admin_settings_system", "/settings/system", |
|
328 | 323 | action="settings_system", conditions=dict(method=["POST"])) |
|
329 | 324 | m.connect("admin_settings_system", "/settings/system", |
|
330 | 325 | action="settings_system", conditions=dict(method=["GET"])) |
|
331 | 326 | m.connect("admin_settings_system_update", "/settings/system/updates", |
|
332 | 327 | action="settings_system_update", conditions=dict(method=["GET"])) |
|
333 | 328 | |
|
334 | 329 | #ADMIN MY ACCOUNT |
|
335 | 330 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
336 | 331 | controller='admin/my_account') as m: |
|
337 | 332 | |
|
338 | 333 | m.connect("my_account", "/my_account", |
|
339 | 334 | action="my_account", conditions=dict(method=["GET"])) |
|
340 | 335 | m.connect("my_account", "/my_account", |
|
341 | 336 | action="my_account", conditions=dict(method=["POST"])) |
|
342 | 337 | |
|
343 | 338 | m.connect("my_account_password", "/my_account/password", |
|
344 | 339 | action="my_account_password", conditions=dict(method=["GET"])) |
|
345 | 340 | m.connect("my_account_password", "/my_account/password", |
|
346 | 341 | action="my_account_password", conditions=dict(method=["POST"])) |
|
347 | 342 | |
|
348 | 343 | m.connect("my_account_repos", "/my_account/repos", |
|
349 | 344 | action="my_account_repos", conditions=dict(method=["GET"])) |
|
350 | 345 | |
|
351 | 346 | m.connect("my_account_watched", "/my_account/watched", |
|
352 | 347 | action="my_account_watched", conditions=dict(method=["GET"])) |
|
353 | 348 | |
|
354 | 349 | m.connect("my_account_perms", "/my_account/perms", |
|
355 | 350 | action="my_account_perms", conditions=dict(method=["GET"])) |
|
356 | 351 | |
|
357 | 352 | m.connect("my_account_emails", "/my_account/emails", |
|
358 | 353 | action="my_account_emails", conditions=dict(method=["GET"])) |
|
359 | 354 | m.connect("my_account_emails", "/my_account/emails", |
|
360 | 355 | action="my_account_emails_add", conditions=dict(method=["POST"])) |
|
361 | 356 | m.connect("my_account_emails_delete", "/my_account/emails/delete", |
|
362 | 357 | action="my_account_emails_delete", conditions=dict(method=["POST"])) |
|
363 | 358 | |
|
364 | 359 | m.connect("my_account_api_keys", "/my_account/api_keys", |
|
365 | 360 | action="my_account_api_keys", conditions=dict(method=["GET"])) |
|
366 | 361 | m.connect("my_account_api_keys", "/my_account/api_keys", |
|
367 | 362 | action="my_account_api_keys_add", conditions=dict(method=["POST"])) |
|
368 | 363 | m.connect("my_account_api_keys_delete", "/my_account/api_keys/delete", |
|
369 | 364 | action="my_account_api_keys_delete", conditions=dict(method=["POST"])) |
|
370 | 365 | |
|
371 | 366 | #NOTIFICATION REST ROUTES |
|
372 | 367 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
373 | 368 | controller='admin/notifications') as m: |
|
374 | 369 | m.connect("notifications", "/notifications", |
|
375 | 370 | action="index", conditions=dict(method=["GET"])) |
|
376 | 371 | m.connect("notifications_mark_all_read", "/notifications/mark_all_read", |
|
377 | 372 | action="mark_all_read", conditions=dict(method=["GET"])) |
|
378 | 373 | m.connect("formatted_notifications", "/notifications.{format}", |
|
379 | 374 | action="index", conditions=dict(method=["GET"])) |
|
380 | 375 | m.connect("notification_update", "/notifications/{notification_id}/update", |
|
381 | 376 | action="update", conditions=dict(method=["POST"])) |
|
382 | 377 | m.connect("notification_delete", "/notifications/{notification_id}/delete", |
|
383 | 378 | action="delete", conditions=dict(method=["POST"])) |
|
384 | 379 | m.connect("notification", "/notifications/{notification_id}", |
|
385 | 380 | action="show", conditions=dict(method=["GET"])) |
|
386 | 381 | m.connect("formatted_notification", "/notifications/{notification_id}.{format}", |
|
387 | 382 | action="show", conditions=dict(method=["GET"])) |
|
388 | 383 | |
|
389 | 384 | #ADMIN GIST |
|
390 | 385 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
391 | 386 | controller='admin/gists') as m: |
|
392 | 387 | m.connect("gists", "/gists", |
|
393 | 388 | action="create", conditions=dict(method=["POST"])) |
|
394 | 389 | m.connect("gists", "/gists", |
|
395 | 390 | action="index", conditions=dict(method=["GET"])) |
|
396 | 391 | m.connect("new_gist", "/gists/new", |
|
397 | 392 | action="new", conditions=dict(method=["GET"])) |
|
398 | 393 | |
|
399 | 394 | |
|
400 | 395 | m.connect("gist_delete", "/gists/{gist_id}/delete", |
|
401 | 396 | action="delete", conditions=dict(method=["POST"])) |
|
402 | 397 | m.connect("edit_gist", "/gists/{gist_id}/edit", |
|
403 | 398 | action="edit", conditions=dict(method=["GET", "POST"])) |
|
404 | 399 | m.connect("edit_gist_check_revision", "/gists/{gist_id}/edit/check_revision", |
|
405 | 400 | action="check_revision", conditions=dict(method=["POST"])) |
|
406 | 401 | |
|
407 | 402 | |
|
408 | 403 | m.connect("gist", "/gists/{gist_id}", |
|
409 | 404 | action="show", conditions=dict(method=["GET"])) |
|
410 | 405 | m.connect("gist_rev", "/gists/{gist_id}/{revision}", |
|
411 | 406 | revision="tip", |
|
412 | 407 | action="show", conditions=dict(method=["GET"])) |
|
413 | 408 | m.connect("formatted_gist", "/gists/{gist_id}/{revision}/{format}", |
|
414 | 409 | revision="tip", |
|
415 | 410 | action="show", conditions=dict(method=["GET"])) |
|
416 | 411 | m.connect("formatted_gist_file", "/gists/{gist_id}/{revision}/{format}/{f_path:.*}", |
|
417 | 412 | revision='tip', |
|
418 | 413 | action="show", conditions=dict(method=["GET"])) |
|
419 | 414 | |
|
420 | 415 | #ADMIN MAIN PAGES |
|
421 | 416 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
422 | 417 | controller='admin/admin') as m: |
|
423 | 418 | m.connect('admin_home', '', action='index') |
|
424 | 419 | m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}', |
|
425 | 420 | action='add_repo') |
|
426 | 421 | #========================================================================== |
|
427 | 422 | # API V2 |
|
428 | 423 | #========================================================================== |
|
429 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
|
430 |

424 | with rmap.submapper(path_prefix=ADMIN_PREFIX, controller='api/api', | |
|
425 | action='_dispatch') as m: | |
|
431 | 426 | m.connect('api', '/api') |
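# Illustrative sketch: this single route dispatches JSON-RPC style API calls.
# Assuming ADMIN_PREFIX keeps its usual value '/_admin', and with a
# placeholder api_key and example method/args, a client call looks roughly
# like this:
import json
import urllib2

payload = {
    'id': 1,                      # echoed back to pair request and response
    'api_key': 'SECRET_API_KEY',  # placeholder; keys are managed per user
    'method': 'get_repo',
    'args': {'repoid': 'some/repo'},
}
req = urllib2.Request('http://localhost:5000/_admin/api',
                      data=json.dumps(payload),
                      headers={'Content-Type': 'application/json'})
print(json.load(urllib2.urlopen(req)))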
|
432 | 427 | |
|
433 | 428 | #USER JOURNAL |
|
434 | 429 | rmap.connect('journal', '%s/journal' % ADMIN_PREFIX, |
|
435 | 430 | controller='journal', action='index') |
|
436 | 431 | rmap.connect('journal_rss', '%s/journal/rss' % ADMIN_PREFIX, |
|
437 | 432 | controller='journal', action='journal_rss') |
|
438 | 433 | rmap.connect('journal_atom', '%s/journal/atom' % ADMIN_PREFIX, |
|
439 | 434 | controller='journal', action='journal_atom') |
|
440 | 435 | |
|
441 | 436 | rmap.connect('public_journal', '%s/public_journal' % ADMIN_PREFIX, |
|
442 | 437 | controller='journal', action="public_journal") |
|
443 | 438 | |
|
444 | 439 | rmap.connect('public_journal_rss', '%s/public_journal/rss' % ADMIN_PREFIX, |
|
445 | 440 | controller='journal', action="public_journal_rss") |
|
446 | 441 | |
|
447 | 442 | rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % ADMIN_PREFIX, |
|
448 | 443 | controller='journal', action="public_journal_rss") |
|
449 | 444 | |
|
450 | 445 | rmap.connect('public_journal_atom', |
|
451 | 446 | '%s/public_journal/atom' % ADMIN_PREFIX, controller='journal', |
|
452 | 447 | action="public_journal_atom") |
|
453 | 448 | |
|
454 | 449 | rmap.connect('public_journal_atom_old', |
|
455 | 450 | '%s/public_journal_atom' % ADMIN_PREFIX, controller='journal', |
|
456 | 451 | action="public_journal_atom") |
|
457 | 452 | |
|
458 | 453 | rmap.connect('toggle_following', '%s/toggle_following' % ADMIN_PREFIX, |
|
459 | 454 | controller='journal', action='toggle_following', |
|
460 | 455 | conditions=dict(method=["POST"])) |
|
461 | 456 | |
|
462 | 457 | #SEARCH |
|
463 | 458 | rmap.connect('search', '%s/search' % ADMIN_PREFIX, controller='search',) |
|
464 | 459 | rmap.connect('search_repo_admin', '%s/search/{repo_name:.*}' % ADMIN_PREFIX, |
|
465 | 460 | controller='search', |
|
466 | 461 | conditions=dict(function=check_repo)) |
|
467 | 462 | rmap.connect('search_repo', '/{repo_name:.*?}/search', |
|
468 | 463 | controller='search', |
|
469 | 464 | conditions=dict(function=check_repo), |
|
470 | 465 | ) |
|
471 | 466 | |
|
472 | 467 | #LOGIN/LOGOUT/REGISTER/SIGN IN |
|
473 | 468 | rmap.connect('authentication_token', '%s/authentication_token' % ADMIN_PREFIX, controller='login', action='authentication_token') |
|
474 | 469 | rmap.connect('login_home', '%s/login' % ADMIN_PREFIX, controller='login') |
|
475 | 470 | rmap.connect('logout_home', '%s/logout' % ADMIN_PREFIX, controller='login', |
|
476 | 471 | action='logout') |
|
477 | 472 | |
|
478 | 473 | rmap.connect('register', '%s/register' % ADMIN_PREFIX, controller='login', |
|
479 | 474 | action='register') |
|
480 | 475 | |
|
481 | 476 | rmap.connect('reset_password', '%s/password_reset' % ADMIN_PREFIX, |
|
482 | 477 | controller='login', action='password_reset') |
|
483 | 478 | |
|
484 | 479 | rmap.connect('reset_password_confirmation', |
|
485 | 480 | '%s/password_reset_confirmation' % ADMIN_PREFIX, |
|
486 | 481 | controller='login', action='password_reset_confirmation') |
|
487 | 482 | |
|
488 | 483 | #FEEDS |
|
489 | 484 | rmap.connect('rss_feed_home', '/{repo_name:.*?}/feed/rss', |
|
490 | 485 | controller='feed', action='rss', |
|
491 | 486 | conditions=dict(function=check_repo)) |
|
492 | 487 | |
|
493 | 488 | rmap.connect('atom_feed_home', '/{repo_name:.*?}/feed/atom', |
|
494 | 489 | controller='feed', action='atom', |
|
495 | 490 | conditions=dict(function=check_repo)) |
|
496 | 491 | |
|
497 | 492 | #========================================================================== |
|
498 | 493 | # REPOSITORY ROUTES |
|
499 | 494 | #========================================================================== |
|
500 | 495 | rmap.connect('repo_creating_home', '/{repo_name:.*?}/repo_creating', |
|
501 | 496 | controller='admin/repos', action='repo_creating') |
|
502 | 497 | rmap.connect('repo_check_home', '/{repo_name:.*?}/crepo_check', |
|
503 | 498 | controller='admin/repos', action='repo_check') |
|
504 | 499 | |
|
505 | 500 | rmap.connect('summary_home', '/{repo_name:.*?}', |
|
506 | 501 | controller='summary', |
|
507 | 502 | conditions=dict(function=check_repo)) |
|
508 | 503 | |
|
509 | 504 | # must be here for proper group/repo catching |
|
510 | 505 | rmap.connect('repos_group_home', '/{group_name:.*}', |
|
511 | 506 | controller='admin/repo_groups', action="show_by_name", |
|
512 | 507 | conditions=dict(function=check_group)) |
|
513 | 508 | rmap.connect('repo_stats_home', '/{repo_name:.*?}/statistics', |
|
514 | 509 | controller='summary', action='statistics', |
|
515 | 510 | conditions=dict(function=check_repo)) |
|
516 | 511 | |
|
517 | 512 | rmap.connect('repo_size', '/{repo_name:.*?}/repo_size', |
|
518 | 513 | controller='summary', action='repo_size', |
|
519 | 514 | conditions=dict(function=check_repo)) |
|
520 | 515 | |
|
521 | 516 | rmap.connect('repo_refs_data', '/{repo_name:.*?}/refs-data', |
|
522 | 517 | controller='home', action='repo_refs_data') |
|
523 | 518 | |
|
524 | 519 | rmap.connect('changeset_home', '/{repo_name:.*?}/changeset/{revision:.*}', |
|
525 | 520 | controller='changeset', revision='tip', |
|
526 | 521 | conditions=dict(function=check_repo)) |
|
527 | 522 | rmap.connect('changeset_children', '/{repo_name:.*?}/changeset_children/{revision}', |
|
528 | 523 | controller='changeset', revision='tip', action="changeset_children", |
|
529 | 524 | conditions=dict(function=check_repo)) |
|
530 | 525 | rmap.connect('changeset_parents', '/{repo_name:.*?}/changeset_parents/{revision}', |
|
531 | 526 | controller='changeset', revision='tip', action="changeset_parents", |
|
532 | 527 | conditions=dict(function=check_repo)) |
|
533 | 528 | |
|
534 | 529 | # repo edit options |
|
535 | 530 | rmap.connect("edit_repo", "/{repo_name:.*?}/settings", |
|
536 | 531 | controller='admin/repos', action="edit", |
|
537 | 532 | conditions=dict(method=["GET"], function=check_repo)) |
|
538 | 533 | |
|
539 | 534 | rmap.connect("edit_repo_perms", "/{repo_name:.*?}/settings/permissions", |
|
540 | 535 | controller='admin/repos', action="edit_permissions", |
|
541 | 536 | conditions=dict(method=["GET"], function=check_repo)) |
|
542 | 537 | rmap.connect("edit_repo_perms_update", "/{repo_name:.*?}/settings/permissions", |
|
543 | 538 | controller='admin/repos', action="edit_permissions_update", |
|
544 | 539 | conditions=dict(method=["POST"], function=check_repo)) |
|
545 | 540 | rmap.connect("edit_repo_perms_revoke", "/{repo_name:.*?}/settings/permissions/delete", |
|
546 | 541 | controller='admin/repos', action="edit_permissions_revoke", |
|
547 | 542 | conditions=dict(method=["POST"], function=check_repo)) |
|
548 | 543 | |
|
549 | 544 | rmap.connect("edit_repo_fields", "/{repo_name:.*?}/settings/fields", |
|
550 | 545 | controller='admin/repos', action="edit_fields", |
|
551 | 546 | conditions=dict(method=["GET"], function=check_repo)) |
|
552 | 547 | rmap.connect('create_repo_fields', "/{repo_name:.*?}/settings/fields/new", |
|
553 | 548 | controller='admin/repos', action="create_repo_field", |
|
554 | 549 | conditions=dict(method=["POST"], function=check_repo)) |
|
555 | 550 | rmap.connect('delete_repo_fields', "/{repo_name:.*?}/settings/fields/{field_id}/delete", |
|
556 | 551 | controller='admin/repos', action="delete_repo_field", |
|
557 | 552 | conditions=dict(method=["POST"], function=check_repo)) |
|
558 | 553 | |
|
559 | 554 | |
|
560 | 555 | rmap.connect("edit_repo_advanced", "/{repo_name:.*?}/settings/advanced", |
|
561 | 556 | controller='admin/repos', action="edit_advanced", |
|
562 | 557 | conditions=dict(method=["GET"], function=check_repo)) |
|
563 | 558 | |
|
564 | 559 | rmap.connect("edit_repo_advanced_locking", "/{repo_name:.*?}/settings/advanced/locking", |
|
565 | 560 | controller='admin/repos', action="edit_advanced_locking", |
|
566 | 561 | conditions=dict(method=["POST"], function=check_repo)) |
|
567 | 562 | rmap.connect('toggle_locking', "/{repo_name:.*?}/settings/advanced/locking_toggle", |
|
568 | 563 | controller='admin/repos', action="toggle_locking", |
|
569 | 564 | conditions=dict(method=["GET"], function=check_repo)) |
|
570 | 565 | |
|
571 | 566 | rmap.connect("edit_repo_advanced_journal", "/{repo_name:.*?}/settings/advanced/journal", |
|
572 | 567 | controller='admin/repos', action="edit_advanced_journal", |
|
573 | 568 | conditions=dict(method=["POST"], function=check_repo)) |
|
574 | 569 | |
|
575 | 570 | rmap.connect("edit_repo_advanced_fork", "/{repo_name:.*?}/settings/advanced/fork", |
|
576 | 571 | controller='admin/repos', action="edit_advanced_fork", |
|
577 | 572 | conditions=dict(method=["POST"], function=check_repo)) |
|
578 | 573 | |
|
579 | 574 | |
|
580 | 575 | rmap.connect("edit_repo_caches", "/{repo_name:.*?}/settings/caches", |
|
581 | 576 | controller='admin/repos', action="edit_caches", |
|
582 | 577 | conditions=dict(method=["GET"], function=check_repo)) |
|
583 | 578 | rmap.connect("update_repo_caches", "/{repo_name:.*?}/settings/caches", |
|
584 | 579 | controller='admin/repos', action="edit_caches", |
|
585 | 580 | conditions=dict(method=["POST"], function=check_repo)) |
|
586 | 581 | |
|
587 | 582 | |
|
588 | 583 | rmap.connect("edit_repo_remote", "/{repo_name:.*?}/settings/remote", |
|
589 | 584 | controller='admin/repos', action="edit_remote", |
|
590 | 585 | conditions=dict(method=["GET"], function=check_repo)) |
|
591 | 586 | rmap.connect("edit_repo_remote_update", "/{repo_name:.*?}/settings/remote", |
|
592 | 587 | controller='admin/repos', action="edit_remote", |
|
593 | 588 | conditions=dict(method=["POST"], function=check_repo)) |
|
594 | 589 | |
|
595 | 590 | rmap.connect("edit_repo_statistics", "/{repo_name:.*?}/settings/statistics", |
|
596 | 591 | controller='admin/repos', action="edit_statistics", |
|
597 | 592 | conditions=dict(method=["GET"], function=check_repo)) |
|
598 | 593 | rmap.connect("edit_repo_statistics_update", "/{repo_name:.*?}/settings/statistics", |
|
599 | 594 | controller='admin/repos', action="edit_statistics", |
|
600 | 595 | conditions=dict(method=["POST"], function=check_repo)) |
|
601 | 596 | |
|
602 | 597 | # still working URL for backward compatibility
|
603 | 598 | rmap.connect('raw_changeset_home_depraced', |
|
604 | 599 | '/{repo_name:.*?}/raw-changeset/{revision}', |
|
605 | 600 | controller='changeset', action='changeset_raw', |
|
606 | 601 | revision='tip', conditions=dict(function=check_repo)) |
|
607 | 602 | |
|
608 | 603 | ## new URLs |
|
609 | 604 | rmap.connect('changeset_raw_home', |
|
610 | 605 | '/{repo_name:.*?}/changeset-diff/{revision}', |
|
611 | 606 | controller='changeset', action='changeset_raw', |
|
612 | 607 | revision='tip', conditions=dict(function=check_repo)) |
|
613 | 608 | |
|
614 | 609 | rmap.connect('changeset_patch_home', |
|
615 | 610 | '/{repo_name:.*?}/changeset-patch/{revision}', |
|
616 | 611 | controller='changeset', action='changeset_patch', |
|
617 | 612 | revision='tip', conditions=dict(function=check_repo)) |
|
618 | 613 | |
|
619 | 614 | rmap.connect('changeset_download_home', |
|
620 | 615 | '/{repo_name:.*?}/changeset-download/{revision}', |
|
621 | 616 | controller='changeset', action='changeset_download', |
|
622 | 617 | revision='tip', conditions=dict(function=check_repo)) |
|
623 | 618 | |
|
624 | 619 | rmap.connect('changeset_comment', |
|
625 | 620 | '/{repo_name:.*?}/changeset-comment/{revision}', |
|
626 | 621 | controller='changeset', revision='tip', action='comment', |
|
627 | 622 | conditions=dict(function=check_repo)) |
|
628 | 623 | |
|
629 | 624 | rmap.connect('changeset_comment_delete', |
|
630 | 625 | '/{repo_name:.*?}/changeset-comment/{comment_id}/delete', |
|
631 | 626 | controller='changeset', action='delete_comment', |
|
632 | 627 | conditions=dict(function=check_repo, method=["POST"])) |
|
633 | 628 | |
|
634 | 629 | rmap.connect('changeset_info', '/changeset_info/{repo_name:.*?}/{revision}', |
|
635 | 630 | controller='changeset', action='changeset_info') |
|
636 | 631 | |
|
637 | 632 | rmap.connect('compare_home', |
|
638 | 633 | '/{repo_name:.*?}/compare', |
|
639 | 634 | controller='compare', action='index', |
|
640 | 635 | conditions=dict(function=check_repo)) |
|
641 | 636 | |
|
642 | 637 | rmap.connect('compare_url', |
|
643 | 638 | '/{repo_name:.*?}/compare/{org_ref_type}@{org_ref_name:.*?}...{other_ref_type}@{other_ref_name:.*?}', |
|
644 | 639 | controller='compare', action='compare', |
|
645 | 640 | conditions=dict(function=check_repo), |
|
646 | 641 | requirements=dict( |
|
647 | 642 | org_ref_type='(branch|book|tag|rev|__other_ref_type__)', |
|
648 | 643 | other_ref_type='(branch|book|tag|rev|__org_ref_type__)') |
|
649 | 644 | ) |
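# Illustrative: the pattern above matches URLs such as
# /myrepo/compare/branch@default...branch@stable. Generating one through the
# named route (argument values are examples):
url('compare_url', repo_name='myrepo',
    org_ref_type='branch', org_ref_name='default',
    other_ref_type='branch', other_ref_name='stable')
# -> '/myrepo/compare/branch@default...branch@stable'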
|
650 | 645 | |
|
651 | 646 | rmap.connect('pullrequest_home', |
|
652 | 647 | '/{repo_name:.*?}/pull-request/new', controller='pullrequests', |
|
653 | 648 | action='index', conditions=dict(function=check_repo, |
|
654 | 649 | method=["GET"])) |
|
655 | 650 | |
|
656 | 651 | rmap.connect('pullrequest_repo_info', |
|
657 | 652 | '/{repo_name:.*?}/pull-request-repo-info', |
|
658 | 653 | controller='pullrequests', action='repo_info', |
|
659 | 654 | conditions=dict(function=check_repo, method=["GET"])) |
|
660 | 655 | |
|
661 | 656 | rmap.connect('pullrequest', |
|
662 | 657 | '/{repo_name:.*?}/pull-request/new', controller='pullrequests', |
|
663 | 658 | action='create', conditions=dict(function=check_repo, |
|
664 | 659 | method=["POST"])) |
|
665 | 660 | |
|
666 | 661 | rmap.connect('pullrequest_show', |
|
667 | 662 | '/{repo_name:.*?}/pull-request/{pull_request_id:\\d+}{extra:(/.*)?}', extra='', |
|
668 | 663 | controller='pullrequests', |
|
669 | 664 | action='show', conditions=dict(function=check_repo, |
|
670 | 665 | method=["GET"])) |
|
671 | 666 | rmap.connect('pullrequest_post', |
|
672 | 667 | '/{repo_name:.*?}/pull-request/{pull_request_id}', |
|
673 | 668 | controller='pullrequests', |
|
674 | 669 | action='post', conditions=dict(function=check_repo, |
|
675 | 670 | method=["POST"])) |
|
676 | 671 | rmap.connect('pullrequest_delete', |
|
677 | 672 | '/{repo_name:.*?}/pull-request/{pull_request_id}/delete', |
|
678 | 673 | controller='pullrequests', |
|
679 | 674 | action='delete', conditions=dict(function=check_repo, |
|
680 | 675 | method=["POST"])) |
|
681 | 676 | |
|
682 | 677 | rmap.connect('pullrequest_show_all', |
|
683 | 678 | '/{repo_name:.*?}/pull-request', |
|
684 | 679 | controller='pullrequests', |
|
685 | 680 | action='show_all', conditions=dict(function=check_repo, |
|
686 | 681 | method=["GET"])) |
|
687 | 682 | |
|
688 | 683 | rmap.connect('my_pullrequests', |
|
689 | 684 | '/my_pullrequests', |
|
690 | 685 | controller='pullrequests', |
|
691 | 686 | action='show_my', conditions=dict(method=["GET"])) |
|
692 | 687 | |
|
693 | 688 | rmap.connect('pullrequest_comment', |
|
694 | 689 | '/{repo_name:.*?}/pull-request-comment/{pull_request_id}', |
|
695 | 690 | controller='pullrequests', |
|
696 | 691 | action='comment', conditions=dict(function=check_repo, |
|
697 | 692 | method=["POST"])) |
|
698 | 693 | |
|
699 | 694 | rmap.connect('pullrequest_comment_delete', |
|
700 | 695 | '/{repo_name:.*?}/pull-request-comment/{comment_id}/delete', |
|
701 | 696 | controller='pullrequests', action='delete_comment', |
|
702 | 697 | conditions=dict(function=check_repo, method=["POST"])) |
|
703 | 698 | |
|
704 | 699 | rmap.connect('summary_home_summary', '/{repo_name:.*?}/summary', |
|
705 | 700 | controller='summary', conditions=dict(function=check_repo)) |
|
706 | 701 | |
|
707 | 702 | rmap.connect('changelog_home', '/{repo_name:.*?}/changelog', |
|
708 | 703 | controller='changelog', conditions=dict(function=check_repo)) |
|
709 | 704 | |
|
710 | 705 | rmap.connect('changelog_summary_home', '/{repo_name:.*?}/changelog_summary', |
|
711 | 706 | controller='changelog', action='changelog_summary', |
|
712 | 707 | conditions=dict(function=check_repo)) |
|
713 | 708 | |
|
714 | 709 | rmap.connect('changelog_file_home', '/{repo_name:.*?}/changelog/{revision}/{f_path:.*}', |
|
715 | 710 | controller='changelog', f_path=None, |
|
716 | 711 | conditions=dict(function=check_repo)) |
|
717 | 712 | |
|
718 | 713 | rmap.connect('changelog_details', '/{repo_name:.*?}/changelog_details/{cs}', |
|
719 | 714 | controller='changelog', action='changelog_details', |
|
720 | 715 | conditions=dict(function=check_repo)) |
|
721 | 716 | |
|
722 | 717 | rmap.connect('files_home', '/{repo_name:.*?}/files/{revision}/{f_path:.*}', |
|
723 | 718 | controller='files', revision='tip', f_path='', |
|
724 | 719 | conditions=dict(function=check_repo)) |
|
725 | 720 | |
|
726 | 721 | rmap.connect('files_home_nopath', '/{repo_name:.*?}/files/{revision}', |
|
727 | 722 | controller='files', revision='tip', f_path='', |
|
728 | 723 | conditions=dict(function=check_repo)) |
|
729 | 724 | |
|
730 | 725 | rmap.connect('files_history_home', |
|
731 | 726 | '/{repo_name:.*?}/history/{revision}/{f_path:.*}', |
|
732 | 727 | controller='files', action='history', revision='tip', f_path='', |
|
733 | 728 | conditions=dict(function=check_repo)) |
|
734 | 729 | |
|
735 | 730 | rmap.connect('files_authors_home', |
|
736 | 731 | '/{repo_name:.*?}/authors/{revision}/{f_path:.*}', |
|
737 | 732 | controller='files', action='authors', revision='tip', f_path='', |
|
738 | 733 | conditions=dict(function=check_repo)) |
|
739 | 734 | |
|
740 | 735 | rmap.connect('files_diff_home', '/{repo_name:.*?}/diff/{f_path:.*}', |
|
741 | 736 | controller='files', action='diff', revision='tip', f_path='', |
|
742 | 737 | conditions=dict(function=check_repo)) |
|
743 | 738 | |
|
744 | 739 | rmap.connect('files_diff_2way_home', '/{repo_name:.*?}/diff-2way/{f_path:.+}', |
|
745 | 740 | controller='files', action='diff_2way', revision='tip', f_path='', |
|
746 | 741 | conditions=dict(function=check_repo)) |
|
747 | 742 | |
|
748 | 743 | rmap.connect('files_rawfile_home', |
|
749 | 744 | '/{repo_name:.*?}/rawfile/{revision}/{f_path:.*}', |
|
750 | 745 | controller='files', action='rawfile', revision='tip', |
|
751 | 746 | f_path='', conditions=dict(function=check_repo)) |
|
752 | 747 | |
|
753 | 748 | rmap.connect('files_raw_home', |
|
754 | 749 | '/{repo_name:.*?}/raw/{revision}/{f_path:.*}', |
|
755 | 750 | controller='files', action='raw', revision='tip', f_path='', |
|
756 | 751 | conditions=dict(function=check_repo)) |
|
757 | 752 | |
|
758 | 753 | rmap.connect('files_annotate_home', |
|
759 | 754 | '/{repo_name:.*?}/annotate/{revision}/{f_path:.*}', |
|
760 | 755 | controller='files', action='index', revision='tip', |
|
761 | 756 | f_path='', annotate=True, conditions=dict(function=check_repo)) |
|
762 | 757 | |
|
763 | 758 | rmap.connect('files_edit_home', |
|
764 | 759 | '/{repo_name:.*?}/edit/{revision}/{f_path:.*}', |
|
765 | 760 | controller='files', action='edit', revision='tip', |
|
766 | 761 | f_path='', conditions=dict(function=check_repo)) |
|
767 | 762 | |
|
768 | 763 | rmap.connect('files_add_home', |
|
769 | 764 | '/{repo_name:.*?}/add/{revision}/{f_path:.*}', |
|
770 | 765 | controller='files', action='add', revision='tip', |
|
771 | 766 | f_path='', conditions=dict(function=check_repo)) |
|
772 | 767 | |
|
773 | 768 | rmap.connect('files_delete_home', |
|
774 | 769 | '/{repo_name:.*?}/delete/{revision}/{f_path:.*}', |
|
775 | 770 | controller='files', action='delete', revision='tip', |
|
776 | 771 | f_path='', conditions=dict(function=check_repo)) |
|
777 | 772 | |
|
778 | 773 | rmap.connect('files_archive_home', '/{repo_name:.*?}/archive/{fname}', |
|
779 | 774 | controller='files', action='archivefile', |
|
780 | 775 | conditions=dict(function=check_repo)) |
|
781 | 776 | |
|
782 | 777 | rmap.connect('files_nodelist_home', |
|
783 | 778 | '/{repo_name:.*?}/nodelist/{revision}/{f_path:.*}', |
|
784 | 779 | controller='files', action='nodelist', |
|
785 | 780 | conditions=dict(function=check_repo)) |
|
786 | 781 | |
|
787 | 782 | rmap.connect('repo_fork_create_home', '/{repo_name:.*?}/fork', |
|
788 | 783 | controller='forks', action='fork_create', |
|
789 | 784 | conditions=dict(function=check_repo, method=["POST"])) |
|
790 | 785 | |
|
791 | 786 | rmap.connect('repo_fork_home', '/{repo_name:.*?}/fork', |
|
792 | 787 | controller='forks', action='fork', |
|
793 | 788 | conditions=dict(function=check_repo)) |
|
794 | 789 | |
|
795 | 790 | rmap.connect('repo_forks_home', '/{repo_name:.*?}/forks', |
|
796 | 791 | controller='forks', action='forks', |
|
797 | 792 | conditions=dict(function=check_repo)) |
|
798 | 793 | |
|
799 | 794 | rmap.connect('repo_followers_home', '/{repo_name:.*?}/followers', |
|
800 | 795 | controller='followers', action='followers', |
|
801 | 796 | conditions=dict(function=check_repo)) |
|
802 | 797 | |
|
803 | 798 | return rmap |
|
804 | 799 | |
|
805 | 800 | |
|
806 | 801 | class UrlGenerator(object): |
|
807 | 802 | """Emulate pylons.url in providing a wrapper around routes.url |
|
808 | 803 | |
|
809 | 804 | This code was added during migration from Pylons to Turbogears2. Pylons |
|
810 | 805 | already provided a wrapper like this, but Turbogears2 does not. |
|
811 | 806 | |
|
812 | 807 | When the routing of Kallithea is changed to use less Routes and more |
|
813 | 808 | Turbogears2-style routing, this class may disappear or change. |
|
814 | 809 | |
|
815 | 810 | url() (the __call__ method) returns the URL based on a route name and |
|
816 | 811 | arguments. |
|
817 | 812 | url.current() returns the URL of the current page with arguments applied. |
|
818 | 813 | |
|
819 | 814 | Refer to documentation of Routes for details: |
|
820 | 815 | https://routes.readthedocs.io/en/latest/generating.html#generation |
|
821 | 816 | """ |
|
822 | 817 | def __call__(self, *args, **kwargs): |
|
823 | 818 | return request.environ['routes.url'](*args, **kwargs) |
|
824 | 819 | def current(self, *args, **kwargs): |
|
825 | 820 | return request.environ['routes.url'].current(*args, **kwargs) |
|
826 | 821 | |
|
827 | 822 | url = UrlGenerator() |
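# Illustrative usage of the wrapper above, valid only inside a request where
# Routes has placed its generator in the WSGI environ; route names are taken
# from make_map, the query argument is an example:
url('summary_home', repo_name='myrepo')   # -> '/myrepo'
url('edit_repo', repo_name='myrepo')      # -> '/myrepo/settings'
url.current(page=2)                       # current URL with ?page=2 applied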
@@ -1,410 +1,410 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.admin.repo_groups |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Repository groups controller for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Mar 23, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import logging |
|
29 | 29 | import traceback |
|
30 | 30 | import formencode |
|
31 | 31 | import itertools |
|
32 | 32 | |
|
33 | 33 | from formencode import htmlfill |
|
34 | 34 | |
|
35 | from tg import request, tmpl_context as c | |
|
35 | from tg import request, tmpl_context as c, app_globals | |
|
36 | 36 | from tg.i18n import ugettext as _, ungettext |
|
37 | 37 | from webob.exc import HTTPFound, HTTPForbidden, HTTPNotFound, HTTPInternalServerError |
|
38 | 38 | |
|
39 | 39 | import kallithea |
|
40 | 40 | from kallithea.config.routing import url |
|
41 | 41 | from kallithea.lib import helpers as h |
|
42 | 42 | from kallithea.lib.auth import LoginRequired, \ |
|
43 | 43 | HasRepoGroupPermissionLevelDecorator, HasRepoGroupPermissionLevel, \ |
|
44 | 44 | HasPermissionAny |
|
45 | 45 | from kallithea.lib.base import BaseController, render |
|
46 | 46 | from kallithea.model.db import RepoGroup, Repository |
|
47 | 47 | from kallithea.model.scm import RepoGroupList, AvailableRepoGroupChoices |
|
48 | 48 | from kallithea.model.repo_group import RepoGroupModel |
|
49 | 49 | from kallithea.model.forms import RepoGroupForm, RepoGroupPermsForm |
|
50 | 50 | from kallithea.model.meta import Session |
|
51 | 51 | from kallithea.model.repo import RepoModel |
|
52 | 52 | from kallithea.lib.utils2 import safe_int |
|
53 | 53 | from sqlalchemy.sql.expression import func |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | log = logging.getLogger(__name__) |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | class RepoGroupsController(BaseController): |
|
60 | 60 | |
|
61 | 61 | @LoginRequired() |
|
62 | 62 | def _before(self, *args, **kwargs): |
|
63 | 63 | super(RepoGroupsController, self)._before(*args, **kwargs) |
|
64 | 64 | |
|
65 | 65 | def __load_defaults(self, extras=(), exclude=()): |
|
66 | 66 | """extras is used for keeping current parent ignoring permissions |
|
67 | 67 | exclude is used for not moving group to itself TODO: also exclude descendants |
|
68 | 68 | Note: only admin can create top level groups |
|
69 | 69 | """ |
|
70 | 70 | repo_groups = AvailableRepoGroupChoices([], 'admin', extras) |
|
71 | 71 | exclude_group_ids = set(rg.group_id for rg in exclude) |
|
72 | 72 | c.repo_groups = [rg for rg in repo_groups |
|
73 | 73 | if rg[0] not in exclude_group_ids] |
|
74 | 74 | |
|
75 | 75 | repo_model = RepoModel() |
|
76 | 76 | c.users_array = repo_model.get_users_js() |
|
77 | 77 | c.user_groups_array = repo_model.get_user_groups_js() |
|
78 | 78 | |
|
79 | 79 | def __load_data(self, group_id): |
|
80 | 80 | """ |
|
81 | 81 | Load default settings for the edit and update views
|
82 | 82 | |
|
83 | 83 | :param group_id: |
|
84 | 84 | """ |
|
85 | 85 | repo_group = RepoGroup.get_or_404(group_id) |
|
86 | 86 | data = repo_group.get_dict() |
|
87 | 87 | data['group_name'] = repo_group.name |
|
88 | 88 | |
|
89 | 89 | # fill repository group users |
|
90 | 90 | for p in repo_group.repo_group_to_perm: |
|
91 | 91 | data.update({'u_perm_%s' % p.user.username: |
|
92 | 92 | p.permission.permission_name}) |
|
93 | 93 | |
|
94 | 94 | # fill repository group groups |
|
95 | 95 | for p in repo_group.users_group_to_perm: |
|
96 | 96 | data.update({'g_perm_%s' % p.users_group.users_group_name: |
|
97 | 97 | p.permission.permission_name}) |
|
98 | 98 | |
|
99 | 99 | return data |
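# Illustrative shape of the dict returned above (names and values are
# hypothetical); htmlfill uses the per-member keys to pre-select the
# permission widgets:
# {
#     'group_name': 'projects',
#     'group_description': '...',
#     'u_perm_john': 'group.admin',        # one entry per user permission
#     'g_perm_developers': 'group.write',  # one entry per user-group permission
# }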
|
100 | 100 | |
|
101 | 101 | def _revoke_perms_on_yourself(self, form_result): |
|
102 | 102 | _up = filter(lambda u: request.authuser.username == u[0], |
|
103 | 103 | form_result['perms_updates']) |
|
104 | 104 | _new = filter(lambda u: request.authuser.username == u[0], |
|
105 | 105 | form_result['perms_new']) |
|
106 | 106 | if _new and _new[0][1] != 'group.admin' or _up and _up[0][1] != 'group.admin': |
|
107 | 107 | return True |
|
108 | 108 | return False |
|
109 | 109 | |
|
110 | 110 | def index(self, format='html'): |
|
111 | 111 | _list = RepoGroup.query(sorted=True).all() |
|
112 | 112 | group_iter = RepoGroupList(_list, perm_level='admin') |
|
113 | 113 | repo_groups_data = [] |
|
114 | 114 | total_records = len(group_iter) |
|
115 | _tmpl_lookup =

115 | _tmpl_lookup = app_globals.mako_lookup | |
|
116 | 116 | template = _tmpl_lookup.get_template('data_table/_dt_elements.html') |
|
117 | 117 | |
|
118 | 118 | repo_group_name = lambda repo_group_name, children_groups: ( |
|
119 | 119 | template.get_def("repo_group_name") |
|
120 | 120 | .render(repo_group_name, children_groups, _=_, h=h, c=c) |
|
121 | 121 | ) |
|
122 | 122 | repo_group_actions = lambda repo_group_id, repo_group_name, gr_count: ( |
|
123 | 123 | template.get_def("repo_group_actions") |
|
124 | 124 | .render(repo_group_id, repo_group_name, gr_count, _=_, h=h, c=c, |
|
125 | 125 | ungettext=ungettext) |
|
126 | 126 | ) |
|
127 | 127 | |
|
128 | 128 | for repo_gr in group_iter: |
|
129 | 129 | children_groups = map(h.safe_unicode, |
|
130 | 130 | itertools.chain((g.name for g in repo_gr.parents), |
|
131 | 131 | (x.name for x in [repo_gr]))) |
|
132 | 132 | repo_count = repo_gr.repositories.count() |
|
133 | 133 | repo_groups_data.append({ |
|
134 | 134 | "raw_name": repo_gr.group_name, |
|
135 | 135 | "group_name": repo_group_name(repo_gr.group_name, children_groups), |
|
136 | 136 | "desc": h.escape(repo_gr.group_description), |
|
137 | 137 | "repos": repo_count, |
|
138 | 138 | "owner": h.person(repo_gr.owner), |
|
139 | 139 | "action": repo_group_actions(repo_gr.group_id, repo_gr.group_name, |
|
140 | 140 | repo_count) |
|
141 | 141 | }) |
|
142 | 142 | |
|
143 | 143 | c.data = { |
|
144 | 144 | "totalRecords": total_records, |
|
145 | 145 | "startIndex": 0, |
|
146 | 146 | "sort": None, |
|
147 | 147 | "dir": "asc", |
|
148 | 148 | "records": repo_groups_data |
|
149 | 149 | } |
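# Illustrative: c.data is what the grid template serializes; with example
# values, a single record carries pre-rendered HTML from the Mako defs:
# {"totalRecords": 1, "startIndex": 0, "sort": None, "dir": "asc",
#  "records": [{"raw_name": "projects", "group_name": "<rendered link>",
#               "desc": "Example group", "repos": 3, "owner": "John Doe",
#               "action": "<rendered buttons>"}]}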
|
150 | 150 | |
|
151 | 151 | return render('admin/repo_groups/repo_groups.html') |
|
152 | 152 | |
|
153 | 153 | def create(self): |
|
154 | 154 | self.__load_defaults() |
|
155 | 155 | |
|
156 | 156 | # permission to create a group under the given parent_id is checked

157 | 157 | # here in the Form
|
158 | 158 | repo_group_form = RepoGroupForm(repo_groups=c.repo_groups) |
|
159 | 159 | try: |
|
160 | 160 | form_result = repo_group_form.to_python(dict(request.POST)) |
|
161 | 161 | gr = RepoGroupModel().create( |
|
162 | 162 | group_name=form_result['group_name'], |
|
163 | 163 | group_description=form_result['group_description'], |
|
164 | 164 | parent=form_result['parent_group_id'], |
|
165 | 165 | owner=request.authuser.user_id, # TODO: make editable |
|
166 | 166 | copy_permissions=form_result['group_copy_permissions'] |
|
167 | 167 | ) |
|
168 | 168 | Session().commit() |
|
169 | 169 | #TODO: in future action_logger(, '', '', '') |
|
170 | 170 | except formencode.Invalid as errors: |
|
171 | 171 | return htmlfill.render( |
|
172 | 172 | render('admin/repo_groups/repo_group_add.html'), |
|
173 | 173 | defaults=errors.value, |
|
174 | 174 | errors=errors.error_dict or {}, |
|
175 | 175 | prefix_error=False, |
|
176 | 176 | encoding="UTF-8", |
|
177 | 177 | force_defaults=False) |
|
178 | 178 | except Exception: |
|
179 | 179 | log.error(traceback.format_exc()) |
|
180 | 180 | h.flash(_('Error occurred during creation of repository group %s') \ |
|
181 | 181 | % request.POST.get('group_name'), category='error') |
|
182 | 182 | parent_group_id = form_result['parent_group_id'] |
|
183 | 183 | #TODO: maybe we should get back to the main view, not the admin one |
|
184 | 184 | raise HTTPFound(location=url('repos_groups', parent_group=parent_group_id)) |
|
185 | 185 | h.flash(_('Created repository group %s') % gr.group_name, |
|
186 | 186 | category='success') |
|
187 | 187 | raise HTTPFound(location=url('repos_group_home', group_name=gr.group_name)) |
|
188 | 188 | |
|
189 | 189 | def new(self): |
|
190 | 190 | if HasPermissionAny('hg.admin')('group create'): |
|
191 | 191 | # we're a global admin, so we can create top-level groups
|
192 | 192 | pass |
|
193 | 193 | else: |
|
194 | 194 | # we pass the parent group into the creation form, so we know

195 | 195 | # which group it would be and can check permissions here
|
196 | 196 | group_id = safe_int(request.GET.get('parent_group')) |
|
197 | 197 | group = RepoGroup.get(group_id) if group_id else None |
|
198 | 198 | group_name = group.group_name if group else None |
|
199 | 199 | if HasRepoGroupPermissionLevel('admin')(group_name, 'group create'): |
|
200 | 200 | pass |
|
201 | 201 | else: |
|
202 | 202 | raise HTTPForbidden() |
|
203 | 203 | |
|
204 | 204 | self.__load_defaults() |
|
205 | 205 | return render('admin/repo_groups/repo_group_add.html') |
|
206 | 206 | |
|
207 | 207 | @HasRepoGroupPermissionLevelDecorator('admin') |
|
208 | 208 | def update(self, group_name): |
|
209 | 209 | c.repo_group = RepoGroup.guess_instance(group_name) |
|
210 | 210 | self.__load_defaults(extras=[c.repo_group.parent_group], |
|
211 | 211 | exclude=[c.repo_group]) |
|
212 | 212 | |
|
213 | 213 | # TODO: kill allow_empty_group - it is only used for redundant form validation! |
|
214 | 214 | if HasPermissionAny('hg.admin')('group edit'): |
|
215 | 215 | # we're a global admin, so top-level groups are allowed
|
216 | 216 | allow_empty_group = True |
|
217 | 217 | elif not c.repo_group.parent_group: |
|
218 | 218 | allow_empty_group = True |
|
219 | 219 | else: |
|
220 | 220 | allow_empty_group = False |
|
221 | 221 | repo_group_form = RepoGroupForm( |
|
222 | 222 | edit=True, |
|
223 | 223 | old_data=c.repo_group.get_dict(), |
|
224 | 224 | repo_groups=c.repo_groups, |
|
225 | 225 | can_create_in_root=allow_empty_group, |
|
226 | 226 | )() |
|
227 | 227 | try: |
|
228 | 228 | form_result = repo_group_form.to_python(dict(request.POST)) |
|
229 | 229 | |
|
230 | 230 | new_gr = RepoGroupModel().update(group_name, form_result) |
|
231 | 231 | Session().commit() |
|
232 | 232 | h.flash(_('Updated repository group %s') \ |
|
233 | 233 | % form_result['group_name'], category='success') |
|
234 | 234 | # we now have new name ! |
|
235 | 235 | group_name = new_gr.group_name |
|
236 | 236 | #TODO: in future action_logger(, '', '', '') |
|
237 | 237 | except formencode.Invalid as errors: |
|
238 | 238 | c.active = 'settings' |
|
239 | 239 | return htmlfill.render( |
|
240 | 240 | render('admin/repo_groups/repo_group_edit.html'), |
|
241 | 241 | defaults=errors.value, |
|
242 | 242 | errors=errors.error_dict or {}, |
|
243 | 243 | prefix_error=False, |
|
244 | 244 | encoding="UTF-8", |
|
245 | 245 | force_defaults=False) |
|
246 | 246 | except Exception: |
|
247 | 247 | log.error(traceback.format_exc()) |
|
248 | 248 | h.flash(_('Error occurred during update of repository group %s') \ |
|
249 | 249 | % request.POST.get('group_name'), category='error') |
|
250 | 250 | |
|
251 | 251 | raise HTTPFound(location=url('edit_repo_group', group_name=group_name)) |
|
252 | 252 | |
|
253 | 253 | @HasRepoGroupPermissionLevelDecorator('admin') |
|
254 | 254 | def delete(self, group_name): |
|
255 | 255 | gr = c.repo_group = RepoGroup.guess_instance(group_name) |
|
256 | 256 | repos = gr.repositories.all() |
|
257 | 257 | if repos: |
|
258 | 258 | h.flash(_('This group contains %s repositories and cannot be ' |
|
259 | 259 | 'deleted') % len(repos), category='warning') |
|
260 | 260 | raise HTTPFound(location=url('repos_groups')) |
|
261 | 261 | |
|
262 | 262 | children = gr.children.all() |
|
263 | 263 | if children: |
|
264 | 264 | h.flash(_('This group contains %s subgroups and cannot be deleted' |
|
265 | 265 | % (len(children))), category='warning') |
|
266 | 266 | raise HTTPFound(location=url('repos_groups')) |
|
267 | 267 | |
|
268 | 268 | try: |
|
269 | 269 | RepoGroupModel().delete(group_name) |
|
270 | 270 | Session().commit() |
|
271 | 271 | h.flash(_('Removed repository group %s') % group_name, |
|
272 | 272 | category='success') |
|
273 | 273 | #TODO: in future action_logger(, '', '', '') |
|
274 | 274 | except Exception: |
|
275 | 275 | log.error(traceback.format_exc()) |
|
276 | 276 | h.flash(_('Error occurred during deletion of repository group %s') |
|
277 | 277 | % group_name, category='error') |
|
278 | 278 | |
|
279 | 279 | if gr.parent_group: |
|
280 | 280 | raise HTTPFound(location=url('repos_group_home', group_name=gr.parent_group.group_name)) |
|
281 | 281 | raise HTTPFound(location=url('repos_groups')) |
|
282 | 282 | |
|
283 | 283 | def show_by_name(self, group_name): |
|
284 | 284 | """ |
|
285 | 285 | This is a proxy that does a lookup group_name -> group, and shows

286 | 286 | the regular group view if it exists
|
287 | 287 | """ |
|
288 | 288 | group_name = group_name.rstrip('/') |
|
289 | 289 | id_ = RepoGroup.get_by_group_name(group_name) |
|
290 | 290 | if id_: |
|
291 | 291 | return self.show(group_name) |
|
292 | 292 | raise HTTPNotFound |
|
293 | 293 | |
|
294 | 294 | @HasRepoGroupPermissionLevelDecorator('read') |
|
295 | 295 | def show(self, group_name): |
|
296 | 296 | c.active = 'settings' |
|
297 | 297 | |
|
298 | 298 | c.group = c.repo_group = RepoGroup.guess_instance(group_name) |
|
299 | 299 | |
|
300 | 300 | groups = RepoGroup.query(sorted=True).filter_by(parent_group=c.group).all() |
|
301 | 301 | c.groups = self.scm_model.get_repo_groups(groups) |
|
302 | 302 | |
|
303 | 303 | repos_list = Repository.query(sorted=True).filter_by(group=c.group).all() |
|
304 | 304 | repos_data = RepoModel().get_repos_as_dict(repos_list=repos_list, |
|
305 | 305 | admin=False, short_name=True) |
|
306 | 306 | # data used to render the grid |
|
307 | 307 | c.data = repos_data |
|
308 | 308 | |
|
309 | 309 | return render('admin/repo_groups/repo_group_show.html') |
|
310 | 310 | |
|
311 | 311 | @HasRepoGroupPermissionLevelDecorator('admin') |
|
312 | 312 | def edit(self, group_name): |
|
313 | 313 | c.active = 'settings' |
|
314 | 314 | |
|
315 | 315 | c.repo_group = RepoGroup.guess_instance(group_name) |
|
316 | 316 | self.__load_defaults(extras=[c.repo_group.parent_group], |
|
317 | 317 | exclude=[c.repo_group]) |
|
318 | 318 | defaults = self.__load_data(c.repo_group.group_id) |
|
319 | 319 | |
|
320 | 320 | return htmlfill.render( |
|
321 | 321 | render('admin/repo_groups/repo_group_edit.html'), |
|
322 | 322 | defaults=defaults, |
|
323 | 323 | encoding="UTF-8", |
|
324 | 324 | force_defaults=False |
|
325 | 325 | ) |
|
326 | 326 | |
|
327 | 327 | @HasRepoGroupPermissionLevelDecorator('admin') |
|
328 | 328 | def edit_repo_group_advanced(self, group_name): |
|
329 | 329 | c.active = 'advanced' |
|
330 | 330 | c.repo_group = RepoGroup.guess_instance(group_name) |
|
331 | 331 | |
|
332 | 332 | return render('admin/repo_groups/repo_group_edit.html') |
|
333 | 333 | |
|
334 | 334 | @HasRepoGroupPermissionLevelDecorator('admin') |
|
335 | 335 | def edit_repo_group_perms(self, group_name): |
|
336 | 336 | c.active = 'perms' |
|
337 | 337 | c.repo_group = RepoGroup.guess_instance(group_name) |
|
338 | 338 | self.__load_defaults() |
|
339 | 339 | defaults = self.__load_data(c.repo_group.group_id) |
|
340 | 340 | |
|
341 | 341 | return htmlfill.render( |
|
342 | 342 | render('admin/repo_groups/repo_group_edit.html'), |
|
343 | 343 | defaults=defaults, |
|
344 | 344 | encoding="UTF-8", |
|
345 | 345 | force_defaults=False |
|
346 | 346 | ) |
|
347 | 347 | |
|
348 | 348 | @HasRepoGroupPermissionLevelDecorator('admin') |
|
349 | 349 | def update_perms(self, group_name): |
|
350 | 350 | """ |
|
351 | 351 | Update permissions for given repository group |
|
352 | 352 | |
|
353 | 353 | :param group_name: |
|
354 | 354 | """ |
|
355 | 355 | |
|
356 | 356 | c.repo_group = RepoGroup.guess_instance(group_name) |
|
357 | 357 | valid_recursive_choices = ['none', 'repos', 'groups', 'all'] |
|
358 | 358 | form_result = RepoGroupPermsForm(valid_recursive_choices)().to_python(request.POST) |
|
359 | 359 | if not request.authuser.is_admin: |
|
360 | 360 | if self._revoke_perms_on_yourself(form_result): |
|
361 | 361 | msg = _('Cannot revoke permission for yourself as admin') |
|
362 | 362 | h.flash(msg, category='warning') |
|
363 | 363 | raise HTTPFound(location=url('edit_repo_group_perms', group_name=group_name)) |
|
364 | 364 | recursive = form_result['recursive'] |
|
365 | 365 | # iterate over all members (if in recursive mode) of this group and |

366 | 366 | # set the permissions! |

367 | 367 | # this can potentially be a heavy operation |
|
368 | 368 | RepoGroupModel()._update_permissions(c.repo_group, |
|
369 | 369 | form_result['perms_new'], |
|
370 | 370 | form_result['perms_updates'], |
|
371 | 371 | recursive) |
|
372 | 372 | #TODO: implement this |
|
373 | 373 | #action_logger(request.authuser, 'admin_changed_repo_permissions', |
|
374 | 374 | # repo_name, request.ip_addr) |
|
375 | 375 | Session().commit() |
|
376 | 376 | h.flash(_('Repository group permissions updated'), category='success') |
|
377 | 377 | raise HTTPFound(location=url('edit_repo_group_perms', group_name=group_name)) |
|
378 | 378 | |
|
379 | 379 | @HasRepoGroupPermissionLevelDecorator('admin') |
|
380 | 380 | def delete_perms(self, group_name): |
|
381 | 381 | try: |
|
382 | 382 | obj_type = request.POST.get('obj_type') |
|
383 | 383 | obj_id = None |
|
384 | 384 | if obj_type == 'user': |
|
385 | 385 | obj_id = safe_int(request.POST.get('user_id')) |
|
386 | 386 | elif obj_type == 'user_group': |
|
387 | 387 | obj_id = safe_int(request.POST.get('user_group_id')) |
|
388 | 388 | |
|
389 | 389 | if not request.authuser.is_admin: |
|
390 | 390 | if obj_type == 'user' and request.authuser.user_id == obj_id: |
|
391 | 391 | msg = _('Cannot revoke permission for yourself as admin') |
|
392 | 392 | h.flash(msg, category='warning') |
|
393 | 393 | raise Exception('revoke admin permission on self') |
|
394 | 394 | recursive = request.POST.get('recursive', 'none') |
|
395 | 395 | if obj_type == 'user': |
|
396 | 396 | RepoGroupModel().delete_permission(repo_group=group_name, |
|
397 | 397 | obj=obj_id, obj_type='user', |
|
398 | 398 | recursive=recursive) |
|
399 | 399 | elif obj_type == 'user_group': |
|
400 | 400 | RepoGroupModel().delete_permission(repo_group=group_name, |
|
401 | 401 | obj=obj_id, |
|
402 | 402 | obj_type='user_group', |
|
403 | 403 | recursive=recursive) |
|
404 | 404 | |
|
405 | 405 | Session().commit() |
|
406 | 406 | except Exception: |
|
407 | 407 | log.error(traceback.format_exc()) |
|
408 | 408 | h.flash(_('An error occurred during revoking of permission'), |
|
409 | 409 | category='error') |
|
410 | 410 | raise HTTPInternalServerError() |
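
Review note: delete_perms reads its inputs directly from request.POST. A minimal client-side sketch of the expected form fields, inferred from the request.POST.get() calls above (the URL and the use of the 'requests' library are illustrative; actual routing is defined in kallithea.config.routing):

    # hypothetical sketch -- field names match the reads in delete_perms above
    import requests
    requests.post('https://kallithea.example.com/_admin/repo_groups/mygroup/delete_perms',
                  data={'obj_type': 'user',     # or 'user_group' (then send user_group_id)
                        'user_id': 42,
                        'recursive': 'none'})   # update_perms accepts 'none', 'repos',
                                                # 'groups' or 'all'
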
@@ -1,427 +1,427 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.admin.user_groups |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | User Groups crud controller |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Jan 25, 2011 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import logging |
|
29 | 29 | import traceback |
|
30 | 30 | import formencode |
|
31 | 31 | |
|
32 | 32 | from formencode import htmlfill |
|
33 | from tg import request, tmpl_context as c, config | |
|
33 | from tg import request, tmpl_context as c, config, app_globals | |
|
34 | 34 | from tg.i18n import ugettext as _ |
|
35 | 35 | from webob.exc import HTTPFound |
|
36 | 36 | |
|
37 | 37 | from sqlalchemy.orm import joinedload |
|
38 | 38 | from sqlalchemy.sql.expression import func |
|
39 | 39 | from webob.exc import HTTPInternalServerError |
|
40 | 40 | |
|
41 | 41 | import kallithea |
|
42 | 42 | from kallithea.config.routing import url |
|
43 | 43 | from kallithea.lib import helpers as h |
|
44 | 44 | from kallithea.lib.exceptions import UserGroupsAssignedException, \ |
|
45 | 45 | RepoGroupAssignmentError |
|
46 | 46 | from kallithea.lib.utils2 import safe_unicode, safe_int |
|
47 | 47 | from kallithea.lib.auth import LoginRequired, \ |
|
48 | 48 | HasUserGroupPermissionLevelDecorator, HasPermissionAnyDecorator |
|
49 | 49 | from kallithea.lib.base import BaseController, render |
|
50 | 50 | from kallithea.model.scm import UserGroupList |
|
51 | 51 | from kallithea.model.user_group import UserGroupModel |
|
52 | 52 | from kallithea.model.repo import RepoModel |
|
53 | 53 | from kallithea.model.db import User, UserGroup, UserGroupToPerm, \ |
|
54 | 54 | UserGroupRepoToPerm, UserGroupRepoGroupToPerm |
|
55 | 55 | from kallithea.model.forms import UserGroupForm, UserGroupPermsForm, \ |
|
56 | 56 | CustomDefaultPermissionsForm |
|
57 | 57 | from kallithea.model.meta import Session |
|
58 | 58 | from kallithea.lib.utils import action_logger |
|
59 | 59 | |
|
60 | 60 | log = logging.getLogger(__name__) |
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | class UserGroupsController(BaseController): |
|
64 | 64 | """REST Controller styled on the Atom Publishing Protocol""" |
|
65 | 65 | |
|
66 | 66 | @LoginRequired() |
|
67 | 67 | def _before(self, *args, **kwargs): |
|
68 | 68 | super(UserGroupsController, self)._before(*args, **kwargs) |
|
69 | 69 | c.available_permissions = config['available_permissions'] |
|
70 | 70 | |
|
71 | 71 | def __load_data(self, user_group_id): |
|
72 | 72 | c.group_members_obj = sorted((x.user for x in c.user_group.members), |
|
73 | 73 | key=lambda u: u.username.lower()) |
|
74 | 74 | |
|
75 | 75 | c.group_members = [(x.user_id, x.username) for x in c.group_members_obj] |
|
76 | 76 | c.available_members = sorted(((x.user_id, x.username) for x in |
|
77 | 77 | User.query().all()), |
|
78 | 78 | key=lambda u: u[1].lower()) |
|
79 | 79 | |
|
80 | 80 | def __load_defaults(self, user_group_id): |
|
81 | 81 | """ |
|
82 | 82 | Load default settings for edit and update |
|
83 | 83 | |
|
84 | 84 | :param user_group_id: |
|
85 | 85 | """ |
|
86 | 86 | user_group = UserGroup.get_or_404(user_group_id) |
|
87 | 87 | data = user_group.get_dict() |
|
88 | 88 | return data |
|
89 | 89 | |
|
90 | 90 | def index(self, format='html'): |
|
91 | 91 | _list = UserGroup.query() \ |
|
92 | 92 | .order_by(func.lower(UserGroup.users_group_name)) \ |
|
93 | 93 | .all() |
|
94 | 94 | group_iter = UserGroupList(_list, perm_level='admin') |
|
95 | 95 | user_groups_data = [] |
|
96 | 96 | total_records = len(group_iter) |
|
97 | _tmpl_lookup = | |

97 | _tmpl_lookup = app_globals.mako_lookup | |
|
98 | 98 | template = _tmpl_lookup.get_template('data_table/_dt_elements.html') |
|
99 | 99 | |
|
100 | 100 | user_group_name = lambda user_group_id, user_group_name: ( |
|
101 | 101 | template.get_def("user_group_name") |
|
102 | 102 | .render(user_group_id, user_group_name, _=_, h=h, c=c) |
|
103 | 103 | ) |
|
104 | 104 | user_group_actions = lambda user_group_id, user_group_name: ( |
|
105 | 105 | template.get_def("user_group_actions") |
|
106 | 106 | .render(user_group_id, user_group_name, _=_, h=h, c=c) |
|
107 | 107 | ) |
|
108 | 108 | for user_gr in group_iter: |
|
109 | 109 | |
|
110 | 110 | user_groups_data.append({ |
|
111 | 111 | "raw_name": user_gr.users_group_name, |
|
112 | 112 | "group_name": user_group_name(user_gr.users_group_id, |
|
113 | 113 | user_gr.users_group_name), |
|
114 | 114 | "desc": h.escape(user_gr.user_group_description), |
|
115 | 115 | "members": len(user_gr.members), |
|
116 | 116 | "active": h.boolicon(user_gr.users_group_active), |
|
117 | 117 | "owner": h.person(user_gr.owner.username), |
|
118 | 118 | "action": user_group_actions(user_gr.users_group_id, user_gr.users_group_name) |
|
119 | 119 | }) |
|
120 | 120 | |
|
121 | 121 | c.data = { |
|
122 | 122 | "totalRecords": total_records, |
|
123 | 123 | "startIndex": 0, |
|
124 | 124 | "sort": None, |
|
125 | 125 | "dir": "asc", |
|
126 | 126 | "records": user_groups_data |
|
127 | 127 | } |
|
128 | 128 | |
|
129 | 129 | return render('admin/user_groups/user_groups.html') |
|
130 | 130 | |
|
131 | 131 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') |
|
132 | 132 | def create(self): |
|
133 | 133 | users_group_form = UserGroupForm()() |
|
134 | 134 | try: |
|
135 | 135 | form_result = users_group_form.to_python(dict(request.POST)) |
|
136 | 136 | ug = UserGroupModel().create(name=form_result['users_group_name'], |
|
137 | 137 | description=form_result['user_group_description'], |
|
138 | 138 | owner=request.authuser.user_id, |
|
139 | 139 | active=form_result['users_group_active']) |
|
140 | 140 | |
|
141 | 141 | gr = form_result['users_group_name'] |
|
142 | 142 | action_logger(request.authuser, |
|
143 | 143 | 'admin_created_users_group:%s' % gr, |
|
144 | 144 | None, request.ip_addr) |
|
145 | 145 | h.flash(h.literal(_('Created user group %s') % h.link_to(h.escape(gr), url('edit_users_group', id=ug.users_group_id))), |
|
146 | 146 | category='success') |
|
147 | 147 | Session().commit() |
|
148 | 148 | except formencode.Invalid as errors: |
|
149 | 149 | return htmlfill.render( |
|
150 | 150 | render('admin/user_groups/user_group_add.html'), |
|
151 | 151 | defaults=errors.value, |
|
152 | 152 | errors=errors.error_dict or {}, |
|
153 | 153 | prefix_error=False, |
|
154 | 154 | encoding="UTF-8", |
|
155 | 155 | force_defaults=False) |
|
156 | 156 | except Exception: |
|
157 | 157 | log.error(traceback.format_exc()) |
|
158 | 158 | h.flash(_('Error occurred during creation of user group %s') \ |
|
159 | 159 | % request.POST.get('users_group_name'), category='error') |
|
160 | 160 | |
|
161 | 161 | raise HTTPFound(location=url('users_groups')) |
|
162 | 162 | |
|
163 | 163 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') |
|
164 | 164 | def new(self, format='html'): |
|
165 | 165 | return render('admin/user_groups/user_group_add.html') |
|
166 | 166 | |
|
167 | 167 | @HasUserGroupPermissionLevelDecorator('admin') |
|
168 | 168 | def update(self, id): |
|
169 | 169 | c.user_group = UserGroup.get_or_404(id) |
|
170 | 170 | c.active = 'settings' |
|
171 | 171 | self.__load_data(id) |
|
172 | 172 | |
|
173 | 173 | available_members = [safe_unicode(x[0]) for x in c.available_members] |
|
174 | 174 | |
|
175 | 175 | users_group_form = UserGroupForm(edit=True, |
|
176 | 176 | old_data=c.user_group.get_dict(), |
|
177 | 177 | available_members=available_members)() |
|
178 | 178 | |
|
179 | 179 | try: |
|
180 | 180 | form_result = users_group_form.to_python(request.POST) |
|
181 | 181 | UserGroupModel().update(c.user_group, form_result) |
|
182 | 182 | gr = form_result['users_group_name'] |
|
183 | 183 | action_logger(request.authuser, |
|
184 | 184 | 'admin_updated_users_group:%s' % gr, |
|
185 | 185 | None, request.ip_addr) |
|
186 | 186 | h.flash(_('Updated user group %s') % gr, category='success') |
|
187 | 187 | Session().commit() |
|
188 | 188 | except formencode.Invalid as errors: |
|
189 | 189 | ug_model = UserGroupModel() |
|
190 | 190 | defaults = errors.value |
|
191 | 191 | e = errors.error_dict or {} |
|
192 | 192 | defaults.update({ |
|
193 | 193 | 'create_repo_perm': ug_model.has_perm(id, |
|
194 | 194 | 'hg.create.repository'), |
|
195 | 195 | 'fork_repo_perm': ug_model.has_perm(id, |
|
196 | 196 | 'hg.fork.repository'), |
|
197 | 197 | }) |
|
198 | 198 | |
|
199 | 199 | return htmlfill.render( |
|
200 | 200 | render('admin/user_groups/user_group_edit.html'), |
|
201 | 201 | defaults=defaults, |
|
202 | 202 | errors=e, |
|
203 | 203 | prefix_error=False, |
|
204 | 204 | encoding="UTF-8", |
|
205 | 205 | force_defaults=False) |
|
206 | 206 | except Exception: |
|
207 | 207 | log.error(traceback.format_exc()) |
|
208 | 208 | h.flash(_('Error occurred during update of user group %s') \ |
|
209 | 209 | % request.POST.get('users_group_name'), category='error') |
|
210 | 210 | |
|
211 | 211 | raise HTTPFound(location=url('edit_users_group', id=id)) |
|
212 | 212 | |
|
213 | 213 | @HasUserGroupPermissionLevelDecorator('admin') |
|
214 | 214 | def delete(self, id): |
|
215 | 215 | usr_gr = UserGroup.get_or_404(id) |
|
216 | 216 | try: |
|
217 | 217 | UserGroupModel().delete(usr_gr) |
|
218 | 218 | Session().commit() |
|
219 | 219 | h.flash(_('Successfully deleted user group'), category='success') |
|
220 | 220 | except UserGroupsAssignedException as e: |
|
221 | 221 | h.flash(e, category='error') |
|
222 | 222 | except Exception: |
|
223 | 223 | log.error(traceback.format_exc()) |
|
224 | 224 | h.flash(_('An error occurred during deletion of user group'), |
|
225 | 225 | category='error') |
|
226 | 226 | raise HTTPFound(location=url('users_groups')) |
|
227 | 227 | |
|
228 | 228 | @HasUserGroupPermissionLevelDecorator('admin') |
|
229 | 229 | def edit(self, id, format='html'): |
|
230 | 230 | c.user_group = UserGroup.get_or_404(id) |
|
231 | 231 | c.active = 'settings' |
|
232 | 232 | self.__load_data(id) |
|
233 | 233 | |
|
234 | 234 | defaults = self.__load_defaults(id) |
|
235 | 235 | |
|
236 | 236 | return htmlfill.render( |
|
237 | 237 | render('admin/user_groups/user_group_edit.html'), |
|
238 | 238 | defaults=defaults, |
|
239 | 239 | encoding="UTF-8", |
|
240 | 240 | force_defaults=False |
|
241 | 241 | ) |
|
242 | 242 | |
|
243 | 243 | @HasUserGroupPermissionLevelDecorator('admin') |
|
244 | 244 | def edit_perms(self, id): |
|
245 | 245 | c.user_group = UserGroup.get_or_404(id) |
|
246 | 246 | c.active = 'perms' |
|
247 | 247 | |
|
248 | 248 | repo_model = RepoModel() |
|
249 | 249 | c.users_array = repo_model.get_users_js() |
|
250 | 250 | c.user_groups_array = repo_model.get_user_groups_js() |
|
251 | 251 | |
|
252 | 252 | defaults = {} |
|
253 | 253 | # fill user group users |
|
254 | 254 | for p in c.user_group.user_user_group_to_perm: |
|
255 | 255 | defaults.update({'u_perm_%s' % p.user.username: |
|
256 | 256 | p.permission.permission_name}) |
|
257 | 257 | |
|
258 | 258 | for p in c.user_group.user_group_user_group_to_perm: |
|
259 | 259 | defaults.update({'g_perm_%s' % p.user_group.users_group_name: |
|
260 | 260 | p.permission.permission_name}) |
|
261 | 261 | |
|
262 | 262 | return htmlfill.render( |
|
263 | 263 | render('admin/user_groups/user_group_edit.html'), |
|
264 | 264 | defaults=defaults, |
|
265 | 265 | encoding="UTF-8", |
|
266 | 266 | force_defaults=False |
|
267 | 267 | ) |
|
268 | 268 | |
|
269 | 269 | @HasUserGroupPermissionLevelDecorator('admin') |
|
270 | 270 | def update_perms(self, id): |
|
271 | 271 | """ |
|
272 | 272 | grant permission for given usergroup |
|
273 | 273 | |
|
274 | 274 | :param id: |
|
275 | 275 | """ |
|
276 | 276 | user_group = UserGroup.get_or_404(id) |
|
277 | 277 | form = UserGroupPermsForm()().to_python(request.POST) |
|
278 | 278 | |
|
279 | 279 | # set the permissions ! |
|
280 | 280 | try: |
|
281 | 281 | UserGroupModel()._update_permissions(user_group, form['perms_new'], |
|
282 | 282 | form['perms_updates']) |
|
283 | 283 | except RepoGroupAssignmentError: |
|
284 | 284 | h.flash(_('Target group cannot be the same'), category='error') |
|
285 | 285 | raise HTTPFound(location=url('edit_user_group_perms', id=id)) |
|
286 | 286 | #TODO: implement this |
|
287 | 287 | #action_logger(request.authuser, 'admin_changed_repo_permissions', |
|
288 | 288 | # repo_name, request.ip_addr) |
|
289 | 289 | Session().commit() |
|
290 | 290 | h.flash(_('User group permissions updated'), category='success') |
|
291 | 291 | raise HTTPFound(location=url('edit_user_group_perms', id=id)) |
|
292 | 292 | |
|
293 | 293 | @HasUserGroupPermissionLevelDecorator('admin') |
|
294 | 294 | def delete_perms(self, id): |
|
295 | 295 | try: |
|
296 | 296 | obj_type = request.POST.get('obj_type') |
|
297 | 297 | obj_id = None |
|
298 | 298 | if obj_type == 'user': |
|
299 | 299 | obj_id = safe_int(request.POST.get('user_id')) |
|
300 | 300 | elif obj_type == 'user_group': |
|
301 | 301 | obj_id = safe_int(request.POST.get('user_group_id')) |
|
302 | 302 | |
|
303 | 303 | if not request.authuser.is_admin: |
|
304 | 304 | if obj_type == 'user' and request.authuser.user_id == obj_id: |
|
305 | 305 | msg = _('Cannot revoke permission for yourself as admin') |
|
306 | 306 | h.flash(msg, category='warning') |
|
307 | 307 | raise Exception('revoke admin permission on self') |
|
308 | 308 | if obj_type == 'user': |
|
309 | 309 | UserGroupModel().revoke_user_permission(user_group=id, |
|
310 | 310 | user=obj_id) |
|
311 | 311 | elif obj_type == 'user_group': |
|
312 | 312 | UserGroupModel().revoke_user_group_permission(target_user_group=id, |
|
313 | 313 | user_group=obj_id) |
|
314 | 314 | Session().commit() |
|
315 | 315 | except Exception: |
|
316 | 316 | log.error(traceback.format_exc()) |
|
317 | 317 | h.flash(_('An error occurred during revoking of permission'), |
|
318 | 318 | category='error') |
|
319 | 319 | raise HTTPInternalServerError() |
|
320 | 320 | |
|
321 | 321 | @HasUserGroupPermissionLevelDecorator('admin') |
|
322 | 322 | def edit_default_perms(self, id): |
|
323 | 323 | c.user_group = UserGroup.get_or_404(id) |
|
324 | 324 | c.active = 'default_perms' |
|
325 | 325 | |
|
326 | 326 | permissions = { |
|
327 | 327 | 'repositories': {}, |
|
328 | 328 | 'repositories_groups': {} |
|
329 | 329 | } |
|
330 | 330 | ugroup_repo_perms = UserGroupRepoToPerm.query() \ |
|
331 | 331 | .options(joinedload(UserGroupRepoToPerm.permission)) \ |
|
332 | 332 | .options(joinedload(UserGroupRepoToPerm.repository)) \ |
|
333 | 333 | .filter(UserGroupRepoToPerm.users_group_id == id) \ |
|
334 | 334 | .all() |
|
335 | 335 | |
|
336 | 336 | for gr in ugroup_repo_perms: |
|
337 | 337 | permissions['repositories'][gr.repository.repo_name] \ |
|
338 | 338 | = gr.permission.permission_name |
|
339 | 339 | |
|
340 | 340 | ugroup_group_perms = UserGroupRepoGroupToPerm.query() \ |
|
341 | 341 | .options(joinedload(UserGroupRepoGroupToPerm.permission)) \ |
|
342 | 342 | .options(joinedload(UserGroupRepoGroupToPerm.group)) \ |
|
343 | 343 | .filter(UserGroupRepoGroupToPerm.users_group_id == id) \ |
|
344 | 344 | .all() |
|
345 | 345 | |
|
346 | 346 | for gr in ugroup_group_perms: |
|
347 | 347 | permissions['repositories_groups'][gr.group.group_name] \ |
|
348 | 348 | = gr.permission.permission_name |
|
349 | 349 | c.permissions = permissions |
|
350 | 350 | |
|
351 | 351 | ug_model = UserGroupModel() |
|
352 | 352 | |
|
353 | 353 | defaults = c.user_group.get_dict() |
|
354 | 354 | defaults.update({ |
|
355 | 355 | 'create_repo_perm': ug_model.has_perm(c.user_group, |
|
356 | 356 | 'hg.create.repository'), |
|
357 | 357 | 'create_user_group_perm': ug_model.has_perm(c.user_group, |
|
358 | 358 | 'hg.usergroup.create.true'), |
|
359 | 359 | 'fork_repo_perm': ug_model.has_perm(c.user_group, |
|
360 | 360 | 'hg.fork.repository'), |
|
361 | 361 | }) |
|
362 | 362 | |
|
363 | 363 | return htmlfill.render( |
|
364 | 364 | render('admin/user_groups/user_group_edit.html'), |
|
365 | 365 | defaults=defaults, |
|
366 | 366 | encoding="UTF-8", |
|
367 | 367 | force_defaults=False |
|
368 | 368 | ) |
|
369 | 369 | |
|
370 | 370 | @HasUserGroupPermissionLevelDecorator('admin') |
|
371 | 371 | def update_default_perms(self, id): |
|
372 | 372 | user_group = UserGroup.get_or_404(id) |
|
373 | 373 | |
|
374 | 374 | try: |
|
375 | 375 | form = CustomDefaultPermissionsForm()() |
|
376 | 376 | form_result = form.to_python(request.POST) |
|
377 | 377 | |
|
378 | 378 | inherit_perms = form_result['inherit_default_permissions'] |
|
379 | 379 | user_group.inherit_default_permissions = inherit_perms |
|
380 | 380 | usergroup_model = UserGroupModel() |
|
381 | 381 | |
|
382 | 382 | defs = UserGroupToPerm.query() \ |
|
383 | 383 | .filter(UserGroupToPerm.users_group == user_group) \ |
|
384 | 384 | .all() |
|
385 | 385 | for ug in defs: |
|
386 | 386 | Session().delete(ug) |
|
387 | 387 | |
|
388 | 388 | if form_result['create_repo_perm']: |
|
389 | 389 | usergroup_model.grant_perm(id, 'hg.create.repository') |
|
390 | 390 | else: |
|
391 | 391 | usergroup_model.grant_perm(id, 'hg.create.none') |
|
392 | 392 | if form_result['create_user_group_perm']: |
|
393 | 393 | usergroup_model.grant_perm(id, 'hg.usergroup.create.true') |
|
394 | 394 | else: |
|
395 | 395 | usergroup_model.grant_perm(id, 'hg.usergroup.create.false') |
|
396 | 396 | if form_result['fork_repo_perm']: |
|
397 | 397 | usergroup_model.grant_perm(id, 'hg.fork.repository') |
|
398 | 398 | else: |
|
399 | 399 | usergroup_model.grant_perm(id, 'hg.fork.none') |
|
400 | 400 | |
|
401 | 401 | h.flash(_("Updated permissions"), category='success') |
|
402 | 402 | Session().commit() |
|
403 | 403 | except Exception: |
|
404 | 404 | log.error(traceback.format_exc()) |
|
405 | 405 | h.flash(_('An error occurred during permissions saving'), |
|
406 | 406 | category='error') |
|
407 | 407 | |
|
408 | 408 | raise HTTPFound(location=url('edit_user_group_default_perms', id=id)) |
|
409 | 409 | |
|
410 | 410 | @HasUserGroupPermissionLevelDecorator('admin') |
|
411 | 411 | def edit_advanced(self, id): |
|
412 | 412 | c.user_group = UserGroup.get_or_404(id) |
|
413 | 413 | c.active = 'advanced' |
|
414 | 414 | c.group_members_obj = sorted((x.user for x in c.user_group.members), |
|
415 | 415 | key=lambda u: u.username.lower()) |
|
416 | 416 | return render('admin/user_groups/user_group_edit.html') |
|
417 | 417 | |
|
418 | 418 | |
|
419 | 419 | @HasUserGroupPermissionLevelDecorator('admin') |
|
420 | 420 | def edit_members(self, id): |
|
421 | 421 | c.user_group = UserGroup.get_or_404(id) |
|
422 | 422 | c.active = 'members' |
|
423 | 423 | c.group_members_obj = sorted((x.user for x in c.user_group.members), |
|
424 | 424 | key=lambda u: u.username.lower()) |
|
425 | 425 | |
|
426 | 426 | c.group_members = [(x.user_id, x.username) for x in c.group_members_obj] |
|
427 | 427 | return render('admin/user_groups/user_group_edit.html') |
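
Review note: apart from the import line, the only change in this file is the template lookup at line 97; the mako rendering pattern around it is unchanged. A minimal sketch of that pattern under TurboGears, using the same names as index() above (_, h and c are assumed to be in scope as they are in the controller):

    from tg import app_globals
    # the shared mako TemplateLookup now comes from tg's app_globals
    template = app_globals.mako_lookup.get_template('data_table/_dt_elements.html')
    # render one <%def> from the template; the id and name are illustrative
    html = template.get_def('user_group_name').render(3, u'devs', _=_, h=h, c=c)
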
@@ -1,436 +1,436 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.admin.users |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Users crud controller |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 4, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import logging |
|
29 | 29 | import traceback |
|
30 | 30 | import formencode |
|
31 | 31 | |
|
32 | 32 | from formencode import htmlfill |
|
33 | from tg import request, tmpl_context as c, config | |
|
33 | from tg import request, tmpl_context as c, config, app_globals | |
|
34 | 34 | from tg.i18n import ugettext as _ |
|
35 | 35 | from sqlalchemy.sql.expression import func |
|
36 | 36 | from webob.exc import HTTPFound, HTTPNotFound |
|
37 | 37 | |
|
38 | 38 | import kallithea |
|
39 | 39 | from kallithea.config.routing import url |
|
40 | 40 | from kallithea.lib.exceptions import DefaultUserException, \ |
|
41 | 41 | UserOwnsReposException, UserCreationError |
|
42 | 42 | from kallithea.lib import helpers as h |
|
43 | 43 | from kallithea.lib.auth import LoginRequired, HasPermissionAnyDecorator, \ |
|
44 | 44 | AuthUser |
|
45 | 45 | from kallithea.lib import auth_modules |
|
46 | 46 | from kallithea.lib.base import BaseController, render |
|
47 | 47 | from kallithea.model.api_key import ApiKeyModel |
|
48 | 48 | |
|
49 | 49 | from kallithea.model.db import User, UserEmailMap, UserIpMap, UserToPerm |
|
50 | 50 | from kallithea.model.forms import UserForm, CustomDefaultPermissionsForm |
|
51 | 51 | from kallithea.model.user import UserModel |
|
52 | 52 | from kallithea.model.meta import Session |
|
53 | 53 | from kallithea.lib.utils import action_logger |
|
54 | 54 | from kallithea.lib.utils2 import datetime_to_time, safe_int, generate_api_key |
|
55 | 55 | |
|
56 | 56 | log = logging.getLogger(__name__) |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | class UsersController(BaseController): |
|
60 | 60 | """REST Controller styled on the Atom Publishing Protocol""" |
|
61 | 61 | |
|
62 | 62 | @LoginRequired() |
|
63 | 63 | @HasPermissionAnyDecorator('hg.admin') |
|
64 | 64 | def _before(self, *args, **kwargs): |
|
65 | 65 | super(UsersController, self)._before(*args, **kwargs) |
|
66 | 66 | c.available_permissions = config['available_permissions'] |
|
67 | 67 | |
|
68 | 68 | def index(self, format='html'): |
|
69 | 69 | c.users_list = User.query().order_by(User.username) \ |
|
70 | 70 | .filter_by(is_default_user=False) \ |
|
71 | 71 | .order_by(func.lower(User.username)) \ |
|
72 | 72 | .all() |
|
73 | 73 | |
|
74 | 74 | users_data = [] |
|
75 | 75 | total_records = len(c.users_list) |
|
76 | _tmpl_lookup = | |

76 | _tmpl_lookup = app_globals.mako_lookup | |
|
77 | 77 | template = _tmpl_lookup.get_template('data_table/_dt_elements.html') |
|
78 | 78 | |
|
79 | 79 | grav_tmpl = '<div class="gravatar">%s</div>' |
|
80 | 80 | |
|
81 | 81 | username = lambda user_id, username: ( |
|
82 | 82 | template.get_def("user_name") |
|
83 | 83 | .render(user_id, username, _=_, h=h, c=c)) |
|
84 | 84 | |
|
85 | 85 | user_actions = lambda user_id, username: ( |
|
86 | 86 | template.get_def("user_actions") |
|
87 | 87 | .render(user_id, username, _=_, h=h, c=c)) |
|
88 | 88 | |
|
89 | 89 | for user in c.users_list: |
|
90 | 90 | users_data.append({ |
|
91 | 91 | "gravatar": grav_tmpl % h.gravatar(user.email, size=20), |
|
92 | 92 | "raw_name": user.username, |
|
93 | 93 | "username": username(user.user_id, user.username), |
|
94 | 94 | "firstname": h.escape(user.name), |
|
95 | 95 | "lastname": h.escape(user.lastname), |
|
96 | 96 | "last_login": h.fmt_date(user.last_login), |
|
97 | 97 | "last_login_raw": datetime_to_time(user.last_login), |
|
98 | 98 | "active": h.boolicon(user.active), |
|
99 | 99 | "admin": h.boolicon(user.admin), |
|
100 | 100 | "extern_type": user.extern_type, |
|
101 | 101 | "extern_name": user.extern_name, |
|
102 | 102 | "action": user_actions(user.user_id, user.username), |
|
103 | 103 | }) |
|
104 | 104 | |
|
105 | 105 | c.data = { |
|
106 | 106 | "totalRecords": total_records, |
|
107 | 107 | "startIndex": 0, |
|
108 | 108 | "sort": None, |
|
109 | 109 | "dir": "asc", |
|
110 | 110 | "records": users_data |
|
111 | 111 | } |
|
112 | 112 | |
|
113 | 113 | return render('admin/users/users.html') |
|
114 | 114 | |
|
115 | 115 | def create(self): |
|
116 | 116 | c.default_extern_type = User.DEFAULT_AUTH_TYPE |
|
117 | 117 | c.default_extern_name = '' |
|
118 | 118 | user_model = UserModel() |
|
119 | 119 | user_form = UserForm()() |
|
120 | 120 | try: |
|
121 | 121 | form_result = user_form.to_python(dict(request.POST)) |
|
122 | 122 | user = user_model.create(form_result) |
|
123 | 123 | action_logger(request.authuser, 'admin_created_user:%s' % user.username, |
|
124 | 124 | None, request.ip_addr) |
|
125 | 125 | h.flash(_('Created user %s') % user.username, |
|
126 | 126 | category='success') |
|
127 | 127 | Session().commit() |
|
128 | 128 | except formencode.Invalid as errors: |
|
129 | 129 | return htmlfill.render( |
|
130 | 130 | render('admin/users/user_add.html'), |
|
131 | 131 | defaults=errors.value, |
|
132 | 132 | errors=errors.error_dict or {}, |
|
133 | 133 | prefix_error=False, |
|
134 | 134 | encoding="UTF-8", |
|
135 | 135 | force_defaults=False) |
|
136 | 136 | except UserCreationError as e: |
|
137 | 137 | h.flash(e, 'error') |
|
138 | 138 | except Exception: |
|
139 | 139 | log.error(traceback.format_exc()) |
|
140 | 140 | h.flash(_('Error occurred during creation of user %s') \ |
|
141 | 141 | % request.POST.get('username'), category='error') |
|
142 | 142 | raise HTTPFound(location=url('edit_user', id=user.user_id)) |
|
143 | 143 | |
|
144 | 144 | def new(self, format='html'): |
|
145 | 145 | c.default_extern_type = User.DEFAULT_AUTH_TYPE |
|
146 | 146 | c.default_extern_name = '' |
|
147 | 147 | return render('admin/users/user_add.html') |
|
148 | 148 | |
|
149 | 149 | def update(self, id): |
|
150 | 150 | user_model = UserModel() |
|
151 | 151 | user = user_model.get(id) |
|
152 | 152 | _form = UserForm(edit=True, old_data={'user_id': id, |
|
153 | 153 | 'email': user.email})() |
|
154 | 154 | form_result = {} |
|
155 | 155 | try: |
|
156 | 156 | form_result = _form.to_python(dict(request.POST)) |
|
157 | 157 | skip_attrs = ['extern_type', 'extern_name', |
|
158 | 158 | ] + auth_modules.get_managed_fields(user) |
|
159 | 159 | |
|
160 | 160 | user_model.update(id, form_result, skip_attrs=skip_attrs) |
|
161 | 161 | usr = form_result['username'] |
|
162 | 162 | action_logger(request.authuser, 'admin_updated_user:%s' % usr, |
|
163 | 163 | None, request.ip_addr) |
|
164 | 164 | h.flash(_('User updated successfully'), category='success') |
|
165 | 165 | Session().commit() |
|
166 | 166 | except formencode.Invalid as errors: |
|
167 | 167 | defaults = errors.value |
|
168 | 168 | e = errors.error_dict or {} |
|
169 | 169 | defaults.update({ |
|
170 | 170 | 'create_repo_perm': user_model.has_perm(id, |
|
171 | 171 | 'hg.create.repository'), |
|
172 | 172 | 'fork_repo_perm': user_model.has_perm(id, 'hg.fork.repository'), |
|
173 | 173 | }) |
|
174 | 174 | return htmlfill.render( |
|
175 | 175 | self._render_edit_profile(user), |
|
176 | 176 | defaults=defaults, |
|
177 | 177 | errors=e, |
|
178 | 178 | prefix_error=False, |
|
179 | 179 | encoding="UTF-8", |
|
180 | 180 | force_defaults=False) |
|
181 | 181 | except Exception: |
|
182 | 182 | log.error(traceback.format_exc()) |
|
183 | 183 | h.flash(_('Error occurred during update of user %s') \ |
|
184 | 184 | % form_result.get('username'), category='error') |
|
185 | 185 | raise HTTPFound(location=url('edit_user', id=id)) |
|
186 | 186 | |
|
187 | 187 | def delete(self, id): |
|
188 | 188 | usr = User.get_or_404(id) |
|
189 | 189 | try: |
|
190 | 190 | UserModel().delete(usr) |
|
191 | 191 | Session().commit() |
|
192 | 192 | h.flash(_('Successfully deleted user'), category='success') |
|
193 | 193 | except (UserOwnsReposException, DefaultUserException) as e: |
|
194 | 194 | h.flash(e, category='warning') |
|
195 | 195 | except Exception: |
|
196 | 196 | log.error(traceback.format_exc()) |
|
197 | 197 | h.flash(_('An error occurred during deletion of user'), |
|
198 | 198 | category='error') |
|
199 | 199 | raise HTTPFound(location=url('users')) |
|
200 | 200 | |
|
201 | 201 | def _get_user_or_raise_if_default(self, id): |
|
202 | 202 | try: |
|
203 | 203 | return User.get_or_404(id, allow_default=False) |
|
204 | 204 | except DefaultUserException: |
|
205 | 205 | h.flash(_("The default user cannot be edited"), category='warning') |
|
206 | 206 | raise HTTPNotFound |
|
207 | 207 | |
|
208 | 208 | def _render_edit_profile(self, user): |
|
209 | 209 | c.user = user |
|
210 | 210 | c.active = 'profile' |
|
211 | 211 | c.perm_user = AuthUser(dbuser=user) |
|
212 | 212 | managed_fields = auth_modules.get_managed_fields(user) |
|
213 | 213 | c.readonly = lambda n: 'readonly' if n in managed_fields else None |
|
214 | 214 | return render('admin/users/user_edit.html') |
|
215 | 215 | |
|
216 | 216 | def edit(self, id, format='html'): |
|
217 | 217 | user = self._get_user_or_raise_if_default(id) |
|
218 | 218 | defaults = user.get_dict() |
|
219 | 219 | |
|
220 | 220 | return htmlfill.render( |
|
221 | 221 | self._render_edit_profile(user), |
|
222 | 222 | defaults=defaults, |
|
223 | 223 | encoding="UTF-8", |
|
224 | 224 | force_defaults=False) |
|
225 | 225 | |
|
226 | 226 | def edit_advanced(self, id): |
|
227 | 227 | c.user = self._get_user_or_raise_if_default(id) |
|
228 | 228 | c.active = 'advanced' |
|
229 | 229 | c.perm_user = AuthUser(dbuser=c.user) |
|
230 | 230 | |
|
231 | 231 | umodel = UserModel() |
|
232 | 232 | defaults = c.user.get_dict() |
|
233 | 233 | defaults.update({ |
|
234 | 234 | 'create_repo_perm': umodel.has_perm(c.user, 'hg.create.repository'), |
|
235 | 235 | 'create_user_group_perm': umodel.has_perm(c.user, |
|
236 | 236 | 'hg.usergroup.create.true'), |
|
237 | 237 | 'fork_repo_perm': umodel.has_perm(c.user, 'hg.fork.repository'), |
|
238 | 238 | }) |
|
239 | 239 | return htmlfill.render( |
|
240 | 240 | render('admin/users/user_edit.html'), |
|
241 | 241 | defaults=defaults, |
|
242 | 242 | encoding="UTF-8", |
|
243 | 243 | force_defaults=False) |
|
244 | 244 | |
|
245 | 245 | def edit_api_keys(self, id): |
|
246 | 246 | c.user = self._get_user_or_raise_if_default(id) |
|
247 | 247 | c.active = 'api_keys' |
|
248 | 248 | show_expired = True |
|
249 | 249 | c.lifetime_values = [ |
|
250 | 250 | (str(-1), _('Forever')), |
|
251 | 251 | (str(5), _('5 minutes')), |
|
252 | 252 | (str(60), _('1 hour')), |
|
253 | 253 | (str(60 * 24), _('1 day')), |
|
254 | 254 | (str(60 * 24 * 30), _('1 month')), |
|
255 | 255 | ] |
|
256 | 256 | c.lifetime_options = [(c.lifetime_values, _("Lifetime"))] |
|
257 | 257 | c.user_api_keys = ApiKeyModel().get_api_keys(c.user.user_id, |
|
258 | 258 | show_expired=show_expired) |
|
259 | 259 | defaults = c.user.get_dict() |
|
260 | 260 | return htmlfill.render( |
|
261 | 261 | render('admin/users/user_edit.html'), |
|
262 | 262 | defaults=defaults, |
|
263 | 263 | encoding="UTF-8", |
|
264 | 264 | force_defaults=False) |
|
265 | 265 | |
|
266 | 266 | def add_api_key(self, id): |
|
267 | 267 | c.user = self._get_user_or_raise_if_default(id) |
|
268 | 268 | |
|
269 | 269 | lifetime = safe_int(request.POST.get('lifetime'), -1) |
|
270 | 270 | description = request.POST.get('description') |
|
271 | 271 | ApiKeyModel().create(c.user.user_id, description, lifetime) |
|
272 | 272 | Session().commit() |
|
273 | 273 | h.flash(_("API key successfully created"), category='success') |
|
274 | 274 | raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id)) |
|
275 | 275 | |
|
276 | 276 | def delete_api_key(self, id): |
|
277 | 277 | c.user = self._get_user_or_raise_if_default(id) |
|
278 | 278 | |
|
279 | 279 | api_key = request.POST.get('del_api_key') |
|
280 | 280 | if request.POST.get('del_api_key_builtin'): |
|
281 | 281 | c.user.api_key = generate_api_key() |
|
282 | 282 | Session().commit() |
|
283 | 283 | h.flash(_("API key successfully reset"), category='success') |
|
284 | 284 | elif api_key: |
|
285 | 285 | ApiKeyModel().delete(api_key, c.user.user_id) |
|
286 | 286 | Session().commit() |
|
287 | 287 | h.flash(_("API key successfully deleted"), category='success') |
|
288 | 288 | |
|
289 | 289 | raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id)) |
|
290 | 290 | |
|
291 | 291 | def update_account(self, id): |
|
292 | 292 | pass |
|
293 | 293 | |
|
294 | 294 | def edit_perms(self, id): |
|
295 | 295 | c.user = self._get_user_or_raise_if_default(id) |
|
296 | 296 | c.active = 'perms' |
|
297 | 297 | c.perm_user = AuthUser(dbuser=c.user) |
|
298 | 298 | |
|
299 | 299 | umodel = UserModel() |
|
300 | 300 | defaults = c.user.get_dict() |
|
301 | 301 | defaults.update({ |
|
302 | 302 | 'create_repo_perm': umodel.has_perm(c.user, 'hg.create.repository'), |
|
303 | 303 | 'create_user_group_perm': umodel.has_perm(c.user, |
|
304 | 304 | 'hg.usergroup.create.true'), |
|
305 | 305 | 'fork_repo_perm': umodel.has_perm(c.user, 'hg.fork.repository'), |
|
306 | 306 | }) |
|
307 | 307 | return htmlfill.render( |
|
308 | 308 | render('admin/users/user_edit.html'), |
|
309 | 309 | defaults=defaults, |
|
310 | 310 | encoding="UTF-8", |
|
311 | 311 | force_defaults=False) |
|
312 | 312 | |
|
313 | 313 | def update_perms(self, id): |
|
314 | 314 | user = self._get_user_or_raise_if_default(id) |
|
315 | 315 | |
|
316 | 316 | try: |
|
317 | 317 | form = CustomDefaultPermissionsForm()() |
|
318 | 318 | form_result = form.to_python(request.POST) |
|
319 | 319 | |
|
320 | 320 | inherit_perms = form_result['inherit_default_permissions'] |
|
321 | 321 | user.inherit_default_permissions = inherit_perms |
|
322 | 322 | user_model = UserModel() |
|
323 | 323 | |
|
324 | 324 | defs = UserToPerm.query() \ |
|
325 | 325 | .filter(UserToPerm.user == user) \ |
|
326 | 326 | .all() |
|
327 | 327 | for ug in defs: |
|
328 | 328 | Session().delete(ug) |
|
329 | 329 | |
|
330 | 330 | if form_result['create_repo_perm']: |
|
331 | 331 | user_model.grant_perm(id, 'hg.create.repository') |
|
332 | 332 | else: |
|
333 | 333 | user_model.grant_perm(id, 'hg.create.none') |
|
334 | 334 | if form_result['create_user_group_perm']: |
|
335 | 335 | user_model.grant_perm(id, 'hg.usergroup.create.true') |
|
336 | 336 | else: |
|
337 | 337 | user_model.grant_perm(id, 'hg.usergroup.create.false') |
|
338 | 338 | if form_result['fork_repo_perm']: |
|
339 | 339 | user_model.grant_perm(id, 'hg.fork.repository') |
|
340 | 340 | else: |
|
341 | 341 | user_model.grant_perm(id, 'hg.fork.none') |
|
342 | 342 | h.flash(_("Updated permissions"), category='success') |
|
343 | 343 | Session().commit() |
|
344 | 344 | except Exception: |
|
345 | 345 | log.error(traceback.format_exc()) |
|
346 | 346 | h.flash(_('An error occurred during permissions saving'), |
|
347 | 347 | category='error') |
|
348 | 348 | raise HTTPFound(location=url('edit_user_perms', id=id)) |
|
349 | 349 | |
|
350 | 350 | def edit_emails(self, id): |
|
351 | 351 | c.user = self._get_user_or_raise_if_default(id) |
|
352 | 352 | c.active = 'emails' |
|
353 | 353 | c.user_email_map = UserEmailMap.query() \ |
|
354 | 354 | .filter(UserEmailMap.user == c.user).all() |
|
355 | 355 | |
|
356 | 356 | defaults = c.user.get_dict() |
|
357 | 357 | return htmlfill.render( |
|
358 | 358 | render('admin/users/user_edit.html'), |
|
359 | 359 | defaults=defaults, |
|
360 | 360 | encoding="UTF-8", |
|
361 | 361 | force_defaults=False) |
|
362 | 362 | |
|
363 | 363 | def add_email(self, id): |
|
364 | 364 | user = self._get_user_or_raise_if_default(id) |
|
365 | 365 | email = request.POST.get('new_email') |
|
366 | 366 | user_model = UserModel() |
|
367 | 367 | |
|
368 | 368 | try: |
|
369 | 369 | user_model.add_extra_email(id, email) |
|
370 | 370 | Session().commit() |
|
371 | 371 | h.flash(_("Added email %s to user") % email, category='success') |
|
372 | 372 | except formencode.Invalid as error: |
|
373 | 373 | msg = error.error_dict['email'] |
|
374 | 374 | h.flash(msg, category='error') |
|
375 | 375 | except Exception: |
|
376 | 376 | log.error(traceback.format_exc()) |
|
377 | 377 | h.flash(_('An error occurred during email saving'), |
|
378 | 378 | category='error') |
|
379 | 379 | raise HTTPFound(location=url('edit_user_emails', id=id)) |
|
380 | 380 | |
|
381 | 381 | def delete_email(self, id): |
|
382 | 382 | user = self._get_user_or_raise_if_default(id) |
|
383 | 383 | email_id = request.POST.get('del_email_id') |
|
384 | 384 | user_model = UserModel() |
|
385 | 385 | user_model.delete_extra_email(id, email_id) |
|
386 | 386 | Session().commit() |
|
387 | 387 | h.flash(_("Removed email from user"), category='success') |
|
388 | 388 | raise HTTPFound(location=url('edit_user_emails', id=id)) |
|
389 | 389 | |
|
390 | 390 | def edit_ips(self, id): |
|
391 | 391 | c.user = self._get_user_or_raise_if_default(id) |
|
392 | 392 | c.active = 'ips' |
|
393 | 393 | c.user_ip_map = UserIpMap.query() \ |
|
394 | 394 | .filter(UserIpMap.user == c.user).all() |
|
395 | 395 | |
|
396 | 396 | c.inherit_default_ips = c.user.inherit_default_permissions |
|
397 | 397 | c.default_user_ip_map = UserIpMap.query() \ |
|
398 | 398 | .filter(UserIpMap.user == User.get_default_user()).all() |
|
399 | 399 | |
|
400 | 400 | defaults = c.user.get_dict() |
|
401 | 401 | return htmlfill.render( |
|
402 | 402 | render('admin/users/user_edit.html'), |
|
403 | 403 | defaults=defaults, |
|
404 | 404 | encoding="UTF-8", |
|
405 | 405 | force_defaults=False) |
|
406 | 406 | |
|
407 | 407 | def add_ip(self, id): |
|
408 | 408 | ip = request.POST.get('new_ip') |
|
409 | 409 | user_model = UserModel() |
|
410 | 410 | |
|
411 | 411 | try: |
|
412 | 412 | user_model.add_extra_ip(id, ip) |
|
413 | 413 | Session().commit() |
|
414 | 414 | h.flash(_("Added IP address %s to user whitelist") % ip, category='success') |
|
415 | 415 | except formencode.Invalid as error: |
|
416 | 416 | msg = error.error_dict['ip'] |
|
417 | 417 | h.flash(msg, category='error') |
|
418 | 418 | except Exception: |
|
419 | 419 | log.error(traceback.format_exc()) |
|
420 | 420 | h.flash(_('An error occurred while adding IP address'), |
|
421 | 421 | category='error') |
|
422 | 422 | |
|
423 | 423 | if 'default_user' in request.POST: |
|
424 | 424 | raise HTTPFound(location=url('admin_permissions_ips')) |
|
425 | 425 | raise HTTPFound(location=url('edit_user_ips', id=id)) |
|
426 | 426 | |
|
427 | 427 | def delete_ip(self, id): |
|
428 | 428 | ip_id = request.POST.get('del_ip_id') |
|
429 | 429 | user_model = UserModel() |
|
430 | 430 | user_model.delete_extra_ip(id, ip_id) |
|
431 | 431 | Session().commit() |
|
432 | 432 | h.flash(_("Removed IP address from user whitelist"), category='success') |
|
433 | 433 | |
|
434 | 434 | if 'default_user' in request.POST: |
|
435 | 435 | raise HTTPFound(location=url('admin_permissions_ips')) |
|
436 | 436 | raise HTTPFound(location=url('edit_user_ips', id=id)) |
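
Review note: add_api_key treats 'lifetime' as minutes, with -1 meaning no expiry (the 'Forever' choice offered in edit_api_keys). A small usage sketch of the model call it wraps (user_id and the description are illustrative):

    from kallithea.model.api_key import ApiKeyModel
    from kallithea.model.meta import Session
    # create a key that expires after one day (60 * 24 minutes); pass -1 for 'Forever'
    ApiKeyModel().create(user_id, u'build bot key', 60 * 24)
    Session().commit()
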
@@ -1,297 +1,278 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.api |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | JSON RPC controller |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Aug 20, 2011 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import inspect |
|
29 | 29 | import logging |
|
30 | 30 | import types |
|
31 | 31 | import traceback |
|
32 | 32 | import time |
|
33 | 33 | import itertools |
|
34 | 34 | |
|
35 | from paste.response import replace_header | |
|
36 | from pylons.controllers import WSGIController | |
|
37 | from pylons.controllers.util import Response | |
|
38 | from tg import request | |
|
35 | from tg import Response, response, request, TGController | |
|
39 | 36 | |
|
40 | from webob.exc import HTTPError | |
|
37 | from webob.exc import HTTPError, HTTPException, WSGIHTTPException | |
|
41 | 38 | |
|
42 | 39 | from kallithea.model.db import User |
|
43 | 40 | from kallithea.model import meta |
|
44 | 41 | from kallithea.lib.compat import json |
|
45 | 42 | from kallithea.lib.auth import AuthUser |
|
46 | 43 | from kallithea.lib.base import _get_ip_addr as _get_ip, _get_access_path |
|
47 | 44 | from kallithea.lib.utils2 import safe_unicode, safe_str |
|
48 | 45 | |
|
49 | 46 | log = logging.getLogger('JSONRPC') |
|
50 | 47 | |
|
51 | 48 | |
|
52 | 49 | class JSONRPCError(BaseException): |
|
53 | 50 | |
|
54 | 51 | def __init__(self, message): |
|
55 | 52 | self.message = message |
|
56 | 53 | super(JSONRPCError, self).__init__() |
|
57 | 54 | |
|
58 | 55 | def __str__(self): |
|
59 | 56 | return safe_str(self.message) |
|
60 | 57 | |
|
61 | 58 | |
|
62 | class JSONRPCErrorResponse(Response, Exception): | |
|
59 | class JSONRPCErrorResponse(Response, HTTPException): | |
|
63 | 60 | """ |
|
64 | 61 | Generate a Response object with a JSON-RPC error body |
|
65 | 62 | """ |
|
66 | 63 | |
|
67 | 64 | def __init__(self, message=None, retid=None, code=None): |
|
65 | HTTPException.__init__(self, message, self) | |
|
68 | 66 | Response.__init__(self, |
|
69 | body=json.dumps(dict(id=retid, result=None, error=message)), | |

67 | json_body=dict(id=retid, result=None, error=message), | |
|
70 | 68 | status=code, |
|
71 | 69 | content_type='application/json') |
|
72 | 70 | |
|
73 | 71 | |
|
74 | class JSONRPCController(WSGIController): | |

72 | class JSONRPCController(TGController): | |
|
75 | 73 | """ |
|
76 | 74 | A WSGI-speaking JSON-RPC controller class |
|
77 | 75 | |
|
78 | 76 | See the specification: |
|
79 | 77 | <http://json-rpc.org/wiki/specification>. |
|
80 | 78 | |
|
81 | 79 | Valid controller return values should be json-serializable objects. |
|
82 | 80 | |
|
83 | 81 | Sub-classes should catch their exceptions and raise JSONRPCError |
|
84 | 82 | if they want to pass meaningful errors to the client. |
|
85 | 83 | |
|
86 | 84 | """ |
|
87 | 85 | |
|
88 | 86 | def _get_ip_addr(self, environ): |
|
89 | 87 | return _get_ip(environ) |
|
90 | 88 | |
|
91 | 89 | def _get_method_args(self): |
|
92 | 90 | """ |
|
93 | 91 | Return `self._rpc_args` to dispatched controller method |
|
94 | 92 | chosen by _dispatch |
|
95 | 93 | """ |
|
96 | 94 | return self._rpc_args |
|
97 | 95 | |
|
98 | def __call__(self, environ, start_response): | |
|
96 | def _dispatch(self, state, remainder=None): | |
|
99 | 97 | """ |
|
100 | 98 | Parse the request body as JSON, look up the method on the |
|
101 | 99 | controller and if it exists, dispatch to it. |
|
102 | 100 | """ |
|
103 | try: | |
|
104 | return self._handle_request(environ, start_response) | |
|
105 | except JSONRPCErrorResponse as e: | |
|
106 | return e | |
|
107 | finally: | |
|
108 | meta.Session.remove() | |
|
101 | # Since we are here we should respond as JSON | |
|
102 | response.content_type = 'application/json' | |
|
109 | 103 | |
|
110 | def _handle_request(self, environ, start_response): | |
|
104 | environ = state.request.environ | |
|
111 | 105 | start = time.time() |
|
112 | 106 | ip_addr = request.ip_addr = self._get_ip_addr(environ) |
|
113 | 107 | self._req_id = None |
|
114 | 108 | if 'CONTENT_LENGTH' not in environ: |
|
115 | 109 | log.debug("No Content-Length") |
|
116 | 110 | raise JSONRPCErrorResponse(retid=self._req_id, |
|
117 | 111 | message="No Content-Length in request") |
|
118 | 112 | else: |
|
119 | 113 | length = environ['CONTENT_LENGTH'] or 0 |
|
120 | 114 | length = int(environ['CONTENT_LENGTH']) |
|
121 | 115 | log.debug('Content-Length: %s', length) |
|
122 | 116 | |
|
123 | 117 | if length == 0: |
|
124 | 118 | raise JSONRPCErrorResponse(retid=self._req_id, |
|
125 | 119 | message="Content-Length is 0") |
|
126 | 120 | |
|
127 | 121 | raw_body = environ['wsgi.input'].read(length) |
|
128 | 122 | |
|
129 | 123 | try: |
|
130 | 124 | json_body = json.loads(raw_body) |
|
131 | 125 | except ValueError as e: |
|
132 | 126 | # catch JSON errors here |
|
133 | 127 | raise JSONRPCErrorResponse(retid=self._req_id, |
|
134 | 128 | message="JSON parse error ERR:%s RAW:%r" |
|
135 | 129 | % (e, raw_body)) |
|
136 | 130 | |
|
137 | 131 | # check AUTH based on API key |
|
138 | 132 | try: |
|
139 | 133 | self._req_api_key = json_body['api_key'] |
|
140 | 134 | self._req_id = json_body['id'] |
|
141 | 135 | self._req_method = json_body['method'] |
|
142 | 136 | self._request_params = json_body['args'] |
|
143 | 137 | if not isinstance(self._request_params, dict): |
|
144 | 138 | self._request_params = {} |
|
145 | 139 | |
|
146 | 140 | log.debug('method: %s, params: %s', |
|
147 | 141 | self._req_method, self._request_params) |
|
148 | 142 | except KeyError as e: |
|
149 | 143 | raise JSONRPCErrorResponse(retid=self._req_id, |
|
150 | 144 | message='Incorrect JSON query missing %s' % e) |
|
151 | 145 | |
|
152 | 146 | # check if we can find this session using api_key |
|
153 | 147 | try: |
|
154 | 148 | u = User.get_by_api_key(self._req_api_key) |
|
155 | 149 | if u is None: |
|
156 | 150 | raise JSONRPCErrorResponse(retid=self._req_id, |
|
157 | 151 | message='Invalid API key') |
|
158 | 152 | |
|
159 | 153 | auth_u = AuthUser(dbuser=u) |
|
160 | 154 | if not AuthUser.check_ip_allowed(auth_u, ip_addr): |
|
161 | 155 | raise JSONRPCErrorResponse(retid=self._req_id, |
|
162 | 156 | message='request from IP:%s not allowed' % (ip_addr,)) |
|
163 | 157 | else: |
|
164 | 158 | log.info('Access for IP:%s allowed', ip_addr) |
|
165 | 159 | |
|
166 | 160 | except Exception as e: |
|
167 | 161 | raise JSONRPCErrorResponse(retid=self._req_id, |
|
168 | 162 | message='Invalid API key') |
|
169 | 163 | |
|
170 | 164 | self._error = None |
|
171 | 165 | try: |
|
172 | 166 | self._func = self._find_method() |
|
173 | 167 | except AttributeError as e: |
|
174 | 168 | raise JSONRPCErrorResponse(retid=self._req_id, |
|
175 | 169 | message=str(e)) |
|
176 | 170 | |
|
177 | 171 | # now that we have a method, add self._request_params to |

178 | 172 | # self._rpc_args and dispatch control to the controller |
|
179 | 173 | argspec = inspect.getargspec(self._func) |
|
180 | 174 | arglist = argspec[0][1:] |
|
181 | 175 | defaults = map(type, argspec[3] or []) |
|
182 | 176 | default_empty = types.NotImplementedType |
|
183 | 177 | |
|
184 | 178 | # kw arguments required by this method |
|
185 | 179 | func_kwargs = dict(itertools.izip_longest(reversed(arglist), reversed(defaults), |
|
186 | 180 | fillvalue=default_empty)) |
|
187 | 181 | |
|
188 | 182 | # this is a little trick to inject the logged-in user; for the |

189 | 183 | # perms decorators to work, they expect the controller class to have |

190 | 184 | # an authuser attribute set |
|
191 | 185 | request.authuser = request.user = auth_u |
|
192 | 186 | |
|
193 | 187 | # This attribute will need to be the first param of a method that uses |

194 | 188 | # api_key, which is translated to an instance of user under that name |
|
195 | 189 | USER_SESSION_ATTR = 'apiuser' |
|
196 | 190 | |
|
197 | 191 | # get our arglist and check if we provided them as args |
|
198 | 192 | for arg, default in func_kwargs.iteritems(): |
|
199 | 193 | if arg == USER_SESSION_ATTR: |
|
200 | 194 | # USER_SESSION_ATTR is something translated from the API key and |

201 | 195 | # was checked before, so we don't need to validate it |
|
202 | 196 | continue |
|
203 | 197 | |
|
204 | 198 | # skip the required param check if its default value is |
|
205 | 199 | # NotImplementedType (default_empty) |
|
206 | 200 | if default == default_empty and arg not in self._request_params: |
|
207 | 201 | raise JSONRPCErrorResponse( |
|
208 | 202 | retid=self._req_id, |
|
209 | 203 | message='Missing non optional `%s` arg in JSON DATA' % arg, |
|
210 | 204 | ) |
|
211 | 205 | |
|
212 | 206 | extra = set(self._request_params).difference(func_kwargs) |
|
213 | 207 | if extra: |
|
214 | 208 | raise JSONRPCErrorResponse( |
|
215 | 209 | retid=self._req_id, |
|
216 | 210 | message='Unknown %s arg in JSON DATA' % |
|
217 | 211 | ', '.join('`%s`' % arg for arg in extra), |
|
218 | 212 | ) |
|
219 | 213 | |
|
220 | 214 | self._rpc_args = {} |
|
221 | ||
|
222 | 215 | self._rpc_args.update(self._request_params) |
|
223 | ||
|
224 | 216 | self._rpc_args['action'] = self._req_method |
|
225 | 217 | self._rpc_args['environ'] = environ |
|
226 | self._rpc_args['start_response'] = start_response | |
|
227 | 218 | |
|
228 | status = [] | |
|
229 | headers = [] | |
|
230 | exc_info = [] | |
|
231 | ||
|
232 | def change_content(new_status, new_headers, new_exc_info=None): | |
|
233 | status.append(new_status) | |
|
234 | headers.extend(new_headers) | |
|
235 | exc_info.append(new_exc_info) | |
|
236 | ||
|
237 | output = WSGIController.__call__(self, environ, change_content) | |
|
238 | output = list(output) # expand iterator - just to ensure exact timing | |
|
239 | replace_header(headers, 'Content-Type', 'application/json') | |
|
240 | start_response(status[0], headers, exc_info[0]) | |
|
241 | 219 | log.info('IP: %s Request to %s time: %.3fs' % ( |
|
242 | 220 | self._get_ip_addr(environ), |
|
243 | 221 | safe_unicode(_get_access_path(environ)), time.time() - start) |
|
244 | 222 | ) |
|
245 | return output | |
|
246 | 223 | |
|
247 | def _dispatch_call(self): | |
|
224 | state.set_action(self._rpc_call, []) | |
|
225 | state.set_params(self._rpc_args) | |
|
226 | return state | |
|
227 | ||
|
228 | def _rpc_call(self, action, environ, **rpc_args): | |
|
248 | 229 | """ |
|
249 | Implement dispatch interface specified by WSGIController | |
|
230 | Call the specified RPC Method | |
|
250 | 231 | """ |
|
251 | 232 | raw_response = '' |
|
252 | 233 | try: |
|
253 | raw_response = | |

234 | raw_response = getattr(self, action)(**rpc_args) | |
|
254 | 235 | if isinstance(raw_response, HTTPError): |
|
255 | 236 | self._error = str(raw_response) |
|
256 | 237 | except JSONRPCError as e: |
|
257 | 238 | self._error = safe_str(e) |
|
258 | 239 | except Exception as e: |
|
259 | 240 | log.error('Encountered unhandled exception: %s', |
|
260 | 241 | traceback.format_exc(),) |
|
261 | 242 | json_exc = JSONRPCError('Internal server error') |
|
262 | 243 | self._error = safe_str(json_exc) |
|
263 | 244 | |
|
264 | 245 | if self._error is not None: |
|
265 | 246 | raw_response = None |
|
266 | 247 | |
|
267 | 248 | response = dict(id=self._req_id, result=raw_response, error=self._error) |
|
268 | 249 | try: |
|
269 | 250 | return json.dumps(response) |
|
270 | 251 | except TypeError as e: |
|
271 | 252 | log.error('API FAILED. Error encoding response: %s', e) |
|
272 | 253 | return json.dumps( |
|
273 | 254 | dict( |
|
274 | 255 | id=self._req_id, |
|
275 | 256 | result=None, |
|
276 | 257 | error="Error encoding response" |
|
277 | 258 | ) |
|
278 | 259 | ) |
|
279 | 260 | |
|
280 | 261 | def _find_method(self): |
|
281 | 262 | """ |
|
282 | 263 | Return method named by `self._req_method` in controller if able |
|
283 | 264 | """ |
|
284 | 265 | log.debug('Trying to find JSON-RPC method: %s', self._req_method) |
|
285 | 266 | if self._req_method.startswith('_'): |
|
286 | 267 | raise AttributeError("Method not allowed") |
|
287 | 268 | |
|
288 | 269 | try: |
|
289 | 270 | func = getattr(self, self._req_method, None) |
|
290 | 271 | except UnicodeEncodeError: |
|
291 | 272 | raise AttributeError("Problem decoding unicode in requested " |
|
292 | 273 | "method name.") |
|
293 | 274 | |
|
294 | 275 | if isinstance(func, types.MethodType): |
|
295 | 276 | return func |
|
296 | 277 | else: |
|
297 | 278 | raise AttributeError("No such method: %s" % (self._req_method,)) |
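
The hunk above replaces Pylons' _dispatch_call/_inspect_call machinery with a plain _rpc_call(action, environ, **rpc_args) that resolves the target method with getattr, while _find_method keeps refusing underscore-prefixed names. A minimal, framework-free sketch of that dispatch pattern follows; the RPCDispatcher class and its ping method are illustrative, not part of Kallithea:

    import json
    import types

    class RPCDispatcher(object):
        def ping(self):
            return 'pong'

        def _find_method(self, name):
            # refuse private names, mirroring _find_method above
            if name.startswith('_'):
                raise AttributeError("Method not allowed")
            func = getattr(self, name, None)
            if isinstance(func, types.MethodType):
                return func
            raise AttributeError("No such method: %s" % (name,))

        def rpc_call(self, req_id, action, **rpc_args):
            result = error = None
            try:
                result = self._find_method(action)(**rpc_args)
            except Exception as e:
                error = str(e)
            # same envelope as the code above: id, result, error
            return json.dumps(dict(id=req_id, result=result, error=error))

    # RPCDispatcher().rpc_call(1, 'ping') returns a JSON object with
    # result 'pong' and error null (key order may vary).
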
@@ -1,94 +1,94 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.error |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Kallithea error controller |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Dec 8, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import os |
|
29 | 29 | import cgi |
|
30 | 30 | import logging |
|
31 | 31 | |
|
32 | from tg import tmpl_context as c, request, config | |
|
32 | from tg import tmpl_context as c, request, config, expose | |
|
33 | 33 | from tg.i18n import ugettext as _ |
|
34 | from pylons.middleware import media_path | |
|
35 | 34 | |
|
36 | 35 | from kallithea.lib.base import BaseController, render |
|
37 | 36 | |
|
38 | 37 | log = logging.getLogger(__name__) |
|
39 | 38 | |
|
40 | 39 | |
|
41 | 40 | class ErrorController(BaseController): |
|
42 | 41 | """Generates error documents as and when they are required. |
|
43 | 42 | |
|
44 | 43 | The ErrorDocuments middleware forwards to ErrorController when error |
|
45 | 44 | related status codes are returned from the application. |
|
46 | 45 | |
|
47 | 46 | This behavior can be altered by changing the parameters to the |
|
48 | 47 | ErrorDocuments middleware in your config/middleware.py file. |
|
49 | 48 | """ |
|
50 | 49 | |
|
51 | 50 | def _before(self, *args, **kwargs): |
|
52 | 51 | # disable all base actions since we don't need them here |
|
53 | 52 | pass |
|
54 | 53 | |
|
55 | def document(self): | |
|
56 | resp = request.environ.get('pylons.original_response') | |
|
54 | @expose('/errors/error_document.html') | |
|
55 | def document(self, *args, **kwargs): | |
|
56 | resp = request.environ.get('tg.original_response') | |
|
57 | 57 | c.site_name = config.get('title') |
|
58 | 58 | |
|
59 | 59 | log.debug('### %s ###', resp and resp.status or 'no response') |
|
60 | 60 | |
|
61 | 61 | e = request.environ |
|
62 | 62 | c.serv_p = r'%(protocol)s://%(host)s/' % { |
|
63 | 63 | 'protocol': e.get('wsgi.url_scheme'), |
|
64 | 64 | 'host': e.get('HTTP_HOST'), } |
|
65 | 65 | if resp: |
|
66 | 66 | c.error_message = cgi.escape(request.GET.get('code', |
|
67 | 67 | str(resp.status))) |
|
68 | 68 | c.error_explanation = self.get_error_explanation(resp.status_int) |
|
69 | 69 | else: |
|
70 | 70 | c.error_message = _('No response') |
|
71 | 71 | c.error_explanation = _('Unknown error') |
|
72 | 72 | |
|
73 | return render('/errors/error_document.html') | |
|
73 | return dict() | |
|
74 | 74 | |
|
75 | 75 | def get_error_explanation(self, code): |
|
76 | 76 | """ get the error explanations of int codes |
|
77 | 77 | [400, 401, 403, 404, 500]""" |
|
78 | 78 | try: |
|
79 | 79 | code = int(code) |
|
80 | 80 | except ValueError: |
|
81 | 81 | code = 500 |
|
82 | 82 | |
|
83 | 83 | if code == 400: |
|
84 | 84 | return _('The request could not be understood by the server' |
|
85 | 85 | ' due to malformed syntax.') |
|
86 | 86 | if code == 401: |
|
87 | 87 | return _('Unauthorized access to resource') |
|
88 | 88 | if code == 403: |
|
89 | 89 | return _("You don't have permission to view this page") |
|
90 | 90 | if code == 404: |
|
91 | 91 | return _('The resource could not be found') |
|
92 | 92 | if code == 500: |
|
93 | 93 | return _('The server encountered an unexpected condition' |
|
94 | 94 | ' which prevented it from fulfilling the request.') |
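
The essential change above is the move from an explicit render('/errors/error_document.html') call to the TurboGears2 exposure idiom: the controller method returns a plain dict, and the expose decorator pairs it with a template (or with the built-in JSON renderer). A minimal sketch of the idiom, with illustrative controller and template names:

    from tg import expose, TGController

    class SketchController(TGController):
        @expose('kallithea.templates.example')  # illustrative template name
        def page(self):
            # every key of the returned dict becomes a template variable,
            # playing the role of c.error_message/c.error_explanation above
            return dict(error_message='Not found', error_explanation='...')

        @expose('json')  # TurboGears' built-in JSON renderer
        def page_json(self):
            return dict(error_message='Not found')
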
@@ -1,46 +1,52 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | |
|
15 | 15 | """ |
|
16 | 16 | kallithea.lib.app_globals |
|
17 | 17 | ~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
18 | 18 | |
|
19 | 19 | The application's Globals object |
|
20 | 20 | |
|
21 | 21 | This file was forked by the Kallithea project in July 2014. |
|
22 | 22 | Original author and date, and relevant copyright and licensing information is below: |
|
23 | 23 | :created_on: Oct 06, 2010 |
|
24 | 24 | :author: marcink |
|
25 | 25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
26 | 26 | :license: GPLv3, see LICENSE.md for more details. |
|
27 | 27 | """ |
|
28 | ||
|
29 | from beaker.cache import CacheManager | |
|
30 | from beaker.util import parse_cache_config_options | |
|
28 | import tg | |
|
29 | from tg import config | |
|
31 | 30 | |
|
32 | 31 | |
|
33 | 32 | class Globals(object): |
|
34 | 33 | """ |
|
35 | 34 | Globals acts as a container for objects available throughout the |
|
36 | 35 | life of the application |
|
37 | 36 | """ |
|
38 | 37 | |
|
39 | def __init__(self, config): | |
|
38 | def __init__(self): | |
|
40 | 39 | """One instance of Globals is created during application |
|
41 | 40 | initialization and is available during requests via the |
|
42 | 41 | 'app_globals' variable |
|
43 | 42 | |
|
44 | 43 | """ |
|
45 | self.cache = CacheManager(**parse_cache_config_options(config)) | |
|
46 | 44 | self.available_permissions = None # propagated after init_model |
|
45 | ||
|
46 | @property | |
|
47 | def cache(self): | |
|
48 | return tg.cache | |
|
49 | ||
|
50 | @property | |
|
51 | def mako_lookup(self): | |
|
52 | return config['render_functions']['mako'].normal_loader |
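
The app_globals hunk above swaps an eagerly constructed Beaker CacheManager for properties that defer to TurboGears at attribute-access time. The difference the property makes, reduced to a sketch (the EagerGlobals/LazyGlobals names are illustrative):

    import tg

    class EagerGlobals(object):
        def __init__(self, cache):
            # bound once, at construction time; goes stale if the
            # framework later replaces its cache configuration
            self.cache = cache

    class LazyGlobals(object):
        @property
        def cache(self):
            # evaluated on every attribute access, so it always returns
            # whatever cache TurboGears currently provides
            return tg.cache
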
@@ -1,688 +1,674 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | |
|
15 | 15 | """ |
|
16 | 16 | kallithea.lib.base |
|
17 | 17 | ~~~~~~~~~~~~~~~~~~ |
|
18 | 18 | |
|
19 | 19 | The base Controller API |
|
20 | 20 | Provides the BaseController class for subclassing and use in different |
|
21 | 21 | controllers |
|
22 | 22 | |
|
23 | 23 | This file was forked by the Kallithea project in July 2014. |
|
24 | 24 | Original author and date, and relevant copyright and licensing information is below: |
|
25 | 25 | :created_on: Oct 06, 2010 |
|
26 | 26 | :author: marcink |
|
27 | 27 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
28 | 28 | :license: GPLv3, see LICENSE.md for more details. |
|
29 | 29 | """ |
|
30 | 30 | |
|
31 | 31 | import datetime |
|
32 | 32 | import decorator |
|
33 | 33 | import logging |
|
34 | 34 | import time |
|
35 | 35 | import traceback |
|
36 | 36 | import warnings |
|
37 | 37 | |
|
38 | 38 | import webob.exc |
|
39 | 39 | import paste.httpexceptions |
|
40 | 40 | import paste.auth.basic |
|
41 | 41 | import paste.httpheaders |
|
42 | 42 | from webhelpers.pylonslib import secure_form |
|
43 | 43 | |
|
44 | from tg import config, tmpl_context as c, request, response, session | |
|
45 | from pylons.controllers import WSGIController | |
|
46 | from pylons.templating import render_mako as render # don't remove this import | |
|
44 | from tg import config, tmpl_context as c, request, response, session, render_template | |
|
45 | from tg import TGController | |
|
47 | 46 | from tg.i18n import ugettext as _ |
|
48 | 47 | |
|
49 | 48 | from kallithea import __version__, BACKENDS |
|
50 | 49 | |
|
51 | 50 | from kallithea.config.routing import url |
|
52 | 51 | from kallithea.lib.utils2 import str2bool, safe_unicode, AttributeDict, \ |
|
53 | 52 | safe_str, safe_int |
|
54 | 53 | from kallithea.lib import auth_modules |
|
55 | 54 | from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware |
|
56 | 55 | from kallithea.lib.compat import json |
|
57 | 56 | from kallithea.lib.utils import get_repo_slug |
|
58 | 57 | from kallithea.lib.exceptions import UserCreationError |
|
59 | 58 | from kallithea.lib.vcs.exceptions import RepositoryError, EmptyRepositoryError, ChangesetDoesNotExistError |
|
60 | 59 | from kallithea.model import meta |
|
61 | 60 | |
|
62 | 61 | from kallithea.model.db import PullRequest, Repository, Ui, User, Setting |
|
63 | 62 | from kallithea.model.notification import NotificationModel |
|
64 | 63 | from kallithea.model.scm import ScmModel |
|
65 | 64 | |
|
66 | 65 | log = logging.getLogger(__name__) |
|
67 | 66 | |
|
68 | 67 | |
|
68 | def render(template_path): | |
|
69 | return render_template({'url': url}, 'mako', template_path) | |
|
70 | ||
|
71 | ||
|
69 | 72 | def _filter_proxy(ip): |
|
70 | 73 | """ |
|
71 | 74 | HEADERS can have multiple IPs inside; the left-most is the original |

72 | 75 | client, and each successive proxy that passed the request adds the IP |

73 | 76 | address where it received the request from. |
|
74 | 77 | |
|
75 | 78 | :param ip: |
|
76 | 79 | """ |
|
77 | 80 | if ',' in ip: |
|
78 | 81 | _ips = ip.split(',') |
|
79 | 82 | _first_ip = _ips[0].strip() |
|
80 | 83 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) |
|
81 | 84 | return _first_ip |
|
82 | 85 | return ip |
|
83 | 86 | |
|
84 | 87 | |
|
85 | 88 | def _get_ip_addr(environ): |
|
86 | 89 | proxy_key = 'HTTP_X_REAL_IP' |
|
87 | 90 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' |
|
88 | 91 | def_key = 'REMOTE_ADDR' |
|
89 | 92 | |
|
90 | 93 | ip = environ.get(proxy_key) |
|
91 | 94 | if ip: |
|
92 | 95 | return _filter_proxy(ip) |
|
93 | 96 | |
|
94 | 97 | ip = environ.get(proxy_key2) |
|
95 | 98 | if ip: |
|
96 | 99 | return _filter_proxy(ip) |
|
97 | 100 | |
|
98 | 101 | ip = environ.get(def_key, '0.0.0.0') |
|
99 | 102 | return _filter_proxy(ip) |
|
100 | 103 | |
|
101 | 104 | |
|
102 | 105 | def _get_access_path(environ): |
|
103 | 106 | path = environ.get('PATH_INFO') |
|
104 | org_req = environ.get('pylons.original_request') | |
|
107 | org_req = environ.get('tg.original_request') | |
|
105 | 108 | if org_req: |
|
106 | 109 | path = org_req.environ.get('PATH_INFO') |
|
107 | 110 | return path |
|
108 | 111 | |
|
109 | 112 | |
|
110 | 113 | def log_in_user(user, remember, is_external_auth): |
|
111 | 114 | """ |
|
112 | 115 | Log a `User` in and update session and cookies. If `remember` is True, |
|
113 | 116 | the session cookie is set to expire in a year; otherwise, it expires at |
|
114 | 117 | the end of the browser session. |
|
115 | 118 | |
|
116 | 119 | Returns populated `AuthUser` object. |
|
117 | 120 | """ |
|
118 | 121 | user.update_lastlogin() |
|
119 | 122 | meta.Session().commit() |
|
120 | 123 | |
|
121 | 124 | auth_user = AuthUser(dbuser=user, |
|
122 | 125 | is_external_auth=is_external_auth) |
|
123 | 126 | # It should not be possible to explicitly log in as the default user. |
|
124 | 127 | assert not auth_user.is_default_user |
|
125 | 128 | auth_user.is_authenticated = True |
|
126 | 129 | |
|
127 | 130 | # Start new session to prevent session fixation attacks. |
|
128 | 131 | session.invalidate() |
|
129 | 132 | session['authuser'] = cookie = auth_user.to_cookie() |
|
130 | 133 | |
|
131 | 134 | # If they want to be remembered, update the cookie. |
|
132 | 135 | # NOTE: Assumes that beaker defaults to browser session cookie. |
|
133 | 136 | if remember: |
|
134 | 137 | t = datetime.datetime.now() + datetime.timedelta(days=365) |
|
135 | 138 | session._set_cookie_expires(t) |
|
136 | 139 | |
|
137 | 140 | session.save() |
|
138 | 141 | |
|
139 | 142 | log.info('user %s is now authenticated and stored in ' |
|
140 | 143 | 'session, session attrs %s', user.username, cookie) |
|
141 | 144 | |
|
142 | 145 | # dumps session attrs back to cookie |
|
143 | 146 | session._update_cookie_out() |
|
144 | 147 | |
|
145 | 148 | return auth_user |
|
146 | 149 | |
|
147 | 150 | |
|
148 | 151 | class BasicAuth(paste.auth.basic.AuthBasicAuthenticator): |
|
149 | 152 | |
|
150 | 153 | def __init__(self, realm, authfunc, auth_http_code=None): |
|
151 | 154 | self.realm = realm |
|
152 | 155 | self.authfunc = authfunc |
|
153 | 156 | self._rc_auth_http_code = auth_http_code |
|
154 | 157 | |
|
155 | 158 | def build_authentication(self): |
|
156 | 159 | head = paste.httpheaders.WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm) |
|
157 | 160 | if self._rc_auth_http_code and self._rc_auth_http_code == '403': |
|
158 | 161 | # return 403 if alternative http return code is specified in |
|
159 | 162 | # Kallithea config |
|
160 | 163 | return paste.httpexceptions.HTTPForbidden(headers=head) |
|
161 | 164 | return paste.httpexceptions.HTTPUnauthorized(headers=head) |
|
162 | 165 | |
|
163 | 166 | def authenticate(self, environ): |
|
164 | 167 | authorization = paste.httpheaders.AUTHORIZATION(environ) |
|
165 | 168 | if not authorization: |
|
166 | 169 | return self.build_authentication() |
|
167 | 170 | (authmeth, auth) = authorization.split(' ', 1) |
|
168 | 171 | if 'basic' != authmeth.lower(): |
|
169 | 172 | return self.build_authentication() |
|
170 | 173 | auth = auth.strip().decode('base64') |
|
171 | 174 | _parts = auth.split(':', 1) |
|
172 | 175 | if len(_parts) == 2: |
|
173 | 176 | username, password = _parts |
|
174 | 177 | if self.authfunc(username, password, environ) is not None: |
|
175 | 178 | return username |
|
176 | 179 | return self.build_authentication() |
|
177 | 180 | |
|
178 | 181 | __call__ = authenticate |
|
179 | 182 | |
|
180 | 183 | |
|
181 | 184 | class BaseVCSController(object): |
|
182 | 185 | """Base controller for handling Mercurial/Git protocol requests |
|
183 | 186 | (coming from a VCS client, and not a browser). |
|
184 | 187 | """ |
|
185 | 188 | |
|
186 | 189 | def __init__(self, application, config): |
|
187 | 190 | self.application = application |
|
188 | 191 | self.config = config |
|
189 | 192 | # base path of repo locations |
|
190 | 193 | self.basepath = self.config['base_path'] |
|
191 | 194 | # authenticate this VCS request using the authentication modules |
|
192 | 195 | self.authenticate = BasicAuth('', auth_modules.authenticate, |
|
193 | 196 | config.get('auth_ret_code')) |
|
194 | 197 | |
|
195 | 198 | def _authorize(self, environ, start_response, action, repo_name, ip_addr): |
|
196 | 199 | """Authenticate and authorize user. |
|
197 | 200 | |
|
198 | 201 | Since we're dealing with a VCS client and not a browser, we only |
|
199 | 202 | support HTTP basic authentication, either directly via raw header |
|
200 | 203 | inspection, or by using container authentication to delegate the |
|
201 | 204 | authentication to the web server. |
|
202 | 205 | |
|
203 | 206 | Returns (user, None) on successful authentication and authorization. |
|
204 | 207 | Returns (None, wsgi_app) to send the wsgi_app response to the client. |
|
205 | 208 | """ |
|
206 | 209 | # Check if anonymous access is allowed. |
|
207 | 210 | default_user = User.get_default_user(cache=True) |
|
208 | 211 | is_default_user_allowed = (default_user.active and |
|
209 | 212 | self._check_permission(action, default_user, repo_name, ip_addr)) |
|
210 | 213 | if is_default_user_allowed: |
|
211 | 214 | return default_user, None |
|
212 | 215 | |
|
213 | 216 | if not default_user.active: |
|
214 | 217 | log.debug('Anonymous access is disabled') |
|
215 | 218 | else: |
|
216 | 219 | log.debug('Not authorized to access this ' |
|
217 | 220 | 'repository as anonymous user') |
|
218 | 221 | |
|
219 | 222 | username = None |
|
220 | 223 | #============================================================== |
|
221 | 224 | # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE |
|
222 | 225 | # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS |
|
223 | 226 | #============================================================== |
|
224 | 227 | |
|
225 | 228 | # try to auth based on environ, container auth methods |
|
226 | 229 | log.debug('Running PRE-AUTH for container based authentication') |
|
227 | 230 | pre_auth = auth_modules.authenticate('', '', environ) |
|
228 | 231 | if pre_auth is not None and pre_auth.get('username'): |
|
229 | 232 | username = pre_auth['username'] |
|
230 | 233 | log.debug('PRE-AUTH got %s as username', username) |
|
231 | 234 | |
|
232 | 235 | # If not authenticated by the container, running basic auth |
|
233 | 236 | if not username: |
|
234 | 237 | self.authenticate.realm = safe_str(self.config['realm']) |
|
235 | 238 | result = self.authenticate(environ) |
|
236 | 239 | if isinstance(result, str): |
|
237 | 240 | paste.httpheaders.AUTH_TYPE.update(environ, 'basic') |
|
238 | 241 | paste.httpheaders.REMOTE_USER.update(environ, result) |
|
239 | 242 | username = result |
|
240 | 243 | else: |
|
241 | 244 | return None, result.wsgi_application |
|
242 | 245 | |
|
243 | 246 | #============================================================== |
|
244 | 247 | # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME |
|
245 | 248 | #============================================================== |
|
246 | 249 | try: |
|
247 | 250 | user = User.get_by_username_or_email(username) |
|
248 | 251 | if user is None or not user.active: |
|
249 | 252 | return None, webob.exc.HTTPForbidden() |
|
250 | 253 | except Exception: |
|
251 | 254 | log.error(traceback.format_exc()) |
|
252 | 255 | return None, webob.exc.HTTPInternalServerError() |
|
253 | 256 | |
|
254 | 257 | #check permissions for this repository |
|
255 | 258 | perm = self._check_permission(action, user, repo_name, ip_addr) |
|
256 | 259 | if not perm: |
|
257 | 260 | return None, webob.exc.HTTPForbidden() |
|
258 | 261 | |
|
259 | 262 | return user, None |
|
260 | 263 | |
|
261 | 264 | def _handle_request(self, environ, start_response): |
|
262 | 265 | raise NotImplementedError() |
|
263 | 266 | |
|
264 | 267 | def _get_by_id(self, repo_name): |
|
265 | 268 | """ |
|
266 | 269 | Gets a special pattern _<ID> from clone url and tries to replace it |
|
267 | 270 | with a repository_name for support of _<ID> permanent URLs |
|
268 | 271 | |
|
269 | 272 | :param repo_name: |
|
270 | 273 | """ |
|
271 | 274 | |
|
272 | 275 | data = repo_name.split('/') |
|
273 | 276 | if len(data) >= 2: |
|
274 | 277 | from kallithea.lib.utils import get_repo_by_id |
|
275 | 278 | by_id_match = get_repo_by_id(repo_name) |
|
276 | 279 | if by_id_match: |
|
277 | 280 | data[1] = safe_str(by_id_match) |
|
278 | 281 | |
|
279 | 282 | return '/'.join(data) |
|
280 | 283 | |
|
281 | 284 | def _invalidate_cache(self, repo_name): |
|
282 | 285 | """ |
|
283 | 286 | Sets cache for this repository for invalidation on next access |
|
284 | 287 | |
|
285 | 288 | :param repo_name: full repo name, also a cache key |
|
286 | 289 | """ |
|
287 | 290 | ScmModel().mark_for_invalidation(repo_name) |
|
288 | 291 | |
|
289 | 292 | def _check_permission(self, action, user, repo_name, ip_addr=None): |
|
290 | 293 | """ |
|
291 | 294 | Checks permissions using action (push/pull) user and repository |
|
292 | 295 | name |
|
293 | 296 | |
|
294 | 297 | :param action: push or pull action |
|
295 | 298 | :param user: `User` instance |
|
296 | 299 | :param repo_name: repository name |
|
297 | 300 | """ |
|
298 | 301 | # check IP |
|
299 | 302 | ip_allowed = AuthUser.check_ip_allowed(user, ip_addr) |
|
300 | 303 | if ip_allowed: |
|
301 | 304 | log.info('Access for IP:%s allowed', ip_addr) |
|
302 | 305 | else: |
|
303 | 306 | return False |
|
304 | 307 | |
|
305 | 308 | if action == 'push': |
|
306 | 309 | if not HasPermissionAnyMiddleware('repository.write', |
|
307 | 310 | 'repository.admin')(user, |
|
308 | 311 | repo_name): |
|
309 | 312 | return False |
|
310 | 313 | |
|
311 | 314 | else: |
|
312 | 315 | # any other action needs at least read permission |
|
313 | 316 | if not HasPermissionAnyMiddleware('repository.read', |
|
314 | 317 | 'repository.write', |
|
315 | 318 | 'repository.admin')(user, |
|
316 | 319 | repo_name): |
|
317 | 320 | return False |
|
318 | 321 | |
|
319 | 322 | return True |
|
320 | 323 | |
|
321 | 324 | def _get_ip_addr(self, environ): |
|
322 | 325 | return _get_ip_addr(environ) |
|
323 | 326 | |
|
324 | 327 | def _check_locking_state(self, environ, action, repo, user_id): |
|
325 | 328 | """ |
|
326 | 329 | Checks locking on this repository, if locking is enabled and lock is |
|
327 | 330 | present returns a tuple of make_lock, locked, locked_by. |
|
328 | 331 | make_lock can have 3 states: None (do nothing), True (make lock), |

329 | 332 | False (release lock). This value is later propagated to hooks, which |

330 | 333 | do the locking. Think of this as signals passed to hooks, telling them what to do. |
|
331 | 334 | |
|
332 | 335 | """ |
|
333 | 336 | locked = False # defines that locked error should be thrown to user |
|
334 | 337 | make_lock = None |
|
335 | 338 | repo = Repository.get_by_repo_name(repo) |
|
336 | 339 | user = User.get(user_id) |
|
337 | 340 | |
|
338 | 341 | # this is kind of hacky, but due to how Mercurial handles client-server |

339 | 342 | # communication, the server sees operations on changesets, bookmarks, phases and |

340 | 343 | # obsolescence markers in different transactions; we don't want to check |

341 | 344 | # locking on those |
|
342 | 345 | obsolete_call = environ['QUERY_STRING'] in ['cmd=listkeys',] |
|
343 | 346 | locked_by = repo.locked |
|
344 | 347 | if repo and repo.enable_locking and not obsolete_call: |
|
345 | 348 | if action == 'push': |
|
346 | 349 | # check if it's already locked; if it is, compare users |
|
347 | 350 | user_id, _date = repo.locked |
|
348 | 351 | if user.user_id == user_id: |
|
349 | 352 | log.debug('Got push from user %s, now unlocking', user) |
|
350 | 353 | # unlock if we have push from user who locked |
|
351 | 354 | make_lock = False |
|
352 | 355 | else: |
|
353 | 356 | # we're not the same user who locked, ban with 423 ! |
|
354 | 357 | locked = True |
|
355 | 358 | if action == 'pull': |
|
356 | 359 | if repo.locked[0] and repo.locked[1]: |
|
357 | 360 | locked = True |
|
358 | 361 | else: |
|
359 | 362 | log.debug('Setting lock on repo %s by %s', repo, user) |
|
360 | 363 | make_lock = True |
|
361 | 364 | |
|
362 | 365 | else: |
|
363 | 366 | log.debug('Repository %s does not have locking enabled', repo) |
|
364 | 367 | log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s', |
|
365 | 368 | make_lock, locked, locked_by) |
|
366 | 369 | return make_lock, locked, locked_by |
|
367 | 370 | |
|
368 | 371 | def __call__(self, environ, start_response): |
|
369 | 372 | start = time.time() |
|
370 | 373 | try: |
|
371 | 374 | return self._handle_request(environ, start_response) |
|
372 | 375 | finally: |
|
373 | 376 | log = logging.getLogger('kallithea.' + self.__class__.__name__) |
|
374 | 377 | log.debug('Request time: %.3fs', time.time() - start) |
|
375 | 378 | meta.Session.remove() |
|
376 | 379 | |
|
377 | 380 | |
|
378 | class BaseController(WSGIController): | |
|
381 | class BaseController(TGController): | |
|
379 | 382 | |
|
380 | 383 | def _before(self, *args, **kwargs): |
|
381 | pass | |
|
382 | ||
|
383 | def __before__(self): | |
|
384 | 384 | """ |
|
385 | __before__ is called before controller methods and after __call__ | |
|
385 | _before is called before controller methods and after __call__ | |
|
386 | 386 | """ |
|
387 | 387 | c.kallithea_version = __version__ |
|
388 | 388 | rc_config = Setting.get_app_settings() |
|
389 | 389 | |
|
390 | 390 | # Visual options |
|
391 | 391 | c.visual = AttributeDict({}) |
|
392 | 392 | |
|
393 | 393 | ## DB stored |
|
394 | 394 | c.visual.show_public_icon = str2bool(rc_config.get('show_public_icon')) |
|
395 | 395 | c.visual.show_private_icon = str2bool(rc_config.get('show_private_icon')) |
|
396 | 396 | c.visual.stylify_metatags = str2bool(rc_config.get('stylify_metatags')) |
|
397 | 397 | c.visual.page_size = safe_int(rc_config.get('dashboard_items', 100)) |
|
398 | 398 | c.visual.admin_grid_items = safe_int(rc_config.get('admin_grid_items', 100)) |
|
399 | 399 | c.visual.repository_fields = str2bool(rc_config.get('repository_fields')) |
|
400 | 400 | c.visual.show_version = str2bool(rc_config.get('show_version')) |
|
401 | 401 | c.visual.use_gravatar = str2bool(rc_config.get('use_gravatar')) |
|
402 | 402 | c.visual.gravatar_url = rc_config.get('gravatar_url') |
|
403 | 403 | |
|
404 | 404 | c.ga_code = rc_config.get('ga_code') |
|
405 | 405 | # TODO: replace undocumented backwards compatibility hack with db upgrade and rename ga_code |
|
406 | 406 | if c.ga_code and '<' not in c.ga_code: |
|
407 | 407 | c.ga_code = '''<script type="text/javascript"> |
|
408 | 408 | var _gaq = _gaq || []; |
|
409 | 409 | _gaq.push(['_setAccount', '%s']); |
|
410 | 410 | _gaq.push(['_trackPageview']); |
|
411 | 411 | |
|
412 | 412 | (function() { |
|
413 | 413 | var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true; |
|
414 | 414 | ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js'; |
|
415 | 415 | var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s); |
|
416 | 416 | })(); |
|
417 | 417 | </script>''' % c.ga_code |
|
418 | 418 | c.site_name = rc_config.get('title') |
|
419 | 419 | c.clone_uri_tmpl = rc_config.get('clone_uri_tmpl') |
|
420 | 420 | |
|
421 | 421 | ## INI stored |
|
422 | 422 | c.visual.allow_repo_location_change = str2bool(config.get('allow_repo_location_change', True)) |
|
423 | 423 | c.visual.allow_custom_hooks_settings = str2bool(config.get('allow_custom_hooks_settings', True)) |
|
424 | 424 | |
|
425 | 425 | c.instance_id = config.get('instance_id') |
|
426 | 426 | c.issues_url = config.get('bugtracker', url('issues_url')) |
|
427 | 427 | # END CONFIG VARS |
|
428 | 428 | |
|
429 | 429 | c.repo_name = get_repo_slug(request) # can be empty |
|
430 | 430 | c.backends = BACKENDS.keys() |
|
431 | 431 | c.unread_notifications = NotificationModel() \ |
|
432 | 432 | .get_unread_cnt_for_user(request.authuser.user_id) |
|
433 | 433 | |
|
434 | 434 | self.cut_off_limit = safe_int(config.get('cut_off_limit')) |
|
435 | 435 | |
|
436 | 436 | c.my_pr_count = PullRequest.query(reviewer_id=request.authuser.user_id, include_closed=False).count() |
|
437 | 437 | |
|
438 | 438 | self.scm_model = ScmModel() |
|
439 | 439 | |
|
440 | # __before__ in Pylons is called _before in TurboGears2. As preparation | |
|
441 | # to the migration to TurboGears2, all __before__ methods were already | |
|
442 | # renamed to _before. We call them from here to keep the behavior. | |
|
443 | # This is a temporary call that will be removed in the real TurboGears2 | |
|
444 | # migration commit. | |
|
445 | self._before() | |
|
446 | ||
|
447 | 440 | @staticmethod |
|
448 | 441 | def _determine_auth_user(api_key, bearer_token, session_authuser): |
|
449 | 442 | """ |
|
450 | 443 | Create an `AuthUser` object given the API key/bearer token |
|
451 | 444 | (if any) and the value of the authuser session cookie. |
|
452 | 445 | """ |
|
453 | 446 | |
|
454 | 447 | # Authenticate by bearer token |
|
455 | 448 | if bearer_token is not None: |
|
456 | 449 | api_key = bearer_token |
|
457 | 450 | |
|
458 | 451 | # Authenticate by API key |
|
459 | 452 | if api_key is not None: |
|
460 | 453 | au = AuthUser(dbuser=User.get_by_api_key(api_key), |
|
461 | 454 | authenticating_api_key=api_key, is_external_auth=True) |
|
462 | 455 | if au.is_anonymous: |
|
463 | 456 | log.warning('API key ****%s is NOT valid', api_key[-4:]) |
|
464 | 457 | raise webob.exc.HTTPForbidden(_('Invalid API key')) |
|
465 | 458 | return au |
|
466 | 459 | |
|
467 | 460 | # Authenticate by session cookie |
|
468 | 461 | # In ancient login sessions, 'authuser' may not be a dict. |
|
469 | 462 | # In that case, the user will have to log in again. |
|
470 | 463 | # v0.3 and earlier included an 'is_authenticated' key; if present, |
|
471 | 464 | # this must be True. |
|
472 | 465 | if isinstance(session_authuser, dict) and session_authuser.get('is_authenticated', True): |
|
473 | 466 | try: |
|
474 | 467 | return AuthUser.from_cookie(session_authuser) |
|
475 | 468 | except UserCreationError as e: |
|
476 | 469 | # container auth or other auth functions that create users on |
|
477 | 470 | # the fly can throw UserCreationError to signal issues with |
|
478 | 471 | # user creation. Explanation should be provided in the |
|
479 | 472 | # exception object. |
|
480 | 473 | from kallithea.lib import helpers as h |
|
481 | 474 | h.flash(e, 'error', logf=log.error) |
|
482 | 475 | |
|
483 | 476 | # Authenticate by auth_container plugin (if enabled) |
|
484 | 477 | if any( |
|
485 | 478 | plugin.is_container_auth |
|
486 | 479 | for plugin in auth_modules.get_auth_plugins() |
|
487 | 480 | ): |
|
488 | 481 | try: |
|
489 | 482 | user_info = auth_modules.authenticate('', '', request.environ) |
|
490 | 483 | except UserCreationError as e: |
|
491 | 484 | from kallithea.lib import helpers as h |
|
492 | 485 | h.flash(e, 'error', logf=log.error) |
|
493 | 486 | else: |
|
494 | 487 | if user_info is not None: |
|
495 | 488 | username = user_info['username'] |
|
496 | 489 | user = User.get_by_username(username, case_insensitive=True) |
|
497 | 490 | return log_in_user(user, remember=False, |
|
498 | 491 | is_external_auth=True) |
|
499 | 492 | |
|
500 | 493 | # User is anonymous |
|
501 | 494 | return AuthUser() |
|
502 | 495 | |
|
503 | 496 | @staticmethod |
|
504 | 497 | def _basic_security_checks(): |
|
505 | 498 | """Perform basic security/sanity checks before processing the request.""" |
|
506 | 499 | |
|
507 | 500 | # Only allow the following HTTP request methods. |
|
508 | 501 | if request.method not in ['GET', 'HEAD', 'POST']: |
|
509 | 502 | raise webob.exc.HTTPMethodNotAllowed() |
|
510 | 503 | |
|
511 | 504 | # Also verify the _method override - no longer allowed. |
|
512 | 505 | if request.params.get('_method') is None: |
|
513 | 506 | pass # no override, no problem |
|
514 | 507 | else: |
|
515 | 508 | raise webob.exc.HTTPMethodNotAllowed() |
|
516 | 509 | |
|
517 | 510 | # Make sure CSRF token never appears in the URL. If so, invalidate it. |
|
518 | 511 | if secure_form.token_key in request.GET: |
|
519 | 512 | log.error('CSRF key leak detected') |
|
520 | 513 | session.pop(secure_form.token_key, None) |
|
521 | 514 | session.save() |
|
522 | 515 | from kallithea.lib import helpers as h |
|
523 | 516 | h.flash(_('CSRF token leak has been detected - all form tokens have been expired'), |
|
524 | 517 | category='error') |
|
525 | 518 | |
|
526 | 519 | # WebOb already ignores request payload parameters for anything other |
|
527 | 520 | # than POST/PUT, but double-check since other Kallithea code relies on |
|
528 | 521 | # this assumption. |
|
529 | 522 | if request.method not in ['POST', 'PUT'] and request.POST: |
|
530 | 523 | log.error('%r request with payload parameters; WebOb should have stopped this', request.method) |
|
531 | 524 | raise webob.exc.HTTPBadRequest() |
|
532 | 525 | |
|
533 | def __call__(self, environ, start_response): | |
|
534 | """Invoke the Controller""" | |
|
535 | ||
|
536 | # WSGIController.__call__ dispatches to the Controller method | |
|
537 | # the request is routed to. This routing information is | |
|
538 | # available in environ['pylons.routes_dict'] | |
|
526 | def __call__(self, environ, context): | |
|
539 | 527 | try: |
|
540 | 528 | request.ip_addr = _get_ip_addr(environ) |
|
541 | 529 | # make sure that we update permissions each time we call controller |
|
542 | 530 | |
|
543 | 531 | self._basic_security_checks() |
|
544 | 532 | |
|
545 | 533 | #set globals for auth user |
|
546 | 534 | |
|
547 | 535 | bearer_token = None |
|
548 | 536 | try: |
|
549 | 537 | # Request.authorization may raise ValueError on invalid input |
|
550 | 538 | type, params = request.authorization |
|
551 | 539 | except (ValueError, TypeError): |
|
552 | 540 | pass |
|
553 | 541 | else: |
|
554 | 542 | if type.lower() == 'bearer': |
|
555 | 543 | bearer_token = params |
|
556 | 544 | |
|
557 | 545 | request.authuser = request.user = self._determine_auth_user( |
|
558 | 546 | request.GET.get('api_key'), |
|
559 | 547 | bearer_token, |
|
560 | 548 | session.get('authuser'), |
|
561 | 549 | ) |
|
562 | 550 | |
|
563 | 551 | log.info('IP: %s User: %s accessed %s', |
|
564 | 552 | request.ip_addr, request.authuser, |
|
565 | 553 | safe_unicode(_get_access_path(environ)), |
|
566 | 554 | ) |
|
567 | return WSGIController.__call__(self, environ, start_response) | |
|
555 | return super(BaseController, self).__call__(environ, context) | |
|
568 | 556 | except webob.exc.HTTPException as e: |
|
569 | return e(environ, start_response) | |
|
570 | finally: | |
|
571 | meta.Session.remove() | |
|
557 | return e | |
|
572 | 558 | |
|
573 | 559 | |
|
574 | 560 | class BaseRepoController(BaseController): |
|
575 | 561 | """ |
|
576 | 562 | Base class for controllers responsible for loading all needed data for |

577 | 563 | a repository. Loaded items are: |
|
578 | 564 | |
|
579 | 565 | c.db_repo_scm_instance: instance of scm repository |
|
580 | 566 | c.db_repo: instance of db |
|
581 | 567 | c.repository_followers: number of followers |
|
582 | 568 | c.repository_forks: number of forks |
|
583 | 569 | c.repository_following: whether the current user is following the current repo |
|
584 | 570 | """ |
|
585 | 571 | |
|
586 | 572 | def _before(self, *args, **kwargs): |
|
587 | 573 | super(BaseRepoController, self)._before(*args, **kwargs) |
|
588 | 574 | if c.repo_name: # extracted from routes |
|
589 | 575 | _dbr = Repository.get_by_repo_name(c.repo_name) |
|
590 | 576 | if not _dbr: |
|
591 | 577 | return |
|
592 | 578 | |
|
593 | 579 | log.debug('Found repository in database %s with state `%s`', |
|
594 | 580 | safe_unicode(_dbr), safe_unicode(_dbr.repo_state)) |
|
595 | 581 | route = getattr(request.environ.get('routes.route'), 'name', '') |
|
596 | 582 | |
|
597 | 583 | # allow deleting repos that are somehow damaged in the filesystem |
|
598 | 584 | if route in ['delete_repo']: |
|
599 | 585 | return |
|
600 | 586 | |
|
601 | 587 | if _dbr.repo_state in [Repository.STATE_PENDING]: |
|
602 | 588 | if route in ['repo_creating_home']: |
|
603 | 589 | return |
|
604 | 590 | check_url = url('repo_creating_home', repo_name=c.repo_name) |
|
605 | 591 | raise webob.exc.HTTPFound(location=check_url) |
|
606 | 592 | |
|
607 | 593 | dbr = c.db_repo = _dbr |
|
608 | 594 | c.db_repo_scm_instance = c.db_repo.scm_instance |
|
609 | 595 | if c.db_repo_scm_instance is None: |
|
610 | 596 | log.error('%s this repository is present in database but it ' |
|
611 | 597 | 'cannot be created as an scm instance', c.repo_name) |
|
612 | 598 | from kallithea.lib import helpers as h |
|
613 | 599 | h.flash(h.literal(_('Repository not found in the filesystem')), |
|
614 | 600 | category='error') |
|
615 | 601 | raise paste.httpexceptions.HTTPNotFound() |
|
616 | 602 | |
|
617 | 603 | # some globals counter for menu |
|
618 | 604 | c.repository_followers = self.scm_model.get_followers(dbr) |
|
619 | 605 | c.repository_forks = self.scm_model.get_forks(dbr) |
|
620 | 606 | c.repository_pull_requests = self.scm_model.get_pull_requests(dbr) |
|
621 | 607 | c.repository_following = self.scm_model.is_following_repo( |
|
622 | 608 | c.repo_name, request.authuser.user_id) |
|
623 | 609 | |
|
624 | 610 | @staticmethod |
|
625 | 611 | def _get_ref_rev(repo, ref_type, ref_name, returnempty=False): |
|
626 | 612 | """ |
|
627 | 613 | Safe way to get changeset. If error occurs show error. |
|
628 | 614 | """ |
|
629 | 615 | from kallithea.lib import helpers as h |
|
630 | 616 | try: |
|
631 | 617 | return repo.scm_instance.get_ref_revision(ref_type, ref_name) |
|
632 | 618 | except EmptyRepositoryError as e: |
|
633 | 619 | if returnempty: |
|
634 | 620 | return repo.scm_instance.EMPTY_CHANGESET |
|
635 | 621 | h.flash(h.literal(_('There are no changesets yet')), |
|
636 | 622 | category='error') |
|
637 | 623 | raise webob.exc.HTTPNotFound() |
|
638 | 624 | except ChangesetDoesNotExistError as e: |
|
639 | 625 | h.flash(h.literal(_('Changeset for %s %s not found in %s') % |
|
640 | 626 | (ref_type, ref_name, repo.repo_name)), |
|
641 | 627 | category='error') |
|
642 | 628 | raise webob.exc.HTTPNotFound() |
|
643 | 629 | except RepositoryError as e: |
|
644 | 630 | log.error(traceback.format_exc()) |
|
645 | 631 | h.flash(safe_str(e), category='error') |
|
646 | 632 | raise webob.exc.HTTPBadRequest() |
|
647 | 633 | |
|
648 | 634 | |
|
649 | 635 | class WSGIResultCloseCallback(object): |
|
650 | 636 | """Wrap a WSGI result and let close call close after calling the |
|
651 | 637 | close method on the result. |
|
652 | 638 | """ |
|
653 | 639 | def __init__(self, result, close): |
|
654 | 640 | self._result = result |
|
655 | 641 | self._close = close |
|
656 | 642 | |
|
657 | 643 | def __iter__(self): |
|
658 | 644 | return iter(self._result) |
|
659 | 645 | |
|
660 | 646 | def close(self): |
|
661 | 647 | if hasattr(self._result, 'close'): |
|
662 | 648 | self._result.close() |
|
663 | 649 | self._close() |
|
664 | 650 | |
|
665 | 651 | |
|
666 | 652 | @decorator.decorator |
|
667 | 653 | def jsonify(func, *args, **kwargs): |
|
668 | 654 | """Action decorator that formats output for JSON |
|
669 | 655 | |
|
670 | 656 | Given a function that will return content, this decorator will turn |
|
671 | 657 | the result into JSON, with a content-type of 'application/json' and |
|
672 | 658 | output it. |
|
673 | 659 | """ |
|
674 | 660 | response.headers['Content-Type'] = 'application/json; charset=utf-8' |
|
675 | 661 | data = func(*args, **kwargs) |
|
676 | 662 | if isinstance(data, (list, tuple)): |
|
677 | 663 | # A JSON list response is syntactically valid JavaScript and can be |
|
678 | 664 | # loaded and executed as JavaScript by a malicious third-party site |
|
679 | 665 | # using <script>, which can lead to cross-site data leaks. |
|
680 | 666 | # JSON responses should therefore be scalars or objects (i.e. Python |
|
681 | 667 | # dicts), because a JSON object is a syntax error if interpreted as JS. |
|
682 | 668 | msg = "JSON responses with Array envelopes are susceptible to " \ |
|
683 | 669 | "cross-site data leak attacks, see " \ |
|
684 | 670 | "https://web.archive.org/web/20120519231904/http://wiki.pylonshq.com/display/pylonsfaq/Warnings" |
|
685 | 671 | warnings.warn(msg, Warning, 2) |
|
686 | 672 | log.warning(msg) |
|
687 | 673 | log.debug("Returning JSON wrapped action output") |
|
688 | 674 | return json.dumps(data, encoding='utf-8') |
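
The jsonify warning above deserves a concrete illustration: a top-level JSON array is a syntactically valid JavaScript program, so a hostile page could historically load it via a script tag and observe the data, whereas a top-level object at statement position is a JS syntax error. A sketch of the safe envelope (the payload is illustrative):

    import json

    def list_repos():
        repos = ['repo1', 'repo2']  # illustrative payload
        # Unsafe: '["repo1", "repo2"]' is a valid JS expression statement,
        # so <script src="..."> pointing at this URL would execute it.
        # Safe: '{"repos": [...]}' at statement position starts a block
        # where a string label is illegal, i.e. a syntax error.
        return json.dumps({'repos': repos})
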
@@ -1,108 +1,108 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.paster_commands.common |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Common code for gearbox commands. |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 18, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import os |
|
29 | 29 | import sys |
|
30 | 30 | import logging.config |
|
31 | 31 | |
|
32 | 32 | import paste.deploy |
|
33 | 33 | import gearbox.command |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | def ask_ok(prompt, retries=4, complaint='Yes or no please!'): |
|
37 | 37 | while True: |
|
38 | 38 | ok = raw_input(prompt) |
|
39 | 39 | if ok in ('y', 'ye', 'yes'): |
|
40 | 40 | return True |
|
41 | 41 | if ok in ('n', 'no', 'nop', 'nope'): |
|
42 | 42 | return False |
|
43 | 43 | retries = retries - 1 |
|
44 | 44 | if retries < 0: |
|
45 | 45 | raise IOError |
|
46 | 46 | print complaint |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class BasePasterCommand(gearbox.command.Command): |
|
50 | 50 | """ |
|
51 | 51 | Abstract Base Class for gearbox commands. |
|
52 | 52 | """ |
|
53 | 53 | |
|
54 | 54 | # override to control how much get_parser and run should do: |
|
55 | 55 | takes_config_file = True |
|
56 | 56 | requires_db_session = True |
|
57 | 57 | |
|
58 | 58 | def run(self, args): |
|
59 | 59 | """ |
|
60 | 60 | Overrides Command.run |
|
61 | 61 | |
|
62 | 62 | Checks for a config file argument and loads it. |
|
63 | 63 | """ |
|
64 | 64 | if self.takes_config_file: |
|
65 | 65 | self._bootstrap_config(args.config_file) |
|
66 | 66 | if self.requires_db_session: |
|
67 | 67 | self._init_session() |
|
68 | 68 | |
|
69 | 69 | return super(BasePasterCommand, self).run(args) |
|
70 | 70 | |
|
71 | 71 | def get_parser(self, prog_name): |
|
72 | 72 | parser = super(BasePasterCommand, self).get_parser(prog_name) |
|
73 | 73 | |
|
74 | 74 | if self.takes_config_file: |
|
75 | 75 | parser.add_argument("-c", "--config", |
|
76 | 76 | help='Kallithea .ini file with configuration of database etc', |
|
77 | 77 | dest='config_file', required=True) |
|
78 | 78 | |
|
79 | 79 | return parser |
|
80 | 80 | |
|
81 | 81 | def _bootstrap_config(self, config_file): |
|
82 | 82 | """ |
|
83 | 83 | Read the config file and initialize logging and the application. |
|
84 | 84 | """ |
|
85 | from tg import config as pylonsconfig | |
|
85 | from kallithea.config.middleware import make_app | |
|
86 | 86 | |
|
87 | 87 | path_to_ini_file = os.path.realpath(config_file) |
|
88 | 88 | conf = paste.deploy.appconfig('config:' + path_to_ini_file) |
|
89 | 89 | logging.config.fileConfig(path_to_ini_file) |
|
90 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) | |
|
90 | make_app(conf.global_conf, **conf.local_conf) | |
|
91 | 91 | |
|
92 | 92 | def _init_session(self): |
|
93 | 93 | """ |
|
94 | 94 | Initialize SqlAlchemy Session from global config. |
|
95 | 95 | """ |
|
96 | 96 | |
|
97 | 97 | from tg import config |
|
98 | 98 | from kallithea.model.base import init_model |
|
99 | 99 | from kallithea.lib.utils2 import engine_from_config |
|
100 | 100 | from kallithea.lib.utils import setup_cache_regions |
|
101 | 101 | setup_cache_regions(config) |
|
102 | 102 | engine = engine_from_config(config, 'sqlalchemy.') |
|
103 | 103 | init_model(engine) |
|
104 | 104 | |
|
105 | 105 | def error(self, msg, exitcode=1): |
|
106 | 106 | """Write error message and exit""" |
|
107 | 107 | sys.stderr.write('%s\n' % msg) |
|
108 | 108 | raise SystemExit(exitcode) |
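
_bootstrap_config above no longer initializes a Pylons config object; it builds the full TurboGears application, which populates tg.config as a side effect so that _init_session can later read database settings from it. The same sequence as a standalone sketch (the .ini path is illustrative):

    import logging.config
    import paste.deploy

    from kallithea.config.middleware import make_app

    ini_path = '/srv/kallithea/my.ini'  # illustrative path
    conf = paste.deploy.appconfig('config:' + ini_path)
    logging.config.fileConfig(ini_path)
    # the returned WSGI app is discarded; the point is the side effect
    # of a fully populated tg.config for later database access
    make_app(conf.global_conf, **conf.local_conf)
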
@@ -1,100 +1,100 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.paster_commands.make_index |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | make-index gearbox command for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Aug 17, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | import os |
|
30 | 30 | import sys |
|
31 | 31 | from os.path import dirname |
|
32 | 32 | |
|
33 | 33 | from string import strip |
|
34 | 34 | from kallithea.model.repo import RepoModel |
|
35 | 35 | from kallithea.lib.paster_commands.common import BasePasterCommand |
|
36 | 36 | from kallithea.lib.utils import load_rcextensions |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | class Command(BasePasterCommand): |
|
40 | 40 | "Kallithea: Create or update full text search index" |
|
41 | 41 | |
|
42 | 42 | def take_action(self, args): |
|
43 | from pylons import config | |
|
43 | from tg import config | |
|
44 | 44 | index_location = config['index_dir'] |
|
45 | 45 | load_rcextensions(config['here']) |
|
46 | 46 | |
|
47 | 47 | repo_location = args.repo_location \ |
|
48 | 48 | if args.repo_location else RepoModel().repos_path |
|
49 | 49 | repo_list = map(strip, args.repo_list.split(',')) \ |
|
50 | 50 | if args.repo_list else None |
|
51 | 51 | |
|
52 | 52 | repo_update_list = map(strip, args.repo_update_list.split(',')) \ |
|
53 | 53 | if args.repo_update_list else None |
|
54 | 54 | |
|
55 | 55 | #====================================================================== |
|
56 | 56 | # WHOOSH DAEMON |
|
57 | 57 | #====================================================================== |
|
58 | 58 | from kallithea.lib.pidlock import LockHeld, DaemonLock |
|
59 | 59 | from kallithea.lib.indexers.daemon import WhooshIndexingDaemon |
|
60 | 60 | try: |
|
61 | 61 | l = DaemonLock(file_=os.path.join(dirname(dirname(index_location)), |
|
62 | 62 | 'make_index.lock')) |
|
63 | 63 | WhooshIndexingDaemon(index_location=index_location, |
|
64 | 64 | repo_location=repo_location, |
|
65 | 65 | repo_list=repo_list, |
|
66 | 66 | repo_update_list=repo_update_list) \ |
|
67 | 67 | .run(full_index=args.full_index) |
|
68 | 68 | l.release() |
|
69 | 69 | except LockHeld: |
|
70 | 70 | sys.exit(1) |
|
71 | 71 | |
|
72 | 72 | def get_parser(self, prog_name): |
|
73 | 73 | parser = super(Command, self).get_parser(prog_name) |
|
74 | 74 | |
|
75 | 75 | parser.add_argument('--repo-location', |
|
76 | 76 | action='store', |
|
77 | 77 | dest='repo_location', |
|
78 | 78 | help="Specifies repositories location to index OPTIONAL", |
|
79 | 79 | ) |
|
80 | 80 | parser.add_argument('--index-only', |
|
81 | 81 | action='store', |
|
82 | 82 | dest='repo_list', |
|
83 | 83 | help="Specifies a comma separated list of repositories " |
|
84 | 84 | "to build index on. If not given all repositories " |
|
85 | 85 | "are scanned for indexing. OPTIONAL", |
|
86 | 86 | ) |
|
87 | 87 | parser.add_argument('--update-only', |
|
88 | 88 | action='store', |
|
89 | 89 | dest='repo_update_list', |
|
90 | 90 | help="Specifies a comma separated list of repositories " |
|
91 | 91 | "to re-build index on. OPTIONAL", |
|
92 | 92 | ) |
|
93 | 93 | parser.add_argument('-f', |
|
94 | 94 | action='store_true', |
|
95 | 95 | dest='full_index', |
|
96 | 96 | help="Specifies that index should be made full i.e" |
|
97 | 97 | " destroy old and build from scratch", |
|
98 | 98 | default=False) |
|
99 | 99 | |
|
100 | 100 | return parser |
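
The try/LockHeld block above uses a pid-file lock so that two indexing runs cannot overlap; a second invocation exits non-zero instead of competing for the Whoosh index. The pattern in isolation (the lock path and the do_indexing stand-in are illustrative):

    import sys
    from kallithea.lib.pidlock import DaemonLock, LockHeld

    def do_indexing():
        pass  # stand-in for WhooshIndexingDaemon(...).run(...)

    try:
        lock = DaemonLock(file_='/srv/kallithea/data/make_index.lock')  # illustrative path
        do_indexing()
        lock.release()  # only released on success, as in take_action above
    except LockHeld:
        # another run already owns the lock; exit non-zero like the command
        sys.exit(1)
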
@@ -1,66 +1,66 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.paster_commands.make_rcextensions |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | make-rcext gearbox command for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Mar 6, 2012 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | import os |
|
30 | 30 | import sys |
|
31 | 31 | import pkg_resources |
|
32 | 32 | |
|
33 | 33 | from kallithea.lib.paster_commands.common import ask_ok, BasePasterCommand |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | class Command(BasePasterCommand): |
|
37 | 37 | """Kallithea: Write template file for extending Kallithea in Python |
|
38 | 38 | |
|
39 | 39 | A rcextensions directory with a __init__.py file will be created next to |
|
40 | 40 | the ini file. Local customizations in that file will survive upgrades. |
|
41 | 41 | The file contains instructions on how it can be customized. |
|
42 | 42 | """ |
|
43 | 43 | |
|
44 | 44 | takes_config_file = False |
|
45 | 45 | |
|
46 | 46 | def take_action(self, args): |
|
47 | from pylons import config | |
|
47 | from tg import config | |
|
48 | 48 | |
|
49 | 49 | here = config['here'] |
|
50 | 50 | content = pkg_resources.resource_string( |
|
51 | 51 | 'kallithea', os.path.join('config', 'rcextensions', '__init__.py') |
|
52 | 52 | ) |
|
53 | 53 | ext_file = os.path.join(here, 'rcextensions', '__init__.py') |
|
54 | 54 | if os.path.exists(ext_file): |
|
55 | 55 | msg = ('Extension file already exists, do you want ' |
|
56 | 56 | 'to overwrite it ? [y/n]') |
|
57 | 57 | if not ask_ok(msg): |
|
58 | 58 | print 'Nothing done, exiting...' |
|
59 | 59 | return |
|
60 | 60 | |
|
61 | 61 | dirname = os.path.dirname(ext_file) |
|
62 | 62 | if not os.path.isdir(dirname): |
|
63 | 63 | os.makedirs(dirname) |
|
64 | 64 | with open(ext_file, 'wb') as f: |
|
65 | 65 | f.write(content) |
|
66 | 66 | print 'Wrote new extensions file to %s' % ext_file |
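
take_action above reads the rcextensions template with pkg_resources, which works whether the installed kallithea package lives in a plain directory or a zipped egg, and then writes it next to the ini file. The core idiom, reduced to a sketch (the target path is illustrative):

    import os
    import pkg_resources

    # read the template that ships inside the installed kallithea package
    content = pkg_resources.resource_string(
        'kallithea', os.path.join('config', 'rcextensions', '__init__.py'))

    target = os.path.join('/srv/kallithea', 'rcextensions', '__init__.py')  # illustrative
    if not os.path.isdir(os.path.dirname(target)):
        os.makedirs(os.path.dirname(target))
    with open(target, 'wb') as f:
        f.write(content)
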
@@ -1,610 +1,616 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | <%text>################################################################################</%text> |
|
3 | 3 | <%text>################################################################################</%text> |
|
4 | 4 | # Kallithea - config file generated with kallithea-config # |
|
5 | 5 | <%text>################################################################################</%text> |
|
6 | 6 | <%text>################################################################################</%text> |
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | 10 | pdebug = false |
|
11 | 11 | |
|
12 | 12 | <%text>################################################################################</%text> |
|
13 | 13 | <%text>## Email settings ##</%text> |
|
14 | 14 | <%text>## ##</%text> |
|
15 | 15 | <%text>## Refer to the documentation ("Email settings") for more details. ##</%text> |
|
16 | 16 | <%text>## ##</%text> |
|
17 | 17 | <%text>## It is recommended to use a valid sender address that passes access ##</%text> |
|
18 | 18 | <%text>## validation and spam filtering in mail servers. ##</%text> |
|
19 | 19 | <%text>################################################################################</%text> |
|
20 | 20 | |
|
21 | 21 | <%text>## 'From' header for application emails. You can optionally add a name.</%text> |
|
22 | 22 | <%text>## Default:</%text> |
|
23 | 23 | #app_email_from = Kallithea |
|
24 | 24 | <%text>## Examples:</%text> |
|
25 | 25 | #app_email_from = Kallithea <kallithea-noreply@example.com> |
|
26 | 26 | #app_email_from = kallithea-noreply@example.com |
|
27 | 27 | |
|
28 | 28 | <%text>## Subject prefix for application emails.</%text> |
|
29 | 29 | <%text>## A space between this prefix and the real subject is automatically added.</%text> |
|
30 | 30 | <%text>## Default:</%text> |
|
31 | 31 | #email_prefix = |
|
32 | 32 | <%text>## Example:</%text> |
|
33 | 33 | #email_prefix = [Kallithea] |
|
34 | 34 | |
|
35 | 35 | <%text>## Recipients for error emails and fallback recipients of application mails.</%text> |
|
36 | 36 | <%text>## Multiple addresses can be specified, space-separated.</%text> |
|
37 | 37 | <%text>## Only addresses are allowed, do not add any name part.</%text> |
|
38 | 38 | <%text>## Default:</%text> |
|
39 | 39 | #email_to = |
|
40 | 40 | <%text>## Examples:</%text> |
|
41 | 41 | #email_to = admin@example.com |
|
42 | 42 | #email_to = admin@example.com another_admin@example.com |
|
43 | 43 | |
|
44 | 44 | <%text>## 'From' header for error emails. You can optionally add a name.</%text> |
|
45 | 45 | <%text>## Default:</%text> |
|
46 | 46 | #error_email_from = pylons@yourapp.com |
|
47 | 47 | <%text>## Examples:</%text> |
|
48 | 48 | #error_email_from = Kallithea Errors <kallithea-noreply@example.com> |
|
49 | 49 | #error_email_from = paste_error@example.com |
|
50 | 50 | |
|
51 | 51 | <%text>## SMTP server settings</%text> |
|
52 | 52 | <%text>## If specifying credentials, make sure to use secure connections.</%text> |
|
53 | 53 | <%text>## Default: Send unencrypted unauthenticated mails to the specified smtp_server.</%text> |
|
54 | 54 | <%text>## For "SSL", use smtp_use_ssl = true and smtp_port = 465.</%text> |
|
55 | 55 | <%text>## For "STARTTLS", use smtp_use_tls = true and smtp_port = 587.</%text> |
|
56 | 56 | #smtp_server = smtp.example.com |
|
57 | 57 | #smtp_username = |
|
58 | 58 | #smtp_password = |
|
59 | 59 | #smtp_port = 25 |
|
60 | 60 | #smtp_use_ssl = false |
|
61 | 61 | #smtp_use_tls = false |
|
62 | 62 | |
|
63 | 63 | [server:main] |
|
64 | 64 | %if http_server == 'gearbox': |
|
65 | 65 | <%text>## Gearbox default web server ##</%text> |
|
66 | 66 | use = egg:gearbox#wsgiref |
|
67 | 67 | <%text>## nr of worker threads to spawn</%text> |
|
68 | 68 | threadpool_workers = 1 |
|
69 | 69 | <%text>## max requests before a thread respawns</%text>
|
70 | 70 | threadpool_max_requests = 100 |
|
71 | 71 | <%text>## option to use a thread pool for handling requests</%text>
|
72 | 72 | use_threadpool = true |
|
73 | 73 | |
|
74 | 74 | %elif http_server == 'gevent': |
|
75 | 75 | <%text>## Gearbox gevent web server ##</%text> |
|
76 | 76 | use = egg:gearbox#gevent |
|
77 | 77 | |
|
78 | 78 | %elif http_server == 'waitress': |
|
79 | 79 | <%text>## WAITRESS ##</%text> |
|
80 | 80 | use = egg:waitress#main |
|
81 | 81 | <%text>## number of worker threads</%text> |
|
82 | 82 | threads = 1 |
|
83 | 83 | <%text>## MAX BODY SIZE 100GB</%text> |
|
84 | 84 | max_request_body_size = 107374182400 |
|
85 | 85 | <%text>## use poll instead of select; fixes fd limit issues, but may not work on old</%text>

86 | 86 | <%text>## Windows systems.</%text>
|
87 | 87 | #asyncore_use_poll = True |
|
88 | 88 | |
|
89 | 89 | %elif http_server == 'gunicorn': |
|
90 | 90 | <%text>## GUNICORN ##</%text> |
|
91 | 91 | use = egg:gunicorn#main |
|
92 | 92 | <%text>## number of worker processes. You must set `instance_id = *` when this option</%text>

93 | 93 | <%text>## is set to more than one worker</%text>
|
94 | 94 | workers = 1 |
|
95 | 95 | <%text>## process name</%text> |
|
96 | 96 | proc_name = kallithea |
|
97 | 97 | <%text>## type of worker class, one of sync, eventlet, gevent, tornado</%text> |
|
98 | 98 | <%text>## for larger setups it is recommended to use a worker class other than sync</%text>
|
99 | 99 | worker_class = sync |
|
100 | 100 | max_requests = 1000 |
|
101 | 101 | <%text>## amount of time (in seconds) a worker can handle a request before it is</%text>

102 | 102 | <%text>## killed and restarted</%text>
|
103 | 103 | timeout = 3600 |
|
104 | 104 | |
|
105 | 105 | %elif http_server == 'uwsgi': |
|
106 | 106 | <%text>## UWSGI ##</%text> |
|
107 | 107 | <%text>## run with uwsgi --ini-paste-logged <inifile.ini></%text> |
|
108 | 108 | [uwsgi] |
|
109 | 109 | socket = /tmp/uwsgi.sock |
|
110 | 110 | master = true |
|
111 | 111 | http = 127.0.0.1:5000 |
|
112 | 112 | |
|
113 | 113 | <%text>## run as a daemon and redirect all output to a file</%text>
|
114 | 114 | #daemonize = ./uwsgi_kallithea.log |
|
115 | 115 | |
|
116 | 116 | <%text>## master process PID</%text> |
|
117 | 117 | pidfile = ./uwsgi_kallithea.pid |
|
118 | 118 | |
|
119 | 119 | <%text>## stats server with worker statistics; use uwsgitop</%text>

120 | 120 | <%text>## for monitoring, e.g. `uwsgitop 127.0.0.1:1717`</%text>
|
121 | 121 | stats = 127.0.0.1:1717 |
|
122 | 122 | memory-report = true |
|
123 | 123 | |
|
124 | 124 | <%text>## log 5XX errors</%text> |
|
125 | 125 | log-5xx = true |
|
126 | 126 | |
|
127 | 127 | <%text>## Set the socket listen queue size.</%text> |
|
128 | 128 | listen = 256 |
|
129 | 129 | |
|
130 | 130 | <%text>## Gracefully reload workers after the specified number of handled requests</%text>

131 | 131 | <%text>## (avoids memory leaks).</%text>
|
132 | 132 | max-requests = 1000 |
|
133 | 133 | |
|
134 | 134 | <%text>## enable large buffers</%text> |
|
135 | 135 | buffer-size = 65535 |
|
136 | 136 | |
|
137 | 137 | <%text>## socket and http timeouts ##</%text> |
|
138 | 138 | http-timeout = 3600 |
|
139 | 139 | socket-timeout = 3600 |
|
140 | 140 | |
|
141 | 141 | <%text>## Log requests slower than the specified number of milliseconds.</%text> |
|
142 | 142 | log-slow = 10 |
|
143 | 143 | |
|
144 | 144 | <%text>## Exit if no app can be loaded.</%text> |
|
145 | 145 | need-app = true |
|
146 | 146 | |
|
147 | 147 | <%text>## Set lazy mode (load apps in workers instead of master).</%text> |
|
148 | 148 | lazy = true |
|
149 | 149 | |
|
150 | 150 | <%text>## scaling ##</%text> |
|
151 | 151 | <%text>## set the cheaper (worker scaling) algorithm to use; if not set, the default is used</%text>
|
152 | 152 | cheaper-algo = spare |
|
153 | 153 | |
|
154 | 154 | <%text>## minimum number of workers to keep at all times</%text> |
|
155 | 155 | cheaper = 1 |
|
156 | 156 | |
|
157 | 157 | <%text>## number of workers to spawn at startup</%text> |
|
158 | 158 | cheaper-initial = 1 |
|
159 | 159 | |
|
160 | 160 | <%text>## maximum number of workers that can be spawned</%text> |
|
161 | 161 | workers = 4 |
|
162 | 162 | |
|
163 | 163 | <%text>## how many workers should be spawned at a time</%text> |
|
164 | 164 | cheaper-step = 1 |
|
165 | 165 | |
|
166 | 166 | %endif |
|
167 | 167 | <%text>## COMMON ##</%text> |
|
168 | 168 | host = ${host} |
|
169 | 169 | port = ${port} |
|
170 | 170 | |
|
171 | 171 | <%text>## middleware for hosting the WSGI application under a URL prefix</%text> |
|
172 | 172 | #[filter:proxy-prefix] |
|
173 | 173 | #use = egg:PasteDeploy#prefix |
|
174 | 174 | #prefix = /<your-prefix> |
|
175 | 175 | |
|
176 | 176 | [app:main] |
|
177 | 177 | use = egg:kallithea |
|
178 | 178 | <%text>## enable proxy prefix middleware</%text> |
|
179 | 179 | #filter-with = proxy-prefix |
|
180 | 180 | |
|
181 | 181 | full_stack = true |
|
182 | 182 | static_files = true |
|
183 | 183 | <%text>## Available Languages:</%text> |
|
184 | 184 | <%text>## cs de fr hu ja nl_BE pl pt_BR ru sk zh_CN zh_TW</%text> |
|
185 | 185 | lang = |
|
186 | 186 | cache_dir = ${here}/data |
|
187 | 187 | index_dir = ${here}/data/index |
|
188 | 188 | |
|
189 | 189 | <%text>## perform a full repository scan on each server start; this should be</%text>

190 | 190 | <%text>## set to false after the first startup to allow faster server restarts.</%text>
|
191 | 191 | initial_repo_scan = false |
|
192 | 192 | |
|
193 | 193 | <%text>## path to use for the archive download cache; comment out to disable</%text>
|
194 | 194 | archive_cache_dir = ${here}/tarballcache |
|
195 | 195 | |
|
196 | 196 | <%text>## change this to a unique ID for security</%text>
|
197 | 197 | app_instance_uuid = ${uuid()} |
|
198 | 198 | |
|
199 | 199 | <%text>## cut off limit for large diffs (size in bytes)</%text> |
|
200 | 200 | cut_off_limit = 256000 |
|
201 | 201 | |
|
202 | 202 | <%text>## force https in Kallithea; fixes https redirects, assuming the site is always served over https</%text>
|
203 | 203 | force_https = false |
|
204 | 204 | |
|
205 | 205 | <%text>## use Strict-Transport-Security headers</%text> |
|
206 | 206 | use_htsts = false |
|
207 | 207 | |
|
208 | 208 | <%text>## number of commits the statistics gatherer will parse on each iteration</%text>
|
209 | 209 | commit_parse_limit = 25 |
|
210 | 210 | |
|
211 | 211 | <%text>## path to git executable</%text> |
|
212 | 212 | git_path = git |
|
213 | 213 | |
|
214 | 214 | <%text>## git rev filter option; --all is the default filter. If you need to</%text>

215 | 215 | <%text>## hide refs other than branches and tags in the changelog, switch this to --branches --tags</%text>
|
216 | 216 | #git_rev_filter = --branches --tags |
|
217 | 217 | |
|
218 | 218 | <%text>## RSS feed options</%text> |
|
219 | 219 | rss_cut_off_limit = 256000 |
|
220 | 220 | rss_items_per_page = 10 |
|
221 | 221 | rss_include_diff = false |
|
222 | 222 | |
|
223 | 223 | <%text>## options for showing and identifying changesets</%text> |
|
224 | 224 | show_sha_length = 12 |
|
225 | 225 | show_revision_number = false |
|
226 | 226 | |
|
227 | 227 | <%text>## Canonical URL to use when creating full URLs in UI and texts.</%text> |
|
228 | 228 | <%text>## Useful when the site is available under different names or protocols.</%text> |
|
229 | 229 | <%text>## Defaults to what is provided in the WSGI environment.</%text> |
|
230 | 230 | #canonical_url = https://kallithea.example.com/repos |
|
231 | 231 | |
|
232 | 232 | <%text>## gist URL alias, used to create nicer URLs for gists. This should be a</%text>

233 | 233 | <%text>## URL that rewrites to _admin/gists/<gistid>.</%text>

234 | 234 | <%text>## Example: http://gist.example.com/{gistid}. Empty means use the internal</%text>

235 | 235 | <%text>## Kallithea URL, i.e. http[s]://kallithea.example.com/_admin/gists/<gistid></%text>
|
236 | 236 | gist_alias_url = |
|
237 | 237 | |
|
238 | 238 | <%text>## whitelist of API-enabled controllers. This allows specifying a list of</%text>

239 | 239 | <%text>## controllers to which access will be enabled by api_key, e.g. to enable</%text>

240 | 240 | <%text>## API access to raw_files put `FilesController:raw`; to enable access to patches</%text>

241 | 241 | <%text>## add `ChangesetController:changeset_patch`. This list should be "," separated.</%text>

242 | 242 | <%text>## Syntax is <ControllerClass>:<function>. Check debug logs for generated names.</%text>

243 | 243 | <%text>## Recommended settings below are commented out:</%text>
|
244 | 244 | api_access_controllers_whitelist = |
|
245 | 245 | # ChangesetController:changeset_patch, |
|
246 | 246 | # ChangesetController:changeset_raw, |
|
247 | 247 | # FilesController:raw, |
|
248 | 248 | # FilesController:archivefile |
|
249 | 249 | |
|
250 | 250 | <%text>## default encoding used to convert to and from unicode</%text>

251 | 251 | <%text>## can also be a comma-separated list of encodings in case of mixed encodings</%text>
|
252 | 252 | default_encoding = utf8 |
|
253 | 253 | |
|
254 | 254 | <%text>## issue tracker for Kallithea (leave blank to disable, absent for default)</%text> |
|
255 | 255 | #bugtracker = https://bitbucket.org/conservancy/kallithea/issues |
|
256 | 256 | |
|
257 | 257 | <%text>## issue tracking mapping for commit messages</%text>

258 | 258 | <%text>## comment out issue_pat, issue_server_link and issue_prefix to fall back to the defaults</%text>
|
259 | 259 | |
|
260 | 260 | <%text>## pattern to extract issue references from commit messages</%text>

261 | 261 | <%text>## the default one used here is #<number>, with a regex non-capturing group for `#`</%text>

262 | 262 | <%text>## {id} will be all groups matched from this pattern</%text>
|
263 | 263 | |
|
264 | 264 | issue_pat = (?:\s*#)(\d+) |
|
265 | 265 | |
|
266 | 266 | <%text>## server URL of the issue; each {id} will be replaced with the match</%text>

267 | 267 | <%text>## fetched from the regex, {repo} is replaced with the full repository name</%text>

268 | 268 | <%text>## including groups, and {repo_name} is replaced with just the name of the repo</%text>
|
269 | 269 | |
|
270 | 270 | issue_server_link = https://issues.example.com/{repo}/issue/{id} |
|
271 | 271 | |
|
272 | 272 | <%text>## prefix to add to the link text to indicate it's a URL</%text>

273 | 273 | <%text>## #314 will be replaced by <issue_prefix><id></%text>
|
274 | 274 | |
|
275 | 275 | issue_prefix = # |
|
276 | 276 | |
|
277 | 277 | <%text>## issue_pat, issue_server_link and issue_prefix can have suffixes to specify</%text>

278 | 278 | <%text>## multiple patterns, for other issue servers, wikis or other targets</%text>

279 | 279 | <%text>## below is an example of how to create a wiki pattern</%text>
|
280 | 280 | # wiki-some-id -> https://wiki.example.com/some-id |
|
281 | 281 | |
|
282 | 282 | #issue_pat_wiki = (?:wiki-)(.+) |
|
283 | 283 | #issue_server_link_wiki = https://wiki.example.com/{id} |
|
284 | 284 | #issue_prefix_wiki = WIKI- |
|
285 | 285 | |
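As an illustration of how these three settings work together, here is a minimal standalone sketch (plain Python, outside the Mako template; the commit message and repository name are invented) showing how the default pattern extracts issue ids and how {repo} and {id} are substituted:

    import re

    issue_pat = r'(?:\s*#)(\d+)'  # the default pattern from above
    issue_server_link = 'https://issues.example.com/{repo}/issue/{id}'
    issue_prefix = '#'

    message = 'fix login crash, closes #314'
    for match in re.finditer(issue_pat, message):
        issue_id = match.group(1)
        url = issue_server_link.replace('{repo}', 'group/repo') \
                               .replace('{id}', issue_id)
        # the reference is rendered as <issue_prefix><id> linking to url
        print('%s%s -> %s' % (issue_prefix, issue_id, url))
        # prints: #314 -> https://issues.example.com/group/repo/issue/314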
|
286 | 286 | <%text>## alternative HTTP return code for failed authentication. The default HTTP</%text>

287 | 287 | <%text>## response is 401 HTTPUnauthorized. Currently Mercurial clients have trouble</%text>

288 | 288 | <%text>## handling that; set this variable to 403 to return HTTPForbidden instead</%text>
|
289 | 289 | auth_ret_code = |
|
290 | 290 | |
|
291 | 291 | <%text>## locking return code. When a repository is locked, return this HTTP code. 2XX</%text>

292 | 292 | <%text>## codes don't break the transactions while 4XX codes do</%text>
|
293 | 293 | lock_ret_code = 423 |
|
294 | 294 | |
|
295 | 295 | <%text>## allow changing the repository location on the settings page</%text>
|
296 | 296 | allow_repo_location_change = True |
|
297 | 297 | |
|
298 | 298 | <%text>## allow setting up custom hooks on the settings page</%text>
|
299 | 299 | allow_custom_hooks_settings = True |
|
300 | 300 | |
|
301 | 301 | <%text>## extra extensions for indexing, space separated and without the leading '.'.</%text> |
|
302 | 302 | # index.extensions = |
|
303 | 303 | # gemfile |
|
304 | 304 | # lock |
|
305 | 305 | |
|
306 | 306 | <%text>## extra filenames for indexing, space separated</%text> |
|
307 | 307 | # index.filenames = |
|
308 | 308 | # .dockerignore |
|
309 | 309 | # .editorconfig |
|
310 | 310 | # INSTALL |
|
311 | 311 | # CHANGELOG |
|
312 | 312 | |
|
313 | 313 | <%text>####################################</%text> |
|
314 | 314 | <%text>### CELERY CONFIG ####</%text> |
|
315 | 315 | <%text>####################################</%text> |
|
316 | 316 | |
|
317 | 317 | use_celery = false |
|
318 | 318 | |
|
319 | 319 | <%text>## Example: connect to the virtual host 'rabbitmqhost' on localhost as rabbitmq:</%text> |
|
320 | 320 | broker.url = amqp://rabbitmq:qewqew@localhost:5672/rabbitmqhost |
|
321 | 321 | |
|
322 | 322 | celery.imports = kallithea.lib.celerylib.tasks |
|
323 | 323 | celery.accept.content = pickle |
|
324 | 324 | celery.result.backend = amqp |
|
325 | 325 | celery.result.dburi = amqp:// |
|
326 | 326 | celery.result.serializer = json
|
327 | 327 | |
|
328 | 328 | #celery.send.task.error.emails = true |
|
329 | 329 | #celery.amqp.task.result.expires = 18000 |
|
330 | 330 | |
|
331 | 331 | celeryd.concurrency = 2 |
|
332 | 332 | celeryd.max.tasks.per.child = 1 |
|
333 | 333 | |
|
334 | 334 | <%text>## If true, tasks will never be sent to the queue, but executed locally instead.</%text> |
|
335 | 335 | celery.always.eager = false |
|
336 | 336 | |
|
337 | 337 | <%text>####################################</%text> |
|
338 | 338 | <%text>### BEAKER CACHE ####</%text> |
|
339 | 339 | <%text>####################################</%text> |
|
340 | 340 | |
|
341 | 341 | beaker.cache.data_dir = ${here}/data/cache/data |
|
342 | 342 | beaker.cache.lock_dir = ${here}/data/cache/lock |
|
343 | 343 | |
|
344 | 344 | beaker.cache.regions = short_term,long_term,sql_cache_short |
|
345 | 345 | |
|
346 | 346 | beaker.cache.short_term.type = memory |
|
347 | 347 | beaker.cache.short_term.expire = 60 |
|
348 | 348 | beaker.cache.short_term.key_length = 256 |
|
349 | 349 | |
|
350 | 350 | beaker.cache.long_term.type = memory |
|
351 | 351 | beaker.cache.long_term.expire = 36000 |
|
352 | 352 | beaker.cache.long_term.key_length = 256 |
|
353 | 353 | |
|
354 | 354 | beaker.cache.sql_cache_short.type = memory |
|
355 | 355 | beaker.cache.sql_cache_short.expire = 10 |
|
356 | 356 | beaker.cache.sql_cache_short.key_length = 256 |
|
357 | 357 | |
|
358 | 358 | <%text>####################################</%text> |
|
359 | 359 | <%text>### BEAKER SESSION ####</%text> |
|
360 | 360 | <%text>####################################</%text> |
|
361 | 361 | |
|
362 | 362 | <%text>## Name of session cookie. Should be unique for a given host and path, even when running</%text> |
|
363 | 363 | <%text>## on different ports. Otherwise, cookie sessions will be shared and messed up.</%text> |
|
364 | 364 | beaker.session.key = kallithea |
|
365 | 365 | <%text>## Sessions should always only be accessible by the browser, not directly by JavaScript.</%text> |
|
366 | 366 | beaker.session.httponly = true |
|
367 | 367 | <%text>## Session lifetime. 2592000 seconds is 30 days.</%text> |
|
368 | 368 | beaker.session.timeout = 2592000 |
|
369 | 369 | |
|
370 | 370 | <%text>## Server secret used with HMAC to ensure integrity of cookies.</%text> |
|
371 | 371 | beaker.session.secret = ${uuid()} |
|
372 | 372 | <%text>## Further, encrypt the data with AES.</%text> |
|
373 | 373 | #beaker.session.encrypt_key = <key_for_encryption> |
|
374 | 374 | #beaker.session.validate_key = <validation_key> |
|
375 | 375 | |
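If the AES options above are enabled, the keys must be unpredictable random strings; never keep the example placeholders. A minimal sketch for generating suitable values (the 32-byte length is an assumption, not a Kallithea requirement):

    import binascii
    import os

    # print two independent random hex strings to paste into the ini file
    print('beaker.session.encrypt_key = %s' % binascii.hexlify(os.urandom(32)))
    print('beaker.session.validate_key = %s' % binascii.hexlify(os.urandom(32)))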
|
376 | 376 | <%text>## Type of storage used for the session, current types are</%text> |
|
377 | 377 | <%text>## dbm, file, memcached, database, and memory.</%text> |
|
378 | 378 | |
|
379 | 379 | <%text>## File system storage of session data. (default)</%text> |
|
380 | 380 | #beaker.session.type = file |
|
381 | 381 | |
|
382 | 382 | <%text>## Cookie only, store all session data inside the cookie. Requires secure secrets.</%text> |
|
383 | 383 | #beaker.session.type = cookie |
|
384 | 384 | |
|
385 | 385 | <%text>## Database storage of session data.</%text> |
|
386 | 386 | #beaker.session.type = ext:database |
|
387 | 387 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/kallithea |
|
388 | 388 | #beaker.session.table_name = db_session |
|
389 | 389 | |
|
390 | 390 | %if error_aggregation_service == 'appenlight': |
|
391 | 391 | <%text>############################</%text> |
|
392 | 392 | <%text>## ERROR HANDLING SYSTEMS ##</%text> |
|
393 | 393 | <%text>############################</%text> |
|
394 | 394 | |
|
395 | 395 | <%text>####################</%text> |
|
396 | 396 | <%text>### [appenlight] ###</%text> |
|
397 | 397 | <%text>####################</%text> |
|
398 | 398 | |
|
399 | 399 | <%text>## AppEnlight is tailored to work with Kallithea, see</%text> |
|
400 | 400 | <%text>## http://appenlight.com for details on how to obtain an account</%text>

401 | 401 | <%text>## you must install the Python package `appenlight_client` to make it work</%text>
|
402 | 402 | |
|
403 | 403 | <%text>## appenlight enabled</%text> |
|
404 | 404 | appenlight = false |
|
405 | 405 | |
|
406 | 406 | appenlight.server_url = https://api.appenlight.com |
|
407 | 407 | appenlight.api_key = YOUR_API_KEY |
|
408 | 408 | |
|
409 | 409 | <%text>## TWEAK AMOUNT OF INFO SENT HERE</%text> |
|
410 | 410 | |
|
411 | 411 | <%text>## enables 404 error logging (default False)</%text> |
|
412 | 412 | appenlight.report_404 = false |
|
413 | 413 | |
|
414 | 414 | <%text>## time in seconds after which a request is considered slow (default 1)</%text>
|
415 | 415 | appenlight.slow_request_time = 1 |
|
416 | 416 | |
|
417 | 417 | <%text>## record slow requests in application</%text> |
|
418 | 418 | <%text>## (needs to be enabled for slow datastore recording and time tracking)</%text> |
|
419 | 419 | appenlight.slow_requests = true |
|
420 | 420 | |
|
421 | 421 | <%text>## enable hooking to application loggers</%text> |
|
422 | 422 | #appenlight.logging = true |
|
423 | 423 | |
|
424 | 424 | <%text>## minimum log level for log capture</%text> |
|
425 | 425 | #appenlight.logging.level = WARNING |
|
426 | 426 | |
|
427 | 427 | <%text>## send logs only from erroneous/slow requests</%text> |
|
428 | 428 | <%text>## (saves API quota for intensive logging)</%text> |
|
429 | 429 | appenlight.logging_on_error = false |
|
430 | 430 | |
|
431 | 431 | <%text>## list of additional keywords that should be grabbed from the environ object;</%text>

432 | 432 | <%text>## can be a string with a comma-separated list of words in lowercase</%text>

433 | 433 | <%text>## (by default the client will always send the following info:</%text>

434 | 434 | <%text>## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that</%text>

435 | 435 | <%text>## start with HTTP*); this list can be extended with additional keywords here</%text>
|
436 | 436 | appenlight.environ_keys_whitelist = |
|
437 | 437 | |
|
438 | 438 | <%text>## list of keywords that should be blanked from the request object;</%text>

439 | 439 | <%text>## can be a string with a comma-separated list of words in lowercase</%text>

440 | 440 | <%text>## (by default the client will always blank keys that contain the following words:</%text>

441 | 441 | <%text>## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf');</%text>

442 | 442 | <%text>## this list can be extended with additional keywords set here</%text>
|
443 | 443 | appenlight.request_keys_blacklist = |
|
444 | 444 | |
|
445 | 445 | <%text>## list of namespaces that should be ignored when gathering log entries;</%text>

446 | 446 | <%text>## can be a string with a comma-separated list of namespaces</%text>

447 | 447 | <%text>## (by default the client ignores its own entries: appenlight_client.client)</%text>
|
448 | 448 | appenlight.log_namespace_blacklist = |
|
449 | 449 | |
|
450 | 450 | %elif error_aggregation_service == 'sentry': |
|
451 | 451 | <%text>################</%text> |
|
452 | 452 | <%text>### [sentry] ###</%text> |
|
453 | 453 | <%text>################</%text> |
|
454 | 454 | |
|
455 | 455 | <%text>## Sentry is an alternative open source error aggregator</%text>

456 | 456 | <%text>## you must install the Python packages `sentry` and `raven` to enable it</%text>
|
457 | 457 | |
|
458 | 458 | sentry.dsn = YOUR_DSN
|
459 | 459 | sentry.servers = |
|
460 | 460 | sentry.name = |
|
461 | 461 | sentry.key = |
|
462 | 462 | sentry.public_key = |
|
463 | 463 | sentry.secret_key = |
|
464 | 464 | sentry.project = |
|
465 | 465 | sentry.site = |
|
466 | 466 | sentry.include_paths = |
|
467 | 467 | sentry.exclude_paths = |
|
468 | 468 | |
|
469 | 469 | %endif |
|
470 | 470 | <%text>################################################################################</%text> |
|
471 | 471 | <%text>## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##</%text> |
|
472 | 472 | <%text>## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##</%text> |
|
473 | 473 | <%text>## execute malicious code after an exception is raised. ##</%text> |
|
474 | 474 | <%text>################################################################################</%text> |
|
475 | 475 | set debug = false |
|
476 | 476 | |
|
477 | 477 | <%text>##################################</%text> |
|
478 | 478 | <%text>### LOGVIEW CONFIG ###</%text> |
|
479 | 479 | <%text>##################################</%text> |
|
480 | 480 | |
|
481 | 481 | logview.sqlalchemy = #faa |
|
482 | 482 | logview.pylons.templating = #bfb |
|
483 | 483 | logview.pylons.util = #eee |
|
484 | 484 | |
|
485 | 485 | <%text>#########################################################</%text> |
|
486 | 486 | <%text>### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG ###</%text>
|
487 | 487 | <%text>#########################################################</%text> |
|
488 | 488 | |
|
489 | 489 | %if database_engine == 'sqlite': |
|
490 | 490 | # SQLITE [default] |
|
491 | 491 | sqlalchemy.url = sqlite:///${here}/kallithea.db?timeout=60 |
|
492 | 492 | |
|
493 | 493 | %elif database_engine == 'postgres': |
|
494 | 494 | # POSTGRESQL |
|
495 | 495 | sqlalchemy.url = postgresql://user:pass@localhost/kallithea |
|
496 | 496 | |
|
497 | 497 | %elif database_engine == 'mysql': |
|
498 | 498 | # MySQL |
|
499 | 499 | sqlalchemy.url = mysql://user:pass@localhost/kallithea?charset=utf8 |
|
500 | 500 | |
|
501 | 501 | %endif |
|
502 | 502 | # see sqlalchemy docs for others |
|
503 | 503 | |
|
504 | 504 | sqlalchemy.echo = false |
|
505 | 505 | sqlalchemy.pool_recycle = 3600 |
|
506 | 506 | |
|
507 | 507 | <%text>################################</%text> |
|
508 | 508 | <%text>### ALEMBIC CONFIGURATION ####</%text> |
|
509 | 509 | <%text>################################</%text> |
|
510 | 510 | |
|
511 | 511 | [alembic] |
|
512 | 512 | script_location = kallithea:alembic |
|
513 | 513 | |
|
514 | 514 | <%text>################################</%text> |
|
515 | 515 | <%text>### LOGGING CONFIGURATION ####</%text> |
|
516 | 516 | <%text>################################</%text> |
|
517 | 517 | |
|
518 | 518 | [loggers] |
|
519 | keys = root, routes, kallithea, sqlalchemy, gearbox, beaker, templates, whoosh_indexer | |
|
519 | keys = root, routes, kallithea, sqlalchemy, tg, gearbox, beaker, templates, whoosh_indexer | |
|
520 | 520 | |
|
521 | 521 | [handlers] |
|
522 | 522 | keys = console, console_sql |
|
523 | 523 | |
|
524 | 524 | [formatters] |
|
525 | 525 | keys = generic, color_formatter, color_formatter_sql |
|
526 | 526 | |
|
527 | 527 | <%text>#############</%text> |
|
528 | 528 | <%text>## LOGGERS ##</%text> |
|
529 | 529 | <%text>#############</%text> |
|
530 | 530 | |
|
531 | 531 | [logger_root] |
|
532 | 532 | level = NOTSET |
|
533 | 533 | handlers = console |
|
534 | 534 | |
|
535 | 535 | [logger_routes] |
|
536 | 536 | level = DEBUG |
|
537 | 537 | handlers = |
|
538 | 538 | qualname = routes.middleware |
|
539 | 539 | <%text>## "level = DEBUG" logs the route matched and routing variables.</%text> |
|
540 | 540 | propagate = 1 |
|
541 | 541 | |
|
542 | 542 | [logger_beaker] |
|
543 | 543 | level = DEBUG |
|
544 | 544 | handlers = |
|
545 | 545 | qualname = beaker.container |
|
546 | 546 | propagate = 1 |
|
547 | 547 | |
|
548 | 548 | [logger_templates] |
|
549 | 549 | level = INFO |
|
550 | 550 | handlers = |
|
551 | 551 | qualname = pylons.templating |
|
552 | 552 | propagate = 1 |
|
553 | 553 | |
|
554 | 554 | [logger_kallithea] |
|
555 | 555 | level = DEBUG |
|
556 | 556 | handlers = |
|
557 | 557 | qualname = kallithea |
|
558 | 558 | propagate = 1 |
|
559 | 559 | |
|
560 | [logger_tg] | |
|
561 | level = DEBUG | |
|
562 | handlers = | |
|
563 | qualname = tg | |
|
564 | propagate = 1 | |
|
565 | ||
|
560 | 566 | [logger_gearbox] |
|
561 | 567 | level = DEBUG |
|
562 | 568 | handlers = |
|
563 | 569 | qualname = gearbox |
|
564 | 570 | propagate = 1 |
|
565 | 571 | |
|
566 | 572 | [logger_sqlalchemy] |
|
567 | 573 | level = INFO |
|
568 | 574 | handlers = console_sql |
|
569 | 575 | qualname = sqlalchemy.engine |
|
570 | 576 | propagate = 0 |
|
571 | 577 | |
|
572 | 578 | [logger_whoosh_indexer] |
|
573 | 579 | level = DEBUG |
|
574 | 580 | handlers = |
|
575 | 581 | qualname = whoosh_indexer |
|
576 | 582 | propagate = 1 |
|
577 | 583 | |
|
578 | 584 | <%text>##############</%text> |
|
579 | 585 | <%text>## HANDLERS ##</%text> |
|
580 | 586 | <%text>##############</%text> |
|
581 | 587 | |
|
582 | 588 | [handler_console] |
|
583 | 589 | class = StreamHandler |
|
584 | 590 | args = (sys.stderr,) |
|
585 | 591 | level = INFO |
|
586 | 592 | formatter = generic |
|
587 | 593 | |
|
588 | 594 | [handler_console_sql] |
|
589 | 595 | class = StreamHandler |
|
590 | 596 | args = (sys.stderr,) |
|
591 | 597 | level = WARN |
|
592 | 598 | formatter = generic |
|
593 | 599 | |
|
594 | 600 | <%text>################</%text> |
|
595 | 601 | <%text>## FORMATTERS ##</%text> |
|
596 | 602 | <%text>################</%text> |
|
597 | 603 | |
|
598 | 604 | [formatter_generic] |
|
599 | 605 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
600 | 606 | datefmt = %Y-%m-%d %H:%M:%S |
|
601 | 607 | |
|
602 | 608 | [formatter_color_formatter] |
|
603 | 609 | class = kallithea.lib.colored_formatter.ColorFormatter |
|
604 | 610 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
605 | 611 | datefmt = %Y-%m-%d %H:%M:%S |
|
606 | 612 | |
|
607 | 613 | [formatter_color_formatter_sql] |
|
608 | 614 | class = kallithea.lib.colored_formatter.ColorFormatterSql |
|
609 | 615 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
610 | 616 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,672 +1,673 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.utils |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Utilities library for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 18, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import os |
|
29 | 29 | import re |
|
30 | 30 | import logging |
|
31 | 31 | import datetime |
|
32 | 32 | import traceback |
|
33 | 33 | import beaker |
|
34 | 34 | |
|
35 | from tg import request, response | |
|
35 | 36 | from webhelpers.text import collapse, remove_formatting, strip_tags |
|
36 | 37 | from beaker.cache import _cache_decorate |
|
37 | 38 | |
|
38 | 39 | from kallithea.lib.vcs.utils.hgcompat import ui, config |
|
39 | 40 | from kallithea.lib.vcs.utils.helpers import get_scm |
|
40 | 41 | from kallithea.lib.vcs.exceptions import VCSError |
|
41 | 42 | |
|
42 | 43 | from kallithea.model import meta |
|
43 | 44 | from kallithea.model.db import Repository, User, Ui, \ |
|
44 | 45 | UserLog, RepoGroup, Setting, UserGroup |
|
45 | 46 | from kallithea.model.repo_group import RepoGroupModel |
|
46 | 47 | from kallithea.lib.utils2 import safe_str, safe_unicode, get_current_authuser |
|
47 | 48 | from kallithea.lib.vcs.utils.fakemod import create_module |
|
48 | 49 | |
|
49 | 50 | log = logging.getLogger(__name__) |
|
50 | 51 | |
|
51 | 52 | REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}_.*') |
|
52 | 53 | |
|
53 | 54 | |
|
54 | 55 | def recursive_replace(str_, replace=' '): |
|
55 | 56 | """ |
|
56 | 57 | Recursively replace repeated occurrences of a given character with a single one
|
57 | 58 | |
|
58 | 59 | :param str_: given string |
|
59 | 60 | :param replace: character whose repeated occurrences will be collapsed
|
60 | 61 | |
|
61 | 62 | Examples:: |
|
62 | 63 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') |
|
63 | 64 | 'Mighty-Mighty-Bo-sstones' |
|
64 | 65 | """ |
|
65 | 66 | |
|
66 | 67 | if str_.find(replace * 2) == -1: |
|
67 | 68 | return str_ |
|
68 | 69 | else: |
|
69 | 70 | str_ = str_.replace(replace * 2, replace) |
|
70 | 71 | return recursive_replace(str_, replace) |
|
71 | 72 | |
|
72 | 73 | |
|
73 | 74 | def repo_name_slug(value): |
|
74 | 75 | """ |
|
75 | 76 | Return a slug of the repository name.

76 | 77 | This function is called on each creation/modification

77 | 78 | of a repository to prevent bad names.
|
78 | 79 | """ |
|
79 | 80 | |
|
80 | 81 | slug = remove_formatting(value) |
|
81 | 82 | slug = strip_tags(slug) |
|
82 | 83 | |
|
83 | 84 | for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """: |
|
84 | 85 | slug = slug.replace(c, '-') |
|
85 | 86 | slug = recursive_replace(slug, '-') |
|
86 | 87 | slug = collapse(slug, '-') |
|
87 | 88 | return slug |
|
88 | 89 | |
|
89 | 90 | |
|
90 | 91 | #============================================================================== |
|
91 | 92 | # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS |
|
92 | 93 | #============================================================================== |
|
93 | 94 | def get_repo_slug(request): |
|
94 | 95 | _repo = request.environ['pylons.routes_dict'].get('repo_name') |
|
95 | 96 | if _repo: |
|
96 | 97 | _repo = _repo.rstrip('/') |
|
97 | 98 | return _repo |
|
98 | 99 | |
|
99 | 100 | |
|
100 | 101 | def get_repo_group_slug(request): |
|
101 | 102 | _group = request.environ['pylons.routes_dict'].get('group_name') |
|
102 | 103 | if _group: |
|
103 | 104 | _group = _group.rstrip('/') |
|
104 | 105 | return _group |
|
105 | 106 | |
|
106 | 107 | |
|
107 | 108 | def get_user_group_slug(request): |
|
108 | 109 | _group = request.environ['pylons.routes_dict'].get('id') |
|
109 | 110 | _group = UserGroup.get(_group) |
|
110 | 111 | if _group: |
|
111 | 112 | return _group.users_group_name |
|
112 | 113 | return None |
|
113 | 114 | |
|
114 | 115 | |
|
115 | 116 | def _extract_id_from_repo_name(repo_name): |
|
116 | 117 | if repo_name.startswith('/'): |
|
117 | 118 | repo_name = repo_name.lstrip('/') |
|
118 | 119 | by_id_match = re.match(r'^_(\d{1,})', repo_name) |
|
119 | 120 | if by_id_match: |
|
120 | 121 | return by_id_match.groups()[0] |
|
121 | 122 | |
|
122 | 123 | |
|
123 | 124 | def get_repo_by_id(repo_name): |
|
124 | 125 | """ |
|
125 | 126 | Extracts repo_name by id from special URLs. An example URL is _11/repo_name
|
126 | 127 | |
|
127 | 128 | :param repo_name: |
|
128 | 129 | :return: repo_name if matched else None |
|
129 | 130 | """ |
|
130 | 131 | _repo_id = _extract_id_from_repo_name(repo_name) |
|
131 | 132 | if _repo_id: |
|
132 | 133 | from kallithea.model.db import Repository |
|
133 | 134 | repo = Repository.get(_repo_id) |
|
134 | 135 | if repo: |
|
135 | 136 | # TODO: return repo instead of reponame? or would that be a layering violation? |
|
136 | 137 | return repo.repo_name |
|
137 | 138 | return None |
|
138 | 139 | |
|
139 | 140 | |
|
140 | 141 | def action_logger(user, action, repo, ipaddr='', commit=False): |
|
141 | 142 | """ |
|
142 | 143 | Action logger for various actions made by users |
|
143 | 144 | |
|
144 | 145 | :param user: user that made this action, can be a unique username string or |
|
145 | 146 | object containing user_id attribute |
|
146 | 147 | :param action: action to log, should be one of the predefined unique actions for

147 | 148 | easy translations

148 | 149 | :param repo: string name of the repository, or object containing repo_id,

149 | 150 | that the action was made on

150 | 151 | :param ipaddr: optional IP address from which the action was made
|
151 | 152 | |
|
152 | 153 | """ |
|
153 | 154 | |
|
154 | 155 | # if we don't get explicit IP address try to get one from registered user |
|
155 | 156 | # in tmpl context var |
|
156 | 157 | if not ipaddr: |
|
157 | 158 | ipaddr = getattr(get_current_authuser(), 'ip_addr', '') |
|
158 | 159 | |
|
159 | 160 | if getattr(user, 'user_id', None): |
|
160 | 161 | user_obj = User.get(user.user_id) |
|
161 | 162 | elif isinstance(user, basestring): |
|
162 | 163 | user_obj = User.get_by_username(user) |
|
163 | 164 | else: |
|
164 | 165 | raise Exception('You have to provide a user object or a username') |
|
165 | 166 | |
|
166 | 167 | if getattr(repo, 'repo_id', None): |
|
167 | 168 | repo_obj = Repository.get(repo.repo_id) |
|
168 | 169 | repo_name = repo_obj.repo_name |
|
169 | 170 | elif isinstance(repo, basestring): |
|
170 | 171 | repo_name = repo.lstrip('/') |
|
171 | 172 | repo_obj = Repository.get_by_repo_name(repo_name) |
|
172 | 173 | else: |
|
173 | 174 | repo_obj = None |
|
174 | 175 | repo_name = u'' |
|
175 | 176 | |
|
176 | 177 | user_log = UserLog() |
|
177 | 178 | user_log.user_id = user_obj.user_id |
|
178 | 179 | user_log.username = user_obj.username |
|
179 | 180 | user_log.action = safe_unicode(action) |
|
180 | 181 | |
|
181 | 182 | user_log.repository = repo_obj |
|
182 | 183 | user_log.repository_name = repo_name |
|
183 | 184 | |
|
184 | 185 | user_log.action_date = datetime.datetime.now() |
|
185 | 186 | user_log.user_ip = ipaddr |
|
186 | 187 | meta.Session().add(user_log) |
|
187 | 188 | |
|
188 | 189 | log.info('Logging action:%s on %s by user:%s ip:%s', |
|
189 | 190 | action, safe_unicode(repo), user_obj, ipaddr) |
|
190 | 191 | if commit: |
|
191 | 192 | meta.Session().commit() |
|
192 | 193 | |
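A hedged usage sketch (the username, action string and repository name are all illustrative; Kallithea uses a fixed set of predefined action strings):

    # log an action for user 'admin' on repository u'group/repo',
    # committing the session immediately
    action_logger(u'admin', 'user_created_repo', u'group/repo',
                  ipaddr='10.0.0.1', commit=True)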
|
193 | 194 | |
|
194 | 195 | def get_filesystem_repos(path): |
|
195 | 196 | """ |
|
196 | 197 | Scan the given path for repositories and yield (name, (type, path)) tuples

197 | 198 | 

198 | 199 | :param path: path to scan for repositories
|
200 | 201 | """ |
|
201 | 202 | |
|
202 | 203 | # remove ending slash for better results |
|
203 | 204 | path = safe_str(path.rstrip(os.sep)) |
|
204 | 205 | log.debug('now scanning in %s', path) |
|
205 | 206 | |
|
206 | 207 | def isdir(*n): |
|
207 | 208 | return os.path.isdir(os.path.join(*n)) |
|
208 | 209 | |
|
209 | 210 | for root, dirs, _files in os.walk(path): |
|
210 | 211 | recurse_dirs = [] |
|
211 | 212 | for subdir in dirs: |
|
212 | 213 | # skip removed repos |
|
213 | 214 | if REMOVED_REPO_PAT.match(subdir): |
|
214 | 215 | continue |
|
215 | 216 | |
|
216 | 217 | #skip .<something> dirs TODO: rly? then we should prevent creating them ... |
|
217 | 218 | if subdir.startswith('.'): |
|
218 | 219 | continue |
|
219 | 220 | |
|
220 | 221 | cur_path = os.path.join(root, subdir) |
|
221 | 222 | if isdir(cur_path, '.git'): |
|
222 | 223 | log.warning('ignoring non-bare Git repo: %s', cur_path) |
|
223 | 224 | continue |
|
224 | 225 | |
|
225 | 226 | if (isdir(cur_path, '.hg') or |
|
226 | 227 | isdir(cur_path, '.svn') or |
|
227 | 228 | isdir(cur_path, 'objects') and (isdir(cur_path, 'refs') or |
|
228 | 229 | os.path.isfile(os.path.join(cur_path, 'packed-refs')))): |
|
229 | 230 | |
|
230 | 231 | if not os.access(cur_path, os.R_OK) or not os.access(cur_path, os.X_OK): |
|
231 | 232 | log.warning('ignoring repo path without access: %s', cur_path) |
|
232 | 233 | continue |
|
233 | 234 | |
|
234 | 235 | if not os.access(cur_path, os.W_OK): |
|
235 | 236 | log.warning('repo path without write access: %s', cur_path) |
|
236 | 237 | |
|
237 | 238 | try: |
|
238 | 239 | scm_info = get_scm(cur_path) |
|
239 | 240 | assert cur_path.startswith(path) |
|
240 | 241 | repo_path = cur_path[len(path) + 1:] |
|
241 | 242 | yield repo_path, scm_info |
|
242 | 243 | continue # no recursion |
|
243 | 244 | except VCSError: |
|
244 | 245 | # We should perhaps ignore such broken repos, but especially |
|
245 | 246 | # the bare git detection is unreliable so we dive into it |
|
246 | 247 | pass |
|
247 | 248 | |
|
248 | 249 | recurse_dirs.append(subdir) |
|
249 | 250 | |
|
250 | 251 | dirs[:] = recurse_dirs |
|
251 | 252 | |
|
252 | 253 | |
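A usage sketch for the generator above (the scan root is made up; it assumes get_scm returns an (alias, path) tuple, as its use elsewhere in this module suggests):

    for repo_name, (scm_alias, repo_path) in get_filesystem_repos('/srv/repos'):
        print('found %s repository %s at %s' % (scm_alias, repo_name, repo_path))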
|
253 | 254 | def is_valid_repo(repo_name, base_path, scm=None): |
|
254 | 255 | """ |
|
255 | 256 | Return True if the given path is a valid repository, False otherwise.

256 | 257 | If the scm param is given, also check whether the repository's scm matches

257 | 258 | the expected scm parameter
|
258 | 259 | |
|
259 | 260 | :param repo_name: |
|
260 | 261 | :param base_path: |
|
261 | 262 | :param scm: |
|
262 | 263 | |
|
263 | 264 | :return True: if given path is a valid repository |
|
264 | 265 | """ |
|
265 | 266 | full_path = os.path.join(safe_str(base_path), safe_str(repo_name)) |
|
266 | 267 | |
|
267 | 268 | try: |
|
268 | 269 | scm_ = get_scm(full_path) |
|
269 | 270 | if scm: |
|
270 | 271 | return scm_[0] == scm |
|
271 | 272 | return True |
|
272 | 273 | except VCSError: |
|
273 | 274 | return False |
|
274 | 275 | |
|
275 | 276 | |
|
276 | 277 | def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False): |
|
277 | 278 | """ |
|
278 | 279 | Return True if the given path is a repository group, False otherwise

279 | 280 | 

280 | 281 | :param repo_group_name:
|
281 | 282 | :param base_path: |
|
282 | 283 | """ |
|
283 | 284 | full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name)) |
|
284 | 285 | |
|
285 | 286 | # check if it's not a repo |
|
286 | 287 | if is_valid_repo(repo_group_name, base_path): |
|
287 | 288 | return False |
|
288 | 289 | |
|
289 | 290 | try: |
|
290 | 291 | # we need to check bare git repos at higher level |
|
291 | 292 | # since we might match branches/hooks/info/objects or possible |
|
292 | 293 | # other things inside bare git repo |
|
293 | 294 | get_scm(os.path.dirname(full_path)) |
|
294 | 295 | return False |
|
295 | 296 | except VCSError: |
|
296 | 297 | pass |
|
297 | 298 | |
|
298 | 299 | # check if it's a valid path |
|
299 | 300 | if skip_path_check or os.path.isdir(full_path): |
|
300 | 301 | return True |
|
301 | 302 | |
|
302 | 303 | return False |
|
303 | 304 | |
|
304 | 305 | |
|
305 | 306 | # propagated from Mercurial documentation
|
306 | 307 | ui_sections = ['alias', 'auth', |
|
307 | 308 | 'decode/encode', 'defaults', |
|
308 | 309 | 'diff', 'email', |
|
309 | 310 | 'extensions', 'format', |
|
310 | 311 | 'merge-patterns', 'merge-tools', |
|
311 | 312 | 'hooks', 'http_proxy', |
|
312 | 313 | 'smtp', 'patch', |
|
313 | 314 | 'paths', 'profiling', |
|
314 | 315 | 'server', 'trusted', |
|
315 | 316 | 'ui', 'web', ] |
|
316 | 317 | |
|
317 | 318 | |
|
318 | 319 | def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True): |
|
319 | 320 | """ |
|
320 | 321 | A function that will read Python rc files or the database

321 | 322 | and make a Mercurial ui object from the read options
|
322 | 323 | |
|
323 | 324 | :param path: path to mercurial config file |
|
324 | 325 | :param checkpaths: check the path |
|
325 | 326 | :param read_from: read from 'file' or 'db' |
|
326 | 327 | """ |
|
327 | 328 | |
|
328 | 329 | baseui = ui.ui() |
|
329 | 330 | |
|
330 | 331 | # clean the baseui object |
|
331 | 332 | baseui._ocfg = config.config() |
|
332 | 333 | baseui._ucfg = config.config() |
|
333 | 334 | baseui._tcfg = config.config() |
|
334 | 335 | |
|
335 | 336 | if read_from == 'file': |
|
336 | 337 | if not os.path.isfile(path): |
|
337 | 338 | log.debug('hgrc file is not present at %s, skipping...', path) |
|
338 | 339 | return False |
|
339 | 340 | log.debug('reading hgrc from %s', path) |
|
340 | 341 | cfg = config.config() |
|
341 | 342 | cfg.read(path) |
|
342 | 343 | for section in ui_sections: |
|
343 | 344 | for k, v in cfg.items(section): |
|
344 | 345 | log.debug('setting ui from file: [%s] %s=%s', section, k, v)
|
345 | 346 | baseui.setconfig(safe_str(section), safe_str(k), safe_str(v)) |
|
346 | 347 | |
|
347 | 348 | elif read_from == 'db': |
|
348 | 349 | sa = meta.Session() |
|
349 | 350 | ret = sa.query(Ui).all() |
|
350 | 351 | |
|
351 | 352 | hg_ui = ret |
|
352 | 353 | for ui_ in hg_ui: |
|
353 | 354 | if ui_.ui_active: |
|
354 | 355 | ui_val = '' if ui_.ui_value is None else safe_str(ui_.ui_value) |
|
355 | 356 | log.debug('setting ui from db: [%s] %s=%r', ui_.ui_section,
|
356 | 357 | ui_.ui_key, ui_val) |
|
357 | 358 | baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key), |
|
358 | 359 | ui_val) |
|
359 | 360 | if clear_session: |
|
360 | 361 | meta.Session.remove() |
|
361 | 362 | |
|
362 | 363 | # force set push_ssl requirement to False, Kallithea handles that |
|
363 | 364 | baseui.setconfig('web', 'push_ssl', False) |
|
364 | 365 | baseui.setconfig('web', 'allow_push', '*') |
|
365 | 366 | # prevent interactive questions for ssh password / passphrase |
|
366 | 367 | ssh = baseui.config('ui', 'ssh', default='ssh') |
|
367 | 368 | baseui.setconfig('ui', 'ssh', '%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh) |
|
368 | 369 | |
|
369 | 370 | return baseui |
|
370 | 371 | |
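A brief usage sketch: building a ui object from the Ui settings stored in the database (the 'db' mode shown above):

    # read the hg ui settings from the Kallithea database;
    # clear_session=False keeps the SQLAlchemy session usable afterwards
    baseui = make_ui(read_from='db', clear_session=False)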
|
371 | 372 | |
|
372 | 373 | def set_app_settings(config): |
|
373 | 374 | """ |
|
374 | 375 | Updates app config with new settings from database |
|
375 | 376 | |
|
376 | 377 | :param config: |
|
377 | 378 | """ |
|
378 | 379 | hgsettings = Setting.get_app_settings() |
|
379 | 380 | |
|
380 | 381 | for k, v in hgsettings.items(): |
|
381 | 382 | config[k] = v |
|
382 | 383 | |
|
383 | 384 | |
|
384 | 385 | def set_vcs_config(config): |
|
385 | 386 | """ |
|
386 | 387 | Patch VCS config with some Kallithea specific stuff |
|
387 | 388 | |
|
388 | 389 | :param config: kallithea.CONFIG |
|
389 | 390 | """ |
|
390 | 391 | from kallithea.lib.vcs import conf |
|
391 | 392 | from kallithea.lib.utils2 import aslist |
|
392 | 393 | conf.settings.BACKENDS = { |
|
393 | 394 | 'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository', |
|
394 | 395 | 'git': 'kallithea.lib.vcs.backends.git.GitRepository', |
|
395 | 396 | } |
|
396 | 397 | |
|
397 | 398 | conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git') |
|
398 | 399 | conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip() |
|
399 | 400 | conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding', |
|
400 | 401 | 'utf8'), sep=',') |
|
401 | 402 | |
|
402 | 403 | |
|
403 | 404 | def set_indexer_config(config): |
|
404 | 405 | """ |
|
405 | 406 | Update Whoosh index mapping |
|
406 | 407 | |
|
407 | 408 | :param config: kallithea.CONFIG |
|
408 | 409 | """ |
|
409 | 410 | from kallithea.config import conf |
|
410 | 411 | |
|
411 | 412 | log.debug('adding extra into INDEX_EXTENSIONS') |
|
412 | 413 | conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', '')))
|
413 | 414 | |
|
414 | 415 | log.debug('adding extra into INDEX_FILENAMES') |
|
415 | 416 | conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', '')))
|
416 | 417 | |
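For example, with hypothetical ini values matching the commented suggestions in the config template, the function above extends the indexer whitelists roughly like this (a sketch, not Kallithea code):

    config = {'index.extensions': 'gemfile lock',
              'index.filenames': '.dockerignore INSTALL'}
    set_indexer_config(config)
    # conf.INDEX_EXTENSIONS was extended with ['gemfile', 'lock'] and
    # conf.INDEX_FILENAMES with ['.dockerignore', 'INSTALL']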
|
417 | 418 | |
|
418 | 419 | def map_groups(path): |
|
419 | 420 | """ |
|
420 | 421 | Given a full path to a repository, create all nested groups that this |
|
421 | 422 | repo is inside. This function creates parent-child relationships between |
|
422 | 423 | groups and creates default perms for all new groups. |
|
423 | 424 | |
|
424 | 425 | :param path: full path to the repository
|
425 | 426 | """ |
|
426 | 427 | sa = meta.Session() |
|
427 | 428 | groups = path.split(Repository.url_sep()) |
|
428 | 429 | parent = None |
|
429 | 430 | group = None |
|
430 | 431 | |
|
431 | 432 | # last element is repo in nested groups structure |
|
432 | 433 | groups = groups[:-1] |
|
433 | 434 | rgm = RepoGroupModel() |
|
434 | 435 | owner = User.get_first_admin() |
|
435 | 436 | for lvl, group_name in enumerate(groups): |
|
436 | 437 | group_name = u'/'.join(groups[:lvl] + [group_name]) |
|
437 | 438 | group = RepoGroup.get_by_group_name(group_name) |
|
438 | 439 | desc = '%s group' % group_name |
|
439 | 440 | |
|
440 | 441 | # skip folders that are now removed repos |
|
441 | 442 | if REMOVED_REPO_PAT.match(group_name): |
|
442 | 443 | break |
|
443 | 444 | |
|
444 | 445 | if group is None: |
|
445 | 446 | log.debug('creating group level: %s group_name: %s', |
|
446 | 447 | lvl, group_name) |
|
447 | 448 | group = RepoGroup(group_name, parent) |
|
448 | 449 | group.group_description = desc |
|
449 | 450 | group.owner = owner |
|
450 | 451 | sa.add(group) |
|
451 | 452 | rgm._create_default_perms(group) |
|
452 | 453 | sa.flush() |
|
453 | 454 | |
|
454 | 455 | parent = group |
|
455 | 456 | return group |
|
456 | 457 | |
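For instance (a sketch; u'lang/python/flask' stands for a repository path with two nested group levels):

    # creates RepoGroup u'lang' and u'lang/python' if missing (with default
    # perms) and returns the innermost group, u'lang/python'
    group = map_groups(u'lang/python/flask')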
|
457 | 458 | |
|
458 | 459 | def repo2db_mapper(initial_repo_list, remove_obsolete=False, |
|
459 | 460 | install_git_hooks=False, user=None, overwrite_git_hooks=False): |
|
460 | 461 | """ |
|
461 | 462 | Map all repos given in initial_repo_list; non-existing repositories

462 | 463 | are created. If remove_obsolete is True, also check for db entries

463 | 464 | that are not in initial_repo_list and remove them.
|
464 | 465 | |
|
465 | 466 | :param initial_repo_list: list of repositories found by scanning methods |
|
466 | 467 | :param remove_obsolete: check for obsolete entries in database |
|
467 | 468 | :param install_git_hooks: if this is True, also check and install git hook |
|
468 | 469 | for a repo if missing |
|
469 | 470 | :param overwrite_git_hooks: if this is True, overwrite any existing git hooks |
|
470 | 471 | that may be encountered (even if user-deployed) |
|
471 | 472 | """ |
|
472 | 473 | from kallithea.model.repo import RepoModel |
|
473 | 474 | from kallithea.model.scm import ScmModel |
|
474 | 475 | sa = meta.Session() |
|
475 | 476 | repo_model = RepoModel() |
|
476 | 477 | if user is None: |
|
477 | 478 | user = User.get_first_admin() |
|
478 | 479 | added = [] |
|
479 | 480 | |
|
480 | 481 | ## creation defaults
|
481 | 482 | defs = Setting.get_default_repo_settings(strip_prefix=True) |
|
482 | 483 | enable_statistics = defs.get('repo_enable_statistics') |
|
483 | 484 | enable_locking = defs.get('repo_enable_locking') |
|
484 | 485 | enable_downloads = defs.get('repo_enable_downloads') |
|
485 | 486 | private = defs.get('repo_private') |
|
486 | 487 | |
|
487 | 488 | for name, repo in initial_repo_list.items(): |
|
488 | 489 | group = map_groups(name) |
|
489 | 490 | unicode_name = safe_unicode(name) |
|
490 | 491 | db_repo = repo_model.get_by_repo_name(unicode_name) |
|
491 | 492 | # found a repo that is on the filesystem but not in the Kallithea database
|
492 | 493 | if not db_repo: |
|
493 | 494 | log.info('repository %s not found, creating now', name) |
|
494 | 495 | added.append(name) |
|
495 | 496 | desc = (repo.description |
|
496 | 497 | if repo.description != 'unknown' |
|
497 | 498 | else '%s repository' % name) |
|
498 | 499 | |
|
499 | 500 | new_repo = repo_model._create_repo( |
|
500 | 501 | repo_name=name, |
|
501 | 502 | repo_type=repo.alias, |
|
502 | 503 | description=desc, |
|
503 | 504 | repo_group=getattr(group, 'group_id', None), |
|
504 | 505 | owner=user, |
|
505 | 506 | enable_locking=enable_locking, |
|
506 | 507 | enable_downloads=enable_downloads, |
|
507 | 508 | enable_statistics=enable_statistics, |
|
508 | 509 | private=private, |
|
509 | 510 | state=Repository.STATE_CREATED |
|
510 | 511 | ) |
|
511 | 512 | sa.commit() |
|
512 | 513 | # we added that repo just now; make sure it has its git hooks

513 | 514 | # installed and its server info updated
|
514 | 515 | if new_repo.repo_type == 'git': |
|
515 | 516 | git_repo = new_repo.scm_instance |
|
516 | 517 | ScmModel().install_git_hooks(git_repo) |
|
517 | 518 | # update repository server-info |
|
518 | 519 | log.debug('Running update server info') |
|
519 | 520 | git_repo._update_server_info() |
|
520 | 521 | new_repo.update_changeset_cache() |
|
521 | 522 | elif install_git_hooks: |
|
522 | 523 | if db_repo.repo_type == 'git': |
|
523 | 524 | ScmModel().install_git_hooks(db_repo.scm_instance, force_create=overwrite_git_hooks) |
|
524 | 525 | |
|
525 | 526 | removed = [] |
|
526 | 527 | # remove from database those repositories that are not in the filesystem |
|
527 | 528 | unicode_initial_repo_list = set(safe_unicode(name) for name in initial_repo_list) |
|
528 | 529 | for repo in sa.query(Repository).all(): |
|
529 | 530 | if repo.repo_name not in unicode_initial_repo_list: |
|
530 | 531 | if remove_obsolete: |
|
531 | 532 | log.debug("Removing non-existing repository found in db `%s`", |
|
532 | 533 | repo.repo_name) |
|
533 | 534 | try: |
|
534 | 535 | RepoModel().delete(repo, forks='detach', fs_remove=False) |
|
535 | 536 | sa.commit() |
|
536 | 537 | except Exception: |
|
537 | 538 | # don't hold up further removals on error
|
538 | 539 | log.error(traceback.format_exc()) |
|
539 | 540 | sa.rollback() |
|
540 | 541 | removed.append(repo.repo_name) |
|
541 | 542 | return added, removed |
|
542 | 543 | |
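A hedged usage sketch, pairing this with the filesystem scanner defined earlier (the repos root is made up; ScmModel().repo_scan is assumed to return the name -> repo mapping this function expects):

    from kallithea.model.scm import ScmModel

    # scan the filesystem and sync the database with what was found
    initial_repos = ScmModel().repo_scan('/srv/repos')
    added, removed = repo2db_mapper(initial_repos, remove_obsolete=False)
    print('added: %s, removed: %s' % (added, removed))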
|
543 | 544 | |
|
544 | 545 | def load_rcextensions(root_path): |
|
545 | 546 | import kallithea |
|
546 | 547 | from kallithea.config import conf |
|
547 | 548 | |
|
548 | 549 | path = os.path.join(root_path, 'rcextensions', '__init__.py') |
|
549 | 550 | if os.path.isfile(path): |
|
550 | 551 | rcext = create_module('rc', path) |
|
551 | 552 | EXT = kallithea.EXTENSIONS = rcext |
|
552 | 553 | log.debug('Found rcextensions, now loading %s...', rcext)
|
553 | 554 | |
|
554 | 555 | # Additional mappings that are not present in the pygments lexers |
|
555 | 556 | conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {})) |
|
556 | 557 | |
|
557 | 558 | #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present) |
|
558 | 559 | |
|
559 | 560 | if getattr(EXT, 'INDEX_EXTENSIONS', []): |
|
560 | 561 | log.debug('setting custom INDEX_EXTENSIONS')
|
561 | 562 | conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', []) |
|
562 | 563 | |
|
563 | 564 | #ADDITIONAL MAPPINGS |
|
564 | 565 | log.debug('adding extra into INDEX_EXTENSIONS') |
|
565 | 566 | conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', [])) |
|
566 | 567 | |
|
567 | 568 | # automatically check whether the module is missing any data, and fall back

568 | 569 | # to defaults if it is; this helps auto-updating new features of the rcextensions module
|
569 | 570 | #from kallithea.config import rcextensions |
|
570 | 571 | #for k in dir(rcextensions): |
|
571 | 572 | # if not k.startswith('_') and not hasattr(EXT, k): |
|
572 | 573 | # setattr(EXT, k, getattr(rcextensions, k)) |
|
573 | 574 | |
|
574 | 575 | |
|
575 | 576 | #============================================================================== |
|
576 | 577 | # MISC |
|
577 | 578 | #============================================================================== |
|
578 | 579 | |
|
579 | 580 | def check_git_version(): |
|
580 | 581 | """ |
|
581 | 582 | Check which version of git is installed on the system, and issue a warning
|
582 | 583 | if it's too old for Kallithea to work properly. |
|
583 | 584 | """ |
|
584 | 585 | from kallithea import BACKENDS |
|
585 | 586 | from kallithea.lib.vcs.backends.git.repository import GitRepository |
|
586 | 587 | from kallithea.lib.vcs.conf import settings |
|
587 | 588 | from distutils.version import StrictVersion |
|
588 | 589 | |
|
589 | 590 | if 'git' not in BACKENDS: |
|
590 | 591 | return None |
|
591 | 592 | |
|
592 | 593 | stdout, stderr = GitRepository._run_git_command(['--version'], _bare=True, |
|
593 | 594 | _safe=True) |
|
594 | 595 | |
|
595 | 596 | m = re.search(r"\d+\.\d+\.\d+", stdout)
|
596 | 597 | if m: |
|
597 | 598 | ver = StrictVersion(m.group(0)) |
|
598 | 599 | else: |
|
599 | 600 | ver = StrictVersion('0.0.0') |
|
600 | 601 | |
|
601 | 602 | req_ver = StrictVersion('1.7.4') |
|
602 | 603 | |
|
603 | 604 | log.debug('Git executable: "%s" version %s detected: %s', |
|
604 | 605 | settings.GIT_EXECUTABLE_PATH, ver, stdout) |
|
605 | 606 | if stderr: |
|
606 | 607 | log.warning('Error detecting git version: %r', stderr) |
|
607 | 608 | elif ver < req_ver: |
|
608 | 609 | log.warning('Kallithea detected git version %s, which is too old ' |
|
609 | 610 | 'for the system to function properly. ' |
|
610 | 611 | 'Please upgrade to version %s or later.', ver, req_ver)
|
611 | 612 | return ver |
|
612 | 613 | |
|
613 | 614 | |
|
614 | 615 | #=============================================================================== |
|
615 | 616 | # CACHE RELATED METHODS |
|
616 | 617 | #=============================================================================== |
|
617 | 618 | |
|
618 | 619 | # set cache regions for beaker so celery can utilise it |
|
619 | 620 | def setup_cache_regions(settings): |
|
620 | 621 | # Create dict with just beaker cache configs with prefix stripped |
|
621 | 622 | cache_settings = {'regions': None} |
|
622 | 623 | prefix = 'beaker.cache.' |
|
623 | 624 | for key in settings: |
|
624 | 625 | if key.startswith(prefix): |
|
625 | 626 | name = key[len(prefix):] |
|
626 | 627 | cache_settings[name] = settings[key] |
|
627 | 628 | # Find all regions, apply defaults, and apply to beaker |
|
628 | 629 | if cache_settings['regions']: |
|
629 | 630 | for region in cache_settings['regions'].split(','): |
|
630 | 631 | region = region.strip() |
|
631 | 632 | prefix = region + '.' |
|
632 | 633 | region_settings = {} |
|
633 | 634 | for key in cache_settings: |
|
634 | 635 | if key.startswith(prefix): |
|
635 | 636 | name = key[len(prefix):] |
|
636 | 637 | region_settings[name] = cache_settings[key] |
|
637 | 638 | region_settings.setdefault('expire', |
|
638 | 639 | cache_settings.get('expire', '60')) |
|
639 | 640 | region_settings.setdefault('lock_dir', |
|
640 | 641 | cache_settings.get('lock_dir')) |
|
641 | 642 | region_settings.setdefault('data_dir', |
|
642 | 643 | cache_settings.get('data_dir')) |
|
643 | 644 | region_settings.setdefault('type', |
|
644 | 645 | cache_settings.get('type', 'memory')) |
|
645 | 646 | beaker.cache.cache_regions[region] = region_settings |
|
646 | 647 | |
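For example, the beaker.cache.* keys from the ini file shown earlier map to beaker regions like this (a sketch with a single region):

    import beaker.cache

    settings = {
        'beaker.cache.regions': 'short_term',
        'beaker.cache.short_term.type': 'memory',
        'beaker.cache.short_term.expire': '60',
    }
    setup_cache_regions(settings)
    # the region is now registered with beaker; lock_dir/data_dir fall back
    # to the global beaker.cache.* values (absent here, hence None)
    assert beaker.cache.cache_regions['short_term']['type'] == 'memory'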
|
647 | 648 | |
|
648 | 649 | def conditional_cache(region, prefix, condition, func): |
|
649 | 650 | """ |
|
650 | 651 | |
|
651 | 652 | Conditional caching function, use like::

652 | 653 | def _c(arg):

653 | 654 | # heavy computation function

654 | 655 | return data

655 | 656 | 

656 | 657 | # depending on the condition, the computation is wrapped in a cache or not

657 | 658 | compute = conditional_cache('short_term', 'cache_desc', condition=True, func=_c)

658 | 659 | return compute(arg)
|
659 | 660 | |
|
660 | 661 | :param region: name of cache region |
|
661 | 662 | :param prefix: cache region prefix |
|
662 | 663 | :param condition: whether caching should be applied; if True, results are cached
|
663 | 664 | :param func: wrapped heavy function to compute |
|
664 | 665 | |
|
665 | 666 | """ |
|
666 | 667 | wrapped = func |
|
667 | 668 | if condition: |
|
668 | 669 | log.debug('conditional_cache: True, wrapping call of '

669 | 670 | 'func: %s into %s region cache', func, region)
|
670 | 671 | wrapped = _cache_decorate((prefix,), None, None, region)(func) |
|
671 | 672 | |
|
672 | 673 | return wrapped |
@@ -1,345 +1,345 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.model.notification |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Model for notifications |
|
19 | 19 | |
|
20 | 20 | |
|
21 | 21 | This file was forked by the Kallithea project in July 2014. |
|
22 | 22 | Original author and date, and relevant copyright and licensing information is below: |
|
23 | 23 | :created_on: Nov 20, 2011 |
|
24 | 24 | :author: marcink |
|
25 | 25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
26 | 26 | :license: GPLv3, see LICENSE.md for more details. |
|
27 | 27 | """ |
|
28 | 28 | |
|
29 | 29 | import logging |
|
30 | 30 | import traceback |
|
31 | 31 | |
|
32 | from tg import tmpl_context as c | |
|
32 | from tg import tmpl_context as c, app_globals | |
|
33 | 33 | from tg.i18n import ugettext as _ |
|
34 | 34 | from sqlalchemy.orm import joinedload, subqueryload |
|
35 | 35 | |
|
36 | 36 | import kallithea |
|
37 | 37 | from kallithea.lib import helpers as h |
|
38 | 38 | from kallithea.lib.utils2 import safe_unicode |
|
39 | 39 | from kallithea.model.db import Notification, User, UserNotification |
|
40 | 40 | from kallithea.model.meta import Session |
|
41 | 41 | |
|
42 | 42 | log = logging.getLogger(__name__) |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | class NotificationModel(object): |
|
46 | 46 | |
|
47 | 47 | def create(self, created_by, subject, body, recipients=None, |
|
48 | 48 | type_=Notification.TYPE_MESSAGE, with_email=True, |
|
49 | 49 | email_kwargs=None, repo_name=None): |
|
50 | 50 | """ |
|
51 | 51 | |
|
52 | 52 | Creates notification of given type |
|
53 | 53 | |
|
54 | 54 | :param created_by: int, str or User instance. User who created this |
|
55 | 55 | notification |
|
56 | 56 | :param subject: |
|
57 | 57 | :param body: |
|
58 | 58 | :param recipients: list of int, str or User objects; when None |

59 | 59 | is given, send to all admins |
|
60 | 60 | :param type_: type of notification |
|
61 | 61 | :param with_email: send email with this notification |
|
62 | 62 | :param email_kwargs: additional dict to pass as args to email template |
|
63 | 63 | """ |
|
64 | 64 | from kallithea.lib.celerylib import tasks |
|
65 | 65 | email_kwargs = email_kwargs or {} |
|
66 | 66 | if recipients and not getattr(recipients, '__iter__', False): |
|
67 | 67 | raise Exception('recipients must be a list or iterable') |
|
68 | 68 | |
|
69 | 69 | created_by_obj = User.guess_instance(created_by) |
|
70 | 70 | |
|
71 | 71 | recipients_objs = [] |
|
72 | 72 | if recipients: |
|
73 | 73 | for u in recipients: |
|
74 | 74 | obj = User.guess_instance(u) |
|
75 | 75 | if obj is not None: |
|
76 | 76 | recipients_objs.append(obj) |
|
77 | 77 | else: |
|
78 | 78 | # TODO: inform user that requested operation couldn't be completed |
|
79 | 79 | log.error('cannot email unknown user %r', u) |
|
80 | 80 | recipients_objs = set(recipients_objs) |
|
81 | 81 | log.debug('sending notifications %s to %s', |
|
82 | 82 | type_, recipients_objs |
|
83 | 83 | ) |
|
84 | 84 | elif recipients is None: |
|
85 | 85 | # empty recipients means to all admins |
|
86 | 86 | recipients_objs = User.query().filter(User.admin == True).all() |
|
87 | 87 | log.debug('sending notifications %s to admins: %s', |
|
88 | 88 | type_, recipients_objs |
|
89 | 89 | ) |
|
90 | 90 | #else: silently skip notification mails? |
|
91 | 91 | |
|
92 | 92 | # TODO: inform users who are notified |
|
93 | 93 | notif = Notification.create( |
|
94 | 94 | created_by=created_by_obj, subject=subject, |
|
95 | 95 | body=body, recipients=recipients_objs, type_=type_ |
|
96 | 96 | ) |
|
97 | 97 | |
|
98 | 98 | if not with_email: |
|
99 | 99 | return notif |
|
100 | 100 | |
|
101 | 101 | #don't send email to person who created this comment |
|
102 | 102 | rec_objs = set(recipients_objs).difference(set([created_by_obj])) |
|
103 | 103 | |
|
104 | 104 | headers = {} |
|
105 | 105 | headers['X-Kallithea-Notification-Type'] = type_ |
|
106 | 106 | if 'threading' in email_kwargs: |
|
107 | 107 | headers['References'] = ' '.join('<%s>' % x for x in email_kwargs['threading']) |
|
108 | 108 | |
|
109 | 109 | # send email with notification to all other participants |
|
110 | 110 | for rec in rec_objs: |
|
111 | 111 | ## this is passed into template |
|
112 | 112 | html_kwargs = { |
|
113 | 113 | 'subject': subject, |
|
114 | 114 | 'body': h.render_w_mentions(body, repo_name), |
|
115 | 115 | 'when': h.fmt_date(notif.created_on), |
|
116 | 116 | 'user': notif.created_by_user.username, |
|
117 | 117 | } |
|
118 | 118 | |
|
119 | 119 | txt_kwargs = { |
|
120 | 120 | 'subject': subject, |
|
121 | 121 | 'body': body, |
|
122 | 122 | 'when': h.fmt_date(notif.created_on), |
|
123 | 123 | 'user': notif.created_by_user.username, |
|
124 | 124 | } |
|
125 | 125 | |
|
126 | 126 | html_kwargs.update(email_kwargs) |
|
127 | 127 | txt_kwargs.update(email_kwargs) |
|
128 | 128 | email_subject = EmailNotificationModel() \ |
|
129 | 129 | .get_email_description(type_, **txt_kwargs) |
|
130 | 130 | email_txt_body = EmailNotificationModel() \ |
|
131 | 131 | .get_email_tmpl(type_, 'txt', **txt_kwargs) |
|
132 | 132 | email_html_body = EmailNotificationModel() \ |
|
133 | 133 | .get_email_tmpl(type_, 'html', **html_kwargs) |
|
134 | 134 | |
|
135 | 135 | tasks.send_email([rec.email], email_subject, email_txt_body, |
|
136 | 136 | email_html_body, headers, author=created_by_obj) |
|
137 | 137 | |
|
138 | 138 | return notif |
|
139 | 139 | |
|
140 | 140 | def delete(self, user, notification): |
|
141 | 141 | # we don't want to remove actual notification just the assignment |
|
142 | 142 | try: |
|
143 | 143 | notification = Notification.guess_instance(notification) |
|
144 | 144 | user = User.guess_instance(user) |
|
145 | 145 | if notification and user: |
|
146 | 146 | obj = UserNotification.query() \ |
|
147 | 147 | .filter(UserNotification.user == user) \ |
|
148 | 148 | .filter(UserNotification.notification |
|
149 | 149 | == notification) \ |
|
150 | 150 | .one() |
|
151 | 151 | Session().delete(obj) |
|
152 | 152 | return True |
|
153 | 153 | except Exception: |
|
154 | 154 | log.error(traceback.format_exc()) |
|
155 | 155 | raise |
|
156 | 156 | |
|
157 | 157 | def query_for_user(self, user, filter_=None): |
|
158 | 158 | """ |
|
159 | 159 | Get notifications for given user, filter them if filter dict is given |
|
160 | 160 | |
|
161 | 161 | :param user: |
|
162 | 162 | :param filter_: |
|
163 | 163 | """ |
|
164 | 164 | user = User.guess_instance(user) |
|
165 | 165 | |
|
166 | 166 | q = UserNotification.query() \ |
|
167 | 167 | .filter(UserNotification.user == user) \ |
|
168 | 168 | .join((Notification, UserNotification.notification_id == |
|
169 | 169 | Notification.notification_id)) \ |
|
170 | 170 | .options(joinedload('notification')) \ |
|
171 | 171 | .options(subqueryload('notification.created_by_user')) \ |
|
172 | 172 | .order_by(Notification.created_on.desc()) |
|
173 | 173 | |
|
174 | 174 | if filter_: |
|
175 | 175 | q = q.filter(Notification.type_.in_(filter_)) |
|
176 | 176 | |
|
177 | 177 | return q |
|
178 | 178 | |
|
179 | 179 | def mark_read(self, user, notification): |
|
180 | 180 | try: |
|
181 | 181 | notification = Notification.guess_instance(notification) |
|
182 | 182 | user = User.guess_instance(user) |
|
183 | 183 | if notification and user: |
|
184 | 184 | obj = UserNotification.query() \ |
|
185 | 185 | .filter(UserNotification.user == user) \ |
|
186 | 186 | .filter(UserNotification.notification |
|
187 | 187 | == notification) \ |
|
188 | 188 | .one() |
|
189 | 189 | obj.read = True |
|
190 | 190 | return True |
|
191 | 191 | except Exception: |
|
192 | 192 | log.error(traceback.format_exc()) |
|
193 | 193 | raise |
|
194 | 194 | |
|
195 | 195 | def mark_all_read_for_user(self, user, filter_=None): |
|
196 | 196 | user = User.guess_instance(user) |
|
197 | 197 | q = UserNotification.query() \ |
|
198 | 198 | .filter(UserNotification.user == user) \ |
|
199 | 199 | .filter(UserNotification.read == False) \ |
|
200 | 200 | .join((Notification, UserNotification.notification_id == |
|
201 | 201 | Notification.notification_id)) |
|
202 | 202 | if filter_: |
|
203 | 203 | q = q.filter(Notification.type_.in_(filter_)) |
|
204 | 204 | |
|
205 | 205 | # this is a little inefficient but sqlalchemy doesn't support |
|
206 | 206 | # update on joined tables :( |
|
207 | 207 | for obj in q: |
|
208 | 208 | obj.read = True |
|
209 | 209 | |
|
210 | 210 | def get_unread_cnt_for_user(self, user): |
|
211 | 211 | user = User.guess_instance(user) |
|
212 | 212 | return UserNotification.query() \ |
|
213 | 213 | .filter(UserNotification.read == False) \ |
|
214 | 214 | .filter(UserNotification.user == user).count() |
|
215 | 215 | |
|
216 | 216 | def get_unread_for_user(self, user): |
|
217 | 217 | user = User.guess_instance(user) |
|
218 | 218 | return [x.notification for x in UserNotification.query() \ |
|
219 | 219 | .filter(UserNotification.read == False) \ |
|
220 | 220 | .filter(UserNotification.user == user).all()] |
|
221 | 221 | |
|
222 | 222 | def get_user_notification(self, user, notification): |
|
223 | 223 | user = User.guess_instance(user) |
|
224 | 224 | notification = Notification.guess_instance(notification) |
|
225 | 225 | |
|
226 | 226 | return UserNotification.query() \ |
|
227 | 227 | .filter(UserNotification.notification == notification) \ |
|
228 | 228 | .filter(UserNotification.user == user).scalar() |
|
229 | 229 | |
|
230 | 230 | def make_description(self, notification, show_age=True): |
|
231 | 231 | """ |
|
232 | 232 | Creates a human readable description based on properties |
|
233 | 233 | of notification object |
|
234 | 234 | """ |
|
235 | 235 | #alias |
|
236 | 236 | _n = notification |
|
237 | 237 | |
|
238 | 238 | if show_age: |
|
239 | 239 | return { |
|
240 | 240 | _n.TYPE_CHANGESET_COMMENT: _('%(user)s commented on changeset %(age)s'), |
|
241 | 241 | _n.TYPE_MESSAGE: _('%(user)s sent message %(age)s'), |
|
242 | 242 | _n.TYPE_MENTION: _('%(user)s mentioned you %(age)s'), |
|
243 | 243 | _n.TYPE_REGISTRATION: _('%(user)s registered in Kallithea %(age)s'), |
|
244 | 244 | _n.TYPE_PULL_REQUEST: _('%(user)s opened new pull request %(age)s'), |
|
245 | 245 | _n.TYPE_PULL_REQUEST_COMMENT: _('%(user)s commented on pull request %(age)s'), |
|
246 | 246 | }[notification.type_] % dict( |
|
247 | 247 | user=notification.created_by_user.username, |
|
248 | 248 | age=h.age(notification.created_on), |
|
249 | 249 | ) |
|
250 | 250 | else: |
|
251 | 251 | return { |
|
252 | 252 | _n.TYPE_CHANGESET_COMMENT: _('%(user)s commented on changeset at %(when)s'), |
|
253 | 253 | _n.TYPE_MESSAGE: _('%(user)s sent message at %(when)s'), |
|
254 | 254 | _n.TYPE_MENTION: _('%(user)s mentioned you at %(when)s'), |
|
255 | 255 | _n.TYPE_REGISTRATION: _('%(user)s registered in Kallithea at %(when)s'), |
|
256 | 256 | _n.TYPE_PULL_REQUEST: _('%(user)s opened new pull request at %(when)s'), |
|
257 | 257 | _n.TYPE_PULL_REQUEST_COMMENT: _('%(user)s commented on pull request at %(when)s'), |
|
258 | 258 | }[notification.type_] % dict( |
|
259 | 259 | user=notification.created_by_user.username, |
|
260 | 260 | when=h.fmt_date(notification.created_on), |
|
261 | 261 | ) |
|
262 | 262 | |
|
263 | 263 | |
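Pulling the create() flow above together, a hedged usage sketch; the user names, repository name and message id below are placeholders, not values from this changeset:

    # Notify two users about a changeset comment. recipients may hold
    # usernames, user ids or User instances; recipients=None would
    # notify all admins instead.
    NotificationModel().create(
        created_by='test_admin',
        subject='Comment on changeset',
        body='Please have a look at revision abc123.',
        recipients=['test_regular', 'test_regular2'],
        type_=Notification.TYPE_CHANGESET_COMMENT,
        with_email=True,
        email_kwargs={'threading': ['some-msg-id@example.com']},  # becomes the References header
        repo_name='vcs_test_hg',
    )
    Session().commit()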
|
264 | 264 | class EmailNotificationModel(object): |
|
265 | 265 | |
|
266 | 266 | TYPE_CHANGESET_COMMENT = Notification.TYPE_CHANGESET_COMMENT |
|
267 | 267 | TYPE_MESSAGE = Notification.TYPE_MESSAGE # only used for testing |
|
268 | 268 | # Notification.TYPE_MENTION is not used |
|
269 | 269 | TYPE_PASSWORD_RESET = 'password_link' |
|
270 | 270 | TYPE_REGISTRATION = Notification.TYPE_REGISTRATION |
|
271 | 271 | TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST |
|
272 | 272 | TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT |
|
273 | 273 | TYPE_DEFAULT = 'default' |
|
274 | 274 | |
|
275 | 275 | def __init__(self): |
|
276 | 276 | super(EmailNotificationModel, self).__init__() |
|
277 | self._template_root = kallithea.CONFIG[' |

278 | self._tmpl_lookup = |

277 | self._template_root = kallithea.CONFIG['paths']['templates'][0] |

278 | self._tmpl_lookup = app_globals.mako_lookup |
|
279 | 279 | self.email_types = { |
|
280 | 280 | self.TYPE_CHANGESET_COMMENT: 'changeset_comment', |
|
281 | 281 | self.TYPE_PASSWORD_RESET: 'password_reset', |
|
282 | 282 | self.TYPE_REGISTRATION: 'registration', |
|
283 | 283 | self.TYPE_DEFAULT: 'default', |
|
284 | 284 | self.TYPE_PULL_REQUEST: 'pull_request', |
|
285 | 285 | self.TYPE_PULL_REQUEST_COMMENT: 'pull_request_comment', |
|
286 | 286 | } |
|
287 | 287 | self._subj_map = { |
|
288 | 288 | self.TYPE_CHANGESET_COMMENT: _('[Comment] %(repo_name)s changeset %(short_id)s "%(message_short)s" on %(branch)s'), |
|
289 | 289 | self.TYPE_MESSAGE: 'Test Message', |
|
290 | 290 | # self.TYPE_PASSWORD_RESET |
|
291 | 291 | self.TYPE_REGISTRATION: _('New user %(new_username)s registered'), |
|
292 | 292 | # self.TYPE_DEFAULT |
|
293 | 293 | self.TYPE_PULL_REQUEST: _('[Review] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'), |
|
294 | 294 | self.TYPE_PULL_REQUEST_COMMENT: _('[Comment] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'), |
|
295 | 295 | } |
|
296 | 296 | |
|
297 | 297 | def get_email_description(self, type_, **kwargs): |
|
298 | 298 | """ |
|
299 | 299 | return subject for email based on given type |
|
300 | 300 | """ |
|
301 | 301 | tmpl = self._subj_map[type_] |
|
302 | 302 | try: |
|
303 | 303 | subj = tmpl % kwargs |
|
304 | 304 | except KeyError as e: |
|
305 | 305 | log.error('error generating email subject for %r from %s: %s', type_, ','.join(self._subj_map.keys()), e) |
|
306 | 306 | raise |
|
307 | 307 | l = [safe_unicode(x) for x in [kwargs.get('status_change'), kwargs.get('closing_pr') and _('Closing')] if x] |
|
308 | 308 | if l: |
|
309 | 309 | if subj.startswith('['): |
|
310 | 310 | subj = '[' + ', '.join(l) + ': ' + subj[1:] |
|
311 | 311 | else: |
|
312 | 312 | subj = '[' + ', '.join(l) + '] ' + subj |
|
313 | 313 | return subj |
|
314 | 314 | |
|
315 | 315 | def get_email_tmpl(self, type_, content_type, **kwargs): |
|
316 | 316 | """ |
|
317 | 317 | return generated template for email based on given type |
|
318 | 318 | """ |
|
319 | 319 | |
|
320 | 320 | base = 'email_templates/' + self.email_types.get(type_, self.email_types[self.TYPE_DEFAULT]) + '.' + content_type |
|
321 | 321 | email_template = self._tmpl_lookup.get_template(base) |
|
322 | 322 | # translator and helpers inject |
|
323 | 323 | _kwargs = {'_': _, |
|
324 | 324 | 'h': h, |
|
325 | 325 | 'c': c} |
|
326 | 326 | _kwargs.update(kwargs) |
|
327 | 327 | if content_type == 'html': |
|
328 | 328 | _kwargs.update({ |
|
329 | 329 | "color_text": "#202020", |
|
330 | 330 | "color_emph": "#395fa0", |
|
331 | 331 | "color_link": "#395fa0", |
|
332 | 332 | "color_border": "#ddd", |
|
333 | 333 | "color_background_grey": "#f9f9f9", |
|
334 | 334 | "color_button": "#395fa0", |
|
335 | 335 | "monospace_style": "font-family:Lucida Console,Consolas,Monaco,Inconsolata,Liberation Mono,monospace", |
|
336 | 336 | "sans_style": "font-family:Helvetica,Arial,sans-serif", |
|
337 | 337 | }) |
|
338 | 338 | _kwargs.update({ |
|
339 | 339 | "default_style": "%(sans_style)s;font-weight:200;font-size:14px;line-height:17px;color:%(color_text)s" % _kwargs, |
|
340 | 340 | "comment_style": "%(monospace_style)s;white-space:pre-wrap" % _kwargs, |
|
341 | 341 | "emph_style": "font-weight:600;color:%(color_emph)s" % _kwargs, |
|
342 | 342 | }) |
|
343 | 343 | |
|
344 | 344 | log.debug('rendering tmpl %s with kwargs %s', base, _kwargs) |
|
345 | 345 | return email_template.render(**_kwargs) |
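As a worked example of the subject-prefix logic in get_email_description, with hypothetical keyword values:

    m = EmailNotificationModel()
    subj = m.get_email_description(
        m.TYPE_PULL_REQUEST_COMMENT,
        repo_name='vcs_test_hg',
        pr_nice_id='#7',
        pr_title_short='Fix encoding',
        pr_source_branch='default',
        pr_owner_username='test_regular',
        status_change='Approved',  # picked up for the subject prefix
        closing_pr=True,           # adds 'Closing' to the prefix
    )
    # Because the mapped subject starts with '[', the prefix is merged in
    # place, yielding roughly:
    # [Approved, Closing: Comment] vcs_test_hg PR #7 "Fix encoding" from default by test_regular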
@@ -1,740 +1,740 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.model.repo |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Repository model for kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Jun 5, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | |
|
27 | 27 | """ |
|
28 | 28 | |
|
29 | 29 | import os |
|
30 | 30 | import shutil |
|
31 | 31 | import logging |
|
32 | 32 | import traceback |
|
33 | 33 | from datetime import datetime |
|
34 | 34 | from sqlalchemy.orm import subqueryload |
|
35 | 35 | |
|
36 | 36 | from kallithea.lib.utils import make_ui |
|
37 | 37 | from kallithea.lib.vcs.backends import get_backend |
|
38 | 38 | from kallithea.lib.utils2 import LazyProperty, safe_str, safe_unicode, \ |
|
39 | 39 | remove_prefix, obfuscate_url_pw, get_current_authuser |
|
40 | 40 | from kallithea.lib.caching_query import FromCache |
|
41 | 41 | from kallithea.lib.hooks import log_delete_repository |
|
42 | 42 | |
|
43 | 43 | from kallithea.model.db import Repository, UserRepoToPerm, UserGroupRepoToPerm, \ |
|
44 | 44 | UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission, Session, \ |
|
45 | 45 | Statistics, UserGroup, Ui, RepoGroup, RepositoryField |
|
46 | 46 | |
|
47 | 47 | from kallithea.lib import helpers as h |
|
48 | 48 | from kallithea.lib.auth import HasRepoPermissionLevel, HasUserGroupPermissionLevel |
|
49 | 49 | from kallithea.lib.exceptions import AttachedForksError |
|
50 | 50 | from kallithea.model.scm import UserGroupList |
|
51 | 51 | |
|
52 | 52 | log = logging.getLogger(__name__) |
|
53 | 53 | |
|
54 | 54 | |
|
55 | 55 | class RepoModel(object): |
|
56 | 56 | |
|
57 | 57 | URL_SEPARATOR = Repository.url_sep() |
|
58 | 58 | |
|
59 | 59 | def _create_default_perms(self, repository, private): |
|
60 | 60 | # create default permission |
|
61 | 61 | default = 'repository.read' |
|
62 | 62 | def_user = User.get_default_user() |
|
63 | 63 | for p in def_user.user_perms: |
|
64 | 64 | if p.permission.permission_name.startswith('repository.'): |
|
65 | 65 | default = p.permission.permission_name |
|
66 | 66 | break |
|
67 | 67 | |
|
68 | 68 | default_perm = 'repository.none' if private else default |
|
69 | 69 | |
|
70 | 70 | repo_to_perm = UserRepoToPerm() |
|
71 | 71 | repo_to_perm.permission = Permission.get_by_key(default_perm) |
|
72 | 72 | |
|
73 | 73 | repo_to_perm.repository = repository |
|
74 | 74 | repo_to_perm.user_id = def_user.user_id |
|
75 | 75 | Session().add(repo_to_perm) |
|
76 | 76 | |
|
77 | 77 | return repo_to_perm |
|
78 | 78 | |
|
79 | 79 | @LazyProperty |
|
80 | 80 | def repos_path(self): |
|
81 | 81 | """ |
|
82 | 82 | Gets the repositories root path from database |
|
83 | 83 | """ |
|
84 | 84 | |
|
85 | 85 | q = Ui.query().filter(Ui.ui_key == '/').one() |
|
86 | 86 | return q.ui_value |
|
87 | 87 | |
|
88 | 88 | def get(self, repo_id, cache=False): |
|
89 | 89 | repo = Repository.query() \ |
|
90 | 90 | .filter(Repository.repo_id == repo_id) |
|
91 | 91 | |
|
92 | 92 | if cache: |
|
93 | 93 | repo = repo.options(FromCache("sql_cache_short", |
|
94 | 94 | "get_repo_%s" % repo_id)) |
|
95 | 95 | return repo.scalar() |
|
96 | 96 | |
|
97 | 97 | def get_repo(self, repository): |
|
98 | 98 | return Repository.guess_instance(repository) |
|
99 | 99 | |
|
100 | 100 | def get_by_repo_name(self, repo_name, cache=False): |
|
101 | 101 | repo = Repository.query() \ |
|
102 | 102 | .filter(Repository.repo_name == repo_name) |
|
103 | 103 | |
|
104 | 104 | if cache: |
|
105 | 105 | repo = repo.options(FromCache("sql_cache_short", |
|
106 | 106 | "get_repo_%s" % repo_name)) |
|
107 | 107 | return repo.scalar() |
|
108 | 108 | |
|
109 | 109 | def get_all_user_repos(self, user): |
|
110 | 110 | """ |
|
111 | 111 | Gets all repositories to which the user has at least read access |
|
112 | 112 | |
|
113 | 113 | :param user: |
|
114 | 114 | """ |
|
115 | 115 | from kallithea.lib.auth import AuthUser |
|
116 | 116 | user = User.guess_instance(user) |
|
117 | 117 | repos = AuthUser(dbuser=user).permissions['repositories'] |
|
118 | 118 | access_check = lambda r: r[1] in ['repository.read', |
|
119 | 119 | 'repository.write', |
|
120 | 120 | 'repository.admin'] |
|
121 | 121 | repos = [x[0] for x in filter(access_check, repos.items())] |
|
122 | 122 | return Repository.query().filter(Repository.repo_name.in_(repos)) |
|
123 | 123 | |
|
124 | 124 | def get_users_js(self): |
|
125 | 125 | users = User.query() \ |
|
126 | 126 | .filter(User.active == True) \ |
|
127 | 127 | .order_by(User.name, User.lastname) \ |
|
128 | 128 | .all() |
|
129 | 129 | return [ |
|
130 | 130 | { |
|
131 | 131 | 'id': u.user_id, |
|
132 | 132 | 'fname': h.escape(u.name), |
|
133 | 133 | 'lname': h.escape(u.lastname), |
|
134 | 134 | 'nname': u.username, |
|
135 | 135 | 'gravatar_lnk': h.gravatar_url(u.email, size=28, default='default'), |
|
136 | 136 | 'gravatar_size': 14, |
|
137 | 137 | } for u in users] |
|
138 | 138 | |
|
139 | 139 | def get_user_groups_js(self): |
|
140 | 140 | user_groups = UserGroup.query() \ |
|
141 | 141 | .filter(UserGroup.users_group_active == True) \ |
|
142 | 142 | .order_by(UserGroup.users_group_name) \ |
|
143 | 143 | .options(subqueryload(UserGroup.members)) \ |
|
144 | 144 | .all() |
|
145 | 145 | user_groups = UserGroupList(user_groups, perm_level='read') |
|
146 | 146 | return [ |
|
147 | 147 | { |
|
148 | 148 | 'id': gr.users_group_id, |
|
149 | 149 | 'grname': gr.users_group_name, |
|
150 | 150 | 'grmembers': len(gr.members), |
|
151 | 151 | } for gr in user_groups] |
|
152 | 152 | |
|
153 | 153 | @classmethod |
|
154 | 154 | def _render_datatable(cls, tmpl, *args, **kwargs): |
|
155 | 155 | import kallithea |
|
156 | from tg import tmpl_context as c, request | |
|
156 | from tg import tmpl_context as c, request, app_globals | |
|
157 | 157 | from tg.i18n import ugettext as _ |
|
158 | 158 | |
|
159 | _tmpl_lookup = |

159 | _tmpl_lookup = app_globals.mako_lookup |
|
160 | 160 | template = _tmpl_lookup.get_template('data_table/_dt_elements.html') |
|
161 | 161 | |
|
162 | 162 | tmpl = template.get_def(tmpl) |
|
163 | 163 | kwargs.update(dict(_=_, h=h, c=c, request=request)) |
|
164 | 164 | return tmpl.render(*args, **kwargs) |
|
165 | 165 | |
|
166 | 166 | def get_repos_as_dict(self, repos_list=None, admin=False, perm_check=True, |
|
167 | 167 | super_user_actions=False, short_name=False): |
|
168 | 168 | _render = self._render_datatable |
|
169 | 169 | from tg import tmpl_context as c |
|
170 | 170 | |
|
171 | 171 | def repo_lnk(name, rtype, rstate, private, fork_of): |
|
172 | 172 | return _render('repo_name', name, rtype, rstate, private, fork_of, |
|
173 | 173 | short_name=short_name, admin=False) |
|
174 | 174 | |
|
175 | 175 | def last_change(last_change): |
|
176 | 176 | return _render("last_change", last_change) |
|
177 | 177 | |
|
178 | 178 | def rss_lnk(repo_name): |
|
179 | 179 | return _render("rss", repo_name) |
|
180 | 180 | |
|
181 | 181 | def atom_lnk(repo_name): |
|
182 | 182 | return _render("atom", repo_name) |
|
183 | 183 | |
|
184 | 184 | def last_rev(repo_name, cs_cache): |
|
185 | 185 | return _render('revision', repo_name, cs_cache.get('revision'), |
|
186 | 186 | cs_cache.get('raw_id'), cs_cache.get('author'), |
|
187 | 187 | cs_cache.get('message')) |
|
188 | 188 | |
|
189 | 189 | def desc(desc): |
|
190 | 190 | return h.urlify_text(desc, truncate=80, stylize=c.visual.stylify_metatags) |
|
191 | 191 | |
|
192 | 192 | def state(repo_state): |
|
193 | 193 | return _render("repo_state", repo_state) |
|
194 | 194 | |
|
195 | 195 | def repo_actions(repo_name): |
|
196 | 196 | return _render('repo_actions', repo_name, super_user_actions) |
|
197 | 197 | |
|
198 | 198 | def owner_actions(owner_id, username): |
|
199 | 199 | return _render('user_name', owner_id, username) |
|
200 | 200 | |
|
201 | 201 | repos_data = [] |
|
202 | 202 | for repo in repos_list: |
|
203 | 203 | if perm_check: |
|
204 | 204 | # check permission at this level |
|
205 | 205 | if not HasRepoPermissionLevel('read')(repo.repo_name, 'get_repos_as_dict check'): |
|
206 | 206 | continue |
|
207 | 207 | cs_cache = repo.changeset_cache |
|
208 | 208 | row = { |
|
209 | 209 | "raw_name": repo.repo_name, |
|
210 | 210 | "just_name": repo.just_name, |
|
211 | 211 | "name": repo_lnk(repo.repo_name, repo.repo_type, |
|
212 | 212 | repo.repo_state, repo.private, repo.fork), |
|
213 | 213 | "last_change_iso": repo.last_db_change.isoformat(), |
|
214 | 214 | "last_change": last_change(repo.last_db_change), |
|
215 | 215 | "last_changeset": last_rev(repo.repo_name, cs_cache), |
|
216 | 216 | "last_rev_raw": cs_cache.get('revision'), |
|
217 | 217 | "desc": desc(repo.description), |
|
218 | 218 | "owner": h.person(repo.owner), |
|
219 | 219 | "state": state(repo.repo_state), |
|
220 | 220 | "rss": rss_lnk(repo.repo_name), |
|
221 | 221 | "atom": atom_lnk(repo.repo_name), |
|
222 | 222 | |
|
223 | 223 | } |
|
224 | 224 | if admin: |
|
225 | 225 | row.update({ |
|
226 | 226 | "action": repo_actions(repo.repo_name), |
|
227 | 227 | "owner": owner_actions(repo.owner_id, |
|
228 | 228 | h.person(repo.owner)) |
|
229 | 229 | }) |
|
230 | 230 | repos_data.append(row) |
|
231 | 231 | |
|
232 | 232 | return { |
|
233 | 233 | "totalRecords": len(repos_list), |
|
234 | 234 | "startIndex": 0, |
|
235 | 235 | "sort": "name", |
|
236 | 236 | "dir": "asc", |
|
237 | 237 | "records": repos_data |
|
238 | 238 | } |
|
239 | 239 | |
|
240 | 240 | def _get_defaults(self, repo_name): |
|
241 | 241 | """ |
|
242 | 242 | Gets information about repository, and returns a dict for |
|
243 | 243 | usage in forms |
|
244 | 244 | |
|
245 | 245 | :param repo_name: |
|
246 | 246 | """ |
|
247 | 247 | |
|
248 | 248 | repo_info = Repository.get_by_repo_name(repo_name) |
|
249 | 249 | |
|
250 | 250 | if repo_info is None: |
|
251 | 251 | return None |
|
252 | 252 | |
|
253 | 253 | defaults = repo_info.get_dict() |
|
254 | 254 | defaults['repo_name'] = repo_info.just_name |
|
255 | 255 | defaults['repo_group'] = repo_info.group_id |
|
256 | 256 | |
|
257 | 257 | for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'), |
|
258 | 258 | (1, 'repo_description'), (1, 'repo_enable_locking'), |
|
259 | 259 | (1, 'repo_landing_rev'), (0, 'clone_uri'), |
|
260 | 260 | (1, 'repo_private'), (1, 'repo_enable_statistics')]: |
|
261 | 261 | attr = k |
|
262 | 262 | if strip: |
|
263 | 263 | attr = remove_prefix(k, 'repo_') |
|
264 | 264 | |
|
265 | 265 | val = defaults[attr] |
|
266 | 266 | if k == 'repo_landing_rev': |
|
267 | 267 | val = ':'.join(defaults[attr]) |
|
268 | 268 | defaults[k] = val |
|
269 | 269 | if k == 'clone_uri': |
|
270 | 270 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden |
|
271 | 271 | |
|
272 | 272 | # fill owner |
|
273 | 273 | if repo_info.owner: |
|
274 | 274 | defaults.update({'owner': repo_info.owner.username}) |
|
275 | 275 | else: |
|
276 | 276 | replacement_user = User.query().filter(User.admin == |
|
277 | 277 | True).first().username |
|
278 | 278 | defaults.update({'owner': replacement_user}) |
|
279 | 279 | |
|
280 | 280 | # fill repository users |
|
281 | 281 | for p in repo_info.repo_to_perm: |
|
282 | 282 | defaults.update({'u_perm_%s' % p.user.username: |
|
283 | 283 | p.permission.permission_name}) |
|
284 | 284 | |
|
285 | 285 | # fill repository groups |
|
286 | 286 | for p in repo_info.users_group_to_perm: |
|
287 | 287 | defaults.update({'g_perm_%s' % p.users_group.users_group_name: |
|
288 | 288 | p.permission.permission_name}) |
|
289 | 289 | |
|
290 | 290 | return defaults |
|
291 | 291 | |
|
292 | 292 | def update(self, repo, **kwargs): |
|
293 | 293 | try: |
|
294 | 294 | cur_repo = Repository.guess_instance(repo) |
|
295 | 295 | org_repo_name = cur_repo.repo_name |
|
296 | 296 | if 'owner' in kwargs: |
|
297 | 297 | cur_repo.owner = User.get_by_username(kwargs['owner']) |
|
298 | 298 | |
|
299 | 299 | if 'repo_group' in kwargs: |
|
300 | 300 | cur_repo.group = RepoGroup.get(kwargs['repo_group']) |
|
301 | 301 | cur_repo.repo_name = cur_repo.get_new_name(cur_repo.just_name) |
|
302 | 302 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) |
|
303 | 303 | for k in ['repo_enable_downloads', |
|
304 | 304 | 'repo_description', |
|
305 | 305 | 'repo_enable_locking', |
|
306 | 306 | 'repo_landing_rev', |
|
307 | 307 | 'repo_private', |
|
308 | 308 | 'repo_enable_statistics', |
|
309 | 309 | ]: |
|
310 | 310 | if k in kwargs: |
|
311 | 311 | setattr(cur_repo, remove_prefix(k, 'repo_'), kwargs[k]) |
|
312 | 312 | clone_uri = kwargs.get('clone_uri') |
|
313 | 313 | if clone_uri is not None and clone_uri != cur_repo.clone_uri_hidden: |
|
314 | 314 | cur_repo.clone_uri = clone_uri |
|
315 | 315 | |
|
316 | 316 | if 'repo_name' in kwargs: |
|
317 | 317 | cur_repo.repo_name = cur_repo.get_new_name(kwargs['repo_name']) |
|
318 | 318 | |
|
319 | 319 | #if private flag is set, reset default permission to NONE |
|
320 | 320 | if kwargs.get('repo_private'): |
|
321 | 321 | EMPTY_PERM = 'repository.none' |
|
322 | 322 | RepoModel().grant_user_permission( |
|
323 | 323 | repo=cur_repo, user='default', perm=EMPTY_PERM |
|
324 | 324 | ) |
|
325 | 325 | #handle extra fields |
|
326 | 326 | for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), |
|
327 | 327 | kwargs): |
|
328 | 328 | k = RepositoryField.un_prefix_key(field) |
|
329 | 329 | ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo) |
|
330 | 330 | if ex_field: |
|
331 | 331 | ex_field.field_value = kwargs[field] |
|
332 | 332 | |
|
333 | 333 | if org_repo_name != cur_repo.repo_name: |
|
334 | 334 | # rename repository |
|
335 | 335 | self._rename_filesystem_repo(old=org_repo_name, new=cur_repo.repo_name) |
|
336 | 336 | |
|
337 | 337 | return cur_repo |
|
338 | 338 | except Exception: |
|
339 | 339 | log.error(traceback.format_exc()) |
|
340 | 340 | raise |
|
341 | 341 | |
|
342 | 342 | def _create_repo(self, repo_name, repo_type, description, owner, |
|
343 | 343 | private=False, clone_uri=None, repo_group=None, |
|
344 | 344 | landing_rev='rev:tip', fork_of=None, |
|
345 | 345 | copy_fork_permissions=False, enable_statistics=False, |
|
346 | 346 | enable_locking=False, enable_downloads=False, |
|
347 | 347 | copy_group_permissions=False, state=Repository.STATE_PENDING): |
|
348 | 348 | """ |
|
349 | 349 | Create repository inside database with PENDING state. This should only be |
|
350 | 350 | executed by create(), with the exception of importing existing repos. |
|
351 | 351 | |
|
352 | 352 | """ |
|
353 | 353 | from kallithea.model.scm import ScmModel |
|
354 | 354 | |
|
355 | 355 | owner = User.guess_instance(owner) |
|
356 | 356 | fork_of = Repository.guess_instance(fork_of) |
|
357 | 357 | repo_group = RepoGroup.guess_instance(repo_group) |
|
358 | 358 | try: |
|
359 | 359 | repo_name = safe_unicode(repo_name) |
|
360 | 360 | description = safe_unicode(description) |
|
361 | 361 | # repo name is just a name of repository |
|
362 | 362 | # while repo_name_full is a full qualified name that is combined |
|
363 | 363 | # with name and path of group |
|
364 | 364 | repo_name_full = repo_name |
|
365 | 365 | repo_name = repo_name.split(self.URL_SEPARATOR)[-1] |
|
366 | 366 | |
|
367 | 367 | new_repo = Repository() |
|
368 | 368 | new_repo.repo_state = state |
|
369 | 369 | new_repo.enable_statistics = False |
|
370 | 370 | new_repo.repo_name = repo_name_full |
|
371 | 371 | new_repo.repo_type = repo_type |
|
372 | 372 | new_repo.owner = owner |
|
373 | 373 | new_repo.group = repo_group |
|
374 | 374 | new_repo.description = description or repo_name |
|
375 | 375 | new_repo.private = private |
|
376 | 376 | new_repo.clone_uri = clone_uri |
|
377 | 377 | new_repo.landing_rev = landing_rev |
|
378 | 378 | |
|
379 | 379 | new_repo.enable_statistics = enable_statistics |
|
380 | 380 | new_repo.enable_locking = enable_locking |
|
381 | 381 | new_repo.enable_downloads = enable_downloads |
|
382 | 382 | |
|
383 | 383 | if repo_group: |
|
384 | 384 | new_repo.enable_locking = repo_group.enable_locking |
|
385 | 385 | |
|
386 | 386 | if fork_of: |
|
387 | 387 | parent_repo = fork_of |
|
388 | 388 | new_repo.fork = parent_repo |
|
389 | 389 | |
|
390 | 390 | Session().add(new_repo) |
|
391 | 391 | |
|
392 | 392 | if fork_of and copy_fork_permissions: |
|
393 | 393 | repo = fork_of |
|
394 | 394 | user_perms = UserRepoToPerm.query() \ |
|
395 | 395 | .filter(UserRepoToPerm.repository == repo).all() |
|
396 | 396 | group_perms = UserGroupRepoToPerm.query() \ |
|
397 | 397 | .filter(UserGroupRepoToPerm.repository == repo).all() |
|
398 | 398 | |
|
399 | 399 | for perm in user_perms: |
|
400 | 400 | UserRepoToPerm.create(perm.user, new_repo, perm.permission) |
|
401 | 401 | |
|
402 | 402 | for perm in group_perms: |
|
403 | 403 | UserGroupRepoToPerm.create(perm.users_group, new_repo, |
|
404 | 404 | perm.permission) |
|
405 | 405 | |
|
406 | 406 | elif repo_group and copy_group_permissions: |
|
407 | 407 | |
|
408 | 408 | user_perms = UserRepoGroupToPerm.query() \ |
|
409 | 409 | .filter(UserRepoGroupToPerm.group == repo_group).all() |
|
410 | 410 | |
|
411 | 411 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
412 | 412 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() |
|
413 | 413 | |
|
414 | 414 | for perm in user_perms: |
|
415 | 415 | perm_name = perm.permission.permission_name.replace('group.', 'repository.') |
|
416 | 416 | perm_obj = Permission.get_by_key(perm_name) |
|
417 | 417 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) |
|
418 | 418 | |
|
419 | 419 | for perm in group_perms: |
|
420 | 420 | perm_name = perm.permission.permission_name.replace('group.', 'repository.') |
|
421 | 421 | perm_obj = Permission.get_by_key(perm_name) |
|
422 | 422 | UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj) |
|
423 | 423 | |
|
424 | 424 | else: |
|
425 | 425 | self._create_default_perms(new_repo, private) |
|
426 | 426 | |
|
427 | 427 | # now automatically start following this repository as owner |
|
428 | 428 | ScmModel().toggle_following_repo(new_repo.repo_id, owner.user_id) |
|
429 | 429 | # we need to flush here, in order to check if database won't |
|
430 | 430 | # throw any exceptions, create filesystem dirs at the very end |
|
431 | 431 | Session().flush() |
|
432 | 432 | return new_repo |
|
433 | 433 | except Exception: |
|
434 | 434 | log.error(traceback.format_exc()) |
|
435 | 435 | raise |
|
436 | 436 | |
|
437 | 437 | def create(self, form_data, cur_user): |
|
438 | 438 | """ |
|
439 | 439 | Create repository using celery tasks |
|
440 | 440 | |
|
441 | 441 | :param form_data: |
|
442 | 442 | :param cur_user: |
|
443 | 443 | """ |
|
444 | 444 | from kallithea.lib.celerylib import tasks |
|
445 | 445 | return tasks.create_repo(form_data, cur_user) |
|
446 | 446 | |
|
447 | 447 | def _update_permissions(self, repo, perms_new=None, perms_updates=None, |
|
448 | 448 | check_perms=True): |
|
449 | 449 | if not perms_new: |
|
450 | 450 | perms_new = [] |
|
451 | 451 | if not perms_updates: |
|
452 | 452 | perms_updates = [] |
|
453 | 453 | |
|
454 | 454 | # update permissions |
|
455 | 455 | for member, perm, member_type in perms_updates: |
|
456 | 456 | if member_type == 'user': |
|
457 | 457 | # this updates existing one |
|
458 | 458 | self.grant_user_permission( |
|
459 | 459 | repo=repo, user=member, perm=perm |
|
460 | 460 | ) |
|
461 | 461 | else: |
|
462 | 462 | #check if we have permissions to alter this usergroup's access |
|
463 | 463 | if not check_perms or HasUserGroupPermissionLevel('read')(member): |
|
464 | 464 | self.grant_user_group_permission( |
|
465 | 465 | repo=repo, group_name=member, perm=perm |
|
466 | 466 | ) |
|
467 | 467 | # set new permissions |
|
468 | 468 | for member, perm, member_type in perms_new: |
|
469 | 469 | if member_type == 'user': |
|
470 | 470 | self.grant_user_permission( |
|
471 | 471 | repo=repo, user=member, perm=perm |
|
472 | 472 | ) |
|
473 | 473 | else: |
|
474 | 474 | #check if we have permissions to alter this usergroup's access |
|
475 | 475 | if not check_perms or HasUserGroupPermissionLevel('read')(member): |
|
476 | 476 | self.grant_user_group_permission( |
|
477 | 477 | repo=repo, group_name=member, perm=perm |
|
478 | 478 | ) |
|
479 | 479 | |
|
480 | 480 | def create_fork(self, form_data, cur_user): |
|
481 | 481 | """ |
|
482 | 482 | Simple wrapper into executing celery task for fork creation |
|
483 | 483 | |
|
484 | 484 | :param form_data: |
|
485 | 485 | :param cur_user: |
|
486 | 486 | """ |
|
487 | 487 | from kallithea.lib.celerylib import tasks |
|
488 | 488 | return tasks.create_repo_fork(form_data, cur_user) |
|
489 | 489 | |
|
490 | 490 | def delete(self, repo, forks=None, fs_remove=True, cur_user=None): |
|
491 | 491 | """ |
|
492 | 492 | Delete given repository; the forks parameter defines what to do with |

493 | 493 | attached forks. Throws AttachedForksError if the deleted repo has attached |

494 | 494 | forks. |
|
495 | 495 | |
|
496 | 496 | :param repo: |
|
497 | 497 | :param forks: str 'delete' or 'detach' |
|
498 | 498 | :param fs_remove: remove (archive) repo from filesystem |
|
499 | 499 | """ |
|
500 | 500 | if not cur_user: |
|
501 | 501 | cur_user = getattr(get_current_authuser(), 'username', None) |
|
502 | 502 | repo = Repository.guess_instance(repo) |
|
503 | 503 | if repo is not None: |
|
504 | 504 | if forks == 'detach': |
|
505 | 505 | for r in repo.forks: |
|
506 | 506 | r.fork = None |
|
507 | 507 | elif forks == 'delete': |
|
508 | 508 | for r in repo.forks: |
|
509 | 509 | self.delete(r, forks='delete') |
|
510 | 510 | elif [f for f in repo.forks]: |
|
511 | 511 | raise AttachedForksError() |
|
512 | 512 | |
|
513 | 513 | old_repo_dict = repo.get_dict() |
|
514 | 514 | try: |
|
515 | 515 | Session().delete(repo) |
|
516 | 516 | if fs_remove: |
|
517 | 517 | self._delete_filesystem_repo(repo) |
|
518 | 518 | else: |
|
519 | 519 | log.debug('skipping removal from filesystem') |
|
520 | 520 | log_delete_repository(old_repo_dict, |
|
521 | 521 | deleted_by=cur_user) |
|
522 | 522 | except Exception: |
|
523 | 523 | log.error(traceback.format_exc()) |
|
524 | 524 | raise |
|
525 | 525 | |
|
526 | 526 | def grant_user_permission(self, repo, user, perm): |
|
527 | 527 | """ |
|
528 | 528 | Grant permission for user on given repository, or update existing one |
|
529 | 529 | if found |
|
530 | 530 | |
|
531 | 531 | :param repo: Instance of Repository, repository_id, or repository name |
|
532 | 532 | :param user: Instance of User, user_id or username |
|
533 | 533 | :param perm: Instance of Permission, or permission_name |
|
534 | 534 | """ |
|
535 | 535 | user = User.guess_instance(user) |
|
536 | 536 | repo = Repository.guess_instance(repo) |
|
537 | 537 | permission = Permission.guess_instance(perm) |
|
538 | 538 | |
|
539 | 539 | # check if we have that permission already |
|
540 | 540 | obj = UserRepoToPerm.query() \ |
|
541 | 541 | .filter(UserRepoToPerm.user == user) \ |
|
542 | 542 | .filter(UserRepoToPerm.repository == repo) \ |
|
543 | 543 | .scalar() |
|
544 | 544 | if obj is None: |
|
545 | 545 | # create new ! |
|
546 | 546 | obj = UserRepoToPerm() |
|
547 | 547 | Session().add(obj) |
|
548 | 548 | obj.repository = repo |
|
549 | 549 | obj.user = user |
|
550 | 550 | obj.permission = permission |
|
551 | 551 | log.debug('Granted perm %s to %s on %s', perm, user, repo) |
|
552 | 552 | return obj |
|
553 | 553 | |
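A hedged sketch of the grant/revoke pair around here; the repository and user names are placeholders, and guess_instance() accepts ids, names or model instances alike:

    model = RepoModel()
    # Grant (or update) write access for one user on one repository:
    model.grant_user_permission(repo='vcs_test_hg',
                                user='test_regular',
                                perm='repository.write')
    Session().commit()

    # Revoking deletes the UserRepoToPerm row again, if one exists:
    model.revoke_user_permission(repo='vcs_test_hg', user='test_regular')
    Session().commit()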
|
554 | 554 | def revoke_user_permission(self, repo, user): |
|
555 | 555 | """ |
|
556 | 556 | Revoke permission for user on given repository |
|
557 | 557 | |
|
558 | 558 | :param repo: Instance of Repository, repository_id, or repository name |
|
559 | 559 | :param user: Instance of User, user_id or username |
|
560 | 560 | """ |
|
561 | 561 | |
|
562 | 562 | user = User.guess_instance(user) |
|
563 | 563 | repo = Repository.guess_instance(repo) |
|
564 | 564 | |
|
565 | 565 | obj = UserRepoToPerm.query() \ |
|
566 | 566 | .filter(UserRepoToPerm.repository == repo) \ |
|
567 | 567 | .filter(UserRepoToPerm.user == user) \ |
|
568 | 568 | .scalar() |
|
569 | 569 | if obj is not None: |
|
570 | 570 | Session().delete(obj) |
|
571 | 571 | log.debug('Revoked perm on %s on %s', repo, user) |
|
572 | 572 | |
|
573 | 573 | def grant_user_group_permission(self, repo, group_name, perm): |
|
574 | 574 | """ |
|
575 | 575 | Grant permission for user group on given repository, or update |
|
576 | 576 | existing one if found |
|
577 | 577 | |
|
578 | 578 | :param repo: Instance of Repository, repository_id, or repository name |
|
579 | 579 | :param group_name: Instance of UserGroup, users_group_id, |
|
580 | 580 | or user group name |
|
581 | 581 | :param perm: Instance of Permission, or permission_name |
|
582 | 582 | """ |
|
583 | 583 | repo = Repository.guess_instance(repo) |
|
584 | 584 | group_name = UserGroup.guess_instance(group_name) |
|
585 | 585 | permission = Permission.guess_instance(perm) |
|
586 | 586 | |
|
587 | 587 | # check if we have that permission already |
|
588 | 588 | obj = UserGroupRepoToPerm.query() \ |
|
589 | 589 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
590 | 590 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
591 | 591 | .scalar() |
|
592 | 592 | |
|
593 | 593 | if obj is None: |
|
594 | 594 | # create new |
|
595 | 595 | obj = UserGroupRepoToPerm() |
|
596 | 596 | Session().add(obj) |
|
597 | 597 | |
|
598 | 598 | obj.repository = repo |
|
599 | 599 | obj.users_group = group_name |
|
600 | 600 | obj.permission = permission |
|
601 | 601 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) |
|
602 | 602 | return obj |
|
603 | 603 | |
|
604 | 604 | def revoke_user_group_permission(self, repo, group_name): |
|
605 | 605 | """ |
|
606 | 606 | Revoke permission for user group on given repository |
|
607 | 607 | |
|
608 | 608 | :param repo: Instance of Repository, repository_id, or repository name |
|
609 | 609 | :param group_name: Instance of UserGroup, users_group_id, |
|
610 | 610 | or user group name |
|
611 | 611 | """ |
|
612 | 612 | repo = Repository.guess_instance(repo) |
|
613 | 613 | group_name = UserGroup.guess_instance(group_name) |
|
614 | 614 | |
|
615 | 615 | obj = UserGroupRepoToPerm.query() \ |
|
616 | 616 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
617 | 617 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
618 | 618 | .scalar() |
|
619 | 619 | if obj is not None: |
|
620 | 620 | Session().delete(obj) |
|
621 | 621 | log.debug('Revoked perm to %s on %s', repo, group_name) |
|
622 | 622 | |
|
623 | 623 | def delete_stats(self, repo_name): |
|
624 | 624 | """ |
|
625 | 625 | removes stats for given repo |
|
626 | 626 | |
|
627 | 627 | :param repo_name: |
|
628 | 628 | """ |
|
629 | 629 | repo = Repository.guess_instance(repo_name) |
|
630 | 630 | try: |
|
631 | 631 | obj = Statistics.query() \ |
|
632 | 632 | .filter(Statistics.repository == repo).scalar() |
|
633 | 633 | if obj is not None: |
|
634 | 634 | Session().delete(obj) |
|
635 | 635 | except Exception: |
|
636 | 636 | log.error(traceback.format_exc()) |
|
637 | 637 | raise |
|
638 | 638 | |
|
639 | 639 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, |
|
640 | 640 | clone_uri=None, repo_store_location=None): |
|
641 | 641 | """ |
|
642 | 642 | Makes repository on filesystem. Operation is group aware, meaning that it will create |
|
643 | 643 | a repository within a group, and alter the paths according to the group location. |
|
644 | 644 | |
|
645 | 645 | :param repo_name: |
|
646 | 646 | :param repo_type: |

647 | 647 | :param repo_group: |
|
648 | 648 | :param clone_uri: |
|
649 | 649 | :param repo_store_location: |
|
650 | 650 | """ |
|
651 | 651 | from kallithea.lib.utils import is_valid_repo, is_valid_repo_group |
|
652 | 652 | from kallithea.model.scm import ScmModel |
|
653 | 653 | |
|
654 | 654 | if '/' in repo_name: |
|
655 | 655 | raise ValueError('repo_name must not contain groups, got `%s`' % repo_name) |
|
656 | 656 | |
|
657 | 657 | if isinstance(repo_group, RepoGroup): |
|
658 | 658 | new_parent_path = os.sep.join(repo_group.full_path_splitted) |
|
659 | 659 | else: |
|
660 | 660 | new_parent_path = repo_group or '' |
|
661 | 661 | |
|
662 | 662 | if repo_store_location: |
|
663 | 663 | _paths = [repo_store_location] |
|
664 | 664 | else: |
|
665 | 665 | _paths = [self.repos_path, new_parent_path, repo_name] |
|
666 | 666 | # we need to make it str for mercurial |
|
667 | 667 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) |
|
668 | 668 | |
|
669 | 669 | # check if this path is not a repository |
|
670 | 670 | if is_valid_repo(repo_path, self.repos_path): |
|
671 | 671 | raise Exception('This path %s is a valid repository' % repo_path) |
|
672 | 672 | |
|
673 | 673 | # check if this path is a group |
|
674 | 674 | if is_valid_repo_group(repo_path, self.repos_path): |
|
675 | 675 | raise Exception('This path %s is a valid group' % repo_path) |
|
676 | 676 | |
|
677 | 677 | log.info('creating repo %s in %s from url: `%s`', |
|
678 | 678 | repo_name, safe_unicode(repo_path), |
|
679 | 679 | obfuscate_url_pw(clone_uri)) |
|
680 | 680 | |
|
681 | 681 | backend = get_backend(repo_type) |
|
682 | 682 | |
|
683 | 683 | if repo_type == 'hg': |
|
684 | 684 | baseui = make_ui('db', clear_session=False) |
|
685 | 685 | # patch and reset hooks section of UI config to not run any |
|
686 | 686 | # hooks on creating remote repo |
|
687 | 687 | for k, v in baseui.configitems('hooks'): |
|
688 | 688 | baseui.setconfig('hooks', k, None) |
|
689 | 689 | |
|
690 | 690 | repo = backend(repo_path, create=True, src_url=clone_uri, baseui=baseui) |
|
691 | 691 | elif repo_type == 'git': |
|
692 | 692 | repo = backend(repo_path, create=True, src_url=clone_uri, bare=True) |
|
693 | 693 | # add kallithea hook into this repo |
|
694 | 694 | ScmModel().install_git_hooks(repo=repo) |
|
695 | 695 | else: |
|
696 | 696 | raise Exception('Not supported repo_type %s expected hg/git' % repo_type) |
|
697 | 697 | |
|
698 | 698 | log.debug('Created repo %s with %s backend', |
|
699 | 699 | safe_unicode(repo_name), safe_unicode(repo_type)) |
|
700 | 700 | return repo |
|
701 | 701 | |
|
702 | 702 | def _rename_filesystem_repo(self, old, new): |
|
703 | 703 | """ |
|
704 | 704 | renames repository on filesystem |
|
705 | 705 | |
|
706 | 706 | :param old: old name |
|
707 | 707 | :param new: new name |
|
708 | 708 | """ |
|
709 | 709 | log.info('renaming repo from %s to %s', old, new) |
|
710 | 710 | |
|
711 | 711 | old_path = safe_str(os.path.join(self.repos_path, old)) |
|
712 | 712 | new_path = safe_str(os.path.join(self.repos_path, new)) |
|
713 | 713 | if os.path.isdir(new_path): |
|
714 | 714 | raise Exception( |
|
715 | 715 | 'Was trying to rename to already existing dir %s' % new_path |
|
716 | 716 | ) |
|
717 | 717 | shutil.move(old_path, new_path) |
|
718 | 718 | |
|
719 | 719 | def _delete_filesystem_repo(self, repo): |
|
720 | 720 | """ |
|
721 | 721 | removes repo from filesystem; the removal is actually done by |

722 | 722 | renaming the dir with an 'rm__*' prefix, which Kallithea will skip. |
|
723 | 723 | It can be undeleted later by reverting the rename. |
|
724 | 724 | |
|
725 | 725 | :param repo: repo object |
|
726 | 726 | """ |
|
727 | 727 | rm_path = safe_str(os.path.join(self.repos_path, repo.repo_name)) |
|
728 | 728 | log.info("Removing %s", rm_path) |
|
729 | 729 | |
|
730 | 730 | _now = datetime.now() |
|
731 | 731 | _ms = str(_now.microsecond).rjust(6, '0') |
|
732 | 732 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), |
|
733 | 733 | repo.just_name) |
|
734 | 734 | if repo.group: |
|
735 | 735 | args = repo.group.full_path_splitted + [_d] |
|
736 | 736 | _d = os.path.join(*args) |
|
737 | 737 | if os.path.exists(rm_path): |
|
738 | 738 | shutil.move(rm_path, safe_str(os.path.join(self.repos_path, _d))) |
|
739 | 739 | else: |
|
740 | 740 | log.error("Can't find repo to delete in %r", rm_path) |
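To make the 'rm__' soft-delete naming in _delete_filesystem_repo concrete, here is the name it would build for an illustrative timestamp and repository name:

    from datetime import datetime

    _now = datetime(2017, 1, 2, 13, 37, 42, 123)       # illustrative
    _ms = str(_now.microsecond).rjust(6, '0')          # '000123'
    print('rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), 'myrepo'))
    # -> rm__20170102_133742_000123__myrepo
    # Renaming the directory back to 'myrepo' effectively undeletes it.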
@@ -1,215 +1,206 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | |
|
15 | 15 | import datetime |
|
16 | 16 | import logging |
|
17 | 17 | import os |
|
18 | 18 | import pytest |
|
19 | 19 | import re |
|
20 | 20 | import tempfile |
|
21 | 21 | import time |
|
22 | 22 | |
|
23 | 23 | from tg import config |
|
24 | import pylons | |
|
25 | from pylons import url | |
|
26 | from pylons.i18n.translation import _get_translator | |
|
27 | from pylons.util import ContextObj | |
|
28 | from routes.util import URLGenerator | |
|
29 | 24 | from webtest import TestApp |
|
30 | 25 | |
|
31 | from kallithea import is_windows | |
|
26 | from kallithea import is_windows, model | |
|
32 | 27 | from kallithea.model.db import Notification, User, UserNotification |
|
33 | 28 | from kallithea.model.meta import Session |
|
34 | 29 | from kallithea.lib.utils2 import safe_str |
|
35 | 30 | |
|
36 | 31 | os.environ['TZ'] = 'UTC' |
|
37 | 32 | if not is_windows: |
|
38 | 33 | time.tzset() |
|
39 | 34 | |
|
40 | 35 | log = logging.getLogger(__name__) |
|
41 | 36 | |
|
42 | 37 | skipif = pytest.mark.skipif |
|
43 | 38 | parametrize = pytest.mark.parametrize |
|
44 | 39 | |
|
40 | # Hack: These module global values MUST be set to actual values before running any tests. This is currently done by conftest.py. | |
|
41 | url = None | |
|
42 | testapp = None | |
|
43 | ||
|
45 | 44 | __all__ = [ |
|
46 | 45 | 'skipif', 'parametrize', 'environ', 'url', 'TestController', |
|
47 | 46 | 'ldap_lib_installed', 'pam_lib_installed', 'invalidate_all_caches', |
|
48 | 47 | 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'NEW_HG_REPO', 'NEW_GIT_REPO', |
|
49 | 48 | 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS', |
|
50 | 49 | 'TEST_USER_ADMIN_EMAIL', 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS', |
|
51 | 50 | 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN', |
|
52 | 51 | 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO', |
|
53 | 52 | 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO', |
|
54 | 53 | 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'HG_REMOTE_REPO', |
|
55 | 54 | 'GIT_REMOTE_REPO', 'SCM_TESTS', |
|
56 | 55 | ] |
|
57 | 56 | |
|
58 | 57 | # Invoke websetup with the current config file |
|
59 | 58 | # SetupCommand('setup-app').run([config_file]) |
|
60 | 59 | |
|
61 | 60 | environ = {} |
|
62 | 61 | |
|
63 | 62 | #SOME GLOBALS FOR TESTS |
|
64 | 63 | |
|
65 | 64 | TESTS_TMP_PATH = os.path.join(tempfile.gettempdir(), 'rc_test_%s' % tempfile._RandomNameSequence().next()) |
|
66 | 65 | TEST_USER_ADMIN_LOGIN = 'test_admin' |
|
67 | 66 | TEST_USER_ADMIN_PASS = 'test12' |
|
68 | 67 | TEST_USER_ADMIN_EMAIL = 'test_admin@example.com' |
|
69 | 68 | |
|
70 | 69 | TEST_USER_REGULAR_LOGIN = 'test_regular' |
|
71 | 70 | TEST_USER_REGULAR_PASS = 'test12' |
|
72 | 71 | TEST_USER_REGULAR_EMAIL = 'test_regular@example.com' |
|
73 | 72 | |
|
74 | 73 | TEST_USER_REGULAR2_LOGIN = 'test_regular2' |
|
75 | 74 | TEST_USER_REGULAR2_PASS = 'test12' |
|
76 | 75 | TEST_USER_REGULAR2_EMAIL = 'test_regular2@example.com' |
|
77 | 76 | |
|
78 | 77 | HG_REPO = u'vcs_test_hg' |
|
79 | 78 | GIT_REPO = u'vcs_test_git' |
|
80 | 79 | |
|
81 | 80 | NEW_HG_REPO = u'vcs_test_hg_new' |
|
82 | 81 | NEW_GIT_REPO = u'vcs_test_git_new' |
|
83 | 82 | |
|
84 | 83 | HG_FORK = u'vcs_test_hg_fork' |
|
85 | 84 | GIT_FORK = u'vcs_test_git_fork' |
|
86 | 85 | |
|
87 | 86 | ## VCS |
|
88 | 87 | SCM_TESTS = ['hg', 'git'] |
|
89 | 88 | uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple()))) |
|
90 | 89 | |
|
91 | 90 | GIT_REMOTE_REPO = 'git://github.com/codeinn/vcs.git' |
|
92 | 91 | |
|
93 | 92 | TEST_GIT_REPO = os.path.join(TESTS_TMP_PATH, GIT_REPO) |
|
94 | 93 | TEST_GIT_REPO_CLONE = os.path.join(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix) |
|
95 | 94 | TEST_GIT_REPO_PULL = os.path.join(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix) |
|
96 | 95 | |
|
97 | 96 | |
|
98 | 97 | HG_REMOTE_REPO = 'http://bitbucket.org/marcinkuzminski/vcs' |
|
99 | 98 | |
|
100 | 99 | TEST_HG_REPO = os.path.join(TESTS_TMP_PATH, HG_REPO) |
|
101 | 100 | TEST_HG_REPO_CLONE = os.path.join(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix) |
|
102 | 101 | TEST_HG_REPO_PULL = os.path.join(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix) |
|
103 | 102 | |
|
104 | 103 | TEST_DIR = tempfile.gettempdir() |
|
105 | 104 | TEST_REPO_PREFIX = 'vcs-test' |
|
106 | 105 | |
|
107 | 106 | # cached repos if any ! |
|
108 | 107 | # comment out to get some other repos from bb or github |
|
109 | 108 | GIT_REMOTE_REPO = os.path.join(TESTS_TMP_PATH, GIT_REPO) |
|
110 | 109 | HG_REMOTE_REPO = os.path.join(TESTS_TMP_PATH, HG_REPO) |
|
111 | 110 | |
|
112 | 111 | #skip ldap tests if LDAP lib is not installed |
|
113 | 112 | ldap_lib_installed = False |
|
114 | 113 | try: |
|
115 | 114 | import ldap |
|
116 | 115 | ldap.API_VERSION |
|
117 | 116 | ldap_lib_installed = True |
|
118 | 117 | except ImportError: |
|
119 | 118 | # means that python-ldap is not installed |
|
120 | 119 | pass |
|
121 | 120 | |
|
122 | 121 | try: |
|
123 | 122 | import pam |
|
124 | 123 | pam.PAM_TEXT_INFO |
|
125 | 124 | pam_lib_installed = True |
|
126 | 125 | except ImportError: |
|
127 | 126 | pam_lib_installed = False |
|
128 | 127 | |
|
129 | 128 | def invalidate_all_caches(): |
|
130 | 129 | """Invalidate all beaker caches currently configured. |
|
131 | 130 | Useful when manipulating IP permissions in a test and changes need to take |
|
132 | 131 | effect immediately. |
|
133 | 132 | Note: Any use of this function is probably a workaround - it should be |
|
134 | 133 | replaced with a more specific cache invalidation in code or test.""" |
|
135 | 134 | from beaker.cache import cache_managers |
|
136 | 135 | for cache in cache_managers.values(): |
|
137 | 136 | cache.clear() |
|
138 | 137 | |
|
139 | 138 | class NullHandler(logging.Handler): |
|
140 | 139 | def emit(self, record): |
|
141 | 140 | pass |
|
142 | 141 | |
|
143 | 142 | class TestController(object): |
|
144 | 143 | """Pytest-style test controller""" |
|
145 | 144 | |
|
146 | 145 | # Note: pytest base classes cannot have an __init__ method |
|
147 | 146 | |
|
148 | 147 | @pytest.fixture(autouse=True) |
|
149 | 148 | def app_fixture(self): |
|
150 | config = pylons.test.pylonsapp.config | |
|
151 | url._push_object(URLGenerator(config['routes.map'], environ)) | |
|
152 | pylons.app_globals._push_object(config['pylons.app_globals']) | |
|
153 | pylons.config._push_object(config) | |
|
154 | pylons.tmpl_context._push_object(ContextObj()) | |
|
155 | # Initialize a translator for tests that utilize i18n | |
|
156 | translator = _get_translator(pylons.config.get('lang')) | |
|
157 | pylons.translator._push_object(translator) | |
|
158 | 149 | h = NullHandler() |
|
159 | 150 | logging.getLogger("kallithea").addHandler(h) |
|
160 | self.app = TestApp(pylons.test.pylonsapp) | 

151 | self.app = TestApp(testapp) | 
|
161 | 152 | return self.app |
|
162 | 153 | |
|
163 | 154 | def remove_all_notifications(self): |
|
164 | 155 | # query().delete() does not (by default) trigger cascades |
|
165 | 156 | # ( http://docs.sqlalchemy.org/en/rel_0_7/orm/collections.html#passive-deletes ) |
|
166 | 157 | # so delete the UserNotification first to ensure referential integrity. |
|
167 | 158 | UserNotification.query().delete() |
|
168 | 159 | |
|
169 | 160 | Notification.query().delete() |
|
170 | 161 | Session().commit() |
|
171 | 162 | |
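The child-first ordering above matters because Query.delete() issues one bulk DELETE statement and bypasses ORM-level cascade rules. A self-contained sketch of the same pattern with hypothetical stand-in models (not the real Kallithea schema):

    from sqlalchemy import create_engine, Column, Integer, ForeignKey
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class Parent(Base):   # stands in for Notification
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)

    class Child(Base):    # stands in for UserNotification
        __tablename__ = 'child'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('parent.id'), nullable=False)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    session.add(Parent(id=1))
    session.add(Child(id=1, parent_id=1))
    session.commit()

    # bulk deletes do not cascade: remove the referencing rows first;
    # a backend that enforces foreign keys would reject the reverse order
    session.query(Child).delete()
    session.query(Parent).delete()
    session.commit()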
|
172 | 163 | def log_user(self, username=TEST_USER_ADMIN_LOGIN, |
|
173 | 164 | password=TEST_USER_ADMIN_PASS): |
|
174 | 165 | self._logged_username = username |
|
175 | 166 | response = self.app.post(url(controller='login', action='index'), |
|
176 | 167 | {'username': username, |
|
177 | 168 | 'password': password}) |
|
178 | 169 | |
|
179 | 170 | if 'Invalid username or password' in response.body: |
|
180 | 171 | pytest.fail('could not login using %s %s' % (username, password)) |
|
181 | 172 | |
|
182 | 173 | assert response.status == '302 Found' |
|
183 | 174 | self.assert_authenticated_user(response, username) |
|
184 | 175 | |
|
185 | 176 | response = response.follow() |
|
186 | 177 | return response.session['authuser'] |
|
187 | 178 | |
|
188 | 179 | def _get_logged_user(self): |
|
189 | 180 | return User.get_by_username(self._logged_username) |
|
190 | 181 | |
|
191 | 182 | def assert_authenticated_user(self, response, expected_username): |
|
192 | 183 | cookie = response.session.get('authuser') |
|
193 | 184 | user = cookie and cookie.get('user_id') |
|
194 | 185 | user = user and User.get(user) |
|
195 | 186 | user = user and user.username |
|
196 | 187 | assert user == expected_username |
|
197 | 188 | |
|
198 | 189 | def authentication_token(self): |
|
199 | 190 | return self.app.get(url('authentication_token')).body |
|
200 | 191 | |
|
201 | 192 | def checkSessionFlash(self, response, msg=None, skip=0, _matcher=lambda msg, m: msg in m): |
|
202 | 193 | if 'flash' not in response.session: |
|
203 | 194 | pytest.fail(safe_str(u'msg `%s` not found - session has no flash:\n%s' % (msg, response))) |
|
204 | 195 | try: |
|
205 | 196 | level, m = response.session['flash'][-1 - skip] |
|
206 | 197 | if _matcher(msg, m): |
|
207 | 198 | return |
|
208 | 199 | except IndexError: |
|
209 | 200 | pass |
|
210 | 201 | pytest.fail(safe_str(u'msg `%s` not found in session flash (skipping %s): %s' % |
|
211 | 202 | (msg, skip, |
|
212 | 203 | ', '.join('`%s`' % m for level, m in response.session['flash'])))) |
|
213 | 204 | |
|
214 | 205 | def checkSessionFlashRegex(self, response, regex, skip=0): |
|
215 | 206 | self.checkSessionFlash(response, regex, skip=skip, _matcher=re.search) |
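For illustration, a sketch of how the two helpers above are typically called from a test method; the flash text here is an example, not guaranteed application output:

    # inside a TestController subclass, after self.log_user():
    response = self.app.post(url('delete_user', id=uid),
                             params={'_authentication_token': self.authentication_token()})
    self.checkSessionFlash(response, 'Successfully deleted user')
    # or match loosely when the message embeds generated values:
    self.checkSessionFlashRegex(response, r'Successfully deleted user \w+')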
@@ -1,124 +1,122 b'' | |||
|
1 | 1 | import os |
|
2 | 2 | import sys |
|
3 | 3 | import logging |
|
4 | import pkg_resources | |
|
4 | 5 | |
|
5 | import pkg_resources | |
|
6 | 6 | from paste.deploy import loadapp |
|
7 | import pylons.test | |
|
8 | from pylons.i18n.translation import _get_translator | |
|
7 | from routes.util import URLGenerator | |
|
8 | from tg import config | |
|
9 | ||
|
9 | 10 | import pytest |
|
10 | 11 | from kallithea.model.user import UserModel |
|
11 | 12 | from kallithea.model.meta import Session |
|
12 | 13 | from kallithea.model.db import Setting, User, UserIpMap |
|
13 | 14 | from kallithea.tests.base import invalidate_all_caches, TEST_USER_REGULAR_LOGIN |
|
15 | import kallithea.tests.base # FIXME: needed for setting testapp instance!!! | |
|
14 | 16 | |
|
15 | from kallithea.tests.test_context import test_context | 

17 | from tg.util.webtest import test_context | 
|
16 | 18 | |
|
17 | 19 | def pytest_configure(): |
|
18 | 20 | path = os.getcwd() |
|
19 | 21 | sys.path.insert(0, path) |
|
20 | 22 | pkg_resources.working_set.add_entry(path) |
|
21 | 23 | |
|
22 | 24 | # Disable INFO logging of test database creation, restore with NOTSET |
|
23 | 25 | logging.disable(logging.INFO) |
|
24 | pylons.test.pylonsapp = loadapp('config:kallithea/tests/test.ini', relative_to=path) | 

26 | kallithea.tests.base.testapp = loadapp('config:kallithea/tests/test.ini', relative_to=path) | 
|
25 | 27 | logging.disable(logging.NOTSET) |
|
26 | 28 | |
|
27 | # Initialize a translator for tests that utilize i18n | |
|
28 | translator = _get_translator(pylons.config.get('lang')) | |
|
29 | pylons.translator._push_object(translator) | |
|
30 | ||
|
31 | return pylons.test.pylonsapp | |
|
29 | kallithea.tests.base.url = URLGenerator(config['routes.map'], kallithea.tests.base.environ) | |
|
32 | 30 | |
|
33 | 31 | |
|
34 | 32 | @pytest.fixture |
|
35 | 33 | def create_test_user(): |
|
36 | 34 | """Provide users that automatically disappear after test is over.""" |
|
37 | 35 | test_user_ids = [] |
|
38 | 36 | def _create_test_user(user_form): |
|
39 | 37 | user = UserModel().create(user_form) |
|
40 | 38 | test_user_ids.append(user.user_id) |
|
41 | 39 | return user |
|
42 | 40 | yield _create_test_user |
|
43 | 41 | for user_id in test_user_ids: |
|
44 | 42 | UserModel().delete(user_id) |
|
45 | 43 | Session().commit() |
|
46 | 44 | |
|
47 | 45 | |
|
48 | 46 | def _set_settings(*kvtseq): |
|
49 | 47 | session = Session() |
|
50 | 48 | for kvt in kvtseq: |
|
51 | 49 | assert len(kvt) in (2, 3) |
|
52 | 50 | k = kvt[0] |
|
53 | 51 | v = kvt[1] |
|
54 | 52 | t = kvt[2] if len(kvt) == 3 else 'unicode' |
|
55 | 53 | Setting.create_or_update(k, v, t) |
|
56 | 54 | session.commit() |
|
57 | 55 | |
|
58 | 56 | |
|
59 | 57 | @pytest.fixture |
|
60 | 58 | def set_test_settings(): |
|
61 | 59 | """Restore settings after test is over.""" |
|
62 | 60 | # Save settings. |
|
63 | 61 | settings_snapshot = [ |
|
64 | 62 | (s.app_settings_name, s.app_settings_value, s.app_settings_type) |
|
65 | 63 | for s in Setting.query().all()] |
|
66 | 64 | yield _set_settings |
|
67 | 65 | # Restore settings. |
|
68 | 66 | session = Session() |
|
69 | 67 | keys = frozenset(k for (k, v, t) in settings_snapshot) |
|
70 | 68 | for s in Setting.query().all(): |
|
71 | 69 | if s.app_settings_name not in keys: |
|
72 | 70 | session.delete(s) |
|
73 | 71 | for k, v, t in settings_snapshot: |
|
74 | 72 | if t == 'list' and hasattr(v, '__iter__'): |
|
75 | 73 | v = ','.join(v) # Quirk: must format list value manually. |
|
76 | 74 | Setting.create_or_update(k, v, t) |
|
77 | 75 | session.commit() |
|
78 | 76 | |
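A sketch of how a test might consume this fixture; the setting keys and values are examples only:

    def test_site_branding(set_test_settings):
        # changed settings are restored automatically when the test ends
        set_test_settings(
            ('title', u'Temporary Test Title'),       # type defaults to 'unicode'
            ('ga_code', 'UA-000000-0', 'unicode'),    # explicit type as third element
        )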
|
79 | 77 | @pytest.fixture |
|
80 | 78 | def auto_clear_ip_permissions(): |
|
81 | 79 | """Fixture that provides nothing but clearing of IP permissions upon test

82 | 80 | exit. This clearing is needed to avoid other tests failing when making fake

83 | 81 | HTTP accesses."""
|
84 | 82 | yield |
|
85 | 83 | # cleanup |
|
86 | 84 | user_model = UserModel() |
|
87 | 85 | |
|
88 | 86 | user_ids = [] |
|
89 | 87 | user_ids.append(User.get_default_user().user_id) |
|
90 | 88 | user_ids.append(User.get_by_username(TEST_USER_REGULAR_LOGIN).user_id) |
|
91 | 89 | |
|
92 | 90 | for user_id in user_ids: |
|
93 | 91 | for ip in UserIpMap.query().filter(UserIpMap.user_id == user_id): |
|
94 | 92 | user_model.delete_extra_ip(user_id, ip.ip_id) |
|
95 | 93 | |
|
96 | 94 | # IP permissions are cached, need to invalidate this cache explicitly |
|
97 | 95 | invalidate_all_caches() |
|
98 | 96 | |
|
99 | 97 | @pytest.fixture |
|
100 | 98 | def test_context_fixture(app_fixture): |
|
101 | 99 | """ |
|
102 | 100 | Encompass the entire test using this fixture in a test_context, |
|
103 | 101 | making sure that certain functionality still works even if no call to |
|
104 | 102 | self.app.get/post has been made. |
|
105 | 103 | The typical error message indicating you need a test_context is: |
|
106 | 104 | TypeError: No object (name: context) has been registered for this thread |
|
107 | 105 | |
|
108 | 106 | The standard way to fix this is simply using the test_context context |
|
109 | 107 | manager directly inside your test: |
|
110 | 108 | with test_context(self.app): |
|
111 | 109 | <actions> |
|
112 | 110 | but if test setup code (xUnit-style or pytest fixtures) also needs to be |
|
113 | 111 | executed inside the test context, that method is not possible. |
|
114 | 112 | Even if there is no such setup code, the fixture may reduce code complexity |
|
115 | 113 | if the entire test needs to run inside a test context. |
|
116 | 114 | |
|
117 | 115 | To apply this fixture (like any other fixture) to all test methods of a |
|
118 | 116 | class, use the following class decorator: |
|
119 | 117 | @pytest.mark.usefixtures("test_context_fixture") |
|
120 | 118 | class TestFoo(TestController): |
|
121 | 119 | ... |
|
122 | 120 | """ |
|
123 | 121 | with test_context(app_fixture): |
|
124 | 122 | yield |
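A hypothetical sketch of applying the fixture as the docstring describes; the test body, and the assumed imports of validators and TestController from the test modules, are illustrative only:

    @pytest.mark.usefixtures("test_context_fixture")
    class TestValidatorsOutsideRequest(TestController):
        def test_validator_messages(self):
            # without the surrounding test_context this would raise:
            #   TypeError: No object (name: context) has been registered for this thread
            msg = validators.ValidUsername(False, {})._messages['system_invalid_username']
            assert msg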
@@ -1,171 +1,171 b'' | |||
|
1 | 1 | from kallithea.tests.base import * |
|
2 | 2 | from kallithea.model.db import User |
|
3 | 3 | |
|
4 | 4 | from kallithea.model.user import UserModel |
|
5 | 5 | from kallithea.model.notification import NotificationModel |
|
6 | 6 | from kallithea.model.meta import Session |
|
7 | 7 | from kallithea.lib import helpers as h |
|
8 | 8 | |
|
9 | from kallithea.tests.test_context import test_context | 

9 | from tg.util.webtest import test_context | 
|
10 | 10 | |
|
11 | 11 | class TestNotificationsController(TestController): |
|
12 | 12 | def setup_method(self, method): |
|
13 | 13 | self.remove_all_notifications() |
|
14 | 14 | |
|
15 | 15 | def test_index(self, create_test_user): |
|
16 | 16 | self.log_user() |
|
17 | 17 | |
|
18 | 18 | u1 = create_test_user(dict(username='u1', password='qweqwe', |
|
19 | 19 | email='u1@example.com', |
|
20 | 20 | firstname=u'u1', lastname=u'u1', |
|
21 | 21 | active=True)) |
|
22 | 22 | u1 = u1.user_id |
|
23 | 23 | Session().commit() |
|
24 | 24 | |
|
25 | 25 | response = self.app.get(url('notifications')) |
|
26 | 26 | response.mustcontain('<div>No notifications here yet</div>') |
|
27 | 27 | |
|
28 | 28 | with test_context(self.app): |
|
29 | 29 | cur_user = self._get_logged_user() |
|
30 | 30 | notif = NotificationModel().create(created_by=u1, subject=u'test_notification_1', |
|
31 | 31 | body=u'notification_1', recipients=[cur_user]) |
|
32 | 32 | Session().commit() |
|
33 | 33 | |
|
34 | 34 | response = self.app.get(url('notifications')) |
|
35 | 35 | response.mustcontain('id="notification_%s"' % notif.notification_id) |
|
36 | 36 | |
|
37 | 37 | def test_delete(self, create_test_user): |
|
38 | 38 | self.log_user() |
|
39 | 39 | cur_user = self._get_logged_user() |
|
40 | 40 | |
|
41 | 41 | with test_context(self.app): |
|
42 | 42 | u1 = create_test_user(dict(username='u1', password='qweqwe', |
|
43 | 43 | email='u1@example.com', |
|
44 | 44 | firstname=u'u1', lastname=u'u1', |
|
45 | 45 | active=True)) |
|
46 | 46 | u2 = create_test_user(dict(username='u2', password='qweqwe', |
|
47 | 47 | email='u2@example.com', |
|
48 | 48 | firstname=u'u2', lastname=u'u2', |
|
49 | 49 | active=True)) |
|
50 | 50 | |
|
51 | 51 | # make notifications |
|
52 | 52 | notification = NotificationModel().create(created_by=cur_user, |
|
53 | 53 | subject=u'test', |
|
54 | 54 | body=u'hi there', |
|
55 | 55 | recipients=[cur_user, u1, u2]) |
|
56 | 56 | Session().commit() |
|
57 | 57 | u1 = User.get(u1.user_id) |
|
58 | 58 | u2 = User.get(u2.user_id) |
|
59 | 59 | |
|
60 | 60 | # check DB |
|
61 | 61 | get_notif = lambda un: [x.notification for x in un] |
|
62 | 62 | assert get_notif(cur_user.notifications) == [notification] |
|
63 | 63 | assert get_notif(u1.notifications) == [notification] |
|
64 | 64 | assert get_notif(u2.notifications) == [notification] |
|
65 | 65 | cur_usr_id = cur_user.user_id |
|
66 | 66 | |
|
67 | 67 | response = self.app.post( |
|
68 | 68 | url('notification_delete', notification_id=notification.notification_id), |
|
69 | 69 | params={'_authentication_token': self.authentication_token()}) |
|
70 | 70 | assert response.body == 'ok' |
|
71 | 71 | |
|
72 | 72 | cur_user = User.get(cur_usr_id) |
|
73 | 73 | assert cur_user.notifications == [] |
|
74 | 74 | |
|
75 | 75 | def test_show(self, create_test_user): |
|
76 | 76 | self.log_user() |
|
77 | 77 | with test_context(self.app): |
|
78 | 78 | cur_user = self._get_logged_user() |
|
79 | 79 | u1 = create_test_user(dict(username='u1', password='qweqwe', |
|
80 | 80 | email='u1@example.com', |
|
81 | 81 | firstname=u'u1', lastname=u'u1', |
|
82 | 82 | active=True)) |
|
83 | 83 | u2 = create_test_user(dict(username='u2', password='qweqwe', |
|
84 | 84 | email='u2@example.com', |
|
85 | 85 | firstname=u'u2', lastname=u'u2', |
|
86 | 86 | active=True)) |
|
87 | 87 | Session().commit() |
|
88 | 88 | |
|
89 | 89 | subject = u'test' |
|
90 | 90 | notif_body = u'hi there' |
|
91 | 91 | notification = NotificationModel().create(created_by=cur_user, |
|
92 | 92 | subject=subject, |
|
93 | 93 | body=notif_body, |
|
94 | 94 | recipients=[cur_user, u1, u2]) |
|
95 | 95 | |
|
96 | 96 | response = self.app.get(url('notification', |
|
97 | 97 | notification_id=notification.notification_id)) |
|
98 | 98 | |
|
99 | 99 | response.mustcontain(subject) |
|
100 | 100 | response.mustcontain(notif_body) |
|
101 | 101 | |
|
102 | 102 | def test_description_with_age(self): |
|
103 | 103 | self.log_user() |
|
104 | 104 | with test_context(self.app): |
|
105 | 105 | cur_user = self._get_logged_user() |
|
106 | 106 | subject = u'test' |
|
107 | 107 | notify_body = u'hi there' |
|
108 | 108 | |
|
109 | 109 | notification = NotificationModel().create(created_by = cur_user, |
|
110 | 110 | subject = subject, |
|
111 | 111 | body = notify_body) |
|
112 | 112 | |
|
113 | 113 | description = NotificationModel().make_description(notification) |
|
114 | 114 | assert description == "{0} sent message {1}".format( |
|
115 | 115 | cur_user.username, |
|
116 | 116 | h.age(notification.created_on) |
|
117 | 117 | ) |
|
118 | 118 | |
|
119 | 119 | def test_description_with_datetime(self): |
|
120 | 120 | self.log_user() |
|
121 | 121 | with test_context(self.app): |
|
122 | 122 | cur_user = self._get_logged_user() |
|
123 | 123 | subject = u'test' |
|
124 | 124 | notify_body = u'hi there' |
|
125 | 125 | notification = NotificationModel().create(created_by = cur_user, |
|
126 | 126 | subject = subject, |
|
127 | 127 | body = notify_body) |
|
128 | 128 | |
|
129 | 129 | description = NotificationModel().make_description(notification, False) |
|
130 | 130 | assert description == "{0} sent message at {1}".format( |
|
131 | 131 | cur_user.username, |
|
132 | 132 | h.fmt_date(notification.created_on) |
|
133 | 133 | ) |
|
134 | 134 | |
|
135 | 135 | def test_mark_all_read(self, create_test_user): |
|
136 | 136 | self.log_user() |
|
137 | 137 | with test_context(self.app): |
|
138 | 138 | u0 = self._get_logged_user() |
|
139 | 139 | u1 = create_test_user(dict(username='u1', password='qweqwe', |
|
140 | 140 | email='u1@example.com', |
|
141 | 141 | firstname=u'u1', lastname=u'u1', |
|
142 | 142 | active=True)) |
|
143 | 143 | u2 = create_test_user(dict(username='u2', password='qweqwe', |
|
144 | 144 | email='u2@example.com', |
|
145 | 145 | firstname=u'u2', lastname=u'u2', |
|
146 | 146 | active=True)) |
|
147 | 147 | notif = NotificationModel().create(created_by=u1, |
|
148 | 148 | subject=u'subject', |
|
149 | 149 | body=u'body', |
|
150 | 150 | recipients=[u0, u2]) |
|
151 | 151 | u0_id, u1_id, u2_id = u0.user_id, u1.user_id, u2.user_id |
|
152 | 152 | |
|
153 | 153 | assert [n.read for n in u0.notifications] == [False] |
|
154 | 154 | assert u1.notifications == [] |
|
155 | 155 | assert [n.read for n in u2.notifications] == [False] |
|
156 | 156 | |
|
157 | 157 | # Mark all read for current user. |
|
158 | 158 | |
|
159 | 159 | response = self.app.get(url('notifications_mark_all_read'), # TODO: should be POST |
|
160 | 160 | extra_environ=dict(HTTP_X_PARTIAL_XHR='1')) |
|
161 | 161 | |
|
162 | 162 | assert response.status_int == 200 |
|
163 | 163 | response.mustcontain('id="notification_%s"' % notif.notification_id) |
|
164 | 164 | |
|
165 | 165 | u0 = User.get(u0_id) |
|
166 | 166 | u1 = User.get(u1_id) |
|
167 | 167 | u2 = User.get(u2_id) |
|
168 | 168 | |
|
169 | 169 | assert [n.read for n in u0.notifications] == [True] |
|
170 | 170 | assert u1.notifications == [] |
|
171 | 171 | assert [n.read for n in u2.notifications] == [False] |
@@ -1,81 +1,81 b'' | |||
|
1 | 1 | import time |
|
2 | 2 | |
|
3 | 3 | from kallithea.model.db import User, UserIpMap |
|
4 | 4 | from kallithea.model.user import UserModel |
|
5 | 5 | from kallithea.model.meta import Session |
|
6 | 6 | from kallithea.tests.base import * |
|
7 | 7 | |
|
8 | from kallithea.tests.test_context import test_context | 

8 | from tg.util.webtest import test_context | 
|
9 | 9 | |
|
10 | 10 | |
|
11 | 11 | class TestAdminPermissionsController(TestController): |
|
12 | 12 | |
|
13 | 13 | def test_index(self): |
|
14 | 14 | self.log_user() |
|
15 | 15 | response = self.app.get(url('admin_permissions')) |
|
16 | 16 | # Test response... |
|
17 | 17 | |
|
18 | 18 | def test_index_ips(self): |
|
19 | 19 | self.log_user() |
|
20 | 20 | response = self.app.get(url('admin_permissions_ips')) |
|
21 | 21 | # Test response... |
|
22 | 22 | response.mustcontain('All IP addresses are allowed') |
|
23 | 23 | |
|
24 | 24 | def test_add_ips(self, auto_clear_ip_permissions): |
|
25 | 25 | self.log_user() |
|
26 | 26 | default_user_id = User.get_default_user().user_id |
|
27 | 27 | response = self.app.post(url('edit_user_ips_update', id=default_user_id), |
|
28 | 28 | params=dict(new_ip='127.0.0.0/24', |
|
29 | 29 | _authentication_token=self.authentication_token())) |
|
30 | 30 | |
|
31 | 31 | # IP permissions are cached, need to invalidate this cache explicitly |
|
32 | 32 | invalidate_all_caches() |
|
33 | 33 | |
|
34 | 34 | self.app.get(url('admin_permissions_ips'), status=302) |
|
35 | 35 | |
|
36 | 36 | # REMOTE_ADDR must match 127.0.0.0/24 |
|
37 | 37 | response = self.app.get(url('admin_permissions_ips'), |
|
38 | 38 | extra_environ={'REMOTE_ADDR': '127.0.0.1'}) |
|
39 | 39 | response.mustcontain('127.0.0.0/24') |
|
40 | 40 | response.mustcontain('127.0.0.0 - 127.0.0.255') |
|
41 | 41 | |
|
42 | 42 | def test_delete_ips(self, auto_clear_ip_permissions): |
|
43 | 43 | self.log_user() |
|
44 | 44 | default_user_id = User.get_default_user().user_id |
|
45 | 45 | |
|
46 | 46 | ## first add |
|
47 | 47 | new_ip = '127.0.0.0/24' |
|
48 | 48 | with test_context(self.app): |
|
49 | 49 | user_model = UserModel() |
|
50 | 50 | ip_obj = user_model.add_extra_ip(default_user_id, new_ip) |
|
51 | 51 | Session().commit() |
|
52 | 52 | |
|
53 | 53 | ## double check that add worked |
|
54 | 54 | # IP permissions are cached, need to invalidate this cache explicitly |
|
55 | 55 | invalidate_all_caches() |
|
56 | 56 | self.app.get(url('admin_permissions_ips'), status=302) |
|
57 | 57 | # REMOTE_ADDR must match 127.0.0.0/24 |
|
58 | 58 | response = self.app.get(url('admin_permissions_ips'), |
|
59 | 59 | extra_environ={'REMOTE_ADDR': '127.0.0.1'}) |
|
60 | 60 | response.mustcontain('127.0.0.0/24') |
|
61 | 61 | response.mustcontain('127.0.0.0 - 127.0.0.255') |
|
62 | 62 | |
|
63 | 63 | ## now delete |
|
64 | 64 | response = self.app.post(url('edit_user_ips_delete', id=default_user_id), |
|
65 | 65 | params=dict(del_ip_id=ip_obj.ip_id, |
|
66 | 66 | _authentication_token=self.authentication_token()), |
|
67 | 67 | extra_environ={'REMOTE_ADDR': '127.0.0.1'}) |
|
68 | 68 | |
|
69 | 69 | # IP permissions are cached, need to invalidate this cache explicitly |
|
70 | 70 | invalidate_all_caches() |
|
71 | 71 | |
|
72 | 72 | response = self.app.get(url('admin_permissions_ips')) |
|
73 | 73 | response.mustcontain('All IP addresses are allowed') |
|
74 | 74 | response.mustcontain(no=['127.0.0.0/24']) |
|
75 | 75 | response.mustcontain(no=['127.0.0.0 - 127.0.0.255']) |
|
76 | 76 | |
|
77 | 77 | |
|
78 | 78 | def test_index_overview(self): |
|
79 | 79 | self.log_user() |
|
80 | 80 | response = self.app.get(url('admin_permissions_perms')) |
|
81 | 81 | # Test response... |
@@ -1,606 +1,607 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | |
|
15 | 15 | from sqlalchemy.orm.exc import NoResultFound, ObjectDeletedError |
|
16 | 16 | |
|
17 | 17 | import pytest |
|
18 | 18 | from kallithea.tests.base import * |
|
19 | 19 | from kallithea.tests.fixture import Fixture |
|
20 | from kallithea.tests.test_context import test_context | |
|
21 | 20 | from kallithea.controllers.admin.users import UsersController |
|
22 | 21 | from kallithea.model.db import User, Permission, UserIpMap, UserApiKeys |
|
23 | 22 | from kallithea.lib.auth import check_password |
|
24 | 23 | from kallithea.model.user import UserModel |
|
25 | 24 | from kallithea.model import validators |
|
26 | 25 | from kallithea.lib import helpers as h |
|
27 | 26 | from kallithea.model.meta import Session |
|
28 | 27 | from webob.exc import HTTPNotFound |
|
29 | 28 | |
|
29 | from tg.util.webtest import test_context | |
|
30 | ||
|
30 | 31 | fixture = Fixture() |
|
31 | 32 | |
|
32 | 33 | @pytest.fixture |
|
33 | 34 | def user_and_repo_group_fail(): |
|
34 | 35 | username = 'repogrouperr' |
|
35 | 36 | groupname = u'repogroup_fail' |
|
36 | 37 | user = fixture.create_user(name=username) |
|
37 | 38 | repo_group = fixture.create_repo_group(name=groupname, cur_user=username) |
|
38 | 39 | yield user, repo_group |
|
39 | 40 | # cleanup |
|
40 | 41 | try: |
|
41 | 42 | fixture.destroy_repo_group(repo_group) |
|
42 | 43 | except ObjectDeletedError: |
|
43 | 44 | # delete already succeeded in test body |
|
44 | 45 | pass |
|
45 | 46 | |
|
46 | 47 | class TestAdminUsersController(TestController): |
|
47 | 48 | test_user_1 = 'testme' |
|
48 | 49 | |
|
49 | 50 | @classmethod |
|
50 | 51 | def teardown_class(cls): |
|
51 | 52 | if User.get_by_username(cls.test_user_1): |
|
52 | 53 | UserModel().delete(cls.test_user_1) |
|
53 | 54 | Session().commit() |
|
54 | 55 | |
|
55 | 56 | def test_index(self): |
|
56 | 57 | self.log_user() |
|
57 | 58 | response = self.app.get(url('users')) |
|
58 | 59 | # TODO: Test response... |
|
59 | 60 | |
|
60 | 61 | def test_create(self): |
|
61 | 62 | self.log_user() |
|
62 | 63 | username = 'newtestuser' |
|
63 | 64 | password = 'test12' |
|
64 | 65 | password_confirmation = password |
|
65 | 66 | name = u'name' |
|
66 | 67 | lastname = u'lastname' |
|
67 | 68 | email = 'mail@example.com' |
|
68 | 69 | |
|
69 | 70 | response = self.app.post(url('new_user'), |
|
70 | 71 | {'username': username, |
|
71 | 72 | 'password': password, |
|
72 | 73 | 'password_confirmation': password_confirmation, |
|
73 | 74 | 'firstname': name, |
|
74 | 75 | 'active': True, |
|
75 | 76 | 'lastname': lastname, |
|
76 | 77 | 'extern_name': 'internal', |
|
77 | 78 | 'extern_type': 'internal', |
|
78 | 79 | 'email': email, |
|
79 | 80 | '_authentication_token': self.authentication_token()}) |
|
80 | 81 | # 302 Found |
|
81 | 82 | # The resource was found at http://localhost/_admin/users/5/edit; you should be redirected automatically. |
|
82 | 83 | |
|
83 | 84 | self.checkSessionFlash(response, '''Created user %s''' % username) |
|
84 | 85 | |
|
85 | 86 | response = response.follow() |
|
86 | 87 | response.mustcontain("""%s user settings""" % username) # in <title> |
|
87 | 88 | |
|
88 | 89 | new_user = Session().query(User). \ |
|
89 | 90 | filter(User.username == username).one() |
|
90 | 91 | |
|
91 | 92 | assert new_user.username == username |
|
92 | 93 | assert check_password(password, new_user.password) == True |
|
93 | 94 | assert new_user.name == name |
|
94 | 95 | assert new_user.lastname == lastname |
|
95 | 96 | assert new_user.email == email |
|
96 | 97 | |
|
97 | 98 | def test_create_err(self): |
|
98 | 99 | self.log_user() |
|
99 | 100 | username = 'new_user' |
|
100 | 101 | password = '' |
|
101 | 102 | name = u'name' |
|
102 | 103 | lastname = u'lastname' |
|
103 | 104 | email = 'errmail.example.com' |
|
104 | 105 | |
|
105 | 106 | response = self.app.post(url('new_user'), |
|
106 | 107 | {'username': username, |
|
107 | 108 | 'password': password, |
|
108 | 109 | 'name': name, |
|
109 | 110 | 'active': False, |
|
110 | 111 | 'lastname': lastname, |
|
111 | 112 | 'email': email, |
|
112 | 113 | '_authentication_token': self.authentication_token()}) |
|
113 | 114 | |
|
114 | 115 | with test_context(self.app): |
|
115 | 116 | msg = validators.ValidUsername(False, {})._messages['system_invalid_username'] |
|
116 | 117 | msg = h.html_escape(msg % {'username': 'new_user'}) |
|
117 | 118 | response.mustcontain("""<span class="error-message">%s</span>""" % msg) |
|
118 | 119 | response.mustcontain("""<span class="error-message">Please enter a value</span>""") |
|
119 | 120 | response.mustcontain("""<span class="error-message">An email address must contain a single @</span>""") |
|
120 | 121 | |
|
121 | 122 | def get_user(): |
|
122 | 123 | Session().query(User).filter(User.username == username).one() |
|
123 | 124 | |
|
124 | 125 | with pytest.raises(NoResultFound): |
|
125 | 126 | get_user(), 'found user in database' |
|
126 | 127 | |
|
127 | 128 | def test_new(self): |
|
128 | 129 | self.log_user() |
|
129 | 130 | response = self.app.get(url('new_user')) |
|
130 | 131 | |
|
131 | 132 | @parametrize('name,attrs', |
|
132 | 133 | [('firstname', {'firstname': 'new_username'}), |
|
133 | 134 | ('lastname', {'lastname': 'new_username'}), |
|
134 | 135 | ('admin', {'admin': True}), |
|
135 | 136 | ('admin', {'admin': False}), |
|
136 | 137 | ('extern_type', {'extern_type': 'ldap'}), |
|
137 | 138 | ('extern_type', {'extern_type': None}), |
|
138 | 139 | ('extern_name', {'extern_name': 'test'}), |
|
139 | 140 | ('extern_name', {'extern_name': None}), |
|
140 | 141 | ('active', {'active': False}), |
|
141 | 142 | ('active', {'active': True}), |
|
142 | 143 | ('email', {'email': 'someemail@example.com'}), |
|
143 | 144 | # ('new_password', {'new_password': 'foobar123', |
|
144 | 145 | # 'password_confirmation': 'foobar123'}) |
|
145 | 146 | ]) |
|
146 | 147 | def test_update(self, name, attrs): |
|
147 | 148 | self.log_user() |
|
148 | 149 | usr = fixture.create_user(self.test_user_1, password='qweqwe', |
|
149 | 150 | email='testme@example.com', |
|
150 | 151 | extern_type='internal', |
|
151 | 152 | extern_name=self.test_user_1, |
|
152 | 153 | skip_if_exists=True) |
|
153 | 154 | Session().commit() |
|
154 | 155 | params = usr.get_api_data(True) |
|
155 | 156 | params.update({'password_confirmation': ''}) |
|
156 | 157 | params.update({'new_password': ''}) |
|
157 | 158 | params.update(attrs) |
|
158 | 159 | if name == 'email': |
|
159 | 160 | params['emails'] = [attrs['email']] |
|
160 | 161 | if name == 'extern_type': |
|
161 | 162 | # cannot update this via the form; the expected value is the original one
|
162 | 163 | params['extern_type'] = "internal" |
|
163 | 164 | if name == 'extern_name': |
|
164 | 165 | # cannot update this via the form; the expected value is the original one
|
165 | 166 | params['extern_name'] = self.test_user_1 |
|
166 | 167 | # special case: since this user is not

167 | 168 | # logged in yet, their data is not filled in,

168 | 169 | # so we use the creation data
|
169 | 170 | |
|
170 | 171 | params.update({'_authentication_token': self.authentication_token()}) |
|
171 | 172 | response = self.app.post(url('update_user', id=usr.user_id), params) |
|
172 | 173 | self.checkSessionFlash(response, 'User updated successfully') |
|
173 | 174 | params.pop('_authentication_token') |
|
174 | 175 | |
|
175 | 176 | updated_user = User.get_by_username(self.test_user_1) |
|
176 | 177 | updated_params = updated_user.get_api_data(True) |
|
177 | 178 | updated_params.update({'password_confirmation': ''}) |
|
178 | 179 | updated_params.update({'new_password': ''}) |
|
179 | 180 | |
|
180 | 181 | assert params == updated_params |
|
181 | 182 | |
|
182 | 183 | def test_delete(self): |
|
183 | 184 | self.log_user() |
|
184 | 185 | username = 'newtestuserdeleteme' |
|
185 | 186 | |
|
186 | 187 | fixture.create_user(name=username) |
|
187 | 188 | |
|
188 | 189 | new_user = Session().query(User) \ |
|
189 | 190 | .filter(User.username == username).one() |
|
190 | 191 | response = self.app.post(url('delete_user', id=new_user.user_id), |
|
191 | 192 | params={'_authentication_token': self.authentication_token()}) |
|
192 | 193 | |
|
193 | 194 | self.checkSessionFlash(response, 'Successfully deleted user') |
|
194 | 195 | |
|
195 | 196 | def test_delete_repo_err(self): |
|
196 | 197 | self.log_user() |
|
197 | 198 | username = 'repoerr' |
|
198 | 199 | reponame = u'repoerr_fail' |
|
199 | 200 | |
|
200 | 201 | fixture.create_user(name=username) |
|
201 | 202 | fixture.create_repo(name=reponame, cur_user=username) |
|
202 | 203 | |
|
203 | 204 | new_user = Session().query(User) \ |
|
204 | 205 | .filter(User.username == username).one() |
|
205 | 206 | response = self.app.post(url('delete_user', id=new_user.user_id), |
|
206 | 207 | params={'_authentication_token': self.authentication_token()}) |
|
207 | 208 | self.checkSessionFlash(response, 'User "%s" still ' |
|
208 | 209 | 'owns 1 repositories and cannot be removed. ' |
|
209 | 210 | 'Switch owners or remove those repositories: ' |
|
210 | 211 | '%s' % (username, reponame)) |
|
211 | 212 | |
|
212 | 213 | response = self.app.post(url('delete_repo', repo_name=reponame), |
|
213 | 214 | params={'_authentication_token': self.authentication_token()}) |
|
214 | 215 | self.checkSessionFlash(response, 'Deleted repository %s' % reponame) |
|
215 | 216 | |
|
216 | 217 | response = self.app.post(url('delete_user', id=new_user.user_id), |
|
217 | 218 | params={'_authentication_token': self.authentication_token()}) |
|
218 | 219 | self.checkSessionFlash(response, 'Successfully deleted user') |
|
219 | 220 | |
|
220 | 221 | def test_delete_repo_group_err(self, user_and_repo_group_fail): |
|
221 | 222 | self.log_user() |
|
222 | 223 | username = 'repogrouperr' |
|
223 | 224 | groupname = u'repogroup_fail' |
|
224 | 225 | |
|
225 | 226 | new_user = Session().query(User) \ |
|
226 | 227 | .filter(User.username == username).one() |
|
227 | 228 | response = self.app.post(url('delete_user', id=new_user.user_id), |
|
228 | 229 | params={'_authentication_token': self.authentication_token()}) |
|
229 | 230 | self.checkSessionFlash(response, 'User "%s" still ' |
|
230 | 231 | 'owns 1 repository groups and cannot be removed. ' |
|
231 | 232 | 'Switch owners or remove those repository groups: ' |
|
232 | 233 | '%s' % (username, groupname)) |
|
233 | 234 | |
|
234 | 235 | # Relevant _if_ the user deletion succeeded to make sure we can render groups without owner |
|
235 | 236 | # rg = RepoGroup.get_by_group_name(group_name=groupname) |
|
236 | 237 | # response = self.app.get(url('repos_groups', id=rg.group_id)) |
|
237 | 238 | |
|
238 | 239 | response = self.app.post(url('delete_repo_group', group_name=groupname), |
|
239 | 240 | params={'_authentication_token': self.authentication_token()}) |
|
240 | 241 | self.checkSessionFlash(response, 'Removed repository group %s' % groupname) |
|
241 | 242 | |
|
242 | 243 | response = self.app.post(url('delete_user', id=new_user.user_id), |
|
243 | 244 | params={'_authentication_token': self.authentication_token()}) |
|
244 | 245 | self.checkSessionFlash(response, 'Successfully deleted user') |
|
245 | 246 | |
|
246 | 247 | def test_delete_user_group_err(self): |
|
247 | 248 | self.log_user() |
|
248 | 249 | username = 'usergrouperr' |
|
249 | 250 | groupname = u'usergroup_fail' |
|
250 | 251 | |
|
251 | 252 | fixture.create_user(name=username) |
|
252 | 253 | ug = fixture.create_user_group(name=groupname, cur_user=username) |
|
253 | 254 | |
|
254 | 255 | new_user = Session().query(User) \ |
|
255 | 256 | .filter(User.username == username).one() |
|
256 | 257 | response = self.app.post(url('delete_user', id=new_user.user_id), |
|
257 | 258 | params={'_authentication_token': self.authentication_token()}) |
|
258 | 259 | self.checkSessionFlash(response, 'User "%s" still ' |
|
259 | 260 | 'owns 1 user groups and cannot be removed. ' |
|
260 | 261 | 'Switch owners or remove those user groups: ' |
|
261 | 262 | '%s' % (username, groupname)) |
|
262 | 263 | |
|
263 | 264 | # TODO: why does this fail?
|
264 | 265 | #response = self.app.delete(url('delete_users_group', id=groupname)) |
|
265 | 266 | #self.checkSessionFlash(response, 'Removed user group %s' % groupname) |
|
266 | 267 | |
|
267 | 268 | fixture.destroy_user_group(ug.users_group_id) |
|
268 | 269 | |
|
269 | 270 | response = self.app.post(url('delete_user', id=new_user.user_id), |
|
270 | 271 | params={'_authentication_token': self.authentication_token()}) |
|
271 | 272 | self.checkSessionFlash(response, 'Successfully deleted user') |
|
272 | 273 | |
|
273 | 274 | def test_edit(self): |
|
274 | 275 | self.log_user() |
|
275 | 276 | user = User.get_by_username(TEST_USER_ADMIN_LOGIN) |
|
276 | 277 | response = self.app.get(url('edit_user', id=user.user_id)) |
|
277 | 278 | |
|
278 | 279 | def test_add_perm_create_repo(self): |
|
279 | 280 | self.log_user() |
|
280 | 281 | perm_none = Permission.get_by_key('hg.create.none') |
|
281 | 282 | perm_create = Permission.get_by_key('hg.create.repository') |
|
282 | 283 | |
|
283 | 284 | user = UserModel().create_or_update(username='dummy', password='qwe', |
|
284 | 285 | email='dummy', firstname=u'a', |
|
285 | 286 | lastname=u'b') |
|
286 | 287 | Session().commit() |
|
287 | 288 | uid = user.user_id |
|
288 | 289 | |
|
289 | 290 | try: |
|
290 | 291 | # user should have neither the 'none' nor the 'create repository' permission yet
|
291 | 292 | assert UserModel().has_perm(user, perm_none) == False |
|
292 | 293 | assert UserModel().has_perm(user, perm_create) == False |
|
293 | 294 | |
|
294 | 295 | response = self.app.post(url('edit_user_perms_update', id=uid), |
|
295 | 296 | params=dict(create_repo_perm=True, |
|
296 | 297 | _authentication_token=self.authentication_token())) |
|
297 | 298 | |
|
298 | 299 | perm_none = Permission.get_by_key('hg.create.none') |
|
299 | 300 | perm_create = Permission.get_by_key('hg.create.repository') |
|
300 | 301 | |
|
301 | 302 | # user should now have the 'create repository' permission
|
302 | 303 | assert UserModel().has_perm(uid, perm_none) == False |
|
303 | 304 | assert UserModel().has_perm(uid, perm_create) == True |
|
304 | 305 | finally: |
|
305 | 306 | UserModel().delete(uid) |
|
306 | 307 | Session().commit() |
|
307 | 308 | |
|
308 | 309 | def test_revoke_perm_create_repo(self): |
|
309 | 310 | self.log_user() |
|
310 | 311 | perm_none = Permission.get_by_key('hg.create.none') |
|
311 | 312 | perm_create = Permission.get_by_key('hg.create.repository') |
|
312 | 313 | |
|
313 | 314 | user = UserModel().create_or_update(username='dummy', password='qwe', |
|
314 | 315 | email='dummy', firstname=u'a', |
|
315 | 316 | lastname=u'b') |
|
316 | 317 | Session().commit() |
|
317 | 318 | uid = user.user_id |
|
318 | 319 | |
|
319 | 320 | try: |
|
320 | 321 | # user should have neither the 'none' nor the 'create repository' permission yet
|
321 | 322 | assert UserModel().has_perm(user, perm_none) == False |
|
322 | 323 | assert UserModel().has_perm(user, perm_create) == False |
|
323 | 324 | |
|
324 | 325 | response = self.app.post(url('edit_user_perms_update', id=uid), |
|
325 | 326 | params=dict(_authentication_token=self.authentication_token())) |
|
326 | 327 | |
|
327 | 328 | perm_none = Permission.get_by_key('hg.create.none') |
|
328 | 329 | perm_create = Permission.get_by_key('hg.create.repository') |
|
329 | 330 | |
|
330 | 331 | # user should now have the 'none' permission instead of 'create repository'
|
331 | 332 | assert UserModel().has_perm(uid, perm_none) == True |
|
332 | 333 | assert UserModel().has_perm(uid, perm_create) == False |
|
333 | 334 | finally: |
|
334 | 335 | UserModel().delete(uid) |
|
335 | 336 | Session().commit() |
|
336 | 337 | |
|
337 | 338 | def test_add_perm_fork_repo(self): |
|
338 | 339 | self.log_user() |
|
339 | 340 | perm_none = Permission.get_by_key('hg.fork.none') |
|
340 | 341 | perm_fork = Permission.get_by_key('hg.fork.repository') |
|
341 | 342 | |
|
342 | 343 | user = UserModel().create_or_update(username='dummy', password='qwe', |
|
343 | 344 | email='dummy', firstname=u'a', |
|
344 | 345 | lastname=u'b') |
|
345 | 346 | Session().commit() |
|
346 | 347 | uid = user.user_id |
|
347 | 348 | |
|
348 | 349 | try: |
|
349 | 350 | # user should have neither the 'none' nor the 'fork repository' permission yet
|
350 | 351 | assert UserModel().has_perm(user, perm_none) == False |
|
351 | 352 | assert UserModel().has_perm(user, perm_fork) == False |
|
352 | 353 | |
|
353 | 354 | response = self.app.post(url('edit_user_perms_update', id=uid), |
|
354 | 355 | params=dict(create_repo_perm=True, |
|
355 | 356 | _authentication_token=self.authentication_token())) |
|
356 | 357 | |
|
357 | 358 | perm_none = Permission.get_by_key('hg.create.none') |
|
358 | 359 | perm_create = Permission.get_by_key('hg.create.repository') |
|
359 | 360 | |
|
360 | 361 | # user should now have the 'create repository' permission
|
361 | 362 | assert UserModel().has_perm(uid, perm_none) == False |
|
362 | 363 | assert UserModel().has_perm(uid, perm_create) == True |
|
363 | 364 | finally: |
|
364 | 365 | UserModel().delete(uid) |
|
365 | 366 | Session().commit() |
|
366 | 367 | |
|
367 | 368 | def test_revoke_perm_fork_repo(self): |
|
368 | 369 | self.log_user() |
|
369 | 370 | perm_none = Permission.get_by_key('hg.fork.none') |
|
370 | 371 | perm_fork = Permission.get_by_key('hg.fork.repository') |
|
371 | 372 | |
|
372 | 373 | user = UserModel().create_or_update(username='dummy', password='qwe', |
|
373 | 374 | email='dummy', firstname=u'a', |
|
374 | 375 | lastname=u'b') |
|
375 | 376 | Session().commit() |
|
376 | 377 | uid = user.user_id |
|
377 | 378 | |
|
378 | 379 | try: |
|
379 | 380 | # user should have neither the 'none' nor the 'fork repository' permission yet
|
380 | 381 | assert UserModel().has_perm(user, perm_none) == False |
|
381 | 382 | assert UserModel().has_perm(user, perm_fork) == False |
|
382 | 383 | |
|
383 | 384 | response = self.app.post(url('edit_user_perms_update', id=uid), |
|
384 | 385 | params=dict(_authentication_token=self.authentication_token())) |
|
385 | 386 | |
|
386 | 387 | perm_none = Permission.get_by_key('hg.create.none') |
|
387 | 388 | perm_create = Permission.get_by_key('hg.create.repository') |
|
388 | 389 | |
|
389 | 390 | # user should now have the 'none' permission instead of 'create repository'
|
390 | 391 | assert UserModel().has_perm(uid, perm_none) == True |
|
391 | 392 | assert UserModel().has_perm(uid, perm_create) == False |
|
392 | 393 | finally: |
|
393 | 394 | UserModel().delete(uid) |
|
394 | 395 | Session().commit() |
|
395 | 396 | |
|
396 | 397 | def test_ips(self): |
|
397 | 398 | self.log_user() |
|
398 | 399 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
399 | 400 | response = self.app.get(url('edit_user_ips', id=user.user_id)) |
|
400 | 401 | response.mustcontain('All IP addresses are allowed') |
|
401 | 402 | |
|
402 | 403 | @parametrize('test_name,ip,ip_range,failure', [ |
|
403 | 404 | ('127/24', '127.0.0.1/24', '127.0.0.0 - 127.0.0.255', False), |
|
404 | 405 | ('10/32', '10.0.0.10/32', '10.0.0.10 - 10.0.0.10', False), |
|
405 | 406 | ('0/16', '0.0.0.0/16', '0.0.0.0 - 0.0.255.255', False), |
|
406 | 407 | ('0/8', '0.0.0.0/8', '0.0.0.0 - 0.255.255.255', False), |
|
407 | 408 | ('127_bad_mask', '127.0.0.1/99', '127.0.0.1 - 127.0.0.1', True), |
|
408 | 409 | ('127_bad_ip', 'foobar', 'foobar', True), |
|
409 | 410 | ]) |
|
410 | 411 | def test_add_ip(self, test_name, ip, ip_range, failure, auto_clear_ip_permissions): |
|
411 | 412 | self.log_user() |
|
412 | 413 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
413 | 414 | user_id = user.user_id |
|
414 | 415 | |
|
415 | 416 | response = self.app.post(url('edit_user_ips_update', id=user_id), |
|
416 | 417 | params=dict(new_ip=ip, _authentication_token=self.authentication_token())) |
|
417 | 418 | |
|
418 | 419 | if failure: |
|
419 | 420 | self.checkSessionFlash(response, 'Please enter a valid IPv4 or IPv6 address') |
|
420 | 421 | response = self.app.get(url('edit_user_ips', id=user_id)) |
|
421 | 422 | response.mustcontain(no=[ip]) |
|
422 | 423 | response.mustcontain(no=[ip_range]) |
|
423 | 424 | |
|
424 | 425 | else: |
|
425 | 426 | response = self.app.get(url('edit_user_ips', id=user_id)) |
|
426 | 427 | response.mustcontain(ip) |
|
427 | 428 | response.mustcontain(ip_range) |
|
428 | 429 | |
|
429 | 430 | def test_delete_ip(self, auto_clear_ip_permissions): |
|
430 | 431 | self.log_user() |
|
431 | 432 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
432 | 433 | user_id = user.user_id |
|
433 | 434 | ip = '127.0.0.1/32' |
|
434 | 435 | ip_range = '127.0.0.1 - 127.0.0.1' |
|
435 | 436 | with test_context(self.app): |
|
436 | 437 | new_ip = UserModel().add_extra_ip(user_id, ip) |
|
437 | 438 | Session().commit() |
|
438 | 439 | new_ip_id = new_ip.ip_id |
|
439 | 440 | |
|
440 | 441 | response = self.app.get(url('edit_user_ips', id=user_id)) |
|
441 | 442 | response.mustcontain(ip) |
|
442 | 443 | response.mustcontain(ip_range) |
|
443 | 444 | |
|
444 | 445 | self.app.post(url('edit_user_ips_delete', id=user_id), |
|
445 | 446 | params=dict(del_ip_id=new_ip_id, _authentication_token=self.authentication_token())) |
|
446 | 447 | |
|
447 | 448 | response = self.app.get(url('edit_user_ips', id=user_id)) |
|
448 | 449 | response.mustcontain('All IP addresses are allowed') |
|
449 | 450 | response.mustcontain(no=[ip]) |
|
450 | 451 | response.mustcontain(no=[ip_range]) |
|
451 | 452 | |
|
452 | 453 | def test_api_keys(self): |
|
453 | 454 | self.log_user() |
|
454 | 455 | |
|
455 | 456 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
456 | 457 | response = self.app.get(url('edit_user_api_keys', id=user.user_id)) |
|
457 | 458 | response.mustcontain(user.api_key) |
|
458 | 459 | response.mustcontain('Expires: Never') |
|
459 | 460 | |
|
460 | 461 | @parametrize('desc,lifetime', [ |
|
461 | 462 | ('forever', -1), |
|
462 | 463 | ('5mins', 60*5), |
|
463 | 464 | ('30days', 60*60*24*30), |
|
464 | 465 | ]) |
|
465 | 466 | def test_add_api_keys(self, desc, lifetime): |
|
466 | 467 | self.log_user() |
|
467 | 468 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
468 | 469 | user_id = user.user_id |
|
469 | 470 | |
|
470 | 471 | response = self.app.post(url('edit_user_api_keys_update', id=user_id), |
|
471 | 472 | {'description': desc, 'lifetime': lifetime, '_authentication_token': self.authentication_token()}) |
|
472 | 473 | self.checkSessionFlash(response, 'API key successfully created') |
|
473 | 474 | try: |
|
474 | 475 | response = response.follow() |
|
475 | 476 | user = User.get(user_id) |
|
476 | 477 | for api_key in user.api_keys: |
|
477 | 478 | response.mustcontain(api_key) |
|
478 | 479 | finally: |
|
479 | 480 | for api_key in UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all(): |
|
480 | 481 | Session().delete(api_key) |
|
481 | 482 | Session().commit() |
|
482 | 483 | |
|
483 | 484 | def test_remove_api_key(self): |
|
484 | 485 | self.log_user() |
|
485 | 486 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
486 | 487 | user_id = user.user_id |
|
487 | 488 | |
|
488 | 489 | response = self.app.post(url('edit_user_api_keys_update', id=user_id), |
|
489 | 490 | {'description': 'desc', 'lifetime': -1, '_authentication_token': self.authentication_token()}) |
|
490 | 491 | self.checkSessionFlash(response, 'API key successfully created') |
|
491 | 492 | response = response.follow() |
|
492 | 493 | |
|
493 | 494 | # now delete the key we just created
|
494 | 495 | keys = UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all() |
|
495 | 496 | assert 1 == len(keys) |
|
496 | 497 | |
|
497 | 498 | response = self.app.post(url('edit_user_api_keys_delete', id=user_id), |
|
498 | 499 | {'del_api_key': keys[0].api_key, '_authentication_token': self.authentication_token()}) |
|
499 | 500 | self.checkSessionFlash(response, 'API key successfully deleted') |
|
500 | 501 | keys = UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all() |
|
501 | 502 | assert 0 == len(keys) |
|
502 | 503 | |
|
503 | 504 | def test_reset_main_api_key(self): |
|
504 | 505 | self.log_user() |
|
505 | 506 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
506 | 507 | user_id = user.user_id |
|
507 | 508 | api_key = user.api_key |
|
508 | 509 | response = self.app.get(url('edit_user_api_keys', id=user_id)) |
|
509 | 510 | response.mustcontain(api_key) |
|
510 | 511 | response.mustcontain('Expires: Never') |
|
511 | 512 | |
|
512 | 513 | response = self.app.post(url('edit_user_api_keys_delete', id=user_id), |
|
513 | 514 | {'del_api_key_builtin': api_key, '_authentication_token': self.authentication_token()}) |
|
514 | 515 | self.checkSessionFlash(response, 'API key successfully reset') |
|
515 | 516 | response = response.follow() |
|
516 | 517 | response.mustcontain(no=[api_key]) |
|
517 | 518 | |
|
518 | 519 | |
|
519 | 520 | class TestAdminUsersController_unittest(TestController): |
|
520 | 521 | """ Unit tests for the users controller """ |
|
521 | 522 | |
|
522 | 523 | def test_get_user_or_raise_if_default(self, monkeypatch, test_context_fixture): |
|
523 | 524 | # flash complains about a non-existing session
|
524 | 525 | def flash_mock(*args, **kwargs): |
|
525 | 526 | pass |
|
526 | 527 | monkeypatch.setattr(h, 'flash', flash_mock) |
|
527 | 528 | |
|
528 | 529 | u = UsersController() |
|
529 | 530 | # a regular user should work correctly |
|
530 | 531 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
531 | 532 | assert u._get_user_or_raise_if_default(user.user_id) == user |
|
532 | 533 | # the default user should raise |
|
533 | 534 | with pytest.raises(HTTPNotFound): |
|
534 | 535 | u._get_user_or_raise_if_default(User.get_default_user().user_id) |
|
535 | 536 | |
|
536 | 537 | |
|
537 | 538 | class TestAdminUsersControllerForDefaultUser(TestController): |
|
538 | 539 | """ |
|
539 | 540 | Edit actions on the default user are not allowed. |
|
540 | 541 | Validate that they throw a 404 exception. |
|
541 | 542 | """ |
|
542 | 543 | def test_edit_default_user(self): |
|
543 | 544 | self.log_user() |
|
544 | 545 | user = User.get_default_user() |
|
545 | 546 | response = self.app.get(url('edit_user', id=user.user_id), status=404) |
|
546 | 547 | |
|
547 | 548 | def test_edit_advanced_default_user(self): |
|
548 | 549 | self.log_user() |
|
549 | 550 | user = User.get_default_user() |
|
550 | 551 | response = self.app.get(url('edit_user_advanced', id=user.user_id), status=404) |
|
551 | 552 | |
|
552 | 553 | # API keys |
|
553 | 554 | def test_edit_api_keys_default_user(self): |
|
554 | 555 | self.log_user() |
|
555 | 556 | user = User.get_default_user() |
|
556 | 557 | response = self.app.get(url('edit_user_api_keys', id=user.user_id), status=404) |
|
557 | 558 | |
|
558 | 559 | def test_add_api_keys_default_user(self): |
|
559 | 560 | self.log_user() |
|
560 | 561 | user = User.get_default_user() |
|
561 | 562 | response = self.app.post(url('edit_user_api_keys_update', id=user.user_id), |
|
562 | 563 | {'_authentication_token': self.authentication_token()}, status=404) |
|
563 | 564 | |
|
564 | 565 | def test_delete_api_keys_default_user(self): |
|
565 | 566 | self.log_user() |
|
566 | 567 | user = User.get_default_user() |
|
567 | 568 | response = self.app.post(url('edit_user_api_keys_delete', id=user.user_id), |
|
568 | 569 | {'_authentication_token': self.authentication_token()}, status=404) |
|
569 | 570 | |
|
570 | 571 | # Permissions |
|
571 | 572 | def test_edit_perms_default_user(self): |
|
572 | 573 | self.log_user() |
|
573 | 574 | user = User.get_default_user() |
|
574 | 575 | response = self.app.get(url('edit_user_perms', id=user.user_id), status=404) |
|
575 | 576 | |
|
576 | 577 | def test_update_perms_default_user(self): |
|
577 | 578 | self.log_user() |
|
578 | 579 | user = User.get_default_user() |
|
579 | 580 | response = self.app.post(url('edit_user_perms_update', id=user.user_id), |
|
580 | 581 | {'_authentication_token': self.authentication_token()}, status=404) |
|
581 | 582 | |
|
582 | 583 | # Emails |
|
583 | 584 | def test_edit_emails_default_user(self): |
|
584 | 585 | self.log_user() |
|
585 | 586 | user = User.get_default_user() |
|
586 | 587 | response = self.app.get(url('edit_user_emails', id=user.user_id), status=404) |
|
587 | 588 | |
|
588 | 589 | def test_add_emails_default_user(self): |
|
589 | 590 | self.log_user() |
|
590 | 591 | user = User.get_default_user() |
|
591 | 592 | response = self.app.post(url('edit_user_emails_update', id=user.user_id), |
|
592 | 593 | {'_authentication_token': self.authentication_token()}, status=404) |
|
593 | 594 | |
|
594 | 595 | def test_delete_emails_default_user(self): |
|
595 | 596 | self.log_user() |
|
596 | 597 | user = User.get_default_user() |
|
597 | 598 | response = self.app.post(url('edit_user_emails_delete', id=user.user_id), |
|
598 | 599 | {'_authentication_token': self.authentication_token()}, status=404) |
|
599 | 600 | |
|
600 | 601 | # IP addresses |
|
601 | 602 | # Add/delete of IP addresses for the default user is used to maintain |
|
602 | 603 | # the global IP whitelist and thus allowed. Only 'edit' is forbidden. |
|
603 | 604 | def test_edit_ip_default_user(self): |
|
604 | 605 | self.log_user() |
|
605 | 606 | user = User.get_default_user() |
|
606 | 607 | response = self.app.get(url('edit_user_ips', id=user.user_id), status=404) |
@@ -1,516 +1,516 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | import re |
|
3 | 3 | import time |
|
4 | 4 | import urlparse |
|
5 | 5 | |
|
6 | 6 | import mock |
|
7 | 7 | |
|
8 | 8 | from kallithea.tests.base import * |
|
9 | 9 | from kallithea.tests.fixture import Fixture |
|
10 | 10 | from kallithea.lib.utils2 import generate_api_key |
|
11 | 11 | from kallithea.lib.auth import check_password |
|
12 | 12 | from kallithea.lib import helpers as h |
|
13 | 13 | from kallithea.model.api_key import ApiKeyModel |
|
14 | 14 | from kallithea.model import validators |
|
15 | 15 | from kallithea.model.db import User, Notification |
|
16 | 16 | from kallithea.model.meta import Session |
|
17 | 17 | from kallithea.model.user import UserModel |
|
18 | 18 | |
|
19 | from kallithea.tests.test_context import test_context | 

19 | from tg.util.webtest import test_context | 
|
20 | 20 | |
|
21 | 21 | fixture = Fixture() |
|
22 | 22 | |
|
23 | 23 | |
|
24 | 24 | class TestLoginController(TestController): |
|
25 | 25 | def setup_method(self, method): |
|
26 | 26 | self.remove_all_notifications() |
|
27 | 27 | assert Notification.query().all() == [] |
|
28 | 28 | |
|
29 | 29 | def test_index(self): |
|
30 | 30 | response = self.app.get(url(controller='login', action='index')) |
|
31 | 31 | assert response.status == '200 OK' |
|
32 | 32 | # Test response... |
|
33 | 33 | |
|
34 | 34 | def test_login_admin_ok(self): |
|
35 | 35 | response = self.app.post(url(controller='login', action='index'), |
|
36 | 36 | {'username': TEST_USER_ADMIN_LOGIN, |
|
37 | 37 | 'password': TEST_USER_ADMIN_PASS}) |
|
38 | 38 | assert response.status == '302 Found' |
|
39 | 39 | self.assert_authenticated_user(response, TEST_USER_ADMIN_LOGIN) |
|
40 | 40 | |
|
41 | 41 | response = response.follow() |
|
42 | 42 | response.mustcontain('/%s' % HG_REPO) |
|
43 | 43 | |
|
44 | 44 | def test_login_regular_ok(self): |
|
45 | 45 | response = self.app.post(url(controller='login', action='index'), |
|
46 | 46 | {'username': TEST_USER_REGULAR_LOGIN, |
|
47 | 47 | 'password': TEST_USER_REGULAR_PASS}) |
|
48 | 48 | |
|
49 | 49 | assert response.status == '302 Found' |
|
50 | 50 | self.assert_authenticated_user(response, TEST_USER_REGULAR_LOGIN) |
|
51 | 51 | |
|
52 | 52 | response = response.follow() |
|
53 | 53 | response.mustcontain('/%s' % HG_REPO) |
|
54 | 54 | |
|
55 | 55 | def test_login_regular_email_ok(self): |
|
56 | 56 | response = self.app.post(url(controller='login', action='index'), |
|
57 | 57 | {'username': TEST_USER_REGULAR_EMAIL, |
|
58 | 58 | 'password': TEST_USER_REGULAR_PASS}) |
|
59 | 59 | |
|
60 | 60 | assert response.status == '302 Found' |
|
61 | 61 | self.assert_authenticated_user(response, TEST_USER_REGULAR_LOGIN) |
|
62 | 62 | |
|
63 | 63 | response = response.follow() |
|
64 | 64 | response.mustcontain('/%s' % HG_REPO) |
|
65 | 65 | |
|
66 | 66 | def test_login_ok_came_from(self): |
|
67 | 67 | test_came_from = '/_admin/users' |
|
68 | 68 | response = self.app.post(url(controller='login', action='index', |
|
69 | 69 | came_from=test_came_from), |
|
70 | 70 | {'username': TEST_USER_ADMIN_LOGIN, |
|
71 | 71 | 'password': TEST_USER_ADMIN_PASS}) |
|
72 | 72 | assert response.status == '302 Found' |
|
73 | 73 | response = response.follow() |
|
74 | 74 | |
|
75 | 75 | assert response.status == '200 OK' |
|
76 | 76 | response.mustcontain('Users Administration') |
|
77 | 77 | |
|
78 | 78 | def test_login_do_not_remember(self): |
|
79 | 79 | response = self.app.post(url(controller='login', action='index'), |
|
80 | 80 | {'username': TEST_USER_REGULAR_LOGIN, |
|
81 | 81 | 'password': TEST_USER_REGULAR_PASS, |
|
82 | 82 | 'remember': False}) |
|
83 | 83 | |
|
84 | 84 | assert 'Set-Cookie' in response.headers |
|
85 | 85 | for cookie in response.headers.getall('Set-Cookie'): |
|
86 | 86 | assert not re.search(r';\s+(Max-Age|Expires)=', cookie, re.IGNORECASE), 'Cookie %r has expiration date, but should be a session cookie' % cookie |
|
87 | 87 | |
|
88 | 88 | def test_login_remember(self): |
|
89 | 89 | response = self.app.post(url(controller='login', action='index'), |
|
90 | 90 | {'username': TEST_USER_REGULAR_LOGIN, |
|
91 | 91 | 'password': TEST_USER_REGULAR_PASS, |
|
92 | 92 | 'remember': True}) |
|
93 | 93 | |
|
94 | 94 | assert 'Set-Cookie' in response.headers |
|
95 | 95 | for cookie in response.headers.getall('Set-Cookie'): |
|
96 | 96 | assert re.search(r';\s+(Max-Age|Expires)=', cookie, re.IGNORECASE), 'Cookie %r should have expiration date, but is a session cookie' % cookie |
|
97 | 97 | |
|
98 | 98 | def test_logout(self): |
|
99 | 99 | response = self.app.post(url(controller='login', action='index'), |
|
100 | 100 | {'username': TEST_USER_REGULAR_LOGIN, |
|
101 | 101 | 'password': TEST_USER_REGULAR_PASS}) |
|
102 | 102 | |
|
103 | 103 | # Verify that a login session has been established. |
|
104 | 104 | response = self.app.get(url(controller='login', action='index')) |
|
105 | 105 | response = response.follow() |
|
106 | 106 | assert 'authuser' in response.session |
|
107 | 107 | |
|
108 | 108 | response.click('Log Out') |
|
109 | 109 | |
|
110 | 110 | # Verify that the login session has been terminated. |
|
111 | 111 | response = self.app.get(url(controller='login', action='index')) |
|
112 | 112 | assert 'authuser' not in response.session |
|
113 | 113 | |
|
114 | 114 | @parametrize('url_came_from', [ |
|
115 | 115 | ('data:text/html,<script>window.alert("xss")</script>',), |
|
116 | 116 | ('mailto:test@example.com',), |
|
117 | 117 | ('file:///etc/passwd',), |
|
118 | 118 | ('ftp://ftp.example.com',), |
|
119 | 119 | ('http://other.example.com/bl%C3%A5b%C3%A6rgr%C3%B8d',), |
|
120 | 120 | ('//evil.example.com/',), |
|
121 | 121 | ('/\r\nX-Header-Injection: boo',), |
|
122 | 122 | ('/invälid_url_bytes',), |
|
123 | 123 | ('non-absolute-path',), |
|
124 | 124 | ]) |
|
125 | 125 | def test_login_bad_came_froms(self, url_came_from): |
|
126 | 126 | response = self.app.post(url(controller='login', action='index', |
|
127 | 127 | came_from=url_came_from), |
|
128 | 128 | {'username': TEST_USER_ADMIN_LOGIN, |
|
129 | 129 | 'password': TEST_USER_ADMIN_PASS}, |
|
130 | 130 | status=400) |
|
131 | 131 | |
|
132 | 132 | def test_login_short_password(self): |
|
133 | 133 | response = self.app.post(url(controller='login', action='index'), |
|
134 | 134 | {'username': TEST_USER_ADMIN_LOGIN, |
|
135 | 135 | 'password': 'as'}) |
|
136 | 136 | assert response.status == '200 OK' |
|
137 | 137 | |
|
138 | 138 | response.mustcontain('Enter 3 characters or more') |
|
139 | 139 | |
|
140 | 140 | def test_login_wrong_username_password(self): |
|
141 | 141 | response = self.app.post(url(controller='login', action='index'), |
|
142 | 142 | {'username': 'error', |
|
143 | 143 | 'password': 'test12'}) |
|
144 | 144 | |
|
145 | 145 | response.mustcontain('Invalid username or password') |
|
146 | 146 | |
|
147 | 147 | # Verify that GET arguments are correctly preserved across login redirection |
|
148 | 148 | |
|
149 | 149 | @parametrize('args,args_encoded', [ |
|
150 | 150 | ({'foo':'one', 'bar':'two'}, (('foo', 'one'), ('bar', 'two'))), |
|
151 | 151 | ({'blue': u'blå'.encode('utf-8'), 'green':u'grøn'}, |
|
152 | 152 | (('blue', u'blå'.encode('utf-8')), ('green', u'grøn'.encode('utf-8')))), |
|
153 | 153 | ]) |
|
154 | 154 | def test_redirection_to_login_form_preserves_get_args(self, args, args_encoded): |
|
155 | 155 | with fixture.anon_access(False): |
|
156 | 156 | response = self.app.get(url(controller='summary', action='index', |
|
157 | 157 | repo_name=HG_REPO, |
|
158 | 158 | **args)) |
|
159 | 159 | assert response.status == '302 Found' |
|
160 | 160 | came_from = urlparse.parse_qs(urlparse.urlparse(response.location).query)['came_from'][0] |
|
161 | 161 | came_from_qs = urlparse.parse_qsl(urlparse.urlparse(came_from).query) |
|
162 | 162 | for encoded in args_encoded: |
|
163 | 163 | assert encoded in came_from_qs |
|
164 | 164 | |
|
165 | 165 | @parametrize('args,args_encoded', [ |
|
166 | 166 | ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')), |
|
167 | 167 | ({'blue': u'blå', 'green':u'grøn'}, |
|
168 | 168 | ('blue=bl%C3%A5', 'green=gr%C3%B8n')), |
|
169 | 169 | ]) |
|
170 | 170 | def test_login_form_preserves_get_args(self, args, args_encoded): |
|
171 | 171 | response = self.app.get(url(controller='login', action='index', |
|
172 | 172 | came_from=url('/_admin/users', **args))) |
|
173 | 173 | came_from = urlparse.parse_qs(urlparse.urlparse(response.form.action).query)['came_from'][0] |
|
174 | 174 | for encoded in args_encoded: |
|
175 | 175 | assert encoded in came_from |
|
176 | 176 | |
|
177 | 177 | @parametrize('args,args_encoded', [ |
|
178 | 178 | ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')), |
|
179 | 179 | ({'blue': u'blå', 'green':u'grøn'}, |
|
180 | 180 | ('blue=bl%C3%A5', 'green=gr%C3%B8n')), |
|
181 | 181 | ]) |
|
182 | 182 | def test_redirection_after_successful_login_preserves_get_args(self, args, args_encoded): |
|
183 | 183 | response = self.app.post(url(controller='login', action='index', |
|
184 | 184 | came_from = url('/_admin/users', **args)), |
|
185 | 185 | {'username': TEST_USER_ADMIN_LOGIN, |
|
186 | 186 | 'password': TEST_USER_ADMIN_PASS}) |
|
187 | 187 | assert response.status == '302 Found' |
|
188 | 188 | for encoded in args_encoded: |
|
189 | 189 | assert encoded in response.location |
|
190 | 190 | |
|
191 | 191 | @parametrize('args,args_encoded', [ |
|
192 | 192 | ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')), |
|
193 | 193 | ({'blue': u'blå', 'green':u'grøn'}, |
|
194 | 194 | ('blue=bl%C3%A5', 'green=gr%C3%B8n')), |
|
195 | 195 | ]) |
|
196 | 196 | def test_login_form_after_incorrect_login_preserves_get_args(self, args, args_encoded): |
|
197 | 197 | response = self.app.post(url(controller='login', action='index', |
|
198 | 198 | came_from=url('/_admin/users', **args)), |
|
199 | 199 | {'username': 'error', |
|
200 | 200 | 'password': 'test12'}) |
|
201 | 201 | |
|
202 | 202 | response.mustcontain('Invalid username or password') |
|
203 | 203 | came_from = urlparse.parse_qs(urlparse.urlparse(response.form.action).query)['came_from'][0] |
|
204 | 204 | for encoded in args_encoded: |
|
205 | 205 | assert encoded in came_from |
|
206 | 206 | |
|
207 | 207 | #========================================================================== |
|
208 | 208 | # REGISTRATIONS |
|
209 | 209 | #========================================================================== |
|
210 | 210 | def test_register(self): |
|
211 | 211 | response = self.app.get(url(controller='login', action='register')) |
|
212 | 212 | response.mustcontain('Sign Up') |
|
213 | 213 | |
|
214 | 214 | def test_register_err_same_username(self): |
|
215 | 215 | uname = TEST_USER_ADMIN_LOGIN |
|
216 | 216 | response = self.app.post(url(controller='login', action='register'), |
|
217 | 217 | {'username': uname, |
|
218 | 218 | 'password': 'test12', |
|
219 | 219 | 'password_confirmation': 'test12', |
|
220 | 220 | 'email': 'goodmail@example.com', |
|
221 | 221 | 'firstname': 'test', |
|
222 | 222 | 'lastname': 'test'}) |
|
223 | 223 | |
|
224 | 224 | with test_context(self.app): |
|
225 | 225 | msg = validators.ValidUsername()._messages['username_exists'] |
|
226 | 226 | msg = h.html_escape(msg % {'username': uname}) |
|
227 | 227 | response.mustcontain(msg) |
|
228 | 228 | |
|
229 | 229 | def test_register_err_same_email(self): |
|
230 | 230 | response = self.app.post(url(controller='login', action='register'), |
|
231 | 231 | {'username': 'test_admin_0', |
|
232 | 232 | 'password': 'test12', |
|
233 | 233 | 'password_confirmation': 'test12', |
|
234 | 234 | 'email': TEST_USER_ADMIN_EMAIL, |
|
235 | 235 | 'firstname': 'test', |
|
236 | 236 | 'lastname': 'test'}) |
|
237 | 237 | |
|
238 | 238 | with test_context(self.app): |
|
239 | 239 | msg = validators.UniqSystemEmail()()._messages['email_taken'] |
|
240 | 240 | response.mustcontain(msg) |
|
241 | 241 | |
|
242 | 242 | def test_register_err_same_email_case_sensitive(self): |
|
243 | 243 | response = self.app.post(url(controller='login', action='register'), |
|
244 | 244 | {'username': 'test_admin_1', |
|
245 | 245 | 'password': 'test12', |
|
246 | 246 | 'password_confirmation': 'test12', |
|
247 | 247 | 'email': TEST_USER_ADMIN_EMAIL.title(), |
|
248 | 248 | 'firstname': 'test', |
|
249 | 249 | 'lastname': 'test'}) |
|
250 | 250 | with test_context(self.app): |
|
251 | 251 | msg = validators.UniqSystemEmail()()._messages['email_taken'] |
|
252 | 252 | response.mustcontain(msg) |
|
253 | 253 | |
|
254 | 254 | def test_register_err_wrong_data(self): |
|
255 | 255 | response = self.app.post(url(controller='login', action='register'), |
|
256 | 256 | {'username': 'xs', |
|
257 | 257 | 'password': 'test', |
|
258 | 258 | 'password_confirmation': 'test', |
|
259 | 259 | 'email': 'goodmailm', |
|
260 | 260 | 'firstname': 'test', |
|
261 | 261 | 'lastname': 'test'}) |
|
262 | 262 | assert response.status == '200 OK' |
|
263 | 263 | response.mustcontain('An email address must contain a single @') |
|
264 | 264 | response.mustcontain('Enter a value 6 characters long or more') |
|
265 | 265 | |
|
266 | 266 | def test_register_err_username(self): |
|
267 | 267 | response = self.app.post(url(controller='login', action='register'), |
|
268 | 268 | {'username': 'error user', |
|
269 | 269 | 'password': 'test12', |
|
270 | 270 | 'password_confirmation': 'test12', |
|
271 | 271 | 'email': 'goodmailm', |
|
272 | 272 | 'firstname': 'test', |
|
273 | 273 | 'lastname': 'test'}) |
|
274 | 274 | |
|
275 | 275 | response.mustcontain('An email address must contain a single @') |
|
276 | 276 | response.mustcontain('Username may only contain ' |
|
277 | 277 | 'alphanumeric characters underscores, ' |
|
278 | 278 | 'periods or dashes and must begin with an ' |
|
279 | 279 | 'alphanumeric character') |
|
280 | 280 | |
|
281 | 281 | def test_register_err_case_sensitive(self): |
|
282 | 282 | usr = TEST_USER_ADMIN_LOGIN.title() |
|
283 | 283 | response = self.app.post(url(controller='login', action='register'), |
|
284 | 284 | {'username': usr, |
|
285 | 285 | 'password': 'test12', |
|
286 | 286 | 'password_confirmation': 'test12', |
|
287 | 287 | 'email': 'goodmailm', |
|
288 | 288 | 'firstname': 'test', |
|
289 | 289 | 'lastname': 'test'}) |
|
290 | 290 | |
|
291 | 291 | response.mustcontain('An email address must contain a single @') |
|
292 | 292 | with test_context(self.app): |
|
293 | 293 | msg = validators.ValidUsername()._messages['username_exists'] |
|
294 | 294 | msg = h.html_escape(msg % {'username': usr}) |
|
295 | 295 | response.mustcontain(msg) |
|
296 | 296 | |
|
297 | 297 | def test_register_special_chars(self): |
|
298 | 298 | response = self.app.post(url(controller='login', action='register'), |
|
299 | 299 | {'username': 'xxxaxn', |
|
300 | 300 | 'password': 'ąćźżąśśśś', |
|
301 | 301 | 'password_confirmation': 'ąćźżąśśśś', |
|
302 | 302 | 'email': 'goodmailm@test.plx', |
|
303 | 303 | 'firstname': 'test', |
|
304 | 304 | 'lastname': 'test'}) |
|
305 | 305 | |
|
306 | 306 | with test_context(self.app): |
|
307 | 307 | msg = validators.ValidPassword()._messages['invalid_password'] |
|
308 | 308 | response.mustcontain(msg) |
|
309 | 309 | |
|
310 | 310 | def test_register_password_mismatch(self): |
|
311 | 311 | response = self.app.post(url(controller='login', action='register'), |
|
312 | 312 | {'username': 'xs', |
|
313 | 313 | 'password': '123qwe', |
|
314 | 314 | 'password_confirmation': 'qwe123', |
|
315 | 315 | 'email': 'goodmailm@test.plxa', |
|
316 | 316 | 'firstname': 'test', |
|
317 | 317 | 'lastname': 'test'}) |
|
318 | 318 | with test_context(self.app): |
|
319 | 319 | msg = validators.ValidPasswordsMatch('password', 'password_confirmation')._messages['password_mismatch'] |
|
320 | 320 | response.mustcontain(msg) |
|
321 | 321 | |
|
322 | 322 | def test_register_ok(self): |
|
323 | 323 | username = 'test_regular4' |
|
324 | 324 | password = 'qweqwe' |
|
325 | 325 | email = 'user4@example.com' |
|
326 | 326 | name = 'testname' |
|
327 | 327 | lastname = 'testlastname' |
|
328 | 328 | |
|
329 | 329 | response = self.app.post(url(controller='login', action='register'), |
|
330 | 330 | {'username': username, |
|
331 | 331 | 'password': password, |
|
332 | 332 | 'password_confirmation': password, |
|
333 | 333 | 'email': email, |
|
334 | 334 | 'firstname': name, |
|
335 | 335 | 'lastname': lastname, |
|
336 | 336 | 'admin': True}) # This should be overridden |
|
337 | 337 | assert response.status == '302 Found' |
|
338 | 338 | self.checkSessionFlash(response, 'You have successfully registered with Kallithea') |
|
339 | 339 | |
|
340 | 340 | ret = Session().query(User).filter(User.username == 'test_regular4').one() |
|
341 | 341 | assert ret.username == username |
|
342 | 342 | assert check_password(password, ret.password) |
|
343 | 343 | assert ret.email == email |
|
344 | 344 | assert ret.name == name |
|
345 | 345 | assert ret.lastname == lastname |
|
346 | 346 | assert ret.api_key is not None |
|
347 | 347 | assert ret.admin is False |
|
348 | 348 | |
|
349 | 349 | #========================================================================== |
|
350 | 350 | # PASSWORD RESET |
|
351 | 351 | #========================================================================== |
|
352 | 352 | |
|
353 | 353 | def test_forgot_password_wrong_mail(self): |
|
354 | 354 | bad_email = 'username%wrongmail.org' |
|
355 | 355 | response = self.app.post( |
|
356 | 356 | url(controller='login', action='password_reset'), |
|
357 | 357 | {'email': bad_email, } |
|
358 | 358 | ) |
|
359 | 359 | |
|
360 | 360 | response.mustcontain('An email address must contain a single @') |
|
361 | 361 | |
|
362 | 362 | def test_forgot_password(self): |
|
363 | 363 | response = self.app.get(url(controller='login', |
|
364 | 364 | action='password_reset')) |
|
365 | 365 | assert response.status == '200 OK' |
|
366 | 366 | |
|
367 | 367 | username = 'test_password_reset_1' |
|
368 | 368 | password = 'qweqwe' |
|
369 | 369 | email = 'username@example.com' |
|
370 | 370 | name = u'passwd' |
|
371 | 371 | lastname = u'reset' |
|
372 | 372 | timestamp = int(time.time()) |
|
373 | 373 | |
|
374 | 374 | new = User() |
|
375 | 375 | new.username = username |
|
376 | 376 | new.password = password |
|
377 | 377 | new.email = email |
|
378 | 378 | new.name = name |
|
379 | 379 | new.lastname = lastname |
|
380 | 380 | new.api_key = generate_api_key() |
|
381 | 381 | Session().add(new) |
|
382 | 382 | Session().commit() |
|
383 | 383 | |
|
384 | 384 | response = self.app.post(url(controller='login', |
|
385 | 385 | action='password_reset'), |
|
386 | 386 | {'email': email, }) |
|
387 | 387 | |
|
388 | 388 | self.checkSessionFlash(response, 'A password reset confirmation code has been sent') |
|
389 | 389 | |
|
390 | 390 | response = response.follow() |
|
391 | 391 | |
|
392 | 392 | # BAD TOKEN |
|
393 | 393 | |
|
394 | 394 | token = "bad" |
|
395 | 395 | |
|
396 | 396 | response = self.app.post(url(controller='login', |
|
397 | 397 | action='password_reset_confirmation'), |
|
398 | 398 | {'email': email, |
|
399 | 399 | 'timestamp': timestamp, |
|
400 | 400 | 'password': "p@ssw0rd", |
|
401 | 401 | 'password_confirm': "p@ssw0rd", |
|
402 | 402 | 'token': token, |
|
403 | 403 | }) |
|
404 | 404 | assert response.status == '200 OK' |
|
405 | 405 | response.mustcontain('Invalid password reset token') |
|
406 | 406 | |
|
407 | 407 | # GOOD TOKEN |
|
408 | 408 | |
|
409 | 409 | # TODO: The token should ideally be taken from the mail sent |
|
410 | 410 | # above, instead of being recalculated. |
|
411 | 411 | |
|
412 | 412 | token = UserModel().get_reset_password_token( |
|
413 | 413 | User.get_by_username(username), timestamp, self.authentication_token()) |
|
414 | 414 | |
|
415 | 415 | response = self.app.get(url(controller='login', |
|
416 | 416 | action='password_reset_confirmation', |
|
417 | 417 | email=email, |
|
418 | 418 | timestamp=timestamp, |
|
419 | 419 | token=token)) |
|
420 | 420 | assert response.status == '200 OK' |
|
421 | 421 | response.mustcontain("You are about to set a new password for the email address %s" % email) |
|
422 | 422 | |
|
423 | 423 | response = self.app.post(url(controller='login', |
|
424 | 424 | action='password_reset_confirmation'), |
|
425 | 425 | {'email': email, |
|
426 | 426 | 'timestamp': timestamp, |
|
427 | 427 | 'password': "p@ssw0rd", |
|
428 | 428 | 'password_confirm': "p@ssw0rd", |
|
429 | 429 | 'token': token, |
|
430 | 430 | }) |
|
431 | 431 | assert response.status == '302 Found' |
|
432 | 432 | self.checkSessionFlash(response, 'Successfully updated password') |
|
433 | 433 | |
|
434 | 434 | response = response.follow() |
|
435 | 435 | |
|
436 | 436 | #========================================================================== |
|
437 | 437 | # API |
|
438 | 438 | #========================================================================== |
|
439 | 439 | |
|
440 | 440 | def _get_api_whitelist(self, values=None): |
|
441 | 441 | config = {'api_access_controllers_whitelist': values or []} |
|
442 | 442 | return config |
|
443 | 443 | |
|
444 | 444 | def _api_key_test(self, api_key, status): |
|
445 | 445 | """Verifies HTTP status code for accessing an auth-requiring page, |
|
446 | 446 | using the given api_key URL parameter as well as using the API key |
|
447 | 447 | with bearer authentication. |
|
448 | 448 | |
|
449 | 449 | If api_key is None, no api_key is passed at all. If api_key is True, |
|
450 | 450 | a real, working API key is used. |
|
451 | 451 | """ |
|
452 | 452 | with fixture.anon_access(False): |
|
453 | 453 | if api_key is None: |
|
454 | 454 | params = {} |
|
455 | 455 | headers = {} |
|
456 | 456 | else: |
|
457 | 457 | if api_key is True: |
|
458 | 458 | api_key = User.get_first_admin().api_key |
|
459 | 459 | params = {'api_key': api_key} |
|
460 | 460 | headers = {'Authorization': 'Bearer ' + str(api_key)} |
|
461 | 461 | |
|
462 | 462 | self.app.get(url(controller='changeset', action='changeset_raw', |
|
463 | 463 | repo_name=HG_REPO, revision='tip', **params), |
|
464 | 464 | status=status) |
|
465 | 465 | |
|
466 | 466 | self.app.get(url(controller='changeset', action='changeset_raw', |
|
467 | 467 | repo_name=HG_REPO, revision='tip'), |
|
468 | 468 | headers=headers, |
|
469 | 469 | status=status) |
|
470 | 470 | |
|
471 | 471 | @parametrize('test_name,api_key,code', [ |
|
472 | 472 | ('none', None, 302), |
|
473 | 473 | ('empty_string', '', 403), |
|
474 | 474 | ('fake_number', '123456', 403), |
|
475 | 475 | ('proper_api_key', True, 403) |
|
476 | 476 | ]) |
|
477 | 477 | def test_access_not_whitelisted_page_via_api_key(self, test_name, api_key, code): |
|
478 | 478 | whitelist = self._get_api_whitelist([]) |
|
479 | 479 | with mock.patch('kallithea.CONFIG', whitelist): |
|
480 | 480 | assert [] == whitelist['api_access_controllers_whitelist'] |
|
481 | 481 | self._api_key_test(api_key, code) |
|
482 | 482 | |
|
483 | 483 | @parametrize('test_name,api_key,code', [ |
|
484 | 484 | ('none', None, 302), |
|
485 | 485 | ('empty_string', '', 403), |
|
486 | 486 | ('fake_number', '123456', 403), |
|
487 | 487 | ('fake_not_alnum', 'a-z', 403), |
|
488 | 488 | ('fake_api_key', '0123456789abcdef0123456789ABCDEF01234567', 403), |
|
489 | 489 | ('proper_api_key', True, 200) |
|
490 | 490 | ]) |
|
491 | 491 | def test_access_whitelisted_page_via_api_key(self, test_name, api_key, code): |
|
492 | 492 | whitelist = self._get_api_whitelist(['ChangesetController:changeset_raw']) |
|
493 | 493 | with mock.patch('kallithea.CONFIG', whitelist): |
|
494 | 494 | assert ['ChangesetController:changeset_raw'] == whitelist['api_access_controllers_whitelist'] |
|
495 | 495 | self._api_key_test(api_key, code) |
|
496 | 496 | |
|
497 | 497 | def test_access_page_via_extra_api_key(self): |
|
498 | 498 | whitelist = self._get_api_whitelist(['ChangesetController:changeset_raw']) |
|
499 | 499 | with mock.patch('kallithea.CONFIG', whitelist): |
|
500 | 500 | assert ['ChangesetController:changeset_raw'] == whitelist['api_access_controllers_whitelist'] |
|
501 | 501 | |
|
502 | 502 | new_api_key = ApiKeyModel().create(TEST_USER_ADMIN_LOGIN, u'test') |
|
503 | 503 | Session().commit() |
|
504 | 504 | self._api_key_test(new_api_key.api_key, status=200) |
|
505 | 505 | |
|
506 | 506 | def test_access_page_via_expired_api_key(self): |
|
507 | 507 | whitelist = self._get_api_whitelist(['ChangesetController:changeset_raw']) |
|
508 | 508 | with mock.patch('kallithea.CONFIG', whitelist): |
|
509 | 509 | assert ['ChangesetController:changeset_raw'] == whitelist['api_access_controllers_whitelist'] |
|
510 | 510 | |
|
511 | 511 | new_api_key = ApiKeyModel().create(TEST_USER_ADMIN_LOGIN, u'test') |
|
512 | 512 | Session().commit() |
|
513 | 513 | # patch the API key to make it expired |
|
514 | 514 | new_api_key.expires = 0 |
|
515 | 515 | Session().commit() |
|
516 | 516 | self._api_key_test(new_api_key.api_key, status=403) |
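Taken together, the API tests above pin down a three-way gate: no credential means a redirect to interactive login (302), an API key on a page outside api_access_controllers_whitelist is rejected outright (403), and only on a whitelisted page is the key itself validated. A rough sketch of that gate, with illustrative names only (not Kallithea's actual implementation):

    def api_access_disposition(controller_action, whitelist, api_key=None):
        # No key at all: treat the request as anonymous; with anonymous
        # access disabled this ends in a login redirect (the 302 cases).
        if api_key is None:
            return 'redirect'
        # Any key, real or fake, on a non-whitelisted page is refused,
        # which is why even the proper key gets 403 in the first test.
        if controller_action not in whitelist:
            return 'forbidden'
        # Only now is the key itself checked; fake keys still end in 403.
        return 'validate-key'

    assert api_access_disposition('ChangesetController:changeset_raw',
                                  [], api_key='123456') == 'forbidden'
    assert api_access_disposition('ChangesetController:changeset_raw',
                                  ['ChangesetController:changeset_raw'],
                                  api_key='123456') == 'validate-key'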
@@ -1,250 +1,250 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | from kallithea.model.db import User, UserFollowing, Repository, UserApiKeys |
|
4 | 4 | from kallithea.tests.base import * |
|
5 | 5 | from kallithea.tests.fixture import Fixture |
|
6 | 6 | from kallithea.lib import helpers as h |
|
7 | 7 | from kallithea.model.user import UserModel |
|
8 | 8 | from kallithea.model.meta import Session |
|
9 | 9 | |
|
10 | | from |
|
| 10 | from tg.util.webtest import test_context |
|
11 | 11 | |
|
12 | 12 | fixture = Fixture() |
|
13 | 13 | |
|
14 | 14 | |
|
15 | 15 | class TestMyAccountController(TestController): |
|
16 | 16 | test_user_1 = 'testme' |
|
17 | 17 | |
|
18 | 18 | @classmethod |
|
19 | 19 | def teardown_class(cls): |
|
20 | 20 | if User.get_by_username(cls.test_user_1): |
|
21 | 21 | UserModel().delete(cls.test_user_1) |
|
22 | 22 | Session().commit() |
|
23 | 23 | |
|
24 | 24 | def test_my_account(self): |
|
25 | 25 | self.log_user() |
|
26 | 26 | response = self.app.get(url('my_account')) |
|
27 | 27 | |
|
28 | 28 | response.mustcontain('value="%s' % TEST_USER_ADMIN_LOGIN) |
|
29 | 29 | |
|
30 | 30 | def test_my_account_my_repos(self): |
|
31 | 31 | self.log_user() |
|
32 | 32 | response = self.app.get(url('my_account_repos')) |
|
33 | 33 | cnt = Repository.query().filter(Repository.owner == |
|
34 | 34 | User.get_by_username(TEST_USER_ADMIN_LOGIN)).count() |
|
35 | 35 | response.mustcontain('"totalRecords": %s' % cnt) |
|
36 | 36 | |
|
37 | 37 | def test_my_account_my_watched(self): |
|
38 | 38 | self.log_user() |
|
39 | 39 | response = self.app.get(url('my_account_watched')) |
|
40 | 40 | |
|
41 | 41 | cnt = UserFollowing.query().filter(UserFollowing.user == |
|
42 | 42 | User.get_by_username(TEST_USER_ADMIN_LOGIN)).count() |
|
43 | 43 | response.mustcontain('"totalRecords": %s' % cnt) |
|
44 | 44 | |
|
45 | 45 | def test_my_account_my_emails(self): |
|
46 | 46 | self.log_user() |
|
47 | 47 | response = self.app.get(url('my_account_emails')) |
|
48 | 48 | response.mustcontain('No additional emails specified') |
|
49 | 49 | |
|
50 | 50 | def test_my_account_my_emails_add_existing_email(self): |
|
51 | 51 | self.log_user() |
|
52 | 52 | response = self.app.get(url('my_account_emails')) |
|
53 | 53 | response.mustcontain('No additional emails specified') |
|
54 | 54 | response = self.app.post(url('my_account_emails'), |
|
55 | 55 | {'new_email': TEST_USER_REGULAR_EMAIL, '_authentication_token': self.authentication_token()}) |
|
56 | 56 | self.checkSessionFlash(response, 'This email address is already in use') |
|
57 | 57 | |
|
58 | 58 | def test_my_account_my_emails_add_missing_email_in_form(self): |
|
59 | 59 | self.log_user() |
|
60 | 60 | response = self.app.get(url('my_account_emails')) |
|
61 | 61 | response.mustcontain('No additional emails specified') |
|
62 | 62 | response = self.app.post(url('my_account_emails'), |
|
63 | 63 | {'_authentication_token': self.authentication_token()}) |
|
64 | 64 | self.checkSessionFlash(response, 'Please enter an email address') |
|
65 | 65 | |
|
66 | 66 | def test_my_account_my_emails_add_remove(self): |
|
67 | 67 | self.log_user() |
|
68 | 68 | response = self.app.get(url('my_account_emails')) |
|
69 | 69 | response.mustcontain('No additional emails specified') |
|
70 | 70 | |
|
71 | 71 | response = self.app.post(url('my_account_emails'), |
|
72 | 72 | {'new_email': 'barz@example.com', '_authentication_token': self.authentication_token()}) |
|
73 | 73 | |
|
74 | 74 | response = self.app.get(url('my_account_emails')) |
|
75 | 75 | |
|
76 | 76 | from kallithea.model.db import UserEmailMap |
|
77 | 77 | email_id = UserEmailMap.query() \ |
|
78 | 78 | .filter(UserEmailMap.user == User.get_by_username(TEST_USER_ADMIN_LOGIN)) \ |
|
79 | 79 | .filter(UserEmailMap.email == 'barz@example.com').one().email_id |
|
80 | 80 | |
|
81 | 81 | response.mustcontain('barz@example.com') |
|
82 | 82 | response.mustcontain('<input id="del_email_id" name="del_email_id" type="hidden" value="%s" />' % email_id) |
|
83 | 83 | |
|
84 | 84 | response = self.app.post(url('my_account_emails_delete'), |
|
85 | 85 | {'del_email_id': email_id, '_authentication_token': self.authentication_token()}) |
|
86 | 86 | self.checkSessionFlash(response, 'Removed email from user') |
|
87 | 87 | response = self.app.get(url('my_account_emails')) |
|
88 | 88 | response.mustcontain('No additional emails specified') |
|
89 | 89 | |
|
90 | 90 | |
|
91 | 91 | @parametrize('name,attrs', |
|
92 | 92 | [('firstname', {'firstname': 'new_username'}), |
|
93 | 93 | ('lastname', {'lastname': 'new_username'}), |
|
94 | 94 | ('admin', {'admin': True}), |
|
95 | 95 | ('admin', {'admin': False}), |
|
96 | 96 | ('extern_type', {'extern_type': 'ldap'}), |
|
97 | 97 | ('extern_type', {'extern_type': None}), |
|
98 | 98 | #('extern_name', {'extern_name': 'test'}), |
|
99 | 99 | #('extern_name', {'extern_name': None}), |
|
100 | 100 | ('active', {'active': False}), |
|
101 | 101 | ('active', {'active': True}), |
|
102 | 102 | ('email', {'email': 'someemail@example.com'}), |
|
103 | 103 | # ('new_password', {'new_password': 'foobar123', |
|
104 | 104 | # 'password_confirmation': 'foobar123'}) |
|
105 | 105 | ]) |
|
106 | 106 | def test_my_account_update(self, name, attrs): |
|
107 | 107 | usr = fixture.create_user(self.test_user_1, password='qweqwe', |
|
108 | 108 | email='testme@example.com', |
|
109 | 109 | extern_type='internal', |
|
110 | 110 | extern_name=self.test_user_1, |
|
111 | 111 | skip_if_exists=True) |
|
112 | 112 | params = usr.get_api_data(True) # current user data |
|
113 | 113 | user_id = usr.user_id |
|
114 | 114 | self.log_user(username=self.test_user_1, password='qweqwe') |
|
115 | 115 | |
|
116 | 116 | params.update({'password_confirmation': ''}) |
|
117 | 117 | params.update({'new_password': ''}) |
|
118 | 118 | params.update({'extern_type': 'internal'}) |
|
119 | 119 | params.update({'extern_name': self.test_user_1}) |
|
120 | 120 | params.update({'_authentication_token': self.authentication_token()}) |
|
121 | 121 | |
|
122 | 122 | params.update(attrs) |
|
123 | 123 | response = self.app.post(url('my_account'), params) |
|
124 | 124 | |
|
125 | 125 | self.checkSessionFlash(response, |
|
126 | 126 | 'Your account was updated successfully') |
|
127 | 127 | |
|
128 | 128 | updated_user = User.get_by_username(self.test_user_1) |
|
129 | 129 | updated_params = updated_user.get_api_data(True) |
|
130 | 130 | updated_params.update({'password_confirmation': ''}) |
|
131 | 131 | updated_params.update({'new_password': ''}) |
|
132 | 132 | |
|
133 | 133 | params['last_login'] = updated_params['last_login'] |
|
134 | 134 | if name == 'email': |
|
135 | 135 | params['emails'] = [attrs['email']] |
|
136 | 136 | if name == 'extern_type': |
|
137 | 137 | # cannot be updated via the form; the expected value is the original one |
|
138 | 138 | params['extern_type'] = "internal" |
|
139 | 139 | if name == 'extern_name': |
|
140 | 140 | # cannot be updated via the form; the expected value is the original one |
|
141 | 141 | params['extern_name'] = str(user_id) |
|
142 | 142 | if name == 'active': |
|
143 | 143 | # 'My account' cannot deactivate the account |
|
144 | 144 | params['active'] = True |
|
145 | 145 | if name == 'admin': |
|
146 | 146 | # 'My account' cannot make you an admin! |
|
147 | 147 | params['admin'] = False |
|
148 | 148 | |
|
149 | 149 | params.pop('_authentication_token') |
|
150 | 150 | assert params == updated_params |
|
151 | 151 | |
|
152 | 152 | def test_my_account_update_err_email_exists(self): |
|
153 | 153 | self.log_user() |
|
154 | 154 | |
|
155 | 155 | new_email = TEST_USER_REGULAR_EMAIL # already existing email |
|
156 | 156 | response = self.app.post(url('my_account'), |
|
157 | 157 | params=dict( |
|
158 | 158 | username=TEST_USER_ADMIN_LOGIN, |
|
159 | 159 | new_password=TEST_USER_ADMIN_PASS, |
|
160 | 160 | password_confirmation='test122', |
|
161 | 161 | firstname=u'NewName', |
|
162 | 162 | lastname=u'NewLastname', |
|
163 | 163 | email=new_email, |
|
164 | 164 | _authentication_token=self.authentication_token()) |
|
165 | 165 | ) |
|
166 | 166 | |
|
167 | 167 | response.mustcontain('This email address is already in use') |
|
168 | 168 | |
|
169 | 169 | def test_my_account_update_err(self): |
|
170 | 170 | self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS) |
|
171 | 171 | |
|
172 | 172 | new_email = 'newmail.pl' |
|
173 | 173 | response = self.app.post(url('my_account'), |
|
174 | 174 | params=dict( |
|
175 | 175 | username=TEST_USER_ADMIN_LOGIN, |
|
176 | 176 | new_password=TEST_USER_ADMIN_PASS, |
|
177 | 177 | password_confirmation='test122', |
|
178 | 178 | firstname=u'NewName', |
|
179 | 179 | lastname=u'NewLastname', |
|
180 | 180 | email=new_email, |
|
181 | 181 | _authentication_token=self.authentication_token())) |
|
182 | 182 | |
|
183 | 183 | response.mustcontain('An email address must contain a single @') |
|
184 | 184 | from kallithea.model import validators |
|
185 | 185 | with test_context(self.app): |
|
186 | 186 | msg = validators.ValidUsername(edit=False, old_data={}) \ |
|
187 | 187 | ._messages['username_exists'] |
|
188 | 188 | msg = h.html_escape(msg % {'username': TEST_USER_ADMIN_LOGIN}) |
|
189 | 189 | response.mustcontain(msg) |
|
190 | 190 | |
|
191 | 191 | def test_my_account_api_keys(self): |
|
192 | 192 | usr = self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS) |
|
193 | 193 | user = User.get(usr['user_id']) |
|
194 | 194 | response = self.app.get(url('my_account_api_keys')) |
|
195 | 195 | response.mustcontain(user.api_key) |
|
196 | 196 | response.mustcontain('Expires: Never') |
|
197 | 197 | |
|
198 | 198 | @parametrize('desc,lifetime', [ |
|
199 | 199 | ('forever', -1), |
|
200 | 200 | ('5mins', 60*5), |
|
201 | 201 | ('30days', 60*60*24*30), |
|
202 | 202 | ]) |
|
203 | 203 | def test_my_account_add_api_keys(self, desc, lifetime): |
|
204 | 204 | usr = self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS) |
|
205 | 205 | user = User.get(usr['user_id']) |
|
206 | 206 | response = self.app.post(url('my_account_api_keys'), |
|
207 | 207 | {'description': desc, 'lifetime': lifetime, '_authentication_token': self.authentication_token()}) |
|
208 | 208 | self.checkSessionFlash(response, 'API key successfully created') |
|
209 | 209 | try: |
|
210 | 210 | response = response.follow() |
|
211 | 211 | user = User.get(usr['user_id']) |
|
212 | 212 | for api_key in user.api_keys: |
|
213 | 213 | response.mustcontain(api_key) |
|
214 | 214 | finally: |
|
215 | 215 | for api_key in UserApiKeys.query().all(): |
|
216 | 216 | Session().delete(api_key) |
|
217 | 217 | Session().commit() |
|
218 | 218 | |
|
219 | 219 | def test_my_account_remove_api_key(self): |
|
220 | 220 | usr = self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS) |
|
221 | 221 | user = User.get(usr['user_id']) |
|
222 | 222 | response = self.app.post(url('my_account_api_keys'), |
|
223 | 223 | {'description': 'desc', 'lifetime': -1, '_authentication_token': self.authentication_token()}) |
|
224 | 224 | self.checkSessionFlash(response, 'API key successfully created') |
|
225 | 225 | response = response.follow() |
|
226 | 226 | |
|
227 | 227 | # now delete our key |
|
228 | 228 | keys = UserApiKeys.query().all() |
|
229 | 229 | assert 1 == len(keys) |
|
230 | 230 | |
|
231 | 231 | response = self.app.post(url('my_account_api_keys_delete'), |
|
232 | 232 | {'del_api_key': keys[0].api_key, '_authentication_token': self.authentication_token()}) |
|
233 | 233 | self.checkSessionFlash(response, 'API key successfully deleted') |
|
234 | 234 | keys = UserApiKeys.query().all() |
|
235 | 235 | assert 0 == len(keys) |
|
236 | 236 | |
|
237 | 237 | |
|
238 | 238 | def test_my_account_reset_main_api_key(self): |
|
239 | 239 | usr = self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS) |
|
240 | 240 | user = User.get(usr['user_id']) |
|
241 | 241 | api_key = user.api_key |
|
242 | 242 | response = self.app.get(url('my_account_api_keys')) |
|
243 | 243 | response.mustcontain(api_key) |
|
244 | 244 | response.mustcontain('Expires: Never') |
|
245 | 245 | |
|
246 | 246 | response = self.app.post(url('my_account_api_keys_delete'), |
|
247 | 247 | {'del_api_key_builtin': api_key, '_authentication_token': self.authentication_token()}) |
|
248 | 248 | self.checkSessionFlash(response, 'API key successfully reset') |
|
249 | 249 | response = response.follow() |
|
250 | 250 | response.mustcontain(no=[api_key]) |
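The lifetime values posted in the API key tests above (-1 for 'forever', otherwise a number of seconds) suggest a simple expiry convention. A sketch of that convention as inferred from the test data (assumed, not lifted from ApiKeyModel):

    import time

    def compute_expires(lifetime_seconds):
        # -1 is the 'forever' sentinel: the UI then shows 'Expires: Never'.
        if lifetime_seconds == -1:
            return None
        # Anything else is a duration in seconds from the moment of creation.
        return time.time() + lifetime_seconds

    assert compute_expires(-1) is None
    assert compute_expires(60 * 5) > time.time()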
@@ -1,300 +1,300 b'' | |||
|
1 | 1 | import re |
|
2 | 2 | import pytest |
|
3 | 3 | |
|
4 | | from |
|
| 4 | from tg.util.webtest import test_context |
|
5 | 5 | |
|
6 | 6 | from kallithea.tests.base import * |
|
7 | 7 | from kallithea.tests.fixture import Fixture |
|
8 | 8 | from kallithea.model.db import User |
|
9 | 9 | from kallithea.model.meta import Session |
|
10 | 10 | |
|
11 | 11 | from kallithea.controllers.pullrequests import PullrequestsController |
|
12 | 12 | |
|
13 | 13 | fixture = Fixture() |
|
14 | 14 | |
|
15 | 15 | class TestPullrequestsController(TestController): |
|
16 | 16 | |
|
17 | 17 | def test_index(self): |
|
18 | 18 | self.log_user() |
|
19 | 19 | response = self.app.get(url(controller='pullrequests', action='index', |
|
20 | 20 | repo_name=HG_REPO)) |
|
21 | 21 | |
|
22 | 22 | def test_create_trivial(self): |
|
23 | 23 | self.log_user() |
|
24 | 24 | response = self.app.post(url(controller='pullrequests', action='create', |
|
25 | 25 | repo_name=HG_REPO), |
|
26 | 26 | {'org_repo': HG_REPO, |
|
27 | 27 | 'org_ref': 'branch:stable:4f7e2131323e0749a740c0a56ab68ae9269c562a', |
|
28 | 28 | 'other_repo': HG_REPO, |
|
29 | 29 | 'other_ref': 'branch:default:96507bd11ecc815ebc6270fdf6db110928c09c1e', |
|
30 | 30 | 'pullrequest_title': 'title', |
|
31 | 31 | 'pullrequest_desc': 'description', |
|
32 | 32 | '_authentication_token': self.authentication_token(), |
|
33 | 33 | }, |
|
34 | 34 | status=302) |
|
35 | 35 | response = response.follow() |
|
36 | 36 | assert response.status == '200 OK' |
|
37 | 37 | response.mustcontain('Successfully opened new pull request') |
|
38 | 38 | response.mustcontain('No additional changesets found for iterating on this pull request') |
|
39 | 39 | response.mustcontain('href="/vcs_test_hg/changeset/4f7e2131323e0749a740c0a56ab68ae9269c562a"') |
|
40 | 40 | |
|
41 | 41 | def test_available(self): |
|
42 | 42 | self.log_user() |
|
43 | 43 | response = self.app.post(url(controller='pullrequests', action='create', |
|
44 | 44 | repo_name=HG_REPO), |
|
45 | 45 | {'org_repo': HG_REPO, |
|
46 | 46 | 'org_ref': 'rev:94f45ed825a1:94f45ed825a113e61af7e141f44ca578374abef0', |
|
47 | 47 | 'other_repo': HG_REPO, |
|
48 | 48 | 'other_ref': 'branch:default:96507bd11ecc815ebc6270fdf6db110928c09c1e', |
|
49 | 49 | 'pullrequest_title': 'title', |
|
50 | 50 | 'pullrequest_desc': 'description', |
|
51 | 51 | '_authentication_token': self.authentication_token(), |
|
52 | 52 | }, |
|
53 | 53 | status=302) |
|
54 | 54 | response = response.follow() |
|
55 | 55 | assert response.status == '200 OK' |
|
56 | 56 | response.mustcontain(no='No additional changesets found for iterating on this pull request') |
|
57 | 57 | response.mustcontain('The following additional changes are available on stable:') |
|
58 | 58 | response.mustcontain('<input id="updaterev_4f7e2131323e0749a740c0a56ab68ae9269c562a" name="updaterev" type="radio" value="4f7e2131323e0749a740c0a56ab68ae9269c562a" />') |
|
59 | 59 | response.mustcontain('href="/vcs_test_hg/changeset/4f7e2131323e0749a740c0a56ab68ae9269c562a"') # as update |
|
60 | 60 | |
|
61 | 61 | def test_range(self): |
|
62 | 62 | self.log_user() |
|
63 | 63 | response = self.app.post(url(controller='pullrequests', action='create', |
|
64 | 64 | repo_name=HG_REPO), |
|
65 | 65 | {'org_repo': HG_REPO, |
|
66 | 66 | 'org_ref': 'branch:stable:4f7e2131323e0749a740c0a56ab68ae9269c562a', |
|
67 | 67 | 'other_repo': HG_REPO, |
|
68 | 68 | 'other_ref': 'rev:94f45ed825a1:94f45ed825a113e61af7e141f44ca578374abef0', |
|
69 | 69 | 'pullrequest_title': 'title', |
|
70 | 70 | 'pullrequest_desc': 'description', |
|
71 | 71 | '_authentication_token': self.authentication_token(), |
|
72 | 72 | }, |
|
73 | 73 | status=302) |
|
74 | 74 | response = response.follow() |
|
75 | 75 | assert response.status == '200 OK' |
|
76 | 76 | response.mustcontain('No additional changesets found for iterating on this pull request') |
|
77 | 77 | response.mustcontain('href="/vcs_test_hg/changeset/4f7e2131323e0749a740c0a56ab68ae9269c562a"') |
|
78 | 78 | |
|
79 | 79 | def test_update_reviewers(self): |
|
80 | 80 | self.log_user() |
|
81 | 81 | regular_user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
82 | 82 | regular_user2 = User.get_by_username(TEST_USER_REGULAR2_LOGIN) |
|
83 | 83 | admin_user = User.get_by_username(TEST_USER_ADMIN_LOGIN) |
|
84 | 84 | |
|
85 | 85 | # create initial PR |
|
86 | 86 | response = self.app.post(url(controller='pullrequests', action='create', |
|
87 | 87 | repo_name=HG_REPO), |
|
88 | 88 | {'org_repo': HG_REPO, |
|
89 | 89 | 'org_ref': 'rev:94f45ed825a1:94f45ed825a113e61af7e141f44ca578374abef0', |
|
90 | 90 | 'other_repo': HG_REPO, |
|
91 | 91 | 'other_ref': 'branch:default:96507bd11ecc815ebc6270fdf6db110928c09c1e', |
|
92 | 92 | 'pullrequest_title': 'title', |
|
93 | 93 | 'pullrequest_desc': 'description', |
|
94 | 94 | '_authentication_token': self.authentication_token(), |
|
95 | 95 | }, |
|
96 | 96 | status=302) |
|
97 | 97 | pull_request1_id = re.search(r'/pull-request/(\d+)/', response.location).group(1) |
|
98 | 98 | assert response.location == 'http://localhost/%s/pull-request/%s/_/stable' % (HG_REPO, pull_request1_id) |
|
99 | 99 | |
|
100 | 100 | # create new iteration |
|
101 | 101 | response = self.app.post(url(controller='pullrequests', action='post', |
|
102 | 102 | repo_name=HG_REPO, pull_request_id=pull_request1_id), |
|
103 | 103 | { |
|
104 | 104 | 'updaterev': '4f7e2131323e0749a740c0a56ab68ae9269c562a', |
|
105 | 105 | 'pullrequest_title': 'title', |
|
106 | 106 | 'pullrequest_desc': 'description', |
|
107 | 107 | 'owner': TEST_USER_ADMIN_LOGIN, |
|
108 | 108 | '_authentication_token': self.authentication_token(), |
|
109 | 109 | 'review_members': [regular_user.user_id], |
|
110 | 110 | }, |
|
111 | 111 | status=302) |
|
112 | 112 | pull_request2_id = re.search(r'/pull-request/(\d+)/', response.location).group(1) |
|
113 | 113 | assert pull_request2_id != pull_request1_id |
|
114 | 114 | assert response.location == 'http://localhost/%s/pull-request/%s/_/stable' % (HG_REPO, pull_request2_id) |
|
115 | 115 | response = response.follow() |
|
116 | 116 | # verify reviewer was added |
|
117 | 117 | response.mustcontain('<input type="hidden" value="%s" name="review_members" />' % regular_user.user_id) |
|
118 | 118 | |
|
119 | 119 | # update without creating new iteration |
|
120 | 120 | response = self.app.post(url(controller='pullrequests', action='post', |
|
121 | 121 | repo_name=HG_REPO, pull_request_id=pull_request2_id), |
|
122 | 122 | { |
|
123 | 123 | 'pullrequest_title': 'Title', |
|
124 | 124 | 'pullrequest_desc': 'description', |
|
125 | 125 | 'owner': TEST_USER_ADMIN_LOGIN, |
|
126 | 126 | '_authentication_token': self.authentication_token(), |
|
127 | 127 | 'org_review_members': [admin_user.user_id], # fake 'original' reviewer list - just to trigger the 'meanwhile' warnings ... but it is also added ... |
|
128 | 128 | 'review_members': [regular_user2.user_id, admin_user.user_id], |
|
129 | 129 | }, |
|
130 | 130 | status=302) |
|
131 | 131 | assert response.location == 'http://localhost/%s/pull-request/%s/_/stable' % (HG_REPO, pull_request2_id) |
|
132 | 132 | response = response.follow() |
|
133 | 133 | # verify reviewers were added / removed |
|
134 | 134 | response.mustcontain('Meanwhile, the following reviewers have been added: test_regular') |
|
135 | 135 | response.mustcontain('Meanwhile, the following reviewers have been removed: test_admin') |
|
136 | 136 | response.mustcontain('<input type="hidden" value="%s" name="review_members" />' % regular_user.user_id) |
|
137 | 137 | response.mustcontain('<input type="hidden" value="%s" name="review_members" />' % regular_user2.user_id) |
|
138 | 138 | response.mustcontain(no='<input type="hidden" value="%s" name="review_members" />' % admin_user.user_id) |
|
139 | 139 | |
|
140 | 140 | def test_update_with_invalid_reviewer(self): |
|
141 | 141 | invalid_user_id = 99999 |
|
142 | 142 | self.log_user() |
|
143 | 143 | # create a valid pull request |
|
144 | 144 | response = self.app.post(url(controller='pullrequests', action='create', |
|
145 | 145 | repo_name=HG_REPO), |
|
146 | 146 | { |
|
147 | 147 | 'org_repo': HG_REPO, |
|
148 | 148 | 'org_ref': 'rev:94f45ed825a1:94f45ed825a113e61af7e141f44ca578374abef0', |
|
149 | 149 | 'other_repo': HG_REPO, |
|
150 | 150 | 'other_ref': 'branch:default:96507bd11ecc815ebc6270fdf6db110928c09c1e', |
|
151 | 151 | 'pullrequest_title': 'title', |
|
152 | 152 | 'pullrequest_desc': 'description', |
|
153 | 153 | '_authentication_token': self.authentication_token(), |
|
154 | 154 | }, |
|
155 | 155 | status=302) |
|
156 | 156 | # location is of the form: |
|
157 | 157 | # http://localhost/vcs_test_hg/pull-request/54/_/title |
|
158 | 158 | m = re.search(r'/pull-request/(\d+)/', response.location) |
|
159 | 159 | assert m is not None |
|
160 | 160 | pull_request_id = m.group(1) |
|
161 | 161 | |
|
162 | 162 | # update it |
|
163 | 163 | response = self.app.post(url(controller='pullrequests', action='post', |
|
164 | 164 | repo_name=HG_REPO, pull_request_id=pull_request_id), |
|
165 | 165 | { |
|
166 | 166 | 'updaterev': '4f7e2131323e0749a740c0a56ab68ae9269c562a', |
|
167 | 167 | 'pullrequest_title': 'title', |
|
168 | 168 | 'pullrequest_desc': 'description', |
|
169 | 169 | 'owner': TEST_USER_ADMIN_LOGIN, |
|
170 | 170 | '_authentication_token': self.authentication_token(), |
|
171 | 171 | 'review_members': [str(invalid_user_id)], |
|
172 | 172 | }, |
|
173 | 173 | status=400) |
|
174 | 174 | response.mustcontain('Invalid reviewer "%s" specified' % invalid_user_id) |
|
175 | 175 | |
|
176 | 176 | def test_edit_with_invalid_reviewer(self): |
|
177 | 177 | invalid_user_id = 99999 |
|
178 | 178 | self.log_user() |
|
179 | 179 | # create a valid pull request |
|
180 | 180 | response = self.app.post(url(controller='pullrequests', action='create', |
|
181 | 181 | repo_name=HG_REPO), |
|
182 | 182 | { |
|
183 | 183 | 'org_repo': HG_REPO, |
|
184 | 184 | 'org_ref': 'branch:stable:4f7e2131323e0749a740c0a56ab68ae9269c562a', |
|
185 | 185 | 'other_repo': HG_REPO, |
|
186 | 186 | 'other_ref': 'branch:default:96507bd11ecc815ebc6270fdf6db110928c09c1e', |
|
187 | 187 | 'pullrequest_title': 'title', |
|
188 | 188 | 'pullrequest_desc': 'description', |
|
189 | 189 | '_authentication_token': self.authentication_token(), |
|
190 | 190 | }, |
|
191 | 191 | status=302) |
|
192 | 192 | # location is of the form: |
|
193 | 193 | # http://localhost/vcs_test_hg/pull-request/54/_/title |
|
194 | 194 | m = re.search(r'/pull-request/(\d+)/', response.location) |
|
195 | 195 | assert m is not None |
|
196 | 196 | pull_request_id = m.group(1) |
|
197 | 197 | |
|
198 | 198 | # edit it |
|
199 | 199 | response = self.app.post(url(controller='pullrequests', action='post', |
|
200 | 200 | repo_name=HG_REPO, pull_request_id=pull_request_id), |
|
201 | 201 | { |
|
202 | 202 | 'pullrequest_title': 'title', |
|
203 | 203 | 'pullrequest_desc': 'description', |
|
204 | 204 | 'owner': TEST_USER_ADMIN_LOGIN, |
|
205 | 205 | '_authentication_token': self.authentication_token(), |
|
206 | 206 | 'review_members': [str(invalid_user_id)], |
|
207 | 207 | }, |
|
208 | 208 | status=400) |
|
209 | 209 | response.mustcontain('Invalid reviewer "%s" specified' % invalid_user_id) |
|
210 | 210 | |
|
211 | 211 | @pytest.mark.usefixtures("test_context_fixture") # apply fixture for all test methods |
|
212 | 212 | class TestPullrequestsGetRepoRefs(TestController): |
|
213 | 213 | |
|
214 | 214 | def setup_method(self, method): |
|
215 | 215 | self.repo_name = u'main' |
|
216 | 216 | repo = fixture.create_repo(self.repo_name, repo_type='hg') |
|
217 | 217 | self.repo_scm_instance = repo.scm_instance |
|
218 | 218 | Session.commit() |
|
219 | 219 | self.c = PullrequestsController() |
|
220 | 220 | |
|
221 | 221 | def teardown_method(self, method): |
|
222 | 222 | fixture.destroy_repo(u'main') |
|
223 | 223 | Session.commit() |
|
224 | 224 | Session.remove() |
|
225 | 225 | |
|
226 | 226 | def test_repo_refs_empty_repo(self): |
|
227 | 227 | # empty repo with no commits, no branches, no bookmarks, just one tag |
|
228 | 228 | refs, default = self.c._get_repo_refs(self.repo_scm_instance) |
|
229 | 229 | assert default == 'tag:null:0000000000000000000000000000000000000000' |
|
230 | 230 | |
|
231 | 231 | def test_repo_refs_one_commit_no_hints(self): |
|
232 | 232 | cs0 = fixture.commit_change(self.repo_name, filename='file1', |
|
233 | 233 | content='line1\n', message='commit1', vcs_type='hg', |
|
234 | 234 | parent=None, newfile=True) |
|
235 | 235 | |
|
236 | 236 | refs, default = self.c._get_repo_refs(self.repo_scm_instance) |
|
237 | 237 | assert default == 'branch:default:%s' % cs0.raw_id |
|
238 | 238 | assert ([('branch:default:%s' % cs0.raw_id, 'default (current tip)')], |
|
239 | 239 | 'Branches') in refs |
|
240 | 240 | |
|
241 | 241 | def test_repo_refs_one_commit_rev_hint(self): |
|
242 | 242 | cs0 = fixture.commit_change(self.repo_name, filename='file1', |
|
243 | 243 | content='line1\n', message='commit1', vcs_type='hg', |
|
244 | 244 | parent=None, newfile=True) |
|
245 | 245 | |
|
246 | 246 | refs, default = self.c._get_repo_refs(self.repo_scm_instance, rev=cs0.raw_id) |
|
247 | 247 | expected = 'branch:default:%s' % cs0.raw_id |
|
248 | 248 | assert default == expected |
|
249 | 249 | assert ([(expected, 'default (current tip)')], 'Branches') in refs |
|
250 | 250 | |
|
251 | 251 | def test_repo_refs_two_commits_no_hints(self): |
|
252 | 252 | cs0 = fixture.commit_change(self.repo_name, filename='file1', |
|
253 | 253 | content='line1\n', message='commit1', vcs_type='hg', |
|
254 | 254 | parent=None, newfile=True) |
|
255 | 255 | cs1 = fixture.commit_change(self.repo_name, filename='file2', |
|
256 | 256 | content='line2\n', message='commit2', vcs_type='hg', |
|
257 | 257 | parent=None, newfile=True) |
|
258 | 258 | |
|
259 | 259 | refs, default = self.c._get_repo_refs(self.repo_scm_instance) |
|
260 | 260 | expected = 'branch:default:%s' % cs1.raw_id |
|
261 | 261 | assert default == expected |
|
262 | 262 | assert ([(expected, 'default (current tip)')], 'Branches') in refs |
|
263 | 263 | |
|
264 | 264 | def test_repo_refs_two_commits_rev_hints(self): |
|
265 | 265 | cs0 = fixture.commit_change(self.repo_name, filename='file1', |
|
266 | 266 | content='line1\n', message='commit1', vcs_type='hg', |
|
267 | 267 | parent=None, newfile=True) |
|
268 | 268 | cs1 = fixture.commit_change(self.repo_name, filename='file2', |
|
269 | 269 | content='line2\n', message='commit2', vcs_type='hg', |
|
270 | 270 | parent=None, newfile=True) |
|
271 | 271 | |
|
272 | 272 | refs, default = self.c._get_repo_refs(self.repo_scm_instance, rev=cs0.raw_id) |
|
273 | 273 | expected = 'rev:%s:%s' % (cs0.raw_id, cs0.raw_id) |
|
274 | 274 | assert default == expected |
|
275 | 275 | assert ([(expected, 'Changeset: %s' % cs0.raw_id[0:12])], 'Special') in refs |
|
276 | 276 | assert ([('branch:default:%s' % cs1.raw_id, 'default (current tip)')], 'Branches') in refs |
|
277 | 277 | |
|
278 | 278 | refs, default = self.c._get_repo_refs(self.repo_scm_instance, rev=cs1.raw_id) |
|
279 | 279 | expected = 'branch:default:%s' % cs1.raw_id |
|
280 | 280 | assert default == expected |
|
281 | 281 | assert ([(expected, 'default (current tip)')], 'Branches') in refs |
|
282 | 282 | |
|
283 | 283 | def test_repo_refs_two_commits_branch_hint(self): |
|
284 | 284 | cs0 = fixture.commit_change(self.repo_name, filename='file1', |
|
285 | 285 | content='line1\n', message='commit1', vcs_type='hg', |
|
286 | 286 | parent=None, newfile=True) |
|
287 | 287 | cs1 = fixture.commit_change(self.repo_name, filename='file2', |
|
288 | 288 | content='line2\n', message='commit2', vcs_type='hg', |
|
289 | 289 | parent=None, newfile=True) |
|
290 | 290 | |
|
291 | 291 | refs, default = self.c._get_repo_refs(self.repo_scm_instance, branch='default') |
|
292 | 292 | expected = 'branch:default:%s' % cs1.raw_id |
|
293 | 293 | assert default == expected |
|
294 | 294 | assert ([(expected, 'default (current tip)')], 'Branches') in refs |
|
295 | 295 | |
|
296 | 296 | def test_repo_refs_one_branch_no_hints(self): |
|
297 | 297 | cs0 = fixture.commit_change(self.repo_name, filename='file1', |
|
298 | 298 | content='line1\n', message='commit1', vcs_type='hg', |
|
299 | 299 | parent=None, newfile=True) |
|
300 | 300 | # TODO |
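The pull request id is recovered from the redirect location in four separate places above with the same inline re.search. A consolidated sketch (the helper is ours, offered only as a possible cleanup):

    import re

    _PR_URL_RE = re.compile(r'/pull-request/(\d+)/')

    def pull_request_id(location):
        # Redirect targets look like
        # http://localhost/vcs_test_hg/pull-request/54/_/title
        m = _PR_URL_RE.search(location)
        return m.group(1) if m else None

    assert pull_request_id(
        'http://localhost/vcs_test_hg/pull-request/54/_/title') == '54'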
@@ -1,291 +1,291 b'' | |||
|
1 | 1 | import os |
|
2 | 2 | import re |
|
3 | 3 | |
|
4 | 4 | import mock |
|
5 | 5 | import routes.util |
|
6 | 6 | |
|
7 | 7 | from kallithea.tests.base import * |
|
8 | 8 | from kallithea.lib import helpers as h |
|
9 | 9 | from kallithea.model.db import User, Notification, UserNotification |
|
10 | 10 | from kallithea.model.user import UserModel |
|
11 | 11 | from kallithea.model.meta import Session |
|
12 | 12 | from kallithea.model.notification import NotificationModel, EmailNotificationModel |
|
13 | 13 | |
|
14 | 14 | import kallithea.lib.celerylib |
|
15 | 15 | import kallithea.lib.celerylib.tasks |
|
16 | 16 | |
|
17 | | from |
|
| 17 | from tg.util.webtest import test_context |
|
18 | 18 | |
|
19 | 19 | class TestNotifications(TestController): |
|
20 | 20 | |
|
21 | 21 | def setup_method(self, method): |
|
22 | 22 | Session.remove() |
|
23 | 23 | u1 = UserModel().create_or_update(username=u'u1', |
|
24 | 24 | password=u'qweqwe', |
|
25 | 25 | email=u'u1@example.com', |
|
26 | 26 | firstname=u'u1', lastname=u'u1') |
|
27 | 27 | Session().commit() |
|
28 | 28 | self.u1 = u1.user_id |
|
29 | 29 | |
|
30 | 30 | u2 = UserModel().create_or_update(username=u'u2', |
|
31 | 31 | password=u'qweqwe', |
|
32 | 32 | email=u'u2@example.com', |
|
33 | 33 | firstname=u'u2', lastname=u'u3') |
|
34 | 34 | Session().commit() |
|
35 | 35 | self.u2 = u2.user_id |
|
36 | 36 | |
|
37 | 37 | u3 = UserModel().create_or_update(username=u'u3', |
|
38 | 38 | password=u'qweqwe', |
|
39 | 39 | email=u'u3@example.com', |
|
40 | 40 | firstname=u'u3', lastname=u'u3') |
|
41 | 41 | Session().commit() |
|
42 | 42 | self.u3 = u3.user_id |
|
43 | 43 | |
|
44 | 44 | self.remove_all_notifications() |
|
45 | 45 | assert [] == Notification.query().all() |
|
46 | 46 | assert [] == UserNotification.query().all() |
|
47 | 47 | |
|
48 | 48 | def test_create_notification(self): |
|
49 | 49 | with test_context(self.app): |
|
50 | 50 | usrs = [self.u1, self.u2] |
|
51 | 51 | def send_email(recipients, subject, body='', html_body='', headers=None, author=None): |
|
52 | 52 | assert recipients == ['u2@example.com'] |
|
53 | 53 | assert subject == 'Test Message' |
|
54 | 54 | assert body == u"hi there" |
|
55 | 55 | assert '>hi there<' in html_body |
|
56 | 56 | assert author.username == 'u1' |
|
57 | 57 | with mock.patch.object(kallithea.lib.celerylib.tasks, 'send_email', send_email): |
|
58 | 58 | notification = NotificationModel().create(created_by=self.u1, |
|
59 | 59 | subject=u'subj', body=u'hi there', |
|
60 | 60 | recipients=usrs) |
|
61 | 61 | Session().commit() |
|
62 | 62 | u1 = User.get(self.u1) |
|
63 | 63 | u2 = User.get(self.u2) |
|
64 | 64 | u3 = User.get(self.u3) |
|
65 | 65 | notifications = Notification.query().all() |
|
66 | 66 | assert len(notifications) == 1 |
|
67 | 67 | |
|
68 | 68 | assert notifications[0].recipients == [u1, u2] |
|
69 | 69 | assert notification.notification_id == notifications[0].notification_id |
|
70 | 70 | |
|
71 | 71 | unotification = UserNotification.query() \ |
|
72 | 72 | .filter(UserNotification.notification == notification).all() |
|
73 | 73 | |
|
74 | 74 | assert len(unotification) == len(usrs) |
|
75 | 75 | assert set([x.user_id for x in unotification]) == set(usrs) |
|
76 | 76 | |
|
77 | 77 | def test_user_notifications(self): |
|
78 | 78 | with test_context(self.app): |
|
79 | 79 | notification1 = NotificationModel().create(created_by=self.u1, |
|
80 | 80 | subject=u'subj', body=u'hi there1', |
|
81 | 81 | recipients=[self.u3]) |
|
82 | 82 | Session().commit() |
|
83 | 83 | notification2 = NotificationModel().create(created_by=self.u1, |
|
84 | 84 | subject=u'subj', body=u'hi there2', |
|
85 | 85 | recipients=[self.u3]) |
|
86 | 86 | Session().commit() |
|
87 | 87 | u3 = Session().query(User).get(self.u3) |
|
88 | 88 | |
|
89 | 89 | assert sorted([x.notification for x in u3.notifications]) == sorted([notification2, notification1]) |
|
90 | 90 | |
|
91 | 91 | def test_delete_notifications(self): |
|
92 | 92 | with test_context(self.app): |
|
93 | 93 | notification = NotificationModel().create(created_by=self.u1, |
|
94 | 94 | subject=u'title', body=u'hi there3', |
|
95 | 95 | recipients=[self.u3, self.u1, self.u2]) |
|
96 | 96 | Session().commit() |
|
97 | 97 | notifications = Notification.query().all() |
|
98 | 98 | assert notification in notifications |
|
99 | 99 | |
|
100 | 100 | Notification.delete(notification.notification_id) |
|
101 | 101 | Session().commit() |
|
102 | 102 | |
|
103 | 103 | notifications = Notification.query().all() |
|
104 | 104 | assert notification not in notifications |
|
105 | 105 | |
|
106 | 106 | un = UserNotification.query().filter(UserNotification.notification |
|
107 | 107 | == notification).all() |
|
108 | 108 | assert un == [] |
|
109 | 109 | |
|
110 | 110 | def test_delete_association(self): |
|
111 | 111 | with test_context(self.app): |
|
112 | 112 | notification = NotificationModel().create(created_by=self.u1, |
|
113 | 113 | subject=u'title', body=u'hi there3', |
|
114 | 114 | recipients=[self.u3, self.u1, self.u2]) |
|
115 | 115 | Session().commit() |
|
116 | 116 | |
|
117 | 117 | unotification = UserNotification.query() \ |
|
118 | 118 | .filter(UserNotification.notification == |
|
119 | 119 | notification) \ |
|
120 | 120 | .filter(UserNotification.user_id == self.u3) \ |
|
121 | 121 | .scalar() |
|
122 | 122 | |
|
123 | 123 | assert unotification.user_id == self.u3 |
|
124 | 124 | |
|
125 | 125 | NotificationModel().delete(self.u3, |
|
126 | 126 | notification.notification_id) |
|
127 | 127 | Session().commit() |
|
128 | 128 | |
|
129 | 129 | u3notification = UserNotification.query() \ |
|
130 | 130 | .filter(UserNotification.notification == |
|
131 | 131 | notification) \ |
|
132 | 132 | .filter(UserNotification.user_id == self.u3) \ |
|
133 | 133 | .scalar() |
|
134 | 134 | |
|
135 | 135 | assert u3notification is None |
|
136 | 136 | |
|
137 | 137 | # notification object is still there |
|
138 | 138 | assert Notification.query().all() == [notification] |
|
139 | 139 | |
|
140 | 140 | # u1 and u2 still have assignments |
|
141 | 141 | u1notification = UserNotification.query() \ |
|
142 | 142 | .filter(UserNotification.notification == |
|
143 | 143 | notification) \ |
|
144 | 144 | .filter(UserNotification.user_id == self.u1) \ |
|
145 | 145 | .scalar() |
|
146 | 146 | assert u1notification is not None |
|
147 | 147 | u2notification = UserNotification.query() \ |
|
148 | 148 | .filter(UserNotification.notification == |
|
149 | 149 | notification) \ |
|
150 | 150 | .filter(UserNotification.user_id == self.u2) \ |
|
151 | 151 | .scalar() |
|
152 | 152 | assert u2notification is not None |
|
153 | 153 | |
|
154 | 154 | def test_notification_counter(self): |
|
155 | 155 | with test_context(self.app): |
|
156 | 156 | NotificationModel().create(created_by=self.u1, |
|
157 | 157 | subject=u'title', body=u'hi there_delete', |
|
158 | 158 | recipients=[self.u3, self.u1]) |
|
159 | 159 | Session().commit() |
|
160 | 160 | |
|
161 | 161 | assert NotificationModel().get_unread_cnt_for_user(self.u1) == 0 |
|
162 | 162 | assert NotificationModel().get_unread_cnt_for_user(self.u2) == 0 |
|
163 | 163 | assert NotificationModel().get_unread_cnt_for_user(self.u3) == 1 |
|
164 | 164 | |
|
165 | 165 | notification = NotificationModel().create(created_by=self.u1, |
|
166 | 166 | subject=u'title', body=u'hi there3', |
|
167 | 167 | recipients=[self.u3, self.u1, self.u2]) |
|
168 | 168 | Session().commit() |
|
169 | 169 | |
|
170 | 170 | assert NotificationModel().get_unread_cnt_for_user(self.u1) == 0 |
|
171 | 171 | assert NotificationModel().get_unread_cnt_for_user(self.u2) == 1 |
|
172 | 172 | assert NotificationModel().get_unread_cnt_for_user(self.u3) == 2 |
|
173 | 173 | |
|
174 | 174 | @mock.patch.object(h, 'canonical_url', (lambda arg, **kwargs: 'http://%s/?%s' % (arg, '&'.join('%s=%s' % (k, v) for (k, v) in sorted(kwargs.items()))))) |
|
175 | 175 | def test_dump_html_mails(self): |
|
176 | 176 | # Exercise all notification types and dump them to one big html file |
|
177 | 177 | l = [] |
|
178 | 178 | |
|
179 | 179 | def send_email(recipients, subject, body='', html_body='', headers=None, author=None): |
|
180 | 180 | l.append('<hr/>\n') |
|
181 | 181 | l.append('<h1>%s</h1>\n' % desc) # desc is from outer scope |
|
182 | 182 | l.append('<pre>\n') |
|
183 | 183 | l.append('From: %s\n' % author.username) |
|
184 | 184 | l.append('To: %s\n' % ' '.join(recipients)) |
|
185 | 185 | l.append('Subject: %s\n' % subject) |
|
186 | 186 | l.append('</pre>\n') |
|
187 | 187 | l.append('<hr/>\n') |
|
188 | 188 | l.append('<pre>%s</pre>\n' % body) |
|
189 | 189 | l.append('<hr/>\n') |
|
190 | 190 | l.append(html_body) |
|
191 | 191 | l.append('<hr/>\n') |
|
192 | 192 | |
|
193 | 193 | with test_context(self.app): |
|
194 | 194 | with mock.patch.object(kallithea.lib.celerylib.tasks, 'send_email', send_email): |
|
195 | 195 | pr_kwargs = dict( |
|
196 | 196 | pr_nice_id='#7', |
|
197 | 197 | pr_title='The Title', |
|
198 | 198 | pr_title_short='The Title', |
|
199 | 199 | pr_url='http://pr.org/7', |
|
200 | 200 | pr_target_repo='http://mainline.com/repo', |
|
201 | 201 | pr_target_branch='trunk', |
|
202 | 202 | pr_source_repo='https://dev.org/repo', |
|
203 | 203 | pr_source_branch='devbranch', |
|
204 | 204 | pr_owner=User.get(self.u2), |
|
205 | 205 | pr_owner_username='u2' |
|
206 | 206 | ) |
|
207 | 207 | |
|
208 | 208 | for type_, body, kwargs in [ |
|
209 | 209 | (Notification.TYPE_CHANGESET_COMMENT, |
|
210 | 210 | u'This is the new comment.\n\n - and here it ends indented.', |
|
211 | 211 | dict( |
|
212 | 212 | short_id='cafe1234', |
|
213 | 213 | raw_id='cafe1234c0ffeecafe', |
|
214 | 214 | branch='brunch', |
|
215 | 215 | cs_comment_user='Opinionated User (jsmith)', |
|
216 | 216 | cs_comment_url='http://comment.org', |
|
217 | 217 | is_mention=[False, True], |
|
218 | 218 | message='This changeset did something clever which is hard to explain', |
|
219 | 219 | message_short='This changeset did something cl...', |
|
220 | 220 | status_change=[None, 'Approved'], |
|
221 | 221 | cs_target_repo='repo_target', |
|
222 | 222 | cs_url='http://changeset.com', |
|
223 | 223 | cs_author=User.get(self.u2))), |
|
224 | 224 | (Notification.TYPE_MESSAGE, |
|
225 | 225 | u'This is the body of the test message\n - nothing interesting here except indentation.', |
|
226 | 226 | dict()), |
|
227 | 227 | #(Notification.TYPE_MENTION, '$body', None), # not used |
|
228 | 228 | (Notification.TYPE_REGISTRATION, |
|
229 | 229 | u'Registration body', |
|
230 | 230 | dict( |
|
231 | 231 | new_username='newbie', |
|
232 | 232 | registered_user_url='http://newbie.org', |
|
233 | 233 | new_email='new@email.com', |
|
234 | 234 | new_full_name='New Full Name')), |
|
235 | 235 | (Notification.TYPE_PULL_REQUEST, |
|
236 | 236 | u'This PR is awesome because it does stuff\n - please approve indented!', |
|
237 | 237 | dict( |
|
238 | 238 | pr_user_created='Requesting User (root)', # pr_owner should perhaps be used for @mention in description ... |
|
239 | 239 | is_mention=[False, True], |
|
240 | 240 | pr_revisions=[('123abc'*7, "Introduce one and two\n\nand that's it"), ('567fed'*7, 'Make one plus two equal tree')], |
|
241 | 241 | org_repo_name='repo_org', |
|
242 | 242 | **pr_kwargs)), |
|
243 | 243 | (Notification.TYPE_PULL_REQUEST_COMMENT, |
|
244 | 244 | u'Me too!\n\n - and indented on second line', |
|
245 | 245 | dict( |
|
246 | 246 | closing_pr=[False, True], |
|
247 | 247 | is_mention=[False, True], |
|
248 | 248 | pr_comment_user='Opinionated User (jsmith)', |
|
249 | 249 | pr_comment_url='http://pr.org/comment', |
|
250 | 250 | status_change=[None, 'Under Review'], |
|
251 | 251 | **pr_kwargs)), |
|
252 | 252 | ]: |
|
253 | 253 | kwargs['repo_name'] = u'repo/name' |
|
254 | 254 | params = [(type_, type_, body, kwargs)] |
|
255 | 255 | for param_name in ['is_mention', 'status_change', 'closing_pr']: # TODO: inline/general |
|
256 | 256 | if not isinstance(kwargs.get(param_name), list): |
|
257 | 257 | continue |
|
258 | 258 | new_params = [] |
|
259 | 259 | for v in kwargs[param_name]: |
|
260 | 260 | for desc, type_, body, kwargs in params: |
|
261 | 261 | kwargs = dict(kwargs) |
|
262 | 262 | kwargs[param_name] = v |
|
263 | 263 | new_params.append(('%s, %s=%r' % (desc, param_name, v), type_, body, kwargs)) |
|
264 | 264 | params = new_params |
|
265 | 265 | |
|
266 | 266 | for desc, type_, body, kwargs in params: |
|
267 | 267 | # desc is used as "global" variable |
|
268 | 268 | notification = NotificationModel().create(created_by=self.u1, |
|
269 | 269 | subject=u'unused', body=body, email_kwargs=kwargs, |
|
270 | 270 | recipients=[self.u2], type_=type_) |
|
271 | 271 | |
|
272 | 272 | # Email type TYPE_PASSWORD_RESET has no corresponding notification type - test it directly: |
|
273 | 273 | desc = 'TYPE_PASSWORD_RESET' |
|
274 | 274 | kwargs = dict(user='John Doe', reset_token='decbf64715098db5b0bd23eab44bd792670ab746', reset_url='http://reset.com/decbf64715098db5b0bd23eab44bd792670ab746') |
|
275 | 275 | kallithea.lib.celerylib.tasks.send_email(['john@doe.com'], |
|
276 | 276 | "Password reset link", |
|
277 | 277 | EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'txt', **kwargs), |
|
278 | 278 | EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'html', **kwargs), |
|
279 | 279 | author=User.get(self.u1)) |
|
280 | 280 | |
|
281 | 281 | out = '<!doctype html>\n<html lang="en">\n<head><title>Notifications</title><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"></head>\n<body>\n%s\n</body>\n</html>\n' % \ |
|
282 | 282 | re.sub(r'<(/?(?:!doctype|html|head|title|meta|body)\b[^>]*)>', r'<!--\1-->', ''.join(l)) |
|
283 | 283 | |
|
284 | 284 | outfn = os.path.join(os.path.dirname(__file__), 'test_dump_html_mails.out.html') |
|
285 | 285 | reffn = os.path.join(os.path.dirname(__file__), 'test_dump_html_mails.ref.html') |
|
286 | 286 | with open(outfn, 'w') as f: |
|
287 | 287 | f.write(out) |
|
288 | 288 | with open(reffn) as f: |
|
289 | 289 | ref = f.read() |
|
290 | 290 | assert ref == out # copy test_dump_html_mails.out.html to test_dump_html_mails.ref.html to update expectations |
|
291 | 291 | os.unlink(outfn) |
@@ -1,424 +1,424 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.tests.other.test_libs |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Package for testing various lib/helper functions in kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Jun 9, 2011 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import datetime |
|
29 | 29 | import hashlib |
|
30 | 30 | import mock |
|
31 | 31 | from kallithea.tests.base import * |
|
32 | 32 | from kallithea.lib.utils2 import AttributeDict |
|
33 | 33 | from kallithea.model.db import Repository |
|
34 | from | |

34 | from tg.util.webtest import test_context | |
|
35 | 35 | |
|
36 | 36 | proto = 'http' |
|
37 | 37 | TEST_URLS = [ |
|
38 | 38 | ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
39 | 39 | '%s://127.0.0.1' % proto), |
|
40 | 40 | ('%s://username@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
41 | 41 | '%s://127.0.0.1' % proto), |
|
42 | 42 | ('%s://username:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
43 | 43 | '%s://127.0.0.1' % proto), |
|
44 | 44 | ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'], |
|
45 | 45 | '%s://127.0.0.1:8080' % proto), |
|
46 | 46 | ('%s://example.com' % proto, ['%s://' % proto, 'example.com'], |
|
47 | 47 | '%s://example.com' % proto), |
|
48 | 48 | ('%s://user:pass@example.com:8080' % proto, ['%s://' % proto, 'example.com', |
|
49 | 49 | '8080'], |
|
50 | 50 | '%s://example.com:8080' % proto), |
|
51 | 51 | ] |
|
52 | 52 | |
|
53 | 53 | proto = 'https' |
|
54 | 54 | TEST_URLS += [ |
|
55 | 55 | ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
56 | 56 | '%s://127.0.0.1' % proto), |
|
57 | 57 | ('%s://username@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
58 | 58 | '%s://127.0.0.1' % proto), |
|
59 | 59 | ('%s://username:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
60 | 60 | '%s://127.0.0.1' % proto), |
|
61 | 61 | ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'], |
|
62 | 62 | '%s://127.0.0.1:8080' % proto), |
|
63 | 63 | ('%s://example.com' % proto, ['%s://' % proto, 'example.com'], |
|
64 | 64 | '%s://example.com' % proto), |
|
65 | 65 | ('%s://user:pass@example.com:8080' % proto, ['%s://' % proto, 'example.com', |
|
66 | 66 | '8080'], |
|
67 | 67 | '%s://example.com:8080' % proto), |
|
68 | 68 | ] |
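
Each TEST_URLS triple is (test URL, expected uri_filter split, expected credentials_filter result). A minimal sketch of the credential stripping these fixtures encode (an illustration only, not the actual kallithea.lib.utils2 code):

    import urlparse

    def strip_credentials(uri):
        # drop any 'user[:password]@' part from the netloc
        parts = urlparse.urlsplit(uri)
        netloc = parts.netloc.rsplit('@', 1)[-1]
        return urlparse.urlunsplit((parts.scheme, netloc) + tuple(parts[2:]))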
|
69 | 69 | |
|
70 | 70 | class FakeUrlGenerator(object): |
|
71 | 71 | |
|
72 | 72 | def __init__(self, current_url=None, default_route=None, **routes): |
|
73 | 73 | """Initialize using specified 'current' URL template, |
|
74 | 74 | default route template, and all other arguments describing known |
|
75 | 75 | routes (format: route=template)""" |
|
76 | 76 | self.current_url = current_url |
|
77 | 77 | self.default_route = default_route |
|
78 | 78 | self.routes = routes |
|
79 | 79 | |
|
80 | 80 | def __call__(self, route_name, *args, **kwargs): |
|
81 | 81 | if route_name in self.routes: |
|
82 | 82 | return self.routes[route_name] % kwargs |
|
83 | 83 | |
|
84 | 84 | return self.default_route % kwargs |
|
85 | 85 | |
|
86 | 86 | def current(self, *args, **kwargs): |
|
87 | 87 | return self.current_url % kwargs |
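
For instance, the tests further down stub out kallithea.config.routing.url with it:

    fake_url = FakeUrlGenerator(changeset_home='/%(repo_name)s/changeset/%(revision)s')
    fake_url('changeset_home', repo_name='repo', revision='deadbeef')
    # -> '/repo/changeset/deadbeef'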
|
88 | 88 | |
|
89 | 89 | class TestLibs(TestController): |
|
90 | 90 | |
|
91 | 91 | @parametrize('test_url,expected,expected_creds', TEST_URLS) |
|
92 | 92 | def test_uri_filter(self, test_url, expected, expected_creds): |
|
93 | 93 | from kallithea.lib.utils2 import uri_filter |
|
94 | 94 | assert uri_filter(test_url) == expected |
|
95 | 95 | |
|
96 | 96 | @parametrize('test_url,expected,expected_creds', TEST_URLS) |
|
97 | 97 | def test_credentials_filter(self, test_url, expected, expected_creds): |
|
98 | 98 | from kallithea.lib.utils2 import credentials_filter |
|
99 | 99 | assert credentials_filter(test_url) == expected_creds |
|
100 | 100 | |
|
101 | 101 | @parametrize('str_bool,expected', [ |
|
102 | 102 | ('t', True), |
|
103 | 103 | ('true', True), |
|
104 | 104 | ('y', True), |
|
105 | 105 | ('yes', True), |
|
106 | 106 | ('on', True), |
|
107 | 107 | ('1', True), |
|
108 | 108 | ('Y', True), |
|
109 | 109 | ('yeS', True), |
|
110 | 110 | ('Y', True), |
|
111 | 111 | ('TRUE', True), |
|
112 | 112 | ('T', True), |
|
113 | 113 | ('False', False), |
|
114 | 114 | ('F', False), |
|
115 | 115 | ('FALSE', False), |
|
116 | 116 | ('0', False), |
|
117 | 117 | ('-1', False), |
|
118 | 118 | ('', False) |
|
119 | 119 | ]) |
|
120 | 120 | def test_str2bool(self, str_bool, expected): |
|
121 | 121 | from kallithea.lib.utils2 import str2bool |
|
122 | 122 | assert str2bool(str_bool) == expected |
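
The table above pins down the semantics; a minimal sketch consistent with it (the real helper is kallithea.lib.utils2.str2bool, which also handles None and non-string input):

    def str2bool(s):
        # truthy markers, compared case-insensitively; everything else is False
        return str(s).strip().lower() in ('t', 'true', 'y', 'yes', 'on', '1')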
|
123 | 123 | |
|
124 | 124 | def test_mention_extractor(self): |
|
125 | 125 | from kallithea.lib.utils2 import extract_mentioned_usernames |
|
126 | 126 | sample = ( |
|
127 | 127 | "@first hi there @world here's my email username@example.com " |
|
128 | 128 | "@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three " |
|
129 | 129 | "@UPPER @cAmEL @2one_more22 @john please see this http://org.pl " |
|
130 | 130 | "@marian.user just do it @marco-polo and next extract @marco_polo " |
|
131 | 131 | "user.dot hej ! not-needed maril@example.com" |
|
132 | 132 | ) |
|
133 | 133 | |
|
134 | 134 | expected = set([ |
|
135 | 135 | '2one_more22', 'first', 'lukaszb', 'one', 'one_more22', 'UPPER', 'cAmEL', 'john', |
|
136 | 136 | 'marian.user', 'marco-polo', 'marco_polo', 'world']) |
|
137 | 137 | assert expected == set(extract_mentioned_usernames(sample)) |
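
A rough approximation of the extraction rule the sample encodes (hypothetical, not the project's actual regex): '@' must follow start-of-string or whitespace, and the name starts alphanumerically and may contain '-', '_' and '.':

    import re

    MENTION_PAT = re.compile(r'(?:^|(?<=\s))@([a-zA-Z0-9][-\w.]*)')

    def extract_mentions(text):
        return MENTION_PAT.findall(text)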
|
138 | 138 | |
|
139 | 139 | @parametrize('age_args,expected', [ |
|
140 | 140 | (dict(), u'just now'), |
|
141 | 141 | (dict(seconds= -1), u'1 second ago'), |
|
142 | 142 | (dict(seconds= -60 * 2), u'2 minutes ago'), |
|
143 | 143 | (dict(hours= -1), u'1 hour ago'), |
|
144 | 144 | (dict(hours= -24), u'1 day ago'), |
|
145 | 145 | (dict(hours= -24 * 5), u'5 days ago'), |
|
146 | 146 | (dict(months= -1), u'1 month ago'), |
|
147 | 147 | (dict(months= -1, days= -2), u'1 month and 2 days ago'), |
|
148 | 148 | (dict(months= -1, days= -20), u'1 month and 19 days ago'), |
|
149 | 149 | (dict(years= -1, months= -1), u'1 year and 1 month ago'), |
|
150 | 150 | (dict(years= -1, months= -10), u'1 year and 10 months ago'), |
|
151 | 151 | (dict(years= -2, months= -4), u'2 years and 4 months ago'), |
|
152 | 152 | (dict(years= -2, months= -11), u'2 years and 11 months ago'), |
|
153 | 153 | (dict(years= -3, months= -2), u'3 years and 2 months ago'), |
|
154 | 154 | ]) |
|
155 | 155 | def test_age(self, age_args, expected): |
|
156 | 156 | from kallithea.lib.utils2 import age |
|
157 | 157 | from dateutil import relativedelta |
|
158 | 158 | with test_context(self.app): |
|
159 | 159 | n = datetime.datetime(year=2012, month=5, day=17) |
|
160 | 160 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) |
|
161 | 161 | assert age(n + delt(**age_args), now=n) == expected |
|
162 | 162 | |
|
163 | 163 | @parametrize('age_args,expected', [ |
|
164 | 164 | (dict(), u'just now'), |
|
165 | 165 | (dict(seconds= -1), u'1 second ago'), |
|
166 | 166 | (dict(seconds= -60 * 2), u'2 minutes ago'), |
|
167 | 167 | (dict(hours= -1), u'1 hour ago'), |
|
168 | 168 | (dict(hours= -24), u'1 day ago'), |
|
169 | 169 | (dict(hours= -24 * 5), u'5 days ago'), |
|
170 | 170 | (dict(months= -1), u'1 month ago'), |
|
171 | 171 | (dict(months= -1, days= -2), u'1 month ago'), |
|
172 | 172 | (dict(months= -1, days= -20), u'1 month ago'), |
|
173 | 173 | (dict(years= -1, months= -1), u'13 months ago'), |
|
174 | 174 | (dict(years= -1, months= -10), u'22 months ago'), |
|
175 | 175 | (dict(years= -2, months= -4), u'2 years ago'), |
|
176 | 176 | (dict(years= -2, months= -11), u'3 years ago'), |
|
177 | 177 | (dict(years= -3, months= -2), u'3 years ago'), |
|
178 | 178 | (dict(years= -4, months= -8), u'5 years ago'), |
|
179 | 179 | ]) |
|
180 | 180 | def test_age_short(self, age_args, expected): |
|
181 | 181 | from kallithea.lib.utils2 import age |
|
182 | 182 | from dateutil import relativedelta |
|
183 | 183 | with test_context(self.app): |
|
184 | 184 | n = datetime.datetime(year=2012, month=5, day=17) |
|
185 | 185 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) |
|
186 | 186 | assert age(n + delt(**age_args), show_short_version=True, now=n) == expected |
|
187 | 187 | |
|
188 | 188 | @parametrize('age_args,expected', [ |
|
189 | 189 | (dict(), u'just now'), |
|
190 | 190 | (dict(seconds=1), u'in 1 second'), |
|
191 | 191 | (dict(seconds=60 * 2), u'in 2 minutes'), |
|
192 | 192 | (dict(hours=1), u'in 1 hour'), |
|
193 | 193 | (dict(hours=24), u'in 1 day'), |
|
194 | 194 | (dict(hours=24 * 5), u'in 5 days'), |
|
195 | 195 | (dict(months=1), u'in 1 month'), |
|
196 | 196 | (dict(months=1, days=1), u'in 1 month and 1 day'), |
|
197 | 197 | (dict(years=1, months=1), u'in 1 year and 1 month') |
|
198 | 198 | ]) |
|
199 | 199 | def test_age_in_future(self, age_args, expected): |
|
200 | 200 | from kallithea.lib.utils2 import age |
|
201 | 201 | from dateutil import relativedelta |
|
202 | 202 | with test_context(self.app): |
|
203 | 203 | n = datetime.datetime(year=2012, month=5, day=17) |
|
204 | 204 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) |
|
205 | 205 | assert age(n + delt(**age_args), now=n) == expected |
|
206 | 206 | |
|
207 | 207 | def test_tag_extractor(self): |
|
208 | 208 | sample = ( |
|
209 | 209 | "hello pta[tag] gog [[]] [[] sda ero[or]d [me =>>< sa]" |
|
210 | 210 | "[requires] [stale] [see<>=>] [see => http://example.com]" |
|
211 | 211 | "[requires => url] [lang => python] [just a tag]" |
|
212 | 212 | "[,d] [ => ULR ] [obsolete] [desc]]" |
|
213 | 213 | ) |
|
214 | 214 | from kallithea.lib.helpers import urlify_text |
|
215 | 215 | res = urlify_text(sample, stylize=True) |
|
216 | 216 | assert '<div class="metatag" data-tag="tag">tag</div>' in res |
|
217 | 217 | assert '<div class="metatag" data-tag="obsolete">obsolete</div>' in res |
|
218 | 218 | assert '<div class="metatag" data-tag="stale">stale</div>' in res |
|
219 | 219 | assert '<div class="metatag" data-tag="lang">python</div>' in res |
|
220 | 220 | assert '<div class="metatag" data-tag="requires">requires => <a href="/url">url</a></div>' in res |
|
221 | 221 | assert '<div class="metatag" data-tag="tag">tag</div>' in res |
|
222 | 222 | |
|
223 | 223 | def test_alternative_gravatar(self): |
|
224 | 224 | from kallithea.lib.helpers import gravatar_url |
|
225 | 225 | _md5 = lambda s: hashlib.md5(s).hexdigest() |
|
226 | 226 | |
|
227 | # | |

227 | # mock tg.tmpl_context | |
|
228 | 228 | def fake_tmpl_context(_url): |
|
229 | 229 | _c = AttributeDict() |
|
230 | 230 | _c.visual = AttributeDict() |
|
231 | 231 | _c.visual.use_gravatar = True |
|
232 | 232 | _c.visual.gravatar_url = _url |
|
233 | 233 | |
|
234 | 234 | return _c |
|
235 | 235 | |
|
236 | 236 | fake_url = FakeUrlGenerator(current_url='https://example.com') |
|
237 | 237 | with mock.patch('kallithea.config.routing.url', fake_url): |
|
238 | 238 | fake = fake_tmpl_context(_url='http://example.com/{email}') |
|
239 | with mock.patch(' | |

239 | with mock.patch('tg.tmpl_context', fake): | |
|
240 | 240 | from kallithea.config.routing import url |
|
241 | 241 | assert url.current() == 'https://example.com' |
|
242 | 242 | grav = gravatar_url(email_address='test@example.com', size=24) |
|
243 | 243 | assert grav == 'http://example.com/test@example.com' |
|
244 | 244 | |
|
245 | 245 | fake = fake_tmpl_context(_url='http://example.com/{email}') |
|
246 | with mock.patch(' | |

246 | with mock.patch('tg.tmpl_context', fake): | |
|
247 | 247 | grav = gravatar_url(email_address='test@example.com', size=24) |
|
248 | 248 | assert grav == 'http://example.com/test@example.com' |
|
249 | 249 | |
|
250 | 250 | fake = fake_tmpl_context(_url='http://example.com/{md5email}') |
|
251 | with mock.patch(' | |

251 | with mock.patch('tg.tmpl_context', fake): | |
|
252 | 252 | em = 'test@example.com' |
|
253 | 253 | grav = gravatar_url(email_address=em, size=24) |
|
254 | 254 | assert grav == 'http://example.com/%s' % (_md5(em)) |
|
255 | 255 | |
|
256 | 256 | fake = fake_tmpl_context(_url='http://example.com/{md5email}/{size}') |
|
257 | with mock.patch(' | |

257 | with mock.patch('tg.tmpl_context', fake): | |
|
258 | 258 | em = 'test@example.com' |
|
259 | 259 | grav = gravatar_url(email_address=em, size=24) |
|
260 | 260 | assert grav == 'http://example.com/%s/%s' % (_md5(em), 24) |
|
261 | 261 | |
|
262 | 262 | fake = fake_tmpl_context(_url='{scheme}://{netloc}/{md5email}/{size}') |
|
263 | with mock.patch(' | |

263 | with mock.patch('tg.tmpl_context', fake): | |
|
264 | 264 | em = 'test@example.com' |
|
265 | 265 | grav = gravatar_url(email_address=em, size=24) |
|
266 | 266 | assert grav == 'https://example.com/%s/%s' % (_md5(em), 24) |
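
The gravatar_url templates support the placeholders exercised above; a sketch of the expansion (the real kallithea.lib.helpers code derives scheme and netloc from the current URL, which is why the tests patch url.current(), rather than taking them as parameters):

    import hashlib

    def expand_gravatar(tmpl, email, size, scheme='https', netloc='example.com'):
        return tmpl.format(email=email, size=size, scheme=scheme, netloc=netloc,
                           md5email=hashlib.md5(email).hexdigest())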
|
267 | 267 | |
|
268 | 268 | @parametrize('tmpl,repo_name,overrides,prefix,expected', [ |
|
269 | 269 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '', 'http://vps1:8000/group/repo1'), |
|
270 | 270 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'username'}, '', 'http://username@vps1:8000/group/repo1'), |
|
271 | 271 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '/prefix', 'http://vps1:8000/prefix/group/repo1'), |
|
272 | 272 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/prefix', 'http://user@vps1:8000/prefix/group/repo1'), |
|
273 | 273 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'username'}, '/prefix', 'http://username@vps1:8000/prefix/group/repo1'), |
|
274 | 274 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/prefix/', 'http://user@vps1:8000/prefix/group/repo1'), |
|
275 | 275 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'username'}, '/prefix/', 'http://username@vps1:8000/prefix/group/repo1'), |
|
276 | 276 | ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {}, '', 'http://vps1:8000/_23'), |
|
277 | 277 | ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'username'}, '', 'http://username@vps1:8000/_23'), |
|
278 | 278 | ('http://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'username'}, '', 'http://username@vps1:8000/_23'), |
|
279 | 279 | ('http://{netloc}/_{repoid}', 'group/repo1', {'user': 'username'}, '', 'http://vps1:8000/_23'), |
|
280 | 280 | ('https://{user}@proxy1.example.com/{repo}', 'group/repo1', {'user': 'username'}, '', 'https://username@proxy1.example.com/group/repo1'), |
|
281 | 281 | ('https://{user}@proxy1.example.com/{repo}', 'group/repo1', {}, '', 'https://proxy1.example.com/group/repo1'), |
|
282 | 282 | ('https://proxy1.example.com/{user}/{repo}', 'group/repo1', {'user': 'username'}, '', 'https://proxy1.example.com/username/group/repo1'), |
|
283 | 283 | ]) |
|
284 | 284 | def test_clone_url_generator(self, tmpl, repo_name, overrides, prefix, expected): |
|
285 | 285 | from kallithea.lib.utils2 import get_clone_url |
|
286 | 286 | clone_url = get_clone_url(uri_tmpl=tmpl, qualified_home_url='http://vps1:8000'+prefix, |
|
287 | 287 | repo_name=repo_name, repo_id=23, **overrides) |
|
288 | 288 | assert clone_url == expected |
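
An illustration of how such a clone URI template could be expanded (the real kallithea.lib.utils2.get_clone_url also derives {scheme}, {netloc} and the path prefix from qualified_home_url; the names here are hypothetical):

    def expand_clone_tmpl(tmpl, repo_name, repo_id, user=''):
        url = tmpl.format(user=user, repo=repo_name, repoid=repo_id,
                          scheme='http', netloc='vps1:8000')
        return url.replace('://@', '://')  # no user given -> drop the '@'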
|
289 | 289 | |
|
290 | 290 | def _quick_url(self, text, tmpl="""<a class="revision-link" href="%s">%s</a>""", url_=None): |
|
291 | 291 | """ |
|
292 | 292 | Changes `some text url[foo]` => `some text <a href="/">foo</a>` |
|
293 | 293 | |
|
294 | 294 | :param text: |
|
295 | 295 | """ |
|
296 | 296 | import re |
|
297 | 297 | # quickly change expected url[] into a link |
|
298 | 298 | URL_PAT = re.compile(r'(?:url\[)(.+?)(?:\])') |
|
299 | 299 | |
|
300 | 300 | def url_func(match_obj): |
|
301 | 301 | _url = match_obj.groups()[0] |
|
302 | 302 | return tmpl % (url_ or '/repo_name/changeset/%s' % _url, _url) |
|
303 | 303 | return URL_PAT.sub(url_func, text) |
|
304 | 304 | |
|
305 | 305 | @parametrize('sample,expected', [ |
|
306 | 306 | ("", |
|
307 | 307 | ""), |
|
308 | 308 | ("git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68", |
|
309 | 309 | """git-svn-id: <a href="https://svn.apache.org/repos/asf/libcloud/trunk@1441655">https://svn.apache.org/repos/asf/libcloud/trunk@1441655</a> 13f79535-47bb-0310-9956-ffa450edef68"""), |
|
310 | 310 | ("from rev 000000000000", |
|
311 | 311 | """from rev url[000000000000]"""), |
|
312 | 312 | ("from rev 000000000000123123 also rev 000000000000", |
|
313 | 313 | """from rev url[000000000000123123] also rev url[000000000000]"""), |
|
314 | 314 | ("this should-000 00", |
|
315 | 315 | """this should-000 00"""), |
|
316 | 316 | ("longtextffffffffff rev 123123123123", |
|
317 | 317 | """longtextffffffffff rev url[123123123123]"""), |
|
318 | 318 | ("rev ffffffffffffffffffffffffffffffffffffffffffffffffff", |
|
319 | 319 | """rev ffffffffffffffffffffffffffffffffffffffffffffffffff"""), |
|
320 | 320 | ("ffffffffffff some text traalaa", |
|
321 | 321 | """url[ffffffffffff] some text traalaa"""), |
|
322 | 322 | ("""Multi line |
|
323 | 323 | 123123123123 |
|
324 | 324 | some text 123123123123 |
|
325 | 325 | sometimes ! |
|
326 | 326 | """, |
|
327 | 327 | """Multi line<br/>""" |
|
328 | 328 | """ url[123123123123]<br/>""" |
|
329 | 329 | """ some text url[123123123123]<br/>""" |
|
330 | 330 | """ sometimes !"""), |
|
331 | 331 | ]) |
|
332 | 332 | def test_urlify_text(self, sample, expected): |
|
333 | 333 | expected = self._quick_url(expected) |
|
334 | 334 | fake_url = FakeUrlGenerator(changeset_home='/%(repo_name)s/changeset/%(revision)s') |
|
335 | 335 | with mock.patch('kallithea.config.routing.url', fake_url): |
|
336 | 336 | from kallithea.lib.helpers import urlify_text |
|
337 | 337 | assert urlify_text(sample, 'repo_name') == expected |
|
338 | 338 | |
|
339 | 339 | @parametrize('sample,expected,url_', [ |
|
340 | 340 | ("", |
|
341 | 341 | "", |
|
342 | 342 | ""), |
|
343 | 343 | ("https://svn.apache.org/repos", |
|
344 | 344 | """url[https://svn.apache.org/repos]""", |
|
345 | 345 | "https://svn.apache.org/repos"), |
|
346 | 346 | ("http://svn.apache.org/repos", |
|
347 | 347 | """url[http://svn.apache.org/repos]""", |
|
348 | 348 | "http://svn.apache.org/repos"), |
|
349 | 349 | ("from rev a also rev http://google.com", |
|
350 | 350 | """from rev a also rev url[http://google.com]""", |
|
351 | 351 | "http://google.com"), |
|
352 | 352 | ("http://imgur.com/foo.gif inline http://imgur.com/foo.gif ending http://imgur.com/foo.gif", |
|
353 | 353 | """url[http://imgur.com/foo.gif] inline url[http://imgur.com/foo.gif] ending url[http://imgur.com/foo.gif]""", |
|
354 | 354 | "http://imgur.com/foo.gif"), |
|
355 | 355 | ("""Multi line |
|
356 | 356 | https://foo.bar.example.com |
|
357 | 357 | some text lalala""", |
|
358 | 358 | """Multi line<br/>""" |
|
359 | 359 | """ url[https://foo.bar.example.com]<br/>""" |
|
360 | 360 | """ some text lalala""", |
|
361 | 361 | "https://foo.bar.example.com"), |
|
362 | 362 | ("@mention @someone", |
|
363 | 363 | """<b>@mention</b> <b>@someone</b>""", |
|
364 | 364 | ""), |
|
365 | 365 | ("deadbeefcafe 123412341234", |
|
366 | 366 | """<a class="revision-link" href="/repo_name/changeset/deadbeefcafe">deadbeefcafe</a> <a class="revision-link" href="/repo_name/changeset/123412341234">123412341234</a>""", |
|
367 | 367 | ""), |
|
368 | 368 | ("We support * markup for *bold* markup of *single or multiple* words, " |
|
369 | 369 | "*a bit @like http://slack.com*. " |
|
370 | 370 | "The first * must come after whitespace and not be followed by whitespace, " |
|
371 | 371 | "contain anything but * and newline until the next *, " |
|
372 | 372 | "which must not come after whitespace " |
|
373 | 373 | "and not be followed by * or alphanumerical *characters*.", |
|
374 | 374 | """We support * markup for <b>*bold*</b> markup of <b>*single or multiple*</b> words, """ |
|
375 | 375 | """<b>*a bit <b>@like</b> <a href="http://slack.com">http://slack.com</a>*</b>. """ |
|
376 | 376 | """The first * must come after whitespace and not be followed by whitespace, """ |
|
377 | 377 | """contain anything but * and newline until the next *, """ |
|
378 | 378 | """which must not come after whitespace """ |
|
379 | 379 | """and not be followed by * or alphanumerical <b>*characters*</b>.""", |
|
380 | 380 | "-"), |
|
381 | 381 | # tags are covered by test_tag_extractor |
|
382 | 382 | ]) |
|
383 | 383 | def test_urlify_test(self, sample, expected, url_): |
|
384 | 384 | expected = self._quick_url(expected, |
|
385 | 385 | tmpl="""<a href="%s">%s</a>""", url_=url_) |
|
386 | 386 | fake_url = FakeUrlGenerator(changeset_home='/%(repo_name)s/changeset/%(revision)s') |
|
387 | 387 | with mock.patch('kallithea.config.routing.url', fake_url): |
|
388 | 388 | from kallithea.lib.helpers import urlify_text |
|
389 | 389 | assert urlify_text(sample, 'repo_name', stylize=True) == expected |
|
390 | 390 | |
|
391 | 391 | @parametrize('sample,expected', [ |
|
392 | 392 | ("deadbeefcafe @mention, and http://foo.bar/ yo", |
|
393 | 393 | """<a class="revision-link" href="/repo_name/changeset/deadbeefcafe">deadbeefcafe</a>""" |
|
394 | 394 | """<a class="message-link" href="#the-link"> <b>@mention</b>, and </a>""" |
|
395 | 395 | """<a href="http://foo.bar/">http://foo.bar/</a>""" |
|
396 | 396 | """<a class="message-link" href="#the-link"> yo</a>"""), |
|
397 | 397 | ]) |
|
398 | 398 | def test_urlify_link(self, sample, expected): |
|
399 | 399 | fake_url = FakeUrlGenerator(changeset_home='/%(repo_name)s/changeset/%(revision)s') |
|
400 | 400 | with mock.patch('kallithea.config.routing.url', fake_url): |
|
401 | 401 | from kallithea.lib.helpers import urlify_text |
|
402 | 402 | assert urlify_text(sample, 'repo_name', link_='#the-link') == expected |
|
403 | 403 | |
|
404 | 404 | @parametrize('test,expected', [ |
|
405 | 405 | ("", None), |
|
406 | 406 | ("/_2", '2'), |
|
407 | 407 | ("_2", '2'), |
|
408 | 408 | ("/_2/", '2'), |
|
409 | 409 | ("_2/", '2'), |
|
410 | 410 | |
|
411 | 411 | ("/_21", '21'), |
|
412 | 412 | ("_21", '21'), |
|
413 | 413 | ("/_21/", '21'), |
|
414 | 414 | ("_21/", '21'), |
|
415 | 415 | |
|
416 | 416 | ("/_21/foobar", '21'), |
|
417 | 417 | ("_21/121", '21'), |
|
418 | 418 | ("/_21/_12", '21'), |
|
419 | 419 | ("_21/prefix/foo", '21'), |
|
420 | 420 | ]) |
|
421 | 421 | def test_get_repo_by_id(self, test, expected): |
|
422 | 422 | from kallithea.lib.utils import _extract_id_from_repo_name |
|
423 | 423 | _test = _extract_id_from_repo_name(test) |
|
424 | 424 | assert _test == expected, 'url:%s, got:`%s` expected: `%s`' % (test, _test, expected) |
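
A sketch matching the expectations above: the id is the run of digits right after a leading '_', with an optional leading '/' and anything after the digits ignored (an illustration, not the actual _extract_id_from_repo_name):

    import re

    def extract_id(repo_name):
        m = re.match(r'/?_(\d+)', repo_name)
        return m.group(1) if m else None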
@@ -1,614 +1,620 b'' | |||
|
1 | 1 | ################################################################################ |
|
2 | 2 | ################################################################################ |
|
3 | 3 | # Kallithea - config for tests: # |
|
4 | 4 | # initial_repo_scan = true # |
|
5 | 5 | # sqlalchemy and kallithea_test.sqlite # |
|
6 | 6 | # custom logging # |
|
7 | 7 | # # |
|
8 | 8 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
9 | 9 | ################################################################################ |
|
10 | 10 | ################################################################################ |
|
11 | 11 | |
|
12 | 12 | [DEFAULT] |
|
13 | 13 | debug = true |
|
14 | 14 | pdebug = false |
|
15 | 15 | |
|
16 | 16 | ################################################################################ |
|
17 | 17 | ## Email settings ## |
|
18 | 18 | ## ## |
|
19 | 19 | ## Refer to the documentation ("Email settings") for more details. ## |
|
20 | 20 | ## ## |
|
21 | 21 | ## It is recommended to use a valid sender address that passes access ## |
|
22 | 22 | ## validation and spam filtering in mail servers. ## |
|
23 | 23 | ################################################################################ |
|
24 | 24 | |
|
25 | 25 | ## 'From' header for application emails. You can optionally add a name. |
|
26 | 26 | ## Default: |
|
27 | 27 | #app_email_from = Kallithea |
|
28 | 28 | ## Examples: |
|
29 | 29 | #app_email_from = Kallithea <kallithea-noreply@example.com> |
|
30 | 30 | #app_email_from = kallithea-noreply@example.com |
|
31 | 31 | |
|
32 | 32 | ## Subject prefix for application emails. |
|
33 | 33 | ## A space between this prefix and the real subject is automatically added. |
|
34 | 34 | ## Default: |
|
35 | 35 | #email_prefix = |
|
36 | 36 | ## Example: |
|
37 | 37 | #email_prefix = [Kallithea] |
|
38 | 38 | |
|
39 | 39 | ## Recipients for error emails and fallback recipients of application mails. |
|
40 | 40 | ## Multiple addresses can be specified, space-separated. |
|
41 | 41 | ## Only addresses are allowed, do not add any name part. |
|
42 | 42 | ## Default: |
|
43 | 43 | #email_to = |
|
44 | 44 | ## Examples: |
|
45 | 45 | #email_to = admin@example.com |
|
46 | 46 | #email_to = admin@example.com another_admin@example.com |
|
47 | 47 | |
|
48 | 48 | ## 'From' header for error emails. You can optionally add a name. |
|
49 | 49 | ## Default: |
|
50 | 50 | #error_email_from = pylons@yourapp.com |
|
51 | 51 | ## Examples: |
|
52 | 52 | #error_email_from = Kallithea Errors <kallithea-noreply@example.com> |
|
53 | 53 | #error_email_from = paste_error@example.com |
|
54 | 54 | |
|
55 | 55 | ## SMTP server settings |
|
56 | 56 | ## If specifying credentials, make sure to use secure connections. |
|
57 | 57 | ## Default: Send unencrypted unauthenticated mails to the specified smtp_server. |
|
58 | 58 | ## For "SSL", use smtp_use_ssl = true and smtp_port = 465. |
|
59 | 59 | ## For "STARTTLS", use smtp_use_tls = true and smtp_port = 587. |
|
60 | 60 | #smtp_server = smtp.example.com |
|
61 | 61 | #smtp_username = |
|
62 | 62 | #smtp_password = |
|
63 | 63 | #smtp_port = 25 |
|
64 | 64 | #smtp_use_ssl = false |
|
65 | 65 | #smtp_use_tls = false |
|
66 | 66 | |
|
67 | 67 | [server:main] |
|
68 | 68 | ## Gearbox default web server ## |
|
69 | 69 | #use = egg:gearbox#wsgiref |
|
70 | 70 | ## nr of worker threads to spawn |
|
71 | 71 | #threadpool_workers = 1 |
|
72 | 72 | ## max request before thread respawn |
|
73 | 73 | #threadpool_max_requests = 100 |
|
74 | 74 | ## option to use threads instead of processes |
|
75 | 75 | #use_threadpool = true |
|
76 | 76 | |
|
77 | 77 | ## Gearbox gevent web server ## |
|
78 | 78 | #use = egg:gearbox#gevent |
|
79 | 79 | |
|
80 | 80 | ## WAITRESS ## |
|
81 | 81 | use = egg:waitress#main |
|
82 | 82 | ## number of worker threads |
|
83 | 83 | threads = 1 |
|
84 | 84 | ## MAX BODY SIZE 100GB |
|
85 | 85 | max_request_body_size = 107374182400 |
|
86 | 86 | ## use poll instead of select, fixes fd limits, may not work on old |
|
87 | 87 | ## windows systems. |
|
88 | 88 | #asyncore_use_poll = True |
|
89 | 89 | |
|
90 | 90 | ## GUNICORN ## |
|
91 | 91 | #use = egg:gunicorn#main |
|
92 | 92 | ## number of process workers. You must set `instance_id = *` when this option |
|
93 | 93 | ## is set to more than one worker |
|
94 | 94 | #workers = 1 |
|
95 | 95 | ## process name |
|
96 | 96 | #proc_name = kallithea |
|
97 | 97 | ## type of worker class, one of sync, eventlet, gevent, tornado |
|
98 | 98 | ## for bigger setups, using a worker class other than sync is recommended |
|
99 | 99 | #worker_class = sync |
|
100 | 100 | #max_requests = 1000 |
|
101 | 101 | ## amount of time a worker can handle request before it gets killed and |
|
102 | 102 | ## restarted |
|
103 | 103 | #timeout = 3600 |
|
104 | 104 | |
|
105 | 105 | ## UWSGI ## |
|
106 | 106 | ## run with uwsgi --ini-paste-logged <inifile.ini> |
|
107 | 107 | #[uwsgi] |
|
108 | 108 | #socket = /tmp/uwsgi.sock |
|
109 | 109 | #master = true |
|
110 | 110 | #http = 127.0.0.1:5000 |
|
111 | 111 | |
|
112 | 112 | ## run as daemon and redirect all output to a file |
|
113 | 113 | #daemonize = ./uwsgi_kallithea.log |
|
114 | 114 | |
|
115 | 115 | ## master process PID |
|
116 | 116 | #pidfile = ./uwsgi_kallithea.pid |
|
117 | 117 | |
|
118 | 118 | ## stats server with workers statistics, use uwsgitop |
|
119 | 119 | ## for monitoring, `uwsgitop 127.0.0.1:1717` |
|
120 | 120 | #stats = 127.0.0.1:1717 |
|
121 | 121 | #memory-report = true |
|
122 | 122 | |
|
123 | 123 | ## log 5XX errors |
|
124 | 124 | #log-5xx = true |
|
125 | 125 | |
|
126 | 126 | ## Set the socket listen queue size. |
|
127 | 127 | #listen = 256 |
|
128 | 128 | |
|
129 | 129 | ## Gracefully Reload workers after the specified amount of managed requests |
|
130 | 130 | ## (avoid memory leaks). |
|
131 | 131 | #max-requests = 1000 |
|
132 | 132 | |
|
133 | 133 | ## enable large buffers |
|
134 | 134 | #buffer-size = 65535 |
|
135 | 135 | |
|
136 | 136 | ## socket and http timeouts ## |
|
137 | 137 | #http-timeout = 3600 |
|
138 | 138 | #socket-timeout = 3600 |
|
139 | 139 | |
|
140 | 140 | ## Log requests slower than the specified number of milliseconds. |
|
141 | 141 | #log-slow = 10 |
|
142 | 142 | |
|
143 | 143 | ## Exit if no app can be loaded. |
|
144 | 144 | #need-app = true |
|
145 | 145 | |
|
146 | 146 | ## Set lazy mode (load apps in workers instead of master). |
|
147 | 147 | #lazy = true |
|
148 | 148 | |
|
149 | 149 | ## scaling ## |
|
150 | 150 | ## set cheaper algorithm to use, if not set default will be used |
|
151 | 151 | #cheaper-algo = spare |
|
152 | 152 | |
|
153 | 153 | ## minimum number of workers to keep at all times |
|
154 | 154 | #cheaper = 1 |
|
155 | 155 | |
|
156 | 156 | ## number of workers to spawn at startup |
|
157 | 157 | #cheaper-initial = 1 |
|
158 | 158 | |
|
159 | 159 | ## maximum number of workers that can be spawned |
|
160 | 160 | #workers = 4 |
|
161 | 161 | |
|
162 | 162 | ## how many workers should be spawned at a time |
|
163 | 163 | #cheaper-step = 1 |
|
164 | 164 | |
|
165 | 165 | ## COMMON ## |
|
166 | 166 | host = 127.0.0.1 |
|
167 | 167 | #port = 5000 |
|
168 | 168 | port = 4999 |
|
169 | 169 | |
|
170 | 170 | ## middleware for hosting the WSGI application under a URL prefix |
|
171 | 171 | #[filter:proxy-prefix] |
|
172 | 172 | #use = egg:PasteDeploy#prefix |
|
173 | 173 | #prefix = /<your-prefix> |
|
174 | 174 | |
|
175 | 175 | [app:main] |
|
176 | 176 | use = egg:kallithea |
|
177 | 177 | ## enable proxy prefix middleware |
|
178 | 178 | #filter-with = proxy-prefix |
|
179 | 179 | |
|
180 | 180 | full_stack = true |
|
181 | 181 | static_files = true |
|
182 | 182 | ## Available Languages: |
|
183 | 183 | ## cs de fr hu ja nl_BE pl pt_BR ru sk zh_CN zh_TW |
|
184 | 184 | lang = |
|
185 | 185 | #cache_dir = %(here)s/data |
|
186 | 186 | cache_dir = %(here)s/../../data/test/cache |
|
187 | 187 | #index_dir = %(here)s/data/index |
|
188 | 188 | index_dir = %(here)s/../../data/test/index |
|
189 | 189 | |
|
190 | 190 | ## perform a full repository scan on each server start, this should be |
|
191 | 191 | ## set to false after first startup, to allow faster server restarts. |
|
192 | 192 | #initial_repo_scan = false |
|
193 | 193 | initial_repo_scan = true |
|
194 | 194 | |
|
195 | 195 | ## uncomment and set this path to use archive download cache |
|
196 | 196 | #archive_cache_dir = %(here)s/tarballcache |
|
197 | 197 | archive_cache_dir = %(here)s/../../data/test/tarballcache |
|
198 | 198 | |
|
199 | 199 | ## change this to unique ID for security |
|
200 | 200 | app_instance_uuid = test |
|
201 | 201 | |
|
202 | 202 | ## cut off limit for large diffs (size in bytes) |
|
203 | 203 | cut_off_limit = 256000 |
|
204 | 204 | |
|
205 | 205 | ## force https in Kallithea, fixes https redirects, assumes it's always https |
|
206 | 206 | force_https = false |
|
207 | 207 | |
|
208 | 208 | ## use Strict-Transport-Security headers |
|
209 | 209 | use_htsts = false |
|
210 | 210 | |
|
211 | 211 | ## number of commits stats will parse on each iteration |
|
212 | 212 | commit_parse_limit = 25 |
|
213 | 213 | |
|
214 | 214 | ## path to git executable |
|
215 | 215 | git_path = git |
|
216 | 216 | |
|
217 | 217 | ## git rev filter option; --all is the default filter. If you need to |

218 | 218 | ## hide all refs in the changelog, switch this to --branches --tags |
|
219 | 219 | #git_rev_filter = --branches --tags |
|
220 | 220 | |
|
221 | 221 | ## RSS feed options |
|
222 | 222 | rss_cut_off_limit = 256000 |
|
223 | 223 | rss_items_per_page = 10 |
|
224 | 224 | rss_include_diff = false |
|
225 | 225 | |
|
226 | 226 | ## options for showing and identifying changesets |
|
227 | 227 | show_sha_length = 12 |
|
228 | 228 | #show_revision_number = false |
|
229 | 229 | show_revision_number = true |
|
230 | 230 | |
|
231 | 231 | ## Canonical URL to use when creating full URLs in UI and texts. |
|
232 | 232 | ## Useful when the site is available under different names or protocols. |
|
233 | 233 | ## Defaults to what is provided in the WSGI environment. |
|
234 | 234 | #canonical_url = https://kallithea.example.com/repos |
|
235 | 235 | |
|
236 | 236 | ## gist URL alias, used to create nicer URLs for gists. This should be a |

237 | 237 | ## URL that does rewrites to _admin/gists/<gistid>. |
|
238 | 238 | ## example: http://gist.example.com/{gistid}. Empty means use the internal |
|
239 | 239 | ## Kallithea url, ie. http[s]://kallithea.example.com/_admin/gists/<gistid> |
|
240 | 240 | gist_alias_url = |
|
241 | 241 | |
|
242 | 242 | ## whitelist of API-enabled controllers. This allows adding a list of |

243 | 243 | ## controllers to which access will be enabled by api_key. E.g.: to enable |

244 | 244 | ## API access to raw_files put `FilesController:raw`, to enable access to patches |

245 | 245 | ## add `ChangesetController:changeset_patch`. This list should be "," separated. |

246 | 246 | ## Syntax is <ControllerClass>:<function>. Check debug logs for generated names. |

247 | 247 | ## Recommended settings below are commented out: |
|
248 | 248 | api_access_controllers_whitelist = |
|
249 | 249 | # ChangesetController:changeset_patch, |
|
250 | 250 | # ChangesetController:changeset_raw, |
|
251 | 251 | # FilesController:raw, |
|
252 | 252 | # FilesController:archivefile |
|
253 | 253 | |
|
254 | 254 | ## default encoding used to convert from and to unicode |
|
255 | 255 | ## can also be a comma separated list of encodings in case of mixed encodings |
|
256 | 256 | default_encoding = utf8 |
|
257 | 257 | |
|
258 | 258 | ## issue tracker for Kallithea (leave blank to disable, absent for default) |
|
259 | 259 | #bugtracker = https://bitbucket.org/conservancy/kallithea/issues |
|
260 | 260 | |
|
261 | 261 | ## issue tracking mapping for commits messages |
|
262 | 262 | ## comment out issue_pat, issue_server, issue_prefix to enable |
|
263 | 263 | |
|
264 | 264 | ## pattern to get the issues from commit messages |
|
265 | 265 | ## default one used here is #<numbers> with a regex passive group for `#` |
|
266 | 266 | ## {id} will be all groups matched from this pattern |
|
267 | 267 | |
|
268 | 268 | issue_pat = (?:\s*#)(\d+) |
|
269 | 269 | |
|
270 | 270 | ## server url to the issue, each {id} will be replaced with match |
|
271 | 271 | ## fetched from the regex and {repo} is replaced with full repository name |
|
272 | 272 | ## including groups {repo_name} is replaced with just name of repo |
|
273 | 273 | |
|
274 | 274 | issue_server_link = https://issues.example.com/{repo}/issue/{id} |
|
275 | 275 | |
|
276 | 276 | ## prefix to add to link to indicate it's an url |
|
277 | 277 | ## #314 will be replaced by <issue_prefix><id> |
|
278 | 278 | |
|
279 | 279 | issue_prefix = # |
|
280 | 280 | |
|
281 | 281 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify |
|
282 | 282 | ## multiple patterns, to other issues server, wiki or others |
|
283 | 283 | ## below an example how to create a wiki pattern |
|
284 | 284 | # wiki-some-id -> https://wiki.example.com/some-id |
|
285 | 285 | |
|
286 | 286 | #issue_pat_wiki = (?:wiki-)(.+) |
|
287 | 287 | #issue_server_link_wiki = https://wiki.example.com/{id} |
|
288 | 288 | #issue_prefix_wiki = WIKI- |
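
Roughly, the three settings cooperate like this (illustrative Python, not the actual rendering helper; the example values are the ones configured above):

    import re

    def linkify(message, repo='group/repo'):
        # issue_pat finds the issue number, issue_server_link supplies the
        # href ({id} and {repo} get substituted), issue_prefix the link text
        def repl(m):
            link = ('https://issues.example.com/{repo}/issue/{id}'
                    .replace('{repo}', repo).replace('{id}', m.group(1)))
            return ' <a href="%s">#%s</a>' % (link, m.group(1))
        return re.sub(r'(?:\s*#)(\d+)', repl, message)

    # linkify('fixes #314')
    # -> 'fixes <a href="https://issues.example.com/group/repo/issue/314">#314</a>'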
|
289 | 289 | |
|
290 | 290 | ## alternative HTTP response code for failed authentication. The default HTTP |

291 | 291 | ## response is 401 HTTPUnauthorized. Currently Mercurial clients have trouble |

292 | 292 | ## handling that. Set this variable to 403 to return HTTPForbidden instead |
|
293 | 293 | auth_ret_code = |
|
294 | 294 | |
|
295 | 295 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
296 | 296 | ## codes don't break the transactions while 4XX codes do |
|
297 | 297 | lock_ret_code = 423 |
|
298 | 298 | |
|
299 | 299 | ## allow changing the repository location in the settings page |
|
300 | 300 | allow_repo_location_change = True |
|
301 | 301 | |
|
302 | 302 | ## allows to setup custom hooks in settings page |
|
303 | 303 | ## allow setting up custom hooks in the settings page |
|
304 | 304 | |
|
305 | 305 | ## extra extensions for indexing, space separated and without the leading '.'. |
|
306 | 306 | # index.extensions = |
|
307 | 307 | # gemfile |
|
308 | 308 | # lock |
|
309 | 309 | |
|
310 | 310 | ## extra filenames for indexing, space separated |
|
311 | 311 | # index.filenames = |
|
312 | 312 | # .dockerignore |
|
313 | 313 | # .editorconfig |
|
314 | 314 | # INSTALL |
|
315 | 315 | # CHANGELOG |
|
316 | 316 | |
|
317 | 317 | #################################### |
|
318 | 318 | ### CELERY CONFIG #### |
|
319 | 319 | #################################### |
|
320 | 320 | |
|
321 | 321 | use_celery = false |
|
322 | 322 | |
|
323 | 323 | ## Example: connect to the virtual host 'rabbitmqhost' on localhost as rabbitmq: |
|
324 | 324 | broker.url = amqp://rabbitmq:qewqew@localhost:5672/rabbitmqhost |
|
325 | 325 | |
|
326 | 326 | celery.imports = kallithea.lib.celerylib.tasks |
|
327 | 327 | celery.accept.content = pickle |
|
328 | 328 | celery.result.backend = amqp |
|
329 | 329 | celery.result.dburi = amqp:// |
|
330 | 330 | celery.result.serializer = json |
|
331 | 331 | |
|
332 | 332 | #celery.send.task.error.emails = true |
|
333 | 333 | #celery.amqp.task.result.expires = 18000 |
|
334 | 334 | |
|
335 | 335 | celeryd.concurrency = 2 |
|
336 | 336 | celeryd.max.tasks.per.child = 1 |
|
337 | 337 | |
|
338 | 338 | ## If true, tasks will never be sent to the queue, but executed locally instead. |
|
339 | 339 | celery.always.eager = false |
|
340 | 340 | |
|
341 | 341 | #################################### |
|
342 | 342 | ### BEAKER CACHE #### |
|
343 | 343 | #################################### |
|
344 | 344 | |
|
345 | 345 | #beaker.cache.data_dir = %(here)s/data/cache/data |
|
346 | 346 | beaker.cache.data_dir = %(here)s/../../data/test/cache/data |
|
347 | 347 | #beaker.cache.lock_dir = %(here)s/data/cache/lock |
|
348 | 348 | beaker.cache.lock_dir = %(here)s/../../data/test/cache/lock |
|
349 | 349 | |
|
350 | 350 | beaker.cache.regions = short_term,long_term,sql_cache_short |
|
351 | 351 | |
|
352 | 352 | beaker.cache.short_term.type = memory |
|
353 | 353 | beaker.cache.short_term.expire = 60 |
|
354 | 354 | beaker.cache.short_term.key_length = 256 |
|
355 | 355 | |
|
356 | 356 | beaker.cache.long_term.type = memory |
|
357 | 357 | beaker.cache.long_term.expire = 36000 |
|
358 | 358 | beaker.cache.long_term.key_length = 256 |
|
359 | 359 | |
|
360 | 360 | beaker.cache.sql_cache_short.type = memory |
|
361 | 361 | #beaker.cache.sql_cache_short.expire = 10 |
|
362 | 362 | beaker.cache.sql_cache_short.expire = 1 |
|
363 | 363 | beaker.cache.sql_cache_short.key_length = 256 |
|
364 | 364 | |
|
365 | 365 | #################################### |
|
366 | 366 | ### BEAKER SESSION #### |
|
367 | 367 | #################################### |
|
368 | 368 | |
|
369 | 369 | ## Name of session cookie. Should be unique for a given host and path, even when running |
|
370 | 370 | ## on different ports. Otherwise, cookie sessions will be shared and messed up. |
|
371 | 371 | beaker.session.key = kallithea |
|
372 | 372 | ## Sessions should always only be accessible by the browser, not directly by JavaScript. |
|
373 | 373 | beaker.session.httponly = true |
|
374 | 374 | ## Session lifetime. 2592000 seconds is 30 days. |
|
375 | 375 | beaker.session.timeout = 2592000 |
|
376 | 376 | |
|
377 | 377 | ## Server secret used with HMAC to ensure integrity of cookies. |
|
378 | 378 | beaker.session.secret = {74e0cd75-b339-478b-b129-07dd221def1f} |
|
379 | 379 | ## Further, encrypt the data with AES. |
|
380 | 380 | #beaker.session.encrypt_key = <key_for_encryption> |
|
381 | 381 | #beaker.session.validate_key = <validation_key> |
|
382 | 382 | |
|
383 | 383 | ## Type of storage used for the session, current types are |
|
384 | 384 | ## dbm, file, memcached, database, and memory. |
|
385 | 385 | |
|
386 | 386 | ## File system storage of session data. (default) |
|
387 | 387 | #beaker.session.type = file |
|
388 | 388 | |
|
389 | 389 | ## Cookie only, store all session data inside the cookie. Requires secure secrets. |
|
390 | 390 | #beaker.session.type = cookie |
|
391 | 391 | |
|
392 | 392 | ## Database storage of session data. |
|
393 | 393 | #beaker.session.type = ext:database |
|
394 | 394 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/kallithea |
|
395 | 395 | #beaker.session.table_name = db_session |
|
396 | 396 | |
|
397 | 397 | ############################ |
|
398 | 398 | ## ERROR HANDLING SYSTEMS ## |
|
399 | 399 | ############################ |
|
400 | 400 | |
|
401 | 401 | #################### |
|
402 | 402 | ### [appenlight] ### |
|
403 | 403 | #################### |
|
404 | 404 | |
|
405 | 405 | ## AppEnlight is tailored to work with Kallithea, see |
|
406 | 406 | ## http://appenlight.com for details on how to obtain an account; |

407 | 407 | ## you must install the python package `appenlight_client` to make it work |
|
408 | 408 | |
|
409 | 409 | ## appenlight enabled |
|
410 | 410 | appenlight = false |
|
411 | 411 | |
|
412 | 412 | appenlight.server_url = https://api.appenlight.com |
|
413 | 413 | appenlight.api_key = YOUR_API_KEY |
|
414 | 414 | |
|
415 | 415 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
416 | 416 | |
|
417 | 417 | ## enables 404 error logging (default False) |
|
418 | 418 | appenlight.report_404 = false |
|
419 | 419 | |
|
420 | 420 | ## time in seconds after request is considered being slow (default 1) |
|
421 | 421 | appenlight.slow_request_time = 1 |
|
422 | 422 | |
|
423 | 423 | ## record slow requests in application |
|
424 | 424 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
425 | 425 | appenlight.slow_requests = true |
|
426 | 426 | |
|
427 | 427 | ## enable hooking to application loggers |
|
428 | 428 | #appenlight.logging = true |
|
429 | 429 | |
|
430 | 430 | ## minimum log level for log capture |
|
431 | 431 | #appenlight.logging.level = WARNING |
|
432 | 432 | |
|
433 | 433 | ## send logs only from erroneous/slow requests |
|
434 | 434 | ## (saves API quota for intensive logging) |
|
435 | 435 | appenlight.logging_on_error = false |
|
436 | 436 | |
|
437 | 437 | ## list of additional keywords that should be grabbed from environ object |
|
438 | 438 | ## can be string with comma separated list of words in lowercase |
|
439 | 439 | ## (by default client will always send following info: |
|
440 | 440 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
441 | 441 | ## start with HTTP*); this list can be extended with additional keywords here |
|
442 | 442 | appenlight.environ_keys_whitelist = |
|
443 | 443 | |
|
444 | 444 | ## list of keywords that should be blanked from request object |
|
445 | 445 | ## can be string with comma separated list of words in lowercase |
|
446 | 446 | ## (by default client will always blank keys that contain following words |
|
447 | 447 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
448 | 448 | ## this list can be extended with additional keywords set here |
|
449 | 449 | appenlight.request_keys_blacklist = |
|
450 | 450 | |
|
451 | 451 | ## list of namespaces that should be ignored when gathering log entries |
|
452 | 452 | ## can be string with comma separated list of namespaces |
|
453 | 453 | ## (by default the client ignores own entries: appenlight_client.client) |
|
454 | 454 | appenlight.log_namespace_blacklist = |
|
455 | 455 | |
|
456 | 456 | ################ |
|
457 | 457 | ### [sentry] ### |
|
458 | 458 | ################ |
|
459 | 459 | |
|
460 | 460 | ## sentry is an alternative open source error aggregator |
|
461 | 461 | ## you must install python packages `sentry` and `raven` to enable |
|
462 | 462 | |
|
463 | 463 | sentry.dsn = YOUR_DSN |
|
464 | 464 | sentry.servers = |
|
465 | 465 | sentry.name = |
|
466 | 466 | sentry.key = |
|
467 | 467 | sentry.public_key = |
|
468 | 468 | sentry.secret_key = |
|
469 | 469 | sentry.project = |
|
470 | 470 | sentry.site = |
|
471 | 471 | sentry.include_paths = |
|
472 | 472 | sentry.exclude_paths = |
|
473 | 473 | |
|
474 | 474 | ################################################################################ |
|
475 | 475 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
476 | 476 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
477 | 477 | ## execute malicious code after an exception is raised. ## |
|
478 | 478 | ################################################################################ |
|
479 | 479 | set debug = false |
|
480 | 480 | |
|
481 | 481 | ################################## |
|
482 | 482 | ### LOGVIEW CONFIG ### |
|
483 | 483 | ################################## |
|
484 | 484 | |
|
485 | 485 | logview.sqlalchemy = #faa |
|
486 | 486 | logview.pylons.templating = #bfb |
|
487 | 487 | logview.pylons.util = #eee |
|
488 | 488 | |
|
489 | 489 | ######################################################### |
|
490 | 490 | ### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG ### |
|
491 | 491 | ######################################################### |
|
492 | 492 | |
|
493 | 493 | # SQLITE [default] |
|
494 | 494 | #sqlalchemy.url = sqlite:///%(here)s/kallithea.db?timeout=60 |
|
495 | 495 | sqlalchemy.url = sqlite:///%(here)s/kallithea_test.sqlite |
|
496 | 496 | |
|
497 | 497 | # POSTGRESQL |
|
498 | 498 | #sqlalchemy.url = postgresql://user:pass@localhost/kallithea |
|
499 | 499 | |
|
500 | 500 | # MySQL |
|
501 | 501 | #sqlalchemy.url = mysql://user:pass@localhost/kallithea?charset=utf8 |
|
502 | 502 | |
|
503 | 503 | # see sqlalchemy docs for others |
|
504 | 504 | |
|
505 | 505 | sqlalchemy.echo = false |
|
506 | 506 | sqlalchemy.pool_recycle = 3600 |
|
507 | 507 | |
|
508 | 508 | ################################ |
|
509 | 509 | ### ALEMBIC CONFIGURATION #### |
|
510 | 510 | ################################ |
|
511 | 511 | |
|
512 | 512 | [alembic] |
|
513 | 513 | script_location = kallithea:alembic |
|
514 | 514 | |
|
515 | 515 | ################################ |
|
516 | 516 | ### LOGGING CONFIGURATION #### |
|
517 | 517 | ################################ |
|
518 | 518 | |
|
519 | 519 | [loggers] |
|
520 | keys = root, routes, kallithea, sqlalchemy, gearbox, beaker, templates, whoosh_indexer | |
|
520 | keys = root, routes, kallithea, sqlalchemy, tg, gearbox, beaker, templates, whoosh_indexer | |
|
521 | 521 | |
|
522 | 522 | [handlers] |
|
523 | 523 | keys = console, console_sql |
|
524 | 524 | |
|
525 | 525 | [formatters] |
|
526 | 526 | keys = generic, color_formatter, color_formatter_sql |
|
527 | 527 | |
|
528 | 528 | ############# |
|
529 | 529 | ## LOGGERS ## |
|
530 | 530 | ############# |
|
531 | 531 | |
|
532 | 532 | [logger_root] |
|
533 | 533 | level = NOTSET |
|
534 | 534 | handlers = console |
|
535 | 535 | |
|
536 | 536 | [logger_routes] |
|
537 | 537 | level = DEBUG |
|
538 | 538 | handlers = |
|
539 | 539 | qualname = routes.middleware |
|
540 | 540 | ## "level = DEBUG" logs the route matched and routing variables. |
|
541 | 541 | propagate = 1 |
|
542 | 542 | |
|
543 | 543 | [logger_beaker] |
|
544 | 544 | level = DEBUG |
|
545 | 545 | handlers = |
|
546 | 546 | qualname = beaker.container |
|
547 | 547 | propagate = 1 |
|
548 | 548 | |
|
549 | 549 | [logger_templates] |
|
550 | 550 | level = INFO |
|
551 | 551 | handlers = |
|
552 | 552 | qualname = pylons.templating |
|
553 | 553 | propagate = 1 |
|
554 | 554 | |
|
555 | 555 | [logger_kallithea] |
|
556 | 556 | level = DEBUG |
|
557 | 557 | handlers = |
|
558 | 558 | qualname = kallithea |
|
559 | 559 | propagate = 1 |
|
560 | 560 | |
|
561 | [logger_tg] | |
|
562 | level = DEBUG | |
|
563 | handlers = | |
|
564 | qualname = tg | |
|
565 | propagate = 1 | |
|
566 | ||
|
561 | 567 | [logger_gearbox] |
|
562 | 568 | level = DEBUG |
|
563 | 569 | handlers = |
|
564 | 570 | qualname = gearbox |
|
565 | 571 | propagate = 1 |
|
566 | 572 | |
|
567 | 573 | [logger_sqlalchemy] |
|
568 | 574 | level = INFO |
|
569 | 575 | handlers = console_sql |
|
570 | 576 | qualname = sqlalchemy.engine |
|
571 | 577 | propagate = 0 |
|
572 | 578 | |
|
573 | 579 | [logger_whoosh_indexer] |
|
574 | 580 | level = DEBUG |
|
575 | 581 | handlers = |
|
576 | 582 | qualname = whoosh_indexer |
|
577 | 583 | propagate = 1 |
|
578 | 584 | |
|
579 | 585 | ############## |
|
580 | 586 | ## HANDLERS ## |
|
581 | 587 | ############## |
|
582 | 588 | |
|
583 | 589 | [handler_console] |
|
584 | 590 | class = StreamHandler |
|
585 | 591 | args = (sys.stderr,) |
|
586 | 592 | #level = INFO |
|
587 | 593 | level = DEBUG |
|
588 | 594 | #formatter = generic |
|
589 | 595 | formatter = color_formatter |
|
590 | 596 | |
|
591 | 597 | [handler_console_sql] |
|
592 | 598 | class = StreamHandler |
|
593 | 599 | args = (sys.stderr,) |
|
594 | 600 | level = WARN |
|
595 | 601 | #formatter = generic |
|
596 | 602 | formatter = color_formatter_sql |
|
597 | 603 | |
|
598 | 604 | ################ |
|
599 | 605 | ## FORMATTERS ## |
|
600 | 606 | ################ |
|
601 | 607 | |
|
602 | 608 | [formatter_generic] |
|
603 | 609 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
604 | 610 | datefmt = %Y-%m-%d %H:%M:%S |
|
605 | 611 | |
|
606 | 612 | [formatter_color_formatter] |
|
607 | 613 | class = kallithea.lib.colored_formatter.ColorFormatter |
|
608 | 614 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
609 | 615 | datefmt = %Y-%m-%d %H:%M:%S |
|
610 | 616 | |
|
611 | 617 | [formatter_color_formatter_sql] |
|
612 | 618 | class = kallithea.lib.colored_formatter.ColorFormatterSql |
|
613 | 619 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
614 | 620 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,171 +1,172 b'' | |||
|
1 | 1 | #!/usr/bin/env python2 |
|
2 | 2 | # -*- coding: utf-8 -*- |
|
3 | 3 | import os |
|
4 | 4 | import sys |
|
5 | 5 | import platform |
|
6 | 6 | |
|
7 | 7 | if sys.version_info < (2, 6) or sys.version_info >= (3,): |
|
8 | 8 | raise Exception('Kallithea requires python 2.6 or 2.7') |
|
9 | 9 | |
|
10 | 10 | |
|
11 | 11 | here = os.path.abspath(os.path.dirname(__file__)) |
|
12 | 12 | |
|
13 | 13 | |
|
14 | 14 | def _get_meta_var(name, data, callback_handler=None): |
|
15 | 15 | import re |
|
16 | 16 | matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data) |
|
17 | 17 | if matches: |
|
18 | 18 | if not callable(callback_handler): |
|
19 | 19 | callback_handler = lambda v: v |
|
20 | 20 | |
|
21 | 21 | return callback_handler(eval(matches.groups()[0])) |
|
22 | 22 | |
|
23 | 23 | _meta = open(os.path.join(here, 'kallithea', '__init__.py'), 'rb') |
|
24 | 24 | _metadata = _meta.read() |
|
25 | 25 | _meta.close() |
|
26 | 26 | |
|
27 | 27 | callback = lambda V: ('.'.join(map(str, V[:3])) + '.'.join(V[3:])) |
|
28 | 28 | __version__ = _get_meta_var('VERSION', _metadata, callback) |
|
29 | 29 | __license__ = _get_meta_var('__license__', _metadata) |
|
30 | 30 | __author__ = _get_meta_var('__author__', _metadata) |
|
31 | 31 | __url__ = _get_meta_var('__url__', _metadata) |
|
32 | 32 | # defines current platform |
|
33 | 33 | __platform__ = platform.system() |
|
34 | 34 | |
|
35 | 35 | is_windows = __platform__ in ['Windows'] |
|
36 | 36 | |
|
37 | 37 | requirements = [ |
|
38 | 38 | "alembic>=0.8.0,<0.9", |
|
39 | 39 | "GearBox<1", |
|
40 | 40 | "waitress>=0.8.8,<1.0", |
|
41 | 41 | "webob>=1.7,<2", |
|
42 | "Pylons>=1.0.0,<=1.0.2", | |
|
42 | "backlash >= 0.1.1, < 1.0.0", | |
|
43 | "TurboGears2 >= 2.3.10, < 3.0.0", | |
|
44 | "tgext.routes >= 0.2.0, < 1.0.0", | |
|
43 | 45 | "Beaker>=1.7.0,<2", |
|
44 | 46 | "WebHelpers==1.3", |
|
45 | 47 | "formencode>=1.2.4,<=1.2.6", |
|
46 | 48 | "SQLAlchemy>=1.0,<1.1", |
|
47 | 49 | "Mako>=0.9.0,<=1.0.0", |
|
48 | 50 | "pygments>=1.5", |
|
49 | 51 | "whoosh>=2.5.0,<=2.5.7", |
|
50 | 52 | "celery>=3.1,<3.2", |
|
51 | 53 | "babel>=0.9.6,<2.4", |
|
52 | 54 | "python-dateutil>=1.5.0,<2.0.0", |
|
53 | 55 | "markdown==2.2.1", |
|
54 | 56 | "docutils>=0.8.1", |
|
55 | 57 | "URLObject==2.3.4", |
|
56 | 58 | "Routes==1.13", |
|
57 | 59 | "dulwich>=0.14.1", |
|
58 | 60 | "mercurial>=2.9,<4.2", |
|
61 | "decorator >= 3.3.2", | |
|
62 | "Paste >= 2.0.3, < 3.0", | |
|
59 | 63 | ] |
|
60 | 64 | |
|
61 | 65 | if sys.version_info < (2, 7): |
|
62 | 66 | requirements.append("importlib==1.0.1") |
|
63 | 67 | requirements.append("argparse") |
|
64 | 68 | |
|
65 | 69 | if not is_windows: |
|
66 | 70 | requirements.append("bcrypt>=3.1.0") |
|
67 | 71 | |
|
68 | 72 | dependency_links = [ |
|
69 | 73 | ] |
|
70 | 74 | |
|
71 | 75 | classifiers = [ |
|
72 | 76 | 'Development Status :: 4 - Beta', |
|
73 | 77 | 'Environment :: Web Environment', |
|
74 | 78 | 'Framework :: Pylons', |
|
75 | 79 | 'Intended Audience :: Developers', |
|
76 | 80 | 'License :: OSI Approved :: GNU General Public License (GPL)', |
|
77 | 81 | 'Operating System :: OS Independent', |
|
78 | 82 | 'Programming Language :: Python', |
|
79 | 83 | 'Programming Language :: Python :: 2.6', |
|
80 | 84 | 'Programming Language :: Python :: 2.7', |
|
81 | 85 | 'Topic :: Software Development :: Version Control', |
|
82 | 86 | ] |
|
83 | 87 | |
|
84 | 88 | |
|
85 | 89 | # additional files from project that goes somewhere in the filesystem |
|
86 | 90 | # relative to sys.prefix |
|
87 | 91 | data_files = [] |
|
88 | 92 | |
|
89 | 93 | description = ('Kallithea is a fast and powerful management tool ' |
|
90 | 94 | 'for Mercurial and Git with a built-in push/pull server, ' |
|
91 | 95 | 'full text search and code-review.') |
|
92 | 96 | |
|
93 | 97 | keywords = ' '.join([ |
|
94 | 98 | 'kallithea', 'mercurial', 'git', 'code review', |
|
95 | 99 | 'repo groups', 'ldap', 'repository management', 'hgweb replacement', |
|
96 | 100 | 'hgwebdir', 'gitweb replacement', 'serving hgweb', |
|
97 | 101 | ]) |
|
98 | 102 | |
|
99 | 103 | # long description |
|
100 | 104 | README_FILE = 'README.rst' |
|
101 | 105 | try: |
|
102 | 106 | long_description = open(README_FILE).read() |
|
103 | 107 | except IOError as err: |
|
104 | 108 | sys.stderr.write( |
|
105 | 109 | "[WARNING] Cannot find file specified as long_description (%s)\n" |
|
106 | 110 | % README_FILE |
|
107 | 111 | ) |
|
108 | 112 | long_description = description |
|
109 | 113 | |
|
110 | 114 | import setuptools |
|
111 | 115 | |
|
112 | 116 | # monkey patch setuptools to use distutils owner/group functionality |
|
113 | 117 | from setuptools.command import sdist |
|
114 | 118 | sdist_org = sdist.sdist |
|
115 | 119 | class sdist_new(sdist_org): |
|
116 | 120 | def initialize_options(self): |
|
117 | 121 | sdist_org.initialize_options(self) |
|
118 | 122 | self.owner = self.group = 'root' |
|
119 | 123 | sdist.sdist = sdist_new |
|
120 | 124 | |
|
121 | 125 | packages = setuptools.find_packages(exclude=['ez_setup']) |
|
122 | 126 | |
|
123 | 127 | setuptools.setup( |
|
124 | 128 | name='Kallithea', |
|
125 | 129 | version=__version__, |
|
126 | 130 | description=description, |
|
127 | 131 | long_description=long_description, |
|
128 | 132 | keywords=keywords, |
|
129 | 133 | license=__license__, |
|
130 | 134 | author=__author__, |
|
131 | 135 | author_email='kallithea@sfconservancy.org', |
|
132 | 136 | dependency_links=dependency_links, |
|
133 | 137 | url=__url__, |
|
134 | 138 | install_requires=requirements, |
|
135 | 139 | classifiers=classifiers, |
|
136 | 140 | data_files=data_files, |
|
137 | 141 | packages=packages, |
|
138 | 142 | include_package_data=True, |
|
139 | 143 | message_extractors={'kallithea': [ |
|
140 | 144 | ('**.py', 'python', None), |
|
141 | 145 | ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}), |
|
142 | 146 | ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}), |
|
143 | 147 | ('public/**', 'ignore', None)]}, |
|
144 | 148 | zip_safe=False, |
|
145 | 149 | entry_points=""" |
|
146 | 150 | [console_scripts] |
|
147 | 151 | kallithea-api = kallithea.bin.kallithea_api:main |
|
148 | 152 | kallithea-gist = kallithea.bin.kallithea_gist:main |
|
149 | 153 | kallithea-config = kallithea.bin.kallithea_config:main |
|
150 | 154 | |
|
151 | 155 | [paste.app_factory] |
|
152 | 156 | main = kallithea.config.middleware:make_app |
|
153 | 157 | |
|
154 | [paste.app_install] | |
|
155 | main = pylons.util:PylonsInstaller | |
|
156 | ||
|
157 | 158 | [gearbox.commands] |
|
158 | 159 | make-config=kallithea.lib.paster_commands.make_config:Command |
|
159 | 160 | setup-db=kallithea.lib.paster_commands.setup_db:Command |
|
160 | 161 | cleanup-repos=kallithea.lib.paster_commands.cleanup:Command |
|
161 | 162 | update-repoinfo=kallithea.lib.paster_commands.update_repoinfo:Command |
|
162 | 163 | make-rcext=kallithea.lib.paster_commands.make_rcextensions:Command |
|
163 | 164 | repo-scan=kallithea.lib.paster_commands.repo_scan:Command |
|
164 | 165 | cache-keys=kallithea.lib.paster_commands.cache_keys:Command |
|
165 | 166 | ishell=kallithea.lib.paster_commands.ishell:Command |
|
166 | 167 | make-index=kallithea.lib.paster_commands.make_index:Command |
|
167 | 168 | upgrade-db=kallithea.lib.dbmigrate:UpgradeDb |
|
168 | 169 | celeryd=kallithea.lib.paster_commands.celeryd:Command |
|
169 | 170 | install-iis=kallithea.lib.paster_commands.install_iis:Command |
|
170 | 171 | """, |
|
171 | 172 | ) |
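
A note on the metadata handling at the top of this setup.py: _get_meta_var() extracts assignments such as VERSION and __license__ from kallithea/__init__.py with a regex plus eval(), so setup.py never has to import the package (and therefore never pulls in its runtime dependencies just to learn the version). A self-contained sketch of the same pattern, with sample data invented for illustration:

    # Sketch of the regex-plus-eval metadata extraction used above.
    # The _metadata string here is made up; the real code reads
    # kallithea/__init__.py from disk instead.
    import re

    _metadata = "VERSION = (0, 3, 2)\n__license__ = 'GPLv3'\n"

    def _get_meta_var(name, data):
        match = re.search(r'%s\s*=\s*(.*)' % name, data)
        if match:
            # eval() is acceptable here only because the input is the
            # project's own source file, not arbitrary user input.
            return eval(match.group(1))

    print(_get_meta_var('VERSION', _metadata))      # (0, 3, 2)
    print(_get_meta_var('__license__', _metadata))  # GPLv3
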