Show More
@@ -1,591 +1,591 b'' | |||
|
1 | 1 | ################################################################################ |
|
2 | 2 | ################################################################################ |
|
3 | 3 | # Kallithea - Development config: # |
|
4 | 4 | # listening on *:5000 # |
|
5 | 5 | # sqlite and kallithea.db # |
|
6 | 6 | # initial_repo_scan = true # |
|
7 | 7 | # set debug = true # |
|
8 | 8 | # verbose and colorful logging # |
|
9 | 9 | # # |
|
10 | 10 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
11 | 11 | ################################################################################ |
|
12 | 12 | ################################################################################ |
|
13 | 13 | |
|
14 | 14 | [DEFAULT] |
|
15 | 15 | debug = true |
|
16 | 16 | pdebug = false |
|
17 | 17 | |
|
18 | 18 | ################################################################################ |
|
19 | 19 | ## Email settings ## |
|
20 | 20 | ## ## |
|
21 | 21 | ## Refer to the documentation ("Email settings") for more details. ## |
|
22 | 22 | ## ## |
|
23 | 23 | ## It is recommended to use a valid sender address that passes access ## |
|
24 | 24 | ## validation and spam filtering in mail servers. ## |
|
25 | 25 | ################################################################################ |
|
26 | 26 | |
|
27 | 27 | ## 'From' header for application emails. You can optionally add a name. |
|
28 | 28 | ## Default: |
|
29 | 29 | #app_email_from = Kallithea |
|
30 | 30 | ## Examples: |
|
31 | 31 | #app_email_from = Kallithea <kallithea-noreply@example.com> |
|
32 | 32 | #app_email_from = kallithea-noreply@example.com |
|
33 | 33 | |
|
34 | 34 | ## Subject prefix for application emails. |
|
35 | 35 | ## A space between this prefix and the real subject is automatically added. |
|
36 | 36 | ## Default: |
|
37 | 37 | #email_prefix = |
|
38 | 38 | ## Example: |
|
39 | 39 | #email_prefix = [Kallithea] |
|
40 | 40 | |
|
41 | 41 | ## Recipients for error emails and fallback recipients of application mails. |
|
42 | 42 | ## Multiple addresses can be specified, space-separated. |
|
43 | 43 | ## Only addresses are allowed, do not add any name part. |
|
44 | 44 | ## Default: |
|
45 | 45 | #email_to = |
|
46 | 46 | ## Examples: |
|
47 | 47 | #email_to = admin@example.com |
|
48 | 48 | #email_to = admin@example.com another_admin@example.com |
|
49 | 49 | |
|
50 | 50 | ## 'From' header for error emails. You can optionally add a name. |
|
51 | 51 | ## Default: |
|
52 | 52 | #error_email_from = pylons@yourapp.com |
|
53 | 53 | ## Examples: |
|
54 | 54 | #error_email_from = Kallithea Errors <kallithea-noreply@example.com> |
|
55 | 55 | #error_email_from = paste_error@example.com |
|
56 | 56 | |
|
57 | 57 | ## SMTP server settings |
|
58 | 58 | ## Only smtp_server is mandatory. All other settings take the specified default |
|
59 | 59 | ## values. |
|
60 |
#smtp_server = |
|
|
60 | #smtp_server = smtp.example.com | |
|
61 | 61 | #smtp_username = |
|
62 | 62 | #smtp_password = |
|
63 | 63 | #smtp_port = 25 |
|
64 | 64 | #smtp_use_tls = false |
|
65 | 65 | #smtp_use_ssl = false |
|
66 | 66 | ## SMTP authentication parameters to use (e.g. LOGIN PLAIN CRAM-MD5, etc.). |
|
67 | 67 | ## If empty, use any of the authentication parameters supported by the server. |
|
68 | 68 | #smtp_auth = |
|
69 | 69 | |
|
70 | 70 | [server:main] |
|
71 | 71 | ## PASTE ## |
|
72 | 72 | #use = egg:Paste#http |
|
73 | 73 | ## nr of worker threads to spawn |
|
74 | 74 | #threadpool_workers = 5 |
|
75 | 75 | ## max request before thread respawn |
|
76 | 76 | #threadpool_max_requests = 10 |
|
77 | 77 | ## option to use threads instead of processes |
|
78 | 78 | #use_threadpool = true |
|
79 | 79 | |
|
80 | 80 | ## WAITRESS ## |
|
81 | 81 | use = egg:waitress#main |
|
82 | 82 | ## number of worker threads |
|
83 | 83 | threads = 5 |
|
84 | 84 | ## MAX BODY SIZE 100GB |
|
85 | 85 | max_request_body_size = 107374182400 |
|
86 | 86 | ## use poll instead of select, fixes fd limits, may not work on old |
|
87 | 87 | ## windows systems. |
|
88 | 88 | #asyncore_use_poll = True |
|
89 | 89 | |
|
90 | 90 | ## GUNICORN ## |
|
91 | 91 | #use = egg:gunicorn#main |
|
92 | 92 | ## number of process workers. You must set `instance_id = *` when this option |
|
93 | 93 | ## is set to more than one worker |
|
94 | 94 | #workers = 1 |
|
95 | 95 | ## process name |
|
96 | 96 | #proc_name = kallithea |
|
97 | 97 | ## type of worker class, one of sync, eventlet, gevent, tornado |
|
98 | 98 | ## for bigger setups, using a worker class other than sync is recommended |
|
99 | 99 | #worker_class = sync |
|
100 | 100 | #max_requests = 1000 |
|
101 | 101 | ## amount of time a worker can handle a request before it gets killed and |
|
102 | 102 | ## restarted |
|
103 | 103 | #timeout = 3600 |
|
104 | 104 | |
|
105 | 105 | ## UWSGI ## |
|
106 | 106 | ## run with uwsgi --ini-paste-logged <inifile.ini> |
|
107 | 107 | #[uwsgi] |
|
108 | 108 | #socket = /tmp/uwsgi.sock |
|
109 | 109 | #master = true |
|
110 | 110 | #http = 127.0.0.1:5000 |
|
111 | 111 | |
|
112 | 112 | ## run as a daemon and redirect all output to a file |
|
113 | 113 | #daemonize = ./uwsgi_kallithea.log |
|
114 | 114 | |
|
115 | 115 | ## master process PID |
|
116 | 116 | #pidfile = ./uwsgi_kallithea.pid |
|
117 | 117 | |
|
118 | 118 | ## stats server with workers statistics, use uwsgitop |
|
119 | 119 | ## for monitoring, `uwsgitop 127.0.0.1:1717` |
|
120 | 120 | #stats = 127.0.0.1:1717 |
|
121 | 121 | #memory-report = true |
|
122 | 122 | |
|
123 | 123 | ## log 5XX errors |
|
124 | 124 | #log-5xx = true |
|
125 | 125 | |
|
126 | 126 | ## Set the socket listen queue size. |
|
127 | 127 | #listen = 256 |
|
128 | 128 | |
|
129 | 129 | ## Gracefully Reload workers after the specified amount of managed requests |
|
130 | 130 | ## (avoid memory leaks). |
|
131 | 131 | #max-requests = 1000 |
|
132 | 132 | |
|
133 | 133 | ## enable large buffers |
|
134 | 134 | #buffer-size = 65535 |
|
135 | 135 | |
|
136 | 136 | ## socket and http timeouts ## |
|
137 | 137 | #http-timeout = 3600 |
|
138 | 138 | #socket-timeout = 3600 |
|
139 | 139 | |
|
140 | 140 | ## Log requests slower than the specified number of milliseconds. |
|
141 | 141 | #log-slow = 10 |
|
142 | 142 | |
|
143 | 143 | ## Exit if no app can be loaded. |
|
144 | 144 | #need-app = true |
|
145 | 145 | |
|
146 | 146 | ## Set lazy mode (load apps in workers instead of master). |
|
147 | 147 | #lazy = true |
|
148 | 148 | |
|
149 | 149 | ## scaling ## |
|
150 | 150 | ## set cheaper algorithm to use, if not set default will be used |
|
151 | 151 | #cheaper-algo = spare |
|
152 | 152 | |
|
153 | 153 | ## minimum number of workers to keep at all times |
|
154 | 154 | #cheaper = 1 |
|
155 | 155 | |
|
156 | 156 | ## number of workers to spawn at startup |
|
157 | 157 | #cheaper-initial = 1 |
|
158 | 158 | |
|
159 | 159 | ## maximum number of workers that can be spawned |
|
160 | 160 | #workers = 4 |
|
161 | 161 | |
|
162 | 162 | ## how many workers should be spawned at a time |
|
163 | 163 | #cheaper-step = 1 |
|
164 | 164 | |
|
165 | 165 | ## COMMON ## |
|
166 | 166 | host = 0.0.0.0 |
|
167 | 167 | port = 5000 |
|
168 | 168 | |
|
169 | 169 | ## middleware for hosting the WSGI application under a URL prefix |
|
170 | 170 | #[filter:proxy-prefix] |
|
171 | 171 | #use = egg:PasteDeploy#prefix |
|
172 | 172 | #prefix = /<your-prefix> |
|
173 | 173 | |
|
174 | 174 | [app:main] |
|
175 | 175 | use = egg:kallithea |
|
176 | 176 | ## enable proxy prefix middleware |
|
177 | 177 | #filter-with = proxy-prefix |
|
178 | 178 | |
|
179 | 179 | full_stack = true |
|
180 | 180 | static_files = true |
|
181 | 181 | ## Available Languages: |
|
182 | 182 | ## cs de fr hu ja nl_BE pl pt_BR ru sk zh_CN zh_TW |
|
183 | 183 | lang = |
|
184 | 184 | cache_dir = %(here)s/data |
|
185 | 185 | index_dir = %(here)s/data/index |
|
186 | 186 | |
|
187 | 187 | ## perform a full repository scan on each server start, this should be |
|
188 | 188 | ## set to false after first startup, to allow faster server restarts. |
|
189 | 189 | #initial_repo_scan = false |
|
190 | 190 | initial_repo_scan = true |
|
191 | 191 | |
|
192 | 192 | ## uncomment and set this path to use archive download cache |
|
193 | 193 | archive_cache_dir = %(here)s/tarballcache |
|
194 | 194 | |
|
195 | 195 | ## change this to unique ID for security |
|
196 | 196 | app_instance_uuid = development-not-secret |
|
197 | 197 | |
|
198 | 198 | ## cut off limit for large diffs (size in bytes) |
|
199 | 199 | cut_off_limit = 256000 |
|
200 | 200 | |
|
201 | 201 | ## use cache version of scm repo everywhere |
|
202 | 202 | vcs_full_cache = true |
|
203 | 203 | |
|
204 | 204 | ## force https in Kallithea, fixes https redirects, assumes it's always https |
|
205 | 205 | force_https = false |
|
206 | 206 | |
|
207 | 207 | ## use Strict-Transport-Security headers |
|
208 | 208 | use_htsts = false |
|
209 | 209 | |
|
210 | 210 | ## number of commits stats will parse on each iteration |
|
211 | 211 | commit_parse_limit = 25 |
|
212 | 212 | |
|
213 | 213 | ## path to git executable |
|
214 | 214 | git_path = git |
|
215 | 215 | |
|
216 | 216 | ## git rev filter option, --all is the default filter, if you need to |
|
217 | 217 | ## hide all refs in changelog switch this to --branches --tags |
|
218 | 218 | #git_rev_filter = --branches --tags |
|
219 | 219 | |
|
220 | 220 | ## RSS feed options |
|
221 | 221 | rss_cut_off_limit = 256000 |
|
222 | 222 | rss_items_per_page = 10 |
|
223 | 223 | rss_include_diff = false |
|
224 | 224 | |
|
225 | 225 | ## options for showing and identifying changesets |
|
226 | 226 | show_sha_length = 12 |
|
227 | 227 | show_revision_number = false |
|
228 | 228 | |
|
229 | 229 | ## gist URL alias, used to create nicer urls for gist. This should be an |
|
230 | 230 | ## url that does rewrites to _admin/gists/<gistid>. |
|
231 |
## example: http://gist. |
|
|
232 |
## Kallithea url, ie. http[s]:// |
|
|
231 | ## example: http://gist.example.com/{gistid}. Empty means use the internal | |
|
232 | ## Kallithea url, ie. http[s]://kallithea.example.com/_admin/gists/<gistid> | |
|
233 | 233 | gist_alias_url = |
|
234 | 234 | |
|
235 | 235 | ## white list of API enabled controllers. This allows to add list of |
|
236 | 236 | ## controllers to which access will be enabled by api_key. eg: to enable |
|
237 | 237 | ## api access to raw_files put `FilesController:raw`, to enable access to patches |
|
238 | 238 | ## add `ChangesetController:changeset_patch`. This list should be "," separated |
|
239 | 239 | ## Syntax is <ControllerClass>:<function>. Check debug logs for generated names |
|
240 | 240 | ## Recommended settings below are commented out: |
|
241 | 241 | api_access_controllers_whitelist = |
|
242 | 242 | # ChangesetController:changeset_patch, |
|
243 | 243 | # ChangesetController:changeset_raw, |
|
244 | 244 | # FilesController:raw, |
|
245 | 245 | # FilesController:archivefile |
|
246 | 246 | |
|
247 | 247 | ## default encoding used to convert from and to unicode |
|
248 | 248 | ## can also be a comma-separated list of encodings in case of mixed encodings |
|
249 | 249 | default_encoding = utf8 |
|
250 | 250 | |
|
251 | 251 | ## issue tracker for Kallithea (leave blank to disable, absent for default) |
|
252 | 252 | #bugtracker = https://bitbucket.org/conservancy/kallithea/issues |
|
253 | 253 | |
|
254 | 254 | ## issue tracking mapping for commits messages |
|
255 | 255 | ## comment out issue_pat, issue_server, issue_prefix to enable |
|
256 | 256 | |
|
257 | 257 | ## pattern to get the issues from commit messages |
|
258 | 258 | ## default one used here is #<numbers> with a regex passive group for `#` |
|
259 | 259 | ## {id} will be all groups matched from this pattern |
|
260 | 260 | |
|
261 | 261 | issue_pat = (?:\s*#)(\d+) |
|
262 | 262 | |
|
263 | 263 | ## server url to the issue, each {id} will be replaced with match |
|
264 | 264 | ## fetched from the regex and {repo} is replaced with full repository name |
|
265 | 265 | ## including groups {repo_name} is replaced with just name of repo |
|
266 | 266 | |
|
267 |
issue_server_link = https:// |
|
|
267 | issue_server_link = https://issues.example.com/{repo}/issue/{id} | |
|
268 | 268 | |
|
269 | 269 | ## prefix to add to link to indicate it's an url |
|
270 | 270 | ## #314 will be replaced by <issue_prefix><id> |
|
271 | 271 | |
|
272 | 272 | issue_prefix = # |
|
273 | 273 | |
|
274 | 274 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify |
|
275 | 275 | ## multiple patterns, to other issues server, wiki or others |
|
276 | 276 | ## below an example how to create a wiki pattern |
|
277 |
# wiki-some-id -> https:// |
|
|
277 | # wiki-some-id -> https://wiki.example.com/some-id | |
|
278 | 278 | |
|
279 | 279 | #issue_pat_wiki = (?:wiki-)(.+) |
|
280 |
#issue_server_link_wiki = https:// |
|
|
280 | #issue_server_link_wiki = https://wiki.example.com/{id} | |
|
281 | 281 | #issue_prefix_wiki = WIKI- |
|
282 | 282 | |
|
283 | 283 | ## instance-id prefix |
|
284 | 284 | ## a prefix key for this instance used for cache invalidation when running |
|
285 | 285 | ## multiple instances of kallithea, make sure it's globally unique for |
|
286 | 286 | ## all running kallithea instances. Leave empty if you don't use it |
|
287 | 287 | instance_id = |
|
288 | 288 | |
|
289 | 289 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
290 | 290 | ## response is 401 HTTPUnauthorized. Currently Mercurial clients have trouble with |
|
291 | 291 | ## handling that. Set this variable to 403 to return HTTPForbidden |
|
292 | 292 | auth_ret_code = |
|
293 | 293 | |
|
294 | 294 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
295 | 295 | ## codes don't break the transactions while 4XX codes do |
|
296 | 296 | lock_ret_code = 423 |
|
297 | 297 | |
|
298 | 298 | ## allows to change the repository location in settings page |
|
299 | 299 | allow_repo_location_change = True |
|
300 | 300 | |
|
301 | 301 | ## allows to setup custom hooks in settings page |
|
302 | 302 | allow_custom_hooks_settings = True |
|
303 | 303 | |
|
304 | 304 | #################################### |
|
305 | 305 | ### CELERY CONFIG #### |
|
306 | 306 | #################################### |
|
307 | 307 | |
|
308 | 308 | use_celery = false |
|
309 | 309 | broker.host = localhost |
|
310 | 310 | broker.vhost = rabbitmqhost |
|
311 | 311 | broker.port = 5672 |
|
312 | 312 | broker.user = rabbitmq |
|
313 | 313 | broker.password = qweqwe |
|
314 | 314 | |
|
315 | 315 | celery.imports = kallithea.lib.celerylib.tasks |
|
316 | 316 | |
|
317 | 317 | celery.result.backend = amqp |
|
318 | 318 | celery.result.dburi = amqp:// |
|
319 | 319 | celery.result.serialier = json |
|
320 | 320 | |
|
321 | 321 | #celery.send.task.error.emails = true |
|
322 | 322 | #celery.amqp.task.result.expires = 18000 |
|
323 | 323 | |
|
324 | 324 | celeryd.concurrency = 2 |
|
325 | 325 | #celeryd.log.file = celeryd.log |
|
326 | 326 | celeryd.log.level = DEBUG |
|
327 | 327 | celeryd.max.tasks.per.child = 1 |
|
328 | 328 | |
|
329 | 329 | ## tasks will never be sent to the queue, but executed locally instead. |
|
330 | 330 | celery.always.eager = false |
|
331 | 331 | |
|
332 | 332 | #################################### |
|
333 | 333 | ### BEAKER CACHE #### |
|
334 | 334 | #################################### |
|
335 | 335 | |
|
336 | 336 | beaker.cache.data_dir = %(here)s/data/cache/data |
|
337 | 337 | beaker.cache.lock_dir = %(here)s/data/cache/lock |
|
338 | 338 | |
|
339 | 339 | beaker.cache.regions = short_term,long_term,sql_cache_short |
|
340 | 340 | |
|
341 | 341 | beaker.cache.short_term.type = memory |
|
342 | 342 | beaker.cache.short_term.expire = 60 |
|
343 | 343 | beaker.cache.short_term.key_length = 256 |
|
344 | 344 | |
|
345 | 345 | beaker.cache.long_term.type = memory |
|
346 | 346 | beaker.cache.long_term.expire = 36000 |
|
347 | 347 | beaker.cache.long_term.key_length = 256 |
|
348 | 348 | |
|
349 | 349 | beaker.cache.sql_cache_short.type = memory |
|
350 | 350 | beaker.cache.sql_cache_short.expire = 10 |
|
351 | 351 | beaker.cache.sql_cache_short.key_length = 256 |
|
352 | 352 | |
|
353 | 353 | #################################### |
|
354 | 354 | ### BEAKER SESSION #### |
|
355 | 355 | #################################### |
|
356 | 356 | |
|
357 | 357 | ## Name of session cookie. Should be unique for a given host and path, even when running |
|
358 | 358 | ## on different ports. Otherwise, cookie sessions will be shared and messed up. |
|
359 | 359 | beaker.session.key = kallithea |
|
360 | 360 | ## Sessions should always only be accessible by the browser, not directly by JavaScript. |
|
361 | 361 | beaker.session.httponly = true |
|
362 | 362 | ## Session lifetime. 2592000 seconds is 30 days. |
|
363 | 363 | beaker.session.timeout = 2592000 |
|
364 | 364 | |
|
365 | 365 | ## Server secret used with HMAC to ensure integrity of cookies. |
|
366 | 366 | beaker.session.secret = development-not-secret |
|
367 | 367 | ## Further, encrypt the data with AES. |
|
368 | 368 | #beaker.session.encrypt_key = <key_for_encryption> |
|
369 | 369 | #beaker.session.validate_key = <validation_key> |
|
370 | 370 | |
|
371 | 371 | ## Type of storage used for the session, current types are |
|
372 | 372 | ## dbm, file, memcached, database, and memory. |
|
373 | 373 | |
|
374 | 374 | ## File system storage of session data. (default) |
|
375 | 375 | #beaker.session.type = file |
|
376 | 376 | |
|
377 | 377 | ## Cookie only, store all session data inside the cookie. Requires secure secrets. |
|
378 | 378 | #beaker.session.type = cookie |
|
379 | 379 | |
|
380 | 380 | ## Database storage of session data. |
|
381 | 381 | #beaker.session.type = ext:database |
|
382 | 382 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/kallithea |
|
383 | 383 | #beaker.session.table_name = db_session |
|
384 | 384 | |
|
385 | 385 | ############################ |
|
386 | 386 | ## ERROR HANDLING SYSTEMS ## |
|
387 | 387 | ############################ |
|
388 | 388 | |
|
389 | 389 | #################### |
|
390 | 390 | ### [errormator] ### |
|
391 | 391 | #################### |
|
392 | 392 | |
|
393 | 393 | ## Errormator is tailored to work with Kallithea, see |
|
394 | 394 | ## http://errormator.com for details how to obtain an account |
|
395 | 395 | ## you must install python package `errormator_client` to make it work |
|
396 | 396 | |
|
397 | 397 | ## errormator enabled |
|
398 | 398 | errormator = false |
|
399 | 399 | |
|
400 | 400 | errormator.server_url = https://api.errormator.com |
|
401 | 401 | errormator.api_key = YOUR_API_KEY |
|
402 | 402 | |
|
403 | 403 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
404 | 404 | |
|
405 | 405 | ## enables 404 error logging (default False) |
|
406 | 406 | errormator.report_404 = false |
|
407 | 407 | |
|
408 | 408 | ## time in seconds after request is considered being slow (default 1) |
|
409 | 409 | errormator.slow_request_time = 1 |
|
410 | 410 | |
|
411 | 411 | ## record slow requests in application |
|
412 | 412 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
413 | 413 | errormator.slow_requests = true |
|
414 | 414 | |
|
415 | 415 | ## enable hooking to application loggers |
|
416 | 416 | #errormator.logging = true |
|
417 | 417 | |
|
418 | 418 | ## minimum log level for log capture |
|
419 | 419 | #errormator.logging.level = WARNING |
|
420 | 420 | |
|
421 | 421 | ## send logs only from erroneous/slow requests |
|
422 | 422 | ## (saves API quota for intensive logging) |
|
423 | 423 | errormator.logging_on_error = false |
|
424 | 424 | |
|
425 | 425 | ## list of additional keywords that should be grabbed from environ object |
|
426 | 426 | ## can be string with comma separated list of words in lowercase |
|
427 | 427 | ## (by default client will always send following info: |
|
428 | 428 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
429 | 429 | ## start with HTTP*); this list can be extended with additional keywords here |
|
430 | 430 | errormator.environ_keys_whitelist = |
|
431 | 431 | |
|
432 | 432 | ## list of keywords that should be blanked from request object |
|
433 | 433 | ## can be string with comma separated list of words in lowercase |
|
434 | 434 | ## (by default client will always blank keys that contain following words |
|
435 | 435 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
436 | 436 | ## this list can be extended with additional keywords set here |
|
437 | 437 | errormator.request_keys_blacklist = |
|
438 | 438 | |
|
439 | 439 | ## list of namespaces that should be ignored when gathering log entries |
|
440 | 440 | ## can be string with comma separated list of namespaces |
|
441 | 441 | ## (by default the client ignores its own entries: errormator_client.client) |
|
442 | 442 | errormator.log_namespace_blacklist = |
|
443 | 443 | |
|
444 | 444 | ################ |
|
445 | 445 | ### [sentry] ### |
|
446 | 446 | ################ |
|
447 | 447 | |
|
448 | 448 | ## sentry is an alternative open-source error aggregator |
|
449 | 449 | ## you must install python packages `sentry` and `raven` to enable |
|
450 | 450 | |
|
451 | 451 | sentry.dsn = YOUR_DNS |
|
452 | 452 | sentry.servers = |
|
453 | 453 | sentry.name = |
|
454 | 454 | sentry.key = |
|
455 | 455 | sentry.public_key = |
|
456 | 456 | sentry.secret_key = |
|
457 | 457 | sentry.project = |
|
458 | 458 | sentry.site = |
|
459 | 459 | sentry.include_paths = |
|
460 | 460 | sentry.exclude_paths = |
|
461 | 461 | |
|
462 | 462 | ################################################################################ |
|
463 | 463 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
464 | 464 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
465 | 465 | ## execute malicious code after an exception is raised. ## |
|
466 | 466 | ################################################################################ |
|
467 | 467 | #set debug = false |
|
468 | 468 | set debug = true |
|
469 | 469 | |
|
470 | 470 | ################################## |
|
471 | 471 | ### LOGVIEW CONFIG ### |
|
472 | 472 | ################################## |
|
473 | 473 | |
|
474 | 474 | logview.sqlalchemy = #faa |
|
475 | 475 | logview.pylons.templating = #bfb |
|
476 | 476 | logview.pylons.util = #eee |
|
477 | 477 | |
|
478 | 478 | ######################################################### |
|
479 | 479 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### |
|
480 | 480 | ######################################################### |
|
481 | 481 | |
|
482 | 482 | # SQLITE [default] |
|
483 | 483 | sqlalchemy.db1.url = sqlite:///%(here)s/kallithea.db?timeout=60 |
|
484 | 484 | |
|
485 | 485 | # POSTGRESQL |
|
486 | 486 | #sqlalchemy.db1.url = postgresql://user:pass@localhost/kallithea |
|
487 | 487 | |
|
488 | 488 | # MySQL |
|
489 | 489 | #sqlalchemy.db1.url = mysql://user:pass@localhost/kallithea |
|
490 | 490 | |
|
491 | 491 | # see sqlalchemy docs for others |
|
492 | 492 | |
|
493 | 493 | sqlalchemy.db1.echo = false |
|
494 | 494 | sqlalchemy.db1.pool_recycle = 3600 |
|
495 | 495 | sqlalchemy.db1.convert_unicode = true |
|
496 | 496 | |
|
497 | 497 | ################################ |
|
498 | 498 | ### LOGGING CONFIGURATION #### |
|
499 | 499 | ################################ |
|
500 | 500 | |
|
501 | 501 | [loggers] |
|
502 | 502 | keys = root, routes, kallithea, sqlalchemy, beaker, templates, whoosh_indexer |
|
503 | 503 | |
|
504 | 504 | [handlers] |
|
505 | 505 | keys = console, console_sql |
|
506 | 506 | |
|
507 | 507 | [formatters] |
|
508 | 508 | keys = generic, color_formatter, color_formatter_sql |
|
509 | 509 | |
|
510 | 510 | ############# |
|
511 | 511 | ## LOGGERS ## |
|
512 | 512 | ############# |
|
513 | 513 | |
|
514 | 514 | [logger_root] |
|
515 | 515 | level = NOTSET |
|
516 | 516 | handlers = console |
|
517 | 517 | |
|
518 | 518 | [logger_routes] |
|
519 | 519 | level = DEBUG |
|
520 | 520 | handlers = |
|
521 | 521 | qualname = routes.middleware |
|
522 | 522 | ## "level = DEBUG" logs the route matched and routing variables. |
|
523 | 523 | propagate = 1 |
|
524 | 524 | |
|
525 | 525 | [logger_beaker] |
|
526 | 526 | level = DEBUG |
|
527 | 527 | handlers = |
|
528 | 528 | qualname = beaker.container |
|
529 | 529 | propagate = 1 |
|
530 | 530 | |
|
531 | 531 | [logger_templates] |
|
532 | 532 | level = INFO |
|
533 | 533 | handlers = |
|
534 | 534 | qualname = pylons.templating |
|
535 | 535 | propagate = 1 |
|
536 | 536 | |
|
537 | 537 | [logger_kallithea] |
|
538 | 538 | level = DEBUG |
|
539 | 539 | handlers = |
|
540 | 540 | qualname = kallithea |
|
541 | 541 | propagate = 1 |
|
542 | 542 | |
|
543 | 543 | [logger_sqlalchemy] |
|
544 | 544 | level = INFO |
|
545 | 545 | handlers = console_sql |
|
546 | 546 | qualname = sqlalchemy.engine |
|
547 | 547 | propagate = 0 |
|
548 | 548 | |
|
549 | 549 | [logger_whoosh_indexer] |
|
550 | 550 | level = DEBUG |
|
551 | 551 | handlers = |
|
552 | 552 | qualname = whoosh_indexer |
|
553 | 553 | propagate = 1 |
|
554 | 554 | |
|
555 | 555 | ############## |
|
556 | 556 | ## HANDLERS ## |
|
557 | 557 | ############## |
|
558 | 558 | |
|
559 | 559 | [handler_console] |
|
560 | 560 | class = StreamHandler |
|
561 | 561 | args = (sys.stderr,) |
|
562 | 562 | #level = INFO |
|
563 | 563 | #formatter = generic |
|
564 | 564 | level = DEBUG |
|
565 | 565 | formatter = color_formatter |
|
566 | 566 | |
|
567 | 567 | [handler_console_sql] |
|
568 | 568 | class = StreamHandler |
|
569 | 569 | args = (sys.stderr,) |
|
570 | 570 | #level = WARN |
|
571 | 571 | #formatter = generic |
|
572 | 572 | level = DEBUG |
|
573 | 573 | formatter = color_formatter_sql |
|
574 | 574 | |
|
575 | 575 | ################ |
|
576 | 576 | ## FORMATTERS ## |
|
577 | 577 | ################ |
|
578 | 578 | |
|
579 | 579 | [formatter_generic] |
|
580 | 580 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
581 | 581 | datefmt = %Y-%m-%d %H:%M:%S |
|
582 | 582 | |
|
583 | 583 | [formatter_color_formatter] |
|
584 | 584 | class = kallithea.lib.colored_formatter.ColorFormatter |
|
585 | 585 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
586 | 586 | datefmt = %Y-%m-%d %H:%M:%S |
|
587 | 587 | |
|
588 | 588 | [formatter_color_formatter_sql] |
|
589 | 589 | class = kallithea.lib.colored_formatter.ColorFormatterSql |
|
590 | 590 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
591 | 591 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,1026 +1,1026 b'' | |||
|
1 | 1 | .. _api: |
|
2 | 2 | |
|
3 | 3 | === |
|
4 | 4 | API |
|
5 | 5 | === |
|
6 | 6 | |
|
7 | 7 | Kallithea has a simple JSON RPC API with a single schema for calling all API |
|
8 | 8 | methods. Everything is available by sending JSON encoded http(s) requests to |
|
9 | 9 | ``<your_server>/_admin/api``. |
|
10 | 10 | |
|
11 | 11 | |
|
12 | 12 | API access for web views |
|
13 | 13 | ++++++++++++++++++++++++ |
|
14 | 14 | |
|
15 | 15 | API access can also be turned on for each web view in Kallithea that is |
|
16 | 16 | decorated with the ``@LoginRequired`` decorator. Some views use |
|
17 | 17 | ``@LoginRequired(api_access=True)`` and are always available. By default only |
|
18 | 18 | RSS/Atom feed views are enabled. Other views are |
|
19 | 19 | only available if they have been whitelisted. Edit the |
|
20 | 20 | ``api_access_controllers_whitelist`` option in your .ini file and define views |
|
21 | 21 | that should have API access enabled. |
|
22 | 22 | |
|
23 | 23 | For example, to enable API access to patch/diff, raw file and archive:: |
|
24 | 24 | |
|
25 | 25 | api_access_controllers_whitelist = |
|
26 | 26 | ChangesetController:changeset_patch, |
|
27 | 27 | ChangesetController:changeset_raw, |
|
28 | 28 | FilesController:raw, |
|
29 | 29 | FilesController:archivefile |
|
30 | 30 | |
|
31 | 31 | After this change, a Kallithea view can be accessed without login by adding a |
|
32 | 32 | GET parameter ``?api_key=<api_key>`` to the URL. |
|
33 | 33 | |
|
34 | 34 | Exposing raw diffs is a good way to integrate with |
|
35 | 35 | third-party services like code review, or build farms that can download archives. |
|
36 | 36 | |
|
37 | 37 | |
|
38 | 38 | API access |
|
39 | 39 | ++++++++++ |
|
40 | 40 | |
|
41 | 41 | Clients must send JSON encoded JSON-RPC requests:: |
|
42 | 42 | |
|
43 | 43 | { |
|
44 | 44 | "id": "<id>",
|
45 | 45 | "api_key": "<api_key>", |
|
46 | 46 | "method": "<method_name>", |
|
47 | 47 | "args": {"<arg_key>": "<arg_val>"} |
|
48 | 48 | } |
|
49 | 49 | |
|
50 | 50 | For example, to pull to a local "CPython" mirror using curl:: |
|
51 | 51 | |
|
52 | curl https://example.com/_admin/api -X POST -H 'content-type:text/plain' \ | |
|
52 | curl https://kallithea.example.com/_admin/api -X POST -H 'content-type:text/plain' \ | |
|
53 | 53 | --data-binary '{"id":1,"api_key":"xe7cdb2v278e4evbdf5vs04v832v0efvcbcve4a3","method":"pull","args":{"repo":"CPython"}}' |
|
54 | 54 | |
|
55 | 55 | In general, provide |
|
56 | 56 | - *id*, a value of any type, can be used to match the response with the request that it is replying to. |
|
57 | 57 | - *api_key*, for authentication and permission validation. |
|
58 | 58 | - *method*, the name of the method to call -- a list of available methods can be found below. |
|
59 | 59 | - *args*, the arguments to pass to the method. |
|
60 | 60 | |
|
61 | 61 | .. note:: |
|
62 | 62 | |
|
63 | 63 | api_key can be found or set on the user account page. |
|
64 | 64 | |
|
65 | 65 | The response to the JSON-RPC API call will always be a JSON structure:: |
|
66 | 66 | |
|
67 | 67 | { |
|
68 | 68 | "id": <id>, # the id that was used in the request |
|
69 | 69 | "result": <result>|null, # JSON formatted result (null on error) |
|
70 | 70 | "error": null|<error_message> # JSON formatted error (null on success) |
|
71 | 71 | } |
|
72 | 72 | |
|
73 | 73 | All responses from the API will be ``HTTP/1.0 200 OK``. If an error occurs, |
|
74 | 74 | the response will have a failure description in *error* and
|
75 | 75 | *result* will be null. |
|
76 | 76 | |
|
77 | 77 | |
|
78 | 78 | API client |
|
79 | 79 | ++++++++++ |
|
80 | 80 | |
|
81 | 81 | Kallithea comes with a ``kallithea-api`` command line tool, providing a convenient |
|
82 | 82 | way to call the JSON-RPC API. |
|
83 | 83 | |
|
84 | 84 | For example, to call ``get_repo``:: |
|
85 | 85 | |
|
86 | 86 | kallithea-api --apihost=<Kallithea URL> --apikey=<API key> get_repo |
|
87 | 87 | |
|
88 | 88 | Calling method get_repo => <Kallithea URL> |
|
89 | 89 | Server response |
|
90 | 90 | ERROR:"Missing non optional `repoid` arg in JSON DATA" |
|
91 | 91 | |
|
92 | 92 | Oops, looks like we forgot to add an argument. Let's try again, now |
|
93 | 93 | providing the ``repoid`` as a parameter:: |
|
94 | 94 | |
|
95 | 95 | kallithea-api --apihost=<Kallithea URL> --apikey=<API key> get_repo repoid:myrepo |
|
96 | 96 | |
|
97 | 97 | Calling method get_repo => <Kallithea URL> |
|
98 | 98 | Server response |
|
99 | 99 | { |
|
100 | 100 | "clone_uri": null, |
|
101 | 101 | "created_on": "2015-08-31T14:55:19.042", |
|
102 | 102 | ... |
|
103 | 103 | |
|
104 | 104 | To avoid specifying ``apihost`` and ``apikey`` every time, run:: |
|
105 | 105 | |
|
106 | 106 | kallithea-api --save-config --apihost=<Kallithea URL> --apikey=<API key> |
|
107 | 107 | |
|
108 | 108 | This will create a ``~/.config/kallithea`` with the specified URL and API key |
|
109 | 109 | so you don't have to specify them every time. |
|
110 | 110 | |
|
111 | 111 | |
|
112 | 112 | API methods |
|
113 | 113 | +++++++++++ |
|
114 | 114 | |
|
115 | 115 | |
|
116 | 116 | pull |
|
117 | 117 | ---- |
|
118 | 118 | |
|
119 | 119 | Pull the given repo from remote location. Can be used to automatically keep |
|
120 | 120 | remote repos up to date. |
|
121 | 121 | This command can only be executed using the api_key of a user with admin rights. |
|
122 | 122 | |
|
123 | 123 | INPUT:: |
|
124 | 124 | |
|
125 | 125 | id : <id_for_response> |
|
126 | 126 | api_key : "<api_key>" |
|
127 | 127 | method : "pull" |
|
128 | 128 | args : { |
|
129 | 129 | "repoid" : "<reponame or repo_id>" |
|
130 | 130 | } |
|
131 | 131 | |
|
132 | 132 | OUTPUT:: |
|
133 | 133 | |
|
134 | 134 | id : <id_given_in_input> |
|
135 | 135 | result : "Pulled from `<reponame>`" |
|
136 | 136 | error : null |
|
137 | 137 | |
|
138 | 138 | rescan_repos |
|
139 | 139 | ------------ |
|
140 | 140 | |
|
141 | 141 | Rescan repositories. If ``remove_obsolete`` is set, |
|
142 | 142 | Kallithea will delete repos that are in the database but not in the filesystem. |
|
143 | 143 | This command can only be executed using the api_key of a user with admin rights. |
|
144 | 144 | |
|
145 | 145 | INPUT:: |
|
146 | 146 | |
|
147 | 147 | id : <id_for_response> |
|
148 | 148 | api_key : "<api_key>" |
|
149 | 149 | method : "rescan_repos" |
|
150 | 150 | args : { |
|
151 | 151 | "remove_obsolete" : "<boolean = Optional(False)>" |
|
152 | 152 | } |
|
153 | 153 | |
|
154 | 154 | OUTPUT:: |
|
155 | 155 | |
|
156 | 156 | id : <id_given_in_input> |
|
157 | 157 | result : "{'added': [<list of names of added repos>], |
|
158 | 158 | 'removed': [<list of names of removed repos>]}" |
|
159 | 159 | error : null |
|
160 | 160 | |
|
161 | 161 | invalidate_cache |
|
162 | 162 | ---------------- |
|
163 | 163 | |
|
164 | 164 | Invalidate the cache for a repository. |
|
165 | 165 | This command can only be executed using the api_key of a user with admin rights, |
|
166 | 166 | or that of a regular user with admin or write access to the repository. |
|
167 | 167 | |
|
168 | 168 | INPUT:: |
|
169 | 169 | |
|
170 | 170 | id : <id_for_response> |
|
171 | 171 | api_key : "<api_key>" |
|
172 | 172 | method : "invalidate_cache" |
|
173 | 173 | args : { |
|
174 | 174 | "repoid" : "<reponame or repo_id>" |
|
175 | 175 | } |
|
176 | 176 | |
|
177 | 177 | OUTPUT:: |
|
178 | 178 | |
|
179 | 179 | id : <id_given_in_input> |
|
180 | 180 | result : "Caches of repository `<reponame>`" |
|
181 | 181 | error : null |
|
182 | 182 | |
|
183 | 183 | lock |
|
184 | 184 | ---- |
|
185 | 185 | |
|
186 | 186 | Set the locking state on the given repository by the given user. |
|
187 | 187 | If the param ``userid`` is skipped, it is set to the ID of the user who is calling this method. |
|
188 | 188 | If param ``locked`` is skipped, the current lock state of the repository is returned. |
|
189 | 189 | This command can only be executed using the api_key of a user with admin rights, or that of a regular user with admin or write access to the repository. |
|
190 | 190 | |
|
191 | 191 | INPUT:: |
|
192 | 192 | |
|
193 | 193 | id : <id_for_response> |
|
194 | 194 | api_key : "<api_key>" |
|
195 | 195 | method : "lock" |
|
196 | 196 | args : { |
|
197 | 197 | "repoid" : "<reponame or repo_id>" |
|
198 | 198 | "userid" : "<user_id or username = Optional(=apiuser)>", |
|
199 | 199 | "locked" : "<bool true|false = Optional(=None)>" |
|
200 | 200 | } |
|
201 | 201 | |
|
202 | 202 | OUTPUT:: |
|
203 | 203 | |
|
204 | 204 | id : <id_given_in_input> |
|
205 | 205 | result : { |
|
206 | 206 | "repo": "<reponame>", |
|
207 | 207 | "locked": "<bool true|false>", |
|
208 | 208 | "locked_since": "<float lock_time>", |
|
209 | 209 | "locked_by": "<username>", |
|
210 | 210 | "msg": "User `<username>` set lock state for repo `<reponame>` to `<false|true>`" |
|
211 | 211 | } |
|
212 | 212 | error : null |
|
213 | 213 | |
|
214 | 214 | get_ip |
|
215 | 215 | ------ |
|
216 | 216 | |
|
217 | 217 | Return IP address as seen from Kallithea server, together with all |
|
218 | 218 | defined IP addresses for given user. |
|
219 | 219 | This command can only be executed using the api_key of a user with admin rights. |
|
220 | 220 | |
|
221 | 221 | INPUT:: |
|
222 | 222 | |
|
223 | 223 | id : <id_for_response> |
|
224 | 224 | api_key : "<api_key>" |
|
225 | 225 | method : "get_ip" |
|
226 | 226 | args : { |
|
227 | 227 | "userid" : "<user_id or username>", |
|
228 | 228 | } |
|
229 | 229 | |
|
230 | 230 | OUTPUT:: |
|
231 | 231 | |
|
232 | 232 | id : <id_given_in_input> |
|
233 | 233 | result : { |
|
234 | 234 | "ip_addr_server": <ip_from_clien>", |
|
235 | 235 | "user_ips": [ |
|
236 | 236 | { |
|
237 | 237 | "ip_addr": "<ip_with_mask>", |
|
238 | 238 | "ip_range": ["<start_ip>", "<end_ip>"], |
|
239 | 239 | }, |
|
240 | 240 | ... |
|
241 | 241 | ] |
|
242 | 242 | } |
|
243 | 243 | |
|
244 | 244 | error : null |
|
245 | 245 | |
|
246 | 246 | get_user |
|
247 | 247 | -------- |
|
248 | 248 | |
|
249 | 249 | Get a user by username or userid. The result is empty if user can't be found. |
|
250 | 250 | If userid param is skipped, it is set to id of user who is calling this method. |
|
251 | 251 | Any userid can be specified when the command is executed using the api_key of a user with admin rights. |
|
252 | 252 | Regular users can only specify their own userid.
|
253 | 253 | |
|
254 | 254 | INPUT:: |
|
255 | 255 | |
|
256 | 256 | id : <id_for_response> |
|
257 | 257 | api_key : "<api_key>" |
|
258 | 258 | method : "get_user" |
|
259 | 259 | args : { |
|
260 | 260 | "userid" : "<username or user_id Optional(=apiuser)>" |
|
261 | 261 | } |
|
262 | 262 | |
|
263 | 263 | OUTPUT:: |
|
264 | 264 | |
|
265 | 265 | id : <id_given_in_input> |
|
266 | 266 | result: None if user does not exist or |
|
267 | 267 | { |
|
268 | 268 | "user_id" : "<user_id>", |
|
269 | 269 | "api_key" : "<api_key>", |
|
270 | 270 | "username" : "<username>", |
|
271 | 271 | "firstname": "<firstname>", |
|
272 | 272 | "lastname" : "<lastname>", |
|
273 | 273 | "email" : "<email>", |
|
274 | 274 | "emails": "<list_of_all_additional_emails>", |
|
275 | 275 | "ip_addresses": "<list_of_ip_addresses_for_user>", |
|
276 | 276 | "active" : "<bool>", |
|
277 | 277 | "admin" : "<bool>", |
|
278 | 278 | "ldap_dn" : "<ldap_dn>", |
|
279 | 279 | "last_login": "<last_login>", |
|
280 | 280 | "permissions": { |
|
281 | 281 | "global": ["hg.create.repository", |
|
282 | 282 | "repository.read", |
|
283 | 283 | "hg.register.manual_activate"], |
|
284 | 284 | "repositories": {"repo1": "repository.none"}, |
|
285 | 285 | "repositories_groups": {"Group1": "group.read"} |
|
286 | 286 | }, |
|
287 | 287 | } |
|
288 | 288 | error: null |
|
289 | 289 | |
|
290 | 290 | get_users |
|
291 | 291 | --------- |
|
292 | 292 | |
|
293 | 293 | List all existing users. |
|
294 | 294 | This command can only be executed using the api_key of a user with admin rights. |
|
295 | 295 | |
|
296 | 296 | INPUT:: |
|
297 | 297 | |
|
298 | 298 | id : <id_for_response> |
|
299 | 299 | api_key : "<api_key>" |
|
300 | 300 | method : "get_users" |
|
301 | 301 | args : { } |
|
302 | 302 | |
|
303 | 303 | OUTPUT:: |
|
304 | 304 | |
|
305 | 305 | id : <id_given_in_input> |
|
306 | 306 | result: [ |
|
307 | 307 | { |
|
308 | 308 | "user_id" : "<user_id>", |
|
309 | 309 | "api_key" : "<api_key>", |
|
310 | 310 | "username" : "<username>", |
|
311 | 311 | "firstname": "<firstname>", |
|
312 | 312 | "lastname" : "<lastname>", |
|
313 | 313 | "email" : "<email>", |
|
314 | 314 | "emails": "<list_of_all_additional_emails>", |
|
315 | 315 | "ip_addresses": "<list_of_ip_addresses_for_user>", |
|
316 | 316 | "active" : "<bool>", |
|
317 | 317 | "admin" : "<bool>", |
|
318 | 318 | "ldap_dn" : "<ldap_dn>", |
|
319 | 319 | "last_login": "<last_login>", |
|
320 | 320 | }, |
|
321 | 321 | … |
|
322 | 322 | ] |
|
323 | 323 | error: null |
|
324 | 324 | |
|
325 | 325 | .. _create-user: |
|
326 | 326 | |
|
327 | 327 | create_user |
|
328 | 328 | ----------- |
|
329 | 329 | |
|
330 | 330 | Create new user. |
|
331 | 331 | This command can only be executed using the api_key of a user with admin rights. |
|
332 | 332 | |
|
333 | 333 | INPUT:: |
|
334 | 334 | |
|
335 | 335 | id : <id_for_response> |
|
336 | 336 | api_key : "<api_key>" |
|
337 | 337 | method : "create_user" |
|
338 | 338 | args : { |
|
339 | 339 | "username" : "<username>", |
|
340 | 340 | "email" : "<useremail>", |
|
341 | 341 | "password" : "<password = Optional(None)>", |
|
342 | 342 | "firstname" : "<firstname> = Optional(None)", |
|
343 | 343 | "lastname" : "<lastname> = Optional(None)", |
|
344 | 344 | "active" : "<bool> = Optional(True)", |
|
345 | 345 | "admin" : "<bool> = Optional(False)", |
|
346 | 346 | "ldap_dn" : "<ldap_dn> = Optional(None)" |
|
347 | 347 | } |
|
348 | 348 | |
|
349 | 349 | OUTPUT:: |
|
350 | 350 | |
|
351 | 351 | id : <id_given_in_input> |
|
352 | 352 | result: { |
|
353 | 353 | "msg" : "created new user `<username>`", |
|
354 | 354 | "user": { |
|
355 | 355 | "user_id" : "<user_id>", |
|
356 | 356 | "username" : "<username>", |
|
357 | 357 | "firstname": "<firstname>", |
|
358 | 358 | "lastname" : "<lastname>", |
|
359 | 359 | "email" : "<email>", |
|
360 | 360 | "emails": "<list_of_all_additional_emails>", |
|
361 | 361 | "active" : "<bool>", |
|
362 | 362 | "admin" : "<bool>", |
|
363 | 363 | "ldap_dn" : "<ldap_dn>", |
|
364 | 364 | "last_login": "<last_login>", |
|
365 | 365 | }, |
|
366 | 366 | } |
|
367 | 367 | error: null |
|
368 | 368 | |
|
369 | 369 | Example:: |
|
370 | 370 | |
|
371 | 371 | kallithea-api create_user username:bent email:bent@example.com firstname:Bent lastname:Bentsen extern_type:ldap extern_name:uid=bent,dc=example,dc=com |
|
372 | 372 | |
|
373 | 373 | update_user |
|
374 | 374 | ----------- |
|
375 | 375 | |
|
376 | 376 | Update the given user if such user exists. |
|
377 | 377 | This command can only be executed using the api_key of a user with admin rights. |
|
378 | 378 | |
|
379 | 379 | INPUT:: |
|
380 | 380 | |
|
381 | 381 | id : <id_for_response> |
|
382 | 382 | api_key : "<api_key>" |
|
383 | 383 | method : "update_user" |
|
384 | 384 | args : { |
|
385 | 385 | "userid" : "<user_id or username>", |
|
386 | 386 | "username" : "<username> = Optional(None)", |
|
387 | 387 | "email" : "<useremail> = Optional(None)", |
|
388 | 388 | "password" : "<password> = Optional(None)", |
|
389 | 389 | "firstname" : "<firstname> = Optional(None)", |
|
390 | 390 | "lastname" : "<lastname> = Optional(None)", |
|
391 | 391 | "active" : "<bool> = Optional(None)", |
|
392 | 392 | "admin" : "<bool> = Optional(None)", |
|
393 | 393 | "ldap_dn" : "<ldap_dn> = Optional(None)" |
|
394 | 394 | } |
|
395 | 395 | |
|
396 | 396 | OUTPUT:: |
|
397 | 397 | |
|
398 | 398 | id : <id_given_in_input> |
|
399 | 399 | result: { |
|
400 | 400 | "msg" : "updated user ID:<userid> <username>", |
|
401 | 401 | "user": { |
|
402 | 402 | "user_id" : "<user_id>", |
|
403 | 403 | "api_key" : "<api_key>", |
|
404 | 404 | "username" : "<username>", |
|
405 | 405 | "firstname": "<firstname>", |
|
406 | 406 | "lastname" : "<lastname>", |
|
407 | 407 | "email" : "<email>", |
|
408 | 408 | "emails": "<list_of_all_additional_emails>", |
|
409 | 409 | "active" : "<bool>", |
|
410 | 410 | "admin" : "<bool>", |
|
411 | 411 | "ldap_dn" : "<ldap_dn>", |
|
412 | 412 | "last_login": "<last_login>", |
|
413 | 413 | }, |
|
414 | 414 | } |
|
415 | 415 | error: null |
|
416 | 416 | |
|
417 | 417 | delete_user |
|
418 | 418 | ----------- |
|
419 | 419 | |
|
420 | 420 | Delete the given user if such a user exists. |
|
421 | 421 | This command can only be executed using the api_key of a user with admin rights. |
|
422 | 422 | |
|
423 | 423 | INPUT:: |
|
424 | 424 | |
|
425 | 425 | id : <id_for_response> |
|
426 | 426 | api_key : "<api_key>" |
|
427 | 427 | method : "delete_user" |
|
428 | 428 | args : { |
|
429 | 429 | "userid" : "<user_id or username>", |
|
430 | 430 | } |
|
431 | 431 | |
|
432 | 432 | OUTPUT:: |
|
433 | 433 | |
|
434 | 434 | id : <id_given_in_input> |
|
435 | 435 | result: { |
|
436 | 436 | "msg" : "deleted user ID:<userid> <username>", |
|
437 | 437 | "user": null |
|
438 | 438 | } |
|
439 | 439 | error: null |
|
440 | 440 | |
|
441 | 441 | get_user_group |
|
442 | 442 | -------------- |
|
443 | 443 | |
|
444 | 444 | Get an existing user group. |
|
445 | 445 | This command can only be executed using the api_key of a user with admin rights. |
|
446 | 446 | |
|
447 | 447 | INPUT:: |
|
448 | 448 | |
|
449 | 449 | id : <id_for_response> |
|
450 | 450 | api_key : "<api_key>" |
|
451 | 451 | method : "get_user_group" |
|
452 | 452 | args : { |
|
453 | 453 | "usergroupid" : "<user group id or name>" |
|
454 | 454 | } |
|
455 | 455 | |
|
456 | 456 | OUTPUT:: |
|
457 | 457 | |
|
458 | 458 | id : <id_given_in_input> |
|
459 | 459 | result : None if group does not exist
|
460 | 460 | { |
|
461 | 461 | "users_group_id" : "<id>", |
|
462 | 462 | "group_name" : "<groupname>", |
|
463 | 463 | "active": "<bool>", |
|
464 | 464 | "members" : [ |
|
465 | 465 | { |
|
466 | 466 | "user_id" : "<user_id>", |
|
467 | 467 | "api_key" : "<api_key>", |
|
468 | 468 | "username" : "<username>", |
|
469 | 469 | "firstname": "<firstname>", |
|
470 | 470 | "lastname" : "<lastname>", |
|
471 | 471 | "email" : "<email>", |
|
472 | 472 | "emails": "<list_of_all_additional_emails>", |
|
473 | 473 | "active" : "<bool>", |
|
474 | 474 | "admin" : "<bool>", |
|
475 | 475 | "ldap_dn" : "<ldap_dn>", |
|
476 | 476 | "last_login": "<last_login>", |
|
477 | 477 | }, |
|
478 | 478 | … |
|
479 | 479 | ] |
|
480 | 480 | } |
|
481 | 481 | error : null |
|
482 | 482 | |
|
483 | 483 | get_user_groups |
|
484 | 484 | --------------- |
|
485 | 485 | |
|
486 | 486 | List all existing user groups. |
|
487 | 487 | This command can only be executed using the api_key of a user with admin rights. |
|
488 | 488 | |
|
489 | 489 | INPUT:: |
|
490 | 490 | |
|
491 | 491 | id : <id_for_response> |
|
492 | 492 | api_key : "<api_key>" |
|
493 | 493 | method : "get_user_groups" |
|
494 | 494 | args : { } |
|
495 | 495 | |
|
496 | 496 | OUTPUT:: |
|
497 | 497 | |
|
498 | 498 | id : <id_given_in_input> |
|
499 | 499 | result : [ |
|
500 | 500 | { |
|
501 | 501 | "users_group_id" : "<id>", |
|
502 | 502 | "group_name" : "<groupname>", |
|
503 | 503 | "active": "<bool>", |
|
504 | 504 | }, |
|
505 | 505 | … |
|
506 | 506 | ] |
|
507 | 507 | error : null |
|
508 | 508 | |
|
509 | 509 | create_user_group |
|
510 | 510 | ----------------- |
|
511 | 511 | |
|
512 | 512 | Create a new user group. |
|
513 | 513 | This command can only be executed using the api_key of a user with admin rights. |
|
514 | 514 | |
|
515 | 515 | INPUT:: |
|
516 | 516 | |
|
517 | 517 | id : <id_for_response> |
|
518 | 518 | api_key : "<api_key>" |
|
519 | 519 | method : "create_user_group" |
|
520 | 520 | args: { |
|
521 | 521 | "group_name": "<groupname>", |
|
522 | 522 | "owner" : "<owner_name_or_id = Optional(=apiuser)>", |
|
523 | 523 | "active": "<bool> = Optional(True)" |
|
524 | 524 | } |
|
525 | 525 | |
|
526 | 526 | OUTPUT:: |
|
527 | 527 | |
|
528 | 528 | id : <id_given_in_input> |
|
529 | 529 | result: { |
|
530 | 530 | "msg": "created new user group `<groupname>`", |
|
531 | 531 | "users_group": { |
|
532 | 532 | "users_group_id" : "<id>", |
|
533 | 533 | "group_name" : "<groupname>", |
|
534 | 534 | "active": "<bool>", |
|
535 | 535 | }, |
|
536 | 536 | } |
|
537 | 537 | error: null |
|
538 | 538 | |
|
539 | 539 | add_user_to_user_group |
|
540 | 540 | ---------------------- |
|
541 | 541 | |
|
542 | 542 | Adds a user to a user group. If the user already is in that group, success will be |
|
543 | 543 | ``false``. |
|
544 | 544 | This command can only be executed using the api_key of a user with admin rights. |
|
545 | 545 | |
|
546 | 546 | INPUT:: |
|
547 | 547 | |
|
548 | 548 | id : <id_for_response> |
|
549 | 549 | api_key : "<api_key>" |
|
550 | 550 | method : "add_user_user_group" |
|
551 | 551 | args: { |
|
552 | 552 | "usersgroupid" : "<user group id or name>", |
|
553 | 553 | "userid" : "<user_id or username>", |
|
554 | 554 | } |
|
555 | 555 | |
|
556 | 556 | OUTPUT:: |
|
557 | 557 | |
|
558 | 558 | id : <id_given_in_input> |
|
559 | 559 | result: { |
|
560 | 560 | "success": True|False # depends on if member is in group |
|
561 | 561 | "msg": "added member `<username>` to a user group `<groupname>` | |
|
562 | 562 | User is already in that group" |
|
563 | 563 | } |
|
564 | 564 | error: null |
|
565 | 565 | |
|
566 | 566 | remove_user_from_user_group |
|
567 | 567 | --------------------------- |
|
568 | 568 | |
|
569 | 569 | Remove a user from a user group. If the user isn't in the given group, success will |
|
570 | 570 | be ``false``. |
|
571 | 571 | This command can only be executed using the api_key of a user with admin rights. |
|
572 | 572 | |
|
573 | 573 | INPUT:: |
|
574 | 574 | |
|
575 | 575 | id : <id_for_response> |
|
576 | 576 | api_key : "<api_key>" |
|
577 | 577 | method : "remove_user_from_user_group" |
|
578 | 578 | args: { |
|
579 | 579 | "usersgroupid" : "<user group id or name>", |
|
580 | 580 | "userid" : "<user_id or username>", |
|
581 | 581 | } |
|
582 | 582 | |
|
583 | 583 | OUTPUT:: |
|
584 | 584 | |
|
585 | 585 | id : <id_given_in_input> |
|
586 | 586 | result: { |
|
587 | 587 | "success": True|False, # depends on if member is in group |
|
588 | 588 | "msg": "removed member <username> from user group <groupname> | |
|
589 | 589 | User wasn't in group" |
|
590 | 590 | } |
|
591 | 591 | error: null |
|
592 | 592 | |
|
593 | 593 | get_repo |
|
594 | 594 | -------- |
|
595 | 595 | |
|
596 | 596 | Get an existing repository by its name or repository_id. Members will contain |
|
597 | 597 | either users_group or users associated to that repository. |
|
598 | 598 | This command can only be executed using the api_key of a user with admin rights, |
|
599 | 599 | or that of a regular user with at least read access to the repository. |
|
600 | 600 | |
|
601 | 601 | INPUT:: |
|
602 | 602 | |
|
603 | 603 | id : <id_for_response> |
|
604 | 604 | api_key : "<api_key>" |
|
605 | 605 | method : "get_repo" |
|
606 | 606 | args: { |
|
607 | 607 | "repoid" : "<reponame or repo_id>" |
|
608 | 608 | } |
|
609 | 609 | |
|
610 | 610 | OUTPUT:: |
|
611 | 611 | |
|
612 | 612 | id : <id_given_in_input> |
|
613 | 613 | result: None if repository does not exist or |
|
614 | 614 | { |
|
615 | 615 | "repo_id" : "<repo_id>", |
|
616 | 616 | "repo_name" : "<reponame>" |
|
617 | 617 | "repo_type" : "<repo_type>", |
|
618 | 618 | "clone_uri" : "<clone_uri>", |
|
619 | 619 | "enable_downloads": "<bool>", |
|
620 | 620 | "enable_locking": "<bool>", |
|
621 | 621 | "enable_statistics": "<bool>", |
|
622 | 622 | "private": "<bool>", |
|
623 | 623 | "created_on" : "<date_time_created>", |
|
624 | 624 | "description" : "<description>", |
|
625 | 625 | "landing_rev": "<landing_rev>", |
|
626 | 626 | "last_changeset": { |
|
627 | 627 | "author": "<full_author>", |
|
628 | 628 | "date": "<date_time_of_commit>", |
|
629 | 629 | "message": "<commit_message>", |
|
630 | 630 | "raw_id": "<raw_id>", |
|
631 | 631 | "revision": "<numeric_revision>", |
|
632 | 632 | "short_id": "<short_id>" |
|
633 | 633 | } |
|
634 | 634 | "owner": "<repo_owner>", |
|
635 | 635 | "fork_of": "<name_of_fork_parent>", |
|
636 | 636 | "members" : [ |
|
637 | 637 | { |
|
638 | 638 | "type": "user", |
|
639 | 639 | "user_id" : "<user_id>", |
|
640 | 640 | "api_key" : "<api_key>", |
|
641 | 641 | "username" : "<username>", |
|
642 | 642 | "firstname": "<firstname>", |
|
643 | 643 | "lastname" : "<lastname>", |
|
644 | 644 | "email" : "<email>", |
|
645 | 645 | "emails": "<list_of_all_additional_emails>", |
|
646 | 646 | "active" : "<bool>", |
|
647 | 647 | "admin" : "<bool>", |
|
648 | 648 | "ldap_dn" : "<ldap_dn>", |
|
649 | 649 | "last_login": "<last_login>", |
|
650 | 650 | "permission" : "repository.(read|write|admin)" |
|
651 | 651 | }, |
|
652 | 652 | … |
|
653 | 653 | { |
|
654 | 654 | "type": "users_group", |
|
655 | 655 | "id" : "<usersgroupid>", |
|
656 | 656 | "name" : "<usersgroupname>", |
|
657 | 657 | "active": "<bool>", |
|
658 | 658 | "permission" : "repository.(read|write|admin)" |
|
659 | 659 | }, |
|
660 | 660 | … |
|
661 | 661 | ] |
|
662 | 662 | "followers": [ |
|
663 | 663 | { |
|
664 | 664 | "user_id" : "<user_id>", |
|
665 | 665 | "username" : "<username>", |
|
666 | 666 | "api_key" : "<api_key>", |
|
667 | 667 | "firstname": "<firstname>", |
|
668 | 668 | "lastname" : "<lastname>", |
|
669 | 669 | "email" : "<email>", |
|
670 | 670 | "emails": "<list_of_all_additional_emails>", |
|
671 | 671 | "ip_addresses": "<list_of_ip_addresses_for_user>", |
|
672 | 672 | "active" : "<bool>", |
|
673 | 673 | "admin" : "<bool>", |
|
674 | 674 | "ldap_dn" : "<ldap_dn>", |
|
675 | 675 | "last_login": "<last_login>", |
|
676 | 676 | }, |
|
677 | 677 | … |
|
678 | 678 | ] |
|
679 | 679 | } |
|
680 | 680 | error: null |
|
681 | 681 | |
|
682 | 682 | get_repos |
|
683 | 683 | --------- |
|
684 | 684 | |
|
685 | 685 | List all existing repositories. |
|
686 | 686 | This command can only be executed using the api_key of a user with admin rights, |
|
687 | 687 | or that of a regular user with at least read access to the repository. |
|
688 | 688 | |
|
689 | 689 | INPUT:: |
|
690 | 690 | |
|
691 | 691 | id : <id_for_response> |
|
692 | 692 | api_key : "<api_key>" |
|
693 | 693 | method : "get_repos" |
|
694 | 694 | args: { } |
|
695 | 695 | |
|
696 | 696 | OUTPUT:: |
|
697 | 697 | |
|
698 | 698 | id : <id_given_in_input> |
|
699 | 699 | result: [ |
|
700 | 700 | { |
|
701 | 701 | "repo_id" : "<repo_id>", |
|
702 | 702 | "repo_name" : "<reponame>" |
|
703 | 703 | "repo_type" : "<repo_type>", |
|
704 | 704 | "clone_uri" : "<clone_uri>", |
|
705 | 705 | "private" : "<bool>", |
|
706 | 706 | "created_on" : "<datetimecreated>", |
|
707 | 707 | "description" : "<description>", |
|
708 | 708 | "landing_rev": "<landing_rev>", |
|
709 | 709 | "owner": "<repo_owner>", |
|
710 | 710 | "fork_of": "<name_of_fork_parent>", |
|
711 | 711 | "enable_downloads": "<bool>", |
|
712 | 712 | "enable_locking": "<bool>", |
|
713 | 713 | "enable_statistics": "<bool>", |
|
714 | 714 | }, |
|
715 | 715 | … |
|
716 | 716 | ] |
|
717 | 717 | error: null |
|
718 | 718 | |
|
719 | 719 | get_repo_nodes |
|
720 | 720 | -------------- |
|
721 | 721 | |
|
722 | 722 | Return a list of files and directories for a given path at the given revision. |
|
723 | 723 | It is possible to specify ret_type to show only ``files`` or ``dirs``. |
|
724 | 724 | This command can only be executed using the api_key of a user with admin rights. |
|
725 | 725 | |
|
726 | 726 | INPUT:: |
|
727 | 727 | |
|
728 | 728 | id : <id_for_response> |
|
729 | 729 | api_key : "<api_key>" |
|
730 | 730 | method : "get_repo_nodes" |
|
731 | 731 | args: { |
|
732 | 732 | "repoid" : "<reponame or repo_id>" |
|
733 | 733 | "revision" : "<revision>", |
|
734 | 734 | "root_path" : "<root_path>", |
|
735 | 735 | "ret_type" : "<ret_type> = Optional('all')" |
|
736 | 736 | } |
|
737 | 737 | |
|
738 | 738 | OUTPUT:: |
|
739 | 739 | |
|
740 | 740 | id : <id_given_in_input> |
|
741 | 741 | result: [ |
|
742 | 742 | { |
|
743 | 743 | "name" : "<name>" |
|
744 | 744 | "type" : "<type>", |
|
745 | 745 | }, |
|
746 | 746 | … |
|
747 | 747 | ] |
|
748 | 748 | error: null |
|
749 | 749 | |
|
750 | 750 | create_repo |
|
751 | 751 | ----------- |
|
752 | 752 | |
|
753 | 753 | Create a repository. If the repository name contains "/", all needed repository |
|
754 | 754 | groups will be created. For example "foo/bar/baz" will create repository groups |
|
755 | 755 | "foo", "bar" (with "foo" as parent), and create "baz" repository with |
|
756 | 756 | "bar" as group. |
|
757 | 757 | This command can only be executed using the api_key of a user with admin rights, |
|
758 | 758 | or that of a regular user with create repository permission. |
|
759 | 759 | Regular users cannot specify owner parameter. |
|
760 | 760 | |
|
761 | 761 | INPUT:: |
|
762 | 762 | |
|
763 | 763 | id : <id_for_response> |
|
764 | 764 | api_key : "<api_key>" |
|
765 | 765 | method : "create_repo" |
|
766 | 766 | args: { |
|
767 | 767 | "repo_name" : "<reponame>", |
|
768 | 768 | "owner" : "<owner_name_or_id = Optional(=apiuser)>", |
|
769 | 769 | "repo_type" : "<repo_type> = Optional('hg')", |
|
770 | 770 | "description" : "<description> = Optional('')", |
|
771 | 771 | "private" : "<bool> = Optional(False)", |
|
772 | 772 | "clone_uri" : "<clone_uri> = Optional(None)", |
|
773 | 773 | "landing_rev" : "<landing_rev> = Optional('tip')", |
|
774 | 774 | "enable_downloads": "<bool> = Optional(False)", |
|
775 | 775 | "enable_locking": "<bool> = Optional(False)", |
|
776 | 776 | "enable_statistics": "<bool> = Optional(False)", |
|
777 | 777 | } |
|
778 | 778 | |
|
779 | 779 | OUTPUT:: |
|
780 | 780 | |
|
781 | 781 | id : <id_given_in_input> |
|
782 | 782 | result: { |
|
783 | 783 | "msg": "Created new repository `<reponame>`", |
|
784 | 784 | "repo": { |
|
785 | 785 | "repo_id" : "<repo_id>", |
|
786 | 786 | "repo_name" : "<reponame>" |
|
787 | 787 | "repo_type" : "<repo_type>", |
|
788 | 788 | "clone_uri" : "<clone_uri>", |
|
789 | 789 | "private" : "<bool>", |
|
790 | 790 | "created_on" : "<datetimecreated>", |
|
791 | 791 | "description" : "<description>", |
|
792 | 792 | "landing_rev": "<landing_rev>", |
|
793 | 793 | "owner": "<username or user_id>", |
|
794 | 794 | "fork_of": "<name_of_fork_parent>", |
|
795 | 795 | "enable_downloads": "<bool>", |
|
796 | 796 | "enable_locking": "<bool>", |
|
797 | 797 | "enable_statistics": "<bool>", |
|
798 | 798 | }, |
|
799 | 799 | } |
|
800 | 800 | error: null |
|
801 | 801 | |
|
802 | 802 | update_repo |
|
803 | 803 | ----------- |
|
804 | 804 | |
|
805 | 805 | Update a repository. |
|
806 | 806 | This command can only be executed using the api_key of a user with admin rights, |
|
807 | 807 | or that of a regular user with create repository permission. |
|
808 | 808 | Regular users cannot specify owner parameter. |
|
809 | 809 | |
|
810 | 810 | INPUT:: |
|
811 | 811 | |
|
812 | 812 | id : <id_for_response> |
|
813 | 813 | api_key : "<api_key>" |
|
814 | 814 | method : "update_repo" |
|
815 | 815 | args: { |
|
816 | 816 | "repoid" : "<reponame or repo_id>" |
|
817 | 817 | "name" : "<reponame> = Optional('')", |
|
818 | 818 | "group" : "<group_id> = Optional(None)", |
|
819 | 819 | "owner" : "<owner_name_or_id = Optional(=apiuser)>", |
|
820 | 820 | "description" : "<description> = Optional('')", |
|
821 | 821 | "private" : "<bool> = Optional(False)", |
|
822 | 822 | "clone_uri" : "<clone_uri> = Optional(None)", |
|
823 | 823 | "landing_rev" : "<landing_rev> = Optional('tip')", |
|
824 | 824 | "enable_downloads": "<bool> = Optional(False)", |
|
825 | 825 | "enable_locking": "<bool> = Optional(False)", |
|
826 | 826 | "enable_statistics": "<bool> = Optional(False)", |
|
827 | 827 | } |
|
828 | 828 | |
|
829 | 829 | OUTPUT:: |
|
830 | 830 | |
|
831 | 831 | id : <id_given_in_input> |
|
832 | 832 | result: { |
|
833 | 833 | "msg": "updated repo ID:repo_id `<reponame>`", |
|
834 | 834 | "repository": { |
|
835 | 835 | "repo_id" : "<repo_id>", |
|
836 | 836 | "repo_name" : "<reponame>" |
|
837 | 837 | "repo_type" : "<repo_type>", |
|
838 | 838 | "clone_uri" : "<clone_uri>", |
|
839 | 839 | "private": "<bool>", |
|
840 | 840 | "created_on" : "<datetimecreated>", |
|
841 | 841 | "description" : "<description>", |
|
842 | 842 | "landing_rev": "<landing_rev>", |
|
843 | 843 | "owner": "<username or user_id>", |
|
844 | 844 | "fork_of": "<name_of_fork_parent>", |
|
845 | 845 | "enable_downloads": "<bool>", |
|
846 | 846 | "enable_locking": "<bool>", |
|
847 | 847 | "enable_statistics": "<bool>", |
|
848 | 848 | "last_changeset": { |
|
849 | 849 | "author": "<full_author>", |
|
850 | 850 | "date": "<date_time_of_commit>", |
|
851 | 851 | "message": "<commit_message>", |
|
852 | 852 | "raw_id": "<raw_id>", |
|
853 | 853 | "revision": "<numeric_revision>", |
|
854 | 854 | "short_id": "<short_id>" |
|
855 | 855 | },
|
856 | 856 | "locked_by": "<username>", |
|
857 | 857 | "locked_date": "<float lock_time>", |
|
858 | 858 | }, |
|
859 | 859 | } |
|
860 | 860 | error: null |
|
861 | 861 | |
|
862 | 862 | fork_repo |
|
863 | 863 | --------- |
|
864 | 864 | |
|
865 | 865 | Create a fork of the given repo. If using Celery, this will |
|
866 | 866 | return success message immediately and a fork will be created |
|
867 | 867 | asynchronously. |
|
868 | 868 | This command can only be executed using the api_key of a user with admin |
|
869 | 869 | rights, or with the global fork permission, by a regular user with create |
|
870 | 870 | repository permission and at least read access to the repository. |
|
871 | 871 | Regular users cannot specify owner parameter. |
|
872 | 872 | |
|
873 | 873 | INPUT:: |
|
874 | 874 | |
|
875 | 875 | id : <id_for_response> |
|
876 | 876 | api_key : "<api_key>" |
|
877 | 877 | method : "fork_repo" |
|
878 | 878 | args: { |
|
879 | 879 | "repoid" : "<reponame or repo_id>", |
|
880 | 880 | "fork_name": "<forkname>", |
|
881 | 881 | "owner": "<username or user_id = Optional(=apiuser)>", |
|
882 | 882 | "description": "<description>", |
|
883 | 883 | "copy_permissions": "<bool>", |
|
884 | 884 | "private": "<bool>", |
|
885 | 885 | "landing_rev": "<landing_rev>" |
|
886 | 886 | |
|
887 | 887 | } |
|
888 | 888 | |
|
889 | 889 | OUTPUT:: |
|
890 | 890 | |
|
891 | 891 | id : <id_given_in_input> |
|
892 | 892 | result: { |
|
893 | 893 | "msg": "Created fork of `<reponame>` as `<forkname>`", |
|
894 | 894 | "success": true |
|
895 | 895 | } |
|
896 | 896 | error: null |
|
897 | 897 | |
|
898 | 898 | delete_repo |
|
899 | 899 | ----------- |
|
900 | 900 | |
|
901 | 901 | Delete a repository. |
|
902 | 902 | This command can only be executed using the api_key of a user with admin rights, |
|
903 | 903 | or that of a regular user with admin access to the repository. |
|
904 | 904 | When ``forks`` param is set it is possible to detach or delete forks of the deleted repository. |
|
905 | 905 | |
|
906 | 906 | INPUT:: |
|
907 | 907 | |
|
908 | 908 | id : <id_for_response> |
|
909 | 909 | api_key : "<api_key>" |
|
910 | 910 | method : "delete_repo" |
|
911 | 911 | args: { |
|
912 | 912 | "repoid" : "<reponame or repo_id>", |
|
913 | 913 | "forks" : "`delete` or `detach` = Optional(None)" |
|
914 | 914 | } |
|
915 | 915 | |
|
916 | 916 | OUTPUT:: |
|
917 | 917 | |
|
918 | 918 | id : <id_given_in_input> |
|
919 | 919 | result: { |
|
920 | 920 | "msg": "Deleted repository `<reponame>`", |
|
921 | 921 | "success": true |
|
922 | 922 | } |
|
923 | 923 | error: null |
|
924 | 924 | |
|
925 | 925 | grant_user_permission |
|
926 | 926 | --------------------- |
|
927 | 927 | |
|
928 | 928 | Grant permission for a user on the given repository, or update the existing one if found. |
|
929 | 929 | This command can only be executed using the api_key of a user with admin rights. |
|
930 | 930 | |
|
931 | 931 | INPUT:: |
|
932 | 932 | |
|
933 | 933 | id : <id_for_response> |
|
934 | 934 | api_key : "<api_key>" |
|
935 | 935 | method : "grant_user_permission" |
|
936 | 936 | args: { |
|
937 | 937 | "repoid" : "<reponame or repo_id>" |
|
938 | 938 | "userid" : "<username or user_id>" |
|
939 | 939 | "perm" : "(repository.(none|read|write|admin))", |
|
940 | 940 | } |
|
941 | 941 | |
|
942 | 942 | OUTPUT:: |
|
943 | 943 | |
|
944 | 944 | id : <id_given_in_input> |
|
945 | 945 | result: { |
|
946 | 946 | "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`", |
|
947 | 947 | "success": true |
|
948 | 948 | } |
|
949 | 949 | error: null |
|
950 | 950 | |
|
951 | 951 | revoke_user_permission |
|
952 | 952 | ---------------------- |
|
953 | 953 | |
|
954 | 954 | Revoke permission for a user on the given repository. |
|
955 | 955 | This command can only be executed using the api_key of a user with admin rights. |
|
956 | 956 | |
|
957 | 957 | INPUT:: |
|
958 | 958 | |
|
959 | 959 | id : <id_for_response> |
|
960 | 960 | api_key : "<api_key>" |
|
961 | 961 | method : "revoke_user_permission" |
|
962 | 962 | args: { |
|
963 | 963 | "repoid" : "<reponame or repo_id>" |
|
964 | 964 | "userid" : "<username or user_id>" |
|
965 | 965 | } |
|
966 | 966 | |
|
967 | 967 | OUTPUT:: |
|
968 | 968 | |
|
969 | 969 | id : <id_given_in_input> |
|
970 | 970 | result: { |
|
971 | 971 | "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`", |
|
972 | 972 | "success": true |
|
973 | 973 | } |
|
974 | 974 | error: null |
|
975 | 975 | |
|
976 | 976 | grant_user_group_permission |
|
977 | 977 | --------------------------- |
|
978 | 978 | |
|
979 | 979 | Grant permission for a user group on the given repository, or update the |
|
980 | 980 | existing one if found. |
|
981 | 981 | This command can only be executed using the api_key of a user with admin rights. |
|
982 | 982 | |
|
983 | 983 | INPUT:: |
|
984 | 984 | |
|
985 | 985 | id : <id_for_response> |
|
986 | 986 | api_key : "<api_key>" |
|
987 | 987 | method : "grant_user_group_permission" |
|
988 | 988 | args: { |
|
989 | 989 | "repoid" : "<reponame or repo_id>" |
|
990 | 990 | "usersgroupid" : "<user group id or name>" |
|
991 | 991 | "perm" : "(repository.(none|read|write|admin))", |
|
992 | 992 | } |
|
993 | 993 | |
|
994 | 994 | OUTPUT:: |
|
995 | 995 | |
|
996 | 996 | id : <id_given_in_input> |
|
997 | 997 | result: { |
|
998 | 998 | "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`", |
|
999 | 999 | "success": true |
|
1000 | 1000 | } |
|
1001 | 1001 | error: null |
|
1002 | 1002 | |
|
1003 | 1003 | revoke_user_group_permission |
|
1004 | 1004 | ---------------------------- |
|
1005 | 1005 | |
|
1006 | 1006 | Revoke permission for a user group on the given repository. |
|
1007 | 1007 | This command can only be executed using the api_key of a user with admin rights. |
|
1008 | 1008 | |
|
1009 | 1009 | INPUT:: |
|
1010 | 1010 | |
|
1011 | 1011 | id : <id_for_response> |
|
1012 | 1012 | api_key : "<api_key>" |
|
1013 | 1013 | method : "revoke_user_group_permission" |
|
1014 | 1014 | args: { |
|
1015 | 1015 | "repoid" : "<reponame or repo_id>" |
|
1016 | 1016 | "usersgroupid" : "<user group id or name>" |
|
1017 | 1017 | } |
|
1018 | 1018 | |
|
1019 | 1019 | OUTPUT:: |
|
1020 | 1020 | |
|
1021 | 1021 | id : <id_given_in_input> |
|
1022 | 1022 | result: { |
|
1023 | 1023 | "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`", |
|
1024 | 1024 | "success": true |
|
1025 | 1025 | } |
|
1026 | 1026 | error: null |
@@ -1,809 +1,809 b'' | |||
|
1 | 1 | .. _setup: |
|
2 | 2 | |
|
3 | 3 | ===== |
|
4 | 4 | Setup |
|
5 | 5 | ===== |
|
6 | 6 | |
|
7 | 7 | |
|
8 | 8 | Setting up Kallithea |
|
9 | 9 | -------------------- |
|
10 | 10 | |
|
11 | 11 | First, you will need to create a Kallithea configuration file. Run the |
|
12 | 12 | following command to do so:: |
|
13 | 13 | |
|
14 | 14 | paster make-config Kallithea my.ini |
|
15 | 15 | |
|
16 | 16 | This will create the file ``my.ini`` in the current directory. This |
|
17 | 17 | configuration file contains the various settings for Kallithea, e.g. |
|
18 | 18 | proxy port, email settings, usage of static files, cache, Celery |
|
19 | 19 | settings, and logging. |
|
20 | 20 | |
|
21 | 21 | Next, you need to create the databases used by Kallithea. It is recommended to |
|
22 | 22 | use PostgreSQL or SQLite (default). If you choose a database other than the |
|
23 | 23 | default, ensure you properly adjust the database URL in your ``my.ini`` |
|
24 | 24 | configuration file to use this other database. Kallithea currently supports |
|
25 | 25 | PostgreSQL, SQLite and MySQL databases. Create the database by running |
|
26 | 26 | the following command:: |
|
27 | 27 | |
|
28 | 28 | paster setup-db my.ini |
|
29 | 29 | |
|
30 | 30 | This will prompt you for a "root" path. This "root" path is the location where |
|
31 | 31 | Kallithea will store all of its repositories on the current machine. After |
|
32 | 32 | entering this "root" path ``setup-db`` will also prompt you for a username |
|
33 | 33 | and password for the initial admin account which ``setup-db`` sets |
|
34 | 34 | up for you. |
|
35 | 35 | |
|
36 | 36 | The ``setup-db`` values can also be given on the command line. |
|
37 | 37 | Example:: |
|
38 | 38 | |
|
39 | 39 | paster setup-db my.ini --user=nn --password=secret --email=nn@example.com --repos=/srv/repos
|
40 | 40 | |
|
41 | 41 | The ``setup-db`` command will create all needed tables and an |
|
42 | 42 | admin account. When choosing a root path you can either use a new |
|
43 | 43 | empty location, or a location which already contains existing |
|
44 | 44 | repositories. If you choose a location which contains existing |
|
45 | 45 | repositories Kallithea will add all of the repositories at the chosen |
|
46 | 46 | location to its database. (Note: make sure you specify the correct |
|
47 | 47 | path to the root). |
|
48 | 48 | |
|
49 | 49 | .. note:: the given path for Mercurial_ repositories **must** be write |
|
50 | 50 | accessible for the application. It's very important since |
|
51 | 51 | the Kallithea web interface will work without write access, |
|
52 | 52 | but when trying to do a push it will fail with permission |
|
53 | 53 | denied errors unless it has write access. |
|
54 | 54 | |
|
55 | 55 | You are now ready to use Kallithea. To run it simply execute:: |
|
56 | 56 | |
|
57 | 57 | paster serve my.ini |
|
58 | 58 | |
|
59 | 59 | - This command runs the Kallithea server. The web app should be available at |
|
60 | 60 | http://127.0.0.1:5000. The IP address and port is configurable via the |
|
61 | 61 | configuration file created in the previous step. |
|
62 | 62 | - Log in to Kallithea using the admin account created when running ``setup-db``. |
|
63 | 63 | - The default permissions on each repository is read, and the owner is admin. |
|
64 | 64 | Remember to update these if needed. |
|
65 | 65 | - In the admin panel you can toggle LDAP, anonymous, and permissions |
|
66 | 66 | settings, as well as edit more advanced options on users and |
|
67 | 67 | repositories. |
|
68 | 68 | |
|
69 | 69 | |
|
70 | 70 | Extensions |
|
71 | 71 | ---------- |
|
72 | 72 | |
|
73 | 73 | Optionally one can create an ``rcextensions`` package that extends Kallithea |
|
74 | 74 | functionality. |
|
75 | 75 | To generate a skeleton extensions package, run:: |
|
76 | 76 | |
|
77 | 77 | paster make-rcext my.ini |
|
78 | 78 | |
|
79 | 79 | This will create an ``rcextensions`` package next to the specified ``ini`` file. |
|
80 | 80 | With ``rcextensions`` it's possible to add additional mapping for whoosh, |
|
81 | 81 | stats and add additional code into the push/pull/create/delete repo hooks, |
|
82 | 82 | for example for sending signals to build-bots such as Jenkins. |
|
83 | 83 | |
|
84 | 84 | See the ``__init__.py`` file inside the generated ``rcextensions`` package |
|
85 | 85 | for more details. |
|
86 | 86 | |
|
87 | 87 | |
|
88 | 88 | Using Kallithea with SSH |
|
89 | 89 | ------------------------ |
|
90 | 90 | |
|
91 | 91 | Kallithea currently only hosts repositories using http and https. (The addition |
|
92 | 92 | of ssh hosting is a planned future feature.) However you can easily use ssh in |
|
93 | 93 | parallel with Kallithea. (Repository access via ssh is a standard "out of |
|
94 | 94 | the box" feature of Mercurial_ and you can use this to access any of the |
|
95 | 95 | repositories that Kallithea is hosting. See PublishingRepositories_) |
|
96 | 96 | |
|
97 | 97 | Kallithea repository structures are kept in directories with the same name |
|
98 | 98 | as the project. When using repository groups, each group is a subdirectory. |
|
99 | 99 | This allows you to easily use ssh for accessing repositories. |
|
100 | 100 | |
|
101 | 101 | In order to use ssh you need to make sure that your web server and the users' |
|
102 | 102 | login accounts have the correct permissions set on the appropriate directories. |
|
103 | 103 | |
|
104 | 104 | .. note:: These permissions are independent of any permissions you |
|
105 | 105 | have set up using the Kallithea web interface. |
|
106 | 106 | |
|
107 | 107 | If your main directory (the same as set in Kallithea settings) is for |
|
108 | 108 | example set to ``/srv/repos`` and the repository you are using is |
|
109 | 109 | named ``kallithea``, then to clone via ssh you should run:: |
|
110 | 110 | |
|
111 | 111 | hg clone ssh://user@kallithea.example.com/srv/repos/kallithea
|
112 | 112 | |
|
113 | 113 | Using other external tools such as mercurial-server_ or using ssh key-based |
|
114 | 114 | authentication is fully supported. |
|
115 | 115 | |
|
116 | 116 | .. note:: In an advanced setup, in order for your ssh access to use |
|
117 | 117 | the same permissions as set up via the Kallithea web |
|
118 | 118 | interface, you can create an authentication hook to connect |
|
119 | 119 | to the Kallithea db and run check functions for permissions |
|
120 | 120 | against that. |
|
121 | 121 | |
|
122 | 122 | |
|
123 | 123 | Setting up Whoosh full text search |
|
124 | 124 | ---------------------------------- |
|
125 | 125 | |
|
126 | 126 | Kallithea provides full text search of repositories using `Whoosh`__. |
|
127 | 127 | |
|
128 | 128 | .. __: https://pythonhosted.org/Whoosh/ |
|
129 | 129 | |
|
130 | 130 | For an incremental index build, run:: |
|
131 | 131 | |
|
132 | 132 | paster make-index my.ini |
|
133 | 133 | |
|
134 | 134 | For a full index rebuild, run:: |
|
135 | 135 | |
|
136 | 136 | paster make-index my.ini -f |
|
137 | 137 | |
|
138 | 138 | The ``--repo-location`` option allows the location of the repositories to be overridden;
|
139 | 139 | usually, the location is retrieved from the Kallithea database. |
|
140 | 140 | |
|
141 | 141 | The ``--index-only`` option can be used to limit the indexed repositories to a comma-separated list:: |
|
142 | 142 | |
|
143 | 143 | paster make-index my.ini --index-only=vcs,kallithea |
|
144 | 144 | |
|
145 | 145 | To keep your index up-to-date it is necessary to do periodic index builds; |
|
146 | 146 | for this, it is recommended to use a crontab entry. Example:: |
|
147 | 147 | |
|
148 | 148 | 0 3 * * * /path/to/virtualenv/bin/paster make-index /path/to/kallithea/my.ini |
|
149 | 149 | |
|
150 | 150 | When using incremental mode (the default), Whoosh will check the last |
|
151 | 151 | modification date of each file and add it to be reindexed if a newer file is |
|
152 | 152 | available. The indexing daemon checks for any removed files and removes them |
|
153 | 153 | from index. |
|
154 | 154 | |
|
155 | 155 | If you want to rebuild the index from scratch, you can use the ``-f`` flag as above, |
|
156 | 156 | or in the admin panel you can check the "build from scratch" checkbox. |
|
157 | 157 | |
|
158 | 158 | |
|
159 | 159 | Setting up LDAP support |
|
160 | 160 | ----------------------- |
|
161 | 161 | |
|
162 | 162 | Kallithea supports LDAP authentication. In order |
|
163 | 163 | to use LDAP, you have to install the python-ldap_ package. This package is |
|
164 | 164 | available via PyPI, so you can install it by running:: |
|
165 | 165 | |
|
166 | 166 | pip install python-ldap |
|
167 | 167 | |
|
168 | 168 | .. note:: ``python-ldap`` requires some libraries to be installed on |
|
169 | 169 | your system, so before installing it check that you have at |
|
170 | 170 | least the ``openldap`` and ``sasl`` libraries. |
|
171 | 171 | |
|
172 | 172 | Choose *Admin > Authentication*, click the ``kallithea.lib.auth_modules.auth_ldap`` button |
|
173 | 173 | and then *Save*, to enable the LDAP plugin and configure its settings. |
|
174 | 174 | |
|
175 | 175 | Here's a typical LDAP setup:: |
|
176 | 176 | |
|
177 | 177 | Connection settings |
|
178 | 178 | Enable LDAP = checked |
|
179 | 179 | Host = host.example.com
|
180 | 180 | Port = 389 |
|
181 | 181 | Account = <account> |
|
182 | 182 | Password = <password> |
|
183 | 183 | Connection Security = LDAPS connection |
|
184 | 184 | Certificate Checks = DEMAND |
|
185 | 185 | |
|
186 | 186 | Search settings |
|
187 | 187 | Base DN = CN=users,DC=host,DC=example,DC=org |
|
188 | 188 | LDAP Filter = (&(objectClass=user)(!(objectClass=computer))) |
|
189 | 189 | LDAP Search Scope = SUBTREE |
|
190 | 190 | |
|
191 | 191 | Attribute mappings |
|
192 | 192 | Login Attribute = uid |
|
193 | 193 | First Name Attribute = firstName |
|
194 | 194 | Last Name Attribute = lastName |
|
195 | 195 | Email Attribute = mail |
|
196 | 196 | |
|
197 | 197 | If your user groups are placed in an Organisation Unit (OU) structure, the Search Settings configuration differs:: |
|
198 | 198 | |
|
199 | 199 | Search settings |
|
200 | 200 | Base DN = DC=host,DC=example,DC=org |
|
201 | 201 | LDAP Filter = (&(memberOf=CN=your user group,OU=subunit,OU=unit,DC=host,DC=example,DC=org)(objectClass=user)) |
|
202 | 202 | LDAP Search Scope = SUBTREE |
|
203 | 203 | |
|
204 | 204 | .. _enable_ldap: |
|
205 | 205 | |
|
206 | 206 | Enable LDAP : required |
|
207 | 207 | Whether to use LDAP for authenticating users. |
|
208 | 208 | |
|
209 | 209 | .. _ldap_host: |
|
210 | 210 | |
|
211 | 211 | Host : required |
|
212 | 212 | LDAP server hostname or IP address. Can be also a comma separated |
|
213 | 213 | list of servers to support LDAP fail-over. |
|
214 | 214 | |
|
215 | 215 | .. _Port: |
|
216 | 216 | |
|
217 | 217 | Port : required |
|
218 | 218 | 389 for un-encrypted LDAP, 636 for SSL-encrypted LDAP. |
|
219 | 219 | |
|
220 | 220 | .. _ldap_account: |
|
221 | 221 | |
|
222 | 222 | Account : optional |
|
223 | 223 | Only required if the LDAP server does not allow anonymous browsing of |
|
224 | 224 | records. This should be a special account for record browsing. This |
|
225 | 225 | will require `LDAP Password`_ below. |
|
226 | 226 | |
|
227 | 227 | .. _LDAP Password: |
|
228 | 228 | |
|
229 | 229 | Password : optional |
|
230 | 230 | Only required if the LDAP server does not allow anonymous browsing of |
|
231 | 231 | records. |
|
232 | 232 | |
|
233 | 233 | .. _Enable LDAPS: |
|
234 | 234 | |
|
235 | 235 | Connection Security : required |
|
236 | 236 | Defines the connection to LDAP server |
|
237 | 237 | |
|
238 | 238 | No encryption |
|
239 | 239 | Plain non encrypted connection |
|
240 | 240 | |
|
241 | 241 | LDAPS connection |
|
242 | 242 | Enable LDAPS connections. It will likely require `Port`_ to be set to |
|
243 | 243 | a different value (standard LDAPS port is 636). When LDAPS is enabled |
|
244 | 244 | then `Certificate Checks`_ is required. |
|
245 | 245 | |
|
246 | 246 | START_TLS on LDAP connection |
|
247 | 247 | START TLS connection |
|
248 | 248 | |
|
249 | 249 | .. _Certificate Checks: |
|
250 | 250 | |
|
251 | 251 | Certificate Checks : optional |
|
252 | 252 | How SSL certificates verification is handled -- this is only useful when |
|
253 | 253 | `Enable LDAPS`_ is enabled. Only DEMAND or HARD offer full SSL security |
|
254 | 254 | while the other options are susceptible to man-in-the-middle attacks. SSL |
|
255 | 255 | certificates can be installed to /etc/openldap/cacerts so that the |
|
256 | 256 | DEMAND or HARD options can be used with self-signed certificates or |
|
257 | 257 | certificates that do not have traceable certificates of authority. |
|
258 | 258 | |
|
259 | 259 | NEVER |
|
260 | 260 | A server certificate will never be requested or checked.
|
261 | 261 | |
|
262 | 262 | ALLOW |
|
263 | 263 | A server certificate is requested. Failure to provide a |
|
264 | 264 | certificate or providing a bad certificate will not terminate the |
|
265 | 265 | session. |
|
266 | 266 | |
|
267 | 267 | TRY |
|
268 | 268 | A server certificate is requested. Failure to provide a |
|
269 | 269 | certificate does not halt the session; providing a bad certificate |
|
270 | 270 | halts the session. |
|
271 | 271 | |
|
272 | 272 | DEMAND |
|
273 | 273 | A server certificate is requested and must be provided and |
|
274 | 274 | authenticated for the session to proceed. |
|
275 | 275 | |
|
276 | 276 | HARD |
|
277 | 277 | The same as DEMAND. |
|
278 | 278 | |
|
279 | 279 | .. _Base DN: |
|
280 | 280 | |
|
281 | 281 | Base DN : required |
|
282 | 282 | The Distinguished Name (DN) where searches for users will be performed. |
|
283 | 283 | Searches can be controlled by `LDAP Filter`_ and `LDAP Search Scope`_. |
|
284 | 284 | |
|
285 | 285 | .. _LDAP Filter: |
|
286 | 286 | |
|
287 | 287 | LDAP Filter : optional |
|
288 | 288 | A LDAP filter defined by RFC 2254. This is more useful when `LDAP |
|
289 | 289 | Search Scope`_ is set to SUBTREE. The filter is useful for limiting |
|
290 | 290 | which LDAP objects are identified as representing Users for |
|
291 | 291 | authentication. The filter is augmented by `Login Attribute`_ below. |
|
292 | 292 | This can commonly be left blank. |
|
293 | 293 | |
|
294 | 294 | .. _LDAP Search Scope: |
|
295 | 295 | |
|
296 | 296 | LDAP Search Scope : required |
|
297 | 297 | This limits how far LDAP will search for a matching object. |
|
298 | 298 | |
|
299 | 299 | BASE |
|
300 | 300 | Only allows searching of `Base DN`_ and is usually not what you |
|
301 | 301 | want. |
|
302 | 302 | |
|
303 | 303 | ONELEVEL |
|
304 | 304 | Searches only the entries one level below `Base DN`_, but not Base DN itself.
|
305 | 305 | |
|
306 | 306 | SUBTREE |
|
307 | 307 | Searches `Base DN`_ itself and all entries below it.
|
308 | 308 | When using SUBTREE `LDAP Filter`_ is useful to limit object |
|
309 | 309 | location. |
|
310 | 310 | |
|
311 | 311 | .. _Login Attribute: |
|
312 | 312 | |
|
313 | 313 | Login Attribute : required |
|
314 | 314 | The LDAP record attribute that will be matched as the USERNAME or |
|
315 | 315 | ACCOUNT used to connect to Kallithea. This will be added to `LDAP |
|
316 | 316 | Filter`_ for locating the User object. If `LDAP Filter`_ is specified as |
|
317 | 317 | "LDAPFILTER", `Login Attribute`_ is specified as "uid" and the user has |
|
318 | 318 | connected as "jsmith" then the `LDAP Filter`_ will be augmented as below |
|
319 | 319 | :: |
|
320 | 320 | |
|
321 | 321 | (&(LDAPFILTER)(uid=jsmith)) |
|
322 | 322 | |
|
323 | 323 | .. _ldap_attr_firstname: |
|
324 | 324 | |
|
325 | 325 | First Name Attribute : required |
|
326 | 326 | The LDAP record attribute which represents the user's first name. |
|
327 | 327 | |
|
328 | 328 | .. _ldap_attr_lastname: |
|
329 | 329 | |
|
330 | 330 | Last Name Attribute : required |
|
331 | 331 | The LDAP record attribute which represents the user's last name. |
|
332 | 332 | |
|
333 | 333 | .. _ldap_attr_email: |
|
334 | 334 | |
|
335 | 335 | Email Attribute : required |
|
336 | 336 | The LDAP record attribute which represents the user's email address. |
|
337 | 337 | |
|
338 | 338 | If all data are entered correctly, and python-ldap_ is properly installed |
|
339 | 339 | users should be granted access to Kallithea with LDAP accounts. At this |
|
340 | 340 | time user information is copied from LDAP into the Kallithea user database. |
|
341 | 341 | This means that updates of an LDAP user object may not be reflected as a |
|
342 | 342 | user update in Kallithea. |
|
343 | 343 | |
|
344 | 344 | If you have problems with LDAP access and believe you entered correct

345 | 345 | information, check the Kallithea logs; any error messages sent from LDAP
|
346 | 346 | will be saved there. |
|
347 | 347 | |
|
348 | 348 | Active Directory |
|
349 | 349 | '''''''''''''''' |
|
350 | 350 | |
|
351 | 351 | Kallithea can use Microsoft Active Directory for user authentication. This |
|
352 | 352 | is done through an LDAP or LDAPS connection to Active Directory. The |
|
353 | 353 | following LDAP configuration settings are typical for using Active |
|
354 | 354 | Directory :: |
|
355 | 355 | |
|
356 | 356 | Base DN = OU=SBSUsers,OU=Users,OU=MyBusiness,DC=v3sys,DC=local |
|
357 | 357 | Login Attribute = sAMAccountName |
|
358 | 358 | First Name Attribute = givenName |
|
359 | 359 | Last Name Attribute = sn |
|
360 | 360 | Email Attribute = mail |
|
361 | 361 | |
|
362 | 362 | All other LDAP settings will likely be site-specific and should be |
|
363 | 363 | appropriately configured. |
|
364 | 364 | |
|
365 | 365 | |
|
366 | 366 | Authentication by container or reverse-proxy |
|
367 | 367 | -------------------------------------------- |
|
368 | 368 | |
|
369 | 369 | Kallithea supports delegating the authentication |
|
370 | 370 | of users to its WSGI container, or to a reverse-proxy server through which all |
|
371 | 371 | clients access the application. |
|
372 | 372 | |
|
373 | 373 | When these authentication methods are enabled in Kallithea, it uses the |
|
374 | 374 | username that the container/proxy (Apache or Nginx, etc.) provides and doesn't |
|
375 | 375 | perform the authentication itself. The authorization, however, is still done by |
|
376 | 376 | Kallithea according to its settings. |
|
377 | 377 | |
|
378 | 378 | When a user logs in for the first time using these authentication methods, |
|
379 | 379 | a matching user account is created in Kallithea with default permissions. An |
|
380 | 380 | administrator can then modify it using Kallithea's admin interface. |
|
381 | 381 | |
|
382 | 382 | It's also possible for an administrator to create accounts and configure their |
|
383 | 383 | permissions before the user logs in for the first time, using the :ref:`create-user` API. |
|
384 | 384 | |
|
385 | 385 | Container-based authentication |
|
386 | 386 | '''''''''''''''''''''''''''''' |
|
387 | 387 | |
|
388 | 388 | In a container-based authentication setup, Kallithea reads the user name from |
|
389 | 389 | the ``REMOTE_USER`` server variable provided by the WSGI container. |
|
390 | 390 | |
|
391 | 391 | After setting up your container (see `Apache with mod_wsgi`_), you'll need |
|
392 | 392 | to configure it to require authentication on the location configured for |
|
393 | 393 | Kallithea. |
|
394 | 394 | |
|
395 | 395 | Proxy pass-through authentication |
|
396 | 396 | ''''''''''''''''''''''''''''''''' |
|
397 | 397 | |
|
398 | 398 | In a proxy pass-through authentication setup, Kallithea reads the user name |
|
399 | 399 | from the ``X-Forwarded-User`` request header, which should be configured to be |
|
400 | 400 | sent by the reverse-proxy server. |
|
401 | 401 | |
|
402 | 402 | After setting up your proxy solution (see `Apache virtual host reverse proxy example`_, |
|
403 | 403 | `Apache as subdirectory`_ or `Nginx virtual host example`_), you'll need to |
|
404 | 404 | configure the authentication and add the username in a request header named |
|
405 | 405 | ``X-Forwarded-User``. |
|
406 | 406 | |
|
407 | 407 | For example, the following config section for Apache sets a subdirectory in a |
|
408 | 408 | reverse-proxy setup with basic auth: |
|
409 | 409 | |
|
410 | 410 | .. code-block:: apache |
|
411 | 411 | |
|
412 | 412 | <Location /someprefix> |
|
413 | 413 | ProxyPass http://127.0.0.1:5000/someprefix |
|
414 | 414 | ProxyPassReverse http://127.0.0.1:5000/someprefix |
|
415 | 415 | SetEnvIf X-Url-Scheme https HTTPS=1 |
|
416 | 416 | |
|
417 | 417 | AuthType Basic |
|
418 | 418 | AuthName "Kallithea authentication" |
|
419 | 419 | AuthUserFile /srv/kallithea/.htpasswd |
|
420 | 420 | Require valid-user |
|
421 | 421 | |
|
422 | 422 | RequestHeader unset X-Forwarded-User |
|
423 | 423 | |
|
424 | 424 | RewriteEngine On |
|
425 | 425 | RewriteCond %{LA-U:REMOTE_USER} (.+) |
|
426 | 426 | RewriteRule .* - [E=RU:%1] |
|
427 | 427 | RequestHeader set X-Forwarded-User %{RU}e |
|
428 | 428 | </Location> |
|
429 | 429 | |
|
430 | 430 | .. note:: |
|
431 | 431 | If you enable proxy pass-through authentication, make sure your server is |
|
432 | 432 | only accessible through the proxy. Otherwise, any client would be able to |
|
433 | 433 | forge the authentication header and could effectively become authenticated |
|
434 | 434 | using any account of their liking. |
|
435 | 435 | |
|
436 | 436 | |
|
437 | 437 | Integration with issue trackers |
|
438 | 438 | ------------------------------- |
|
439 | 439 | |
|
440 | 440 | Kallithea provides a simple integration with issue trackers. It's possible |
|
441 | 441 | to define a regular expression that will match an issue ID in commit messages, |
|
442 | 442 | and have that replaced with a URL to the issue. To enable this simply |
|
443 | 443 | uncomment the following variables in the ini file:: |
|
444 | 444 | |
|
445 | 445 | issue_pat = (?:^#|\s#)(\w+) |
|
446 | 446 | issue_server_link = https://issues.example.com/{repo}/issue/{id}
|
447 | 447 | issue_prefix = # |
|
448 | 448 | |
|
449 | 449 | ``issue_pat`` is the regular expression describing which strings in |
|
450 | 450 | commit messages will be treated as issue references. A match group in |
|
451 | 451 | parentheses should be used to specify the actual issue id. |
|
452 | 452 | |
|
453 | 453 | The default expression matches issues in the format ``#<number>``, e.g., ``#300``. |
|
454 | 454 | |
|
455 | 455 | Matched issue references are replaced with the link specified in |
|
456 | 456 | ``issue_server_link``. ``{id}`` is replaced with the issue ID, and |
|
457 | 457 | ``{repo}`` with the repository name. Since the # is stripped away, |
|
458 | 458 | ``issue_prefix`` is prepended to the link text. ``issue_prefix`` doesn't |
|
459 | 459 | necessarily need to be ``#``: if you set issue prefix to ``ISSUE-`` this will |
|
460 | 460 | generate a URL in the format: |
|
461 | 461 | |
|
462 | 462 | .. code-block:: html |
|
463 | 463 | |
|
464 | 464 | <a href="https://issues.example.com/example_repo/issue/300">ISSUE-300</a>
|
465 | 465 | |
|
466 | 466 | If needed, more than one pattern can be specified by appending a unique suffix to |
|
467 | 467 | the variables. For example:: |
|
468 | 468 | |
|
469 | 469 | issue_pat_wiki = (?:wiki-)(.+) |
|
470 | 470 | issue_server_link_wiki = https://wiki.example.com/{id}
|
471 | 471 | issue_prefix_wiki = WIKI- |
|
472 | 472 | |
|
473 | 473 | With these settings, wiki pages can be referenced as wiki-some-id, and every |
|
474 | 474 | such reference will be transformed into: |
|
475 | 475 | |
|
476 | 476 | .. code-block:: html |
|
477 | 477 | |
|
478 | 478 | <a href="https://wiki.example.com/some-id">WIKI-some-id</a>
|
479 | 479 | |
|
480 | 480 | |
|
481 | 481 | Hook management |
|
482 | 482 | --------------- |
|
483 | 483 | |
|
484 | 484 | Hooks can be managed in similar way to that used in ``.hgrc`` files. |
|
485 | 485 | To manage hooks, choose *Admin > Settings > Hooks*. |
|
486 | 486 | |
|
487 | 487 | The built-in hooks cannot be modified, though they can be enabled or disabled in the *VCS* section. |
|
488 | 488 | |
|
489 | 489 | To add another custom hook simply fill in the first textbox with |
|
490 | 490 | ``<name>.<hook_type>`` and the second with the hook path. Example hooks |
|
491 | 491 | can be found in ``kallithea.lib.hooks``. |
|
492 | 492 | |
|
493 | 493 | |
|
494 | 494 | Changing default encoding |
|
495 | 495 | ------------------------- |
|
496 | 496 | |
|
497 | 497 | By default, Kallithea uses UTF-8 encoding. |
|
498 | 498 | This is configurable as ``default_encoding`` in the .ini file. |
|
499 | 499 | This affects many parts in Kallithea including user names, filenames, and |
|
500 | 500 | encoding of commit messages. In addition Kallithea can detect if the ``chardet`` |
|
501 | 501 | library is installed. If ``chardet`` is detected Kallithea will fall back to it
|
502 | 502 | when there are encode/decode errors. |
|
503 | 503 | |
|
504 | 504 | |
|
505 | 505 | Celery configuration |
|
506 | 506 | -------------------- |
|
507 | 507 | |
|
508 | 508 | Kallithea can use the distributed task queue system Celery_ to run tasks like |
|
509 | 509 | cloning repositories or sending emails. |
|
510 | 510 | |
|
511 | 511 | Kallithea will in most setups work perfectly fine out of the box (without |
|
512 | 512 | Celery), executing all tasks in the web server process. Some tasks can however |
|
513 | 513 | take some time to run and it can be better to run such tasks asynchronously in |
|
514 | 514 | a separate process so the web server can focus on serving web requests. |
|
515 | 515 | |
|
516 | 516 | For installation and configuration of Celery, see the `Celery documentation`_. |
|
517 | 517 | Note that Celery requires a message broker service like RabbitMQ_ (recommended) |
|
518 | 518 | or Redis_. |
|
519 | 519 | |
|
520 | 520 | The use of Celery is configured in the Kallithea ini configuration file. |
|
521 | 521 | To enable it, simply set:: |
|
522 | 522 | |
|
523 | 523 | use_celery = true |
|
524 | 524 | |
|
525 | 525 | and add or change the ``celery.*`` and ``broker.*`` configuration variables. |
|
526 | 526 | |
|
527 | 527 | Remember that the ini files use the format with '.' and not with '_' like |
|
528 | 528 | Celery. So for example setting `BROKER_HOST` in Celery means setting |
|
529 | 529 | `broker.host` in the configuration file. |
|
530 | 530 | |
|
531 | 531 | To start the Celery process, run:: |
|
532 | 532 | |
|
533 | 533 | paster celeryd <configfile.ini> |
|
534 | 534 | |
|
535 | 535 | .. note:: |
|
536 | 536 | Make sure you run this command from the same virtualenv, and with the same |
|
537 | 537 | user that Kallithea runs. |
|
538 | 538 | |
|
539 | 539 | |
|
540 | 540 | HTTPS support |
|
541 | 541 | ------------- |
|
542 | 542 | |
|
543 | 543 | Kallithea will by default generate URLs based on the WSGI environment. |
|
544 | 544 | |
|
545 | 545 | Alternatively, you can use some special configuration settings to control |
|
546 | 546 | directly which scheme/protocol Kallithea will use when generating URLs: |
|
547 | 547 | |
|
548 | 548 | - With ``https_fixup = true``, the scheme will be taken from the |
|
549 | 549 | ``X-Url-Scheme``, ``X-Forwarded-Scheme`` or ``X-Forwarded-Proto`` HTTP header |
|
550 | 550 | (default ``http``). |
|
551 | 551 | - With ``force_https = true`` the default will be ``https``. |
|
552 | 552 | - With ``use_htsts = true``, Kallithea will set ``Strict-Transport-Security`` when using https. |
|
553 | 553 | |
|
554 | 554 | |
|
555 | 555 | Nginx virtual host example |
|
556 | 556 | -------------------------- |
|
557 | 557 | |
|
558 | 558 | Sample config for Nginx using proxy: |
|
559 | 559 | |
|
560 | 560 | .. code-block:: nginx |
|
561 | 561 | |
|
562 | 562 | upstream kallithea { |
|
563 | 563 | server 127.0.0.1:5000; |
|
564 | 564 | # add more instances for load balancing |
|
565 | 565 | #server 127.0.0.1:5001; |
|
566 | 566 | #server 127.0.0.1:5002; |
|
567 | 567 | } |
|
568 | 568 | |
|
569 | 569 | ## gist alias |
|
570 | 570 | server { |
|
571 | 571 | listen 443; |
|
572 |
server_name gist. |
|
|
572 | server_name gist.example.com; | |
|
573 | 573 | access_log /var/log/nginx/gist.access.log; |
|
574 | 574 | error_log /var/log/nginx/gist.error.log; |
|
575 | 575 | |
|
576 | 576 | ssl on; |
|
577 | 577 | ssl_certificate gist.your.kallithea.server.crt; |
|
578 | 578 | ssl_certificate_key gist.your.kallithea.server.key; |
|
579 | 579 | |
|
580 | 580 | ssl_session_timeout 5m; |
|
581 | 581 | |
|
582 | 582 | ssl_protocols SSLv3 TLSv1; |
|
583 | 583 | ssl_ciphers DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:EDH-RSA-DES-CBC3-SHA:AES256-SHA:DES-CBC3-SHA:AES128-SHA:RC4-SHA:RC4-MD5; |
|
584 | 584 | ssl_prefer_server_ciphers on; |
|
585 | 585 | |
|
586 |
rewrite ^/(.+)$ https:// |
|
|
587 |
rewrite (.*) https:// |
|
|
586 | rewrite ^/(.+)$ https://kallithea.example.com/_admin/gists/$1; | |
|
587 | rewrite (.*) https://kallithea.example.com/_admin/gists; | |
|
588 | 588 | } |
|
589 | 589 | |
|
590 | 590 | server { |
|
591 | 591 | listen 443; |
|
592 |
server_name |
|
|
592 | server_name kallithea.example.com; | |
|
593 | 593 | access_log /var/log/nginx/kallithea.access.log; |
|
594 | 594 | error_log /var/log/nginx/kallithea.error.log; |
|
595 | 595 | |
|
596 | 596 | ssl on; |
|
597 | 597 | ssl_certificate your.kallithea.server.crt; |
|
598 | 598 | ssl_certificate_key your.kallithea.server.key; |
|
599 | 599 | |
|
600 | 600 | ssl_session_timeout 5m; |
|
601 | 601 | |
|
602 | 602 | ssl_protocols SSLv3 TLSv1; |
|
603 | 603 | ssl_ciphers DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:EDH-RSA-DES-CBC3-SHA:AES256-SHA:DES-CBC3-SHA:AES128-SHA:RC4-SHA:RC4-MD5; |
|
604 | 604 | ssl_prefer_server_ciphers on; |
|
605 | 605 | |
|
606 | 606 | ## uncomment root directive if you want to serve static files by nginx |
|
607 | 607 | ## requires static_files = false in .ini file |
|
608 | 608 | #root /path/to/installation/kallithea/public; |
|
609 | 609 | include /etc/nginx/proxy.conf; |
|
610 | 610 | location / { |
|
611 | 611 | try_files $uri @kallithea; |
|
612 | 612 | } |
|
613 | 613 | |
|
614 | 614 | location @kallithea { |
|
615 | 615 | proxy_pass http://127.0.0.1:5000; |
|
616 | 616 | } |
|
617 | 617 | |
|
618 | 618 | } |
|
619 | 619 | |
|
620 | 620 | Here's the proxy.conf. It's tuned so it will not time out on long
|
621 | 621 | pushes or large pushes:: |
|
622 | 622 | |
|
623 | 623 | proxy_redirect off; |
|
624 | 624 | proxy_set_header Host $host; |
|
625 | 625 | ## needed for container auth |
|
626 | 626 | #proxy_set_header REMOTE_USER $remote_user; |
|
627 | 627 | #proxy_set_header X-Forwarded-User $remote_user; |
|
628 | 628 | proxy_set_header X-Url-Scheme $scheme; |
|
629 | 629 | proxy_set_header X-Host $http_host; |
|
630 | 630 | proxy_set_header X-Real-IP $remote_addr; |
|
631 | 631 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; |
|
632 | 632 | proxy_set_header Proxy-host $proxy_host; |
|
633 | 633 | proxy_buffering off; |
|
634 | 634 | proxy_connect_timeout 7200; |
|
635 | 635 | proxy_send_timeout 7200; |
|
636 | 636 | proxy_read_timeout 7200; |
|
637 | 637 | proxy_buffers 8 32k; |
|
638 | 638 | client_max_body_size 1024m; |
|
639 | 639 | client_body_buffer_size 128k; |
|
640 | 640 | large_client_header_buffers 8 64k; |
|
641 | 641 | |
|
642 | 642 | |
|
643 | 643 | Apache virtual host reverse proxy example |
|
644 | 644 | ----------------------------------------- |
|
645 | 645 | |
|
646 | 646 | Here is a sample configuration file for Apache using proxy: |
|
647 | 647 | |
|
648 | 648 | .. code-block:: apache |
|
649 | 649 | |
|
650 | 650 | <VirtualHost *:80> |
|
651 |
ServerName |
|
|
651 | ServerName kallithea.example.com | |
|
652 | 652 | |
|
653 | 653 | <Proxy *> |
|
654 | 654 | # For Apache 2.4 and later: |
|
655 | 655 | Require all granted |
|
656 | 656 | |
|
657 | 657 | # For Apache 2.2 and earlier, instead use: |
|
658 | 658 | # Order allow,deny |
|
659 | 659 | # Allow from all |
|
660 | 660 | </Proxy> |
|
661 | 661 | |
|
662 | 662 | #important ! |
|
663 | 663 | #Directive to properly generate url (clone url) for pylons |
|
664 | 664 | ProxyPreserveHost On |
|
665 | 665 | |
|
666 | 666 | #kallithea instance |
|
667 | 667 | ProxyPass / http://127.0.0.1:5000/ |
|
668 | 668 | ProxyPassReverse / http://127.0.0.1:5000/ |
|
669 | 669 | |
|
670 | 670 | #to enable https use line below |
|
671 | 671 | #SetEnvIf X-Url-Scheme https HTTPS=1 |
|
672 | 672 | </VirtualHost> |
|
673 | 673 | |
|
674 | 674 | Additional tutorial |
|
675 | 675 | http://pylonsbook.com/en/1.1/deployment.html#using-apache-to-proxy-requests-to-pylons |
|
676 | 676 | |
|
677 | 677 | |
|
678 | 678 | Apache as subdirectory |
|
679 | 679 | ---------------------- |
|
680 | 680 | |
|
681 | 681 | Apache subdirectory part: |
|
682 | 682 | |
|
683 | 683 | .. code-block:: apache |
|
684 | 684 | |
|
685 | 685 | <Location /<someprefix> > |
|
686 | 686 | ProxyPass http://127.0.0.1:5000/<someprefix> |
|
687 | 687 | ProxyPassReverse http://127.0.0.1:5000/<someprefix> |
|
688 | 688 | SetEnvIf X-Url-Scheme https HTTPS=1 |
|
689 | 689 | </Location> |
|
690 | 690 | |
|
691 | 691 | Besides the regular apache setup you will need to add the following line |
|
692 | 692 | into ``[app:main]`` section of your .ini file:: |
|
693 | 693 | |
|
694 | 694 | filter-with = proxy-prefix |
|
695 | 695 | |
|
696 | 696 | Add the following at the end of the .ini file:: |
|
697 | 697 | |
|
698 | 698 | [filter:proxy-prefix] |
|
699 | 699 | use = egg:PasteDeploy#prefix |
|
700 | 700 | prefix = /<someprefix> |
|
701 | 701 | |
|
702 | 702 | then change ``<someprefix>`` into your chosen prefix |
|
703 | 703 | |
|
704 | 704 | |
|
705 | 705 | Apache with mod_wsgi |
|
706 | 706 | -------------------- |
|
707 | 707 | |
|
708 | 708 | Alternatively, Kallithea can be set up with Apache under mod_wsgi. For |
|
709 | 709 | that, you'll need to: |
|
710 | 710 | |
|
711 | 711 | - Install mod_wsgi. If using a Debian-based distro, you can install |
|
712 | 712 | the package libapache2-mod-wsgi:: |
|
713 | 713 | |
|
714 | 714 | aptitude install libapache2-mod-wsgi |
|
715 | 715 | |
|
716 | 716 | - Enable mod_wsgi:: |
|
717 | 717 | |
|
718 | 718 | a2enmod wsgi |
|
719 | 719 | |
|
720 | 720 | - Create a wsgi dispatch script, like the one below. Make sure you |
|
721 | 721 | check that the paths correctly point to where you installed Kallithea |
|
722 | 722 | and its Python Virtual Environment. |
|
723 | 723 | - Enable the ``WSGIScriptAlias`` directive for the WSGI dispatch script, |
|
724 | 724 | as in the following example. Once again, check the paths are |
|
725 | 725 | correctly specified. |
|
726 | 726 | |
|
727 | 727 | Here is a sample excerpt from an Apache Virtual Host configuration file: |
|
728 | 728 | |
|
729 | 729 | .. code-block:: apache |
|
730 | 730 | |
|
731 | 731 | WSGIDaemonProcess kallithea \ |
|
732 | 732 | processes=1 threads=4 \ |
|
733 | 733 | python-path=/srv/kallithea/pyenv/lib/python2.7/site-packages |
|
734 | 734 | WSGIScriptAlias / /srv/kallithea/dispatch.wsgi |
|
735 | 735 | WSGIPassAuthorization On |
|
736 | 736 | |
|
737 | 737 | Or if using a dispatcher WSGI script with proper virtualenv activation: |
|
738 | 738 | |
|
739 | 739 | .. code-block:: apache |
|
740 | 740 | |
|
741 | 741 | WSGIDaemonProcess kallithea processes=1 threads=4 |
|
742 | 742 | WSGIScriptAlias / /srv/kallithea/dispatch.wsgi |
|
743 | 743 | WSGIPassAuthorization On |
|
744 | 744 | |
|
745 | 745 | .. note:: |
|
746 | 746 | When running apache as root, please make sure it doesn't run Kallithea as |
|
747 | 747 | root, for example by adding: ``user=www-data group=www-data`` to the configuration.
|
748 | 748 | |
|
749 | 749 | .. note:: |
|
750 | 750 | If running Kallithea in multiprocess mode, |
|
751 | 751 | make sure you set ``instance_id = *`` in the configuration so each process |
|
752 | 752 | gets its own cache invalidation key.
|
753 | 753 | |
|
754 | 754 | Example WSGI dispatch script: |
|
755 | 755 | |
|
756 | 756 | .. code-block:: python |
|
757 | 757 | |
|
758 | 758 | import os |
|
759 | 759 | os.environ["HGENCODING"] = "UTF-8" |
|
760 | 760 | os.environ['PYTHON_EGG_CACHE'] = '/srv/kallithea/.egg-cache' |
|
761 | 761 | |
|
762 | 762 | # sometimes it's needed to set the current dir
|
763 | 763 | os.chdir('/srv/kallithea/') |
|
764 | 764 | |
|
765 | 765 | import site |
|
766 | 766 | site.addsitedir("/srv/kallithea/pyenv/lib/python2.7/site-packages") |
|
767 | 767 | |
|
768 | 768 | from paste.deploy import loadapp |
|
769 | 769 | from paste.script.util.logging_config import fileConfig |
|
770 | 770 | |
|
771 | 771 | fileConfig('/srv/kallithea/my.ini') |
|
772 | 772 | application = loadapp('config:/srv/kallithea/my.ini') |
|
773 | 773 | |
|
774 | 774 | Or using proper virtualenv activation: |
|
775 | 775 | |
|
776 | 776 | .. code-block:: python |
|
777 | 777 | |
|
778 | 778 | activate_this = '/srv/kallithea/venv/bin/activate_this.py' |
|
779 | 779 | execfile(activate_this, dict(__file__=activate_this)) |
|
780 | 780 | |
|
781 | 781 | import os |
|
782 | 782 | os.environ['HOME'] = '/srv/kallithea' |
|
783 | 783 | |
|
784 | 784 | ini = '/srv/kallithea/kallithea.ini' |
|
785 | 785 | from paste.script.util.logging_config import fileConfig |
|
786 | 786 | fileConfig(ini) |
|
787 | 787 | from paste.deploy import loadapp |
|
788 | 788 | application = loadapp('config:' + ini) |
|
789 | 789 | |
|
790 | 790 | |
|
791 | 791 | Other configuration files |
|
792 | 792 | ------------------------- |
|
793 | 793 | |
|
794 | 794 | A number of `example init.d scripts`__ can be found in |
|
795 | 795 | the ``init.d`` directory of the Kallithea source. |
|
796 | 796 | |
|
797 | 797 | .. __: https://kallithea-scm.org/repos/kallithea/files/tip/init.d/ . |
|
798 | 798 | |
|
799 | 799 | |
|
800 | 800 | .. _virtualenv: http://pypi.python.org/pypi/virtualenv |
|
801 | 801 | .. _python: http://www.python.org/ |
|
802 | 802 | .. _Mercurial: http://mercurial.selenic.com/ |
|
803 | 803 | .. _Celery: http://celeryproject.org/ |
|
804 | 804 | .. _Celery documentation: http://docs.celeryproject.org/en/latest/getting-started/index.html |
|
805 | 805 | .. _RabbitMQ: http://www.rabbitmq.com/ |
|
806 | 806 | .. _Redis: http://redis.io/ |
|
807 | 807 | .. _python-ldap: http://www.python-ldap.org/ |
|
808 | 808 | .. _mercurial-server: http://www.lshift.net/mercurial-server.html |
|
809 | 809 | .. _PublishingRepositories: http://mercurial.selenic.com/wiki/PublishingRepositories |
@@ -1,181 +1,181 b'' | |||
|
1 | 1 | .. _general: |
|
2 | 2 | |
|
3 | 3 | ======================= |
|
4 | 4 | General Kallithea usage |
|
5 | 5 | ======================= |
|
6 | 6 | |
|
7 | 7 | |
|
8 | 8 | Repository deletion |
|
9 | 9 | ------------------- |
|
10 | 10 | |
|
11 | 11 | Currently when an admin or owner deletes a repository, Kallithea does |
|
12 | 12 | not physically delete said repository from the filesystem, but instead |
|
13 | 13 | renames it in a special way so that it is not possible to push, clone |
|
14 | 14 | or access the repository. |
|
15 | 15 | |
|
16 | 16 | There is a special command for cleaning up such archived repositories:: |
|
17 | 17 | |
|
18 | 18 | paster cleanup-repos --older-than=30d my.ini |
|
19 | 19 | |
|
20 | 20 | This command scans for archived repositories that are older than |
|
21 | 21 | 30 days, displays them, and asks if you want to delete them (unless given |
|
22 | 22 | the ``--dont-ask`` flag). If you host a large amount of repositories with |
|
23 | 23 | forks that are constantly being deleted, it is recommended that you run this |
|
24 | 24 | command via crontab. |
|
25 | 25 | |
|
26 | 26 | It is worth noting that even if someone is given administrative access to |
|
27 | 27 | Kallithea and deletes a repository, you can easily restore such an action by |
|
28 | 28 | renaming the repository directory, removing the ``rm__<date>`` prefix. |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | File view: follow current branch |
|
32 | 32 | -------------------------------- |
|
33 | 33 | |
|
34 | 34 | In the file view, left and right arrows allow to jump to the previous and next |
|
35 | 35 | revision. Depending on the way revisions were created in the repository, this |
|
36 | 36 | could jump to a different branch. When the checkbox ``Follow current branch`` |
|
37 | 37 | is checked, these arrows will only jump to revisions on the same branch as the |
|
38 | 38 | currently visible revision. So for example, if someone is viewing files in the |
|
39 | 39 | ``beta`` branch and marks the `Follow current branch` checkbox, the < and > |
|
40 | 40 | arrows will only show revisions on the ``beta`` branch. |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | Changelog features |
|
44 | 44 | ------------------ |
|
45 | 45 | |
|
46 | 46 | The core feature of a repository's ``changelog`` page is to show the revisions |
|
47 | 47 | in a repository. However, there are several other features available from the |
|
48 | 48 | changelog. |
|
49 | 49 | |
|
50 | 50 | Branch filter |
|
51 | 51 | By default, the changelog shows revisions from all branches in the |
|
52 | 52 | repository. Use the branch filter to restrict to a given branch. |
|
53 | 53 | |
|
54 | 54 | Viewing a changeset |
|
55 | 55 | A particular changeset can be opened by clicking on either the changeset |
|
56 | 56 | hash or the commit message, or by ticking the checkbox and clicking the |
|
57 | 57 | ``Show selected changeset`` button at the top. |
|
58 | 58 | |
|
59 | 59 | Viewing all changes between two changesets |
|
60 | 60 | To get a list of all changesets between two selected changesets, along with |
|
61 | 61 | the changes in each one of them, tick the checkboxes of the first and |
|
62 | 62 | last changeset in the desired range and click the ``Show selected changesets`` |
|
63 | 63 | button at the top. You can only show the range between the first and last |
|
64 | 64 | checkbox (no cherry-picking). |
|
65 | 65 | |
|
66 | 66 | From that page, you can proceed to viewing the overall delta between the |
|
67 | 67 | selected changesets, by clicking the ``Compare revisions`` button. |
|
68 | 68 | |
|
69 | 69 | Creating a pull request |
|
70 | 70 | You can create a new pull request for the changes of a particular changeset |
|
71 | 71 | (and its ancestors) by selecting it and clicking the ``Open new pull request |
|
72 | 72 | for selected changesets`` button. |
|
73 | 73 | |
|
74 | 74 | |
|
75 | 75 | Permanent repository URLs |
|
76 | 76 | ------------------------- |
|
77 | 77 | |
|
78 | 78 | Due to the complicated nature of repository grouping, URLs of repositories |
|
79 | 79 | can often change. For example, a repository originally accessible from:: |
|
80 | 80 | |
|
81 | http://example.com/repo_name | |
|
81 | http://kallithea.example.com/repo_name | |
|
82 | 82 | |
|
83 | 83 | would get a new URL after moving it to test_group:: |
|
84 | 84 | |
|
85 | http://example.com/test_group/repo_name | |
|
85 | http://kallithea.example.com/test_group/repo_name | |
|
86 | 86 | |
|
87 | 87 | Such moving of a repository to a group can be an issue for build systems and |
|
88 | 88 | other scripts where the repository paths are hardcoded. To mitigate this, |
|
89 | 89 | Kallithea provides permanent URLs using the repository ID prefixed with an |
|
90 | 90 | underscore. In all Kallithea URLs, for example those for the changelog and the |
|
91 | 91 | file view, a repository name can be replaced by this ``_ID`` string. Since IDs |
|
92 | 92 | are always the same, moving the repository to a different group will not affect |
|
93 | 93 | such URLs. |
|
94 | 94 | |
|
95 | 95 | In the example, the repository could also be accessible as:: |
|
96 | 96 | |
|
97 | http://example.com/_<ID> | |
|
97 | http://kallithea.example.com/_<ID> | |
|
98 | 98 | |
|
99 | 99 | The ID of a given repository can be shown from the repository ``Summary`` page, |
|
100 | 100 | by selecting the ``Show by ID`` button next to ``Clone URL``. |
|
101 | 101 | |
|
102 | 102 | |
|
103 | 103 | Email notifications |
|
104 | 104 | ------------------- |
|
105 | 105 | |
|
106 | 106 | With email settings properly configured in the Kallithea |
|
107 | 107 | configuration file, Kallithea will send emails on user registration and when |
|
108 | 108 | errors occur. |
|
109 | 109 | |
|
110 | 110 | Emails are also sent for comments on changesets. In this case, an email is sent |
|
111 | 111 | to the committer of the changeset (if known to Kallithea), to all reviewers of |
|
112 | 112 | the pull request (if applicable) and to all people mentioned in the comment |
|
113 | 113 | using @mention notation. |
|
114 | 114 | |
|
115 | 115 | |
|
116 | 116 | Trending source files |
|
117 | 117 | --------------------- |
|
118 | 118 | |
|
119 | 119 | Trending source files are calculated based on a predefined dictionary of known |
|
120 | 120 | types and extensions. If an extension is missing or you would like to scan |
|
121 | 121 | custom files, it is possible to extend the ``LANGUAGES_EXTENSIONS_MAP`` |
|
122 | 122 | dictionary located in ``kallithea/config/conf.py`` with new types. |
|
123 | 123 | |
|
124 | 124 | |
|
125 | 125 | Cloning remote repositories |
|
126 | 126 | --------------------------- |
|
127 | 127 | |
|
128 | 128 | Kallithea has the ability to clone repositories from given remote locations. |
|
129 | 129 | Currently it supports the following options: |
|
130 | 130 | |
|
131 | 131 | - hg -> hg clone |
|
132 | 132 | - svn -> hg clone |
|
133 | 133 | - git -> git clone |
|
134 | 134 | |
|
135 | 135 | .. note:: svn -> hg cloning requires the ``hgsubversion`` library to be |
|
136 | 136 | installed. |
|
137 | 137 | |
|
138 | 138 | If you need to clone repositories that are protected via basic authentication, |
|
139 | 139 | you can pass the credentials in the URL, e.g. |
|
140 |
``http://user:passw@remote. |
|
|
140 | ``http://user:passw@remote.example.com/repo``. Kallithea will then try to login and | |
|
141 | 141 | clone using the given credentials. Please note that the given credentials will |
|
142 | 142 | be stored as plaintext inside the database. However, the authentication |
|
143 | 143 | information will not be shown in the clone URL on the summary page. |
|
144 | 144 | |
|
145 | 145 | |
|
146 | 146 | Specific features configurable in the Admin settings |
|
147 | 147 | ---------------------------------------------------- |
|
148 | 148 | |
|
149 | 149 | In general, the Admin settings should be self-explanatory and will not be |
|
150 | 150 | described in more detail in this documentation. However, there are a few |
|
151 | 151 | features that merit further explanation. |
|
152 | 152 | |
|
153 | 153 | Repository extra fields |
|
154 | 154 | ~~~~~~~~~~~~~~~~~~~~~~~ |
|
155 | 155 | |
|
156 | 156 | In the *Visual* tab, there is an option "Use repository extra |
|
157 | 157 | fields", which allows to set custom fields for each repository in the system. |
|
158 | 158 | |
|
159 | 159 | Once enabled site-wide, the custom fields can be edited per-repository under |
|
160 | 160 | *Options* | *Settings* | *Extra Fields*. |
|
161 | 161 | |
|
162 | 162 | Example usage of such fields would be to define company-specific information |
|
163 | 163 | into repositories, e.g., defining a ``repo_manager`` key that would give info |
|
164 | 164 | about a manager of each repository. There's no limit for adding custom fields. |
|
165 | 165 | Newly created fields are accessible via the API. |
|
166 | 166 | |
|
167 | 167 | Meta tagging |
|
168 | 168 | ~~~~~~~~~~~~ |
|
169 | 169 | |
|
170 | 170 | In the *Visual* tab, option "Stylify recognised meta tags" will cause Kallithea |
|
171 | 171 | to turn certain text fragments in repository and repository group |
|
172 | 172 | descriptions into colored tags. Currently recognised tags are:: |
|
173 | 173 | |
|
174 | 174 | [featured] |
|
175 | 175 | [stale] |
|
176 | 176 | [dead] |
|
177 | 177 | [lang => lang] |
|
178 | 178 | [license => License] |
|
179 | 179 | [requires => Repo] |
|
180 | 180 | [recommends => Repo] |
|
181 | 181 | [see => URI] |
@@ -1,125 +1,125 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.bin.kallithea_api |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Api CLI client for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Jun 3, 2012 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import sys |
|
29 | 29 | import argparse |
|
30 | 30 | |
|
31 | 31 | from kallithea.bin.base import json, api_call, RcConf, FORMAT_JSON, FORMAT_PRETTY |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | def argparser(argv): |
|
35 | 35 | usage = ( |
|
36 | 36 | "kallithea-api [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] " |
|
37 | 37 | "[--config=CONFIG] [--save-config] " |
|
38 | 38 | "METHOD <key:val> <key2:val> ...\n" |
|
39 |
"Create config file: kallithea-api --apikey=<key> --apihost=http:// |
|
|
39 | "Create config file: kallithea-api --apikey=<key> --apihost=http://kallithea.example.com --save-config" | |
|
40 | 40 | ) |
|
41 | 41 | |
|
42 | 42 | parser = argparse.ArgumentParser(description='Kallithea API cli', |
|
43 | 43 | usage=usage) |
|
44 | 44 | |
|
45 | 45 | ## config |
|
46 | 46 | group = parser.add_argument_group('config') |
|
47 | 47 | group.add_argument('--apikey', help='api access key') |
|
48 | 48 | group.add_argument('--apihost', help='api host') |
|
49 | 49 | group.add_argument('--config', help='config file') |
|
50 | 50 | group.add_argument('--save-config', action='store_true', help='save the given config into a file') |
|
51 | 51 | |
|
52 | 52 | group = parser.add_argument_group('API') |
|
53 | 53 | group.add_argument('method', metavar='METHOD', nargs='?', type=str, default=None, |
|
54 | 54 | help='API method name to call followed by key:value attributes', |
|
55 | 55 | ) |
|
56 | 56 | group.add_argument('--format', dest='format', type=str, |
|
57 | 57 | help='output format default: `%s` can ' |
|
58 | 58 | 'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON), |
|
59 | 59 | default=FORMAT_PRETTY |
|
60 | 60 | ) |
|
61 | 61 | args, other = parser.parse_known_args() |
|
62 | 62 | return parser, args, other |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | def main(argv=None): |
|
66 | 66 | """ |
|
67 | 67 | Main execution function for cli |
|
68 | 68 | |
|
69 | 69 | :param argv: |
|
70 | 70 | """ |
|
71 | 71 | if argv is None: |
|
72 | 72 | argv = sys.argv |
|
73 | 73 | |
|
74 | 74 | conf = None |
|
75 | 75 | parser, args, other = argparser(argv) |
|
76 | 76 | |
|
77 | 77 | api_credentials_given = (args.apikey and args.apihost) |
|
78 | 78 | if args.save_config: |
|
79 | 79 | if not api_credentials_given: |
|
80 | 80 | raise parser.error('--save-config requires --apikey and --apihost') |
|
81 | 81 | conf = RcConf(config_location=args.config, |
|
82 | 82 | autocreate=True, config={'apikey': args.apikey, |
|
83 | 83 | 'apihost': args.apihost}) |
|
84 | 84 | sys.exit() |
|
85 | 85 | |
|
86 | 86 | if not conf: |
|
87 | 87 | conf = RcConf(config_location=args.config, autoload=True) |
|
88 | 88 | if not conf: |
|
89 | 89 | if not api_credentials_given: |
|
90 | 90 | parser.error('Could not find config file and missing ' |
|
91 | 91 | '--apikey or --apihost in params') |
|
92 | 92 | |
|
93 | 93 | apikey = args.apikey or conf['apikey'] |
|
94 | 94 | apihost = args.apihost or conf['apihost'] |
|
95 | 95 | method = args.method |
|
96 | 96 | |
|
97 | 97 | # if we don't have method here it's an error |
|
98 | 98 | if not method: |
|
99 | 99 | parser.error('Please specify method name') |
|
100 | 100 | |
|
101 | 101 | try: |
|
102 | 102 | margs = dict(map(lambda s: s.split(':', 1), other)) |
|
103 | 103 | except ValueError: |
|
104 | 104 | sys.stderr.write('Error parsing arguments \n') |
|
105 | 105 | sys.exit() |
|
106 | 106 | if args.format == FORMAT_PRETTY: |
|
107 | 107 | print 'Calling method %s => %s' % (method, apihost) |
|
108 | 108 | |
|
109 | 109 | json_resp = api_call(apikey, apihost, method, **margs) |
|
110 | 110 | error_prefix = '' |
|
111 | 111 | if json_resp['error']: |
|
112 | 112 | error_prefix = 'ERROR:' |
|
113 | 113 | json_data = json_resp['error'] |
|
114 | 114 | else: |
|
115 | 115 | json_data = json_resp['result'] |
|
116 | 116 | if args.format == FORMAT_JSON: |
|
117 | 117 | print json.dumps(json_data) |
|
118 | 118 | elif args.format == FORMAT_PRETTY: |
|
119 | 119 | print 'Server response \n%s%s' % ( |
|
120 | 120 | error_prefix, json.dumps(json_data, indent=4, sort_keys=True) |
|
121 | 121 | ) |
|
122 | 122 | return 0 |
|
123 | 123 | |
|
124 | 124 | if __name__ == '__main__': |
|
125 | 125 | sys.exit(main(sys.argv)) |
@@ -1,172 +1,172 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.bin.kallithea_gist |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Gist CLI client for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: May 9, 2013 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import os |
|
29 | 29 | import sys |
|
30 | 30 | import stat |
|
31 | 31 | import argparse |
|
32 | 32 | import fileinput |
|
33 | 33 | |
|
34 | 34 | from kallithea.bin.base import json, api_call, RcConf, FORMAT_JSON, FORMAT_PRETTY |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | def argparser(argv): |
|
38 | 38 | usage = ( |
|
39 | 39 | "kallithea-gist [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] " |
|
40 | 40 | "[--config=CONFIG] [--save-config] [GIST OPTIONS] " |
|
41 | 41 | "[filename or stdin use - for terminal stdin ]\n" |
|
42 |
"Create config file: kallithea-gist --apikey=<key> --apihost=http:// |
|
|
42 | "Create config file: kallithea-gist --apikey=<key> --apihost=http://kallithea.example.com --save-config" | |
|
43 | 43 | ) |
|
44 | 44 | |
|
45 | 45 | parser = argparse.ArgumentParser(description='Kallithea Gist cli', |
|
46 | 46 | usage=usage) |
|
47 | 47 | |
|
48 | 48 | ## config |
|
49 | 49 | group = parser.add_argument_group('config') |
|
50 | 50 | group.add_argument('--apikey', help='api access key') |
|
51 | 51 | group.add_argument('--apihost', help='api host') |
|
52 | 52 | group.add_argument('--config', help='config file path DEFAULT: ~/.config/kallithea') |
|
53 | 53 | group.add_argument('--save-config', action='store_true', |
|
54 | 54 | help='save the given config into a file') |
|
55 | 55 | |
|
56 | 56 | group = parser.add_argument_group('GIST') |
|
57 | 57 | group.add_argument('-p', '--private', action='store_true', |
|
58 | 58 | help='create private Gist') |
|
59 | 59 | group.add_argument('-f', '--filename', |
|
60 | 60 | help='set uploaded gist filename, ' |
|
61 | 61 | 'also defines syntax highlighting') |
|
62 | 62 | group.add_argument('-d', '--description', help='Gist description') |
|
63 | 63 | group.add_argument('-l', '--lifetime', metavar='MINUTES', |
|
64 | 64 | help='gist lifetime in minutes, -1 (DEFAULT) is forever') |
|
65 | 65 | group.add_argument('--format', dest='format', type=str, |
|
66 | 66 | help='output format DEFAULT: `%s` can ' |
|
67 | 67 | 'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON), |
|
68 | 68 | default=FORMAT_PRETTY |
|
69 | 69 | ) |
|
70 | 70 | args, other = parser.parse_known_args() |
|
71 | 71 | return parser, args, other |
|
72 | 72 | |
|
73 | 73 | |
|
74 | 74 | def _run(argv): |
|
75 | 75 | conf = None |
|
76 | 76 | parser, args, other = argparser(argv) |
|
77 | 77 | |
|
78 | 78 | api_credentials_given = (args.apikey and args.apihost) |
|
79 | 79 | if args.save_config: |
|
80 | 80 | if not api_credentials_given: |
|
81 | 81 | raise parser.error('--save-config requires --apikey and --apihost') |
|
82 | 82 | conf = RcConf(config_location=args.config, |
|
83 | 83 | autocreate=True, config={'apikey': args.apikey, |
|
84 | 84 | 'apihost': args.apihost}) |
|
85 | 85 | sys.exit() |
|
86 | 86 | |
|
87 | 87 | if not conf: |
|
88 | 88 | conf = RcConf(config_location=args.config, autoload=True) |
|
89 | 89 | if not conf: |
|
90 | 90 | if not api_credentials_given: |
|
91 | 91 | parser.error('Could not find config file and missing ' |
|
92 | 92 | '--apikey or --apihost in params') |
|
93 | 93 | |
|
94 | 94 | apikey = args.apikey or conf['apikey'] |
|
95 | 95 | host = args.apihost or conf['apihost'] |
|
96 | 96 | DEFAULT_FILENAME = 'gistfile1.txt' |
|
97 | 97 | if other: |
|
98 | 98 | # skip multifiles for now |
|
99 | 99 | filename = other[0] |
|
100 | 100 | if filename == '-': |
|
101 | 101 | filename = DEFAULT_FILENAME |
|
102 | 102 | gist_content = '' |
|
103 | 103 | for line in fileinput.input('-'): |
|
104 | 104 | gist_content += line |
|
105 | 105 | else: |
|
106 | 106 | with open(filename, 'rb') as f: |
|
107 | 107 | gist_content = f.read() |
|
108 | 108 | |
|
109 | 109 | else: |
|
110 | 110 | filename = DEFAULT_FILENAME |
|
111 | 111 | gist_content = None |
|
112 | 112 | # little bit hacky but cross platform check where the |
|
113 | 113 | # stdin comes from we skip the terminal case it can be handled by '-' |
|
114 | 114 | mode = os.fstat(0).st_mode |
|
115 | 115 | if stat.S_ISFIFO(mode): |
|
116 | 116 | # "stdin is piped" |
|
117 | 117 | gist_content = sys.stdin.read() |
|
118 | 118 | elif stat.S_ISREG(mode): |
|
119 | 119 | # "stdin is redirected" |
|
120 | 120 | gist_content = sys.stdin.read() |
|
121 | 121 | else: |
|
122 | 122 | # "stdin is terminal" |
|
123 | 123 | pass |
|
124 | 124 | |
|
125 | 125 | # make sure we don't upload binary stuff |
|
126 | 126 | if gist_content and '\0' in gist_content: |
|
127 | 127 | raise Exception('Error: binary files upload is not possible') |
|
128 | 128 | |
|
129 | 129 | filename = os.path.basename(args.filename or filename) |
|
130 | 130 | if gist_content: |
|
131 | 131 | files = { |
|
132 | 132 | filename: { |
|
133 | 133 | 'content': gist_content, |
|
134 | 134 | 'lexer': None |
|
135 | 135 | } |
|
136 | 136 | } |
|
137 | 137 | |
|
138 | 138 | margs = dict( |
|
139 | 139 | lifetime=args.lifetime, |
|
140 | 140 | description=args.description, |
|
141 | 141 | gist_type='private' if args.private else 'public', |
|
142 | 142 | files=files |
|
143 | 143 | ) |
|
144 | 144 | |
|
145 | 145 | json_data = api_call(apikey, host, 'create_gist', **margs)['result'] |
|
146 | 146 | if args.format == FORMAT_JSON: |
|
147 | 147 | print json.dumps(json_data) |
|
148 | 148 | elif args.format == FORMAT_PRETTY: |
|
149 | 149 | print json_data |
|
150 | 150 | print 'Created %s gist %s' % (json_data['gist']['type'], |
|
151 | 151 | json_data['gist']['url']) |
|
152 | 152 | return 0 |
|
153 | 153 | |
|
154 | 154 | |
|
155 | 155 | def main(argv=None): |
|
156 | 156 | """ |
|
157 | 157 | Main execution function for cli |
|
158 | 158 | |
|
159 | 159 | :param argv: |
|
160 | 160 | """ |
|
161 | 161 | if argv is None: |
|
162 | 162 | argv = sys.argv |
|
163 | 163 | |
|
164 | 164 | try: |
|
165 | 165 | return _run(argv) |
|
166 | 166 | except Exception as e: |
|
167 | 167 | print e |
|
168 | 168 | return 1 |
|
169 | 169 | |
|
170 | 170 | |
|
171 | 171 | if __name__ == '__main__': |
|
172 | 172 | sys.exit(main(sys.argv)) |
@@ -1,11 +1,11 b'' | |||
|
1 | 1 | [default] |
|
2 |
api_url = http:// |
|
|
2 | api_url = http://kallithea.example.com/_admin/api | |
|
3 | 3 | api_user = admin |
|
4 | 4 | api_key = XXXXXXXXXXXX |
|
5 | 5 | |
|
6 |
ldap_uri = ldap:// |
|
|
7 |
ldap_user = cn=kallithea, |
|
|
6 | ldap_uri = ldap://ldap.example.com:389 | |
|
7 | ldap_user = cn=kallithea,dc=example,dc=com | |
|
8 | 8 | ldap_key = XXXXXXXXX |
|
9 |
base_dn = dc= |
|
|
9 | base_dn = dc=example,dc=com | |
|
10 | 10 | |
|
11 | 11 | sync_users = True No newline at end of file |
@@ -1,591 +1,591 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | <%text>################################################################################</%text> |
|
3 | 3 | <%text>################################################################################</%text> |
|
4 | 4 | # Kallithea - config file generated with kallithea-config # |
|
5 | 5 | <%text>################################################################################</%text> |
|
6 | 6 | <%text>################################################################################</%text> |
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | 10 | pdebug = false |
|
11 | 11 | |
|
12 | 12 | <%text>################################################################################</%text> |
|
13 | 13 | <%text>## Email settings ##</%text> |
|
14 | 14 | <%text>## ##</%text> |
|
15 | 15 | <%text>## Refer to the documentation ("Email settings") for more details. ##</%text> |
|
16 | 16 | <%text>## ##</%text> |
|
17 | 17 | <%text>## It is recommended to use a valid sender address that passes access ##</%text> |
|
18 | 18 | <%text>## validation and spam filtering in mail servers. ##</%text> |
|
19 | 19 | <%text>################################################################################</%text> |
|
20 | 20 | |
|
21 | 21 | <%text>## 'From' header for application emails. You can optionally add a name.</%text> |
|
22 | 22 | <%text>## Default:</%text> |
|
23 | 23 | #app_email_from = Kallithea |
|
24 | 24 | <%text>## Examples:</%text> |
|
25 | 25 | #app_email_from = Kallithea <kallithea-noreply@example.com> |
|
26 | 26 | #app_email_from = kallithea-noreply@example.com |
|
27 | 27 | |
|
28 | 28 | <%text>## Subject prefix for application emails.</%text> |
|
29 | 29 | <%text>## A space between this prefix and the real subject is automatically added.</%text> |
|
30 | 30 | <%text>## Default:</%text> |
|
31 | 31 | #email_prefix = |
|
32 | 32 | <%text>## Example:</%text> |
|
33 | 33 | #email_prefix = [Kallithea] |
|
34 | 34 | |
|
35 | 35 | <%text>## Recipients for error emails and fallback recipients of application mails.</%text> |
|
36 | 36 | <%text>## Multiple addresses can be specified, space-separated.</%text> |
|
37 | 37 | <%text>## Only addresses are allowed, do not add any name part.</%text> |
|
38 | 38 | <%text>## Default:</%text> |
|
39 | 39 | #email_to = |
|
40 | 40 | <%text>## Examples:</%text> |
|
41 | 41 | #email_to = admin@example.com |
|
42 | 42 | #email_to = admin@example.com another_admin@example.com |
|
43 | 43 | |
|
44 | 44 | <%text>## 'From' header for error emails. You can optionally add a name.</%text> |
|
45 | 45 | <%text>## Default:</%text> |
|
46 | 46 | #error_email_from = pylons@yourapp.com |
|
47 | 47 | <%text>## Examples:</%text> |
|
48 | 48 | #error_email_from = Kallithea Errors <kallithea-noreply@example.com> |
|
49 | 49 | #error_email_from = paste_error@example.com |
|
50 | 50 | |
|
51 | 51 | <%text>## SMTP server settings</%text> |
|
52 | 52 | <%text>## Only smtp_server is mandatory. All other settings take the specified default</%text> |
|
53 | 53 | <%text>## values.</%text> |
|
54 |
#smtp_server = |
|
|
54 | #smtp_server = smtp.example.com | |
|
55 | 55 | #smtp_username = |
|
56 | 56 | #smtp_password = |
|
57 | 57 | #smtp_port = 25 |
|
58 | 58 | #smtp_use_tls = false |
|
59 | 59 | #smtp_use_ssl = false |
|
60 | 60 | <%text>## SMTP authentication parameters to use (e.g. LOGIN PLAIN CRAM-MD5, etc.).</%text> |
|
61 | 61 | <%text>## If empty, use any of the authentication parameters supported by the server.</%text> |
|
62 | 62 | #smtp_auth = |
|
63 | 63 | |
|
64 | 64 | [server:main] |
|
65 | 65 | %if http_server == 'paste': |
|
66 | 66 | <%text>## PASTE ##</%text> |
|
67 | 67 | use = egg:Paste#http |
|
68 | 68 | <%text>## nr of worker threads to spawn</%text> |
|
69 | 69 | threadpool_workers = 5 |
|
70 | 70 | <%text>## max request before thread respawn</%text> |
|
71 | 71 | threadpool_max_requests = 10 |
|
72 | 72 | <%text>## option to use threads of process</%text> |
|
73 | 73 | use_threadpool = true |
|
74 | 74 | |
|
75 | 75 | %elif http_server == 'waitress': |
|
76 | 76 | <%text>## WAITRESS ##</%text> |
|
77 | 77 | use = egg:waitress#main |
|
78 | 78 | <%text>## number of worker threads</%text> |
|
79 | 79 | threads = 5 |
|
80 | 80 | <%text>## MAX BODY SIZE 100GB</%text> |
|
81 | 81 | max_request_body_size = 107374182400 |
|
82 | 82 | <%text>## use poll instead of select, fixes fd limits, may not work on old</%text> |
|
83 | 83 | <%text>## windows systems.</%text> |
|
84 | 84 | #asyncore_use_poll = True |
|
85 | 85 | |
|
86 | 86 | %elif http_server == 'gunicorn': |
|
87 | 87 | <%text>## GUNICORN ##</%text> |
|
88 | 88 | use = egg:gunicorn#main |
|
89 | 89 | <%text>## number of process workers. You must set `instance_id = *` when this option</%text> |
|
90 | 90 | <%text>## is set to more than one worker</%text> |
|
91 | 91 | workers = 1 |
|
92 | 92 | <%text>## process name</%text> |
|
93 | 93 | proc_name = kallithea |
|
94 | 94 | <%text>## type of worker class, one of sync, eventlet, gevent, tornado</%text> |
|
95 | 95 | <%text>## recommended for bigger setup is using of of other than sync one</%text> |
|
96 | 96 | worker_class = sync |
|
97 | 97 | max_requests = 1000 |
|
98 | 98 | <%text>## ammount of time a worker can handle request before it gets killed and</%text> |
|
99 | 99 | <%text>## restarted</%text> |
|
100 | 100 | timeout = 3600 |
|
101 | 101 | |
|
102 | 102 | %elif http_server == 'uwsgi': |
|
103 | 103 | <%text>## UWSGI ##</%text> |
|
104 | 104 | <%text>## run with uwsgi --ini-paste-logged <inifile.ini></%text> |
|
105 | 105 | [uwsgi] |
|
106 | 106 | socket = /tmp/uwsgi.sock |
|
107 | 107 | master = true |
|
108 | 108 | http = 127.0.0.1:5000 |
|
109 | 109 | |
|
110 | 110 | <%text>## set as deamon and redirect all output to file</%text> |
|
111 | 111 | #daemonize = ./uwsgi_kallithea.log |
|
112 | 112 | |
|
113 | 113 | <%text>## master process PID</%text> |
|
114 | 114 | pidfile = ./uwsgi_kallithea.pid |
|
115 | 115 | |
|
116 | 116 | <%text>## stats server with workers statistics, use uwsgitop</%text> |
|
117 | 117 | <%text>## for monitoring, `uwsgitop 127.0.0.1:1717`</%text> |
|
118 | 118 | stats = 127.0.0.1:1717 |
|
119 | 119 | memory-report = true |
|
120 | 120 | |
|
121 | 121 | <%text>## log 5XX errors</%text> |
|
122 | 122 | log-5xx = true |
|
123 | 123 | |
|
124 | 124 | <%text>## Set the socket listen queue size.</%text> |
|
125 | 125 | listen = 256 |
|
126 | 126 | |
|
127 | 127 | <%text>## Gracefully Reload workers after the specified amount of managed requests</%text> |
|
128 | 128 | <%text>## (avoid memory leaks).</%text> |
|
129 | 129 | max-requests = 1000 |
|
130 | 130 | |
|
131 | 131 | <%text>## enable large buffers</%text> |
|
132 | 132 | buffer-size = 65535 |
|
133 | 133 | |
|
134 | 134 | <%text>## socket and http timeouts ##</%text> |
|
135 | 135 | http-timeout = 3600 |
|
136 | 136 | socket-timeout = 3600 |
|
137 | 137 | |
|
138 | 138 | <%text>## Log requests slower than the specified number of milliseconds.</%text> |
|
139 | 139 | log-slow = 10 |
|
140 | 140 | |
|
141 | 141 | <%text>## Exit if no app can be loaded.</%text> |
|
142 | 142 | need-app = true |
|
143 | 143 | |
|
144 | 144 | <%text>## Set lazy mode (load apps in workers instead of master).</%text> |
|
145 | 145 | lazy = true |
|
146 | 146 | |
|
147 | 147 | <%text>## scaling ##</%text> |
|
148 | 148 | <%text>## set cheaper algorithm to use, if not set default will be used</%text> |
|
149 | 149 | cheaper-algo = spare |
|
150 | 150 | |
|
151 | 151 | <%text>## minimum number of workers to keep at all times</%text> |
|
152 | 152 | cheaper = 1 |
|
153 | 153 | |
|
154 | 154 | <%text>## number of workers to spawn at startup</%text> |
|
155 | 155 | cheaper-initial = 1 |
|
156 | 156 | |
|
157 | 157 | <%text>## maximum number of workers that can be spawned</%text> |
|
158 | 158 | workers = 4 |
|
159 | 159 | |
|
160 | 160 | <%text>## how many workers should be spawned at a time</%text> |
|
161 | 161 | cheaper-step = 1 |
|
162 | 162 | |
|
163 | 163 | %endif |
|
164 | 164 | <%text>## COMMON ##</%text> |
|
165 | 165 | host = ${host} |
|
166 | 166 | port = ${port} |
|
167 | 167 | |
|
168 | 168 | <%text>## middleware for hosting the WSGI application under a URL prefix</%text> |
|
169 | 169 | #[filter:proxy-prefix] |
|
170 | 170 | #use = egg:PasteDeploy#prefix |
|
171 | 171 | #prefix = /<your-prefix> |
|
172 | 172 | |
|
173 | 173 | [app:main] |
|
174 | 174 | use = egg:kallithea |
|
175 | 175 | <%text>## enable proxy prefix middleware</%text> |
|
176 | 176 | #filter-with = proxy-prefix |
|
177 | 177 | |
|
178 | 178 | full_stack = true |
|
179 | 179 | static_files = true |
|
180 | 180 | <%text>## Available Languages:</%text> |
|
181 | 181 | <%text>## cs de fr hu ja nl_BE pl pt_BR ru sk zh_CN zh_TW</%text> |
|
182 | 182 | lang = |
|
183 | 183 | cache_dir = ${here}/data |
|
184 | 184 | index_dir = ${here}/data/index |
|
185 | 185 | |
|
186 | 186 | <%text>## perform a full repository scan on each server start, this should be</%text> |
|
187 | 187 | <%text>## set to false after first startup, to allow faster server restarts.</%text> |
|
188 | 188 | initial_repo_scan = false |
|
189 | 189 | |
|
190 | 190 | <%text>## uncomment and set this path to use archive download cache</%text> |
|
191 | 191 | archive_cache_dir = ${here}/tarballcache |
|
192 | 192 | |
|
193 | 193 | <%text>## change this to unique ID for security</%text> |
|
194 | 194 | app_instance_uuid = ${uuid()} |
|
195 | 195 | |
|
196 | 196 | <%text>## cut off limit for large diffs (size in bytes)</%text> |
|
197 | 197 | cut_off_limit = 256000 |
|
198 | 198 | |
|
199 | 199 | <%text>## use cache version of scm repo everywhere</%text> |
|
200 | 200 | vcs_full_cache = true |
|
201 | 201 | |
|
202 | 202 | <%text>## force https in Kallithea, fixes https redirects, assumes it's always https</%text> |
|
203 | 203 | force_https = false |
|
204 | 204 | |
|
205 | 205 | <%text>## use Strict-Transport-Security headers</%text> |
|
206 | 206 | use_htsts = false |
|
207 | 207 | |
|
208 | 208 | <%text>## number of commits stats will parse on each iteration</%text> |
|
209 | 209 | commit_parse_limit = 25 |
|
210 | 210 | |
|
211 | 211 | <%text>## path to git executable</%text> |
|
212 | 212 | git_path = git |
|
213 | 213 | |
|
214 | 214 | <%text>## git rev filter option, --all is the default filter, if you need to</%text> |
|
215 | 215 | <%text>## hide all refs in changelog switch this to --branches --tags</%text> |
|
216 | 216 | #git_rev_filter = --branches --tags |
|
217 | 217 | |
|
218 | 218 | <%text>## RSS feed options</%text> |
|
219 | 219 | rss_cut_off_limit = 256000 |
|
220 | 220 | rss_items_per_page = 10 |
|
221 | 221 | rss_include_diff = false |
|
222 | 222 | |
|
223 | 223 | <%text>## options for showing and identifying changesets</%text> |
|
224 | 224 | show_sha_length = 12 |
|
225 | 225 | show_revision_number = false |
|
226 | 226 | |
|
227 | 227 | <%text>## gist URL alias, used to create nicer urls for gist. This should be an</%text> |
|
228 | 228 | <%text>## url that does rewrites to _admin/gists/<gistid>.</%text> |
|
229 |
<%text>## example: http://gist. |
|
|
230 |
<%text>## Kallithea url, ie. http[s]:// |
|
|
229 | <%text>## example: http://gist.example.com/{gistid}. Empty means use the internal</%text> | |
|
230 | <%text>## Kallithea url, ie. http[s]://kallithea.example.com/_admin/gists/<gistid></%text> | |
|
231 | 231 | gist_alias_url = |
|
232 | 232 | |
|
233 | 233 | <%text>## white list of API enabled controllers. This allows to add list of</%text> |
|
234 | 234 | <%text>## controllers to which access will be enabled by api_key. eg: to enable</%text> |
|
235 | 235 | <%text>## api access to raw_files put `FilesController:raw`, to enable access to patches</%text> |
|
236 | 236 | <%text>## add `ChangesetController:changeset_patch`. This list should be "," separated</%text> |
|
237 | 237 | <%text>## Syntax is <ControllerClass>:<function>. Check debug logs for generated names</%text> |
|
238 | 238 | <%text>## Recommended settings below are commented out:</%text> |
|
239 | 239 | api_access_controllers_whitelist = |
|
240 | 240 | # ChangesetController:changeset_patch, |
|
241 | 241 | # ChangesetController:changeset_raw, |
|
242 | 242 | # FilesController:raw, |
|
243 | 243 | # FilesController:archivefile |
|
244 | 244 | |
|
245 | 245 | <%text>## default encoding used to convert from and to unicode</%text> |
|
246 | 246 | <%text>## can be also a comma seperated list of encoding in case of mixed encodings</%text> |
|
247 | 247 | default_encoding = utf8 |
|
248 | 248 | |
|
249 | 249 | <%text>## issue tracker for Kallithea (leave blank to disable, absent for default)</%text> |
|
250 | 250 | #bugtracker = https://bitbucket.org/conservancy/kallithea/issues |
|
251 | 251 | |
|
252 | 252 | <%text>## issue tracking mapping for commits messages</%text> |
|
253 | 253 | <%text>## comment out issue_pat, issue_server, issue_prefix to enable</%text> |
|
254 | 254 | |
|
255 | 255 | <%text>## pattern to get the issues from commit messages</%text> |
|
256 | 256 | <%text>## default one used here is #<numbers> with a regex passive group for `#`</%text> |
|
257 | 257 | <%text>## {id} will be all groups matched from this pattern</%text> |
|
258 | 258 | |
|
259 | 259 | issue_pat = (?:\s*#)(\d+) |
|
260 | 260 | |
|
261 | 261 | <%text>## server url to the issue, each {id} will be replaced with match</%text> |
|
262 | 262 | <%text>## fetched from the regex and {repo} is replaced with full repository name</%text> |
|
263 | 263 | <%text>## including groups {repo_name} is replaced with just name of repo</%text> |
|
264 | 264 | |
|
265 |
issue_server_link = https:// |
|
|
265 | issue_server_link = https://issues.example.com/{repo}/issue/{id} | |
|
266 | 266 | |
|
267 | 267 | <%text>## prefix to add to link to indicate it's an url</%text> |
|
268 | 268 | <%text>## #314 will be replaced by <issue_prefix><id></%text> |
|
269 | 269 | |
|
270 | 270 | issue_prefix = # |
|
271 | 271 | |
|
272 | 272 | <%text>## issue_pat, issue_server_link, issue_prefix can have suffixes to specify</%text> |
|
273 | 273 | <%text>## multiple patterns, to other issues server, wiki or others</%text> |
|
274 | 274 | <%text>## below an example how to create a wiki pattern</%text> |
|
275 |
# wiki-some-id -> https:// |
|
|
275 | # wiki-some-id -> https://wiki.example.com/some-id | |
|
276 | 276 | |
|
277 | 277 | #issue_pat_wiki = (?:wiki-)(.+) |
|
278 |
#issue_server_link_wiki = https:// |
|
|
278 | #issue_server_link_wiki = https://wiki.example.com/{id} | |
|
279 | 279 | #issue_prefix_wiki = WIKI- |
|
280 | 280 | |
|
281 | 281 | <%text>## instance-id prefix</%text> |
|
282 | 282 | <%text>## a prefix key for this instance used for cache invalidation when running</%text> |
|
283 | 283 | <%text>## multiple instances of kallithea, make sure it's globally unique for</%text> |
|
284 | 284 | <%text>## all running kallithea instances. Leave empty if you don't use it</%text> |
|
285 | 285 | instance_id = |
|
286 | 286 | |
|
287 | 287 | <%text>## alternative return HTTP header for failed authentication. Default HTTP</%text> |
|
288 | 288 | <%text>## response is 401 HTTPUnauthorized. Currently Mercurial clients have trouble with</%text> |
|
289 | 289 | <%text>## handling that. Set this variable to 403 to return HTTPForbidden</%text> |
|
290 | 290 | auth_ret_code = |
|
291 | 291 | |
|
292 | 292 | <%text>## locking return code. When repository is locked return this HTTP code. 2XX</%text> |
|
293 | 293 | <%text>## codes don't break the transactions while 4XX codes do</%text> |
|
294 | 294 | lock_ret_code = 423 |
|
295 | 295 | |
|
296 | 296 | <%text>## allows to change the repository location in settings page</%text> |
|
297 | 297 | allow_repo_location_change = True |
|
298 | 298 | |
|
299 | 299 | <%text>## allows to setup custom hooks in settings page</%text> |
|
300 | 300 | allow_custom_hooks_settings = True |
|
301 | 301 | |
|
302 | 302 | <%text>####################################</%text> |
|
303 | 303 | <%text>### CELERY CONFIG ####</%text> |
|
304 | 304 | <%text>####################################</%text> |
|
305 | 305 | |
|
306 | 306 | use_celery = false |
|
307 | 307 | broker.host = localhost |
|
308 | 308 | broker.vhost = rabbitmqhost |
|
309 | 309 | broker.port = 5672 |
|
310 | 310 | broker.user = rabbitmq |
|
311 | 311 | broker.password = qweqwe |
|
312 | 312 | |
|
313 | 313 | celery.imports = kallithea.lib.celerylib.tasks |
|
314 | 314 | |
|
315 | 315 | celery.result.backend = amqp |
|
316 | 316 | celery.result.dburi = amqp:// |
|
317 | 317 | celery.result.serialier = json |
|
318 | 318 | |
|
319 | 319 | #celery.send.task.error.emails = true |
|
320 | 320 | #celery.amqp.task.result.expires = 18000 |
|
321 | 321 | |
|
322 | 322 | celeryd.concurrency = 2 |
|
323 | 323 | #celeryd.log.file = celeryd.log |
|
324 | 324 | celeryd.log.level = DEBUG |
|
325 | 325 | celeryd.max.tasks.per.child = 1 |
|
326 | 326 | |
|
327 | 327 | <%text>## tasks will never be sent to the queue, but executed locally instead.</%text> |
|
328 | 328 | celery.always.eager = false |
|
329 | 329 | |
|
330 | 330 | <%text>####################################</%text> |
|
331 | 331 | <%text>### BEAKER CACHE ####</%text> |
|
332 | 332 | <%text>####################################</%text> |
|
333 | 333 | |
|
334 | 334 | beaker.cache.data_dir = ${here}/data/cache/data |
|
335 | 335 | beaker.cache.lock_dir = ${here}/data/cache/lock |
|
336 | 336 | |
|
337 | 337 | beaker.cache.regions = short_term,long_term,sql_cache_short |
|
338 | 338 | |
|
339 | 339 | beaker.cache.short_term.type = memory |
|
340 | 340 | beaker.cache.short_term.expire = 60 |
|
341 | 341 | beaker.cache.short_term.key_length = 256 |
|
342 | 342 | |
|
343 | 343 | beaker.cache.long_term.type = memory |
|
344 | 344 | beaker.cache.long_term.expire = 36000 |
|
345 | 345 | beaker.cache.long_term.key_length = 256 |
|
346 | 346 | |
|
347 | 347 | beaker.cache.sql_cache_short.type = memory |
|
348 | 348 | beaker.cache.sql_cache_short.expire = 10 |
|
349 | 349 | beaker.cache.sql_cache_short.key_length = 256 |
|
350 | 350 | |
|
351 | 351 | <%text>####################################</%text> |
|
352 | 352 | <%text>### BEAKER SESSION ####</%text> |
|
353 | 353 | <%text>####################################</%text> |
|
354 | 354 | |
|
355 | 355 | <%text>## Name of session cookie. Should be unique for a given host and path, even when running</%text> |
|
356 | 356 | <%text>## on different ports. Otherwise, cookie sessions will be shared and messed up.</%text> |
|
357 | 357 | beaker.session.key = kallithea |
|
358 | 358 | <%text>## Sessions should always only be accessible by the browser, not directly by JavaScript.</%text> |
|
359 | 359 | beaker.session.httponly = true |
|
360 | 360 | <%text>## Session lifetime. 2592000 seconds is 30 days.</%text> |
|
361 | 361 | beaker.session.timeout = 2592000 |
|
362 | 362 | |
|
363 | 363 | <%text>## Server secret used with HMAC to ensure integrity of cookies.</%text> |
|
364 | 364 | beaker.session.secret = ${uuid()} |
|
365 | 365 | <%text>## Further, encrypt the data with AES.</%text> |
|
366 | 366 | #beaker.session.encrypt_key = <key_for_encryption> |
|
367 | 367 | #beaker.session.validate_key = <validation_key> |
|
368 | 368 | |
|
369 | 369 | <%text>## Type of storage used for the session, current types are</%text> |
|
370 | 370 | <%text>## dbm, file, memcached, database, and memory.</%text> |
|
371 | 371 | |
|
372 | 372 | <%text>## File system storage of session data. (default)</%text> |
|
373 | 373 | #beaker.session.type = file |
|
374 | 374 | |
|
375 | 375 | <%text>## Cookie only, store all session data inside the cookie. Requires secure secrets.</%text> |
|
376 | 376 | #beaker.session.type = cookie |
|
377 | 377 | |
|
378 | 378 | <%text>## Database storage of session data.</%text> |
|
379 | 379 | #beaker.session.type = ext:database |
|
380 | 380 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/kallithea |
|
381 | 381 | #beaker.session.table_name = db_session |
|
382 | 382 | |
|
383 | 383 | %if error_aggregation_service == 'errormator': |
|
384 | 384 | <%text>############################</%text> |
|
385 | 385 | <%text>## ERROR HANDLING SYSTEMS ##</%text> |
|
386 | 386 | <%text>############################</%text> |
|
387 | 387 | |
|
388 | 388 | <%text>####################</%text> |
|
389 | 389 | <%text>### [errormator] ###</%text> |
|
390 | 390 | <%text>####################</%text> |
|
391 | 391 | |
|
392 | 392 | <%text>## Errormator is tailored to work with Kallithea, see</%text> |
|
393 | 393 | <%text>## http://errormator.com for details how to obtain an account</%text> |
|
394 | 394 | <%text>## you must install python package `errormator_client` to make it work</%text> |
|
395 | 395 | |
|
396 | 396 | <%text>## errormator enabled</%text> |
|
397 | 397 | errormator = false |
|
398 | 398 | |
|
399 | 399 | errormator.server_url = https://api.errormator.com |
|
400 | 400 | errormator.api_key = YOUR_API_KEY |
|
401 | 401 | |
|
402 | 402 | <%text>## TWEAK AMOUNT OF INFO SENT HERE</%text> |
|
403 | 403 | |
|
404 | 404 | <%text>## enables 404 error logging (default False)</%text> |
|
405 | 405 | errormator.report_404 = false |
|
406 | 406 | |
|
407 | 407 | <%text>## time in seconds after request is considered being slow (default 1)</%text> |
|
408 | 408 | errormator.slow_request_time = 1 |
|
409 | 409 | |
|
410 | 410 | <%text>## record slow requests in application</%text> |
|
411 | 411 | <%text>## (needs to be enabled for slow datastore recording and time tracking)</%text> |
|
412 | 412 | errormator.slow_requests = true |
|
413 | 413 | |
|
414 | 414 | <%text>## enable hooking to application loggers</%text> |
|
415 | 415 | #errormator.logging = true |
|
416 | 416 | |
|
417 | 417 | <%text>## minimum log level for log capture</%text> |
|
418 | 418 | #errormator.logging.level = WARNING |
|
419 | 419 | |
|
420 | 420 | <%text>## send logs only from erroneous/slow requests</%text> |
|
421 | 421 | <%text>## (saves API quota for intensive logging)</%text> |
|
422 | 422 | errormator.logging_on_error = false |
|
423 | 423 | |
|
424 | 424 | <%text>## list of additonal keywords that should be grabbed from environ object</%text> |
|
425 | 425 | <%text>## can be string with comma separated list of words in lowercase</%text> |
|
426 | 426 | <%text>## (by default client will always send following info:</%text> |
|
427 | 427 | <%text>## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that</%text> |
|
428 | 428 | <%text>## start with HTTP* this list be extended with additional keywords here</%text> |
|
429 | 429 | errormator.environ_keys_whitelist = |
|
430 | 430 | |
|
431 | 431 | <%text>## list of keywords that should be blanked from request object</%text> |
|
432 | 432 | <%text>## can be string with comma separated list of words in lowercase</%text> |
|
433 | 433 | <%text>## (by default client will always blank keys that contain following words</%text> |
|
434 | 434 | <%text>## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'</%text> |
|
435 | 435 | <%text>## this list be extended with additional keywords set here</%text> |
|
436 | 436 | errormator.request_keys_blacklist = |
|
437 | 437 | |
|
438 | 438 | <%text>## list of namespaces that should be ignores when gathering log entries</%text> |
|
439 | 439 | <%text>## can be string with comma separated list of namespaces</%text> |
|
440 | 440 | <%text>## (by default the client ignores own entries: errormator_client.client)</%text> |
|
441 | 441 | errormator.log_namespace_blacklist = |
|
442 | 442 | |
|
443 | 443 | %elif error_aggregation_service == 'sentry': |
|
444 | 444 | <%text>################</%text> |
|
445 | 445 | <%text>### [sentry] ###</%text> |
|
446 | 446 | <%text>################</%text> |
|
447 | 447 | |
|
448 | 448 | <%text>## sentry is a alternative open source error aggregator</%text> |
|
449 | 449 | <%text>## you must install python packages `sentry` and `raven` to enable</%text> |
|
450 | 450 | |
|
451 | 451 | sentry.dsn = YOUR_DSN |
|
452 | 452 | sentry.servers = |
|
453 | 453 | sentry.name = |
|
454 | 454 | sentry.key = |
|
455 | 455 | sentry.public_key = |
|
456 | 456 | sentry.secret_key = |
|
457 | 457 | sentry.project = |
|
458 | 458 | sentry.site = |
|
459 | 459 | sentry.include_paths = |
|
460 | 460 | sentry.exclude_paths = |
|
461 | 461 | |
|
462 | 462 | %endif |
|
463 | 463 | <%text>################################################################################</%text> |
|
464 | 464 | <%text>## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##</%text> |
|
465 | 465 | <%text>## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##</%text> |
|
466 | 466 | <%text>## execute malicious code after an exception is raised. ##</%text> |
|
467 | 467 | <%text>################################################################################</%text> |
|
468 | 468 | set debug = false |
|
469 | 469 | |
|
470 | 470 | <%text>##################################</%text> |
|
471 | 471 | <%text>### LOGVIEW CONFIG ###</%text> |
|
472 | 472 | <%text>##################################</%text> |
|
473 | 473 | |
|
474 | 474 | logview.sqlalchemy = #faa |
|
475 | 475 | logview.pylons.templating = #bfb |
|
476 | 476 | logview.pylons.util = #eee |
|
477 | 477 | |
|
478 | 478 | <%text>#########################################################</%text> |
|
479 | 479 | <%text>### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###</%text> |
|
480 | 480 | <%text>#########################################################</%text> |
|
481 | 481 | |
|
482 | 482 | %if database_engine == 'sqlite': |
|
483 | 483 | # SQLITE [default] |
|
484 | 484 | sqlalchemy.db1.url = sqlite:///${here}/kallithea.db?timeout=60 |
|
485 | 485 | |
|
486 | 486 | %elif database_engine == 'postgres': |
|
487 | 487 | # POSTGRESQL |
|
488 | 488 | sqlalchemy.db1.url = postgresql://user:pass@localhost/kallithea |
|
489 | 489 | |
|
490 | 490 | %elif database_engine == 'mysql': |
|
491 | 491 | # MySQL |
|
492 | 492 | sqlalchemy.db1.url = mysql://user:pass@localhost/kallithea |
|
493 | 493 | |
|
494 | 494 | %endif |
|
495 | 495 | # see sqlalchemy docs for others |
|
496 | 496 | |
|
497 | 497 | sqlalchemy.db1.echo = false |
|
498 | 498 | sqlalchemy.db1.pool_recycle = 3600 |
|
499 | 499 | sqlalchemy.db1.convert_unicode = true |
|
500 | 500 | |
|
501 | 501 | <%text>################################</%text> |
|
502 | 502 | <%text>### LOGGING CONFIGURATION ####</%text> |
|
503 | 503 | <%text>################################</%text> |
|
504 | 504 | |
|
505 | 505 | [loggers] |
|
506 | 506 | keys = root, routes, kallithea, sqlalchemy, beaker, templates, whoosh_indexer |
|
507 | 507 | |
|
508 | 508 | [handlers] |
|
509 | 509 | keys = console, console_sql |
|
510 | 510 | |
|
511 | 511 | [formatters] |
|
512 | 512 | keys = generic, color_formatter, color_formatter_sql |
|
513 | 513 | |
|
514 | 514 | <%text>#############</%text> |
|
515 | 515 | <%text>## LOGGERS ##</%text> |
|
516 | 516 | <%text>#############</%text> |
|
517 | 517 | |
|
518 | 518 | [logger_root] |
|
519 | 519 | level = NOTSET |
|
520 | 520 | handlers = console |
|
521 | 521 | |
|
522 | 522 | [logger_routes] |
|
523 | 523 | level = DEBUG |
|
524 | 524 | handlers = |
|
525 | 525 | qualname = routes.middleware |
|
526 | 526 | <%text>## "level = DEBUG" logs the route matched and routing variables.</%text> |
|
527 | 527 | propagate = 1 |
|
528 | 528 | |
|
529 | 529 | [logger_beaker] |
|
530 | 530 | level = DEBUG |
|
531 | 531 | handlers = |
|
532 | 532 | qualname = beaker.container |
|
533 | 533 | propagate = 1 |
|
534 | 534 | |
|
535 | 535 | [logger_templates] |
|
536 | 536 | level = INFO |
|
537 | 537 | handlers = |
|
538 | 538 | qualname = pylons.templating |
|
539 | 539 | propagate = 1 |
|
540 | 540 | |
|
541 | 541 | [logger_kallithea] |
|
542 | 542 | level = DEBUG |
|
543 | 543 | handlers = |
|
544 | 544 | qualname = kallithea |
|
545 | 545 | propagate = 1 |
|
546 | 546 | |
|
547 | 547 | [logger_sqlalchemy] |
|
548 | 548 | level = INFO |
|
549 | 549 | handlers = console_sql |
|
550 | 550 | qualname = sqlalchemy.engine |
|
551 | 551 | propagate = 0 |
|
552 | 552 | |
|
553 | 553 | [logger_whoosh_indexer] |
|
554 | 554 | level = DEBUG |
|
555 | 555 | handlers = |
|
556 | 556 | qualname = whoosh_indexer |
|
557 | 557 | propagate = 1 |
|
558 | 558 | |
|
559 | 559 | <%text>##############</%text> |
|
560 | 560 | <%text>## HANDLERS ##</%text> |
|
561 | 561 | <%text>##############</%text> |
|
562 | 562 | |
|
563 | 563 | [handler_console] |
|
564 | 564 | class = StreamHandler |
|
565 | 565 | args = (sys.stderr,) |
|
566 | 566 | level = INFO |
|
567 | 567 | formatter = generic |
|
568 | 568 | |
|
569 | 569 | [handler_console_sql] |
|
570 | 570 | class = StreamHandler |
|
571 | 571 | args = (sys.stderr,) |
|
572 | 572 | level = WARN |
|
573 | 573 | formatter = generic |
|
574 | 574 | |
|
575 | 575 | <%text>################</%text> |
|
576 | 576 | <%text>## FORMATTERS ##</%text> |
|
577 | 577 | <%text>################</%text> |
|
578 | 578 | |
|
579 | 579 | [formatter_generic] |
|
580 | 580 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
581 | 581 | datefmt = %Y-%m-%d %H:%M:%S |
|
582 | 582 | |
|
583 | 583 | [formatter_color_formatter] |
|
584 | 584 | class = kallithea.lib.colored_formatter.ColorFormatter |
|
585 | 585 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
586 | 586 | datefmt = %Y-%m-%d %H:%M:%S |
|
587 | 587 | |
|
588 | 588 | [formatter_color_formatter_sql] |
|
589 | 589 | class = kallithea.lib.colored_formatter.ColorFormatterSql |
|
590 | 590 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
591 | 591 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,580 +1,580 b'' | |||
|
1 | 1 | ################################################################################ |
|
2 | 2 | ################################################################################ |
|
3 | 3 | # Kallithea - Example config # |
|
4 | 4 | # # |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | ################################################################################ |
|
7 | 7 | ################################################################################ |
|
8 | 8 | |
|
9 | 9 | [DEFAULT] |
|
10 | 10 | debug = true |
|
11 | 11 | pdebug = false |
|
12 | 12 | |
|
13 | 13 | ################################################################################ |
|
14 | 14 | ## Email settings ## |
|
15 | 15 | ## ## |
|
16 | 16 | ## Refer to the documentation ("Email settings") for more details. ## |
|
17 | 17 | ## ## |
|
18 | 18 | ## It is recommended to use a valid sender address that passes access ## |
|
19 | 19 | ## validation and spam filtering in mail servers. ## |
|
20 | 20 | ################################################################################ |
|
21 | 21 | |
|
22 | 22 | ## 'From' header for application emails. You can optionally add a name. |
|
23 | 23 | ## Default: |
|
24 | 24 | #app_email_from = Kallithea |
|
25 | 25 | ## Examples: |
|
26 | 26 | #app_email_from = Kallithea <kallithea-noreply@example.com> |
|
27 | 27 | #app_email_from = kallithea-noreply@example.com |
|
28 | 28 | |
|
29 | 29 | ## Subject prefix for application emails. |
|
30 | 30 | ## A space between this prefix and the real subject is automatically added. |
|
31 | 31 | ## Default: |
|
32 | 32 | #email_prefix = |
|
33 | 33 | ## Example: |
|
34 | 34 | #email_prefix = [Kallithea] |
|
35 | 35 | |
|
36 | 36 | ## Recipients for error emails and fallback recipients of application mails. |
|
37 | 37 | ## Multiple addresses can be specified, space-separated. |
|
38 | 38 | ## Only addresses are allowed, do not add any name part. |
|
39 | 39 | ## Default: |
|
40 | 40 | #email_to = |
|
41 | 41 | ## Examples: |
|
42 | 42 | #email_to = admin@example.com |
|
43 | 43 | #email_to = admin@example.com another_admin@example.com |
|
44 | 44 | |
|
45 | 45 | ## 'From' header for error emails. You can optionally add a name. |
|
46 | 46 | ## Default: |
|
47 | 47 | #error_email_from = pylons@yourapp.com |
|
48 | 48 | ## Examples: |
|
49 | 49 | #error_email_from = Kallithea Errors <kallithea-noreply@example.com> |
|
50 | 50 | #error_email_from = paste_error@example.com |
|
51 | 51 | |
|
52 | 52 | ## SMTP server settings |
|
53 | 53 | ## Only smtp_server is mandatory. All other settings take the specified default |
|
54 | 54 | ## values. |
|
55 |
#smtp_server = |
|
|
55 | #smtp_server = smtp.example.com | |
|
56 | 56 | #smtp_username = |
|
57 | 57 | #smtp_password = |
|
58 | 58 | #smtp_port = 25 |
|
59 | 59 | #smtp_use_tls = false |
|
60 | 60 | #smtp_use_ssl = false |
|
61 | 61 | ## SMTP authentication parameters to use (e.g. LOGIN PLAIN CRAM-MD5, etc.). |
|
62 | 62 | ## If empty, use any of the authentication parameters supported by the server. |
|
63 | 63 | #smtp_auth = |
|
64 | 64 | |
|
65 | 65 | [server:main] |
|
66 | 66 | ## PASTE ## |
|
67 | 67 | #use = egg:Paste#http |
|
68 | 68 | ## nr of worker threads to spawn |
|
69 | 69 | #threadpool_workers = 5 |
|
70 | 70 | ## max request before thread respawn |
|
71 | 71 | #threadpool_max_requests = 10 |
|
72 | 72 | ## option to use threads of process |
|
73 | 73 | #use_threadpool = true |
|
74 | 74 | |
|
75 | 75 | ## WAITRESS ## |
|
76 | 76 | use = egg:waitress#main |
|
77 | 77 | ## number of worker threads |
|
78 | 78 | threads = 5 |
|
79 | 79 | ## MAX BODY SIZE 100GB |
|
80 | 80 | max_request_body_size = 107374182400 |
|
81 | 81 | ## use poll instead of select, fixes fd limits, may not work on old |
|
82 | 82 | ## windows systems. |
|
83 | 83 | #asyncore_use_poll = True |
|
84 | 84 | |
|
85 | 85 | ## GUNICORN ## |
|
86 | 86 | #use = egg:gunicorn#main |
|
87 | 87 | ## number of process workers. You must set `instance_id = *` when this option |
|
88 | 88 | ## is set to more than one worker |
|
89 | 89 | #workers = 1 |
|
90 | 90 | ## process name |
|
91 | 91 | #proc_name = kallithea |
|
92 | 92 | ## type of worker class, one of sync, eventlet, gevent, tornado |
|
93 | 93 | ## for bigger setups it is recommended to use a worker class other than sync |
|
94 | 94 | #worker_class = sync |
|
95 | 95 | #max_requests = 1000 |
|
96 | 96 | ## amount of time a worker can handle a request before it gets killed and |
|
97 | 97 | ## restarted |
|
98 | 98 | #timeout = 3600 |
|
99 | 99 | |
|
100 | 100 | ## UWSGI ## |
|
101 | 101 | ## run with uwsgi --ini-paste-logged <inifile.ini> |
|
102 | 102 | #[uwsgi] |
|
103 | 103 | #socket = /tmp/uwsgi.sock |
|
104 | 104 | #master = true |
|
105 | 105 | #http = 127.0.0.1:5000 |
|
106 | 106 | |
|
107 | 107 | ## set as daemon and redirect all output to a file |
|
108 | 108 | #daemonize = ./uwsgi_kallithea.log |
|
109 | 109 | |
|
110 | 110 | ## master process PID |
|
111 | 111 | #pidfile = ./uwsgi_kallithea.pid |
|
112 | 112 | |
|
113 | 113 | ## stats server with workers statistics, use uwsgitop |
|
114 | 114 | ## for monitoring, `uwsgitop 127.0.0.1:1717` |
|
115 | 115 | #stats = 127.0.0.1:1717 |
|
116 | 116 | #memory-report = true |
|
117 | 117 | |
|
118 | 118 | ## log 5XX errors |
|
119 | 119 | #log-5xx = true |
|
120 | 120 | |
|
121 | 121 | ## Set the socket listen queue size. |
|
122 | 122 | #listen = 256 |
|
123 | 123 | |
|
124 | 124 | ## Gracefully Reload workers after the specified amount of managed requests |
|
125 | 125 | ## (avoid memory leaks). |
|
126 | 126 | #max-requests = 1000 |
|
127 | 127 | |
|
128 | 128 | ## enable large buffers |
|
129 | 129 | #buffer-size = 65535 |
|
130 | 130 | |
|
131 | 131 | ## socket and http timeouts ## |
|
132 | 132 | #http-timeout = 3600 |
|
133 | 133 | #socket-timeout = 3600 |
|
134 | 134 | |
|
135 | 135 | ## Log requests slower than the specified number of milliseconds. |
|
136 | 136 | #log-slow = 10 |
|
137 | 137 | |
|
138 | 138 | ## Exit if no app can be loaded. |
|
139 | 139 | #need-app = true |
|
140 | 140 | |
|
141 | 141 | ## Set lazy mode (load apps in workers instead of master). |
|
142 | 142 | #lazy = true |
|
143 | 143 | |
|
144 | 144 | ## scaling ## |
|
145 | 145 | ## set cheaper algorithm to use, if not set default will be used |
|
146 | 146 | #cheaper-algo = spare |
|
147 | 147 | |
|
148 | 148 | ## minimum number of workers to keep at all times |
|
149 | 149 | #cheaper = 1 |
|
150 | 150 | |
|
151 | 151 | ## number of workers to spawn at startup |
|
152 | 152 | #cheaper-initial = 1 |
|
153 | 153 | |
|
154 | 154 | ## maximum number of workers that can be spawned |
|
155 | 155 | #workers = 4 |
|
156 | 156 | |
|
157 | 157 | ## how many workers should be spawned at a time |
|
158 | 158 | #cheaper-step = 1 |
|
159 | 159 | |
|
160 | 160 | ## COMMON ## |
|
161 | 161 | host = 127.0.0.1 |
|
162 | 162 | port = 5000 |
|
163 | 163 | |
|
164 | 164 | ## middleware for hosting the WSGI application under a URL prefix |
|
165 | 165 | #[filter:proxy-prefix] |
|
166 | 166 | #use = egg:PasteDeploy#prefix |
|
167 | 167 | #prefix = /<your-prefix> |
|
168 | 168 | |
|
169 | 169 | [app:main] |
|
170 | 170 | use = egg:kallithea |
|
171 | 171 | ## enable proxy prefix middleware |
|
172 | 172 | #filter-with = proxy-prefix |
|
173 | 173 | |
|
174 | 174 | full_stack = true |
|
175 | 175 | static_files = true |
|
176 | 176 | ## Available Languages: |
|
177 | 177 | ## cs de fr hu ja nl_BE pl pt_BR ru sk zh_CN zh_TW |
|
178 | 178 | lang = |
|
179 | 179 | cache_dir = %(here)s/data |
|
180 | 180 | index_dir = %(here)s/data/index |
|
181 | 181 | |
|
182 | 182 | ## perform a full repository scan on each server start, this should be |
|
183 | 183 | ## set to false after first startup, to allow faster server restarts. |
|
184 | 184 | initial_repo_scan = false |
|
185 | 185 | |
|
186 | 186 | ## uncomment and set this path to use archive download cache |
|
187 | 187 | archive_cache_dir = %(here)s/tarballcache |
|
188 | 188 | |
|
189 | 189 | ## change this to unique ID for security |
|
190 | 190 | app_instance_uuid = ${app_instance_uuid} |
|
191 | 191 | |
|
192 | 192 | ## cut off limit for large diffs (size in bytes) |
|
193 | 193 | cut_off_limit = 256000 |
|
194 | 194 | |
|
195 | 195 | ## use cache version of scm repo everywhere |
|
196 | 196 | vcs_full_cache = true |
|
197 | 197 | |
|
198 | 198 | ## force https in Kallithea, fixes https redirects, assumes it's always https |
|
199 | 199 | force_https = false |
|
200 | 200 | |
|
201 | 201 | ## use Strict-Transport-Security headers |
|
202 | 202 | use_htsts = false |
|
203 | 203 | |
|
204 | 204 | ## number of commits stats will parse on each iteration |
|
205 | 205 | commit_parse_limit = 25 |
|
206 | 206 | |
|
207 | 207 | ## path to git executable |
|
208 | 208 | git_path = git |
|
209 | 209 | |
|
210 | 210 | ## git rev filter option, --all is the default filter, if you need to |
|
211 | 211 | ## hide all refs in changelog switch this to --branches --tags |
|
212 | 212 | #git_rev_filter = --branches --tags |
|
213 | 213 | |
|
214 | 214 | ## RSS feed options |
|
215 | 215 | rss_cut_off_limit = 256000 |
|
216 | 216 | rss_items_per_page = 10 |
|
217 | 217 | rss_include_diff = false |
|
218 | 218 | |
|
219 | 219 | ## options for showing and identifying changesets |
|
220 | 220 | show_sha_length = 12 |
|
221 | 221 | show_revision_number = false |
|
222 | 222 | |
|
223 | 223 | ## gist URL alias, used to create nicer urls for gist. This should be an |
|
224 | 224 | ## url that does rewrites to _admin/gists/<gistid>. |
|
225 |
## example: http://gist. |
|
|
226 |
## Kallithea url, ie. http[s]:// |
|
|
225 | ## example: http://gist.example.com/{gistid}. Empty means use the internal | |
|
226 | ## Kallithea url, ie. http[s]://kallithea.example.com/_admin/gists/<gistid> | |
|
227 | 227 | gist_alias_url = |
|
228 | 228 | |
|
229 | 229 | ## white list of API enabled controllers. This allows to add list of |
|
230 | 230 | ## controllers to which access will be enabled by api_key. eg: to enable |
|
231 | 231 | ## api access to raw_files put `FilesController:raw`, to enable access to patches |
|
232 | 232 | ## add `ChangesetController:changeset_patch`. This list should be "," separated |
|
233 | 233 | ## Syntax is <ControllerClass>:<function>. Check debug logs for generated names |
|
234 | 234 | ## Recommended settings below are commented out: |
|
235 | 235 | api_access_controllers_whitelist = |
|
236 | 236 | # ChangesetController:changeset_patch, |
|
237 | 237 | # ChangesetController:changeset_raw, |
|
238 | 238 | # FilesController:raw, |
|
239 | 239 | # FilesController:archivefile |
|
240 | 240 | |
|
241 | 241 | ## default encoding used to convert from and to unicode |
|
242 | 242 | ## can also be a comma separated list of encodings in case of mixed encodings |
|
243 | 243 | default_encoding = utf8 |
|
244 | 244 | |
|
245 | 245 | ## issue tracker for Kallithea (leave blank to disable, absent for default) |
|
246 | 246 | #bugtracker = https://bitbucket.org/conservancy/kallithea/issues |
|
247 | 247 | |
|
248 | 248 | ## issue tracking mapping for commits messages |
|
249 | 249 | ## comment out issue_pat, issue_server, issue_prefix to enable |
|
250 | 250 | |
|
251 | 251 | ## pattern to get the issues from commit messages |
|
252 | 252 | ## default one used here is #<numbers> with a regex passive group for `#` |
|
253 | 253 | ## {id} will be all groups matched from this pattern |
|
254 | 254 | |
|
255 | 255 | issue_pat = (?:\s*#)(\d+) |
|
256 | 256 | |
|
257 | 257 | ## server url to the issue, each {id} will be replaced with match |
|
258 | 258 | ## fetched from the regex and {repo} is replaced with full repository name |
|
259 | 259 | ## including groups {repo_name} is replaced with just name of repo |
|
260 | 260 | |
|
261 |
issue_server_link = https:// |
|
|
261 | issue_server_link = https://issues.example.com/{repo}/issue/{id} | |
|
262 | 262 | |
|
263 | 263 | ## prefix to add to link to indicate it's an url |
|
264 | 264 | ## #314 will be replaced by <issue_prefix><id> |
|
265 | 265 | |
|
266 | 266 | issue_prefix = # |
|
267 | 267 | |
|
268 | 268 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify |
|
269 | 269 | ## multiple patterns, to other issues server, wiki or others |
|
270 | 270 | ## below an example how to create a wiki pattern |
|
271 |
# wiki-some-id -> https:// |
|
|
271 | # wiki-some-id -> https://wiki.example.com/some-id | |
|
272 | 272 | |
|
273 | 273 | #issue_pat_wiki = (?:wiki-)(.+) |
|
274 |
#issue_server_link_wiki = https:// |
|
|
274 | #issue_server_link_wiki = https://wiki.example.com/{id} | |
|
275 | 275 | #issue_prefix_wiki = WIKI- |
|
276 | 276 | |
|
277 | 277 | ## instance-id prefix |
|
278 | 278 | ## a prefix key for this instance used for cache invalidation when running |
|
279 | 279 | ## multiple instances of kallithea, make sure it's globally unique for |
|
280 | 280 | ## all running kallithea instances. Leave empty if you don't use it |
|
281 | 281 | instance_id = |
|
282 | 282 | |
|
283 | 283 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
284 | 284 | ## response is 401 HTTPUnauthorized. Currently Mercurial clients have trouble with |
|
285 | 285 | ## handling that. Set this variable to 403 to return HTTPForbidden |
|
286 | 286 | auth_ret_code = |
|
287 | 287 | |
|
288 | 288 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
289 | 289 | ## codes don't break the transactions while 4XX codes do |
|
290 | 290 | lock_ret_code = 423 |
|
291 | 291 | |
|
292 | 292 | ## allows to change the repository location in settings page |
|
293 | 293 | allow_repo_location_change = True |
|
294 | 294 | |
|
295 | 295 | ## allows to setup custom hooks in settings page |
|
296 | 296 | allow_custom_hooks_settings = True |
|
297 | 297 | |
|
298 | 298 | #################################### |
|
299 | 299 | ### CELERY CONFIG #### |
|
300 | 300 | #################################### |
|
301 | 301 | |
|
302 | 302 | use_celery = false |
|
303 | 303 | broker.host = localhost |
|
304 | 304 | broker.vhost = rabbitmqhost |
|
305 | 305 | broker.port = 5672 |
|
306 | 306 | broker.user = rabbitmq |
|
307 | 307 | broker.password = qweqwe |
|
308 | 308 | |
|
309 | 309 | celery.imports = kallithea.lib.celerylib.tasks |
|
310 | 310 | |
|
311 | 311 | celery.result.backend = amqp |
|
312 | 312 | celery.result.dburi = amqp:// |
|
313 | 313 | celery.result.serialier = json |
|
314 | 314 | |
|
315 | 315 | #celery.send.task.error.emails = true |
|
316 | 316 | #celery.amqp.task.result.expires = 18000 |
|
317 | 317 | |
|
318 | 318 | celeryd.concurrency = 2 |
|
319 | 319 | #celeryd.log.file = celeryd.log |
|
320 | 320 | celeryd.log.level = DEBUG |
|
321 | 321 | celeryd.max.tasks.per.child = 1 |
|
322 | 322 | |
|
323 | 323 | ## tasks will never be sent to the queue, but executed locally instead. |
|
324 | 324 | celery.always.eager = false |
|
325 | 325 | |
|
326 | 326 | #################################### |
|
327 | 327 | ### BEAKER CACHE #### |
|
328 | 328 | #################################### |
|
329 | 329 | |
|
330 | 330 | beaker.cache.data_dir = %(here)s/data/cache/data |
|
331 | 331 | beaker.cache.lock_dir = %(here)s/data/cache/lock |
|
332 | 332 | |
|
333 | 333 | beaker.cache.regions = short_term,long_term,sql_cache_short |
|
334 | 334 | |
|
335 | 335 | beaker.cache.short_term.type = memory |
|
336 | 336 | beaker.cache.short_term.expire = 60 |
|
337 | 337 | beaker.cache.short_term.key_length = 256 |
|
338 | 338 | |
|
339 | 339 | beaker.cache.long_term.type = memory |
|
340 | 340 | beaker.cache.long_term.expire = 36000 |
|
341 | 341 | beaker.cache.long_term.key_length = 256 |
|
342 | 342 | |
|
343 | 343 | beaker.cache.sql_cache_short.type = memory |
|
344 | 344 | beaker.cache.sql_cache_short.expire = 10 |
|
345 | 345 | beaker.cache.sql_cache_short.key_length = 256 |
|
346 | 346 | |
|
347 | 347 | #################################### |
|
348 | 348 | ### BEAKER SESSION #### |
|
349 | 349 | #################################### |
|
350 | 350 | |
|
351 | 351 | ## Name of session cookie. Should be unique for a given host and path, even when running |
|
352 | 352 | ## on different ports. Otherwise, cookie sessions will be shared and messed up. |
|
353 | 353 | beaker.session.key = kallithea |
|
354 | 354 | ## Sessions should always only be accessible by the browser, not directly by JavaScript. |
|
355 | 355 | beaker.session.httponly = true |
|
356 | 356 | ## Session lifetime. 2592000 seconds is 30 days. |
|
357 | 357 | beaker.session.timeout = 2592000 |
|
358 | 358 | |
|
359 | 359 | ## Server secret used with HMAC to ensure integrity of cookies. |
|
360 | 360 | beaker.session.secret = ${app_instance_uuid} |
|
361 | 361 | ## Further, encrypt the data with AES. |
|
362 | 362 | #beaker.session.encrypt_key = <key_for_encryption> |
|
363 | 363 | #beaker.session.validate_key = <validation_key> |
|
364 | 364 | |
|
365 | 365 | ## Type of storage used for the session, current types are |
|
366 | 366 | ## dbm, file, memcached, database, and memory. |
|
367 | 367 | |
|
368 | 368 | ## File system storage of session data. (default) |
|
369 | 369 | #beaker.session.type = file |
|
370 | 370 | |
|
371 | 371 | ## Cookie only, store all session data inside the cookie. Requires secure secrets. |
|
372 | 372 | #beaker.session.type = cookie |
|
373 | 373 | |
|
374 | 374 | ## Database storage of session data. |
|
375 | 375 | #beaker.session.type = ext:database |
|
376 | 376 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/kallithea |
|
377 | 377 | #beaker.session.table_name = db_session |
|
378 | 378 | |
|
379 | 379 | ############################ |
|
380 | 380 | ## ERROR HANDLING SYSTEMS ## |
|
381 | 381 | ############################ |
|
382 | 382 | |
|
383 | 383 | #################### |
|
384 | 384 | ### [errormator] ### |
|
385 | 385 | #################### |
|
386 | 386 | |
|
387 | 387 | ## Errormator is tailored to work with Kallithea, see |
|
388 | 388 | ## http://errormator.com for details how to obtain an account |
|
389 | 389 | ## you must install python package `errormator_client` to make it work |
|
390 | 390 | |
|
391 | 391 | ## errormator enabled |
|
392 | 392 | errormator = false |
|
393 | 393 | |
|
394 | 394 | errormator.server_url = https://api.errormator.com |
|
395 | 395 | errormator.api_key = YOUR_API_KEY |
|
396 | 396 | |
|
397 | 397 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
398 | 398 | |
|
399 | 399 | ## enables 404 error logging (default False) |
|
400 | 400 | errormator.report_404 = false |
|
401 | 401 | |
|
402 | 402 | ## time in seconds after request is considered being slow (default 1) |
|
403 | 403 | errormator.slow_request_time = 1 |
|
404 | 404 | |
|
405 | 405 | ## record slow requests in application |
|
406 | 406 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
407 | 407 | errormator.slow_requests = true |
|
408 | 408 | |
|
409 | 409 | ## enable hooking to application loggers |
|
410 | 410 | #errormator.logging = true |
|
411 | 411 | |
|
412 | 412 | ## minimum log level for log capture |
|
413 | 413 | #errormator.logging.level = WARNING |
|
414 | 414 | |
|
415 | 415 | ## send logs only from erroneous/slow requests |
|
416 | 416 | ## (saves API quota for intensive logging) |
|
417 | 417 | errormator.logging_on_error = false |
|
418 | 418 | |
|
419 | 419 | ## list of additional keywords that should be grabbed from environ object |
|
420 | 420 | ## can be string with comma separated list of words in lowercase |
|
421 | 421 | ## (by default client will always send following info: |
|
422 | 422 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
423 | 423 | ## start with HTTP* this list can be extended with additional keywords here |
|
424 | 424 | errormator.environ_keys_whitelist = |
|
425 | 425 | |
|
426 | 426 | ## list of keywords that should be blanked from request object |
|
427 | 427 | ## can be string with comma separated list of words in lowercase |
|
428 | 428 | ## (by default client will always blank keys that contain following words |
|
429 | 429 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
430 | 430 | ## this list can be extended with additional keywords set here |
|
431 | 431 | errormator.request_keys_blacklist = |
|
432 | 432 | |
|
433 | 433 | ## list of namespaces that should be ignored when gathering log entries |
|
434 | 434 | ## can be string with comma separated list of namespaces |
|
435 | 435 | ## (by default the client ignores own entries: errormator_client.client) |
|
436 | 436 | errormator.log_namespace_blacklist = |
|
437 | 437 | |
|
438 | 438 | ################ |
|
439 | 439 | ### [sentry] ### |
|
440 | 440 | ################ |
|
441 | 441 | |
|
442 | 442 | ## sentry is an alternative open source error aggregator |
|
443 | 443 | ## you must install python packages `sentry` and `raven` to enable |
|
444 | 444 | |
|
445 | 445 | sentry.dsn = YOUR_DSN |
|
446 | 446 | sentry.servers = |
|
447 | 447 | sentry.name = |
|
448 | 448 | sentry.key = |
|
449 | 449 | sentry.public_key = |
|
450 | 450 | sentry.secret_key = |
|
451 | 451 | sentry.project = |
|
452 | 452 | sentry.site = |
|
453 | 453 | sentry.include_paths = |
|
454 | 454 | sentry.exclude_paths = |
|
455 | 455 | |
|
456 | 456 | ################################################################################ |
|
457 | 457 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
458 | 458 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
459 | 459 | ## execute malicious code after an exception is raised. ## |
|
460 | 460 | ################################################################################ |
|
461 | 461 | set debug = false |
|
462 | 462 | |
|
463 | 463 | ################################## |
|
464 | 464 | ### LOGVIEW CONFIG ### |
|
465 | 465 | ################################## |
|
466 | 466 | |
|
467 | 467 | logview.sqlalchemy = #faa |
|
468 | 468 | logview.pylons.templating = #bfb |
|
469 | 469 | logview.pylons.util = #eee |
|
470 | 470 | |
|
471 | 471 | ######################################################### |
|
472 | 472 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### |
|
473 | 473 | ######################################################### |
|
474 | 474 | |
|
475 | 475 | # SQLITE [default] |
|
476 | 476 | sqlalchemy.db1.url = sqlite:///%(here)s/kallithea.db?timeout=60 |
|
477 | 477 | |
|
478 | 478 | # POSTGRESQL |
|
479 | 479 | #sqlalchemy.db1.url = postgresql://user:pass@localhost/kallithea |
|
480 | 480 | |
|
481 | 481 | # MySQL |
|
482 | 482 | #sqlalchemy.db1.url = mysql://user:pass@localhost/kallithea |
|
483 | 483 | |
|
484 | 484 | # see sqlalchemy docs for others |
|
485 | 485 | |
|
486 | 486 | sqlalchemy.db1.echo = false |
|
487 | 487 | sqlalchemy.db1.pool_recycle = 3600 |
|
488 | 488 | sqlalchemy.db1.convert_unicode = true |
|
489 | 489 | |
|
490 | 490 | ################################ |
|
491 | 491 | ### LOGGING CONFIGURATION #### |
|
492 | 492 | ################################ |
|
493 | 493 | |
|
494 | 494 | [loggers] |
|
495 | 495 | keys = root, routes, kallithea, sqlalchemy, beaker, templates, whoosh_indexer |
|
496 | 496 | |
|
497 | 497 | [handlers] |
|
498 | 498 | keys = console, console_sql |
|
499 | 499 | |
|
500 | 500 | [formatters] |
|
501 | 501 | keys = generic, color_formatter, color_formatter_sql |
|
502 | 502 | |
|
503 | 503 | ############# |
|
504 | 504 | ## LOGGERS ## |
|
505 | 505 | ############# |
|
506 | 506 | |
|
507 | 507 | [logger_root] |
|
508 | 508 | level = NOTSET |
|
509 | 509 | handlers = console |
|
510 | 510 | |
|
511 | 511 | [logger_routes] |
|
512 | 512 | level = DEBUG |
|
513 | 513 | handlers = |
|
514 | 514 | qualname = routes.middleware |
|
515 | 515 | ## "level = DEBUG" logs the route matched and routing variables. |
|
516 | 516 | propagate = 1 |
|
517 | 517 | |
|
518 | 518 | [logger_beaker] |
|
519 | 519 | level = DEBUG |
|
520 | 520 | handlers = |
|
521 | 521 | qualname = beaker.container |
|
522 | 522 | propagate = 1 |
|
523 | 523 | |
|
524 | 524 | [logger_templates] |
|
525 | 525 | level = INFO |
|
526 | 526 | handlers = |
|
527 | 527 | qualname = pylons.templating |
|
528 | 528 | propagate = 1 |
|
529 | 529 | |
|
530 | 530 | [logger_kallithea] |
|
531 | 531 | level = DEBUG |
|
532 | 532 | handlers = |
|
533 | 533 | qualname = kallithea |
|
534 | 534 | propagate = 1 |
|
535 | 535 | |
|
536 | 536 | [logger_sqlalchemy] |
|
537 | 537 | level = INFO |
|
538 | 538 | handlers = console_sql |
|
539 | 539 | qualname = sqlalchemy.engine |
|
540 | 540 | propagate = 0 |
|
541 | 541 | |
|
542 | 542 | [logger_whoosh_indexer] |
|
543 | 543 | level = DEBUG |
|
544 | 544 | handlers = |
|
545 | 545 | qualname = whoosh_indexer |
|
546 | 546 | propagate = 1 |
|
547 | 547 | |
|
548 | 548 | ############## |
|
549 | 549 | ## HANDLERS ## |
|
550 | 550 | ############## |
|
551 | 551 | |
|
552 | 552 | [handler_console] |
|
553 | 553 | class = StreamHandler |
|
554 | 554 | args = (sys.stderr,) |
|
555 | 555 | level = INFO |
|
556 | 556 | formatter = generic |
|
557 | 557 | |
|
558 | 558 | [handler_console_sql] |
|
559 | 559 | class = StreamHandler |
|
560 | 560 | args = (sys.stderr,) |
|
561 | 561 | level = WARN |
|
562 | 562 | formatter = generic |
|
563 | 563 | |
|
564 | 564 | ################ |
|
565 | 565 | ## FORMATTERS ## |
|
566 | 566 | ################ |
|
567 | 567 | |
|
568 | 568 | [formatter_generic] |
|
569 | 569 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
570 | 570 | datefmt = %Y-%m-%d %H:%M:%S |
|
571 | 571 | |
|
572 | 572 | [formatter_color_formatter] |
|
573 | 573 | class = kallithea.lib.colored_formatter.ColorFormatter |
|
574 | 574 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
575 | 575 | datefmt = %Y-%m-%d %H:%M:%S |
|
576 | 576 | |
|
577 | 577 | [formatter_color_formatter_sql] |
|
578 | 578 | class = kallithea.lib.colored_formatter.ColorFormatterSql |
|
579 | 579 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
580 | 580 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,453 +1,453 b'' | |||
|
1 | 1 | # The code in this module is entirely lifted from the Lamson project |
|
2 | 2 | # (http://lamsonproject.org/). Its copyright is: |
|
3 | 3 | |
|
4 | 4 | # Copyright (c) 2008, Zed A. Shaw |
|
5 | 5 | # All rights reserved. |
|
6 | 6 | |
|
7 | 7 | # It is provided under this license: |
|
8 | 8 | |
|
9 | 9 | # Redistribution and use in source and binary forms, with or without |
|
10 | 10 | # modification, are permitted provided that the following conditions are met: |
|
11 | 11 | |
|
12 | 12 | # * Redistributions of source code must retain the above copyright notice, this |
|
13 | 13 | # list of conditions and the following disclaimer. |
|
14 | 14 | |
|
15 | 15 | # * Redistributions in binary form must reproduce the above copyright notice, |
|
16 | 16 | # this list of conditions and the following disclaimer in the documentation |
|
17 | 17 | # and/or other materials provided with the distribution. |
|
18 | 18 | |
|
19 | 19 | # * Neither the name of the Zed A. Shaw nor the names of its contributors may |
|
20 | 20 | # be used to endorse or promote products derived from this software without |
|
21 | 21 | # specific prior written permission. |
|
22 | 22 | |
|
23 | 23 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
|
24 | 24 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
|
25 | 25 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS |
|
26 | 26 | # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE |
|
27 | 27 | # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, |
|
28 | 28 | # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES |
|
29 | 29 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR |
|
30 | 30 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) |
|
31 | 31 | # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, |
|
32 | 32 | # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
|
33 | 33 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
|
34 | 34 | # POSSIBILITY OF SUCH DAMAGE. |
|
35 | 35 | |
|
36 | 36 | import os |
|
37 | 37 | import mimetypes |
|
38 | 38 | import string |
|
39 | 39 | from email import encoders |
|
40 | 40 | from email.charset import Charset |
|
41 | 41 | from email.utils import parseaddr |
|
42 | 42 | from email.mime.base import MIMEBase |
|
43 | 43 | |
|
44 | 44 | ADDRESS_HEADERS_WHITELIST = ['From', 'To', 'Delivered-To', 'Cc'] |
|
45 | 45 | DEFAULT_ENCODING = "utf-8" |
|
46 | 46 | VALUE_IS_EMAIL_ADDRESS = lambda v: '@' in v |
|
47 | 47 | |
|
48 | 48 | |
|
def normalize_header(header):
    """Canonicalize a header name into Titlecase-With-Dashes form."""
    # Equivalent to string.capwords(header.lower(), '-'):
    # str.capitalize() uppercases the first char and lowercases the rest.
    return '-'.join(piece.capitalize() for piece in header.split('-'))
|
51 | 51 | |
|
52 | 52 | |
|
class EncodingError(Exception):
    """Raised when an email part cannot be encoded as requested."""
    pass
|
56 | 56 | |
|
57 | 57 | |
|
class MailBase(object):
    """Low-level mail container forming the basis of lamson.mail.

    Holds headers, an optional body, nested parts, and the content
    encoding state needed to serialize the message.  You can do all of
    your email processing with this class, but it is fairly raw.
    """

    def __init__(self, items=()):
        self.headers = dict(items)
        self.parts = []
        self.body = None
        self.content_encoding = {
            'Content-Type': (None, {}),
            'Content-Disposition': (None, {}),
            'Content-Transfer-Encoding': (None, {}),
        }

    def __getitem__(self, key):
        # Missing headers yield None instead of raising KeyError.
        return self.headers.get(normalize_header(key), None)

    def __len__(self):
        return len(self.headers)

    def __iter__(self):
        return iter(self.headers)

    def __contains__(self, key):
        return normalize_header(key) in self.headers

    def __setitem__(self, key, value):
        self.headers[normalize_header(key)] = value

    def __delitem__(self, key):
        del self.headers[normalize_header(key)]

    def __nonzero__(self):
        # Truthy once any body, header or part has been set (Python 2).
        return self.body is not None or len(self.headers) > 0 or len(self.parts) > 0

    def keys(self):
        """Return the header names, sorted."""
        return sorted(self.headers.keys())

    def attach_file(self, filename, data, ctype, disposition):
        """
        Attach *data* as a file: a raw attachment whose disposition
        carries the file name.
        """
        assert filename, "You can't attach a file without a filename."
        attachment = MailBase()
        attachment.body = data
        attachment.content_encoding['Content-Type'] = (ctype.lower(),
                                                       {'name': filename})
        attachment.content_encoding['Content-Disposition'] = (disposition,
                                                              {'filename': filename})
        self.parts.append(attachment)

    def attach_text(self, data, ctype):
        """
        Attach a simpler text encoded part, which has no filename.
        """
        attachment = MailBase()
        attachment.body = data
        attachment.content_encoding['Content-Type'] = (ctype.lower(), {})
        self.parts.append(attachment)

    def walk(self):
        """Yield every nested part, depth first."""
        for child in self.parts:
            yield child
            for descendant in child.walk():
                yield descendant
|
128 | 128 | |
|
129 | 129 | |
|
class MailResponse(object):
    """
    You are given MailResponse objects from the lamson.view methods, and
    whenever you want to generate an email to send to someone. It has the
    same basic functionality as MailRequest, but it is designed to be written
    to, rather than read from (although you can do both).

    You can easily set a Body or Html during creation or after by passing it
    as __init__ parameters, or by setting those attributes.

    You can initially set the From, To, and Subject, but they are headers so
    use the dict notation to change them: msg['From'] = 'joe@example.com'.

    The message is not fully crafted until right when you convert it with
    MailResponse.to_message. This lets you change it and work with it, then
    send it out when it's ready.
    """
    def __init__(self, To=None, From=None, Subject=None, Body=None, Html=None,
                 separator="; "):
        self.Body = Body
        self.Html = Html
        self.base = MailBase([('To', To), ('From', From), ('Subject', Subject)])
        self.multipart = self.Body and self.Html
        self.attachments = []
        self.separator = separator

    def __contains__(self, key):
        return self.base.__contains__(key)

    def __getitem__(self, key):
        return self.base.__getitem__(key)

    def __setitem__(self, key, val):
        return self.base.__setitem__(key, val)

    def __delitem__(self, name):
        del self.base[name]

    def attach(self, filename=None, content_type=None, data=None,
               disposition=None):
        """
        Simplifies attaching files from disk or data as files. To attach
        simple text simply give data and a content_type. To attach a file,
        give the data/content_type/filename/disposition combination.

        For convenience, if you don't give data and only a filename, then it
        will read that file's contents when you call to_message() later. If
        you give data and filename then it will assume you've filled data
        with what the file's contents are and filename is just the name to
        use.
        """
        assert filename or data, ("You must give a filename or some data to "
                                  "attach.")
        assert data or os.path.exists(filename), ("File doesn't exist, and no "
                                                  "data given.")

        self.multipart = True

        if filename and not content_type:
            # the guessed transfer encoding (e.g. gzip) is intentionally
            # ignored; only the content type matters here
            content_type, _encoding = mimetypes.guess_type(filename)

        assert content_type, ("No content type given, and couldn't guess "
                              "from the filename: %r" % filename)

        self.attachments.append({'filename': filename,
                                 'content_type': content_type,
                                 'data': data,
                                 'disposition': disposition,})

    def attach_part(self, part):
        """
        Attaches a raw MailBase part from a MailRequest (or anywhere)
        so that you can copy it over.
        """
        self.multipart = True

        self.attachments.append({'filename': None,
                                 'content_type': None,
                                 'data': None,
                                 'disposition': None,
                                 'part': part,
                                 })

    def attach_all_parts(self, mail_request):
        """
        Used for copying the attachment parts of a mail.MailRequest
        object for mailing lists that need to maintain attachments.
        """
        for part in mail_request.all_parts():
            self.attach_part(part)

        self.base.content_encoding = mail_request.base.content_encoding.copy()

    def clear(self):
        """
        Clears out the attachments so you can redo them. Use this to keep the
        headers for a series of different messages with different attachments.
        """
        del self.attachments[:]
        del self.base.parts[:]
        self.multipart = False

    def update(self, message):
        """
        Used to easily set a bunch of headers from another dict-like object.
        """
        for k in message.keys():
            self.base[k] = message[k]

    def __str__(self):
        """
        Converts to a string.
        """
        return self.to_message().as_string()

    def _encode_attachment(self, filename=None, content_type=None, data=None,
                           disposition=None, part=None):
        """
        Used internally to take the attachments mentioned in self.attachments
        and do the actual encoding in a lazy way when you call to_message.
        """
        if part:
            self.base.parts.append(part)
        elif filename:
            if not data:
                # FIX: the previous open(filename).read() leaked the file
                # handle and read in text mode, which can corrupt binary
                # attachments on platforms that translate line endings.
                with open(filename, 'rb') as attachment_file:
                    data = attachment_file.read()

            self.base.attach_file(filename, data, content_type,
                                  disposition or 'attachment')
        else:
            self.base.attach_text(data, content_type)

        ctype = self.base.content_encoding['Content-Type'][0]

        # once anything is attached the container must become multipart
        if ctype and not ctype.startswith('multipart'):
            self.base.content_encoding['Content-Type'] = ('multipart/mixed', {})

    def to_message(self):
        """
        Figures out all the required steps to finally craft the
        message you need and return it. The resulting message
        is also available as a self.base attribute.

        What is returned is a Python email API message you can
        use with those APIs. The self.base attribute is the raw
        lamson.encoding.MailBase.
        """
        del self.base.parts[:]

        if self.Body and self.Html:
            self.multipart = True
            self.base.content_encoding['Content-Type'] = (
                'multipart/alternative', {})

        if self.multipart:
            self.base.body = None
            if self.Body:
                self.base.attach_text(self.Body, 'text/plain')

            if self.Html:
                self.base.attach_text(self.Html, 'text/html')

            for args in self.attachments:
                self._encode_attachment(**args)

        elif self.Body:
            self.base.body = self.Body
            self.base.content_encoding['Content-Type'] = ('text/plain', {})

        elif self.Html:
            self.base.body = self.Html
            self.base.content_encoding['Content-Type'] = ('text/html', {})

        return to_message(self.base, separator=self.separator)

    def all_parts(self):
        """
        Returns all the encoded parts. Only useful for debugging
        or inspecting after calling to_message().
        """
        return self.base.parts

    def keys(self):
        """Returns the sorted header names."""
        return self.base.keys()
|
317 | 317 | |
|
318 | 318 | |
|
def to_message(mail, separator="; "):
    """
    Given a MailBase message, construct a MIMEPart that is canonicalized
    for use with the Python email API.
    """
    ctype, params = mail.content_encoding['Content-Type']

    if ctype:
        if mail.parts:
            assert ctype.startswith(("multipart", "message")), \
                "Content type should be multipart or message, not %r" % ctype
    else:
        # no explicit type: pick a sensible default based on structure
        ctype = 'multipart/mixed' if mail.parts else 'text/plain'

    # adjust the content type according to what it should be now
    mail.content_encoding['Content-Type'] = (ctype, params)

    try:
        out = MIMEPart(ctype, **params)
    except TypeError as exc: # pragma: no cover
        raise EncodingError("Content-Type malformed, not allowed: %r; "
                            "%r (Python ERROR: %s" %
                            (ctype, params, exc.message))

    for name in mail.keys():
        # address headers get the address-aware encoding and the
        # caller-chosen separator; everything else is plain text
        if name in ADDRESS_HEADERS_WHITELIST:
            out[name.encode('ascii')] = header_to_mime_encoding(
                mail[name],
                not_email=False,
                separator=separator
            )
        else:
            out[name.encode('ascii')] = header_to_mime_encoding(
                mail[name],
                not_email=True
            )

    out.extract_payload(mail)

    # recurse into the children
    for child in mail.parts:
        out.attach(to_message(child))

    return out
|
366 | 366 | |
|
367 | 367 | |
|
class MIMEPart(MIMEBase):
    """
    A reimplementation of nearly everything in email.mime to be more useful
    for actually attaching things. Rather than one class for every type of
    thing you'd encode, there's just this one, and it figures out how to
    encode what you ask it.
    """
    def __init__(self, type, **params):
        self.maintype, self.subtype = type.split('/')
        MIMEBase.__init__(self, self.maintype, self.subtype, **params)

    def add_text(self, content):
        """
        Set *content* as the payload in canonical text form: pure-ASCII
        text is stored as ascii, anything else falls back to UTF-8.
        """
        try:
            encoded = content.encode('ascii')
            charset = 'ascii'
        except UnicodeError:
            encoded = content.encode('utf-8')
            charset = 'utf-8'

        self.set_payload(encoded, charset=charset)

    def extract_payload(self, mail):
        """
        Copy the body (and Content-Disposition, for non-text parts) of
        *mail* into this part.  Text bodies are stored canonically; all
        other content is base64 encoded.
        """
        if mail.body is None:
            return # only None, '' is still ok

        ctype, ctype_params = mail.content_encoding['Content-Type']
        cdisp, cdisp_params = mail.content_encoding['Content-Disposition']

        assert ctype, ("Extract payload requires that mail.content_encoding "
                       "have a valid Content-Type.")

        if ctype.startswith("text/"):
            self.add_text(mail.body)
        else:
            if cdisp:
                # replicate the content-disposition settings
                self.add_header('Content-Disposition', cdisp, **cdisp_params)

            self.set_payload(mail.body)
            encoders.encode_base64(self)

    def __repr__(self):
        # FIX: maintype/subtype were swapped, so a text/plain part
        # printed as 'plain/text'.
        return "<MIMEPart '%s/%s': %r, %r, multipart=%r>" % (
            self.maintype,
            self.subtype,
            self['Content-Type'],
            self['Content-Disposition'],
            self.is_multipart())
|
417 | 417 | |
|
418 | 418 | |
|
def header_to_mime_encoding(value, not_email=False, separator=", "):
    """Encode a header value for MIME output.

    A list value is encoded item by item and joined with *separator*;
    a falsy value yields the empty string.
    """
    if not value:
        return ""

    encoder = Charset(DEFAULT_ENCODING)
    # NOTE: exact list type check kept on purpose (matches original behavior)
    if type(value) == list:
        encoded_items = [properly_encode_header(item, encoder, not_email)
                         for item in value]
        return separator.join(encoded_items)
    return properly_encode_header(value, encoder, not_email)
|
429 | 429 | |
|
430 | 430 | |
|
def properly_encode_header(value, encoder, not_email):
    """
    The only thing special (weird) about this function is that it tries
    to do a fast check to see if the header value has an email address in
    it. Since random headers could have an email address, and email addresses
    have weird special formatting rules, we have to check for it.

    Normally this works fine, but in Librelist, we need to "obfuscate" email
    addresses by changing the '@' to '-AT-'. This is where
    VALUE_IS_EMAIL_ADDRESS exists. It's a simple lambda returning True/False
    to check if a header value has an email address. If you need to make this
    check different, then change this.
    """
    try:
        # fast path: pure ASCII needs no special treatment
        return value.encode("ascii")
    except UnicodeEncodeError:
        if not not_email and VALUE_IS_EMAIL_ADDRESS(value):
            # this could have an email address, make sure we don't screw it up
            name, address = parseaddr(value)
            encoded_name = encoder.header_encode(name.encode("utf-8"))
            return '"%s" <%s>' % (encoded_name, address)

        return encoder.header_encode(value.encode("utf-8"))
@@ -1,242 +1,242 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | |
|
15 | 15 | """ |
|
16 | 16 | Pylons application test package |
|
17 | 17 | |
|
18 | 18 | This package assumes the Pylons environment is already loaded, such as |
|
19 | 19 | when this script is imported from the `nosetests --with-pylons=test.ini` |
|
20 | 20 | command. |
|
21 | 21 | |
|
22 | 22 | This module initializes the application via ``websetup`` (`paster |
|
23 | 23 | setup-app`) and provides the base testing objects. |
|
24 | 24 | |
|
25 | 25 | nosetests -x - fail on first error |
|
26 | 26 | nosetests kallithea.tests.functional.test_admin_settings:TestSettingsController.test_my_account |
|
27 | 27 | nosetests --pdb --pdb-failures |
|
28 | 28 | nosetests --with-coverage --cover-package=kallithea.model.validators kallithea.tests.test_validators |
|
29 | 29 | |
|
30 | 30 | optional FLAGS: |
|
31 | 31 | KALLITHEA_WHOOSH_TEST_DISABLE=1 - skip whoosh index building and tests |
|
32 | 32 | KALLITHEA_NO_TMP_PATH=1 - disable new temp path for tests, used mostly for test_vcs_operations |
|
33 | 33 | |
|
34 | 34 | """ |
|
35 | 35 | import os |
|
36 | 36 | import re |
|
37 | 37 | import time |
|
38 | 38 | import logging |
|
39 | 39 | import datetime |
|
40 | 40 | import hashlib |
|
41 | 41 | import tempfile |
|
42 | 42 | from os.path import join as jn |
|
43 | 43 | |
|
44 | 44 | from tempfile import _RandomNameSequence |
|
45 | 45 | |
|
46 | 46 | import pylons |
|
47 | 47 | import pylons.test |
|
48 | 48 | from pylons import config, url |
|
49 | 49 | from pylons.i18n.translation import _get_translator |
|
50 | 50 | from pylons.util import ContextObj |
|
51 | 51 | |
|
52 | 52 | from routes.util import URLGenerator |
|
53 | 53 | from webtest import TestApp |
|
54 | 54 | from nose.plugins.skip import SkipTest |
|
55 | 55 | |
|
56 | 56 | from kallithea.lib.compat import unittest |
|
57 | 57 | from kallithea import is_windows |
|
58 | 58 | from kallithea.model.db import Notification, User, UserNotification |
|
59 | 59 | from kallithea.model.meta import Session |
|
60 | 60 | from kallithea.tests.parameterized import parameterized |
|
61 | 61 | from kallithea.lib.utils2 import safe_str |
|
62 | 62 | |
|
63 | 63 | |
|
64 | 64 | os.environ['TZ'] = 'UTC' |
|
65 | 65 | if not is_windows: |
|
66 | 66 | time.tzset() |
|
67 | 67 | |
|
68 | 68 | log = logging.getLogger(__name__) |
|
69 | 69 | |
|
70 | 70 | __all__ = [ |
|
71 | 71 | 'parameterized', 'environ', 'url', 'TestController', |
|
72 | 72 | 'SkipTest', 'ldap_lib_installed', 'pam_lib_installed', 'BaseTestCase', 'init_stack', |
|
73 | 73 | 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'NEW_HG_REPO', 'NEW_GIT_REPO', |
|
74 | 74 | 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS', |
|
75 | 75 | 'TEST_USER_ADMIN_EMAIL', 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS', |
|
76 | 76 | 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN', |
|
77 | 77 | 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO', |
|
78 | 78 | 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO', |
|
79 | 79 | 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'HG_REMOTE_REPO', |
|
80 | 80 | 'GIT_REMOTE_REPO', 'SCM_TESTS', |
|
81 | 81 | ] |
|
82 | 82 | |
|
83 | 83 | # Invoke websetup with the current config file |
|
84 | 84 | # SetupCommand('setup-app').run([config_file]) |
|
85 | 85 | |
|
86 | 86 | environ = {} |
|
87 | 87 | |
|
88 | 88 | #SOME GLOBALS FOR TESTS |
|
89 | 89 | |
|
90 | 90 | TESTS_TMP_PATH = jn('/', 'tmp', 'rc_test_%s' % _RandomNameSequence().next()) |
|
91 | 91 | TEST_USER_ADMIN_LOGIN = 'test_admin' |
|
92 | 92 | TEST_USER_ADMIN_PASS = 'test12' |
|
93 |
TEST_USER_ADMIN_EMAIL = 'test_admin@ |
|
|
93 | TEST_USER_ADMIN_EMAIL = 'test_admin@example.com' | |
|
94 | 94 | |
|
95 | 95 | TEST_USER_REGULAR_LOGIN = 'test_regular' |
|
96 | 96 | TEST_USER_REGULAR_PASS = 'test12' |
|
97 |
TEST_USER_REGULAR_EMAIL = 'test_regular@ |
|
|
97 | TEST_USER_REGULAR_EMAIL = 'test_regular@example.com' | |
|
98 | 98 | |
|
99 | 99 | TEST_USER_REGULAR2_LOGIN = 'test_regular2' |
|
100 | 100 | TEST_USER_REGULAR2_PASS = 'test12' |
|
101 |
TEST_USER_REGULAR2_EMAIL = 'test_regular2@ |
|
|
101 | TEST_USER_REGULAR2_EMAIL = 'test_regular2@example.com' | |
|
102 | 102 | |
|
103 | 103 | HG_REPO = 'vcs_test_hg' |
|
104 | 104 | GIT_REPO = 'vcs_test_git' |
|
105 | 105 | |
|
106 | 106 | NEW_HG_REPO = 'vcs_test_hg_new' |
|
107 | 107 | NEW_GIT_REPO = 'vcs_test_git_new' |
|
108 | 108 | |
|
109 | 109 | HG_FORK = 'vcs_test_hg_fork' |
|
110 | 110 | GIT_FORK = 'vcs_test_git_fork' |
|
111 | 111 | |
|
112 | 112 | ## VCS |
|
113 | 113 | SCM_TESTS = ['hg', 'git'] |
|
114 | 114 | uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple()))) |
|
115 | 115 | |
|
116 | 116 | GIT_REMOTE_REPO = 'git://github.com/codeinn/vcs.git' |
|
117 | 117 | |
|
118 | 118 | TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO) |
|
119 | 119 | TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix) |
|
120 | 120 | TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix) |
|
121 | 121 | |
|
122 | 122 | |
|
123 | 123 | HG_REMOTE_REPO = 'http://bitbucket.org/marcinkuzminski/vcs' |
|
124 | 124 | |
|
125 | 125 | TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO) |
|
126 | 126 | TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix) |
|
127 | 127 | TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix) |
|
128 | 128 | |
|
129 | 129 | TEST_DIR = tempfile.gettempdir() |
|
130 | 130 | TEST_REPO_PREFIX = 'vcs-test' |
|
131 | 131 | |
|
132 | 132 | # cached repos if any ! |
|
133 | 133 | # comment out to get some other repos from bb or github |
|
134 | 134 | GIT_REMOTE_REPO = jn(TESTS_TMP_PATH, GIT_REPO) |
|
135 | 135 | HG_REMOTE_REPO = jn(TESTS_TMP_PATH, HG_REPO) |
|
136 | 136 | |
|
137 | 137 | #skip ldap tests if LDAP lib is not installed |
|
138 | 138 | ldap_lib_installed = False |
|
139 | 139 | try: |
|
140 | 140 | import ldap |
|
141 | 141 | ldap.API_VERSION |
|
142 | 142 | ldap_lib_installed = True |
|
143 | 143 | except ImportError: |
|
144 | 144 | # means that python-ldap is not installed |
|
145 | 145 | pass |
|
146 | 146 | |
|
147 | 147 | try: |
|
148 | 148 | import pam |
|
149 | 149 | pam.PAM_TEXT_INFO |
|
150 | 150 | pam_lib_installed = True |
|
151 | 151 | except ImportError: |
|
152 | 152 | pam_lib_installed = False |
|
153 | 153 | |
|
154 | 154 | import logging |
|
155 | 155 | |
|
class NullHandler(logging.Handler):
    """A logging handler that silently discards every record."""
    def emit(self, record):
        pass
|
159 | 159 | |
|
def init_stack(config=None):
    """Push the Pylons thread-local stack objects needed by tests.

    Uses the running pylons test app's config when none is given, then
    registers url generator, app globals, config, template context and a
    translator, and silences the "kallithea" logger.
    """
    config = config or pylons.test.pylonsapp.config
    url._push_object(URLGenerator(config['routes.map'], environ))
    pylons.app_globals._push_object(config['pylons.app_globals'])
    pylons.config._push_object(config)
    pylons.tmpl_context._push_object(ContextObj())
    # Initialize a translator for tests that utilize i18n
    pylons.translator._push_object(_get_translator(pylons.config.get('lang')))
    logging.getLogger("kallithea").addHandler(NullHandler())
|
172 | 172 | |
|
173 | 173 | |
|
class BaseTestCase(unittest.TestCase):
    """Base test case wiring each test into the running Pylons test app."""

    def __init__(self, *args, **kwargs):
        self.wsgiapp = pylons.test.pylonsapp
        init_stack(self.wsgiapp.config)
        unittest.TestCase.__init__(self, *args, **kwargs)

    def remove_all_notifications(self):
        """Delete every notification plus its per-user link rows."""
        Notification.query().delete()

        # Because query().delete() does not (by default) trigger cascades.
        # http://docs.sqlalchemy.org/en/rel_0_7/orm/collections.html#passive-deletes
        UserNotification.query().delete()

        Session().commit()
188 | 188 | |
|
189 | 189 | |
|
class TestController(BaseTestCase):
    """Base class for functional controller tests; adds a WebTest app."""

    def __init__(self, *args, **kwargs):
        BaseTestCase.__init__(self, *args, **kwargs)
        self.app = TestApp(self.wsgiapp)
        self.maxDiff = None
        self.index_location = config['app_conf']['index_dir']

    def log_user(self, username=TEST_USER_ADMIN_LOGIN,
                 password=TEST_USER_ADMIN_PASS):
        """Log in via the login form; return the session's authuser dict."""
        self._logged_username = username
        response = self.app.post(url(controller='login', action='index'),
                                 {'username': username,
                                  'password': password})

        if 'Invalid username or password' in response.body:
            self.fail('could not login using %s %s' % (username, password))

        self.assertEqual(response.status, '302 Found')
        self.assert_authenticated_user(response, username)

        return response.follow().session['authuser']

    def _get_logged_user(self):
        """Return the User DB object of the last logged-in username."""
        return User.get_by_username(self._logged_username)

    def assert_authenticated_user(self, response, expected_username):
        """Assert the response's session cookie authenticates *expected_username*."""
        cookie = response.session.get('authuser')
        user_id = cookie and cookie.get('user_id')
        db_user = user_id and User.get(user_id)
        actual_username = db_user and db_user.username
        self.assertEqual(actual_username, expected_username)
        self.assertEqual(cookie.get('is_authenticated'), True)

    def authentication_token(self):
        """Fetch a fresh CSRF/auth token from the app."""
        return self.app.get(url('authentication_token')).body

    def checkSessionFlash(self, response, msg=None, skip=0, _matcher=lambda msg, m: msg in m):
        """Assert that *msg* matches a recent session flash message.

        ``skip`` selects earlier messages counted back from the newest;
        ``_matcher`` decides what "matches" means (default: substring).
        """
        if 'flash' not in response.session:
            self.fail(safe_str(u'msg `%s` not found - session has no flash:\n%s' % (msg, response)))
        try:
            level, m = response.session['flash'][-1 - skip]
            if _matcher(msg, m):
                return
        except IndexError:
            # fewer flash messages than `skip` asked for - fall through to fail
            pass
        self.fail(safe_str(u'msg `%s` not found in session flash (skipping %s): %s' %
                           (msg, skip,
                            ', '.join('`%s`' % m for level, m in response.session['flash']))))

    def checkSessionFlashRegex(self, response, regex, skip=0):
        """Like checkSessionFlash, but matches with a regular expression."""
        self.checkSessionFlash(response, regex, skip=skip, _matcher=re.search)
@@ -1,2407 +1,2407 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | |
|
15 | 15 | """ |
|
16 | 16 | tests for api. run with:: |
|
17 | 17 | |
|
18 | 18 | KALLITHEA_WHOOSH_TEST_DISABLE=1 nosetests --with-coverage --cover-package=kallithea.controllers.api.api -x kallithea/tests/api |
|
19 | 19 | """ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import random |
|
23 | 23 | import mock |
|
24 | 24 | |
|
25 | 25 | from kallithea.tests import * |
|
26 | 26 | from kallithea.tests.fixture import Fixture |
|
27 | 27 | from kallithea.lib.compat import json |
|
28 | 28 | from kallithea.lib.auth import AuthUser |
|
29 | 29 | from kallithea.model.user import UserModel |
|
30 | 30 | from kallithea.model.user_group import UserGroupModel |
|
31 | 31 | from kallithea.model.repo import RepoModel |
|
32 | 32 | from kallithea.model.repo_group import RepoGroupModel |
|
33 | 33 | from kallithea.model.meta import Session |
|
34 | 34 | from kallithea.model.scm import ScmModel |
|
35 | 35 | from kallithea.model.gist import GistModel |
|
36 | 36 | from kallithea.model.db import Repository, User, Setting |
|
37 | 37 | from kallithea.lib.utils2 import time_to_datetime |
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | API_URL = '/_admin/api' |
|
41 | 41 | TEST_USER_GROUP = 'test_user_group' |
|
42 | 42 | TEST_REPO_GROUP = 'test_repo_group' |
|
43 | 43 | |
|
44 | 44 | fixture = Fixture() |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | def _build_data(apikey, method, **kw): |
|
48 | 48 | """ |
|
49 | 49 | Builds API data with given random ID |
|
50 | 50 | |
|
51 | 51 | :param random_id: |
|
52 | 52 | """ |
|
53 | 53 | random_id = random.randrange(1, 9999) |
|
54 | 54 | return random_id, json.dumps({ |
|
55 | 55 | "id": random_id, |
|
56 | 56 | "api_key": apikey, |
|
57 | 57 | "method": method, |
|
58 | 58 | "args": kw |
|
59 | 59 | }) |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | jsonify = lambda obj: json.loads(json.dumps(obj)) |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | def crash(*args, **kwargs): |
|
66 | 66 | raise Exception('Total Crash !') |
|
67 | 67 | |
|
68 | 68 | |
|
69 | 69 | def api_call(test_obj, params): |
|
70 | 70 | response = test_obj.app.post(API_URL, content_type='application/json', |
|
71 | 71 | params=params) |
|
72 | 72 | return response |
|
73 | 73 | |
|
74 | 74 | |
|
75 | 75 | ## helpers |
|
76 | 76 | def make_user_group(name=TEST_USER_GROUP): |
|
77 | 77 | gr = fixture.create_user_group(name, cur_user=TEST_USER_ADMIN_LOGIN) |
|
78 | 78 | UserGroupModel().add_user_to_group(user_group=gr, |
|
79 | 79 | user=TEST_USER_ADMIN_LOGIN) |
|
80 | 80 | Session().commit() |
|
81 | 81 | return gr |
|
82 | 82 | |
|
83 | 83 | |
|
84 | 84 | def make_repo_group(name=TEST_REPO_GROUP): |
|
85 | 85 | gr = fixture.create_repo_group(name, cur_user=TEST_USER_ADMIN_LOGIN) |
|
86 | 86 | Session().commit() |
|
87 | 87 | return gr |
|
88 | 88 | |
|
89 | 89 | |
|
90 | 90 | class _BaseTestApi(object): |
|
91 | 91 | REPO = None |
|
92 | 92 | REPO_TYPE = None |
|
93 | 93 | |
|
94 | 94 | @classmethod |
|
95 | 95 | def setup_class(cls): |
|
96 | 96 | cls.usr = User.get_by_username(TEST_USER_ADMIN_LOGIN) |
|
97 | 97 | cls.apikey = cls.usr.api_key |
|
98 | 98 | cls.test_user = UserModel().create_or_update( |
|
99 | 99 | username='test-api', |
|
100 | 100 | password='test', |
|
101 | 101 | email='test@example.com', |
|
102 | 102 | firstname='first', |
|
103 | 103 | lastname='last' |
|
104 | 104 | ) |
|
105 | 105 | Session().commit() |
|
106 | 106 | cls.TEST_USER_LOGIN = cls.test_user.username |
|
107 | 107 | cls.apikey_regular = cls.test_user.api_key |
|
108 | 108 | |
|
109 | 109 | @classmethod |
|
110 | 110 | def teardown_class(cls): |
|
111 | 111 | pass |
|
112 | 112 | |
|
113 | 113 | def setUp(self): |
|
114 | 114 | self.maxDiff = None |
|
115 | 115 | make_user_group() |
|
116 | 116 | make_repo_group() |
|
117 | 117 | |
|
118 | 118 | def tearDown(self): |
|
119 | 119 | fixture.destroy_user_group(TEST_USER_GROUP) |
|
120 | 120 | fixture.destroy_gists() |
|
121 | 121 | fixture.destroy_repo_group(TEST_REPO_GROUP) |
|
122 | 122 | |
|
123 | 123 | def _compare_ok(self, id_, expected, given): |
|
124 | 124 | expected = jsonify({ |
|
125 | 125 | 'id': id_, |
|
126 | 126 | 'error': None, |
|
127 | 127 | 'result': expected |
|
128 | 128 | }) |
|
129 | 129 | given = json.loads(given) |
|
130 | 130 | self.assertEqual(expected, given) |
|
131 | 131 | |
|
132 | 132 | def _compare_error(self, id_, expected, given): |
|
133 | 133 | expected = jsonify({ |
|
134 | 134 | 'id': id_, |
|
135 | 135 | 'error': expected, |
|
136 | 136 | 'result': None |
|
137 | 137 | }) |
|
138 | 138 | given = json.loads(given) |
|
139 | 139 | self.assertEqual(expected, given) |
|
140 | 140 | |
|
141 | 141 | def test_Optional_object(self): |
|
142 | 142 | from kallithea.controllers.api.api import Optional |
|
143 | 143 | |
|
144 | 144 | option1 = Optional(None) |
|
145 | 145 | self.assertEqual('<Optional:%s>' % None, repr(option1)) |
|
146 | 146 | self.assertEqual(option1(), None) |
|
147 | 147 | |
|
148 | 148 | self.assertEqual(1, Optional.extract(Optional(1))) |
|
149 | 149 | self.assertEqual('trololo', Optional.extract('trololo')) |
|
150 | 150 | |
|
151 | 151 | def test_Optional_OAttr(self): |
|
152 | 152 | from kallithea.controllers.api.api import Optional, OAttr |
|
153 | 153 | |
|
154 | 154 | option1 = Optional(OAttr('apiuser')) |
|
155 | 155 | self.assertEqual('apiuser', Optional.extract(option1)) |
|
156 | 156 | |
|
157 | 157 | def test_OAttr_object(self): |
|
158 | 158 | from kallithea.controllers.api.api import OAttr |
|
159 | 159 | |
|
160 | 160 | oattr1 = OAttr('apiuser') |
|
161 | 161 | self.assertEqual('<OptionalAttr:apiuser>', repr(oattr1)) |
|
162 | 162 | self.assertEqual(oattr1(), oattr1) |
|
163 | 163 | |
|
164 | 164 | def test_api_wrong_key(self): |
|
165 | 165 | id_, params = _build_data('trololo', 'get_user') |
|
166 | 166 | response = api_call(self, params) |
|
167 | 167 | |
|
168 | 168 | expected = 'Invalid API key' |
|
169 | 169 | self._compare_error(id_, expected, given=response.body) |
|
170 | 170 | |
|
171 | 171 | def test_api_missing_non_optional_param(self): |
|
172 | 172 | id_, params = _build_data(self.apikey, 'get_repo') |
|
173 | 173 | response = api_call(self, params) |
|
174 | 174 | |
|
175 | 175 | expected = 'Missing non optional `repoid` arg in JSON DATA' |
|
176 | 176 | self._compare_error(id_, expected, given=response.body) |
|
177 | 177 | |
|
178 | 178 | def test_api_missing_non_optional_param_args_null(self): |
|
179 | 179 | id_, params = _build_data(self.apikey, 'get_repo') |
|
180 | 180 | params = params.replace('"args": {}', '"args": null') |
|
181 | 181 | response = api_call(self, params) |
|
182 | 182 | |
|
183 | 183 | expected = 'Missing non optional `repoid` arg in JSON DATA' |
|
184 | 184 | self._compare_error(id_, expected, given=response.body) |
|
185 | 185 | |
|
186 | 186 | def test_api_missing_non_optional_param_args_bad(self): |
|
187 | 187 | id_, params = _build_data(self.apikey, 'get_repo') |
|
188 | 188 | params = params.replace('"args": {}', '"args": 1') |
|
189 | 189 | response = api_call(self, params) |
|
190 | 190 | |
|
191 | 191 | expected = 'Missing non optional `repoid` arg in JSON DATA' |
|
192 | 192 | self._compare_error(id_, expected, given=response.body) |
|
193 | 193 | |
|
194 | 194 | def test_api_args_is_null(self): |
|
195 | 195 | id_, params = _build_data(self.apikey, 'get_users', ) |
|
196 | 196 | params = params.replace('"args": {}', '"args": null') |
|
197 | 197 | response = api_call(self, params) |
|
198 | 198 | self.assertEqual(response.status, '200 OK') |
|
199 | 199 | |
|
200 | 200 | def test_api_args_is_bad(self): |
|
201 | 201 | id_, params = _build_data(self.apikey, 'get_users', ) |
|
202 | 202 | params = params.replace('"args": {}', '"args": 1') |
|
203 | 203 | response = api_call(self, params) |
|
204 | 204 | self.assertEqual(response.status, '200 OK') |
|
205 | 205 | |
|
206 | 206 | def test_api_args_different_args(self): |
|
207 | 207 | import string |
|
208 | 208 | expected = { |
|
209 | 209 | 'ascii_letters': string.ascii_letters, |
|
210 | 210 | 'ws': string.whitespace, |
|
211 | 211 | 'printables': string.printable |
|
212 | 212 | } |
|
213 | 213 | id_, params = _build_data(self.apikey, 'test', args=expected) |
|
214 | 214 | response = api_call(self, params) |
|
215 | 215 | self.assertEqual(response.status, '200 OK') |
|
216 | 216 | self._compare_ok(id_, expected, response.body) |
|
217 | 217 | |
|
218 | 218 | def test_api_get_users(self): |
|
219 | 219 | id_, params = _build_data(self.apikey, 'get_users', ) |
|
220 | 220 | response = api_call(self, params) |
|
221 | 221 | ret_all = [] |
|
222 | 222 | _users = User.query().filter(User.username != User.DEFAULT_USER) \ |
|
223 | 223 | .order_by(User.username).all() |
|
224 | 224 | for usr in _users: |
|
225 | 225 | ret = usr.get_api_data() |
|
226 | 226 | ret_all.append(jsonify(ret)) |
|
227 | 227 | expected = ret_all |
|
228 | 228 | self._compare_ok(id_, expected, given=response.body) |
|
229 | 229 | |
|
230 | 230 | def test_api_get_user(self): |
|
231 | 231 | id_, params = _build_data(self.apikey, 'get_user', |
|
232 | 232 | userid=TEST_USER_ADMIN_LOGIN) |
|
233 | 233 | response = api_call(self, params) |
|
234 | 234 | |
|
235 | 235 | usr = User.get_by_username(TEST_USER_ADMIN_LOGIN) |
|
236 | 236 | ret = usr.get_api_data() |
|
237 | 237 | ret['permissions'] = AuthUser(dbuser=usr).permissions |
|
238 | 238 | |
|
239 | 239 | expected = ret |
|
240 | 240 | self._compare_ok(id_, expected, given=response.body) |
|
241 | 241 | |
|
242 | 242 | def test_api_get_user_that_does_not_exist(self): |
|
243 | 243 | id_, params = _build_data(self.apikey, 'get_user', |
|
244 | 244 | userid='trololo') |
|
245 | 245 | response = api_call(self, params) |
|
246 | 246 | |
|
247 | 247 | expected = "user `%s` does not exist" % 'trololo' |
|
248 | 248 | self._compare_error(id_, expected, given=response.body) |
|
249 | 249 | |
|
250 | 250 | def test_api_get_user_without_giving_userid(self): |
|
251 | 251 | id_, params = _build_data(self.apikey, 'get_user') |
|
252 | 252 | response = api_call(self, params) |
|
253 | 253 | |
|
254 | 254 | usr = User.get_by_username(TEST_USER_ADMIN_LOGIN) |
|
255 | 255 | ret = usr.get_api_data() |
|
256 | 256 | ret['permissions'] = AuthUser(dbuser=usr).permissions |
|
257 | 257 | |
|
258 | 258 | expected = ret |
|
259 | 259 | self._compare_ok(id_, expected, given=response.body) |
|
260 | 260 | |
|
261 | 261 | def test_api_get_user_without_giving_userid_non_admin(self): |
|
262 | 262 | id_, params = _build_data(self.apikey_regular, 'get_user') |
|
263 | 263 | response = api_call(self, params) |
|
264 | 264 | |
|
265 | 265 | usr = User.get_by_username(self.TEST_USER_LOGIN) |
|
266 | 266 | ret = usr.get_api_data() |
|
267 | 267 | ret['permissions'] = AuthUser(dbuser=usr).permissions |
|
268 | 268 | |
|
269 | 269 | expected = ret |
|
270 | 270 | self._compare_ok(id_, expected, given=response.body) |
|
271 | 271 | |
|
272 | 272 | def test_api_get_user_with_giving_userid_non_admin(self): |
|
273 | 273 | id_, params = _build_data(self.apikey_regular, 'get_user', |
|
274 | 274 | userid=self.TEST_USER_LOGIN) |
|
275 | 275 | response = api_call(self, params) |
|
276 | 276 | |
|
277 | 277 | expected = 'userid is not the same as your user' |
|
278 | 278 | self._compare_error(id_, expected, given=response.body) |
|
279 | 279 | |
|
280 | 280 | def test_api_pull(self): |
|
281 | 281 | repo_name = 'test_pull' |
|
282 | 282 | r = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE) |
|
283 | 283 | r.clone_uri = os.path.join(TESTS_TMP_PATH, self.REPO) |
|
284 | 284 | Session.add(r) |
|
285 | 285 | Session.commit() |
|
286 | 286 | |
|
287 | 287 | id_, params = _build_data(self.apikey, 'pull', |
|
288 | 288 | repoid=repo_name,) |
|
289 | 289 | response = api_call(self, params) |
|
290 | 290 | |
|
291 | 291 | expected = {'msg': 'Pulled from `%s`' % repo_name, |
|
292 | 292 | 'repository': repo_name} |
|
293 | 293 | self._compare_ok(id_, expected, given=response.body) |
|
294 | 294 | |
|
295 | 295 | fixture.destroy_repo(repo_name) |
|
296 | 296 | |
|
297 | 297 | def test_api_pull_error(self): |
|
298 | 298 | id_, params = _build_data(self.apikey, 'pull', |
|
299 | 299 | repoid=self.REPO, ) |
|
300 | 300 | response = api_call(self, params) |
|
301 | 301 | |
|
302 | 302 | expected = 'Unable to pull changes from `%s`' % self.REPO |
|
303 | 303 | self._compare_error(id_, expected, given=response.body) |
|
304 | 304 | |
|
305 | 305 | def test_api_rescan_repos(self): |
|
306 | 306 | id_, params = _build_data(self.apikey, 'rescan_repos') |
|
307 | 307 | response = api_call(self, params) |
|
308 | 308 | |
|
309 | 309 | expected = {'added': [], 'removed': []} |
|
310 | 310 | self._compare_ok(id_, expected, given=response.body) |
|
311 | 311 | |
|
312 | 312 | @mock.patch.object(ScmModel, 'repo_scan', crash) |
|
313 | 313 | def test_api_rescann_error(self): |
|
314 | 314 | id_, params = _build_data(self.apikey, 'rescan_repos', ) |
|
315 | 315 | response = api_call(self, params) |
|
316 | 316 | |
|
317 | 317 | expected = 'Error occurred during rescan repositories action' |
|
318 | 318 | self._compare_error(id_, expected, given=response.body) |
|
319 | 319 | |
|
320 | 320 | def test_api_invalidate_cache(self): |
|
321 | 321 | repo = RepoModel().get_by_repo_name(self.REPO) |
|
322 | 322 | repo.scm_instance_cached() # seed cache |
|
323 | 323 | |
|
324 | 324 | id_, params = _build_data(self.apikey, 'invalidate_cache', |
|
325 | 325 | repoid=self.REPO) |
|
326 | 326 | response = api_call(self, params) |
|
327 | 327 | |
|
328 | 328 | expected = { |
|
329 | 329 | 'msg': "Cache for repository `%s` was invalidated" % (self.REPO,), |
|
330 | 330 | 'repository': self.REPO |
|
331 | 331 | } |
|
332 | 332 | self._compare_ok(id_, expected, given=response.body) |
|
333 | 333 | |
|
334 | 334 | @mock.patch.object(ScmModel, 'mark_for_invalidation', crash) |
|
335 | 335 | def test_api_invalidate_cache_error(self): |
|
336 | 336 | id_, params = _build_data(self.apikey, 'invalidate_cache', |
|
337 | 337 | repoid=self.REPO) |
|
338 | 338 | response = api_call(self, params) |
|
339 | 339 | |
|
340 | 340 | expected = 'Error occurred during cache invalidation action' |
|
341 | 341 | self._compare_error(id_, expected, given=response.body) |
|
342 | 342 | |
|
343 | 343 | def test_api_invalidate_cache_regular_user_no_permission(self): |
|
344 | 344 | repo = RepoModel().get_by_repo_name(self.REPO) |
|
345 | 345 | repo.scm_instance_cached() # seed cache |
|
346 | 346 | |
|
347 | 347 | id_, params = _build_data(self.apikey_regular, 'invalidate_cache', |
|
348 | 348 | repoid=self.REPO) |
|
349 | 349 | response = api_call(self, params) |
|
350 | 350 | |
|
351 | 351 | expected = "repository `%s` does not exist" % (self.REPO,) |
|
352 | 352 | self._compare_error(id_, expected, given=response.body) |
|
353 | 353 | |
|
354 | 354 | def test_api_lock_repo_lock_aquire(self): |
|
355 | 355 | id_, params = _build_data(self.apikey, 'lock', |
|
356 | 356 | userid=TEST_USER_ADMIN_LOGIN, |
|
357 | 357 | repoid=self.REPO, |
|
358 | 358 | locked=True) |
|
359 | 359 | response = api_call(self, params) |
|
360 | 360 | expected = { |
|
361 | 361 | 'repo': self.REPO, 'locked': True, |
|
362 | 362 | 'locked_since': response.json['result']['locked_since'], |
|
363 | 363 | 'locked_by': TEST_USER_ADMIN_LOGIN, |
|
364 | 364 | 'lock_state_changed': True, |
|
365 | 365 | 'msg': ('User `%s` set lock state for repo `%s` to `%s`' |
|
366 | 366 | % (TEST_USER_ADMIN_LOGIN, self.REPO, True)) |
|
367 | 367 | } |
|
368 | 368 | self._compare_ok(id_, expected, given=response.body) |
|
369 | 369 | |
|
370 | 370 | def test_api_lock_repo_lock_aquire_by_non_admin(self): |
|
371 | 371 | repo_name = 'api_delete_me' |
|
372 | 372 | fixture.create_repo(repo_name, repo_type=self.REPO_TYPE, |
|
373 | 373 | cur_user=self.TEST_USER_LOGIN) |
|
374 | 374 | try: |
|
375 | 375 | id_, params = _build_data(self.apikey_regular, 'lock', |
|
376 | 376 | repoid=repo_name, |
|
377 | 377 | locked=True) |
|
378 | 378 | response = api_call(self, params) |
|
379 | 379 | expected = { |
|
380 | 380 | 'repo': repo_name, |
|
381 | 381 | 'locked': True, |
|
382 | 382 | 'locked_since': response.json['result']['locked_since'], |
|
383 | 383 | 'locked_by': self.TEST_USER_LOGIN, |
|
384 | 384 | 'lock_state_changed': True, |
|
385 | 385 | 'msg': ('User `%s` set lock state for repo `%s` to `%s`' |
|
386 | 386 | % (self.TEST_USER_LOGIN, repo_name, True)) |
|
387 | 387 | } |
|
388 | 388 | self._compare_ok(id_, expected, given=response.body) |
|
389 | 389 | finally: |
|
390 | 390 | fixture.destroy_repo(repo_name) |
|
391 | 391 | |
|
392 | 392 | def test_api_lock_repo_lock_aquire_non_admin_with_userid(self): |
|
393 | 393 | repo_name = 'api_delete_me' |
|
394 | 394 | fixture.create_repo(repo_name, repo_type=self.REPO_TYPE, |
|
395 | 395 | cur_user=self.TEST_USER_LOGIN) |
|
396 | 396 | try: |
|
397 | 397 | id_, params = _build_data(self.apikey_regular, 'lock', |
|
398 | 398 | userid=TEST_USER_ADMIN_LOGIN, |
|
399 | 399 | repoid=repo_name, |
|
400 | 400 | locked=True) |
|
401 | 401 | response = api_call(self, params) |
|
402 | 402 | expected = 'userid is not the same as your user' |
|
403 | 403 | self._compare_error(id_, expected, given=response.body) |
|
404 | 404 | finally: |
|
405 | 405 | fixture.destroy_repo(repo_name) |
|
406 | 406 | |
|
407 | 407 | def test_api_lock_repo_lock_aquire_non_admin_not_his_repo(self): |
|
408 | 408 | id_, params = _build_data(self.apikey_regular, 'lock', |
|
409 | 409 | repoid=self.REPO, |
|
410 | 410 | locked=True) |
|
411 | 411 | response = api_call(self, params) |
|
412 | 412 | expected = 'repository `%s` does not exist' % (self.REPO) |
|
413 | 413 | self._compare_error(id_, expected, given=response.body) |
|
414 | 414 | |
|
415 | 415 | def test_api_lock_repo_lock_release(self): |
|
416 | 416 | id_, params = _build_data(self.apikey, 'lock', |
|
417 | 417 | userid=TEST_USER_ADMIN_LOGIN, |
|
418 | 418 | repoid=self.REPO, |
|
419 | 419 | locked=False) |
|
420 | 420 | response = api_call(self, params) |
|
421 | 421 | expected = { |
|
422 | 422 | 'repo': self.REPO, |
|
423 | 423 | 'locked': False, |
|
424 | 424 | 'locked_since': None, |
|
425 | 425 | 'locked_by': TEST_USER_ADMIN_LOGIN, |
|
426 | 426 | 'lock_state_changed': True, |
|
427 | 427 | 'msg': ('User `%s` set lock state for repo `%s` to `%s`' |
|
428 | 428 | % (TEST_USER_ADMIN_LOGIN, self.REPO, False)) |
|
429 | 429 | } |
|
430 | 430 | self._compare_ok(id_, expected, given=response.body) |
|
431 | 431 | |
|
432 | 432 | def test_api_lock_repo_lock_aquire_optional_userid(self): |
|
433 | 433 | id_, params = _build_data(self.apikey, 'lock', |
|
434 | 434 | repoid=self.REPO, |
|
435 | 435 | locked=True) |
|
436 | 436 | response = api_call(self, params) |
|
437 | 437 | time_ = response.json['result']['locked_since'] |
|
438 | 438 | expected = { |
|
439 | 439 | 'repo': self.REPO, |
|
440 | 440 | 'locked': True, |
|
441 | 441 | 'locked_since': time_, |
|
442 | 442 | 'locked_by': TEST_USER_ADMIN_LOGIN, |
|
443 | 443 | 'lock_state_changed': True, |
|
444 | 444 | 'msg': ('User `%s` set lock state for repo `%s` to `%s`' |
|
445 | 445 | % (TEST_USER_ADMIN_LOGIN, self.REPO, True)) |
|
446 | 446 | } |
|
447 | 447 | |
|
448 | 448 | self._compare_ok(id_, expected, given=response.body) |
|
449 | 449 | |
|
450 | 450 | def test_api_lock_repo_lock_optional_locked(self): |
|
451 | 451 | id_, params = _build_data(self.apikey, 'lock', |
|
452 | 452 | repoid=self.REPO) |
|
453 | 453 | response = api_call(self, params) |
|
454 | 454 | time_ = response.json['result']['locked_since'] |
|
455 | 455 | expected = { |
|
456 | 456 | 'repo': self.REPO, |
|
457 | 457 | 'locked': True, |
|
458 | 458 | 'locked_since': time_, |
|
459 | 459 | 'locked_by': TEST_USER_ADMIN_LOGIN, |
|
460 | 460 | 'lock_state_changed': False, |
|
461 | 461 | 'msg': ('Repo `%s` locked by `%s` on `%s`.' |
|
462 | 462 | % (self.REPO, TEST_USER_ADMIN_LOGIN, |
|
463 | 463 | json.dumps(time_to_datetime(time_)))) |
|
464 | 464 | } |
|
465 | 465 | self._compare_ok(id_, expected, given=response.body) |
|
466 | 466 | |
|
467 | 467 | def test_api_lock_repo_lock_optional_not_locked(self): |
|
468 | 468 | repo_name = 'api_not_locked' |
|
469 | 469 | repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE, |
|
470 | 470 | cur_user=self.TEST_USER_LOGIN) |
|
471 | 471 | self.assertEqual(repo.locked, [None, None]) |
|
472 | 472 | try: |
|
473 | 473 | id_, params = _build_data(self.apikey, 'lock', |
|
474 | 474 | repoid=repo.repo_id) |
|
475 | 475 | response = api_call(self, params) |
|
476 | 476 | expected = { |
|
477 | 477 | 'repo': repo_name, |
|
478 | 478 | 'locked': False, |
|
479 | 479 | 'locked_since': None, |
|
480 | 480 | 'locked_by': None, |
|
481 | 481 | 'lock_state_changed': False, |
|
482 | 482 | 'msg': ('Repo `%s` not locked.' % (repo_name,)) |
|
483 | 483 | } |
|
484 | 484 | self._compare_ok(id_, expected, given=response.body) |
|
485 | 485 | finally: |
|
486 | 486 | fixture.destroy_repo(repo_name) |
|
487 | 487 | |
|
488 | 488 | @mock.patch.object(Repository, 'lock', crash) |
|
489 | 489 | def test_api_lock_error(self): |
|
490 | 490 | id_, params = _build_data(self.apikey, 'lock', |
|
491 | 491 | userid=TEST_USER_ADMIN_LOGIN, |
|
492 | 492 | repoid=self.REPO, |
|
493 | 493 | locked=True) |
|
494 | 494 | response = api_call(self, params) |
|
495 | 495 | |
|
496 | 496 | expected = 'Error occurred locking repository `%s`' % self.REPO |
|
497 | 497 | self._compare_error(id_, expected, given=response.body) |
|
498 | 498 | |
|
499 | 499 | def test_api_get_locks_regular_user(self): |
|
500 | 500 | id_, params = _build_data(self.apikey_regular, 'get_locks') |
|
501 | 501 | response = api_call(self, params) |
|
502 | 502 | expected = [] |
|
503 | 503 | self._compare_ok(id_, expected, given=response.body) |
|
504 | 504 | |
|
505 | 505 | def test_api_get_locks_with_userid_regular_user(self): |
|
506 | 506 | id_, params = _build_data(self.apikey_regular, 'get_locks', |
|
507 | 507 | userid=TEST_USER_ADMIN_LOGIN) |
|
508 | 508 | response = api_call(self, params) |
|
509 | 509 | expected = 'userid is not the same as your user' |
|
510 | 510 | self._compare_error(id_, expected, given=response.body) |
|
511 | 511 | |
|
512 | 512 | def test_api_get_locks(self): |
|
513 | 513 | id_, params = _build_data(self.apikey, 'get_locks') |
|
514 | 514 | response = api_call(self, params) |
|
515 | 515 | expected = [] |
|
516 | 516 | self._compare_ok(id_, expected, given=response.body) |
|
517 | 517 | |
|
518 | 518 | def test_api_get_locks_with_one_locked_repo(self): |
|
519 | 519 | repo_name = 'api_delete_me' |
|
520 | 520 | repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE, |
|
521 | 521 | cur_user=self.TEST_USER_LOGIN) |
|
522 | 522 | Repository.lock(repo, User.get_by_username(self.TEST_USER_LOGIN).user_id) |
|
523 | 523 | try: |
|
524 | 524 | id_, params = _build_data(self.apikey, 'get_locks') |
|
525 | 525 | response = api_call(self, params) |
|
526 | 526 | expected = [repo.get_api_data()] |
|
527 | 527 | self._compare_ok(id_, expected, given=response.body) |
|
528 | 528 | finally: |
|
529 | 529 | fixture.destroy_repo(repo_name) |
|
530 | 530 | |
|
531 | 531 | def test_api_get_locks_with_one_locked_repo_for_specific_user(self): |
|
532 | 532 | repo_name = 'api_delete_me' |
|
533 | 533 | repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE, |
|
534 | 534 | cur_user=self.TEST_USER_LOGIN) |
|
535 | 535 | Repository.lock(repo, User.get_by_username(self.TEST_USER_LOGIN).user_id) |
|
536 | 536 | try: |
|
537 | 537 | id_, params = _build_data(self.apikey, 'get_locks', |
|
538 | 538 | userid=self.TEST_USER_LOGIN) |
|
539 | 539 | response = api_call(self, params) |
|
540 | 540 | expected = [repo.get_api_data()] |
|
541 | 541 | self._compare_ok(id_, expected, given=response.body) |
|
542 | 542 | finally: |
|
543 | 543 | fixture.destroy_repo(repo_name) |
|
544 | 544 | |
|
545 | 545 | def test_api_get_locks_with_userid(self): |
|
546 | 546 | id_, params = _build_data(self.apikey, 'get_locks', |
|
547 | 547 | userid=TEST_USER_REGULAR_LOGIN) |
|
548 | 548 | response = api_call(self, params) |
|
549 | 549 | expected = [] |
|
550 | 550 | self._compare_ok(id_, expected, given=response.body) |
|
551 | 551 | |
|
552 | 552 | def test_api_create_existing_user(self): |
|
553 | 553 | id_, params = _build_data(self.apikey, 'create_user', |
|
554 | 554 | username=TEST_USER_ADMIN_LOGIN, |
|
555 |
email='test@ |
|
|
555 | email='test@example.com', | |
|
556 | 556 | password='trololo') |
|
557 | 557 | response = api_call(self, params) |
|
558 | 558 | |
|
559 | 559 | expected = "user `%s` already exist" % TEST_USER_ADMIN_LOGIN |
|
560 | 560 | self._compare_error(id_, expected, given=response.body) |
|
561 | 561 | |
|
562 | 562 | def test_api_create_user_with_existing_email(self): |
|
563 | 563 | id_, params = _build_data(self.apikey, 'create_user', |
|
564 | 564 | username=TEST_USER_ADMIN_LOGIN + 'new', |
|
565 | 565 | email=TEST_USER_REGULAR_EMAIL, |
|
566 | 566 | password='trololo') |
|
567 | 567 | response = api_call(self, params) |
|
568 | 568 | |
|
569 | 569 | expected = "email `%s` already exist" % TEST_USER_REGULAR_EMAIL |
|
570 | 570 | self._compare_error(id_, expected, given=response.body) |
|
571 | 571 | |
|
572 | 572 | def test_api_create_user(self): |
|
573 | 573 | username = 'test_new_api_user' |
|
574 |
email = username + "@ |
|
|
574 | email = username + "@example.com" | |
|
575 | 575 | |
|
576 | 576 | id_, params = _build_data(self.apikey, 'create_user', |
|
577 | 577 | username=username, |
|
578 | 578 | email=email, |
|
579 | 579 | password='trololo') |
|
580 | 580 | response = api_call(self, params) |
|
581 | 581 | |
|
582 | 582 | usr = User.get_by_username(username) |
|
583 | 583 | ret = dict( |
|
584 | 584 | msg='created new user `%s`' % username, |
|
585 | 585 | user=jsonify(usr.get_api_data()) |
|
586 | 586 | ) |
|
587 | 587 | |
|
588 | 588 | try: |
|
589 | 589 | expected = ret |
|
590 | 590 | self._compare_ok(id_, expected, given=response.body) |
|
591 | 591 | finally: |
|
592 | 592 | fixture.destroy_user(usr.user_id) |
|
593 | 593 | |
|
594 | 594 | def test_api_create_user_without_password(self): |
|
595 | 595 | username = 'test_new_api_user_passwordless' |
|
596 |
email = username + "@ |
|
|
596 | email = username + "@example.com" | |
|
597 | 597 | |
|
598 | 598 | id_, params = _build_data(self.apikey, 'create_user', |
|
599 | 599 | username=username, |
|
600 | 600 | email=email) |
|
601 | 601 | response = api_call(self, params) |
|
602 | 602 | |
|
603 | 603 | usr = User.get_by_username(username) |
|
604 | 604 | ret = dict( |
|
605 | 605 | msg='created new user `%s`' % username, |
|
606 | 606 | user=jsonify(usr.get_api_data()) |
|
607 | 607 | ) |
|
608 | 608 | try: |
|
609 | 609 | expected = ret |
|
610 | 610 | self._compare_ok(id_, expected, given=response.body) |
|
611 | 611 | finally: |
|
612 | 612 | fixture.destroy_user(usr.user_id) |
|
613 | 613 | |
|
614 | 614 | def test_api_create_user_with_extern_name(self): |
|
615 | 615 | username = 'test_new_api_user_passwordless' |
|
616 |
email = username + "@ |
|
|
616 | email = username + "@example.com" | |
|
617 | 617 | |
|
618 | 618 | id_, params = _build_data(self.apikey, 'create_user', |
|
619 | 619 | username=username, |
|
620 | 620 | email=email, extern_name='internal') |
|
621 | 621 | response = api_call(self, params) |
|
622 | 622 | |
|
623 | 623 | usr = User.get_by_username(username) |
|
624 | 624 | ret = dict( |
|
625 | 625 | msg='created new user `%s`' % username, |
|
626 | 626 | user=jsonify(usr.get_api_data()) |
|
627 | 627 | ) |
|
628 | 628 | try: |
|
629 | 629 | expected = ret |
|
630 | 630 | self._compare_ok(id_, expected, given=response.body) |
|
631 | 631 | finally: |
|
632 | 632 | fixture.destroy_user(usr.user_id) |
|
633 | 633 | |
|
634 | 634 | @mock.patch.object(UserModel, 'create_or_update', crash) |
|
635 | 635 | def test_api_create_user_when_exception_happened(self): |
|
636 | 636 | |
|
637 | 637 | username = 'test_new_api_user' |
|
638 |
email = username + "@ |
|
|
638 | email = username + "@example.com" | |
|
639 | 639 | |
|
640 | 640 | id_, params = _build_data(self.apikey, 'create_user', |
|
641 | 641 | username=username, |
|
642 | 642 | email=email, |
|
643 | 643 | password='trololo') |
|
644 | 644 | response = api_call(self, params) |
|
645 | 645 | expected = 'failed to create user `%s`' % username |
|
646 | 646 | self._compare_error(id_, expected, given=response.body) |
|
647 | 647 | |
|
648 | 648 | def test_api_delete_user(self): |
|
649 | 649 | usr = UserModel().create_or_update(username=u'test_user', |
|
650 | 650 | password=u'qweqwe', |
|
651 | 651 | email=u'u232@example.com', |
|
652 | 652 | firstname=u'u1', lastname=u'u1') |
|
653 | 653 | Session().commit() |
|
654 | 654 | username = usr.username |
|
655 | 655 | email = usr.email |
|
656 | 656 | usr_id = usr.user_id |
|
657 | 657 | ## DELETE THIS USER NOW |
|
658 | 658 | |
|
659 | 659 | id_, params = _build_data(self.apikey, 'delete_user', |
|
660 | 660 | userid=username, ) |
|
661 | 661 | response = api_call(self, params) |
|
662 | 662 | |
|
663 | 663 | ret = {'msg': 'deleted user ID:%s %s' % (usr_id, username), |
|
664 | 664 | 'user': None} |
|
665 | 665 | expected = ret |
|
666 | 666 | self._compare_ok(id_, expected, given=response.body) |
|
667 | 667 | |
|
668 | 668 | @mock.patch.object(UserModel, 'delete', crash) |
|
669 | 669 | def test_api_delete_user_when_exception_happened(self): |
|
670 | 670 | usr = UserModel().create_or_update(username=u'test_user', |
|
671 | 671 | password=u'qweqwe', |
|
672 | 672 | email=u'u232@example.com', |
|
673 | 673 | firstname=u'u1', lastname=u'u1') |
|
674 | 674 | Session().commit() |
|
675 | 675 | username = usr.username |
|
676 | 676 | |
|
677 | 677 | id_, params = _build_data(self.apikey, 'delete_user', |
|
678 | 678 | userid=username, ) |
|
679 | 679 | response = api_call(self, params) |
|
680 | 680 | ret = 'failed to delete user ID:%s %s' % (usr.user_id, |
|
681 | 681 | usr.username) |
|
682 | 682 | expected = ret |
|
683 | 683 | self._compare_error(id_, expected, given=response.body) |
|
684 | 684 | |
|
685 | 685 | @parameterized.expand([('firstname', 'new_username'), |
|
686 | 686 | ('lastname', 'new_username'), |
|
687 | 687 | ('email', 'new_username'), |
|
688 | 688 | ('admin', True), |
|
689 | 689 | ('admin', False), |
|
690 | 690 | ('extern_type', 'ldap'), |
|
691 | 691 | ('extern_type', None), |
|
692 | 692 | ('extern_name', 'test'), |
|
693 | 693 | ('extern_name', None), |
|
694 | 694 | ('active', False), |
|
695 | 695 | ('active', True), |
|
696 | 696 | ('password', 'newpass') |
|
697 | 697 | ]) |
|
698 | 698 | def test_api_update_user(self, name, expected): |
|
699 | 699 | usr = User.get_by_username(self.TEST_USER_LOGIN) |
|
700 | 700 | kw = {name: expected, |
|
701 | 701 | 'userid': usr.user_id} |
|
702 | 702 | id_, params = _build_data(self.apikey, 'update_user', **kw) |
|
703 | 703 | response = api_call(self, params) |
|
704 | 704 | |
|
705 | 705 | ret = { |
|
706 | 706 | 'msg': 'updated user ID:%s %s' % ( |
|
707 | 707 | usr.user_id, self.TEST_USER_LOGIN), |
|
708 | 708 | 'user': jsonify(User \ |
|
709 | 709 | .get_by_username(self.TEST_USER_LOGIN) \ |
|
710 | 710 | .get_api_data()) |
|
711 | 711 | } |
|
712 | 712 | |
|
713 | 713 | expected = ret |
|
714 | 714 | self._compare_ok(id_, expected, given=response.body) |
|
715 | 715 | |
|
716 | 716 | def test_api_update_user_no_changed_params(self): |
|
717 | 717 | usr = User.get_by_username(TEST_USER_ADMIN_LOGIN) |
|
718 | 718 | ret = jsonify(usr.get_api_data()) |
|
719 | 719 | id_, params = _build_data(self.apikey, 'update_user', |
|
720 | 720 | userid=TEST_USER_ADMIN_LOGIN) |
|
721 | 721 | |
|
722 | 722 | response = api_call(self, params) |
|
723 | 723 | ret = { |
|
724 | 724 | 'msg': 'updated user ID:%s %s' % ( |
|
725 | 725 | usr.user_id, TEST_USER_ADMIN_LOGIN), |
|
726 | 726 | 'user': ret |
|
727 | 727 | } |
|
728 | 728 | expected = ret |
|
729 | 729 | self._compare_ok(id_, expected, given=response.body) |
|
730 | 730 | |
|
731 | 731 | def test_api_update_user_by_user_id(self): |
|
732 | 732 | usr = User.get_by_username(TEST_USER_ADMIN_LOGIN) |
|
733 | 733 | ret = jsonify(usr.get_api_data()) |
|
734 | 734 | id_, params = _build_data(self.apikey, 'update_user', |
|
735 | 735 | userid=usr.user_id) |
|
736 | 736 | |
|
737 | 737 | response = api_call(self, params) |
|
738 | 738 | ret = { |
|
739 | 739 | 'msg': 'updated user ID:%s %s' % ( |
|
740 | 740 | usr.user_id, TEST_USER_ADMIN_LOGIN), |
|
741 | 741 | 'user': ret |
|
742 | 742 | } |
|
743 | 743 | expected = ret |
|
744 | 744 | self._compare_ok(id_, expected, given=response.body) |
|
745 | 745 | |
|
746 | 746 | def test_api_update_user_default_user(self): |
|
747 | 747 | usr = User.get_default_user() |
|
748 | 748 | id_, params = _build_data(self.apikey, 'update_user', |
|
749 | 749 | userid=usr.user_id) |
|
750 | 750 | |
|
751 | 751 | response = api_call(self, params) |
|
752 | 752 | expected = 'editing default user is forbidden' |
|
753 | 753 | self._compare_error(id_, expected, given=response.body) |
|
754 | 754 | |
|
755 | 755 | @mock.patch.object(UserModel, 'update_user', crash) |
|
756 | 756 | def test_api_update_user_when_exception_happens(self): |
|
757 | 757 | usr = User.get_by_username(TEST_USER_ADMIN_LOGIN) |
|
758 | 758 | ret = jsonify(usr.get_api_data()) |
|
759 | 759 | id_, params = _build_data(self.apikey, 'update_user', |
|
760 | 760 | userid=usr.user_id) |
|
761 | 761 | |
|
762 | 762 | response = api_call(self, params) |
|
763 | 763 | ret = 'failed to update user `%s`' % usr.user_id |
|
764 | 764 | |
|
765 | 765 | expected = ret |
|
766 | 766 | self._compare_error(id_, expected, given=response.body) |
|
767 | 767 | |
|
768 | 768 | def test_api_get_repo(self): |
|
769 | 769 | new_group = 'some_new_group' |
|
770 | 770 | make_user_group(new_group) |
|
771 | 771 | RepoModel().grant_user_group_permission(repo=self.REPO, |
|
772 | 772 | group_name=new_group, |
|
773 | 773 | perm='repository.read') |
|
774 | 774 | Session().commit() |
|
775 | 775 | id_, params = _build_data(self.apikey, 'get_repo', |
|
776 | 776 | repoid=self.REPO) |
|
777 | 777 | response = api_call(self, params) |
|
778 | 778 | |
|
779 | 779 | repo = RepoModel().get_by_repo_name(self.REPO) |
|
780 | 780 | ret = repo.get_api_data() |
|
781 | 781 | |
|
782 | 782 | members = [] |
|
783 | 783 | followers = [] |
|
784 | 784 | for user in repo.repo_to_perm: |
|
785 | 785 | perm = user.permission.permission_name |
|
786 | 786 | user = user.user |
|
787 | 787 | user_data = {'name': user.username, 'type': "user", |
|
788 | 788 | 'permission': perm} |
|
789 | 789 | members.append(user_data) |
|
790 | 790 | |
|
791 | 791 | for user_group in repo.users_group_to_perm: |
|
792 | 792 | perm = user_group.permission.permission_name |
|
793 | 793 | user_group = user_group.users_group |
|
794 | 794 | user_group_data = {'name': user_group.users_group_name, |
|
795 | 795 | 'type': "user_group", 'permission': perm} |
|
796 | 796 | members.append(user_group_data) |
|
797 | 797 | |
|
798 | 798 | for user in repo.followers: |
|
799 | 799 | followers.append(user.user.get_api_data()) |
|
800 | 800 | |
|
801 | 801 | ret['members'] = members |
|
802 | 802 | ret['followers'] = followers |
|
803 | 803 | |
|
804 | 804 | expected = ret |
|
805 | 805 | self._compare_ok(id_, expected, given=response.body) |
|
806 | 806 | fixture.destroy_user_group(new_group) |
|
807 | 807 | |
|
808 | 808 | @parameterized.expand([ |
|
809 | 809 | ('repository.admin',), |
|
810 | 810 | ('repository.write',), |
|
811 | 811 | ('repository.read',), |
|
812 | 812 | ]) |
|
813 | 813 | def test_api_get_repo_by_non_admin(self, grant_perm): |
|
814 | 814 | RepoModel().grant_user_permission(repo=self.REPO, |
|
815 | 815 | user=self.TEST_USER_LOGIN, |
|
816 | 816 | perm=grant_perm) |
|
817 | 817 | Session().commit() |
|
818 | 818 | id_, params = _build_data(self.apikey_regular, 'get_repo', |
|
819 | 819 | repoid=self.REPO) |
|
820 | 820 | response = api_call(self, params) |
|
821 | 821 | |
|
822 | 822 | repo = RepoModel().get_by_repo_name(self.REPO) |
|
823 | 823 | ret = repo.get_api_data() |
|
824 | 824 | |
|
825 | 825 | members = [] |
|
826 | 826 | followers = [] |
|
827 | 827 | self.assertEqual(2, len(repo.repo_to_perm)) |
|
828 | 828 | for user in repo.repo_to_perm: |
|
829 | 829 | perm = user.permission.permission_name |
|
830 | 830 | user_obj = user.user |
|
831 | 831 | user_data = {'name': user_obj.username, 'type': "user", |
|
832 | 832 | 'permission': perm} |
|
833 | 833 | members.append(user_data) |
|
834 | 834 | |
|
835 | 835 | for user_group in repo.users_group_to_perm: |
|
836 | 836 | perm = user_group.permission.permission_name |
|
837 | 837 | user_group_obj = user_group.users_group |
|
838 | 838 | user_group_data = {'name': user_group_obj.users_group_name, |
|
839 | 839 | 'type': "user_group", 'permission': perm} |
|
840 | 840 | members.append(user_group_data) |
|
841 | 841 | |
|
842 | 842 | for user in repo.followers: |
|
843 | 843 | followers.append(user.user.get_api_data()) |
|
844 | 844 | |
|
845 | 845 | ret['members'] = members |
|
846 | 846 | ret['followers'] = followers |
|
847 | 847 | |
|
848 | 848 | expected = ret |
|
849 | 849 | try: |
|
850 | 850 | self._compare_ok(id_, expected, given=response.body) |
|
851 | 851 | finally: |
|
852 | 852 | RepoModel().revoke_user_permission(self.REPO, self.TEST_USER_LOGIN) |
|
853 | 853 | |
|
854 | 854 | def test_api_get_repo_by_non_admin_no_permission_to_repo(self): |
|
855 | 855 | RepoModel().grant_user_permission(repo=self.REPO, |
|
856 | 856 | user=self.TEST_USER_LOGIN, |
|
857 | 857 | perm='repository.none') |
|
858 | 858 | |
|
859 | 859 | id_, params = _build_data(self.apikey_regular, 'get_repo', |
|
860 | 860 | repoid=self.REPO) |
|
861 | 861 | response = api_call(self, params) |
|
862 | 862 | |
|
863 | 863 | expected = 'repository `%s` does not exist' % (self.REPO) |
|
864 | 864 | self._compare_error(id_, expected, given=response.body) |
|
865 | 865 | |
|
866 | 866 | def test_api_get_repo_that_doesn_not_exist(self): |
|
867 | 867 | id_, params = _build_data(self.apikey, 'get_repo', |
|
868 | 868 | repoid='no-such-repo') |
|
869 | 869 | response = api_call(self, params) |
|
870 | 870 | |
|
871 | 871 | ret = 'repository `%s` does not exist' % 'no-such-repo' |
|
872 | 872 | expected = ret |
|
873 | 873 | self._compare_error(id_, expected, given=response.body) |
|
874 | 874 | |
|
875 | 875 | def test_api_get_repos(self): |
|
876 | 876 | id_, params = _build_data(self.apikey, 'get_repos') |
|
877 | 877 | response = api_call(self, params) |
|
878 | 878 | |
|
879 | 879 | result = [] |
|
880 | 880 | for repo in RepoModel().get_all(): |
|
881 | 881 | result.append(repo.get_api_data()) |
|
882 | 882 | ret = jsonify(result) |
|
883 | 883 | |
|
884 | 884 | expected = ret |
|
885 | 885 | self._compare_ok(id_, expected, given=response.body) |
|
886 | 886 | |
|
887 | 887 | def test_api_get_repos_non_admin(self): |
|
888 | 888 | id_, params = _build_data(self.apikey_regular, 'get_repos') |
|
889 | 889 | response = api_call(self, params) |
|
890 | 890 | |
|
891 | 891 | result = [] |
|
892 | 892 | for repo in RepoModel().get_all_user_repos(self.TEST_USER_LOGIN): |
|
893 | 893 | result.append(repo.get_api_data()) |
|
894 | 894 | ret = jsonify(result) |
|
895 | 895 | |
|
896 | 896 | expected = ret |
|
897 | 897 | self._compare_ok(id_, expected, given=response.body) |
|
898 | 898 | |
|
899 | 899 | @parameterized.expand([('all', 'all'), |
|
900 | 900 | ('dirs', 'dirs'), |
|
901 | 901 | ('files', 'files'), ]) |
|
902 | 902 | def test_api_get_repo_nodes(self, name, ret_type): |
|
903 | 903 | rev = 'tip' |
|
904 | 904 | path = '/' |
|
905 | 905 | id_, params = _build_data(self.apikey, 'get_repo_nodes', |
|
906 | 906 | repoid=self.REPO, revision=rev, |
|
907 | 907 | root_path=path, |
|
908 | 908 | ret_type=ret_type) |
|
909 | 909 | response = api_call(self, params) |
|
910 | 910 | |
|
911 | 911 | # we don't the actual return types here since it's tested somewhere |
|
912 | 912 | # else |
|
913 | 913 | expected = response.json['result'] |
|
914 | 914 | self._compare_ok(id_, expected, given=response.body) |
|
915 | 915 | |
|
916 | 916 | def test_api_get_repo_nodes_bad_revisions(self): |
|
917 | 917 | rev = 'i-dont-exist' |
|
918 | 918 | path = '/' |
|
919 | 919 | id_, params = _build_data(self.apikey, 'get_repo_nodes', |
|
920 | 920 | repoid=self.REPO, revision=rev, |
|
921 | 921 | root_path=path, ) |
|
922 | 922 | response = api_call(self, params) |
|
923 | 923 | |
|
924 | 924 | expected = 'failed to get repo: `%s` nodes' % self.REPO |
|
925 | 925 | self._compare_error(id_, expected, given=response.body) |
|
926 | 926 | |
|
927 | 927 | def test_api_get_repo_nodes_bad_path(self): |
|
928 | 928 | rev = 'tip' |
|
929 | 929 | path = '/idontexits' |
|
930 | 930 | id_, params = _build_data(self.apikey, 'get_repo_nodes', |
|
931 | 931 | repoid=self.REPO, revision=rev, |
|
932 | 932 | root_path=path, ) |
|
933 | 933 | response = api_call(self, params) |
|
934 | 934 | |
|
935 | 935 | expected = 'failed to get repo: `%s` nodes' % self.REPO |
|
936 | 936 | self._compare_error(id_, expected, given=response.body) |
|
937 | 937 | |
|
938 | 938 | def test_api_get_repo_nodes_bad_ret_type(self): |
|
939 | 939 | rev = 'tip' |
|
940 | 940 | path = '/' |
|
941 | 941 | ret_type = 'error' |
|
942 | 942 | id_, params = _build_data(self.apikey, 'get_repo_nodes', |
|
943 | 943 | repoid=self.REPO, revision=rev, |
|
944 | 944 | root_path=path, |
|
945 | 945 | ret_type=ret_type) |
|
946 | 946 | response = api_call(self, params) |
|
947 | 947 | |
|
948 | 948 | expected = ('ret_type must be one of %s' |
|
949 | 949 | % (','.join(['files', 'dirs', 'all']))) |
|
950 | 950 | self._compare_error(id_, expected, given=response.body) |
|
951 | 951 | |
|
952 | 952 | @parameterized.expand([('all', 'all', 'repository.write'), |
|
953 | 953 | ('dirs', 'dirs', 'repository.admin'), |
|
954 | 954 | ('files', 'files', 'repository.read'), ]) |
|
955 | 955 | def test_api_get_repo_nodes_by_regular_user(self, name, ret_type, grant_perm): |
|
956 | 956 | RepoModel().grant_user_permission(repo=self.REPO, |
|
957 | 957 | user=self.TEST_USER_LOGIN, |
|
958 | 958 | perm=grant_perm) |
|
959 | 959 | Session().commit() |
|
960 | 960 | |
|
961 | 961 | rev = 'tip' |
|
962 | 962 | path = '/' |
|
963 | 963 | id_, params = _build_data(self.apikey_regular, 'get_repo_nodes', |
|
964 | 964 | repoid=self.REPO, revision=rev, |
|
965 | 965 | root_path=path, |
|
966 | 966 | ret_type=ret_type) |
|
967 | 967 | response = api_call(self, params) |
|
968 | 968 | |
|
969 | 969 | # we don't the actual return types here since it's tested somewhere |
|
970 | 970 | # else |
|
971 | 971 | expected = response.json['result'] |
|
972 | 972 | try: |
|
973 | 973 | self._compare_ok(id_, expected, given=response.body) |
|
974 | 974 | finally: |
|
975 | 975 | RepoModel().revoke_user_permission(self.REPO, self.TEST_USER_LOGIN) |
|
976 | 976 | |
|
977 | 977 | def test_api_create_repo(self): |
|
978 | 978 | repo_name = 'api-repo' |
|
979 | 979 | id_, params = _build_data(self.apikey, 'create_repo', |
|
980 | 980 | repo_name=repo_name, |
|
981 | 981 | owner=TEST_USER_ADMIN_LOGIN, |
|
982 | 982 | repo_type=self.REPO_TYPE, |
|
983 | 983 | ) |
|
984 | 984 | response = api_call(self, params) |
|
985 | 985 | |
|
986 | 986 | repo = RepoModel().get_by_repo_name(repo_name) |
|
987 | 987 | self.assertNotEqual(repo, None) |
|
988 | 988 | ret = { |
|
989 | 989 | 'msg': 'Created new repository `%s`' % repo_name, |
|
990 | 990 | 'success': True, |
|
991 | 991 | 'task': None, |
|
992 | 992 | } |
|
993 | 993 | expected = ret |
|
994 | 994 | self._compare_ok(id_, expected, given=response.body) |
|
995 | 995 | fixture.destroy_repo(repo_name) |
|
996 | 996 | |
|
997 | 997 | def test_api_create_repo_and_repo_group(self): |
|
998 | 998 | repo_name = 'my_gr/api-repo' |
|
999 | 999 | id_, params = _build_data(self.apikey, 'create_repo', |
|
1000 | 1000 | repo_name=repo_name, |
|
1001 | 1001 | owner=TEST_USER_ADMIN_LOGIN, |
|
1002 | 1002 | repo_type=self.REPO_TYPE,) |
|
1003 | 1003 | response = api_call(self, params) |
|
1004 | 1004 | print params |
|
1005 | 1005 | repo = RepoModel().get_by_repo_name(repo_name) |
|
1006 | 1006 | self.assertNotEqual(repo, None) |
|
1007 | 1007 | ret = { |
|
1008 | 1008 | 'msg': 'Created new repository `%s`' % repo_name, |
|
1009 | 1009 | 'success': True, |
|
1010 | 1010 | 'task': None, |
|
1011 | 1011 | } |
|
1012 | 1012 | expected = ret |
|
1013 | 1013 | self._compare_ok(id_, expected, given=response.body) |
|
1014 | 1014 | fixture.destroy_repo(repo_name) |
|
1015 | 1015 | fixture.destroy_repo_group('my_gr') |
|
1016 | 1016 | |
|
1017 | 1017 | def test_api_create_repo_in_repo_group_without_permission(self): |
|
1018 | 1018 | repo_group_name = '%s/api-repo-repo' % TEST_REPO_GROUP |
|
1019 | 1019 | repo_name = '%s/api-repo' % repo_group_name |
|
1020 | 1020 | |
|
1021 | 1021 | rg = fixture.create_repo_group(repo_group_name) |
|
1022 | 1022 | Session().commit() |
|
1023 | 1023 | RepoGroupModel().grant_user_permission(repo_group_name, |
|
1024 | 1024 | self.TEST_USER_LOGIN, |
|
1025 | 1025 | 'group.none') |
|
1026 | 1026 | Session().commit() |
|
1027 | 1027 | |
|
1028 | 1028 | id_, params = _build_data(self.apikey_regular, 'create_repo', |
|
1029 | 1029 | repo_name=repo_name, |
|
1030 | 1030 | repo_type=self.REPO_TYPE, |
|
1031 | 1031 | ) |
|
1032 | 1032 | response = api_call(self, params) |
|
1033 | 1033 | |
|
1034 | 1034 | # Current result when API access control is different from Web: |
|
1035 | 1035 | ret = { |
|
1036 | 1036 | 'msg': 'Created new repository `%s`' % repo_name, |
|
1037 | 1037 | 'success': True, |
|
1038 | 1038 | 'task': None, |
|
1039 | 1039 | } |
|
1040 | 1040 | expected = ret |
|
1041 | 1041 | self._compare_ok(id_, expected, given=response.body) |
|
1042 | 1042 | fixture.destroy_repo(repo_name) |
|
1043 | 1043 | |
|
1044 | 1044 | # Expected and arguably more correct result: |
|
1045 | 1045 | #expected = 'failed to create repository `%s`' % repo_name |
|
1046 | 1046 | #self._compare_error(id_, expected, given=response.body) |
|
1047 | 1047 | |
|
1048 | 1048 | fixture.destroy_repo_group(repo_group_name) |
|
1049 | 1049 | |
|
1050 | 1050 | def test_api_create_repo_unknown_owner(self): |
|
1051 | 1051 | repo_name = 'api-repo' |
|
1052 | 1052 | owner = 'i-dont-exist' |
|
1053 | 1053 | id_, params = _build_data(self.apikey, 'create_repo', |
|
1054 | 1054 | repo_name=repo_name, |
|
1055 | 1055 | owner=owner, |
|
1056 | 1056 | repo_type=self.REPO_TYPE, |
|
1057 | 1057 | ) |
|
1058 | 1058 | response = api_call(self, params) |
|
1059 | 1059 | expected = 'user `%s` does not exist' % owner |
|
1060 | 1060 | self._compare_error(id_, expected, given=response.body) |
|
1061 | 1061 | |
|
1062 | 1062 | def test_api_create_repo_dont_specify_owner(self): |
|
1063 | 1063 | repo_name = 'api-repo' |
|
1064 | 1064 | owner = 'i-dont-exist' |
|
1065 | 1065 | id_, params = _build_data(self.apikey, 'create_repo', |
|
1066 | 1066 | repo_name=repo_name, |
|
1067 | 1067 | repo_type=self.REPO_TYPE, |
|
1068 | 1068 | ) |
|
1069 | 1069 | response = api_call(self, params) |
|
1070 | 1070 | |
|
1071 | 1071 | repo = RepoModel().get_by_repo_name(repo_name) |
|
1072 | 1072 | self.assertNotEqual(repo, None) |
|
1073 | 1073 | ret = { |
|
1074 | 1074 | 'msg': 'Created new repository `%s`' % repo_name, |
|
1075 | 1075 | 'success': True, |
|
1076 | 1076 | 'task': None, |
|
1077 | 1077 | } |
|
1078 | 1078 | expected = ret |
|
1079 | 1079 | self._compare_ok(id_, expected, given=response.body) |
|
1080 | 1080 | fixture.destroy_repo(repo_name) |
|
1081 | 1081 | |
|
1082 | 1082 | def test_api_create_repo_by_non_admin(self): |
|
1083 | 1083 | repo_name = 'api-repo' |
|
1084 | 1084 | owner = 'i-dont-exist' |
|
1085 | 1085 | id_, params = _build_data(self.apikey_regular, 'create_repo', |
|
1086 | 1086 | repo_name=repo_name, |
|
1087 | 1087 | repo_type=self.REPO_TYPE, |
|
1088 | 1088 | ) |
|
1089 | 1089 | response = api_call(self, params) |
|
1090 | 1090 | |
|
1091 | 1091 | repo = RepoModel().get_by_repo_name(repo_name) |
|
1092 | 1092 | self.assertNotEqual(repo, None) |
|
1093 | 1093 | ret = { |
|
1094 | 1094 | 'msg': 'Created new repository `%s`' % repo_name, |
|
1095 | 1095 | 'success': True, |
|
1096 | 1096 | 'task': None, |
|
1097 | 1097 | } |
|
1098 | 1098 | expected = ret |
|
1099 | 1099 | self._compare_ok(id_, expected, given=response.body) |
|
1100 | 1100 | fixture.destroy_repo(repo_name) |
|
1101 | 1101 | |
|
1102 | 1102 | def test_api_create_repo_by_non_admin_specify_owner(self): |
|
1103 | 1103 | repo_name = 'api-repo' |
|
1104 | 1104 | owner = 'i-dont-exist' |
|
1105 | 1105 | id_, params = _build_data(self.apikey_regular, 'create_repo', |
|
1106 | 1106 | repo_name=repo_name, |
|
1107 | 1107 | repo_type=self.REPO_TYPE, |
|
1108 | 1108 | owner=owner) |
|
1109 | 1109 | response = api_call(self, params) |
|
1110 | 1110 | |
|
1111 | 1111 | expected = 'Only Kallithea admin can specify `owner` param' |
|
1112 | 1112 | self._compare_error(id_, expected, given=response.body) |
|
1113 | 1113 | fixture.destroy_repo(repo_name) |
|
1114 | 1114 | |
|
1115 | 1115 | def test_api_create_repo_exists(self): |
|
1116 | 1116 | repo_name = self.REPO |
|
1117 | 1117 | id_, params = _build_data(self.apikey, 'create_repo', |
|
1118 | 1118 | repo_name=repo_name, |
|
1119 | 1119 | owner=TEST_USER_ADMIN_LOGIN, |
|
1120 | 1120 | repo_type=self.REPO_TYPE,) |
|
1121 | 1121 | response = api_call(self, params) |
|
1122 | 1122 | expected = "repo `%s` already exist" % repo_name |
|
1123 | 1123 | self._compare_error(id_, expected, given=response.body) |
|
1124 | 1124 | |
|
1125 | 1125 | @mock.patch.object(RepoModel, 'create', crash) |
|
1126 | 1126 | def test_api_create_repo_exception_occurred(self): |
|
1127 | 1127 | repo_name = 'api-repo' |
|
1128 | 1128 | id_, params = _build_data(self.apikey, 'create_repo', |
|
1129 | 1129 | repo_name=repo_name, |
|
1130 | 1130 | owner=TEST_USER_ADMIN_LOGIN, |
|
1131 | 1131 | repo_type=self.REPO_TYPE,) |
|
1132 | 1132 | response = api_call(self, params) |
|
1133 | 1133 | expected = 'failed to create repository `%s`' % repo_name |
|
1134 | 1134 | self._compare_error(id_, expected, given=response.body) |
|
1135 | 1135 | |
|
1136 | 1136 | @parameterized.expand([ |
|
1137 | 1137 | ('owner', {'owner': TEST_USER_REGULAR_LOGIN}), |
|
1138 | 1138 | ('description', {'description': 'new description'}), |
|
1139 | 1139 | ('active', {'active': True}), |
|
1140 | 1140 | ('active', {'active': False}), |
|
1141 |
('clone_uri', {'clone_uri': 'http:// |
|
|
1141 | ('clone_uri', {'clone_uri': 'http://example.com/repo'}), | |
|
1142 | 1142 | ('clone_uri', {'clone_uri': None}), |
|
1143 | 1143 | ('landing_rev', {'landing_rev': 'branch:master'}), |
|
1144 | 1144 | ('enable_statistics', {'enable_statistics': True}), |
|
1145 | 1145 | ('enable_locking', {'enable_locking': True}), |
|
1146 | 1146 | ('enable_downloads', {'enable_downloads': True}), |
|
1147 | 1147 | ('name', {'name': 'new_repo_name'}), |
|
1148 | 1148 | ('repo_group', {'group': 'test_group_for_update'}), |
|
1149 | 1149 | ]) |
|
1150 | 1150 | def test_api_update_repo(self, changing_attr, updates): |
|
1151 | 1151 | repo_name = 'api_update_me' |
|
1152 | 1152 | repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE) |
|
1153 | 1153 | if changing_attr == 'repo_group': |
|
1154 | 1154 | fixture.create_repo_group(updates['group']) |
|
1155 | 1155 | |
|
1156 | 1156 | id_, params = _build_data(self.apikey, 'update_repo', |
|
1157 | 1157 | repoid=repo_name, **updates) |
|
1158 | 1158 | response = api_call(self, params) |
|
1159 | 1159 | if changing_attr == 'name': |
|
1160 | 1160 | repo_name = updates['name'] |
|
1161 | 1161 | if changing_attr == 'repo_group': |
|
1162 | 1162 | repo_name = '/'.join([updates['group'], repo_name]) |
|
1163 | 1163 | try: |
|
1164 | 1164 | expected = { |
|
1165 | 1165 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name), |
|
1166 | 1166 | 'repository': repo.get_api_data() |
|
1167 | 1167 | } |
|
1168 | 1168 | self._compare_ok(id_, expected, given=response.body) |
|
1169 | 1169 | finally: |
|
1170 | 1170 | fixture.destroy_repo(repo_name) |
|
1171 | 1171 | if changing_attr == 'repo_group': |
|
1172 | 1172 | fixture.destroy_repo_group(updates['group']) |
|
1173 | 1173 | |
|
1174 | 1174 | def test_api_update_repo_repo_group_does_not_exist(self): |
|
1175 | 1175 | repo_name = 'admin_owned' |
|
1176 | 1176 | fixture.create_repo(repo_name) |
|
1177 | 1177 | updates = {'group': 'test_group_for_update'} |
|
1178 | 1178 | id_, params = _build_data(self.apikey, 'update_repo', |
|
1179 | 1179 | repoid=repo_name, **updates) |
|
1180 | 1180 | response = api_call(self, params) |
|
1181 | 1181 | try: |
|
1182 | 1182 | expected = 'repository group `%s` does not exist' % updates['group'] |
|
1183 | 1183 | self._compare_error(id_, expected, given=response.body) |
|
1184 | 1184 | finally: |
|
1185 | 1185 | fixture.destroy_repo(repo_name) |
|
1186 | 1186 | |
|
1187 | 1187 | def test_api_update_repo_regular_user_not_allowed(self): |
|
1188 | 1188 | repo_name = 'admin_owned' |
|
1189 | 1189 | fixture.create_repo(repo_name) |
|
1190 | 1190 | updates = {'active': False} |
|
1191 | 1191 | id_, params = _build_data(self.apikey_regular, 'update_repo', |
|
1192 | 1192 | repoid=repo_name, **updates) |
|
1193 | 1193 | response = api_call(self, params) |
|
1194 | 1194 | try: |
|
1195 | 1195 | expected = 'repository `%s` does not exist' % repo_name |
|
1196 | 1196 | self._compare_error(id_, expected, given=response.body) |
|
1197 | 1197 | finally: |
|
1198 | 1198 | fixture.destroy_repo(repo_name) |
|
1199 | 1199 | |
|
1200 | 1200 | @mock.patch.object(RepoModel, 'update', crash) |
|
1201 | 1201 | def test_api_update_repo_exception_occurred(self): |
|
1202 | 1202 | repo_name = 'api_update_me' |
|
1203 | 1203 | fixture.create_repo(repo_name, repo_type=self.REPO_TYPE) |
|
1204 | 1204 | id_, params = _build_data(self.apikey, 'update_repo', |
|
1205 | 1205 | repoid=repo_name, owner=TEST_USER_ADMIN_LOGIN,) |
|
1206 | 1206 | response = api_call(self, params) |
|
1207 | 1207 | try: |
|
1208 | 1208 | expected = 'failed to update repo `%s`' % repo_name |
|
1209 | 1209 | self._compare_error(id_, expected, given=response.body) |
|
1210 | 1210 | finally: |
|
1211 | 1211 | fixture.destroy_repo(repo_name) |
|
1212 | 1212 | |
|
1213 | 1213 | def test_api_update_repo_regular_user_change_repo_name(self): |
|
1214 | 1214 | repo_name = 'admin_owned' |
|
1215 | 1215 | new_repo_name = 'new_repo_name' |
|
1216 | 1216 | fixture.create_repo(repo_name, repo_type=self.REPO_TYPE) |
|
1217 | 1217 | RepoModel().grant_user_permission(repo=repo_name, |
|
1218 | 1218 | user=self.TEST_USER_LOGIN, |
|
1219 | 1219 | perm='repository.admin') |
|
1220 | 1220 | UserModel().revoke_perm('default', 'hg.create.repository') |
|
1221 | 1221 | UserModel().grant_perm('default', 'hg.create.none') |
|
1222 | 1222 | updates = {'name': new_repo_name} |
|
1223 | 1223 | id_, params = _build_data(self.apikey_regular, 'update_repo', |
|
1224 | 1224 | repoid=repo_name, **updates) |
|
1225 | 1225 | response = api_call(self, params) |
|
1226 | 1226 | try: |
|
1227 | 1227 | expected = 'no permission to create (or move) repositories' |
|
1228 | 1228 | self._compare_error(id_, expected, given=response.body) |
|
1229 | 1229 | finally: |
|
1230 | 1230 | fixture.destroy_repo(repo_name) |
|
1231 | 1231 | fixture.destroy_repo(new_repo_name) |
|
1232 | 1232 | |
|
1233 | 1233 | def test_api_update_repo_regular_user_change_repo_name_allowed(self): |
|
1234 | 1234 | repo_name = 'admin_owned' |
|
1235 | 1235 | new_repo_name = 'new_repo_name' |
|
1236 | 1236 | repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE) |
|
1237 | 1237 | RepoModel().grant_user_permission(repo=repo_name, |
|
1238 | 1238 | user=self.TEST_USER_LOGIN, |
|
1239 | 1239 | perm='repository.admin') |
|
1240 | 1240 | UserModel().revoke_perm('default', 'hg.create.none') |
|
1241 | 1241 | UserModel().grant_perm('default', 'hg.create.repository') |
|
1242 | 1242 | updates = {'name': new_repo_name} |
|
1243 | 1243 | id_, params = _build_data(self.apikey_regular, 'update_repo', |
|
1244 | 1244 | repoid=repo_name, **updates) |
|
1245 | 1245 | response = api_call(self, params) |
|
1246 | 1246 | try: |
|
1247 | 1247 | expected = { |
|
1248 | 1248 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, new_repo_name), |
|
1249 | 1249 | 'repository': repo.get_api_data() |
|
1250 | 1250 | } |
|
1251 | 1251 | self._compare_ok(id_, expected, given=response.body) |
|
1252 | 1252 | finally: |
|
1253 | 1253 | fixture.destroy_repo(repo_name) |
|
1254 | 1254 | fixture.destroy_repo(new_repo_name) |
|
1255 | 1255 | |
|
1256 | 1256 | def test_api_update_repo_regular_user_change_owner(self): |
|
1257 | 1257 | repo_name = 'admin_owned' |
|
1258 | 1258 | fixture.create_repo(repo_name, repo_type=self.REPO_TYPE) |
|
1259 | 1259 | RepoModel().grant_user_permission(repo=repo_name, |
|
1260 | 1260 | user=self.TEST_USER_LOGIN, |
|
1261 | 1261 | perm='repository.admin') |
|
1262 | 1262 | updates = {'owner': TEST_USER_ADMIN_LOGIN} |
|
1263 | 1263 | id_, params = _build_data(self.apikey_regular, 'update_repo', |
|
1264 | 1264 | repoid=repo_name, **updates) |
|
1265 | 1265 | response = api_call(self, params) |
|
1266 | 1266 | try: |
|
1267 | 1267 | expected = 'Only Kallithea admin can specify `owner` param' |
|
1268 | 1268 | self._compare_error(id_, expected, given=response.body) |
|
1269 | 1269 | finally: |
|
1270 | 1270 | fixture.destroy_repo(repo_name) |
|
1271 | 1271 | |
|
1272 | 1272 | def test_api_delete_repo(self): |
|
1273 | 1273 | repo_name = 'api_delete_me' |
|
1274 | 1274 | fixture.create_repo(repo_name, repo_type=self.REPO_TYPE) |
|
1275 | 1275 | |
|
1276 | 1276 | id_, params = _build_data(self.apikey, 'delete_repo', |
|
1277 | 1277 | repoid=repo_name, ) |
|
1278 | 1278 | response = api_call(self, params) |
|
1279 | 1279 | |
|
1280 | 1280 | ret = { |
|
1281 | 1281 | 'msg': 'Deleted repository `%s`' % repo_name, |
|
1282 | 1282 | 'success': True |
|
1283 | 1283 | } |
|
1284 | 1284 | try: |
|
1285 | 1285 | expected = ret |
|
1286 | 1286 | self._compare_ok(id_, expected, given=response.body) |
|
1287 | 1287 | finally: |
|
1288 | 1288 | fixture.destroy_repo(repo_name) |
|
1289 | 1289 | |
|
1290 | 1290 | def test_api_delete_repo_by_non_admin(self): |
|
1291 | 1291 | repo_name = 'api_delete_me' |
|
1292 | 1292 | fixture.create_repo(repo_name, repo_type=self.REPO_TYPE, |
|
1293 | 1293 | cur_user=self.TEST_USER_LOGIN) |
|
1294 | 1294 | id_, params = _build_data(self.apikey_regular, 'delete_repo', |
|
1295 | 1295 | repoid=repo_name, ) |
|
1296 | 1296 | response = api_call(self, params) |
|
1297 | 1297 | |
|
1298 | 1298 | ret = { |
|
1299 | 1299 | 'msg': 'Deleted repository `%s`' % repo_name, |
|
1300 | 1300 | 'success': True |
|
1301 | 1301 | } |
|
1302 | 1302 | try: |
|
1303 | 1303 | expected = ret |
|
1304 | 1304 | self._compare_ok(id_, expected, given=response.body) |
|
1305 | 1305 | finally: |
|
1306 | 1306 | fixture.destroy_repo(repo_name) |
|
1307 | 1307 | |
|
1308 | 1308 | def test_api_delete_repo_by_non_admin_no_permission(self): |
|
1309 | 1309 | repo_name = 'api_delete_me' |
|
1310 | 1310 | fixture.create_repo(repo_name, repo_type=self.REPO_TYPE) |
|
1311 | 1311 | try: |
|
1312 | 1312 | id_, params = _build_data(self.apikey_regular, 'delete_repo', |
|
1313 | 1313 | repoid=repo_name, ) |
|
1314 | 1314 | response = api_call(self, params) |
|
1315 | 1315 | expected = 'repository `%s` does not exist' % (repo_name) |
|
1316 | 1316 | self._compare_error(id_, expected, given=response.body) |
|
1317 | 1317 | finally: |
|
1318 | 1318 | fixture.destroy_repo(repo_name) |
|
1319 | 1319 | |
|
1320 | 1320 | def test_api_delete_repo_exception_occurred(self): |
|
1321 | 1321 | repo_name = 'api_delete_me' |
|
1322 | 1322 | fixture.create_repo(repo_name, repo_type=self.REPO_TYPE) |
|
1323 | 1323 | try: |
|
1324 | 1324 | with mock.patch.object(RepoModel, 'delete', crash): |
|
1325 | 1325 | id_, params = _build_data(self.apikey, 'delete_repo', |
|
1326 | 1326 | repoid=repo_name, ) |
|
1327 | 1327 | response = api_call(self, params) |
|
1328 | 1328 | |
|
1329 | 1329 | expected = 'failed to delete repository `%s`' % repo_name |
|
1330 | 1330 | self._compare_error(id_, expected, given=response.body) |
|
1331 | 1331 | finally: |
|
1332 | 1332 | fixture.destroy_repo(repo_name) |
|
1333 | 1333 | |
|
1334 | 1334 | def test_api_fork_repo(self): |
|
1335 | 1335 | fork_name = 'api-repo-fork' |
|
1336 | 1336 | id_, params = _build_data(self.apikey, 'fork_repo', |
|
1337 | 1337 | repoid=self.REPO, |
|
1338 | 1338 | fork_name=fork_name, |
|
1339 | 1339 | owner=TEST_USER_ADMIN_LOGIN, |
|
1340 | 1340 | ) |
|
1341 | 1341 | response = api_call(self, params) |
|
1342 | 1342 | |
|
1343 | 1343 | ret = { |
|
1344 | 1344 | 'msg': 'Created fork of `%s` as `%s`' % (self.REPO, |
|
1345 | 1345 | fork_name), |
|
1346 | 1346 | 'success': True, |
|
1347 | 1347 | 'task': None, |
|
1348 | 1348 | } |
|
1349 | 1349 | expected = ret |
|
1350 | 1350 | self._compare_ok(id_, expected, given=response.body) |
|
1351 | 1351 | fixture.destroy_repo(fork_name) |
|
1352 | 1352 | |
|
1353 | 1353 | def test_api_fork_repo_non_admin(self): |
|
1354 | 1354 | fork_name = 'api-repo-fork' |
|
1355 | 1355 | id_, params = _build_data(self.apikey_regular, 'fork_repo', |
|
1356 | 1356 | repoid=self.REPO, |
|
1357 | 1357 | fork_name=fork_name, |
|
1358 | 1358 | ) |
|
1359 | 1359 | response = api_call(self, params) |
|
1360 | 1360 | |
|
1361 | 1361 | ret = { |
|
1362 | 1362 | 'msg': 'Created fork of `%s` as `%s`' % (self.REPO, |
|
1363 | 1363 | fork_name), |
|
1364 | 1364 | 'success': True, |
|
1365 | 1365 | 'task': None, |
|
1366 | 1366 | } |
|
1367 | 1367 | expected = ret |
|
1368 | 1368 | self._compare_ok(id_, expected, given=response.body) |
|
1369 | 1369 | fixture.destroy_repo(fork_name) |
|
1370 | 1370 | |
|
1371 | 1371 | def test_api_fork_repo_non_admin_specify_owner(self): |
|
1372 | 1372 | fork_name = 'api-repo-fork' |
|
1373 | 1373 | id_, params = _build_data(self.apikey_regular, 'fork_repo', |
|
1374 | 1374 | repoid=self.REPO, |
|
1375 | 1375 | fork_name=fork_name, |
|
1376 | 1376 | owner=TEST_USER_ADMIN_LOGIN, |
|
1377 | 1377 | ) |
|
1378 | 1378 | response = api_call(self, params) |
|
1379 | 1379 | expected = 'Only Kallithea admin can specify `owner` param' |
|
1380 | 1380 | self._compare_error(id_, expected, given=response.body) |
|
1381 | 1381 | fixture.destroy_repo(fork_name) |
|
1382 | 1382 | |
|
1383 | 1383 | def test_api_fork_repo_non_admin_no_permission_to_fork(self): |
|
1384 | 1384 | RepoModel().grant_user_permission(repo=self.REPO, |
|
1385 | 1385 | user=self.TEST_USER_LOGIN, |
|
1386 | 1386 | perm='repository.none') |
|
1387 | 1387 | fork_name = 'api-repo-fork' |
|
1388 | 1388 | id_, params = _build_data(self.apikey_regular, 'fork_repo', |
|
1389 | 1389 | repoid=self.REPO, |
|
1390 | 1390 | fork_name=fork_name, |
|
1391 | 1391 | ) |
|
1392 | 1392 | response = api_call(self, params) |
|
1393 | 1393 | expected = 'repository `%s` does not exist' % (self.REPO) |
|
1394 | 1394 | self._compare_error(id_, expected, given=response.body) |
|
1395 | 1395 | fixture.destroy_repo(fork_name) |
|
1396 | 1396 | |
|
1397 | 1397 | @parameterized.expand([('read', 'repository.read'), |
|
1398 | 1398 | ('write', 'repository.write'), |
|
1399 | 1399 | ('admin', 'repository.admin')]) |
|
1400 | 1400 | def test_api_fork_repo_non_admin_no_create_repo_permission(self, name, perm): |
|
1401 | 1401 | fork_name = 'api-repo-fork' |
|
1402 | 1402 | # regardless of base repository permission, forking is disallowed |
|
1403 | 1403 | # when repository creation is disabled |
|
1404 | 1404 | RepoModel().grant_user_permission(repo=self.REPO, |
|
1405 | 1405 | user=self.TEST_USER_LOGIN, |
|
1406 | 1406 | perm=perm) |
|
1407 | 1407 | UserModel().revoke_perm('default', 'hg.create.repository') |
|
1408 | 1408 | UserModel().grant_perm('default', 'hg.create.none') |
|
1409 | 1409 | id_, params = _build_data(self.apikey_regular, 'fork_repo', |
|
1410 | 1410 | repoid=self.REPO, |
|
1411 | 1411 | fork_name=fork_name, |
|
1412 | 1412 | ) |
|
1413 | 1413 | response = api_call(self, params) |
|
1414 | 1414 | expected = 'no permission to create repositories' |
|
1415 | 1415 | self._compare_error(id_, expected, given=response.body) |
|
1416 | 1416 | fixture.destroy_repo(fork_name) |
|
1417 | 1417 | |
|
1418 | 1418 | def test_api_fork_repo_unknown_owner(self): |
|
1419 | 1419 | fork_name = 'api-repo-fork' |
|
1420 | 1420 | owner = 'i-dont-exist' |
|
1421 | 1421 | id_, params = _build_data(self.apikey, 'fork_repo', |
|
1422 | 1422 | repoid=self.REPO, |
|
1423 | 1423 | fork_name=fork_name, |
|
1424 | 1424 | owner=owner, |
|
1425 | 1425 | ) |
|
1426 | 1426 | response = api_call(self, params) |
|
1427 | 1427 | expected = 'user `%s` does not exist' % owner |
|
1428 | 1428 | self._compare_error(id_, expected, given=response.body) |
|
1429 | 1429 | |
|
1430 | 1430 | def test_api_fork_repo_fork_exists(self): |
|
1431 | 1431 | fork_name = 'api-repo-fork' |
|
1432 | 1432 | fixture.create_fork(self.REPO, fork_name) |
|
1433 | 1433 | |
|
1434 | 1434 | try: |
|
1435 | 1435 | fork_name = 'api-repo-fork' |
|
1436 | 1436 | |
|
1437 | 1437 | id_, params = _build_data(self.apikey, 'fork_repo', |
|
1438 | 1438 | repoid=self.REPO, |
|
1439 | 1439 | fork_name=fork_name, |
|
1440 | 1440 | owner=TEST_USER_ADMIN_LOGIN, |
|
1441 | 1441 | ) |
|
1442 | 1442 | response = api_call(self, params) |
|
1443 | 1443 | |
|
1444 | 1444 | expected = "fork `%s` already exist" % fork_name |
|
1445 | 1445 | self._compare_error(id_, expected, given=response.body) |
|
1446 | 1446 | finally: |
|
1447 | 1447 | fixture.destroy_repo(fork_name) |
|
1448 | 1448 | |
|
1449 | 1449 | def test_api_fork_repo_repo_exists(self): |
|
1450 | 1450 | fork_name = self.REPO |
|
1451 | 1451 | |
|
1452 | 1452 | id_, params = _build_data(self.apikey, 'fork_repo', |
|
1453 | 1453 | repoid=self.REPO, |
|
1454 | 1454 | fork_name=fork_name, |
|
1455 | 1455 | owner=TEST_USER_ADMIN_LOGIN, |
|
1456 | 1456 | ) |
|
1457 | 1457 | response = api_call(self, params) |
|
1458 | 1458 | |
|
1459 | 1459 | expected = "repo `%s` already exist" % fork_name |
|
1460 | 1460 | self._compare_error(id_, expected, given=response.body) |
|
1461 | 1461 | |
|
1462 | 1462 | @mock.patch.object(RepoModel, 'create_fork', crash) |
|
1463 | 1463 | def test_api_fork_repo_exception_occurred(self): |
|
1464 | 1464 | fork_name = 'api-repo-fork' |
|
1465 | 1465 | id_, params = _build_data(self.apikey, 'fork_repo', |
|
1466 | 1466 | repoid=self.REPO, |
|
1467 | 1467 | fork_name=fork_name, |
|
1468 | 1468 | owner=TEST_USER_ADMIN_LOGIN, |
|
1469 | 1469 | ) |
|
1470 | 1470 | response = api_call(self, params) |
|
1471 | 1471 | |
|
1472 | 1472 | expected = 'failed to fork repository `%s` as `%s`' % (self.REPO, |
|
1473 | 1473 | fork_name) |
|
1474 | 1474 | self._compare_error(id_, expected, given=response.body) |
|
1475 | 1475 | |
|
1476 | 1476 | def test_api_get_user_group(self): |
|
1477 | 1477 | id_, params = _build_data(self.apikey, 'get_user_group', |
|
1478 | 1478 | usergroupid=TEST_USER_GROUP) |
|
1479 | 1479 | response = api_call(self, params) |
|
1480 | 1480 | |
|
1481 | 1481 | user_group = UserGroupModel().get_group(TEST_USER_GROUP) |
|
1482 | 1482 | members = [] |
|
1483 | 1483 | for user in user_group.members: |
|
1484 | 1484 | user = user.user |
|
1485 | 1485 | members.append(user.get_api_data()) |
|
1486 | 1486 | |
|
1487 | 1487 | ret = user_group.get_api_data() |
|
1488 | 1488 | ret['members'] = members |
|
1489 | 1489 | expected = ret |
|
1490 | 1490 | self._compare_ok(id_, expected, given=response.body) |
|
1491 | 1491 | |
|
1492 | 1492 | def test_api_get_user_groups(self): |
|
1493 | 1493 | gr_name = 'test_user_group2' |
|
1494 | 1494 | make_user_group(gr_name) |
|
1495 | 1495 | |
|
1496 | 1496 | id_, params = _build_data(self.apikey, 'get_user_groups', ) |
|
1497 | 1497 | response = api_call(self, params) |
|
1498 | 1498 | |
|
1499 | 1499 | try: |
|
1500 | 1500 | expected = [] |
|
1501 | 1501 | for gr_name in [TEST_USER_GROUP, 'test_user_group2']: |
|
1502 | 1502 | user_group = UserGroupModel().get_group(gr_name) |
|
1503 | 1503 | ret = user_group.get_api_data() |
|
1504 | 1504 | expected.append(ret) |
|
1505 | 1505 | self._compare_ok(id_, expected, given=response.body) |
|
1506 | 1506 | finally: |
|
1507 | 1507 | fixture.destroy_user_group(gr_name) |
|
1508 | 1508 | |
|
1509 | 1509 | def test_api_create_user_group(self): |
|
1510 | 1510 | group_name = 'some_new_group' |
|
1511 | 1511 | id_, params = _build_data(self.apikey, 'create_user_group', |
|
1512 | 1512 | group_name=group_name) |
|
1513 | 1513 | response = api_call(self, params) |
|
1514 | 1514 | |
|
1515 | 1515 | ret = { |
|
1516 | 1516 | 'msg': 'created new user group `%s`' % group_name, |
|
1517 | 1517 | 'user_group': jsonify(UserGroupModel() \ |
|
1518 | 1518 | .get_by_name(group_name) \ |
|
1519 | 1519 | .get_api_data()) |
|
1520 | 1520 | } |
|
1521 | 1521 | expected = ret |
|
1522 | 1522 | self._compare_ok(id_, expected, given=response.body) |
|
1523 | 1523 | |
|
1524 | 1524 | fixture.destroy_user_group(group_name) |
|
1525 | 1525 | |
|
1526 | 1526 | def test_api_get_user_group_that_exist(self): |
|
1527 | 1527 | id_, params = _build_data(self.apikey, 'create_user_group', |
|
1528 | 1528 | group_name=TEST_USER_GROUP) |
|
1529 | 1529 | response = api_call(self, params) |
|
1530 | 1530 | |
|
1531 | 1531 | expected = "user group `%s` already exist" % TEST_USER_GROUP |
|
1532 | 1532 | self._compare_error(id_, expected, given=response.body) |
|
1533 | 1533 | |
|
1534 | 1534 | @mock.patch.object(UserGroupModel, 'create', crash) |
|
1535 | 1535 | def test_api_get_user_group_exception_occurred(self): |
|
1536 | 1536 | group_name = 'exception_happens' |
|
1537 | 1537 | id_, params = _build_data(self.apikey, 'create_user_group', |
|
1538 | 1538 | group_name=group_name) |
|
1539 | 1539 | response = api_call(self, params) |
|
1540 | 1540 | |
|
1541 | 1541 | expected = 'failed to create group `%s`' % group_name |
|
1542 | 1542 | self._compare_error(id_, expected, given=response.body) |
|
1543 | 1543 | |
|
1544 | 1544 | @parameterized.expand([('group_name', {'group_name': 'new_group_name'}), |
|
1545 | 1545 | ('group_name', {'group_name': 'test_group_for_update'}), |
|
1546 | 1546 | ('owner', {'owner': TEST_USER_REGULAR_LOGIN}), |
|
1547 | 1547 | ('active', {'active': False}), |
|
1548 | 1548 | ('active', {'active': True})]) |
|
1549 | 1549 | def test_api_update_user_group(self, changing_attr, updates): |
|
1550 | 1550 | gr_name = 'test_group_for_update' |
|
1551 | 1551 | user_group = fixture.create_user_group(gr_name) |
|
1552 | 1552 | id_, params = _build_data(self.apikey, 'update_user_group', |
|
1553 | 1553 | usergroupid=gr_name, **updates) |
|
1554 | 1554 | response = api_call(self, params) |
|
1555 | 1555 | try: |
|
1556 | 1556 | expected = { |
|
1557 | 1557 | 'msg': 'updated user group ID:%s %s' % (user_group.users_group_id, |
|
1558 | 1558 | user_group.users_group_name), |
|
1559 | 1559 | 'user_group': user_group.get_api_data() |
|
1560 | 1560 | } |
|
1561 | 1561 | self._compare_ok(id_, expected, given=response.body) |
|
1562 | 1562 | finally: |
|
1563 | 1563 | if changing_attr == 'group_name': |
|
1564 | 1564 | # switch to updated name for proper cleanup |
|
1565 | 1565 | gr_name = updates['group_name'] |
|
1566 | 1566 | fixture.destroy_user_group(gr_name) |
|
1567 | 1567 | |
|
1568 | 1568 | @mock.patch.object(UserGroupModel, 'update', crash) |
|
1569 | 1569 | def test_api_update_user_group_exception_occurred(self): |
|
1570 | 1570 | gr_name = 'test_group' |
|
1571 | 1571 | fixture.create_user_group(gr_name) |
|
1572 | 1572 | id_, params = _build_data(self.apikey, 'update_user_group', |
|
1573 | 1573 | usergroupid=gr_name) |
|
1574 | 1574 | response = api_call(self, params) |
|
1575 | 1575 | try: |
|
1576 | 1576 | expected = 'failed to update user group `%s`' % gr_name |
|
1577 | 1577 | self._compare_error(id_, expected, given=response.body) |
|
1578 | 1578 | finally: |
|
1579 | 1579 | fixture.destroy_user_group(gr_name) |
|
1580 | 1580 | |
|
1581 | 1581 | def test_api_add_user_to_user_group(self): |
|
1582 | 1582 | gr_name = 'test_group' |
|
1583 | 1583 | fixture.create_user_group(gr_name) |
|
1584 | 1584 | id_, params = _build_data(self.apikey, 'add_user_to_user_group', |
|
1585 | 1585 | usergroupid=gr_name, |
|
1586 | 1586 | userid=TEST_USER_ADMIN_LOGIN) |
|
1587 | 1587 | response = api_call(self, params) |
|
1588 | 1588 | try: |
|
1589 | 1589 | expected = { |
|
1590 | 1590 | 'msg': 'added member `%s` to user group `%s`' % ( |
|
1591 | 1591 | TEST_USER_ADMIN_LOGIN, gr_name), |
|
1592 | 1592 | 'success': True |
|
1593 | 1593 | } |
|
1594 | 1594 | self._compare_ok(id_, expected, given=response.body) |
|
1595 | 1595 | finally: |
|
1596 | 1596 | fixture.destroy_user_group(gr_name) |
|
1597 | 1597 | |
|
1598 | 1598 | def test_api_add_user_to_user_group_that_doesnt_exist(self): |
|
1599 | 1599 | id_, params = _build_data(self.apikey, 'add_user_to_user_group', |
|
1600 | 1600 | usergroupid='false-group', |
|
1601 | 1601 | userid=TEST_USER_ADMIN_LOGIN) |
|
1602 | 1602 | response = api_call(self, params) |
|
1603 | 1603 | |
|
1604 | 1604 | expected = 'user group `%s` does not exist' % 'false-group' |
|
1605 | 1605 | self._compare_error(id_, expected, given=response.body) |
|
1606 | 1606 | |
|
1607 | 1607 | @mock.patch.object(UserGroupModel, 'add_user_to_group', crash) |
|
1608 | 1608 | def test_api_add_user_to_user_group_exception_occurred(self): |
|
1609 | 1609 | gr_name = 'test_group' |
|
1610 | 1610 | fixture.create_user_group(gr_name) |
|
1611 | 1611 | id_, params = _build_data(self.apikey, 'add_user_to_user_group', |
|
1612 | 1612 | usergroupid=gr_name, |
|
1613 | 1613 | userid=TEST_USER_ADMIN_LOGIN) |
|
1614 | 1614 | response = api_call(self, params) |
|
1615 | 1615 | |
|
1616 | 1616 | try: |
|
1617 | 1617 | expected = 'failed to add member to user group `%s`' % gr_name |
|
1618 | 1618 | self._compare_error(id_, expected, given=response.body) |
|
1619 | 1619 | finally: |
|
1620 | 1620 | fixture.destroy_user_group(gr_name) |
|
1621 | 1621 | |
|
1622 | 1622 | def test_api_remove_user_from_user_group(self): |
|
1623 | 1623 | gr_name = 'test_group_3' |
|
1624 | 1624 | gr = fixture.create_user_group(gr_name) |
|
1625 | 1625 | UserGroupModel().add_user_to_group(gr, user=TEST_USER_ADMIN_LOGIN) |
|
1626 | 1626 | Session().commit() |
|
1627 | 1627 | id_, params = _build_data(self.apikey, 'remove_user_from_user_group', |
|
1628 | 1628 | usergroupid=gr_name, |
|
1629 | 1629 | userid=TEST_USER_ADMIN_LOGIN) |
|
1630 | 1630 | response = api_call(self, params) |
|
1631 | 1631 | |
|
1632 | 1632 | try: |
|
1633 | 1633 | expected = { |
|
1634 | 1634 | 'msg': 'removed member `%s` from user group `%s`' % ( |
|
1635 | 1635 | TEST_USER_ADMIN_LOGIN, gr_name |
|
1636 | 1636 | ), |
|
1637 | 1637 | 'success': True} |
|
1638 | 1638 | self._compare_ok(id_, expected, given=response.body) |
|
1639 | 1639 | finally: |
|
1640 | 1640 | fixture.destroy_user_group(gr_name) |
|
1641 | 1641 | |
|
1642 | 1642 | @mock.patch.object(UserGroupModel, 'remove_user_from_group', crash) |
|
1643 | 1643 | def test_api_remove_user_from_user_group_exception_occurred(self): |
|
1644 | 1644 | gr_name = 'test_group_3' |
|
1645 | 1645 | gr = fixture.create_user_group(gr_name) |
|
1646 | 1646 | UserGroupModel().add_user_to_group(gr, user=TEST_USER_ADMIN_LOGIN) |
|
1647 | 1647 | Session().commit() |
|
1648 | 1648 | id_, params = _build_data(self.apikey, 'remove_user_from_user_group', |
|
1649 | 1649 | usergroupid=gr_name, |
|
1650 | 1650 | userid=TEST_USER_ADMIN_LOGIN) |
|
1651 | 1651 | response = api_call(self, params) |
|
1652 | 1652 | try: |
|
1653 | 1653 | expected = 'failed to remove member from user group `%s`' % gr_name |
|
1654 | 1654 | self._compare_error(id_, expected, given=response.body) |
|
1655 | 1655 | finally: |
|
1656 | 1656 | fixture.destroy_user_group(gr_name) |
|
1657 | 1657 | |
|
1658 | 1658 | def test_api_delete_user_group(self): |
|
1659 | 1659 | gr_name = 'test_group' |
|
1660 | 1660 | ugroup = fixture.create_user_group(gr_name) |
|
1661 | 1661 | gr_id = ugroup.users_group_id |
|
1662 | 1662 | id_, params = _build_data(self.apikey, 'delete_user_group', |
|
1663 | 1663 | usergroupid=gr_name, |
|
1664 | 1664 | userid=TEST_USER_ADMIN_LOGIN) |
|
1665 | 1665 | response = api_call(self, params) |
|
1666 | 1666 | |
|
1667 | 1667 | try: |
|
1668 | 1668 | expected = { |
|
1669 | 1669 | 'user_group': None, |
|
1670 | 1670 | 'msg': 'deleted user group ID:%s %s' % (gr_id, gr_name) |
|
1671 | 1671 | } |
|
1672 | 1672 | self._compare_ok(id_, expected, given=response.body) |
|
1673 | 1673 | finally: |
|
1674 | 1674 | if UserGroupModel().get_by_name(gr_name): |
|
1675 | 1675 | fixture.destroy_user_group(gr_name) |
|
1676 | 1676 | |
|
1677 | 1677 | def test_api_delete_user_group_that_is_assigned(self): |
|
1678 | 1678 | gr_name = 'test_group' |
|
1679 | 1679 | ugroup = fixture.create_user_group(gr_name) |
|
1680 | 1680 | gr_id = ugroup.users_group_id |
|
1681 | 1681 | |
|
1682 | 1682 | ugr_to_perm = RepoModel().grant_user_group_permission(self.REPO, gr_name, 'repository.write') |
|
1683 | 1683 | msg = 'User Group assigned to %s' % ugr_to_perm.repository.repo_name |
|
1684 | 1684 | |
|
1685 | 1685 | id_, params = _build_data(self.apikey, 'delete_user_group', |
|
1686 | 1686 | usergroupid=gr_name, |
|
1687 | 1687 | userid=TEST_USER_ADMIN_LOGIN) |
|
1688 | 1688 | response = api_call(self, params) |
|
1689 | 1689 | |
|
1690 | 1690 | try: |
|
1691 | 1691 | expected = msg |
|
1692 | 1692 | self._compare_error(id_, expected, given=response.body) |
|
1693 | 1693 | finally: |
|
1694 | 1694 | if UserGroupModel().get_by_name(gr_name): |
|
1695 | 1695 | fixture.destroy_user_group(gr_name) |
|
1696 | 1696 | |
|
1697 | 1697 | def test_api_delete_user_group_exception_occurred(self): |
|
1698 | 1698 | gr_name = 'test_group' |
|
1699 | 1699 | ugroup = fixture.create_user_group(gr_name) |
|
1700 | 1700 | gr_id = ugroup.users_group_id |
|
1701 | 1701 | id_, params = _build_data(self.apikey, 'delete_user_group', |
|
1702 | 1702 | usergroupid=gr_name, |
|
1703 | 1703 | userid=TEST_USER_ADMIN_LOGIN) |
|
1704 | 1704 | |
|
1705 | 1705 | try: |
|
1706 | 1706 | with mock.patch.object(UserGroupModel, 'delete', crash): |
|
1707 | 1707 | response = api_call(self, params) |
|
1708 | 1708 | expected = 'failed to delete user group ID:%s %s' % (gr_id, gr_name) |
|
1709 | 1709 | self._compare_error(id_, expected, given=response.body) |
|
1710 | 1710 | finally: |
|
1711 | 1711 | fixture.destroy_user_group(gr_name) |
|
1712 | 1712 | |
|
1713 | 1713 | @parameterized.expand([('none', 'repository.none'), |
|
1714 | 1714 | ('read', 'repository.read'), |
|
1715 | 1715 | ('write', 'repository.write'), |
|
1716 | 1716 | ('admin', 'repository.admin')]) |
|
1717 | 1717 | def test_api_grant_user_permission(self, name, perm): |
|
1718 | 1718 | id_, params = _build_data(self.apikey, |
|
1719 | 1719 | 'grant_user_permission', |
|
1720 | 1720 | repoid=self.REPO, |
|
1721 | 1721 | userid=TEST_USER_ADMIN_LOGIN, |
|
1722 | 1722 | perm=perm) |
|
1723 | 1723 | response = api_call(self, params) |
|
1724 | 1724 | |
|
1725 | 1725 | ret = { |
|
1726 | 1726 | 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % ( |
|
1727 | 1727 | perm, TEST_USER_ADMIN_LOGIN, self.REPO |
|
1728 | 1728 | ), |
|
1729 | 1729 | 'success': True |
|
1730 | 1730 | } |
|
1731 | 1731 | expected = ret |
|
1732 | 1732 | self._compare_ok(id_, expected, given=response.body) |
|
1733 | 1733 | |
|
1734 | 1734 | def test_api_grant_user_permission_wrong_permission(self): |
|
1735 | 1735 | perm = 'haha.no.permission' |
|
1736 | 1736 | id_, params = _build_data(self.apikey, |
|
1737 | 1737 | 'grant_user_permission', |
|
1738 | 1738 | repoid=self.REPO, |
|
1739 | 1739 | userid=TEST_USER_ADMIN_LOGIN, |
|
1740 | 1740 | perm=perm) |
|
1741 | 1741 | response = api_call(self, params) |
|
1742 | 1742 | |
|
1743 | 1743 | expected = 'permission `%s` does not exist' % perm |
|
1744 | 1744 | self._compare_error(id_, expected, given=response.body) |
|
1745 | 1745 | |
|
1746 | 1746 | @mock.patch.object(RepoModel, 'grant_user_permission', crash) |
|
1747 | 1747 | def test_api_grant_user_permission_exception_when_adding(self): |
|
1748 | 1748 | perm = 'repository.read' |
|
1749 | 1749 | id_, params = _build_data(self.apikey, |
|
1750 | 1750 | 'grant_user_permission', |
|
1751 | 1751 | repoid=self.REPO, |
|
1752 | 1752 | userid=TEST_USER_ADMIN_LOGIN, |
|
1753 | 1753 | perm=perm) |
|
1754 | 1754 | response = api_call(self, params) |
|
1755 | 1755 | |
|
1756 | 1756 | expected = 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
1757 | 1757 | TEST_USER_ADMIN_LOGIN, self.REPO |
|
1758 | 1758 | ) |
|
1759 | 1759 | self._compare_error(id_, expected, given=response.body) |
|
1760 | 1760 | |
|
1761 | 1761 | def test_api_revoke_user_permission(self): |
|
1762 | 1762 | id_, params = _build_data(self.apikey, |
|
1763 | 1763 | 'revoke_user_permission', |
|
1764 | 1764 | repoid=self.REPO, |
|
1765 | 1765 | userid=TEST_USER_ADMIN_LOGIN, ) |
|
1766 | 1766 | response = api_call(self, params) |
|
1767 | 1767 | |
|
1768 | 1768 | expected = { |
|
1769 | 1769 | 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % ( |
|
1770 | 1770 | TEST_USER_ADMIN_LOGIN, self.REPO |
|
1771 | 1771 | ), |
|
1772 | 1772 | 'success': True |
|
1773 | 1773 | } |
|
1774 | 1774 | self._compare_ok(id_, expected, given=response.body) |
|
1775 | 1775 | |
|
1776 | 1776 | @mock.patch.object(RepoModel, 'revoke_user_permission', crash) |
|
1777 | 1777 | def test_api_revoke_user_permission_exception_when_adding(self): |
|
1778 | 1778 | id_, params = _build_data(self.apikey, |
|
1779 | 1779 | 'revoke_user_permission', |
|
1780 | 1780 | repoid=self.REPO, |
|
1781 | 1781 | userid=TEST_USER_ADMIN_LOGIN, ) |
|
1782 | 1782 | response = api_call(self, params) |
|
1783 | 1783 | |
|
1784 | 1784 | expected = 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
1785 | 1785 | TEST_USER_ADMIN_LOGIN, self.REPO |
|
1786 | 1786 | ) |
|
1787 | 1787 | self._compare_error(id_, expected, given=response.body) |
|
1788 | 1788 | |
|
1789 | 1789 | @parameterized.expand([('none', 'repository.none'), |
|
1790 | 1790 | ('read', 'repository.read'), |
|
1791 | 1791 | ('write', 'repository.write'), |
|
1792 | 1792 | ('admin', 'repository.admin')]) |
|
1793 | 1793 | def test_api_grant_user_group_permission(self, name, perm): |
|
1794 | 1794 | id_, params = _build_data(self.apikey, |
|
1795 | 1795 | 'grant_user_group_permission', |
|
1796 | 1796 | repoid=self.REPO, |
|
1797 | 1797 | usergroupid=TEST_USER_GROUP, |
|
1798 | 1798 | perm=perm) |
|
1799 | 1799 | response = api_call(self, params) |
|
1800 | 1800 | |
|
1801 | 1801 | ret = { |
|
1802 | 1802 | 'msg': 'Granted perm: `%s` for user group: `%s` in repo: `%s`' % ( |
|
1803 | 1803 | perm, TEST_USER_GROUP, self.REPO |
|
1804 | 1804 | ), |
|
1805 | 1805 | 'success': True |
|
1806 | 1806 | } |
|
1807 | 1807 | expected = ret |
|
1808 | 1808 | self._compare_ok(id_, expected, given=response.body) |
|
1809 | 1809 | |
|
1810 | 1810 | def test_api_grant_user_group_permission_wrong_permission(self): |
|
1811 | 1811 | perm = 'haha.no.permission' |
|
1812 | 1812 | id_, params = _build_data(self.apikey, |
|
1813 | 1813 | 'grant_user_group_permission', |
|
1814 | 1814 | repoid=self.REPO, |
|
1815 | 1815 | usergroupid=TEST_USER_GROUP, |
|
1816 | 1816 | perm=perm) |
|
1817 | 1817 | response = api_call(self, params) |
|
1818 | 1818 | |
|
1819 | 1819 | expected = 'permission `%s` does not exist' % perm |
|
1820 | 1820 | self._compare_error(id_, expected, given=response.body) |
|
1821 | 1821 | |
|
1822 | 1822 | @mock.patch.object(RepoModel, 'grant_user_group_permission', crash) |
|
1823 | 1823 | def test_api_grant_user_group_permission_exception_when_adding(self): |
|
1824 | 1824 | perm = 'repository.read' |
|
1825 | 1825 | id_, params = _build_data(self.apikey, |
|
1826 | 1826 | 'grant_user_group_permission', |
|
1827 | 1827 | repoid=self.REPO, |
|
1828 | 1828 | usergroupid=TEST_USER_GROUP, |
|
1829 | 1829 | perm=perm) |
|
1830 | 1830 | response = api_call(self, params) |
|
1831 | 1831 | |
|
1832 | 1832 | expected = 'failed to edit permission for user group: `%s` in repo: `%s`' % ( |
|
1833 | 1833 | TEST_USER_GROUP, self.REPO |
|
1834 | 1834 | ) |
|
1835 | 1835 | self._compare_error(id_, expected, given=response.body) |
|
1836 | 1836 | |
|
1837 | 1837 | def test_api_revoke_user_group_permission(self): |
|
1838 | 1838 | RepoModel().grant_user_group_permission(repo=self.REPO, |
|
1839 | 1839 | group_name=TEST_USER_GROUP, |
|
1840 | 1840 | perm='repository.read') |
|
1841 | 1841 | Session().commit() |
|
1842 | 1842 | id_, params = _build_data(self.apikey, |
|
1843 | 1843 | 'revoke_user_group_permission', |
|
1844 | 1844 | repoid=self.REPO, |
|
1845 | 1845 | usergroupid=TEST_USER_GROUP, ) |
|
1846 | 1846 | response = api_call(self, params) |
|
1847 | 1847 | |
|
1848 | 1848 | expected = { |
|
1849 | 1849 | 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % ( |
|
1850 | 1850 | TEST_USER_GROUP, self.REPO |
|
1851 | 1851 | ), |
|
1852 | 1852 | 'success': True |
|
1853 | 1853 | } |
|
1854 | 1854 | self._compare_ok(id_, expected, given=response.body) |
|
1855 | 1855 | |
|
1856 | 1856 | @mock.patch.object(RepoModel, 'revoke_user_group_permission', crash) |
|
1857 | 1857 | def test_api_revoke_user_group_permission_exception_when_adding(self): |
|
1858 | 1858 | id_, params = _build_data(self.apikey, |
|
1859 | 1859 | 'revoke_user_group_permission', |
|
1860 | 1860 | repoid=self.REPO, |
|
1861 | 1861 | usergroupid=TEST_USER_GROUP, ) |
|
1862 | 1862 | response = api_call(self, params) |
|
1863 | 1863 | |
|
1864 | 1864 | expected = 'failed to edit permission for user group: `%s` in repo: `%s`' % ( |
|
1865 | 1865 | TEST_USER_GROUP, self.REPO |
|
1866 | 1866 | ) |
|
1867 | 1867 | self._compare_error(id_, expected, given=response.body) |
|
1868 | 1868 | |
|
1869 | 1869 | @parameterized.expand([ |
|
1870 | 1870 | ('none', 'group.none', 'none'), |
|
1871 | 1871 | ('read', 'group.read', 'none'), |
|
1872 | 1872 | ('write', 'group.write', 'none'), |
|
1873 | 1873 | ('admin', 'group.admin', 'none'), |
|
1874 | 1874 | |
|
1875 | 1875 | ('none', 'group.none', 'all'), |
|
1876 | 1876 | ('read', 'group.read', 'all'), |
|
1877 | 1877 | ('write', 'group.write', 'all'), |
|
1878 | 1878 | ('admin', 'group.admin', 'all'), |
|
1879 | 1879 | |
|
1880 | 1880 | ('none', 'group.none', 'repos'), |
|
1881 | 1881 | ('read', 'group.read', 'repos'), |
|
1882 | 1882 | ('write', 'group.write', 'repos'), |
|
1883 | 1883 | ('admin', 'group.admin', 'repos'), |
|
1884 | 1884 | |
|
1885 | 1885 | ('none', 'group.none', 'groups'), |
|
1886 | 1886 | ('read', 'group.read', 'groups'), |
|
1887 | 1887 | ('write', 'group.write', 'groups'), |
|
1888 | 1888 | ('admin', 'group.admin', 'groups'), |
|
1889 | 1889 | ]) |
|
1890 | 1890 | def test_api_grant_user_permission_to_repo_group(self, name, perm, apply_to_children): |
|
1891 | 1891 | id_, params = _build_data(self.apikey, |
|
1892 | 1892 | 'grant_user_permission_to_repo_group', |
|
1893 | 1893 | repogroupid=TEST_REPO_GROUP, |
|
1894 | 1894 | userid=TEST_USER_ADMIN_LOGIN, |
|
1895 | 1895 | perm=perm, apply_to_children=apply_to_children) |
|
1896 | 1896 | response = api_call(self, params) |
|
1897 | 1897 | |
|
1898 | 1898 | ret = { |
|
1899 | 1899 | 'msg': 'Granted perm: `%s` (recursive:%s) for user: `%s` in repo group: `%s`' % ( |
|
1900 | 1900 | perm, apply_to_children, TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP |
|
1901 | 1901 | ), |
|
1902 | 1902 | 'success': True |
|
1903 | 1903 | } |
|
1904 | 1904 | expected = ret |
|
1905 | 1905 | self._compare_ok(id_, expected, given=response.body) |
|
1906 | 1906 | |
|
1907 | 1907 | @parameterized.expand([ |
|
1908 | 1908 | ('none_fails', 'group.none', 'none', False, False), |
|
1909 | 1909 | ('read_fails', 'group.read', 'none', False, False), |
|
1910 | 1910 | ('write_fails', 'group.write', 'none', False, False), |
|
1911 | 1911 | ('admin_fails', 'group.admin', 'none', False, False), |
|
1912 | 1912 | |
|
1913 | 1913 | # with granted perms |
|
1914 | 1914 | ('none_ok', 'group.none', 'none', True, True), |
|
1915 | 1915 | ('read_ok', 'group.read', 'none', True, True), |
|
1916 | 1916 | ('write_ok', 'group.write', 'none', True, True), |
|
1917 | 1917 | ('admin_ok', 'group.admin', 'none', True, True), |
|
1918 | 1918 | ]) |
|
1919 | 1919 | def test_api_grant_user_permission_to_repo_group_by_regular_user( |
|
1920 | 1920 | self, name, perm, apply_to_children, grant_admin, access_ok): |
|
1921 | 1921 | if grant_admin: |
|
1922 | 1922 | RepoGroupModel().grant_user_permission(TEST_REPO_GROUP, |
|
1923 | 1923 | self.TEST_USER_LOGIN, |
|
1924 | 1924 | 'group.admin') |
|
1925 | 1925 | Session().commit() |
|
1926 | 1926 | |
|
1927 | 1927 | id_, params = _build_data(self.apikey_regular, |
|
1928 | 1928 | 'grant_user_permission_to_repo_group', |
|
1929 | 1929 | repogroupid=TEST_REPO_GROUP, |
|
1930 | 1930 | userid=TEST_USER_ADMIN_LOGIN, |
|
1931 | 1931 | perm=perm, apply_to_children=apply_to_children) |
|
1932 | 1932 | response = api_call(self, params) |
|
1933 | 1933 | if access_ok: |
|
1934 | 1934 | ret = { |
|
1935 | 1935 | 'msg': 'Granted perm: `%s` (recursive:%s) for user: `%s` in repo group: `%s`' % ( |
|
1936 | 1936 | perm, apply_to_children, TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP |
|
1937 | 1937 | ), |
|
1938 | 1938 | 'success': True |
|
1939 | 1939 | } |
|
1940 | 1940 | expected = ret |
|
1941 | 1941 | self._compare_ok(id_, expected, given=response.body) |
|
1942 | 1942 | else: |
|
1943 | 1943 | expected = 'repository group `%s` does not exist' % TEST_REPO_GROUP |
|
1944 | 1944 | self._compare_error(id_, expected, given=response.body) |
|
1945 | 1945 | |
|
1946 | 1946 | def test_api_grant_user_permission_to_repo_group_wrong_permission(self): |
|
1947 | 1947 | perm = 'haha.no.permission' |
|
1948 | 1948 | id_, params = _build_data(self.apikey, |
|
1949 | 1949 | 'grant_user_permission_to_repo_group', |
|
1950 | 1950 | repogroupid=TEST_REPO_GROUP, |
|
1951 | 1951 | userid=TEST_USER_ADMIN_LOGIN, |
|
1952 | 1952 | perm=perm) |
|
1953 | 1953 | response = api_call(self, params) |
|
1954 | 1954 | |
|
1955 | 1955 | expected = 'permission `%s` does not exist' % perm |
|
1956 | 1956 | self._compare_error(id_, expected, given=response.body) |
|
1957 | 1957 | |
|
1958 | 1958 | @mock.patch.object(RepoGroupModel, 'grant_user_permission', crash) |
|
1959 | 1959 | def test_api_grant_user_permission_to_repo_group_exception_when_adding(self): |
|
1960 | 1960 | perm = 'group.read' |
|
1961 | 1961 | id_, params = _build_data(self.apikey, |
|
1962 | 1962 | 'grant_user_permission_to_repo_group', |
|
1963 | 1963 | repogroupid=TEST_REPO_GROUP, |
|
1964 | 1964 | userid=TEST_USER_ADMIN_LOGIN, |
|
1965 | 1965 | perm=perm) |
|
1966 | 1966 | response = api_call(self, params) |
|
1967 | 1967 | |
|
1968 | 1968 | expected = 'failed to edit permission for user: `%s` in repo group: `%s`' % ( |
|
1969 | 1969 | TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP |
|
1970 | 1970 | ) |
|
1971 | 1971 | self._compare_error(id_, expected, given=response.body) |
|
1972 | 1972 | |
|
1973 | 1973 | @parameterized.expand([ |
|
1974 | 1974 | ('none', 'none'), |
|
1975 | 1975 | ('all', 'all'), |
|
1976 | 1976 | ('repos', 'repos'), |
|
1977 | 1977 | ('groups', 'groups'), |
|
1978 | 1978 | ]) |
|
1979 | 1979 | def test_api_revoke_user_permission_from_repo_group(self, name, apply_to_children): |
|
1980 | 1980 | RepoGroupModel().grant_user_permission(repo_group=TEST_REPO_GROUP, |
|
1981 | 1981 | user=TEST_USER_ADMIN_LOGIN, |
|
1982 | 1982 | perm='group.read',) |
|
1983 | 1983 | Session().commit() |
|
1984 | 1984 | |
|
1985 | 1985 | id_, params = _build_data(self.apikey, |
|
1986 | 1986 | 'revoke_user_permission_from_repo_group', |
|
1987 | 1987 | repogroupid=TEST_REPO_GROUP, |
|
1988 | 1988 | userid=TEST_USER_ADMIN_LOGIN, |
|
1989 | 1989 | apply_to_children=apply_to_children,) |
|
1990 | 1990 | response = api_call(self, params) |
|
1991 | 1991 | |
|
1992 | 1992 | expected = { |
|
1993 | 1993 | 'msg': 'Revoked perm (recursive:%s) for user: `%s` in repo group: `%s`' % ( |
|
1994 | 1994 | apply_to_children, TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP |
|
1995 | 1995 | ), |
|
1996 | 1996 | 'success': True |
|
1997 | 1997 | } |
|
1998 | 1998 | self._compare_ok(id_, expected, given=response.body) |
|
1999 | 1999 | |
|
2000 | 2000 | @parameterized.expand([ |
|
2001 | 2001 | ('none', 'none', False, False), |
|
2002 | 2002 | ('all', 'all', False, False), |
|
2003 | 2003 | ('repos', 'repos', False, False), |
|
2004 | 2004 | ('groups', 'groups', False, False), |
|
2005 | 2005 | |
|
2006 | 2006 | # after granting admin rights |
|
2007 | 2007 | ('none', 'none', False, False), |
|
2008 | 2008 | ('all', 'all', False, False), |
|
2009 | 2009 | ('repos', 'repos', False, False), |
|
2010 | 2010 | ('groups', 'groups', False, False), |
|
2011 | 2011 | ]) |
|
2012 | 2012 | def test_api_revoke_user_permission_from_repo_group_by_regular_user( |
|
2013 | 2013 | self, name, apply_to_children, grant_admin, access_ok): |
|
2014 | 2014 | RepoGroupModel().grant_user_permission(repo_group=TEST_REPO_GROUP, |
|
2015 | 2015 | user=TEST_USER_ADMIN_LOGIN, |
|
2016 | 2016 | perm='group.read',) |
|
2017 | 2017 | Session().commit() |
|
2018 | 2018 | |
|
2019 | 2019 | if grant_admin: |
|
2020 | 2020 | RepoGroupModel().grant_user_permission(TEST_REPO_GROUP, |
|
2021 | 2021 | self.TEST_USER_LOGIN, |
|
2022 | 2022 | 'group.admin') |
|
2023 | 2023 | Session().commit() |
|
2024 | 2024 | |
|
2025 | 2025 | id_, params = _build_data(self.apikey_regular, |
|
2026 | 2026 | 'revoke_user_permission_from_repo_group', |
|
2027 | 2027 | repogroupid=TEST_REPO_GROUP, |
|
2028 | 2028 | userid=TEST_USER_ADMIN_LOGIN, |
|
2029 | 2029 | apply_to_children=apply_to_children,) |
|
2030 | 2030 | response = api_call(self, params) |
|
2031 | 2031 | if access_ok: |
|
2032 | 2032 | expected = { |
|
2033 | 2033 | 'msg': 'Revoked perm (recursive:%s) for user: `%s` in repo group: `%s`' % ( |
|
2034 | 2034 | apply_to_children, TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP |
|
2035 | 2035 | ), |
|
2036 | 2036 | 'success': True |
|
2037 | 2037 | } |
|
2038 | 2038 | self._compare_ok(id_, expected, given=response.body) |
|
2039 | 2039 | else: |
|
2040 | 2040 | expected = 'repository group `%s` does not exist' % TEST_REPO_GROUP |
|
2041 | 2041 | self._compare_error(id_, expected, given=response.body) |
|
2042 | 2042 | |
|
2043 | 2043 | @mock.patch.object(RepoGroupModel, 'revoke_user_permission', crash) |
|
2044 | 2044 | def test_api_revoke_user_permission_from_repo_group_exception_when_adding(self): |
|
2045 | 2045 | id_, params = _build_data(self.apikey, |
|
2046 | 2046 | 'revoke_user_permission_from_repo_group', |
|
2047 | 2047 | repogroupid=TEST_REPO_GROUP, |
|
2048 | 2048 | userid=TEST_USER_ADMIN_LOGIN, ) |
|
2049 | 2049 | response = api_call(self, params) |
|
2050 | 2050 | |
|
2051 | 2051 | expected = 'failed to edit permission for user: `%s` in repo group: `%s`' % ( |
|
2052 | 2052 | TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP |
|
2053 | 2053 | ) |
|
2054 | 2054 | self._compare_error(id_, expected, given=response.body) |
|
2055 | 2055 | |
|
2056 | 2056 | @parameterized.expand([ |
|
2057 | 2057 | ('none', 'group.none', 'none'), |
|
2058 | 2058 | ('read', 'group.read', 'none'), |
|
2059 | 2059 | ('write', 'group.write', 'none'), |
|
2060 | 2060 | ('admin', 'group.admin', 'none'), |
|
2061 | 2061 | |
|
2062 | 2062 | ('none', 'group.none', 'all'), |
|
2063 | 2063 | ('read', 'group.read', 'all'), |
|
2064 | 2064 | ('write', 'group.write', 'all'), |
|
2065 | 2065 | ('admin', 'group.admin', 'all'), |
|
2066 | 2066 | |
|
2067 | 2067 | ('none', 'group.none', 'repos'), |
|
2068 | 2068 | ('read', 'group.read', 'repos'), |
|
2069 | 2069 | ('write', 'group.write', 'repos'), |
|
2070 | 2070 | ('admin', 'group.admin', 'repos'), |
|
2071 | 2071 | |
|
2072 | 2072 | ('none', 'group.none', 'groups'), |
|
2073 | 2073 | ('read', 'group.read', 'groups'), |
|
2074 | 2074 | ('write', 'group.write', 'groups'), |
|
2075 | 2075 | ('admin', 'group.admin', 'groups'), |
|
2076 | 2076 | ]) |
|
2077 | 2077 | def test_api_grant_user_group_permission_to_repo_group(self, name, perm, apply_to_children): |
|
2078 | 2078 | id_, params = _build_data(self.apikey, |
|
2079 | 2079 | 'grant_user_group_permission_to_repo_group', |
|
2080 | 2080 | repogroupid=TEST_REPO_GROUP, |
|
2081 | 2081 | usergroupid=TEST_USER_GROUP, |
|
2082 | 2082 | perm=perm, |
|
2083 | 2083 | apply_to_children=apply_to_children,) |
|
2084 | 2084 | response = api_call(self, params) |
|
2085 | 2085 | |
|
2086 | 2086 | ret = { |
|
2087 | 2087 | 'msg': 'Granted perm: `%s` (recursive:%s) for user group: `%s` in repo group: `%s`' % ( |
|
2088 | 2088 | perm, apply_to_children, TEST_USER_GROUP, TEST_REPO_GROUP |
|
2089 | 2089 | ), |
|
2090 | 2090 | 'success': True |
|
2091 | 2091 | } |
|
2092 | 2092 | expected = ret |
|
2093 | 2093 | self._compare_ok(id_, expected, given=response.body) |
|
2094 | 2094 | |
|
2095 | 2095 | @parameterized.expand([ |
|
2096 | 2096 | ('none_fails', 'group.none', 'none', False, False), |
|
2097 | 2097 | ('read_fails', 'group.read', 'none', False, False), |
|
2098 | 2098 | ('write_fails', 'group.write', 'none', False, False), |
|
2099 | 2099 | ('admin_fails', 'group.admin', 'none', False, False), |
|
2100 | 2100 | |
|
2101 | 2101 | # with granted perms |
|
2102 | 2102 | ('none_ok', 'group.none', 'none', True, True), |
|
2103 | 2103 | ('read_ok', 'group.read', 'none', True, True), |
|
2104 | 2104 | ('write_ok', 'group.write', 'none', True, True), |
|
2105 | 2105 | ('admin_ok', 'group.admin', 'none', True, True), |
|
2106 | 2106 | ]) |
|
2107 | 2107 | def test_api_grant_user_group_permission_to_repo_group_by_regular_user( |
|
2108 | 2108 | self, name, perm, apply_to_children, grant_admin, access_ok): |
|
2109 | 2109 | if grant_admin: |
|
2110 | 2110 | RepoGroupModel().grant_user_permission(TEST_REPO_GROUP, |
|
2111 | 2111 | self.TEST_USER_LOGIN, |
|
2112 | 2112 | 'group.admin') |
|
2113 | 2113 | Session().commit() |
|
2114 | 2114 | |
|
2115 | 2115 | id_, params = _build_data(self.apikey_regular, |
|
2116 | 2116 | 'grant_user_group_permission_to_repo_group', |
|
2117 | 2117 | repogroupid=TEST_REPO_GROUP, |
|
2118 | 2118 | usergroupid=TEST_USER_GROUP, |
|
2119 | 2119 | perm=perm, |
|
2120 | 2120 | apply_to_children=apply_to_children,) |
|
2121 | 2121 | response = api_call(self, params) |
|
2122 | 2122 | if access_ok: |
|
2123 | 2123 | ret = { |
|
2124 | 2124 | 'msg': 'Granted perm: `%s` (recursive:%s) for user group: `%s` in repo group: `%s`' % ( |
|
2125 | 2125 | perm, apply_to_children, TEST_USER_GROUP, TEST_REPO_GROUP |
|
2126 | 2126 | ), |
|
2127 | 2127 | 'success': True |
|
2128 | 2128 | } |
|
2129 | 2129 | expected = ret |
|
2130 | 2130 | self._compare_ok(id_, expected, given=response.body) |
|
2131 | 2131 | else: |
|
2132 | 2132 | expected = 'repository group `%s` does not exist' % TEST_REPO_GROUP |
|
2133 | 2133 | self._compare_error(id_, expected, given=response.body) |
|
2134 | 2134 | |
|
2135 | 2135 | def test_api_grant_user_group_permission_to_repo_group_wrong_permission(self): |
|
2136 | 2136 | perm = 'haha.no.permission' |
|
2137 | 2137 | id_, params = _build_data(self.apikey, |
|
2138 | 2138 | 'grant_user_group_permission_to_repo_group', |
|
2139 | 2139 | repogroupid=TEST_REPO_GROUP, |
|
2140 | 2140 | usergroupid=TEST_USER_GROUP, |
|
2141 | 2141 | perm=perm) |
|
2142 | 2142 | response = api_call(self, params) |
|
2143 | 2143 | |
|
2144 | 2144 | expected = 'permission `%s` does not exist' % perm |
|
2145 | 2145 | self._compare_error(id_, expected, given=response.body) |
|
2146 | 2146 | |
|
2147 | 2147 | @mock.patch.object(RepoGroupModel, 'grant_user_group_permission', crash) |
|
2148 | 2148 | def test_api_grant_user_group_permission_exception_when_adding_to_repo_group(self): |
|
2149 | 2149 | perm = 'group.read' |
|
2150 | 2150 | id_, params = _build_data(self.apikey, |
|
2151 | 2151 | 'grant_user_group_permission_to_repo_group', |
|
2152 | 2152 | repogroupid=TEST_REPO_GROUP, |
|
2153 | 2153 | usergroupid=TEST_USER_GROUP, |
|
2154 | 2154 | perm=perm) |
|
2155 | 2155 | response = api_call(self, params) |
|
2156 | 2156 | |
|
2157 | 2157 | expected = 'failed to edit permission for user group: `%s` in repo group: `%s`' % ( |
|
2158 | 2158 | TEST_USER_GROUP, TEST_REPO_GROUP |
|
2159 | 2159 | ) |
|
2160 | 2160 | self._compare_error(id_, expected, given=response.body) |
|
2161 | 2161 | |
|
2162 | 2162 | @parameterized.expand([ |
|
2163 | 2163 | ('none', 'none'), |
|
2164 | 2164 | ('all', 'all'), |
|
2165 | 2165 | ('repos', 'repos'), |
|
2166 | 2166 | ('groups', 'groups'), |
|
2167 | 2167 | ]) |
|
2168 | 2168 | def test_api_revoke_user_group_permission_from_repo_group(self, name, apply_to_children): |
|
2169 | 2169 | RepoGroupModel().grant_user_group_permission(repo_group=TEST_REPO_GROUP, |
|
2170 | 2170 | group_name=TEST_USER_GROUP, |
|
2171 | 2171 | perm='group.read',) |
|
2172 | 2172 | Session().commit() |
|
2173 | 2173 | id_, params = _build_data(self.apikey, |
|
2174 | 2174 | 'revoke_user_group_permission_from_repo_group', |
|
2175 | 2175 | repogroupid=TEST_REPO_GROUP, |
|
2176 | 2176 | usergroupid=TEST_USER_GROUP, |
|
2177 | 2177 | apply_to_children=apply_to_children,) |
|
2178 | 2178 | response = api_call(self, params) |
|
2179 | 2179 | |
|
2180 | 2180 | expected = { |
|
2181 | 2181 | 'msg': 'Revoked perm (recursive:%s) for user group: `%s` in repo group: `%s`' % ( |
|
2182 | 2182 | apply_to_children, TEST_USER_GROUP, TEST_REPO_GROUP |
|
2183 | 2183 | ), |
|
2184 | 2184 | 'success': True |
|
2185 | 2185 | } |
|
2186 | 2186 | self._compare_ok(id_, expected, given=response.body) |
|
2187 | 2187 | |
|
2188 | 2188 | @parameterized.expand([ |
|
2189 | 2189 | ('none', 'none', False, False), |
|
2190 | 2190 | ('all', 'all', False, False), |
|
2191 | 2191 | ('repos', 'repos', False, False), |
|
2192 | 2192 | ('groups', 'groups', False, False), |
|
2193 | 2193 | |
|
2194 | 2194 | # after granting admin rights |
|
2195 | 2195 | ('none', 'none', False, False), |
|
2196 | 2196 | ('all', 'all', False, False), |
|
2197 | 2197 | ('repos', 'repos', False, False), |
|
2198 | 2198 | ('groups', 'groups', False, False), |
|
2199 | 2199 | ]) |
|
2200 | 2200 | def test_api_revoke_user_group_permission_from_repo_group_by_regular_user( |
|
2201 | 2201 | self, name, apply_to_children, grant_admin, access_ok): |
|
2202 | 2202 | RepoGroupModel().grant_user_permission(repo_group=TEST_REPO_GROUP, |
|
2203 | 2203 | user=TEST_USER_ADMIN_LOGIN, |
|
2204 | 2204 | perm='group.read',) |
|
2205 | 2205 | Session().commit() |
|
2206 | 2206 | |
|
2207 | 2207 | if grant_admin: |
|
2208 | 2208 | RepoGroupModel().grant_user_permission(TEST_REPO_GROUP, |
|
2209 | 2209 | self.TEST_USER_LOGIN, |
|
2210 | 2210 | 'group.admin') |
|
2211 | 2211 | Session().commit() |
|
2212 | 2212 | |
|
2213 | 2213 | id_, params = _build_data(self.apikey_regular, |
|
2214 | 2214 | 'revoke_user_group_permission_from_repo_group', |
|
2215 | 2215 | repogroupid=TEST_REPO_GROUP, |
|
2216 | 2216 | usergroupid=TEST_USER_GROUP, |
|
2217 | 2217 | apply_to_children=apply_to_children,) |
|
2218 | 2218 | response = api_call(self, params) |
|
2219 | 2219 | if access_ok: |
|
2220 | 2220 | expected = { |
|
2221 | 2221 | 'msg': 'Revoked perm (recursive:%s) for user group: `%s` in repo group: `%s`' % ( |
|
2222 | 2222 | apply_to_children, TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP |
|
2223 | 2223 | ), |
|
2224 | 2224 | 'success': True |
|
2225 | 2225 | } |
|
2226 | 2226 | self._compare_ok(id_, expected, given=response.body) |
|
2227 | 2227 | else: |
|
2228 | 2228 | expected = 'repository group `%s` does not exist' % TEST_REPO_GROUP |
|
2229 | 2229 | self._compare_error(id_, expected, given=response.body) |
|
2230 | 2230 | |
|
2231 | 2231 | @mock.patch.object(RepoGroupModel, 'revoke_user_group_permission', crash) |
|
2232 | 2232 | def test_api_revoke_user_group_permission_from_repo_group_exception_when_adding(self): |
|
2233 | 2233 | id_, params = _build_data(self.apikey, 'revoke_user_group_permission_from_repo_group', |
|
2234 | 2234 | repogroupid=TEST_REPO_GROUP, |
|
2235 | 2235 | usergroupid=TEST_USER_GROUP,) |
|
2236 | 2236 | response = api_call(self, params) |
|
2237 | 2237 | |
|
2238 | 2238 | expected = 'failed to edit permission for user group: `%s` in repo group: `%s`' % ( |
|
2239 | 2239 | TEST_USER_GROUP, TEST_REPO_GROUP |
|
2240 | 2240 | ) |
|
2241 | 2241 | self._compare_error(id_, expected, given=response.body) |
|
2242 | 2242 | |
|
2243 | 2243 | def test_api_get_gist(self): |
|
2244 | 2244 | gist = fixture.create_gist() |
|
2245 | 2245 | gist_id = gist.gist_access_id |
|
2246 | 2246 | gist_created_on = gist.created_on |
|
2247 | 2247 | id_, params = _build_data(self.apikey, 'get_gist', |
|
2248 | 2248 | gistid=gist_id, ) |
|
2249 | 2249 | response = api_call(self, params) |
|
2250 | 2250 | |
|
2251 | 2251 | expected = { |
|
2252 | 2252 | 'access_id': gist_id, |
|
2253 | 2253 | 'created_on': gist_created_on, |
|
2254 | 2254 | 'description': 'new-gist', |
|
2255 | 2255 | 'expires': -1.0, |
|
2256 | 2256 | 'gist_id': int(gist_id), |
|
2257 | 2257 | 'type': 'public', |
|
2258 | 2258 | 'url': 'http://localhost:80/_admin/gists/%s' % gist_id |
|
2259 | 2259 | } |
|
2260 | 2260 | |
|
2261 | 2261 | self._compare_ok(id_, expected, given=response.body) |
|
2262 | 2262 | |
|
2263 | 2263 | def test_api_get_gist_that_does_not_exist(self): |
|
2264 | 2264 | id_, params = _build_data(self.apikey_regular, 'get_gist', |
|
2265 | 2265 | gistid='12345', ) |
|
2266 | 2266 | response = api_call(self, params) |
|
2267 | 2267 | expected = 'gist `%s` does not exist' % ('12345',) |
|
2268 | 2268 | self._compare_error(id_, expected, given=response.body) |
|
2269 | 2269 | |
|
2270 | 2270 | def test_api_get_gist_private_gist_without_permission(self): |
|
2271 | 2271 | gist = fixture.create_gist() |
|
2272 | 2272 | gist_id = gist.gist_access_id |
|
2273 | 2273 | gist_created_on = gist.created_on |
|
2274 | 2274 | id_, params = _build_data(self.apikey_regular, 'get_gist', |
|
2275 | 2275 | gistid=gist_id, ) |
|
2276 | 2276 | response = api_call(self, params) |
|
2277 | 2277 | |
|
2278 | 2278 | expected = 'gist `%s` does not exist' % gist_id |
|
2279 | 2279 | self._compare_error(id_, expected, given=response.body) |
|
2280 | 2280 | |
|
2281 | 2281 | def test_api_get_gists(self): |
|
2282 | 2282 | fixture.create_gist() |
|
2283 | 2283 | fixture.create_gist() |
|
2284 | 2284 | |
|
2285 | 2285 | id_, params = _build_data(self.apikey, 'get_gists') |
|
2286 | 2286 | response = api_call(self, params) |
|
2287 | 2287 | expected = response.json |
|
2288 | 2288 | self.assertEqual(len(response.json['result']), 2) |
|
2289 | 2289 | #self._compare_ok(id_, expected, given=response.body) |
|
2290 | 2290 | |
|
2291 | 2291 | def test_api_get_gists_regular_user(self): |
|
2292 | 2292 | # by admin |
|
2293 | 2293 | fixture.create_gist() |
|
2294 | 2294 | fixture.create_gist() |
|
2295 | 2295 | |
|
2296 | 2296 | # by reg user |
|
2297 | 2297 | fixture.create_gist(owner=self.TEST_USER_LOGIN) |
|
2298 | 2298 | fixture.create_gist(owner=self.TEST_USER_LOGIN) |
|
2299 | 2299 | fixture.create_gist(owner=self.TEST_USER_LOGIN) |
|
2300 | 2300 | |
|
2301 | 2301 | id_, params = _build_data(self.apikey_regular, 'get_gists') |
|
2302 | 2302 | response = api_call(self, params) |
|
2303 | 2303 | expected = response.json |
|
2304 | 2304 | self.assertEqual(len(response.json['result']), 3) |
|
2305 | 2305 | #self._compare_ok(id_, expected, given=response.body) |
|
2306 | 2306 | |
|
2307 | 2307 | def test_api_get_gists_only_for_regular_user(self): |
|
2308 | 2308 | # by admin |
|
2309 | 2309 | fixture.create_gist() |
|
2310 | 2310 | fixture.create_gist() |
|
2311 | 2311 | |
|
2312 | 2312 | # by reg user |
|
2313 | 2313 | fixture.create_gist(owner=self.TEST_USER_LOGIN) |
|
2314 | 2314 | fixture.create_gist(owner=self.TEST_USER_LOGIN) |
|
2315 | 2315 | fixture.create_gist(owner=self.TEST_USER_LOGIN) |
|
2316 | 2316 | |
|
2317 | 2317 | id_, params = _build_data(self.apikey, 'get_gists', |
|
2318 | 2318 | userid=self.TEST_USER_LOGIN) |
|
2319 | 2319 | response = api_call(self, params) |
|
2320 | 2320 | expected = response.json |
|
2321 | 2321 | self.assertEqual(len(response.json['result']), 3) |
|
2322 | 2322 | #self._compare_ok(id_, expected, given=response.body) |
|
2323 | 2323 | |
|
2324 | 2324 | def test_api_get_gists_regular_user_with_different_userid(self): |
|
2325 | 2325 | id_, params = _build_data(self.apikey_regular, 'get_gists', |
|
2326 | 2326 | userid=TEST_USER_ADMIN_LOGIN) |
|
2327 | 2327 | response = api_call(self, params) |
|
2328 | 2328 | expected = 'userid is not the same as your user' |
|
2329 | 2329 | self._compare_error(id_, expected, given=response.body) |
|
2330 | 2330 | |
|
2331 | 2331 | def test_api_create_gist(self): |
|
2332 | 2332 | id_, params = _build_data(self.apikey_regular, 'create_gist', |
|
2333 | 2333 | lifetime=10, |
|
2334 | 2334 | description='foobar-gist', |
|
2335 | 2335 | gist_type='public', |
|
2336 | 2336 | files={'foobar': {'content': 'foo'}}) |
|
2337 | 2337 | response = api_call(self, params) |
|
2338 | 2338 | response_json = response.json |
|
2339 | 2339 | expected = { |
|
2340 | 2340 | 'gist': { |
|
2341 | 2341 | 'access_id': response_json['result']['gist']['access_id'], |
|
2342 | 2342 | 'created_on': response_json['result']['gist']['created_on'], |
|
2343 | 2343 | 'description': 'foobar-gist', |
|
2344 | 2344 | 'expires': response_json['result']['gist']['expires'], |
|
2345 | 2345 | 'gist_id': response_json['result']['gist']['gist_id'], |
|
2346 | 2346 | 'type': 'public', |
|
2347 | 2347 | 'url': response_json['result']['gist']['url'] |
|
2348 | 2348 | }, |
|
2349 | 2349 | 'msg': 'created new gist' |
|
2350 | 2350 | } |
|
2351 | 2351 | self._compare_ok(id_, expected, given=response.body) |
|
2352 | 2352 | |
|
2353 | 2353 | @mock.patch.object(GistModel, 'create', crash) |
|
2354 | 2354 | def test_api_create_gist_exception_occurred(self): |
|
2355 | 2355 | id_, params = _build_data(self.apikey_regular, 'create_gist', |
|
2356 | 2356 | files={}) |
|
2357 | 2357 | response = api_call(self, params) |
|
2358 | 2358 | expected = 'failed to create gist' |
|
2359 | 2359 | self._compare_error(id_, expected, given=response.body) |
|
2360 | 2360 | |
|
2361 | 2361 | def test_api_delete_gist(self): |
|
2362 | 2362 | gist_id = fixture.create_gist().gist_access_id |
|
2363 | 2363 | id_, params = _build_data(self.apikey, 'delete_gist', |
|
2364 | 2364 | gistid=gist_id) |
|
2365 | 2365 | response = api_call(self, params) |
|
2366 | 2366 | expected = {'gist': None, 'msg': 'deleted gist ID:%s' % gist_id} |
|
2367 | 2367 | self._compare_ok(id_, expected, given=response.body) |
|
2368 | 2368 | |
|
2369 | 2369 | def test_api_delete_gist_regular_user(self): |
|
2370 | 2370 | gist_id = fixture.create_gist(owner=self.TEST_USER_LOGIN).gist_access_id |
|
2371 | 2371 | id_, params = _build_data(self.apikey_regular, 'delete_gist', |
|
2372 | 2372 | gistid=gist_id) |
|
2373 | 2373 | response = api_call(self, params) |
|
2374 | 2374 | expected = {'gist': None, 'msg': 'deleted gist ID:%s' % gist_id} |
|
2375 | 2375 | self._compare_ok(id_, expected, given=response.body) |
|
2376 | 2376 | |
|
2377 | 2377 | def test_api_delete_gist_regular_user_no_permission(self): |
|
2378 | 2378 | gist_id = fixture.create_gist().gist_access_id |
|
2379 | 2379 | id_, params = _build_data(self.apikey_regular, 'delete_gist', |
|
2380 | 2380 | gistid=gist_id) |
|
2381 | 2381 | response = api_call(self, params) |
|
2382 | 2382 | expected = 'gist `%s` does not exist' % (gist_id,) |
|
2383 | 2383 | self._compare_error(id_, expected, given=response.body) |
|
2384 | 2384 | |
|
2385 | 2385 | @mock.patch.object(GistModel, 'delete', crash) |
|
2386 | 2386 | def test_api_delete_gist_exception_occurred(self): |
|
2387 | 2387 | gist_id = fixture.create_gist().gist_access_id |
|
2388 | 2388 | id_, params = _build_data(self.apikey, 'delete_gist', |
|
2389 | 2389 | gistid=gist_id) |
|
2390 | 2390 | response = api_call(self, params) |
|
2391 | 2391 | expected = 'failed to delete gist ID:%s' % (gist_id,) |
|
2392 | 2392 | self._compare_error(id_, expected, given=response.body) |
|
2393 | 2393 | |
|
2394 | 2394 | def test_api_get_ip(self): |
|
2395 | 2395 | id_, params = _build_data(self.apikey, 'get_ip') |
|
2396 | 2396 | response = api_call(self, params) |
|
2397 | 2397 | expected = { |
|
2398 | 2398 | 'server_ip_addr': '0.0.0.0', |
|
2399 | 2399 | 'user_ips': [] |
|
2400 | 2400 | } |
|
2401 | 2401 | self._compare_ok(id_, expected, given=response.body) |
|
2402 | 2402 | |
|
2403 | 2403 | def test_api_get_server_info(self): |
|
2404 | 2404 | id_, params = _build_data(self.apikey, 'get_server_info') |
|
2405 | 2405 | response = api_call(self, params) |
|
2406 | 2406 | expected = Setting.get_server_info() |
|
2407 | 2407 | self._compare_ok(id_, expected, given=response.body) |
@@ -1,604 +1,604 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | |
|
15 | 15 | from sqlalchemy.orm.exc import NoResultFound |
|
16 | 16 | |
|
17 | 17 | from kallithea.tests import * |
|
18 | 18 | from kallithea.tests.fixture import Fixture |
|
19 | 19 | from kallithea.model.db import User, Permission, UserIpMap, UserApiKeys |
|
20 | 20 | from kallithea.lib.auth import check_password |
|
21 | 21 | from kallithea.model.user import UserModel |
|
22 | 22 | from kallithea.model import validators |
|
23 | 23 | from kallithea.lib import helpers as h |
|
24 | 24 | from kallithea.model.meta import Session |
|
25 | 25 | |
|
26 | 26 | fixture = Fixture() |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | class TestAdminUsersController(TestController): |
|
30 | 30 | test_user_1 = 'testme' |
|
31 | 31 | |
|
32 | 32 | @classmethod |
|
33 | 33 | def teardown_class(cls): |
|
34 | 34 | if User.get_by_username(cls.test_user_1): |
|
35 | 35 | UserModel().delete(cls.test_user_1) |
|
36 | 36 | Session().commit() |
|
37 | 37 | |
|
38 | 38 | def test_index(self): |
|
39 | 39 | self.log_user() |
|
40 | 40 | response = self.app.get(url('users')) |
|
41 | 41 | # Test response... |
|
42 | 42 | |
|
43 | 43 | def test_create(self): |
|
44 | 44 | self.log_user() |
|
45 | 45 | username = 'newtestuser' |
|
46 | 46 | password = 'test12' |
|
47 | 47 | password_confirmation = password |
|
48 | 48 | name = 'name' |
|
49 | 49 | lastname = 'lastname' |
|
50 |
email = 'mail@ |
|
|
50 | email = 'mail@example.com' | |
|
51 | 51 | |
|
52 | 52 | response = self.app.post(url('users'), |
|
53 | 53 | {'username': username, |
|
54 | 54 | 'password': password, |
|
55 | 55 | 'password_confirmation': password_confirmation, |
|
56 | 56 | 'firstname': name, |
|
57 | 57 | 'active': True, |
|
58 | 58 | 'lastname': lastname, |
|
59 | 59 | 'extern_name': 'internal', |
|
60 | 60 | 'extern_type': 'internal', |
|
61 | 61 | 'email': email, |
|
62 | 62 | '_authentication_token': self.authentication_token()}) |
|
63 | 63 | |
|
64 | 64 | self.checkSessionFlash(response, '''Created user <a href="/_admin/users/''') |
|
65 | 65 | self.checkSessionFlash(response, '''/edit">%s</a>''' % (username)) |
|
66 | 66 | |
|
67 | 67 | new_user = Session().query(User).\ |
|
68 | 68 | filter(User.username == username).one() |
|
69 | 69 | |
|
70 | 70 | self.assertEqual(new_user.username, username) |
|
71 | 71 | self.assertEqual(check_password(password, new_user.password), True) |
|
72 | 72 | self.assertEqual(new_user.name, name) |
|
73 | 73 | self.assertEqual(new_user.lastname, lastname) |
|
74 | 74 | self.assertEqual(new_user.email, email) |
|
75 | 75 | |
|
76 | 76 | response.follow() |
|
77 | 77 | response = response.follow() |
|
78 | 78 | response.mustcontain("""newtestuser""") |
|
79 | 79 | |
|
80 | 80 | def test_create_err(self): |
|
81 | 81 | self.log_user() |
|
82 | 82 | username = 'new_user' |
|
83 | 83 | password = '' |
|
84 | 84 | name = 'name' |
|
85 | 85 | lastname = 'lastname' |
|
86 | email = 'errmail.com' | |
|
86 | email = 'errmail.example.com' | |
|
87 | 87 | |
|
88 | 88 | response = self.app.post(url('users'), {'username': username, |
|
89 | 89 | 'password': password, |
|
90 | 90 | 'name': name, |
|
91 | 91 | 'active': False, |
|
92 | 92 | 'lastname': lastname, |
|
93 | 93 | 'email': email, |
|
94 | 94 | '_authentication_token': self.authentication_token()}) |
|
95 | 95 | |
|
96 | 96 | msg = validators.ValidUsername(False, {})._messages['system_invalid_username'] |
|
97 | 97 | msg = h.html_escape(msg % {'username': 'new_user'}) |
|
98 | 98 | response.mustcontain("""<span class="error-message">%s</span>""" % msg) |
|
99 | 99 | response.mustcontain("""<span class="error-message">Please enter a value</span>""") |
|
100 | 100 | response.mustcontain("""<span class="error-message">An email address must contain a single @</span>""") |
|
101 | 101 | |
|
102 | 102 | def get_user(): |
|
103 | 103 | Session().query(User).filter(User.username == username).one() |
|
104 | 104 | |
|
105 | 105 | self.assertRaises(NoResultFound, get_user), 'found user in database' |
|
106 | 106 | |
|
107 | 107 | def test_new(self): |
|
108 | 108 | self.log_user() |
|
109 | 109 | response = self.app.get(url('new_user')) |
|
110 | 110 | |
|
111 | 111 | @parameterized.expand( |
|
112 | 112 | [('firstname', {'firstname': 'new_username'}), |
|
113 | 113 | ('lastname', {'lastname': 'new_username'}), |
|
114 | 114 | ('admin', {'admin': True}), |
|
115 | 115 | ('admin', {'admin': False}), |
|
116 | 116 | ('extern_type', {'extern_type': 'ldap'}), |
|
117 | 117 | ('extern_type', {'extern_type': None}), |
|
118 | 118 | ('extern_name', {'extern_name': 'test'}), |
|
119 | 119 | ('extern_name', {'extern_name': None}), |
|
120 | 120 | ('active', {'active': False}), |
|
121 | 121 | ('active', {'active': True}), |
|
122 |
('email', {'email': 'some |
|
|
122 | ('email', {'email': 'someemail@example.com'}), | |
|
123 | 123 | # ('new_password', {'new_password': 'foobar123', |
|
124 | 124 | # 'password_confirmation': 'foobar123'}) |
|
125 | 125 | ]) |
|
126 | 126 | def test_update(self, name, attrs): |
|
127 | 127 | self.log_user() |
|
128 | 128 | usr = fixture.create_user(self.test_user_1, password='qweqwe', |
|
129 | 129 | email='testme@example.com', |
|
130 | 130 | extern_type='internal', |
|
131 | 131 | extern_name=self.test_user_1, |
|
132 | 132 | skip_if_exists=True) |
|
133 | 133 | Session().commit() |
|
134 | 134 | params = usr.get_api_data(True) |
|
135 | 135 | params.update({'password_confirmation': ''}) |
|
136 | 136 | params.update({'new_password': ''}) |
|
137 | 137 | params.update(attrs) |
|
138 | 138 | if name == 'email': |
|
139 | 139 | params['emails'] = [attrs['email']] |
|
140 | 140 | if name == 'extern_type': |
|
141 | 141 | #cannot update this via form, expected value is original one |
|
142 | 142 | params['extern_type'] = "internal" |
|
143 | 143 | if name == 'extern_name': |
|
144 | 144 | #cannot update this via form, expected value is original one |
|
145 | 145 | params['extern_name'] = self.test_user_1 |
|
146 | 146 | # special case since this user is not |
|
147 | 147 | # logged in yet his data is not filled |
|
148 | 148 | # so we use creation data |
|
149 | 149 | |
|
150 | 150 | params.update({'_authentication_token': self.authentication_token()}) |
|
151 | 151 | response = self.app.put(url('user', id=usr.user_id), params) |
|
152 | 152 | self.checkSessionFlash(response, 'User updated successfully') |
|
153 | 153 | params.pop('_authentication_token') |
|
154 | 154 | |
|
155 | 155 | updated_user = User.get_by_username(self.test_user_1) |
|
156 | 156 | updated_params = updated_user.get_api_data(True) |
|
157 | 157 | updated_params.update({'password_confirmation': ''}) |
|
158 | 158 | updated_params.update({'new_password': ''}) |
|
159 | 159 | |
|
160 | 160 | self.assertEqual(params, updated_params) |
|
161 | 161 | |
|
162 | 162 | def test_delete(self): |
|
163 | 163 | self.log_user() |
|
164 | 164 | username = 'newtestuserdeleteme' |
|
165 | 165 | |
|
166 | 166 | fixture.create_user(name=username) |
|
167 | 167 | |
|
168 | 168 | new_user = Session().query(User)\ |
|
169 | 169 | .filter(User.username == username).one() |
|
170 | 170 | response = self.app.post(url('user', id=new_user.user_id), |
|
171 | 171 | params={'_method': 'delete', '_authentication_token': self.authentication_token()}) |
|
172 | 172 | |
|
173 | 173 | self.checkSessionFlash(response, 'Successfully deleted user') |
|
174 | 174 | |
|
175 | 175 | def test_delete_repo_err(self): |
|
176 | 176 | self.log_user() |
|
177 | 177 | username = 'repoerr' |
|
178 | 178 | reponame = 'repoerr_fail' |
|
179 | 179 | |
|
180 | 180 | fixture.create_user(name=username) |
|
181 | 181 | fixture.create_repo(name=reponame, cur_user=username) |
|
182 | 182 | |
|
183 | 183 | new_user = Session().query(User)\ |
|
184 | 184 | .filter(User.username == username).one() |
|
185 | 185 | response = self.app.post(url('user', id=new_user.user_id), |
|
186 | 186 | params={'_method': 'delete', '_authentication_token': self.authentication_token()}) |
|
187 | 187 | self.checkSessionFlash(response, 'User "%s" still ' |
|
188 | 188 | 'owns 1 repositories and cannot be removed. ' |
|
189 | 189 | 'Switch owners or remove those repositories: ' |
|
190 | 190 | '%s' % (username, reponame)) |
|
191 | 191 | |
|
192 | 192 | response = self.app.post(url('delete_repo', repo_name=reponame), |
|
193 | 193 | params={'_method': 'delete', '_authentication_token': self.authentication_token()}) |
|
194 | 194 | self.checkSessionFlash(response, 'Deleted repository %s' % reponame) |
|
195 | 195 | |
|
196 | 196 | response = self.app.post(url('user', id=new_user.user_id), |
|
197 | 197 | params={'_method': 'delete', '_authentication_token': self.authentication_token()}) |
|
198 | 198 | self.checkSessionFlash(response, 'Successfully deleted user') |
|
199 | 199 | |
|
200 | 200 | def test_delete_repo_group_err(self): |
|
201 | 201 | self.log_user() |
|
202 | 202 | username = 'repogrouperr' |
|
203 | 203 | groupname = 'repogroup_fail' |
|
204 | 204 | |
|
205 | 205 | fixture.create_user(name=username) |
|
206 | 206 | fixture.create_repo_group(name=groupname, cur_user=username) |
|
207 | 207 | |
|
208 | 208 | new_user = Session().query(User)\ |
|
209 | 209 | .filter(User.username == username).one() |
|
210 | 210 | response = self.app.post(url('user', id=new_user.user_id), |
|
211 | 211 | params={'_method': 'delete', '_authentication_token': self.authentication_token()}) |
|
212 | 212 | self.checkSessionFlash(response, 'User "%s" still ' |
|
213 | 213 | 'owns 1 repository groups and cannot be removed. ' |
|
214 | 214 | 'Switch owners or remove those repository groups: ' |
|
215 | 215 | '%s' % (username, groupname)) |
|
216 | 216 | |
|
217 | 217 | # Relevant _if_ the user deletion succeeded to make sure we can render groups without owner |
|
218 | 218 | # rg = RepoGroup.get_by_group_name(group_name=groupname) |
|
219 | 219 | # response = self.app.get(url('repos_groups', id=rg.group_id)) |
|
220 | 220 | |
|
221 | 221 | response = self.app.post(url('delete_repo_group', group_name=groupname), |
|
222 | 222 | params={'_method': 'delete', '_authentication_token': self.authentication_token()}) |
|
223 | 223 | self.checkSessionFlash(response, 'Removed repository group %s' % groupname) |
|
224 | 224 | |
|
225 | 225 | response = self.app.post(url('user', id=new_user.user_id), |
|
226 | 226 | params={'_method': 'delete', '_authentication_token': self.authentication_token()}) |
|
227 | 227 | self.checkSessionFlash(response, 'Successfully deleted user') |
|
228 | 228 | |
|
229 | 229 | def test_delete_user_group_err(self): |
|
230 | 230 | self.log_user() |
|
231 | 231 | username = 'usergrouperr' |
|
232 | 232 | groupname = 'usergroup_fail' |
|
233 | 233 | |
|
234 | 234 | fixture.create_user(name=username) |
|
235 | 235 | ug = fixture.create_user_group(name=groupname, cur_user=username) |
|
236 | 236 | |
|
237 | 237 | new_user = Session().query(User)\ |
|
238 | 238 | .filter(User.username == username).one() |
|
239 | 239 | response = self.app.post(url('user', id=new_user.user_id), |
|
240 | 240 | params={'_method': 'delete', '_authentication_token': self.authentication_token()}) |
|
241 | 241 | self.checkSessionFlash(response, 'User "%s" still ' |
|
242 | 242 | 'owns 1 user groups and cannot be removed. ' |
|
243 | 243 | 'Switch owners or remove those user groups: ' |
|
244 | 244 | '%s' % (username, groupname)) |
|
245 | 245 | |
|
246 | 246 | # TODO: why do this fail? |
|
247 | 247 | #response = self.app.delete(url('delete_users_group', id=groupname)) |
|
248 | 248 | #self.checkSessionFlash(response, 'Removed user group %s' % groupname) |
|
249 | 249 | |
|
250 | 250 | fixture.destroy_user_group(ug.users_group_id) |
|
251 | 251 | |
|
252 | 252 | response = self.app.post(url('user', id=new_user.user_id), |
|
253 | 253 | params={'_method': 'delete', '_authentication_token': self.authentication_token()}) |
|
254 | 254 | self.checkSessionFlash(response, 'Successfully deleted user') |
|
255 | 255 | |
|
256 | 256 | def test_show(self): |
|
257 | 257 | response = self.app.get(url('user', id=1)) |
|
258 | 258 | |
|
259 | 259 | def test_edit(self): |
|
260 | 260 | self.log_user() |
|
261 | 261 | user = User.get_by_username(TEST_USER_ADMIN_LOGIN) |
|
262 | 262 | response = self.app.get(url('edit_user', id=user.user_id)) |
|
263 | 263 | |
|
264 | 264 | def test_add_perm_create_repo(self): |
|
265 | 265 | self.log_user() |
|
266 | 266 | perm_none = Permission.get_by_key('hg.create.none') |
|
267 | 267 | perm_create = Permission.get_by_key('hg.create.repository') |
|
268 | 268 | |
|
269 | 269 | user = UserModel().create_or_update(username='dummy', password='qwe', |
|
270 | 270 | email='dummy', firstname='a', |
|
271 | 271 | lastname='b') |
|
272 | 272 | Session().commit() |
|
273 | 273 | uid = user.user_id |
|
274 | 274 | |
|
275 | 275 | try: |
|
276 | 276 | #User should have None permission on creation repository |
|
277 | 277 | self.assertEqual(UserModel().has_perm(user, perm_none), False) |
|
278 | 278 | self.assertEqual(UserModel().has_perm(user, perm_create), False) |
|
279 | 279 | |
|
280 | 280 | response = self.app.post(url('edit_user_perms', id=uid), |
|
281 | 281 | params=dict(_method='put', |
|
282 | 282 | create_repo_perm=True, |
|
283 | 283 | _authentication_token=self.authentication_token())) |
|
284 | 284 | |
|
285 | 285 | perm_none = Permission.get_by_key('hg.create.none') |
|
286 | 286 | perm_create = Permission.get_by_key('hg.create.repository') |
|
287 | 287 | |
|
288 | 288 | #User should have None permission on creation repository |
|
289 | 289 | self.assertEqual(UserModel().has_perm(uid, perm_none), False) |
|
290 | 290 | self.assertEqual(UserModel().has_perm(uid, perm_create), True) |
|
291 | 291 | finally: |
|
292 | 292 | UserModel().delete(uid) |
|
293 | 293 | Session().commit() |
|
294 | 294 | |
|
295 | 295 | def test_revoke_perm_create_repo(self): |
|
296 | 296 | self.log_user() |
|
297 | 297 | perm_none = Permission.get_by_key('hg.create.none') |
|
298 | 298 | perm_create = Permission.get_by_key('hg.create.repository') |
|
299 | 299 | |
|
300 | 300 | user = UserModel().create_or_update(username='dummy', password='qwe', |
|
301 | 301 | email='dummy', firstname='a', |
|
302 | 302 | lastname='b') |
|
303 | 303 | Session().commit() |
|
304 | 304 | uid = user.user_id |
|
305 | 305 | |
|
306 | 306 | try: |
|
307 | 307 | #User should have None permission on creation repository |
|
308 | 308 | self.assertEqual(UserModel().has_perm(user, perm_none), False) |
|
309 | 309 | self.assertEqual(UserModel().has_perm(user, perm_create), False) |
|
310 | 310 | |
|
311 | 311 | response = self.app.post(url('edit_user_perms', id=uid), |
|
312 | 312 | params=dict(_method='put', _authentication_token=self.authentication_token())) |
|
313 | 313 | |
|
314 | 314 | perm_none = Permission.get_by_key('hg.create.none') |
|
315 | 315 | perm_create = Permission.get_by_key('hg.create.repository') |
|
316 | 316 | |
|
317 | 317 | #User should have None permission on creation repository |
|
318 | 318 | self.assertEqual(UserModel().has_perm(uid, perm_none), True) |
|
319 | 319 | self.assertEqual(UserModel().has_perm(uid, perm_create), False) |
|
320 | 320 | finally: |
|
321 | 321 | UserModel().delete(uid) |
|
322 | 322 | Session().commit() |
|
323 | 323 | |
|
324 | 324 | def test_add_perm_fork_repo(self): |
|
325 | 325 | self.log_user() |
|
326 | 326 | perm_none = Permission.get_by_key('hg.fork.none') |
|
327 | 327 | perm_fork = Permission.get_by_key('hg.fork.repository') |
|
328 | 328 | |
|
329 | 329 | user = UserModel().create_or_update(username='dummy', password='qwe', |
|
330 | 330 | email='dummy', firstname='a', |
|
331 | 331 | lastname='b') |
|
332 | 332 | Session().commit() |
|
333 | 333 | uid = user.user_id |
|
334 | 334 | |
|
335 | 335 | try: |
|
336 | 336 | #User should have None permission on creation repository |
|
337 | 337 | self.assertEqual(UserModel().has_perm(user, perm_none), False) |
|
338 | 338 | self.assertEqual(UserModel().has_perm(user, perm_fork), False) |
|
339 | 339 | |
|
340 | 340 | response = self.app.post(url('edit_user_perms', id=uid), |
|
341 | 341 | params=dict(_method='put', |
|
342 | 342 | create_repo_perm=True, |
|
343 | 343 | _authentication_token=self.authentication_token())) |
|
344 | 344 | |
|
345 | 345 | perm_none = Permission.get_by_key('hg.create.none') |
|
346 | 346 | perm_create = Permission.get_by_key('hg.create.repository') |
|
347 | 347 | |
|
348 | 348 | #User should have None permission on creation repository |
|
349 | 349 | self.assertEqual(UserModel().has_perm(uid, perm_none), False) |
|
350 | 350 | self.assertEqual(UserModel().has_perm(uid, perm_create), True) |
|
351 | 351 | finally: |
|
352 | 352 | UserModel().delete(uid) |
|
353 | 353 | Session().commit() |
|
354 | 354 | |
|
355 | 355 | def test_revoke_perm_fork_repo(self): |
|
356 | 356 | self.log_user() |
|
357 | 357 | perm_none = Permission.get_by_key('hg.fork.none') |
|
358 | 358 | perm_fork = Permission.get_by_key('hg.fork.repository') |
|
359 | 359 | |
|
360 | 360 | user = UserModel().create_or_update(username='dummy', password='qwe', |
|
361 | 361 | email='dummy', firstname='a', |
|
362 | 362 | lastname='b') |
|
363 | 363 | Session().commit() |
|
364 | 364 | uid = user.user_id |
|
365 | 365 | |
|
366 | 366 | try: |
|
367 | 367 | #User should have None permission on creation repository |
|
368 | 368 | self.assertEqual(UserModel().has_perm(user, perm_none), False) |
|
369 | 369 | self.assertEqual(UserModel().has_perm(user, perm_fork), False) |
|
370 | 370 | |
|
371 | 371 | response = self.app.post(url('edit_user_perms', id=uid), |
|
372 | 372 | params=dict(_method='put', _authentication_token=self.authentication_token())) |
|
373 | 373 | |
|
374 | 374 | perm_none = Permission.get_by_key('hg.create.none') |
|
375 | 375 | perm_create = Permission.get_by_key('hg.create.repository') |
|
376 | 376 | |
|
377 | 377 | #User should have None permission on creation repository |
|
378 | 378 | self.assertEqual(UserModel().has_perm(uid, perm_none), True) |
|
379 | 379 | self.assertEqual(UserModel().has_perm(uid, perm_create), False) |
|
380 | 380 | finally: |
|
381 | 381 | UserModel().delete(uid) |
|
382 | 382 | Session().commit() |
|
383 | 383 | |
|
384 | 384 | def test_ips(self): |
|
385 | 385 | self.log_user() |
|
386 | 386 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
387 | 387 | response = self.app.get(url('edit_user_ips', id=user.user_id)) |
|
388 | 388 | response.mustcontain('All IP addresses are allowed') |
|
389 | 389 | |
|
390 | 390 | @parameterized.expand([ |
|
391 | 391 | ('127/24', '127.0.0.1/24', '127.0.0.0 - 127.0.0.255', False), |
|
392 | 392 | ('10/32', '10.0.0.10/32', '10.0.0.10 - 10.0.0.10', False), |
|
393 | 393 | ('0/16', '0.0.0.0/16', '0.0.0.0 - 0.0.255.255', False), |
|
394 | 394 | ('0/8', '0.0.0.0/8', '0.0.0.0 - 0.255.255.255', False), |
|
395 | 395 | ('127_bad_mask', '127.0.0.1/99', '127.0.0.1 - 127.0.0.1', True), |
|
396 | 396 | ('127_bad_ip', 'foobar', 'foobar', True), |
|
397 | 397 | ]) |
|
398 | 398 | def test_add_ip(self, test_name, ip, ip_range, failure): |
|
399 | 399 | self.log_user() |
|
400 | 400 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
401 | 401 | user_id = user.user_id |
|
402 | 402 | |
|
403 | 403 | response = self.app.put(url('edit_user_ips', id=user_id), |
|
404 | 404 | params=dict(new_ip=ip, _authentication_token=self.authentication_token())) |
|
405 | 405 | |
|
406 | 406 | if failure: |
|
407 | 407 | self.checkSessionFlash(response, 'Please enter a valid IPv4 or IPv6 address') |
|
408 | 408 | response = self.app.get(url('edit_user_ips', id=user_id)) |
|
409 | 409 | response.mustcontain(no=[ip]) |
|
410 | 410 | response.mustcontain(no=[ip_range]) |
|
411 | 411 | |
|
412 | 412 | else: |
|
413 | 413 | response = self.app.get(url('edit_user_ips', id=user_id)) |
|
414 | 414 | response.mustcontain(ip) |
|
415 | 415 | response.mustcontain(ip_range) |
|
416 | 416 | |
|
417 | 417 | ## cleanup |
|
418 | 418 | for del_ip in UserIpMap.query().filter(UserIpMap.user_id == user_id).all(): |
|
419 | 419 | Session().delete(del_ip) |
|
420 | 420 | Session().commit() |
|
421 | 421 | |
|
422 | 422 | def test_delete_ip(self): |
|
423 | 423 | self.log_user() |
|
424 | 424 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
425 | 425 | user_id = user.user_id |
|
426 | 426 | ip = '127.0.0.1/32' |
|
427 | 427 | ip_range = '127.0.0.1 - 127.0.0.1' |
|
428 | 428 | new_ip = UserModel().add_extra_ip(user_id, ip) |
|
429 | 429 | Session().commit() |
|
430 | 430 | new_ip_id = new_ip.ip_id |
|
431 | 431 | |
|
432 | 432 | response = self.app.get(url('edit_user_ips', id=user_id)) |
|
433 | 433 | response.mustcontain(ip) |
|
434 | 434 | response.mustcontain(ip_range) |
|
435 | 435 | |
|
436 | 436 | self.app.post(url('edit_user_ips', id=user_id), |
|
437 | 437 | params=dict(_method='delete', del_ip_id=new_ip_id, _authentication_token=self.authentication_token())) |
|
438 | 438 | |
|
439 | 439 | response = self.app.get(url('edit_user_ips', id=user_id)) |
|
440 | 440 | response.mustcontain('All IP addresses are allowed') |
|
441 | 441 | response.mustcontain(no=[ip]) |
|
442 | 442 | response.mustcontain(no=[ip_range]) |
|
443 | 443 | |
|
444 | 444 | def test_api_keys(self): |
|
445 | 445 | self.log_user() |
|
446 | 446 | |
|
447 | 447 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
448 | 448 | response = self.app.get(url('edit_user_api_keys', id=user.user_id)) |
|
449 | 449 | response.mustcontain(user.api_key) |
|
450 | 450 | response.mustcontain('Expires: Never') |
|
451 | 451 | |
|
452 | 452 | @parameterized.expand([ |
|
453 | 453 | ('forever', -1), |
|
454 | 454 | ('5mins', 60*5), |
|
455 | 455 | ('30days', 60*60*24*30), |
|
456 | 456 | ]) |
|
457 | 457 | def test_add_api_keys(self, desc, lifetime): |
|
458 | 458 | self.log_user() |
|
459 | 459 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
460 | 460 | user_id = user.user_id |
|
461 | 461 | |
|
462 | 462 | response = self.app.post(url('edit_user_api_keys', id=user_id), |
|
463 | 463 | {'description': desc, 'lifetime': lifetime, '_authentication_token': self.authentication_token()}) |
|
464 | 464 | self.checkSessionFlash(response, 'API key successfully created') |
|
465 | 465 | try: |
|
466 | 466 | response = response.follow() |
|
467 | 467 | user = User.get(user_id) |
|
468 | 468 | for api_key in user.api_keys: |
|
469 | 469 | response.mustcontain(api_key) |
|
470 | 470 | finally: |
|
471 | 471 | for api_key in UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all(): |
|
472 | 472 | Session().delete(api_key) |
|
473 | 473 | Session().commit() |
|
474 | 474 | |
|
475 | 475 | def test_remove_api_key(self): |
|
476 | 476 | self.log_user() |
|
477 | 477 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
478 | 478 | user_id = user.user_id |
|
479 | 479 | |
|
480 | 480 | response = self.app.post(url('edit_user_api_keys', id=user_id), |
|
481 | 481 | {'description': 'desc', 'lifetime': -1, '_authentication_token': self.authentication_token()}) |
|
482 | 482 | self.checkSessionFlash(response, 'API key successfully created') |
|
483 | 483 | response = response.follow() |
|
484 | 484 | |
|
485 | 485 | #now delete our key |
|
486 | 486 | keys = UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all() |
|
487 | 487 | self.assertEqual(1, len(keys)) |
|
488 | 488 | |
|
489 | 489 | response = self.app.post(url('edit_user_api_keys', id=user_id), |
|
490 | 490 | {'_method': 'delete', 'del_api_key': keys[0].api_key, '_authentication_token': self.authentication_token()}) |
|
491 | 491 | self.checkSessionFlash(response, 'API key successfully deleted') |
|
492 | 492 | keys = UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all() |
|
493 | 493 | self.assertEqual(0, len(keys)) |
|
494 | 494 | |
|
495 | 495 | def test_reset_main_api_key(self): |
|
496 | 496 | self.log_user() |
|
497 | 497 | user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
498 | 498 | user_id = user.user_id |
|
499 | 499 | api_key = user.api_key |
|
500 | 500 | response = self.app.get(url('edit_user_api_keys', id=user_id)) |
|
501 | 501 | response.mustcontain(api_key) |
|
502 | 502 | response.mustcontain('Expires: Never') |
|
503 | 503 | |
|
504 | 504 | response = self.app.post(url('edit_user_api_keys', id=user_id), |
|
505 | 505 | {'_method': 'delete', 'del_api_key_builtin': api_key, '_authentication_token': self.authentication_token()}) |
|
506 | 506 | self.checkSessionFlash(response, 'API key successfully reset') |
|
507 | 507 | response = response.follow() |
|
508 | 508 | response.mustcontain(no=[api_key]) |
|
509 | 509 | |
|
510 | 510 | # TODO To be uncommented when pytest is the test runner |
|
511 | 511 | #import pytest |
|
512 | 512 | #from kallithea.controllers.admin.users import UsersController |
|
513 | 513 | #class TestAdminUsersController_unittest(object): |
|
514 | 514 | # """ |
|
515 | 515 | # Unit tests for the users controller |
|
516 | 516 | # These are in a separate class, not deriving from TestController (and thus |
|
517 | 517 | # unittest.TestCase), to be able to benefit from pytest features like |
|
518 | 518 | # monkeypatch. |
|
519 | 519 | # """ |
|
520 | 520 | # def test_get_user_or_raise_if_default(self, monkeypatch): |
|
521 | 521 | # # flash complains about an unexisting session |
|
522 | 522 | # def flash_mock(*args, **kwargs): |
|
523 | 523 | # pass |
|
524 | 524 | # monkeypatch.setattr(h, 'flash', flash_mock) |
|
525 | 525 | # |
|
526 | 526 | # u = UsersController() |
|
527 | 527 | # # a regular user should work correctly |
|
528 | 528 | # user = User.get_by_username(TEST_USER_REGULAR_LOGIN) |
|
529 | 529 | # assert u._get_user_or_raise_if_default(user.user_id) == user |
|
530 | 530 | # # the default user should raise |
|
531 | 531 | # with pytest.raises(HTTPNotFound): |
|
532 | 532 | # u._get_user_or_raise_if_default(User.get_default_user().user_id) |
|
533 | 533 | |
|
534 | 534 | |
|
535 | 535 | class TestAdminUsersControllerForDefaultUser(TestController): |
|
536 | 536 | """ |
|
537 | 537 | Edit actions on the default user are not allowed. |
|
538 | 538 | Validate that they throw a 404 exception. |
|
539 | 539 | """ |
|
540 | 540 | def test_edit_default_user(self): |
|
541 | 541 | self.log_user() |
|
542 | 542 | user = User.get_default_user() |
|
543 | 543 | response = self.app.get(url('edit_user', id=user.user_id), status=404) |
|
544 | 544 | |
|
545 | 545 | def test_edit_advanced_default_user(self): |
|
546 | 546 | self.log_user() |
|
547 | 547 | user = User.get_default_user() |
|
548 | 548 | response = self.app.get(url('edit_user_advanced', id=user.user_id), status=404) |
|
549 | 549 | |
|
550 | 550 | # API keys |
|
551 | 551 | def test_edit_api_keys_default_user(self): |
|
552 | 552 | self.log_user() |
|
553 | 553 | user = User.get_default_user() |
|
554 | 554 | response = self.app.get(url('edit_user_api_keys', id=user.user_id), status=404) |
|
555 | 555 | |
|
556 | 556 | def test_add_api_keys_default_user(self): |
|
557 | 557 | self.log_user() |
|
558 | 558 | user = User.get_default_user() |
|
559 | 559 | response = self.app.post(url('edit_user_api_keys', id=user.user_id), |
|
560 | 560 | {'_method': 'put', '_authentication_token': self.authentication_token()}, status=404) |
|
561 | 561 | |
|
562 | 562 | def test_delete_api_keys_default_user(self): |
|
563 | 563 | self.log_user() |
|
564 | 564 | user = User.get_default_user() |
|
565 | 565 | response = self.app.post(url('edit_user_api_keys', id=user.user_id), |
|
566 | 566 | {'_method': 'delete', '_authentication_token': self.authentication_token()}, status=404) |
|
567 | 567 | |
|
568 | 568 | # Permissions |
|
569 | 569 | def test_edit_perms_default_user(self): |
|
570 | 570 | self.log_user() |
|
571 | 571 | user = User.get_default_user() |
|
572 | 572 | response = self.app.get(url('edit_user_perms', id=user.user_id), status=404) |
|
573 | 573 | |
|
574 | 574 | def test_update_perms_default_user(self): |
|
575 | 575 | self.log_user() |
|
576 | 576 | user = User.get_default_user() |
|
577 | 577 | response = self.app.post(url('edit_user_perms', id=user.user_id), |
|
578 | 578 | {'_method': 'put', '_authentication_token': self.authentication_token()}, status=404) |
|
579 | 579 | |
|
580 | 580 | # Emails |
|
581 | 581 | def test_edit_emails_default_user(self): |
|
582 | 582 | self.log_user() |
|
583 | 583 | user = User.get_default_user() |
|
584 | 584 | response = self.app.get(url('edit_user_emails', id=user.user_id), status=404) |
|
585 | 585 | |
|
586 | 586 | def test_add_emails_default_user(self): |
|
587 | 587 | self.log_user() |
|
588 | 588 | user = User.get_default_user() |
|
589 | 589 | response = self.app.post(url('edit_user_emails', id=user.user_id), |
|
590 | 590 | {'_method': 'put', '_authentication_token': self.authentication_token()}, status=404) |
|
591 | 591 | |
|
592 | 592 | def test_delete_emails_default_user(self): |
|
593 | 593 | self.log_user() |
|
594 | 594 | user = User.get_default_user() |
|
595 | 595 | response = self.app.post(url('edit_user_emails', id=user.user_id), |
|
596 | 596 | {'_method': 'delete', '_authentication_token': self.authentication_token()}, status=404) |
|
597 | 597 | |
|
598 | 598 | # IP addresses |
|
599 | 599 | # Add/delete of IP addresses for the default user is used to maintain |
|
600 | 600 | # the global IP whitelist and thus allowed. Only 'edit' is forbidden. |
|
601 | 601 | def test_edit_ip_default_user(self): |
|
602 | 602 | self.log_user() |
|
603 | 603 | user = User.get_default_user() |
|
604 | 604 | response = self.app.get(url('edit_user_ips', id=user.user_id), status=404) |
@@ -1,762 +1,762 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | import os |
|
3 | 3 | from kallithea.tests import * |
|
4 | 4 | from kallithea.model.db import Repository |
|
5 | 5 | from kallithea.model.meta import Session |
|
6 | 6 | from kallithea.tests.fixture import Fixture |
|
7 | 7 | |
|
8 | 8 | fixture = Fixture() |
|
9 | 9 | |
|
10 | 10 | ARCHIVE_SPECS = { |
|
11 | 11 | '.tar.bz2': ('application/x-bzip2', 'tbz2', ''), |
|
12 | 12 | '.tar.gz': ('application/x-gzip', 'tgz', ''), |
|
13 | 13 | '.zip': ('application/zip', 'zip', ''), |
|
14 | 14 | } |
|
15 | 15 | |
|
16 | 16 | HG_NODE_HISTORY = fixture.load_resource('hg_node_history_response.json') |
|
17 | 17 | GIT_NODE_HISTORY = fixture.load_resource('git_node_history_response.json') |
|
18 | 18 | |
|
19 | 19 | |
|
20 | 20 | def _set_downloads(repo_name, set_to): |
|
21 | 21 | repo = Repository.get_by_repo_name(repo_name) |
|
22 | 22 | repo.enable_downloads = set_to |
|
23 | 23 | Session().add(repo) |
|
24 | 24 | Session().commit() |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | class TestFilesController(TestController): |
|
28 | 28 | |
|
29 | 29 | def test_index(self): |
|
30 | 30 | self.log_user() |
|
31 | 31 | response = self.app.get(url(controller='files', action='index', |
|
32 | 32 | repo_name=HG_REPO, |
|
33 | 33 | revision='tip', |
|
34 | 34 | f_path='/')) |
|
35 | 35 | # Test response... |
|
36 | 36 | response.mustcontain('<a class="browser-dir ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/docs"><i class="icon-folder-open"></i><span>docs</span></a>' % HG_REPO) |
|
37 | 37 | response.mustcontain('<a class="browser-dir ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/vcs"><i class="icon-folder-open"></i><span>vcs</span></a>' % HG_REPO) |
|
38 | 38 | response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/.gitignore"><i class="icon-doc"></i><span>.gitignore</span></a>' % HG_REPO) |
|
39 | 39 | response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/.hgignore"><i class="icon-doc"></i><span>.hgignore</span></a>' % HG_REPO) |
|
40 | 40 | response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/.hgtags"><i class="icon-doc"></i><span>.hgtags</span></a>' % HG_REPO) |
|
41 | 41 | response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/.travis.yml"><i class="icon-doc"></i><span>.travis.yml</span></a>' % HG_REPO) |
|
42 | 42 | response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/MANIFEST.in"><i class="icon-doc"></i><span>MANIFEST.in</span></a>' % HG_REPO) |
|
43 | 43 | response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/README.rst"><i class="icon-doc"></i><span>README.rst</span></a>' % HG_REPO) |
|
44 | 44 | response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/run_test_and_report.sh"><i class="icon-doc"></i><span>run_test_and_report.sh</span></a>' % HG_REPO) |
|
45 | 45 | response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/setup.cfg"><i class="icon-doc"></i><span>setup.cfg</span></a>' % HG_REPO) |
|
46 | 46 | response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/setup.py"><i class="icon-doc"></i><span>setup.py</span></a>' % HG_REPO) |
|
47 | 47 | response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/test_and_report.sh"><i class="icon-doc"></i><span>test_and_report.sh</span></a>' % HG_REPO) |
|
48 | 48 | response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/tox.ini"><i class="icon-doc"></i><span>tox.ini</span></a>' % HG_REPO) |
|
49 | 49 | |
|
50 | 50 | def test_index_revision(self): |
|
51 | 51 | self.log_user() |
|
52 | 52 | |
|
53 | 53 | response = self.app.get( |
|
54 | 54 | url(controller='files', action='index', |
|
55 | 55 | repo_name=HG_REPO, |
|
56 | 56 | revision='7ba66bec8d6dbba14a2155be32408c435c5f4492', |
|
57 | 57 | f_path='/') |
|
58 | 58 | ) |
|
59 | 59 | |
|
60 | 60 | #Test response... |
|
61 | 61 | |
|
62 | 62 | response.mustcontain('<a class="browser-dir ypjax-link" href="/%s/files/7ba66bec8d6dbba14a2155be32408c435c5f4492/docs"><i class="icon-folder-open"></i><span>docs</span></a>' % HG_REPO) |
|
63 | 63 | response.mustcontain('<a class="browser-dir ypjax-link" href="/%s/files/7ba66bec8d6dbba14a2155be32408c435c5f4492/tests"><i class="icon-folder-open"></i><span>tests</span></a>' % HG_REPO) |
|
64 | 64 | response.mustcontain('<a class="browser-file ypjax-link" href="/%s/files/7ba66bec8d6dbba14a2155be32408c435c5f4492/README.rst"><i class="icon-doc"></i><span>README.rst</span></a>' % HG_REPO) |
|
65 | 65 | response.mustcontain('1.1 KiB') |
|
66 | 66 | |
|
67 | 67 | def test_index_different_branch(self): |
|
68 | 68 | self.log_user() |
|
69 | 69 | |
|
70 | 70 | response = self.app.get(url(controller='files', action='index', |
|
71 | 71 | repo_name=HG_REPO, |
|
72 | 72 | revision='97e8b885c04894463c51898e14387d80c30ed1ee', |
|
73 | 73 | f_path='/')) |
|
74 | 74 | |
|
75 | 75 | response.mustcontain("""<option selected="selected" value="97e8b885c04894463c51898e14387d80c30ed1ee">git at 97e8b885c048</option>""") |
|
76 | 76 | |
|
77 | 77 | def test_index_paging(self): |
|
78 | 78 | self.log_user() |
|
79 | 79 | |
|
80 | 80 | for r in [(73, 'a066b25d5df7016b45a41b7e2a78c33b57adc235'), |
|
81 | 81 | (92, 'cc66b61b8455b264a7a8a2d8ddc80fcfc58c221e'), |
|
82 | 82 | (109, '75feb4c33e81186c87eac740cee2447330288412'), |
|
83 | 83 | (1, '3d8f361e72ab303da48d799ff1ac40d5ac37c67e'), |
|
84 | 84 | (0, 'b986218ba1c9b0d6a259fac9b050b1724ed8e545')]: |
|
85 | 85 | |
|
86 | 86 | response = self.app.get(url(controller='files', action='index', |
|
87 | 87 | repo_name=HG_REPO, |
|
88 | 88 | revision=r[1], |
|
89 | 89 | f_path='/')) |
|
90 | 90 | |
|
91 | 91 | response.mustcontain("""@ r%s:%s""" % (r[0], r[1][:12])) |
|
92 | 92 | |
|
93 | 93 | def test_file_source(self): |
|
94 | 94 | self.log_user() |
|
95 | 95 | response = self.app.get(url(controller='files', action='index', |
|
96 | 96 | repo_name=HG_REPO, |
|
97 | 97 | revision='8911406ad776fdd3d0b9932a2e89677e57405a48', |
|
98 | 98 | f_path='vcs/nodes.py')) |
|
99 | 99 | |
|
100 |
response.mustcontain("""<div class="commit">Partially implemented <a class="issue-tracker-link" href="https:// |
|
|
100 | response.mustcontain("""<div class="commit">Partially implemented <a class="issue-tracker-link" href="https://issues.example.com/vcs_test_hg/issue/16">#16</a>. filecontent/commit message/author/node name are safe_unicode now. | |
|
101 | 101 | In addition some other __str__ are unicode as well |
|
102 | 102 | Added test for unicode |
|
103 | 103 | Improved test to clone into uniq repository. |
|
104 | 104 | removed extra unicode conversion in diff.</div> |
|
105 | 105 | """) |
|
106 | 106 | |
|
107 | 107 | response.mustcontain("""<option selected="selected" value="8911406ad776fdd3d0b9932a2e89677e57405a48">default at 8911406ad776</option>""") |
|
108 | 108 | |
|
109 | 109 | def test_file_source_history(self): |
|
110 | 110 | self.log_user() |
|
111 | 111 | response = self.app.get(url(controller='files', action='history', |
|
112 | 112 | repo_name=HG_REPO, |
|
113 | 113 | revision='tip', |
|
114 | 114 | f_path='vcs/nodes.py'), |
|
115 | 115 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'},) |
|
116 | 116 | self.assertEqual(response.body, HG_NODE_HISTORY) |
|
117 | 117 | |
|
118 | 118 | def test_file_source_history_git(self): |
|
119 | 119 | self.log_user() |
|
120 | 120 | response = self.app.get(url(controller='files', action='history', |
|
121 | 121 | repo_name=GIT_REPO, |
|
122 | 122 | revision='master', |
|
123 | 123 | f_path='vcs/nodes.py'), |
|
124 | 124 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'},) |
|
125 | 125 | self.assertEqual(response.body, GIT_NODE_HISTORY) |
|
126 | 126 | |
|
127 | 127 | def test_file_annotation(self): |
|
128 | 128 | self.log_user() |
|
129 | 129 | response = self.app.get(url(controller='files', action='index', |
|
130 | 130 | repo_name=HG_REPO, |
|
131 | 131 | revision='tip', |
|
132 | 132 | f_path='vcs/nodes.py', |
|
133 | 133 | annotate=True)) |
|
134 | 134 | |
|
135 | 135 | response.mustcontain("""r356:25213a5fbb04""") |
|
136 | 136 | |
|
137 | 137 | def test_file_annotation_git(self): |
|
138 | 138 | self.log_user() |
|
139 | 139 | response = self.app.get(url(controller='files', action='index', |
|
140 | 140 | repo_name=GIT_REPO, |
|
141 | 141 | revision='master', |
|
142 | 142 | f_path='vcs/nodes.py', |
|
143 | 143 | annotate=True)) |
|
144 | 144 | response.mustcontain("""r345:c994f0de03b2""") |
|
145 | 145 | |
|
146 | 146 | def test_file_annotation_history(self): |
|
147 | 147 | self.log_user() |
|
148 | 148 | response = self.app.get(url(controller='files', action='history', |
|
149 | 149 | repo_name=HG_REPO, |
|
150 | 150 | revision='tip', |
|
151 | 151 | f_path='vcs/nodes.py', |
|
152 | 152 | annotate=True), |
|
153 | 153 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'}) |
|
154 | 154 | |
|
155 | 155 | self.assertEqual(response.body, HG_NODE_HISTORY) |
|
156 | 156 | |
|
157 | 157 | def test_file_annotation_history_git(self): |
|
158 | 158 | self.log_user() |
|
159 | 159 | response = self.app.get(url(controller='files', action='history', |
|
160 | 160 | repo_name=GIT_REPO, |
|
161 | 161 | revision='master', |
|
162 | 162 | f_path='vcs/nodes.py', |
|
163 | 163 | annotate=True), |
|
164 | 164 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'}) |
|
165 | 165 | |
|
166 | 166 | self.assertEqual(response.body, GIT_NODE_HISTORY) |
|
167 | 167 | |
|
168 | 168 | def test_file_authors(self): |
|
169 | 169 | self.log_user() |
|
170 | 170 | response = self.app.get(url(controller='files', action='authors', |
|
171 | 171 | repo_name=HG_REPO, |
|
172 | 172 | revision='tip', |
|
173 | 173 | f_path='vcs/nodes.py', |
|
174 | 174 | annotate=True)) |
|
175 | 175 | response.mustcontain('Marcin Kuzminski') |
|
176 | 176 | response.mustcontain('Lukasz Balcerzak') |
|
177 | 177 | |
|
178 | 178 | def test_file_authors_git(self): |
|
179 | 179 | self.log_user() |
|
180 | 180 | response = self.app.get(url(controller='files', action='authors', |
|
181 | 181 | repo_name=GIT_REPO, |
|
182 | 182 | revision='master', |
|
183 | 183 | f_path='vcs/nodes.py', |
|
184 | 184 | annotate=True)) |
|
185 | 185 | response.mustcontain('Marcin Kuzminski') |
|
186 | 186 | response.mustcontain('Lukasz Balcerzak') |
|
187 | 187 | |
|
188 | 188 | def test_archival(self): |
|
189 | 189 | self.log_user() |
|
190 | 190 | _set_downloads(HG_REPO, set_to=True) |
|
191 | 191 | for arch_ext, info in ARCHIVE_SPECS.items(): |
|
192 | 192 | short = '27cd5cce30c9%s' % arch_ext |
|
193 | 193 | fname = '27cd5cce30c96924232dffcd24178a07ffeb5dfc%s' % arch_ext |
|
194 | 194 | filename = '%s-%s' % (HG_REPO, short) |
|
195 | 195 | response = self.app.get(url(controller='files', |
|
196 | 196 | action='archivefile', |
|
197 | 197 | repo_name=HG_REPO, |
|
198 | 198 | fname=fname)) |
|
199 | 199 | |
|
200 | 200 | self.assertEqual(response.status, '200 OK') |
|
201 | 201 | heads = [ |
|
202 | 202 | ('Pragma', 'no-cache'), |
|
203 | 203 | ('Cache-Control', 'no-cache'), |
|
204 | 204 | ('Content-Disposition', 'attachment; filename=%s' % filename), |
|
205 | 205 | ('Content-Type', '%s; charset=utf-8' % info[0]), |
|
206 | 206 | ] |
|
207 | 207 | self.assertEqual(response.response._headers.items(), heads) |
|
208 | 208 | |
|
209 | 209 | def test_archival_wrong_ext(self): |
|
210 | 210 | self.log_user() |
|
211 | 211 | _set_downloads(HG_REPO, set_to=True) |
|
212 | 212 | for arch_ext in ['tar', 'rar', 'x', '..ax', '.zipz']: |
|
213 | 213 | fname = '27cd5cce30c96924232dffcd24178a07ffeb5dfc%s' % arch_ext |
|
214 | 214 | |
|
215 | 215 | response = self.app.get(url(controller='files', |
|
216 | 216 | action='archivefile', |
|
217 | 217 | repo_name=HG_REPO, |
|
218 | 218 | fname=fname)) |
|
219 | 219 | response.mustcontain('Unknown archive type') |
|
220 | 220 | |
|
221 | 221 | def test_archival_wrong_revision(self): |
|
222 | 222 | self.log_user() |
|
223 | 223 | _set_downloads(HG_REPO, set_to=True) |
|
224 | 224 | for rev in ['00x000000', 'tar', 'wrong', '@##$@$42413232', '232dffcd']: |
|
225 | 225 | fname = '%s.zip' % rev |
|
226 | 226 | |
|
227 | 227 | response = self.app.get(url(controller='files', |
|
228 | 228 | action='archivefile', |
|
229 | 229 | repo_name=HG_REPO, |
|
230 | 230 | fname=fname)) |
|
231 | 231 | response.mustcontain('Unknown revision') |
|
232 | 232 | |
|
233 | 233 | #========================================================================== |
|
234 | 234 | # RAW FILE |
|
235 | 235 | #========================================================================== |
|
236 | 236 | def test_raw_file_ok(self): |
|
237 | 237 | self.log_user() |
|
238 | 238 | response = self.app.get(url(controller='files', action='rawfile', |
|
239 | 239 | repo_name=HG_REPO, |
|
240 | 240 | revision='27cd5cce30c96924232dffcd24178a07ffeb5dfc', |
|
241 | 241 | f_path='vcs/nodes.py')) |
|
242 | 242 | |
|
243 | 243 | self.assertEqual(response.content_disposition, "attachment; filename=nodes.py") |
|
244 | 244 | self.assertEqual(response.content_type, "text/x-python") |
|
245 | 245 | |
|
246 | 246 | def test_raw_file_wrong_cs(self): |
|
247 | 247 | self.log_user() |
|
248 | 248 | rev = u'ERRORce30c96924232dffcd24178a07ffeb5dfc' |
|
249 | 249 | f_path = 'vcs/nodes.py' |
|
250 | 250 | |
|
251 | 251 | response = self.app.get(url(controller='files', action='rawfile', |
|
252 | 252 | repo_name=HG_REPO, |
|
253 | 253 | revision=rev, |
|
254 | 254 | f_path=f_path), status=404) |
|
255 | 255 | |
|
256 | 256 | msg = """Such revision does not exist for this repository""" |
|
257 | 257 | response.mustcontain(msg) |
|
258 | 258 | |
|
259 | 259 | def test_raw_file_wrong_f_path(self): |
|
260 | 260 | self.log_user() |
|
261 | 261 | rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc' |
|
262 | 262 | f_path = 'vcs/ERRORnodes.py' |
|
263 | 263 | response = self.app.get(url(controller='files', action='rawfile', |
|
264 | 264 | repo_name=HG_REPO, |
|
265 | 265 | revision=rev, |
|
266 | 266 | f_path=f_path), status=404) |
|
267 | 267 | |
|
268 | 268 | msg = "There is no file nor directory at the given path: '%s' at revision %s" % (f_path, rev[:12]) |
|
269 | 269 | response.mustcontain(msg) |
|
270 | 270 | |
|
271 | 271 | #========================================================================== |
|
272 | 272 | # RAW RESPONSE - PLAIN |
|
273 | 273 | #========================================================================== |
|
274 | 274 | def test_raw_ok(self): |
|
275 | 275 | self.log_user() |
|
276 | 276 | response = self.app.get(url(controller='files', action='raw', |
|
277 | 277 | repo_name=HG_REPO, |
|
278 | 278 | revision='27cd5cce30c96924232dffcd24178a07ffeb5dfc', |
|
279 | 279 | f_path='vcs/nodes.py')) |
|
280 | 280 | |
|
281 | 281 | self.assertEqual(response.content_type, "text/plain") |
|
282 | 282 | |
|
283 | 283 | def test_raw_wrong_cs(self): |
|
284 | 284 | self.log_user() |
|
285 | 285 | rev = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc' |
|
286 | 286 | f_path = 'vcs/nodes.py' |
|
287 | 287 | |
|
288 | 288 | response = self.app.get(url(controller='files', action='raw', |
|
289 | 289 | repo_name=HG_REPO, |
|
290 | 290 | revision=rev, |
|
291 | 291 | f_path=f_path), status=404) |
|
292 | 292 | |
|
293 | 293 | msg = """Such revision does not exist for this repository""" |
|
294 | 294 | response.mustcontain(msg) |
|
295 | 295 | |
|
296 | 296 | def test_raw_wrong_f_path(self): |
|
297 | 297 | self.log_user() |
|
298 | 298 | rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc' |
|
299 | 299 | f_path = 'vcs/ERRORnodes.py' |
|
300 | 300 | response = self.app.get(url(controller='files', action='raw', |
|
301 | 301 | repo_name=HG_REPO, |
|
302 | 302 | revision=rev, |
|
303 | 303 | f_path=f_path), status=404) |
|
304 | 304 | msg = "There is no file nor directory at the given path: '%s' at revision %s" % (f_path, rev[:12]) |
|
305 | 305 | response.mustcontain(msg) |
|
306 | 306 | |
|
307 | 307 | def test_ajaxed_files_list(self): |
|
308 | 308 | self.log_user() |
|
309 | 309 | rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc' |
|
310 | 310 | response = self.app.get( |
|
311 | 311 | url('files_nodelist_home', repo_name=HG_REPO, f_path='/', |
|
312 | 312 | revision=rev), |
|
313 | 313 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'}, |
|
314 | 314 | ) |
|
315 | 315 | response.mustcontain("vcs/web/simplevcs/views/repository.py") |
|
316 | 316 | |
|
317 | 317 | # Hg - ADD FILE |
|
318 | 318 | def test_add_file_view_hg(self): |
|
319 | 319 | self.log_user() |
|
320 | 320 | response = self.app.get(url('files_add_home', |
|
321 | 321 | repo_name=HG_REPO, |
|
322 | 322 | revision='tip', f_path='/')) |
|
323 | 323 | |
|
324 | 324 | def test_add_file_into_hg_missing_content(self): |
|
325 | 325 | self.log_user() |
|
326 | 326 | response = self.app.post(url('files_add_home', |
|
327 | 327 | repo_name=HG_REPO, |
|
328 | 328 | revision='tip', f_path='/'), |
|
329 | 329 | params={ |
|
330 | 330 | 'content': '', |
|
331 | 331 | '_authentication_token': self.authentication_token(), |
|
332 | 332 | }, |
|
333 | 333 | status=302) |
|
334 | 334 | |
|
335 | 335 | self.checkSessionFlash(response, 'No content') |
|
336 | 336 | |
|
337 | 337 | def test_add_file_into_hg_missing_filename(self): |
|
338 | 338 | self.log_user() |
|
339 | 339 | response = self.app.post(url('files_add_home', |
|
340 | 340 | repo_name=HG_REPO, |
|
341 | 341 | revision='tip', f_path='/'), |
|
342 | 342 | params={ |
|
343 | 343 | 'content': "foo", |
|
344 | 344 | '_authentication_token': self.authentication_token(), |
|
345 | 345 | }, |
|
346 | 346 | status=302) |
|
347 | 347 | |
|
348 | 348 | self.checkSessionFlash(response, 'No filename') |
|
349 | 349 | |
|
350 | 350 | @parameterized.expand([ |
|
351 | 351 | ('/abs', 'foo'), |
|
352 | 352 | ('../rel', 'foo'), |
|
353 | 353 | ('file/../foo', 'foo'), |
|
354 | 354 | ]) |
|
355 | 355 | def test_add_file_into_hg_bad_filenames(self, location, filename): |
|
356 | 356 | self.log_user() |
|
357 | 357 | response = self.app.post(url('files_add_home', |
|
358 | 358 | repo_name=HG_REPO, |
|
359 | 359 | revision='tip', f_path='/'), |
|
360 | 360 | params={ |
|
361 | 361 | 'content': "foo", |
|
362 | 362 | 'filename': filename, |
|
363 | 363 | 'location': location, |
|
364 | 364 | '_authentication_token': self.authentication_token(), |
|
365 | 365 | }, |
|
366 | 366 | status=302) |
|
367 | 367 | |
|
368 | 368 | self.checkSessionFlash(response, 'Location must be relative path and must not contain .. in path') |
|
369 | 369 | |
|
370 | 370 | @parameterized.expand([ |
|
371 | 371 | (1, '', 'foo.txt'), |
|
372 | 372 | (2, 'dir', 'foo.rst'), |
|
373 | 373 | (3, 'rel/dir', 'foo.bar'), |
|
374 | 374 | ]) |
|
375 | 375 | def test_add_file_into_hg(self, cnt, location, filename): |
|
376 | 376 | self.log_user() |
|
377 | 377 | repo = fixture.create_repo('commit-test-%s' % cnt, repo_type='hg') |
|
378 | 378 | response = self.app.post(url('files_add_home', |
|
379 | 379 | repo_name=repo.repo_name, |
|
380 | 380 | revision='tip', f_path='/'), |
|
381 | 381 | params={ |
|
382 | 382 | 'content': "foo", |
|
383 | 383 | 'filename': filename, |
|
384 | 384 | 'location': location, |
|
385 | 385 | '_authentication_token': self.authentication_token(), |
|
386 | 386 | }, |
|
387 | 387 | status=302) |
|
388 | 388 | try: |
|
389 | 389 | self.checkSessionFlash(response, 'Successfully committed to %s' |
|
390 | 390 | % os.path.join(location, filename)) |
|
391 | 391 | finally: |
|
392 | 392 | fixture.destroy_repo(repo.repo_name) |
|
393 | 393 | |
|
394 | 394 | # Git - add file |
|
395 | 395 | def test_add_file_view_git(self): |
|
396 | 396 | self.log_user() |
|
397 | 397 | response = self.app.get(url('files_add_home', |
|
398 | 398 | repo_name=GIT_REPO, |
|
399 | 399 | revision='tip', f_path='/')) |
|
400 | 400 | |
|
401 | 401 | def test_add_file_into_git_missing_content(self): |
|
402 | 402 | self.log_user() |
|
403 | 403 | response = self.app.post(url('files_add_home', |
|
404 | 404 | repo_name=GIT_REPO, |
|
405 | 405 | revision='tip', f_path='/'), |
|
406 | 406 | params={ |
|
407 | 407 | 'content': '', |
|
408 | 408 | '_authentication_token': self.authentication_token(), |
|
409 | 409 | }, |
|
410 | 410 | status=302) |
|
411 | 411 | self.checkSessionFlash(response, 'No content') |
|
412 | 412 | |
|
413 | 413 | def test_add_file_into_git_missing_filename(self): |
|
414 | 414 | self.log_user() |
|
415 | 415 | response = self.app.post(url('files_add_home', |
|
416 | 416 | repo_name=GIT_REPO, |
|
417 | 417 | revision='tip', f_path='/'), |
|
418 | 418 | params={ |
|
419 | 419 | 'content': "foo", |
|
420 | 420 | '_authentication_token': self.authentication_token(), |
|
421 | 421 | }, |
|
422 | 422 | status=302) |
|
423 | 423 | |
|
424 | 424 | self.checkSessionFlash(response, 'No filename') |
|
425 | 425 | |
|
426 | 426 | @parameterized.expand([ |
|
427 | 427 | ('/abs', 'foo'), |
|
428 | 428 | ('../rel', 'foo'), |
|
429 | 429 | ('file/../foo', 'foo'), |
|
430 | 430 | ]) |
|
431 | 431 | def test_add_file_into_git_bad_filenames(self, location, filename): |
|
432 | 432 | self.log_user() |
|
433 | 433 | response = self.app.post(url('files_add_home', |
|
434 | 434 | repo_name=GIT_REPO, |
|
435 | 435 | revision='tip', f_path='/'), |
|
436 | 436 | params={ |
|
437 | 437 | 'content': "foo", |
|
438 | 438 | 'filename': filename, |
|
439 | 439 | 'location': location, |
|
440 | 440 | '_authentication_token': self.authentication_token(), |
|
441 | 441 | }, |
|
442 | 442 | status=302) |
|
443 | 443 | |
|
444 | 444 | self.checkSessionFlash(response, 'Location must be relative path and must not contain .. in path') |
|
445 | 445 | |
|
446 | 446 | @parameterized.expand([ |
|
447 | 447 | (1, '', 'foo.txt'), |
|
448 | 448 | (2, 'dir', 'foo.rst'), |
|
449 | 449 | (3, 'rel/dir', 'foo.bar'), |
|
450 | 450 | ]) |
|
451 | 451 | def test_add_file_into_git(self, cnt, location, filename): |
|
452 | 452 | self.log_user() |
|
453 | 453 | repo = fixture.create_repo('commit-test-%s' % cnt, repo_type='git') |
|
454 | 454 | response = self.app.post(url('files_add_home', |
|
455 | 455 | repo_name=repo.repo_name, |
|
456 | 456 | revision='tip', f_path='/'), |
|
457 | 457 | params={ |
|
458 | 458 | 'content': "foo", |
|
459 | 459 | 'filename': filename, |
|
460 | 460 | 'location': location, |
|
461 | 461 | '_authentication_token': self.authentication_token(), |
|
462 | 462 | }, |
|
463 | 463 | status=302) |
|
464 | 464 | try: |
|
465 | 465 | self.checkSessionFlash(response, 'Successfully committed to %s' |
|
466 | 466 | % os.path.join(location, filename)) |
|
467 | 467 | finally: |
|
468 | 468 | fixture.destroy_repo(repo.repo_name) |
|
469 | 469 | |
|
470 | 470 | # Hg - EDIT |
|
471 | 471 | def test_edit_file_view_hg(self): |
|
472 | 472 | self.log_user() |
|
473 | 473 | response = self.app.get(url('files_edit_home', |
|
474 | 474 | repo_name=HG_REPO, |
|
475 | 475 | revision='tip', f_path='vcs/nodes.py')) |
|
476 | 476 | |
|
477 | 477 | def test_edit_file_view_not_on_branch_hg(self): |
|
478 | 478 | self.log_user() |
|
479 | 479 | repo = fixture.create_repo('test-edit-repo', repo_type='hg') |
|
480 | 480 | |
|
481 | 481 | ## add file |
|
482 | 482 | location = 'vcs' |
|
483 | 483 | filename = 'nodes.py' |
|
484 | 484 | response = self.app.post(url('files_add_home', |
|
485 | 485 | repo_name=repo.repo_name, |
|
486 | 486 | revision='tip', f_path='/'), |
|
487 | 487 | params={ |
|
488 | 488 | 'content': "def py():\n print 'hello'\n", |
|
489 | 489 | 'filename': filename, |
|
490 | 490 | 'location': location, |
|
491 | 491 | '_authentication_token': self.authentication_token(), |
|
492 | 492 | }, |
|
493 | 493 | status=302) |
|
494 | 494 | response.follow() |
|
495 | 495 | try: |
|
496 | 496 | self.checkSessionFlash(response, 'Successfully committed to %s' |
|
497 | 497 | % os.path.join(location, filename)) |
|
498 | 498 | response = self.app.get(url('files_edit_home', |
|
499 | 499 | repo_name=repo.repo_name, |
|
500 | 500 | revision='tip', f_path='vcs/nodes.py'), |
|
501 | 501 | status=302) |
|
502 | 502 | self.checkSessionFlash(response, |
|
503 | 503 | 'You can only edit files with revision being a valid branch') |
|
504 | 504 | finally: |
|
505 | 505 | fixture.destroy_repo(repo.repo_name) |
|
506 | 506 | |
|
507 | 507 | def test_edit_file_view_commit_changes_hg(self): |
|
508 | 508 | self.log_user() |
|
509 | 509 | repo = fixture.create_repo('test-edit-repo', repo_type='hg') |
|
510 | 510 | |
|
511 | 511 | ## add file |
|
512 | 512 | location = 'vcs' |
|
513 | 513 | filename = 'nodes.py' |
|
514 | 514 | response = self.app.post(url('files_add_home', |
|
515 | 515 | repo_name=repo.repo_name, |
|
516 | 516 | revision='tip', |
|
517 | 517 | f_path='/'), |
|
518 | 518 | params={ |
|
519 | 519 | 'content': "def py():\n print 'hello'\n", |
|
520 | 520 | 'filename': filename, |
|
521 | 521 | 'location': location, |
|
522 | 522 | '_authentication_token': self.authentication_token(), |
|
523 | 523 | }, |
|
524 | 524 | status=302) |
|
525 | 525 | response.follow() |
|
526 | 526 | try: |
|
527 | 527 | self.checkSessionFlash(response, 'Successfully committed to %s' |
|
528 | 528 | % os.path.join(location, filename)) |
|
529 | 529 | response = self.app.post(url('files_edit_home', |
|
530 | 530 | repo_name=repo.repo_name, |
|
531 | 531 | revision=repo.scm_instance.DEFAULT_BRANCH_NAME, |
|
532 | 532 | f_path='vcs/nodes.py'), |
|
533 | 533 | params={ |
|
534 | 534 | 'content': "def py():\n print 'hello world'\n", |
|
535 | 535 | 'message': 'i commited', |
|
536 | 536 | '_authentication_token': self.authentication_token(), |
|
537 | 537 | }, |
|
538 | 538 | status=302) |
|
539 | 539 | self.checkSessionFlash(response, |
|
540 | 540 | 'Successfully committed to vcs/nodes.py') |
|
541 | 541 | finally: |
|
542 | 542 | fixture.destroy_repo(repo.repo_name) |
|
543 | 543 | |
|
544 | 544 | # Git - edit |
|
545 | 545 | def test_edit_file_view_git(self): |
|
546 | 546 | self.log_user() |
|
547 | 547 | response = self.app.get(url('files_edit_home', |
|
548 | 548 | repo_name=GIT_REPO, |
|
549 | 549 | revision='tip', f_path='vcs/nodes.py')) |
|
550 | 550 | |
|
551 | 551 | def test_edit_file_view_not_on_branch_git(self): |
|
552 | 552 | self.log_user() |
|
553 | 553 | repo = fixture.create_repo('test-edit-repo', repo_type='git') |
|
554 | 554 | |
|
555 | 555 | ## add file |
|
556 | 556 | location = 'vcs' |
|
557 | 557 | filename = 'nodes.py' |
|
558 | 558 | response = self.app.post(url('files_add_home', |
|
559 | 559 | repo_name=repo.repo_name, |
|
560 | 560 | revision='tip', f_path='/'), |
|
561 | 561 | params={ |
|
562 | 562 | 'content': "def py():\n print 'hello'\n", |
|
563 | 563 | 'filename': filename, |
|
564 | 564 | 'location': location, |
|
565 | 565 | '_authentication_token': self.authentication_token(), |
|
566 | 566 | }, |
|
567 | 567 | status=302) |
|
568 | 568 | response.follow() |
|
569 | 569 | try: |
|
570 | 570 | self.checkSessionFlash(response, 'Successfully committed to %s' |
|
571 | 571 | % os.path.join(location, filename)) |
|
572 | 572 | response = self.app.get(url('files_edit_home', |
|
573 | 573 | repo_name=repo.repo_name, |
|
574 | 574 | revision='tip', f_path='vcs/nodes.py'), |
|
575 | 575 | status=302) |
|
576 | 576 | self.checkSessionFlash(response, |
|
577 | 577 | 'You can only edit files with revision being a valid branch') |
|
578 | 578 | finally: |
|
579 | 579 | fixture.destroy_repo(repo.repo_name) |
|
580 | 580 | |
|
581 | 581 | def test_edit_file_view_commit_changes_git(self): |
|
582 | 582 | self.log_user() |
|
583 | 583 | repo = fixture.create_repo('test-edit-repo', repo_type='git') |
|
584 | 584 | |
|
585 | 585 | ## add file |
|
586 | 586 | location = 'vcs' |
|
587 | 587 | filename = 'nodes.py' |
|
588 | 588 | response = self.app.post(url('files_add_home', |
|
589 | 589 | repo_name=repo.repo_name, |
|
590 | 590 | revision='tip', |
|
591 | 591 | f_path='/'), |
|
592 | 592 | params={ |
|
593 | 593 | 'content': "def py():\n print 'hello'\n", |
|
594 | 594 | 'filename': filename, |
|
595 | 595 | 'location': location, |
|
596 | 596 | '_authentication_token': self.authentication_token(), |
|
597 | 597 | }, |
|
598 | 598 | status=302) |
|
599 | 599 | response.follow() |
|
600 | 600 | try: |
|
601 | 601 | self.checkSessionFlash(response, 'Successfully committed to %s' |
|
602 | 602 | % os.path.join(location, filename)) |
|
603 | 603 | response = self.app.post(url('files_edit_home', |
|
604 | 604 | repo_name=repo.repo_name, |
|
605 | 605 | revision=repo.scm_instance.DEFAULT_BRANCH_NAME, |
|
606 | 606 | f_path='vcs/nodes.py'), |
|
607 | 607 | params={ |
|
608 | 608 | 'content': "def py():\n print 'hello world'\n", |
|
609 | 609 | 'message': 'i commited', |
|
610 | 610 | '_authentication_token': self.authentication_token(), |
|
611 | 611 | }, |
|
612 | 612 | status=302) |
|
613 | 613 | self.checkSessionFlash(response, |
|
614 | 614 | 'Successfully committed to vcs/nodes.py') |
|
615 | 615 | finally: |
|
616 | 616 | fixture.destroy_repo(repo.repo_name) |
|
617 | 617 | |
|
618 | 618 | # Hg - delete |
|
619 | 619 | def test_delete_file_view_hg(self): |
|
620 | 620 | self.log_user() |
|
621 | 621 | response = self.app.get(url('files_delete_home', |
|
622 | 622 | repo_name=HG_REPO, |
|
623 | 623 | revision='tip', f_path='vcs/nodes.py')) |
|
624 | 624 | |
|
625 | 625 | def test_delete_file_view_not_on_branch_hg(self): |
|
626 | 626 | self.log_user() |
|
627 | 627 | repo = fixture.create_repo('test-delete-repo', repo_type='hg') |
|
628 | 628 | |
|
629 | 629 | ## add file |
|
630 | 630 | location = 'vcs' |
|
631 | 631 | filename = 'nodes.py' |
|
632 | 632 | response = self.app.post(url('files_add_home', |
|
633 | 633 | repo_name=repo.repo_name, |
|
634 | 634 | revision='tip', f_path='/'), |
|
635 | 635 | params={ |
|
636 | 636 | 'content': "def py():\n print 'hello'\n", |
|
637 | 637 | 'filename': filename, |
|
638 | 638 | 'location': location, |
|
639 | 639 | '_authentication_token': self.authentication_token(), |
|
640 | 640 | }, |
|
641 | 641 | status=302) |
|
642 | 642 | response.follow() |
|
643 | 643 | try: |
|
644 | 644 | self.checkSessionFlash(response, 'Successfully committed to %s' |
|
645 | 645 | % os.path.join(location, filename)) |
|
646 | 646 | response = self.app.get(url('files_delete_home', |
|
647 | 647 | repo_name=repo.repo_name, |
|
648 | 648 | revision='tip', f_path='vcs/nodes.py'), |
|
649 | 649 | status=302) |
|
650 | 650 | self.checkSessionFlash(response, |
|
651 | 651 | 'You can only delete files with revision being a valid branch') |
|
652 | 652 | finally: |
|
653 | 653 | fixture.destroy_repo(repo.repo_name) |
|
654 | 654 | |
|
655 | 655 | def test_delete_file_view_commit_changes_hg(self): |
|
656 | 656 | self.log_user() |
|
657 | 657 | repo = fixture.create_repo('test-delete-repo', repo_type='hg') |
|
658 | 658 | |
|
659 | 659 | ## add file |
|
660 | 660 | location = 'vcs' |
|
661 | 661 | filename = 'nodes.py' |
|
662 | 662 | response = self.app.post(url('files_add_home', |
|
663 | 663 | repo_name=repo.repo_name, |
|
664 | 664 | revision='tip', |
|
665 | 665 | f_path='/'), |
|
666 | 666 | params={ |
|
667 | 667 | 'content': "def py():\n print 'hello'\n", |
|
668 | 668 | 'filename': filename, |
|
669 | 669 | 'location': location, |
|
670 | 670 | '_authentication_token': self.authentication_token(), |
|
671 | 671 | }, |
|
672 | 672 | status=302) |
|
673 | 673 | response.follow() |
|
674 | 674 | try: |
|
675 | 675 | self.checkSessionFlash(response, 'Successfully committed to %s' |
|
676 | 676 | % os.path.join(location, filename)) |
|
677 | 677 | response = self.app.post(url('files_delete_home', |
|
678 | 678 | repo_name=repo.repo_name, |
|
679 | 679 | revision=repo.scm_instance.DEFAULT_BRANCH_NAME, |
|
680 | 680 | f_path='vcs/nodes.py'), |
|
681 | 681 | params={ |
|
682 | 682 | 'message': 'i commited', |
|
683 | 683 | '_authentication_token': self.authentication_token(), |
|
684 | 684 | }, |
|
685 | 685 | status=302) |
|
686 | 686 | self.checkSessionFlash(response, |
|
687 | 687 | 'Successfully deleted file vcs/nodes.py') |
|
688 | 688 | finally: |
|
689 | 689 | fixture.destroy_repo(repo.repo_name) |
|
690 | 690 | |
|
691 | 691 | # Git - delete |
|
692 | 692 | def test_delete_file_view_git(self): |
|
693 | 693 | self.log_user() |
|
694 | 694 | response = self.app.get(url('files_delete_home', |
|
695 | 695 | repo_name=HG_REPO, |
|
696 | 696 | revision='tip', f_path='vcs/nodes.py')) |
|
697 | 697 | |
|
698 | 698 | def test_delete_file_view_not_on_branch_git(self): |
|
699 | 699 | self.log_user() |
|
700 | 700 | repo = fixture.create_repo('test-delete-repo', repo_type='git') |
|
701 | 701 | |
|
702 | 702 | ## add file |
|
703 | 703 | location = 'vcs' |
|
704 | 704 | filename = 'nodes.py' |
|
705 | 705 | response = self.app.post(url('files_add_home', |
|
706 | 706 | repo_name=repo.repo_name, |
|
707 | 707 | revision='tip', f_path='/'), |
|
708 | 708 | params={ |
|
709 | 709 | 'content': "def py():\n print 'hello'\n", |
|
710 | 710 | 'filename': filename, |
|
711 | 711 | 'location': location, |
|
712 | 712 | '_authentication_token': self.authentication_token(), |
|
713 | 713 | }, |
|
714 | 714 | status=302) |
|
715 | 715 | response.follow() |
|
716 | 716 | try: |
|
717 | 717 | self.checkSessionFlash(response, 'Successfully committed to %s' |
|
718 | 718 | % os.path.join(location, filename)) |
|
719 | 719 | response = self.app.get(url('files_delete_home', |
|
720 | 720 | repo_name=repo.repo_name, |
|
721 | 721 | revision='tip', f_path='vcs/nodes.py'), |
|
722 | 722 | status=302) |
|
723 | 723 | self.checkSessionFlash(response, |
|
724 | 724 | 'You can only delete files with revision being a valid branch') |
|
725 | 725 | finally: |
|
726 | 726 | fixture.destroy_repo(repo.repo_name) |
|
727 | 727 | |
|
728 | 728 | def test_delete_file_view_commit_changes_git(self): |
|
729 | 729 | self.log_user() |
|
730 | 730 | repo = fixture.create_repo('test-delete-repo', repo_type='git') |
|
731 | 731 | |
|
732 | 732 | ## add file |
|
733 | 733 | location = 'vcs' |
|
734 | 734 | filename = 'nodes.py' |
|
735 | 735 | response = self.app.post(url('files_add_home', |
|
736 | 736 | repo_name=repo.repo_name, |
|
737 | 737 | revision='tip', |
|
738 | 738 | f_path='/'), |
|
739 | 739 | params={ |
|
740 | 740 | 'content': "def py():\n print 'hello'\n", |
|
741 | 741 | 'filename': filename, |
|
742 | 742 | 'location': location, |
|
743 | 743 | '_authentication_token': self.authentication_token(), |
|
744 | 744 | }, |
|
745 | 745 | status=302) |
|
746 | 746 | response.follow() |
|
747 | 747 | try: |
|
748 | 748 | self.checkSessionFlash(response, 'Successfully committed to %s' |
|
749 | 749 | % os.path.join(location, filename)) |
|
750 | 750 | response = self.app.post(url('files_delete_home', |
|
751 | 751 | repo_name=repo.repo_name, |
|
752 | 752 | revision=repo.scm_instance.DEFAULT_BRANCH_NAME, |
|
753 | 753 | f_path='vcs/nodes.py'), |
|
754 | 754 | params={ |
|
755 | 755 | 'message': 'i commited', |
|
756 | 756 | '_authentication_token': self.authentication_token(), |
|
757 | 757 | }, |
|
758 | 758 | status=302) |
|
759 | 759 | self.checkSessionFlash(response, |
|
760 | 760 | 'Successfully deleted file vcs/nodes.py') |
|
761 | 761 | finally: |
|
762 | 762 | fixture.destroy_repo(repo.repo_name) |
@@ -1,500 +1,500 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | import re |
|
3 | 3 | import time |
|
4 | 4 | |
|
5 | 5 | import mock |
|
6 | 6 | |
|
7 | 7 | from kallithea.tests import * |
|
8 | 8 | from kallithea.tests.fixture import Fixture |
|
9 | 9 | from kallithea.lib.utils2 import generate_api_key |
|
10 | 10 | from kallithea.lib.auth import check_password |
|
11 | 11 | from kallithea.lib import helpers as h |
|
12 | 12 | from kallithea.model.api_key import ApiKeyModel |
|
13 | 13 | from kallithea.model import validators |
|
14 | 14 | from kallithea.model.db import User, Notification |
|
15 | 15 | from kallithea.model.meta import Session |
|
16 | 16 | from kallithea.model.user import UserModel |
|
17 | 17 | |
|
18 | 18 | fixture = Fixture() |
|
19 | 19 | |
|
20 | 20 | |
|
21 | 21 | class TestLoginController(TestController): |
|
22 | 22 | def setUp(self): |
|
23 | 23 | self.remove_all_notifications() |
|
24 | 24 | self.assertEqual(Notification.query().all(), []) |
|
25 | 25 | |
|
26 | 26 | def test_index(self): |
|
27 | 27 | response = self.app.get(url(controller='login', action='index')) |
|
28 | 28 | self.assertEqual(response.status, '200 OK') |
|
29 | 29 | # Test response... |
|
30 | 30 | |
|
31 | 31 | def test_login_admin_ok(self): |
|
32 | 32 | response = self.app.post(url(controller='login', action='index'), |
|
33 | 33 | {'username': TEST_USER_ADMIN_LOGIN, |
|
34 | 34 | 'password': TEST_USER_ADMIN_PASS}) |
|
35 | 35 | self.assertEqual(response.status, '302 Found') |
|
36 | 36 | self.assert_authenticated_user(response, TEST_USER_ADMIN_LOGIN) |
|
37 | 37 | |
|
38 | 38 | response = response.follow() |
|
39 | 39 | response.mustcontain('/%s' % HG_REPO) |
|
40 | 40 | |
|
41 | 41 | def test_login_regular_ok(self): |
|
42 | 42 | response = self.app.post(url(controller='login', action='index'), |
|
43 | 43 | {'username': TEST_USER_REGULAR_LOGIN, |
|
44 | 44 | 'password': TEST_USER_REGULAR_PASS}) |
|
45 | 45 | |
|
46 | 46 | self.assertEqual(response.status, '302 Found') |
|
47 | 47 | self.assert_authenticated_user(response, TEST_USER_REGULAR_LOGIN) |
|
48 | 48 | |
|
49 | 49 | response = response.follow() |
|
50 | 50 | response.mustcontain('/%s' % HG_REPO) |
|
51 | 51 | |
|
52 | 52 | def test_login_ok_came_from(self): |
|
53 | 53 | test_came_from = '/_admin/users' |
|
54 | 54 | response = self.app.post(url(controller='login', action='index', |
|
55 | 55 | came_from=test_came_from), |
|
56 | 56 | {'username': TEST_USER_ADMIN_LOGIN, |
|
57 | 57 | 'password': TEST_USER_ADMIN_PASS}) |
|
58 | 58 | self.assertEqual(response.status, '302 Found') |
|
59 | 59 | response = response.follow() |
|
60 | 60 | |
|
61 | 61 | self.assertEqual(response.status, '200 OK') |
|
62 | 62 | response.mustcontain('Users Administration') |
|
63 | 63 | |
|
64 | 64 | def test_login_do_not_remember(self): |
|
65 | 65 | response = self.app.post(url(controller='login', action='index'), |
|
66 | 66 | {'username': TEST_USER_REGULAR_LOGIN, |
|
67 | 67 | 'password': TEST_USER_REGULAR_PASS, |
|
68 | 68 | 'remember': False}) |
|
69 | 69 | |
|
70 | 70 | self.assertIn('Set-Cookie', response.headers) |
|
71 | 71 | for cookie in response.headers.getall('Set-Cookie'): |
|
72 | 72 | self.assertFalse(re.search(r';\s+(Max-Age|Expires)=', cookie, re.IGNORECASE), |
|
73 | 73 | 'Cookie %r has expiration date, but should be a session cookie' % cookie) |
|
74 | 74 | |
|
75 | 75 | def test_login_remember(self): |
|
76 | 76 | response = self.app.post(url(controller='login', action='index'), |
|
77 | 77 | {'username': TEST_USER_REGULAR_LOGIN, |
|
78 | 78 | 'password': TEST_USER_REGULAR_PASS, |
|
79 | 79 | 'remember': True}) |
|
80 | 80 | |
|
81 | 81 | self.assertIn('Set-Cookie', response.headers) |
|
82 | 82 | for cookie in response.headers.getall('Set-Cookie'): |
|
83 | 83 | self.assertTrue(re.search(r';\s+(Max-Age|Expires)=', cookie, re.IGNORECASE), |
|
84 | 84 | 'Cookie %r should have expiration date, but is a session cookie' % cookie) |
|
85 | 85 | |
|
86 | 86 | def test_logout(self): |
|
87 | 87 | response = self.app.post(url(controller='login', action='index'), |
|
88 | 88 | {'username': TEST_USER_REGULAR_LOGIN, |
|
89 | 89 | 'password': TEST_USER_REGULAR_PASS}) |
|
90 | 90 | |
|
91 | 91 | # Verify that a login session has been established. |
|
92 | 92 | response = self.app.get(url(controller='login', action='index')) |
|
93 | 93 | response = response.follow() |
|
94 | 94 | self.assertIn('authuser', response.session) |
|
95 | 95 | |
|
96 | 96 | response.click('Log Out') |
|
97 | 97 | |
|
98 | 98 | # Verify that the login session has been terminated. |
|
99 | 99 | response = self.app.get(url(controller='login', action='index')) |
|
100 | 100 | self.assertNotIn('authuser', response.session) |
|
101 | 101 | |
|
102 | 102 | @parameterized.expand([ |
|
103 | 103 | ('data:text/html,<script>window.alert("xss")</script>',), |
|
104 | 104 | ('mailto:test@example.com',), |
|
105 | 105 | ('file:///etc/passwd',), |
|
106 |
('ftp:// |
|
|
107 |
('http://other. |
|
|
106 | ('ftp://ftp.example.com',), | |
|
107 | ('http://other.example.com/bl%C3%A5b%C3%A6rgr%C3%B8d',), | |
|
108 | 108 | ]) |
|
109 | 109 | def test_login_bad_came_froms(self, url_came_from): |
|
110 | 110 | response = self.app.post(url(controller='login', action='index', |
|
111 | 111 | came_from=url_came_from), |
|
112 | 112 | {'username': TEST_USER_ADMIN_LOGIN, |
|
113 | 113 | 'password': TEST_USER_ADMIN_PASS}) |
|
114 | 114 | self.assertEqual(response.status, '302 Found') |
|
115 | 115 | self.assertEqual(response._environ['paste.testing_variables'] |
|
116 | 116 | ['tmpl_context'].came_from, '/') |
|
117 | 117 | response = response.follow() |
|
118 | 118 | |
|
119 | 119 | self.assertEqual(response.status, '200 OK') |
|
120 | 120 | |
|
121 | 121 | def test_login_short_password(self): |
|
122 | 122 | response = self.app.post(url(controller='login', action='index'), |
|
123 | 123 | {'username': TEST_USER_ADMIN_LOGIN, |
|
124 | 124 | 'password': 'as'}) |
|
125 | 125 | self.assertEqual(response.status, '200 OK') |
|
126 | 126 | |
|
127 | 127 | response.mustcontain('Enter 3 characters or more') |
|
128 | 128 | |
|
129 | 129 | def test_login_wrong_username_password(self): |
|
130 | 130 | response = self.app.post(url(controller='login', action='index'), |
|
131 | 131 | {'username': 'error', |
|
132 | 132 | 'password': 'test12'}) |
|
133 | 133 | |
|
134 | 134 | response.mustcontain('Invalid username or password') |
|
135 | 135 | |
|
136 | 136 | # verify that get arguments are correctly passed along login redirection |
|
137 | 137 | |
|
138 | 138 | @parameterized.expand([ |
|
139 | 139 | ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')), |
|
140 | 140 | ({'blue': u'blå'.encode('utf-8'), 'green':u'grøn'}, |
|
141 | 141 | ('blue=bl%C3%A5', 'green=gr%C3%B8n')), |
|
142 | 142 | ]) |
|
143 | 143 | def test_redirection_to_login_form_preserves_get_args(self, args, args_encoded): |
|
144 | 144 | with fixture.anon_access(False): |
|
145 | 145 | response = self.app.get(url(controller='summary', action='index', |
|
146 | 146 | repo_name=HG_REPO, |
|
147 | 147 | **args)) |
|
148 | 148 | self.assertEqual(response.status, '302 Found') |
|
149 | 149 | for encoded in args_encoded: |
|
150 | 150 | self.assertIn(encoded, response.location) |
|
151 | 151 | |
|
152 | 152 | @parameterized.expand([ |
|
153 | 153 | ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')), |
|
154 | 154 | ({'blue': u'blå'.encode('utf-8'), 'green':u'grøn'}, |
|
155 | 155 | ('blue=bl%C3%A5', 'green=gr%C3%B8n')), |
|
156 | 156 | ]) |
|
157 | 157 | def test_login_form_preserves_get_args(self, args, args_encoded): |
|
158 | 158 | response = self.app.get(url(controller='login', action='index', |
|
159 | 159 | came_from = '/_admin/users', |
|
160 | 160 | **args)) |
|
161 | 161 | for encoded in args_encoded: |
|
162 | 162 | self.assertIn(encoded, response.form.action) |
|
163 | 163 | |
|
164 | 164 | @parameterized.expand([ |
|
165 | 165 | ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')), |
|
166 | 166 | ({'blue': u'blå'.encode('utf-8'), 'green':u'grøn'}, |
|
167 | 167 | ('blue=bl%C3%A5', 'green=gr%C3%B8n')), |
|
168 | 168 | ]) |
|
169 | 169 | def test_redirection_after_successful_login_preserves_get_args(self, args, args_encoded): |
|
170 | 170 | response = self.app.post(url(controller='login', action='index', |
|
171 | 171 | came_from = '/_admin/users', |
|
172 | 172 | **args), |
|
173 | 173 | {'username': TEST_USER_ADMIN_LOGIN, |
|
174 | 174 | 'password': TEST_USER_ADMIN_PASS}) |
|
175 | 175 | self.assertEqual(response.status, '302 Found') |
|
176 | 176 | for encoded in args_encoded: |
|
177 | 177 | self.assertIn(encoded, response.location) |
|
178 | 178 | |
|
179 | 179 | @parameterized.expand([ |
|
180 | 180 | ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')), |
|
181 | 181 | ({'blue': u'blå'.encode('utf-8'), 'green':u'grøn'}, |
|
182 | 182 | ('blue=bl%C3%A5', 'green=gr%C3%B8n')), |
|
183 | 183 | ]) |
|
184 | 184 | def test_login_form_after_incorrect_login_preserves_get_args(self, args, args_encoded): |
|
185 | 185 | response = self.app.post(url(controller='login', action='index', |
|
186 | 186 | came_from = '/_admin/users', |
|
187 | 187 | **args), |
|
188 | 188 | {'username': 'error', |
|
189 | 189 | 'password': 'test12'}) |
|
190 | 190 | |
|
191 | 191 | response.mustcontain('Invalid username or password') |
|
192 | 192 | for encoded in args_encoded: |
|
193 | 193 | self.assertIn(encoded, response.form.action) |
|
194 | 194 | |
|
195 | 195 | #========================================================================== |
|
196 | 196 | # REGISTRATIONS |
|
197 | 197 | #========================================================================== |
|
198 | 198 | def test_register(self): |
|
199 | 199 | response = self.app.get(url(controller='login', action='register')) |
|
200 | 200 | response.mustcontain('Sign Up') |
|
201 | 201 | |
|
202 | 202 | def test_register_err_same_username(self): |
|
203 | 203 | uname = TEST_USER_ADMIN_LOGIN |
|
204 | 204 | response = self.app.post(url(controller='login', action='register'), |
|
205 | 205 | {'username': uname, |
|
206 | 206 | 'password': 'test12', |
|
207 | 207 | 'password_confirmation': 'test12', |
|
208 |
'email': 'goodmail@ |
|
|
208 | 'email': 'goodmail@example.com', | |
|
209 | 209 | 'firstname': 'test', |
|
210 | 210 | 'lastname': 'test'}) |
|
211 | 211 | |
|
212 | 212 | msg = validators.ValidUsername()._messages['username_exists'] |
|
213 | 213 | msg = h.html_escape(msg % {'username': uname}) |
|
214 | 214 | response.mustcontain(msg) |
|
215 | 215 | |
|
216 | 216 | def test_register_err_same_email(self): |
|
217 | 217 | response = self.app.post(url(controller='login', action='register'), |
|
218 | 218 | {'username': 'test_admin_0', |
|
219 | 219 | 'password': 'test12', |
|
220 | 220 | 'password_confirmation': 'test12', |
|
221 | 221 | 'email': TEST_USER_ADMIN_EMAIL, |
|
222 | 222 | 'firstname': 'test', |
|
223 | 223 | 'lastname': 'test'}) |
|
224 | 224 | |
|
225 | 225 | msg = validators.UniqSystemEmail()()._messages['email_taken'] |
|
226 | 226 | response.mustcontain(msg) |
|
227 | 227 | |
|
228 | 228 | def test_register_err_same_email_case_sensitive(self): |
|
229 | 229 | response = self.app.post(url(controller='login', action='register'), |
|
230 | 230 | {'username': 'test_admin_1', |
|
231 | 231 | 'password': 'test12', |
|
232 | 232 | 'password_confirmation': 'test12', |
|
233 | 233 | 'email': TEST_USER_ADMIN_EMAIL.title(), |
|
234 | 234 | 'firstname': 'test', |
|
235 | 235 | 'lastname': 'test'}) |
|
236 | 236 | msg = validators.UniqSystemEmail()()._messages['email_taken'] |
|
237 | 237 | response.mustcontain(msg) |
|
238 | 238 | |
|
239 | 239 | def test_register_err_wrong_data(self): |
|
240 | 240 | response = self.app.post(url(controller='login', action='register'), |
|
241 | 241 | {'username': 'xs', |
|
242 | 242 | 'password': 'test', |
|
243 | 243 | 'password_confirmation': 'test', |
|
244 | 244 | 'email': 'goodmailm', |
|
245 | 245 | 'firstname': 'test', |
|
246 | 246 | 'lastname': 'test'}) |
|
247 | 247 | self.assertEqual(response.status, '200 OK') |
|
248 | 248 | response.mustcontain('An email address must contain a single @') |
|
249 | 249 | response.mustcontain('Enter a value 6 characters long or more') |
|
250 | 250 | |
|
251 | 251 | def test_register_err_username(self): |
|
252 | 252 | response = self.app.post(url(controller='login', action='register'), |
|
253 | 253 | {'username': 'error user', |
|
254 | 254 | 'password': 'test12', |
|
255 | 255 | 'password_confirmation': 'test12', |
|
256 | 256 | 'email': 'goodmailm', |
|
257 | 257 | 'firstname': 'test', |
|
258 | 258 | 'lastname': 'test'}) |
|
259 | 259 | |
|
260 | 260 | response.mustcontain('An email address must contain a single @') |
|
261 | 261 | response.mustcontain('Username may only contain ' |
|
262 | 262 | 'alphanumeric characters underscores, ' |
|
263 | 263 | 'periods or dashes and must begin with an ' |
|
264 | 264 | 'alphanumeric character') |
|
265 | 265 | |
|
266 | 266 | def test_register_err_case_sensitive(self): |
|
267 | 267 | usr = TEST_USER_ADMIN_LOGIN.title() |
|
268 | 268 | response = self.app.post(url(controller='login', action='register'), |
|
269 | 269 | {'username': usr, |
|
270 | 270 | 'password': 'test12', |
|
271 | 271 | 'password_confirmation': 'test12', |
|
272 | 272 | 'email': 'goodmailm', |
|
273 | 273 | 'firstname': 'test', |
|
274 | 274 | 'lastname': 'test'}) |
|
275 | 275 | |
|
276 | 276 | response.mustcontain('An email address must contain a single @') |
|
277 | 277 | msg = validators.ValidUsername()._messages['username_exists'] |
|
278 | 278 | msg = h.html_escape(msg % {'username': usr}) |
|
279 | 279 | response.mustcontain(msg) |
|
280 | 280 | |
|
281 | 281 | def test_register_special_chars(self): |
|
282 | 282 | response = self.app.post(url(controller='login', action='register'), |
|
283 | 283 | {'username': 'xxxaxn', |
|
284 | 284 | 'password': 'ąćźżąśśśś', |
|
285 | 285 | 'password_confirmation': 'ąćźżąśśśś', |
|
286 | 286 | 'email': 'goodmailm@test.plx', |
|
287 | 287 | 'firstname': 'test', |
|
288 | 288 | 'lastname': 'test'}) |
|
289 | 289 | |
|
290 | 290 | msg = validators.ValidPassword()._messages['invalid_password'] |
|
291 | 291 | response.mustcontain(msg) |
|
292 | 292 | |
|
293 | 293 | def test_register_password_mismatch(self): |
|
294 | 294 | response = self.app.post(url(controller='login', action='register'), |
|
295 | 295 | {'username': 'xs', |
|
296 | 296 | 'password': '123qwe', |
|
297 | 297 | 'password_confirmation': 'qwe123', |
|
298 | 298 | 'email': 'goodmailm@test.plxa', |
|
299 | 299 | 'firstname': 'test', |
|
300 | 300 | 'lastname': 'test'}) |
|
301 | 301 | msg = validators.ValidPasswordsMatch('password', 'password_confirmation')._messages['password_mismatch'] |
|
302 | 302 | response.mustcontain(msg) |
|
303 | 303 | |
|
304 | 304 | def test_register_ok(self): |
|
305 | 305 | username = 'test_regular4' |
|
306 | 306 | password = 'qweqwe' |
|
307 |
email = 'user |
|
|
307 | email = 'user4@example.com' | |
|
308 | 308 | name = 'testname' |
|
309 | 309 | lastname = 'testlastname' |
|
310 | 310 | |
|
311 | 311 | response = self.app.post(url(controller='login', action='register'), |
|
312 | 312 | {'username': username, |
|
313 | 313 | 'password': password, |
|
314 | 314 | 'password_confirmation': password, |
|
315 | 315 | 'email': email, |
|
316 | 316 | 'firstname': name, |
|
317 | 317 | 'lastname': lastname, |
|
318 | 318 | 'admin': True}) # This should be overriden |
|
319 | 319 | self.assertEqual(response.status, '302 Found') |
|
320 | 320 | self.checkSessionFlash(response, 'You have successfully registered into Kallithea') |
|
321 | 321 | |
|
322 | 322 | ret = Session().query(User).filter(User.username == 'test_regular4').one() |
|
323 | 323 | self.assertEqual(ret.username, username) |
|
324 | 324 | self.assertEqual(check_password(password, ret.password), True) |
|
325 | 325 | self.assertEqual(ret.email, email) |
|
326 | 326 | self.assertEqual(ret.name, name) |
|
327 | 327 | self.assertEqual(ret.lastname, lastname) |
|
328 | 328 | self.assertNotEqual(ret.api_key, None) |
|
329 | 329 | self.assertEqual(ret.admin, False) |
|
330 | 330 | |
|
331 | 331 | #========================================================================== |
|
332 | 332 | # PASSWORD RESET |
|
333 | 333 | #========================================================================== |
|
334 | 334 | |
|
335 | 335 | def test_forgot_password_wrong_mail(self): |
|
336 | 336 | bad_email = 'username%wrongmail.org' |
|
337 | 337 | response = self.app.post( |
|
338 | 338 | url(controller='login', action='password_reset'), |
|
339 | 339 | {'email': bad_email, } |
|
340 | 340 | ) |
|
341 | 341 | |
|
342 | 342 | response.mustcontain('An email address must contain a single @') |
|
343 | 343 | |
|
344 | 344 | def test_forgot_password(self): |
|
345 | 345 | response = self.app.get(url(controller='login', |
|
346 | 346 | action='password_reset')) |
|
347 | 347 | self.assertEqual(response.status, '200 OK') |
|
348 | 348 | |
|
349 | 349 | username = 'test_password_reset_1' |
|
350 | 350 | password = 'qweqwe' |
|
351 |
email = 'username@ |
|
|
351 | email = 'username@example.com' | |
|
352 | 352 | name = 'passwd' |
|
353 | 353 | lastname = 'reset' |
|
354 | 354 | timestamp = int(time.time()) |
|
355 | 355 | |
|
356 | 356 | new = User() |
|
357 | 357 | new.username = username |
|
358 | 358 | new.password = password |
|
359 | 359 | new.email = email |
|
360 | 360 | new.name = name |
|
361 | 361 | new.lastname = lastname |
|
362 | 362 | new.api_key = generate_api_key() |
|
363 | 363 | Session().add(new) |
|
364 | 364 | Session().commit() |
|
365 | 365 | |
|
366 | 366 | response = self.app.post(url(controller='login', |
|
367 | 367 | action='password_reset'), |
|
368 | 368 | {'email': email, }) |
|
369 | 369 | |
|
370 | 370 | self.checkSessionFlash(response, 'A password reset confirmation code has been sent') |
|
371 | 371 | |
|
372 | 372 | response = response.follow() |
|
373 | 373 | |
|
374 | 374 | # BAD TOKEN |
|
375 | 375 | |
|
376 | 376 | token = "bad" |
|
377 | 377 | |
|
378 | 378 | response = self.app.post(url(controller='login', |
|
379 | 379 | action='password_reset_confirmation'), |
|
380 | 380 | {'email': email, |
|
381 | 381 | 'timestamp': timestamp, |
|
382 | 382 | 'password': "p@ssw0rd", |
|
383 | 383 | 'password_confirm': "p@ssw0rd", |
|
384 | 384 | 'token': token, |
|
385 | 385 | }) |
|
386 | 386 | self.assertEqual(response.status, '200 OK') |
|
387 | 387 | response.mustcontain('Invalid password reset token') |
|
388 | 388 | |
|
389 | 389 | # GOOD TOKEN |
|
390 | 390 | |
|
391 | 391 | # TODO: The token should ideally be taken from the mail sent |
|
392 | 392 | # above, instead of being recalculated. |
|
393 | 393 | |
|
394 | 394 | token = UserModel().get_reset_password_token( |
|
395 | 395 | User.get_by_username(username), timestamp, self.authentication_token()) |
|
396 | 396 | |
|
397 | 397 | response = self.app.get(url(controller='login', |
|
398 | 398 | action='password_reset_confirmation', |
|
399 | 399 | email=email, |
|
400 | 400 | timestamp=timestamp, |
|
401 | 401 | token=token)) |
|
402 | 402 | self.assertEqual(response.status, '200 OK') |
|
403 | 403 | response.mustcontain("You are about to set a new password for the email address %s" % email) |
|
404 | 404 | |
|
405 | 405 | response = self.app.post(url(controller='login', |
|
406 | 406 | action='password_reset_confirmation'), |
|
407 | 407 | {'email': email, |
|
408 | 408 | 'timestamp': timestamp, |
|
409 | 409 | 'password': "p@ssw0rd", |
|
410 | 410 | 'password_confirm': "p@ssw0rd", |
|
411 | 411 | 'token': token, |
|
412 | 412 | }) |
|
413 | 413 | self.assertEqual(response.status, '302 Found') |
|
414 | 414 | self.checkSessionFlash(response, 'Successfully updated password') |
|
415 | 415 | |
|
416 | 416 | response = response.follow() |
|
417 | 417 | |
|
418 | 418 | #========================================================================== |
|
419 | 419 | # API |
|
420 | 420 | #========================================================================== |
|
421 | 421 | |
|
422 | 422 | def _get_api_whitelist(self, values=None): |
|
423 | 423 | config = {'api_access_controllers_whitelist': values or []} |
|
424 | 424 | return config |
|
425 | 425 | |
|
426 | 426 | @parameterized.expand([ |
|
427 | 427 | ('none', None), |
|
428 | 428 | ('empty_string', ''), |
|
429 | 429 | ('fake_number', '123456'), |
|
430 | 430 | ('proper_api_key', None) |
|
431 | 431 | ]) |
|
432 | 432 | def test_access_not_whitelisted_page_via_api_key(self, test_name, api_key): |
|
433 | 433 | whitelist = self._get_api_whitelist([]) |
|
434 | 434 | with mock.patch('kallithea.CONFIG', whitelist): |
|
435 | 435 | self.assertEqual([], |
|
436 | 436 | whitelist['api_access_controllers_whitelist']) |
|
437 | 437 | if test_name == 'proper_api_key': |
|
438 | 438 | #use builtin if api_key is None |
|
439 | 439 | api_key = User.get_first_admin().api_key |
|
440 | 440 | |
|
441 | 441 | with fixture.anon_access(False): |
|
442 | 442 | self.app.get(url(controller='changeset', |
|
443 | 443 | action='changeset_raw', |
|
444 | 444 | repo_name=HG_REPO, revision='tip', api_key=api_key), |
|
445 | 445 | status=403) |
|
446 | 446 | |
|
447 | 447 | @parameterized.expand([ |
|
448 | 448 | ('none', None, 302), |
|
449 | 449 | ('empty_string', '', 302), |
|
450 | 450 | ('fake_number', '123456', 302), |
|
451 | 451 | ('fake_not_alnum', 'a-z', 302), |
|
452 | 452 | ('fake_api_key', '0123456789abcdef0123456789ABCDEF01234567', 302), |
|
453 | 453 | ('proper_api_key', None, 200) |
|
454 | 454 | ]) |
|
455 | 455 | def test_access_whitelisted_page_via_api_key(self, test_name, api_key, code): |
|
456 | 456 | whitelist = self._get_api_whitelist(['ChangesetController:changeset_raw']) |
|
457 | 457 | with mock.patch('kallithea.CONFIG', whitelist): |
|
458 | 458 | self.assertEqual(['ChangesetController:changeset_raw'], |
|
459 | 459 | whitelist['api_access_controllers_whitelist']) |
|
460 | 460 | if test_name == 'proper_api_key': |
|
461 | 461 | api_key = User.get_first_admin().api_key |
|
462 | 462 | |
|
463 | 463 | with fixture.anon_access(False): |
|
464 | 464 | self.app.get(url(controller='changeset', |
|
465 | 465 | action='changeset_raw', |
|
466 | 466 | repo_name=HG_REPO, revision='tip', api_key=api_key), |
|
467 | 467 | status=code) |
|
468 | 468 | |
|
469 | 469 | def test_access_page_via_extra_api_key(self): |
|
470 | 470 | whitelist = self._get_api_whitelist(['ChangesetController:changeset_raw']) |
|
471 | 471 | with mock.patch('kallithea.CONFIG', whitelist): |
|
472 | 472 | self.assertEqual(['ChangesetController:changeset_raw'], |
|
473 | 473 | whitelist['api_access_controllers_whitelist']) |
|
474 | 474 | |
|
475 | 475 | new_api_key = ApiKeyModel().create(TEST_USER_ADMIN_LOGIN, u'test') |
|
476 | 476 | Session().commit() |
|
477 | 477 | with fixture.anon_access(False): |
|
478 | 478 | self.app.get(url(controller='changeset', |
|
479 | 479 | action='changeset_raw', |
|
480 | 480 | repo_name=HG_REPO, revision='tip', api_key=new_api_key.api_key), |
|
481 | 481 | status=200) |
|
482 | 482 | |
|
483 | 483 | def test_access_page_via_expired_api_key(self): |
|
484 | 484 | whitelist = self._get_api_whitelist(['ChangesetController:changeset_raw']) |
|
485 | 485 | with mock.patch('kallithea.CONFIG', whitelist): |
|
486 | 486 | self.assertEqual(['ChangesetController:changeset_raw'], |
|
487 | 487 | whitelist['api_access_controllers_whitelist']) |
|
488 | 488 | |
|
489 | 489 | new_api_key = ApiKeyModel().create(TEST_USER_ADMIN_LOGIN, u'test') |
|
490 | 490 | Session().commit() |
|
491 | 491 | #patch the API key and make it expired |
|
492 | 492 | new_api_key.expires = 0 |
|
493 | 493 | Session().add(new_api_key) |
|
494 | 494 | Session().commit() |
|
495 | 495 | with fixture.anon_access(False): |
|
496 | 496 | self.app.get(url(controller='changeset', |
|
497 | 497 | action='changeset_raw', |
|
498 | 498 | repo_name=HG_REPO, revision='tip', |
|
499 | 499 | api_key=new_api_key.api_key), |
|
500 | 500 | status=302) |
@@ -1,247 +1,247 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | from kallithea.model.db import User, UserFollowing, Repository, UserApiKeys |
|
4 | 4 | from kallithea.tests import * |
|
5 | 5 | from kallithea.tests.fixture import Fixture |
|
6 | 6 | from kallithea.lib import helpers as h |
|
7 | 7 | from kallithea.model.user import UserModel |
|
8 | 8 | from kallithea.model.meta import Session |
|
9 | 9 | |
|
10 | 10 | fixture = Fixture() |
|
11 | 11 | |
|
12 | 12 | |
|
13 | 13 | class TestMyAccountController(TestController): |
|
14 | 14 | test_user_1 = 'testme' |
|
15 | 15 | |
|
16 | 16 | @classmethod |
|
17 | 17 | def teardown_class(cls): |
|
18 | 18 | if User.get_by_username(cls.test_user_1): |
|
19 | 19 | UserModel().delete(cls.test_user_1) |
|
20 | 20 | Session().commit() |
|
21 | 21 | |
|
22 | 22 | def test_my_account(self): |
|
23 | 23 | self.log_user() |
|
24 | 24 | response = self.app.get(url('my_account')) |
|
25 | 25 | |
|
26 | 26 | response.mustcontain('value="%s' % TEST_USER_ADMIN_LOGIN) |
|
27 | 27 | |
|
28 | 28 | def test_my_account_my_repos(self): |
|
29 | 29 | self.log_user() |
|
30 | 30 | response = self.app.get(url('my_account_repos')) |
|
31 | 31 | cnt = Repository.query().filter(Repository.user == |
|
32 | 32 | User.get_by_username(TEST_USER_ADMIN_LOGIN)).count() |
|
33 | 33 | response.mustcontain('"totalRecords": %s' % cnt) |
|
34 | 34 | |
|
35 | 35 | def test_my_account_my_watched(self): |
|
36 | 36 | self.log_user() |
|
37 | 37 | response = self.app.get(url('my_account_watched')) |
|
38 | 38 | |
|
39 | 39 | cnt = UserFollowing.query().filter(UserFollowing.user == |
|
40 | 40 | User.get_by_username(TEST_USER_ADMIN_LOGIN)).count() |
|
41 | 41 | response.mustcontain('"totalRecords": %s' % cnt) |
|
42 | 42 | |
|
43 | 43 | def test_my_account_my_emails(self): |
|
44 | 44 | self.log_user() |
|
45 | 45 | response = self.app.get(url('my_account_emails')) |
|
46 | 46 | response.mustcontain('No additional emails specified') |
|
47 | 47 | |
|
48 | 48 | def test_my_account_my_emails_add_existing_email(self): |
|
49 | 49 | self.log_user() |
|
50 | 50 | response = self.app.get(url('my_account_emails')) |
|
51 | 51 | response.mustcontain('No additional emails specified') |
|
52 | 52 | response = self.app.post(url('my_account_emails'), |
|
53 | 53 | {'new_email': TEST_USER_REGULAR_EMAIL, '_authentication_token': self.authentication_token()}) |
|
54 | 54 | self.checkSessionFlash(response, 'This email address is already in use') |
|
55 | 55 | |
|
56 | 56 | def test_my_account_my_emails_add_mising_email_in_form(self): |
|
57 | 57 | self.log_user() |
|
58 | 58 | response = self.app.get(url('my_account_emails')) |
|
59 | 59 | response.mustcontain('No additional emails specified') |
|
60 | 60 | response = self.app.post(url('my_account_emails'), |
|
61 | 61 | {'_authentication_token': self.authentication_token()}) |
|
62 | 62 | self.checkSessionFlash(response, 'Please enter an email address') |
|
63 | 63 | |
|
64 | 64 | def test_my_account_my_emails_add_remove(self): |
|
65 | 65 | self.log_user() |
|
66 | 66 | response = self.app.get(url('my_account_emails')) |
|
67 | 67 | response.mustcontain('No additional emails specified') |
|
68 | 68 | |
|
69 | 69 | response = self.app.post(url('my_account_emails'), |
|
70 |
{'new_email': ' |
|
|
70 | {'new_email': 'barz@example.com', '_authentication_token': self.authentication_token()}) | |
|
71 | 71 | |
|
72 | 72 | response = self.app.get(url('my_account_emails')) |
|
73 | 73 | |
|
74 | 74 | from kallithea.model.db import UserEmailMap |
|
75 | 75 | email_id = UserEmailMap.query()\ |
|
76 | 76 | .filter(UserEmailMap.user == User.get_by_username(TEST_USER_ADMIN_LOGIN))\ |
|
77 |
.filter(UserEmailMap.email == ' |
|
|
77 | .filter(UserEmailMap.email == 'barz@example.com').one().email_id | |
|
78 | 78 | |
|
79 |
response.mustcontain(' |
|
|
79 | response.mustcontain('barz@example.com') | |
|
80 | 80 | response.mustcontain('<input id="del_email_id" name="del_email_id" type="hidden" value="%s" />' % email_id) |
|
81 | 81 | |
|
82 | 82 | response = self.app.post(url('my_account_emails'), |
|
83 | 83 | {'del_email_id': email_id, '_method': 'delete', '_authentication_token': self.authentication_token()}) |
|
84 | 84 | self.checkSessionFlash(response, 'Removed email from user') |
|
85 | 85 | response = self.app.get(url('my_account_emails')) |
|
86 | 86 | response.mustcontain('No additional emails specified') |
|
87 | 87 | |
|
88 | 88 | |
|
89 | 89 | @parameterized.expand( |
|
90 | 90 | [('firstname', {'firstname': 'new_username'}), |
|
91 | 91 | ('lastname', {'lastname': 'new_username'}), |
|
92 | 92 | ('admin', {'admin': True}), |
|
93 | 93 | ('admin', {'admin': False}), |
|
94 | 94 | ('extern_type', {'extern_type': 'ldap'}), |
|
95 | 95 | ('extern_type', {'extern_type': None}), |
|
96 | 96 | #('extern_name', {'extern_name': 'test'}), |
|
97 | 97 | #('extern_name', {'extern_name': None}), |
|
98 | 98 | ('active', {'active': False}), |
|
99 | 99 | ('active', {'active': True}), |
|
100 |
('email', {'email': 'some |
|
|
100 | ('email', {'email': 'someemail@example.com'}), | |
|
101 | 101 | # ('new_password', {'new_password': 'foobar123', |
|
102 | 102 | # 'password_confirmation': 'foobar123'}) |
|
103 | 103 | ]) |
|
104 | 104 | def test_my_account_update(self, name, attrs): |
|
105 | 105 | usr = fixture.create_user(self.test_user_1, password='qweqwe', |
|
106 | 106 | email='testme@example.com', |
|
107 | 107 | extern_type='internal', |
|
108 | 108 | extern_name=self.test_user_1, |
|
109 | 109 | skip_if_exists=True) |
|
110 | 110 | params = usr.get_api_data(True) # current user data |
|
111 | 111 | user_id = usr.user_id |
|
112 | 112 | self.log_user(username=self.test_user_1, password='qweqwe') |
|
113 | 113 | |
|
114 | 114 | params.update({'password_confirmation': ''}) |
|
115 | 115 | params.update({'new_password': ''}) |
|
116 | 116 | params.update({'extern_type': 'internal'}) |
|
117 | 117 | params.update({'extern_name': self.test_user_1}) |
|
118 | 118 | params.update({'_authentication_token': self.authentication_token()}) |
|
119 | 119 | |
|
120 | 120 | params.update(attrs) |
|
121 | 121 | response = self.app.post(url('my_account'), params) |
|
122 | 122 | |
|
123 | 123 | self.checkSessionFlash(response, |
|
124 | 124 | 'Your account was updated successfully') |
|
125 | 125 | |
|
126 | 126 | updated_user = User.get_by_username(self.test_user_1) |
|
127 | 127 | updated_params = updated_user.get_api_data(True) |
|
128 | 128 | updated_params.update({'password_confirmation': ''}) |
|
129 | 129 | updated_params.update({'new_password': ''}) |
|
130 | 130 | |
|
131 | 131 | params['last_login'] = updated_params['last_login'] |
|
132 | 132 | if name == 'email': |
|
133 | 133 | params['emails'] = [attrs['email']] |
|
134 | 134 | if name == 'extern_type': |
|
135 | 135 | #cannot update this via form, expected value is original one |
|
136 | 136 | params['extern_type'] = "internal" |
|
137 | 137 | if name == 'extern_name': |
|
138 | 138 | #cannot update this via form, expected value is original one |
|
139 | 139 | params['extern_name'] = str(user_id) |
|
140 | 140 | if name == 'active': |
|
141 | 141 | #my account cannot deactivate account |
|
142 | 142 | params['active'] = True |
|
143 | 143 | if name == 'admin': |
|
144 | 144 | #my account cannot make you an admin ! |
|
145 | 145 | params['admin'] = False |
|
146 | 146 | |
|
147 | 147 | params.pop('_authentication_token') |
|
148 | 148 | self.assertEqual(params, updated_params) |
|
149 | 149 | |
|
150 | 150 | def test_my_account_update_err_email_exists(self): |
|
151 | 151 | self.log_user() |
|
152 | 152 | |
|
153 | 153 | new_email = TEST_USER_REGULAR_EMAIL # already existing email |
|
154 | 154 | response = self.app.post(url('my_account'), |
|
155 | 155 | params=dict( |
|
156 | 156 | username=TEST_USER_ADMIN_LOGIN, |
|
157 | 157 | new_password=TEST_USER_ADMIN_PASS, |
|
158 | 158 | password_confirmation='test122', |
|
159 | 159 | firstname='NewName', |
|
160 | 160 | lastname='NewLastname', |
|
161 | 161 | email=new_email, |
|
162 | 162 | _authentication_token=self.authentication_token()) |
|
163 | 163 | ) |
|
164 | 164 | |
|
165 | 165 | response.mustcontain('This email address is already in use') |
|
166 | 166 | |
|
167 | 167 | def test_my_account_update_err(self): |
|
168 | 168 | self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS) |
|
169 | 169 | |
|
170 | 170 | new_email = 'newmail.pl' |
|
171 | 171 | response = self.app.post(url('my_account'), |
|
172 | 172 | params=dict( |
|
173 | 173 | username=TEST_USER_ADMIN_LOGIN, |
|
174 | 174 | new_password=TEST_USER_ADMIN_PASS, |
|
175 | 175 | password_confirmation='test122', |
|
176 | 176 | firstname='NewName', |
|
177 | 177 | lastname='NewLastname', |
|
178 | 178 | email=new_email, |
|
179 | 179 | _authentication_token=self.authentication_token())) |
|
180 | 180 | |
|
181 | 181 | response.mustcontain('An email address must contain a single @') |
|
182 | 182 | from kallithea.model import validators |
|
183 | 183 | msg = validators.ValidUsername(edit=False, old_data={})\ |
|
184 | 184 | ._messages['username_exists'] |
|
185 | 185 | msg = h.html_escape(msg % {'username': TEST_USER_ADMIN_LOGIN}) |
|
186 | 186 | response.mustcontain(msg) |
|
187 | 187 | |
|
188 | 188 | def test_my_account_api_keys(self): |
|
189 | 189 | usr = self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS) |
|
190 | 190 | user = User.get(usr['user_id']) |
|
191 | 191 | response = self.app.get(url('my_account_api_keys')) |
|
192 | 192 | response.mustcontain(user.api_key) |
|
193 | 193 | response.mustcontain('Expires: Never') |
|
194 | 194 | |
|
195 | 195 | @parameterized.expand([ |
|
196 | 196 | ('forever', -1), |
|
197 | 197 | ('5mins', 60*5), |
|
198 | 198 | ('30days', 60*60*24*30), |
|
199 | 199 | ]) |
|
200 | 200 | def test_my_account_add_api_keys(self, desc, lifetime): |
|
201 | 201 | usr = self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS) |
|
202 | 202 | user = User.get(usr['user_id']) |
|
203 | 203 | response = self.app.post(url('my_account_api_keys'), |
|
204 | 204 | {'description': desc, 'lifetime': lifetime, '_authentication_token': self.authentication_token()}) |
|
205 | 205 | self.checkSessionFlash(response, 'API key successfully created') |
|
206 | 206 | try: |
|
207 | 207 | response = response.follow() |
|
208 | 208 | user = User.get(usr['user_id']) |
|
209 | 209 | for api_key in user.api_keys: |
|
210 | 210 | response.mustcontain(api_key) |
|
211 | 211 | finally: |
|
212 | 212 | for api_key in UserApiKeys.query().all(): |
|
213 | 213 | Session().delete(api_key) |
|
214 | 214 | Session().commit() |
|
215 | 215 | |
|
216 | 216 | def test_my_account_remove_api_key(self): |
|
217 | 217 | usr = self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS) |
|
218 | 218 | user = User.get(usr['user_id']) |
|
219 | 219 | response = self.app.post(url('my_account_api_keys'), |
|
220 | 220 | {'description': 'desc', 'lifetime': -1, '_authentication_token': self.authentication_token()}) |
|
221 | 221 | self.checkSessionFlash(response, 'API key successfully created') |
|
222 | 222 | response = response.follow() |
|
223 | 223 | |
|
224 | 224 | #now delete our key |
|
225 | 225 | keys = UserApiKeys.query().all() |
|
226 | 226 | self.assertEqual(1, len(keys)) |
|
227 | 227 | |
|
228 | 228 | response = self.app.post(url('my_account_api_keys'), |
|
229 | 229 | {'_method': 'delete', 'del_api_key': keys[0].api_key, '_authentication_token': self.authentication_token()}) |
|
230 | 230 | self.checkSessionFlash(response, 'API key successfully deleted') |
|
231 | 231 | keys = UserApiKeys.query().all() |
|
232 | 232 | self.assertEqual(0, len(keys)) |
|
233 | 233 | |
|
234 | 234 | |
|
235 | 235 | def test_my_account_reset_main_api_key(self): |
|
236 | 236 | usr = self.log_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS) |
|
237 | 237 | user = User.get(usr['user_id']) |
|
238 | 238 | api_key = user.api_key |
|
239 | 239 | response = self.app.get(url('my_account_api_keys')) |
|
240 | 240 | response.mustcontain(api_key) |
|
241 | 241 | response.mustcontain('Expires: Never') |
|
242 | 242 | |
|
243 | 243 | response = self.app.post(url('my_account_api_keys'), |
|
244 | 244 | {'_method': 'delete', 'del_api_key_builtin': api_key, '_authentication_token': self.authentication_token()}) |
|
245 | 245 | self.checkSessionFlash(response, 'API key successfully reset') |
|
246 | 246 | response = response.follow() |
|
247 | 247 | response.mustcontain(no=[api_key]) |
@@ -1,537 +1,537 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.tests.other.manual_test_vcs_operations |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Test suite for making push/pull operations. |
|
19 | 19 | |
|
20 | 20 | Run it in two terminals:: |
|
21 | 21 | paster serve kallithea/tests/test.ini |
|
22 | 22 | KALLITHEA_WHOOSH_TEST_DISABLE=1 KALLITHEA_NO_TMP_PATH=1 nosetests kallithea/tests/other/manual_test_vcs_operations.py |
|
23 | 23 | |
|
24 | 24 | You must have git > 1.8.1 for tests to work fine |
|
25 | 25 | |
|
26 | 26 | This file was forked by the Kallithea project in July 2014. |
|
27 | 27 | Original author and date, and relevant copyright and licensing information is below: |
|
28 | 28 | :created_on: Dec 30, 2010 |
|
29 | 29 | :author: marcink |
|
30 | 30 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
31 | 31 | :license: GPLv3, see LICENSE.md for more details. |
|
32 | 32 | |
|
33 | 33 | """ |
|
34 | 34 | |
|
35 | 35 | import re |
|
36 | 36 | import tempfile |
|
37 | 37 | import time |
|
38 | 38 | from os.path import join as jn |
|
39 | 39 | |
|
40 | 40 | from tempfile import _RandomNameSequence |
|
41 | 41 | from subprocess import Popen, PIPE |
|
42 | 42 | |
|
43 | 43 | from kallithea.tests import * |
|
44 | 44 | from kallithea.model.db import User, Repository, UserIpMap, CacheInvalidation |
|
45 | 45 | from kallithea.model.meta import Session |
|
46 | 46 | from kallithea.model.repo import RepoModel |
|
47 | 47 | from kallithea.model.user import UserModel |
|
48 | 48 | |
|
49 | 49 | DEBUG = True |
|
50 | 50 | HOST = '127.0.0.1:4999' # test host |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class Command(object): |
|
54 | 54 | |
|
55 | 55 | def __init__(self, cwd): |
|
56 | 56 | self.cwd = cwd |
|
57 | 57 | |
|
58 | 58 | def execute(self, cmd, *args): |
|
59 | 59 | """ |
|
60 | 60 | Runs command on the system with given ``args``. |
|
61 | 61 | """ |
|
62 | 62 | |
|
63 | 63 | command = cmd + ' ' + ' '.join(args) |
|
64 | 64 | if DEBUG: |
|
65 | 65 | print '*** CMD %s ***' % command |
|
66 | 66 | p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd) |
|
67 | 67 | stdout, stderr = p.communicate() |
|
68 | 68 | if DEBUG: |
|
69 | 69 | print 'stdout:', repr(stdout) |
|
70 | 70 | print 'stderr:', repr(stderr) |
|
71 | 71 | return stdout, stderr |
|
72 | 72 | |
|
73 | 73 | |
|
74 | 74 | def _get_tmp_dir(): |
|
75 | 75 | return tempfile.mkdtemp(prefix='rc_integration_test') |
|
76 | 76 | |
|
77 | 77 | |
|
78 | 78 | def _construct_url(repo, dest=None, **kwargs): |
|
79 | 79 | if dest is None: |
|
80 | 80 | #make temp clone |
|
81 | 81 | dest = _get_tmp_dir() |
|
82 | 82 | params = { |
|
83 | 83 | 'user': TEST_USER_ADMIN_LOGIN, |
|
84 | 84 | 'passwd': TEST_USER_ADMIN_PASS, |
|
85 | 85 | 'host': HOST, |
|
86 | 86 | 'cloned_repo': repo, |
|
87 | 87 | 'dest': dest |
|
88 | 88 | } |
|
89 | 89 | params.update(**kwargs) |
|
90 | 90 | if params['user'] and params['passwd']: |
|
91 | 91 | _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s %(dest)s' % params |
|
92 | 92 | else: |
|
93 | 93 | _url = 'http://(host)s/%(cloned_repo)s %(dest)s' % params |
|
94 | 94 | return _url |
|
95 | 95 | |
|
96 | 96 | |
|
97 | 97 | def _add_files_and_push(vcs, DEST, **kwargs): |
|
98 | 98 | """ |
|
99 | 99 | Generate some files, add it to DEST repo and push back |
|
100 | 100 | vcs is git or hg and defines what VCS we want to make those files for |
|
101 | 101 | |
|
102 | 102 | :param vcs: |
|
103 | 103 | :param DEST: |
|
104 | 104 | """ |
|
105 | 105 | # commit some stuff into this repo |
|
106 | 106 | cwd = path = jn(DEST) |
|
107 | 107 | #added_file = jn(path, '%ssetupążźć.py' % _RandomNameSequence().next()) |
|
108 | 108 | added_file = jn(path, '%ssetup.py' % _RandomNameSequence().next()) |
|
109 | 109 | Command(cwd).execute('touch %s' % added_file) |
|
110 | 110 | Command(cwd).execute('%s add %s' % (vcs, added_file)) |
|
111 | 111 | |
|
112 | 112 | for i in xrange(kwargs.get('files_no', 3)): |
|
113 | 113 | cmd = """echo 'added_line%s' >> %s""" % (i, added_file) |
|
114 | 114 | Command(cwd).execute(cmd) |
|
115 |
author_str = 'User ǝɯɐᴎ <me@e |
|
|
115 | author_str = 'User ǝɯɐᴎ <me@example.com>' | |
|
116 | 116 | if vcs == 'hg': |
|
117 | 117 | cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % ( |
|
118 | 118 | i, author_str, added_file |
|
119 | 119 | ) |
|
120 | 120 | elif vcs == 'git': |
|
121 |
cmd = """EMAIL="me@e |
|
|
121 | cmd = """EMAIL="me@example.com" git commit -m 'commited new %s' --author '%s' %s """ % ( | |
|
122 | 122 | i, author_str, added_file |
|
123 | 123 | ) |
|
124 | 124 | Command(cwd).execute(cmd) |
|
125 | 125 | |
|
126 | 126 | # PUSH it back |
|
127 | 127 | _REPO = None |
|
128 | 128 | if vcs == 'hg': |
|
129 | 129 | _REPO = HG_REPO |
|
130 | 130 | elif vcs == 'git': |
|
131 | 131 | _REPO = GIT_REPO |
|
132 | 132 | |
|
133 | 133 | kwargs['dest'] = '' |
|
134 | 134 | clone_url = _construct_url(_REPO, **kwargs) |
|
135 | 135 | if 'clone_url' in kwargs: |
|
136 | 136 | clone_url = kwargs['clone_url'] |
|
137 | 137 | stdout = stderr = None |
|
138 | 138 | if vcs == 'hg': |
|
139 | 139 | stdout, stderr = Command(cwd).execute('hg push --verbose', clone_url) |
|
140 | 140 | elif vcs == 'git': |
|
141 | 141 | stdout, stderr = Command(cwd).execute('git push --verbose', clone_url + " master") |
|
142 | 142 | |
|
143 | 143 | return stdout, stderr |
|
144 | 144 | |
|
145 | 145 | |
|
146 | 146 | def set_anonymous_access(enable=True): |
|
147 | 147 | user = User.get_by_username(User.DEFAULT_USER) |
|
148 | 148 | user.active = enable |
|
149 | 149 | Session().add(user) |
|
150 | 150 | Session().commit() |
|
151 | 151 | print '\tanonymous access is now:', enable |
|
152 | 152 | if enable != User.get_by_username(User.DEFAULT_USER).active: |
|
153 | 153 | raise Exception('Cannot set anonymous access') |
|
154 | 154 | |
|
155 | 155 | |
|
156 | 156 | #============================================================================== |
|
157 | 157 | # TESTS |
|
158 | 158 | #============================================================================== |
|
159 | 159 | |
|
160 | 160 | |
|
161 | 161 | def _check_proper_git_push(stdout, stderr): |
|
162 | 162 | #WTF Git stderr is output ?! |
|
163 | 163 | assert 'fatal' not in stderr |
|
164 | 164 | assert 'rejected' not in stderr |
|
165 | 165 | assert 'Pushing to' in stderr |
|
166 | 166 | assert 'master -> master' in stderr |
|
167 | 167 | |
|
168 | 168 | |
|
169 | 169 | class TestVCSOperations(BaseTestCase): |
|
170 | 170 | |
|
171 | 171 | @classmethod |
|
172 | 172 | def setup_class(cls): |
|
173 | 173 | #DISABLE ANONYMOUS ACCESS |
|
174 | 174 | set_anonymous_access(False) |
|
175 | 175 | |
|
176 | 176 | def setUp(self): |
|
177 | 177 | r = Repository.get_by_repo_name(GIT_REPO) |
|
178 | 178 | Repository.unlock(r) |
|
179 | 179 | r.enable_locking = False |
|
180 | 180 | Session().add(r) |
|
181 | 181 | Session().commit() |
|
182 | 182 | |
|
183 | 183 | r = Repository.get_by_repo_name(HG_REPO) |
|
184 | 184 | Repository.unlock(r) |
|
185 | 185 | r.enable_locking = False |
|
186 | 186 | Session().add(r) |
|
187 | 187 | Session().commit() |
|
188 | 188 | |
|
189 | 189 | def test_clone_hg_repo_by_admin(self): |
|
190 | 190 | clone_url = _construct_url(HG_REPO) |
|
191 | 191 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
192 | 192 | |
|
193 | 193 | assert 'requesting all changes' in stdout |
|
194 | 194 | assert 'adding changesets' in stdout |
|
195 | 195 | assert 'adding manifests' in stdout |
|
196 | 196 | assert 'adding file changes' in stdout |
|
197 | 197 | |
|
198 | 198 | assert stderr == '' |
|
199 | 199 | |
|
200 | 200 | def test_clone_git_repo_by_admin(self): |
|
201 | 201 | clone_url = _construct_url(GIT_REPO) |
|
202 | 202 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
203 | 203 | |
|
204 | 204 | assert 'Cloning into' in stdout + stderr |
|
205 | 205 | assert stderr == '' or stdout == '' |
|
206 | 206 | |
|
207 | 207 | def test_clone_wrong_credentials_hg(self): |
|
208 | 208 | clone_url = _construct_url(HG_REPO, passwd='bad!') |
|
209 | 209 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
210 | 210 | assert 'abort: authorization failed' in stderr |
|
211 | 211 | |
|
212 | 212 | def test_clone_wrong_credentials_git(self): |
|
213 | 213 | clone_url = _construct_url(GIT_REPO, passwd='bad!') |
|
214 | 214 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
215 | 215 | assert 'fatal: Authentication failed' in stderr |
|
216 | 216 | |
|
217 | 217 | def test_clone_git_dir_as_hg(self): |
|
218 | 218 | clone_url = _construct_url(GIT_REPO) |
|
219 | 219 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
220 | 220 | assert 'HTTP Error 404: Not Found' in stderr |
|
221 | 221 | |
|
222 | 222 | def test_clone_hg_repo_as_git(self): |
|
223 | 223 | clone_url = _construct_url(HG_REPO) |
|
224 | 224 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
225 | 225 | assert 'not found' in stderr |
|
226 | 226 | |
|
227 | 227 | def test_clone_non_existing_path_hg(self): |
|
228 | 228 | clone_url = _construct_url('trololo') |
|
229 | 229 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
230 | 230 | assert 'HTTP Error 404: Not Found' in stderr |
|
231 | 231 | |
|
232 | 232 | def test_clone_non_existing_path_git(self): |
|
233 | 233 | clone_url = _construct_url('trololo') |
|
234 | 234 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
235 | 235 | assert 'not found' in stderr |
|
236 | 236 | |
|
237 | 237 | def test_push_new_file_hg(self): |
|
238 | 238 | DEST = _get_tmp_dir() |
|
239 | 239 | clone_url = _construct_url(HG_REPO, dest=DEST) |
|
240 | 240 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
241 | 241 | |
|
242 | 242 | stdout, stderr = _add_files_and_push('hg', DEST) |
|
243 | 243 | |
|
244 | 244 | assert 'pushing to' in stdout |
|
245 | 245 | assert 'Repository size' in stdout |
|
246 | 246 | assert 'Last revision is now' in stdout |
|
247 | 247 | |
|
248 | 248 | def test_push_new_file_git(self): |
|
249 | 249 | DEST = _get_tmp_dir() |
|
250 | 250 | clone_url = _construct_url(GIT_REPO, dest=DEST) |
|
251 | 251 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
252 | 252 | |
|
253 | 253 | # commit some stuff into this repo |
|
254 | 254 | stdout, stderr = _add_files_and_push('git', DEST) |
|
255 | 255 | |
|
256 | 256 | print [(x.repo_full_path,x.repo_path) for x in Repository.get_all()] |
|
257 | 257 | _check_proper_git_push(stdout, stderr) |
|
258 | 258 | |
|
259 | 259 | def test_push_invalidates_cache_hg(self): |
|
260 | 260 | key = CacheInvalidation.query().filter(CacheInvalidation.cache_key |
|
261 | 261 | ==HG_REPO).scalar() |
|
262 | 262 | if not key: |
|
263 | 263 | key = CacheInvalidation(HG_REPO, HG_REPO) |
|
264 | 264 | |
|
265 | 265 | key.cache_active = True |
|
266 | 266 | Session().add(key) |
|
267 | 267 | Session().commit() |
|
268 | 268 | |
|
269 | 269 | DEST = _get_tmp_dir() |
|
270 | 270 | clone_url = _construct_url(HG_REPO, dest=DEST) |
|
271 | 271 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
272 | 272 | |
|
273 | 273 | stdout, stderr = _add_files_and_push('hg', DEST, files_no=1) |
|
274 | 274 | |
|
275 | 275 | key = CacheInvalidation.query().filter(CacheInvalidation.cache_key |
|
276 | 276 | ==HG_REPO).one() |
|
277 | 277 | self.assertEqual(key.cache_active, False) |
|
278 | 278 | |
|
279 | 279 | def test_push_invalidates_cache_git(self): |
|
280 | 280 | key = CacheInvalidation.query().filter(CacheInvalidation.cache_key |
|
281 | 281 | ==GIT_REPO).scalar() |
|
282 | 282 | if not key: |
|
283 | 283 | key = CacheInvalidation(GIT_REPO, GIT_REPO) |
|
284 | 284 | |
|
285 | 285 | key.cache_active = True |
|
286 | 286 | Session().add(key) |
|
287 | 287 | Session().commit() |
|
288 | 288 | |
|
289 | 289 | DEST = _get_tmp_dir() |
|
290 | 290 | clone_url = _construct_url(GIT_REPO, dest=DEST) |
|
291 | 291 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
292 | 292 | |
|
293 | 293 | # commit some stuff into this repo |
|
294 | 294 | stdout, stderr = _add_files_and_push('git', DEST, files_no=1) |
|
295 | 295 | _check_proper_git_push(stdout, stderr) |
|
296 | 296 | |
|
297 | 297 | key = CacheInvalidation.query().filter(CacheInvalidation.cache_key |
|
298 | 298 | ==GIT_REPO).one() |
|
299 | 299 | print CacheInvalidation.get_all() |
|
300 | 300 | self.assertEqual(key.cache_active, False) |
|
301 | 301 | |
|
302 | 302 | def test_push_wrong_credentials_hg(self): |
|
303 | 303 | DEST = _get_tmp_dir() |
|
304 | 304 | clone_url = _construct_url(HG_REPO, dest=DEST) |
|
305 | 305 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
306 | 306 | |
|
307 | 307 | stdout, stderr = _add_files_and_push('hg', DEST, user='bad', |
|
308 | 308 | passwd='name') |
|
309 | 309 | |
|
310 | 310 | assert 'abort: authorization failed' in stderr |
|
311 | 311 | |
|
312 | 312 | def test_push_wrong_credentials_git(self): |
|
313 | 313 | DEST = _get_tmp_dir() |
|
314 | 314 | clone_url = _construct_url(GIT_REPO, dest=DEST) |
|
315 | 315 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
316 | 316 | |
|
317 | 317 | stdout, stderr = _add_files_and_push('git', DEST, user='bad', |
|
318 | 318 | passwd='name') |
|
319 | 319 | |
|
320 | 320 | assert 'fatal: Authentication failed' in stderr |
|
321 | 321 | |
|
322 | 322 | def test_push_back_to_wrong_url_hg(self): |
|
323 | 323 | DEST = _get_tmp_dir() |
|
324 | 324 | clone_url = _construct_url(HG_REPO, dest=DEST) |
|
325 | 325 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
326 | 326 | |
|
327 | 327 | stdout, stderr = _add_files_and_push('hg', DEST, |
|
328 | 328 | clone_url='http://%s/tmp' % HOST) |
|
329 | 329 | |
|
330 | 330 | assert 'HTTP Error 404: Not Found' in stderr |
|
331 | 331 | |
|
332 | 332 | def test_push_back_to_wrong_url_git(self): |
|
333 | 333 | DEST = _get_tmp_dir() |
|
334 | 334 | clone_url = _construct_url(GIT_REPO, dest=DEST) |
|
335 | 335 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
336 | 336 | |
|
337 | 337 | stdout, stderr = _add_files_and_push('git', DEST, |
|
338 | 338 | clone_url='http://%s/tmp' % HOST) |
|
339 | 339 | |
|
340 | 340 | assert 'not found' in stderr |
|
341 | 341 | |
|
342 | 342 | def test_clone_and_create_lock_hg(self): |
|
343 | 343 | # enable locking |
|
344 | 344 | r = Repository.get_by_repo_name(HG_REPO) |
|
345 | 345 | r.enable_locking = True |
|
346 | 346 | Session().add(r) |
|
347 | 347 | Session().commit() |
|
348 | 348 | # clone |
|
349 | 349 | clone_url = _construct_url(HG_REPO) |
|
350 | 350 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
351 | 351 | |
|
352 | 352 | #check if lock was made |
|
353 | 353 | r = Repository.get_by_repo_name(HG_REPO) |
|
354 | 354 | assert r.locked[0] == User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id |
|
355 | 355 | |
|
356 | 356 | def test_clone_and_create_lock_git(self): |
|
357 | 357 | # enable locking |
|
358 | 358 | r = Repository.get_by_repo_name(GIT_REPO) |
|
359 | 359 | r.enable_locking = True |
|
360 | 360 | Session().add(r) |
|
361 | 361 | Session().commit() |
|
362 | 362 | # clone |
|
363 | 363 | clone_url = _construct_url(GIT_REPO) |
|
364 | 364 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
365 | 365 | |
|
366 | 366 | #check if lock was made |
|
367 | 367 | r = Repository.get_by_repo_name(GIT_REPO) |
|
368 | 368 | assert r.locked[0] == User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id |
|
369 | 369 | |
|
370 | 370 | def test_clone_after_repo_was_locked_hg(self): |
|
371 | 371 | #lock repo |
|
372 | 372 | r = Repository.get_by_repo_name(HG_REPO) |
|
373 | 373 | Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id) |
|
374 | 374 | #pull fails since repo is locked |
|
375 | 375 | clone_url = _construct_url(HG_REPO) |
|
376 | 376 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
377 | 377 | msg = ("""abort: HTTP Error 423: Repository `%s` locked by user `%s`""" |
|
378 | 378 | % (HG_REPO, TEST_USER_ADMIN_LOGIN)) |
|
379 | 379 | assert msg in stderr |
|
380 | 380 | |
|
381 | 381 | def test_clone_after_repo_was_locked_git(self): |
|
382 | 382 | #lock repo |
|
383 | 383 | r = Repository.get_by_repo_name(GIT_REPO) |
|
384 | 384 | Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id) |
|
385 | 385 | #pull fails since repo is locked |
|
386 | 386 | clone_url = _construct_url(GIT_REPO) |
|
387 | 387 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
388 | 388 | msg = ("""The requested URL returned error: 423""") |
|
389 | 389 | assert msg in stderr |
|
390 | 390 | |
|
391 | 391 | def test_push_on_locked_repo_by_other_user_hg(self): |
|
392 | 392 | #clone some temp |
|
393 | 393 | DEST = _get_tmp_dir() |
|
394 | 394 | clone_url = _construct_url(HG_REPO, dest=DEST) |
|
395 | 395 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
396 | 396 | |
|
397 | 397 | #lock repo |
|
398 | 398 | r = Repository.get_by_repo_name(HG_REPO) |
|
399 | 399 | # let this user actually push ! |
|
400 | 400 | RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, |
|
401 | 401 | perm='repository.write') |
|
402 | 402 | Session().commit() |
|
403 | 403 | Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id) |
|
404 | 404 | |
|
405 | 405 | #push fails repo is locked by other user ! |
|
406 | 406 | stdout, stderr = _add_files_and_push('hg', DEST, |
|
407 | 407 | user=TEST_USER_REGULAR_LOGIN, |
|
408 | 408 | passwd=TEST_USER_REGULAR_PASS) |
|
409 | 409 | msg = ("""abort: HTTP Error 423: Repository `%s` locked by user `%s`""" |
|
410 | 410 | % (HG_REPO, TEST_USER_ADMIN_LOGIN)) |
|
411 | 411 | assert msg in stderr |
|
412 | 412 | |
|
413 | 413 | def test_push_on_locked_repo_by_other_user_git(self): |
|
414 | 414 | #clone some temp |
|
415 | 415 | DEST = _get_tmp_dir() |
|
416 | 416 | clone_url = _construct_url(GIT_REPO, dest=DEST) |
|
417 | 417 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
418 | 418 | |
|
419 | 419 | #lock repo |
|
420 | 420 | r = Repository.get_by_repo_name(GIT_REPO) |
|
421 | 421 | # let this user actually push ! |
|
422 | 422 | RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, |
|
423 | 423 | perm='repository.write') |
|
424 | 424 | Session().commit() |
|
425 | 425 | Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id) |
|
426 | 426 | |
|
427 | 427 | #push fails repo is locked by other user ! |
|
428 | 428 | stdout, stderr = _add_files_and_push('git', DEST, |
|
429 | 429 | user=TEST_USER_REGULAR_LOGIN, |
|
430 | 430 | passwd=TEST_USER_REGULAR_PASS) |
|
431 | 431 | err = 'Repository `%s` locked by user `%s`' % (GIT_REPO, TEST_USER_ADMIN_LOGIN) |
|
432 | 432 | assert err in stderr |
|
433 | 433 | |
|
434 | 434 | #TODO: fix this somehow later on Git, Git is stupid and even if we throw |
|
435 | 435 | #back 423 to it, it makes ANOTHER request and we fail there with 405 :/ |
|
436 | 436 | |
|
437 | 437 | msg = ("""abort: HTTP Error 423: Repository `%s` locked by user `%s`""" |
|
438 | 438 | % (GIT_REPO, TEST_USER_ADMIN_LOGIN)) |
|
439 | 439 | #msg = "405 Method Not Allowed" |
|
440 | 440 | #assert msg in stderr |
|
441 | 441 | |
|
442 | 442 | def test_push_unlocks_repository_hg(self): |
|
443 | 443 | # enable locking |
|
444 | 444 | r = Repository.get_by_repo_name(HG_REPO) |
|
445 | 445 | r.enable_locking = True |
|
446 | 446 | Session().add(r) |
|
447 | 447 | Session().commit() |
|
448 | 448 | #clone some temp |
|
449 | 449 | DEST = _get_tmp_dir() |
|
450 | 450 | clone_url = _construct_url(HG_REPO, dest=DEST) |
|
451 | 451 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
452 | 452 | |
|
453 | 453 | #check for lock repo after clone |
|
454 | 454 | r = Repository.get_by_repo_name(HG_REPO) |
|
455 | 455 | uid = User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id |
|
456 | 456 | assert r.locked[0] == uid |
|
457 | 457 | |
|
458 | 458 | #push is ok and repo is now unlocked |
|
459 | 459 | stdout, stderr = _add_files_and_push('hg', DEST) |
|
460 | 460 | assert ('remote: Released lock on repo `%s`' % HG_REPO) in stdout |
|
461 | 461 | #we need to cleanup the Session Here ! |
|
462 | 462 | Session.remove() |
|
463 | 463 | r = Repository.get_by_repo_name(HG_REPO) |
|
464 | 464 | assert r.locked == [None, None] |
|
465 | 465 | |
|
466 | 466 | #TODO: fix me ! somehow during tests hooks don't get called on Git |
|
467 | 467 | def test_push_unlocks_repository_git(self): |
|
468 | 468 | # enable locking |
|
469 | 469 | r = Repository.get_by_repo_name(GIT_REPO) |
|
470 | 470 | r.enable_locking = True |
|
471 | 471 | Session().add(r) |
|
472 | 472 | Session().commit() |
|
473 | 473 | #clone some temp |
|
474 | 474 | DEST = _get_tmp_dir() |
|
475 | 475 | clone_url = _construct_url(GIT_REPO, dest=DEST) |
|
476 | 476 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
477 | 477 | |
|
478 | 478 | #check for lock repo after clone |
|
479 | 479 | r = Repository.get_by_repo_name(GIT_REPO) |
|
480 | 480 | assert r.locked[0] == User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id |
|
481 | 481 | |
|
482 | 482 | #push is ok and repo is now unlocked |
|
483 | 483 | stdout, stderr = _add_files_and_push('git', DEST) |
|
484 | 484 | _check_proper_git_push(stdout, stderr) |
|
485 | 485 | |
|
486 | 486 | #assert ('remote: Released lock on repo `%s`' % GIT_REPO) in stdout |
|
487 | 487 | #we need to cleanup the Session Here ! |
|
488 | 488 | Session.remove() |
|
489 | 489 | r = Repository.get_by_repo_name(GIT_REPO) |
|
490 | 490 | assert r.locked == [None, None] |
|
491 | 491 | |
|
492 | 492 | def test_ip_restriction_hg(self): |
|
493 | 493 | user_model = UserModel() |
|
494 | 494 | try: |
|
495 | 495 | user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32') |
|
496 | 496 | Session().commit() |
|
497 | 497 | clone_url = _construct_url(HG_REPO) |
|
498 | 498 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
499 | 499 | assert 'abort: HTTP Error 403: Forbidden' in stderr |
|
500 | 500 | finally: |
|
501 | 501 | #release IP restrictions |
|
502 | 502 | for ip in UserIpMap.getAll(): |
|
503 | 503 | UserIpMap.delete(ip.ip_id) |
|
504 | 504 | Session().commit() |
|
505 | 505 | |
|
506 | 506 | time.sleep(2) |
|
507 | 507 | clone_url = _construct_url(HG_REPO) |
|
508 | 508 | stdout, stderr = Command('/tmp').execute('hg clone', clone_url) |
|
509 | 509 | |
|
510 | 510 | assert 'requesting all changes' in stdout |
|
511 | 511 | assert 'adding changesets' in stdout |
|
512 | 512 | assert 'adding manifests' in stdout |
|
513 | 513 | assert 'adding file changes' in stdout |
|
514 | 514 | |
|
515 | 515 | assert stderr == '' |
|
516 | 516 | |
|
517 | 517 | def test_ip_restriction_git(self): |
|
518 | 518 | user_model = UserModel() |
|
519 | 519 | try: |
|
520 | 520 | user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32') |
|
521 | 521 | Session().commit() |
|
522 | 522 | clone_url = _construct_url(GIT_REPO) |
|
523 | 523 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
524 | 524 | # The message apparently changed in Git 1.8.3, so match it loosely. |
|
525 | 525 | assert re.search(r'\b403\b', stderr) |
|
526 | 526 | finally: |
|
527 | 527 | #release IP restrictions |
|
528 | 528 | for ip in UserIpMap.getAll(): |
|
529 | 529 | UserIpMap.delete(ip.ip_id) |
|
530 | 530 | Session().commit() |
|
531 | 531 | |
|
532 | 532 | time.sleep(2) |
|
533 | 533 | clone_url = _construct_url(GIT_REPO) |
|
534 | 534 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
535 | 535 | |
|
536 | 536 | assert 'Cloning into' in stdout + stderr |
|
537 | 537 | assert stderr == '' or stdout == '' |
@@ -1,374 +1,374 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.tests.other.test_libs |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Package for testing various lib/helper functions in kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Jun 9, 2011 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import datetime |
|
29 | 29 | import hashlib |
|
30 | 30 | import mock |
|
31 | 31 | from kallithea.tests import * |
|
32 | 32 | from kallithea.lib.utils2 import AttributeDict |
|
33 | 33 | from kallithea.model.db import Repository |
|
34 | 34 | |
|
35 | 35 | proto = 'http' |
|
36 | 36 | TEST_URLS = [ |
|
37 | 37 | ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
38 | 38 | '%s://127.0.0.1' % proto), |
|
39 | 39 | ('%s://username@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
40 | 40 | '%s://127.0.0.1' % proto), |
|
41 | 41 | ('%s://username:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
42 | 42 | '%s://127.0.0.1' % proto), |
|
43 | 43 | ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'], |
|
44 | 44 | '%s://127.0.0.1:8080' % proto), |
|
45 |
('%s:// |
|
|
46 |
'%s:// |
|
|
47 |
('%s://user:pass@ |
|
|
45 | ('%s://example.com' % proto, ['%s://' % proto, 'example.com'], | |
|
46 | '%s://example.com' % proto), | |
|
47 | ('%s://user:pass@example.com:8080' % proto, ['%s://' % proto, 'example.com', | |
|
48 | 48 | '8080'], |
|
49 |
'%s:// |
|
|
49 | '%s://example.com:8080' % proto), | |
|
50 | 50 | ] |
|
51 | 51 | |
|
52 | 52 | proto = 'https' |
|
53 | 53 | TEST_URLS += [ |
|
54 | 54 | ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
55 | 55 | '%s://127.0.0.1' % proto), |
|
56 | 56 | ('%s://username@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
57 | 57 | '%s://127.0.0.1' % proto), |
|
58 | 58 | ('%s://username:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
59 | 59 | '%s://127.0.0.1' % proto), |
|
60 | 60 | ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'], |
|
61 | 61 | '%s://127.0.0.1:8080' % proto), |
|
62 |
('%s:// |
|
|
63 |
'%s:// |
|
|
64 |
('%s://user:pass@ |
|
|
62 | ('%s://example.com' % proto, ['%s://' % proto, 'example.com'], | |
|
63 | '%s://example.com' % proto), | |
|
64 | ('%s://user:pass@example.com:8080' % proto, ['%s://' % proto, 'example.com', | |
|
65 | 65 | '8080'], |
|
66 |
'%s:// |
|
|
66 | '%s://example.com:8080' % proto), | |
|
67 | 67 | ] |
|
68 | 68 | |
|
69 | 69 | |
|
70 | 70 | class TestLibs(BaseTestCase): |
|
71 | 71 | |
|
72 | 72 | @parameterized.expand(TEST_URLS) |
|
73 | 73 | def test_uri_filter(self, test_url, expected, expected_creds): |
|
74 | 74 | from kallithea.lib.utils2 import uri_filter |
|
75 | 75 | self.assertEqual(uri_filter(test_url), expected) |
|
76 | 76 | |
|
77 | 77 | @parameterized.expand(TEST_URLS) |
|
78 | 78 | def test_credentials_filter(self, test_url, expected, expected_creds): |
|
79 | 79 | from kallithea.lib.utils2 import credentials_filter |
|
80 | 80 | self.assertEqual(credentials_filter(test_url), expected_creds) |
|
81 | 81 | |
|
82 | 82 | @parameterized.expand([('t', True), |
|
83 | 83 | ('true', True), |
|
84 | 84 | ('y', True), |
|
85 | 85 | ('yes', True), |
|
86 | 86 | ('on', True), |
|
87 | 87 | ('1', True), |
|
88 | 88 | ('Y', True), |
|
89 | 89 | ('yeS', True), |
|
90 | 90 | ('Y', True), |
|
91 | 91 | ('TRUE', True), |
|
92 | 92 | ('T', True), |
|
93 | 93 | ('False', False), |
|
94 | 94 | ('F', False), |
|
95 | 95 | ('FALSE', False), |
|
96 | 96 | ('0', False), |
|
97 | 97 | ('-1', False), |
|
98 | 98 | ('', False) |
|
99 | 99 | ]) |
|
100 | 100 | def test_str2bool(self, str_bool, expected): |
|
101 | 101 | from kallithea.lib.utils2 import str2bool |
|
102 | 102 | self.assertEqual(str2bool(str_bool), expected) |
|
103 | 103 | |
|
104 | 104 | def test_mention_extractor(self): |
|
105 | 105 | from kallithea.lib.utils2 import extract_mentioned_users |
|
106 | 106 | sample = ( |
|
107 |
"@first hi there @world here's my email username@e |
|
|
107 | "@first hi there @world here's my email username@example.com " | |
|
108 | 108 | "@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three " |
|
109 | 109 | "@UPPER @cAmEL @2one_more22 @john please see this http://org.pl " |
|
110 | 110 | "@marian.user just do it @marco-polo and next extract @marco_polo " |
|
111 |
"user.dot hej ! not-needed maril@ |
|
|
111 | "user.dot hej ! not-needed maril@example.com" | |
|
112 | 112 | ) |
|
113 | 113 | |
|
114 | 114 | s = sorted([ |
|
115 | 115 | '2one_more22', 'first', 'lukaszb', 'one', 'one_more22', 'UPPER', 'cAmEL', 'john', |
|
116 | 116 | 'marian.user', 'marco-polo', 'marco_polo', 'world'], key=lambda k: k.lower()) |
|
117 | 117 | self.assertEqual(s, extract_mentioned_users(sample)) |
|
118 | 118 | |
|
119 | 119 | @parameterized.expand([ |
|
120 | 120 | (dict(), u'just now'), |
|
121 | 121 | (dict(seconds= -1), u'1 second ago'), |
|
122 | 122 | (dict(seconds= -60 * 2), u'2 minutes ago'), |
|
123 | 123 | (dict(hours= -1), u'1 hour ago'), |
|
124 | 124 | (dict(hours= -24), u'1 day ago'), |
|
125 | 125 | (dict(hours= -24 * 5), u'5 days ago'), |
|
126 | 126 | (dict(months= -1), u'1 month ago'), |
|
127 | 127 | (dict(months= -1, days= -2), u'1 month and 2 days ago'), |
|
128 | 128 | (dict(months= -1, days= -20), u'1 month and 19 days ago'), |
|
129 | 129 | (dict(years= -1, months= -1), u'1 year and 1 month ago'), |
|
130 | 130 | (dict(years= -1, months= -10), u'1 year and 10 months ago'), |
|
131 | 131 | (dict(years= -2, months= -4), u'2 years and 4 months ago'), |
|
132 | 132 | (dict(years= -2, months= -11), u'2 years and 11 months ago'), |
|
133 | 133 | (dict(years= -3, months= -2), u'3 years and 2 months ago'), |
|
134 | 134 | ]) |
|
135 | 135 | def test_age(self, age_args, expected): |
|
136 | 136 | from kallithea.lib.utils2 import age |
|
137 | 137 | from dateutil import relativedelta |
|
138 | 138 | n = datetime.datetime(year=2012, month=5, day=17) |
|
139 | 139 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) |
|
140 | 140 | self.assertEqual(age(n + delt(**age_args), now=n), expected) |
|
141 | 141 | |
|
142 | 142 | @parameterized.expand([ |
|
143 | 143 | (dict(), u'just now'), |
|
144 | 144 | (dict(seconds= -1), u'1 second ago'), |
|
145 | 145 | (dict(seconds= -60 * 2), u'2 minutes ago'), |
|
146 | 146 | (dict(hours= -1), u'1 hour ago'), |
|
147 | 147 | (dict(hours= -24), u'1 day ago'), |
|
148 | 148 | (dict(hours= -24 * 5), u'5 days ago'), |
|
149 | 149 | (dict(months= -1), u'1 month ago'), |
|
150 | 150 | (dict(months= -1, days= -2), u'1 month ago'), |
|
151 | 151 | (dict(months= -1, days= -20), u'1 month ago'), |
|
152 | 152 | (dict(years= -1, months= -1), u'13 months ago'), |
|
153 | 153 | (dict(years= -1, months= -10), u'22 months ago'), |
|
154 | 154 | (dict(years= -2, months= -4), u'2 years ago'), |
|
155 | 155 | (dict(years= -2, months= -11), u'3 years ago'), |
|
156 | 156 | (dict(years= -3, months= -2), u'3 years ago'), |
|
157 | 157 | (dict(years= -4, months= -8), u'5 years ago'), |
|
158 | 158 | ]) |
|
159 | 159 | def test_age_short(self, age_args, expected): |
|
160 | 160 | from kallithea.lib.utils2 import age |
|
161 | 161 | from dateutil import relativedelta |
|
162 | 162 | n = datetime.datetime(year=2012, month=5, day=17) |
|
163 | 163 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) |
|
164 | 164 | self.assertEqual(age(n + delt(**age_args), show_short_version=True, now=n), expected) |
|
165 | 165 | |
|
166 | 166 | @parameterized.expand([ |
|
167 | 167 | (dict(), u'just now'), |
|
168 | 168 | (dict(seconds=1), u'in 1 second'), |
|
169 | 169 | (dict(seconds=60 * 2), u'in 2 minutes'), |
|
170 | 170 | (dict(hours=1), u'in 1 hour'), |
|
171 | 171 | (dict(hours=24), u'in 1 day'), |
|
172 | 172 | (dict(hours=24 * 5), u'in 5 days'), |
|
173 | 173 | (dict(months=1), u'in 1 month'), |
|
174 | 174 | (dict(months=1, days=1), u'in 1 month and 1 day'), |
|
175 | 175 | (dict(years=1, months=1), u'in 1 year and 1 month') |
|
176 | 176 | ]) |
|
177 | 177 | def test_age_in_future(self, age_args, expected): |
|
178 | 178 | from kallithea.lib.utils2 import age |
|
179 | 179 | from dateutil import relativedelta |
|
180 | 180 | n = datetime.datetime(year=2012, month=5, day=17) |
|
181 | 181 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) |
|
182 | 182 | self.assertEqual(age(n + delt(**age_args), now=n), expected) |
|
183 | 183 | |
|
184 | 184 | def test_tag_exctrator(self): |
|
185 | 185 | sample = ( |
|
186 | 186 | "hello pta[tag] gog [[]] [[] sda ero[or]d [me =>>< sa]" |
|
187 |
"[requires] [stale] [see<>=>] [see => http:// |
|
|
187 | "[requires] [stale] [see<>=>] [see => http://example.com]" | |
|
188 | 188 | "[requires => url] [lang => python] [just a tag]" |
|
189 | 189 | "[,d] [ => ULR ] [obsolete] [desc]]" |
|
190 | 190 | ) |
|
191 | 191 | from kallithea.lib.helpers import urlify_text |
|
192 | 192 | res = urlify_text(sample, stylize=True) |
|
193 | 193 | self.assertIn('<div class="metatag" tag="tag">tag</div>', res) |
|
194 | 194 | self.assertIn('<div class="metatag" tag="obsolete">obsolete</div>', res) |
|
195 | 195 | self.assertIn('<div class="metatag" tag="stale">stale</div>', res) |
|
196 | 196 | self.assertIn('<div class="metatag" tag="lang">python</div>', res) |
|
197 | 197 | self.assertIn('<div class="metatag" tag="requires">requires => <a href="/url">url</a></div>', res) |
|
198 | 198 | self.assertIn('<div class="metatag" tag="tag">tag</div>', res) |
|
199 | 199 | |
|
200 | 200 | def test_alternative_gravatar(self): |
|
201 | 201 | from kallithea.lib.helpers import gravatar_url |
|
202 | 202 | _md5 = lambda s: hashlib.md5(s).hexdigest() |
|
203 | 203 | |
|
204 | 204 | #mock pylons.url |
|
205 | 205 | class fake_url(object): |
|
206 | 206 | @classmethod |
|
207 | 207 | def current(cls, *args, **kwargs): |
|
208 |
return 'https:// |
|
|
208 | return 'https://example.com' | |
|
209 | 209 | |
|
210 | 210 | #mock pylons.tmpl_context |
|
211 | 211 | def fake_tmpl_context(_url): |
|
212 | 212 | _c = AttributeDict() |
|
213 | 213 | _c.visual = AttributeDict() |
|
214 | 214 | _c.visual.use_gravatar = True |
|
215 | 215 | _c.visual.gravatar_url = _url |
|
216 | 216 | |
|
217 | 217 | return _c |
|
218 | 218 | |
|
219 | 219 | |
|
220 | 220 | with mock.patch('pylons.url', fake_url): |
|
221 |
fake = fake_tmpl_context(_url='http:// |
|
|
221 | fake = fake_tmpl_context(_url='http://example.com/{email}') | |
|
222 | 222 | with mock.patch('pylons.tmpl_context', fake): |
|
223 | 223 | from pylons import url |
|
224 |
assert url.current() == 'https:// |
|
|
225 |
grav = gravatar_url(email_address='test@ |
|
|
226 |
assert grav == 'http:// |
|
|
224 | assert url.current() == 'https://example.com' | |
|
225 | grav = gravatar_url(email_address='test@example.com', size=24) | |
|
226 | assert grav == 'http://example.com/test@example.com' | |
|
227 | 227 | |
|
228 |
fake = fake_tmpl_context(_url='http:// |
|
|
228 | fake = fake_tmpl_context(_url='http://example.com/{email}') | |
|
229 | 229 | with mock.patch('pylons.tmpl_context', fake): |
|
230 |
grav = gravatar_url(email_address='test@ |
|
|
231 |
assert grav == 'http:// |
|
|
230 | grav = gravatar_url(email_address='test@example.com', size=24) | |
|
231 | assert grav == 'http://example.com/test@example.com' | |
|
232 | 232 | |
|
233 |
fake = fake_tmpl_context(_url='http:// |
|
|
233 | fake = fake_tmpl_context(_url='http://example.com/{md5email}') | |
|
234 | 234 | with mock.patch('pylons.tmpl_context', fake): |
|
235 |
em = 'test@ |
|
|
235 | em = 'test@example.com' | |
|
236 | 236 | grav = gravatar_url(email_address=em, size=24) |
|
237 |
assert grav == 'http:// |
|
|
237 | assert grav == 'http://example.com/%s' % (_md5(em)) | |
|
238 | 238 | |
|
239 |
fake = fake_tmpl_context(_url='http:// |
|
|
239 | fake = fake_tmpl_context(_url='http://example.com/{md5email}/{size}') | |
|
240 | 240 | with mock.patch('pylons.tmpl_context', fake): |
|
241 |
em = 'test@ |
|
|
241 | em = 'test@example.com' | |
|
242 | 242 | grav = gravatar_url(email_address=em, size=24) |
|
243 |
assert grav == 'http:// |
|
|
243 | assert grav == 'http://example.com/%s/%s' % (_md5(em), 24) | |
|
244 | 244 | |
|
245 | 245 | fake = fake_tmpl_context(_url='{scheme}://{netloc}/{md5email}/{size}') |
|
246 | 246 | with mock.patch('pylons.tmpl_context', fake): |
|
247 |
em = 'test@ |
|
|
247 | em = 'test@example.com' | |
|
248 | 248 | grav = gravatar_url(email_address=em, size=24) |
|
249 |
assert grav == 'https:// |
|
|
249 | assert grav == 'https://example.com/%s/%s' % (_md5(em), 24) | |
|
250 | 250 | |
|
251 | 251 | @parameterized.expand([ |
|
252 | 252 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '', 'http://vps1:8000/group/repo1'), |
|
253 | 253 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'username'}, '', 'http://username@vps1:8000/group/repo1'), |
|
254 | 254 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '/prefix', 'http://vps1:8000/prefix/group/repo1'), |
|
255 | 255 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/prefix', 'http://user@vps1:8000/prefix/group/repo1'), |
|
256 | 256 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'username'}, '/prefix', 'http://username@vps1:8000/prefix/group/repo1'), |
|
257 | 257 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/prefix/', 'http://user@vps1:8000/prefix/group/repo1'), |
|
258 | 258 | (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'username'}, '/prefix/', 'http://username@vps1:8000/prefix/group/repo1'), |
|
259 | 259 | ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {}, '', 'http://vps1:8000/_23'), |
|
260 | 260 | ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'username'}, '', 'http://username@vps1:8000/_23'), |
|
261 | 261 | ('http://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'username'}, '', 'http://username@vps1:8000/_23'), |
|
262 | 262 | ('http://{netloc}/_{repoid}', 'group/repo1', {'user': 'username'}, '', 'http://vps1:8000/_23'), |
|
263 |
('https://{user}@proxy1. |
|
|
264 |
('https://{user}@proxy1. |
|
|
265 |
('https://proxy1. |
|
|
263 | ('https://{user}@proxy1.example.com/{repo}', 'group/repo1', {'user': 'username'}, '', 'https://username@proxy1.example.com/group/repo1'), | |
|
264 | ('https://{user}@proxy1.example.com/{repo}', 'group/repo1', {}, '', 'https://proxy1.example.com/group/repo1'), | |
|
265 | ('https://proxy1.example.com/{user}/{repo}', 'group/repo1', {'user': 'username'}, '', 'https://proxy1.example.com/username/group/repo1'), | |
|
266 | 266 | ]) |
|
267 | 267 | def test_clone_url_generator(self, tmpl, repo_name, overrides, prefix, expected): |
|
268 | 268 | from kallithea.lib.utils2 import get_clone_url |
|
269 | 269 | clone_url = get_clone_url(uri_tmpl=tmpl, qualified_home_url='http://vps1:8000'+prefix, |
|
270 | 270 | repo_name=repo_name, repo_id=23, **overrides) |
|
271 | 271 | self.assertEqual(clone_url, expected) |
|
272 | 272 | |
|
273 | 273 | def _quick_url(self, text, tmpl="""<a class="revision-link" href="%s">%s</a>""", url_=None): |
|
274 | 274 | """ |
|
275 | 275 | Changes `some text url[foo]` => `some text <a href="/">foo</a> |
|
276 | 276 | |
|
277 | 277 | :param text: |
|
278 | 278 | """ |
|
279 | 279 | import re |
|
280 | 280 | # quickly change expected url[] into a link |
|
281 | 281 | URL_PAT = re.compile(r'(?:url\[)(.+?)(?:\])') |
|
282 | 282 | |
|
283 | 283 | def url_func(match_obj): |
|
284 | 284 | _url = match_obj.groups()[0] |
|
285 | 285 | return tmpl % (url_ or '/some-url', _url) |
|
286 | 286 | return URL_PAT.sub(url_func, text) |
|
287 | 287 | |
|
288 | 288 | @parameterized.expand([ |
|
289 | 289 | ("", |
|
290 | 290 | ""), |
|
291 | 291 | ("git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68", |
|
292 | 292 | "git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68"), |
|
293 | 293 | ("from rev 000000000000", |
|
294 | 294 | "from rev url[000000000000]"), |
|
295 | 295 | ("from rev 000000000000123123 also rev 000000000000", |
|
296 | 296 | "from rev url[000000000000123123] also rev url[000000000000]"), |
|
297 | 297 | ("this should-000 00", |
|
298 | 298 | "this should-000 00"), |
|
299 | 299 | ("longtextffffffffff rev 123123123123", |
|
300 | 300 | "longtextffffffffff rev url[123123123123]"), |
|
301 | 301 | ("rev ffffffffffffffffffffffffffffffffffffffffffffffffff", |
|
302 | 302 | "rev ffffffffffffffffffffffffffffffffffffffffffffffffff"), |
|
303 | 303 | ("ffffffffffff some text traalaa", |
|
304 | 304 | "url[ffffffffffff] some text traalaa"), |
|
305 | 305 | ("""Multi line |
|
306 | 306 | 123123123123 |
|
307 | 307 | some text 123123123123 |
|
308 | 308 | sometimes ! |
|
309 | 309 | """, |
|
310 | 310 | """Multi line |
|
311 | 311 | url[123123123123] |
|
312 | 312 | some text url[123123123123] |
|
313 | 313 | sometimes ! |
|
314 | 314 | """) |
|
315 | 315 | ]) |
|
316 | 316 | def test_urlify_changesets(self, sample, expected): |
|
317 | 317 | def fake_url(self, *args, **kwargs): |
|
318 | 318 | return '/some-url' |
|
319 | 319 | |
|
320 | 320 | expected = self._quick_url(expected) |
|
321 | 321 | |
|
322 | 322 | with mock.patch('pylons.url', fake_url): |
|
323 | 323 | from kallithea.lib.helpers import urlify_changesets |
|
324 | 324 | self.assertEqual(urlify_changesets(sample, 'repo_name'), expected) |
|
325 | 325 | |
|
326 | 326 | @parameterized.expand([ |
|
327 | 327 | ("", |
|
328 | 328 | "", |
|
329 | 329 | ""), |
|
330 | 330 | ("https://svn.apache.org/repos", |
|
331 | 331 | "url[https://svn.apache.org/repos]", |
|
332 | 332 | "https://svn.apache.org/repos"), |
|
333 | 333 | ("http://svn.apache.org/repos", |
|
334 | 334 | "url[http://svn.apache.org/repos]", |
|
335 | 335 | "http://svn.apache.org/repos"), |
|
336 | 336 | ("from rev a also rev http://google.com", |
|
337 | 337 | "from rev a also rev url[http://google.com]", |
|
338 | 338 | "http://google.com"), |
|
339 | 339 | ("""Multi line |
|
340 | https://foo.bar.com | |
|
340 | https://foo.bar.example.com | |
|
341 | 341 | some text lalala""", |
|
342 | 342 | """Multi line |
|
343 | url[https://foo.bar.com] | |
|
343 | url[https://foo.bar.example.com] | |
|
344 | 344 | some text lalala""", |
|
345 | "https://foo.bar.com") | |
|
345 | "https://foo.bar.example.com") | |
|
346 | 346 | ]) |
|
347 | 347 | def test_urlify_test(self, sample, expected, url_): |
|
348 | 348 | from kallithea.lib.helpers import urlify_text |
|
349 | 349 | expected = self._quick_url(expected, |
|
350 | 350 | tmpl="""<a href="%s">%s</a>""", url_=url_) |
|
351 | 351 | self.assertEqual(urlify_text(sample), expected) |
|
352 | 352 | |
|
353 | 353 | @parameterized.expand([ |
|
354 | 354 | ("", None), |
|
355 | 355 | ("/_2", '2'), |
|
356 | 356 | ("_2", '2'), |
|
357 | 357 | ("/_2/", '2'), |
|
358 | 358 | ("_2/", '2'), |
|
359 | 359 | |
|
360 | 360 | ("/_21", '21'), |
|
361 | 361 | ("_21", '21'), |
|
362 | 362 | ("/_21/", '21'), |
|
363 | 363 | ("_21/", '21'), |
|
364 | 364 | |
|
365 | 365 | ("/_21/foobar", '21'), |
|
366 | 366 | ("_21/121", '21'), |
|
367 | 367 | ("/_21/_12", '21'), |
|
368 | 368 | ("_21/prefix/foo", '21'), |
|
369 | 369 | ]) |
|
370 | 370 | def test_get_repo_by_id(self, test, expected): |
|
371 | 371 | from kallithea.lib.utils import _extract_id_from_repo_name |
|
372 | 372 | _test = _extract_id_from_repo_name(test) |
|
373 | 373 | self.assertEqual(_test, expected, msg='url:%s, got:`%s` expected: `%s`' |
|
374 | 374 | % (test, _test, expected)) |
@@ -1,222 +1,222 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.tests.scripts.manual_test_concurrency |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Test suite for making push/pull operations |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Dec 30, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | |
|
27 | 27 | """ |
|
28 | 28 | |
|
29 | 29 | import os |
|
30 | 30 | import sys |
|
31 | 31 | import shutil |
|
32 | 32 | import logging |
|
33 | 33 | from os.path import join as jn |
|
34 | 34 | from os.path import dirname as dn |
|
35 | 35 | |
|
36 | 36 | from tempfile import _RandomNameSequence |
|
37 | 37 | from subprocess import Popen, PIPE |
|
38 | 38 | |
|
39 | 39 | from paste.deploy import appconfig |
|
40 | 40 | from sqlalchemy import engine_from_config |
|
41 | 41 | |
|
42 | 42 | from kallithea.lib.utils import add_cache |
|
43 | 43 | from kallithea.model import init_model |
|
44 | 44 | from kallithea.model import meta |
|
45 | 45 | from kallithea.model.db import User, Repository |
|
46 | 46 | from kallithea.lib.auth import get_crypt_password |
|
47 | 47 | |
|
48 | 48 | from kallithea.tests import TESTS_TMP_PATH, HG_REPO |
|
49 | 49 | from kallithea.config.environment import load_environment |
|
50 | 50 | |
|
51 | 51 | rel_path = dn(dn(dn(dn(os.path.abspath(__file__))))) |
|
52 | 52 | conf = appconfig('config:development.ini', relative_to=rel_path) |
|
53 | 53 | load_environment(conf.global_conf, conf.local_conf) |
|
54 | 54 | |
|
55 | 55 | add_cache(conf) |
|
56 | 56 | |
|
57 | 57 | USER = TEST_USER_ADMIN_LOGIN |
|
58 | 58 | PASS = TEST_USER_ADMIN_PASS |
|
59 | 59 | HOST = 'server.local' |
|
60 | 60 | METHOD = 'pull' |
|
61 | 61 | DEBUG = True |
|
62 | 62 | log = logging.getLogger(__name__) |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | class Command(object): |
|
66 | 66 | |
|
67 | 67 | def __init__(self, cwd): |
|
68 | 68 | self.cwd = cwd |
|
69 | 69 | |
|
70 | 70 | def execute(self, cmd, *args): |
|
71 | 71 | """Runs command on the system with given ``args``. |
|
72 | 72 | """ |
|
73 | 73 | |
|
74 | 74 | command = cmd + ' ' + ' '.join(args) |
|
75 | 75 | log.debug('Executing %s', command) |
|
76 | 76 | if DEBUG: |
|
77 | 77 | print command |
|
78 | 78 | p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd) |
|
79 | 79 | stdout, stderr = p.communicate() |
|
80 | 80 | if DEBUG: |
|
81 | 81 | print stdout, stderr |
|
82 | 82 | return stdout, stderr |
|
83 | 83 | |
|
84 | 84 | |
|
85 | 85 | def get_session(): |
|
86 | 86 | engine = engine_from_config(conf, 'sqlalchemy.db1.') |
|
87 | 87 | init_model(engine) |
|
88 | 88 | sa = meta.Session |
|
89 | 89 | return sa |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | def create_test_user(force=True): |
|
93 | 93 | print 'creating test user' |
|
94 | 94 | sa = get_session() |
|
95 | 95 | |
|
96 | 96 | user = sa.query(User).filter(User.username == USER).scalar() |
|
97 | 97 | |
|
98 | 98 | if force and user is not None: |
|
99 | 99 | print 'removing current user' |
|
100 | 100 | for repo in sa.query(Repository).filter(Repository.user == user).all(): |
|
101 | 101 | sa.delete(repo) |
|
102 | 102 | sa.delete(user) |
|
103 | 103 | sa.commit() |
|
104 | 104 | |
|
105 | 105 | if user is None or force: |
|
106 | 106 | print 'creating new one' |
|
107 | 107 | new_usr = User() |
|
108 | 108 | new_usr.username = USER |
|
109 | 109 | new_usr.password = get_crypt_password(PASS) |
|
110 |
new_usr.email = 'mail@ |
|
|
110 | new_usr.email = 'mail@example.com' | |
|
111 | 111 | new_usr.name = 'test' |
|
112 | 112 | new_usr.lastname = 'lasttestname' |
|
113 | 113 | new_usr.active = True |
|
114 | 114 | new_usr.admin = True |
|
115 | 115 | sa.add(new_usr) |
|
116 | 116 | sa.commit() |
|
117 | 117 | |
|
118 | 118 | print 'done' |
|
119 | 119 | |
|
120 | 120 | |
|
121 | 121 | def create_test_repo(force=True): |
|
122 | 122 | print 'creating test repo' |
|
123 | 123 | from kallithea.model.repo import RepoModel |
|
124 | 124 | sa = get_session() |
|
125 | 125 | |
|
126 | 126 | user = sa.query(User).filter(User.username == USER).scalar() |
|
127 | 127 | if user is None: |
|
128 | 128 | raise Exception('user not found') |
|
129 | 129 | |
|
130 | 130 | repo = sa.query(Repository).filter(Repository.repo_name == HG_REPO).scalar() |
|
131 | 131 | |
|
132 | 132 | if repo is None: |
|
133 | 133 | print 'repo not found creating' |
|
134 | 134 | |
|
135 | 135 | form_data = {'repo_name': HG_REPO, |
|
136 | 136 | 'repo_type': 'hg', |
|
137 | 137 | 'private':False, |
|
138 | 138 | 'clone_uri': '' } |
|
139 | 139 | rm = RepoModel(sa) |
|
140 | 140 | rm.base_path = '/home/hg' |
|
141 | 141 | rm.create(form_data, user) |
|
142 | 142 | |
|
143 | 143 | print 'done' |
|
144 | 144 | |
|
145 | 145 | |
|
146 | 146 | def set_anonymous_access(enable=True): |
|
147 | 147 | sa = get_session() |
|
148 | 148 | user = sa.query(User).filter(User.username == 'default').one() |
|
149 | 149 | user.active = enable |
|
150 | 150 | sa.add(user) |
|
151 | 151 | sa.commit() |
|
152 | 152 | |
|
153 | 153 | |
|
154 | 154 | def get_anonymous_access(): |
|
155 | 155 | sa = get_session() |
|
156 | 156 | return sa.query(User).filter(User.username == 'default').one().active |
|
157 | 157 | |
|
158 | 158 | |
|
159 | 159 | #============================================================================== |
|
160 | 160 | # TESTS |
|
161 | 161 | #============================================================================== |
|
162 | 162 | def test_clone_with_credentials(no_errors=False, repo=HG_REPO, method=METHOD, |
|
163 | 163 | seq=None, backend='hg'): |
|
164 | 164 | cwd = path = jn(TESTS_TMP_PATH, repo) |
|
165 | 165 | |
|
166 | 166 | if seq is None: |
|
167 | 167 | seq = _RandomNameSequence().next() |
|
168 | 168 | |
|
169 | 169 | try: |
|
170 | 170 | shutil.rmtree(path, ignore_errors=True) |
|
171 | 171 | os.makedirs(path) |
|
172 | 172 | #print 'made dirs %s' % jn(path) |
|
173 | 173 | except OSError: |
|
174 | 174 | raise |
|
175 | 175 | |
|
176 | 176 | clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \ |
|
177 | 177 | {'user': USER, |
|
178 | 178 | 'pass': PASS, |
|
179 | 179 | 'host': HOST, |
|
180 | 180 | 'cloned_repo': repo, } |
|
181 | 181 | |
|
182 | 182 | dest = path + seq |
|
183 | 183 | if method == 'pull': |
|
184 | 184 | stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest, clone_url) |
|
185 | 185 | else: |
|
186 | 186 | stdout, stderr = Command(cwd).execute(backend, method, clone_url, dest) |
|
187 | 187 | print stdout,'sdasdsadsa' |
|
188 | 188 | if not no_errors: |
|
189 | 189 | if backend == 'hg': |
|
190 | 190 | assert """adding file changes""" in stdout, 'no messages about cloning' |
|
191 | 191 | assert """abort""" not in stderr , 'got error from clone' |
|
192 | 192 | elif backend == 'git': |
|
193 | 193 | assert """Cloning into""" in stdout, 'no messages about cloning' |
|
194 | 194 | |
|
195 | 195 | if __name__ == '__main__': |
|
196 | 196 | try: |
|
197 | 197 | create_test_user(force=False) |
|
198 | 198 | seq = None |
|
199 | 199 | import time |
|
200 | 200 | |
|
201 | 201 | try: |
|
202 | 202 | METHOD = sys.argv[3] |
|
203 | 203 | except IndexError: |
|
204 | 204 | pass |
|
205 | 205 | |
|
206 | 206 | try: |
|
207 | 207 | backend = sys.argv[4] |
|
208 | 208 | except IndexError: |
|
209 | 209 | backend = 'hg' |
|
210 | 210 | |
|
211 | 211 | if METHOD == 'pull': |
|
212 | 212 | seq = _RandomNameSequence().next() |
|
213 | 213 | test_clone_with_credentials(repo=sys.argv[1], method='clone', |
|
214 | 214 | seq=seq, backend=backend) |
|
215 | 215 | s = time.time() |
|
216 | 216 | for i in range(1, int(sys.argv[2]) + 1): |
|
217 | 217 | print 'take', i |
|
218 | 218 | test_clone_with_credentials(repo=sys.argv[1], method=METHOD, |
|
219 | 219 | seq=seq, backend=backend) |
|
220 | 220 | print 'time taken %.3f' % (time.time() - s) |
|
221 | 221 | except Exception as e: |
|
222 | 222 | sys.exit('stop on %s' % e) |
@@ -1,591 +1,591 b'' | |||
|
1 | 1 | ################################################################################ |
|
2 | 2 | ################################################################################ |
|
3 | 3 | # Kallithea - config for tests: # |
|
4 | 4 | # initial_repo_scan = true # |
|
5 | 5 | # vcs_full_cache = false # |
|
6 | 6 | # sqlalchemy and kallithea_test.sqlite # |
|
7 | 7 | # custom logging # |
|
8 | 8 | # # |
|
9 | 9 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
10 | 10 | ################################################################################ |
|
11 | 11 | ################################################################################ |
|
12 | 12 | |
|
13 | 13 | [DEFAULT] |
|
14 | 14 | debug = true |
|
15 | 15 | pdebug = false |
|
16 | 16 | |
|
17 | 17 | ################################################################################ |
|
18 | 18 | ## Email settings ## |
|
19 | 19 | ## ## |
|
20 | 20 | ## Refer to the documentation ("Email settings") for more details. ## |
|
21 | 21 | ## ## |
|
22 | 22 | ## It is recommended to use a valid sender address that passes access ## |
|
23 | 23 | ## validation and spam filtering in mail servers. ## |
|
24 | 24 | ################################################################################ |
|
25 | 25 | |
|
26 | 26 | ## 'From' header for application emails. You can optionally add a name. |
|
27 | 27 | ## Default: |
|
28 | 28 | #app_email_from = Kallithea |
|
29 | 29 | ## Examples: |
|
30 | 30 | #app_email_from = Kallithea <kallithea-noreply@example.com> |
|
31 | 31 | #app_email_from = kallithea-noreply@example.com |
|
32 | 32 | |
|
33 | 33 | ## Subject prefix for application emails. |
|
34 | 34 | ## A space between this prefix and the real subject is automatically added. |
|
35 | 35 | ## Default: |
|
36 | 36 | #email_prefix = |
|
37 | 37 | ## Example: |
|
38 | 38 | #email_prefix = [Kallithea] |
|
39 | 39 | |
|
40 | 40 | ## Recipients for error emails and fallback recipients of application mails. |
|
41 | 41 | ## Multiple addresses can be specified, space-separated. |
|
42 | 42 | ## Only addresses are allowed, do not add any name part. |
|
43 | 43 | ## Default: |
|
44 | 44 | #email_to = |
|
45 | 45 | ## Examples: |
|
46 | 46 | #email_to = admin@example.com |
|
47 | 47 | #email_to = admin@example.com another_admin@example.com |
|
48 | 48 | |
|
49 | 49 | ## 'From' header for error emails. You can optionally add a name. |
|
50 | 50 | ## Default: |
|
51 | 51 | #error_email_from = pylons@yourapp.com |
|
52 | 52 | ## Examples: |
|
53 | 53 | #error_email_from = Kallithea Errors <kallithea-noreply@example.com> |
|
54 | 54 | #error_email_from = paste_error@example.com |
|
55 | 55 | |
|
56 | 56 | ## SMTP server settings |
|
57 | 57 | ## Only smtp_server is mandatory. All other settings take the specified default |
|
58 | 58 | ## values. |
|
59 |
#smtp_server = |
|
|
59 | #smtp_server = smtp.example.com | |
|
60 | 60 | #smtp_username = |
|
61 | 61 | #smtp_password = |
|
62 | 62 | #smtp_port = 25 |
|
63 | 63 | #smtp_use_tls = false |
|
64 | 64 | #smtp_use_ssl = false |
|
65 | 65 | ## SMTP authentication parameters to use (e.g. LOGIN PLAIN CRAM-MD5, etc.). |
|
66 | 66 | ## If empty, use any of the authentication parameters supported by the server. |
|
67 | 67 | #smtp_auth = |
|
68 | 68 | |
|
69 | 69 | [server:main] |
|
70 | 70 | ## PASTE ## |
|
71 | 71 | #use = egg:Paste#http |
|
72 | 72 | ## nr of worker threads to spawn |
|
73 | 73 | #threadpool_workers = 5 |
|
74 | 74 | ## max request before thread respawn |
|
75 | 75 | #threadpool_max_requests = 10 |
|
76 | 76 | ## option to use threads of process |
|
77 | 77 | #use_threadpool = true |
|
78 | 78 | |
|
79 | 79 | ## WAITRESS ## |
|
80 | 80 | use = egg:waitress#main |
|
81 | 81 | ## number of worker threads |
|
82 | 82 | threads = 5 |
|
83 | 83 | ## MAX BODY SIZE 100GB |
|
84 | 84 | max_request_body_size = 107374182400 |
|
85 | 85 | ## use poll instead of select, fixes fd limits, may not work on old |
|
86 | 86 | ## windows systems. |
|
87 | 87 | #asyncore_use_poll = True |
|
88 | 88 | |
|
89 | 89 | ## GUNICORN ## |
|
90 | 90 | #use = egg:gunicorn#main |
|
91 | 91 | ## number of process workers. You must set `instance_id = *` when this option |
|
92 | 92 | ## is set to more than one worker |
|
93 | 93 | #workers = 1 |
|
94 | 94 | ## process name |
|
95 | 95 | #proc_name = kallithea |
|
96 | 96 | ## type of worker class, one of sync, eventlet, gevent, tornado |
|
97 | 97 | ## recommended for bigger setup is using of of other than sync one |
|
98 | 98 | #worker_class = sync |
|
99 | 99 | #max_requests = 1000 |
|
100 | 100 | ## ammount of time a worker can handle request before it gets killed and |
|
101 | 101 | ## restarted |
|
102 | 102 | #timeout = 3600 |
|
103 | 103 | |
|
104 | 104 | ## UWSGI ## |
|
105 | 105 | ## run with uwsgi --ini-paste-logged <inifile.ini> |
|
106 | 106 | #[uwsgi] |
|
107 | 107 | #socket = /tmp/uwsgi.sock |
|
108 | 108 | #master = true |
|
109 | 109 | #http = 127.0.0.1:5000 |
|
110 | 110 | |
|
111 | 111 | ## set as deamon and redirect all output to file |
|
112 | 112 | #daemonize = ./uwsgi_kallithea.log |
|
113 | 113 | |
|
114 | 114 | ## master process PID |
|
115 | 115 | #pidfile = ./uwsgi_kallithea.pid |
|
116 | 116 | |
|
117 | 117 | ## stats server with workers statistics, use uwsgitop |
|
118 | 118 | ## for monitoring, `uwsgitop 127.0.0.1:1717` |
|
119 | 119 | #stats = 127.0.0.1:1717 |
|
120 | 120 | #memory-report = true |
|
121 | 121 | |
|
122 | 122 | ## log 5XX errors |
|
123 | 123 | #log-5xx = true |
|
124 | 124 | |
|
125 | 125 | ## Set the socket listen queue size. |
|
126 | 126 | #listen = 256 |
|
127 | 127 | |
|
128 | 128 | ## Gracefully Reload workers after the specified amount of managed requests |
|
129 | 129 | ## (avoid memory leaks). |
|
130 | 130 | #max-requests = 1000 |
|
131 | 131 | |
|
132 | 132 | ## enable large buffers |
|
133 | 133 | #buffer-size = 65535 |
|
134 | 134 | |
|
135 | 135 | ## socket and http timeouts ## |
|
136 | 136 | #http-timeout = 3600 |
|
137 | 137 | #socket-timeout = 3600 |
|
138 | 138 | |
|
139 | 139 | ## Log requests slower than the specified number of milliseconds. |
|
140 | 140 | #log-slow = 10 |
|
141 | 141 | |
|
142 | 142 | ## Exit if no app can be loaded. |
|
143 | 143 | #need-app = true |
|
144 | 144 | |
|
145 | 145 | ## Set lazy mode (load apps in workers instead of master). |
|
146 | 146 | #lazy = true |
|
147 | 147 | |
|
148 | 148 | ## scaling ## |
|
149 | 149 | ## set cheaper algorithm to use, if not set default will be used |
|
150 | 150 | #cheaper-algo = spare |
|
151 | 151 | |
|
152 | 152 | ## minimum number of workers to keep at all times |
|
153 | 153 | #cheaper = 1 |
|
154 | 154 | |
|
155 | 155 | ## number of workers to spawn at startup |
|
156 | 156 | #cheaper-initial = 1 |
|
157 | 157 | |
|
158 | 158 | ## maximum number of workers that can be spawned |
|
159 | 159 | #workers = 4 |
|
160 | 160 | |
|
161 | 161 | ## how many workers should be spawned at a time |
|
162 | 162 | #cheaper-step = 1 |
|
163 | 163 | |
|
164 | 164 | ## COMMON ## |
|
165 | 165 | host = 127.0.0.1 |
|
166 | 166 | port = 4999 |
|
167 | 167 | |
|
168 | 168 | ## middleware for hosting the WSGI application under a URL prefix |
|
169 | 169 | #[filter:proxy-prefix] |
|
170 | 170 | #use = egg:PasteDeploy#prefix |
|
171 | 171 | #prefix = /<your-prefix> |
|
172 | 172 | |
|
173 | 173 | [app:main] |
|
174 | 174 | use = egg:kallithea |
|
175 | 175 | ## enable proxy prefix middleware |
|
176 | 176 | #filter-with = proxy-prefix |
|
177 | 177 | |
|
178 | 178 | full_stack = true |
|
179 | 179 | static_files = true |
|
180 | 180 | ## Available Languages: |
|
181 | 181 | ## cs de fr hu ja nl_BE pl pt_BR ru sk zh_CN zh_TW |
|
182 | 182 | lang = |
|
183 | 183 | cache_dir = %(here)s/data |
|
184 | 184 | index_dir = %(here)s/data/index |
|
185 | 185 | |
|
186 | 186 | ## perform a full repository scan on each server start, this should be |
|
187 | 187 | ## set to false after first startup, to allow faster server restarts. |
|
188 | 188 | #initial_repo_scan = false |
|
189 | 189 | initial_repo_scan = true |
|
190 | 190 | |
|
191 | 191 | ## uncomment and set this path to use archive download cache |
|
192 | 192 | archive_cache_dir = %(here)s/tarballcache |
|
193 | 193 | |
|
194 | 194 | ## change this to unique ID for security |
|
195 | 195 | app_instance_uuid = test |
|
196 | 196 | |
|
197 | 197 | ## cut off limit for large diffs (size in bytes) |
|
198 | 198 | cut_off_limit = 256000 |
|
199 | 199 | |
|
200 | 200 | ## use cache version of scm repo everywhere |
|
201 | 201 | #vcs_full_cache = true |
|
202 | 202 | vcs_full_cache = false |
|
203 | 203 | |
|
204 | 204 | ## force https in Kallithea, fixes https redirects, assumes it's always https |
|
205 | 205 | force_https = false |
|
206 | 206 | |
|
207 | 207 | ## use Strict-Transport-Security headers |
|
208 | 208 | use_htsts = false |
|
209 | 209 | |
|
210 | 210 | ## number of commits stats will parse on each iteration |
|
211 | 211 | commit_parse_limit = 25 |
|
212 | 212 | |
|
213 | 213 | ## path to git executable |
|
214 | 214 | git_path = git |
|
215 | 215 | |
|
216 | 216 | ## git rev filter option, --all is the default filter, if you need to |
|
217 | 217 | ## hide all refs in changelog switch this to --branches --tags |
|
218 | 218 | #git_rev_filter = --branches --tags |
|
219 | 219 | |
|
220 | 220 | ## RSS feed options |
|
221 | 221 | rss_cut_off_limit = 256000 |
|
222 | 222 | rss_items_per_page = 10 |
|
223 | 223 | rss_include_diff = false |
|
224 | 224 | |
|
225 | 225 | ## options for showing and identifying changesets |
|
226 | 226 | show_sha_length = 12 |
|
227 | 227 | show_revision_number = true |
|
228 | 228 | |
|
229 | 229 | ## gist URL alias, used to create nicer urls for gist. This should be an |
|
230 | 230 | ## url that does rewrites to _admin/gists/<gistid>. |
|
231 |
## example: http://gist. |
|
|
232 |
## Kallithea url, ie. http[s]:// |
|
|
231 | ## example: http://gist.example.com/{gistid}. Empty means use the internal | |
|
232 | ## Kallithea url, ie. http[s]://kallithea.example.com/_admin/gists/<gistid> | |
|
233 | 233 | gist_alias_url = |
|
234 | 234 | |
|
235 | 235 | ## white list of API enabled controllers. This allows to add list of |
|
236 | 236 | ## controllers to which access will be enabled by api_key. eg: to enable |
|
237 | 237 | ## api access to raw_files put `FilesController:raw`, to enable access to patches |
|
238 | 238 | ## add `ChangesetController:changeset_patch`. This list should be "," separated |
|
239 | 239 | ## Syntax is <ControllerClass>:<function>. Check debug logs for generated names |
|
240 | 240 | ## Recommended settings below are commented out: |
|
241 | 241 | api_access_controllers_whitelist = |
|
242 | 242 | # ChangesetController:changeset_patch, |
|
243 | 243 | # ChangesetController:changeset_raw, |
|
244 | 244 | # FilesController:raw, |
|
245 | 245 | # FilesController:archivefile |
|
246 | 246 | |
|
247 | 247 | ## default encoding used to convert from and to unicode |
|
248 | 248 | ## can be also a comma seperated list of encoding in case of mixed encodings |
|
249 | 249 | default_encoding = utf8 |
|
250 | 250 | |
|
251 | 251 | ## issue tracker for Kallithea (leave blank to disable, absent for default) |
|
252 | 252 | #bugtracker = https://bitbucket.org/conservancy/kallithea/issues |
|
253 | 253 | |
|
254 | 254 | ## issue tracking mapping for commits messages |
|
255 | 255 | ## comment out issue_pat, issue_server, issue_prefix to enable |
|
256 | 256 | |
|
257 | 257 | ## pattern to get the issues from commit messages |
|
258 | 258 | ## default one used here is #<numbers> with a regex passive group for `#` |
|
259 | 259 | ## {id} will be all groups matched from this pattern |
|
260 | 260 | |
|
261 | 261 | issue_pat = (?:\s*#)(\d+) |
|
262 | 262 | |
|
263 | 263 | ## server url to the issue, each {id} will be replaced with match |
|
264 | 264 | ## fetched from the regex and {repo} is replaced with full repository name |
|
265 | 265 | ## including groups {repo_name} is replaced with just name of repo |
|
266 | 266 | |
|
267 |
issue_server_link = https:// |
|
|
267 | issue_server_link = https://issues.example.com/{repo}/issue/{id} | |
|
268 | 268 | |
|
269 | 269 | ## prefix to add to link to indicate it's an url |
|
270 | 270 | ## #314 will be replaced by <issue_prefix><id> |
|
271 | 271 | |
|
272 | 272 | issue_prefix = # |
|
273 | 273 | |
|
274 | 274 | ## issue_pat, issue_server_link, issue_prefix can have suffixes to specify |
|
275 | 275 | ## multiple patterns, to other issues server, wiki or others |
|
276 | 276 | ## below an example how to create a wiki pattern |
|
277 |
# wiki-some-id -> https:// |
|
|
277 | # wiki-some-id -> https://wiki.example.com/some-id | |
|
278 | 278 | |
|
279 | 279 | #issue_pat_wiki = (?:wiki-)(.+) |
|
280 |
#issue_server_link_wiki = https:// |
|
|
280 | #issue_server_link_wiki = https://wiki.example.com/{id} | |
|
281 | 281 | #issue_prefix_wiki = WIKI- |
|
282 | 282 | |
|
283 | 283 | ## instance-id prefix |
|
284 | 284 | ## a prefix key for this instance used for cache invalidation when running |
|
285 | 285 | ## multiple instances of kallithea, make sure it's globally unique for |
|
286 | 286 | ## all running kallithea instances. Leave empty if you don't use it |
|
287 | 287 | instance_id = |
|
288 | 288 | |
|
289 | 289 | ## alternative return HTTP header for failed authentication. Default HTTP |
|
290 | 290 | ## response is 401 HTTPUnauthorized. Currently Mercurial clients have trouble with |
|
291 | 291 | ## handling that. Set this variable to 403 to return HTTPForbidden |
|
292 | 292 | auth_ret_code = |
|
293 | 293 | |
|
294 | 294 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
295 | 295 | ## codes don't break the transactions while 4XX codes do |
|
296 | 296 | lock_ret_code = 423 |
|
297 | 297 | |
|
298 | 298 | ## allows to change the repository location in settings page |
|
299 | 299 | allow_repo_location_change = True |
|
300 | 300 | |
|
301 | 301 | ## allows to setup custom hooks in settings page |
|
302 | 302 | allow_custom_hooks_settings = True |
|
303 | 303 | |
|
304 | 304 | #################################### |
|
305 | 305 | ### CELERY CONFIG #### |
|
306 | 306 | #################################### |
|
307 | 307 | |
|
308 | 308 | use_celery = false |
|
309 | 309 | broker.host = localhost |
|
310 | 310 | broker.vhost = rabbitmqhost |
|
311 | 311 | broker.port = 5672 |
|
312 | 312 | broker.user = rabbitmq |
|
313 | 313 | broker.password = qweqwe |
|
314 | 314 | |
|
315 | 315 | celery.imports = kallithea.lib.celerylib.tasks |
|
316 | 316 | |
|
317 | 317 | celery.result.backend = amqp |
|
318 | 318 | celery.result.dburi = amqp:// |
|
319 | 319 | celery.result.serialier = json |
|
320 | 320 | |
|
321 | 321 | #celery.send.task.error.emails = true |
|
322 | 322 | #celery.amqp.task.result.expires = 18000 |
|
323 | 323 | |
|
324 | 324 | celeryd.concurrency = 2 |
|
325 | 325 | #celeryd.log.file = celeryd.log |
|
326 | 326 | celeryd.log.level = DEBUG |
|
327 | 327 | celeryd.max.tasks.per.child = 1 |
|
328 | 328 | |
|
329 | 329 | ## tasks will never be sent to the queue, but executed locally instead. |
|
330 | 330 | celery.always.eager = false |
|
331 | 331 | |
|
332 | 332 | #################################### |
|
333 | 333 | ### BEAKER CACHE #### |
|
334 | 334 | #################################### |
|
335 | 335 | |
|
336 | 336 | beaker.cache.data_dir = %(here)s/data/cache/data |
|
337 | 337 | beaker.cache.lock_dir = %(here)s/data/cache/lock |
|
338 | 338 | |
|
339 | 339 | beaker.cache.regions = short_term,long_term,sql_cache_short |
|
340 | 340 | |
|
341 | 341 | beaker.cache.short_term.type = memory |
|
342 | 342 | beaker.cache.short_term.expire = 60 |
|
343 | 343 | beaker.cache.short_term.key_length = 256 |
|
344 | 344 | |
|
345 | 345 | beaker.cache.long_term.type = memory |
|
346 | 346 | beaker.cache.long_term.expire = 36000 |
|
347 | 347 | beaker.cache.long_term.key_length = 256 |
|
348 | 348 | |
|
349 | 349 | beaker.cache.sql_cache_short.type = memory |
|
350 | 350 | beaker.cache.sql_cache_short.expire = 1 |
|
351 | 351 | beaker.cache.sql_cache_short.key_length = 256 |
|
352 | 352 | |
|
353 | 353 | #################################### |
|
354 | 354 | ### BEAKER SESSION #### |
|
355 | 355 | #################################### |
|
356 | 356 | |
|
357 | 357 | ## Name of session cookie. Should be unique for a given host and path, even when running |
|
358 | 358 | ## on different ports. Otherwise, cookie sessions will be shared and messed up. |
|
359 | 359 | beaker.session.key = kallithea |
|
360 | 360 | ## Sessions should always only be accessible by the browser, not directly by JavaScript. |
|
361 | 361 | beaker.session.httponly = true |
|
362 | 362 | ## Session lifetime. 2592000 seconds is 30 days. |
|
363 | 363 | beaker.session.timeout = 2592000 |
|
364 | 364 | |
|
365 | 365 | ## Server secret used with HMAC to ensure integrity of cookies. |
|
366 | 366 | beaker.session.secret = {74e0cd75-b339-478b-b129-07dd221def1f} |
|
367 | 367 | ## Further, encrypt the data with AES. |
|
368 | 368 | #beaker.session.encrypt_key = <key_for_encryption> |
|
369 | 369 | #beaker.session.validate_key = <validation_key> |
|
370 | 370 | |
|
371 | 371 | ## Type of storage used for the session, current types are |
|
372 | 372 | ## dbm, file, memcached, database, and memory. |
|
373 | 373 | |
|
374 | 374 | ## File system storage of session data. (default) |
|
375 | 375 | #beaker.session.type = file |
|
376 | 376 | |
|
377 | 377 | ## Cookie only, store all session data inside the cookie. Requires secure secrets. |
|
378 | 378 | #beaker.session.type = cookie |
|
379 | 379 | |
|
380 | 380 | ## Database storage of session data. |
|
381 | 381 | #beaker.session.type = ext:database |
|
382 | 382 | #beaker.session.sa.url = postgresql://postgres:qwe@localhost/kallithea |
|
383 | 383 | #beaker.session.table_name = db_session |
|
384 | 384 | |
|
385 | 385 | ############################ |
|
386 | 386 | ## ERROR HANDLING SYSTEMS ## |
|
387 | 387 | ############################ |
|
388 | 388 | |
|
389 | 389 | #################### |
|
390 | 390 | ### [errormator] ### |
|
391 | 391 | #################### |
|
392 | 392 | |
|
393 | 393 | ## Errormator is tailored to work with Kallithea, see |
|
394 | 394 | ## http://errormator.com for details how to obtain an account |
|
395 | 395 | ## you must install python package `errormator_client` to make it work |
|
396 | 396 | |
|
397 | 397 | ## errormator enabled |
|
398 | 398 | errormator = false |
|
399 | 399 | |
|
400 | 400 | errormator.server_url = https://api.errormator.com |
|
401 | 401 | errormator.api_key = YOUR_API_KEY |
|
402 | 402 | |
|
403 | 403 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
404 | 404 | |
|
405 | 405 | ## enables 404 error logging (default False) |
|
406 | 406 | errormator.report_404 = false |
|
407 | 407 | |
|
408 | 408 | ## time in seconds after request is considered being slow (default 1) |
|
409 | 409 | errormator.slow_request_time = 1 |
|
410 | 410 | |
|
411 | 411 | ## record slow requests in application |
|
412 | 412 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
413 | 413 | errormator.slow_requests = true |
|
414 | 414 | |
|
415 | 415 | ## enable hooking to application loggers |
|
416 | 416 | #errormator.logging = true |
|
417 | 417 | |
|
418 | 418 | ## minimum log level for log capture |
|
419 | 419 | #errormator.logging.level = WARNING |
|
420 | 420 | |
|
421 | 421 | ## send logs only from erroneous/slow requests |
|
422 | 422 | ## (saves API quota for intensive logging) |
|
423 | 423 | errormator.logging_on_error = false |
|
424 | 424 | |
|
425 | 425 | ## list of additional keywords that should be grabbed from environ object
|
426 | 426 | ## can be string with comma separated list of words in lowercase |
|
427 | 427 | ## (by default client will always send following info: |
|
428 | 428 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |
|
429 | 429 | ## start with HTTP*); this list can be extended with additional keywords here
|
430 | 430 | errormator.environ_keys_whitelist = |
|
431 | 431 | |
|
432 | 432 | ## list of keywords that should be blanked from request object |
|
433 | 433 | ## can be string with comma separated list of words in lowercase |
|
434 | 434 | ## (by default client will always blank keys that contain following words |
|
435 | 435 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
436 | 436 | ## this list can be extended with additional keywords set here
|
437 | 437 | errormator.request_keys_blacklist = |
|
438 | 438 | |
|
439 | 439 | ## list of namespaces that should be ignored when gathering log entries
|
440 | 440 | ## can be string with comma separated list of namespaces |
|
441 | 441 | ## (by default the client ignores own entries: errormator_client.client) |
|
442 | 442 | errormator.log_namespace_blacklist = |
|
443 | 443 | |
|
444 | 444 | ################ |
|
445 | 445 | ### [sentry] ### |
|
446 | 446 | ################ |
|
447 | 447 | |
|
448 | 448 | ## sentry is an alternative open source error aggregator
|
449 | 449 | ## you must install python packages `sentry` and `raven` to enable |
|
450 | 450 | |
|
451 | 451 | sentry.dsn = YOUR_DNS |
|
452 | 452 | sentry.servers = |
|
453 | 453 | sentry.name = |
|
454 | 454 | sentry.key = |
|
455 | 455 | sentry.public_key = |
|
456 | 456 | sentry.secret_key = |
|
457 | 457 | sentry.project = |
|
458 | 458 | sentry.site = |
|
459 | 459 | sentry.include_paths = |
|
460 | 460 | sentry.exclude_paths = |
|
461 | 461 | |
|
462 | 462 | ################################################################################ |
|
463 | 463 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
464 | 464 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
465 | 465 | ## execute malicious code after an exception is raised. ## |
|
466 | 466 | ################################################################################ |
|
467 | 467 | set debug = false |
|
468 | 468 | |
|
469 | 469 | ################################## |
|
470 | 470 | ### LOGVIEW CONFIG ### |
|
471 | 471 | ################################## |
|
472 | 472 | |
|
473 | 473 | logview.sqlalchemy = #faa |
|
474 | 474 | logview.pylons.templating = #bfb |
|
475 | 475 | logview.pylons.util = #eee |
|
476 | 476 | |
|
477 | 477 | ######################################################### |
|
478 | 478 | ### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ### |
|
479 | 479 | ######################################################### |
|
480 | 480 | |
|
481 | 481 | # SQLITE [default] |
|
482 | 482 | #sqlalchemy.db1.url = sqlite:///%(here)s/kallithea.db?timeout=60 |
|
483 | 483 | sqlalchemy.db1.url = sqlite:///%(here)s/kallithea_test.sqlite |
|
484 | 484 | |
|
485 | 485 | # POSTGRESQL |
|
486 | 486 | #sqlalchemy.db1.url = postgresql://user:pass@localhost/kallithea |
|
487 | 487 | |
|
488 | 488 | # MySQL |
|
489 | 489 | #sqlalchemy.db1.url = mysql://user:pass@localhost/kallithea |
|
490 | 490 | |
|
491 | 491 | # see sqlalchemy docs for others |
|
492 | 492 | |
|
493 | 493 | sqlalchemy.db1.echo = false |
|
494 | 494 | sqlalchemy.db1.pool_recycle = 3600 |
|
495 | 495 | sqlalchemy.db1.convert_unicode = true |
|
496 | 496 | |
|
497 | 497 | ################################ |
|
498 | 498 | ### LOGGING CONFIGURATION #### |
|
499 | 499 | ################################ |
|
500 | 500 | |
|
501 | 501 | [loggers] |
|
502 | 502 | keys = root, routes, kallithea, sqlalchemy, beaker, templates, whoosh_indexer |
|
503 | 503 | |
|
504 | 504 | [handlers] |
|
505 | 505 | keys = console, console_sql |
|
506 | 506 | |
|
507 | 507 | [formatters] |
|
508 | 508 | keys = generic, color_formatter, color_formatter_sql |
|
509 | 509 | |
|
510 | 510 | ############# |
|
511 | 511 | ## LOGGERS ## |
|
512 | 512 | ############# |
|
513 | 513 | |
|
514 | 514 | [logger_root] |
|
515 | 515 | #level = NOTSET |
|
516 | 516 | level = DEBUG |
|
517 | 517 | handlers = console |
|
518 | 518 | |
|
519 | 519 | [logger_routes] |
|
520 | 520 | level = DEBUG |
|
521 | 521 | handlers = |
|
522 | 522 | qualname = routes.middleware |
|
523 | 523 | ## "level = DEBUG" logs the route matched and routing variables. |
|
524 | 524 | propagate = 1 |
|
525 | 525 | |
|
526 | 526 | [logger_beaker] |
|
527 | 527 | level = DEBUG |
|
528 | 528 | handlers = |
|
529 | 529 | qualname = beaker.container |
|
530 | 530 | propagate = 1 |
|
531 | 531 | |
|
532 | 532 | [logger_templates] |
|
533 | 533 | level = INFO |
|
534 | 534 | handlers = |
|
535 | 535 | qualname = pylons.templating |
|
536 | 536 | propagate = 1 |
|
537 | 537 | |
|
538 | 538 | [logger_kallithea] |
|
539 | 539 | level = DEBUG |
|
540 | 540 | handlers = |
|
541 | 541 | qualname = kallithea |
|
542 | 542 | propagate = 1 |
|
543 | 543 | |
|
544 | 544 | [logger_sqlalchemy] |
|
545 | 545 | #level = INFO |
|
546 | 546 | #handlers = console_sql |
|
547 | 547 | level = ERROR |
|
548 | 548 | handlers = console |
|
549 | 549 | qualname = sqlalchemy.engine |
|
550 | 550 | propagate = 0 |
|
551 | 551 | |
|
552 | 552 | [logger_whoosh_indexer] |
|
553 | 553 | level = DEBUG |
|
554 | 554 | handlers = |
|
555 | 555 | qualname = whoosh_indexer |
|
556 | 556 | propagate = 1 |
|
557 | 557 | |
|
558 | 558 | ############## |
|
559 | 559 | ## HANDLERS ## |
|
560 | 560 | ############## |
|
561 | 561 | |
|
562 | 562 | [handler_console] |
|
563 | 563 | class = StreamHandler |
|
564 | 564 | args = (sys.stderr,) |
|
565 | 565 | #level = INFO |
|
566 | 566 | level = NOTSET |
|
567 | 567 | formatter = generic |
|
568 | 568 | |
|
569 | 569 | [handler_console_sql] |
|
570 | 570 | class = StreamHandler |
|
571 | 571 | args = (sys.stderr,) |
|
572 | 572 | level = WARN |
|
573 | 573 | formatter = generic |
|
574 | 574 | |
|
575 | 575 | ################ |
|
576 | 576 | ## FORMATTERS ## |
|
577 | 577 | ################ |
|
578 | 578 | |
|
579 | 579 | [formatter_generic] |
|
580 | 580 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
581 | 581 | datefmt = %Y-%m-%d %H:%M:%S |
|
582 | 582 | |
|
583 | 583 | [formatter_color_formatter] |
|
584 | 584 | class = kallithea.lib.colored_formatter.ColorFormatter |
|
585 | 585 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
586 | 586 | datefmt = %Y-%m-%d %H:%M:%S |
|
587 | 587 | |
|
588 | 588 | [formatter_color_formatter_sql] |
|
589 | 589 | class = kallithea.lib.colored_formatter.ColorFormatterSql |
|
590 | 590 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
591 | 591 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,278 +1,278 b'' | |||
|
1 | 1 | |
|
2 | 2 | import os |
|
3 | 3 | import mock |
|
4 | 4 | import time |
|
5 | 5 | import shutil |
|
6 | 6 | import tempfile |
|
7 | 7 | import datetime |
|
8 | 8 | from kallithea.lib.vcs.utils.compat import unittest |
|
9 | 9 | from kallithea.lib.vcs.utils.paths import get_dirs_for_path |
|
10 | 10 | from kallithea.lib.vcs.utils.helpers import get_dict_for_attrs |
|
11 | 11 | from kallithea.lib.vcs.utils.helpers import get_scm |
|
12 | 12 | from kallithea.lib.vcs.utils.helpers import get_scms_for_path |
|
13 | 13 | from kallithea.lib.vcs.utils.helpers import get_total_seconds |
|
14 | 14 | from kallithea.lib.vcs.utils.helpers import parse_changesets |
|
15 | 15 | from kallithea.lib.vcs.utils.helpers import parse_datetime |
|
16 | 16 | from kallithea.lib.vcs.utils import author_email, author_name |
|
17 | 17 | from kallithea.lib.vcs.utils.paths import get_user_home |
|
18 | 18 | from kallithea.lib.vcs.exceptions import VCSError |
|
19 | 19 | |
|
20 | 20 | from kallithea.tests.vcs.conf import TEST_HG_REPO, TEST_GIT_REPO, TEST_TMP_PATH |
|
21 | 21 | |
|
22 | 22 | |
|
class PathsTest(unittest.TestCase):
    """Tests for the path/SCM-detection helpers from kallithea.lib.vcs.utils."""

    def _test_get_dirs_for_path(self, path, expected):
        """
        Tests if get_dirs_for_path returns same as expected.
        """
        expected = sorted(expected)
        result = sorted(get_dirs_for_path(path))
        self.assertEqual(result, expected,
            msg="%s != %s which was expected result for path %s"
            % (result, expected, path))

    def test_get_dirs_for_path(self):
        # Fix: removed a dead ``path = 'foo/bar/baz/file'`` assignment that was
        # immediately shadowed by the loop variable below.
        paths_and_results = (
            ('foo/bar/baz/file', ['foo', 'foo/bar', 'foo/bar/baz']),
            ('foo/bar/', ['foo', 'foo/bar']),
            ('foo/bar', ['foo']),
        )
        for path, expected in paths_and_results:
            self._test_get_dirs_for_path(path, expected)

    def test_get_scm(self):
        # get_scm returns an (alias, path) tuple for a valid repository.
        self.assertEqual(('hg', TEST_HG_REPO), get_scm(TEST_HG_REPO))
        self.assertEqual(('git', TEST_GIT_REPO), get_scm(TEST_GIT_REPO))

    def test_get_two_scms_for_path(self):
        # An empty directory (no .hg/.git inside) must not be detected as a repo.
        multialias_repo_path = os.path.join(TEST_TMP_PATH, 'hg-git-repo-2')
        if os.path.isdir(multialias_repo_path):
            shutil.rmtree(multialias_repo_path)

        os.mkdir(multialias_repo_path)

        self.assertRaises(VCSError, get_scm, multialias_repo_path)

    def test_get_scm_error_path(self):
        # A non-existent path must raise VCSError.
        self.assertRaises(VCSError, get_scm, 'err')

    def test_get_scms_for_path(self):
        # Detection is driven purely by the presence of .git / .hg control dirs;
        # unrelated dot-directories (.tux) are ignored.
        dirpath = tempfile.gettempdir()
        new = os.path.join(dirpath, 'vcs-scms-for-path-%s' % time.time())
        os.mkdir(new)
        self.assertEqual(get_scms_for_path(new), [])

        os.mkdir(os.path.join(new, '.tux'))
        self.assertEqual(get_scms_for_path(new), [])

        os.mkdir(os.path.join(new, '.git'))
        self.assertEqual(set(get_scms_for_path(new)), set(['git']))

        os.mkdir(os.path.join(new, '.hg'))
        self.assertEqual(set(get_scms_for_path(new)), set(['git', 'hg']))
|
76 | 76 | |
|
77 | 77 | |
|
class TestParseChangesets(unittest.TestCase):
    """Tests for parse_changesets and its ``start..end`` revision-range syntax."""

    def test_main_is_returned_correctly(self):
        # A bare revision fills only the 'main' slot.
        parsed = parse_changesets('123456')
        self.assertEqual(parsed, {
            'start': None,
            'main': '123456',
            'end': None,
        })

    def test_start_is_returned_correctly(self):
        # Trailing dots mean "from this revision onwards".
        parsed = parse_changesets('aaabbb..')
        self.assertEqual(parsed, {
            'start': 'aaabbb',
            'main': None,
            'end': None,
        })

    def test_end_is_returned_correctly(self):
        # Leading dots mean "up to this revision".
        parsed = parse_changesets('..cccddd')
        self.assertEqual(parsed, {
            'start': None,
            'main': None,
            'end': 'cccddd',
        })

    def test_that_two_or_three_dots_are_allowed(self):
        # '..' and '...' are interchangeable separators.
        self.assertEqual(parse_changesets('a..b'), parse_changesets('a...b'))

    def test_that_input_is_stripped_first(self):
        # Surrounding whitespace (spaces, tabs, newlines) is ignored.
        self.assertEqual(parse_changesets('a..bb'),
                         parse_changesets(' a..bb\t\n\t '))

    def test_that_exception_is_raised(self):
        # single dot is not recognized as a range separator
        with self.assertRaises(ValueError):
            parse_changesets('123456.789012')

    def test_non_alphanumeric_raises_exception(self):
        # Revision identifiers must be alphanumeric.
        with self.assertRaises(ValueError):
            parse_changesets('aaa@bbb')
|
119 | 119 | |
|
120 | 120 | |
|
class TestParseDatetime(unittest.TestCase):
    """Tests for parse_datetime: absolute formats, keywords and relative deltas."""

    def test_datetime_text(self):
        self.assertEqual(parse_datetime('2010-04-07 21:29:41'),
                         datetime.datetime(2010, 4, 7, 21, 29, 41))

    def test_no_seconds(self):
        self.assertEqual(parse_datetime('2010-04-07 21:29'),
                         datetime.datetime(2010, 4, 7, 21, 29))

    def test_date_only(self):
        self.assertEqual(parse_datetime('2010-04-07'),
                         datetime.datetime(2010, 4, 7))

    def test_another_format(self):
        # US-style MM/DD/YY is accepted as well.
        self.assertEqual(parse_datetime('04/07/10 21:29:41'),
                         datetime.datetime(2010, 4, 7, 21, 29, 41))

    def test_now(self):
        # 'now' must be within one second of the current time.
        delta = parse_datetime('now') - datetime.datetime.now()
        self.assertTrue(delta < datetime.timedelta(seconds=1))

    def test_today(self):
        # 'today' is midnight of the current day.
        today = datetime.date.today()
        midnight = datetime.datetime(*today.timetuple()[:3])
        self.assertEqual(parse_datetime('today'), midnight)

    def test_yesterday(self):
        yesterday = datetime.date.today() - datetime.timedelta(days=1)
        midnight = datetime.datetime(*yesterday.timetuple()[:3])
        self.assertEqual(parse_datetime('yesterday'), midnight)

    def test_tomorrow(self):
        # 'tomorrow' maps to the very end of the next day (23:59:59).
        tomorrow = datetime.date.today() + datetime.timedelta(days=1)
        end_of_day = datetime.datetime(*(tomorrow.timetuple()[:3] + (23, 59, 59)))
        self.assertEqual(parse_datetime('tomorrow'), end_of_day)

    def _expected_days_ago(self, days):
        # Midnight of the day `days` days in the past.
        base = datetime.datetime.today() - datetime.timedelta(days=days)
        return datetime.datetime(*(base.timetuple()[:3] + (0, 0, 0, 0)))

    def test_days(self):
        # All spellings of "3 days ago" are equivalent.
        expected = self._expected_days_ago(3)
        for text in ('3d', '3 d', '3 day', '3 days'):
            self.assertEqual(parse_datetime(text), expected)

    def test_weeks(self):
        # All spellings of "3 weeks ago" are equivalent.
        expected = self._expected_days_ago(3 * 7)
        for text in ('3w', '3 w', '3 week', '3 weeks'):
            self.assertEqual(parse_datetime(text), expected)

    def test_mixed(self):
        # Week and day units may be combined in one expression.
        expected = self._expected_days_ago(2 * 7 + 3)
        for text in ('2w3d', '2w 3d', '2w 3 days', '2 weeks 3 days'):
            self.assertEqual(parse_datetime(text), expected)
|
184 | 184 | |
|
185 | 185 | |
|
class TestAuthorExtractors(unittest.TestCase):
    """Tests for author_name / author_email parsing of commit author strings."""

    # Each entry maps a raw author string to its expected (name, email) pair.
    TEST_AUTHORS = [("Username Last'o'Name <username@example.com>",
                    ("Username Last'o'Name", "username@example.com")),
                    ("Username Last'o'Name Spaces < username@example.com >",
                    ("Username Last'o'Name Spaces", "username@example.com")),
                    ("Username Last'o'Name <username.lastname@example.com>",
                    ("Username Last'o'Name", "username.lastname@example.com")),
                    ('mrf RFC_SPEC <username+lastname@example.com>',
                    ('mrf RFC_SPEC', 'username+lastname@example.com')),
                    ('username <user@example.com>',
                    ('username', 'user@example.com')),
                    ('username <user@example.com',
                    ('username', 'user@example.com')),
                    ('broken missing@example.com',
                    ('broken', 'missing@example.com')),
                    ('<justemail@example.com>',
                    ('', 'justemail@example.com')),
                    ('justname',
                    ('justname', '')),
                    ('Mr Double Name withemail@example.com ',
                    ('Mr Double Name', 'withemail@example.com')),
    ]

    def test_author_email(self):
        # The email part of every fixture must be extracted correctly.
        for test_str, (_, email) in self.TEST_AUTHORS:
            self.assertEqual(email, author_email(test_str))

    def test_author_name(self):
        # The name part of every fixture must be extracted correctly.
        for test_str, (name, _) in self.TEST_AUTHORS:
            self.assertEqual(name, author_name(test_str))
|
219 | 219 | |
|
220 | 220 | |
|
class TestGetDictForAttrs(unittest.TestCase):
    """Tests for get_dict_for_attrs, which snapshots selected object attributes."""

    def test_returned_dict_has_expected_attrs(self):
        # Set up an object carrying both wanted and unwanted attributes.
        obj = mock.Mock()
        obj.NOT_INCLUDED = 'this key/value should not be included'
        obj.CONST = True
        obj.foo = 'aaa'
        obj.attrs = {'foo': 'bar'}
        obj.date = datetime.datetime(2010, 12, 31)
        obj.count = 1001

        # Only the explicitly requested attributes appear in the result.
        wanted = ['CONST', 'foo', 'attrs', 'date', 'count']
        expected = {
            'CONST': True,
            'foo': 'aaa',
            'attrs': {'foo': 'bar'},
            'date': datetime.datetime(2010, 12, 31),
            'count': 1001,
        }
        self.assertEqual(get_dict_for_attrs(obj, wanted), expected)
|
240 | 240 | |
|
241 | 241 | |
|
class TestGetTotalSeconds(unittest.TestCase):
    """Tests for get_total_seconds, a timedelta -> seconds conversion helper."""

    def assertTotalSecondsEqual(self, timedelta, expected_seconds):
        # Helper assertion with a diagnostic message on mismatch.
        result = get_total_seconds(timedelta)
        msg = ("We computed %s seconds for %s but expected %s"
               % (result, timedelta, expected_seconds))
        self.assertEqual(result, expected_seconds, msg)

    def test_get_total_seconds_returns_proper_value(self):
        self.assertTotalSecondsEqual(datetime.timedelta(seconds=1001), 1001)

    def test_get_total_seconds_returns_proper_value_for_partial_seconds(self):
        # Fractional seconds must survive the conversion.
        self.assertTotalSecondsEqual(datetime.timedelta(seconds=50.65), 50.65)
|
255 | 255 | |
|
256 | 256 | |
|
class TestGetUserHome(unittest.TestCase):
    """Tests for get_user_home's lookup of HOME / USERPROFILE in os.environ."""

    @mock.patch.object(os, 'environ', {})
    def test_defaults_to_empty_string(self):
        # Fix: renamed from test_defaults_to_none — the helper returns '',
        # not None, when neither HOME nor USERPROFILE is set.
        self.assertEqual(get_user_home(), '')

    @mock.patch.object(os, 'environ', {'HOME': '/home/foobar'})
    def test_unix_like(self):
        self.assertEqual(get_user_home(), '/home/foobar')

    @mock.patch.object(os, 'environ', {'USERPROFILE': '/Users/foobar'})
    def test_windows_like(self):
        self.assertEqual(get_user_home(), '/Users/foobar')

    @mock.patch.object(os, 'environ', {'HOME': '/home/foobar',
                                       'USERPROFILE': '/Users/foobar'})
    def test_prefers_home_over_userprofile(self):
        # When both variables are present, HOME wins.
        self.assertEqual(get_user_home(), '/home/foobar')
|
275 | 275 | |
|
276 | 276 | |
|
if __name__ == '__main__':
    # Allow running this test module directly as a script.
    unittest.main()
General Comments 0
You need to be logged in to leave comments.
Login now