@@ -1,770 +1,776 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2017-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import os |
|
23 | 23 | import sys |
|
24 | 24 | import time |
|
25 | 25 | import platform |
|
26 | 26 | import collections |
|
27 | 27 | import pkg_resources |
|
28 | 28 | import logging |
|
29 | 29 | import resource |
|
30 | 30 | |
|
31 | 31 | from pyramid.compat import configparser |
|
32 | 32 | |
|
33 | 33 | log = logging.getLogger(__name__) |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | psutil = None |
|
37 | 37 | |
|
38 | 38 | try: |
|
39 | 39 | # cygwin cannot have yet psutil support. |
|
40 | 40 | import psutil as psutil |
|
41 | 41 | except ImportError: |
|
42 | 42 | pass |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | _NA = 'NOT AVAILABLE' |
|
46 | 46 | |
|
47 | 47 | STATE_OK = 'ok' |
|
48 | 48 | STATE_ERR = 'error' |
|
49 | 49 | STATE_WARN = 'warning' |
|
50 | 50 | |
|
51 | 51 | STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK} |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | # HELPERS |
|
55 | 55 | def percentage(part, whole): |
|
56 | 56 | whole = float(whole) |
|
57 | 57 | if whole > 0: |
|
58 | 58 | return round(100 * float(part) / whole, 1) |
|
59 | 59 | return 0.0 |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | def get_storage_size(storage_path): |
|
63 | 63 | sizes = [] |
|
64 | 64 | for file_ in os.listdir(storage_path): |
|
65 | 65 | storage_file = os.path.join(storage_path, file_) |
|
66 | 66 | if os.path.isfile(storage_file): |
|
67 | 67 | try: |
|
68 | 68 | sizes.append(os.path.getsize(storage_file)) |
|
69 | 69 | except OSError: |
|
70 | log.exception('Failed to get size of storage file %s', | |
|
71 | storage_file) | |
|
70 | log.exception('Failed to get size of storage file %s', storage_file) | |
|
72 | 71 | pass |
|
73 | 72 | |
|
74 | 73 | return sum(sizes) |
|
75 | 74 | |
|
76 | 75 | |
|
77 | 76 | def get_resource(resource_type): |
|
78 | 77 | try: |
|
79 | 78 | return resource.getrlimit(resource_type) |
|
80 | 79 | except Exception: |
|
81 | 80 | return 'NOT_SUPPORTED' |
|
82 | 81 | |
|
83 | 82 | |
|
83 | def get_cert_path(ini_path): | |
|
84 | default = '/etc/ssl/certs/ca-certificates.crt' | |
|
85 | control_ca_bundle = os.path.join( | |
|
86 | os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))), | |
|
87 | '.rccontrol-profile/etc/ca-bundle.crt') | |
|
88 | if os.path.isfile(control_ca_bundle): | |
|
89 | default = control_ca_bundle | |
|
90 | ||
|
91 | return default | |
|
92 | ||
|
84 | 93 | class SysInfoRes(object): |
|
85 | 94 | def __init__(self, value, state=None, human_value=None): |
|
86 | 95 | self.value = value |
|
87 | 96 | self.state = state or STATE_OK_DEFAULT |
|
88 | 97 | self.human_value = human_value or value |
|
89 | 98 | |
|
90 | 99 | def __json__(self): |
|
91 | 100 | return { |
|
92 | 101 | 'value': self.value, |
|
93 | 102 | 'state': self.state, |
|
94 | 103 | 'human_value': self.human_value, |
|
95 | 104 | } |
|
96 | 105 | |
|
97 | 106 | def get_value(self): |
|
98 | 107 | return self.__json__() |
|
99 | 108 | |
|
100 | 109 | def __str__(self): |
|
101 | 110 | return '<SysInfoRes({})>'.format(self.__json__()) |
|
102 | 111 | |
|
103 | 112 | |
|
104 | 113 | class SysInfo(object): |
|
105 | 114 | |
|
106 | 115 | def __init__(self, func_name, **kwargs): |
|
107 | 116 | self.func_name = func_name |
|
108 | 117 | self.value = _NA |
|
109 | 118 | self.state = None |
|
110 | 119 | self.kwargs = kwargs or {} |
|
111 | 120 | |
|
112 | 121 | def __call__(self): |
|
113 | 122 | computed = self.compute(**self.kwargs) |
|
114 | 123 | if not isinstance(computed, SysInfoRes): |
|
115 | 124 | raise ValueError( |
|
116 | 125 | 'computed value for {} is not instance of ' |
|
117 | 126 | '{}, got {} instead'.format( |
|
118 | 127 | self.func_name, SysInfoRes, type(computed))) |
|
119 | 128 | return computed.__json__() |
|
120 | 129 | |
|
121 | 130 | def __str__(self): |
|
122 | 131 | return '<SysInfo({})>'.format(self.func_name) |
|
123 | 132 | |
|
124 | 133 | def compute(self, **kwargs): |
|
125 | 134 | return self.func_name(**kwargs) |
|
126 | 135 | |
|
127 | 136 | |
|
128 | 137 | # SysInfo functions |
|
129 | 138 | def python_info(): |
|
130 | 139 | value = dict(version=' '.join(platform._sys_version()), |
|
131 | 140 | executable=sys.executable) |
|
132 | 141 | return SysInfoRes(value=value) |
|
133 | 142 | |
|
134 | 143 | |
|
135 | 144 | def py_modules(): |
|
136 | 145 | mods = dict([(p.project_name, p.version) |
|
137 | 146 | for p in pkg_resources.working_set]) |
|
138 | 147 | value = sorted(mods.items(), key=lambda k: k[0].lower()) |
|
139 | 148 | return SysInfoRes(value=value) |
|
140 | 149 | |
|
141 | 150 | |
|
142 | 151 | def platform_type(): |
|
143 | 152 | from rhodecode.lib.utils import safe_unicode, generate_platform_uuid |
|
144 | 153 | |
|
145 | 154 | value = dict( |
|
146 | 155 | name=safe_unicode(platform.platform()), |
|
147 | 156 | uuid=generate_platform_uuid() |
|
148 | 157 | ) |
|
149 | 158 | return SysInfoRes(value=value) |
|
150 | 159 | |
|
151 | 160 | |
|
152 | 161 | def locale_info(): |
|
153 | 162 | import locale |
|
154 | 163 | |
|
155 | 164 | value = dict( |
|
156 | 165 | locale_default=locale.getdefaultlocale(), |
|
157 | 166 | locale_lc_all=locale.getlocale(locale.LC_ALL), |
|
158 | 167 | lang_env=os.environ.get('LANG'), |
|
159 | 168 | lc_all_env=os.environ.get('LC_ALL'), |
|
160 | 169 | local_archive_env=os.environ.get('LOCALE_ARCHIVE'), |
|
161 | 170 | ) |
|
162 | 171 | human_value = 'LANG: {}, locale LC_ALL: {}, Default locales: {}'.format( |
|
163 | 172 | value['lang_env'], value['locale_lc_all'], value['locale_default']) |
|
164 | 173 | return SysInfoRes(value=value, human_value=human_value) |
|
165 | 174 | |
|
166 | 175 | |
|
167 | 176 | def ulimit_info(): |
|
168 | 177 | data = collections.OrderedDict([ |
|
169 | 178 | ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)), |
|
170 | 179 | ('file size', get_resource(resource.RLIMIT_FSIZE)), |
|
171 | 180 | ('stack size', get_resource(resource.RLIMIT_STACK)), |
|
172 | 181 | ('core file size', get_resource(resource.RLIMIT_CORE)), |
|
173 | 182 | ('address space size', get_resource(resource.RLIMIT_AS)), |
|
174 | 183 | ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)), |
|
175 | 184 | ('heap size', get_resource(resource.RLIMIT_DATA)), |
|
176 | 185 | ('rss size', get_resource(resource.RLIMIT_RSS)), |
|
177 | 186 | ('number of processes', get_resource(resource.RLIMIT_NPROC)), |
|
178 | 187 | ('open files', get_resource(resource.RLIMIT_NOFILE)), |
|
179 | 188 | ]) |
|
180 | 189 | |
|
181 | 190 | text = ', '.join('{}:{}'.format(k,v) for k,v in data.items()) |
|
182 | 191 | |
|
183 | 192 | value = { |
|
184 | 193 | 'limits': data, |
|
185 | 194 | 'text': text, |
|
186 | 195 | } |
|
187 | 196 | return SysInfoRes(value=value) |
|
188 | 197 | |
|
189 | 198 | |
|
190 | 199 | def uptime(): |
|
191 | 200 | from rhodecode.lib.helpers import age, time_to_datetime |
|
192 | 201 | from rhodecode.translation import TranslationString |
|
193 | 202 | |
|
194 | 203 | value = dict(boot_time=0, uptime=0, text='') |
|
195 | 204 | state = STATE_OK_DEFAULT |
|
196 | 205 | if not psutil: |
|
197 | 206 | return SysInfoRes(value=value, state=state) |
|
198 | 207 | |
|
199 | 208 | boot_time = psutil.boot_time() |
|
200 | 209 | value['boot_time'] = boot_time |
|
201 | 210 | value['uptime'] = time.time() - boot_time |
|
202 | 211 | |
|
203 | 212 | date_or_age = age(time_to_datetime(boot_time)) |
|
204 | 213 | if isinstance(date_or_age, TranslationString): |
|
205 | 214 | date_or_age = date_or_age.interpolate() |
|
206 | 215 | |
|
207 | 216 | human_value = value.copy() |
|
208 | 217 | human_value['boot_time'] = time_to_datetime(boot_time) |
|
209 | 218 | human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False) |
|
210 | 219 | |
|
211 | 220 | human_value['text'] = u'Server started {}'.format(date_or_age) |
|
212 | 221 | return SysInfoRes(value=value, human_value=human_value) |
|
213 | 222 | |
|
214 | 223 | |
|
215 | 224 | def memory(): |
|
216 | 225 | from rhodecode.lib.helpers import format_byte_size_binary |
|
217 | 226 | value = dict(available=0, used=0, used_real=0, cached=0, percent=0, |
|
218 | 227 | percent_used=0, free=0, inactive=0, active=0, shared=0, |
|
219 | 228 | total=0, buffers=0, text='') |
|
220 | 229 | |
|
221 | 230 | state = STATE_OK_DEFAULT |
|
222 | 231 | if not psutil: |
|
223 | 232 | return SysInfoRes(value=value, state=state) |
|
224 | 233 | |
|
225 | 234 | value.update(dict(psutil.virtual_memory()._asdict())) |
|
226 | 235 | value['used_real'] = value['total'] - value['available'] |
|
227 | 236 | value['percent_used'] = psutil._common.usage_percent( |
|
228 | 237 | value['used_real'], value['total'], 1) |
|
229 | 238 | |
|
230 | 239 | human_value = value.copy() |
|
231 | 240 | human_value['text'] = '%s/%s, %s%% used' % ( |
|
232 | 241 | format_byte_size_binary(value['used_real']), |
|
233 | 242 | format_byte_size_binary(value['total']), |
|
234 | 243 | value['percent_used'],) |
|
235 | 244 | |
|
236 | 245 | keys = value.keys()[::] |
|
237 | 246 | keys.pop(keys.index('percent')) |
|
238 | 247 | keys.pop(keys.index('percent_used')) |
|
239 | 248 | keys.pop(keys.index('text')) |
|
240 | 249 | for k in keys: |
|
241 | 250 | human_value[k] = format_byte_size_binary(value[k]) |
|
242 | 251 | |
|
243 | 252 | if state['type'] == STATE_OK and value['percent_used'] > 90: |
|
244 | 253 | msg = 'Critical: your available RAM memory is very low.' |
|
245 | 254 | state = {'message': msg, 'type': STATE_ERR} |
|
246 | 255 | |
|
247 | 256 | elif state['type'] == STATE_OK and value['percent_used'] > 70: |
|
248 | 257 | msg = 'Warning: your available RAM memory is running low.' |
|
249 | 258 | state = {'message': msg, 'type': STATE_WARN} |
|
250 | 259 | |
|
251 | 260 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
252 | 261 | |
|
253 | 262 | |
|
254 | 263 | def machine_load(): |
|
255 | 264 | value = {'1_min': _NA, '5_min': _NA, '15_min': _NA, 'text': ''} |
|
256 | 265 | state = STATE_OK_DEFAULT |
|
257 | 266 | if not psutil: |
|
258 | 267 | return SysInfoRes(value=value, state=state) |
|
259 | 268 | |
|
260 | 269 | # load averages |
|
261 | 270 | if hasattr(psutil.os, 'getloadavg'): |
|
262 | 271 | value.update(dict( |
|
263 | 272 | zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))) |
|
264 | 273 | |
|
265 | 274 | human_value = value.copy() |
|
266 | 275 | human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format( |
|
267 | 276 | value['1_min'], value['5_min'], value['15_min']) |
|
268 | 277 | |
|
269 | 278 | if state['type'] == STATE_OK and value['15_min'] > 5: |
|
270 | 279 | msg = 'Warning: your machine load is very high.' |
|
271 | 280 | state = {'message': msg, 'type': STATE_WARN} |
|
272 | 281 | |
|
273 | 282 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
274 | 283 | |
|
275 | 284 | |
|
276 | 285 | def cpu(): |
|
277 | 286 | value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []} |
|
278 | 287 | state = STATE_OK_DEFAULT |
|
279 | 288 | |
|
280 | 289 | if not psutil: |
|
281 | 290 | return SysInfoRes(value=value, state=state) |
|
282 | 291 | |
|
283 | 292 | value['cpu'] = psutil.cpu_percent(0.5) |
|
284 | 293 | value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True) |
|
285 | 294 | value['cpu_count'] = psutil.cpu_count() |
|
286 | 295 | |
|
287 | 296 | human_value = value.copy() |
|
288 | 297 | human_value['text'] = '{} cores at {} %'.format( |
|
289 | 298 | value['cpu_count'], value['cpu']) |
|
290 | 299 | |
|
291 | 300 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
292 | 301 | |
|
293 | 302 | |
|
294 | 303 | def storage(): |
|
295 | 304 | from rhodecode.lib.helpers import format_byte_size_binary |
|
296 | 305 | from rhodecode.model.settings import VcsSettingsModel |
|
297 | 306 | path = VcsSettingsModel().get_repos_location() |
|
298 | 307 | |
|
299 | 308 | value = dict(percent=0, used=0, total=0, path=path, text='') |
|
300 | 309 | state = STATE_OK_DEFAULT |
|
301 | 310 | if not psutil: |
|
302 | 311 | return SysInfoRes(value=value, state=state) |
|
303 | 312 | |
|
304 | 313 | try: |
|
305 | 314 | value.update(dict(psutil.disk_usage(path)._asdict())) |
|
306 | 315 | except Exception as e: |
|
307 | 316 | log.exception('Failed to fetch disk info') |
|
308 | 317 | state = {'message': str(e), 'type': STATE_ERR} |
|
309 | 318 | |
|
310 | 319 | human_value = value.copy() |
|
311 | 320 | human_value['used'] = format_byte_size_binary(value['used']) |
|
312 | 321 | human_value['total'] = format_byte_size_binary(value['total']) |
|
313 | 322 | human_value['text'] = "{}/{}, {}% used".format( |
|
314 | 323 | format_byte_size_binary(value['used']), |
|
315 | 324 | format_byte_size_binary(value['total']), |
|
316 | 325 | value['percent']) |
|
317 | 326 | |
|
318 | 327 | if state['type'] == STATE_OK and value['percent'] > 90: |
|
319 | 328 | msg = 'Critical: your disk space is very low.' |
|
320 | 329 | state = {'message': msg, 'type': STATE_ERR} |
|
321 | 330 | |
|
322 | 331 | elif state['type'] == STATE_OK and value['percent'] > 70: |
|
323 | 332 | msg = 'Warning: your disk space is running low.' |
|
324 | 333 | state = {'message': msg, 'type': STATE_WARN} |
|
325 | 334 | |
|
326 | 335 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
327 | 336 | |
|
328 | 337 | |
|
329 | 338 | def storage_inodes(): |
|
330 | 339 | from rhodecode.model.settings import VcsSettingsModel |
|
331 | 340 | path = VcsSettingsModel().get_repos_location() |
|
332 | 341 | |
|
333 | 342 | value = dict(percent=0, free=0, used=0, total=0, path=path, text='') |
|
334 | 343 | state = STATE_OK_DEFAULT |
|
335 | 344 | if not psutil: |
|
336 | 345 | return SysInfoRes(value=value, state=state) |
|
337 | 346 | |
|
338 | 347 | try: |
|
339 | 348 | i_stat = os.statvfs(path) |
|
340 | 349 | value['free'] = i_stat.f_ffree |
|
341 | 350 | value['used'] = i_stat.f_files-i_stat.f_favail |
|
342 | 351 | value['total'] = i_stat.f_files |
|
343 | 352 | value['percent'] = percentage(value['used'], value['total']) |
|
344 | 353 | except Exception as e: |
|
345 | 354 | log.exception('Failed to fetch disk inodes info') |
|
346 | 355 | state = {'message': str(e), 'type': STATE_ERR} |
|
347 | 356 | |
|
348 | 357 | human_value = value.copy() |
|
349 | 358 | human_value['text'] = "{}/{}, {}% used".format( |
|
350 | 359 | value['used'], value['total'], value['percent']) |
|
351 | 360 | |
|
352 | 361 | if state['type'] == STATE_OK and value['percent'] > 90: |
|
353 | 362 | msg = 'Critical: your disk free inodes are very low.' |
|
354 | 363 | state = {'message': msg, 'type': STATE_ERR} |
|
355 | 364 | |
|
356 | 365 | elif state['type'] == STATE_OK and value['percent'] > 70: |
|
357 | 366 | msg = 'Warning: your disk free inodes are running low.' |
|
358 | 367 | state = {'message': msg, 'type': STATE_WARN} |
|
359 | 368 | |
|
360 | 369 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
361 | 370 | |
|
362 | 371 | |
|
363 | 372 | def storage_archives(): |
|
364 | 373 | import rhodecode |
|
365 | 374 | from rhodecode.lib.utils import safe_str |
|
366 | 375 | from rhodecode.lib.helpers import format_byte_size_binary |
|
367 | 376 | |
|
368 | 377 | msg = 'Enable this by setting ' \ |
|
369 | 378 | 'archive_cache_dir=/path/to/cache option in the .ini file' |
|
370 | 379 | path = safe_str(rhodecode.CONFIG.get('archive_cache_dir', msg)) |
|
371 | 380 | |
|
372 | 381 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
373 | 382 | state = STATE_OK_DEFAULT |
|
374 | 383 | try: |
|
375 | 384 | items_count = 0 |
|
376 | 385 | used = 0 |
|
377 | 386 | for root, dirs, files in os.walk(path): |
|
378 | 387 | if root == path: |
|
379 | 388 | items_count = len(files) |
|
380 | 389 | |
|
381 | 390 | for f in files: |
|
382 | 391 | try: |
|
383 | 392 | used += os.path.getsize(os.path.join(root, f)) |
|
384 | 393 | except OSError: |
|
385 | 394 | pass |
|
386 | 395 | value.update({ |
|
387 | 396 | 'percent': 100, |
|
388 | 397 | 'used': used, |
|
389 | 398 | 'total': used, |
|
390 | 399 | 'items': items_count |
|
391 | 400 | }) |
|
392 | 401 | |
|
393 | 402 | except Exception as e: |
|
394 | 403 | log.exception('failed to fetch archive cache storage') |
|
395 | 404 | state = {'message': str(e), 'type': STATE_ERR} |
|
396 | 405 | |
|
397 | 406 | human_value = value.copy() |
|
398 | 407 | human_value['used'] = format_byte_size_binary(value['used']) |
|
399 | 408 | human_value['total'] = format_byte_size_binary(value['total']) |
|
400 | 409 | human_value['text'] = "{} ({} items)".format( |
|
401 | 410 | human_value['used'], value['items']) |
|
402 | 411 | |
|
403 | 412 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
404 | 413 | |
|
405 | 414 | |
|
406 | 415 | def storage_gist(): |
|
407 | 416 | from rhodecode.model.gist import GIST_STORE_LOC |
|
408 | 417 | from rhodecode.model.settings import VcsSettingsModel |
|
409 | 418 | from rhodecode.lib.utils import safe_str |
|
410 | 419 | from rhodecode.lib.helpers import format_byte_size_binary |
|
411 | 420 | path = safe_str(os.path.join( |
|
412 | 421 | VcsSettingsModel().get_repos_location(), GIST_STORE_LOC)) |
|
413 | 422 | |
|
414 | 423 | # gist storage |
|
415 | 424 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
416 | 425 | state = STATE_OK_DEFAULT |
|
417 | 426 | |
|
418 | 427 | try: |
|
419 | 428 | items_count = 0 |
|
420 | 429 | used = 0 |
|
421 | 430 | for root, dirs, files in os.walk(path): |
|
422 | 431 | if root == path: |
|
423 | 432 | items_count = len(dirs) |
|
424 | 433 | |
|
425 | 434 | for f in files: |
|
426 | 435 | try: |
|
427 | 436 | used += os.path.getsize(os.path.join(root, f)) |
|
428 | 437 | except OSError: |
|
429 | 438 | pass |
|
430 | 439 | value.update({ |
|
431 | 440 | 'percent': 100, |
|
432 | 441 | 'used': used, |
|
433 | 442 | 'total': used, |
|
434 | 443 | 'items': items_count |
|
435 | 444 | }) |
|
436 | 445 | except Exception as e: |
|
437 | 446 | log.exception('failed to fetch gist storage items') |
|
438 | 447 | state = {'message': str(e), 'type': STATE_ERR} |
|
439 | 448 | |
|
440 | 449 | human_value = value.copy() |
|
441 | 450 | human_value['used'] = format_byte_size_binary(value['used']) |
|
442 | 451 | human_value['total'] = format_byte_size_binary(value['total']) |
|
443 | 452 | human_value['text'] = "{} ({} items)".format( |
|
444 | 453 | human_value['used'], value['items']) |
|
445 | 454 | |
|
446 | 455 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
447 | 456 | |
|
448 | 457 | |
|
449 | 458 | def storage_temp(): |
|
450 | 459 | import tempfile |
|
451 | 460 | from rhodecode.lib.helpers import format_byte_size_binary |
|
452 | 461 | |
|
453 | 462 | path = tempfile.gettempdir() |
|
454 | 463 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
455 | 464 | state = STATE_OK_DEFAULT |
|
456 | 465 | |
|
457 | 466 | if not psutil: |
|
458 | 467 | return SysInfoRes(value=value, state=state) |
|
459 | 468 | |
|
460 | 469 | try: |
|
461 | 470 | value.update(dict(psutil.disk_usage(path)._asdict())) |
|
462 | 471 | except Exception as e: |
|
463 | 472 | log.exception('Failed to fetch temp dir info') |
|
464 | 473 | state = {'message': str(e), 'type': STATE_ERR} |
|
465 | 474 | |
|
466 | 475 | human_value = value.copy() |
|
467 | 476 | human_value['used'] = format_byte_size_binary(value['used']) |
|
468 | 477 | human_value['total'] = format_byte_size_binary(value['total']) |
|
469 | 478 | human_value['text'] = "{}/{}, {}% used".format( |
|
470 | 479 | format_byte_size_binary(value['used']), |
|
471 | 480 | format_byte_size_binary(value['total']), |
|
472 | 481 | value['percent']) |
|
473 | 482 | |
|
474 | 483 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
475 | 484 | |
|
476 | 485 | |
|
477 | 486 | def search_info(): |
|
478 | 487 | import rhodecode |
|
479 | 488 | from rhodecode.lib.index import searcher_from_config |
|
480 | 489 | |
|
481 | 490 | backend = rhodecode.CONFIG.get('search.module', '') |
|
482 | 491 | location = rhodecode.CONFIG.get('search.location', '') |
|
483 | 492 | |
|
484 | 493 | try: |
|
485 | 494 | searcher = searcher_from_config(rhodecode.CONFIG) |
|
486 | 495 | searcher = searcher.__class__.__name__ |
|
487 | 496 | except Exception: |
|
488 | 497 | searcher = None |
|
489 | 498 | |
|
490 | 499 | value = dict( |
|
491 | 500 | backend=backend, searcher=searcher, location=location, text='') |
|
492 | 501 | state = STATE_OK_DEFAULT |
|
493 | 502 | |
|
494 | 503 | human_value = value.copy() |
|
495 | 504 | human_value['text'] = "backend:`{}`".format(human_value['backend']) |
|
496 | 505 | |
|
497 | 506 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
498 | 507 | |
|
499 | 508 | |
|
500 | 509 | def git_info(): |
|
501 | 510 | from rhodecode.lib.vcs.backends import git |
|
502 | 511 | state = STATE_OK_DEFAULT |
|
503 | 512 | value = human_value = '' |
|
504 | 513 | try: |
|
505 | 514 | value = git.discover_git_version(raise_on_exc=True) |
|
506 | 515 | human_value = 'version reported from VCSServer: {}'.format(value) |
|
507 | 516 | except Exception as e: |
|
508 | 517 | state = {'message': str(e), 'type': STATE_ERR} |
|
509 | 518 | |
|
510 | 519 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
511 | 520 | |
|
512 | 521 | |
|
513 | 522 | def hg_info(): |
|
514 | 523 | from rhodecode.lib.vcs.backends import hg |
|
515 | 524 | state = STATE_OK_DEFAULT |
|
516 | 525 | value = human_value = '' |
|
517 | 526 | try: |
|
518 | 527 | value = hg.discover_hg_version(raise_on_exc=True) |
|
519 | 528 | human_value = 'version reported from VCSServer: {}'.format(value) |
|
520 | 529 | except Exception as e: |
|
521 | 530 | state = {'message': str(e), 'type': STATE_ERR} |
|
522 | 531 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
523 | 532 | |
|
524 | 533 | |
|
525 | 534 | def svn_info(): |
|
526 | 535 | from rhodecode.lib.vcs.backends import svn |
|
527 | 536 | state = STATE_OK_DEFAULT |
|
528 | 537 | value = human_value = '' |
|
529 | 538 | try: |
|
530 | 539 | value = svn.discover_svn_version(raise_on_exc=True) |
|
531 | 540 | human_value = 'version reported from VCSServer: {}'.format(value) |
|
532 | 541 | except Exception as e: |
|
533 | 542 | state = {'message': str(e), 'type': STATE_ERR} |
|
534 | 543 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
535 | 544 | |
|
536 | 545 | |
|
537 | 546 | def vcs_backends(): |
|
538 | 547 | import rhodecode |
|
539 | 548 | value = rhodecode.CONFIG.get('vcs.backends') |
|
540 | 549 | human_value = 'Enabled backends in order: {}'.format(','.join(value)) |
|
541 | 550 | return SysInfoRes(value=value, human_value=human_value) |
|
542 | 551 | |
|
543 | 552 | |
|
544 | 553 | def vcs_server(): |
|
545 | 554 | import rhodecode |
|
546 | 555 | from rhodecode.lib.vcs.backends import get_vcsserver_service_data |
|
547 | 556 | |
|
548 | 557 | server_url = rhodecode.CONFIG.get('vcs.server') |
|
549 | 558 | enabled = rhodecode.CONFIG.get('vcs.server.enable') |
|
550 | 559 | protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http' |
|
551 | 560 | state = STATE_OK_DEFAULT |
|
552 | 561 | version = None |
|
553 | 562 | workers = 0 |
|
554 | 563 | |
|
555 | 564 | try: |
|
556 | 565 | data = get_vcsserver_service_data() |
|
557 | 566 | if data and 'version' in data: |
|
558 | 567 | version = data['version'] |
|
559 | 568 | |
|
560 | 569 | if data and 'config' in data: |
|
561 | 570 | conf = data['config'] |
|
562 | 571 | workers = conf.get('workers', 'NOT AVAILABLE') |
|
563 | 572 | |
|
564 | 573 | connection = 'connected' |
|
565 | 574 | except Exception as e: |
|
566 | 575 | connection = 'failed' |
|
567 | 576 | state = {'message': str(e), 'type': STATE_ERR} |
|
568 | 577 | |
|
569 | 578 | value = dict( |
|
570 | 579 | url=server_url, |
|
571 | 580 | enabled=enabled, |
|
572 | 581 | protocol=protocol, |
|
573 | 582 | connection=connection, |
|
574 | 583 | version=version, |
|
575 | 584 | text='', |
|
576 | 585 | ) |
|
577 | 586 | |
|
578 | 587 | human_value = value.copy() |
|
579 | 588 | human_value['text'] = \ |
|
580 | 589 | '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format( |
|
581 | 590 | url=server_url, ver=version, workers=workers, mode=protocol, |
|
582 | 591 | conn=connection) |
|
583 | 592 | |
|
584 | 593 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
585 | 594 | |
|
586 | 595 | |
|
587 | 596 | def rhodecode_app_info(): |
|
588 | 597 | import rhodecode |
|
589 | 598 | edition = rhodecode.CONFIG.get('rhodecode.edition') |
|
590 | 599 | |
|
591 | 600 | value = dict( |
|
592 | 601 | rhodecode_version=rhodecode.__version__, |
|
593 | 602 | rhodecode_lib_path=os.path.abspath(rhodecode.__file__), |
|
594 | 603 | text='' |
|
595 | 604 | ) |
|
596 | 605 | human_value = value.copy() |
|
597 | 606 | human_value['text'] = 'RhodeCode {edition}, version {ver}'.format( |
|
598 | 607 | edition=edition, ver=value['rhodecode_version'] |
|
599 | 608 | ) |
|
600 | 609 | return SysInfoRes(value=value, human_value=human_value) |
|
601 | 610 | |
|
602 | 611 | |
|
603 | 612 | def rhodecode_config(): |
|
604 | 613 | import rhodecode |
|
605 | 614 | path = rhodecode.CONFIG.get('__file__') |
|
606 | 615 | rhodecode_ini_safe = rhodecode.CONFIG.copy() |
|
616 | cert_path = get_cert_path(path) | |
|
607 | 617 | |
|
608 | 618 | try: |
|
609 | 619 | config = configparser.ConfigParser() |
|
610 | 620 | config.read(path) |
|
611 | 621 | parsed_ini = config |
|
612 | 622 | if parsed_ini.has_section('server:main'): |
|
613 | 623 | parsed_ini = dict(parsed_ini.items('server:main')) |
|
614 | 624 | except Exception: |
|
615 | 625 | log.exception('Failed to read .ini file for display') |
|
616 | 626 | parsed_ini = {} |
|
617 | 627 | |
|
618 | cert_path = os.path.join( | |
|
619 | os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(path)))), | |
|
620 | '.rccontrol-profile/etc/ca-bundle.crt') | |
|
621 | ||
|
622 | 628 | rhodecode_ini_safe['server:main'] = parsed_ini |
|
623 | 629 | |
|
624 | 630 | blacklist = [ |
|
625 | 631 | 'rhodecode_license_key', |
|
626 | 632 | 'routes.map', |
|
627 | 633 | 'sqlalchemy.db1.url', |
|
628 | 634 | 'channelstream.secret', |
|
629 | 635 | 'beaker.session.secret', |
|
630 | 636 | 'rhodecode.encrypted_values.secret', |
|
631 | 637 | 'rhodecode_auth_github_consumer_key', |
|
632 | 638 | 'rhodecode_auth_github_consumer_secret', |
|
633 | 639 | 'rhodecode_auth_google_consumer_key', |
|
634 | 640 | 'rhodecode_auth_google_consumer_secret', |
|
635 | 641 | 'rhodecode_auth_bitbucket_consumer_secret', |
|
636 | 642 | 'rhodecode_auth_bitbucket_consumer_key', |
|
637 | 643 | 'rhodecode_auth_twitter_consumer_secret', |
|
638 | 644 | 'rhodecode_auth_twitter_consumer_key', |
|
639 | 645 | |
|
640 | 646 | 'rhodecode_auth_twitter_secret', |
|
641 | 647 | 'rhodecode_auth_github_secret', |
|
642 | 648 | 'rhodecode_auth_google_secret', |
|
643 | 649 | 'rhodecode_auth_bitbucket_secret', |
|
644 | 650 | |
|
645 | 651 | 'appenlight.api_key', |
|
646 | 652 | ('app_conf', 'sqlalchemy.db1.url') |
|
647 | 653 | ] |
|
648 | 654 | for k in blacklist: |
|
649 | 655 | if isinstance(k, tuple): |
|
650 | 656 | section, key = k |
|
651 | 657 | if section in rhodecode_ini_safe: |
|
652 | 658 | rhodecode_ini_safe[section] = '**OBFUSCATED**' |
|
653 | 659 | else: |
|
654 | 660 | rhodecode_ini_safe.pop(k, None) |
|
655 | 661 | |
|
656 | 662 | # TODO: maybe put some CONFIG checks here ? |
|
657 | 663 | return SysInfoRes(value={'config': rhodecode_ini_safe, |
|
658 | 664 | 'path': path, 'cert_path': cert_path}) |
|
659 | 665 | |
|
660 | 666 | |
|
661 | 667 | def database_info(): |
|
662 | 668 | import rhodecode |
|
663 | 669 | from sqlalchemy.engine import url as engine_url |
|
664 | 670 | from rhodecode.model.meta import Base as sql_base, Session |
|
665 | 671 | from rhodecode.model.db import DbMigrateVersion |
|
666 | 672 | |
|
667 | 673 | state = STATE_OK_DEFAULT |
|
668 | 674 | |
|
669 | 675 | db_migrate = DbMigrateVersion.query().filter( |
|
670 | 676 | DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one() |
|
671 | 677 | |
|
672 | 678 | db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url']) |
|
673 | 679 | |
|
674 | 680 | try: |
|
675 | 681 | engine = sql_base.metadata.bind |
|
676 | 682 | db_server_info = engine.dialect._get_server_version_info( |
|
677 | 683 | Session.connection(bind=engine)) |
|
678 | 684 | db_version = '.'.join(map(str, db_server_info)) |
|
679 | 685 | except Exception: |
|
680 | 686 | log.exception('failed to fetch db version') |
|
681 | 687 | db_version = 'UNKNOWN' |
|
682 | 688 | |
|
683 | 689 | db_info = dict( |
|
684 | 690 | migrate_version=db_migrate.version, |
|
685 | 691 | type=db_url_obj.get_backend_name(), |
|
686 | 692 | version=db_version, |
|
687 | 693 | url=repr(db_url_obj) |
|
688 | 694 | ) |
|
689 | 695 | current_version = db_migrate.version |
|
690 | 696 | expected_version = rhodecode.__dbversion__ |
|
691 | 697 | if state['type'] == STATE_OK and current_version != expected_version: |
|
692 | 698 | msg = 'Critical: database schema mismatch, ' \ |
|
693 | 699 | 'expected version {}, got {}. ' \ |
|
694 | 700 | 'Please run migrations on your database.'.format( |
|
695 | 701 | expected_version, current_version) |
|
696 | 702 | state = {'message': msg, 'type': STATE_ERR} |
|
697 | 703 | |
|
698 | 704 | human_value = db_info.copy() |
|
699 | 705 | human_value['url'] = "{} @ migration version: {}".format( |
|
700 | 706 | db_info['url'], db_info['migrate_version']) |
|
701 | 707 | human_value['version'] = "{} {}".format(db_info['type'], db_info['version']) |
|
702 | 708 | return SysInfoRes(value=db_info, state=state, human_value=human_value) |
|
703 | 709 | |
|
704 | 710 | |
|
705 | 711 | def server_info(environ): |
|
706 | 712 | import rhodecode |
|
707 | 713 | from rhodecode.lib.base import get_server_ip_addr, get_server_port |
|
708 | 714 | |
|
709 | 715 | value = { |
|
710 | 716 | 'server_ip': '%s:%s' % ( |
|
711 | 717 | get_server_ip_addr(environ, log_errors=False), |
|
712 | 718 | get_server_port(environ) |
|
713 | 719 | ), |
|
714 | 720 | 'server_id': rhodecode.CONFIG.get('instance_id'), |
|
715 | 721 | } |
|
716 | 722 | return SysInfoRes(value=value) |
|
717 | 723 | |
|
718 | 724 | |
|
719 | 725 | def usage_info(): |
|
720 | 726 | from rhodecode.model.db import User, Repository |
|
721 | 727 | value = { |
|
722 | 728 | 'users': User.query().count(), |
|
723 | 729 | 'users_active': User.query().filter(User.active == True).count(), |
|
724 | 730 | 'repositories': Repository.query().count(), |
|
725 | 731 | 'repository_types': { |
|
726 | 732 | 'hg': Repository.query().filter( |
|
727 | 733 | Repository.repo_type == 'hg').count(), |
|
728 | 734 | 'git': Repository.query().filter( |
|
729 | 735 | Repository.repo_type == 'git').count(), |
|
730 | 736 | 'svn': Repository.query().filter( |
|
731 | 737 | Repository.repo_type == 'svn').count(), |
|
732 | 738 | }, |
|
733 | 739 | } |
|
734 | 740 | return SysInfoRes(value=value) |
|
735 | 741 | |
|
736 | 742 | |
|
737 | 743 | def get_system_info(environ): |
|
738 | 744 | environ = environ or {} |
|
739 | 745 | return { |
|
740 | 746 | 'rhodecode_app': SysInfo(rhodecode_app_info)(), |
|
741 | 747 | 'rhodecode_config': SysInfo(rhodecode_config)(), |
|
742 | 748 | 'rhodecode_usage': SysInfo(usage_info)(), |
|
743 | 749 | 'python': SysInfo(python_info)(), |
|
744 | 750 | 'py_modules': SysInfo(py_modules)(), |
|
745 | 751 | |
|
746 | 752 | 'platform': SysInfo(platform_type)(), |
|
747 | 753 | 'locale': SysInfo(locale_info)(), |
|
748 | 754 | 'server': SysInfo(server_info, environ=environ)(), |
|
749 | 755 | 'database': SysInfo(database_info)(), |
|
750 | 756 | 'ulimit': SysInfo(ulimit_info)(), |
|
751 | 757 | 'storage': SysInfo(storage)(), |
|
752 | 758 | 'storage_inodes': SysInfo(storage_inodes)(), |
|
753 | 759 | 'storage_archive': SysInfo(storage_archives)(), |
|
754 | 760 | 'storage_gist': SysInfo(storage_gist)(), |
|
755 | 761 | 'storage_temp': SysInfo(storage_temp)(), |
|
756 | 762 | |
|
757 | 763 | 'search': SysInfo(search_info)(), |
|
758 | 764 | |
|
759 | 765 | 'uptime': SysInfo(uptime)(), |
|
760 | 766 | 'load': SysInfo(machine_load)(), |
|
761 | 767 | 'cpu': SysInfo(cpu)(), |
|
762 | 768 | 'memory': SysInfo(memory)(), |
|
763 | 769 | |
|
764 | 770 | 'vcs_backends': SysInfo(vcs_backends)(), |
|
765 | 771 | 'vcs_server': SysInfo(vcs_server)(), |
|
766 | 772 | |
|
767 | 773 | 'git': SysInfo(git_info)(), |
|
768 | 774 | 'hg': SysInfo(hg_info)(), |
|
769 | 775 | 'svn': SysInfo(svn_info)(), |
|
770 | 776 | } |
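
The hunk above extracts a reusable get_cert_path() helper: starting from the running .ini file it checks for an .rccontrol-profile/etc/ca-bundle.crt two directories above the .ini file's own directory, and falls back to the system-wide /etc/ssl/certs/ca-certificates.crt otherwise. A minimal usage sketch follows; the .ini path is a hypothetical example, not a value from this changeset.

    from rhodecode.lib.system_info import get_cert_path

    # Returns the .rccontrol-profile ca-bundle.crt when it exists at the
    # expected location relative to the .ini file, otherwise the system-wide
    # CA bundle fallback.
    ca_bundle = get_cert_path('/home/rhodecode/instance-1/rhodecode.ini')  # hypothetical path
    print(ca_bundle)
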
@@ -1,305 +1,311 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Client for the VCSServer implemented based on HTTP. |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import copy |
|
26 | 26 | import logging |
|
27 | 27 | import threading |
|
28 | 28 | import urllib2 |
|
29 | 29 | import urlparse |
|
30 | 30 | import uuid |
|
31 | 31 | import traceback |
|
32 | 32 | |
|
33 | 33 | import pycurl |
|
34 | 34 | import msgpack |
|
35 | 35 | import requests |
|
36 | 36 | from requests.packages.urllib3.util.retry import Retry |
|
37 | 37 | |
|
38 | from . import exceptions, CurlSession | |
|
38 | import rhodecode | |
|
39 | from rhodecode.lib.system_info import get_cert_path | |
|
40 | from rhodecode.lib.vcs import exceptions, CurlSession | |
|
39 | 41 | |
|
40 | 42 | |
|
41 | 43 | log = logging.getLogger(__name__) |
|
42 | 44 | |
|
43 | 45 | |
|
44 | 46 | # TODO: mikhail: Keep it in sync with vcsserver's |
|
45 | 47 | # HTTPApplication.ALLOWED_EXCEPTIONS |
|
46 | 48 | EXCEPTIONS_MAP = { |
|
47 | 49 | 'KeyError': KeyError, |
|
48 | 50 | 'URLError': urllib2.URLError, |
|
49 | 51 | } |
|
50 | 52 | |
|
51 | 53 | |
|
52 | 54 | class RepoMaker(object): |
|
53 | 55 | |
|
54 | 56 | def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory): |
|
55 | 57 | self.url = urlparse.urljoin( |
|
56 | 58 | 'http://%s' % server_and_port, backend_endpoint) |
|
57 | 59 | self._session_factory = session_factory |
|
58 | 60 | self.backend_type = backend_type |
|
59 | 61 | |
|
60 | 62 | def __call__(self, path, config, with_wire=None): |
|
61 | 63 | log.debug('RepoMaker call on %s', path) |
|
62 | 64 | return RemoteRepo( |
|
63 | 65 | path, config, self.url, self._session_factory(), |
|
64 | 66 | with_wire=with_wire) |
|
65 | 67 | |
|
66 | 68 | def __getattr__(self, name): |
|
67 | 69 | def f(*args, **kwargs): |
|
68 | 70 | return self._call(name, *args, **kwargs) |
|
69 | 71 | return f |
|
70 | 72 | |
|
71 | 73 | @exceptions.map_vcs_exceptions |
|
72 | 74 | def _call(self, name, *args, **kwargs): |
|
73 | 75 | payload = { |
|
74 | 76 | 'id': str(uuid.uuid4()), |
|
75 | 77 | 'method': name, |
|
76 | 78 | 'backend': self.backend_type, |
|
77 | 79 | 'params': {'args': args, 'kwargs': kwargs} |
|
78 | 80 | } |
|
79 | 81 | return _remote_call( |
|
80 | 82 | self.url, payload, EXCEPTIONS_MAP, self._session_factory()) |
|
81 | 83 | |
|
82 | 84 | |
|
83 | 85 | class ServiceConnection(object): |
|
84 | 86 | def __init__(self, server_and_port, backend_endpoint, session_factory): |
|
85 | 87 | self.url = urlparse.urljoin( |
|
86 | 88 | 'http://%s' % server_and_port, backend_endpoint) |
|
87 | 89 | self._session_factory = session_factory |
|
88 | 90 | |
|
89 | 91 | def __getattr__(self, name): |
|
90 | 92 | def f(*args, **kwargs): |
|
91 | 93 | return self._call(name, *args, **kwargs) |
|
92 | 94 | |
|
93 | 95 | return f |
|
94 | 96 | |
|
95 | 97 | @exceptions.map_vcs_exceptions |
|
96 | 98 | def _call(self, name, *args, **kwargs): |
|
97 | 99 | payload = { |
|
98 | 100 | 'id': str(uuid.uuid4()), |
|
99 | 101 | 'method': name, |
|
100 | 102 | 'params': {'args': args, 'kwargs': kwargs} |
|
101 | 103 | } |
|
102 | 104 | return _remote_call( |
|
103 | 105 | self.url, payload, EXCEPTIONS_MAP, self._session_factory()) |
|
104 | 106 | |
|
105 | 107 | |
|
106 | 108 | class RemoteRepo(object): |
|
107 | 109 | |
|
108 | 110 | def __init__(self, path, config, url, session, with_wire=None): |
|
109 | 111 | self.url = url |
|
110 | 112 | self._session = session |
|
111 | 113 | self._wire = { |
|
112 | 114 | "path": path, |
|
113 | 115 | "config": config, |
|
114 | 116 | "context": self._create_vcs_cache_context(), |
|
115 | 117 | } |
|
116 | 118 | if with_wire: |
|
117 | 119 | self._wire.update(with_wire) |
|
118 | 120 | |
|
119 | 121 | # johbo: Trading complexity for performance. Avoiding the call to |
|
120 | 122 | # log.debug brings a few percent gain even if is is not active. |
|
121 | 123 | if log.isEnabledFor(logging.DEBUG): |
|
122 | 124 | self._call = self._call_with_logging |
|
123 | 125 | |
|
126 | self.cert_dir = get_cert_path(rhodecode.CONFIG.get('__file__')) | |
|
127 | ||
|
124 | 128 | def __getattr__(self, name): |
|
125 | 129 | def f(*args, **kwargs): |
|
126 | 130 | return self._call(name, *args, **kwargs) |
|
127 | 131 | return f |
|
128 | 132 | |
|
129 | 133 | @exceptions.map_vcs_exceptions |
|
130 | 134 | def _call(self, name, *args, **kwargs): |
|
131 | 135 | # TODO: oliver: This is currently necessary pre-call since the |
|
132 | 136 | # config object is being changed for hooking scenarios |
|
133 | 137 | wire = copy.deepcopy(self._wire) |
|
134 | 138 | wire["config"] = wire["config"].serialize() |
|
139 | ||
|
140 | wire["config"].append(('vcs', 'ssl_dir', self.cert_dir)) | |
|
135 | 141 | payload = { |
|
136 | 142 | 'id': str(uuid.uuid4()), |
|
137 | 143 | 'method': name, |
|
138 | 144 | 'params': {'wire': wire, 'args': args, 'kwargs': kwargs} |
|
139 | 145 | } |
|
140 | 146 | return _remote_call(self.url, payload, EXCEPTIONS_MAP, self._session) |
|
141 | 147 | |
|
142 | 148 | def _call_with_logging(self, name, *args, **kwargs): |
|
143 | 149 | context_uid = self._wire.get('context') |
|
144 | 150 | log.debug('Calling %s@%s with args:%r. wire_context: %s', |
|
145 | 151 | self.url, name, args, context_uid) |
|
146 | 152 | return RemoteRepo._call(self, name, *args, **kwargs) |
|
147 | 153 | |
|
148 | 154 | def __getitem__(self, key): |
|
149 | 155 | return self.revision(key) |
|
150 | 156 | |
|
151 | 157 | def _create_vcs_cache_context(self): |
|
152 | 158 | """ |
|
153 | 159 | Creates a unique string which is passed to the VCSServer on every |
|
154 | 160 | remote call. It is used as cache key in the VCSServer. |
|
155 | 161 | """ |
|
156 | 162 | return str(uuid.uuid4()) |
|
157 | 163 | |
|
158 | 164 | def invalidate_vcs_cache(self): |
|
159 | 165 | """ |
|
160 | 166 | This invalidates the context which is sent to the VCSServer on every |
|
161 | 167 | call to a remote method. It forces the VCSServer to create a fresh |
|
162 | 168 | repository instance on the next call to a remote method. |
|
163 | 169 | """ |
|
164 | 170 | self._wire['context'] = self._create_vcs_cache_context() |
|
165 | 171 | |
|
166 | 172 | |
|
167 | 173 | class RemoteObject(object): |
|
168 | 174 | |
|
169 | 175 | def __init__(self, url, session): |
|
170 | 176 | self._url = url |
|
171 | 177 | self._session = session |
|
172 | 178 | |
|
173 | 179 | # johbo: Trading complexity for performance. Avoiding the call to |
|
174 | 180 | # log.debug brings a few percent gain even if is is not active. |
|
175 | 181 | if log.isEnabledFor(logging.DEBUG): |
|
176 | 182 | self._call = self._call_with_logging |
|
177 | 183 | |
|
178 | 184 | def __getattr__(self, name): |
|
179 | 185 | def f(*args, **kwargs): |
|
180 | 186 | return self._call(name, *args, **kwargs) |
|
181 | 187 | return f |
|
182 | 188 | |
|
183 | 189 | @exceptions.map_vcs_exceptions |
|
184 | 190 | def _call(self, name, *args, **kwargs): |
|
185 | 191 | payload = { |
|
186 | 192 | 'id': str(uuid.uuid4()), |
|
187 | 193 | 'method': name, |
|
188 | 194 | 'params': {'args': args, 'kwargs': kwargs} |
|
189 | 195 | } |
|
190 | 196 | return _remote_call(self._url, payload, EXCEPTIONS_MAP, self._session) |
|
191 | 197 | |
|
192 | 198 | def _call_with_logging(self, name, *args, **kwargs): |
|
193 | 199 | log.debug('Calling %s@%s', self._url, name) |
|
194 | 200 | return RemoteObject._call(self, name, *args, **kwargs) |
|
195 | 201 | |
|
196 | 202 | |
|
197 | 203 | def _remote_call(url, payload, exceptions_map, session): |
|
198 | 204 | try: |
|
199 | 205 | response = session.post(url, data=msgpack.packb(payload)) |
|
200 | 206 | except pycurl.error as e: |
|
201 | 207 | msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc()) |
|
202 | 208 | raise exceptions.HttpVCSCommunicationError(msg) |
|
203 | 209 | except Exception as e: |
|
204 | 210 | message = getattr(e, 'message', '') |
|
205 | 211 | if 'Failed to connect' in message: |
|
206 | 212 | # gevent doesn't return proper pycurl errors |
|
207 | 213 | raise exceptions.HttpVCSCommunicationError(e) |
|
208 | 214 | else: |
|
209 | 215 | raise |
|
210 | 216 | |
|
211 | 217 | if response.status_code >= 400: |
|
212 | 218 | log.error('Call to %s returned non 200 HTTP code: %s', |
|
213 | 219 | url, response.status_code) |
|
214 | 220 | raise exceptions.HttpVCSCommunicationError(repr(response.content)) |
|
215 | 221 | |
|
216 | 222 | try: |
|
217 | 223 | response = msgpack.unpackb(response.content) |
|
218 | 224 | except Exception: |
|
219 | 225 | log.exception('Failed to decode response %r', response.content) |
|
220 | 226 | raise |
|
221 | 227 | |
|
222 | 228 | error = response.get('error') |
|
223 | 229 | if error: |
|
224 | 230 | type_ = error.get('type', 'Exception') |
|
225 | 231 | exc = exceptions_map.get(type_, Exception) |
|
226 | 232 | exc = exc(error.get('message')) |
|
227 | 233 | try: |
|
228 | 234 | exc._vcs_kind = error['_vcs_kind'] |
|
229 | 235 | except KeyError: |
|
230 | 236 | pass |
|
231 | 237 | |
|
232 | 238 | try: |
|
233 | 239 | exc._vcs_server_traceback = error['traceback'] |
|
234 | 240 | except KeyError: |
|
235 | 241 | pass |
|
236 | 242 | |
|
237 | 243 | raise exc |
|
238 | 244 | return response.get('result') |
|
239 | 245 | |
|
240 | 246 | |
|
241 | 247 | class VcsHttpProxy(object): |
|
242 | 248 | |
|
243 | 249 | CHUNK_SIZE = 16384 |
|
244 | 250 | |
|
245 | 251 | def __init__(self, server_and_port, backend_endpoint): |
|
246 | 252 | |
|
247 | 253 | |
|
248 | 254 | retries = Retry(total=5, connect=None, read=None, redirect=None) |
|
249 | 255 | |
|
250 | 256 | adapter = requests.adapters.HTTPAdapter(max_retries=retries) |
|
251 | 257 | self.base_url = urlparse.urljoin( |
|
252 | 258 | 'http://%s' % server_and_port, backend_endpoint) |
|
253 | 259 | self.session = requests.Session() |
|
254 | 260 | self.session.mount('http://', adapter) |
|
255 | 261 | |
|
256 | 262 | def handle(self, environment, input_data, *args, **kwargs): |
|
257 | 263 | data = { |
|
258 | 264 | 'environment': environment, |
|
259 | 265 | 'input_data': input_data, |
|
260 | 266 | 'args': args, |
|
261 | 267 | 'kwargs': kwargs |
|
262 | 268 | } |
|
263 | 269 | result = self.session.post( |
|
264 | 270 | self.base_url, msgpack.packb(data), stream=True) |
|
265 | 271 | return self._get_result(result) |
|
266 | 272 | |
|
267 | 273 | def _deserialize_and_raise(self, error): |
|
268 | 274 | exception = Exception(error['message']) |
|
269 | 275 | try: |
|
270 | 276 | exception._vcs_kind = error['_vcs_kind'] |
|
271 | 277 | except KeyError: |
|
272 | 278 | pass |
|
273 | 279 | raise exception |
|
274 | 280 | |
|
275 | 281 | def _iterate(self, result): |
|
276 | 282 | unpacker = msgpack.Unpacker() |
|
277 | 283 | for line in result.iter_content(chunk_size=self.CHUNK_SIZE): |
|
278 | 284 | unpacker.feed(line) |
|
279 | 285 | for chunk in unpacker: |
|
280 | 286 | yield chunk |
|
281 | 287 | |
|
282 | 288 | def _get_result(self, result): |
|
283 | 289 | iterator = self._iterate(result) |
|
284 | 290 | error = iterator.next() |
|
285 | 291 | if error: |
|
286 | 292 | self._deserialize_and_raise(error) |
|
287 | 293 | |
|
288 | 294 | status = iterator.next() |
|
289 | 295 | headers = iterator.next() |
|
290 | 296 | |
|
291 | 297 | return iterator, status, headers |
|
292 | 298 | |
|
293 | 299 | |
|
294 | 300 | class ThreadlocalSessionFactory(object): |
|
295 | 301 | """ |
|
296 | 302 | Creates one CurlSession per thread on demand. |
|
297 | 303 | """ |
|
298 | 304 | |
|
299 | 305 | def __init__(self): |
|
300 | 306 | self._thread_local = threading.local() |
|
301 | 307 | |
|
302 | 308 | def __call__(self): |
|
303 | 309 | if not hasattr(self._thread_local, 'curl_session'): |
|
304 | 310 | self._thread_local.curl_session = CurlSession() |
|
305 | 311 | return self._thread_local.curl_session |
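
The client-side change above resolves cert_dir once per RemoteRepo via get_cert_path(rhodecode.CONFIG.get('__file__')) and appends it to the serialized wire config on every remote call, so the VCSServer receives the CA bundle location as a ('vcs', 'ssl_dir', cert_dir) entry. A rough sketch of the resulting payload; DummyConfig, the repository path, and the method name are stand-ins, since the real config object is not shown in this diff.

    import uuid

    class DummyConfig(object):
        # stand-in for the real vcs config object; its serialize() is assumed
        # to yield (section, option, value) tuples, matching how the wire
        # config is appended to below
        def serialize(self):
            return [('ui', 'username', 'example-user')]

    cert_dir = '/etc/ssl/certs/ca-certificates.crt'  # whatever get_cert_path() returned
    wire = {
        'path': '/srv/repos/example-repo',           # hypothetical repository path
        'config': DummyConfig().serialize(),
        'context': str(uuid.uuid4()),
    }
    wire['config'].append(('vcs', 'ssl_dir', cert_dir))

    payload = {
        'id': str(uuid.uuid4()),
        'method': 'pull',                            # hypothetical remote method name
        'params': {'wire': wire, 'args': (), 'kwargs': {}},
    }
    # _remote_call() msgpack-packs a payload of this shape and POSTs it to the
    # VCSServer endpoint.
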
@@ -1,336 +1,355 b'' | |||
|
1 | 1 | |
|
2 | 2 | /****************************************************************************** |
|
3 | 3 | * * |
|
4 | 4 | * DO NOT CHANGE THIS FILE MANUALLY * |
|
5 | 5 | * * |
|
6 | 6 | * * |
|
7 | 7 | * This file is automatically generated when the app starts up with * |
|
8 | 8 | * generate_js_files = true * |
|
9 | 9 | * * |
|
10 | 10 | * To add a route here pass jsroute=True to the route definition in the app * |
|
11 | 11 | * * |
|
12 | 12 | ******************************************************************************/ |
|
13 | 13 | function registerRCRoutes() { |
|
14 | 14 | // routes registration |
|
15 | 15 | pyroutes.register('favicon', '/favicon.ico', []); |
|
16 | 16 | pyroutes.register('robots', '/robots.txt', []); |
|
17 | 17 | pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); |
|
18 | 18 | pyroutes.register('global_integrations_home', '/_admin/integrations', []); |
|
19 | 19 | pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); |
|
20 | 20 | pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']); |
|
21 | 21 | pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']); |
|
22 | 22 | pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']); |
|
23 | 23 | pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']); |
|
24 | 24 | pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']); |
|
25 | 25 | pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']); |
|
26 | 26 | pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']); |
|
27 | 27 | pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']); |
|
28 | 28 | pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']); |
|
29 | 29 | pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); |
|
30 | 30 | pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); |
|
31 | 31 | pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); |
|
32 | 32 | pyroutes.register('auth_home', '/_admin/auth*traverse', []); |
|
33 | 33 | pyroutes.register('ops_ping', '/_admin/ops/ping', []); |
|
34 | 34 | pyroutes.register('ops_error_test', '/_admin/ops/error', []); |
|
35 | 35 | pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); |
|
36 | 36 | pyroutes.register('ops_ping_legacy', '/_admin/ping', []); |
|
37 | 37 | pyroutes.register('ops_error_test_legacy', '/_admin/error_test', []); |
|
38 | 38 | pyroutes.register('admin_home', '/_admin', []); |
|
39 | 39 | pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); |
|
40 | 40 | pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']); |
|
41 | 41 | pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); |
|
42 | 42 | pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); |
|
43 | 43 | pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); |
|
44 | 44 | pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); |
|
45 | 45 | pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []); |
|
46 | 46 | pyroutes.register('admin_settings_system', '/_admin/settings/system', []); |
|
47 | 47 | pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []); |
|
48 | 48 | pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []); |
|
49 | 49 | pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions/delete', []); |
|
50 | 50 | pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']); |
|
51 | 51 | pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']); |
|
52 | 52 | pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); |
|
53 | 53 | pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); |
|
54 | 54 | pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []); |
|
55 | 55 | pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []); |
|
56 | 56 | pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); |
|
57 | 57 | pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []); |
|
58 | 58 | pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []); |
|
59 | 59 | pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []); |
|
60 | 60 | pyroutes.register('admin_settings', '/_admin/settings', []); |
|
61 | 61 | pyroutes.register('admin_settings_update', '/_admin/settings/update', []); |
|
62 | 62 | pyroutes.register('admin_settings_global', '/_admin/settings/global', []); |
|
63 | 63 | pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []); |
|
64 | 64 | pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []); |
|
65 | 65 | pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []); |
|
66 | 66 | pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []); |
|
67 | 67 | pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []); |
|
68 | 68 | pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []); |
|
69 | 69 | pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []); |
|
70 | 70 | pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []); |
|
71 | 71 | pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []); |
|
72 | 72 | pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []); |
|
73 | 73 | pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []); |
|
74 | 74 | pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []); |
|
75 | 75 | pyroutes.register('admin_settings_email', '/_admin/settings/email', []); |
|
76 | 76 | pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []); |
|
77 | 77 | pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []); |
|
78 | 78 | pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []); |
|
79 | 79 | pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []); |
|
80 | 80 | pyroutes.register('admin_settings_search', '/_admin/settings/search', []); |
|
81 | 81 | pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []); |
|
82 | 82 | pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []); |
|
83 | pyroutes.register('admin_settings_automation', '/_admin/_admin/settings/automation', []); | |
|
84 | 83 | pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []); |
|
85 | 84 | pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []); |
|
86 | 85 | pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []); |
|
87 | 86 | pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []); |
|
88 | 87 | pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []); |
|
89 | 88 | pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []); |
|
90 | pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []); | |
|
91 | 89 | pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []); |
|
92 | 90 | pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []); |
|
93 | 91 | pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []); |
|
94 | 92 | pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []); |
|
95 | 93 | pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []); |
|
96 | 94 | pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []); |
|
97 | 95 | pyroutes.register('users', '/_admin/users', []); |
|
98 | 96 | pyroutes.register('users_data', '/_admin/users_data', []); |
|
99 | 97 | pyroutes.register('users_create', '/_admin/users/create', []); |
|
100 | 98 | pyroutes.register('users_new', '/_admin/users/new', []); |
|
101 | 99 | pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']); |
|
102 | 100 | pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']); |
|
103 | 101 | pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']); |
|
104 | 102 | pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']); |
|
105 | 103 | pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']); |
|
106 | 104 | pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']); |
|
107 | 105 | pyroutes.register('user_force_password_reset', '/_admin/users/%(user_id)s/password_reset', ['user_id']); |
|
108 | 106 | pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']); |
|
109 | pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']); | |
|
110 | pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']); | |
|
111 | 107 | pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']); |
|
112 | 108 | pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']); |
|
113 | 109 | pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']); |
|
114 | 110 | pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']); |
|
115 | 111 | pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']); |
|
116 | 112 | pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']); |
|
117 | 113 | pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']); |
|
118 | 114 | pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']); |
|
119 | 115 | pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']); |
|
120 | 116 | pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']); |
|
121 | 117 | pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']); |
|
122 | 118 | pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']); |
|
123 | 119 | pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']); |
|
124 | 120 | pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']); |
|
125 | 121 | pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']); |
|
126 | 122 | pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']); |
|
127 | 123 | pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']); |
|
128 | 124 | pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']); |
|
129 | 125 | pyroutes.register('user_groups', '/_admin/user_groups', []); |
|
130 | 126 | pyroutes.register('user_groups_data', '/_admin/user_groups_data', []); |
|
131 | 127 | pyroutes.register('user_groups_new', '/_admin/user_groups/new', []); |
|
132 | 128 | pyroutes.register('user_groups_create', '/_admin/user_groups/create', []); |
|
133 | 129 | pyroutes.register('repos', '/_admin/repos', []); |
|
134 | 130 | pyroutes.register('repo_new', '/_admin/repos/new', []); |
|
135 | 131 | pyroutes.register('repo_create', '/_admin/repos/create', []); |
|
136 | 132 | pyroutes.register('repo_groups', '/_admin/repo_groups', []); |
|
137 | 133 | pyroutes.register('repo_group_new', '/_admin/repo_group/new', []); |
|
138 | 134 | pyroutes.register('repo_group_create', '/_admin/repo_group/create', []); |
|
139 | 135 | pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []); |
|
140 | 136 | pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []); |
|
141 | 137 | pyroutes.register('channelstream_proxy', '/_channelstream', []); |
|
142 | pyroutes.register('login', '/_admin/login', []); | |
|
143 | 138 | pyroutes.register('logout', '/_admin/logout', []); |
|
144 | pyroutes.register('register', '/_admin/register', []); | |
|
145 | 139 | pyroutes.register('reset_password', '/_admin/password_reset', []); |
|
146 | 140 | pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []); |
|
147 | 141 | pyroutes.register('home', '/', []); |
|
148 | 142 | pyroutes.register('user_autocomplete_data', '/_users', []); |
|
149 | 143 | pyroutes.register('user_group_autocomplete_data', '/_user_groups', []); |
|
150 | 144 | pyroutes.register('repo_list_data', '/_repos', []); |
|
151 | 145 | pyroutes.register('goto_switcher_data', '/_goto_data', []); |
|
152 | 146 | pyroutes.register('markup_preview', '/_markup_preview', []); |
|
153 | 147 | pyroutes.register('store_user_session_value', '/_store_session_attr', []); |
|
154 | 148 | pyroutes.register('journal', '/_admin/journal', []); |
|
155 | 149 | pyroutes.register('journal_rss', '/_admin/journal/rss', []); |
|
156 | 150 | pyroutes.register('journal_atom', '/_admin/journal/atom', []); |
|
157 | 151 | pyroutes.register('journal_public', '/_admin/public_journal', []); |
|
158 | 152 | pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []); |
|
159 | 153 | pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []); |
|
160 | 154 | pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []); |
|
161 | 155 | pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []); |
|
162 | 156 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); |
|
163 | 157 | pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']); |
|
164 | 158 | pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']); |
|
165 | 159 | pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']); |
|
166 | 160 | pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']); |
|
167 | 161 | pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
168 | 162 | pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']); |
|
169 | 163 | pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']); |
|
170 | 164 | pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']); |
|
171 | 165 | pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']); |
|
172 | 166 | pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']); |
|
173 | 167 | pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']); |
|
174 | 168 | pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']); |
|
175 | 169 | pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']); |
|
176 | 170 | pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']); |
|
177 | 171 | pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
178 | 172 | pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); |
|
179 | 173 | pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']); |
|
180 | 174 | pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']); |
|
181 | 175 | pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
182 | 176 | pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
183 | 177 | pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']); |
|
184 | 178 | pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
185 | 179 | pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
186 | 180 | pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
187 | 181 | pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
188 | 182 | pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
189 | 183 | pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
190 | 184 | pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
191 | 185 | pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
192 | 186 | pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
193 | 187 | pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
194 | 188 | pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
195 | 189 | pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
196 | 190 | pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
197 | 191 | pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
198 | 192 | pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
199 | 193 | pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
200 | 194 | pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
201 | 195 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); |
|
202 | 196 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); |
|
203 | 197 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); |
|
204 | 198 | pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']); |
|
205 | 199 | pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
206 | 200 | pyroutes.register('repo_changelog_elements', '/%(repo_name)s/changelog_elements', ['repo_name']); |
|
207 | 201 | pyroutes.register('repo_changelog_elements_file', '/%(repo_name)s/changelog_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
208 | 202 | pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']); |
|
209 | 203 | pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); |
|
210 | 204 | pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']); |
|
211 | 205 | pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']); |
|
212 | 206 | pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']); |
|
213 | 207 | pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']); |
|
214 | 208 | pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']); |
|
215 | 209 | pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']); |
|
216 | 210 | pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']); |
|
217 | 211 | pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
218 | 212 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); |
|
219 | 213 | pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']); |
|
220 | 214 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); |
|
221 | 215 | pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']); |
|
222 | 216 | pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
223 | 217 | pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']); |
|
224 | 218 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']); |
|
225 | 219 | pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']); |
|
226 | 220 | pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']); |
|
227 | 221 | pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']); |
|
228 | 222 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']); |
|
229 | 223 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); |
|
230 | 224 | pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']); |
|
231 | 225 | pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']); |
|
232 | 226 | pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']); |
|
233 | 227 | pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']); |
|
234 | 228 | pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']); |
|
235 | 229 | pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']); |
|
236 | 230 | pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']); |
|
237 | 231 | pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']); |
|
238 | 232 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); |
|
239 | pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']); | |
|
240 | pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']); | |
|
241 | 233 | pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']); |
|
242 | 234 | pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']); |
|
243 | 235 | pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']); |
|
244 | 236 | pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']); |
|
245 | 237 | pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']); |
|
246 | 238 | pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']); |
|
247 | 239 | pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']); |
|
248 | 240 | pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']); |
|
249 | pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']); | |
|
250 | 241 | pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']); |
|
251 | 242 | pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']); |
|
252 | 243 | pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']); |
|
253 | 244 | pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']); |
|
254 | 245 | pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']); |
|
255 | 246 | pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']); |
|
256 | 247 | pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']); |
|
257 | 248 | pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']); |
|
258 | 249 | pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']); |
|
259 | 250 | pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']); |
|
260 | 251 | pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']); |
|
261 | pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']); | |
|
262 | 252 | pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']); |
|
263 | 253 | pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']); |
|
264 | 254 | pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']); |
|
265 | 255 | pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']); |
|
266 | 256 | pyroutes.register('rss_feed_home', '/%(repo_name)s/feed/rss', ['repo_name']); |
|
267 | 257 | pyroutes.register('atom_feed_home', '/%(repo_name)s/feed/atom', ['repo_name']); |
|
268 | 258 | pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']); |
|
269 | 259 | pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']); |
|
270 | 260 | pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']); |
|
271 | 261 | pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']); |
|
272 | 262 | pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']); |
|
273 | 263 | pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']); |
|
274 | 264 | pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']); |
|
275 | 265 | pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']); |
|
276 | 266 | pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']); |
|
277 | 267 | pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']); |
|
278 | 268 | pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']); |
|
279 | 269 | pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']); |
|
280 | 270 | pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']); |
|
281 | 271 | pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']); |
|
282 | 272 | pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']); |
|
283 | 273 | pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']); |
|
284 | 274 | pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']); |
|
285 | 275 | pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']); |
|
286 | 276 | pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']); |
|
287 | 277 | pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']); |
|
288 | 278 | pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']); |
|
289 | 279 | pyroutes.register('search', '/_admin/search', []); |
|
290 | 280 | pyroutes.register('search_repo', '/%(repo_name)s/search', ['repo_name']); |
|
291 | 281 | pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']); |
|
292 | 282 | pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']); |
|
293 | 283 | pyroutes.register('my_account_profile', '/_admin/my_account/profile', []); |
|
294 | 284 | pyroutes.register('my_account_edit', '/_admin/my_account/edit', []); |
|
295 | 285 | pyroutes.register('my_account_update', '/_admin/my_account/update', []); |
|
296 | 286 | pyroutes.register('my_account_password', '/_admin/my_account/password', []); |
|
297 | 287 | pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []); |
|
298 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); | |
|
299 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); | |
|
300 | 288 | pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []); |
|
301 | 289 | pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []); |
|
302 | 290 | pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []); |
|
303 | 291 | pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []); |
|
304 | 292 | pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []); |
|
305 | 293 | pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []); |
|
306 | 294 | pyroutes.register('my_account_emails', '/_admin/my_account/emails', []); |
|
307 | 295 | pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []); |
|
308 | 296 | pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []); |
|
309 | 297 | pyroutes.register('my_account_repos', '/_admin/my_account/repos', []); |
|
310 | 298 | pyroutes.register('my_account_watched', '/_admin/my_account/watched', []); |
|
311 | 299 | pyroutes.register('my_account_perms', '/_admin/my_account/perms', []); |
|
312 | 300 | pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []); |
|
313 | 301 | pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []); |
|
314 | 302 | pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []); |
|
315 | 303 | pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []); |
|
316 | 304 | pyroutes.register('notifications_show_all', '/_admin/notifications', []); |
|
317 | 305 | pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []); |
|
318 | 306 | pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']); |
|
319 | 307 | pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']); |
|
320 | 308 | pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']); |
|
321 | 309 | pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []); |
|
322 | 310 | pyroutes.register('gists_show', '/_admin/gists', []); |
|
323 | 311 | pyroutes.register('gists_new', '/_admin/gists/new', []); |
|
324 | 312 | pyroutes.register('gists_create', '/_admin/gists/create', []); |
|
325 | 313 | pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']); |
|
326 | 314 | pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']); |
|
327 | 315 | pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']); |
|
328 | 316 | pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']); |
|
329 | 317 | pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']); |
|
330 | 318 | pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']); |
|
331 | 319 | pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']); |
|
332 | 320 | pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']); |
|
333 | 321 | pyroutes.register('debug_style_home', '/_admin/debug_style', []); |
|
334 | 322 | pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']); |
|
335 | 323 | pyroutes.register('apiv2', '/_admin/api', []); |
|
324 | pyroutes.register('admin_settings_license', '/_admin/settings/license', []); | |
|
325 | pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []); | |
|
326 | pyroutes.register('login', '/_admin/login', []); | |
|
327 | pyroutes.register('register', '/_admin/register', []); | |
|
328 | pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']); | |
|
329 | pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']); | |
|
330 | pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']); | |
|
331 | pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']); | |
|
332 | pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']); | |
|
333 | pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']); | |
|
334 | pyroutes.register('admin_settings_scheduler_show_tasks', '/_admin/settings/scheduler/_tasks', []); | |
|
335 | pyroutes.register('admin_settings_scheduler_show_all', '/_admin/settings/scheduler', []); | |
|
336 | pyroutes.register('admin_settings_scheduler_new', '/_admin/settings/scheduler/new', []); | |
|
337 | pyroutes.register('admin_settings_scheduler_create', '/_admin/settings/scheduler/create', []); | |
|
338 | pyroutes.register('admin_settings_scheduler_edit', '/_admin/settings/scheduler/%(schedule_id)s', ['schedule_id']); | |
|
339 | pyroutes.register('admin_settings_scheduler_update', '/_admin/settings/scheduler/%(schedule_id)s/update', ['schedule_id']); | |
|
340 | pyroutes.register('admin_settings_scheduler_delete', '/_admin/settings/scheduler/%(schedule_id)s/delete', ['schedule_id']); | |
|
341 | pyroutes.register('admin_settings_scheduler_execute', '/_admin/settings/scheduler/%(schedule_id)s/execute', ['schedule_id']); | |
|
342 | pyroutes.register('admin_settings_automation', '/_admin/settings/automation', []); | |
|
343 | pyroutes.register('admin_settings_automation_update', '/_admin/settings/automation/%(entry_id)s/update', ['entry_id']); | |
|
344 | pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []); | |
|
345 | pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []); | |
|
346 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); | |
|
347 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); | |
|
348 | pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []); | |
|
349 | pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []); | |
|
350 | pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']); | |
|
351 | pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']); | |
|
352 | pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']); | |
|
353 | pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']); | |
|
354 | pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']); | |
|
336 | 355 | } |
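
The registrations above are generated from RhodeCode's Pyramid route map; on the client side these entries are normally resolved back into concrete URLs with pyroutes.url(). A minimal sketch, assuming the pyroutes.url(name, params) helper shipped alongside pyroutes.register() in RhodeCode's pyroutes.js (the user id below is a hypothetical value, not taken from this changeset):

    // Resolve a parametrised route from the table above into a URL.
    var editUrl = pyroutes.url('user_edit', {'user_id': 123});
    // expected result: '/_admin/users/123/edit'

    // Parameter-less routes only need the route name; an empty params
    // object is passed explicitly to be safe.
    var searchUrl = pyroutes.url('search', {});
    // expected result: '/_admin/search'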