##// END OF EJS Templates
system-info: expose inode limits and usage. Fixes #4282
marcink -
r1027:f516f8a4 default
parent child Browse files
Show More
@@ -1,65 +1,67 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import pytest
23 23
24 24 from rhodecode.model.scm import ScmModel
25 25 from rhodecode.api.tests.utils import build_data, api_call, assert_ok
26 26
27 27
@pytest.fixture
def http_host_stub():
    """
    Override the default HTTP host so tests resolve against "localhost",
    which guarantees an IP address can always be obtained.
    """
    return 'localhost:80'
35 35
36 36
@pytest.mark.usefixtures("testuser_api", "app")
class TestGetServerInfo(object):
    # Keys whose values are machine/time dependent; they are copied from
    # the live response so the comparison only pins the stable parts.
    DYNAMIC_KEYS = ('memory', 'uptime', 'load', 'cpu', 'disk', 'disk_inodes')

    def _call_server_info(self):
        """Call the get_server_info API method, return (request id, response)."""
        id_, params = build_data(self.apikey, 'get_server_info')
        response = api_call(self.app, params)
        return id_, response

    def _expected(self, resp, environ=None):
        """Build expected payload; dynamic keys are copied from ``resp``."""
        if environ is not None:
            expected = ScmModel().get_server_info(environ)
        else:
            expected = ScmModel().get_server_info()
        for key in self.DYNAMIC_KEYS:
            expected[key] = resp['result'][key]
        expected['server_ip'] = '127.0.0.1:80'
        return expected

    def test_api_get_server_info(self):
        id_, response = self._call_server_info()
        expected = self._expected(response.json)
        assert_ok(id_, expected, given=response.body)

    def test_api_get_server_info_ip(self):
        id_, response = self._call_server_info()
        expected = self._expected(
            response.json, environ={'SERVER_NAME': 'unknown'})
        assert_ok(id_, expected, given=response.body)
@@ -1,1102 +1,1120 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import re
27 27 import sys
28 28 import time
29 29 import traceback
30 30 import logging
31 31 import cStringIO
32 32 import pkg_resources
33 33
34 34 import pylons
35 35 from pylons.i18n.translation import _
36 36 from sqlalchemy import func
37 37 from zope.cachedescriptors.property import Lazy as LazyProperty
38 38
39 39 import rhodecode
40 40 from rhodecode.lib.vcs import get_backend
41 41 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
42 42 from rhodecode.lib.vcs.nodes import FileNode
43 43 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 44 from rhodecode.lib import helpers as h
45 45
46 46 from rhodecode.lib.auth import (
47 47 HasRepoPermissionAny, HasRepoGroupPermissionAny,
48 48 HasUserGroupPermissionAny)
49 49 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
50 50 from rhodecode.lib import hooks_utils, caches
51 51 from rhodecode.lib.utils import (
52 52 get_filesystem_repos, action_logger, make_db_config)
53 53 from rhodecode.lib.utils2 import (
54 54 safe_str, safe_unicode, get_server_url, md5)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.db import (
57 57 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
58 58 PullRequest, DbMigrateVersion)
59 59 from rhodecode.model.settings import VcsSettingsModel
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
class UserTemp(object):
    """Lightweight stand-in carrying only a user id (e.g. for action logs)."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{0}('id:{1}')>".format(type(self).__name__, self.user_id)
70 70
71 71
class RepoTemp(object):
    """Lightweight stand-in carrying only a repository id (e.g. for action logs)."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{0}('id:{1}')>".format(type(self).__name__, self.repo_id)
78 78
79 79
class SimpleCachedRepoList(object):
    """
    Lightweight repository iterator: skips scm initialisation, relies on
    cached db data, and permission-filters entries on iteration.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading "-" on the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        perm_check = HasRepoPermissionAny(*self.perm_set)
        for db_repo in self.db_repo_list:
            # permission check happens here, per repository
            if not perm_check(db_repo.repo_name, 'SimpleCachedRepoList check'):
                continue
            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {},
            }
115 115
116 116
117 117 class _PermCheckIterator(object):
118 118
119 119 def __init__(
120 120 self, obj_list, obj_attr, perm_set, perm_checker,
121 121 extra_kwargs=None):
122 122 """
123 123 Creates iterator from given list of objects, additionally
124 124 checking permission for them from perm_set var
125 125
126 126 :param obj_list: list of db objects
127 127 :param obj_attr: attribute of object to pass into perm_checker
128 128 :param perm_set: list of permissions to check
129 129 :param perm_checker: callable to check permissions against
130 130 """
131 131 self.obj_list = obj_list
132 132 self.obj_attr = obj_attr
133 133 self.perm_set = perm_set
134 134 self.perm_checker = perm_checker
135 135 self.extra_kwargs = extra_kwargs or {}
136 136
137 137 def __len__(self):
138 138 return len(self.obj_list)
139 139
140 140 def __repr__(self):
141 141 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
142 142
143 143 def __iter__(self):
144 144 checker = self.perm_checker(*self.perm_set)
145 145 for db_obj in self.obj_list:
146 146 # check permission at this level
147 147 name = getattr(db_obj, self.obj_attr, None)
148 148 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
149 149 continue
150 150
151 151 yield db_obj
152 152
153 153
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over repositories."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']
        super(RepoList, self).__init__(
            obj_list=db_repo_list, obj_attr='repo_name',
            perm_set=perm_set, perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
166 166
167 167
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over repository groups."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list, obj_attr='group_name',
            perm_set=perm_set, perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
179 179
180 180
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over user groups."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list, obj_attr='users_group_name',
            perm_set=perm_set, perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
192 192
193 193
194 194 class ScmModel(BaseModel):
195 195 """
196 196 Generic Scm Model
197 197 """
198 198
199 199 @LazyProperty
200 200 def repos_path(self):
201 201 """
202 202 Gets the repositories root path from database
203 203 """
204 204
205 205 settings_model = VcsSettingsModel(sa=self.sa)
206 206 return settings_model.get_repos_location()
207 207
208 208 def repo_scan(self, repos_path=None):
209 209 """
210 210 Listing of repositories in given path. This path should not be a
211 211 repository itself. Return a dictionary of repository objects
212 212
213 213 :param repos_path: path to directory containing repositories
214 214 """
215 215
216 216 if repos_path is None:
217 217 repos_path = self.repos_path
218 218
219 219 log.info('scanning for repositories in %s', repos_path)
220 220
221 221 config = make_db_config()
222 222 config.set('extensions', 'largefiles', '')
223 223 repos = {}
224 224
225 225 for name, path in get_filesystem_repos(repos_path, recursive=True):
226 226 # name need to be decomposed and put back together using the /
227 227 # since this is internal storage separator for rhodecode
228 228 name = Repository.normalize_repo_name(name)
229 229
230 230 try:
231 231 if name in repos:
232 232 raise RepositoryError('Duplicate repository name %s '
233 233 'found in %s' % (name, path))
234 234 elif path[0] in rhodecode.BACKENDS:
235 235 klass = get_backend(path[0])
236 236 repos[name] = klass(path[1], config=config)
237 237 except OSError:
238 238 continue
239 239 log.debug('found %s paths with repositories', len(repos))
240 240 return repos
241 241
242 242 def get_repos(self, all_repos=None, sort_key=None):
243 243 """
244 244 Get all repositories from db and for each repo create it's
245 245 backend instance and fill that backed with information from database
246 246
247 247 :param all_repos: list of repository names as strings
248 248 give specific repositories list, good for filtering
249 249
250 250 :param sort_key: initial sorting of repositories
251 251 """
252 252 if all_repos is None:
253 253 all_repos = self.sa.query(Repository)\
254 254 .filter(Repository.group_id == None)\
255 255 .order_by(func.lower(Repository.repo_name)).all()
256 256 repo_iter = SimpleCachedRepoList(
257 257 all_repos, repos_path=self.repos_path, order_by=sort_key)
258 258 return repo_iter
259 259
260 260 def get_repo_groups(self, all_groups=None):
261 261 if all_groups is None:
262 262 all_groups = RepoGroup.query()\
263 263 .filter(RepoGroup.group_parent_id == None).all()
264 264 return [x for x in RepoGroupList(all_groups)]
265 265
266 266 def mark_for_invalidation(self, repo_name, delete=False):
267 267 """
268 268 Mark caches of this repo invalid in the database. `delete` flag
269 269 removes the cache entries
270 270
271 271 :param repo_name: the repo_name for which caches should be marked
272 272 invalid, or deleted
273 273 :param delete: delete the entry keys instead of setting bool
274 274 flag on them
275 275 """
276 276 CacheKey.set_invalidate(repo_name, delete=delete)
277 277 repo = Repository.get_by_repo_name(repo_name)
278 278
279 279 if repo:
280 280 config = repo._config
281 281 config.set('extensions', 'largefiles', '')
282 282 repo.update_commit_cache(config=config, cs_cache=None)
283 283 caches.clear_repo_caches(repo_name)
284 284
    def toggle_following_repo(self, follow_repo_id, user_id):
        """
        Toggle the "following" state of a repository for a user: an
        existing follow entry is deleted, otherwise a new one is created.
        Either change is recorded via the action log.

        :param follow_repo_id: id of the repository to (un)follow
        :param user_id: id of the user toggling the state
        """

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            # already following -> unfollow and log it
            try:
                self.sa.delete(f)
                action_logger(UserTemp(user_id),
                              'stopped_following_repo',
                              RepoTemp(follow_repo_id))
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        # not following yet -> create the follow entry and log it
        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)

            action_logger(UserTemp(user_id),
                          'started_following_repo',
                          RepoTemp(follow_repo_id))
        except Exception:
            log.error(traceback.format_exc())
            raise
314 314
315 315 def toggle_following_user(self, follow_user_id, user_id):
316 316 f = self.sa.query(UserFollowing)\
317 317 .filter(UserFollowing.follows_user_id == follow_user_id)\
318 318 .filter(UserFollowing.user_id == user_id).scalar()
319 319
320 320 if f is not None:
321 321 try:
322 322 self.sa.delete(f)
323 323 return
324 324 except Exception:
325 325 log.error(traceback.format_exc())
326 326 raise
327 327
328 328 try:
329 329 f = UserFollowing()
330 330 f.user_id = user_id
331 331 f.follows_user_id = follow_user_id
332 332 self.sa.add(f)
333 333 except Exception:
334 334 log.error(traceback.format_exc())
335 335 raise
336 336
337 337 def is_following_repo(self, repo_name, user_id, cache=False):
338 338 r = self.sa.query(Repository)\
339 339 .filter(Repository.repo_name == repo_name).scalar()
340 340
341 341 f = self.sa.query(UserFollowing)\
342 342 .filter(UserFollowing.follows_repository == r)\
343 343 .filter(UserFollowing.user_id == user_id).scalar()
344 344
345 345 return f is not None
346 346
347 347 def is_following_user(self, username, user_id, cache=False):
348 348 u = User.get_by_username(username)
349 349
350 350 f = self.sa.query(UserFollowing)\
351 351 .filter(UserFollowing.follows_user == u)\
352 352 .filter(UserFollowing.user_id == user_id).scalar()
353 353
354 354 return f is not None
355 355
356 356 def get_followers(self, repo):
357 357 repo = self._get_repo(repo)
358 358
359 359 return self.sa.query(UserFollowing)\
360 360 .filter(UserFollowing.follows_repository == repo).count()
361 361
362 362 def get_forks(self, repo):
363 363 repo = self._get_repo(repo)
364 364 return self.sa.query(Repository)\
365 365 .filter(Repository.fork == repo).count()
366 366
367 367 def get_pull_requests(self, repo):
368 368 repo = self._get_repo(repo)
369 369 return self.sa.query(PullRequest)\
370 370 .filter(PullRequest.target_repo == repo)\
371 371 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
372 372
373 373 def mark_as_fork(self, repo, fork, user):
374 374 repo = self._get_repo(repo)
375 375 fork = self._get_repo(fork)
376 376 if fork and repo.repo_id == fork.repo_id:
377 377 raise Exception("Cannot set repository as fork of itself")
378 378
379 379 if fork and repo.repo_type != fork.repo_type:
380 380 raise RepositoryError(
381 381 "Cannot set repository as fork of repository with other type")
382 382
383 383 repo.fork = fork
384 384 self.sa.add(repo)
385 385 return repo
386 386
    def pull_changes(self, repo, username):
        """
        Pull new commits into ``repo`` from its configured clone uri and
        invalidate its caches afterwards.

        :param repo: repository name/id/instance to pull into
        :param username: acting user's username (not referenced below;
            presumably kept for caller/hook symmetry — TODO confirm)
        :raises Exception: when the repository has no clone uri configured
        """
        dbrepo = self._get_repo(repo)
        clone_uri = dbrepo.clone_uri
        if not clone_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # TODO: marcink fix this an re-enable since we need common logic
        # for hg/git remove hooks so we don't trigger them on fetching
        # commits from remote
        repo.config.clear_section('hooks')

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.pull(clone_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise
408 408
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commit a change to a single file on top of the given parent commit.

        :param repo: SCM instance (vcs backend repository)
        :param repo_name: repository name, used for cache invalidation
            and the post-push hook
        :param commit: parent commit the change is based on
        :param user: RhodeCode User object or user_id, the committer
        :param author: author string for the new commit
        :param message: commit message
        :param content: new file content
        :param f_path: path of the file to change
        :returns: the newly created tip commit
        :raises IMCCommitError: when the in-memory commit fails
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', repo_name=repo_name,
            repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip
446 446
447 447 def _sanitize_path(self, f_path):
448 448 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
449 449 raise NonRelativePathError('%s is not an relative path' % f_path)
450 450 if f_path:
451 451 f_path = os.path.normpath(f_path)
452 452 return f_path
453 453
454 454 def get_dirnode_metadata(self, commit, dir_node):
455 455 if not dir_node.is_dir():
456 456 return []
457 457
458 458 data = []
459 459 for node in dir_node:
460 460 if not node.is_file():
461 461 # we skip file-nodes
462 462 continue
463 463
464 464 last_commit = node.last_commit
465 465 last_commit_date = last_commit.date
466 466 data.append({
467 467 'name': node.name,
468 468 'size': h.format_byte_size_binary(node.size),
469 469 'modified_at': h.format_date(last_commit_date),
470 470 'modified_ts': last_commit_date.isoformat(),
471 471 'revision': last_commit.revision,
472 472 'short_id': last_commit.short_id,
473 473 'message': h.escape(last_commit.message),
474 474 'author': h.escape(last_commit.author),
475 475 'user_profile': h.gravatar_with_user(last_commit.author),
476 476 })
477 477
478 478 return data
479 479
    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        recursive walk in root dir and return a set of all path in that dir
        based on repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description
        :param extended_info: include md5/size/mimetype/lines per file
            (only used when ``flat`` is False)
        :param content: include file content for non-binary files under
            the size limit (only used when ``flat`` is False)
        :param max_file_bytes: will not return file contents over this limit

        :returns: tuple of (dirs, files)
        :raises RepositoryError: re-raised after debug logging
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):
                for f in files:
                    _content = None
                    # flat mode: just the path string
                    _data = f.unicode_path
                    over_size_limit = (max_file_bytes is not None
                                       and f.size > max_file_bytes)

                    if not flat:
                        _data = {
                            "name": f.unicode_path,
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            # content stays None for binary or oversized files
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)
                for d in dirs:
                    _data = d.unicode_path
                    if not flat:
                        _data = {
                            "name": d.unicode_path,
                            "type": "dir",
                        }
                        if extended_info:
                            # dirs carry the same keys as files but empty
                            _data.update({
                                "md5": None,
                                "binary": None,
                                "size": None,
                                "extension": None,
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.debug("Exception in get_nodes", exc_info=True)
            raise

        return _dirs, _files
554 554
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty — then it's the
            initial commit
        :param author: author of commit, can be different than committer
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: the newly committed commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
623 623
624 624 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
625 625 author=None, trigger_push_hook=True):
626 626 user = self._get_user(user)
627 627 scm_instance = repo.scm_instance(cache=False)
628 628
629 629 message = safe_unicode(message)
630 630 commiter = user.full_contact
631 631 author = safe_unicode(author) if author else commiter
632 632
633 633 imc = scm_instance.in_memory_commit
634 634
635 635 if not parent_commit:
636 636 parent_commit = EmptyCommit(alias=scm_instance.alias)
637 637
638 638 if isinstance(parent_commit, EmptyCommit):
639 639 # EmptyCommit means we we're editing empty repository
640 640 parents = None
641 641 else:
642 642 parents = [parent_commit]
643 643
644 644 # add multiple nodes
645 645 for _filename, data in nodes.items():
646 646 # new filename, can be renamed from the old one, also sanitaze
647 647 # the path for any hack around relative paths like ../../ etc.
648 648 filename = self._sanitize_path(data['filename'])
649 649 old_filename = self._sanitize_path(_filename)
650 650 content = data['content']
651 651
652 652 filenode = FileNode(old_filename, content=content)
653 653 op = data['op']
654 654 if op == 'add':
655 655 imc.add(filenode)
656 656 elif op == 'del':
657 657 imc.remove(filenode)
658 658 elif op == 'mod':
659 659 if filename != old_filename:
660 660 # TODO: handle renames more efficient, needs vcs lib
661 661 # changes
662 662 imc.remove(filenode)
663 663 imc.add(FileNode(filename, content=content))
664 664 else:
665 665 imc.change(filenode)
666 666
667 667 try:
668 668 # TODO: handle pre push scenario
669 669 # commit changes
670 670 tip = imc.commit(message=message,
671 671 author=author,
672 672 parents=parents,
673 673 branch=parent_commit.branch)
674 674 except NodeNotChangedError:
675 675 raise
676 676 except Exception as e:
677 677 log.exception("Unexpected exception during call to imc.commit")
678 678 raise IMCCommitError(str(e))
679 679 finally:
680 680 # always clear caches, if commit fails we want fresh object also
681 681 self.mark_for_invalidation(repo.repo_name)
682 682
683 683 if trigger_push_hook:
684 684 hooks_utils.trigger_post_push_hook(
685 685 username=user.username, action='push_local',
686 686 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
687 687 commit_ids=[tip.raw_id])
688 688
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes from `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty — then it's the
            initial commit
        :param author: author of commit, can be different than committer
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatibility it allows the same
            # dict structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # remove multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
749 749
750 750 def strip(self, repo, commit_id, branch):
751 751 scm_instance = repo.scm_instance(cache=False)
752 752 scm_instance.config.clear_section('hooks')
753 753 scm_instance.strip(commit_id, branch)
754 754 self.mark_for_invalidation(repo.repo_name)
755 755
756 756 def get_unread_journal(self):
757 757 return self.sa.query(UserLog).count()
758 758
759 759 def get_repo_landing_revs(self, repo=None):
760 760 """
761 761 Generates select option with tags branches and bookmarks (for hg only)
762 762 grouped by type
763 763
764 764 :param repo:
765 765 """
766 766
767 767 hist_l = []
768 768 choices = []
769 769 repo = self._get_repo(repo)
770 770 hist_l.append(['rev:tip', _('latest tip')])
771 771 choices.append('rev:tip')
772 772 if not repo:
773 773 return choices, hist_l
774 774
775 775 repo = repo.scm_instance()
776 776
777 777 branches_group = (
778 778 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
779 779 for b in repo.branches],
780 780 _("Branches"))
781 781 hist_l.append(branches_group)
782 782 choices.extend([x[0] for x in branches_group[0]])
783 783
784 784 if repo.alias == 'hg':
785 785 bookmarks_group = (
786 786 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
787 787 for b in repo.bookmarks],
788 788 _("Bookmarks"))
789 789 hist_l.append(bookmarks_group)
790 790 choices.extend([x[0] for x in bookmarks_group[0]])
791 791
792 792 tags_group = (
793 793 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
794 794 for t in repo.tags],
795 795 _("Tags"))
796 796 hist_l.append(tags_group)
797 797 choices.extend([x[0] for x in tags_group[0]])
798 798
799 799 return choices, hist_l
800 800
    def install_git_hook(self, repo, force_create=False):
        """
        Creates a rhodecode hook inside a git repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """

        # bare repos keep hooks at top level, non-bare under .git/
        loc = os.path.join(repo.path, 'hooks')
        if not repo.bare:
            loc = os.path.join(repo.path, '.git', 'hooks')
        if not os.path.isdir(loc):
            os.makedirs(loc, mode=0777)

        tmpl_post = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
        tmpl_pre = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))

        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
            _hook_file = os.path.join(loc, '%s-receive' % h_type)
            log.debug('Installing git hook in repo %s', repo)
            # presumably True when the existing hook file was written by
            # rhodecode or is absent — TODO confirm in _check_rhodecode_hook
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)

            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', h_type)
                try:
                    with open(_hook_file, 'wb') as f:
                        # fill template placeholders: rhodecode version
                        # and the python interpreter to run the hook with
                        tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
                        tmpl = tmpl.replace('_ENV_', sys.executable)
                        f.write(tmpl)
                    os.chmod(_hook_file, 0755)
                except IOError:
                    log.exception('error writing hook file %s', _hook_file)
            else:
                log.debug('skipping writing hook file')
839 839
840 840 def install_svn_hooks(self, repo, force_create=False):
841 841 """
842 842 Creates rhodecode hooks inside a svn repository
843 843
844 844 :param repo: Instance of VCS repo
845 845 :param force_create: Create even if same name hook exists
846 846 """
847 847 hooks_path = os.path.join(repo.path, 'hooks')
848 848 if not os.path.isdir(hooks_path):
849 849 os.makedirs(hooks_path)
850 850 post_commit_tmpl = pkg_resources.resource_string(
851 851 'rhodecode', '/'.join(
852 852 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
853 853 pre_commit_template = pkg_resources.resource_string(
854 854 'rhodecode', '/'.join(
855 855 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
856 856 templates = {
857 857 'post-commit': post_commit_tmpl,
858 858 'pre-commit': pre_commit_template
859 859 }
860 860 for filename in templates:
861 861 _hook_file = os.path.join(hooks_path, filename)
862 862 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
863 863 if _rhodecode_hook or force_create:
864 864 log.debug('writing %s hook file !', filename)
865 865 template = templates[filename]
866 866 try:
867 867 with open(_hook_file, 'wb') as f:
868 868 template = template.replace(
869 869 '_TMPL_', rhodecode.__version__)
870 870 template = template.replace('_ENV_', sys.executable)
871 871 f.write(template)
872 872 os.chmod(_hook_file, 0755)
873 873 except IOError:
874 874 log.exception('error writing hook file %s', filename)
875 875 else:
876 876 log.debug('skipping writing hook file')
877 877
878 878 def install_hooks(self, repo, repo_type):
879 879 if repo_type == 'git':
880 880 self.install_git_hook(repo)
881 881 elif repo_type == 'svn':
882 882 self.install_svn_hooks(repo)
883 883
884 884 def get_server_info(self, environ=None):
885 885 import platform
886 886 import rhodecode
887 887 import pkg_resources
888 888 from rhodecode.model.meta import Base as sql_base, Session
889 889 from sqlalchemy.engine import url
890 890 from rhodecode.lib.base import get_server_ip_addr, get_server_port
891 891 from rhodecode.lib.vcs.backends.git import discover_git_version
892 892 from rhodecode.model.gist import GIST_STORE_LOC
893 893
894 def percentage(part, whole):
895 return 100 * float(part) / float(whole)
896
894 897 try:
895 898 # cygwin cannot have yet psutil support.
896 899 import psutil
897 900 except ImportError:
898 901 psutil = None
899 902
900 903 environ = environ or {}
901 904 _NA = 'NOT AVAILABLE'
902 905 _memory = _NA
903 906 _uptime = _NA
904 907 _boot_time = _NA
905 908 _cpu = _NA
906 909 _disk = dict(percent=0, used=0, total=0, error='')
910 _disk_inodes = dict(percent=0, free=0, used=0, total=0, error='')
907 911 _load = {'1_min': _NA, '5_min': _NA, '15_min': _NA}
908 912
909 913 model = VcsSettingsModel()
910 914 storage_path = model.get_repos_location()
911 915 gist_storage_path = os.path.join(storage_path, GIST_STORE_LOC)
912 916 archive_storage_path = rhodecode.CONFIG.get('archive_cache_dir', '')
913 917 search_index_storage_path = rhodecode.CONFIG.get('search.location', '')
914 918
915 919 if psutil:
916 920 # disk storage
917 921 try:
918 922 _disk = dict(psutil.disk_usage(storage_path)._asdict())
919 923 except Exception as e:
920 924 log.exception('Failed to fetch disk info')
921 925 _disk = {'percent': 0, 'used': 0, 'total': 0, 'error': str(e)}
922 926
927 # disk inodes usage
928 try:
929 i_stat = os.statvfs(storage_path)
930
931 _disk_inodes['used'] = i_stat.f_ffree
932 _disk_inodes['free'] = i_stat.f_favail
933 _disk_inodes['total'] = i_stat.f_files
934 _disk_inodes['percent'] = percentage(
935 _disk_inodes['used'], _disk_inodes['total'])
936 except Exception as e:
937 log.exception('Failed to fetch disk inodes info')
938 _disk_inodes['error'] = str(e)
939
923 940 # memory
924 941 _memory = dict(psutil.virtual_memory()._asdict())
925 942 _memory['percent2'] = psutil._common.usage_percent(
926 943 (_memory['total'] - _memory['free']),
927 944 _memory['total'], 1)
928 945
929 946 # load averages
930 947 if hasattr(psutil.os, 'getloadavg'):
931 948 _load = dict(zip(
932 949 ['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
933 950 _uptime = time.time() - psutil.boot_time()
934 951 _boot_time = psutil.boot_time()
935 952 _cpu = psutil.cpu_percent(0.5)
936 953
937 954 mods = dict([(p.project_name, p.version)
938 955 for p in pkg_resources.working_set])
939 956
940 957 def get_storage_size(storage_path):
941 958 sizes = []
942 959 for file_ in os.listdir(storage_path):
943 960 storage_file = os.path.join(storage_path, file_)
944 961 if os.path.isfile(storage_file):
945 962 try:
946 963 sizes.append(os.path.getsize(storage_file))
947 964 except OSError:
948 965 log.exception('Failed to get size of storage file %s',
949 966 storage_file)
950 967 pass
951 968
952 969 return sum(sizes)
953 970
954 971 # archive cache storage
955 972 _disk_archive = {'percent': 0, 'used': 0, 'total': 0}
956 973 try:
957 974 archive_storage_path_exists = os.path.isdir(
958 975 archive_storage_path)
959 976 if archive_storage_path and archive_storage_path_exists:
960 977 used = get_storage_size(archive_storage_path)
961 978 _disk_archive.update({
962 979 'used': used,
963 980 'total': used,
964 981 })
965 982 except Exception as e:
966 983 log.exception('failed to fetch archive cache storage')
967 984 _disk_archive['error'] = str(e)
968 985
969 986 # search index storage
970 987 _disk_index = {'percent': 0, 'used': 0, 'total': 0}
971 988 try:
972 989 search_index_storage_path_exists = os.path.isdir(
973 990 search_index_storage_path)
974 991 if search_index_storage_path_exists:
975 992 used = get_storage_size(search_index_storage_path)
976 993 _disk_index.update({
977 994 'percent': 100,
978 995 'used': used,
979 996 'total': used,
980 997 })
981 998 except Exception as e:
982 999 log.exception('failed to fetch search index storage')
983 1000 _disk_index['error'] = str(e)
984 1001
985 1002 # gist storage
986 1003 _disk_gist = {'percent': 0, 'used': 0, 'total': 0, 'items': 0}
987 1004 try:
988 1005 items_count = 0
989 1006 used = 0
990 1007 for root, dirs, files in os.walk(safe_str(gist_storage_path)):
991 1008 if root == gist_storage_path:
992 1009 items_count = len(dirs)
993 1010
994 1011 for f in files:
995 1012 try:
996 1013 used += os.path.getsize(os.path.join(root, f))
997 1014 except OSError:
998 1015 pass
999 1016 _disk_gist.update({
1000 1017 'percent': 100,
1001 1018 'used': used,
1002 1019 'total': used,
1003 1020 'items': items_count
1004 1021 })
1005 1022 except Exception as e:
1006 1023 log.exception('failed to fetch gist storage items')
1007 1024 _disk_gist['error'] = str(e)
1008 1025
1009 1026 # GIT info
1010 1027 git_ver = discover_git_version()
1011 1028
1012 1029 # SVN info
1013 1030 # TODO: johbo: Add discover_svn_version to replace this code.
1014 1031 try:
1015 1032 import svn.core
1016 1033 svn_ver = svn.core.SVN_VERSION
1017 1034 except ImportError:
1018 1035 svn_ver = None
1019 1036
1020 1037 # DB stuff
1021 1038 db_info = url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
1022 1039 db_type = db_info.__to_string__()
1023 1040 try:
1024 1041 engine = sql_base.metadata.bind
1025 1042 db_server_info = engine.dialect._get_server_version_info(
1026 1043 Session.connection(bind=engine))
1027 1044 db_version = '%s %s' % (db_info.drivername,
1028 1045 '.'.join(map(str, db_server_info)))
1029 1046 except Exception:
1030 1047 log.exception('failed to fetch db version')
1031 1048 db_version = '%s %s' % (db_info.drivername, '?')
1032 1049
1033 1050 db_migrate = DbMigrateVersion.query().filter(
1034 1051 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
1035 1052 db_migrate_version = db_migrate.version
1036 1053
1037 1054 info = {
1038 1055 'py_version': ' '.join(platform._sys_version()),
1039 1056 'py_path': sys.executable,
1040 1057 'py_modules': sorted(mods.items(), key=lambda k: k[0].lower()),
1041 1058
1042 1059 'platform': safe_unicode(platform.platform()),
1043 1060 'storage': storage_path,
1044 1061 'archive_storage': archive_storage_path,
1045 1062 'index_storage': search_index_storage_path,
1046 1063 'gist_storage': gist_storage_path,
1047 1064
1048 1065
1049 1066 'db_type': db_type,
1050 1067 'db_version': db_version,
1051 1068 'db_migrate_version': db_migrate_version,
1052 1069
1053 1070 'rhodecode_version': rhodecode.__version__,
1054 1071 'rhodecode_config_ini': rhodecode.CONFIG.get('__file__'),
1055 1072 'server_ip': '%s:%s' % (
1056 1073 get_server_ip_addr(environ, log_errors=False),
1057 1074 get_server_port(environ)
1058 1075 ),
1059 1076 'server_id': rhodecode.CONFIG.get('instance_id'),
1060 1077
1061 1078 'git_version': safe_unicode(git_ver),
1062 1079 'hg_version': mods.get('mercurial'),
1063 1080 'svn_version': svn_ver,
1064 1081
1065 1082 'uptime': _uptime,
1066 1083 'boot_time': _boot_time,
1067 1084 'load': _load,
1068 1085 'cpu': _cpu,
1069 1086 'memory': _memory,
1070 1087 'disk': _disk,
1088 'disk_inodes': _disk_inodes,
1071 1089 'disk_archive': _disk_archive,
1072 1090 'disk_gist': _disk_gist,
1073 1091 'disk_index': _disk_index,
1074 1092 }
1075 1093 return info
1076 1094
1077 1095
1078 1096 def _check_rhodecode_hook(hook_path):
1079 1097 """
1080 1098 Check if the hook was created by RhodeCode
1081 1099 """
1082 1100 if not os.path.exists(hook_path):
1083 1101 return True
1084 1102
1085 1103 log.debug('hook exists, checking if it is from rhodecode')
1086 1104 hook_content = _read_hook(hook_path)
1087 1105 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
1088 1106 if matches:
1089 1107 try:
1090 1108 version = matches.groups()[0]
1091 1109 log.debug('got %s, it is rhodecode', version)
1092 1110 return True
1093 1111 except Exception:
1094 1112 log.exception("Exception while reading the hook version.")
1095 1113
1096 1114 return False
1097 1115
1098 1116
1099 1117 def _read_hook(hook_path):
1100 1118 with open(hook_path, 'rb') as f:
1101 1119 content = f.read()
1102 1120 return content
@@ -1,89 +1,90 b''
1 1 <%
2 2 elems = [
3 3 ## general
4 4 (_('RhodeCode Enterprise version'), h.literal('%s <div class="link" id="check_for_update" >%s</div>' % (c.rhodecode_version, _('check for updates'))), ''),
5 5 (_('Upgrade info endpoint'), h.literal('%s <br/><span >%s.</span>' % (c.rhodecode_update_url, _('Note: please make sure this server can access this url'))), ''),
6 6 (_('Configuration INI file'), c.rhodecode_config_ini, ''),
7 7 ## systems stats
8 8 (_('RhodeCode Enterprise Server IP'), c.server_ip, ''),
9 9 (_('RhodeCode Enterprise Server ID'), c.server_id, ''),
10 10 (_('Platform'), c.platform, ''),
11 11 (_('Uptime'), c.uptime_age, ''),
12 12 (_('Storage location'), c.storage, ''),
13 13 (_('Storage disk space'), "%s/%s, %s%% used%s" % (h.format_byte_size_binary(c.disk['used']), h.format_byte_size_binary(c.disk['total']),(c.disk['percent']), ' %s' % c.disk['error'] if 'error' in c.disk else ''), ''),
14 (_('Storage file limit (inodes)'), "%s/%s, %.1f%% used%s" % (c.disk_inodes['used'], c.disk_inodes['total'],(c.disk_inodes['percent']), ' %s' % c.disk_inodes['error'] if 'error' in c.disk_inodes else ''), ''),
14 15
15 16 (_('Search index storage'), c.index_storage, ''),
16 17 (_('Search index size'), "%s %s" % (h.format_byte_size_binary(c.disk_index['used']), ' %s' % c.disk_index['error'] if 'error' in c.disk_index else ''), ''),
17 18
18 19 (_('Gist storage'), c.gist_storage, ''),
19 20 (_('Gist storage size'), "%s (%s items)%s" % (h.format_byte_size_binary(c.disk_gist['used']),c.disk_gist['items'], ' %s' % c.disk_gist['error'] if 'error' in c.disk_gist else ''), ''),
20 21
21 22 (_('Archive cache'), h.literal('%s <br/><span >%s.</span>' % (c.archive_storage, _('Enable this by setting archive_cache_dir=/path/to/cache option in the .ini file'))), ''),
22 23 (_('Archive cache size'), "%s%s" % (h.format_byte_size_binary(c.disk_archive['used']), ' %s' % c.disk_archive['error'] if 'error' in c.disk_archive else ''), ''),
23 24
24 25 (_('System memory'), c.system_memory, ''),
25 26 (_('CPU'), '%s %%' %(c.cpu), ''),
26 27 (_('Load'), '1min: %s, 5min: %s, 15min: %s' %(c.load['1_min'],c.load['5_min'],c.load['15_min']), ''),
27 28
28 29 ## rhodecode stuff
29 30 (_('Python version'), c.py_version, ''),
30 31 (_('Python path'), c.py_path, ''),
31 32 (_('GIT version'), c.git_version, ''),
32 33 (_('HG version'), c.hg_version, ''),
33 34 (_('SVN version'), c.svn_version, ''),
34 35 (_('Database'), "%s @ version: %s" % (c.db_type, c.db_migrate_version), ''),
35 36 (_('Database version'), c.db_version, ''),
36 37
37 38 ]
38 39 %>
39 40
40 41 <div id="update_notice" style="display: none; margin: -40px 0px 20px 0px">
41 42 <div>${_('Checking for updates...')}</div>
42 43 </div>
43 44
44 45
45 46 <div class="panel panel-default">
46 47 <div class="panel-heading">
47 48 <h3 class="panel-title">${_('System Info')}</h3>
48 49 % if c.allowed_to_snapshot:
49 50 <a href="${url('admin_settings_system', snapshot=1)}" class="panel-edit">${_('create snapshot')}</a>
50 51 % endif
51 52 </div>
52 53 <div class="panel-body">
53 54 <dl class="dl-horizontal settings">
54 55 %for dt, dd, tt in elems:
55 56 <dt>${dt}:</dt>
56 57 <dd title="${tt}">${dd}</dd>
57 58 %endfor
58 59 </dl>
59 60 </div>
60 61 </div>
61 62
62 63 <div class="panel panel-default">
63 64 <div class="panel-heading">
64 65 <h3 class="panel-title">${_('Python Packages')}</h3>
65 66 </div>
66 67 <div class="panel-body">
67 68 <table class="table">
68 69 <colgroup>
69 70 <col class='label'>
70 71 <col class='content'>
71 72 </colgroup>
72 73 <tbody>
73 74 %for key, value in c.py_modules:
74 75 <tr>
75 76 <td>${key}</td>
76 77 <td>${value}</td>
77 78 </tr>
78 79 %endfor
79 80 </tbody>
80 81 </table>
81 82 </div>
82 83 </div>
83 84
84 85 <script>
85 86 $('#check_for_update').click(function(e){
86 87 $('#update_notice').show();
87 88 $('#update_notice').load("${h.url('admin_settings_system_update',version=c.rhodecode_version, platform=c.platform)}");
88 89 })
89 90 </script>
General Comments 0
You need to be logged in to leave comments. Login now