##// END OF EJS Templates
scm-model: add extra validation logic for remote uri to filter out bad scheduler entries.
marcink -
r3071:144eaab4 default
parent child Browse files
Show More
@@ -1,818 +1,831 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 import re
27 import sys
28 26 import traceback
29 27 import logging
30 28 import cStringIO
31 import pkg_resources
32 29
33 30 from sqlalchemy import func
34 31 from zope.cachedescriptors.property import Lazy as LazyProperty
35 32
36 33 import rhodecode
37 34 from rhodecode.lib.vcs import get_backend
38 35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
39 36 from rhodecode.lib.vcs.nodes import FileNode
40 37 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 38 from rhodecode.lib import helpers as h, rc_cache
42 39 from rhodecode.lib.auth import (
43 40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
44 41 HasUserGroupPermissionAny)
45 42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
46 43 from rhodecode.lib import hooks_utils
47 44 from rhodecode.lib.utils import (
48 45 get_filesystem_repos, make_db_config)
49 46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
50 47 from rhodecode.lib.system_info import get_system_info
51 48 from rhodecode.model import BaseModel
52 49 from rhodecode.model.db import (
53 50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
54 51 PullRequest)
55 52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
56 54
57 55 log = logging.getLogger(__name__)
58 56
59 57
class UserTemp(object):
    """Lightweight stand-in for a User row, carrying only the id (for logs)."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
66 64
67 65
class RepoTemp(object):
    """Lightweight stand-in for a Repository row, carrying only the id (for logs)."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
74 72
75 73
class SimpleCachedRepoList(object):
    """
    Iterates database repository objects without instantiating their scm
    backends, yielding a small dict per repository the current security
    context is allowed to read.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        for db_repo in self.db_repo_list:
            # permission filtering happens here, per repository
            allowed = HasRepoPermissionAny(*self.perm_set)(
                db_repo.repo_name, 'SimpleCachedRepoList check')
            if not allowed:
                continue

            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {},
            }
111 109
112 110
class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Build an iterator over ``obj_list`` that yields only objects for
        which ``perm_checker`` grants one of the permissions in ``perm_set``.

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        :param extra_kwargs: extra keyword args forwarded to the checker
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        # the checker is built once for the whole iteration
        checker = self.perm_checker(*self.perm_set)
        for db_obj in self.obj_list:
            obj_name = getattr(db_obj, self.obj_attr, None)
            if checker(obj_name, self.__class__.__name__, **self.extra_kwargs):
                yield db_obj
148 146
149 147
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over Repository db objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name',
            perm_set=perm_set or [
                'repository.read', 'repository.write', 'repository.admin'],
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
162 160
163 161
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over RepoGroup db objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name',
            perm_set=perm_set or ['group.read', 'group.write', 'group.admin'],
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
175 173
176 174
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over UserGroup db objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name',
            perm_set=perm_set or [
                'usergroup.read', 'usergroup.write', 'usergroup.admin'],
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
188 186
189 187
190 188 class ScmModel(BaseModel):
191 189 """
192 190 Generic Scm Model
193 191 """
194 192
195 193 @LazyProperty
196 194 def repos_path(self):
197 195 """
198 196 Gets the repositories root path from database
199 197 """
200 198
201 199 settings_model = VcsSettingsModel(sa=self.sa)
202 200 return settings_model.get_repos_location()
203 201
204 202 def repo_scan(self, repos_path=None):
205 203 """
206 204 Listing of repositories in given path. This path should not be a
207 205 repository itself. Return a dictionary of repository objects
208 206
209 207 :param repos_path: path to directory containing repositories
210 208 """
211 209
212 210 if repos_path is None:
213 211 repos_path = self.repos_path
214 212
215 213 log.info('scanning for repositories in %s', repos_path)
216 214
217 215 config = make_db_config()
218 216 config.set('extensions', 'largefiles', '')
219 217 repos = {}
220 218
221 219 for name, path in get_filesystem_repos(repos_path, recursive=True):
222 220 # name need to be decomposed and put back together using the /
223 221 # since this is internal storage separator for rhodecode
224 222 name = Repository.normalize_repo_name(name)
225 223
226 224 try:
227 225 if name in repos:
228 226 raise RepositoryError('Duplicate repository name %s '
229 227 'found in %s' % (name, path))
230 228 elif path[0] in rhodecode.BACKENDS:
231 229 klass = get_backend(path[0])
232 230 repos[name] = klass(path[1], config=config)
233 231 except OSError:
234 232 continue
235 233 log.debug('found %s paths with repositories', len(repos))
236 234 return repos
237 235
    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db and for each repo create it's
        backend instance and fill that backend with information from database.

        :param all_repos: list of repository db objects; pass a specific
            list for filtering, defaults to all top-level repositories
        :param sort_key: initial sorting of repositories; a leading '-'
            means descending (see SimpleCachedRepoList)
        :return: SimpleCachedRepoList wrapping the repositories
        """
        if all_repos is None:
            # default: all ungrouped repositories, sorted by lowercase name
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter
255 253
256 254 def get_repo_groups(self, all_groups=None):
257 255 if all_groups is None:
258 256 all_groups = RepoGroup.query()\
259 257 .filter(RepoGroup.group_parent_id == None).all()
260 258 return [x for x in RepoGroupList(all_groups)]
261 259
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            # flip (or drop) the db-side invalidation marker for this repo
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            # refresh the commit metadata cached on the db row
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                # additionally purge the dogpile cache region for this repo
                cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
286 284
287 285 def toggle_following_repo(self, follow_repo_id, user_id):
288 286
289 287 f = self.sa.query(UserFollowing)\
290 288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
291 289 .filter(UserFollowing.user_id == user_id).scalar()
292 290
293 291 if f is not None:
294 292 try:
295 293 self.sa.delete(f)
296 294 return
297 295 except Exception:
298 296 log.error(traceback.format_exc())
299 297 raise
300 298
301 299 try:
302 300 f = UserFollowing()
303 301 f.user_id = user_id
304 302 f.follows_repo_id = follow_repo_id
305 303 self.sa.add(f)
306 304 except Exception:
307 305 log.error(traceback.format_exc())
308 306 raise
309 307
310 308 def toggle_following_user(self, follow_user_id, user_id):
311 309 f = self.sa.query(UserFollowing)\
312 310 .filter(UserFollowing.follows_user_id == follow_user_id)\
313 311 .filter(UserFollowing.user_id == user_id).scalar()
314 312
315 313 if f is not None:
316 314 try:
317 315 self.sa.delete(f)
318 316 return
319 317 except Exception:
320 318 log.error(traceback.format_exc())
321 319 raise
322 320
323 321 try:
324 322 f = UserFollowing()
325 323 f.user_id = user_id
326 324 f.follows_user_id = follow_user_id
327 325 self.sa.add(f)
328 326 except Exception:
329 327 log.error(traceback.format_exc())
330 328 raise
331 329
332 330 def is_following_repo(self, repo_name, user_id, cache=False):
333 331 r = self.sa.query(Repository)\
334 332 .filter(Repository.repo_name == repo_name).scalar()
335 333
336 334 f = self.sa.query(UserFollowing)\
337 335 .filter(UserFollowing.follows_repository == r)\
338 336 .filter(UserFollowing.user_id == user_id).scalar()
339 337
340 338 return f is not None
341 339
342 340 def is_following_user(self, username, user_id, cache=False):
343 341 u = User.get_by_username(username)
344 342
345 343 f = self.sa.query(UserFollowing)\
346 344 .filter(UserFollowing.follows_user == u)\
347 345 .filter(UserFollowing.user_id == user_id).scalar()
348 346
349 347 return f is not None
350 348
351 349 def get_followers(self, repo):
352 350 repo = self._get_repo(repo)
353 351
354 352 return self.sa.query(UserFollowing)\
355 353 .filter(UserFollowing.follows_repository == repo).count()
356 354
357 355 def get_forks(self, repo):
358 356 repo = self._get_repo(repo)
359 357 return self.sa.query(Repository)\
360 358 .filter(Repository.fork == repo).count()
361 359
362 360 def get_pull_requests(self, repo):
363 361 repo = self._get_repo(repo)
364 362 return self.sa.query(PullRequest)\
365 363 .filter(PullRequest.target_repo == repo)\
366 364 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
367 365
368 366 def mark_as_fork(self, repo, fork, user):
369 367 repo = self._get_repo(repo)
370 368 fork = self._get_repo(fork)
371 369 if fork and repo.repo_id == fork.repo_id:
372 370 raise Exception("Cannot set repository as fork of itself")
373 371
374 372 if fork and repo.repo_type != fork.repo_type:
375 373 raise RepositoryError(
376 374 "Cannot set repository as fork of repository with other type")
377 375
378 376 repo.fork = fork
379 377 self.sa.add(repo)
380 378 return repo
381 379
382 380 def pull_changes(self, repo, username, remote_uri=None):
383 381 dbrepo = self._get_repo(repo)
384 382 remote_uri = remote_uri or dbrepo.clone_uri
385 383 if not remote_uri:
386 384 raise Exception("This repository doesn't have a clone uri")
387 385
388 386 repo = dbrepo.scm_instance(cache=False)
389 # TODO: marcink fix this an re-enable since we need common logic
390 # for hg/git remove hooks so we don't trigger them on fetching
391 # commits from remote
392 387 repo.config.clear_section('hooks')
393 388
389 try:
390 # NOTE(marcink): add extra validation so we skip invalid urls
391 # this is due this tasks can be executed via scheduler without
392 # proper validation of remote_uri
393 config = make_db_config(clear_session=False)
394 url_validator(remote_uri, dbrepo.repo_type, config)
395 except InvalidCloneUrl:
396 raise
397
394 398 repo_name = dbrepo.repo_name
395 399 try:
396 400 # TODO: we need to make sure those operations call proper hooks !
397 401 repo.pull(remote_uri)
398 402
399 403 self.mark_for_invalidation(repo_name)
400 404 except Exception:
401 405 log.error(traceback.format_exc())
402 406 raise
403 407
404 408 def push_changes(self, repo, username, remote_uri=None):
405 409 dbrepo = self._get_repo(repo)
406 410 remote_uri = remote_uri or dbrepo.push_uri
407 411 if not remote_uri:
408 412 raise Exception("This repository doesn't have a clone uri")
409 413
410 414 repo = dbrepo.scm_instance(cache=False)
411 415 repo.config.clear_section('hooks')
412 416
413 417 try:
418 # NOTE(marcink): add extra validation so we skip invalid urls
419 # this is due this tasks can be executed via scheduler without
420 # proper validation of remote_uri
421 config = make_db_config(clear_session=False)
422 url_validator(remote_uri, dbrepo.repo_type, config)
423 except InvalidCloneUrl:
424 raise
425
426 try:
414 427 repo.push(remote_uri)
415 428 except Exception:
416 429 log.error(traceback.format_exc())
417 430 raise
418 431
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commit a single file change on top of ``commit``.

        :param repo: SCM instance (vcs backend repository)
        :param repo_name: repository name, used for cache invalidation/hooks
        :param commit: parent commit object the change is based on
        :param user: db user object or user_id performing the commit
        :param author: commit author; coerced to unicode
        :param message: commit message; coerced to unicode
        :param content: new file content; coerced to str
        :param f_path: path of the changed file
        :raises IMCCommitError: when the in-memory commit fails
        :return: the newly created commit (tip)
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', repo_name=repo_name,
            repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip
456 469
457 470 def _sanitize_path(self, f_path):
458 471 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
459 472 raise NonRelativePathError('%s is not an relative path' % f_path)
460 473 if f_path:
461 474 f_path = os.path.normpath(f_path)
462 475 return f_path
463 476
464 477 def get_dirnode_metadata(self, request, commit, dir_node):
465 478 if not dir_node.is_dir():
466 479 return []
467 480
468 481 data = []
469 482 for node in dir_node:
470 483 if not node.is_file():
471 484 # we skip file-nodes
472 485 continue
473 486
474 487 last_commit = node.last_commit
475 488 last_commit_date = last_commit.date
476 489 data.append({
477 490 'name': node.name,
478 491 'size': h.format_byte_size_binary(node.size),
479 492 'modified_at': h.format_date(last_commit_date),
480 493 'modified_ts': last_commit_date.isoformat(),
481 494 'revision': last_commit.revision,
482 495 'short_id': last_commit.short_id,
483 496 'message': h.escape(last_commit.message),
484 497 'author': h.escape(last_commit.author),
485 498 'user_profile': h.gravatar_with_user(
486 499 request, last_commit.author),
487 500 })
488 501
489 502 return data
490 503
    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        recursive walk in root dir and return a set of all path in that dir
        based on repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list of paths, if False returns dicts with
            per-node description
        :param extended_info: include md5/size/mimetype details per node
        :param content: include file content (skipped for binary files and
            files over the size limit)
        :param max_file_bytes: will not return file contents over this limit

        :return: tuple ``(_dirs, _files)``
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):
                for f in files:
                    _content = None
                    _data = f.unicode_path
                    # content above the limit is never inlined
                    over_size_limit = (max_file_bytes is not None
                                       and f.size > max_file_bytes)

                    if not flat:
                        _data = {
                            "name": h.escape(f.unicode_path),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)
                for d in dirs:
                    _data = d.unicode_path
                    if not flat:
                        _data = {
                            "name": h.escape(d.unicode_path),
                            "type": "dir",
                        }
                    if extended_info:
                        # directories carry no file metadata; keys are kept
                        # for a shape consistent with file entries
                        _data.update({
                            "md5": None,
                            "binary": None,
                            "size": None,
                            "extension": None,
                        })
                    if content:
                        _data.update({
                            "content": None
                        })
                    _dirs.append(_data)
        except RepositoryError:
            log.debug("Exception in get_nodes", exc_info=True)
            raise

        return _dirs, _files
565 578
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty then it's
            initial commit
        :param author: author of commit, can be different than committer
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            # NOTE(review): the lookup below uses the sanitized path as the
            # key, which assumes sanitizing never renames a valid key -- TODO
            # confirm callers always pass already-clean paths
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
634 647
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commit updates (add/delete/modify, including renames) of multiple
        nodes into ``repo``.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping ``{old_filename: {'op': op,
            'filename': new_filename, 'content': content}, ...}`` where
            ``op`` is one of ``'add'``/``'del'``/``'mod'``
        :param parent_commit: parent commit, can be empty then it's
            initial commit
        :param author: author of commit, can be different than committer
            only for git
        :param trigger_push_hook: trigger push hooks
        :raises IMCCommitError: on unexpected commit failure
        :raises NodeNotChangedError: when nothing actually changed
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitize
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']

            filenode = FileNode(old_filename, content=content)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib
                    # changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario
            # commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
699 712
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes from `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty then it's initial
            commit
        :param author: author of commit, can be different than committer only
            for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatibility it allows same dicts
            # structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # remove multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
760 773
761 774 def strip(self, repo, commit_id, branch):
762 775 scm_instance = repo.scm_instance(cache=False)
763 776 scm_instance.config.clear_section('hooks')
764 777 scm_instance.strip(commit_id, branch)
765 778 self.mark_for_invalidation(repo.repo_name)
766 779
767 780 def get_unread_journal(self):
768 781 return self.sa.query(UserLog).count()
769 782
    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select option with tags branches and bookmarks (for hg only)
        grouped by type

        :param translator: gettext translator callable used for group labels
        :param repo: repository (name, id or db object); optional
        :return: tuple ``(choices, hist_l)`` -- flat list of valid choice
            keys and the grouped options for rendering a select widget
        """
        _ = translator
        repo = self._get_repo(repo)

        # 'rev:tip' is always offered, even without a repository
        hist_l = [
            ['rev:tip', _('latest tip')]
        ]
        choices = [
            'rev:tip'
        ]

        if not repo:
            return choices, hist_l

        repo = repo.scm_instance()

        branches_group = (
            [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
             for b in repo.branches],
            _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        if repo.alias == 'hg':
            # bookmarks are a mercurial-only concept
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l
815 828
816 829 def get_server_info(self, environ=None):
817 830 server_info = get_system_info(environ)
818 831 return server_info
General Comments 0
You need to be logged in to leave comments. Login now