##// END OF EJS Templates
search-api: don't escape files exposed by the full-text-search API....
marcink -
r3489:b852ad38 default
parent child Browse files
Show More
@@ -1,941 +1,939 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import traceback
27 27 import logging
28 28 import cStringIO
29 29
30 30 from sqlalchemy import func
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 import rhodecode
34 34 from rhodecode.lib.vcs import get_backend
35 35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 38 from rhodecode.lib import helpers as h, rc_cache
39 39 from rhodecode.lib.auth import (
40 40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 41 HasUserGroupPermissionAny)
42 42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 43 from rhodecode.lib import hooks_utils
44 44 from rhodecode.lib.utils import (
45 45 get_filesystem_repos, make_db_config)
46 46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 47 from rhodecode.lib.system_info import get_system_info
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 51 PullRequest)
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
class UserTemp(object):
    """Minimal stand-in for a user row, carrying only the user id."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
64 64
65 65
class RepoTemp(object):
    """Minimal stand-in for a repository row, carrying only the repo id."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
72 72
73 73
class SimpleCachedRepoList(object):
    """
    Lightweight iteration over repositories that skips scm initialisation
    and relies on cached database data.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' in the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        # default: any read-or-better repository permission
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        for dbr in self.db_repo_list:
            # permission gate, evaluated per repository
            checker = HasRepoPermissionAny(*self.perm_set)
            if not checker(dbr.repo_name, 'SimpleCachedRepoList check'):
                continue

            yield {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {},
            }
109 109
110 110
class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Creates iterator from given list of objects, additionally
        checking permission for them from perm_set var

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        # build the checker once and reuse it for every object
        checker = self.perm_checker(*self.perm_set)
        context = self.__class__.__name__
        for db_obj in self.obj_list:
            obj_name = getattr(db_obj, self.obj_attr, None)
            # only yield objects the checker grants access to
            if checker(obj_name, context, **self.extra_kwargs):
                yield db_obj
146 146
147 147
class RepoList(_PermCheckIterator):
    """Iterator over repositories filtered by repository permissions."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        # default: any read-or-better repository permission
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']
        super(RepoList, self).__init__(
            obj_list=db_repo_list, obj_attr='repo_name',
            perm_set=perm_set, perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
160 160
161 161
class RepoGroupList(_PermCheckIterator):
    """Iterator over repository groups filtered by group permissions."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        # default: any read-or-better group permission
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list, obj_attr='group_name',
            perm_set=perm_set, perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
173 173
174 174
class UserGroupList(_PermCheckIterator):
    """Iterator over user groups filtered by user-group permissions."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        # default: any read-or-better user-group permission
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list, obj_attr='users_group_name',
            perm_set=perm_set, perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
186 186
187 187
class ScmModel(BaseModel):
    """
    Generic Scm Model

    High-level repository operations: filesystem scanning, follow toggling,
    commits on nodes, node listings and cache invalidation.
    """

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database

        Computed lazily once per instance from the stored VCS settings.
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()
201 201
    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects

        :param repos_path: path to directory containing repositories
        :raises RepositoryError: on duplicate normalized repository names
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        # disable largefiles extension for the plain scan
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name need to be decomposed and put back together using the /
            # since this is internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    # path is a (backend_alias, repo_path) tuple
                    klass = get_backend(path[0])
                    repos[name] = klass(path[1], config=config)
            except OSError:
                # unreadable repository on disk -- skip it
                continue
        log.debug('found %s paths with repositories', len(repos))
        return repos
235 235
    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db and for each repo create it's
        backend instance and fill that backed with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        :returns: SimpleCachedRepoList wrapping the repositories
        """
        if all_repos is None:
            # default: all top-level repositories (not inside any group),
            # case-insensitively ordered by name
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter
253 253
254 254 def get_repo_groups(self, all_groups=None):
255 255 if all_groups is None:
256 256 all_groups = RepoGroup.query()\
257 257 .filter(RepoGroup.group_parent_id == None).all()
258 258 return [x for x in RepoGroupList(all_groups)]
259 259
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            # refresh the commit cache with largefiles disabled
            config.set('extensions', 'largefiles', '')
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                # additionally drop the dogpile cache region of this repo
                cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
284 284
    def toggle_following_repo(self, follow_repo_id, user_id):
        """
        Toggle follow state of a repository for the given user: removes an
        existing follow entry, otherwise creates a new one.

        :param follow_repo_id: id of the repository to (un)follow
        :param user_id: id of the acting user
        """

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            # already following -> unfollow
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        # not following yet -> create the follow entry
        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
307 307
    def toggle_following_user(self, follow_user_id, user_id):
        """
        Toggle follow state of a user for the given user: removes an
        existing follow entry, otherwise creates a new one.

        :param follow_user_id: id of the user to (un)follow
        :param user_id: id of the acting user
        """
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            # already following -> unfollow
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        # not following yet -> create the follow entry
        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
329 329
    def is_following_repo(self, repo_name, user_id, cache=False):
        """
        Check if user *user_id* follows repository *repo_name*.

        :param cache: unused here -- kept for API compatibility
        """
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None
339 339
    def is_following_user(self, username, user_id, cache=False):
        """
        Check if user *user_id* follows the user named *username*.

        :param cache: unused here -- kept for API compatibility
        """
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None
348 348
    def get_followers(self, repo):
        """Return the number of users following *repo*."""
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()
354 354
    def get_forks(self, repo):
        """Return the number of forks of *repo*."""
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()
359 359
    def get_pull_requests(self, repo):
        """Return the number of non-closed pull requests targeting *repo*."""
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.target_repo == repo)\
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
365 365
    def mark_as_fork(self, repo, fork, user):
        """
        Mark *repo* as a fork of *fork* (pass a falsy fork to clear it).

        :param user: acting user (currently not used in this method)
        :raises Exception: when repo and fork are the same repository
        :raises RepositoryError: when repo types differ
        :returns: the updated repo object
        """
        repo = self._get_repo(repo)
        fork = self._get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

        repo.fork = fork
        self.sa.add(repo)
        return repo
379 379
    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
        """
        Fetch changes from *remote_uri* into the local repository *repo*.

        :param repo: db repository (or id/name resolvable by _get_repo)
        :param username: acting user name (not used in the fetch itself)
        :param remote_uri: source uri; defaults to the repo's clone uri
        :param validate_uri: when True, validate *remote_uri* before fetching
        :raises InvalidCloneUrl: when uri validation fails
        """
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # low-level fetch must not trigger rhodecode hooks
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is due this tasks can be executed via scheduler without
            # proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.fetch(remote_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise
408 408
409 409 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
410 410 dbrepo = self._get_repo(repo)
411 411 remote_uri = remote_uri or dbrepo.push_uri
412 412 if not remote_uri:
413 413 raise Exception("This repository doesn't have a clone uri")
414 414
415 415 repo = dbrepo.scm_instance(cache=False)
416 416 repo.config.clear_section('hooks')
417 417
418 418 try:
419 419 # NOTE(marcink): add extra validation so we skip invalid urls
420 420 # this is due this tasks can be executed via scheduler without
421 421 # proper validation of remote_uri
422 422 if validate_uri:
423 423 config = make_db_config(clear_session=False)
424 424 url_validator(remote_uri, dbrepo.repo_type, config)
425 425 except InvalidCloneUrl:
426 426 raise
427 427
428 428 try:
429 429 repo.push(remote_uri)
430 430 except Exception:
431 431 log.error(traceback.format_exc())
432 432 raise
433 433
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits changes

        :param repo: SCM instance
        :param repo_name: repository name, used for cache invalidation and
            the post-push hook
        :param commit: parent commit the change is based on
        :param user: RhodeCode User object or user_id, the commiter
        :param author: author string of the commit
        :param message: commit message
        :param content: new content of the file at *f_path*
        :param f_path: path of the file being changed
        :raises IMCCommitError: when the in-memory commit fails
        :returns: the new tip commit
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip
471 471
472 472 def _sanitize_path(self, f_path):
473 473 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
474 474 raise NonRelativePathError('%s is not an relative path' % f_path)
475 475 if f_path:
476 476 f_path = os.path.normpath(f_path)
477 477 return f_path
478 478
    def get_dirnode_metadata(self, request, commit, dir_node):
        """
        Build display metadata for every file directly inside *dir_node*.

        :param request: current request, used for gravatar rendering
        :param commit: commit the node belongs to (unused here -- kept for
            API compatibility)
        :param dir_node: directory node to inspect
        :returns: list of metadata dicts; empty list when *dir_node* is not
            a directory
        """
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # skip anything that is not a plain file (e.g. submodules)
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data
505 505
506 506 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
507 507 extended_info=False, content=False, max_file_bytes=None):
508 508 """
509 509 recursive walk in root dir and return a set of all path in that dir
510 510 based on repository walk function
511 511
512 512 :param repo_name: name of repository
513 513 :param commit_id: commit id for which to list nodes
514 514 :param root_path: root path to list
515 515 :param flat: return as a list, if False returns a dict with description
516 516 :param extended_info: show additional info such as md5, binary, size etc
517 517 :param content: add nodes content to the return data
518 518 :param max_file_bytes: will not return file contents over this limit
519 519
520 520 """
521 521 _files = list()
522 522 _dirs = list()
523 523 try:
524 524 _repo = self._get_repo(repo_name)
525 525 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
526 526 root_path = root_path.lstrip('/')
527 527 for __, dirs, files in commit.walk(root_path):
528 528
529 529 for f in files:
530 530 _content = None
531 531 _data = f_name = f.unicode_path
532 532
533 533 if not flat:
534 534 _data = {
535 535 "name": h.escape(f_name),
536 536 "type": "file",
537 537 }
538 538 if extended_info:
539 539 _data.update({
540 540 "md5": f.md5,
541 541 "binary": f.is_binary,
542 542 "size": f.size,
543 543 "extension": f.extension,
544 544 "mimetype": f.mimetype,
545 545 "lines": f.lines()[0]
546 546 })
547 547
548 548 if content:
549 549 over_size_limit = (max_file_bytes is not None
550 550 and f.size > max_file_bytes)
551 551 full_content = None
552 552 if not f.is_binary and not over_size_limit:
553 553 full_content = safe_str(f.content)
554 554
555 555 _data.update({
556 556 "content": full_content,
557 557 })
558 558 _files.append(_data)
559 559
560 560 for d in dirs:
561 561 _data = d_name = d.unicode_path
562 562 if not flat:
563 563 _data = {
564 564 "name": h.escape(d_name),
565 565 "type": "dir",
566 566 }
567 567 if extended_info:
568 568 _data.update({
569 569 "md5": None,
570 570 "binary": None,
571 571 "size": None,
572 572 "extension": None,
573 573 })
574 574 if content:
575 575 _data.update({
576 576 "content": None
577 577 })
578 578 _dirs.append(_data)
579 579 except RepositoryError:
580 580 log.exception("Exception in get_nodes")
581 581 raise
582 582
583 583 return _dirs, _files
584 584
    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        retrieve single node from commit

        :param repo_name: name of repository
        :param commit_id: commit to read the node from
        :param file_path: path of the file node
        :param extended_info: include extension and mimetype information
        :param content: include the (non-binary, size-limited) file content
        :param max_file_bytes: do not return content over this size
        :param cache: when False, read metadata/content bypassing node caches
        :raises RepositoryError: when *file_path* is a directory, or on other
            repository access errors
        :returns: dict describing the file node
        """
        try:

            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.unicode_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

            if cache:
                # cached attribute access on the node
                md5 = file_node.md5
                is_binary = file_node.is_binary
                size = file_node.size
            else:
                # single uncached read also yields the raw content
                is_binary, md5, size, _content = file_node.metadata_uncached()

            file_data.update({
                "md5": md5,
                "binary": is_binary,
                "size": size,
            })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_unicode(file_node.content)

                file_data.update({
                    "content": full_content,
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                if not is_binary and not over_size_limit:
                    full_content = safe_unicode(_content)

                file_data.update({
                    "content": full_content,
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data
656 656
657 657 def get_fts_data(self, repo_name, commit_id, root_path='/'):
658 658 """
659 659 Fetch node tree for usage in full text search
660 660 """
661 661
662 662 tree_info = list()
663 663
664 664 try:
665 665 _repo = self._get_repo(repo_name)
666 666 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
667 667 root_path = root_path.lstrip('/')
668 668 for __, dirs, files in commit.walk(root_path):
669 669
670 670 for f in files:
671 _content = None
672 _data = f_name = f.unicode_path
673 671 is_binary, md5, size, _content = f.metadata_uncached()
674 672 _data = {
675 "name": h.escape(f_name),
673 "name": f.unicode_path,
676 674 "md5": md5,
677 675 "extension": f.extension,
678 676 "binary": is_binary,
679 677 "size": size
680 678 }
681 679
682 680 tree_info.append(_data)
683 681
684 682 except RepositoryError:
685 683 log.exception("Exception in get_nodes")
686 684 raise
687 685
688 686 return tree_info
689 687
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the commiter
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's
            initial commit
        :param author: author of commit, cna be different that commiter
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commited commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # NOTE(review): nodes is indexed with the *sanitized* path; if
            # normpath changed it, this raises KeyError -- confirm callers
            # always pass already-normalized keys
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                # py2 file objects / cStringIO buffers -- read them out
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip
759 757
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Update (add/remove/modify) multiple nodes in *repo* in one commit.

        :param user: RhodeCode User object or user_id, the commiter
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {old_filename: {'filename':..., 'content':...,
            'mode':..., 'op': 'add'|'del'|'mod'}, ...}
        :param parent_commit: parent commit; empty means initial commit
        :param author: author of commit, can be different than commiter
            (git only)
        :param trigger_push_hook: trigger push hooks
        :raises IMCCommitError: on unexpected commit failure
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitaze
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            # nothing actually changed -- let callers handle this case
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
822 820
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes into `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's initial
            commit
        :param author: author of commit, cna be different that commiter only
            for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatabilty it allows same dicts
            # structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # schedule all nodes for removal
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
883 881
884 882 def strip(self, repo, commit_id, branch):
885 883 scm_instance = repo.scm_instance(cache=False)
886 884 scm_instance.config.clear_section('hooks')
887 885 scm_instance.strip(commit_id, branch)
888 886 self.mark_for_invalidation(repo.repo_name)
889 887
    def get_unread_journal(self):
        """Return the total number of journal (UserLog) entries."""
        # NOTE(review): despite the name, this counts *all* entries with no
        # "unread" filter -- confirm whether that is intentional
        return self.sa.query(UserLog).count()
892 890
    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select option with tags branches and bookmarks (for hg only)
        grouped by type

        :param translator: translation function used for the group labels
        :param repo: db repository; when falsy only 'rev:tip' is offered
        :returns: tuple (choices, hist_l) -- flat list of values and grouped
            (value, label) pairs suitable for a select widget
        """
        _ = translator
        repo = self._get_repo(repo)

        hist_l = [
            ['rev:tip', _('latest tip')]
        ]
        choices = [
            'rev:tip'
        ]

        if not repo:
            return choices, hist_l

        repo = repo.scm_instance()

        branches_group = (
            [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
             for b in repo.branches],
            _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        if repo.alias == 'hg':
            # bookmarks exist only for mercurial repositories
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l
938 936
939 937 def get_server_info(self, environ=None):
940 938 server_info = get_system_info(environ)
941 939 return server_info
General Comments 0
You need to be logged in to leave comments. Login now