##// END OF EJS Templates
api: fixed unicode problems on get_node, and ensure no cached items are used if cache=false
marcink -
r3488:4bcd6578 default
parent child Browse files
Show More
@@ -1,933 +1,941 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import traceback
27 27 import logging
28 28 import cStringIO
29 29
30 30 from sqlalchemy import func
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 import rhodecode
34 34 from rhodecode.lib.vcs import get_backend
35 35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 38 from rhodecode.lib import helpers as h, rc_cache
39 39 from rhodecode.lib.auth import (
40 40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 41 HasUserGroupPermissionAny)
42 42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 43 from rhodecode.lib import hooks_utils
44 44 from rhodecode.lib.utils import (
45 45 get_filesystem_repos, make_db_config)
46 46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 47 from rhodecode.lib.system_info import get_system_info
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 51 PullRequest)
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
class UserTemp(object):
    """Lightweight stand-in object carrying only a user id (used for logging)."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{0}('id:{1}')>".format(self.__class__.__name__, self.user_id)
64 64
65 65
class RepoTemp(object):
    """Lightweight stand-in object carrying only a repo id (used for logging)."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{0}('id:{1}')>".format(self.__class__.__name__, self.repo_id)
72 72
73 73
class SimpleCachedRepoList(object):
    """
    Lighter version of of iteration of repos without the scm initialisation,
    and with cache usage
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        # default to any read-or-better permission when none given
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        for db_repo in self.db_repo_list:
            # permission filtering happens lazily, at iteration time
            allowed = HasRepoPermissionAny(*self.perm_set)(
                db_repo.repo_name, 'SimpleCachedRepoList check')
            if not allowed:
                continue

            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {},
            }
109 109
110 110
111 111 class _PermCheckIterator(object):
112 112
113 113 def __init__(
114 114 self, obj_list, obj_attr, perm_set, perm_checker,
115 115 extra_kwargs=None):
116 116 """
117 117 Creates iterator from given list of objects, additionally
118 118 checking permission for them from perm_set var
119 119
120 120 :param obj_list: list of db objects
121 121 :param obj_attr: attribute of object to pass into perm_checker
122 122 :param perm_set: list of permissions to check
123 123 :param perm_checker: callable to check permissions against
124 124 """
125 125 self.obj_list = obj_list
126 126 self.obj_attr = obj_attr
127 127 self.perm_set = perm_set
128 128 self.perm_checker = perm_checker
129 129 self.extra_kwargs = extra_kwargs or {}
130 130
131 131 def __len__(self):
132 132 return len(self.obj_list)
133 133
134 134 def __repr__(self):
135 135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136 136
137 137 def __iter__(self):
138 138 checker = self.perm_checker(*self.perm_set)
139 139 for db_obj in self.obj_list:
140 140 # check permission at this level
141 141 name = getattr(db_obj, self.obj_attr, None)
142 142 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
143 143 continue
144 144
145 145 yield db_obj
146 146
147 147
class RepoList(_PermCheckIterator):
    """Iterates repositories, yielding only those the context can read."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name',
            perm_set=perm_set or [
                'repository.read', 'repository.write', 'repository.admin'],
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
160 160
161 161
class RepoGroupList(_PermCheckIterator):
    """Iterates repo groups, yielding only those the context can read."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name',
            perm_set=perm_set or ['group.read', 'group.write', 'group.admin'],
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
173 173
174 174
class UserGroupList(_PermCheckIterator):
    """Iterates user groups, yielding only those the context can read."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name',
            perm_set=perm_set or [
                'usergroup.read', 'usergroup.write', 'usergroup.admin'],
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
186 186
187 187
188 188 class ScmModel(BaseModel):
189 189 """
190 190 Generic Scm Model
191 191 """
192 192
193 193 @LazyProperty
194 194 def repos_path(self):
195 195 """
196 196 Gets the repositories root path from database
197 197 """
198 198
199 199 settings_model = VcsSettingsModel(sa=self.sa)
200 200 return settings_model.get_repos_location()
201 201
    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects

        :param repos_path: path to directory containing repositories;
            defaults to the configured repositories root
        :return: dict mapping normalized repo name -> vcs backend instance
        :raises RepositoryError: on duplicate repository names on disk
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        # disable largefiles extension for the plain scan
        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name need to be decomposed and put back together using the /
            # since this is internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    # path is a (backend_alias, filesystem_path) tuple
                    klass = get_backend(path[0])
                    repos[name] = klass(path[1], config=config)
            except OSError:
                # unreadable/vanished directories are skipped silently
                continue
        log.debug('found %s paths with repositories', len(repos))
        return repos
235 235
    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db and for each repo create it's
        backend instance and fill that backed with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        :return: SimpleCachedRepoList (lazy, permission-filtered on iteration)
        """
        if all_repos is None:
            # default: only top-level repositories (no parent group)
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter
253 253
254 254 def get_repo_groups(self, all_groups=None):
255 255 if all_groups is None:
256 256 all_groups = RepoGroup.query()\
257 257 .filter(RepoGroup.group_parent_id == None).all()
258 258 return [x for x in RepoGroupList(all_groups)]
259 259
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            # refresh the commit data cached on the db repository row
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
                # also purge the dogpile-backed per-repo cache region
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
284 284
    def toggle_following_repo(self, follow_repo_id, user_id):
        """
        Toggle the following state of `user_id` for repository
        `follow_repo_id`: deletes an existing following row, otherwise
        creates one. Session flush/commit is left to the caller.
        """
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            # already following -> un-follow
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
307 307
    def toggle_following_user(self, follow_user_id, user_id):
        """
        Toggle the following state of `user_id` for user `follow_user_id`:
        deletes an existing following row, otherwise creates one.
        Session flush/commit is left to the caller.
        """
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            # already following -> un-follow
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
329 329
330 330 def is_following_repo(self, repo_name, user_id, cache=False):
331 331 r = self.sa.query(Repository)\
332 332 .filter(Repository.repo_name == repo_name).scalar()
333 333
334 334 f = self.sa.query(UserFollowing)\
335 335 .filter(UserFollowing.follows_repository == r)\
336 336 .filter(UserFollowing.user_id == user_id).scalar()
337 337
338 338 return f is not None
339 339
340 340 def is_following_user(self, username, user_id, cache=False):
341 341 u = User.get_by_username(username)
342 342
343 343 f = self.sa.query(UserFollowing)\
344 344 .filter(UserFollowing.follows_user == u)\
345 345 .filter(UserFollowing.user_id == user_id).scalar()
346 346
347 347 return f is not None
348 348
349 349 def get_followers(self, repo):
350 350 repo = self._get_repo(repo)
351 351
352 352 return self.sa.query(UserFollowing)\
353 353 .filter(UserFollowing.follows_repository == repo).count()
354 354
355 355 def get_forks(self, repo):
356 356 repo = self._get_repo(repo)
357 357 return self.sa.query(Repository)\
358 358 .filter(Repository.fork == repo).count()
359 359
360 360 def get_pull_requests(self, repo):
361 361 repo = self._get_repo(repo)
362 362 return self.sa.query(PullRequest)\
363 363 .filter(PullRequest.target_repo == repo)\
364 364 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
365 365
366 366 def mark_as_fork(self, repo, fork, user):
367 367 repo = self._get_repo(repo)
368 368 fork = self._get_repo(fork)
369 369 if fork and repo.repo_id == fork.repo_id:
370 370 raise Exception("Cannot set repository as fork of itself")
371 371
372 372 if fork and repo.repo_type != fork.repo_type:
373 373 raise RepositoryError(
374 374 "Cannot set repository as fork of repository with other type")
375 375
376 376 repo.fork = fork
377 377 self.sa.add(repo)
378 378 return repo
379 379
    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
        """
        Fetch changes from a remote into the local repository.

        :param repo: repository name or db object
        :param username: acting user (currently unused here, kept for the
            caller interface)
        :param remote_uri: explicit remote; defaults to the repo's clone_uri
        :param validate_uri: validate the remote url before fetching
        :raises InvalidCloneUrl: when validation of remote_uri fails
        """
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        # fresh scm instance; local hooks must not fire for a remote sync
        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is due this tasks can be executed via scheduler without
            # proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.fetch(remote_uri)

            # new commits arrived -> drop stale caches
            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise
408 408
409 409 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
410 410 dbrepo = self._get_repo(repo)
411 411 remote_uri = remote_uri or dbrepo.push_uri
412 412 if not remote_uri:
413 413 raise Exception("This repository doesn't have a clone uri")
414 414
415 415 repo = dbrepo.scm_instance(cache=False)
416 416 repo.config.clear_section('hooks')
417 417
418 418 try:
419 419 # NOTE(marcink): add extra validation so we skip invalid urls
420 420 # this is due this tasks can be executed via scheduler without
421 421 # proper validation of remote_uri
422 422 if validate_uri:
423 423 config = make_db_config(clear_session=False)
424 424 url_validator(remote_uri, dbrepo.repo_type, config)
425 425 except InvalidCloneUrl:
426 426 raise
427 427
428 428 try:
429 429 repo.push(remote_uri)
430 430 except Exception:
431 431 log.error(traceback.format_exc())
432 432 raise
433 433
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits changes

        :param repo: SCM instance
        :param repo_name: name of the repository (used for cache invalidation
            and the post-push hook)
        :param commit: parent commit the change is based on
        :param user: RhodeCode user (object or id) performing the commit
        :param author: commit author string
        :param message: commit message
        :param content: new file content
        :param f_path: path of the file being changed
        :raises IMCCommitError: when the in-memory commit fails
        :return: the newly created tip commit
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip
471 471
472 472 def _sanitize_path(self, f_path):
473 473 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
474 474 raise NonRelativePathError('%s is not an relative path' % f_path)
475 475 if f_path:
476 476 f_path = os.path.normpath(f_path)
477 477 return f_path
478 478
    def get_dirnode_metadata(self, request, commit, dir_node):
        """
        Build display metadata (size, last-commit info, author gravatar)
        for each file directly inside `dir_node`.

        :param request: current request, needed for gravatar rendering
        :param commit: commit context (unused directly; nodes carry it)
        :param dir_node: directory node to inspect
        :return: list of dicts, empty when `dir_node` is not a directory
        """
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # we skip file-nodes
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data
505 505
506 506 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
507 507 extended_info=False, content=False, max_file_bytes=None):
508 508 """
509 509 recursive walk in root dir and return a set of all path in that dir
510 510 based on repository walk function
511 511
512 512 :param repo_name: name of repository
513 513 :param commit_id: commit id for which to list nodes
514 514 :param root_path: root path to list
515 515 :param flat: return as a list, if False returns a dict with description
516 516 :param extended_info: show additional info such as md5, binary, size etc
517 517 :param content: add nodes content to the return data
518 518 :param max_file_bytes: will not return file contents over this limit
519 519
520 520 """
521 521 _files = list()
522 522 _dirs = list()
523 523 try:
524 524 _repo = self._get_repo(repo_name)
525 525 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
526 526 root_path = root_path.lstrip('/')
527 527 for __, dirs, files in commit.walk(root_path):
528 528
529 529 for f in files:
530 530 _content = None
531 531 _data = f_name = f.unicode_path
532 532
533 533 if not flat:
534 534 _data = {
535 535 "name": h.escape(f_name),
536 536 "type": "file",
537 537 }
538 538 if extended_info:
539 539 _data.update({
540 540 "md5": f.md5,
541 541 "binary": f.is_binary,
542 542 "size": f.size,
543 543 "extension": f.extension,
544 544 "mimetype": f.mimetype,
545 545 "lines": f.lines()[0]
546 546 })
547 547
548 548 if content:
549 549 over_size_limit = (max_file_bytes is not None
550 550 and f.size > max_file_bytes)
551 551 full_content = None
552 552 if not f.is_binary and not over_size_limit:
553 553 full_content = safe_str(f.content)
554 554
555 555 _data.update({
556 556 "content": full_content,
557 557 })
558 558 _files.append(_data)
559 559
560 560 for d in dirs:
561 561 _data = d_name = d.unicode_path
562 562 if not flat:
563 563 _data = {
564 564 "name": h.escape(d_name),
565 565 "type": "dir",
566 566 }
567 567 if extended_info:
568 568 _data.update({
569 569 "md5": None,
570 570 "binary": None,
571 571 "size": None,
572 572 "extension": None,
573 573 })
574 574 if content:
575 575 _data.update({
576 576 "content": None
577 577 })
578 578 _dirs.append(_data)
579 579 except RepositoryError:
580 580 log.exception("Exception in get_nodes")
581 581 raise
582 582
583 583 return _dirs, _files
584 584
585 585 def get_node(self, repo_name, commit_id, file_path,
586 586 extended_info=False, content=False, max_file_bytes=None, cache=True):
587 587 """
588 588 retrieve single node from commit
589 589 """
590 590 try:
591 591
592 592 _repo = self._get_repo(repo_name)
593 593 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
594 594
595 595 file_node = commit.get_node(file_path)
596 596 if file_node.is_dir():
597 597 raise RepositoryError('The given path is a directory')
598 598
599 599 _content = None
600 600 f_name = file_node.unicode_path
601 601
602 602 file_data = {
603 603 "name": h.escape(f_name),
604 604 "type": "file",
605 605 }
606 606
607 607 if extended_info:
608 608 file_data.update({
609 609 "extension": file_node.extension,
610 610 "mimetype": file_node.mimetype,
611 611 })
612 612
613 613 if cache:
614 614 md5 = file_node.md5
615 615 is_binary = file_node.is_binary
616 616 size = file_node.size
617 617 else:
618 618 is_binary, md5, size, _content = file_node.metadata_uncached()
619 619
620 620 file_data.update({
621 621 "md5": md5,
622 622 "binary": is_binary,
623 623 "size": size,
624 624 })
625 625
626 if content:
627 over_size_limit = (max_file_bytes is not None
628 and file_node.size > max_file_bytes)
626 if content and cache:
627 # get content + cache
628 size = file_node.size
629 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
629 630 full_content = None
630 631 if not file_node.is_binary and not over_size_limit:
631 if cache:
632 full_content = safe_str(file_node.content)
633 else:
634 if _content is None:
635 is_binary, md5, size, _content = \
636 file_node.metadata_uncached()
637 full_content = safe_str(_content)
632 full_content = safe_unicode(file_node.content)
633
634 file_data.update({
635 "content": full_content,
636 })
637 elif content:
638 # get content *without* cache
639 if _content is None:
640 is_binary, md5, size, _content = file_node.metadata_uncached()
641
642 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
643 full_content = None
644 if not is_binary and not over_size_limit:
645 full_content = safe_unicode(_content)
638 646
639 647 file_data.update({
640 648 "content": full_content,
641 649 })
642 650
643 651 except RepositoryError:
644 652 log.exception("Exception in get_node")
645 653 raise
646 654
647 655 return file_data
648 656
649 657 def get_fts_data(self, repo_name, commit_id, root_path='/'):
650 658 """
651 659 Fetch node tree for usage in full text search
652 660 """
653 661
654 662 tree_info = list()
655 663
656 664 try:
657 665 _repo = self._get_repo(repo_name)
658 666 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
659 667 root_path = root_path.lstrip('/')
660 668 for __, dirs, files in commit.walk(root_path):
661 669
662 670 for f in files:
663 671 _content = None
664 672 _data = f_name = f.unicode_path
665 673 is_binary, md5, size, _content = f.metadata_uncached()
666 674 _data = {
667 675 "name": h.escape(f_name),
668 676 "md5": md5,
669 677 "extension": f.extension,
670 678 "binary": is_binary,
671 679 "size": size
672 680 }
673 681
674 682 tree_info.append(_data)
675 683
676 684 except RepositoryError:
677 685 log.exception("Exception in get_nodes")
678 686 raise
679 687
680 688 return tree_info
681 689
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the commiter
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's
            initial commit
        :param author: author of commit, cna be different that commiter
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commited commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                # file-like objects are read fully into memory
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        # drop stale caches now that new commits exist
        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip
751 759
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Update (add/delete/modify) multiple nodes in `repo` as one commit.

        :param user: RhodeCode User object or user_id, the commiter
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {old_filename: {'filename': new_name,
            'content': content, 'mode': mode, 'op': 'add'|'del'|'mod'}, ...}
        :param parent_commit: parent commit, empty means initial commit
        :param author: author of commit, can differ from commiter (git only)
        :param trigger_push_hook: trigger push hooks
        :raises NodeNotChangedError: when a 'mod' op changes nothing
        :raises IMCCommitError: on any other commit failure
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitaze
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
814 822
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes into `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's initial
            commit
        :param author: author of commit, cna be different that commiter only
            for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatabilty it allows same dicts
            # structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        # drop stale caches now that history changed
        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
875 883
    def strip(self, repo, commit_id, branch):
        """
        Strip `commit_id` from `branch` of the given repository
        (destructive history rewrite), then invalidate repo caches.
        """
        scm_instance = repo.scm_instance(cache=False)
        # hooks must not fire for a maintenance operation
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)
881 889
882 890 def get_unread_journal(self):
883 891 return self.sa.query(UserLog).count()
884 892
    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select option with tags branches and bookmarks (for hg only)
        grouped by type

        :param translator: i18n translation callable
        :param repo: repository name or db object; when falsy only the
            'rev:tip' default is returned
        :return: tuple of (choices, grouped_history_list)
        """
        _ = translator
        repo = self._get_repo(repo)

        # 'rev:tip' is always available as the default landing rev
        hist_l = [
            ['rev:tip', _('latest tip')]
        ]
        choices = [
            'rev:tip'
        ]

        if not repo:
            return choices, hist_l

        repo = repo.scm_instance()

        branches_group = (
            [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
             for b in repo.branches],
            _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks exist only in mercurial
        if repo.alias == 'hg':
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l
930 938
931 939 def get_server_info(self, environ=None):
932 940 server_info = get_system_info(environ)
933 941 return server_info
General Comments 0
You need to be logged in to leave comments. Login now