Added VCS into rhodecode core for faster and easier deployments of new versions
marcink
r2007:324ac367 beta

The requested changes are too big and content was truncated.

@@ -0,0 +1,41 b''
1 # -*- coding: utf-8 -*-
2 """
3 vcs
4 ~~~
5
6 Various version Control System (vcs) management abstraction layer for
7 Python.
8
9 :created_on: Apr 8, 2010
10 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
11 """
12
13 VERSION = (0, 2, 3, 'dev')
14
15 __version__ = '.'.join((str(each) for each in VERSION[:4]))
16
17 __all__ = [
18 'get_version', 'get_repo', 'get_backend',
19 'VCSError', 'RepositoryError', 'ChangesetError']
20
21 import sys
22 from rhodecode.lib.vcs.backends import get_repo, get_backend
23 from rhodecode.lib.vcs.exceptions import VCSError, RepositoryError, ChangesetError
24
25
26 def get_version():
27 """
28 Returns shorter version (digit parts only) as string.
29 """
30 return '.'.join((str(each) for each in VERSION[:3]))
31
32 def main(argv=None):
33 if argv is None:
34 argv = sys.argv
35 from rhodecode.lib.vcs.cli import ExecutionManager
36 manager = ExecutionManager(argv)
37 manager.execute()
38 return 0
39
40 if __name__ == '__main__':
41 sys.exit(main(sys.argv))
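The module above mostly re-exports the package's public entry points. A minimal usage sketch, assuming the package is importable as ``rhodecode.lib.vcs``; the repository path below is a made-up placeholder:

from rhodecode.lib.vcs import get_repo, get_version, VCSError

print(get_version())                       # digit parts of VERSION only, e.g. "0.2.3"

try:
    repo = get_repo('/srv/repos/example')  # hypothetical path
except VCSError as exc:                    # raised when no scm is found at the path
    print('not a repository: %s' % exc)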
@@ -0,0 +1,63 b''
1 # -*- coding: utf-8 -*-
2 """
3 vcs.backends
4 ~~~~~~~~~~~~
5
6 Main package for scm backends
7
8 :created_on: Apr 8, 2010
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
10 """
11 import os
12 from pprint import pformat
13 from rhodecode.lib.vcs.conf import settings
14 from rhodecode.lib.vcs.exceptions import VCSError
15 from rhodecode.lib.vcs.utils.helpers import get_scm
16 from rhodecode.lib.vcs.utils.paths import abspath
17 from rhodecode.lib.vcs.utils.imports import import_class
18
19
20 def get_repo(path=None, alias=None, create=False):
21 """
22 Returns ``Repository`` object of the type linked with the given ``alias`` at
23 the specified ``path``. If ``alias`` is not given, it will try to guess it
24 using the get_scm method.
25 """
26 if create:
27 if not (path or alias):
28 raise TypeError("If create is specified, we need path and scm type")
29 return get_backend(alias)(path, create=True)
30 if path is None:
31 path = abspath(os.path.curdir)
32 try:
33 scm, path = get_scm(path, search_recursively=True)
34 path = abspath(path)
35 alias = scm
36 except VCSError:
37 raise VCSError("No scm found at %s" % path)
38 if alias is None:
39 alias = get_scm(path)[0]
40
41 backend = get_backend(alias)
42 repo = backend(path, create=create)
43 return repo
44
45
46 def get_backend(alias):
47 """
48 Returns ``Repository`` class identified by the given alias or raises
49 VCSError if alias is not recognized or backend class cannot be imported.
50 """
51 if alias not in settings.BACKENDS:
52 raise VCSError("Given alias '%s' is not recognized! Allowed aliases:\n"
53 "%s" % (alias, pformat(settings.BACKENDS.keys())))
54 backend_path = settings.BACKENDS[alias]
55 klass = import_class(backend_path)
56 return klass
57
58
59 def get_supported_backends():
60 """
61 Returns list of aliases of supported backends.
62 """
63 return settings.BACKENDS.keys()
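A short usage sketch for the three helpers defined above; only ``get_repo``, ``get_backend`` and ``get_supported_backends`` come from this module, while the paths and the ``'git'`` alias are illustrative assumptions:

from rhodecode.lib.vcs.backends import get_repo, get_backend, get_supported_backends

print(get_supported_backends())            # aliases from settings.BACKENDS, e.g. ['hg', 'git']

# Explicit alias - no detection step
repo = get_repo('/srv/repos/example', alias='git')

# No alias - get_scm() guesses it, searching the path recursively
repo = get_repo('/srv/repos/example')

# Creating a new repository requires both path and alias
backend_cls = get_backend('git')
new_repo = backend_cls('/srv/repos/new-project', create=True)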
This diff has been collapsed as it changes many lines (911 lines changed).
@@ -0,0 +1,911 b''
1 # -*- coding: utf-8 -*-
2 """
3 vcs.backends.base
4 ~~~~~~~~~~~~~~~~~
5
6 Base for all available scm backends
7
8 :created_on: Apr 8, 2010
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
10 """
11
12
13 from itertools import chain
14 from rhodecode.lib.vcs.utils import author_name, author_email
15 from rhodecode.lib.vcs.utils.lazy import LazyProperty
16 from rhodecode.lib.vcs.utils.helpers import get_dict_for_attrs
17 from rhodecode.lib.vcs.conf import settings
18
19 from rhodecode.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, \
20 NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, \
21 NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError, \
22 RepositoryError
23
24
25 class BaseRepository(object):
26 """
27 Base Repository for final backends
28
29 **Attributes**
30
31 ``DEFAULT_BRANCH_NAME``
32 name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
33
34 ``scm``
35 alias of scm, i.e. *git* or *hg*
36
37 ``repo``
38 object from external api
39
40 ``revisions``
41 list of all available revisions' ids, in ascending order
42
43 ``changesets``
44 storage dict caching returned changesets
45
46 ``path``
47 absolute path to the repository
48
49 ``branches``
50 branches as list of changesets
51
52 ``tags``
53 tags as list of changesets
54 """
55 scm = None
56 DEFAULT_BRANCH_NAME = None
57 EMPTY_CHANGESET = '0' * 40
58
59 def __init__(self, repo_path, create=False, **kwargs):
60 """
61 Initializes repository. Raises RepositoryError if repository could
62 not be found at the given ``repo_path`` or directory at ``repo_path``
63 exists and ``create`` is set to True.
64
65 :param repo_path: local path of the repository
66 :param create=False: if set to True, would try to create repository.
67 :param src_url=None: if set, should be proper url from which repository
68 would be cloned; requires ``create`` parameter to be set to True -
69 raises RepositoryError if src_url is set and create evaluates to
70 False
71 """
72 raise NotImplementedError
73
74 def __str__(self):
75 return '<%s at %s>' % (self.__class__.__name__, self.path)
76
77 def __repr__(self):
78 return self.__str__()
79
80 def __len__(self):
81 return self.count()
82
83 @LazyProperty
84 def alias(self):
85 for k, v in settings.BACKENDS.items():
86 if v.split('.')[-1] == str(self.__class__.__name__):
87 return k
88
89 @LazyProperty
90 def name(self):
91 raise NotImplementedError
92
93 @LazyProperty
94 def owner(self):
95 raise NotImplementedError
96
97 @LazyProperty
98 def description(self):
99 raise NotImplementedError
100
101 @LazyProperty
102 def size(self):
103 """
104 Returns combined size in bytes for all repository files
105 """
106
107 size = 0
108 try:
109 tip = self.get_changeset()
110 for topnode, dirs, files in tip.walk('/'):
111 for f in files:
112 size += tip.get_file_size(f.path)
113 for dir in dirs:
114 for f in files:
115 size += tip.get_file_size(f.path)
116
117 except RepositoryError, e:
118 pass
119 return size
120
121 def is_valid(self):
122 """
123 Validates repository.
124 """
125 raise NotImplementedError
126
127 def get_last_change(self):
128 self.get_changesets()
129
130 #==========================================================================
131 # CHANGESETS
132 #==========================================================================
133
134 def get_changeset(self, revision=None):
135 """
136 Returns instance of ``Changeset`` class. If ``revision`` is None, most
137 recent changeset is returned.
138
139 :raises ``EmptyRepositoryError``: if there are no revisions
140 """
141 raise NotImplementedError
142
143 def __iter__(self):
144 """
145 Allows Repository objects to be iterated.
146
147 *Requires* implementation of ``__getitem__`` method.
148 """
149 for revision in self.revisions:
150 yield self.get_changeset(revision)
151
152 def get_changesets(self, start=None, end=None, start_date=None,
153 end_date=None, branch_name=None, reverse=False):
154 """
155 Returns iterator of ``Changeset`` objects from start to end,
156 not inclusive. This should behave just like a list, i.e. end is not
157 inclusive.
158
159 :param start: None or str
160 :param end: None or str
161 :param start_date:
162 :param end_date:
163 :param branch_name:
164 :param reverse:
165 """
166 raise NotImplementedError
167
168 def __getslice__(self, i, j):
169 """
170 Returns an iterator of the sliced repository
171 """
172 for rev in self.revisions[i:j]:
173 yield self.get_changeset(rev)
174
175 def __getitem__(self, key):
176 return self.get_changeset(key)
177
178 def count(self):
179 return len(self.revisions)
180
181 def tag(self, name, user, revision=None, message=None, date=None, **opts):
182 """
183 Creates and returns a tag for the given ``revision``.
184
185 :param name: name for new tag
186 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
187 :param revision: changeset id for which new tag would be created
188 :param message: message of the tag's commit
189 :param date: date of tag's commit
190
191 :raises TagAlreadyExistError: if tag with same name already exists
192 """
193 raise NotImplementedError
194
195 def remove_tag(self, name, user, message=None, date=None):
196 """
197 Removes tag with the given ``name``.
198
199 :param name: name of the tag to be removed
200 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
201 :param message: message of the tag's removal commit
202 :param date: date of tag's removal commit
203
204 :raises TagDoesNotExistError: if tag with given name does not exist
205 """
206 raise NotImplementedError
207
208 def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
209 context=3):
210 """
211 Returns (git like) *diff*, as plain text. Shows changes introduced by
212 ``rev2`` since ``rev1``.
213
214 :param rev1: Entry point from which diff is shown. Can be
215 ``self.EMPTY_CHANGESET`` - in this case, the patch shows all
216 the changes from the empty state of the repository up to ``rev2``
217 :param rev2: Until which revision changes should be shown.
218 :param ignore_whitespace: If set to ``True``, would not show whitespace
219 changes. Defaults to ``False``.
220 :param context: How many lines before/after changed lines should be
221 shown. Defaults to ``3``.
222 """
223 raise NotImplementedError
224
225 # ========== #
226 # COMMIT API #
227 # ========== #
228
229 @LazyProperty
230 def in_memory_changeset(self):
231 """
232 Returns ``InMemoryChangeset`` object for this repository.
233 """
234 raise NotImplementedError
235
236 def add(self, filenode, **kwargs):
237 """
238 Commit api function that will add given ``FileNode`` into this
239 repository.
240
241 :raises ``NodeAlreadyExistsError``: if there is a file with same path
242 already in repository
243 :raises ``NodeAlreadyAddedError``: if given node is already marked as
244 *added*
245 """
246 raise NotImplementedError
247
248 def remove(self, filenode, **kwargs):
249 """
250 Commit api function that will remove given ``FileNode`` from this
251 repository.
252
253 :raises ``EmptyRepositoryError``: if there are no changesets yet
254 :raises ``NodeDoesNotExistError``: if there is no file with given path
255 """
256 raise NotImplementedError
257
258 def commit(self, message, **kwargs):
259 """
260 Persists current changes made on this repository and returns newly
261 created changeset.
262
263 :raises ``NothingChangedError``: if no changes have been made
264 """
265 raise NotImplementedError
266
267 def get_state(self):
268 """
269 Returns dictionary with ``added``, ``changed`` and ``removed`` lists
270 containing ``FileNode`` objects.
271 """
272 raise NotImplementedError
273
274 def get_config_value(self, section, name, config_file=None):
275 """
276 Returns configuration value for a given [``section``] and ``name``.
277
278 :param section: Section we want to retrieve value from
279 :param name: Name of configuration we want to retrieve
280 :param config_file: A path to file which should be used to retrieve
281 configuration from (might also be a list of file paths)
282 """
283 raise NotImplementedError
284
285 def get_user_name(self, config_file=None):
286 """
287 Returns user's name from global configuration file.
288
289 :param config_file: A path to file which should be used to retrieve
290 configuration from (might also be a list of file paths)
291 """
292 raise NotImplementedError
293
294 def get_user_email(self, config_file=None):
295 """
296 Returns user's email from global configuration file.
297
298 :param config_file: A path to file which should be used to retrieve
299 configuration from (might also be a list of file paths)
300 """
301 raise NotImplementedError
302
303 # =========== #
304 # WORKDIR API #
305 # =========== #
306
307 @LazyProperty
308 def workdir(self):
309 """
310 Returns ``Workdir`` instance for this repository.
311 """
312 raise NotImplementedError
313
314
315 class BaseChangeset(object):
316 """
317 Each backend should implement its changeset representation.
318
319 **Attributes**
320
321 ``repository``
322 repository object within which changeset exists
323
324 ``id``
325 may be ``raw_id`` or, e.g. for mercurial's tip, just ``tip``
326
327 ``raw_id``
328 raw changeset representation (i.e. full 40 length sha for git
329 backend)
330
331 ``short_id``
332 shortened (if applicable) version of ``raw_id``; it would be a simple
333 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
334 as ``raw_id`` for subversion
335
336 ``revision``
337 revision number as integer
338
339 ``files``
340 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
341
342 ``dirs``
343 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
344
345 ``nodes``
346 combined list of ``Node`` objects
347
348 ``author``
349 author of the changeset, as unicode
350
351 ``message``
352 message of the changeset, as unicode
353
354 ``parents``
355 list of parent changesets
356
357 ``last``
358 ``True`` if this is last changeset in repository, ``False``
359 otherwise; trying to access this attribute while there are no
360 changesets would raise ``EmptyRepositoryError``
361 """
362 def __str__(self):
363 return '<%s at %s:%s>' % (self.__class__.__name__, self.revision,
364 self.short_id)
365
366 def __repr__(self):
367 return self.__str__()
368
369 def __unicode__(self):
370 return u'%s:%s' % (self.revision, self.short_id)
371
372 def __eq__(self, other):
373 return self.raw_id == other.raw_id
374
375 @LazyProperty
376 def last(self):
377 if self.repository is None:
378 raise ChangesetError("Cannot check if it's most recent revision")
379 return self.raw_id == self.repository.revisions[-1]
380
381 @LazyProperty
382 def parents(self):
383 """
384 Returns list of parents changesets.
385 """
386 raise NotImplementedError
387
388 @LazyProperty
389 def id(self):
390 """
391 Returns string identifying this changeset.
392 """
393 raise NotImplementedError
394
395 @LazyProperty
396 def raw_id(self):
397 """
398 Returns raw string identifying this changeset.
399 """
400 raise NotImplementedError
401
402 @LazyProperty
403 def short_id(self):
404 """
405 Returns shortened version of ``raw_id`` attribute, as string,
406 identifying this changeset, useful for web representation.
407 """
408 raise NotImplementedError
409
410 @LazyProperty
411 def revision(self):
412 """
413 Returns integer identifying this changeset.
414
415 """
416 raise NotImplementedError
417
418 @LazyProperty
419 def author(self):
420 """
421 Returns Author for given commit
422 """
423
424 raise NotImplementedError
425
426 @LazyProperty
427 def author_name(self):
428 """
429 Returns Author name for given commit
430 """
431
432 return author_name(self.author)
433
434 @LazyProperty
435 def author_email(self):
436 """
437 Returns Author email address for given commit
438 """
439
440 return author_email(self.author)
441
442 def get_file_mode(self, path):
443 """
444 Returns stat mode of the file at the given ``path``.
445 """
446 raise NotImplementedError
447
448 def get_file_content(self, path):
449 """
450 Returns content of the file at the given ``path``.
451 """
452 raise NotImplementedError
453
454 def get_file_size(self, path):
455 """
456 Returns size of the file at the given ``path``.
457 """
458 raise NotImplementedError
459
460 def get_file_changeset(self, path):
461 """
462 Returns last commit of the file at the given ``path``.
463 """
464 raise NotImplementedError
465
466 def get_file_history(self, path):
467 """
468 Returns history of file as reversed list of ``Changeset`` objects for
469 which file at given ``path`` has been modified.
470 """
471 raise NotImplementedError
472
473 def get_nodes(self, path):
474 """
475 Returns a combined list of ``DirNode`` and ``FileNode`` objects representing
476 the state of the changeset at the given ``path``.
477
478 :raises ``ChangesetError``: if node at the given ``path`` is not
479 instance of ``DirNode``
480 """
481 raise NotImplementedError
482
483 def get_node(self, path):
484 """
485 Returns ``Node`` object from the given ``path``.
486
487 :raises ``NodeDoesNotExistError``: if there is no node at the given
488 ``path``
489 """
490 raise NotImplementedError
491
492 def fill_archive(self, stream=None, kind='tgz', prefix=None):
493 """
494 Fills up given stream.
495
496 :param stream: file like object.
497 :param kind: one of following: ``zip``, ``tar``, ``tgz``
498 or ``tbz2``. Default: ``tgz``.
499 :param prefix: name of root directory in archive.
500 Default is repository name and changeset's raw_id joined with dash.
501
502 repo-tip.<kind>
503 """
504
505 raise NotImplementedError
506
507 def get_chunked_archive(self, **kwargs):
508 """
509 Returns iterable archive. Tiny wrapper around ``fill_archive`` method.
510
511 :param chunk_size: extra parameter which controls size of returned
512 chunks. Default: 8k.
513 """
514
515 chunk_size = kwargs.pop('chunk_size', 8192)
516 stream = kwargs.get('stream')
517 self.fill_archive(**kwargs)
518 while True:
519 data = stream.read(chunk_size)
520 if not data:
521 break
522 yield data
523
524 @LazyProperty
525 def root(self):
526 """
527 Returns ``RootNode`` object for this changeset.
528 """
529 return self.get_node('')
530
531 def next(self, branch=None):
532 """
533 Returns next changeset from current, if branch is given it will return
534 next changeset belonging to this branch
535
536 :param branch: show changesets within the given named branch
537 """
538 raise NotImplementedError
539
540 def prev(self, branch=None):
541 """
542 Returns previous changeset from current, if branch is given it will
543 return previous changeset belonging to this branch
544
545 :param branch: show changesets within the given named branch
546 """
547 raise NotImplementedError
548
549 @LazyProperty
550 def added(self):
551 """
552 Returns list of added ``FileNode`` objects.
553 """
554 raise NotImplementedError
555
556 @LazyProperty
557 def changed(self):
558 """
559 Returns list of modified ``FileNode`` objects.
560 """
561 raise NotImplementedError
562
563 @LazyProperty
564 def removed(self):
565 """
566 Returns list of removed ``FileNode`` objects.
567 """
568 raise NotImplementedError
569
570 @LazyProperty
571 def size(self):
572 """
573 Returns total number of bytes from contents of all filenodes.
574 """
575 return sum((node.size for node in self.get_filenodes_generator()))
576
577 def walk(self, topurl=''):
578 """
579 Similar to os.walk method. Instead of the filesystem it walks through
580 changeset starting at given ``topurl``. Returns generator of tuples
581 (topnode, dirnodes, filenodes).
582 """
583 topnode = self.get_node(topurl)
584 yield (topnode, topnode.dirs, topnode.files)
585 for dirnode in topnode.dirs:
586 for tup in self.walk(dirnode.path):
587 yield tup
588
589 def get_filenodes_generator(self):
590 """
591 Returns generator that yields *all* file nodes.
592 """
593 for topnode, dirs, files in self.walk():
594 for node in files:
595 yield node
596
597 def as_dict(self):
598 """
599 Returns dictionary with changeset's attributes and their values.
600 """
601 data = get_dict_for_attrs(self, ['id', 'raw_id', 'short_id',
602 'revision', 'date', 'message'])
603 data['author'] = {'name': self.author_name, 'email': self.author_email}
604 data['added'] = [node.path for node in self.added]
605 data['changed'] = [node.path for node in self.changed]
606 data['removed'] = [node.path for node in self.removed]
607 return data
608
609
610 class BaseWorkdir(object):
611 """
612 Working directory representation of single repository.
613
614 :attribute: repository: repository object of working directory
615 """
616
617 def __init__(self, repository):
618 self.repository = repository
619
620 def get_branch(self):
621 """
622 Returns name of current branch.
623 """
624 raise NotImplementedError
625
626 def get_changeset(self):
627 """
628 Returns current changeset.
629 """
630 raise NotImplementedError
631
632 def get_added(self):
633 """
634 Returns list of ``FileNode`` objects marked as *new* in working
635 directory.
636 """
637 raise NotImplementedError
638
639 def get_changed(self):
640 """
641 Returns list of ``FileNode`` objects *changed* in working directory.
642 """
643 raise NotImplementedError
644
645 def get_removed(self):
646 """
647 Returns list of ``RemovedFileNode`` objects marked as *removed* in
648 working directory.
649 """
650 raise NotImplementedError
651
652 def get_untracked(self):
653 """
654 Returns list of ``FileNode`` objects which are present within working
655 directory but are not tracked by the repository.
656 """
657 raise NotImplementedError
658
659 def get_status(self):
660 """
661 Returns dict with ``added``, ``changed``, ``removed`` and ``untracked``
662 lists.
663 """
664 raise NotImplementedError
665
666 def commit(self, message, **kwargs):
667 """
668 Commits local (from working directory) changes and returns newly
669 created
670 ``Changeset``. Updates repository's ``revisions`` list.
671
672 :raises ``CommitError``: if any error occurs while committing
673 """
674 raise NotImplementedError
675
676 def update(self, revision=None):
677 """
678 Fetches content of the given revision and populates it within working
679 directory.
680 """
681 raise NotImplementedError
682
683 def checkout_branch(self, branch=None):
684 """
685 Checks out ``branch`` or the backend's default branch.
686
687 Raises ``BranchDoesNotExistError`` if the branch does not exist.
688 """
689 raise NotImplementedError
690
691
692 class BaseInMemoryChangeset(object):
693 """
694 Represents differences between repository's state (most recent head) and
695 changes made *in place*.
696
697 **Attributes**
698
699 ``repository``
700 repository object for this in-memory-changeset
701
702 ``added``
703 list of ``FileNode`` objects marked as *added*
704
705 ``changed``
706 list of ``FileNode`` objects marked as *changed*
707
708 ``removed``
709 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
710 *removed*
711
712 ``parents``
713 list of ``Changeset`` representing parents of in-memory changeset.
714 Should always be 2-element sequence.
715
716 """
717
718 def __init__(self, repository):
719 self.repository = repository
720 self.added = []
721 self.changed = []
722 self.removed = []
723 self.parents = []
724
725 def add(self, *filenodes):
726 """
727 Marks given ``FileNode`` objects as *to be committed*.
728
729 :raises ``NodeAlreadyExistsError``: if node with same path exists at
730 latest changeset
731 :raises ``NodeAlreadyAddedError``: if node with same path is already
732 marked as *added*
733 """
734 # Check if not already marked as *added* first
735 for node in filenodes:
736 if node.path in (n.path for n in self.added):
737 raise NodeAlreadyAddedError("FileNode %s is already "
738 "marked for addition" % node.path)
739 for node in filenodes:
740 self.added.append(node)
741
742 def change(self, *filenodes):
743 """
744 Marks given ``FileNode`` objects to be *changed* in next commit.
745
746 :raises ``EmptyRepositoryError``: if there are no changesets yet
747 :raises ``NodeAlreadyExistsError``: if node with same path is already
748 marked to be *changed*
749 :raises ``NodeAlreadyRemovedError``: if node with same path is already
750 marked to be *removed*
751 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
752 changeset
753 :raises ``NodeNotChangedError``: if node hasn't really been changed
754 """
755 for node in filenodes:
756 if node.path in (n.path for n in self.removed):
757 raise NodeAlreadyRemovedError("Node at %s is already marked "
758 "as removed" % node.path)
759 try:
760 self.repository.get_changeset()
761 except EmptyRepositoryError:
762 raise EmptyRepositoryError("Nothing to change - try to *add* new "
763 "nodes rather than changing them")
764 for node in filenodes:
765 if node.path in (n.path for n in self.changed):
766 raise NodeAlreadyChangedError("Node at '%s' is already "
767 "marked as changed" % node.path)
768 self.changed.append(node)
769
770 def remove(self, *filenodes):
771 """
772 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
773 *removed* in next commit.
774
775 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
776 be *removed*
777 :raises ``NodeAlreadyChangedError``: if node has been already marked to
778 be *changed*
779 """
780 for node in filenodes:
781 if node.path in (n.path for n in self.removed):
782 raise NodeAlreadyRemovedError("Node is already marked "
783 "for removal at %s" % node.path)
784 if node.path in (n.path for n in self.changed):
785 raise NodeAlreadyChangedError("Node is already marked to "
786 "be changed at %s" % node.path)
787 # We only mark node as *removed* - real removal is done by
788 # commit method
789 self.removed.append(node)
790
791 def reset(self):
792 """
793 Resets this instance to initial state (cleans ``added``, ``changed``
794 and ``removed`` lists).
795 """
796 self.added = []
797 self.changed = []
798 self.removed = []
799 self.parents = []
800
801 def get_ipaths(self):
802 """
803 Returns generator of paths from nodes marked as added, changed or
804 removed.
805 """
806 for node in chain(self.added, self.changed, self.removed):
807 yield node.path
808
809 def get_paths(self):
810 """
811 Returns list of paths from nodes marked as added, changed or removed.
812 """
813 return list(self.get_ipaths())
814
815 def check_integrity(self, parents=None):
816 """
817 Checks in-memory changeset's integrity. Also, sets parents if not
818 already set.
819
820 :raises CommitError: if any error occurs (e.g.
821 ``NodeDoesNotExistError``).
822 """
823 if not self.parents:
824 parents = parents or []
825 if len(parents) == 0:
826 try:
827 parents = [self.repository.get_changeset(), None]
828 except EmptyRepositoryError:
829 parents = [None, None]
830 elif len(parents) == 1:
831 parents += [None]
832 self.parents = parents
833
834 # Local parents, only if not None
835 parents = [p for p in self.parents if p]
836
837 # Check nodes marked as added
838 for p in parents:
839 for node in self.added:
840 try:
841 p.get_node(node.path)
842 except NodeDoesNotExistError:
843 pass
844 else:
845 raise NodeAlreadyExistsError("Node at %s already exists "
846 "at %s" % (node.path, p))
847
848 # Check nodes marked as changed
849 missing = set(self.changed)
850 not_changed = set(self.changed)
851 if self.changed and not parents:
852 raise NodeDoesNotExistError(str(self.changed[0].path))
853 for p in parents:
854 for node in self.changed:
855 try:
856 old = p.get_node(node.path)
857 missing.remove(node)
858 if old.content != node.content:
859 not_changed.remove(node)
860 except NodeDoesNotExistError:
861 pass
862 if self.changed and missing:
863 raise NodeDoesNotExistError("Node at %s is missing "
864 "(parents: %s)" % (node.path, parents))
865
866 if self.changed and not_changed:
867 raise NodeNotChangedError("Node at %s wasn't actually changed "
868 "since parents' changesets: %s" % (not_changed.pop().path,
869 parents)
870 )
871
872 # Check nodes marked as removed
873 if self.removed and not parents:
874 raise NodeDoesNotExistError("Cannot remove node at %s as there "
875 "were no parents specified" % self.removed[0].path)
876 really_removed = set()
877 for p in parents:
878 for node in self.removed:
879 try:
880 p.get_node(node.path)
881 really_removed.add(node)
882 except ChangesetError:
883 pass
884 not_removed = set(self.removed) - really_removed
885 if not_removed:
886 raise NodeDoesNotExistError("Cannot remove node at %s from "
887 "following parents: %s" % (not_removed[0], parents))
888
889 def commit(self, message, author, parents=None, branch=None, date=None,
890 **kwargs):
891 """
892 Performs in-memory commit (doesn't check workdir in any way) and
893 returns newly created ``Changeset``. Updates repository's
894 ``revisions``.
895
896 .. note::
897 While overriding this method each backend should call
898 ``self.check_integrity(parents)`` in the first place.
899
900 :param message: message of the commit
901 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
902 :param parents: single parent or sequence of parents from which commit
903 would be derived
904 :param date: ``datetime.datetime`` instance. Defaults to
905 ``datetime.datetime.now()``.
906 :param branch: branch name, as string. If none given, the backend's default
907 branch would be used.
908
909 :raises ``CommitError``: if any error occurs while committing
910 """
911 raise NotImplementedError
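The classes above only define the abstract API that concrete backends implement. A hedged sketch of how calling code typically consumes a repository and its changesets, assuming a working backend behind ``get_repo``; the path is a placeholder and the attributes follow the docs above:

from rhodecode.lib.vcs import get_repo

repo = get_repo('/srv/repos/example')      # hypothetical path

tip = repo.get_changeset()                 # most recent changeset
print('%s %s %s' % (tip.raw_id, tip.short_id, tip.author))

# __iter__ walks repo.revisions and yields Changeset objects
for cs in repo:
    print('%s %s' % (cs.short_id, cs.message.split('\n')[0]))

# walk() mirrors os.walk(), but over the changeset's file tree
for topnode, dirs, files in tip.walk():
    for f in files:
        print('%s %s' % (f.path, tip.get_file_size(f.path)))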
@@ -0,0 +1,9 b''
1 from .repository import GitRepository
2 from .changeset import GitChangeset
3 from .inmemory import GitInMemoryChangeset
4 from .workdir import GitWorkdir
5
6
7 __all__ = [
8 'GitRepository', 'GitChangeset', 'GitInMemoryChangeset', 'GitWorkdir',
9 ]
@@ -0,0 +1,450 b''
1 import re
2 from itertools import chain
3 from dulwich import objects
4 from subprocess import Popen, PIPE
5 from rhodecode.lib.vcs.conf import settings
6 from rhodecode.lib.vcs.exceptions import RepositoryError
7 from rhodecode.lib.vcs.exceptions import ChangesetError
8 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
9 from rhodecode.lib.vcs.exceptions import VCSError
10 from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
11 from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError
12 from rhodecode.lib.vcs.backends.base import BaseChangeset
13 from rhodecode.lib.vcs.nodes import FileNode, DirNode, NodeKind, RootNode, RemovedFileNode
14 from rhodecode.lib.vcs.utils import safe_unicode
15 from rhodecode.lib.vcs.utils import date_fromtimestamp
16 from rhodecode.lib.vcs.utils.lazy import LazyProperty
17
18
19 class GitChangeset(BaseChangeset):
20 """
21 Represents state of the repository at single revision.
22 """
23
24 def __init__(self, repository, revision):
25 self._stat_modes = {}
26 self.repository = repository
27 self.raw_id = revision
28 self.revision = repository.revisions.index(revision)
29
30 self.short_id = self.raw_id[:12]
31 self.id = self.raw_id
32 try:
33 commit = self.repository._repo.get_object(self.raw_id)
34 except KeyError:
35 raise RepositoryError("Cannot get object with id %s" % self.raw_id)
36 self._commit = commit
37 self._tree_id = commit.tree
38
39 try:
40 self.message = safe_unicode(commit.message[:-1])
41 # Always strip last eol
42 except UnicodeDecodeError:
43 self.message = commit.message[:-1].decode(commit.encoding
44 or 'utf-8')
45 #self.branch = None
46 self.tags = []
47 #tree = self.repository.get_object(self._tree_id)
48 self.nodes = {}
49 self._paths = {}
50
51 @LazyProperty
52 def author(self):
53 return safe_unicode(self._commit.committer)
54
55 @LazyProperty
56 def date(self):
57 return date_fromtimestamp(self._commit.commit_time,
58 self._commit.commit_timezone)
59
60 @LazyProperty
61 def status(self):
62 """
63 Returns modified, added and removed files for current changeset
64 """
65 return self.changed, self.added, self.removed
66
67 @LazyProperty
68 def branch(self):
69 # TODO: Cache as we walk (id <-> branch name mapping)
70 refs = self.repository._repo.get_refs()
71 heads = [(key[len('refs/heads/'):], val) for key, val in refs.items()
72 if key.startswith('refs/heads/')]
73
74 for name, id in heads:
75 walker = self.repository._repo.object_store.get_graph_walker([id])
76 while True:
77 id = walker.next()
78 if not id:
79 break
80 if id == self.id:
81 return safe_unicode(name)
82 raise ChangesetError("This should not happen... Have you manually "
83 "change id of the changeset?")
84
85 def _fix_path(self, path):
86 """
87 Paths are stored without trailing slash so we need to get rid of it if
88 needed.
89 """
90 if path.endswith('/'):
91 path = path.rstrip('/')
92 return path
93
94 def _get_id_for_path(self, path):
95 # FIXME: Please, spare a couple of minutes and make this code cleaner;
96 if not path in self._paths:
97 path = path.strip('/')
98 # set root tree
99 tree = self.repository._repo[self._commit.tree]
100 if path == '':
101 self._paths[''] = tree.id
102 return tree.id
103 splitted = path.split('/')
104 dirs, name = splitted[:-1], splitted[-1]
105 curdir = ''
106 for dir in dirs:
107 if curdir:
108 curdir = '/'.join((curdir, dir))
109 else:
110 curdir = dir
111 #if curdir in self._paths:
112 ## This path have been already traversed
113 ## Update tree and continue
114 #tree = self.repository._repo[self._paths[curdir]]
115 #continue
116 dir_id = None
117 for item, stat, id in tree.iteritems():
118 if curdir:
119 item_path = '/'.join((curdir, item))
120 else:
121 item_path = item
122 self._paths[item_path] = id
123 self._stat_modes[item_path] = stat
124 if dir == item:
125 dir_id = id
126 if dir_id:
127 # Update tree
128 tree = self.repository._repo[dir_id]
129 if not isinstance(tree, objects.Tree):
130 raise ChangesetError('%s is not a directory' % curdir)
131 else:
132 raise ChangesetError('%s has not been found' % curdir)
133 for item, stat, id in tree.iteritems():
134 if curdir:
135 name = '/'.join((curdir, item))
136 else:
137 name = item
138 self._paths[name] = id
139 self._stat_modes[name] = stat
140 if not path in self._paths:
141 raise NodeDoesNotExistError("There is no file or directory "
142 "at the given path %r at revision %r"
143 % (path, self.short_id))
144 return self._paths[path]
145
146 def _get_kind(self, path):
147 id = self._get_id_for_path(path)
148 obj = self.repository._repo[id]
149 if isinstance(obj, objects.Blob):
150 return NodeKind.FILE
151 elif isinstance(obj, objects.Tree):
152 return NodeKind.DIR
153
154 def _get_file_nodes(self):
155 return chain(*(t[2] for t in self.walk()))
156
157 @LazyProperty
158 def parents(self):
159 """
160 Returns list of parent changesets.
161 """
162 return [self.repository.get_changeset(parent)
163 for parent in self._commit.parents]
164
165 def next(self, branch=None):
166
167 if branch and self.branch != branch:
168 raise VCSError('Branch option used on changeset not belonging '
169 'to that branch')
170
171 def _next(changeset, branch):
172 try:
173 next_ = changeset.revision + 1
174 next_rev = changeset.repository.revisions[next_]
175 except IndexError:
176 raise ChangesetDoesNotExistError
177 cs = changeset.repository.get_changeset(next_rev)
178
179 if branch and branch != cs.branch:
180 return _next(cs, branch)
181
182 return cs
183
184 return _next(self, branch)
185
186 def prev(self, branch=None):
187 if branch and self.branch != branch:
188 raise VCSError('Branch option used on changeset not belonging '
189 'to that branch')
190
191 def _prev(changeset, branch):
192 try:
193 prev_ = changeset.revision - 1
194 if prev_ < 0:
195 raise IndexError
196 prev_rev = changeset.repository.revisions[prev_]
197 except IndexError:
198 raise ChangesetDoesNotExistError
199
200 cs = changeset.repository.get_changeset(prev_rev)
201
202 if branch and branch != cs.branch:
203 return _prev(cs, branch)
204
205 return cs
206
207 return _prev(self, branch)
208
209 def get_file_mode(self, path):
210 """
211 Returns stat mode of the file at the given ``path``.
212 """
213 # ensure path is traversed
214 self._get_id_for_path(path)
215 return self._stat_modes[path]
216
217 def get_file_content(self, path):
218 """
219 Returns content of the file at given ``path``.
220 """
221 id = self._get_id_for_path(path)
222 blob = self.repository._repo[id]
223 return blob.as_pretty_string()
224
225 def get_file_size(self, path):
226 """
227 Returns size of the file at given ``path``.
228 """
229 id = self._get_id_for_path(path)
230 blob = self.repository._repo[id]
231 return blob.raw_length()
232
233 def get_file_changeset(self, path):
234 """
235 Returns last commit of the file at the given ``path``.
236 """
237 node = self.get_node(path)
238 return node.history[0]
239
240 def get_file_history(self, path):
241 """
242 Returns history of file as reversed list of ``Changeset`` objects for
243 which file at given ``path`` has been modified.
244
245 TODO: This function now uses the underlying 'git' and 'grep' commands,
246 which is generally not good. Should be replaced with an algorithm
247 iterating over commits.
248 """
249 cmd = 'log --name-status -p %s -- "%s" | grep "^commit"' \
250 % (self.id, path)
251 so, se = self.repository.run_git_command(cmd)
252 ids = re.findall(r'\w{40}', so)
253 return [self.repository.get_changeset(id) for id in ids]
254
255 def get_file_annotate(self, path):
256 """
257 Returns a list of three element tuples with lineno,changeset and line
258
259 TODO: This function now uses the underlying 'git' command, which is
260 generally not good. Should be replaced with an algorithm iterating
261 over commits.
262 """
263 cmd = 'blame -l --root -r %s -- "%s"' % (self.id, path)
264 # -l ==> outputs long shas (and we need all 40 characters)
265 # --root ==> doesn't put '^' character for boundaries
266 # -r sha ==> blames for the given revision
267 so, se = self.repository.run_git_command(cmd)
268 annotate = []
269 for i, blame_line in enumerate(so.split('\n')[:-1]):
270 ln_no = i + 1
271 id, line = re.split(r' \(.+?\) ', blame_line, 1)
272 annotate.append((ln_no, self.repository.get_changeset(id), line))
273 return annotate
274
275 def fill_archive(self, stream=None, kind='tgz', prefix=None,
276 subrepos=False):
277 """
278 Fills up given stream.
279
280 :param stream: file like object.
281 :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
282 Default: ``tgz``.
283 :param prefix: name of root directory in archive.
284 Default is repository name and changeset's raw_id joined with dash
285 (``repo-tip.<KIND>``).
286 :param subrepos: include subrepos in this archive.
287
288 :raise ImproperArchiveTypeError: If given kind is wrong.
289 :raise VCSError: If given stream is None
290
291 """
292 allowed_kinds = settings.ARCHIVE_SPECS.keys()
293 if kind not in allowed_kinds:
294 raise ImproperArchiveTypeError('Archive kind not supported, use one '
295 'of %s' % allowed_kinds)
296
297 if prefix is None:
298 prefix = '%s-%s' % (self.repository.name, self.short_id)
299 elif prefix.startswith('/'):
300 raise VCSError("Prefix cannot start with leading slash")
301 elif prefix.strip() == '':
302 raise VCSError("Prefix cannot be empty")
303
304 if kind == 'zip':
305 frmt = 'zip'
306 else:
307 frmt = 'tar'
308 cmd = 'git archive --format=%s --prefix=%s/ %s' % (frmt, prefix,
309 self.raw_id)
310 if kind == 'tgz':
311 cmd += ' | gzip -9'
312 elif kind == 'tbz2':
313 cmd += ' | bzip2 -9'
314
315 if stream is None:
316 raise VCSError('You need to pass in a valid stream for filling'
317 ' with archival data')
318 popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
319 cwd=self.repository.path)
320
321 buffer_size = 1024 * 8
322 chunk = popen.stdout.read(buffer_size)
323 while chunk:
324 stream.write(chunk)
325 chunk = popen.stdout.read(buffer_size)
326 # Make sure all descriptors would be read
327 popen.communicate()
328
329 def get_nodes(self, path):
330 if self._get_kind(path) != NodeKind.DIR:
331 raise ChangesetError("Directory does not exist for revision %r at "
332 " %r" % (self.revision, path))
333 path = self._fix_path(path)
334 id = self._get_id_for_path(path)
335 tree = self.repository._repo[id]
336 dirnodes = []
337 filenodes = []
338 for name, stat, id in tree.iteritems():
339 obj = self.repository._repo.get_object(id)
340 if path != '':
341 obj_path = '/'.join((path, name))
342 else:
343 obj_path = name
344 if obj_path not in self._stat_modes:
345 self._stat_modes[obj_path] = stat
346 if isinstance(obj, objects.Tree):
347 dirnodes.append(DirNode(obj_path, changeset=self))
348 elif isinstance(obj, objects.Blob):
349 filenodes.append(FileNode(obj_path, changeset=self, mode=stat))
350 else:
351 raise ChangesetError("Requested object should be Tree "
352 "or Blob, is %r" % type(obj))
353 nodes = dirnodes + filenodes
354 for node in nodes:
355 if not node.path in self.nodes:
356 self.nodes[node.path] = node
357 nodes.sort()
358 return nodes
359
360 def get_node(self, path):
361 if isinstance(path, unicode):
362 path = path.encode('utf-8')
363 path = self._fix_path(path)
364 if not path in self.nodes:
365 try:
366 id = self._get_id_for_path(path)
367 except ChangesetError:
368 raise NodeDoesNotExistError("Cannot find one of parents' "
369 "directories for a given path: %s" % path)
370 obj = self.repository._repo.get_object(id)
371 if isinstance(obj, objects.Tree):
372 if path == '':
373 node = RootNode(changeset=self)
374 else:
375 node = DirNode(path, changeset=self)
376 node._tree = obj
377 elif isinstance(obj, objects.Blob):
378 node = FileNode(path, changeset=self)
379 node._blob = obj
380 else:
381 raise NodeDoesNotExistError("There is no file or directory "
382 "at the given path %r at revision %r"
383 % (path, self.short_id))
384 # cache node
385 self.nodes[path] = node
386 return self.nodes[path]
387
388 @LazyProperty
389 def affected_files(self):
390 """
391 Gets fast accessible file changes for given changeset
392 """
393
394 return self.added + self.changed
395
396 @LazyProperty
397 def _diff_name_status(self):
398 output = []
399 for parent in self.parents:
400 cmd = 'diff --name-status %s %s' % (parent.raw_id, self.raw_id)
401 so, se = self.repository.run_git_command(cmd)
402 output.append(so.strip())
403 return '\n'.join(output)
404
405 def _get_paths_for_status(self, status):
406 """
407 Returns sorted list of paths for given ``status``.
408
409 :param status: one of: *added*, *modified* or *deleted*
410 """
411 paths = set()
412 char = status[0].upper()
413 for line in self._diff_name_status.splitlines():
414 if not line:
415 continue
416 if line.startswith(char):
417 splitted = line.split(char,1)
418 if not len(splitted) == 2:
419 raise VCSError("Couldn't parse diff result:\n%s\n\n and "
420 "particularly that line: %s" % (self._diff_name_status,
421 line))
422 paths.add(splitted[1].strip())
423 return sorted(paths)
424
425 @LazyProperty
426 def added(self):
427 """
428 Returns list of added ``FileNode`` objects.
429 """
430 if not self.parents:
431 return list(self._get_file_nodes())
432 return [self.get_node(path) for path in self._get_paths_for_status('added')]
433
434 @LazyProperty
435 def changed(self):
436 """
437 Returns list of modified ``FileNode`` objects.
438 """
439 if not self.parents:
440 return []
441 return [self.get_node(path) for path in self._get_paths_for_status('modified')]
442
443 @LazyProperty
444 def removed(self):
445 """
446 Returns list of removed ``FileNode`` objects.
447 """
448 if not self.parents:
449 return []
450 return [RemovedFileNode(path) for path in self._get_paths_for_status('deleted')]
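A ``GitChangeset`` is normally obtained from a ``GitRepository`` rather than constructed directly. A hedged sketch of the read-only API shown above, assuming a git repository at the placeholder path and a tracked file named ``README.rst`` (both are assumptions, not part of this changeset):

from rhodecode.lib.vcs.backends.git import GitRepository

repo = GitRepository('/srv/repos/example.git')       # hypothetical path
cs = repo.get_changeset()                             # tip when no revision is given

print('%s %s %s' % (cs.branch, cs.author_name, cs.date))
print(cs.get_file_content('README.rst'))              # placeholder file path

# History and per-line annotation for a single file
for changeset in cs.get_file_history('README.rst'):
    print(changeset.short_id)
for lineno, changeset, line in cs.get_file_annotate('README.rst'):
    print('%4d %s %s' % (lineno, changeset.short_id, line))

# Stream a tarball of this revision into any file-like object
with open('/tmp/example.tgz', 'wb') as out:
    cs.fill_archive(stream=out, kind='tgz')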
@@ -0,0 +1,347 b''
1 # config.py - Reading and writing Git config files
2 # Copyright (C) 2011 Jelmer Vernooij <jelmer@samba.org>
3 #
4 # This program is free software; you can redistribute it and/or
5 # modify it under the terms of the GNU General Public License
6 # as published by the Free Software Foundation; version 2
7 # of the License or (at your option) a later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software
16 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
17 # MA 02110-1301, USA.
18
19 """Reading and writing Git configuration files.
20
21 TODO:
22 * preserve formatting when updating configuration files
23 * treat subsection names as case-insensitive for [branch.foo] style
24 subsections
25 """
26
27 # Taken from dulwich not yet released 0.8.3 version (until it is actually
28 # released)
29
30 import errno
31 import os
32 import re
33
34 from dulwich.file import GitFile
35
36
37 class Config(object):
38 """A Git configuration."""
39
40 def get(self, section, name):
41 """Retrieve the contents of a configuration setting.
42
43 :param section: Tuple with section name and optional subsection name
44 :param name: Name of the setting
45 :return: Contents of the setting
46 :raise KeyError: if the value is not set
47 """
48 raise NotImplementedError(self.get)
49
50 def get_boolean(self, section, name, default=None):
51 """Retrieve a configuration setting as boolean.
52
53 :param section: Tuple with section name and optional subsection name
54 :param name: Name of the setting, including section and possible
55 subsection.
56 :return: Contents of the setting
57 :raise KeyError: if the value is not set
58 """
59 try:
60 value = self.get(section, name)
61 except KeyError:
62 return default
63 if value.lower() == "true":
64 return True
65 elif value.lower() == "false":
66 return False
67 raise ValueError("not a valid boolean string: %r" % value)
68
69 def set(self, section, name, value):
70 """Set a configuration value.
71
72 :param name: Name of the configuration value, including section
73 and optional subsection
74 :param value: Value of the setting
75 """
76 raise NotImplementedError(self.set)
77
78
79 class ConfigDict(Config):
80 """Git configuration stored in a dictionary."""
81
82 def __init__(self, values=None):
83 """Create a new ConfigDict."""
84 if values is None:
85 values = {}
86 self._values = values
87
88 def __repr__(self):
89 return "%s(%r)" % (self.__class__.__name__, self._values)
90
91 def __eq__(self, other):
92 return (
93 isinstance(other, self.__class__) and
94 other._values == self._values)
95
96 @classmethod
97 def _parse_setting(cls, name):
98 parts = name.split(".")
99 if len(parts) == 3:
100 return (parts[0], parts[1], parts[2])
101 else:
102 return (parts[0], None, parts[1])
103
104 def get(self, section, name):
105 if isinstance(section, basestring):
106 section = (section, )
107 if len(section) > 1:
108 try:
109 return self._values[section][name]
110 except KeyError:
111 pass
112 return self._values[(section[0],)][name]
113
114 def set(self, section, name, value):
115 if isinstance(section, basestring):
116 section = (section, )
117 self._values.setdefault(section, {})[name] = value
118
119
120 def _format_string(value):
121 if (value.startswith(" ") or
122 value.startswith("\t") or
123 value.endswith(" ") or
124 value.endswith("\t")):
125 return '"%s"' % _escape_value(value)
126 return _escape_value(value)
127
128
129 def _parse_string(value):
130 value = value.strip()
131 ret = []
132 block = []
133 in_quotes = False
134 for c in value:
135 if c == "\"":
136 in_quotes = (not in_quotes)
137 ret.append(_unescape_value("".join(block)))
138 block = []
139 elif c in ("#", ";") and not in_quotes:
140 # the rest of the line is a comment
141 break
142 else:
143 block.append(c)
144
145 if in_quotes:
146 raise ValueError("value starts with quote but lacks end quote")
147
148 ret.append(_unescape_value("".join(block)).rstrip())
149
150 return "".join(ret)
151
152
153 def _unescape_value(value):
154 """Unescape a value."""
155 def unescape(c):
156 return {
157 "\\\\": "\\",
158 "\\\"": "\"",
159 "\\n": "\n",
160 "\\t": "\t",
161 "\\b": "\b",
162 }[c.group(0)]
163 return re.sub(r"(\\.)", unescape, value)
164
165
166 def _escape_value(value):
167 """Escape a value."""
168 return value.replace("\\", "\\\\").replace("\n", "\\n")\
169 .replace("\t", "\\t").replace("\"", "\\\"")
170
171
172 def _check_variable_name(name):
173 for c in name:
174 if not c.isalnum() and c != '-':
175 return False
176 return True
177
178
179 def _check_section_name(name):
180 for c in name:
181 if not c.isalnum() and c not in ('-', '.'):
182 return False
183 return True
184
185
186 def _strip_comments(line):
187 line = line.split("#")[0]
188 line = line.split(";")[0]
189 return line
190
191
192 class ConfigFile(ConfigDict):
193 """A Git configuration file, like .git/config or ~/.gitconfig.
194 """
195
196 @classmethod
197 def from_file(cls, f):
198 """Read configuration from a file-like object."""
199 ret = cls()
200 section = None
201 setting = None
202 for lineno, line in enumerate(f.readlines()):
203 line = line.lstrip()
204 if setting is None:
205 if _strip_comments(line).strip() == "":
206 continue
207 if line[0] == "[":
208 line = _strip_comments(line).rstrip()
209 if line[-1] != "]":
210 raise ValueError("expected trailing ]")
211 key = line.strip()
212 pts = key[1:-1].split(" ", 1)
213 pts[0] = pts[0].lower()
214 if len(pts) == 2:
215 if pts[1][0] != "\"" or pts[1][-1] != "\"":
216 raise ValueError(
217 "Invalid subsection " + pts[1])
218 else:
219 pts[1] = pts[1][1:-1]
220 if not _check_section_name(pts[0]):
221 raise ValueError("invalid section name %s" %
222 pts[0])
223 section = (pts[0], pts[1])
224 else:
225 if not _check_section_name(pts[0]):
226 raise ValueError("invalid section name %s" %
227 pts[0])
228 pts = pts[0].split(".", 1)
229 if len(pts) == 2:
230 section = (pts[0], pts[1])
231 else:
232 section = (pts[0], )
233 ret._values[section] = {}
234 else:
235 if section is None:
236 raise ValueError("setting %r without section" % line)
237 try:
238 setting, value = line.split("=", 1)
239 except ValueError:
240 setting = line
241 value = "true"
242 setting = setting.strip().lower()
243 if not _check_variable_name(setting):
244 raise ValueError("invalid variable name %s" % setting)
245 if value.endswith("\\\n"):
246 value = value[:-2]
247 continuation = True
248 else:
249 continuation = False
250 value = _parse_string(value)
251 ret._values[section][setting] = value
252 if not continuation:
253 setting = None
254 else: # continuation line
255 if line.endswith("\\\n"):
256 line = line[:-2]
257 continuation = True
258 else:
259 continuation = False
260 value = _parse_string(line)
261 ret._values[section][setting] += value
262 if not continuation:
263 setting = None
264 return ret
265
266 @classmethod
267 def from_path(cls, path):
268 """Read configuration from a file on disk."""
269 f = GitFile(path, 'rb')
270 try:
271 ret = cls.from_file(f)
272 ret.path = path
273 return ret
274 finally:
275 f.close()
276
277 def write_to_path(self, path=None):
278 """Write configuration to a file on disk."""
279 if path is None:
280 path = self.path
281 f = GitFile(path, 'wb')
282 try:
283 self.write_to_file(f)
284 finally:
285 f.close()
286
287 def write_to_file(self, f):
288 """Write configuration to a file-like object."""
289 for section, values in self._values.iteritems():
290 try:
291 section_name, subsection_name = section
292 except ValueError:
293 (section_name, ) = section
294 subsection_name = None
295 if subsection_name is None:
296 f.write("[%s]\n" % section_name)
297 else:
298 f.write("[%s \"%s\"]\n" % (section_name, subsection_name))
299 for key, value in values.iteritems():
300 f.write("%s = %s\n" % (key, _escape_value(value)))
301
302
303 class StackedConfig(Config):
304 """Configuration which reads from multiple config files.."""
305
306 def __init__(self, backends, writable=None):
307 self.backends = backends
308 self.writable = writable
309
310 def __repr__(self):
311 return "<%s for %r>" % (self.__class__.__name__, self.backends)
312
313 @classmethod
314 def default_backends(cls):
315 """Retrieve the default configuration.
316
317 This will look in the repository configuration (if for_path is
318 specified), the user's home directory and the system
319 configuration.
320 """
321 paths = []
322 paths.append(os.path.expanduser("~/.gitconfig"))
323 paths.append("/etc/gitconfig")
324 backends = []
325 for path in paths:
326 try:
327 cf = ConfigFile.from_path(path)
328 except (IOError, OSError), e:
329 if e.errno != errno.ENOENT:
330 raise
331 else:
332 continue
333 backends.append(cf)
334 return backends
335
336 def get(self, section, name):
337 for backend in self.backends:
338 try:
339 return backend.get(section, name)
340 except KeyError:
341 pass
342 raise KeyError(name)
343
344 def set(self, section, name, value):
345 if self.writable is None:
346 raise NotImplementedError(self.set)
347 return self.writable.set(section, name, value)
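These classes are vendored from a not-yet-released dulwich 0.8.3. A minimal sketch of reading and writing configuration with them, assuming this file is importable as ``rhodecode.lib.vcs.backends.git.config`` (implied by the relative import in the repository module) and that the placeholder config paths exist:

from rhodecode.lib.vcs.backends.git.config import ConfigFile, StackedConfig

cf = ConfigFile.from_path('/srv/repos/example.git/config')    # hypothetical path
print(cf.get_boolean('core', 'bare', default=False))          # falls back to default if unset
cf.set(('remote', 'origin'), 'url', 'git://example.com/example.git')
cf.write_to_path('/tmp/config-copy')

# Layered lookup across ~/.gitconfig and /etc/gitconfig
stacked = StackedConfig(StackedConfig.default_backends())
print(stacked.get('user', 'name'))                            # raises KeyError if not configured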
@@ -0,0 +1,192 b''
1 import time
2 import datetime
3 import posixpath
4 from dulwich import objects
5 from dulwich.repo import Repo
6 from rhodecode.lib.vcs.backends.base import BaseInMemoryChangeset
7 from rhodecode.lib.vcs.exceptions import RepositoryError
8
9
10 class GitInMemoryChangeset(BaseInMemoryChangeset):
11
12 def commit(self, message, author, parents=None, branch=None, date=None,
13 **kwargs):
14 """
15 Performs in-memory commit (doesn't check workdir in any way) and
16 returns newly created ``Changeset``. Updates repository's
17 ``revisions``.
18
19 :param message: message of the commit
20 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
21 :param parents: single parent or sequence of parents from which commit
22 would be derived
23 :param date: ``datetime.datetime`` instance. Defaults to
24 ``datetime.datetime.now()``.
25 :param branch: branch name, as string. If none given, the backend's default
26 branch would be used.
27
28 :raises ``CommitError``: if any error occurs while committing
29 """
30 self.check_integrity(parents)
31
32 from .repository import GitRepository
33 if branch is None:
34 branch = GitRepository.DEFAULT_BRANCH_NAME
35
36 repo = self.repository._repo
37 object_store = repo.object_store
38
39 ENCODING = "UTF-8"
40 DIRMOD = 040000
41
42 # Create tree and populates it with blobs
43 commit_tree = self.parents[0] and repo[self.parents[0]._commit.tree] or\
44 objects.Tree()
45 for node in self.added + self.changed:
46 # Compute subdirs if needed
47 dirpath, nodename = posixpath.split(node.path)
48 dirnames = dirpath and dirpath.split('/') or []
49 parent = commit_tree
50 ancestors = [('', parent)]
51
52 # Tries to dig for the deepest existing tree
53 while dirnames:
54 curdir = dirnames.pop(0)
55 try:
56 dir_id = parent[curdir][1]
57 except KeyError:
58 # put curdir back into dirnames and stops
59 dirnames.insert(0, curdir)
60 break
61 else:
62 # If found, updates parent
63 parent = self.repository._repo[dir_id]
64 ancestors.append((curdir, parent))
65 # Now parent is the deepest existing tree and we need to create subtrees
66 # for dirnames (in reverse order) [this only applies for nodes from added]
67 new_trees = []
68 blob = objects.Blob.from_string(node.content.encode(ENCODING))
69 node_path = node.name.encode(ENCODING)
70 if dirnames:
71 # If there are trees which should be created we need to build
72 # them now (in reverse order)
73 reversed_dirnames = list(reversed(dirnames))
74 curtree = objects.Tree()
75 curtree[node_path] = node.mode, blob.id
76 new_trees.append(curtree)
77 for dirname in reversed_dirnames[:-1]:
78 newtree = objects.Tree()
79 #newtree.add(DIRMOD, dirname, curtree.id)
80 newtree[dirname] = DIRMOD, curtree.id
81 new_trees.append(newtree)
82 curtree = newtree
83 parent[reversed_dirnames[-1]] = DIRMOD, curtree.id
84 else:
85 parent.add(node.mode, node_path, blob.id)
86 new_trees.append(parent)
87 # Update ancestors
88 for parent, tree, path in reversed([(a[1], b[1], b[0]) for a, b in
89 zip(ancestors, ancestors[1:])]):
90 parent[path] = DIRMOD, tree.id
91 object_store.add_object(tree)
92
93 object_store.add_object(blob)
94 for tree in new_trees:
95 object_store.add_object(tree)
96 for node in self.removed:
97 paths = node.path.split('/')
98 tree = commit_tree
99 trees = [tree]
100 # Traverse deep into the forest...
101 for path in paths:
102 try:
103 obj = self.repository._repo[tree[path][1]]
104 if isinstance(obj, objects.Tree):
105 trees.append(obj)
106 tree = obj
107 except KeyError:
108 break
109 # Cut down the blob and all rotten trees on the way back...
110 for path, tree in reversed(zip(paths, trees)):
111 del tree[path]
112 if tree:
113 # This tree still has elements - don't remove it or any
114 # of its parents
115 break
116
117 object_store.add_object(commit_tree)
118
119 # Create commit
120 commit = objects.Commit()
121 commit.tree = commit_tree.id
122 commit.parents = [p._commit.id for p in self.parents if p]
123 commit.author = commit.committer = author
124 commit.encoding = ENCODING
125 commit.message = message + ' '
126
127 # Compute date
128 if date is None:
129 date = time.time()
130 elif isinstance(date, datetime.datetime):
131 date = time.mktime(date.timetuple())
132
133 author_time = kwargs.pop('author_time', date)
134 commit.commit_time = int(date)
135 commit.author_time = int(author_time)
136 tz = time.timezone
137 author_tz = kwargs.pop('author_timezone', tz)
138 commit.commit_timezone = tz
139 commit.author_timezone = author_tz
140
141 object_store.add_object(commit)
142
143 ref = 'refs/heads/%s' % branch
144 repo.refs[ref] = commit.id
145 repo.refs.set_symbolic_ref('HEAD', ref)
146
147 # Update vcs repository object & recreate dulwich repo
148 self.repository.revisions.append(commit.id)
149 self.repository._repo = Repo(self.repository.path)
150 tip = self.repository.get_changeset()
151 self.reset()
152 return tip
153
154 def _get_missing_trees(self, path, root_tree):
155 """
156 Creates missing ``Tree`` objects for the given path.
157
158 :param path: path given as a string. It may be a path to a file node
159 (i.e. ``foo/bar/baz.txt``) or directory path - in that case it must
160 end with slash (i.e. ``foo/bar/``).
161 :param root_tree: ``dulwich.objects.Tree`` object from which we start
162 traversing (should be commit's root tree)
163 """
164 dirpath = posixpath.split(path)[0]
165 dirs = dirpath.split('/')
166 if not dirs or dirs == ['']:
167 return []
168
169 def get_tree_for_dir(tree, dirname):
170 for name, mode, id in tree.iteritems():
171 if name == dirname:
172 obj = self.repository._repo[id]
173 if isinstance(obj, objects.Tree):
174 return obj
175 else:
176 raise RepositoryError("Cannot create directory %s "
177 "at tree %s as path is occupied and is not a "
178 "Tree" % (dirname, tree))
179 return None
180
181 trees = []
182 parent = root_tree
183 for dirname in dirs:
184 tree = get_tree_for_dir(parent, dirname)
185 if tree is None:
186 tree = objects.Tree()
187 dirmode = 040000
188 parent.add(dirmode, dirname, tree.id)
189 parent = tree
190 # Always append tree
191 trees.append(tree)
192 return trees
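A minimal usage sketch of the in-memory commit API above, assuming an already opened ``GitRepository`` instance named ``repo``; the file path, message and author are illustrative placeholders, not part of this changeset:

    from rhodecode.lib.vcs.nodes import FileNode

    imc = repo.in_memory_changeset                  # GitInMemoryChangeset bound to repo
    imc.add(FileNode('docs/intro.rst', content=u'hello'))   # add() comes from the base in-memory changeset
    tip = imc.commit(message=u'add intro',
                     author=u'Joe Doe <joe.doe@example.com>')
    print tip.raw_id                                # sha of the commit created above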
@@ -0,0 +1,508 b''
1 # -*- coding: utf-8 -*-
2 """
3 vcs.backends.git
4 ~~~~~~~~~~~~~~~~
5
6 Git backend implementation.
7
8 :created_on: Apr 8, 2010
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
10 """
11
12 import os
13 import re
14 import time
15 import posixpath
16 from dulwich.repo import Repo, NotGitRepository
17 #from dulwich.config import ConfigFile
18 from string import Template
19 from subprocess import Popen, PIPE
20 from rhodecode.lib.vcs.backends.base import BaseRepository
21 from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError
22 from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
23 from rhodecode.lib.vcs.exceptions import EmptyRepositoryError
24 from rhodecode.lib.vcs.exceptions import RepositoryError
25 from rhodecode.lib.vcs.exceptions import TagAlreadyExistError
26 from rhodecode.lib.vcs.exceptions import TagDoesNotExistError
27 from rhodecode.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp
28 from rhodecode.lib.vcs.utils.lazy import LazyProperty
29 from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict
30 from rhodecode.lib.vcs.utils.paths import abspath
31 from rhodecode.lib.vcs.utils.paths import get_user_home
32 from .workdir import GitWorkdir
33 from .changeset import GitChangeset
34 from .inmemory import GitInMemoryChangeset
35 from .config import ConfigFile
36
37
38 class GitRepository(BaseRepository):
39 """
40 Git repository backend.
41 """
42 DEFAULT_BRANCH_NAME = 'master'
43 scm = 'git'
44
45 def __init__(self, repo_path, create=False, src_url=None,
46 update_after_clone=False, bare=False):
47
48 self.path = abspath(repo_path)
49 self._repo = self._get_repo(create, src_url, update_after_clone, bare)
50 try:
51 self.head = self._repo.head()
52 except KeyError:
53 self.head = None
54
55 self._config_files = [
56 bare and abspath(self.path, 'config') or abspath(self.path, '.git',
57 'config'),
58 abspath(get_user_home(), '.gitconfig'),
59 ]
60
61 @LazyProperty
62 def revisions(self):
63 """
64 Returns list of revisions' ids, in ascending order. Being lazy
65 attribute allows external tools to inject shas from cache.
66 """
67 return self._get_all_revisions()
68
69 def run_git_command(self, cmd):
70 """
71 Runs given ``cmd`` as git command and returns tuple
72 (stdout, stderr).
73
74 .. note::
75 This method exists only until log/blame functionality is implemented
76 at Dulwich (see https://bugs.launchpad.net/bugs/645142). Parsing
77 os command's output is a road to hell...
78
79 :param cmd: git command to be executed
80 """
81 #cmd = '(cd %s && git %s)' % (self.path, cmd)
82 if isinstance(cmd, basestring):
83 cmd = 'git %s' % cmd
84 else:
85 cmd = ['git'] + cmd
86 try:
87 opts = dict(
88 shell=isinstance(cmd, basestring),
89 stdout=PIPE,
90 stderr=PIPE)
91 if os.path.isdir(self.path):
92 opts['cwd'] = self.path
93 p = Popen(cmd, **opts)
94 except OSError, err:
95 raise RepositoryError("Couldn't run git command (%s).\n"
96 "Original error was:%s" % (cmd, err))
97 so, se = p.communicate()
98 if not se.startswith("fatal: bad default revision 'HEAD'") and \
99 p.returncode != 0:
100 raise RepositoryError("Couldn't run git command (%s).\n"
101 "stderr:\n%s" % (cmd, se))
102 return so, se
103
104 def _check_url(self, url):
105 """
106 Function will check given url and try to verify if it's a valid
107 link. Sometimes it may happen that the remote server will issue a basic
108 auth request that can cause the whole API to hang when used from python
109 or other external calls.
110
111 On failures it'll raise urllib2.HTTPError
112 """
113
114 #TODO: implement this
115 pass
116
117 def _get_repo(self, create, src_url=None, update_after_clone=False,
118 bare=False):
119 if create and os.path.exists(self.path):
120 raise RepositoryError("Location already exist")
121 if src_url and not create:
122 raise RepositoryError("Create should be set to True if src_url is "
123 "given (clone operation creates repository)")
124 try:
125 if create and src_url:
126 self._check_url(src_url)
127 self.clone(src_url, update_after_clone, bare)
128 return Repo(self.path)
129 elif create:
130 os.mkdir(self.path)
131 if bare:
132 return Repo.init_bare(self.path)
133 else:
134 return Repo.init(self.path)
135 else:
136 return Repo(self.path)
137 except (NotGitRepository, OSError), err:
138 raise RepositoryError(err)
139
140 def _get_all_revisions(self):
141 cmd = 'rev-list --all --date-order'
142 try:
143 so, se = self.run_git_command(cmd)
144 except RepositoryError:
145 # Can be raised for empty repositories
146 return []
147 revisions = so.splitlines()
148 revisions.reverse()
149 return revisions
150
151 def _get_revision(self, revision):
152 """
153 For git backend we always return a sha string here. This way we ensure
154 that changeset's raw_id attribute holds the full commit hash.
155 """
156 pattern = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
157 is_bstr = lambda o: isinstance(o, (str, unicode))
158 is_null = lambda o: len(o) == revision.count('0')
159
160 if len(self.revisions) == 0:
161 raise EmptyRepositoryError("There are no changesets yet")
162
163 if revision in (None, '', 'tip', 'HEAD', 'head', -1):
164 revision = self.revisions[-1]
165
166 if ((is_bstr(revision) and revision.isdigit() and len(revision) < 12)
167 or isinstance(revision, int) or is_null(revision)):
168 try:
169 revision = self.revisions[int(revision)]
170 except:
171 raise ChangesetDoesNotExistError("Revision %r does not exist "
172 "for this repository %s" % (revision, self))
173
174 elif is_bstr(revision):
175 if not pattern.match(revision) or revision not in self.revisions:
176 raise ChangesetDoesNotExistError("Revision %r does not exist "
177 "for this repository %s" % (revision, self))
178
179 # Ensure we return full id
180 if not pattern.match(str(revision)):
181 raise ChangesetDoesNotExistError("Given revision %r not recognized"
182 % revision)
183 return revision
184
185 def _get_archives(self, archive_name='tip'):
186
187 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
188 yield {"type": i[0], "extension": i[1], "node": archive_name}
189
190 def _get_url(self, url):
191 """
192 Returns normalized url. If scheme is not given, falls back to the
193 filesystem (``file:///``) scheme.
194 """
195 url = str(url)
196 if url != 'default' and not '://' in url:
197 url = ':///'.join(('file', url))
198 return url
199
200 @LazyProperty
201 def name(self):
202 return os.path.basename(self.path)
203
204 @LazyProperty
205 def last_change(self):
206 """
207 Returns last change made on this repository as datetime object
208 """
209 return date_fromtimestamp(self._get_mtime(), makedate()[1])
210
211 def _get_mtime(self):
212 try:
213 return time.mktime(self.get_changeset().date.timetuple())
214 except RepositoryError:
215 # fallback to filesystem
216 in_path = os.path.join(self.path, '.git', "index")
217 he_path = os.path.join(self.path, '.git', "HEAD")
218 if os.path.exists(in_path):
219 return os.stat(in_path).st_mtime
220 else:
221 return os.stat(he_path).st_mtime
222
223 @LazyProperty
224 def description(self):
225 undefined_description = u'unknown'
226 description_path = os.path.join(self.path, '.git', 'description')
227 if os.path.isfile(description_path):
228 return safe_unicode(open(description_path).read())
229 else:
230 return undefined_description
231
232 @LazyProperty
233 def contact(self):
234 undefined_contact = u'Unknown'
235 return undefined_contact
236
237 @property
238 def branches(self):
239 if not self.revisions:
240 return {}
241 refs = self._repo.refs.as_dict()
242 sortkey = lambda ctx: ctx[0]
243 _branches = [('/'.join(ref.split('/')[2:]), head)
244 for ref, head in refs.items()
245 if ref.startswith('refs/heads/') or
246 ref.startswith('refs/remotes/') and not ref.endswith('/HEAD')]
247 return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
248
249 def _get_tags(self):
250 if not self.revisions:
251 return {}
252 sortkey = lambda ctx: ctx[0]
253 _tags = [('/'.join(ref.split('/')[2:]), head) for ref, head in
254 self._repo.get_refs().items() if ref.startswith('refs/tags/')]
255 return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
256
257 @LazyProperty
258 def tags(self):
259 return self._get_tags()
260
261 def tag(self, name, user, revision=None, message=None, date=None,
262 **kwargs):
263 """
264 Creates and returns a tag for the given ``revision``.
265
266 :param name: name for new tag
267 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
268 :param revision: changeset id for which new tag would be created
269 :param message: message of the tag's commit
270 :param date: date of tag's commit
271
272 :raises TagAlreadyExistError: if tag with same name already exists
273 """
274 if name in self.tags:
275 raise TagAlreadyExistError("Tag %s already exists" % name)
276 changeset = self.get_changeset(revision)
277 message = message or "Added tag %s for commit %s" % (name,
278 changeset.raw_id)
279 self._repo.refs["refs/tags/%s" % name] = changeset._commit.id
280
281 self.tags = self._get_tags()
282 return changeset
283
284 def remove_tag(self, name, user, message=None, date=None):
285 """
286 Removes tag with the given ``name``.
287
288 :param name: name of the tag to be removed
289 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
290 :param message: message of the tag's removal commit
291 :param date: date of tag's removal commit
292
293 :raises TagDoesNotExistError: if tag with given name does not exist
294 """
295 if name not in self.tags:
296 raise TagDoesNotExistError("Tag %s does not exist" % name)
297 tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name)
298 try:
299 os.remove(tagpath)
300 self.tags = self._get_tags()
301 except OSError, e:
302 raise RepositoryError(e.strerror)
303
304 def get_changeset(self, revision=None):
305 """
306 Returns ``GitChangeset`` object representing commit from git repository
307 at the given revision or head (most recent commit) if None given.
308 """
309 if isinstance(revision, GitChangeset):
310 return revision
311 revision = self._get_revision(revision)
312 changeset = GitChangeset(repository=self, revision=revision)
313 return changeset
314
315 def get_changesets(self, start=None, end=None, start_date=None,
316 end_date=None, branch_name=None, reverse=False):
317 """
318 Returns iterator of ``GitChangeset`` objects from start to end (both
319 are inclusive), in ascending date order (unless ``reverse`` is set).
320
321 :param start: changeset ID, as str; first returned changeset
322 :param end: changeset ID, as str; last returned changeset
323 :param start_date: if specified, changesets with commit date less than
324 ``start_date`` would be filtered out from returned set
325 :param end_date: if specified, changesets with commit date greater than
326 ``end_date`` would be filtered out from returned set
327 :param branch_name: if specified, changesets not reachable from given
328 branch would be filtered out from returned set
329 :param reverse: if ``True``, returned generator would be reversed
330 (meaning that returned changesets would have descending date order)
331
332 :raise BranchDoesNotExistError: If given ``branch_name`` does not
333 exist.
334 :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
335 ``end`` could not be found.
336
337 """
338 if branch_name and branch_name not in self.branches:
339 raise BranchDoesNotExistError("Branch '%s' not found" \
340 % branch_name)
341 # %H in the format string means (full) commit hash; initial hashes are retrieved
342 # in ascending date order
343 cmd_template = 'log --date-order --reverse --pretty=format:"%H"'
344 cmd_params = {}
345 if start_date:
346 cmd_template += ' --since "$since"'
347 cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S')
348 if end_date:
349 cmd_template += ' --until "$until"'
350 cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S')
351 if branch_name:
352 cmd_template += ' $branch_name'
353 cmd_params['branch_name'] = branch_name
354 else:
355 cmd_template += ' --all'
356
357 cmd = Template(cmd_template).safe_substitute(**cmd_params)
358 revs = self.run_git_command(cmd)[0].splitlines()
359 start_pos = 0
360 end_pos = len(revs)
361 if start:
362 _start = self._get_revision(start)
363 try:
364 start_pos = revs.index(_start)
365 except ValueError:
366 pass
367
368 if end is not None:
369 _end = self._get_revision(end)
370 try:
371 end_pos = revs.index(_end)
372 except ValueError:
373 pass
374
375 if None not in [start, end] and start_pos > end_pos:
376 raise RepositoryError('start cannot be after end')
377
378 if end_pos is not None:
379 end_pos += 1
380
381 revs = revs[start_pos:end_pos]
382 if reverse:
383 revs = reversed(revs)
384 for rev in revs:
385 yield self.get_changeset(rev)
386
387 def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
388 context=3):
389 """
390 Returns (git like) *diff*, as plain text. Shows changes introduced by
391 ``rev2`` since ``rev1``.
392
393 :param rev1: Entry point from which diff is shown. Can be
394 ``self.EMPTY_CHANGESET`` - in this case, patch showing all
395 the changes since empty state of the repository until ``rev2``
396 :param rev2: Until which revision changes should be shown.
397 :param ignore_whitespace: If set to ``True``, would not show whitespace
398 changes. Defaults to ``False``.
399 :param context: How many lines before/after changed lines should be
400 shown. Defaults to ``3``.
401 """
402 flags = ['-U%s' % context]
403 if ignore_whitespace:
404 flags.append('-w')
405
406 if rev1 == self.EMPTY_CHANGESET:
407 rev2 = self.get_changeset(rev2).raw_id
408 cmd = ' '.join(['show'] + flags + [rev2])
409 else:
410 rev1 = self.get_changeset(rev1).raw_id
411 rev2 = self.get_changeset(rev2).raw_id
412 cmd = ' '.join(['diff'] + flags + [rev1, rev2])
413
414 if path:
415 cmd += ' -- "%s"' % path
416 stdout, stderr = self.run_git_command(cmd)
417 # If we used 'show' command, strip first few lines (until actual diff
418 # starts)
419 if rev1 == self.EMPTY_CHANGESET:
420 lines = stdout.splitlines()
421 x = 0
422 for line in lines:
423 if line.startswith('diff'):
424 break
425 x += 1
426 # Append new line just like the 'diff' command does
427 stdout = '\n'.join(lines[x:]) + '\n'
428 return stdout
429
430 @LazyProperty
431 def in_memory_changeset(self):
432 """
433 Returns ``GitInMemoryChangeset`` object for this repository.
434 """
435 return GitInMemoryChangeset(self)
436
437 def clone(self, url, update_after_clone=True, bare=False):
438 """
439 Tries to clone changes from external location.
440
441 :param update_after_clone: If set to ``False``, git won't checkout
442 working directory
443 :param bare: If set to ``True``, repository would be cloned into
444 *bare* git repository (no working directory at all).
445 """
446 url = self._get_url(url)
447 cmd = ['clone']
448 if bare:
449 cmd.append('--bare')
450 elif not update_after_clone:
451 cmd.append('--no-checkout')
452 cmd += ['--', '"%s"' % url, '"%s"' % self.path]
453 cmd = ' '.join(cmd)
454 # If error occurs run_git_command raises RepositoryError already
455 self.run_git_command(cmd)
456
457 @LazyProperty
458 def workdir(self):
459 """
460 Returns ``Workdir`` instance for this repository.
461 """
462 return GitWorkdir(self)
463
464 def get_config_value(self, section, name, config_file=None):
465 """
466 Returns configuration value for a given [``section``] and ``name``.
467
468 :param section: Section we want to retrieve value from
469 :param name: Name of configuration we want to retrieve
470 :param config_file: A path to file which should be used to retrieve
471 configuration from (might also be a list of file paths)
472 """
473 if config_file is None:
474 config_file = []
475 elif isinstance(config_file, basestring):
476 config_file = [config_file]
477
478 def gen_configs():
479 for path in config_file + self._config_files:
480 try:
481 yield ConfigFile.from_path(path)
482 except (IOError, OSError, ValueError):
483 continue
484
485 for config in gen_configs():
486 try:
487 return config.get(section, name)
488 except KeyError:
489 continue
490 return None
491
492 def get_user_name(self, config_file=None):
493 """
494 Returns user's name from global configuration file.
495
496 :param config_file: A path to file which should be used to retrieve
497 configuration from (might also be a list of file paths)
498 """
499 return self.get_config_value('user', 'name', config_file)
500
501 def get_user_email(self, config_file=None):
502 """
503 Returns user's email from global configuration file.
504
505 :param config_file: A path to file which should be used to retrieve
506 configuration from (might also be a list of file paths)
507 """
508 return self.get_config_value('user', 'email', config_file)
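A hedged sketch of driving the ``GitRepository`` API above; the repository path and branch name are placeholders:

    from rhodecode.lib.vcs.backends.git import GitRepository

    repo = GitRepository('/tmp/example-repo')        # open an existing repository
    tip = repo.get_changeset()                        # most recent commit
    for cs in repo.get_changesets(branch_name='master'):
        print cs.raw_id, cs.message
    # full patch from the empty state up to tip; EMPTY_CHANGESET comes from BaseRepository
    print repo.get_diff(repo.EMPTY_CHANGESET, tip.raw_id)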
@@ -0,0 +1,31 b''
1 import re
2 from rhodecode.lib.vcs.backends.base import BaseWorkdir
3 from rhodecode.lib.vcs.exceptions import RepositoryError
4 from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError
5
6
7 class GitWorkdir(BaseWorkdir):
8
9 def get_branch(self):
10 headpath = self.repository._repo.refs.refpath('HEAD')
11 try:
12 content = open(headpath).read()
13 match = re.match(r'^ref: refs/heads/(?P<branch>.+)\n$', content)
14 if match:
15 return match.groupdict()['branch']
16 else:
17 raise RepositoryError("Couldn't compute workdir's branch")
18 except IOError:
19 # Try naive way...
20 raise RepositoryError("Couldn't compute workdir's branch")
21
22 def get_changeset(self):
23 return self.repository.get_changeset(
24 self.repository._repo.refs.as_dict().get('HEAD'))
25
26 def checkout_branch(self, branch=None):
27 if branch is None:
28 branch = self.repository.DEFAULT_BRANCH_NAME
29 if branch not in self.repository.branches:
30 raise BranchDoesNotExistError
31 self.repository.run_git_command(['checkout', branch])
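For completeness, a short sketch of the workdir helper above; it assumes a non-bare ``GitRepository`` object named ``repo``:

    wd = repo.workdir                    # GitWorkdir instance
    print wd.get_branch()                # e.g. 'master'
    wd.checkout_branch('master')         # unknown names raise BranchDoesNotExistError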
@@ -0,0 +1,21 b''
1 # -*- coding: utf-8 -*-
2 """
3 vcs.backends.hg
4 ~~~~~~~~~~~~~~~~
5
6 Mercurial backend implementation.
7
8 :created_on: Apr 8, 2010
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
10 """
11
12 from .repository import MercurialRepository
13 from .changeset import MercurialChangeset
14 from .inmemory import MercurialInMemoryChangeset
15 from .workdir import MercurialWorkdir
16
17
18 __all__ = [
19 'MercurialRepository', 'MercurialChangeset',
20 'MercurialInMemoryChangeset', 'MercurialWorkdir',
21 ]
@@ -0,0 +1,338 b''
1 import os
2 import posixpath
3
4 from rhodecode.lib.vcs.backends.base import BaseChangeset
5 from rhodecode.lib.vcs.conf import settings
6 from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError, \
7 ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, VCSError
8 from rhodecode.lib.vcs.nodes import AddedFileNodesGenerator, ChangedFileNodesGenerator, \
9 DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode
10
11 from rhodecode.lib.vcs.utils import safe_str, safe_unicode, date_fromtimestamp
12 from rhodecode.lib.vcs.utils.lazy import LazyProperty
13 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
14
15 from ...utils.hgcompat import archival, hex
16
17
18 class MercurialChangeset(BaseChangeset):
19 """
20 Represents state of the repository at a single revision.
21 """
22
23 def __init__(self, repository, revision):
24 self.repository = repository
25 self.raw_id = revision
26 self._ctx = repository._repo[revision]
27 self.revision = self._ctx._rev
28 self.nodes = {}
29
30 @LazyProperty
31 def tags(self):
32 return map(safe_unicode, self._ctx.tags())
33
34 @LazyProperty
35 def branch(self):
36 return safe_unicode(self._ctx.branch())
37
38 @LazyProperty
39 def message(self):
40 return safe_unicode(self._ctx.description())
41
42 @LazyProperty
43 def author(self):
44 return safe_unicode(self._ctx.user())
45
46 @LazyProperty
47 def date(self):
48 return date_fromtimestamp(*self._ctx.date())
49
50 @LazyProperty
51 def status(self):
52 """
53 Returns modified, added, removed, deleted files for current changeset
54 """
55 return self.repository._repo.status(self._ctx.p1().node(),
56 self._ctx.node())
57
58 @LazyProperty
59 def _file_paths(self):
60 return list(self._ctx)
61
62 @LazyProperty
63 def _dir_paths(self):
64 p = list(set(get_dirs_for_path(*self._file_paths)))
65 p.insert(0, '')
66 return p
67
68 @LazyProperty
69 def _paths(self):
70 return self._dir_paths + self._file_paths
71
72 @LazyProperty
73 def id(self):
74 if self.last:
75 return u'tip'
76 return self.short_id
77
78 @LazyProperty
79 def short_id(self):
80 return self.raw_id[:12]
81
82 @LazyProperty
83 def parents(self):
84 """
85 Returns list of parents changesets.
86 """
87 return [self.repository.get_changeset(parent.rev())
88 for parent in self._ctx.parents() if parent.rev() >= 0]
89
90 def next(self, branch=None):
91
92 if branch and self.branch != branch:
93 raise VCSError('Branch option used on changeset not belonging '
94 'to that branch')
95
96 def _next(changeset, branch):
97 try:
98 next_ = changeset.revision + 1
99 next_rev = changeset.repository.revisions[next_]
100 except IndexError:
101 raise ChangesetDoesNotExistError
102 cs = changeset.repository.get_changeset(next_rev)
103
104 if branch and branch != cs.branch:
105 return _next(cs, branch)
106
107 return cs
108
109 return _next(self, branch)
110
111 def prev(self, branch=None):
112 if branch and self.branch != branch:
113 raise VCSError('Branch option used on changeset not belonging '
114 'to that branch')
115
116 def _prev(changeset, branch):
117 try:
118 prev_ = changeset.revision - 1
119 if prev_ < 0:
120 raise IndexError
121 prev_rev = changeset.repository.revisions[prev_]
122 except IndexError:
123 raise ChangesetDoesNotExistError
124
125 cs = changeset.repository.get_changeset(prev_rev)
126
127 if branch and branch != cs.branch:
128 return _prev(cs, branch)
129
130 return cs
131
132 return _prev(self, branch)
133
134 def _fix_path(self, path):
135 """
136 Paths are stored without trailing slash so we need to get rid of it if
137 needed. Also mercurial keeps filenodes as str so we need to convert
138 from unicode to str
139 """
140 if path.endswith('/'):
141 path = path.rstrip('/')
142
143 return safe_str(path)
144
145 def _get_kind(self, path):
146 path = self._fix_path(path)
147 if path in self._file_paths:
148 return NodeKind.FILE
149 elif path in self._dir_paths:
150 return NodeKind.DIR
151 else:
152 raise ChangesetError("Node does not exist at the given path %r"
153 % (path))
154
155 def _get_filectx(self, path):
156 path = self._fix_path(path)
157 if self._get_kind(path) != NodeKind.FILE:
158 raise ChangesetError("File does not exist for revision %r at "
159 " %r" % (self.revision, path))
160 return self._ctx.filectx(path)
161
162 def get_file_mode(self, path):
163 """
164 Returns stat mode of the file at the given ``path``.
165 """
166 fctx = self._get_filectx(path)
167 if 'x' in fctx.flags():
168 return 0100755
169 else:
170 return 0100644
171
172 def get_file_content(self, path):
173 """
174 Returns content of the file at given ``path``.
175 """
176 fctx = self._get_filectx(path)
177 return fctx.data()
178
179 def get_file_size(self, path):
180 """
181 Returns size of the file at given ``path``.
182 """
183 fctx = self._get_filectx(path)
184 return fctx.size()
185
186 def get_file_changeset(self, path):
187 """
188 Returns last commit of the file at the given ``path``.
189 """
190 fctx = self._get_filectx(path)
191 changeset = self.repository.get_changeset(fctx.linkrev())
192 return changeset
193
194 def get_file_history(self, path):
195 """
196 Returns history of file as reversed list of ``Changeset`` objects for
197 which file at given ``path`` has been modified.
198 """
199 fctx = self._get_filectx(path)
200 nodes = [fctx.filectx(x).node() for x in fctx.filelog()]
201 changesets = [self.repository.get_changeset(hex(node))
202 for node in reversed(nodes)]
203 return changesets
204
205 def get_file_annotate(self, path):
206 """
207 Returns a list of three element tuples with lineno, changeset and line
208 """
209 fctx = self._get_filectx(path)
210 annotate = []
211 for i, annotate_data in enumerate(fctx.annotate()):
212 ln_no = i + 1
213 annotate.append((ln_no, self.repository\
214 .get_changeset(hex(annotate_data[0].node())),
215 annotate_data[1],))
216
217 return annotate
218
219 def fill_archive(self, stream=None, kind='tgz', prefix=None,
220 subrepos=False):
221 """
222 Fills up given stream.
223
224 :param stream: file like object.
225 :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
226 Default: ``tgz``.
227 :param prefix: name of root directory in archive.
228 Default is repository name and changeset's raw_id joined with dash
229 (``repo-tip.<KIND>``).
230 :param subrepos: include subrepos in this archive.
231
232 :raise ImproperArchiveTypeError: If given kind is wrong.
233 :raise VCSError: If given stream is None
234 """
235
236 allowed_kinds = settings.ARCHIVE_SPECS.keys()
237 if kind not in allowed_kinds:
238 raise ImproperArchiveTypeError('Archive kind not supported, use one '
239 'of %s' % allowed_kinds)
240
241 if stream is None:
242 raise VCSError('You need to pass in a valid stream for filling'
243 ' with archival data')
244
245 if prefix is None:
246 prefix = '%s-%s' % (self.repository.name, self.short_id)
247 elif prefix.startswith('/'):
248 raise VCSError("Prefix cannot start with leading slash")
249 elif prefix.strip() == '':
250 raise VCSError("Prefix cannot be empty")
251
252 archival.archive(self.repository._repo, stream, self.raw_id,
253 kind, prefix=prefix, subrepos=subrepos)
254
255 #stream.close()
256
257 if stream.closed and hasattr(stream, 'name'):
258 stream = open(stream.name, 'rb')
259 elif hasattr(stream, 'mode') and 'r' not in stream.mode:
260 stream = open(stream.name, 'rb')
261 else:
262 stream.seek(0)
263
264 def get_nodes(self, path):
265 """
266 Returns combined ``DirNode`` and ``FileNode`` objects list representing
267 state of changeset at the given ``path``. If node at the given ``path``
268 is not instance of ``DirNode``, ChangesetError would be raised.
269 """
270
271 if self._get_kind(path) != NodeKind.DIR:
272 raise ChangesetError("Directory does not exist for revision %r at "
273 " %r" % (self.revision, path))
274 path = self._fix_path(path)
275 filenodes = [FileNode(f, changeset=self) for f in self._file_paths
276 if os.path.dirname(f) == path]
277 dirs = path == '' and '' or [d for d in self._dir_paths
278 if d and posixpath.dirname(d) == path]
279 dirnodes = [DirNode(d, changeset=self) for d in dirs
280 if os.path.dirname(d) == path]
281 nodes = dirnodes + filenodes
282 # cache nodes
283 for node in nodes:
284 self.nodes[node.path] = node
285 nodes.sort()
286 return nodes
287
288 def get_node(self, path):
289 """
290 Returns ``Node`` object from the given ``path``. If there is no node at
291 the given ``path``, ``NodeDoesNotExistError`` would be raised.
292 """
293
294 path = self._fix_path(path)
295
296 if not path in self.nodes:
297 if path in self._file_paths:
298 node = FileNode(path, changeset=self)
299 elif path in self._dir_paths:
300 if path == '':
301 node = RootNode(changeset=self)
302 else:
303 node = DirNode(path, changeset=self)
304 else:
305 raise NodeDoesNotExistError("There is no file nor directory "
306 "at the given path: %r at revision %r"
307 % (path, self.short_id))
308 # cache node
309 self.nodes[path] = node
310 return self.nodes[path]
311
312 @LazyProperty
313 def affected_files(self):
314 """
315 Gets fast accessible file changes for given changeset
316 """
317 return self._ctx.files()
318
319 @property
320 def added(self):
321 """
322 Returns list of added ``FileNode`` objects.
323 """
324 return AddedFileNodesGenerator([n for n in self.status[1]], self)
325
326 @property
327 def changed(self):
328 """
329 Returns list of modified ``FileNode`` objects.
330 """
331 return ChangedFileNodesGenerator([n for n in self.status[0]], self)
332
333 @property
334 def removed(self):
335 """
336 Returns list of removed ``FileNode`` objects.
337 """
338 return RemovedFileNodesGenerator([n for n in self.status[2]], self)
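An illustrative sketch of reading data through ``MercurialChangeset``; the repository object and file path are placeholders:

    cs = repo.get_changeset('tip')
    print cs.branch, cs.author, cs.date
    node = cs.get_node('setup.py')                   # FileNode for that path
    print node.content[:80]
    for f in cs.added:                               # lazy FileNode generators
        print 'A', f.path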
@@ -0,0 +1,110 b''
1 import datetime
2 import errno
3
4 from rhodecode.lib.vcs.backends.base import BaseInMemoryChangeset
5 from rhodecode.lib.vcs.exceptions import RepositoryError
6
7 from ...utils.hgcompat import memfilectx, memctx, hex
8
9
10 class MercurialInMemoryChangeset(BaseInMemoryChangeset):
11
12 def commit(self, message, author, parents=None, branch=None, date=None,
13 **kwargs):
14 """
15 Performs in-memory commit (doesn't check workdir in any way) and
16 returns newly created ``Changeset``. Updates repository's
17 ``revisions``.
18
19 :param message: message of the commit
20 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
21 :param parents: single parent or sequence of parents from which commit
22 would be derived
23 :param date: ``datetime.datetime`` instance. Defaults to
24 ``datetime.datetime.now()``.
25 :param branch: branch name, as string. If none given, default backend's
26 branch would be used.
27
28 :raises ``CommitError``: if any error occurs while committing
29 """
30 self.check_integrity(parents)
31
32 from .repository import MercurialRepository
33 if not isinstance(message, str) or not isinstance(author, str):
34 raise RepositoryError('Given message and author need to be '
35 '<str> instances')
36
37 if branch is None:
38 branch = MercurialRepository.DEFAULT_BRANCH_NAME
39 kwargs['branch'] = branch
40
41 def filectxfn(_repo, memctx, path):
42 """
43 Marks given path as added/changed/removed in a given _repo. This is
44 for internal mercurial commit function.
45 """
46
47 # check if this path is removed
48 if path in (node.path for node in self.removed):
49 # Raising exception is a way to mark node for removal
50 raise IOError(errno.ENOENT, '%s is deleted' % path)
51
52 # check if this path is added
53 for node in self.added:
54 if node.path == path:
55 return memfilectx(path=node.path,
56 data=(node.content.encode('utf8')
57 if not node.is_binary else node.content),
58 islink=False,
59 isexec=node.is_executable,
60 copied=False)
61
62 # or changed
63 for node in self.changed:
64 if node.path == path:
65 return memfilectx(path=node.path,
66 data=(node.content.encode('utf8')
67 if not node.is_binary else node.content),
68 islink=False,
69 isexec=node.is_executable,
70 copied=False)
71
72 raise RepositoryError("Given path haven't been marked as added,"
73 "changed or removed (%s)" % path)
74
75 parents = [None, None]
76 for i, parent in enumerate(self.parents):
77 if parent is not None:
78 parents[i] = parent._ctx.node()
79
80 if date and isinstance(date, datetime.datetime):
81 date = date.ctime()
82
83 commit_ctx = memctx(repo=self.repository._repo,
84 parents=parents,
85 text='',
86 files=self.get_paths(),
87 filectxfn=filectxfn,
88 user=author,
89 date=date,
90 extra=kwargs)
91
92 # injecting given _repo params
93 commit_ctx._text = message
94 commit_ctx._user = author
95 commit_ctx._date = date
96
97 # TODO: Catch exceptions!
98 n = self.repository._repo.commitctx(commit_ctx)
99 # Returns mercurial node
100 self._commit_ctx = commit_ctx # For reference
101 # Update vcs repository object & recreate mercurial _repo
102 # new_ctx = self.repository._repo[node]
103 # new_tip = self.repository.get_changeset(new_ctx.hex())
104 new_id = hex(n)
105 self.repository.revisions.append(new_id)
106 self._repo = self.repository._get_repo(create=False)
107 self.repository.branches = self.repository._get_branches()
108 tip = self.repository.get_changeset()
109 self.reset()
110 return tip
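A minimal sketch of the Mercurial in-memory commit above; note that ``commit`` insists on plain ``str`` message and author, and the names below are illustrative only:

    from rhodecode.lib.vcs.nodes import FileNode

    imc = repo.in_memory_changeset                   # MercurialInMemoryChangeset
    imc.add(FileNode('README', content='hello'))     # add() comes from the base in-memory changeset
    tip = imc.commit(message='add README',
                     author='Joe Doe <joe.doe@example.com>')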
@@ -0,0 +1,521 b''
1 import os
2 import time
3 import datetime
4 import urllib
5 import urllib2
6
7 from rhodecode.lib.vcs.backends.base import BaseRepository
8 from .workdir import MercurialWorkdir
9 from .changeset import MercurialChangeset
10 from .inmemory import MercurialInMemoryChangeset
11
12 from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError, \
13 ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, \
14 VCSError, TagAlreadyExistError, TagDoesNotExistError
15 from rhodecode.lib.vcs.utils import author_email, author_name, date_fromtimestamp, \
16 makedate, safe_unicode
17 from rhodecode.lib.vcs.utils.lazy import LazyProperty
18 from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict
19 from rhodecode.lib.vcs.utils.paths import abspath
20
21 from ...utils.hgcompat import ui, nullid, match, patch, diffopts, clone, \
22 get_contact, pull, localrepository, RepoLookupError, Abort, RepoError, hex
23
24
25 class MercurialRepository(BaseRepository):
26 """
27 Mercurial repository backend
28 """
29 DEFAULT_BRANCH_NAME = 'default'
30 scm = 'hg'
31
32 def __init__(self, repo_path, create=False, baseui=None, src_url=None,
33 update_after_clone=False):
34 """
35 Raises RepositoryError if repository could not be found at the given
36 ``repo_path``.
37
38 :param repo_path: local path of the repository
39 :param create=False: if set to True, would try to create repository if
40 it does not exist rather than raising exception
41 :param baseui=None: user data
42 :param src_url=None: would try to clone repository from given location
43 :param update_after_clone=False: sets update of working copy after
44 making a clone
45 """
46
47 if not isinstance(repo_path, str):
48 raise VCSError('Mercurial backend requires repository path to '
49 'be instance of <str> got %s instead' %
50 type(repo_path))
51
52 self.path = abspath(repo_path)
53 self.baseui = baseui or ui.ui()
54 # We've set path and ui, now we can set _repo itself
55 self._repo = self._get_repo(create, src_url, update_after_clone)
56
57 @property
58 def _empty(self):
59 """
60 Checks if repository is empty without any changesets
61 """
62 # TODO: Following raises errors when using InMemoryChangeset...
63 # return len(self._repo.changelog) == 0
64 return len(self.revisions) == 0
65
66 @LazyProperty
67 def revisions(self):
68 """
69 Returns list of revisions' ids, in ascending order. Being lazy
70 attribute allows external tools to inject shas from cache.
71 """
72 return self._get_all_revisions()
73
74 @LazyProperty
75 def name(self):
76 return os.path.basename(self.path)
77
78 @LazyProperty
79 def branches(self):
80 return self._get_branches()
81
82 def _get_branches(self, closed=False):
83 """
84 Gets branches for this repository.
85 Returns only open (not closed) branches by default
86
87 :param closed: return also closed branches for mercurial
88 """
89
90 if self._empty:
91 return {}
92
93 def _branchtags(localrepo):
94 """
95 Patched version of mercurial branchtags to not return the closed
96 branches
97
98 :param localrepo: localrepository instance
99 """
100
101 bt = {}
102 bt_closed = {}
103 for bn, heads in localrepo.branchmap().iteritems():
104 tip = heads[-1]
105 if 'close' in localrepo.changelog.read(tip)[5]:
106 bt_closed[bn] = tip
107 else:
108 bt[bn] = tip
109
110 if closed:
111 bt.update(bt_closed)
112 return bt
113
114 sortkey = lambda ctx: ctx[0] # sort by name
115 _branches = [(safe_unicode(n), hex(h),) for n, h in
116 _branchtags(self._repo).items()]
117
118 return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
119
120 @LazyProperty
121 def tags(self):
122 """
123 Gets tags for this repository
124 """
125 return self._get_tags()
126
127 def _get_tags(self):
128 if self._empty:
129 return {}
130
131 sortkey = lambda ctx: ctx[0] # sort by name
132 _tags = [(safe_unicode(n), hex(h),) for n, h in
133 self._repo.tags().items()]
134
135 return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
136
137 def tag(self, name, user, revision=None, message=None, date=None,
138 **kwargs):
139 """
140 Creates and returns a tag for the given ``revision``.
141
142 :param name: name for new tag
143 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
144 :param revision: changeset id for which new tag would be created
145 :param message: message of the tag's commit
146 :param date: date of tag's commit
147
148 :raises TagAlreadyExistError: if tag with same name already exists
149 """
150 if name in self.tags:
151 raise TagAlreadyExistError("Tag %s already exists" % name)
152 changeset = self.get_changeset(revision)
153 local = kwargs.setdefault('local', False)
154
155 if message is None:
156 message = "Added tag %s for changeset %s" % (name,
157 changeset.short_id)
158
159 if date is None:
160 date = datetime.datetime.now().ctime()
161
162 try:
163 self._repo.tag(name, changeset._ctx.node(), message, local, user,
164 date)
165 except Abort, e:
166 raise RepositoryError(e.message)
167
168 # Reinitialize tags
169 self.tags = self._get_tags()
170 tag_id = self.tags[name]
171
172 return self.get_changeset(revision=tag_id)
173
174 def remove_tag(self, name, user, message=None, date=None):
175 """
176 Removes tag with the given ``name``.
177
178 :param name: name of the tag to be removed
179 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
180 :param message: message of the tag's removal commit
181 :param date: date of tag's removal commit
182
183 :raises TagDoesNotExistError: if tag with given name does not exist
184 """
185 if name not in self.tags:
186 raise TagDoesNotExistError("Tag %s does not exist" % name)
187 if message is None:
188 message = "Removed tag %s" % name
189 if date is None:
190 date = datetime.datetime.now().ctime()
191 local = False
192
193 try:
194 self._repo.tag(name, nullid, message, local, user, date)
195 self.tags = self._get_tags()
196 except Abort, e:
197 raise RepositoryError(e.message)
198
199 @LazyProperty
200 def bookmarks(self):
201 """
202 Gets bookmarks for this repository
203 """
204 return self._get_bookmarks()
205
206 def _get_bookmarks(self):
207 if self._empty:
208 return {}
209
210 sortkey = lambda ctx: ctx[0] # sort by name
211 _bookmarks = [(safe_unicode(n), hex(h),) for n, h in
212 self._repo._bookmarks.items()]
213 return OrderedDict(sorted(_bookmarks, key=sortkey, reverse=True))
214
215 def _get_all_revisions(self):
216
217 return map(lambda x: hex(x[7]), self._repo.changelog.index)[:-1]
218
219 def get_diff(self, rev1, rev2, path='', ignore_whitespace=False,
220 context=3):
221 """
222 Returns (git like) *diff*, as plain text. Shows changes introduced by
223 ``rev2`` since ``rev1``.
224
225 :param rev1: Entry point from which diff is shown. Can be
226 ``self.EMPTY_CHANGESET`` - in this case, patch showing all
227 the changes since empty state of the repository until ``rev2``
228 :param rev2: Until which revision changes should be shown.
229 :param ignore_whitespace: If set to ``True``, would not show whitespace
230 changes. Defaults to ``False``.
231 :param context: How many lines before/after changed lines should be
232 shown. Defaults to ``3``.
233 """
234 # Check if given revisions are present at repository (may raise
235 # ChangesetDoesNotExistError)
236 if rev1 != self.EMPTY_CHANGESET:
237 self.get_changeset(rev1)
238 self.get_changeset(rev2)
239
240 file_filter = match(self.path, '', [path])
241 return ''.join(patch.diff(self._repo, rev1, rev2, match=file_filter,
242 opts=diffopts(git=True,
243 ignorews=ignore_whitespace,
244 context=context)))
245
246 def _check_url(self, url):
247 """
248 Function will check given url and try to verify if it's a valid
249 link. Sometimes it may happen that mercurial will issue a basic
250 auth request that can cause whole API to hang when used from python
251 or other external calls.
252
253 On failures it'll raise urllib2.URLError; returns True if the url
254 responds with code 200 or if it points to a local path
255 """
256
257 from mercurial.util import url as Url
258
259 # those auth handlers are patched for a python 2.6.5 bug causing
260 # infinite looping when given invalid resources
261 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
262
263 # check first if it's not a local url
264 if os.path.isdir(url) or url.startswith('file:'):
265 return True
266
267 handlers = []
268 test_uri, authinfo = Url(url).authinfo()
269
270 if authinfo:
271 #create a password manager
272 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
273 passmgr.add_password(*authinfo)
274
275 handlers.extend((httpbasicauthhandler(passmgr),
276 httpdigestauthhandler(passmgr)))
277
278 o = urllib2.build_opener(*handlers)
279 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
280 ('Accept', 'application/mercurial-0.1')]
281
282 q = {"cmd": 'between'}
283 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
284 qs = '?%s' % urllib.urlencode(q)
285 cu = "%s%s" % (test_uri, qs)
286 req = urllib2.Request(cu, None, {})
287
288 try:
289 resp = o.open(req)
290 return resp.code == 200
291 except Exception, e:
292 # means it cannot be cloned
293 raise urllib2.URLError(e)
294
295 def _get_repo(self, create, src_url=None, update_after_clone=False):
296 """
297 Function will check for mercurial repository in given path and return
298 a localrepo object. If there is no repository in that path it will
299 raise an exception unless ``create`` parameter is set to True - in
300 that case repository would be created and returned.
301 If ``src_url`` is given, would try to clone repository from the
302 location given by ``src_url``. Additionally it'll update the
303 working copy according to the ``update_after_clone`` flag
304 """
305 try:
306 if src_url:
307 url = str(self._get_url(src_url))
308 opts = {}
309 if not update_after_clone:
310 opts.update({'noupdate': True})
311 try:
312 self._check_url(url)
313 clone(self.baseui, url, self.path, **opts)
314 # except urllib2.URLError:
315 # raise Abort("Got HTTP 404 error")
316 except Exception:
317 raise
318 # Don't try to create if we've already cloned repo
319 create = False
320 return localrepository(self.baseui, self.path, create=create)
321 except (Abort, RepoError), err:
322 if create:
323 msg = "Cannot create repository at %s. Original error was %s"\
324 % (self.path, err)
325 else:
326 msg = "Not valid repository at %s. Original error was %s"\
327 % (self.path, err)
328 raise RepositoryError(msg)
329
330 @LazyProperty
331 def in_memory_changeset(self):
332 return MercurialInMemoryChangeset(self)
333
334 @LazyProperty
335 def description(self):
336 undefined_description = u'unknown'
337 return safe_unicode(self._repo.ui.config('web', 'description',
338 undefined_description, untrusted=True))
339
340 @LazyProperty
341 def contact(self):
342 undefined_contact = u'Unknown'
343 return safe_unicode(get_contact(self._repo.ui.config)
344 or undefined_contact)
345
346 @LazyProperty
347 def last_change(self):
348 """
349 Returns last change made on this repository as datetime object
350 """
351 return date_fromtimestamp(self._get_mtime(), makedate()[1])
352
353 def _get_mtime(self):
354 try:
355 return time.mktime(self.get_changeset().date.timetuple())
356 except RepositoryError:
357 #fallback to filesystem
358 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
359 st_path = os.path.join(self.path, '.hg', "store")
360 if os.path.exists(cl_path):
361 return os.stat(cl_path).st_mtime
362 else:
363 return os.stat(st_path).st_mtime
364
365 def _get_hidden(self):
366 return self._repo.ui.configbool("web", "hidden", untrusted=True)
367
368 def _get_revision(self, revision):
369 """
370 Gets an ID revision given as str. This will always return a full
371 40 char revision number
372
373 :param revision: str or int or None
374 """
375
376 if self._empty:
377 raise EmptyRepositoryError("There are no changesets yet")
378
379 if revision in [-1, 'tip', None]:
380 revision = 'tip'
381
382 try:
383 revision = hex(self._repo.lookup(revision))
384 except (IndexError, ValueError, RepoLookupError, TypeError):
385 raise ChangesetDoesNotExistError("Revision %r does not "
386 "exist for this repository %s" \
387 % (revision, self))
388 return revision
389
390 def _get_archives(self, archive_name='tip'):
391 allowed = self.baseui.configlist("web", "allow_archive",
392 untrusted=True)
393 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
394 if i[0] in allowed or self._repo.ui.configbool("web",
395 "allow" + i[0],
396 untrusted=True):
397 yield {"type": i[0], "extension": i[1], "node": archive_name}
398
399 def _get_url(self, url):
400 """
401 Returns normalized url. If scheme is not given, falls back
402 to the filesystem
403 (``file:///``) scheme.
404 """
405 url = str(url)
406 if url != 'default' and not '://' in url:
407 url = "file:" + urllib.pathname2url(url)
408 return url
409
410 def get_changeset(self, revision=None):
411 """
412 Returns ``MercurialChangeset`` object representing repository's
413 changeset at the given ``revision``.
414 """
415 revision = self._get_revision(revision)
416 changeset = MercurialChangeset(repository=self, revision=revision)
417 return changeset
418
419 def get_changesets(self, start=None, end=None, start_date=None,
420 end_date=None, branch_name=None, reverse=False):
421 """
422 Returns iterator of ``MercurialChangeset`` objects from start to end
423 (both are inclusive)
424
425 :param start: None, str, int or mercurial lookup format
426 :param end: None, str, int or mercurial lookup format
427 :param start_date:
428 :param end_date:
429 :param branch_name:
430 :param reverse: return changesets in reversed order
431 """
432
433 start_raw_id = self._get_revision(start)
434 start_pos = self.revisions.index(start_raw_id) if start else None
435 end_raw_id = self._get_revision(end)
436 end_pos = self.revisions.index(end_raw_id) if end else None
437
438 if None not in [start, end] and start_pos > end_pos:
439 raise RepositoryError("start revision '%s' cannot be "
440 "after end revision '%s'" % (start, end))
441
442 if branch_name and branch_name not in self.branches.keys():
443 raise BranchDoesNotExistError('Such branch %s does not exist for'
444 ' this repository' % branch_name)
445 if end_pos is not None:
446 end_pos += 1
447
448 slice_ = reversed(self.revisions[start_pos:end_pos]) if reverse else \
449 self.revisions[start_pos:end_pos]
450
451 for id_ in slice_:
452 cs = self.get_changeset(id_)
453 if branch_name and cs.branch != branch_name:
454 continue
455 if start_date and cs.date < start_date:
456 continue
457 if end_date and cs.date > end_date:
458 continue
459
460 yield cs
461
462 def pull(self, url):
463 """
464 Tries to pull changes from external location.
465 """
466 url = self._get_url(url)
467 try:
468 pull(self.baseui, self._repo, url)
469 except Abort, err:
470 # Propagate error but with vcs's type
471 raise RepositoryError(str(err))
472
473 @LazyProperty
474 def workdir(self):
475 """
476 Returns ``Workdir`` instance for this repository.
477 """
478 return MercurialWorkdir(self)
479
480 def get_config_value(self, section, name, config_file=None):
481 """
482 Returns configuration value for a given [``section``] and ``name``.
483
484 :param section: Section we want to retrieve value from
485 :param name: Name of configuration we want to retrieve
486 :param config_file: A path to file which should be used to retrieve
487 configuration from (might also be a list of file paths)
488 """
489 if config_file is None:
490 config_file = []
491 elif isinstance(config_file, basestring):
492 config_file = [config_file]
493
494 config = self._repo.ui
495 for path in config_file:
496 config.readconfig(path)
497 return config.config(section, name)
498
499 def get_user_name(self, config_file=None):
500 """
501 Returns user's name from global configuration file.
502
503 :param config_file: A path to file which should be used to retrieve
504 configuration from (might also be a list of file paths)
505 """
506 username = self.get_config_value('ui', 'username')
507 if username:
508 return author_name(username)
509 return None
510
511 def get_user_email(self, config_file=None):
512 """
513 Returns user's email from global configuration file.
514
515 :param config_file: A path to file which should be used to retrieve
516 configuration from (might also be a list of file paths)
517 """
518 username = self.get_config_value('ui', 'username')
519 if username:
520 return author_email(username)
521 return None
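A hedged usage sketch of ``MercurialRepository``; the local path and remote url are placeholders:

    from rhodecode.lib.vcs.backends.hg import MercurialRepository

    repo = MercurialRepository('/tmp/example-hg')    # open an existing repository
    print repo.branches.keys(), repo.tags.keys()
    tip = repo.get_changeset()                       # 'tip' when no revision given
    print repo.get_diff(repo.EMPTY_CHANGESET, tip.raw_id)
    repo.pull('http://example.com/some-repo')        # wraps mercurial pull; may raise RepositoryError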
@@ -0,0 +1,21 b''
1 from rhodecode.lib.vcs.backends.base import BaseWorkdir
2 from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError
3
4 from ...utils.hgcompat import hg_merge
5
6
7 class MercurialWorkdir(BaseWorkdir):
8
9 def get_branch(self):
10 return self.repository._repo.dirstate.branch()
11
12 def get_changeset(self):
13 return self.repository.get_changeset()
14
15 def checkout_branch(self, branch=None):
16 if branch is None:
17 branch = self.repository.DEFAULT_BRANCH_NAME
18 if branch not in self.repository.branches:
19 raise BranchDoesNotExistError
20
21 hg_merge.update(self.repository._repo, branch, False, False, None)
1 NO CONTENT: new file 100644
@@ -0,0 +1,33 b''
1 import os
2 import tempfile
3 from rhodecode.lib.vcs.utils.paths import get_user_home
4
5 abspath = lambda * p: os.path.abspath(os.path.join(*p))
6
7 VCSRC_PATH = os.environ.get('VCSRC_PATH')
8
9 if not VCSRC_PATH:
10 HOME_ = get_user_home()
11 if not HOME_:
12 HOME_ = tempfile.gettempdir()
13
14 VCSRC_PATH = VCSRC_PATH or abspath(HOME_, '.vcsrc')
15 if os.path.isdir(VCSRC_PATH):
16 VCSRC_PATH = os.path.join(VCSRC_PATH, '__init__.py')
17
23 ARCHIVE_SPECS = {
24 'tar': ('application/x-tar', '.tar'),
25 'tbz2': ('application/x-bzip2', '.tar.bz2'),
26 'tgz': ('application/x-gzip', '.tar.gz'),
27 'zip': ('application/zip', '.zip'),
28 }
29
30 BACKENDS = {
31 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
32 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
33 }
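The settings above are plain data: a backend alias resolves to a dotted class path and an archive kind to a (mimetype, extension) pair, e.g.:

    from rhodecode.lib.vcs.conf import settings

    print settings.BACKENDS['hg']          # 'rhodecode.lib.vcs.backends.hg.MercurialRepository'
    mimetype, ext = settings.ARCHIVE_SPECS['tgz']
    print mimetype, ext                    # 'application/x-gzip' '.tar.gz'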
@@ -0,0 +1,93 b''
1 # -*- coding: utf-8 -*-
2 """
3 vcs.exceptions
4 ~~~~~~~~~~~~~~
5
6 Custom exceptions module
7
8 :created_on: Apr 8, 2010
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
10 """
11
12
13 class VCSError(Exception):
14 pass
15
16
17 class RepositoryError(VCSError):
18 pass
19
20
21 class EmptyRepositoryError(RepositoryError):
22 pass
23
24
25 class TagAlreadyExistError(RepositoryError):
26 pass
27
28
29 class TagDoesNotExistError(RepositoryError):
30 pass
31
32
33 class BranchAlreadyExistError(RepositoryError):
34 pass
35
36
37 class BranchDoesNotExistError(RepositoryError):
38 pass
39
40
41 class ChangesetError(RepositoryError):
42 pass
43
44
45 class ChangesetDoesNotExistError(ChangesetError):
46 pass
47
48
49 class CommitError(RepositoryError):
50 pass
51
52
53 class NothingChangedError(CommitError):
54 pass
55
56
57 class NodeError(VCSError):
58 pass
59
60
61 class RemovedFileNodeError(NodeError):
62 pass
63
64
65 class NodeAlreadyExistsError(CommitError):
66 pass
67
68
69 class NodeAlreadyChangedError(CommitError):
70 pass
71
72
73 class NodeDoesNotExistError(CommitError):
74 pass
75
76
77 class NodeNotChangedError(CommitError):
78 pass
79
80
81 class NodeAlreadyAddedError(CommitError):
82 pass
83
84
85 class NodeAlreadyRemovedError(CommitError):
86 pass
87
88
89 class ImproperArchiveTypeError(VCSError):
90 pass
91
92 class CommandError(VCSError):
93 pass
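Because every exception above ultimately derives from ``VCSError``, callers can catch narrowly or broadly; a small sketch where the ``repo`` object and sha are placeholders:

    from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError, VCSError

    try:
        repo.get_changeset('f' * 40)                 # a sha that most likely does not exist
    except ChangesetDoesNotExistError, e:            # the specific failure
        print 'no such changeset:', e
    except VCSError, e:                              # any other vcs-level problem
        print 'vcs error:', e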
@@ -0,0 +1,551 b''
1 # -*- coding: utf-8 -*-
2 """
3 vcs.nodes
4 ~~~~~~~~~
5
6 Module holding everything related to vcs nodes.
7
8 :created_on: Apr 8, 2010
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
10 """
11 import stat
12 import posixpath
13 import mimetypes
14
15 from rhodecode.lib.vcs.utils.lazy import LazyProperty
16 from rhodecode.lib.vcs.utils import safe_unicode
17 from rhodecode.lib.vcs.exceptions import NodeError
18 from rhodecode.lib.vcs.exceptions import RemovedFileNodeError
19
20 from pygments import lexers
21
22
23 class NodeKind:
24 DIR = 1
25 FILE = 2
26
27
28 class NodeState:
29 ADDED = u'added'
30 CHANGED = u'changed'
31 NOT_CHANGED = u'not changed'
32 REMOVED = u'removed'
33
34
35 class NodeGeneratorBase(object):
36 """
37 Base class for removed, added and changed filenodes; it's a lazy generator
38 class that will create filenodes only on iteration or call.
39
40 The len method doesn't need to create filenodes at all
41 """
42
43 def __init__(self, current_paths, cs):
44 self.cs = cs
45 self.current_paths = current_paths
46
47 def __call__(self):
48 return [n for n in self]
49
50 def __getslice__(self, i, j):
51 for p in self.current_paths[i:j]:
52 yield self.cs.get_node(p)
53
54 def __len__(self):
55 return len(self.current_paths)
56
57 def __iter__(self):
58 for p in self.current_paths:
59 yield self.cs.get_node(p)
60
61
62 class AddedFileNodesGenerator(NodeGeneratorBase):
63 """
64 Class holding Added files for current changeset
65 """
66 pass
67
68
69 class ChangedFileNodesGenerator(NodeGeneratorBase):
70 """
71 Class holding Changed files for current changeset
72 """
73 pass
74
75
76 class RemovedFileNodesGenerator(NodeGeneratorBase):
77 """
78 Class holding removed files for current changeset
79 """
80 def __iter__(self):
81 for p in self.current_paths:
82 yield RemovedFileNode(path=p)
83
84 def __getslice__(self, i, j):
85 for p in self.current_paths[i:j]:
86 yield RemovedFileNode(path=p)
87
88
89 class Node(object):
90 """
91 Simplest class representing file or directory on repository. SCM backends
92 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
93 directly.
94
95 Node's ``path`` cannot start with slash as we operate on *relative* paths
96 only. Moreover, every single node is identified by the ``path`` attribute,
97 so it cannot end with slash either. Otherwise, path could lead to mistakes.
98 """
99
100 def __init__(self, path, kind):
101 if path.startswith('/'):
102 raise NodeError("Cannot initialize Node objects with slash at "
103 "the beginning as only relative paths are supported")
104 self.path = path.rstrip('/')
105 if path == '' and kind != NodeKind.DIR:
106 raise NodeError("Only DirNode and its subclasses may be "
107 "initialized with empty path")
108 self.kind = kind
109 #self.dirs, self.files = [], []
110 if self.is_root() and not self.is_dir():
111 raise NodeError("Root node cannot be FILE kind")
112
113 @LazyProperty
114 def parent(self):
115 parent_path = self.get_parent_path()
116 if parent_path:
117 if self.changeset:
118 return self.changeset.get_node(parent_path)
119 return DirNode(parent_path)
120 return None
121
122 @LazyProperty
123 def name(self):
124 """
125 Returns name of the node, i.e. the last part of its path
126 (so for path ``foo/bar/baz.txt`` only ``baz.txt`` is returned).
127 """
128 return safe_unicode(self.path.rstrip('/').split('/')[-1])
129
130 def _get_kind(self):
131 return self._kind
132
133 def _set_kind(self, kind):
134 if hasattr(self, '_kind'):
135 raise NodeError("Cannot change node's kind")
136 else:
137 self._kind = kind
138 # Post setter check (path's trailing slash)
139 if self.path.endswith('/'):
140 raise NodeError("Node's path cannot end with slash")
141
142 kind = property(_get_kind, _set_kind)
143
144 def __cmp__(self, other):
145 """
146 Comparator using name of the node, needed for quick list sorting.
147 """
148 kind_cmp = cmp(self.kind, other.kind)
149 if kind_cmp:
150 return kind_cmp
151 return cmp(self.name, other.name)
152
153 def __eq__(self, other):
154 for attr in ['name', 'path', 'kind']:
155 if getattr(self, attr) != getattr(other, attr):
156 return False
157 if self.is_file():
158 if self.content != other.content:
159 return False
160 else:
161 # For DirNode's check without entering each dir
162 self_nodes_paths = list(sorted(n.path for n in self.nodes))
163 other_nodes_paths = list(sorted(n.path for n in other.nodes))
164 if self_nodes_paths != other_nodes_paths:
165 return False
166 return True
167
168 def __ne__(self, other):
169 return not self.__eq__(other)
170
171 def __repr__(self):
172 return '<%s %r>' % (self.__class__.__name__, self.path)
173
174 def __str__(self):
175 return self.__repr__()
176
177 def __unicode__(self):
178 return self.name
179
180 def get_parent_path(self):
181 """
182 Returns node's parent path or empty string if node is root.
183 """
184 if self.is_root():
185 return ''
186 return posixpath.dirname(self.path.rstrip('/')) + '/'
187
188 def is_file(self):
189 """
190 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
191 otherwise.
192 """
193 return self.kind == NodeKind.FILE
194
195 def is_dir(self):
196 """
197 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
198 otherwise.
199 """
200 return self.kind == NodeKind.DIR
201
202 def is_root(self):
203 """
204 Returns ``True`` if node is a root node and ``False`` otherwise.
205 """
206 return self.kind == NodeKind.DIR and self.path == ''
207
208 @LazyProperty
209 def added(self):
210 return self.state is NodeState.ADDED
211
212 @LazyProperty
213 def changed(self):
214 return self.state is NodeState.CHANGED
215
216 @LazyProperty
217 def not_changed(self):
218 return self.state is NodeState.NOT_CHANGED
219
220 @LazyProperty
221 def removed(self):
222 return self.state is NodeState.REMOVED
223
224
225 class FileNode(Node):
226 """
227 Class representing file nodes.
228
229 :attribute: path: path to the node, relative to repository's root
230 :attribute: content: if given, sets the content of the file explicitly
231 :attribute: changeset: if given, content is fetched lazily from it on first access
232 :attribute: mode: octal stat mode for a node. Default is 0100644.
233 """
234
235 def __init__(self, path, content=None, changeset=None, mode=None):
236 """
237 Only one of ``content`` and ``changeset`` may be given. Passing both
238 would raise ``NodeError`` exception.
239
240 :param path: relative path to the node
241 :param content: content may be passed to constructor
242 :param changeset: if given, will use it to lazily fetch content
243 :param mode: octal representation of ST_MODE (i.e. 0100644)
244 """
245
246 if content and changeset:
247 raise NodeError("Cannot use both content and changeset")
248 super(FileNode, self).__init__(path, kind=NodeKind.FILE)
249 self.changeset = changeset
250 self._content = content
251 self._mode = mode or 0100644
252
253 @LazyProperty
254 def mode(self):
255 """
256 Returns lazily mode of the FileNode. If ``changeset`` is not set, would
257 use value given at initialization or 0100644 (default).
258 """
259 if self.changeset:
260 mode = self.changeset.get_file_mode(self.path)
261 else:
262 mode = self._mode
263 return mode
264
265 @property
266 def content(self):
267 """
268 Returns lazily content of the FileNode. If possible, would try to
269 decode content from UTF-8.
270 """
271 if self.changeset:
272 content = self.changeset.get_file_content(self.path)
273 else:
274 content = self._content
275
276 if bool(content and '\0' in content):
277 return content
278 return safe_unicode(content)
279
280 @LazyProperty
281 def size(self):
282 if self.changeset:
283 return self.changeset.get_file_size(self.path)
284 raise NodeError("Cannot retrieve size of the file without related "
285 "changeset attribute")
286
287 @LazyProperty
288 def message(self):
289 if self.changeset:
290 return self.last_changeset.message
291 raise NodeError("Cannot retrieve message of the file without related "
292 "changeset attribute")
293
294 @LazyProperty
295 def last_changeset(self):
296 if self.changeset:
297 return self.changeset.get_file_changeset(self.path)
298 raise NodeError("Cannot retrieve last changeset of the file without "
299 "related changeset attribute")
300
301 def get_mimetype(self):
302 """
303 Mimetype is calculated based on the file's content. If ``_mimetype``
304 attribute is available, it will be returned (backends which store
305 mimetypes or can easily recognize them, should set this private
306 attribute to indicate that type should *NOT* be calculated).
307 """
308 if hasattr(self, '_mimetype'):
309 if (isinstance(self._mimetype, (tuple, list)) and
310 len(self._mimetype) == 2):
311 return self._mimetype
312 else:
313 raise NodeError('given _mimetype attribute must be a '
314 '2-element list or tuple')
315
316 mtype, encoding = mimetypes.guess_type(self.name)
317
318 if mtype is None:
319 if self.is_binary:
320 mtype = 'application/octet-stream'
321 encoding = None
322 else:
323 mtype = 'text/plain'
324 encoding = None
325 return mtype, encoding
326
327 @LazyProperty
328 def mimetype(self):
329 """
330 Wrapper around full mimetype info. It returns only type of fetched
331 mimetype without the encoding part. Use the get_mimetype function to
332 fetch the full (type, encoding) pair.
333 """
334 return self.get_mimetype()[0]
335
336 @LazyProperty
337 def mimetype_main(self):
338 return self.mimetype.split('/')[0]
339
340 @LazyProperty
341 def lexer(self):
342 """
343 Returns pygments' lexer class. Would try to guess lexer taking file's
344 content, name and mimetype.
345 """
346 try:
347 lexer = lexers.guess_lexer_for_filename(self.name, self.content)
348 except lexers.ClassNotFound:
349 lexer = lexers.TextLexer()
350 # returns first alias
351 return lexer
352
353 @LazyProperty
354 def lexer_alias(self):
355 """
356 Returns first alias of the lexer guessed for this file.
357 """
358 return self.lexer.aliases[0]
359
360 @LazyProperty
361 def history(self):
362 """
363 Returns a list of changeset for this file in which the file was changed
364 """
365 if self.changeset is None:
366 raise NodeError('Unable to get changeset for this FileNode')
367 return self.changeset.get_file_history(self.path)
368
369 @LazyProperty
370 def annotate(self):
371 """
372 Returns a list of three element tuples with lineno,changeset and line
373 """
374 if self.changeset is None:
375 raise NodeError('Unable to get changeset for this FileNode')
376 return self.changeset.get_file_annotate(self.path)
377
378 @LazyProperty
379 def state(self):
380 if not self.changeset:
381 raise NodeError("Cannot check state of the node if it's not "
382 "linked with changeset")
383 elif self.path in (node.path for node in self.changeset.added):
384 return NodeState.ADDED
385 elif self.path in (node.path for node in self.changeset.changed):
386 return NodeState.CHANGED
387 else:
388 return NodeState.NOT_CHANGED
389
390 @property
391 def is_binary(self):
392 """
393 Returns True if file has binary content.
394 """
395 bin = '\0' in self.content
396 return bin
397
398 @LazyProperty
399 def extension(self):
400 """Returns filenode extension"""
401 return self.name.split('.')[-1]
402
403 def is_executable(self):
404 """
405 Returns ``True`` if file has executable flag turned on.
406 """
407 return bool(self.mode & stat.S_IXUSR)
408
409
410 class RemovedFileNode(FileNode):
411 """
412 Dummy FileNode class - trying to access any public attribute except path,
413 name, kind or state (or methods/attributes checking those) would raise
414 RemovedFileNodeError.
415 """
416 ALLOWED_ATTRIBUTES = ['name', 'path', 'state', 'is_root', 'is_file',
417 'is_dir', 'kind', 'added', 'changed', 'not_changed', 'removed']
418
419 def __init__(self, path):
420 """
421 :param path: relative path to the node
422 """
423 super(RemovedFileNode, self).__init__(path=path)
424
425 def __getattribute__(self, attr):
426 if attr.startswith('_') or attr in RemovedFileNode.ALLOWED_ATTRIBUTES:
427 return super(RemovedFileNode, self).__getattribute__(attr)
428 raise RemovedFileNodeError("Cannot access attribute %s on "
429 "RemovedFileNode" % attr)
430
431 @LazyProperty
432 def state(self):
433 return NodeState.REMOVED
434
435
436 class DirNode(Node):
437 """
438 DirNode stores list of files and directories within this node.
439 Nodes may be used standalone but within repository context they
440 lazily fetch data within the same repository's changeset.
441 """
442
443 def __init__(self, path, nodes=(), changeset=None):
444 """
445 Only one of ``nodes`` and ``changeset`` may be given. Passing both
446 would raise ``NodeError`` exception.
447
448 :param path: relative path to the node
449 :param nodes: list of nodes that may be passed to the constructor
450 :param changeset: if given, will use it to lazily fetch content
451 :param size: always 0 for ``DirNode``
452 """
453 if nodes and changeset:
454 raise NodeError("Cannot use both nodes and changeset")
455 super(DirNode, self).__init__(path, NodeKind.DIR)
456 self.changeset = changeset
457 self._nodes = nodes
458
459 @LazyProperty
460 def content(self):
461 raise NodeError("%s represents a dir and has no ``content`` attribute"
462 % self)
463
464 @LazyProperty
465 def nodes(self):
466 if self.changeset:
467 nodes = self.changeset.get_nodes(self.path)
468 else:
469 nodes = self._nodes
470 self._nodes_dict = dict((node.path, node) for node in nodes)
471 return sorted(nodes)
472
473 @LazyProperty
474 def files(self):
475 return sorted((node for node in self.nodes if node.is_file()))
476
477 @LazyProperty
478 def dirs(self):
479 return sorted((node for node in self.nodes if node.is_dir()))
480
481 def __iter__(self):
482 for node in self.nodes:
483 yield node
484
485 def get_node(self, path):
486 """
487 Returns node from within this particular ``DirNode``, so it is not
488 allowed to fetch, e.g. a node located at 'docs/api/index.rst' from node
489 'docs'. In order to access deeper nodes one must fetch nodes between
490 them first - this would work::
491
492 docs = root.get_node('docs')
493 docs.get_node('api').get_node('index.rst')
494
495 :param path: relative to the current node
496
497 .. note::
498 To access nodes lazily (as in the example above) the node has to be
499 initialized with a related changeset object - without it the node is out
500 of context and may know nothing about anything other than its nearest
501 (located at the same level) nodes.
502 """
503 try:
504 path = path.rstrip('/')
505 if path == '':
506 raise NodeError("Cannot retrieve node without path")
507 self.nodes # access nodes first in order to set _nodes_dict
508 paths = path.split('/')
509 if len(paths) == 1:
510 if not self.is_root():
511 path = '/'.join((self.path, paths[0]))
512 else:
513 path = paths[0]
514 return self._nodes_dict[path]
515 elif len(paths) > 1:
516 if self.changeset is None:
517 raise NodeError("Cannot access deeper "
518 "nodes without changeset")
519 else:
520 path1, path2 = paths[0], '/'.join(paths[1:])
521 return self.get_node(path1).get_node(path2)
522 else:
523 raise KeyError
524 except KeyError:
525 raise NodeError("Node does not exist at %s" % path)
526
527 @LazyProperty
528 def state(self):
529 raise NodeError("Cannot access state of DirNode")
530
531 @LazyProperty
532 def size(self):
533 size = 0
534 for root, dirs, files in self.changeset.walk(self.path):
535 for f in files:
536 size += f.size
537
538 return size
539
540
541 class RootNode(DirNode):
542 """
543 DirNode being the root node of the repository.
544 """
545
546 def __init__(self, nodes=(), changeset=None):
547 super(RootNode, self).__init__(path='', nodes=nodes,
548 changeset=changeset)
549
550 def __repr__(self):
551 return '<%s>' % self.__class__.__name__
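# --- Usage sketch (illustrative, not part of the module) --------------------
# A minimal, hedged example of using FileNode/DirNode standalone, i.e. without
# a changeset. The file name and content below are made up for illustration.
from rhodecode.lib.vcs.nodes import DirNode, FileNode, NodeKind

readme = FileNode('docs/readme.txt', content='Hello VCS')
assert readme.kind == NodeKind.FILE
assert readme.extension == 'txt'
assert readme.mimetype == 'text/plain'
assert not readme.is_binary

docs = DirNode('docs', nodes=[readme])
assert [n.name for n in docs.files] == [u'readme.txt']
# standalone nodes only know their direct children; deeper access or state
# checks require nodes bound to a changeset
assert docs.get_node('readme.txt') is readme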
@@ -0,0 +1,133 b''
1 """
2 This module provides some useful tools for ``vcs`` like annotate/diff html
3 output. It also includes some internal helpers.
4 """
5 import sys
6 import time
7 import datetime
8
9
10 def makedate():
11 lt = time.localtime()
12 if lt[8] == 1 and time.daylight:
13 tz = time.altzone
14 else:
15 tz = time.timezone
16 return time.mktime(lt), tz
17
18
19 def date_fromtimestamp(unixts, tzoffset=0):
20 """
21 Makes a local datetime object out of unix timestamp
22
23 :param unixts:
24 :param tzoffset:
25 """
26
27 return datetime.datetime.fromtimestamp(float(unixts))
28
29
30 def safe_unicode(str_, from_encoding='utf8'):
31 """
32 safe unicode function. Does a few tricks to turn str_ into unicode
33
34 In case of UnicodeDecodeError we try to return it with the encoding detected
35 by the chardet library; if that fails we fall back to unicode with errors replaced
36
37 :param str_: string to decode
38 :rtype: unicode
39 :returns: unicode object
40 """
41 if isinstance(str_, unicode):
42 return str_
43
44 try:
45 return unicode(str_)
46 except UnicodeDecodeError:
47 pass
48
49 try:
50 return unicode(str_, from_encoding)
51 except UnicodeDecodeError:
52 pass
53
54 try:
55 import chardet
56 encoding = chardet.detect(str_)['encoding']
57 if encoding is None:
58 raise Exception()
59 return str_.decode(encoding)
60 except (ImportError, UnicodeDecodeError, Exception):
61 return unicode(str_, from_encoding, 'replace')
62
63
64 def safe_str(unicode_, to_encoding='utf8'):
65 """
66 safe str function. Does a few tricks to turn unicode_ into string
67
68 In case of UnicodeEncodeError we try to return it with the encoding detected
69 by the chardet library; if that fails we fall back to string with errors replaced
70
71 :param unicode_: unicode to encode
72 :rtype: str
73 :returns: str object
74 """
75
76 if isinstance(unicode_, str):
77 return unicode_
78
79 try:
80 return unicode_.encode(to_encoding)
81 except UnicodeEncodeError:
82 pass
83
84 try:
85 import chardet
86 encoding = chardet.detect(unicode_)['encoding']
88 if encoding is None:
89 raise Exception()
90
91 return unicode_.encode(encoding)
92 except (ImportError, UnicodeEncodeError, Exception):
93 return unicode_.encode(to_encoding, 'replace')
94
96
97
98 def author_email(author):
99 """
100 Returns email address of given author.
101 If either of the <, > signs is missing, it falls back to regex findall()
102 and returns first found result or empty string
103
104 Regex taken from http://www.regular-expressions.info/email.html
105 """
106 import re
107 r = author.find('>')
108 l = author.find('<')
109
110 if l == -1 or r == -1:
111 # fallback to regex match of email out of a string
112 email_re = re.compile(r"""[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!"""
113 r"""#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z"""
114 r"""0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]"""
115 r"""*[a-z0-9])?""", re.IGNORECASE)
116 m = re.findall(email_re, author)
117 return m[0] if m else ''
118
119 return author[l + 1:r].strip()
120
121
122 def author_name(author):
123 """
124 Get name of author, or else the username.
125 It'll try to find an email in the author string and just cut it off
126 to get the username.
127 """
128
129 if not '@' in author:
130 return author
131 else:
132 return author.replace(author_email(author), '').replace('<', '')\
133 .replace('>', '').strip()
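# --- Usage sketch (illustrative, not part of the module) --------------------
# Shows how author_name/author_email split a typical "Name <email>" author
# string; the author value below is made up.
from rhodecode.lib.vcs.utils import author_email, author_name

author = 'Joe Doe <joe.doe@example.com>'
assert author_email(author) == 'joe.doe@example.com'
assert author_name(author) == 'Joe Doe'
# when no email is present the whole string is treated as the name
assert author_name('joedoe') == 'joedoe'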
@@ -0,0 +1,177 b''
1 from rhodecode.lib.vcs.exceptions import VCSError
2 from rhodecode.lib.vcs.nodes import FileNode
3 from pygments.formatters import HtmlFormatter
4 from pygments import highlight
5
6 import StringIO
7
8
9 def annotate_highlight(filenode, annotate_from_changeset_func=None,
10 order=None, headers=None, **options):
11 """
12 Returns html portion containing annotated table with 3 columns: line
13 numbers, changeset information and pygmentized line of code.
14
15 :param filenode: FileNode object
16 :param annotate_from_changeset_func: function taking changeset and
17 returning single annotate cell; needs break line at the end
18 :param order: ordered sequence of ``ls`` (line numbers column),
19 ``annotate`` (annotate column), ``code`` (code column); Default is
20 ``['ls', 'annotate', 'code']``
21 :param headers: dictionary with headers (keys are what is in ``order``
22 parameter)
23 """
24 options['linenos'] = True
25 formatter = AnnotateHtmlFormatter(filenode=filenode, order=order,
26 headers=headers,
27 annotate_from_changeset_func=annotate_from_changeset_func, **options)
28 lexer = filenode.lexer
29 highlighted = highlight(filenode.content, lexer, formatter)
30 return highlighted
31
32
33 class AnnotateHtmlFormatter(HtmlFormatter):
34
35 def __init__(self, filenode, annotate_from_changeset_func=None,
36 order=None, **options):
37 """
38 If ``annotate_from_changeset_func`` is passed it should be a function
39 which returns string from the given changeset. For example, we may pass
40 following function as ``annotate_from_changeset_func``::
41
42 def changeset_to_anchor(changeset):
43 return '<a href="/changesets/%s/">%s</a>\n' %\
44 (changeset.id, changeset.id)
45
46 :param annotate_from_changeset_func: see above
47 :param order: (default: ``['ls', 'annotate', 'code']``); order of
48 columns;
49 :param options: standard pygments' HtmlFormatter options, there is one
50 extra option though, ``headers``. For instance we can pass::
51
52 formatter = AnnotateHtmlFormatter(filenode, headers={
53 'ls': '#',
54 'annotate': 'Annotate',
55 'code': 'Code',
56 })
57
58 """
59 super(AnnotateHtmlFormatter, self).__init__(**options)
60 self.annotate_from_changeset_func = annotate_from_changeset_func
61 self.order = order or ('ls', 'annotate', 'code')
62 headers = options.pop('headers', None)
63 if headers and not ('ls' in headers and 'annotate' in headers and
64 'code' in headers):
65 raise ValueError("If headers option dict is specified it must "
66 "all 'ls', 'annotate' and 'code' keys")
67 self.headers = headers
68 if isinstance(filenode, FileNode):
69 self.filenode = filenode
70 else:
71 raise VCSError("This formatter expects a FileNode parameter, not %r"
72 % type(filenode))
73
74 def annotate_from_changeset(self, changeset):
75 """
76 Returns full html line for single changeset per annotated line.
77 """
78 if self.annotate_from_changeset_func:
79 return self.annotate_from_changeset_func(changeset)
80 else:
81 return ''.join((changeset.id, '\n'))
82
83 def _wrap_tablelinenos(self, inner):
84 dummyoutfile = StringIO.StringIO()
85 lncount = 0
86 for t, line in inner:
87 if t:
88 lncount += 1
89 dummyoutfile.write(line)
90
91 fl = self.linenostart
92 mw = len(str(lncount + fl - 1))
93 sp = self.linenospecial
94 st = self.linenostep
95 la = self.lineanchors
96 aln = self.anchorlinenos
97 if sp:
98 lines = []
99
100 for i in range(fl, fl + lncount):
101 if i % st == 0:
102 if i % sp == 0:
103 if aln:
104 lines.append('<a href="#%s-%d" class="special">'
105 '%*d</a>' %
106 (la, i, mw, i))
107 else:
108 lines.append('<span class="special">'
109 '%*d</span>' % (mw, i))
110 else:
111 if aln:
112 lines.append('<a href="#%s-%d">'
113 '%*d</a>' % (la, i, mw, i))
114 else:
115 lines.append('%*d' % (mw, i))
116 else:
117 lines.append('')
118 ls = '\n'.join(lines)
119 else:
120 lines = []
121 for i in range(fl, fl + lncount):
122 if i % st == 0:
123 if aln:
124 lines.append('<a href="#%s-%d">%*d</a>' \
125 % (la, i, mw, i))
126 else:
127 lines.append('%*d' % (mw, i))
128 else:
129 lines.append('')
130 ls = '\n'.join(lines)
131
132 annotate_changesets = [tup[1] for tup in self.filenode.annotate]
133 # If pygments cropped the last line break we need to do that too
134 ln_cs = len(annotate_changesets)
135 ln_ = len(ls.splitlines())
136 if ln_cs > ln_:
137 annotate_changesets = annotate_changesets[:ln_ - ln_cs]
138 annotate = ''.join((self.annotate_from_changeset(changeset)
139 for changeset in annotate_changesets))
140 # in case you wonder about the seemingly redundant <div> here:
141 # since the content in the other cell also is wrapped in a div,
142 # some browsers in some configurations seem to mess up the formatting.
143 '''
144 yield 0, ('<table class="%stable">' % self.cssclass +
145 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
146 ls + '</pre></div></td>' +
147 '<td class="code">')
148 yield 0, dummyoutfile.getvalue()
149 yield 0, '</td></tr></table>'
150
151 '''
152 headers_row = []
153 if self.headers:
154 headers_row = ['<tr class="annotate-header">']
155 for key in self.order:
156 td = ''.join(('<td>', self.headers[key], '</td>'))
157 headers_row.append(td)
158 headers_row.append('</tr>')
159
160 body_row_start = ['<tr>']
161 for key in self.order:
162 if key == 'ls':
163 body_row_start.append(
164 '<td class="linenos"><div class="linenodiv"><pre>' +
165 ls + '</pre></div></td>')
166 elif key == 'annotate':
167 body_row_start.append(
168 '<td class="annotate"><div class="annotatediv"><pre>' +
169 annotate + '</pre></div></td>')
170 elif key == 'code':
171 body_row_start.append('<td class="code">')
172 yield 0, ('<table class="%stable">' % self.cssclass +
173 ''.join(headers_row) +
174 ''.join(body_row_start)
175 )
176 yield 0, dummyoutfile.getvalue()
177 yield 0, '</td></tr></table>'
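# --- Usage sketch (illustrative, not part of the module) --------------------
# A hedged example of rendering an annotated file as an HTML table. The
# repository path and file name are assumptions; the callback simply turns
# each changeset into a link, as suggested in the docstrings above.
from rhodecode.lib.vcs import get_repo


def changeset_to_anchor(changeset):
    return '<a href="/changesets/%s/">%s</a>\n' % (changeset.id, changeset.id)


repo = get_repo('/srv/repos/myrepo')                  # assumed location
filenode = repo.get_changeset().get_node('setup.py')  # assumed file
html = annotate_highlight(
    filenode,
    annotate_from_changeset_func=changeset_to_anchor,
    headers={'ls': '#', 'annotate': 'Annotate', 'code': 'Code'})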
@@ -0,0 +1,67 b''
1 # -*- coding: utf-8 -*-
2 """
3 vcs.utils.archivers
4 ~~~~~~~~~~~~~~~~~~~
5
6 set of archiver functions for creating archives from repository content
7
8 :created_on: Jan 21, 2011
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
10 """
11
12
13 class BaseArchiver(object):
14
15 def __init__(self):
16 self.archive_file = self._get_archive_file()
17
18 def addfile(self):
19 """
20 Adds a file to archive container
21 """
22 pass
23
24 def close(self):
25 """
26 Closes and finalizes operation of archive container object
27 """
28 self.archive_file.close()
29
30 def _get_archive_file(self):
31 """
32 Returns container for specific archive
33 """
34 raise NotImplementedError()
35
36
37 class TarArchiver(BaseArchiver):
38 pass
39
40
41 class Tbz2Archiver(BaseArchiver):
42 pass
43
44
45 class TgzArchiver(BaseArchiver):
46 pass
47
48
49 class ZipArchiver(BaseArchiver):
50 pass
51
52
53 def get_archiver(kind):
54 """
55 Returns instance of archiver class specific to given kind
56
57 :param kind: archive kind
58 """
59
60 archivers = {
61 'tar': TarArchiver,
62 'tbz2': Tbz2Archiver,
63 'tgz': TgzArchiver,
64 'zip': ZipArchiver,
65 }
66
67 return archivers[kind]()
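# --- Usage sketch (illustrative, not part of the module) --------------------
# The concrete archivers above are still stubs; this hedged sketch shows the
# contract a real implementation would follow, using the stdlib zipfile module
# purely as an assumed example backend.
import zipfile


class SketchZipArchiver(BaseArchiver):

    def _get_archive_file(self):
        # container object stored by BaseArchiver as self.archive_file
        return zipfile.ZipFile('example-archive.zip', 'w')

    def addfile(self, name, content):
        # add a single in-memory file to the container
        self.archive_file.writestr(name, content)


archiver = SketchZipArchiver()
archiver.addfile('readme.txt', 'hello')
archiver.close()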
@@ -0,0 +1,47 b''
1 from mercurial import ui, config
2
3
4 def make_ui(self, path='hgwebdir.config'):
5 """
6 A function that will read python rc files and make a ui from the read options
7
8 :param path: path to mercurial config file
9 """
10 #propagated from mercurial documentation
11 sections = [
12 'alias',
13 'auth',
14 'decode/encode',
15 'defaults',
16 'diff',
17 'email',
18 'extensions',
19 'format',
20 'merge-patterns',
21 'merge-tools',
22 'hooks',
23 'http_proxy',
24 'smtp',
25 'patch',
26 'paths',
27 'profiling',
28 'server',
29 'trusted',
30 'ui',
31 'web',
32 ]
33
34 repos = path
35 baseui = ui.ui()
36 cfg = config.config()
37 cfg.read(repos)
38 self.paths = cfg.items('paths')
39 self.base_path = self.paths[0][1].replace('*', '')
40 self.check_repo_dir(self.paths)
41 self.set_statics(cfg)
42
43 for section in sections:
44 for k, v in cfg.items(section):
45 baseui.setconfig(section, k, v)
46
47 return baseui
@@ -0,0 +1,13 b''
1 """
2 Various utilities to work with Python < 2.7.
3
4 Those utilities may be deleted once ``vcs`` stops support for older Python
5 versions.
6 """
7 import sys
8
9
10 if sys.version_info >= (2, 7):
11 unittest = __import__('unittest')
12 else:
13 unittest = __import__('unittest2')
@@ -0,0 +1,460 b''
1 # -*- coding: utf-8 -*-
2 # original copyright: 2007-2008 by Armin Ronacher
3 # licensed under the BSD license.
4
5 import re
6 import difflib
7 import logging
8
9 from difflib import unified_diff
10 from itertools import tee, imap
11
12 from mercurial.match import match
13
14 from rhodecode.lib.vcs.exceptions import VCSError
15 from rhodecode.lib.vcs.nodes import FileNode, NodeError
16
17
18 def get_udiff(filenode_old, filenode_new, show_whitespace=True):
19 """
20 Returns unified diff between given ``filenode_old`` and ``filenode_new``.
21 """
22 try:
23 filenode_old_date = filenode_old.last_changeset.date
24 except NodeError:
25 filenode_old_date = None
26
27 try:
28 filenode_new_date = filenode_new.last_changeset.date
29 except NodeError:
30 filenode_new_date = None
31
32 for filenode in (filenode_old, filenode_new):
33 if not isinstance(filenode, FileNode):
34 raise VCSError("Given object should be FileNode object, not %s"
35 % filenode.__class__)
36
37 if filenode_old_date and filenode_new_date:
38 if not filenode_old_date < filenode_new_date:
39 logging.debug("Generating udiff for filenodes with not increasing "
40 "dates")
41
42 vcs_udiff = unified_diff(filenode_old.content.splitlines(True),
43 filenode_new.content.splitlines(True),
44 filenode_old.name,
45 filenode_new.name,
46 filenode_old_date,
47 filenode_new_date)
48 return vcs_udiff
49
50
51 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True):
52 """
53 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
54
55 :param ignore_whitespace: ignore whitespaces in diff
56 """
57
58 for filenode in (filenode_old, filenode_new):
59 if not isinstance(filenode, FileNode):
60 raise VCSError("Given object should be FileNode object, not %s"
61 % filenode.__class__)
62
63 old_raw_id = getattr(filenode_old.changeset, 'raw_id', '0' * 40)
64 new_raw_id = getattr(filenode_new.changeset, 'raw_id', '0' * 40)
65
66 repo = filenode_new.changeset.repository
67 vcs_gitdiff = repo._get_diff(old_raw_id, new_raw_id, filenode_new.path,
68 ignore_whitespace)
69
70 return vcs_gitdiff
71
72
73 class DiffProcessor(object):
74 """
75 Give it a unified diff and it returns a list of the files that were
76 mentioned in the diff together with a dict of meta information that
77 can be used to render it in a HTML template.
78 """
79 _chunk_re = re.compile(r'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
80
81 def __init__(self, diff, differ='diff', format='udiff'):
82 """
83 :param diff: a text in diff format or generator
84 :param format: format of diff passed, `udiff` or `gitdiff`
85 """
86 if isinstance(diff, basestring):
87 diff = [diff]
88
89 self.__udiff = diff
90 self.__format = format
91 self.adds = 0
92 self.removes = 0
93
94 if isinstance(self.__udiff, basestring):
95 self.lines = iter(self.__udiff.splitlines(1))
96
97 elif self.__format == 'gitdiff':
98 udiff_copy = self.copy_iterator()
99 self.lines = imap(self.escaper, self._parse_gitdiff(udiff_copy))
100 else:
101 udiff_copy = self.copy_iterator()
102 self.lines = imap(self.escaper, udiff_copy)
103
104 # Select a differ.
105 if differ == 'difflib':
106 self.differ = self._highlight_line_difflib
107 else:
108 self.differ = self._highlight_line_udiff
109
110 def escaper(self, string):
111 return string.replace('<', '&lt;').replace('>', '&gt;')
112
113 def copy_iterator(self):
114 """
115 make a fresh copy of generator, we should not iterate thru
116 an original as it's needed for repeating operations on
117 this instance of DiffProcessor
118 """
119 self.__udiff, iterator_copy = tee(self.__udiff)
120 return iterator_copy
121
122 def _extract_rev(self, line1, line2):
123 """
124 Extract the filename and revision hint from a line.
125 """
126
127 try:
128 if line1.startswith('--- ') and line2.startswith('+++ '):
129 l1 = line1[4:].split(None, 1)
130 old_filename = l1[0].lstrip('a/') if len(l1) >= 1 else None
131 old_rev = l1[1] if len(l1) == 2 else 'old'
132
133 l2 = line2[4:].split(None, 1)
134 new_filename = l2[0].lstrip('b/') if len(l2) >= 1 else None
135 new_rev = l2[1] if len(l2) == 2 else 'new'
136
137 filename = old_filename if (old_filename !=
138 'dev/null') else new_filename
139
140 return filename, old_rev, new_rev
141 except (ValueError, IndexError):
142 pass
143
144 return None, None, None
145
146 def _parse_gitdiff(self, diffiterator):
147 def line_decoder(l):
148 if l.startswith('+') and not l.startswith('+++'):
149 self.adds += 1
150 elif l.startswith('-') and not l.startswith('---'):
151 self.removes += 1
152 return l.decode('utf8', 'replace')
153
154 output = list(diffiterator)
155 size = len(output)
156
157 if size == 2:
158 l = []
159 l.extend([output[0]])
160 l.extend(output[1].splitlines(1))
161 return map(line_decoder, l)
162 elif size == 1:
163 return map(line_decoder, output[0].splitlines(1))
164 elif size == 0:
165 return []
166
167 raise Exception('wrong size of diff %s' % size)
168
169 def _highlight_line_difflib(self, line, next):
170 """
171 Highlight inline changes in both lines.
172 """
173
174 if line['action'] == 'del':
175 old, new = line, next
176 else:
177 old, new = next, line
178
179 oldwords = re.split(r'(\W)', old['line'])
180 newwords = re.split(r'(\W)', new['line'])
181
182 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
183
184 oldfragments, newfragments = [], []
185 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
186 oldfrag = ''.join(oldwords[i1:i2])
187 newfrag = ''.join(newwords[j1:j2])
188 if tag != 'equal':
189 if oldfrag:
190 oldfrag = '<del>%s</del>' % oldfrag
191 if newfrag:
192 newfrag = '<ins>%s</ins>' % newfrag
193 oldfragments.append(oldfrag)
194 newfragments.append(newfrag)
195
196 old['line'] = "".join(oldfragments)
197 new['line'] = "".join(newfragments)
198
199 def _highlight_line_udiff(self, line, next):
200 """
201 Highlight inline changes in both lines.
202 """
203 start = 0
204 limit = min(len(line['line']), len(next['line']))
205 while start < limit and line['line'][start] == next['line'][start]:
206 start += 1
207 end = -1
208 limit -= start
209 while -end <= limit and line['line'][end] == next['line'][end]:
210 end -= 1
211 end += 1
212 if start or end:
213 def do(l):
214 last = end + len(l['line'])
215 if l['action'] == 'add':
216 tag = 'ins'
217 else:
218 tag = 'del'
219 l['line'] = '%s<%s>%s</%s>%s' % (
220 l['line'][:start],
221 tag,
222 l['line'][start:last],
223 tag,
224 l['line'][last:]
225 )
226 do(line)
227 do(next)
228
229 def _parse_udiff(self):
230 """
231 Parse the diff an return data for the template.
232 """
233 lineiter = self.lines
234 files = []
235 try:
236 line = lineiter.next()
237 # skip first context
238 skipfirst = True
239 while 1:
240 # continue until we found the old file
241 if not line.startswith('--- '):
242 line = lineiter.next()
243 continue
244
245 chunks = []
246 filename, old_rev, new_rev = \
247 self._extract_rev(line, lineiter.next())
248 files.append({
249 'filename': filename,
250 'old_revision': old_rev,
251 'new_revision': new_rev,
252 'chunks': chunks
253 })
254
255 line = lineiter.next()
256 while line:
257 match = self._chunk_re.match(line)
258 if not match:
259 break
260
261 lines = []
262 chunks.append(lines)
263
264 old_line, old_end, new_line, new_end = \
265 [int(x or 1) for x in match.groups()[:-1]]
266 old_line -= 1
267 new_line -= 1
268 context = len(match.groups()) == 5
269 old_end += old_line
270 new_end += new_line
271
272 if context:
273 if not skipfirst:
274 lines.append({
275 'old_lineno': '...',
276 'new_lineno': '...',
277 'action': 'context',
278 'line': line,
279 })
280 else:
281 skipfirst = False
282
283 line = lineiter.next()
284 while old_line < old_end or new_line < new_end:
285 if line:
286 command, line = line[0], line[1:]
287 else:
288 command = ' '
289 affects_old = affects_new = False
290
291 # ignore those if we don't expect them
292 if command in '#@':
293 continue
294 elif command == '+':
295 affects_new = True
296 action = 'add'
297 elif command == '-':
298 affects_old = True
299 action = 'del'
300 else:
301 affects_old = affects_new = True
302 action = 'unmod'
303
304 old_line += affects_old
305 new_line += affects_new
306 lines.append({
307 'old_lineno': affects_old and old_line or '',
308 'new_lineno': affects_new and new_line or '',
309 'action': action,
310 'line': line
311 })
312 line = lineiter.next()
313
314 except StopIteration:
315 pass
316
317 # highlight inline changes
318 for file in files:
319 for chunk in file['chunks']:
320 lineiter = iter(chunk)
321 #first = True
322 try:
323 while 1:
324 line = lineiter.next()
325 if line['action'] != 'unmod':
326 nextline = lineiter.next()
327 if nextline['action'] == 'unmod' or \
328 nextline['action'] == line['action']:
329 continue
330 self.differ(line, nextline)
331 except StopIteration:
332 pass
333
334 return files
335
336 def prepare(self):
337 """
338 Prepare the passed udiff for HTML rendering. It'll return a list
339 of dicts
340 """
341 return self._parse_udiff()
342
343 def _safe_id(self, idstring):
344 """Make a string safe for including in an id attribute.
345
346 The HTML spec says that id attributes 'must begin with
347 a letter ([A-Za-z]) and may be followed by any number
348 of letters, digits ([0-9]), hyphens ("-"), underscores
349 ("_"), colons (":"), and periods (".")'. These regexps
350 are slightly over-zealous, in that they remove colons
351 and periods unnecessarily.
352
353 Whitespace is transformed into underscores, and then
354 anything which is not a hyphen or a character that
355 matches \w (alphanumerics and underscore) is removed.
356
357 """
358 # Transform all whitespace to underscore
359 idstring = re.sub(r'\s', "_", '%s' % idstring)
360 # Remove everything that is not a hyphen or a member of \w
361 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
362 return idstring
363
364 def raw_diff(self):
365 """
366 Returns raw string as udiff
367 """
368 udiff_copy = self.copy_iterator()
369 if self.__format == 'gitdiff':
370 udiff_copy = self._parse_gitdiff(udiff_copy)
371 return u''.join(udiff_copy)
372
373 def as_html(self, table_class='code-difftable', line_class='line',
374 new_lineno_class='lineno old', old_lineno_class='lineno new',
375 code_class='code'):
376 """
377 Return udiff as html table with customized css classes
378 """
379 def _link_to_if(condition, label, url):
380 """
381 Generates a link if condition is meet or just the label if not.
382 """
383
384 if condition:
385 return '''<a href="%(url)s">%(label)s</a>''' % {'url': url,
386 'label': label}
387 else:
388 return label
389 diff_lines = self.prepare()
390 _html_empty = True
391 _html = []
392 _html.append('''<table class="%(table_class)s">\n''' \
393 % {'table_class': table_class})
394 for diff in diff_lines:
395 for line in diff['chunks']:
396 _html_empty = False
397 for change in line:
398 _html.append('''<tr class="%(line_class)s %(action)s">\n''' \
399 % {'line_class': line_class,
400 'action': change['action']})
401 anchor_old_id = ''
402 anchor_new_id = ''
403 anchor_old = "%(filename)s_o%(oldline_no)s" % \
404 {'filename': self._safe_id(diff['filename']),
405 'oldline_no': change['old_lineno']}
406 anchor_new = "%(filename)s_n%(oldline_no)s" % \
407 {'filename': self._safe_id(diff['filename']),
408 'oldline_no': change['new_lineno']}
409 cond_old = change['old_lineno'] != '...' and \
410 change['old_lineno']
411 cond_new = change['new_lineno'] != '...' and \
412 change['new_lineno']
413 if cond_old:
414 anchor_old_id = 'id="%s"' % anchor_old
415 if cond_new:
416 anchor_new_id = 'id="%s"' % anchor_new
417 ###########################################################
418 # OLD LINE NUMBER
419 ###########################################################
420 _html.append('''\t<td %(a_id)s class="%(old_lineno_cls)s">''' \
421 % {'a_id': anchor_old_id,
422 'old_lineno_cls': old_lineno_class})
423
424 _html.append('''<pre>%(link)s</pre>''' \
425 % {'link':
426 _link_to_if(cond_old, change['old_lineno'], '#%s' \
427 % anchor_old)})
428 _html.append('''</td>\n''')
429 ###########################################################
430 # NEW LINE NUMBER
431 ###########################################################
432
433 _html.append('''\t<td %(a_id)s class="%(new_lineno_cls)s">''' \
434 % {'a_id': anchor_new_id,
435 'new_lineno_cls': new_lineno_class})
436
437 _html.append('''<pre>%(link)s</pre>''' \
438 % {'link':
439 _link_to_if(cond_new, change['new_lineno'], '#%s' \
440 % anchor_new)})
441 _html.append('''</td>\n''')
442 ###########################################################
443 # CODE
444 ###########################################################
445 _html.append('''\t<td class="%(code_class)s">''' \
446 % {'code_class': code_class})
447 _html.append('''\n\t\t<pre>%(code)s</pre>\n''' \
448 % {'code': change['line']})
449 _html.append('''\t</td>''')
450 _html.append('''\n</tr>\n''')
451 _html.append('''</table>''')
452 if _html_empty:
453 return None
454 return ''.join(_html)
455
456 def stat(self):
457 """
458 Returns tuple of added and removed lines for this instance
459 """
460 return self.adds, self.removes
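# --- Usage sketch (illustrative, not part of the module) --------------------
# Hedged example of producing a unified diff for two standalone FileNodes and
# rendering it as an HTML table; the file name and contents are made up. Note
# that DiffProcessor is fed the diff generator itself, not a joined string.
from rhodecode.lib.vcs.nodes import FileNode

old = FileNode('setup.py', content='version = "1.0"\n')
new = FileNode('setup.py', content='version = "1.1"\n')

udiff = get_udiff(old, new)                 # generator of unified diff lines
processor = DiffProcessor(udiff, format='udiff')
html = processor.as_html()                  # None when there are no changes
added, removed = processor.stat()           # counters are filled for gitdiff input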
@@ -0,0 +1,13 b''
1 import imp
2
3
4 def create_module(name, path):
5 """
6 Returns module created *on the fly*. Returned module would have name same
7 as given ``name`` and would contain code read from file at the given
8 ``path`` (it may also be a zip or package containing *__main__* module).
9 """
10 module = imp.new_module(name)
11 module.__file__ = path
12 execfile(path, module.__dict__)
13 return module
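# --- Usage sketch (illustrative, not part of the module) --------------------
# Hedged example: write a tiny module to a temporary file and load it on the
# fly with create_module(); the module name and attribute are made up.
import os
import tempfile

fd, path = tempfile.mkstemp(suffix='.py')
os.close(fd)
f = open(path, 'w')
f.write('ANSWER = 42\n')
f.close()
mod = create_module('on_the_fly_settings', path)
assert mod.ANSWER == 42
os.remove(path)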
@@ -0,0 +1,28 b''
1 def filesizeformat(bytes, sep=' '):
2 """
3 Formats the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB,
4 102 B, 2.3 GB etc).
5
6 Grabbed from Django (http://www.djangoproject.com), slightly modified.
7
8 :param bytes: size in bytes (as integer)
9 :param sep: string separator between number and abbreviation
10 """
11 try:
12 bytes = float(bytes)
13 except (TypeError, ValueError, UnicodeDecodeError):
14 return '0%sB' % sep
15
16 if bytes < 1024:
17 size = bytes
18 template = '%.0f%sB'
19 elif bytes < 1024 * 1024:
20 size = bytes / 1024
21 template = '%.0f%sKB'
22 elif bytes < 1024 * 1024 * 1024:
23 size = bytes / 1024 / 1024
24 template = '%.1f%sMB'
25 else:
26 size = bytes / 1024 / 1024 / 1024
27 template = '%.2f%sGB'
28 return template % (size, sep)
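# --- Usage sketch (illustrative) ---------------------------------------------
# A few sample conversions for the helper above.
assert filesizeformat(123) == '123 B'
assert filesizeformat(12345) == '12 KB'
assert filesizeformat(5 * 1024 * 1024) == '5.0 MB'
assert filesizeformat('bogus') == '0 B'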
@@ -0,0 +1,252 b''
1 """
2 Utilities aimed to help achieve mostly basic tasks.
3 """
4 from __future__ import division
5
6 import re
7 import time
8 import datetime
9 import os.path
10 from subprocess import Popen, PIPE
11 from rhodecode.lib.vcs.exceptions import VCSError
12 from rhodecode.lib.vcs.exceptions import RepositoryError
13 from rhodecode.lib.vcs.utils.paths import abspath
14
15 ALIASES = ['hg', 'git']
16
17
18 def get_scm(path, search_recursively=False, explicit_alias=None):
19 """
20 Returns one of alias from ``ALIASES`` (in order of precedence same as
21 shortcuts given in ``ALIASES``) and top working dir path for the given
22 argument. If no scm-specific directory is found or more than one scm is
23 found at that directory, ``VCSError`` is raised.
24
25 :param search_recursively: if set to ``True``, this function would try to
26 move up to parent directory every time no scm is recognized for the
27 currently checked path. Default: ``False``.
28 :param explicit_alias: can be one of available backend aliases, when given
29 it will return given explicit alias in repositories under more than one
30 version control, if explicit_alias is different than found it will raise
31 VCSError
32 """
33 if not os.path.isdir(path):
34 raise VCSError("Given path %s is not a directory" % path)
35
36 def get_scms(path):
37 return [(scm, path) for scm in get_scms_for_path(path)]
38
39 found_scms = get_scms(path)
40 while not found_scms and search_recursively:
41 newpath = abspath(path, '..')
42 if newpath == path:
43 break
44 path = newpath
45 found_scms = get_scms(path)
46
47 if len(found_scms) > 1:
48 for scm in found_scms:
49 if scm[0] == explicit_alias:
50 return scm
51 raise VCSError('More than one [%s] scm found at given path %s'
52 % (','.join((x[0] for x in found_scms)), path))
53
54 if len(found_scms) == 0:
55 raise VCSError('No scm found at given path %s' % path)
56
57 return found_scms[0]
58
59
60 def get_scms_for_path(path):
61 """
62 Returns all scm's found at the given path. If no scm is recognized
63 - empty list is returned.
64
65 :param path: path to directory which should be checked. May be callable.
66
67 :raises VCSError: if given ``path`` is not a directory
68 """
69 from rhodecode.lib.vcs.backends import get_backend
70 if hasattr(path, '__call__'):
71 path = path()
72 if not os.path.isdir(path):
73 raise VCSError("Given path %r is not a directory" % path)
74
75 result = []
76 for key in ALIASES:
77 dirname = os.path.join(path, '.' + key)
78 if os.path.isdir(dirname):
79 result.append(key)
80 continue
81 # We still need to check if it's not bare repository as
82 # bare repos don't have working directories
83 try:
84 get_backend(key)(path)
85 result.append(key)
86 continue
87 except RepositoryError:
88 # Wrong backend
89 pass
90 except VCSError:
91 # No backend at all
92 pass
93 return result
94
95
96 def get_repo_paths(path):
97 """
98 Returns path's subdirectories which seem to be repositories.
99 """
100 repo_paths = []
101 dirnames = (os.path.abspath(os.path.join(path, dirname)) for dirname in os.listdir(path))
102 for dirname in dirnames:
103 try:
104 get_scm(dirname)
105 repo_paths.append(dirname)
106 except VCSError:
107 pass
108 return repo_paths
109
110
111 def run_command(cmd, *args):
112 """
113 Runs command on the system with given ``args``.
114 """
115 command = ' '.join([cmd] + list(args))
116 p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
117 stdout, stderr = p.communicate()
118 return p.returncode, stdout, stderr
119
120
121 def get_highlighted_code(name, code, type='terminal'):
122 """
123 If pygments are available on the system
124 then returned output is colored. Otherwise
125 unchanged content is returned.
126 """
127 import logging
128 try:
129 import pygments
130 pygments
131 except ImportError:
132 return code
133 from pygments import highlight
134 from pygments.lexers import guess_lexer_for_filename, ClassNotFound
135 from pygments.formatters import TerminalFormatter
136
137 try:
138 lexer = guess_lexer_for_filename(name, code)
139 formatter = TerminalFormatter()
140 content = highlight(code, lexer, formatter)
141 except ClassNotFound:
142 logging.debug("Couldn't guess Lexer, will not use pygments.")
143 content = code
144 return content
145
146 def parse_changesets(text):
147 """
148 Returns dictionary with *start*, *main* and *end* ids.
149
150 Examples::
151
152 >>> parse_changesets('aaabbb')
153 {'start': None, 'main': 'aaabbb', 'end': None}
154 >>> parse_changesets('aaabbb..cccddd')
155 {'start': 'aaabbb', 'main': None, 'end': 'cccddd'}
156
157 """
158 text = text.strip()
159 CID_RE = r'[a-zA-Z0-9]+'
160 if not '..' in text:
161 m = re.match(r'^(?P<cid>%s)$' % CID_RE, text)
162 if m:
163 return {
164 'start': None,
165 'main': text,
166 'end': None,
167 }
168 else:
169 RE = r'^(?P<start>%s)?\.{2,3}(?P<end>%s)?$' % (CID_RE, CID_RE)
170 m = re.match(RE, text)
171 if m:
172 result = m.groupdict()
173 result['main'] = None
174 return result
175 raise ValueError("IDs not recognized")
176
177 def parse_datetime(text):
178 """
179 Parses given text and returns ``datetime.datetime`` instance or raises
180 ``ValueError``.
181
182 :param text: string of desired date/datetime or something more verbose,
183 like *yesterday*, *2weeks 3days*, etc.
184 """
185
186 text = text.strip().lower()
187
188 INPUT_FORMATS = (
189 '%Y-%m-%d %H:%M:%S',
190 '%Y-%m-%d %H:%M',
191 '%Y-%m-%d',
192 '%m/%d/%Y %H:%M:%S',
193 '%m/%d/%Y %H:%M',
194 '%m/%d/%Y',
195 '%m/%d/%y %H:%M:%S',
196 '%m/%d/%y %H:%M',
197 '%m/%d/%y',
198 )
199 for format in INPUT_FORMATS:
200 try:
201 return datetime.datetime(*time.strptime(text, format)[:6])
202 except ValueError:
203 pass
204
205 # Try descriptive texts
206 if text == 'tomorrow':
207 future = datetime.datetime.now() + datetime.timedelta(days=1)
208 args = future.timetuple()[:3] + (23, 59, 59)
209 return datetime.datetime(*args)
210 elif text == 'today':
211 return datetime.datetime(*datetime.datetime.today().timetuple()[:3])
212 elif text == 'now':
213 return datetime.datetime.now()
214 elif text == 'yesterday':
215 past = datetime.datetime.now() - datetime.timedelta(days=1)
216 return datetime.datetime(*past.timetuple()[:3])
217 else:
218 days = 0
219 matched = re.match(
220 r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text)
221 if matched:
222 groupdict = matched.groupdict()
223 if groupdict['days']:
224 days += int(matched.groupdict()['days'])
225 if groupdict['weeks']:
226 days += int(matched.groupdict()['weeks']) * 7
227 past = datetime.datetime.now() - datetime.timedelta(days=days)
228 return datetime.datetime(*past.timetuple()[:3])
229
230 raise ValueError('Wrong date: "%s"' % text)
231
232
233 def get_dict_for_attrs(obj, attrs):
234 """
235 Returns dictionary for each attribute from given ``obj``.
236 """
237 data = {}
238 for attr in attrs:
239 data[attr] = getattr(obj, attr)
240 return data
241
242
243 def get_total_seconds(timedelta):
244 """
245 Backported for Python 2.5.
246
247 See http://docs.python.org/library/datetime.html.
248 """
249 return ((timedelta.microseconds + (
250 timedelta.seconds +
251 timedelta.days * 24 * 60 * 60
252 ) * 10**6) / 10**6)
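# --- Usage sketch (illustrative, not part of the module) --------------------
# parse_datetime accepts explicit dates as well as short relative forms; the
# values below are examples only. get_scm needs a real working copy on disk,
# so it is only shown as a commented, assumed call.
import datetime

assert parse_datetime('2011-06-01 10:00:00') == \
    datetime.datetime(2011, 6, 1, 10, 0, 0)
assert parse_datetime('today') == \
    datetime.datetime(*datetime.datetime.today().timetuple()[:3])
# relative input handled by the weeks/days regex branch (2 * 7 + 3 days back)
seventeen_days_ago = parse_datetime('2weeks 3days')

# alias, top_dir = get_scm('/srv/repos/myrepo', search_recursively=True)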
@@ -0,0 +1,12 b''
1 """Mercurial libs compatibility
2
3 """
4 from mercurial import archival, merge as hg_merge, patch, ui
5 from mercurial.commands import clone, nullid, pull
6 from mercurial.context import memctx, memfilectx
7 from mercurial.error import RepoError, RepoLookupError, Abort
8 from mercurial.hgweb.common import get_contact
9 from mercurial.localrepo import localrepository
10 from mercurial.match import match
11 from mercurial.mdiff import diffopts
12 from mercurial.node import hex
@@ -0,0 +1,27 b''
1 from rhodecode.lib.vcs.exceptions import VCSError
2
3
4 def import_class(class_path):
5 """
6 Returns class from the given path.
7
8 For example, in order to get class located at
9 ``vcs.backends.hg.MercurialRepository``:
10
11 try:
12 hgrepo = import_class('vcs.backends.hg.MercurialRepository')
13 except VCSError:
14 # handle error
15 """
16 splitted = class_path.split('.')
17 mod_path = '.'.join(splitted[:-1])
18 class_name = splitted[-1]
19 try:
20 class_mod = __import__(mod_path, {}, {}, [class_name])
21 except ImportError, err:
22 msg = "There was a problem while trying to import backend class. "\
23 "Original error was:\n%s" % err
24 raise VCSError(msg)
25 cls = getattr(class_mod, class_name)
26
27 return cls
@@ -0,0 +1,27 b''
1 class LazyProperty(object):
2 """
3 Decorator for easier creation of ``property`` from potentially expensive to
4 calculate attribute of the class.
5
6 Usage::
7
8 class Foo(object):
9 @LazyProperty
10 def bar(self):
11 print 'Calculating self._bar'
12 return 42
13
14 Taken from http://blog.pythonisito.com/2008/08/lazy-descriptors.html and
15 used widely.
16 """
17
18 def __init__(self, func):
19 self._func = func
20 self.__name__ = func.__name__
21 self.__doc__ = func.__doc__
22
23 def __get__(self, obj, klass=None):
24 if obj is None:
25 return None
26 result = obj.__dict__[self.__name__] = self._func(obj)
27 return result
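# --- Usage sketch (illustrative, not part of the module) --------------------
# Demonstrates that the decorated method runs only once per instance; the
# computed value is cached in the instance __dict__ under the property name.
class Foo(object):
    calls = 0

    @LazyProperty
    def bar(self):
        Foo.calls += 1
        return 42


foo = Foo()
assert foo.bar == 42
assert foo.bar == 42          # second access hits the cached value
assert Foo.calls == 1
assert 'bar' in foo.__dict__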
@@ -0,0 +1,72 b''
1 import os
2
3
4 class LockFile(object):
5 """Provides methods to obtain, check for, and release a file based lock which
6 should be used to handle concurrent access to the same file.
7
8 As we are a utility class to be derived from, we only use protected methods.
9
10 Locks will automatically be released on destruction"""
11 __slots__ = ("_file_path", "_owns_lock")
12
13 def __init__(self, file_path):
14 self._file_path = file_path
15 self._owns_lock = False
16
17 def __del__(self):
18 self._release_lock()
19
20 def _lock_file_path(self):
21 """:return: Path to lockfile"""
22 return "%s.lock" % (self._file_path)
23
24 def _has_lock(self):
25 """:return: True if we have a lock and if the lockfile still exists
26 :raise AssertionError: if our lock-file does not exist"""
27 if not self._owns_lock:
28 return False
29
30 return True
31
32 def _obtain_lock_or_raise(self):
33 """Create a lock file as flag for other instances, mark our instance as lock-holder
34
35 :raise IOError: if a lock was already present or a lock file could not be written"""
36 if self._has_lock():
37 return
38 lock_file = self._lock_file_path()
39 if os.path.isfile(lock_file):
40 raise IOError("Lock for file %r did already exist, delete %r in case the lock is illegal" % (self._file_path, lock_file))
41
42 try:
43 fd = os.open(lock_file, os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0)
44 os.close(fd)
45 except OSError, e:
46 raise IOError(str(e))
47
48 self._owns_lock = True
49
50 def _obtain_lock(self):
51 """The default implementation will raise if a lock cannot be obtained.
52 Subclasses may override this method to provide a different implementation"""
53 return self._obtain_lock_or_raise()
54
55 def _release_lock(self):
56 """Release our lock if we have one"""
57 if not self._has_lock():
58 return
59
60 # if someone removed our file beforehand, let's just flag this issue
61 # instead of failing, to make it more usable.
62 lfp = self._lock_file_path()
63 try:
64 # on bloody windows, the file needs write permissions to be removable.
65 # Why ...
66 if os.name == 'nt':
67 os.chmod(lfp, 0777)
68 # END handle win32
69 os.remove(lfp)
70 except OSError:
71 pass
72 self._owns_lock = False
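# --- Usage sketch (illustrative, not part of the module) --------------------
# Hedged example: a tiny subclass exposing the protected API to guard access
# to a made up file path; taking a second lock on the same path raises IOError.
class GuardedFile(LockFile):

    def acquire(self):
        self._obtain_lock_or_raise()

    def release(self):
        self._release_lock()


lock = GuardedFile('/tmp/some-shared-file')
lock.acquire()                       # creates /tmp/some-shared-file.lock
try:
    GuardedFile('/tmp/some-shared-file').acquire()
except IOError:
    pass                             # the lock is already held
finally:
    lock.release()                   # removes the lock file again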
@@ -0,0 +1,102 b''
1 """Ordered dict implementation"""
2 from UserDict import DictMixin
3
4
5 class OrderedDict(dict, DictMixin):
6
7 def __init__(self, *args, **kwds):
8 if len(args) > 1:
9 raise TypeError('expected at most 1 arguments, got %d' % len(args))
10 try:
11 self.__end
12 except AttributeError:
13 self.clear()
14 self.update(*args, **kwds)
15
16 def clear(self):
17 self.__end = end = []
18 end += [None, end, end] # sentinel node for doubly linked list
19 self.__map = {} # key --> [key, prev, next]
20 dict.clear(self)
21
22 def __setitem__(self, key, value):
23 if key not in self:
24 end = self.__end
25 curr = end[1]
26 curr[2] = end[1] = self.__map[key] = [key, curr, end]
27 dict.__setitem__(self, key, value)
28
29 def __delitem__(self, key):
30 dict.__delitem__(self, key)
31 key, prev, next = self.__map.pop(key)
32 prev[2] = next
33 next[1] = prev
34
35 def __iter__(self):
36 end = self.__end
37 curr = end[2]
38 while curr is not end:
39 yield curr[0]
40 curr = curr[2]
41
42 def __reversed__(self):
43 end = self.__end
44 curr = end[1]
45 while curr is not end:
46 yield curr[0]
47 curr = curr[1]
48
49 def popitem(self, last=True):
50 if not self:
51 raise KeyError('dictionary is empty')
52 if last:
53 key = reversed(self).next()
54 else:
55 key = iter(self).next()
56 value = self.pop(key)
57 return key, value
58
59 def __reduce__(self):
60 items = [[k, self[k]] for k in self]
61 tmp = self.__map, self.__end
62 del self.__map, self.__end
63 inst_dict = vars(self).copy()
64 self.__map, self.__end = tmp
65 if inst_dict:
66 return (self.__class__, (items,), inst_dict)
67 return self.__class__, (items,)
68
69 def keys(self):
70 return list(self)
71
72 setdefault = DictMixin.setdefault
73 update = DictMixin.update
74 pop = DictMixin.pop
75 values = DictMixin.values
76 items = DictMixin.items
77 iterkeys = DictMixin.iterkeys
78 itervalues = DictMixin.itervalues
79 iteritems = DictMixin.iteritems
80
81 def __repr__(self):
82 if not self:
83 return '%s()' % (self.__class__.__name__,)
84 return '%s(%r)' % (self.__class__.__name__, self.items())
85
86 def copy(self):
87 return self.__class__(self)
88
89 @classmethod
90 def fromkeys(cls, iterable, value=None):
91 d = cls()
92 for key in iterable:
93 d[key] = value
94 return d
95
96 def __eq__(self, other):
97 if isinstance(other, OrderedDict):
98 return len(self) == len(other) and self.items() == other.items()
99 return dict.__eq__(self, other)
100
101 def __ne__(self, other):
102 return not self == other
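# --- Usage sketch (illustrative) ---------------------------------------------
# Insertion order is preserved, unlike a plain dict on Python < 2.7.
od = OrderedDict()
od['zebra'] = 1
od['apple'] = 2
od['mango'] = 3
assert od.keys() == ['zebra', 'apple', 'mango']
assert od.popitem() == ('mango', 3)            # last inserted item
assert od.popitem(last=False) == ('zebra', 1)  # first inserted item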
@@ -0,0 +1,36 b''
1 import os
2
3 abspath = lambda *p: os.path.abspath(os.path.join(*p))
4
5
6 def get_dirs_for_path(*paths):
7 """
8 Returns list of directories, including intermediate.
9 """
10 for path in paths:
11 head = path
12 while head:
13 head, tail = os.path.split(head)
14 if head:
15 yield head
16 else:
17 # We don't need to yield empty path
18 break
19
20
21 def get_dir_size(path):
22 root_path = path
23 size = 0
24 for path, dirs, files in os.walk(root_path):
25 for f in files:
26 try:
27 size += os.path.getsize(os.path.join(path, f))
28 except OSError:
29 pass
30 return size
31
32 def get_user_home():
33 """
34 Returns home path of the user.
35 """
36 return os.getenv('HOME', os.getenv('USERPROFILE'))
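# --- Usage sketch (illustrative) ---------------------------------------------
# get_dirs_for_path yields every intermediate directory of the given paths and
# abspath joins then absolutizes its arguments.
import os

assert sorted(get_dirs_for_path('docs/api/index.rst')) == ['docs', 'docs/api']
assert abspath('foo', '..', 'bar') == os.path.abspath('bar')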
@@ -0,0 +1,419 b''
1 # encoding: UTF-8
2 import sys
3 import datetime
4 from string import Template
5 from rhodecode.lib.vcs.utils.filesize import filesizeformat
6 from rhodecode.lib.vcs.utils.helpers import get_total_seconds
7
8
9 class ProgressBarError(Exception):
10 pass
11
12 class AlreadyFinishedError(ProgressBarError):
13 pass
14
15
16 class ProgressBar(object):
17
18 default_elements = ['percentage', 'bar', 'steps']
19
20 def __init__(self, steps=100, stream=None, elements=None):
21 self.step = 0
22 self.steps = steps
23 self.stream = stream or sys.stderr
24 self.bar_char = '='
25 self.width = 50
26 self.separator = ' | '
27 self.elements = elements or self.default_elements
28 self.started = None
29 self.finished = False
30 self.steps_label = 'Step'
31 self.time_label = 'Time'
32 self.eta_label = 'ETA'
33 self.speed_label = 'Speed'
34 self.transfer_label = 'Transfer'
35
36 def __str__(self):
37 return self.get_line()
38
39 def __iter__(self):
40 start = self.step
41 end = self.steps + 1
42 for x in xrange(start, end):
43 self.render(x)
44 yield x
45
46 def get_separator(self):
47 return self.separator
48
49 def get_bar_char(self):
50 return self.bar_char
51
52 def get_bar(self):
53 char = self.get_bar_char()
54 perc = self.get_percentage()
55 length = int(self.width * perc / 100)
56 bar = char * length
57 bar = bar.ljust(self.width)
58 return bar
59
60 def get_elements(self):
61 return self.elements
62
63 def get_template(self):
64 separator = self.get_separator()
65 elements = self.get_elements()
66 return Template(separator.join((('$%s' % e) for e in elements)))
67
68 def get_total_time(self, current_time=None):
69 if current_time is None:
70 current_time = datetime.datetime.now()
71 if not self.started:
72 return datetime.timedelta()
73 return current_time - self.started
74
75 def get_rendered_total_time(self):
76 delta = self.get_total_time()
77 if not delta:
78 ttime = '-'
79 else:
80 ttime = str(delta)
81 return '%s %s' % (self.time_label, ttime)
82
83 def get_eta(self, current_time=None):
84 if current_time is None:
85 current_time = datetime.datetime.now()
86 if self.step == 0:
87 return datetime.timedelta()
88 total_seconds = get_total_seconds(self.get_total_time())
89 eta_seconds = total_seconds * self.steps / self.step - total_seconds
90 return datetime.timedelta(seconds=int(eta_seconds))
91
92 def get_rendered_eta(self):
93 eta = self.get_eta()
94 if not eta:
95 eta = '--:--:--'
96 else:
97 eta = str(eta).rjust(8)
98 return '%s: %s' % (self.eta_label, eta)
99
100 def get_percentage(self):
101 return float(self.step) / self.steps * 100
102
103 def get_rendered_percentage(self):
104 perc = self.get_percentage()
105 return ('%s%%' % (int(perc))).rjust(5)
106
107 def get_rendered_steps(self):
108 return '%s: %s/%s' % (self.steps_label, self.step, self.steps)
109
110 def get_rendered_speed(self, step=None, total_seconds=None):
111 if step is None:
112 step = self.step
113 if total_seconds is None:
114 total_seconds = get_total_seconds(self.get_total_time())
115 if step <= 0 or total_seconds <= 0:
116 speed = '-'
117 else:
118 speed = filesizeformat(float(step) / total_seconds)
119 return '%s: %s/s' % (self.speed_label, speed)
120
121 def get_rendered_transfer(self, step=None, steps=None):
122 if step is None:
123 step = self.step
124 if steps is None:
125 steps = self.steps
126
127 if steps <= 0:
128 return '%s: -' % self.transfer_label
129 total = filesizeformat(float(steps))
130 if step <= 0:
131 transferred = '-'
132 else:
133 transferred = filesizeformat(float(step))
134 return '%s: %s / %s' % (self.transfer_label, transferred, total)
135
136 def get_context(self):
137 return {
138 'percentage': self.get_rendered_percentage(),
139 'bar': self.get_bar(),
140 'steps': self.get_rendered_steps(),
141 'time': self.get_rendered_total_time(),
142 'eta': self.get_rendered_eta(),
143 'speed': self.get_rendered_speed(),
144 'transfer': self.get_rendered_transfer(),
145 }
146
147 def get_line(self):
148 template = self.get_template()
149 context = self.get_context()
150 return template.safe_substitute(**context)
151
152 def write(self, data):
153 self.stream.write(data)
154
155 def render(self, step):
156 if not self.started:
157 self.started = datetime.datetime.now()
158 if self.finished:
159 raise AlreadyFinishedError
160 self.step = step
161 self.write('\r%s' % self)
162 if step == self.steps:
163 self.finished = True
164 if step == self.steps:
165 self.write('\n')
166
167
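# --- Usage sketch (illustrative, not part of the module) ---------------------
# Render a small 10-step progress bar to stderr; iterating the bar calls
# render() once per step, and the sleep just stands in for real work.
import time

bar = ProgressBar(steps=10, elements=['percentage', 'bar', 'steps', 'eta'])
for step in bar:
    time.sleep(0.01)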
168 """
169 termcolors.py
170
171 Grabbed from Django (http://www.djangoproject.com)
172 """
173
174 color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white')
175 foreground = dict([(color_names[x], '3%s' % x) for x in range(8)])
176 background = dict([(color_names[x], '4%s' % x) for x in range(8)])
177
178 RESET = '0'
179 opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'}
180
181 def colorize(text='', opts=(), **kwargs):
182 """
183 Returns your text, enclosed in ANSI graphics codes.
184
185 Depends on the keyword arguments 'fg' and 'bg', and the contents of
186 the opts tuple/list.
187
188 Returns the RESET code if no parameters are given.
189
190 Valid colors:
191 'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
192
193 Valid options:
194 'bold'
195 'underscore'
196 'blink'
197 'reverse'
198 'conceal'
199 'noreset' - string will not be auto-terminated with the RESET code
200
201 Examples:
202 colorize('hello', fg='red', bg='blue', opts=('blink',))
203 colorize()
204 colorize('goodbye', opts=('underscore',))
205 print colorize('first line', fg='red', opts=('noreset',))
206 print 'this should be red too'
207 print colorize('and so should this')
208 print 'this should not be red'
209 """
210 code_list = []
211 if text == '' and len(opts) == 1 and opts[0] == 'reset':
212 return '\x1b[%sm' % RESET
213 for k, v in kwargs.iteritems():
214 if k == 'fg':
215 code_list.append(foreground[v])
216 elif k == 'bg':
217 code_list.append(background[v])
218 for o in opts:
219 if o in opt_dict:
220 code_list.append(opt_dict[o])
221 if 'noreset' not in opts:
222 text = text + '\x1b[%sm' % RESET
223 return ('\x1b[%sm' % ';'.join(code_list)) + text
224
225 def make_style(opts=(), **kwargs):
226 """
227 Returns a function with default parameters for colorize()
228
229 Example:
230 bold_red = make_style(opts=('bold',), fg='red')
231 print bold_red('hello')
232 KEYWORD = make_style(fg='yellow')
233 COMMENT = make_style(fg='blue', opts=('bold',))
234 """
235 return lambda text: colorize(text, opts, **kwargs)
236
237 NOCOLOR_PALETTE = 'nocolor'
238 DARK_PALETTE = 'dark'
239 LIGHT_PALETTE = 'light'
240
241 PALETTES = {
242 NOCOLOR_PALETTE: {
243 'ERROR': {},
244 'NOTICE': {},
245 'SQL_FIELD': {},
246 'SQL_COLTYPE': {},
247 'SQL_KEYWORD': {},
248 'SQL_TABLE': {},
249 'HTTP_INFO': {},
250 'HTTP_SUCCESS': {},
251 'HTTP_REDIRECT': {},
252 'HTTP_NOT_MODIFIED': {},
253 'HTTP_BAD_REQUEST': {},
254 'HTTP_NOT_FOUND': {},
255 'HTTP_SERVER_ERROR': {},
256 },
257 DARK_PALETTE: {
258 'ERROR': { 'fg': 'red', 'opts': ('bold',) },
259 'NOTICE': { 'fg': 'red' },
260 'SQL_FIELD': { 'fg': 'green', 'opts': ('bold',) },
261 'SQL_COLTYPE': { 'fg': 'green' },
262 'SQL_KEYWORD': { 'fg': 'yellow' },
263 'SQL_TABLE': { 'opts': ('bold',) },
264 'HTTP_INFO': { 'opts': ('bold',) },
265 'HTTP_SUCCESS': { },
266 'HTTP_REDIRECT': { 'fg': 'green' },
267 'HTTP_NOT_MODIFIED': { 'fg': 'cyan' },
268 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) },
269 'HTTP_NOT_FOUND': { 'fg': 'yellow' },
270 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) },
271 },
272 LIGHT_PALETTE: {
273 'ERROR': { 'fg': 'red', 'opts': ('bold',) },
274 'NOTICE': { 'fg': 'red' },
275 'SQL_FIELD': { 'fg': 'green', 'opts': ('bold',) },
276 'SQL_COLTYPE': { 'fg': 'green' },
277 'SQL_KEYWORD': { 'fg': 'blue' },
278 'SQL_TABLE': { 'opts': ('bold',) },
279 'HTTP_INFO': { 'opts': ('bold',) },
280 'HTTP_SUCCESS': { },
281 'HTTP_REDIRECT': { 'fg': 'green', 'opts': ('bold',) },
282 'HTTP_NOT_MODIFIED': { 'fg': 'green' },
283 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) },
284 'HTTP_NOT_FOUND': { 'fg': 'red' },
285 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) },
286 }
287 }
288 DEFAULT_PALETTE = DARK_PALETTE
289
290 # ---------------------------- #
291 # --- End of termcolors.py --- #
292 # ---------------------------- #
293
294
295 class ColoredProgressBar(ProgressBar):
296
297 BAR_COLORS = (
298 (10, 'red'),
299 (30, 'magenta'),
300 (50, 'yellow'),
301 (99, 'green'),
302 (100, 'blue'),
303 )
304
305 def get_line(self):
306 line = super(ColoredProgressBar, self).get_line()
307 perc = self.get_percentage()
308 if perc > 100:
309 color = 'blue'
310 for max_perc, color in self.BAR_COLORS:
311 if perc <= max_perc:
312 break
313 return colorize(line, fg=color)
314
315
316 class AnimatedProgressBar(ProgressBar):
317
318 def get_bar_char(self):
319 chars = '-/|\\'
320 if self.step >= self.steps:
321 return '='
322 return chars[self.step % len(chars)]
323
324
325 class BarOnlyProgressBar(ProgressBar):
326
327 default_elements = ['bar', 'steps']
328
329 def get_bar(self):
330 bar = super(BarOnlyProgressBar, self).get_bar()
331 perc = self.get_percentage()
332 perc_text = '%s%%' % int(perc)
333 text = (' %s%% ' % (perc_text)).center(self.width, '=')
334 L = text.find(' ')
335 R = text.rfind(' ')
336 bar = ' '.join((bar[:L], perc_text, bar[R:]))
337 return bar
338
339
340 class AnimatedColoredProgressBar(AnimatedProgressBar,
341 ColoredProgressBar):
342 pass
343
344
345 class BarOnlyColoredProgressBar(ColoredProgressBar,
346 BarOnlyProgressBar):
347 pass
348
349
350
351 def main():
352 import time
353
354 print "Standard progress bar..."
355 bar = ProgressBar(30)
356 for x in xrange(1, 31):
357 bar.render(x)
358 time.sleep(0.02)
359 bar.stream.write('\n')
360 print
361
362 print "Empty bar..."
363 bar = ProgressBar(50)
364 bar.render(0)
365 print
366 print
367
368 print "Colored bar..."
369 bar = ColoredProgressBar(20)
370 for x in bar:
371 time.sleep(0.01)
372 print
373
374 print "Animated char bar..."
375 bar = AnimatedProgressBar(20)
376 for x in bar:
377 time.sleep(0.01)
378 print
379
380 print "Animated + colored char bar..."
381 bar = AnimatedColoredProgressBar(20)
382 for x in bar:
383 time.sleep(0.01)
384 print
385
386 print "Bar only ..."
387 bar = BarOnlyProgressBar(20)
388 for x in bar:
389 time.sleep(0.01)
390 print
391
392 print "Colored, longer bar-only, eta, total time ..."
393 bar = BarOnlyColoredProgressBar(40)
394 bar.width = 60
395 bar.elements += ['time', 'eta']
396 for x in bar:
397 time.sleep(0.01)
398 print
399 print
400
401 print "File transfer bar, breaks after 2 seconds ..."
402 total_bytes = 1024 * 1024 * 2
403 bar = ProgressBar(total_bytes)
404 bar.width = 50
405 bar.elements.remove('steps')
406 bar.elements += ['transfer', 'time', 'eta', 'speed']
407 for x in xrange(0, bar.steps, 1024):
408 bar.render(x)
409 time.sleep(0.01)
410 now = datetime.datetime.now()
411 if now - bar.started >= datetime.timedelta(seconds=2):
412 break
413 print
414 print
415
416
417
418 if __name__ == '__main__':
419 main()
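# Illustrative sketch, not part of this changeset: the demo above drives the
# built-in bar variants; the same element mechanism can be extended in a
# subclass.  This assumes only the API visible above -- ``default_elements``
# feeds get_template(), and get_context() supplies the values substituted
# into that template -- so a new element needs both a name in the element
# list and a matching context key.
import time

class LabeledProgressBar(ProgressBar):
    """ProgressBar with a static text label rendered before the bar."""
    default_elements = ['label', 'percentage', 'bar', 'steps']
    label = 'processing'

    def get_context(self):
        context = super(LabeledProgressBar, self).get_context()
        context['label'] = self.label
        return context

bar = LabeledProgressBar(20)
bar.label = 'unpacking'
for x in bar:          # iterating renders each step, as in main() above
    time.sleep(0.01)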
@@ -0,0 +1,200 b''
1 """
2 termcolors.py
3
4 Grabbed from Django (http://www.djangoproject.com)
5 """
6
7 color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white')
8 foreground = dict([(color_names[x], '3%s' % x) for x in range(8)])
9 background = dict([(color_names[x], '4%s' % x) for x in range(8)])
10
11 RESET = '0'
12 opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'}
13
14 def colorize(text='', opts=(), **kwargs):
15 """
16 Returns your text, enclosed in ANSI graphics codes.
17
18 Depends on the keyword arguments 'fg' and 'bg', and the contents of
19 the opts tuple/list.
20
21 Returns the RESET code if no parameters are given.
22
23 Valid colors:
24 'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
25
26 Valid options:
27 'bold'
28 'underscore'
29 'blink'
30 'reverse'
31 'conceal'
32 'noreset' - string will not be auto-terminated with the RESET code
33
34 Examples:
35 colorize('hello', fg='red', bg='blue', opts=('blink',))
36 colorize()
37 colorize('goodbye', opts=('underscore',))
38 print colorize('first line', fg='red', opts=('noreset',))
39 print 'this should be red too'
40 print colorize('and so should this')
41 print 'this should not be red'
42 """
43 code_list = []
44 if text == '' and len(opts) == 1 and opts[0] == 'reset':
45 return '\x1b[%sm' % RESET
46 for k, v in kwargs.iteritems():
47 if k == 'fg':
48 code_list.append(foreground[v])
49 elif k == 'bg':
50 code_list.append(background[v])
51 for o in opts:
52 if o in opt_dict:
53 code_list.append(opt_dict[o])
54 if 'noreset' not in opts:
55 text = text + '\x1b[%sm' % RESET
56 return ('\x1b[%sm' % ';'.join(code_list)) + text
57
58 def make_style(opts=(), **kwargs):
59 """
60 Returns a function with default parameters for colorize()
61
62 Example:
63 bold_red = make_style(opts=('bold',), fg='red')
64 print bold_red('hello')
65 KEYWORD = make_style(fg='yellow')
66 COMMENT = make_style(fg='blue', opts=('bold',))
67 """
68 return lambda text: colorize(text, opts, **kwargs)
69
70 NOCOLOR_PALETTE = 'nocolor'
71 DARK_PALETTE = 'dark'
72 LIGHT_PALETTE = 'light'
73
74 PALETTES = {
75 NOCOLOR_PALETTE: {
76 'ERROR': {},
77 'NOTICE': {},
78 'SQL_FIELD': {},
79 'SQL_COLTYPE': {},
80 'SQL_KEYWORD': {},
81 'SQL_TABLE': {},
82 'HTTP_INFO': {},
83 'HTTP_SUCCESS': {},
84 'HTTP_REDIRECT': {},
85 'HTTP_NOT_MODIFIED': {},
86 'HTTP_BAD_REQUEST': {},
87 'HTTP_NOT_FOUND': {},
88 'HTTP_SERVER_ERROR': {},
89 },
90 DARK_PALETTE: {
91 'ERROR': { 'fg': 'red', 'opts': ('bold',) },
92 'NOTICE': { 'fg': 'red' },
93 'SQL_FIELD': { 'fg': 'green', 'opts': ('bold',) },
94 'SQL_COLTYPE': { 'fg': 'green' },
95 'SQL_KEYWORD': { 'fg': 'yellow' },
96 'SQL_TABLE': { 'opts': ('bold',) },
97 'HTTP_INFO': { 'opts': ('bold',) },
98 'HTTP_SUCCESS': { },
99 'HTTP_REDIRECT': { 'fg': 'green' },
100 'HTTP_NOT_MODIFIED': { 'fg': 'cyan' },
101 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) },
102 'HTTP_NOT_FOUND': { 'fg': 'yellow' },
103 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) },
104 },
105 LIGHT_PALETTE: {
106 'ERROR': { 'fg': 'red', 'opts': ('bold',) },
107 'NOTICE': { 'fg': 'red' },
108 'SQL_FIELD': { 'fg': 'green', 'opts': ('bold',) },
109 'SQL_COLTYPE': { 'fg': 'green' },
110 'SQL_KEYWORD': { 'fg': 'blue' },
111 'SQL_TABLE': { 'opts': ('bold',) },
112 'HTTP_INFO': { 'opts': ('bold',) },
113 'HTTP_SUCCESS': { },
114 'HTTP_REDIRECT': { 'fg': 'green', 'opts': ('bold',) },
115 'HTTP_NOT_MODIFIED': { 'fg': 'green' },
116 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) },
117 'HTTP_NOT_FOUND': { 'fg': 'red' },
118 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) },
119 }
120 }
121 DEFAULT_PALETTE = DARK_PALETTE
122
123 def parse_color_setting(config_string):
124 """Parse a DJANGO_COLORS environment variable to produce the system palette
125
126 The general form of a palette definition is:
127
128 "palette;role=fg;role=fg/bg;role=fg,option,option;role=fg/bg,option,option"
129
130 where:
131 palette is a named palette; one of 'light', 'dark', or 'nocolor'.
132 role is a named style used by Django
133 fg is a foreground color.
134 bg is a background color.
135 option is a display option.
136
137 Specifying a named palette is the same as manually specifying the individual
138 definitions for each role. Any individual definitions following the palette
139 definition will augment the base palette definition.
140
141 Valid roles:
142 'error', 'notice', 'sql_field', 'sql_coltype', 'sql_keyword', 'sql_table',
143 'http_info', 'http_success', 'http_redirect', 'http_bad_request',
144 'http_not_found', 'http_server_error'
145
146 Valid colors:
147 'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
148
149 Valid options:
150 'bold', 'underscore', 'blink', 'reverse', 'conceal'
151
152 """
153 if not config_string:
154 return PALETTES[DEFAULT_PALETTE]
155
156 # Split the color configuration into parts
157 parts = config_string.lower().split(';')
158 palette = PALETTES[NOCOLOR_PALETTE].copy()
159 for part in parts:
160 if part in PALETTES:
161 # A default palette has been specified
162 palette.update(PALETTES[part])
163 elif '=' in part:
164 # Process a palette defining string
165 definition = {}
166
167 # Break the definition into the role,
168 # plus the list of specific instructions.
169 # The role must be in upper case
170 role, instructions = part.split('=')
171 role = role.upper()
172
173 styles = instructions.split(',')
174 styles.reverse()
175
176 # The first instruction can contain a slash
177 # to break apart fg/bg.
178 colors = styles.pop().split('/')
179 colors.reverse()
180 fg = colors.pop()
181 if fg in color_names:
182 definition['fg'] = fg
183 if colors and colors[-1] in color_names:
184 definition['bg'] = colors[-1]
185
186 # All remaining instructions are options
187 opts = tuple(s for s in styles if s in opt_dict.keys())
188 if opts:
189 definition['opts'] = opts
190
191 # The nocolor palette has all available roles.
192 # Use that palette as the basis for determining
193 # if the role is valid.
194 if role in PALETTES[NOCOLOR_PALETTE] and definition:
195 palette[role] = definition
196
197 # If there are no colors specified, return the empty palette.
198 if palette == PALETTES[NOCOLOR_PALETTE]:
199 return None
200 return palette
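# Illustrative sketch, not part of this changeset: how a DJANGO_COLORS-style
# string flows through parse_color_setting() above.  Starting from the
# 'light' palette, the 'error' role is overridden to yellow on blue, blinking.
palette = parse_color_setting('light;error=yellow/blue,blink')
# palette['ERROR'] == {'fg': 'yellow', 'bg': 'blue', 'opts': ('blink',)}
error_style = make_style(**palette['ERROR'])
print error_style('repository scan failed')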
@@ -1,19 +1,20 b''
1 1 syntax: glob
2 2 *.pyc
3 3 *.swp
4 4 *.sqlite
5 5 Paste*.egg
6 6
7 7 syntax: regexp
8 8 ^build
9 9 ^docs/build/
10 10 ^docs/_build/
11 11 ^data$
12 12 ^\.settings$
13 13 ^\.project$
14 14 ^\.pydevproject$
15 15 ^\.coverage$
16 16 ^rhodecode\.db$
17 17 ^test\.db$
18 18 ^RhodeCode\.egg-info$
19 19 ^rc\.ini$
20 ^fabfile.py
@@ -1,18 +1,17 b''
1 1 Pylons==1.0.0
2 2 Beaker==1.6.2
3 3 WebHelpers>=1.2
4 4 formencode==1.2.4
5 5 SQLAlchemy==0.7.4
6 6 Mako==0.5.0
7 7 pygments>=1.4
8 8 whoosh>=2.3.0,<2.4
9 9 celery>=2.2.5,<2.3
10 10 babel
11 11 python-dateutil>=1.5.0,<2.0.0
12 12 dulwich>=0.8.0,<0.9.0
13 vcs>=0.2.3.dev
14 13 webob==1.0.8
15 markdown==2.0.3
14 markdown==2.1.1
16 15 docutils==0.8.1
17 16 py-bcrypt
18 17 mercurial>=2.1,<2.2 No newline at end of file
@@ -1,93 +1,92 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.__init__
4 4 ~~~~~~~~~~~~~~~~~~
5 5
6 6 RhodeCode, a web based repository management based on pylons
7 7 versioning implementation: http://semver.org/
8 8
9 9 :created_on: Apr 9, 2010
10 10 :author: marcink
11 11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
12 12 :license: GPLv3, see COPYING for more details.
13 13 """
14 14 # This program is free software: you can redistribute it and/or modify
15 15 # it under the terms of the GNU General Public License as published by
16 16 # the Free Software Foundation, either version 3 of the License, or
17 17 # (at your option) any later version.
18 18 #
19 19 # This program is distributed in the hope that it will be useful,
20 20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 22 # GNU General Public License for more details.
23 23 #
24 24 # You should have received a copy of the GNU General Public License
25 25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
26 26 import sys
27 27 import platform
28 28
29 29 VERSION = (1, 3, 0, 'beta')
30 30 __version__ = '.'.join((str(each) for each in VERSION[:4]))
31 31 __dbversion__ = 5 # defines current db version for migrations
32 32 __platform__ = platform.system()
33 33 __license__ = 'GPLv3'
34 34 __py_version__ = sys.version_info
35 35
36 36 PLATFORM_WIN = ('Windows')
37 37 PLATFORM_OTHERS = ('Linux', 'Darwin', 'FreeBSD', 'OpenBSD', 'SunOS')
38 38
39 39 requirements = [
40 40 "Pylons==1.0.0",
41 41 "Beaker==1.6.2",
42 42 "WebHelpers>=1.2",
43 43 "formencode==1.2.4",
44 44 "SQLAlchemy==0.7.4",
45 45 "Mako==0.5.0",
46 46 "pygments>=1.4",
47 47 "whoosh>=2.3.0,<2.4",
48 48 "celery>=2.2.5,<2.3",
49 49 "babel",
50 50 "python-dateutil>=1.5.0,<2.0.0",
51 51 "dulwich>=0.8.0,<0.9.0",
52 "vcs>=0.2.3.dev",
53 52 "webob==1.0.8",
54 "markdown==2.0.3",
53 "markdown==2.1.1",
55 54 "docutils==0.8.1",
56 55 ]
57 56
58 57 if __py_version__ < (2, 6):
59 58 requirements.append("simplejson")
60 59 requirements.append("pysqlite")
61 60
62 61 if __platform__ in PLATFORM_WIN:
63 62 requirements.append("mercurial>=2.1,<2.2")
64 63 else:
65 64 requirements.append("py-bcrypt")
66 65 requirements.append("mercurial>=2.1,<2.2")
67 66
68 67
69 68 try:
70 69 from rhodecode.lib import get_current_revision
71 70 _rev = get_current_revision()
72 71 except ImportError:
73 72 # this is needed when doing some setup.py operations
74 73 _rev = False
75 74
76 75 if len(VERSION) > 3 and _rev:
77 76 __version__ += ' [rev:%s]' % _rev[0]
78 77
79 78
80 79 def get_version():
81 80 """Returns shorter version (digit parts only) as string."""
82 81
83 82 return '.'.join((str(each) for each in VERSION[:3]))
84 83
85 84 BACKENDS = {
86 85 'hg': 'Mercurial repository',
87 86 'git': 'Git repository',
88 87 }
89 88
90 89 CELERY_ON = False
91 90
92 91 # link to config for pylons
93 92 CONFIG = None
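# Illustrative sketch, not part of this changeset: the two version strings
# defined above, assuming VERSION = (1, 3, 0, 'beta') and that no local
# revision could be read (otherwise ' [rev:...]' is appended to __version__).
import rhodecode
print rhodecode.get_version()   # '1.3.0'       (digit parts only)
print rhodecode.__version__     # '1.3.0.beta'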
@@ -1,134 +1,134 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.controllers.changelog
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 changelog controller for rhodecode
7 7
8 8 :created_on: Apr 21, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import logging
27 27 import traceback
28 28
29 29 from mercurial import graphmod
30 30 from pylons import request, url, session, tmpl_context as c
31 31 from pylons.controllers.util import redirect
32 32 from pylons.i18n.translation import _
33 33
34 34 import rhodecode.lib.helpers as h
35 35 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
36 36 from rhodecode.lib.base import BaseRepoController, render
37 37 from rhodecode.lib.helpers import RepoPage
38 38 from rhodecode.lib.compat import json
39 39
40 from vcs.exceptions import RepositoryError, ChangesetDoesNotExistError
40 from rhodecode.lib.vcs.exceptions import RepositoryError, ChangesetDoesNotExistError
41 41 from rhodecode.model.db import Repository
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 class ChangelogController(BaseRepoController):
47 47
48 48 @LoginRequired()
49 49 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
50 50 'repository.admin')
51 51 def __before__(self):
52 52 super(ChangelogController, self).__before__()
53 53 c.affected_files_cut_off = 60
54 54
55 55 def index(self):
56 56 limit = 100
57 57 default = 20
58 58 if request.params.get('size'):
59 59 try:
60 60 int_size = int(request.params.get('size'))
61 61 except ValueError:
62 62 int_size = default
63 63 int_size = int_size if int_size <= limit else limit
64 64 c.size = int_size
65 65 session['changelog_size'] = c.size
66 66 session.save()
67 67 else:
68 68 c.size = int(session.get('changelog_size', default))
69 69
70 70 p = int(request.params.get('page', 1))
71 71 branch_name = request.params.get('branch', None)
72 72 try:
73 73 if branch_name:
74 74 collection = [z for z in
75 75 c.rhodecode_repo.get_changesets(start=0,
76 76 branch_name=branch_name)]
77 77 c.total_cs = len(collection)
78 78 else:
79 79 collection = c.rhodecode_repo
80 80 c.total_cs = len(c.rhodecode_repo)
81 81
82 82 c.pagination = RepoPage(collection, page=p, item_count=c.total_cs,
83 83 items_per_page=c.size, branch=branch_name)
84 84 collection = list(c.pagination)
85 85 page_revisions = [x.raw_id for x in collection]
86 86 c.comments = c.rhodecode_db_repo.comments(page_revisions)
87 87
88 88 except (RepositoryError, ChangesetDoesNotExistError, Exception), e:
89 89 log.error(traceback.format_exc())
90 90 h.flash(str(e), category='warning')
91 91 return redirect(url('home'))
92 92
93 93 self._graph(c.rhodecode_repo, collection, c.total_cs, c.size, p)
94 94
95 95 c.branch_name = branch_name
96 96 c.branch_filters = [('', _('All Branches'))] + \
97 97 [(k, k) for k in c.rhodecode_repo.branches.keys()]
98 98
99 99 return render('changelog/changelog.html')
100 100
101 101 def changelog_details(self, cs):
102 102 if request.environ.get('HTTP_X_PARTIAL_XHR'):
103 103 c.cs = c.rhodecode_repo.get_changeset(cs)
104 104 return render('changelog/changelog_details.html')
105 105
106 106 def _graph(self, repo, collection, repo_size, size, p):
107 107 """
108 108 Generates DAG graph data for the changelog (mercurial or git)
109 109
110 110 :param repo: repo instance
111 111 :param size: number of commits to show
112 112 :param p: page number
113 113 """
114 114 if not collection:
115 115 c.jsdata = json.dumps([])
116 116 return
117 117
118 118 data = []
119 119 revs = [x.revision for x in collection]
120 120
121 121 if repo.alias == 'git':
122 122 for _ in revs:
123 123 vtx = [0, 1]
124 124 edges = [[0, 0, 1]]
125 125 data.append(['', vtx, edges])
126 126
127 127 elif repo.alias == 'hg':
128 128 c.dag = graphmod.colored(graphmod.dagwalker(repo._repo, revs))
129 129 for (id, type, ctx, vtx, edges) in c.dag:
130 130 if type != graphmod.CHANGESET:
131 131 continue
132 132 data.append(['', vtx, edges])
133 133
134 134 c.jsdata = json.dumps(data)
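# Illustrative sketch, not part of this changeset: the shape of c.jsdata that
# _graph() above hands to the changelog template.  Every entry is a
# ['', vertex, edges] row; for a git repository each revision is drawn as a
# plain straight line, while for hg the vertex/edge data comes from
# mercurial.graphmod.colored().
from rhodecode.lib.compat import json

revs = [2, 1, 0]                                   # three hypothetical revisions
data = [['', [0, 1], [[0, 0, 1]]] for _ in revs]   # the git branch of _graph()
jsdata = json.dumps(data)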
@@ -1,367 +1,367 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.controllers.changeset
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 changeset controller for pylons showing changes between
7 7 revisions
8 8
9 9 :created_on: Apr 25, 2010
10 10 :author: marcink
11 11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
12 12 :license: GPLv3, see COPYING for more details.
13 13 """
14 14 # This program is free software: you can redistribute it and/or modify
15 15 # it under the terms of the GNU General Public License as published by
16 16 # the Free Software Foundation, either version 3 of the License, or
17 17 # (at your option) any later version.
18 18 #
19 19 # This program is distributed in the hope that it will be useful,
20 20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 22 # GNU General Public License for more details.
23 23 #
24 24 # You should have received a copy of the GNU General Public License
25 25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
26 26 import logging
27 27 import traceback
28 28 from collections import defaultdict
29 29 from webob.exc import HTTPForbidden
30 30
31 31 from pylons import tmpl_context as c, url, request, response
32 32 from pylons.i18n.translation import _
33 33 from pylons.controllers.util import redirect
34 34 from pylons.decorators import jsonify
35 35
36 from vcs.exceptions import RepositoryError, ChangesetError, \
36 from rhodecode.lib.vcs.exceptions import RepositoryError, ChangesetError, \
37 37 ChangesetDoesNotExistError
38 from vcs.nodes import FileNode
38 from rhodecode.lib.vcs.nodes import FileNode
39 39
40 40 import rhodecode.lib.helpers as h
41 41 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
42 42 from rhodecode.lib.base import BaseRepoController, render
43 43 from rhodecode.lib.utils import EmptyChangeset
44 44 from rhodecode.lib.compat import OrderedDict
45 45 from rhodecode.lib import diffs
46 46 from rhodecode.model.db import ChangesetComment
47 47 from rhodecode.model.comment import ChangesetCommentsModel
48 48 from rhodecode.model.meta import Session
49 49 from rhodecode.lib.diffs import wrapped_diff
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 def anchor_url(revision, path):
55 55 fid = h.FID(revision, path)
56 56 return h.url.current(anchor=fid, **request.GET)
57 57
58 58
59 59 def get_ignore_ws(fid, GET):
60 60 ig_ws_global = request.GET.get('ignorews')
61 61 ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid))
62 62 if ig_ws:
63 63 try:
64 64 return int(ig_ws[0].split(':')[-1])
65 65 except:
66 66 pass
67 67 return ig_ws_global
68 68
69 69
70 70 def _ignorews_url(fileid=None):
71 71
72 72 params = defaultdict(list)
73 73 lbl = _('show white space')
74 74 ig_ws = get_ignore_ws(fileid, request.GET)
75 75 ln_ctx = get_line_ctx(fileid, request.GET)
76 76 # global option
77 77 if fileid is None:
78 78 if ig_ws is None:
79 79 params['ignorews'] += [1]
80 80 lbl = _('ignore white space')
81 81 ctx_key = 'context'
82 82 ctx_val = ln_ctx
83 83 # per file options
84 84 else:
85 85 if ig_ws is None:
86 86 params[fileid] += ['WS:1']
87 87 lbl = _('ignore white space')
88 88
89 89 ctx_key = fileid
90 90 ctx_val = 'C:%s' % ln_ctx
91 91 # if we have passed in ln_ctx pass it along to our params
92 92 if ln_ctx:
93 93 params[ctx_key] += [ctx_val]
94 94
95 95 params['anchor'] = fileid
96 96 img = h.image('/images/icons/text_strikethrough.png', lbl, class_='icon')
97 97 return h.link_to(img, h.url.current(**params), title=lbl, class_='tooltip')
98 98
99 99
100 100 def get_line_ctx(fid, GET):
101 101 ln_ctx_global = request.GET.get('context')
102 102 ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid))
103 103
104 104 if ln_ctx:
105 105 retval = ln_ctx[0].split(':')[-1]
106 106 else:
107 107 retval = ln_ctx_global
108 108
109 109 try:
110 110 return int(retval)
111 111 except:
112 112 return
113 113
114 114
115 115 def _context_url(fileid=None):
116 116 """
117 117 Generates url for context lines
118 118
119 119 :param fileid:
120 120 """
121 121 ig_ws = get_ignore_ws(fileid, request.GET)
122 122 ln_ctx = (get_line_ctx(fileid, request.GET) or 3) * 2
123 123
124 124 params = defaultdict(list)
125 125
126 126 # global option
127 127 if fileid is None:
128 128 if ln_ctx > 0:
129 129 params['context'] += [ln_ctx]
130 130
131 131 if ig_ws:
132 132 ig_ws_key = 'ignorews'
133 133 ig_ws_val = 1
134 134
135 135 # per file option
136 136 else:
137 137 params[fileid] += ['C:%s' % ln_ctx]
138 138 ig_ws_key = fileid
139 139 ig_ws_val = 'WS:%s' % 1
140 140
141 141 if ig_ws:
142 142 params[ig_ws_key] += [ig_ws_val]
143 143
144 144 lbl = _('%s line context') % ln_ctx
145 145
146 146 params['anchor'] = fileid
147 147 img = h.image('/images/icons/table_add.png', lbl, class_='icon')
148 148 return h.link_to(img, h.url.current(**params), title=lbl, class_='tooltip')
149 149
150 150
151 151 class ChangesetController(BaseRepoController):
152 152
153 153 @LoginRequired()
154 154 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
155 155 'repository.admin')
156 156 def __before__(self):
157 157 super(ChangesetController, self).__before__()
158 158 c.affected_files_cut_off = 60
159 159
160 160 def index(self, revision):
161 161
162 162 c.anchor_url = anchor_url
163 163 c.ignorews_url = _ignorews_url
164 164 c.context_url = _context_url
165 165
166 166 # get ranges of revisions if present
167 167 rev_range = revision.split('...')[:2]
168 168 enable_comments = True
169 169 try:
170 170 if len(rev_range) == 2:
171 171 enable_comments = False
172 172 rev_start = rev_range[0]
173 173 rev_end = rev_range[1]
174 174 rev_ranges = c.rhodecode_repo.get_changesets(start=rev_start,
175 175 end=rev_end)
176 176 else:
177 177 rev_ranges = [c.rhodecode_repo.get_changeset(revision)]
178 178
179 179 c.cs_ranges = list(rev_ranges)
180 180 if not c.cs_ranges:
181 181 raise RepositoryError('Changeset range returned empty result')
182 182
183 183 except (RepositoryError, ChangesetDoesNotExistError, Exception), e:
184 184 log.error(traceback.format_exc())
185 185 h.flash(str(e), category='warning')
186 186 return redirect(url('home'))
187 187
188 188 c.changes = OrderedDict()
189 189
190 190 c.lines_added = 0 # count of lines added
191 191 c.lines_deleted = 0 # count of lines removed
192 192
193 193 cumulative_diff = 0
194 194 c.cut_off = False # defines if cut off limit is reached
195 195
196 196 c.comments = []
197 197 c.inline_comments = []
198 198 c.inline_cnt = 0
199 199 # Iterate over ranges (default changeset view is always one changeset)
200 200 for changeset in c.cs_ranges:
201 201 c.comments.extend(ChangesetCommentsModel()\
202 202 .get_comments(c.rhodecode_db_repo.repo_id,
203 203 changeset.raw_id))
204 204 inlines = ChangesetCommentsModel()\
205 205 .get_inline_comments(c.rhodecode_db_repo.repo_id,
206 206 changeset.raw_id)
207 207 c.inline_comments.extend(inlines)
208 208 c.changes[changeset.raw_id] = []
209 209 try:
210 210 changeset_parent = changeset.parents[0]
211 211 except IndexError:
212 212 changeset_parent = None
213 213
214 214 #==================================================================
215 215 # ADDED FILES
216 216 #==================================================================
217 217 for node in changeset.added:
218 218 fid = h.FID(revision, node.path)
219 219 line_context_lcl = get_line_ctx(fid, request.GET)
220 220 ign_whitespace_lcl = get_ignore_ws(fid, request.GET)
221 221 lim = self.cut_off_limit
222 222 if cumulative_diff > self.cut_off_limit:
223 223 lim = -1
224 224 size, cs1, cs2, diff, st = wrapped_diff(filenode_old=None,
225 225 filenode_new=node,
226 226 cut_off_limit=lim,
227 227 ignore_whitespace=ign_whitespace_lcl,
228 228 line_context=line_context_lcl,
229 229 enable_comments=enable_comments)
230 230 cumulative_diff += size
231 231 c.lines_added += st[0]
232 232 c.lines_deleted += st[1]
233 233 c.changes[changeset.raw_id].append(('added', node, diff,
234 234 cs1, cs2, st))
235 235
236 236 #==================================================================
237 237 # CHANGED FILES
238 238 #==================================================================
239 239 for node in changeset.changed:
240 240 try:
241 241 filenode_old = changeset_parent.get_node(node.path)
242 242 except ChangesetError:
243 243 log.warning('Unable to fetch parent node for diff')
244 244 filenode_old = FileNode(node.path, '', EmptyChangeset())
245 245
246 246 fid = h.FID(revision, node.path)
247 247 line_context_lcl = get_line_ctx(fid, request.GET)
248 248 ign_whitespace_lcl = get_ignore_ws(fid, request.GET)
249 249 lim = self.cut_off_limit
250 250 if cumulative_diff > self.cut_off_limit:
251 251 lim = -1
252 252 size, cs1, cs2, diff, st = wrapped_diff(filenode_old=filenode_old,
253 253 filenode_new=node,
254 254 cut_off_limit=lim,
255 255 ignore_whitespace=ign_whitespace_lcl,
256 256 line_context=line_context_lcl,
257 257 enable_comments=enable_comments)
258 258 cumulative_diff += size
259 259 c.lines_added += st[0]
260 260 c.lines_deleted += st[1]
261 261 c.changes[changeset.raw_id].append(('changed', node, diff,
262 262 cs1, cs2, st))
263 263
264 264 #==================================================================
265 265 # REMOVED FILES
266 266 #==================================================================
267 267 for node in changeset.removed:
268 268 c.changes[changeset.raw_id].append(('removed', node, None,
269 269 None, None, (0, 0)))
270 270
271 271 # count inline comments
272 272 for path, lines in c.inline_comments:
273 273 for comments in lines.values():
274 274 c.inline_cnt += len(comments)
275 275
276 276 if len(c.cs_ranges) == 1:
277 277 c.changeset = c.cs_ranges[0]
278 278 c.changes = c.changes[c.changeset.raw_id]
279 279
280 280 return render('changeset/changeset.html')
281 281 else:
282 282 return render('changeset/changeset_range.html')
283 283
284 284 def raw_changeset(self, revision):
285 285
286 286 method = request.GET.get('diff', 'show')
287 287 ignore_whitespace = request.GET.get('ignorews') == '1'
288 288 line_context = request.GET.get('context', 3)
289 289 try:
290 290 c.scm_type = c.rhodecode_repo.alias
291 291 c.changeset = c.rhodecode_repo.get_changeset(revision)
292 292 except RepositoryError:
293 293 log.error(traceback.format_exc())
294 294 return redirect(url('home'))
295 295 else:
296 296 try:
297 297 c.changeset_parent = c.changeset.parents[0]
298 298 except IndexError:
299 299 c.changeset_parent = None
300 300 c.changes = []
301 301
302 302 for node in c.changeset.added:
303 303 filenode_old = FileNode(node.path, '')
304 304 if filenode_old.is_binary or node.is_binary:
305 305 diff = _('binary file') + '\n'
306 306 else:
307 307 f_gitdiff = diffs.get_gitdiff(filenode_old, node,
308 308 ignore_whitespace=ignore_whitespace,
309 309 context=line_context)
310 310 diff = diffs.DiffProcessor(f_gitdiff,
311 311 format='gitdiff').raw_diff()
312 312
313 313 cs1 = None
314 314 cs2 = node.last_changeset.raw_id
315 315 c.changes.append(('added', node, diff, cs1, cs2))
316 316
317 317 for node in c.changeset.changed:
318 318 filenode_old = c.changeset_parent.get_node(node.path)
319 319 if filenode_old.is_binary or node.is_binary:
320 320 diff = _('binary file')
321 321 else:
322 322 f_gitdiff = diffs.get_gitdiff(filenode_old, node,
323 323 ignore_whitespace=ignore_whitespace,
324 324 context=line_context)
325 325 diff = diffs.DiffProcessor(f_gitdiff,
326 326 format='gitdiff').raw_diff()
327 327
328 328 cs1 = filenode_old.last_changeset.raw_id
329 329 cs2 = node.last_changeset.raw_id
330 330 c.changes.append(('changed', node, diff, cs1, cs2))
331 331
332 332 response.content_type = 'text/plain'
333 333
334 334 if method == 'download':
335 335 response.content_disposition = 'attachment; filename=%s.patch' \
336 336 % revision
337 337
338 338 c.parent_tmpl = ''.join(['# Parent %s\n' % x.raw_id for x in
339 339 c.changeset.parents])
340 340
341 341 c.diffs = ''
342 342 for x in c.changes:
343 343 c.diffs += x[2]
344 344
345 345 return render('changeset/raw_changeset.html')
346 346
347 347 def comment(self, repo_name, revision):
348 348 ChangesetCommentsModel().create(text=request.POST.get('text'),
349 349 repo_id=c.rhodecode_db_repo.repo_id,
350 350 user_id=c.rhodecode_user.user_id,
351 351 revision=revision,
352 352 f_path=request.POST.get('f_path'),
353 353 line_no=request.POST.get('line'))
354 354 Session.commit()
355 355 return redirect(h.url('changeset_home', repo_name=repo_name,
356 356 revision=revision))
357 357
358 358 @jsonify
359 359 def delete_comment(self, repo_name, comment_id):
360 360 co = ChangesetComment.get(comment_id)
361 361 owner = lambda: co.author.user_id == c.rhodecode_user.user_id
362 362 if h.HasPermissionAny('hg.admin', 'repository.admin')() or owner:
363 363 ChangesetCommentsModel().delete(comment=co)
364 364 Session.commit()
365 365 return True
366 366 else:
367 367 raise HTTPForbidden()
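# Illustrative sketch, not part of this changeset: the query parameters that
# get_ignore_ws() / get_line_ctx() above understand.  Globally the view takes
# ?ignorews=1&context=10; per file the key is the file id produced by
# h.FID(revision, path) (the value 'deadbeef01' below is made up) and the
# values carry 'WS:' and 'C:' prefixes.
#
#   GET /changeset/<rev>?deadbeef01=WS:1&deadbeef01=C:10#deadbeef01
#
# get_ignore_ws('deadbeef01', request.GET)  # -> 1
# get_line_ctx('deadbeef01', request.GET)   # -> 10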
@@ -1,492 +1,492 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.controllers.files
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 Files controller for RhodeCode
7 7
8 8 :created_on: Apr 21, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import logging
28 28 import traceback
29 29
30 30 from pylons import request, response, tmpl_context as c, url
31 31 from pylons.i18n.translation import _
32 32 from pylons.controllers.util import redirect
33 33 from pylons.decorators import jsonify
34 34
35 from vcs.conf import settings
36 from vcs.exceptions import RepositoryError, ChangesetDoesNotExistError, \
35 from rhodecode.lib.vcs.conf import settings
36 from rhodecode.lib.vcs.exceptions import RepositoryError, ChangesetDoesNotExistError, \
37 37 EmptyRepositoryError, ImproperArchiveTypeError, VCSError, \
38 38 NodeAlreadyExistsError
39 from vcs.nodes import FileNode
39 from rhodecode.lib.vcs.nodes import FileNode
40 40
41 41 from rhodecode.lib.compat import OrderedDict
42 42 from rhodecode.lib import convert_line_endings, detect_mode, safe_str
43 43 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
44 44 from rhodecode.lib.base import BaseRepoController, render
45 45 from rhodecode.lib.utils import EmptyChangeset
46 46 from rhodecode.lib import diffs
47 47 import rhodecode.lib.helpers as h
48 48 from rhodecode.model.repo import RepoModel
49 49 from rhodecode.controllers.changeset import anchor_url, _ignorews_url,\
50 50 _context_url, get_line_ctx, get_ignore_ws
51 51 from rhodecode.lib.diffs import wrapped_diff
52 52 from rhodecode.model.scm import ScmModel
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56
57 57 class FilesController(BaseRepoController):
58 58
59 59 @LoginRequired()
60 60 def __before__(self):
61 61 super(FilesController, self).__before__()
62 62 c.cut_off_limit = self.cut_off_limit
63 63
64 64 def __get_cs_or_redirect(self, rev, repo_name, redirect_after=True):
65 65 """
66 66 Safe way to get a changeset; if an error occurs it redirects to tip with
67 67 a proper message
68 68
69 69 :param rev: revision to fetch
70 70 :param repo_name: repo name to redirect after
71 71 """
72 72
73 73 try:
74 74 return c.rhodecode_repo.get_changeset(rev)
75 75 except EmptyRepositoryError, e:
76 76 if not redirect_after:
77 77 return None
78 78 url_ = url('files_add_home',
79 79 repo_name=c.repo_name,
80 80 revision=0, f_path='')
81 81 add_new = '<a href="%s">[%s]</a>' % (url_, _('add new'))
82 82 h.flash(h.literal(_('There are no files yet %s' % add_new)),
83 83 category='warning')
84 84 redirect(h.url('summary_home', repo_name=repo_name))
85 85
86 86 except RepositoryError, e:
87 87 h.flash(str(e), category='warning')
88 88 redirect(h.url('files_home', repo_name=repo_name, revision='tip'))
89 89
90 90 def __get_filenode_or_redirect(self, repo_name, cs, path):
91 91 """
92 92 Returns file_node; if an error occurs or the given path is a directory,
93 93 it'll redirect to the top level path
94 94
95 95 :param repo_name: repo_name
96 96 :param cs: given changeset
97 97 :param path: path to lookup
98 98 """
99 99
100 100 try:
101 101 file_node = cs.get_node(path)
102 102 if file_node.is_dir():
103 103 raise RepositoryError('given path is a directory')
104 104 except RepositoryError, e:
105 105 h.flash(str(e), category='warning')
106 106 redirect(h.url('files_home', repo_name=repo_name,
107 107 revision=cs.raw_id))
108 108
109 109 return file_node
110 110
111 111 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
112 112 'repository.admin')
113 113 def index(self, repo_name, revision, f_path):
114 114 # redirect to given revision from form if given
115 115 post_revision = request.POST.get('at_rev', None)
116 116 if post_revision:
117 117 cs = self.__get_cs_or_redirect(post_revision, repo_name)
118 118 redirect(url('files_home', repo_name=c.repo_name,
119 119 revision=cs.raw_id, f_path=f_path))
120 120
121 121 c.changeset = self.__get_cs_or_redirect(revision, repo_name)
122 122 c.branch = request.GET.get('branch', None)
123 123 c.f_path = f_path
124 124
125 125 cur_rev = c.changeset.revision
126 126
127 127 # prev link
128 128 try:
129 129 prev_rev = c.rhodecode_repo.get_changeset(cur_rev).prev(c.branch)
130 130 c.url_prev = url('files_home', repo_name=c.repo_name,
131 131 revision=prev_rev.raw_id, f_path=f_path)
132 132 if c.branch:
133 133 c.url_prev += '?branch=%s' % c.branch
134 134 except (ChangesetDoesNotExistError, VCSError):
135 135 c.url_prev = '#'
136 136
137 137 # next link
138 138 try:
139 139 next_rev = c.rhodecode_repo.get_changeset(cur_rev).next(c.branch)
140 140 c.url_next = url('files_home', repo_name=c.repo_name,
141 141 revision=next_rev.raw_id, f_path=f_path)
142 142 if c.branch:
143 143 c.url_next += '?branch=%s' % c.branch
144 144 except (ChangesetDoesNotExistError, VCSError):
145 145 c.url_next = '#'
146 146
147 147 # files or dirs
148 148 try:
149 149 c.file = c.changeset.get_node(f_path)
150 150
151 151 if c.file.is_file():
152 152 c.file_history = self._get_node_history(c.changeset, f_path)
153 153 else:
154 154 c.file_history = []
155 155 except RepositoryError, e:
156 156 h.flash(str(e), category='warning')
157 157 redirect(h.url('files_home', repo_name=repo_name,
158 158 revision=revision))
159 159
160 160 return render('files/files.html')
161 161
162 162 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
163 163 'repository.admin')
164 164 def rawfile(self, repo_name, revision, f_path):
165 165 cs = self.__get_cs_or_redirect(revision, repo_name)
166 166 file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path)
167 167
168 168 response.content_disposition = 'attachment; filename=%s' % \
169 169 safe_str(f_path.split(os.sep)[-1])
170 170
171 171 response.content_type = file_node.mimetype
172 172 return file_node.content
173 173
174 174 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
175 175 'repository.admin')
176 176 def raw(self, repo_name, revision, f_path):
177 177 cs = self.__get_cs_or_redirect(revision, repo_name)
178 178 file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path)
179 179
180 180 raw_mimetype_mapping = {
181 181 # map original mimetype to a mimetype used for "show as raw"
182 182 # you can also provide a content-disposition to override the
183 183 # default "attachment" disposition.
184 184 # orig_type: (new_type, new_dispo)
185 185
186 186 # show images inline:
187 187 'image/x-icon': ('image/x-icon', 'inline'),
188 188 'image/png': ('image/png', 'inline'),
189 189 'image/gif': ('image/gif', 'inline'),
190 190 'image/jpeg': ('image/jpeg', 'inline'),
191 191 'image/svg+xml': ('image/svg+xml', 'inline'),
192 192 }
193 193
194 194 mimetype = file_node.mimetype
195 195 try:
196 196 mimetype, dispo = raw_mimetype_mapping[mimetype]
197 197 except KeyError:
198 198 # we don't know anything special about this, handle it safely
199 199 if file_node.is_binary:
200 200 # do same as download raw for binary files
201 201 mimetype, dispo = 'application/octet-stream', 'attachment'
202 202 else:
203 203 # do not just use the original mimetype, but force text/plain,
204 204 # otherwise it would serve text/html and that might be unsafe.
205 205 # Note: underlying vcs library fakes text/plain mimetype if the
206 206 # mimetype can not be determined and it thinks it is not
207 207 # binary. This might lead to erroneous text display in some
208 208 # cases, but helps in other cases, like with text files
209 209 # without extension.
210 210 mimetype, dispo = 'text/plain', 'inline'
211 211
212 212 if dispo == 'attachment':
213 213 dispo = 'attachment; filename=%s' % \
214 214 safe_str(f_path.split(os.sep)[-1])
215 215
216 216 response.content_disposition = dispo
217 217 response.content_type = mimetype
218 218 return file_node.content
219 219
220 220 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
221 221 'repository.admin')
222 222 def annotate(self, repo_name, revision, f_path):
223 223 c.cs = self.__get_cs_or_redirect(revision, repo_name)
224 224 c.file = self.__get_filenode_or_redirect(repo_name, c.cs, f_path)
225 225
226 226 c.file_history = self._get_node_history(c.cs, f_path)
227 227 c.f_path = f_path
228 228 return render('files/files_annotate.html')
229 229
230 230 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
231 231 def edit(self, repo_name, revision, f_path):
232 232 r_post = request.POST
233 233
234 234 c.cs = self.__get_cs_or_redirect(revision, repo_name)
235 235 c.file = self.__get_filenode_or_redirect(repo_name, c.cs, f_path)
236 236
237 237 if c.file.is_binary:
238 238 return redirect(url('files_home', repo_name=c.repo_name,
239 239 revision=c.cs.raw_id, f_path=f_path))
240 240
241 241 c.f_path = f_path
242 242
243 243 if r_post:
244 244
245 245 old_content = c.file.content
246 246 sl = old_content.splitlines(1)
247 247 first_line = sl[0] if sl else ''
248 248 # modes: 0 - Unix, 1 - Mac, 2 - DOS
249 249 mode = detect_mode(first_line, 0)
250 250 content = convert_line_endings(r_post.get('content'), mode)
251 251
252 252 message = r_post.get('message') or (_('Edited %s via RhodeCode')
253 253 % (f_path))
254 254 author = self.rhodecode_user.full_contact
255 255
256 256 if content == old_content:
257 257 h.flash(_('No changes'),
258 258 category='warning')
259 259 return redirect(url('changeset_home', repo_name=c.repo_name,
260 260 revision='tip'))
261 261
262 262 try:
263 263 self.scm_model.commit_change(repo=c.rhodecode_repo,
264 264 repo_name=repo_name, cs=c.cs,
265 265 user=self.rhodecode_user,
266 266 author=author, message=message,
267 267 content=content, f_path=f_path)
268 268 h.flash(_('Successfully committed to %s' % f_path),
269 269 category='success')
270 270
271 271 except Exception:
272 272 log.error(traceback.format_exc())
273 273 h.flash(_('Error occurred during commit'), category='error')
274 274 return redirect(url('changeset_home',
275 275 repo_name=c.repo_name, revision='tip'))
276 276
277 277 return render('files/files_edit.html')
278 278
279 279 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
280 280 def add(self, repo_name, revision, f_path):
281 281 r_post = request.POST
282 282 c.cs = self.__get_cs_or_redirect(revision, repo_name,
283 283 redirect_after=False)
284 284 if c.cs is None:
285 285 c.cs = EmptyChangeset(alias=c.rhodecode_repo.alias)
286 286
287 287 c.f_path = f_path
288 288
289 289 if r_post:
290 290 unix_mode = 0
291 291 content = convert_line_endings(r_post.get('content'), unix_mode)
292 292
293 293 message = r_post.get('message') or (_('Added %s via RhodeCode')
294 294 % (f_path))
295 295 location = r_post.get('location')
296 296 filename = r_post.get('filename')
297 297 file_obj = r_post.get('upload_file', None)
298 298
299 299 if file_obj is not None and hasattr(file_obj, 'filename'):
300 300 filename = file_obj.filename
301 301 content = file_obj.file
302 302
303 303 node_path = os.path.join(location, filename)
304 304 author = self.rhodecode_user.full_contact
305 305
306 306 if not content:
307 307 h.flash(_('No content'), category='warning')
308 308 return redirect(url('changeset_home', repo_name=c.repo_name,
309 309 revision='tip'))
310 310 if not filename:
311 311 h.flash(_('No filename'), category='warning')
312 312 return redirect(url('changeset_home', repo_name=c.repo_name,
313 313 revision='tip'))
314 314
315 315 try:
316 316 self.scm_model.create_node(repo=c.rhodecode_repo,
317 317 repo_name=repo_name, cs=c.cs,
318 318 user=self.rhodecode_user,
319 319 author=author, message=message,
320 320 content=content, f_path=node_path)
321 321 h.flash(_('Successfully committed to %s' % node_path),
322 322 category='success')
323 323 except NodeAlreadyExistsError, e:
324 324 h.flash(_(e), category='error')
325 325 except Exception:
326 326 log.error(traceback.format_exc())
327 327 h.flash(_('Error occurred during commit'), category='error')
328 328 return redirect(url('changeset_home',
329 329 repo_name=c.repo_name, revision='tip'))
330 330
331 331 return render('files/files_add.html')
332 332
333 333 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
334 334 'repository.admin')
335 335 def archivefile(self, repo_name, fname):
336 336
337 337 fileformat = None
338 338 revision = None
339 339 ext = None
340 340 subrepos = request.GET.get('subrepos') == 'true'
341 341
342 342 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
343 343 archive_spec = fname.split(ext_data[1])
344 344 if len(archive_spec) == 2 and archive_spec[1] == '':
345 345 fileformat = a_type or ext_data[1]
346 346 revision = archive_spec[0]
347 347 ext = ext_data[1]
348 348
349 349 try:
350 350 dbrepo = RepoModel().get_by_repo_name(repo_name)
351 351 if dbrepo.enable_downloads is False:
352 352 return _('downloads disabled')
353 353
354 354 if c.rhodecode_repo.alias == 'hg':
355 355 # patch and reset hooks section of UI config to not run any
356 356 # hooks on fetching archives with subrepos
357 357 for k, v in c.rhodecode_repo._repo.ui.configitems('hooks'):
358 358 c.rhodecode_repo._repo.ui.setconfig('hooks', k, None)
359 359
360 360 cs = c.rhodecode_repo.get_changeset(revision)
361 361 content_type = settings.ARCHIVE_SPECS[fileformat][0]
362 362 except ChangesetDoesNotExistError:
363 363 return _('Unknown revision %s') % revision
364 364 except EmptyRepositoryError:
365 365 return _('Empty repository')
366 366 except (ImproperArchiveTypeError, KeyError):
367 367 return _('Unknown archive type')
368 368
369 369 response.content_type = content_type
370 370 response.content_disposition = 'attachment; filename=%s-%s%s' \
371 371 % (repo_name, revision, ext)
372 372
373 373 import tempfile
374 374 archive = tempfile.mkstemp()[1]
375 375 t = open(archive, 'wb')
376 376 cs.fill_archive(stream=t, kind=fileformat, subrepos=subrepos)
377 377
378 378 def get_chunked_archive(archive):
379 379 stream = open(archive, 'rb')
380 380 while True:
381 381 data = stream.read(4096)
382 382 if not data:
383 383 os.remove(archive)
384 384 break
385 385 yield data
386 386
387 387 return get_chunked_archive(archive)
388 388
389 389 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
390 390 'repository.admin')
391 391 def diff(self, repo_name, f_path):
392 392 ignore_whitespace = request.GET.get('ignorews') == '1'
393 393 line_context = request.GET.get('context', 3)
394 394 diff1 = request.GET.get('diff1', '')
395 395 diff2 = request.GET.get('diff2', '')
396 396 c.action = request.GET.get('diff')
397 397 c.no_changes = diff1 == diff2
398 398 c.f_path = f_path
399 399 c.big_diff = False
400 400 c.anchor_url = anchor_url
401 401 c.ignorews_url = _ignorews_url
402 402 c.context_url = _context_url
403 403 c.changes = OrderedDict()
404 404 c.changes[diff2] = []
405 405 try:
406 406 if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
407 407 c.changeset_1 = c.rhodecode_repo.get_changeset(diff1)
408 408 node1 = c.changeset_1.get_node(f_path)
409 409 else:
410 410 c.changeset_1 = EmptyChangeset(repo=c.rhodecode_repo)
411 411 node1 = FileNode('.', '', changeset=c.changeset_1)
412 412
413 413 if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
414 414 c.changeset_2 = c.rhodecode_repo.get_changeset(diff2)
415 415 node2 = c.changeset_2.get_node(f_path)
416 416 else:
417 417 c.changeset_2 = EmptyChangeset(repo=c.rhodecode_repo)
418 418 node2 = FileNode('.', '', changeset=c.changeset_2)
419 419 except RepositoryError:
420 420 return redirect(url('files_home', repo_name=c.repo_name,
421 421 f_path=f_path))
422 422
423 423 if c.action == 'download':
424 424 _diff = diffs.get_gitdiff(node1, node2,
425 425 ignore_whitespace=ignore_whitespace,
426 426 context=line_context)
427 427 diff = diffs.DiffProcessor(_diff, format='gitdiff')
428 428
429 429 diff_name = '%s_vs_%s.diff' % (diff1, diff2)
430 430 response.content_type = 'text/plain'
431 431 response.content_disposition = 'attachment; filename=%s' \
432 432 % diff_name
433 433 return diff.raw_diff()
434 434
435 435 elif c.action == 'raw':
436 436 _diff = diffs.get_gitdiff(node1, node2,
437 437 ignore_whitespace=ignore_whitespace,
438 438 context=line_context)
439 439 diff = diffs.DiffProcessor(_diff, format='gitdiff')
440 440 response.content_type = 'text/plain'
441 441 return diff.raw_diff()
442 442
443 443 else:
444 444 fid = h.FID(diff2, node2.path)
445 445 line_context_lcl = get_line_ctx(fid, request.GET)
446 446 ign_whitespace_lcl = get_ignore_ws(fid, request.GET)
447 447
448 448 lim = request.GET.get('fulldiff') or self.cut_off_limit
449 449 _, cs1, cs2, diff, st = wrapped_diff(filenode_old=node1,
450 450 filenode_new=node2,
451 451 cut_off_limit=lim,
452 452 ignore_whitespace=ign_whitespace_lcl,
453 453 line_context=line_context_lcl,
454 454 enable_comments=False)
455 455
456 456 c.changes = [('', node2, diff, cs1, cs2, st,)]
457 457
458 458 return render('files/file_diff.html')
459 459
460 460 def _get_node_history(self, cs, f_path):
461 461 changesets = cs.get_file_history(f_path)
462 462 hist_l = []
463 463
464 464 changesets_group = ([], _("Changesets"))
465 465 branches_group = ([], _("Branches"))
466 466 tags_group = ([], _("Tags"))
467 467
468 468 for chs in changesets:
469 469 n_desc = 'r%s:%s (%s)' % (chs.revision, chs.short_id, chs.branch)
470 470 changesets_group[0].append((chs.raw_id, n_desc,))
471 471
472 472 hist_l.append(changesets_group)
473 473
474 474 for name, chs in c.rhodecode_repo.branches.items():
475 475 branches_group[0].append((chs, name),)
476 476 hist_l.append(branches_group)
477 477
478 478 for name, chs in c.rhodecode_repo.tags.items():
479 479 tags_group[0].append((chs, name),)
480 480 hist_l.append(tags_group)
481 481
482 482 return hist_l
483 483
484 484 @jsonify
485 485 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
486 486 'repository.admin')
487 487 def nodelist(self, repo_name, revision, f_path):
488 488 if request.environ.get('HTTP_X_PARTIAL_XHR'):
489 489 cs = self.__get_cs_or_redirect(revision, repo_name)
490 490 _d, _f = ScmModel().get_nodes(repo_name, cs.raw_id, f_path,
491 491 flat=False)
492 492 return _d + _f
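# Illustrative sketch, not part of this changeset: how archivefile() above
# splits its fname argument.  The real ARCHIVE_SPECS mapping lives in
# rhodecode.lib.vcs.conf.settings; the entry used here ('tgz' mapping to
# '.tar.gz' / 'application/x-gzip') and the revision hash are assumptions
# made only for the example.
fname = 'f3b2c1a0d9e8.tar.gz'
ext_data = ('application/x-gzip', '.tar.gz')    # assumed ARCHIVE_SPECS['tgz']
archive_spec = fname.split(ext_data[1])         # ['f3b2c1a0d9e8', '']
revision, fileformat, ext = archive_spec[0], 'tgz', ext_data[1]
# revision 'f3b2c1a0d9e8' is then resolved via get_changeset() and streamed
# back as '<repo_name>-f3b2c1a0d9e8.tar.gz' with content type ext_data[0]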
@@ -1,233 +1,233 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.controllers.summary
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 Summary controller for Rhodecode
7 7
8 8 :created_on: Apr 18, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import traceback
27 27 import calendar
28 28 import logging
29 29 from time import mktime
30 30 from datetime import timedelta, date
31 31 from itertools import product
32 32 from urlparse import urlparse
33 33
34 from vcs.exceptions import ChangesetError, EmptyRepositoryError, \
34 from rhodecode.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, \
35 35 NodeDoesNotExistError
36 36
37 37 from pylons import tmpl_context as c, request, url, config
38 38 from pylons.i18n.translation import _
39 39
40 40 from beaker.cache import cache_region, region_invalidate
41 41
42 42 from rhodecode.model.db import Statistics, CacheInvalidation
43 43 from rhodecode.lib import ALL_READMES, ALL_EXTS
44 44 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
45 45 from rhodecode.lib.base import BaseRepoController, render
46 46 from rhodecode.lib.utils import EmptyChangeset
47 47 from rhodecode.lib.markup_renderer import MarkupRenderer
48 48 from rhodecode.lib.celerylib import run_task
49 49 from rhodecode.lib.celerylib.tasks import get_commits_stats, \
50 50 LANGUAGES_EXTENSIONS_MAP
51 51 from rhodecode.lib.helpers import RepoPage
52 52 from rhodecode.lib.compat import json, OrderedDict
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56 README_FILES = [''.join([x[0][0], x[1][0]]) for x in
57 57 sorted(list(product(ALL_READMES, ALL_EXTS)),
58 58 key=lambda y:y[0][1] + y[1][1])]
59 59
60 60
61 61 class SummaryController(BaseRepoController):
62 62
63 63 @LoginRequired()
64 64 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
65 65 'repository.admin')
66 66 def __before__(self):
67 67 super(SummaryController, self).__before__()
68 68
69 69 def index(self, repo_name):
70 70 c.dbrepo = dbrepo = c.rhodecode_db_repo
71 71 c.following = self.scm_model.is_following_repo(repo_name,
72 72 self.rhodecode_user.user_id)
73 73
74 74 def url_generator(**kw):
75 75 return url('shortlog_home', repo_name=repo_name, size=10, **kw)
76 76
77 77 c.repo_changesets = RepoPage(c.rhodecode_repo, page=1,
78 78 items_per_page=10, url=url_generator)
79 79
80 80 if self.rhodecode_user.username == 'default':
81 81 # for default(anonymous) user we don't need to pass credentials
82 82 username = ''
83 83 password = ''
84 84 else:
85 85 username = str(self.rhodecode_user.username)
86 86 password = '@'
87 87
88 88 parsed_url = urlparse(url.current(qualified=True))
89 89
90 90 default_clone_uri = '{scheme}://{user}{pass}{netloc}{path}'
91 91
92 92 uri_tmpl = config.get('clone_uri', default_clone_uri)
93 93 uri_tmpl = uri_tmpl.replace('{', '%(').replace('}', ')s')
94 94
95 95 uri_dict = {
96 96 'user': username,
97 97 'pass': password,
98 98 'scheme': parsed_url.scheme,
99 99 'netloc': parsed_url.netloc,
100 100 'path': parsed_url.path
101 101 }
102 102 uri = uri_tmpl % uri_dict
103 103 # generate another clone url by id
104 104 uri_dict.update({'path': '/_%s' % c.dbrepo.repo_id})
105 105 uri_id = uri_tmpl % uri_dict
106 106
107 107 c.clone_repo_url = uri
108 108 c.clone_repo_url_id = uri_id
109 109 c.repo_tags = OrderedDict()
110 110 for name, hash in c.rhodecode_repo.tags.items()[:10]:
111 111 try:
112 112 c.repo_tags[name] = c.rhodecode_repo.get_changeset(hash)
113 113 except ChangesetError:
114 114 c.repo_tags[name] = EmptyChangeset(hash)
115 115
116 116 c.repo_branches = OrderedDict()
117 117 for name, hash in c.rhodecode_repo.branches.items()[:10]:
118 118 try:
119 119 c.repo_branches[name] = c.rhodecode_repo.get_changeset(hash)
120 120 except ChangesetError:
121 121 c.repo_branches[name] = EmptyChangeset(hash)
122 122
123 123 td = date.today() + timedelta(days=1)
124 124 td_1m = td - timedelta(days=calendar.mdays[td.month])
125 125 td_1y = td - timedelta(days=365)
126 126
127 127 ts_min_m = mktime(td_1m.timetuple())
128 128 ts_min_y = mktime(td_1y.timetuple())
129 129 ts_max_y = mktime(td.timetuple())
130 130
131 131 if dbrepo.enable_statistics:
132 132 c.show_stats = True
133 133 c.no_data_msg = _('No data loaded yet')
134 134 run_task(get_commits_stats, c.dbrepo.repo_name, ts_min_y, ts_max_y)
135 135 else:
136 136 c.show_stats = False
137 137 c.no_data_msg = _('Statistics are disabled for this repository')
138 138 c.ts_min = ts_min_m
139 139 c.ts_max = ts_max_y
140 140
141 141 stats = self.sa.query(Statistics)\
142 142 .filter(Statistics.repository == dbrepo)\
143 143 .scalar()
144 144
145 145 c.stats_percentage = 0
146 146
147 147 if stats and stats.languages:
148 148 c.no_data = False is dbrepo.enable_statistics
149 149 lang_stats_d = json.loads(stats.languages)
150 150 c.commit_data = stats.commit_activity
151 151 c.overview_data = stats.commit_activity_combined
152 152
153 153 lang_stats = ((x, {"count": y,
154 154 "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
155 155 for x, y in lang_stats_d.items())
156 156
157 157 c.trending_languages = json.dumps(
158 158 sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10]
159 159 )
160 160 last_rev = stats.stat_on_revision + 1
161 161 c.repo_last_rev = c.rhodecode_repo.count()\
162 162 if c.rhodecode_repo.revisions else 0
163 163 if last_rev == 0 or c.repo_last_rev == 0:
164 164 pass
165 165 else:
166 166 c.stats_percentage = '%.2f' % ((float((last_rev)) /
167 167 c.repo_last_rev) * 100)
168 168 else:
169 169 c.commit_data = json.dumps({})
170 170 c.overview_data = json.dumps([[ts_min_y, 0], [ts_max_y, 10]])
171 171 c.trending_languages = json.dumps({})
172 172 c.no_data = True
173 173
174 174 c.enable_downloads = dbrepo.enable_downloads
175 175 if c.enable_downloads:
176 176 c.download_options = self._get_download_links(c.rhodecode_repo)
177 177
178 178 c.readme_data, c.readme_file = self.__get_readme_data(c.rhodecode_repo)
179 179 return render('summary/summary.html')
180 180
181 181 def __get_readme_data(self, repo):
182 182
183 183 @cache_region('long_term')
184 184 def _get_readme_from_cache(key):
185 185 readme_data = None
186 186 readme_file = None
187 187 log.debug('Fetching readme file')
188 188 try:
189 189 cs = repo.get_changeset('tip')
190 190 renderer = MarkupRenderer()
191 191 for f in README_FILES:
192 192 try:
193 193 readme = cs.get_node(f)
194 194 readme_file = f
195 195 readme_data = renderer.render(readme.content, f)
196 196 log.debug('Found readme %s' % readme_file)
197 197 break
198 198 except NodeDoesNotExistError:
199 199 continue
200 200 except ChangesetError:
201 201 pass
202 202 except EmptyRepositoryError:
203 203 pass
204 204 except Exception:
205 205 log.error(traceback.format_exc())
206 206
207 207 return readme_data, readme_file
208 208
209 209 key = repo.name + '_README'
210 210 inv = CacheInvalidation.invalidate(key)
211 211 if inv is not None:
212 212 region_invalidate(_get_readme_from_cache, None, key)
213 213 CacheInvalidation.set_valid(inv.cache_key)
214 214 return _get_readme_from_cache(key)
215 215
216 216 def _get_download_links(self, repo):
217 217
218 218 download_l = []
219 219
220 220 branches_group = ([], _("Branches"))
221 221 tags_group = ([], _("Tags"))
222 222
223 223 for name, chs in c.rhodecode_repo.branches.items():
224 224 #chs = chs.split(':')[-1]
225 225 branches_group[0].append((chs, name),)
226 226 download_l.append(branches_group)
227 227
228 228 for name, chs in c.rhodecode_repo.tags.items():
229 229 #chs = chs.split(':')[-1]
230 230 tags_group[0].append((chs, name),)
231 231 download_l.append(tags_group)
232 232
233 233 return download_l
@@ -1,454 +1,454 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.__init__
4 4 ~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 Some simple helper functions
7 7
8 8 :created_on: Jan 5, 2011
9 9 :author: marcink
10 10 :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import re
28 from vcs.utils.lazy import LazyProperty
28 from rhodecode.lib.vcs.utils.lazy import LazyProperty
29 29
30 30
31 31 def __get_lem():
32 32 from pygments import lexers
33 33 from string import lower
34 34 from collections import defaultdict
35 35
36 36 d = defaultdict(lambda: [])
37 37
38 38 def __clean(s):
39 39 s = s.lstrip('*')
40 40 s = s.lstrip('.')
41 41
42 42 if s.find('[') != -1:
43 43 exts = []
44 44 start, stop = s.find('['), s.find(']')
45 45
46 46 for suffix in s[start + 1:stop]:
47 47 exts.append(s[:s.find('[')] + suffix)
48 48 return map(lower, exts)
49 49 else:
50 50 return map(lower, [s])
51 51
52 52 for lx, t in sorted(lexers.LEXERS.items()):
53 53 m = map(__clean, t[-2])
54 54 if m:
55 55 m = reduce(lambda x, y: x + y, m)
56 56 for ext in m:
57 57 desc = lx.replace('Lexer', '')
58 58 d[ext].append(desc)
59 59
60 60 return dict(d)
61 61
62 62 # language map is also used by the whoosh indexer, which will index the
63 63 # content of files with the specified extensions
64 64 LANGUAGES_EXTENSIONS_MAP = __get_lem()
65 65
66 66 # Additional mappings that are not present in the pygments lexers
67 67 # NOTE: this will override any mappings in LANGUAGES_EXTENSIONS_MAP
68 68 ADDITIONAL_MAPPINGS = {'xaml': 'XAML'}
69 69
70 70 LANGUAGES_EXTENSIONS_MAP.update(ADDITIONAL_MAPPINGS)
71 71
72 72 # list of readme files to search for in the file tree and display in summary
73 73 # attached weights define the search order; lower is first
74 74 ALL_READMES = [
75 75 ('readme', 0), ('README', 0), ('Readme', 0),
76 76 ('doc/readme', 1), ('doc/README', 1), ('doc/Readme', 1),
77 77 ('Docs/readme', 2), ('Docs/README', 2), ('Docs/Readme', 2),
78 78 ('DOCS/readme', 2), ('DOCS/README', 2), ('DOCS/Readme', 2),
79 79 ('docs/readme', 2), ('docs/README', 2), ('docs/Readme', 2),
80 80 ]
81 81
82 82 # extensions together with weights used for searching; lower is first
83 83 RST_EXTS = [
84 84 ('', 0), ('.rst', 1), ('.rest', 1),
85 ('.RST', 2) , ('.REST', 2),
85 ('.RST', 2), ('.REST', 2),
86 86 ('.txt', 3), ('.TXT', 3)
87 87 ]
88 88
89 89 MARKDOWN_EXTS = [
90 90 ('.md', 1), ('.MD', 1),
91 91 ('.mkdn', 2), ('.MKDN', 2),
92 92 ('.mdown', 3), ('.MDOWN', 3),
93 93 ('.markdown', 4), ('.MARKDOWN', 4)
94 94 ]
95 95
96 96 PLAIN_EXTS = [('.text', 2), ('.TEXT', 2)]
97 97
98 98 ALL_EXTS = MARKDOWN_EXTS + RST_EXTS + PLAIN_EXTS
99 99
100 100
101 101 def str2bool(_str):
102 102 """
103 103 returns True/False value from the given string; it tries to translate the
104 104 string into a boolean
105 105
106 106 :param _str: string value to translate into boolean
107 107 :rtype: boolean
108 108 :returns: boolean from given string
109 109 """
110 110 if _str is None:
111 111 return False
112 112 if _str in (True, False):
113 113 return _str
114 114 _str = str(_str).strip().lower()
115 115 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
116 116
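# Illustrative usage sketch (not part of this changeset):
#
# str2bool('yes') # -> True
# str2bool('0') # -> False
# str2bool(None) # -> False
# str2bool(True) # -> True (already a boolean, returned as-is)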
117 117
118 118 def convert_line_endings(line, mode):
119 119 """
120 120 Converts a given line's "line end" according to the given mode
121 121
122 122 Available modes are::
123 123 0 - Unix
124 124 1 - Mac
125 125 2 - DOS
126 126
127 127 :param line: given line to convert
128 128 :param mode: mode to convert to
129 129 :rtype: str
130 130 :return: converted line according to mode
131 131 """
132 132 from string import replace
133 133
134 134 if mode == 0:
135 135 line = replace(line, '\r\n', '\n')
136 136 line = replace(line, '\r', '\n')
137 137 elif mode == 1:
138 138 line = replace(line, '\r\n', '\r')
139 139 line = replace(line, '\n', '\r')
140 140 elif mode == 2:
141 import re
142 141 line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line)
143 142 return line
144 143
145 144
146 145 def detect_mode(line, default):
147 146 """
148 147 Detects the line break for a given line; if the line break couldn't be found,
149 148 the given default value is returned
150 149
151 150 :param line: str line
152 151 :param default: default
153 152 :rtype: int
154 153 :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
155 154 """
156 155 if line.endswith('\r\n'):
157 156 return 2
158 157 elif line.endswith('\n'):
159 158 return 0
160 159 elif line.endswith('\r'):
161 160 return 1
162 161 else:
163 162 return default
164 163
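# Illustrative usage sketch (not part of this changeset): detect the line
# ending of a DOS-style line and normalize it to Unix style.
#
# line = 'some text\r\n'
# mode = detect_mode(line, 0) # -> 2 (DOS)
# convert_line_endings(line, 0) # -> 'some text\n'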
165 164
166 165 def generate_api_key(username, salt=None):
167 166 """
168 167 Generates a unique API key for the given username; if salt is not given
169 168 it'll be generated from a random string
170 169
171 170 :param username: username as string
172 171 :param salt: salt to hash generate KEY
173 172 :rtype: str
174 173 :returns: sha1 hash from username+salt
175 174 """
176 175 from tempfile import _RandomNameSequence
177 176 import hashlib
178 177
179 178 if salt is None:
180 179 salt = _RandomNameSequence().next()
181 180
182 181 return hashlib.sha1(username + salt).hexdigest()
183 182
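# Illustrative usage sketch (not part of this changeset): with a fixed salt
# the result is deterministic, otherwise a random salt is generated.
#
# generate_api_key('marcin', salt='abc') # == hashlib.sha1('marcinabc').hexdigest()
# generate_api_key('marcin') # 40-character hex digest, random salt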
184 183
185 184 def safe_unicode(str_, from_encoding='utf8'):
186 185 """
187 186 safe unicode function. Does a few tricks to turn str_ into unicode
188 187
189 188 In case of a UnicodeDecodeError we try to return it with the encoding detected
190 189 by the chardet library; if that fails, fall back to unicode with errors replaced
191 190
192 191 :param str_: string to decode
193 192 :rtype: unicode
194 193 :returns: unicode object
195 194 """
196 195 if isinstance(str_, unicode):
197 196 return str_
198 197
199 198 try:
200 199 return unicode(str_)
201 200 except UnicodeDecodeError:
202 201 pass
203 202
204 203 try:
205 204 return unicode(str_, from_encoding)
206 205 except UnicodeDecodeError:
207 206 pass
208 207
209 208 try:
210 209 import chardet
211 210 encoding = chardet.detect(str_)['encoding']
212 211 if encoding is None:
213 212 raise Exception()
214 213 return str_.decode(encoding)
215 214 except (ImportError, UnicodeDecodeError, Exception):
216 215 return unicode(str_, from_encoding, 'replace')
217 216
218 217
219 218 def safe_str(unicode_, to_encoding='utf8'):
220 219 """
221 220 safe str function. Does a few tricks to turn unicode_ into a string
222 221
223 222 In case of a UnicodeEncodeError we try to return it with the encoding detected
224 223 by the chardet library; if that fails, fall back to a string with errors replaced
225 224
226 225 :param unicode_: unicode to encode
227 226 :rtype: str
228 227 :returns: str object
229 228 """
230 229
231 230 if not isinstance(unicode_, basestring):
232 231 return str(unicode_)
233 232
234 233 if isinstance(unicode_, str):
235 234 return unicode_
236 235
237 236 try:
238 237 return unicode_.encode(to_encoding)
239 238 except UnicodeEncodeError:
240 239 pass
241 240
242 241 try:
243 242 import chardet
244 243 encoding = chardet.detect(unicode_)['encoding']
245 244 print encoding
246 245 if encoding is None:
247 246 raise UnicodeEncodeError()
248 247
249 248 return unicode_.encode(encoding)
250 249 except (ImportError, UnicodeEncodeError):
251 250 return unicode_.encode(to_encoding, 'replace')
252 251
253 252 return safe_str
254 253
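# Illustrative usage sketch (not part of this changeset):
#
# safe_unicode('ąęł') # -> u'\u0105\u0119\u0142' (utf8 bytes decoded to unicode)
# safe_str(u'ąęł') # -> utf8-encoded byte string
# safe_str(123) # -> '123' (non-string input goes through str())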
255 254
256 255 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
257 256 """
258 257 Custom engine_from_config functions that makes sure we use NullPool for
259 258 file based sqlite databases. This prevents errors on sqlite. This only
260 259 applies to sqlalchemy versions < 0.7.0
261 260
262 261 """
263 262 import sqlalchemy
264 263 from sqlalchemy import engine_from_config as efc
265 264 import logging
266 265
267 266 if int(sqlalchemy.__version__.split('.')[1]) < 7:
268 267
269 268 # This solution should work for sqlalchemy < 0.7.0, and should use
270 269 # proxy=TimerProxy() for execution time profiling
271 270
272 271 from sqlalchemy.pool import NullPool
273 272 url = configuration[prefix + 'url']
274 273
275 274 if url.startswith('sqlite'):
276 275 kwargs.update({'poolclass': NullPool})
277 276 return efc(configuration, prefix, **kwargs)
278 277 else:
279 278 import time
280 279 from sqlalchemy import event
281 280 from sqlalchemy.engine import Engine
282 281
283 282 log = logging.getLogger('sqlalchemy.engine')
284 283 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
285 284 engine = efc(configuration, prefix, **kwargs)
286 285
287 286 def color_sql(sql):
288 287 COLOR_SEQ = "\033[1;%dm"
289 288 COLOR_SQL = YELLOW
290 289 normal = '\x1b[0m'
291 290 return ''.join([COLOR_SEQ % COLOR_SQL, sql, normal])
292 291
293 292 if configuration['debug']:
294 293 #attach events only for debug configuration
295 294
296 295 def before_cursor_execute(conn, cursor, statement,
297 296 parameters, context, executemany):
298 297 context._query_start_time = time.time()
299 298 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
300 299
301 300
302 301 def after_cursor_execute(conn, cursor, statement,
303 302 parameters, context, executemany):
304 303 total = time.time() - context._query_start_time
305 304 log.info(color_sql("<<<<< TOTAL TIME: %f <<<<<" % total))
306 305
307 306 event.listen(engine, "before_cursor_execute",
308 307 before_cursor_execute)
309 308 event.listen(engine, "after_cursor_execute",
310 309 after_cursor_execute)
311 310
312 311 return engine
313 312
314 313
315 314 def age(curdate):
316 315 """
317 316 turns a datetime into an age string.
318 317
319 318 :param curdate: datetime object
320 319 :rtype: unicode
321 320 :returns: unicode words describing age
322 321 """
323 322
324 323 from datetime import datetime
325 324 from webhelpers.date import time_ago_in_words
326 325
327 _ = lambda s:s
326 _ = lambda s: s
328 327
329 328 if not curdate:
330 329 return ''
331 330
332 331 agescales = [(_(u"year"), 3600 * 24 * 365),
333 332 (_(u"month"), 3600 * 24 * 30),
334 333 (_(u"day"), 3600 * 24),
335 334 (_(u"hour"), 3600),
336 335 (_(u"minute"), 60),
337 336 (_(u"second"), 1), ]
338 337
339 338 age = datetime.now() - curdate
340 339 age_seconds = (age.days * agescales[2][1]) + age.seconds
341 340 pos = 1
342 341 for scale in agescales:
343 342 if scale[1] <= age_seconds:
344 if pos == 6:pos = 5
343 if pos == 6:
344 pos = 5
345 345 return '%s %s' % (time_ago_in_words(curdate,
346 346 agescales[pos][0]), _('ago'))
347 347 pos += 1
348 348
349 349 return _(u'just now')
350 350
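# Illustrative usage sketch (not part of this changeset):
#
# from datetime import datetime, timedelta
# age(datetime.now() - timedelta(hours=3)) # -> roughly u'3 hours ago'
# age(None) # -> ''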
351 351
352 352 def uri_filter(uri):
353 353 """
354 354 Removes user:password from given url string
355 355
356 356 :param uri:
357 357 :rtype: unicode
358 358 :returns: filtered list of strings
359 359 """
360 360 if not uri:
361 361 return ''
362 362
363 363 proto = ''
364 364
365 365 for pat in ('https://', 'http://'):
366 366 if uri.startswith(pat):
367 367 uri = uri[len(pat):]
368 368 proto = pat
369 369 break
370 370
371 371 # remove passwords and username
372 372 uri = uri[uri.find('@') + 1:]
373 373
374 374 # get the port
375 375 cred_pos = uri.find(':')
376 376 if cred_pos == -1:
377 377 host, port = uri, None
378 378 else:
379 379 host, port = uri[:cred_pos], uri[cred_pos + 1:]
380 380
381 381 return filter(None, [proto, host, port])
382 382
383 383
384 384 def credentials_filter(uri):
385 385 """
386 386 Returns a url with removed credentials
387 387
388 388 :param uri:
389 389 """
390 390
391 391 uri = uri_filter(uri)
392 392 #check if we have port
393 393 if len(uri) > 2 and uri[2]:
394 394 uri[2] = ':' + uri[2]
395 395
396 396 return ''.join(uri)
397 397
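# Illustrative usage sketch (not part of this changeset):
#
# uri_filter('http://user:secret@example.com:8080/repo')
# # -> ['http://', 'example.com', '8080/repo']
# credentials_filter('http://user:secret@example.com:8080/repo')
# # -> 'http://example.com:8080/repo'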
398 398
399 399 def get_changeset_safe(repo, rev):
400 400 """
401 401 Safe version of get_changeset; if this changeset doesn't exist for a
402 402 repo it returns a Dummy one instead
403 403
404 404 :param repo:
405 405 :param rev:
406 406 """
407 from vcs.backends.base import BaseRepository
408 from vcs.exceptions import RepositoryError
407 from rhodecode.lib.vcs.backends.base import BaseRepository
408 from rhodecode.lib.vcs.exceptions import RepositoryError
409 409 if not isinstance(repo, BaseRepository):
410 410 raise Exception('You must pass a Repository '
411 411 'object as first argument, got %s' % type(repo))
412 412
413 413 try:
414 414 cs = repo.get_changeset(rev)
415 415 except RepositoryError:
416 416 from rhodecode.lib.utils import EmptyChangeset
417 417 cs = EmptyChangeset(requested_revision=rev)
418 418 return cs
419 419
420 420
421 421 def get_current_revision(quiet=False):
422 422 """
423 423 Returns tuple of (number, id) from repository containing this package
424 424 or None if repository could not be found.
425 425
426 426 :param quiet: prints error for fetching revision if True
427 427 """
428 428
429 429 try:
430 from vcs import get_repo
431 from vcs.utils.helpers import get_scm
430 from rhodecode.lib.vcs import get_repo
431 from rhodecode.lib.vcs.utils.helpers import get_scm
432 432 repopath = os.path.join(os.path.dirname(__file__), '..', '..')
433 433 scm = get_scm(repopath)[0]
434 434 repo = get_repo(path=repopath, alias=scm)
435 435 tip = repo.get_changeset()
436 436 return (tip.revision, tip.short_id)
437 437 except Exception, err:
438 438 if not quiet:
439 439 print ("Cannot retrieve rhodecode's revision. Original error "
440 440 "was: %s" % err)
441 441 return None
442 442
443 443
444 444 def extract_mentioned_users(s):
445 445 """
446 446 Returns unique usernames mentioned with @ in the given string s
447 447
448 448 :param s: string to get mentions
449 449 """
450 450 usrs = {}
451 451 for username in re.findall(r'(?:^@|\s@)(\w+)', s):
452 452 usrs[username] = username
453 453
454 454 return sorted(usrs.keys())
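# Illustrative usage sketch (not part of this changeset):
#
# extract_mentioned_users('thanks @marcin, please review @lukasz @marcin')
# # -> ['lukasz', 'marcin'] (unique usernames, sorted)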
@@ -1,190 +1,190 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.annotate
4 4 ~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 Annotation library for usage in rhodecode, previously part of vcs
7 7
8 8 :created_on: Dec 4, 2011
9 9 :author: marcink
10 10 :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13
14 from vcs.exceptions import VCSError
15 from vcs.nodes import FileNode
14 from rhodecode.lib.vcs.exceptions import VCSError
15 from rhodecode.lib.vcs.nodes import FileNode
16 16 from pygments.formatters import HtmlFormatter
17 17 from pygments import highlight
18 18
19 19 import StringIO
20 20
21 21
22 22 def annotate_highlight(filenode, annotate_from_changeset_func=None,
23 23 order=None, headers=None, **options):
24 24 """
25 25 Returns html portion containing annotated table with 3 columns: line
26 26 numbers, changeset information and pygmentized line of code.
27 27
28 28 :param filenode: FileNode object
29 29 :param annotate_from_changeset_func: function taking a changeset and
30 30 returning a single annotate cell; needs a line break at the end
31 31 :param order: ordered sequence of ``ls`` (line numbers column),
32 32 ``annotate`` (annotate column), ``code`` (code column); Default is
33 33 ``['ls', 'annotate', 'code']``
34 34 :param headers: dictionary with headers (keys are what's in the ``order``
35 35 parameter)
36 36 """
37 37 options['linenos'] = True
38 38 formatter = AnnotateHtmlFormatter(filenode=filenode, order=order,
39 39 headers=headers,
40 40 annotate_from_changeset_func=annotate_from_changeset_func, **options)
41 41 lexer = filenode.lexer
42 42 highlighted = highlight(filenode.content, lexer, formatter)
43 43 return highlighted
44 44
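# Illustrative usage sketch (not part of this changeset); 'repo' is assumed to
# be any vcs Repository instance and 'setup.py' is a hypothetical file path:
#
# cs = repo.get_changeset('tip')
# filenode = cs.get_node('setup.py')
# html = annotate_highlight(
# filenode,
# annotate_from_changeset_func=lambda cs: '<a href="/changesets/%s/">%s</a>\n'
# % (cs.id, cs.id),
# headers={'ls': '#', 'annotate': 'Annotate', 'code': 'Code'})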
45 45
46 46 class AnnotateHtmlFormatter(HtmlFormatter):
47 47
48 48 def __init__(self, filenode, annotate_from_changeset_func=None,
49 49 order=None, **options):
50 50 """
51 51 If ``annotate_from_changeset_func`` is passed it should be a function
52 52 which returns a string from the given changeset. For example, we may pass
53 53 the following function as ``annotate_from_changeset_func``::
54 54
55 55 def changeset_to_anchor(changeset):
56 56 return '<a href="/changesets/%s/">%s</a>\n' %\
57 57 (changeset.id, changeset.id)
58 58
59 59 :param annotate_from_changeset_func: see above
60 60 :param order: (default: ``['ls', 'annotate', 'code']``); order of
61 61 columns;
62 62 :param options: standard Pygments HtmlFormatter options; there is one
63 63 extra option though, ``headers``. For instance we can pass::
64 64
65 65 formatter = AnnotateHtmlFormatter(filenode, headers={
66 66 'ls': '#',
67 67 'annotate': 'Annotate',
68 68 'code': 'Code',
69 69 })
70 70
71 71 """
72 72 super(AnnotateHtmlFormatter, self).__init__(**options)
73 73 self.annotate_from_changeset_func = annotate_from_changeset_func
74 74 self.order = order or ('ls', 'annotate', 'code')
75 75 headers = options.pop('headers', None)
76 76 if headers and not ('ls' in headers and 'annotate' in headers and
77 77 'code' in headers):
78 78 raise ValueError("If headers option dict is specified it must "
79 79 "contain all 'ls', 'annotate' and 'code' keys")
80 80 self.headers = headers
81 81 if isinstance(filenode, FileNode):
82 82 self.filenode = filenode
83 83 else:
84 84 raise VCSError("This formatter expects a FileNode parameter, not %r"
85 85 % type(filenode))
86 86
87 87 def annotate_from_changeset(self, changeset):
88 88 """
89 89 Returns full html line for single changeset per annotated line.
90 90 """
91 91 if self.annotate_from_changeset_func:
92 92 return self.annotate_from_changeset_func(changeset)
93 93 else:
94 94 return ''.join((changeset.id, '\n'))
95 95
96 96 def _wrap_tablelinenos(self, inner):
97 97 dummyoutfile = StringIO.StringIO()
98 98 lncount = 0
99 99 for t, line in inner:
100 100 if t:
101 101 lncount += 1
102 102 dummyoutfile.write(line)
103 103
104 104 fl = self.linenostart
105 105 mw = len(str(lncount + fl - 1))
106 106 sp = self.linenospecial
107 107 st = self.linenostep
108 108 la = self.lineanchors
109 109 aln = self.anchorlinenos
110 110 if sp:
111 111 lines = []
112 112
113 113 for i in range(fl, fl + lncount):
114 114 if i % st == 0:
115 115 if i % sp == 0:
116 116 if aln:
117 117 lines.append('<a href="#%s-%d" class="special">'
118 118 '%*d</a>' %
119 119 (la, i, mw, i))
120 120 else:
121 121 lines.append('<span class="special">'
122 122 '%*d</span>' % (mw, i))
123 123 else:
124 124 if aln:
125 125 lines.append('<a href="#%s-%d">'
126 126 '%*d</a>' % (la, i, mw, i))
127 127 else:
128 128 lines.append('%*d' % (mw, i))
129 129 else:
130 130 lines.append('')
131 131 ls = '\n'.join(lines)
132 132 else:
133 133 lines = []
134 134 for i in range(fl, fl + lncount):
135 135 if i % st == 0:
136 136 if aln:
137 137 lines.append('<a href="#%s-%d">%*d</a>' \
138 138 % (la, i, mw, i))
139 139 else:
140 140 lines.append('%*d' % (mw, i))
141 141 else:
142 142 lines.append('')
143 143 ls = '\n'.join(lines)
144 144
145 145 annotate_changesets = [tup[1] for tup in self.filenode.annotate]
146 146 # If pygments cropped the last line break we need to do that too
147 147 ln_cs = len(annotate_changesets)
148 148 ln_ = len(ls.splitlines())
149 149 if ln_cs > ln_:
150 150 annotate_changesets = annotate_changesets[:ln_ - ln_cs]
151 151 annotate = ''.join((self.annotate_from_changeset(changeset)
152 152 for changeset in annotate_changesets))
153 153 # in case you wonder about the seemingly redundant <div> here:
154 154 # since the content in the other cell also is wrapped in a div,
155 155 # some browsers in some configurations seem to mess up the formatting.
156 156 '''
157 157 yield 0, ('<table class="%stable">' % self.cssclass +
158 158 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
159 159 ls + '</pre></div></td>' +
160 160 '<td class="code">')
161 161 yield 0, dummyoutfile.getvalue()
162 162 yield 0, '</td></tr></table>'
163 163
164 164 '''
165 165 headers_row = []
166 166 if self.headers:
167 167 headers_row = ['<tr class="annotate-header">']
168 168 for key in self.order:
169 169 td = ''.join(('<td>', self.headers[key], '</td>'))
170 170 headers_row.append(td)
171 171 headers_row.append('</tr>')
172 172
173 173 body_row_start = ['<tr>']
174 174 for key in self.order:
175 175 if key == 'ls':
176 176 body_row_start.append(
177 177 '<td class="linenos"><div class="linenodiv"><pre>' +
178 178 ls + '</pre></div></td>')
179 179 elif key == 'annotate':
180 180 body_row_start.append(
181 181 '<td class="annotate"><div class="annotatediv"><pre>' +
182 182 annotate + '</pre></div></td>')
183 183 elif key == 'code':
184 184 body_row_start.append('<td class="code">')
185 185 yield 0, ('<table class="%stable">' % self.cssclass +
186 186 ''.join(headers_row) +
187 187 ''.join(body_row_start)
188 188 )
189 189 yield 0, dummyoutfile.getvalue()
190 190 yield 0, '</td></tr></table>'
@@ -1,128 +1,128 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.celerylib.__init__
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 celery libs for RhodeCode
7 7
8 8 :created_on: Nov 27, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import sys
28 28 import socket
29 29 import traceback
30 30 import logging
31 31 from os.path import dirname as dn, join as jn
32 32 from pylons import config
33 33
34 34 from hashlib import md5
35 35 from decorator import decorator
36 36
37 from vcs.utils.lazy import LazyProperty
37 from rhodecode.lib.vcs.utils.lazy import LazyProperty
38 38 from rhodecode import CELERY_ON
39 39 from rhodecode.lib import str2bool, safe_str
40 40 from rhodecode.lib.pidlock import DaemonLock, LockHeld
41 41 from rhodecode.model import init_model
42 42 from rhodecode.model import meta
43 43 from rhodecode.model.db import Statistics, Repository, User
44 44
45 45 from sqlalchemy import engine_from_config
46 46
47 47 from celery.messaging import establish_connection
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
52 52 class ResultWrapper(object):
53 53 def __init__(self, task):
54 54 self.task = task
55 55
56 56 @LazyProperty
57 57 def result(self):
58 58 return self.task
59 59
60 60
61 61 def run_task(task, *args, **kwargs):
62 62 if CELERY_ON:
63 63 try:
64 64 t = task.apply_async(args=args, kwargs=kwargs)
65 65 log.info('running task %s:%s' % (t.task_id, task))
66 66 return t
67 67
68 68 except socket.error, e:
69 69 if isinstance(e, IOError) and e.errno == 111:
70 70 log.debug('Unable to connect to celeryd. Sync execution')
71 71 else:
72 72 log.error(traceback.format_exc())
73 73 except KeyError, e:
74 74 log.debug('Unable to connect to celeryd. Sync execution')
75 75 except Exception, e:
76 76 log.error(traceback.format_exc())
77 77
78 78 log.debug('executing task %s in sync mode' % task)
79 79 return ResultWrapper(task(*args, **kwargs))
80 80
81 81
82 82 def __get_lockkey(func, *fargs, **fkwargs):
83 83 params = list(fargs)
84 84 params.extend(['%s-%s' % ar for ar in fkwargs.items()])
85 85
86 86 func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)
87 87
88 88 lockkey = 'task_%s.lock' % \
89 89 md5(func_name + '-' + '-'.join(map(safe_str, params))).hexdigest()
90 90 return lockkey
91 91
92 92
93 93 def locked_task(func):
94 94 def __wrapper(func, *fargs, **fkwargs):
95 95 lockkey = __get_lockkey(func, *fargs, **fkwargs)
96 96 lockkey_path = config['here']
97 97
98 98 log.info('running task with lockkey %s' % lockkey)
99 99 try:
100 100 l = DaemonLock(file_=jn(lockkey_path, lockkey))
101 101 ret = func(*fargs, **fkwargs)
102 102 l.release()
103 103 return ret
104 104 except LockHeld:
105 105 log.info('LockHeld')
106 106 return 'Task with key %s already running' % lockkey
107 107
108 108 return decorator(__wrapper, func)
109 109
110 110
111 111 def get_session():
112 112 if CELERY_ON:
113 113 engine = engine_from_config(config, 'sqlalchemy.db1.')
114 114 init_model(engine)
115 115 sa = meta.Session
116 116 return sa
117 117
118 118
119 119 def dbsession(func):
120 120 def __wrapper(func, *fargs, **fkwargs):
121 121 try:
122 122 ret = func(*fargs, **fkwargs)
123 123 return ret
124 124 finally:
125 125 if CELERY_ON:
126 126 meta.Session.remove()
127 127
128 128 return decorator(__wrapper, func)
@@ -1,414 +1,414 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.celerylib.tasks
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 RhodeCode task modules, containing all tasks that are supposed to be run
7 7 by the celery daemon
8 8
9 9 :created_on: Oct 6, 2010
10 10 :author: marcink
11 11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
12 12 :license: GPLv3, see COPYING for more details.
13 13 """
14 14 # This program is free software: you can redistribute it and/or modify
15 15 # it under the terms of the GNU General Public License as published by
16 16 # the Free Software Foundation, either version 3 of the License, or
17 17 # (at your option) any later version.
18 18 #
19 19 # This program is distributed in the hope that it will be useful,
20 20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 22 # GNU General Public License for more details.
23 23 #
24 24 # You should have received a copy of the GNU General Public License
25 25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
26 26 from celery.decorators import task
27 27
28 28 import os
29 29 import traceback
30 30 import logging
31 31 from os.path import join as jn
32 32
33 33 from time import mktime
34 34 from operator import itemgetter
35 35 from string import lower
36 36
37 37 from pylons import config, url
38 38 from pylons.i18n.translation import _
39 39
40 from vcs import get_backend
40 from rhodecode.lib.vcs import get_backend
41 41
42 42 from rhodecode import CELERY_ON
43 43 from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP, safe_str
44 44 from rhodecode.lib.celerylib import run_task, locked_task, dbsession, \
45 45 str2bool, __get_lockkey, LockHeld, DaemonLock, get_session
46 46 from rhodecode.lib.helpers import person
47 47 from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer
48 48 from rhodecode.lib.utils import add_cache, action_logger
49 49 from rhodecode.lib.compat import json, OrderedDict
50 50
51 51 from rhodecode.model.db import Statistics, Repository, User
52 52
53 53
54 54 add_cache(config)
55 55
56 56 __all__ = ['whoosh_index', 'get_commits_stats',
57 57 'reset_user_password', 'send_email']
58 58
59 59
60 60 def get_logger(cls):
61 61 if CELERY_ON:
62 62 try:
63 63 log = cls.get_logger()
64 64 except:
65 65 log = logging.getLogger(__name__)
66 66 else:
67 67 log = logging.getLogger(__name__)
68 68
69 69 return log
70 70
71 71
72 72 @task(ignore_result=True)
73 73 @locked_task
74 74 @dbsession
75 75 def whoosh_index(repo_location, full_index):
76 76 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
77 77 log = whoosh_index.get_logger(whoosh_index)
78 78 DBS = get_session()
79 79
80 80 index_location = config['index_dir']
81 81 WhooshIndexingDaemon(index_location=index_location,
82 82 repo_location=repo_location, sa=DBS)\
83 83 .run(full_index=full_index)
84 84
85 85
86 86 @task(ignore_result=True)
87 87 @dbsession
88 88 def get_commits_stats(repo_name, ts_min_y, ts_max_y):
89 89 log = get_logger(get_commits_stats)
90 90 DBS = get_session()
91 91 lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
92 92 ts_max_y)
93 93 lockkey_path = config['here']
94 94
95 95 log.info('running task with lockkey %s' % lockkey)
96 96
97 97 try:
98 98 lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))
99 99
100 100 # for js data compatibility, strip quote characters from the person key
101 101 akc = lambda k: person(k).replace('"', "")
102 102
103 103 co_day_auth_aggr = {}
104 104 commits_by_day_aggregate = {}
105 105 repo = Repository.get_by_repo_name(repo_name)
106 106 if repo is None:
107 107 return True
108 108
109 109 repo = repo.scm_instance
110 110 repo_size = repo.count()
111 111 # return if the repo has no revisions
112 112 if repo_size < 1:
113 113 lock.release()
114 114 return True
115 115
116 116 skip_date_limit = True
117 117 parse_limit = int(config['app_conf'].get('commit_parse_limit'))
118 118 last_rev = None
119 119 last_cs = None
120 120 timegetter = itemgetter('time')
121 121
122 122 dbrepo = DBS.query(Repository)\
123 123 .filter(Repository.repo_name == repo_name).scalar()
124 124 cur_stats = DBS.query(Statistics)\
125 125 .filter(Statistics.repository == dbrepo).scalar()
126 126
127 127 if cur_stats is not None:
128 128 last_rev = cur_stats.stat_on_revision
129 129
130 130 if last_rev == repo.get_changeset().revision and repo_size > 1:
131 131 # pass silently without any work if we're not on the first revision or
132 132 # the current parsing state (from the db marker) is already at the
133 133 # last revision
134 134 lock.release()
135 135 return True
136 136
137 137 if cur_stats:
138 138 commits_by_day_aggregate = OrderedDict(json.loads(
139 139 cur_stats.commit_activity_combined))
140 140 co_day_auth_aggr = json.loads(cur_stats.commit_activity)
141 141
142 142 log.debug('starting parsing %s' % parse_limit)
143 143 lmktime = mktime
144 144
145 145 last_rev = last_rev + 1 if last_rev >= 0 else 0
146 146 log.debug('Getting revisions from %s to %s' % (
147 147 last_rev, last_rev + parse_limit)
148 148 )
149 149 for cs in repo[last_rev:last_rev + parse_limit]:
150 150 last_cs = cs # remember last parsed changeset
151 151 k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
152 152 cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
153 153
154 154 if akc(cs.author) in co_day_auth_aggr:
155 155 try:
156 156 l = [timegetter(x) for x in
157 157 co_day_auth_aggr[akc(cs.author)]['data']]
158 158 time_pos = l.index(k)
159 159 except ValueError:
160 160 time_pos = False
161 161
162 162 if time_pos >= 0 and time_pos is not False:
163 163
164 164 datadict = \
165 165 co_day_auth_aggr[akc(cs.author)]['data'][time_pos]
166 166
167 167 datadict["commits"] += 1
168 168 datadict["added"] += len(cs.added)
169 169 datadict["changed"] += len(cs.changed)
170 170 datadict["removed"] += len(cs.removed)
171 171
172 172 else:
173 173 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
174 174
175 175 datadict = {"time": k,
176 176 "commits": 1,
177 177 "added": len(cs.added),
178 178 "changed": len(cs.changed),
179 179 "removed": len(cs.removed),
180 180 }
181 181 co_day_auth_aggr[akc(cs.author)]['data']\
182 182 .append(datadict)
183 183
184 184 else:
185 185 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
186 186 co_day_auth_aggr[akc(cs.author)] = {
187 187 "label": akc(cs.author),
188 188 "data": [{"time":k,
189 189 "commits":1,
190 190 "added":len(cs.added),
191 191 "changed":len(cs.changed),
192 192 "removed":len(cs.removed),
193 193 }],
194 194 "schema": ["commits"],
195 195 }
196 196
197 197 #gather all data by day
198 198 if k in commits_by_day_aggregate:
199 199 commits_by_day_aggregate[k] += 1
200 200 else:
201 201 commits_by_day_aggregate[k] = 1
202 202
203 203 overview_data = sorted(commits_by_day_aggregate.items(),
204 204 key=itemgetter(0))
205 205
206 206 if not co_day_auth_aggr:
207 207 co_day_auth_aggr[akc(repo.contact)] = {
208 208 "label": akc(repo.contact),
209 209 "data": [0, 1],
210 210 "schema": ["commits"],
211 211 }
212 212
213 213 stats = cur_stats if cur_stats else Statistics()
214 214 stats.commit_activity = json.dumps(co_day_auth_aggr)
215 215 stats.commit_activity_combined = json.dumps(overview_data)
216 216
217 217 log.debug('last revision %s' % last_rev)
218 218 leftovers = len(repo.revisions[last_rev:])
219 219 log.debug('revisions to parse %s' % leftovers)
220 220
221 221 if last_rev == 0 or leftovers < parse_limit:
222 222 log.debug('getting code trending stats')
223 223 stats.languages = json.dumps(__get_codes_stats(repo_name))
224 224
225 225 try:
226 226 stats.repository = dbrepo
227 227 stats.stat_on_revision = last_cs.revision if last_cs else 0
228 228 DBS.add(stats)
229 229 DBS.commit()
230 230 except:
231 231 log.error(traceback.format_exc())
232 232 DBS.rollback()
233 233 lock.release()
234 234 return False
235 235
236 236 #final release
237 237 lock.release()
238 238
239 239 #execute another task if celery is enabled
240 240 if len(repo.revisions) > 1 and CELERY_ON:
241 241 run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
242 242 return True
243 243 except LockHeld:
244 244 log.info('LockHeld')
245 245 return 'Task with key %s already running' % lockkey
246 246
247 247 @task(ignore_result=True)
248 248 @dbsession
249 249 def send_password_link(user_email):
250 250 from rhodecode.model.notification import EmailNotificationModel
251 251
252 252 log = get_logger(send_password_link)
253 253 DBS = get_session()
254 254
255 255 try:
256 256 user = User.get_by_email(user_email)
257 257 if user:
258 258 log.debug('password reset user found %s' % user)
259 259 link = url('reset_password_confirmation', key=user.api_key,
260 260 qualified=True)
261 261 reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
262 262 body = EmailNotificationModel().get_email_tmpl(reg_type,
263 263 **{'user':user.short_contact,
264 264 'reset_url':link})
265 265 log.debug('sending email')
266 266 run_task(send_email, user_email,
267 267 _("password reset link"), body)
268 268 log.info('send new password mail to %s' % user_email)
269 269 else:
270 270 log.debug("password reset email %s not found" % user_email)
271 271 except:
272 272 log.error(traceback.format_exc())
273 273 return False
274 274
275 275 return True
276 276
277 277 @task(ignore_result=True)
278 278 @dbsession
279 279 def reset_user_password(user_email):
280 280 from rhodecode.lib import auth
281 281
282 282 log = get_logger(reset_user_password)
283 283 DBS = get_session()
284 284
285 285 try:
286 286 try:
287 287 user = User.get_by_email(user_email)
288 288 new_passwd = auth.PasswordGenerator().gen_password(8,
289 289 auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
290 290 if user:
291 291 user.password = auth.get_crypt_password(new_passwd)
292 292 user.api_key = auth.generate_api_key(user.username)
293 293 DBS.add(user)
294 294 DBS.commit()
295 295 log.info('change password for %s' % user_email)
296 296 if new_passwd is None:
297 297 raise Exception('unable to generate new password')
298 298 except:
299 299 log.error(traceback.format_exc())
300 300 DBS.rollback()
301 301
302 302 run_task(send_email, user_email,
303 303 'Your new password',
304 304 'Your new RhodeCode password:%s' % (new_passwd))
305 305 log.info('send new password mail to %s' % user_email)
306 306
307 307 except:
308 308 log.error('Failed to update user password')
309 309 log.error(traceback.format_exc())
310 310
311 311 return True
312 312
313 313
314 314 @task(ignore_result=True)
315 315 @dbsession
316 316 def send_email(recipients, subject, body, html_body=''):
317 317 """
318 318 Sends an email with defined parameters from the .ini files.
319 319
320 320 :param recipients: list of recipients; if this is empty the defined email
321 321 address from the 'email_to' field is used instead
322 322 :param subject: subject of the mail
323 323 :param body: body of the mail
324 324 :param html_body: html version of body
325 325 """
326 326 log = get_logger(send_email)
327 327 DBS = get_session()
328 328
329 329 email_config = config
330 330 subject = "%s %s" % (email_config.get('email_prefix'), subject)
331 331 if not recipients:
332 332 # if recipients are not defined we send to email_config + all admins
333 333 admins = [u.email for u in User.query()
334 334 .filter(User.admin == True).all()]
335 335 recipients = [email_config.get('email_to')] + admins
336 336
337 337 mail_from = email_config.get('app_email_from', 'RhodeCode')
338 338 user = email_config.get('smtp_username')
339 339 passwd = email_config.get('smtp_password')
340 340 mail_server = email_config.get('smtp_server')
341 341 mail_port = email_config.get('smtp_port')
342 342 tls = str2bool(email_config.get('smtp_use_tls'))
343 343 ssl = str2bool(email_config.get('smtp_use_ssl'))
344 344 debug = str2bool(config.get('debug'))
345 345 smtp_auth = email_config.get('smtp_auth')
346 346
347 347 try:
348 348 m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
349 349 mail_port, ssl, tls, debug=debug)
350 350 m.send(recipients, subject, body, html_body)
351 351 except:
352 352 log.error('Mail sending failed')
353 353 log.error(traceback.format_exc())
354 354 return False
355 355 return True
356 356
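# Illustrative usage sketch (not part of this changeset): tasks are normally
# scheduled through run_task, which falls back to synchronous execution when
# celery is not available.
#
# run_task(send_email, ['admin@example.com'],
# 'Test subject', 'Plain text body', '<b>HTML body</b>')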
357 357
358 358 @task(ignore_result=True)
359 359 @dbsession
360 360 def create_repo_fork(form_data, cur_user):
361 361 """
362 362 Creates a fork of a repository using internal VCS methods
363 363
364 364 :param form_data:
365 365 :param cur_user:
366 366 """
367 367 from rhodecode.model.repo import RepoModel
368 368
369 369 log = get_logger(create_repo_fork)
370 370 DBS = get_session()
371 371
372 372 base_path = Repository.base_path()
373 373
374 374 RepoModel(DBS).create(form_data, cur_user, just_db=True, fork=True)
375 375
376 376 alias = form_data['repo_type']
377 377 org_repo_name = form_data['org_path']
378 378 fork_name = form_data['repo_name_full']
379 379 update_after_clone = form_data['update_after_clone']
380 380 source_repo_path = os.path.join(base_path, org_repo_name)
381 381 destination_fork_path = os.path.join(base_path, fork_name)
382 382
383 383 log.info('creating fork of %s as %s', source_repo_path,
384 384 destination_fork_path)
385 385 backend = get_backend(alias)
386 386 backend(safe_str(destination_fork_path), create=True,
387 387 src_url=safe_str(source_repo_path),
388 388 update_after_clone=update_after_clone)
389 389 action_logger(cur_user, 'user_forked_repo:%s' % fork_name,
390 390 org_repo_name, '', DBS)
391 391
392 392 action_logger(cur_user, 'user_created_fork:%s' % fork_name,
393 393 fork_name, '', DBS)
394 394 # finally commit at latest possible stage
395 395 DBS.commit()
396 396
397 397 def __get_codes_stats(repo_name):
398 398 repo = Repository.get_by_repo_name(repo_name).scm_instance
399 399
400 400 tip = repo.get_changeset()
401 401 code_stats = {}
402 402
403 403 def aggregate(cs):
404 404 for f in cs[2]:
405 405 ext = lower(f.extension)
406 406 if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
407 407 if ext in code_stats:
408 408 code_stats[ext] += 1
409 409 else:
410 410 code_stats[ext] = 1
411 411
412 412 map(aggregate, tip.walk('/'))
413 413
414 414 return code_stats or {}
@@ -1,1098 +1,1098 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.model.db
4 4 ~~~~~~~~~~~~~~~~~~
5 5
6 6 Database Models for RhodeCode
7 7
8 8 :created_on: Apr 08, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import logging
28 28 import datetime
29 29 import traceback
30 30 from datetime import date
31 31
32 32 from sqlalchemy import *
33 33 from sqlalchemy.ext.hybrid import hybrid_property
34 34 from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
35 35 from beaker.cache import cache_region, region_invalidate
36 36
37 from vcs import get_backend
38 from vcs.utils.helpers import get_scm
39 from vcs.exceptions import VCSError
40 from vcs.utils.lazy import LazyProperty
37 from rhodecode.lib.vcs import get_backend
38 from rhodecode.lib.vcs.utils.helpers import get_scm
39 from rhodecode.lib.vcs.exceptions import VCSError
40 from rhodecode.lib.vcs.utils.lazy import LazyProperty
41 41
42 42 from rhodecode.lib import str2bool, safe_str, get_changeset_safe, \
43 43 generate_api_key, safe_unicode
44 44 from rhodecode.lib.exceptions import UsersGroupsAssignedException
45 45 from rhodecode.lib.compat import json
46 46
47 47 from rhodecode.model.meta import Base, Session
48 48 from rhodecode.lib.caching_query import FromCache
49 49
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53 #==============================================================================
54 54 # BASE CLASSES
55 55 #==============================================================================
56 56
57 57 class ModelSerializer(json.JSONEncoder):
58 58 """
59 59 Simple Serializer for JSON,
60 60
61 61 usage::
62 62
63 63 to make an object serializable, implement a __json__
64 64 method that returns a dict used for serialization into json
65 65
66 66 example::
67 67
68 68 class Task(object):
69 69
70 70 def __init__(self, name, value):
71 71 self.name = name
72 72 self.value = value
73 73
74 74 def __json__(self):
75 75 return dict(name=self.name,
76 76 value=self.value)
77 77
78 78 """
79 79
80 80 def default(self, obj):
81 81
82 82 if hasattr(obj, '__json__'):
83 83 return obj.__json__()
84 84 else:
85 85 return json.JSONEncoder.default(self, obj)
86 86
87 87 class BaseModel(object):
88 88 """Base Model for all classes
89 89
90 90 """
91 91
92 92 @classmethod
93 93 def _get_keys(cls):
94 94 """return column names for this model """
95 95 return class_mapper(cls).c.keys()
96 96
97 97 def get_dict(self):
98 98 """return dict with keys and values corresponding
99 99 to this model data """
100 100
101 101 d = {}
102 102 for k in self._get_keys():
103 103 d[k] = getattr(self, k)
104 104 return d
105 105
106 106 def get_appstruct(self):
107 107 """return list of (key, value) tuples corresponding
108 108 to this model data """
109 109
110 110 l = []
111 111 for k in self._get_keys():
112 112 l.append((k, getattr(self, k),))
113 113 return l
114 114
115 115 def populate_obj(self, populate_dict):
116 116 """populate model with data from given populate_dict"""
117 117
118 118 for k in self._get_keys():
119 119 if k in populate_dict:
120 120 setattr(self, k, populate_dict[k])
121 121
122 122 @classmethod
123 123 def query(cls):
124 124 return Session.query(cls)
125 125
126 126 @classmethod
127 127 def get(cls, id_):
128 128 if id_:
129 129 return cls.query().get(id_)
130 130
131 131 @classmethod
132 132 def getAll(cls):
133 133 return cls.query().all()
134 134
135 135 @classmethod
136 136 def delete(cls, id_):
137 137 obj = cls.query().get(id_)
138 138 Session.delete(obj)
139 139 Session.commit()
140 140
141 141
142 142 class RhodeCodeSetting(Base, BaseModel):
143 143 __tablename__ = 'rhodecode_settings'
144 144 __table_args__ = (UniqueConstraint('app_settings_name'), {'extend_existing':True})
145 145 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
146 146 app_settings_name = Column("app_settings_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
147 147 _app_settings_value = Column("app_settings_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
148 148
149 149 def __init__(self, k='', v=''):
150 150 self.app_settings_name = k
151 151 self.app_settings_value = v
152 152
153 153
154 154 @validates('_app_settings_value')
155 155 def validate_settings_value(self, key, val):
156 156 assert type(val) == unicode
157 157 return val
158 158
159 159 @hybrid_property
160 160 def app_settings_value(self):
161 161 v = self._app_settings_value
162 162 if v == 'ldap_active':
163 163 v = str2bool(v)
164 164 return v
165 165
166 166 @app_settings_value.setter
167 167 def app_settings_value(self, val):
168 168 """
169 169 Setter that will always make sure we use unicode in app_settings_value
170 170
171 171 :param val:
172 172 """
173 173 self._app_settings_value = safe_unicode(val)
174 174
175 175 def __repr__(self):
176 176 return "<%s('%s:%s')>" % (self.__class__.__name__,
177 177 self.app_settings_name, self.app_settings_value)
178 178
179 179
180 180 @classmethod
181 181 def get_by_name(cls, ldap_key):
182 182 return cls.query()\
183 183 .filter(cls.app_settings_name == ldap_key).scalar()
184 184
185 185 @classmethod
186 186 def get_app_settings(cls, cache=False):
187 187
188 188 ret = cls.query()
189 189
190 190 if cache:
191 191 ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
192 192
193 193 if not ret:
194 194 raise Exception('Could not get application settings!')
195 195 settings = {}
196 196 for each in ret:
197 197 settings['rhodecode_' + each.app_settings_name] = \
198 198 each.app_settings_value
199 199
200 200 return settings
201 201
202 202 @classmethod
203 203 def get_ldap_settings(cls, cache=False):
204 204 ret = cls.query()\
205 205 .filter(cls.app_settings_name.startswith('ldap_')).all()
206 206 fd = {}
207 207 for row in ret:
208 208 fd.update({row.app_settings_name:row.app_settings_value})
209 209
210 210 return fd
211 211
212 212
213 213 class RhodeCodeUi(Base, BaseModel):
214 214 __tablename__ = 'rhodecode_ui'
215 215 __table_args__ = (UniqueConstraint('ui_key'), {'extend_existing':True})
216 216
217 217 HOOK_UPDATE = 'changegroup.update'
218 218 HOOK_REPO_SIZE = 'changegroup.repo_size'
219 219 HOOK_PUSH = 'pretxnchangegroup.push_logger'
220 220 HOOK_PULL = 'preoutgoing.pull_logger'
221 221
222 222 ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
223 223 ui_section = Column("ui_section", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
224 224 ui_key = Column("ui_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
225 225 ui_value = Column("ui_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
226 226 ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
227 227
228 228
229 229 @classmethod
230 230 def get_by_key(cls, key):
231 231 return cls.query().filter(cls.ui_key == key)
232 232
233 233
234 234 @classmethod
235 235 def get_builtin_hooks(cls):
236 236 q = cls.query()
237 237 q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE,
238 238 cls.HOOK_REPO_SIZE,
239 239 cls.HOOK_PUSH, cls.HOOK_PULL]))
240 240 return q.all()
241 241
242 242 @classmethod
243 243 def get_custom_hooks(cls):
244 244 q = cls.query()
245 245 q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE,
246 246 cls.HOOK_REPO_SIZE,
247 247 cls.HOOK_PUSH, cls.HOOK_PULL]))
248 248 q = q.filter(cls.ui_section == 'hooks')
249 249 return q.all()
250 250
251 251 @classmethod
252 252 def create_or_update_hook(cls, key, val):
253 253 new_ui = cls.get_by_key(key).scalar() or cls()
254 254 new_ui.ui_section = 'hooks'
255 255 new_ui.ui_active = True
256 256 new_ui.ui_key = key
257 257 new_ui.ui_value = val
258 258
259 259 Session.add(new_ui)
260 260 Session.commit()
261 261
262 262
263 263 class User(Base, BaseModel):
264 264 __tablename__ = 'users'
265 265 __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'extend_existing':True})
266 266 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
267 267 username = Column("username", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
268 268 password = Column("password", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
269 269 active = Column("active", Boolean(), nullable=True, unique=None, default=None)
270 270 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
271 271 name = Column("name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
272 272 lastname = Column("lastname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
273 273 email = Column("email", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
274 274 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
275 275 ldap_dn = Column("ldap_dn", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
276 276 api_key = Column("api_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
277 277
278 278 user_log = relationship('UserLog', cascade='all')
279 279 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
280 280
281 281 repositories = relationship('Repository')
282 282 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
283 283 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
284 284
285 285 group_member = relationship('UsersGroupMember', cascade='all')
286 286
287 287 @property
288 288 def full_contact(self):
289 289 return '%s %s <%s>' % (self.name, self.lastname, self.email)
290 290
291 291 @property
292 292 def short_contact(self):
293 293 return '%s %s' % (self.name, self.lastname)
294 294
295 295 @property
296 296 def is_admin(self):
297 297 return self.admin
298 298
299 299 def __repr__(self):
300 300 try:
301 301 return "<%s('id:%s:%s')>" % (self.__class__.__name__,
302 302 self.user_id, self.username)
303 303 except:
304 304 return self.__class__.__name__
305 305
306 306 @classmethod
307 307 def get_by_username(cls, username, case_insensitive=False):
308 308 if case_insensitive:
309 309 return Session.query(cls).filter(cls.username.ilike(username)).scalar()
310 310 else:
311 311 return Session.query(cls).filter(cls.username == username).scalar()
312 312
313 313 @classmethod
314 314 def get_by_api_key(cls, api_key):
315 315 return cls.query().filter(cls.api_key == api_key).one()
316 316
317 317 def update_lastlogin(self):
318 318 """Update user lastlogin"""
319 319
320 320 self.last_login = datetime.datetime.now()
321 321 Session.add(self)
322 322 Session.commit()
323 323 log.debug('updated user %s lastlogin' % self.username)
324 324
325 325 @classmethod
326 326 def create(cls, form_data):
327 327 from rhodecode.lib.auth import get_crypt_password
328 328
329 329 try:
330 330 new_user = cls()
331 331 for k, v in form_data.items():
332 332 if k == 'password':
333 333 v = get_crypt_password(v)
334 334 setattr(new_user, k, v)
335 335
336 336 new_user.api_key = generate_api_key(form_data['username'])
337 337 Session.add(new_user)
338 338 Session.commit()
339 339 return new_user
340 340 except:
341 341 log.error(traceback.format_exc())
342 342 Session.rollback()
343 343 raise
344 344
345 345 class UserLog(Base, BaseModel):
346 346 __tablename__ = 'user_logs'
347 347 __table_args__ = {'extend_existing':True}
348 348 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
349 349 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
350 350 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
351 351 repository_name = Column("repository_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
352 352 user_ip = Column("user_ip", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
353 353 action = Column("action", UnicodeText(length=1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
354 354 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
355 355
356 356 @property
357 357 def action_as_day(self):
358 358 return date(*self.action_date.timetuple()[:3])
359 359
360 360 user = relationship('User')
361 361 repository = relationship('Repository')
362 362
363 363
364 364 class UsersGroup(Base, BaseModel):
365 365 __tablename__ = 'users_groups'
366 366 __table_args__ = {'extend_existing':True}
367 367
368 368 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
369 369 users_group_name = Column("users_group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
370 370 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
371 371
372 372 members = relationship('UsersGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
373 373
374 374 def __repr__(self):
375 375 return '<userGroup(%s)>' % (self.users_group_name)
376 376
377 377 @classmethod
378 378 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
379 379 if case_insensitive:
380 380 gr = cls.query()\
381 381 .filter(cls.users_group_name.ilike(group_name))
382 382 else:
383 383 gr = cls.query()\
384 384 .filter(cls.users_group_name == group_name)
385 385 if cache:
386 386 gr = gr.options(FromCache("sql_cache_short",
387 387 "get_user_%s" % group_name))
388 388 return gr.scalar()
389 389
390 390
391 391 @classmethod
392 392 def get(cls, users_group_id, cache=False):
393 393 users_group = cls.query()
394 394 if cache:
395 395 users_group = users_group.options(FromCache("sql_cache_short",
396 396 "get_users_group_%s" % users_group_id))
397 397 return users_group.get(users_group_id)
398 398
399 399 @classmethod
400 400 def create(cls, form_data):
401 401 try:
402 402 new_users_group = cls()
403 403 for k, v in form_data.items():
404 404 setattr(new_users_group, k, v)
405 405
406 406 Session.add(new_users_group)
407 407 Session.commit()
408 408 return new_users_group
409 409 except:
410 410 log.error(traceback.format_exc())
411 411 Session.rollback()
412 412 raise
413 413
414 414 @classmethod
415 415 def update(cls, users_group_id, form_data):
416 416
417 417 try:
418 418 users_group = cls.get(users_group_id, cache=False)
419 419
420 420 for k, v in form_data.items():
421 421 if k == 'users_group_members':
422 422 users_group.members = []
423 423 Session.flush()
424 424 members_list = []
425 425 if v:
426 426 v = [v] if isinstance(v, basestring) else v
427 427 for u_id in set(v):
428 428 member = UsersGroupMember(users_group_id, u_id)
429 429 members_list.append(member)
430 430 setattr(users_group, 'members', members_list)
431 431 setattr(users_group, k, v)
432 432
433 433 Session.add(users_group)
434 434 Session.commit()
435 435 except:
436 436 log.error(traceback.format_exc())
437 437 Session.rollback()
438 438 raise
439 439
440 440 @classmethod
441 441 def delete(cls, users_group_id):
442 442 try:
443 443
444 444 # check if this group is not assigned to repo
445 445 assigned_groups = UsersGroupRepoToPerm.query()\
446 446 .filter(UsersGroupRepoToPerm.users_group_id ==
447 447 users_group_id).all()
448 448
449 449 if assigned_groups:
450 450 raise UsersGroupsAssignedException('UsersGroup assigned to %s' %
451 451 assigned_groups)
452 452
453 453 users_group = cls.get(users_group_id, cache=False)
454 454 Session.delete(users_group)
455 455 Session.commit()
456 456 except:
457 457 log.error(traceback.format_exc())
458 458 Session.rollback()
459 459 raise
460 460
461 461 class UsersGroupMember(Base, BaseModel):
462 462 __tablename__ = 'users_groups_members'
463 463 __table_args__ = {'extend_existing':True}
464 464
465 465 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
466 466 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
467 467 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
468 468
469 469 user = relationship('User', lazy='joined')
470 470 users_group = relationship('UsersGroup')
471 471
472 472 def __init__(self, gr_id='', u_id=''):
473 473 self.users_group_id = gr_id
474 474 self.user_id = u_id
475 475
476 476 @staticmethod
477 477 def add_user_to_group(group, user):
478 478 ugm = UsersGroupMember()
479 479 ugm.users_group = group
480 480 ugm.user = user
481 481 Session.add(ugm)
482 482 Session.commit()
483 483 return ugm
484 484
485 485 class Repository(Base, BaseModel):
486 486 __tablename__ = 'repositories'
487 487 __table_args__ = (UniqueConstraint('repo_name'), {'extend_existing':True},)
488 488
489 489 repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
490 490 repo_name = Column("repo_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
491 491 clone_uri = Column("clone_uri", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
492 492 repo_type = Column("repo_type", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default='hg')
493 493 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
494 494 private = Column("private", Boolean(), nullable=True, unique=None, default=None)
495 495 enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
496 496 enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
497 497 description = Column("description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
498 498 created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
499 499
500 500 fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
501 501 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
502 502
503 503
504 504 user = relationship('User')
505 505 fork = relationship('Repository', remote_side=repo_id)
506 506 group = relationship('RepoGroup')
507 507 repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
508 508 users_group_to_perm = relationship('UsersGroupRepoToPerm', cascade='all')
509 509 stats = relationship('Statistics', cascade='all', uselist=False)
510 510
511 511 followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all')
512 512
513 513 logs = relationship('UserLog', cascade='all')
514 514
515 515 def __repr__(self):
516 516 return "<%s('%s:%s')>" % (self.__class__.__name__,
517 517 self.repo_id, self.repo_name)
518 518
519 519 @classmethod
520 520 def url_sep(cls):
521 521 return '/'
522 522
523 523 @classmethod
524 524 def get_by_repo_name(cls, repo_name):
525 525 q = Session.query(cls).filter(cls.repo_name == repo_name)
526 526 q = q.options(joinedload(Repository.fork))\
527 527 .options(joinedload(Repository.user))\
528 528 .options(joinedload(Repository.group))
529 529 return q.one()
530 530
531 531 @classmethod
532 532 def get_repo_forks(cls, repo_id):
533 533 return cls.query().filter(Repository.fork_id == repo_id)
534 534
535 535 @classmethod
536 536 def base_path(cls):
537 537 """
538 538 Returns base path where all repos are stored
539 539
540 540 :param cls:
541 541 """
542 542 q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
543 543 cls.url_sep())
544 544 q.options(FromCache("sql_cache_short", "repository_repo_path"))
545 545 return q.one().ui_value
546 546
547 547 @property
548 548 def just_name(self):
549 549 return self.repo_name.split(Repository.url_sep())[-1]
550 550
551 551 @property
552 552 def groups_with_parents(self):
553 553 groups = []
554 554 if self.group is None:
555 555 return groups
556 556
557 557 cur_gr = self.group
558 558 groups.insert(0, cur_gr)
559 559 while 1:
560 560 gr = getattr(cur_gr, 'parent_group', None)
561 561 cur_gr = cur_gr.parent_group
562 562 if gr is None:
563 563 break
564 564 groups.insert(0, gr)
565 565
566 566 return groups
567 567
568 568 @property
569 569 def groups_and_repo(self):
570 570 return self.groups_with_parents, self.just_name
571 571
572 572 @LazyProperty
573 573 def repo_path(self):
574 574 """
575 575 Returns the full base path for this repository, i.e. where it actually
576 576 exists on the filesystem
577 577 """
578 578 q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
579 579 Repository.url_sep())
580 580 q.options(FromCache("sql_cache_short", "repository_repo_path"))
581 581 return q.one().ui_value
582 582
583 583 @property
584 584 def repo_full_path(self):
585 585 p = [self.repo_path]
586 586 # we need to split the name by / since this is how we store the
587 587 # names in the database, but that eventually needs to be converted
588 588 # into a valid system path
589 589 p += self.repo_name.split(Repository.url_sep())
590 590 return os.path.join(*p)
591 591
592 592 def get_new_name(self, repo_name):
593 593 """
594 594 returns new full repository name based on assigned group and new name
595 595
596 596 :param repo_name:
597 597 """
598 598 path_prefix = self.group.full_path_splitted if self.group else []
599 599 return Repository.url_sep().join(path_prefix + [repo_name])
600 600
601 601 @property
602 602 def _ui(self):
603 603 """
604 604 Creates a db-based ui object for this repository
605 605 """
606 606 from mercurial import ui
607 607 from mercurial import config
608 608 baseui = ui.ui()
609 609
610 610 #clean the baseui object
611 611 baseui._ocfg = config.config()
612 612 baseui._ucfg = config.config()
613 613 baseui._tcfg = config.config()
614 614
615 615
616 616 ret = RhodeCodeUi.query()\
617 617 .options(FromCache("sql_cache_short", "repository_repo_ui")).all()
618 618
619 619 hg_ui = ret
620 620 for ui_ in hg_ui:
621 621 if ui_.ui_active:
622 622 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
623 623 ui_.ui_key, ui_.ui_value)
624 624 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
625 625
626 626 return baseui
627 627
628 628 @classmethod
629 629 def is_valid(cls, repo_name):
630 630 """
631 631 returns True if given repo name is a valid filesystem repository
632 632
633 633 :param cls:
634 634 :param repo_name:
635 635 """
636 636 from rhodecode.lib.utils import is_valid_repo
637 637
638 638 return is_valid_repo(repo_name, cls.base_path())
639 639
640 640
641 641 #==========================================================================
642 642 # SCM PROPERTIES
643 643 #==========================================================================
644 644
645 645 def get_changeset(self, rev):
646 646 return get_changeset_safe(self.scm_instance, rev)
647 647
648 648 @property
649 649 def tip(self):
650 650 return self.get_changeset('tip')
651 651
652 652 @property
653 653 def author(self):
654 654 return self.tip.author
655 655
656 656 @property
657 657 def last_change(self):
658 658 return self.scm_instance.last_change
659 659
660 660 #==========================================================================
661 661 # SCM CACHE INSTANCE
662 662 #==========================================================================
663 663
664 664 @property
665 665 def invalidate(self):
666 666 return CacheInvalidation.invalidate(self.repo_name)
667 667
668 668 def set_invalidate(self):
669 669 """
670 670 mark the cache key of this instance for invalidation
671 671 """
672 672 CacheInvalidation.set_invalidate(self.repo_name)
673 673
674 674 @LazyProperty
675 675 def scm_instance(self):
676 676 return self.__get_instance()
677 677
678 678 @property
679 679 def scm_instance_cached(self):
680 680 @cache_region('long_term')
681 681 def _c(repo_name):
682 682 return self.__get_instance()
683 683 rn = self.repo_name
684 684
685 685 inv = self.invalidate
686 686 if inv is not None:
687 687 region_invalidate(_c, None, rn)
688 688 # update our cache
689 689 CacheInvalidation.set_valid(inv.cache_key)
690 690 return _c(rn)
691 691
692 692 def __get_instance(self):
693 693
694 694 repo_full_path = self.repo_full_path
695 695
696 696 try:
697 697 alias = get_scm(repo_full_path)[0]
698 698 log.debug('Creating instance of %s repository' % alias)
699 699 backend = get_backend(alias)
700 700 except VCSError:
701 701 log.error(traceback.format_exc())
702 702 log.error('Perhaps this repository is in the db but not on the '
703 703 'filesystem; run "rescan repositories" with the '
704 704 '"destroy old data" option from the admin panel')
705 705 return
706 706
707 707 if alias == 'hg':
708 708
709 709 repo = backend(safe_str(repo_full_path), create=False,
710 710 baseui=self._ui)
711 711 # skip hidden web repository
712 712 if repo._get_hidden():
713 713 return
714 714 else:
715 715 repo = backend(repo_full_path, create=False)
716 716
717 717 return repo
718 718
719 719
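# A minimal usage sketch of the cached SCM instance pattern above. It assumes
# a configured SQLAlchemy Session, a beaker 'long_term' cache region, and that
# these models live in rhodecode.model.db; the repository name is illustrative.
from rhodecode.model.db import Repository

repo = Repository.get_by_repo_name('my-repo')   # hypothetical repository name
vcs_repo = repo.scm_instance_cached             # served from cache while the key is valid
repo.set_invalidate()                           # mark the cache key stale, e.g. after a push
vcs_repo = repo.scm_instance_cached             # instance is rebuilt and the key re-validated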
720 720 class RepoGroup(Base, BaseModel):
721 721 __tablename__ = 'groups'
722 722 __table_args__ = (UniqueConstraint('group_name', 'group_parent_id'),
723 723 CheckConstraint('group_id != group_parent_id'), {'extend_existing':True},)
724 724 __mapper_args__ = {'order_by':'group_name'}
725 725
726 726 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
727 727 group_name = Column("group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
728 728 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
729 729 group_description = Column("group_description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
730 730
731 731 parent_group = relationship('RepoGroup', remote_side=group_id)
732 732
733 733
734 734 def __init__(self, group_name='', parent_group=None):
735 735 self.group_name = group_name
736 736 self.parent_group = parent_group
737 737
738 738 def __repr__(self):
739 739 return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
740 740 self.group_name)
741 741
742 742 @classmethod
743 743 def groups_choices(cls):
744 744 from webhelpers.html import literal as _literal
745 745 repo_groups = [('', '')]
746 746 sep = ' &raquo; '
747 747 _name = lambda k: _literal(sep.join(k))
748 748
749 749 repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
750 750 for x in cls.query().all()])
751 751
752 752 repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
753 753 return repo_groups
754 754
755 755 @classmethod
756 756 def url_sep(cls):
757 757 return '/'
758 758
759 759 @classmethod
760 760 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
761 761 if case_insensitive:
762 762 gr = cls.query()\
763 763 .filter(cls.group_name.ilike(group_name))
764 764 else:
765 765 gr = cls.query()\
766 766 .filter(cls.group_name == group_name)
767 767 if cache:
768 768 gr = gr.options(FromCache("sql_cache_short",
769 769 "get_group_%s" % group_name))
770 770 return gr.scalar()
771 771
772 772 @property
773 773 def parents(self):
774 774 parents_recursion_limit = 5
775 775 groups = []
776 776 if self.parent_group is None:
777 777 return groups
778 778 cur_gr = self.parent_group
779 779 groups.insert(0, cur_gr)
780 780 cnt = 0
781 781 while 1:
782 782 cnt += 1
783 783 gr = getattr(cur_gr, 'parent_group', None)
784 784 cur_gr = cur_gr.parent_group
785 785 if gr is None:
786 786 break
787 787 if cnt == parents_recursion_limit:
788 788 # this will prevent accidental infinite loops
789 789 log.error('group nested more than %s' %
790 790 parents_recursion_limit)
791 791 break
792 792
793 793 groups.insert(0, gr)
794 794 return groups
795 795
796 796 @property
797 797 def children(self):
798 798 return RepoGroup.query().filter(RepoGroup.parent_group == self)
799 799
800 800 @property
801 801 def name(self):
802 802 return self.group_name.split(RepoGroup.url_sep())[-1]
803 803
804 804 @property
805 805 def full_path(self):
806 806 return self.group_name
807 807
808 808 @property
809 809 def full_path_splitted(self):
810 810 return self.group_name.split(RepoGroup.url_sep())
811 811
812 812 @property
813 813 def repositories(self):
814 814 return Repository.query().filter(Repository.group == self)
815 815
816 816 @property
817 817 def repositories_recursive_count(self):
818 818 cnt = self.repositories.count()
819 819
820 820 def children_count(group):
821 821 cnt = 0
822 822 for child in group.children:
823 823 cnt += child.repositories.count()
824 824 cnt += children_count(child)
825 825 return cnt
826 826
827 827 return cnt + children_count(self)
828 828
829 829
830 830 def get_new_name(self, group_name):
831 831 """
832 832 returns new full group name based on parent and new name
833 833
834 834 :param group_name:
835 835 """
836 836 path_prefix = (self.parent_group.full_path_splitted if
837 837 self.parent_group else [])
838 838 return RepoGroup.url_sep().join(path_prefix + [group_name])
839 839
840 840
841 841 class Permission(Base, BaseModel):
842 842 __tablename__ = 'permissions'
843 843 __table_args__ = {'extend_existing':True}
844 844 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
845 845 permission_name = Column("permission_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
846 846 permission_longname = Column("permission_longname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
847 847
848 848 def __repr__(self):
849 849 return "<%s('%s:%s')>" % (self.__class__.__name__,
850 850 self.permission_id, self.permission_name)
851 851
852 852 @classmethod
853 853 def get_by_key(cls, key):
854 854 return cls.query().filter(cls.permission_name == key).scalar()
855 855
856 856 class UserRepoToPerm(Base, BaseModel):
857 857 __tablename__ = 'repo_to_perm'
858 858 __table_args__ = (UniqueConstraint('user_id', 'repository_id'), {'extend_existing':True})
859 859 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
860 860 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
861 861 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
862 862 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
863 863
864 864 user = relationship('User')
865 865 permission = relationship('Permission')
866 866 repository = relationship('Repository')
867 867
868 868 class UserToPerm(Base, BaseModel):
869 869 __tablename__ = 'user_to_perm'
870 870 __table_args__ = (UniqueConstraint('user_id', 'permission_id'), {'extend_existing':True})
871 871 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
872 872 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
873 873 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
874 874
875 875 user = relationship('User')
876 876 permission = relationship('Permission')
877 877
878 878 @classmethod
879 879 def has_perm(cls, user_id, perm):
880 880 if not isinstance(perm, Permission):
881 881 raise Exception('perm needs to be an instance of Permission class')
882 882
883 883 return cls.query().filter(cls.user_id == user_id)\
884 884 .filter(cls.permission == perm).scalar() is not None
885 885
886 886 @classmethod
887 887 def grant_perm(cls, user_id, perm):
888 888 if not isinstance(perm, Permission):
889 889 raise Exception('perm needs to be an instance of Permission class')
890 890
891 891 new = cls()
892 892 new.user_id = user_id
893 893 new.permission = perm
894 894 try:
895 895 Session.add(new)
896 896 Session.commit()
897 897 except:
898 898 Session.rollback()
899 899
900 900
901 901 @classmethod
902 902 def revoke_perm(cls, user_id, perm):
903 903 if not isinstance(perm, Permission):
904 904 raise Exception('perm needs to be an instance of Permission class')
905 905
906 906 try:
907 907 cls.query().filter(cls.user_id == user_id)\
908 908 .filter(cls.permission == perm).delete()
909 909 Session.commit()
910 910 except:
911 911 Session.rollback()
912 912
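# A short sketch of the global permission helpers defined on UserToPerm above,
# assuming an initialized Session; the permission key and user id are examples only.
perm = Permission.get_by_key('hg.create.repository')

UserToPerm.grant_perm(1, perm)       # adds a row; duplicates are silently rolled back
assert UserToPerm.has_perm(1, perm)
UserToPerm.revoke_perm(1, perm)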
913 913 class UsersGroupRepoToPerm(Base, BaseModel):
914 914 __tablename__ = 'users_group_repo_to_perm'
915 915 __table_args__ = (UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), {'extend_existing':True})
916 916 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
917 917 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
918 918 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
919 919 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
920 920
921 921 users_group = relationship('UsersGroup')
922 922 permission = relationship('Permission')
923 923 repository = relationship('Repository')
924 924
925 925 def __repr__(self):
926 926 return '<userGroup:%s => %s >' % (self.users_group, self.repository)
927 927
928 928 class UsersGroupToPerm(Base, BaseModel):
929 929 __tablename__ = 'users_group_to_perm'
930 930 __table_args__ = {'extend_existing':True}
931 931 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
932 932 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
933 933 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
934 934
935 935 users_group = relationship('UsersGroup')
936 936 permission = relationship('Permission')
937 937
938 938
939 939 @classmethod
940 940 def has_perm(cls, users_group_id, perm):
941 941 if not isinstance(perm, Permission):
942 942 raise Exception('perm needs to be an instance of Permission class')
943 943
944 944 return cls.query().filter(cls.users_group_id ==
945 945 users_group_id)\
946 946 .filter(cls.permission == perm)\
947 947 .scalar() is not None
948 948
949 949 @classmethod
950 950 def grant_perm(cls, users_group_id, perm):
951 951 if not isinstance(perm, Permission):
952 952 raise Exception('perm needs to be an instance of Permission class')
953 953
954 954 new = cls()
955 955 new.users_group_id = users_group_id
956 956 new.permission = perm
957 957 try:
958 958 Session.add(new)
959 959 Session.commit()
960 960 except:
961 961 Session.rollback()
962 962
963 963
964 964 @classmethod
965 965 def revoke_perm(cls, users_group_id, perm):
966 966 if not isinstance(perm, Permission):
967 967 raise Exception('perm needs to be an instance of Permission class')
968 968
969 969 try:
970 970 cls.query().filter(cls.users_group_id == users_group_id)\
971 971 .filter(cls.permission == perm).delete()
972 972 Session.commit()
973 973 except:
974 974 Session.rollback()
975 975
976 976
977 977 class UserRepoGroupToPerm(Base, BaseModel):
978 978 __tablename__ = 'group_to_perm'
979 979 __table_args__ = (UniqueConstraint('group_id', 'permission_id'), {'extend_existing':True})
980 980
981 981 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
982 982 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
983 983 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
984 984 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
985 985
986 986 user = relationship('User')
987 987 permission = relationship('Permission')
988 988 group = relationship('RepoGroup')
989 989
990 990 class Statistics(Base, BaseModel):
991 991 __tablename__ = 'statistics'
992 992 __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing':True})
993 993 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
994 994 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
995 995 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
996 996 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
997 997 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
998 998 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
999 999
1000 1000 repository = relationship('Repository', single_parent=True)
1001 1001
1002 1002 class UserFollowing(Base, BaseModel):
1003 1003 __tablename__ = 'user_followings'
1004 1004 __table_args__ = (UniqueConstraint('user_id', 'follows_repository_id'),
1005 1005 UniqueConstraint('user_id', 'follows_user_id')
1006 1006 , {'extend_existing':True})
1007 1007
1008 1008 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1009 1009 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1010 1010 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
1011 1011 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1012 1012 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
1013 1013
1014 1014 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
1015 1015
1016 1016 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
1017 1017 follows_repository = relationship('Repository', order_by='Repository.repo_name')
1018 1018
1019 1019
1020 1020 @classmethod
1021 1021 def get_repo_followers(cls, repo_id):
1022 1022 return cls.query().filter(cls.follows_repo_id == repo_id)
1023 1023
1024 1024 class CacheInvalidation(Base, BaseModel):
1025 1025 __tablename__ = 'cache_invalidation'
1026 1026 __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing':True})
1027 1027 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1028 1028 cache_key = Column("cache_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
1029 1029 cache_args = Column("cache_args", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
1030 1030 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
1031 1031
1032 1032
1033 1033 def __init__(self, cache_key, cache_args=''):
1034 1034 self.cache_key = cache_key
1035 1035 self.cache_args = cache_args
1036 1036 self.cache_active = False
1037 1037
1038 1038 def __repr__(self):
1039 1039 return "<%s('%s:%s')>" % (self.__class__.__name__,
1040 1040 self.cache_id, self.cache_key)
1041 1041
1042 1042 @classmethod
1043 1043 def invalidate(cls, key):
1044 1044 """
1045 1045 Returns the Invalidation object if the given key should be invalidated,
1046 1046 None otherwise. `cache_active = False` means that this cache
1047 1047 state is not valid and needs to be invalidated
1048 1048
1049 1049 :param key:
1050 1050 """
1051 1051 return cls.query()\
1052 1052 .filter(CacheInvalidation.cache_key == key)\
1053 1053 .filter(CacheInvalidation.cache_active == False)\
1054 1054 .scalar()
1055 1055
1056 1056 @classmethod
1057 1057 def set_invalidate(cls, key):
1058 1058 """
1059 1059 Mark this Cache key for invalidation
1060 1060
1061 1061 :param key:
1062 1062 """
1063 1063
1064 1064 log.debug('marking %s for invalidation' % key)
1065 1065 inv_obj = Session.query(cls)\
1066 1066 .filter(cls.cache_key == key).scalar()
1067 1067 if inv_obj:
1068 1068 inv_obj.cache_active = False
1069 1069 else:
1070 1070 log.debug('cache key not found in invalidation db -> creating one')
1071 1071 inv_obj = CacheInvalidation(key)
1072 1072
1073 1073 try:
1074 1074 Session.add(inv_obj)
1075 1075 Session.commit()
1076 1076 except Exception:
1077 1077 log.error(traceback.format_exc())
1078 1078 Session.rollback()
1079 1079
1080 1080 @classmethod
1081 1081 def set_valid(cls, key):
1082 1082 """
1083 1083 Mark this cache key as active and currently cached
1084 1084
1085 1085 :param key:
1086 1086 """
1087 1087 inv_obj = Session.query(CacheInvalidation)\
1088 1088 .filter(CacheInvalidation.cache_key == key).scalar()
1089 1089 inv_obj.cache_active = True
1090 1090 Session.add(inv_obj)
1091 1091 Session.commit()
1092 1092
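# The invalidation cycle implemented by CacheInvalidation above, spelled out;
# assumes an initialized Session and uses an illustrative key name.
key = 'my-repo'
CacheInvalidation.set_invalidate(key)       # cache_active -> False (row created if missing)
stale = CacheInvalidation.invalidate(key)   # returns the row only while the key is stale
if stale is not None:
    # ... rebuild the expensive cached value here ...
    CacheInvalidation.set_valid(stale.cache_key)   # mark the key active/cached again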
1093 1093 class DbMigrateVersion(Base, BaseModel):
1094 1094 __tablename__ = 'db_migrate_version'
1095 1095 __table_args__ = {'extend_existing':True}
1096 1096 repository_id = Column('repository_id', String(250), primary_key=True)
1097 1097 repository_path = Column('repository_path', Text)
1098 1098 version = Column('version', Integer)
@@ -1,517 +1,517 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.diffs
4 4 ~~~~~~~~~~~~~~~~~~~
5 5
6 6 Set of diffing helpers, previously part of vcs
7 7
8 8
9 9 :created_on: Dec 4, 2011
10 10 :author: marcink
11 11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
12 12 :original copyright: 2007-2008 by Armin Ronacher
13 13 :license: GPLv3, see COPYING for more details.
14 14 """
15 15 # This program is free software: you can redistribute it and/or modify
16 16 # it under the terms of the GNU General Public License as published by
17 17 # the Free Software Foundation, either version 3 of the License, or
18 18 # (at your option) any later version.
19 19 #
20 20 # This program is distributed in the hope that it will be useful,
21 21 # but WITHOUT ANY WARRANTY; without even the implied warranty of
22 22 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23 23 # GNU General Public License for more details.
24 24 #
25 25 # You should have received a copy of the GNU General Public License
26 26 # along with this program. If not, see <http://www.gnu.org/licenses/>.
27 27
28 28 import re
29 29 import difflib
30 30 import markupsafe
31 31 from itertools import tee, imap
32 32
33 33 from pylons.i18n.translation import _
34 34
35 from vcs.exceptions import VCSError
36 from vcs.nodes import FileNode
35 from rhodecode.lib.vcs.exceptions import VCSError
36 from rhodecode.lib.vcs.nodes import FileNode
37 37
38 38 from rhodecode.lib.utils import EmptyChangeset
39 39
40 40
41 41 def wrap_to_table(str_):
42 42 return '''<table class="code-difftable">
43 43 <tr class="line no-comment">
44 44 <td class="lineno new"></td>
45 45 <td class="code no-comment"><pre>%s</pre></td>
46 46 </tr>
47 47 </table>''' % str_
48 48
49 49
50 50 def wrapped_diff(filenode_old, filenode_new, cut_off_limit=None,
51 51 ignore_whitespace=True, line_context=3,
52 52 enable_comments=False):
53 53 """
54 54 returns a diff wrapped in a table, checks for cut_off_limit and presents
55 55 a proper message
56 56 """
57 57
58 58 if filenode_old is None:
59 59 filenode_old = FileNode(filenode_new.path, '', EmptyChangeset())
60 60
61 61 if filenode_old.is_binary or filenode_new.is_binary:
62 62 diff = wrap_to_table(_('binary file'))
63 63 stats = (0, 0)
64 64 size = 0
65 65
66 66 elif cut_off_limit != -1 and (cut_off_limit is None or
67 67 (filenode_old.size < cut_off_limit and filenode_new.size < cut_off_limit)):
68 68
69 69 f_gitdiff = get_gitdiff(filenode_old, filenode_new,
70 70 ignore_whitespace=ignore_whitespace,
71 71 context=line_context)
72 72 diff_processor = DiffProcessor(f_gitdiff, format='gitdiff')
73 73
74 74 diff = diff_processor.as_html(enable_comments=enable_comments)
75 75 stats = diff_processor.stat()
76 76 size = len(diff or '')
77 77 else:
78 78 diff = wrap_to_table(_('Changeset was too big and was cut off, use '
79 79 'diff menu to display this diff'))
80 80 stats = (0, 0)
81 81 size = 0
82 82
83 83 if not diff:
84 84 diff = wrap_to_table(_('No changes detected'))
85 85
86 86 cs1 = filenode_old.last_changeset.raw_id
87 87 cs2 = filenode_new.last_changeset.raw_id
88 88
89 89 return size, cs1, cs2, diff, stats
90 90
91 91
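# A small sketch of how wrapped_diff() above is typically driven; the filenodes
# would come from two changesets of a vcs repository and the cut-off value is
# illustrative only.
def render_file_diff(filenode_old, filenode_new):
    size, cs1, cs2, html_diff, stats = wrapped_diff(
        filenode_old, filenode_new,
        cut_off_limit=256 * 1024,        # do not render diffs larger than ~256 kB
        ignore_whitespace=True,
        line_context=3,
        enable_comments=False,
    )
    # html_diff is a ready-to-embed <table> snippet, stats is (added, removed)
    return html_diff, stats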
92 92 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
93 93 """
94 94 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
95 95
96 96 :param ignore_whitespace: ignore whitespaces in diff
97 97 """
98 98 # make sure we pass in default context
99 99 context = context or 3
100 100
101 101 for filenode in (filenode_old, filenode_new):
102 102 if not isinstance(filenode, FileNode):
103 103 raise VCSError("Given object should be FileNode object, not %s"
104 104 % filenode.__class__)
105 105
106 106 repo = filenode_new.changeset.repository
107 107 old_raw_id = getattr(filenode_old.changeset, 'raw_id', repo.EMPTY_CHANGESET)
108 108 new_raw_id = getattr(filenode_new.changeset, 'raw_id', repo.EMPTY_CHANGESET)
109 109
110 110 vcs_gitdiff = repo.get_diff(old_raw_id, new_raw_id, filenode_new.path,
111 111 ignore_whitespace, context)
112 112
113 113 return vcs_gitdiff
114 114
115 115
116 116 class DiffProcessor(object):
117 117 """
118 118 Give it a unified diff and it returns a list of the files that were
119 119 mentioned in the diff together with a dict of meta information that
120 120 can be used to render it in an HTML template.
121 121 """
122 122 _chunk_re = re.compile(r'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
123 123
124 124 def __init__(self, diff, differ='diff', format='udiff'):
125 125 """
126 126 :param diff: a text in diff format or generator
127 127 :param format: format of diff passed, `udiff` or `gitdiff`
128 128 """
129 129 if isinstance(diff, basestring):
130 130 diff = [diff]
131 131
132 132 self.__udiff = diff
133 133 self.__format = format
134 134 self.adds = 0
135 135 self.removes = 0
136 136
137 137 if isinstance(self.__udiff, basestring):
138 138 self.lines = iter(self.__udiff.splitlines(1))
139 139
140 140 elif self.__format == 'gitdiff':
141 141 udiff_copy = self.copy_iterator()
142 142 self.lines = imap(self.escaper, self._parse_gitdiff(udiff_copy))
143 143 else:
144 144 udiff_copy = self.copy_iterator()
145 145 self.lines = imap(self.escaper, udiff_copy)
146 146
147 147 # Select a differ.
148 148 if differ == 'difflib':
149 149 self.differ = self._highlight_line_difflib
150 150 else:
151 151 self.differ = self._highlight_line_udiff
152 152
153 153 def escaper(self, string):
154 154 return markupsafe.escape(string)
155 155
156 156 def copy_iterator(self):
157 157 """
158 158 make a fresh copy of the generator; we should not iterate through
159 159 the original, as it's needed for repeating operations on
160 160 this instance of DiffProcessor
161 161 """
162 162 self.__udiff, iterator_copy = tee(self.__udiff)
163 163 return iterator_copy
164 164
165 165 def _extract_rev(self, line1, line2):
166 166 """
167 167 Extract the filename and revision hint from a line.
168 168 """
169 169
170 170 try:
171 171 if line1.startswith('--- ') and line2.startswith('+++ '):
172 172 l1 = line1[4:].split(None, 1)
173 173 old_filename = (l1[0].replace('a/', '', 1)
174 174 if len(l1) >= 1 else None)
175 175 old_rev = l1[1] if len(l1) == 2 else 'old'
176 176
177 177 l2 = line2[4:].split(None, 1)
178 178 new_filename = (l2[0].replace('b/', '', 1)
179 179 if len(l1) >= 1 else None)
180 180 new_rev = l2[1] if len(l2) == 2 else 'new'
181 181
182 182 filename = (old_filename
183 183 if old_filename != '/dev/null' else new_filename)
184 184
185 185 return filename, new_rev, old_rev
186 186 except (ValueError, IndexError):
187 187 pass
188 188
189 189 return None, None, None
190 190
191 191 def _parse_gitdiff(self, diffiterator):
192 192 def line_decoder(l):
193 193 if l.startswith('+') and not l.startswith('+++'):
194 194 self.adds += 1
195 195 elif l.startswith('-') and not l.startswith('---'):
196 196 self.removes += 1
197 197 return l.decode('utf8', 'replace')
198 198
199 199 output = list(diffiterator)
200 200 size = len(output)
201 201
202 202 if size == 2:
203 203 l = []
204 204 l.extend([output[0]])
205 205 l.extend(output[1].splitlines(1))
206 206 return map(line_decoder, l)
207 207 elif size == 1:
208 208 return map(line_decoder, output[0].splitlines(1))
209 209 elif size == 0:
210 210 return []
211 211
212 212 raise Exception('wrong size of diff %s' % size)
213 213
214 214 def _highlight_line_difflib(self, line, next_):
215 215 """
216 216 Highlight inline changes in both lines.
217 217 """
218 218
219 219 if line['action'] == 'del':
220 220 old, new = line, next_
221 221 else:
222 222 old, new = next_, line
223 223
224 224 oldwords = re.split(r'(\W)', old['line'])
225 225 newwords = re.split(r'(\W)', new['line'])
226 226
227 227 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
228 228
229 229 oldfragments, newfragments = [], []
230 230 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
231 231 oldfrag = ''.join(oldwords[i1:i2])
232 232 newfrag = ''.join(newwords[j1:j2])
233 233 if tag != 'equal':
234 234 if oldfrag:
235 235 oldfrag = '<del>%s</del>' % oldfrag
236 236 if newfrag:
237 237 newfrag = '<ins>%s</ins>' % newfrag
238 238 oldfragments.append(oldfrag)
239 239 newfragments.append(newfrag)
240 240
241 241 old['line'] = "".join(oldfragments)
242 242 new['line'] = "".join(newfragments)
243 243
244 244 def _highlight_line_udiff(self, line, next_):
245 245 """
246 246 Highlight inline changes in both lines.
247 247 """
248 248 start = 0
249 249 limit = min(len(line['line']), len(next_['line']))
250 250 while start < limit and line['line'][start] == next_['line'][start]:
251 251 start += 1
252 252 end = -1
253 253 limit -= start
254 254 while -end <= limit and line['line'][end] == next_['line'][end]:
255 255 end -= 1
256 256 end += 1
257 257 if start or end:
258 258 def do(l):
259 259 last = end + len(l['line'])
260 260 if l['action'] == 'add':
261 261 tag = 'ins'
262 262 else:
263 263 tag = 'del'
264 264 l['line'] = '%s<%s>%s</%s>%s' % (
265 265 l['line'][:start],
266 266 tag,
267 267 l['line'][start:last],
268 268 tag,
269 269 l['line'][last:]
270 270 )
271 271 do(line)
272 272 do(next_)
273 273
274 274 def _parse_udiff(self):
275 275 """
276 276 Parse the diff and return data for the template.
277 277 """
278 278 lineiter = self.lines
279 279 files = []
280 280 try:
281 281 line = lineiter.next()
282 282 # skip first context
283 283 skipfirst = True
284 284 while 1:
285 285 # continue until we found the old file
286 286 if not line.startswith('--- '):
287 287 line = lineiter.next()
288 288 continue
289 289
290 290 chunks = []
291 291 filename, old_rev, new_rev = \
292 292 self._extract_rev(line, lineiter.next())
293 293 files.append({
294 294 'filename': filename,
295 295 'old_revision': old_rev,
296 296 'new_revision': new_rev,
297 297 'chunks': chunks
298 298 })
299 299
300 300 line = lineiter.next()
301 301 while line:
302 302 match = self._chunk_re.match(line)
303 303 if not match:
304 304 break
305 305
306 306 lines = []
307 307 chunks.append(lines)
308 308
309 309 old_line, old_end, new_line, new_end = \
310 310 [int(x or 1) for x in match.groups()[:-1]]
311 311 old_line -= 1
312 312 new_line -= 1
313 313 context = len(match.groups()) == 5
314 314 old_end += old_line
315 315 new_end += new_line
316 316
317 317 if context:
318 318 if not skipfirst:
319 319 lines.append({
320 320 'old_lineno': '...',
321 321 'new_lineno': '...',
322 322 'action': 'context',
323 323 'line': line,
324 324 })
325 325 else:
326 326 skipfirst = False
327 327
328 328 line = lineiter.next()
329 329 while old_line < old_end or new_line < new_end:
330 330 if line:
331 331 command, line = line[0], line[1:]
332 332 else:
333 333 command = ' '
334 334 affects_old = affects_new = False
335 335
336 336 # ignore those if we don't expect them
337 337 if command in '#@':
338 338 continue
339 339 elif command == '+':
340 340 affects_new = True
341 341 action = 'add'
342 342 elif command == '-':
343 343 affects_old = True
344 344 action = 'del'
345 345 else:
346 346 affects_old = affects_new = True
347 347 action = 'unmod'
348 348
349 349 old_line += affects_old
350 350 new_line += affects_new
351 351 lines.append({
352 352 'old_lineno': affects_old and old_line or '',
353 353 'new_lineno': affects_new and new_line or '',
354 354 'action': action,
355 355 'line': line
356 356 })
357 357 line = lineiter.next()
358 358
359 359 except StopIteration:
360 360 pass
361 361
362 362 # highlight inline changes
363 363 for _ in files:
364 364 for chunk in chunks:
365 365 lineiter = iter(chunk)
366 366 #first = True
367 367 try:
368 368 while 1:
369 369 line = lineiter.next()
370 370 if line['action'] != 'unmod':
371 371 nextline = lineiter.next()
372 372 if nextline['action'] == 'unmod' or \
373 373 nextline['action'] == line['action']:
374 374 continue
375 375 self.differ(line, nextline)
376 376 except StopIteration:
377 377 pass
378 378
379 379 return files
380 380
381 381 def prepare(self):
382 382 """
383 383 Prepare the passed udiff for HTML rendering. It'll return a list
384 384 of dicts
385 385 """
386 386 return self._parse_udiff()
387 387
388 388 def _safe_id(self, idstring):
389 389 """Make a string safe for including in an id attribute.
390 390
391 391 The HTML spec says that id attributes 'must begin with
392 392 a letter ([A-Za-z]) and may be followed by any number
393 393 of letters, digits ([0-9]), hyphens ("-"), underscores
394 394 ("_"), colons (":"), and periods (".")'. These regexps
395 395 are slightly over-zealous, in that they remove colons
396 396 and periods unnecessarily.
397 397
398 398 Whitespace is transformed into underscores, and then
399 399 anything which is not a hyphen or a character that
400 400 matches \w (alphanumerics and underscore) is removed.
401 401
402 402 """
403 403 # Transform all whitespace to underscore
404 404 idstring = re.sub(r'\s', "_", '%s' % idstring)
405 405 # Remove everything that is not a hyphen or a member of \w
406 406 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
407 407 return idstring
408 408
409 409 def raw_diff(self):
410 410 """
411 411 Returns raw string as udiff
412 412 """
413 413 udiff_copy = self.copy_iterator()
414 414 if self.__format == 'gitdiff':
415 415 udiff_copy = self._parse_gitdiff(udiff_copy)
416 416 return u''.join(udiff_copy)
417 417
418 418 def as_html(self, table_class='code-difftable', line_class='line',
419 419 new_lineno_class='lineno old', old_lineno_class='lineno new',
420 420 code_class='code', enable_comments=False):
421 421 """
422 422 Return udiff as html table with customized css classes
423 423 """
424 424 def _link_to_if(condition, label, url):
425 425 """
426 426 Generates a link if the condition is met, or just the label if not.
427 427 """
428 428
429 429 if condition:
430 430 return '''<a href="%(url)s">%(label)s</a>''' % {
431 431 'url': url,
432 432 'label': label
433 433 }
434 434 else:
435 435 return label
436 436 diff_lines = self.prepare()
437 437 _html_empty = True
438 438 _html = []
439 439 _html.append('''<table class="%(table_class)s">\n''' % {
440 440 'table_class': table_class
441 441 })
442 442 for diff in diff_lines:
443 443 for line in diff['chunks']:
444 444 _html_empty = False
445 445 for change in line:
446 446 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
447 447 'lc': line_class,
448 448 'action': change['action']
449 449 })
450 450 anchor_old_id = ''
451 451 anchor_new_id = ''
452 452 anchor_old = "%(filename)s_o%(oldline_no)s" % {
453 453 'filename': self._safe_id(diff['filename']),
454 454 'oldline_no': change['old_lineno']
455 455 }
456 456 anchor_new = "%(filename)s_n%(oldline_no)s" % {
457 457 'filename': self._safe_id(diff['filename']),
458 458 'oldline_no': change['new_lineno']
459 459 }
460 460 cond_old = (change['old_lineno'] != '...' and
461 461 change['old_lineno'])
462 462 cond_new = (change['new_lineno'] != '...' and
463 463 change['new_lineno'])
464 464 if cond_old:
465 465 anchor_old_id = 'id="%s"' % anchor_old
466 466 if cond_new:
467 467 anchor_new_id = 'id="%s"' % anchor_new
468 468 ###########################################################
469 469 # OLD LINE NUMBER
470 470 ###########################################################
471 471 _html.append('''\t<td %(a_id)s class="%(olc)s">''' % {
472 472 'a_id': anchor_old_id,
473 473 'olc': old_lineno_class
474 474 })
475 475
476 476 _html.append('''%(link)s''' % {
477 477 'link': _link_to_if(True, change['old_lineno'],
478 478 '#%s' % anchor_old)
479 479 })
480 480 _html.append('''</td>\n''')
481 481 ###########################################################
482 482 # NEW LINE NUMBER
483 483 ###########################################################
484 484
485 485 _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
486 486 'a_id': anchor_new_id,
487 487 'nlc': new_lineno_class
488 488 })
489 489
490 490 _html.append('''%(link)s''' % {
491 491 'link': _link_to_if(True, change['new_lineno'],
492 492 '#%s' % anchor_new)
493 493 })
494 494 _html.append('''</td>\n''')
495 495 ###########################################################
496 496 # CODE
497 497 ###########################################################
498 498 comments = '' if enable_comments else 'no-comment'
499 499 _html.append('''\t<td class="%(cc)s %(inc)s">''' % {
500 500 'cc': code_class,
501 501 'inc': comments
502 502 })
503 503 _html.append('''\n\t\t<pre>%(code)s</pre>\n''' % {
504 504 'code': change['line']
505 505 })
506 506 _html.append('''\t</td>''')
507 507 _html.append('''\n</tr>\n''')
508 508 _html.append('''</table>''')
509 509 if _html_empty:
510 510 return None
511 511 return ''.join(_html)
512 512
513 513 def stat(self):
514 514 """
515 515 Returns tuple of added, and removed lines for this instance
516 516 """
517 517 return self.adds, self.removes
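# A quick sketch of DiffProcessor above on a hand-written unified diff. The
# diff text is illustrative; it is pre-split into lines because the parser
# consumes its input line by line, and stat() only counts lines for gitdiff input.
udiff = (
    '--- a/example.txt old\n'
    '+++ b/example.txt new\n'
    '@@ -1,2 +1,2 @@\n'
    ' unchanged line\n'
    '-removed line\n'
    '+added line\n'
)
proc = DiffProcessor(udiff.splitlines(True), differ='difflib', format='udiff')
files = proc.prepare()   # list of {'filename', 'old_revision', 'new_revision', 'chunks'}
table = proc.as_html(enable_comments=False)   # HTML table markup, or None for an empty diff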
@@ -1,886 +1,886 b''
1 1 """Helper functions
2 2
3 3 Consists of functions typically used within templates, but also
4 4 available to Controllers. This module is available to both as 'h'.
5 5 """
6 6 import random
7 7 import hashlib
8 8 import StringIO
9 9 import urllib
10 10 import math
11 11 import logging
12 12
13 13 from datetime import datetime
14 14 from pygments.formatters.html import HtmlFormatter
15 15 from pygments import highlight as code_highlight
16 16 from pylons import url, request, config
17 17 from pylons.i18n.translation import _, ungettext
18 18 from hashlib import md5
19 19
20 20 from webhelpers.html import literal, HTML, escape
21 21 from webhelpers.html.tools import *
22 22 from webhelpers.html.builder import make_tag
23 23 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
24 24 end_form, file, form, hidden, image, javascript_link, link_to, \
25 25 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
26 26 submit, text, password, textarea, title, ul, xml_declaration, radio
27 27 from webhelpers.html.tools import auto_link, button_to, highlight, \
28 28 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
29 29 from webhelpers.number import format_byte_size, format_bit_size
30 30 from webhelpers.pylonslib import Flash as _Flash
31 31 from webhelpers.pylonslib.secure_form import secure_form
32 32 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
33 33 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
34 34 replace_whitespace, urlify, truncate, wrap_paragraphs
35 35 from webhelpers.date import time_ago_in_words
36 36 from webhelpers.paginate import Page
37 37 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
38 38 convert_boolean_attrs, NotGiven, _make_safe_id_component
39 39
40 40 from rhodecode.lib.annotate import annotate_highlight
41 41 from rhodecode.lib.utils import repo_name_slug
42 42 from rhodecode.lib import str2bool, safe_unicode, safe_str, get_changeset_safe
43 43 from rhodecode.lib.markup_renderer import MarkupRenderer
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47
48 48 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
49 49 """
50 50 Reset button
51 51 """
52 52 _set_input_attrs(attrs, type, name, value)
53 53 _set_id_attr(attrs, id, name)
54 54 convert_boolean_attrs(attrs, ["disabled"])
55 55 return HTML.input(**attrs)
56 56
57 57 reset = _reset
58 58 safeid = _make_safe_id_component
59 59
60 60
61 61 def FID(raw_id, path):
62 62 """
63 63 Creates a unique ID for a filenode based on a hash of its path and revision;
64 64 it's safe to use in urls
65 65
66 66 :param raw_id:
67 67 :param path:
68 68 """
69 69
70 70 return 'C-%s-%s' % (short_id(raw_id), md5(path).hexdigest()[:12])
71 71
72 72
73 73 def get_token():
74 74 """Return the current authentication token, creating one if one doesn't
75 75 already exist.
76 76 """
77 77 token_key = "_authentication_token"
78 78 from pylons import session
79 79 if not token_key in session:
80 80 try:
81 81 token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
82 82 except AttributeError: # Python < 2.4
83 83 token = hashlib.sha1(str(random.randrange(2 ** 128))).hexdigest()
84 84 session[token_key] = token
85 85 if hasattr(session, 'save'):
86 86 session.save()
87 87 return session[token_key]
88 88
89 89 class _GetError(object):
90 90 """Get error from form_errors, and represent it as span wrapped error
91 91 message
92 92
93 93 :param field_name: field to fetch errors for
94 94 :param form_errors: form errors dict
95 95 """
96 96
97 97 def __call__(self, field_name, form_errors):
98 98 tmpl = """<span class="error_msg">%s</span>"""
99 99 if form_errors and form_errors.has_key(field_name):
100 100 return literal(tmpl % form_errors.get(field_name))
101 101
102 102 get_error = _GetError()
103 103
104 104 class _ToolTip(object):
105 105
106 106 def __call__(self, tooltip_title, trim_at=50):
107 107 """Special function just to wrap our text into nicely formatted
108 108 autowrapped text
109 109
110 110 :param tooltip_title:
111 111 """
112 112 return escape(tooltip_title)
113 113 tooltip = _ToolTip()
114 114
115 115 class _FilesBreadCrumbs(object):
116 116
117 117 def __call__(self, repo_name, rev, paths):
118 118 if isinstance(paths, str):
119 119 paths = safe_unicode(paths)
120 120 url_l = [link_to(repo_name, url('files_home',
121 121 repo_name=repo_name,
122 122 revision=rev, f_path=''))]
123 123 paths_l = paths.split('/')
124 124 for cnt, p in enumerate(paths_l):
125 125 if p != '':
126 126 url_l.append(link_to(p,
127 127 url('files_home',
128 128 repo_name=repo_name,
129 129 revision=rev,
130 130 f_path='/'.join(paths_l[:cnt + 1])
131 131 )
132 132 )
133 133 )
134 134
135 135 return literal('/'.join(url_l))
136 136
137 137 files_breadcrumbs = _FilesBreadCrumbs()
138 138
139 139 class CodeHtmlFormatter(HtmlFormatter):
140 140 """My code HTML formatter for source code
141 141 """
142 142
143 143 def wrap(self, source, outfile):
144 144 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
145 145
146 146 def _wrap_code(self, source):
147 147 for cnt, it in enumerate(source):
148 148 i, t = it
149 149 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
150 150 yield i, t
151 151
152 152 def _wrap_tablelinenos(self, inner):
153 153 dummyoutfile = StringIO.StringIO()
154 154 lncount = 0
155 155 for t, line in inner:
156 156 if t:
157 157 lncount += 1
158 158 dummyoutfile.write(line)
159 159
160 160 fl = self.linenostart
161 161 mw = len(str(lncount + fl - 1))
162 162 sp = self.linenospecial
163 163 st = self.linenostep
164 164 la = self.lineanchors
165 165 aln = self.anchorlinenos
166 166 nocls = self.noclasses
167 167 if sp:
168 168 lines = []
169 169
170 170 for i in range(fl, fl + lncount):
171 171 if i % st == 0:
172 172 if i % sp == 0:
173 173 if aln:
174 174 lines.append('<a href="#%s%d" class="special">%*d</a>' %
175 175 (la, i, mw, i))
176 176 else:
177 177 lines.append('<span class="special">%*d</span>' % (mw, i))
178 178 else:
179 179 if aln:
180 180 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
181 181 else:
182 182 lines.append('%*d' % (mw, i))
183 183 else:
184 184 lines.append('')
185 185 ls = '\n'.join(lines)
186 186 else:
187 187 lines = []
188 188 for i in range(fl, fl + lncount):
189 189 if i % st == 0:
190 190 if aln:
191 191 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
192 192 else:
193 193 lines.append('%*d' % (mw, i))
194 194 else:
195 195 lines.append('')
196 196 ls = '\n'.join(lines)
197 197
198 198 # in case you wonder about the seemingly redundant <div> here: since the
199 199 # content in the other cell also is wrapped in a div, some browsers in
200 200 # some configurations seem to mess up the formatting...
201 201 if nocls:
202 202 yield 0, ('<table class="%stable">' % self.cssclass +
203 203 '<tr><td><div class="linenodiv" '
204 204 'style="background-color: #f0f0f0; padding-right: 10px">'
205 205 '<pre style="line-height: 125%">' +
206 206 ls + '</pre></div></td><td id="hlcode" class="code">')
207 207 else:
208 208 yield 0, ('<table class="%stable">' % self.cssclass +
209 209 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
210 210 ls + '</pre></div></td><td id="hlcode" class="code">')
211 211 yield 0, dummyoutfile.getvalue()
212 212 yield 0, '</td></tr></table>'
213 213
214 214
215 215 def pygmentize(filenode, **kwargs):
216 216 """pygmentize function using pygments
217 217
218 218 :param filenode:
219 219 """
220 220
221 221 return literal(code_highlight(filenode.content,
222 222 filenode.lexer, CodeHtmlFormatter(**kwargs)))
223 223
224 224
225 225 def pygmentize_annotation(repo_name, filenode, **kwargs):
226 226 """
227 227 pygmentize function for annotation
228 228
229 229 :param filenode:
230 230 """
231 231
232 232 color_dict = {}
233 233
234 234 def gen_color(n=10000):
235 235 """generator for getting n evenly distributed colors using
236 236 hsv color and the golden ratio. It always returns the same order of colors
237 237
238 238 :returns: RGB tuple
239 239 """
240 240
241 241 def hsv_to_rgb(h, s, v):
242 242 if s == 0.0:
243 243 return v, v, v
244 244 i = int(h * 6.0) # XXX assume int() truncates!
245 245 f = (h * 6.0) - i
246 246 p = v * (1.0 - s)
247 247 q = v * (1.0 - s * f)
248 248 t = v * (1.0 - s * (1.0 - f))
249 249 i = i % 6
250 250 if i == 0:
251 251 return v, t, p
252 252 if i == 1:
253 253 return q, v, p
254 254 if i == 2:
255 255 return p, v, t
256 256 if i == 3:
257 257 return p, q, v
258 258 if i == 4:
259 259 return t, p, v
260 260 if i == 5:
261 261 return v, p, q
262 262
263 263 golden_ratio = 0.618033988749895
264 264 h = 0.22717784590367374
265 265
266 266 for _ in xrange(n):
267 267 h += golden_ratio
268 268 h %= 1
269 269 HSV_tuple = [h, 0.95, 0.95]
270 270 RGB_tuple = hsv_to_rgb(*HSV_tuple)
271 271 yield map(lambda x: str(int(x * 256)), RGB_tuple)
272 272
273 273 cgenerator = gen_color()
274 274
275 275 def get_color_string(cs):
276 276 if cs in color_dict:
277 277 col = color_dict[cs]
278 278 else:
279 279 col = color_dict[cs] = cgenerator.next()
280 280 return "color: rgb(%s) !important;" % (', '.join(col))
281 281
282 282 def url_func(repo_name):
283 283
284 284 def _url_func(changeset):
285 285 author = changeset.author
286 286 date = changeset.date
287 287 message = tooltip(changeset.message)
288 288
289 289 tooltip_html = ("<div style='font-size:0.8em'><b>Author:</b>"
290 290 " %s<br/><b>Date:</b> %s<br/><b>Message:"
291 291 "</b> %s<br/></div>")
292 292
293 293 tooltip_html = tooltip_html % (author, date, message)
294 294 lnk_format = '%5s:%s' % ('r%s' % changeset.revision,
295 295 short_id(changeset.raw_id))
296 296 uri = link_to(
297 297 lnk_format,
298 298 url('changeset_home', repo_name=repo_name,
299 299 revision=changeset.raw_id),
300 300 style=get_color_string(changeset.raw_id),
301 301 class_='tooltip',
302 302 title=tooltip_html
303 303 )
304 304
305 305 uri += '\n'
306 306 return uri
307 307 return _url_func
308 308
309 309 return literal(annotate_highlight(filenode, url_func(repo_name), **kwargs))
310 310
311 311
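
The annotation colouring above leans on a small trick: stepping the hue by the golden-ratio constant on each iteration gives well-separated colours in a stable order. A minimal standalone sketch of the same idea, using the standard-library colorsys instead of the inline hsv_to_rgb helper (constants copied from gen_color above):

import colorsys

def color_stream(n=10):
    h = 0.22717784590367374       # same starting hue as gen_color()
    golden_ratio = 0.618033988749895
    for _ in range(n):
        h = (h + golden_ratio) % 1
        r, g, b = colorsys.hsv_to_rgb(h, 0.95, 0.95)
        yield tuple(int(x * 256) for x in (r, g, b))

for rgb in color_stream(3):
    print('color: rgb(%d, %d, %d)' % rgb)
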
312 312 def is_following_repo(repo_name, user_id):
313 313 from rhodecode.model.scm import ScmModel
314 314 return ScmModel().is_following_repo(repo_name, user_id)
315 315
316 316 flash = _Flash()
317 317
318 318 #==============================================================================
319 319 # SCM FILTERS available via h.
320 320 #==============================================================================
321 from vcs.utils import author_name, author_email
321 from rhodecode.lib.vcs.utils import author_name, author_email
322 322 from rhodecode.lib import credentials_filter, age as _age
323 323 from rhodecode.model.db import User
324 324
325 325 age = lambda x: _age(x)
326 326 capitalize = lambda x: x.capitalize()
327 327 email = author_email
328 328 short_id = lambda x: x[:12]
329 329 hide_credentials = lambda x: ''.join(credentials_filter(x))
330 330
331 331
332 332 def is_git(repository):
333 333 if hasattr(repository, 'alias'):
334 334 _type = repository.alias
335 335 elif hasattr(repository, 'repo_type'):
336 336 _type = repository.repo_type
337 337 else:
338 338 _type = repository
339 339 return _type == 'git'
340 340
341 341
342 342 def is_hg(repository):
343 343 if hasattr(repository, 'alias'):
344 344 _type = repository.alias
345 345 elif hasattr(repository, 'repo_type'):
346 346 _type = repository.repo_type
347 347 else:
348 348 _type = repository
349 349 return _type == 'hg'
350 350
351 351
352 352 def email_or_none(author):
353 353 _email = email(author)
354 354 if _email != '':
355 355 return _email
356 356
357 357 # See if it contains a username we can get an email from
358 358 user = User.get_by_username(author_name(author), case_insensitive=True,
359 359 cache=True)
360 360 if user is not None:
361 361 return user.email
362 362
363 363 # No valid email, not a valid user in the system, none!
364 364 return None
365 365
366 366
367 367 def person(author):
368 368 # attr to return from fetched user
369 369 person_getter = lambda usr: usr.username
370 370
371 371 # Valid email in the attribute passed, see if they're in the system
372 372 _email = email(author)
373 373 if _email != '':
374 374 user = User.get_by_email(_email, case_insensitive=True, cache=True)
375 375 if user is not None:
376 376 return person_getter(user)
377 377 return _email
378 378
379 379 # Maybe it's a username?
380 380 _author = author_name(author)
381 381 user = User.get_by_username(_author, case_insensitive=True,
382 382 cache=True)
383 383 if user is not None:
384 384 return person_getter(user)
385 385
386 386 # Still nothing? Just pass back the author name then
387 387 return _author
388 388
389 389
390 390 def bool2icon(value):
391 391 """Returns True/False values represented as a small html image of true/false
392 392 icons
393 393
394 394 :param value: bool value
395 395 """
396 396
397 397 if value is True:
398 398 return HTML.tag('img', src=url("/images/icons/accept.png"),
399 399 alt=_('True'))
400 400
401 401 if value is False:
402 402 return HTML.tag('img', src=url("/images/icons/cancel.png"),
403 403 alt=_('False'))
404 404
405 405 return value
406 406
407 407
408 408 def action_parser(user_log, feed=False):
409 409 """This helper maps the specified string action into translated
410 410 fancy names with icons and links
411 411
412 412 :param user_log: user log instance
413 413 :param feed: use output for feeds (no html and fancy icons)
414 414 """
415 415
416 416 action = user_log.action
417 417 action_params = ' '
418 418
419 419 x = action.split(':')
420 420
421 421 if len(x) > 1:
422 422 action, action_params = x
423 423
424 424 def get_cs_links():
425 425 revs_limit = 3 #display this amount always
426 426 revs_top_limit = 50 #show up to this amount of hidden changesets
427 427 revs = action_params.split(',')
428 428 repo_name = user_log.repository.repo_name
429 429
430 430 from rhodecode.model.scm import ScmModel
431 431 repo = user_log.repository.scm_instance
432 432
433 433 message = lambda rev: get_changeset_safe(repo, rev).message
434 434 cs_links = []
435 435 cs_links.append(" " + ', '.join ([link_to(rev,
436 436 url('changeset_home',
437 437 repo_name=repo_name,
438 438 revision=rev), title=tooltip(message(rev)),
439 439 class_='tooltip') for rev in revs[:revs_limit] ]))
440 440
441 441 compare_view = (' <div class="compare_view tooltip" title="%s">'
442 442 '<a href="%s">%s</a> '
443 443 '</div>' % (_('Show all combined changesets %s->%s' \
444 444 % (revs[0], revs[-1])),
445 445 url('changeset_home', repo_name=repo_name,
446 446 revision='%s...%s' % (revs[0], revs[-1])
447 447 ),
448 448 _('compare view'))
449 449 )
450 450
451 451 # if we have exactly one more than normally displayed:
452 452 # just display it, takes less space than displaying "and 1 more revisions"
453 453 if len(revs) == revs_limit + 1:
454 454 rev = revs[revs_limit]
455 455 cs_links.append(", " + link_to(rev,
456 456 url('changeset_home',
457 457 repo_name=repo_name,
458 458 revision=rev), title=tooltip(message(rev)),
459 459 class_='tooltip') )
460 460
461 461 # hidden-by-default ones
462 462 if len(revs) > revs_limit + 1:
463 463 uniq_id = revs[0]
464 464 html_tmpl = ('<span> %s '
465 465 '<a class="show_more" id="_%s" href="#more">%s</a> '
466 466 '%s</span>')
467 467 if not feed:
468 468 cs_links.append(html_tmpl % (_('and'), uniq_id, _('%s more') \
469 469 % (len(revs) - revs_limit),
470 470 _('revisions')))
471 471
472 472 if not feed:
473 473 html_tmpl = '<span id="%s" style="display:none">, %s </span>'
474 474 else:
475 475 html_tmpl = '<span id="%s"> %s </span>'
476 476
477 477 morelinks = ', '.join([link_to(rev,
478 478 url('changeset_home',
479 479 repo_name=repo_name, revision=rev),
480 480 title=message(rev), class_='tooltip')
481 481 for rev in revs[revs_limit:revs_top_limit]])
482 482
483 483 if len(revs) > revs_top_limit:
484 484 morelinks += ', ...'
485 485
486 486 cs_links.append(html_tmpl % (uniq_id, morelinks))
487 487 if len(revs) > 1:
488 488 cs_links.append(compare_view)
489 489 return ''.join(cs_links)
490 490
491 491 def get_fork_name():
492 492 repo_name = action_params
493 493 return _('fork name ') + str(link_to(action_params, url('summary_home',
494 494 repo_name=repo_name,)))
495 495
496 496 action_map = {'user_deleted_repo':(_('[deleted] repository'), None),
497 497 'user_created_repo':(_('[created] repository'), None),
498 498 'user_created_fork':(_('[created] repository as fork'), None),
499 499 'user_forked_repo':(_('[forked] repository'), get_fork_name),
500 500 'user_updated_repo':(_('[updated] repository'), None),
501 501 'admin_deleted_repo':(_('[delete] repository'), None),
502 502 'admin_created_repo':(_('[created] repository'), None),
503 503 'admin_forked_repo':(_('[forked] repository'), None),
504 504 'admin_updated_repo':(_('[updated] repository'), None),
505 505 'push':(_('[pushed] into'), get_cs_links),
506 506 'push_local':(_('[committed via RhodeCode] into'), get_cs_links),
507 507 'push_remote':(_('[pulled from remote] into'), get_cs_links),
508 508 'pull':(_('[pulled] from'), None),
509 509 'started_following_repo':(_('[started following] repository'), None),
510 510 'stopped_following_repo':(_('[stopped following] repository'), None),
511 511 }
512 512
513 513 action_str = action_map.get(action, action)
514 514 if feed:
515 515 action = action_str[0].replace('[', '').replace(']', '')
516 516 else:
517 517 action = action_str[0].replace('[', '<span class="journal_highlight">')\
518 518 .replace(']', '</span>')
519 519
520 520 action_params_func = lambda :""
521 521
522 522 if callable(action_str[1]):
523 523 action_params_func = action_str[1]
524 524
525 525 return [literal(action), action_params_func]
526 526
527 527
528 528 def action_parser_icon(user_log):
529 529 action = user_log.action
530 530 action_params = None
531 531 x = action.split(':')
532 532
533 533 if len(x) > 1:
534 534 action, action_params = x
535 535
536 536 tmpl = """<img src="%s%s" alt="%s"/>"""
537 537 map = {'user_deleted_repo':'database_delete.png',
538 538 'user_created_repo':'database_add.png',
539 539 'user_created_fork':'arrow_divide.png',
540 540 'user_forked_repo':'arrow_divide.png',
541 541 'user_updated_repo':'database_edit.png',
542 542 'admin_deleted_repo':'database_delete.png',
543 543 'admin_created_repo':'database_add.png',
544 544 'admin_forked_repo':'arrow_divide.png',
545 545 'admin_updated_repo':'database_edit.png',
546 546 'push':'script_add.png',
547 547 'push_local':'script_edit.png',
548 548 'push_remote':'connect.png',
549 549 'pull':'down_16.png',
550 550 'started_following_repo':'heart_add.png',
551 551 'stopped_following_repo':'heart_delete.png',
552 552 }
553 553 return literal(tmpl % ((url('/images/icons/')),
554 554 map.get(action, action), action))
555 555
556 556
557 557 #==============================================================================
558 558 # PERMS
559 559 #==============================================================================
560 560 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
561 561 HasRepoPermissionAny, HasRepoPermissionAll
562 562
563 563
564 564 #==============================================================================
565 565 # GRAVATAR URL
566 566 #==============================================================================
567 567
568 568 def gravatar_url(email_address, size=30):
569 569 if (not str2bool(config['app_conf'].get('use_gravatar')) or
570 570 not email_address or email_address == 'anonymous@rhodecode.org'):
571 571 f = lambda a, l: min(l, key=lambda x: abs(x - a))
572 572 return url("/images/user%s.png" % f(size, [14, 16, 20, 24, 30]))
573 573
574 574 ssl_enabled = 'https' == request.environ.get('wsgi.url_scheme')
575 575 default = 'identicon'
576 576 baseurl_nossl = "http://www.gravatar.com/avatar/"
577 577 baseurl_ssl = "https://secure.gravatar.com/avatar/"
578 578 baseurl = baseurl_ssl if ssl_enabled else baseurl_nossl
579 579
580 580 if isinstance(email_address, unicode):
581 581 #hashlib crashes on unicode items
582 582 email_address = safe_str(email_address)
583 583 # construct the url
584 584 gravatar_url = baseurl + hashlib.md5(email_address.lower()).hexdigest() + "?"
585 585 gravatar_url += urllib.urlencode({'d':default, 's':str(size)})
586 586
587 587 return gravatar_url
588 588
589 589
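
Outside the Pylons request/config context, the URL assembly above boils down to an MD5 of the lower-cased address plus a query string. A rough sketch (Python 2, matching the codebase; the address below is made up):

import hashlib
import urllib

def gravatar(email_address, size=30, secure=True):
    # md5 of the lower-cased address + d/s query parameters, as above
    base = ("https://secure.gravatar.com/avatar/" if secure
            else "http://www.gravatar.com/avatar/")
    email_address = email_address.strip().lower()
    return (base + hashlib.md5(email_address).hexdigest() + "?" +
            urllib.urlencode({'d': 'identicon', 's': str(size)}))

print(gravatar('someone@example.com', size=24))
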
590 590 #==============================================================================
591 591 # REPO PAGER, PAGER FOR REPOSITORY
592 592 #==============================================================================
593 593 class RepoPage(Page):
594 594
595 595 def __init__(self, collection, page=1, items_per_page=20,
596 596 item_count=None, url=None, **kwargs):
597 597
598 598 """Create a "RepoPage" instance, a special pager for paging
599 599 a repository
600 600 """
601 601 self._url_generator = url
602 602
603 603 # Save the kwargs class-wide so they can be used in the pager() method
604 604 self.kwargs = kwargs
605 605
606 606 # Save a reference to the collection
607 607 self.original_collection = collection
608 608
609 609 self.collection = collection
610 610
611 611 # The self.page is the number of the current page.
612 612 # The first page has the number 1!
613 613 try:
614 614 self.page = int(page) # make it int() if we get it as a string
615 615 except (ValueError, TypeError):
616 616 self.page = 1
617 617
618 618 self.items_per_page = items_per_page
619 619
620 620 # Unless the user tells us how many items the collections has
621 621 # we calculate that ourselves.
622 622 if item_count is not None:
623 623 self.item_count = item_count
624 624 else:
625 625 self.item_count = len(self.collection)
626 626
627 627 # Compute the number of the first and last available page
628 628 if self.item_count > 0:
629 629 self.first_page = 1
630 630 self.page_count = int(math.ceil(float(self.item_count) /
631 631 self.items_per_page))
632 632 self.last_page = self.first_page + self.page_count - 1
633 633
634 634 # Make sure that the requested page number is in the range of
635 635 # valid pages
636 636 if self.page > self.last_page:
637 637 self.page = self.last_page
638 638 elif self.page < self.first_page:
639 639 self.page = self.first_page
640 640
641 641 # Note: the number of items on this page can be less than
642 642 # items_per_page if the last page is not full
643 643 self.first_item = max(0, (self.item_count) - (self.page *
644 644 items_per_page))
645 645 self.last_item = ((self.item_count - 1) - items_per_page *
646 646 (self.page - 1))
647 647
648 648 self.items = list(self.collection[self.first_item:self.last_item + 1])
649 649
650 650 # Links to previous and next page
651 651 if self.page > self.first_page:
652 652 self.previous_page = self.page - 1
653 653 else:
654 654 self.previous_page = None
655 655
656 656 if self.page < self.last_page:
657 657 self.next_page = self.page + 1
658 658 else:
659 659 self.next_page = None
660 660
661 661 # No items available
662 662 else:
663 663 self.first_page = None
664 664 self.page_count = 0
665 665 self.last_page = None
666 666 self.first_item = None
667 667 self.last_item = None
668 668 self.previous_page = None
669 669 self.next_page = None
670 670 self.items = []
671 671
672 672 # This is a subclass of the 'list' type. Initialise the list now.
673 673 list.__init__(self, reversed(self.items))
674 674
675 675
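
The index arithmetic in RepoPage is easy to miss: it slices from the end of the collection and then reverses, so page 1 shows the tail of the collection, newest first when the items are ordered oldest to newest. A small sketch of just that math, with made-up data:

revisions = list(range(25))        # stand-in changesets, oldest..newest
per_page = 10

def page_slice(page, item_count=len(revisions)):
    # same formulas as RepoPage.first_item / last_item above
    first = max(0, item_count - page * per_page)
    last = (item_count - 1) - per_page * (page - 1)
    return list(reversed(revisions[first:last + 1]))

print(page_slice(1))   # [24, 23, ..., 15] - the newest ten
print(page_slice(3))   # [4, 3, 2, 1, 0]   - the oldest, partial page
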
676 676 def changed_tooltip(nodes):
677 677 """
678 678 Generates an html string for changed nodes in the changeset page.
679 679 It limits the output to 30 entries
680 680
681 681 :param nodes: LazyNodesGenerator
682 682 """
683 683 if nodes:
684 684 pref = ': <br/> '
685 685 suf = ''
686 686 if len(nodes) > 30:
687 687 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
688 688 return literal(pref + '<br/> '.join([safe_unicode(x.path)
689 689 for x in nodes[:30]]) + suf)
690 690 else:
691 691 return ': ' + _('No Files')
692 692
693 693
694 694 def repo_link(groups_and_repos):
695 695 """
696 696 Makes a breadcrumbs link to a repo within a group,
697 697 joining &raquo; on each group to create a fancy link
698 698
699 699 ex::
700 700 group >> subgroup >> repo
701 701
702 702 :param groups_and_repos:
703 703 """
704 704 groups, repo_name = groups_and_repos
705 705
706 706 if not groups:
707 707 return repo_name
708 708 else:
709 709 def make_link(group):
710 710 return link_to(group.name, url('repos_group_home',
711 711 group_name=group.group_name))
712 712 return literal(' &raquo; '.join(map(make_link, groups)) + \
713 713 " &raquo; " + repo_name)
714 714
715 715
716 716 def fancy_file_stats(stats):
717 717 """
718 718 Displays a fancy two colored bar for number of added/deleted
719 719 lines of code on file
720 720
721 721 :param stats: two element list of added/deleted lines of code
722 722 """
723 723
724 724 a, d, t = stats[0], stats[1], stats[0] + stats[1]
725 725 width = 100
726 726 unit = float(width) / (t or 1)
727 727
728 728 # needs > 9% of width to be visible or 0 to be hidden
729 729 a_p = max(9, unit * a) if a > 0 else 0
730 730 d_p = max(9, unit * d) if d > 0 else 0
731 731 p_sum = a_p + d_p
732 732
733 733 if p_sum > width:
734 734 #adjust the percentage to be == 100% since we adjusted to 9
735 735 if a_p > d_p:
736 736 a_p = a_p - (p_sum - width)
737 737 else:
738 738 d_p = d_p - (p_sum - width)
739 739
740 740 a_v = a if a > 0 else ''
741 741 d_v = d if d > 0 else ''
742 742
743 743 def cgen(l_type):
744 744 mapping = {'tr': 'top-right-rounded-corner',
745 745 'tl': 'top-left-rounded-corner',
746 746 'br': 'bottom-right-rounded-corner',
747 747 'bl': 'bottom-left-rounded-corner'}
748 748 map_getter = lambda x: mapping[x]
749 749
750 750 if l_type == 'a' and d_v:
751 751 #case when added and deleted are present
752 752 return ' '.join(map(map_getter, ['tl', 'bl']))
753 753
754 754 if l_type == 'a' and not d_v:
755 755 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
756 756
757 757 if l_type == 'd' and a_v:
758 758 return ' '.join(map(map_getter, ['tr', 'br']))
759 759
760 760 if l_type == 'd' and not a_v:
761 761 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
762 762
763 763 d_a = '<div class="added %s" style="width:%s%%">%s</div>' % (
764 764 cgen('a'),a_p, a_v
765 765 )
766 766 d_d = '<div class="deleted %s" style="width:%s%%">%s</div>' % (
767 767 cgen('d'),d_p, d_v
768 768 )
769 769 return literal('<div style="width:%spx">%s%s</div>' % (width, d_a, d_d))
770 770
771 771
772 772 def urlify_text(text_):
773 773 import re
774 774
775 775 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]'''
776 776 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
777 777
778 778 def url_func(match_obj):
779 779 url_full = match_obj.groups()[0]
780 780 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
781 781
782 782 return literal(url_pat.sub(url_func, text_))
783 783
784 784
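
urlify_text() is a plain regex substitution; the same pattern can be exercised on its own (the sample message below is made up):

import re

url_pat = re.compile(r'(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]'
                     r'|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)')

msg = 'see http://example.com/changeset/abcdef123456 for details'
print(url_pat.sub(r'<a href="\1">\1</a>', msg))
# the URL is wrapped in an anchor tag, the surrounding text is untouched
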
785 785 def urlify_changesets(text_, repository):
786 786 import re
787 787 URL_PAT = re.compile(r'([0-9a-fA-F]{12,})')
788 788
789 789 def url_func(match_obj):
790 790 rev = match_obj.groups()[0]
791 791 pref = ''
792 792 if match_obj.group().startswith(' '):
793 793 pref = ' '
794 794 tmpl = (
795 795 '%(pref)s<a class="%(cls)s" href="%(url)s">'
796 796 '%(rev)s'
797 797 '</a>'
798 798 )
799 799 return tmpl % {
800 800 'pref': pref,
801 801 'cls': 'revision-link',
802 802 'url': url('changeset_home', repo_name=repository, revision=rev),
803 803 'rev': rev,
804 804 }
805 805
806 806 newtext = URL_PAT.sub(url_func, text_)
807 807
808 808 return newtext
809 809
810 810
811 811 def urlify_commit(text_, repository=None, link_=None):
812 812 import re
813 813 import traceback
814 814
815 815 # urlify changesets
816 816 text_ = urlify_changesets(text_, repository)
817 817
818 818 def linkify_others(t,l):
819 819 urls = re.compile(r'(\<a.*?\<\/a\>)',)
820 820 links = []
821 821 for e in urls.split(t):
822 822 if not urls.match(e):
823 823 links.append('<a class="message-link" href="%s">%s</a>' % (l,e))
824 824 else:
825 825 links.append(e)
826 826
827 827 return ''.join(links)
828 828 try:
829 829 conf = config['app_conf']
830 830
831 831 URL_PAT = re.compile(r'%s' % conf.get('issue_pat'))
832 832
833 833 if URL_PAT:
834 834 ISSUE_SERVER_LNK = conf.get('issue_server_link')
835 835 ISSUE_PREFIX = conf.get('issue_prefix')
836 836
837 837 def url_func(match_obj):
838 838 pref = ''
839 839 if match_obj.group().startswith(' '):
840 840 pref = ' '
841 841
842 842 issue_id = ''.join(match_obj.groups())
843 843 tmpl = (
844 844 '%(pref)s<a class="%(cls)s" href="%(url)s">'
845 845 '%(issue-prefix)s%(id-repr)s'
846 846 '</a>'
847 847 )
848 848 url = ISSUE_SERVER_LNK.replace('{id}', issue_id)
849 849 if repository:
850 850 url = url.replace('{repo}', repository)
851 851
852 852 return tmpl % {
853 853 'pref': pref,
854 854 'cls': 'issue-tracker-link',
855 855 'url': url,
856 856 'id-repr': issue_id,
857 857 'issue-prefix': ISSUE_PREFIX,
858 858 'serv': ISSUE_SERVER_LNK,
859 859 }
860 860
861 861 newtext = URL_PAT.sub(url_func, text_)
862 862
863 863 # wrap non-link text into the final link => link_
864 864 newtext = linkify_others(newtext, link_)
865 865
866 866 return literal(newtext)
867 867 except:
868 868 log.error(traceback.format_exc())
869 869 pass
870 870
871 871 return text_
872 872
873 873
874 874 def rst(source):
875 875 return literal('<div class="rst-block">%s</div>' %
876 876 MarkupRenderer.rst(source))
877 877
878 878
879 879 def rst_w_mentions(source):
880 880 """
881 881 Wrapped rst renderer with @mention highlighting
882 882
883 883 :param source:
884 884 """
885 885 return literal('<div class="rst-block">%s</div>' %
886 886 MarkupRenderer.rst_with_mentions(source))
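
For urlify_commit() the interesting part is the config-driven issue linking: issue_pat finds ticket references and issue_server_link has its {id} placeholder substituted. A self-contained sketch with made-up settings (the real values come from app_conf):

import re

issue_pat = r'#(\d+)'                                        # assumed pattern
issue_server_link = 'https://tracker.example.com/issue/{id}'  # assumed value
issue_prefix = '#'

def link_issues(message):
    def repl(match):
        issue_id = match.group(1)
        url = issue_server_link.replace('{id}', issue_id)
        return '<a class="issue-tracker-link" href="%s">%s%s</a>' % (
            url, issue_prefix, issue_id)
    return re.sub(issue_pat, repl, message)

print(link_issues('fixes #42, refs #43'))
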
@@ -1,235 +1,235 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.indexers.daemon
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 A daemon will read from the task table and run tasks
7 7
8 8 :created_on: Jan 26, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import sys
28 28 import logging
29 29 import traceback
30 30
31 31 from shutil import rmtree
32 32 from time import mktime
33 33
34 34 from os.path import dirname as dn
35 35 from os.path import join as jn
36 36
37 37 #to get the rhodecode import
38 38 project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
39 39 sys.path.append(project_path)
40 40
41 41
42 42 from rhodecode.model.scm import ScmModel
43 43 from rhodecode.lib import safe_unicode
44 44 from rhodecode.lib.indexers import INDEX_EXTENSIONS, SCHEMA, IDX_NAME
45 45
46 from vcs.exceptions import ChangesetError, RepositoryError, \
46 from rhodecode.lib.vcs.exceptions import ChangesetError, RepositoryError, \
47 47 NodeDoesNotExistError
48 48
49 49 from whoosh.index import create_in, open_dir
50 50
51 51
52 52 log = logging.getLogger('whooshIndexer')
53 53 # create logger
54 54 log.setLevel(logging.DEBUG)
55 55 log.propagate = False
56 56 # create console handler and set level to debug
57 57 ch = logging.StreamHandler()
58 58 ch.setLevel(logging.DEBUG)
59 59
60 60 # create formatter
61 61 formatter = logging.Formatter("%(asctime)s - %(name)s -"
62 62 " %(levelname)s - %(message)s")
63 63
64 64 # add formatter to ch
65 65 ch.setFormatter(formatter)
66 66
67 67 # add ch to logger
68 68 log.addHandler(ch)
69 69
70 70
71 71 class WhooshIndexingDaemon(object):
72 72 """
73 73 Daemon for atomic jobs
74 74 """
75 75
76 76 def __init__(self, indexname=IDX_NAME, index_location=None,
77 77 repo_location=None, sa=None, repo_list=None):
78 78 self.indexname = indexname
79 79
80 80 self.index_location = index_location
81 81 if not index_location:
82 82 raise Exception('You have to provide index location')
83 83
84 84 self.repo_location = repo_location
85 85 if not repo_location:
86 86 raise Exception('You have to provide repositories location')
87 87
88 88 self.repo_paths = ScmModel(sa).repo_scan(self.repo_location)
89 89
90 90 if repo_list:
91 91 filtered_repo_paths = {}
92 92 for repo_name, repo in self.repo_paths.items():
93 93 if repo_name in repo_list:
94 94 filtered_repo_paths[repo_name] = repo
95 95
96 96 self.repo_paths = filtered_repo_paths
97 97
98 98 self.initial = False
99 99 if not os.path.isdir(self.index_location):
100 100 os.makedirs(self.index_location)
101 101 log.info('Cannot run incremental index since it does not'
102 102 ' yet exist - running full build')
103 103 self.initial = True
104 104
105 105 def get_paths(self, repo):
106 106 """recursive walk in root dir and return a set of all paths in that dir
107 107 based on repository walk function
108 108 """
109 109 index_paths_ = set()
110 110 try:
111 111 tip = repo.get_changeset('tip')
112 112 for topnode, dirs, files in tip.walk('/'):
113 113 for f in files:
114 114 index_paths_.add(jn(repo.path, f.path))
115 115
116 116 except RepositoryError, e:
117 117 log.debug(traceback.format_exc())
118 118 pass
119 119 return index_paths_
120 120
121 121 def get_node(self, repo, path):
122 122 n_path = path[len(repo.path) + 1:]
123 123 node = repo.get_changeset().get_node(n_path)
124 124 return node
125 125
126 126 def get_node_mtime(self, node):
127 127 return mktime(node.last_changeset.date.timetuple())
128 128
129 129 def add_doc(self, writer, path, repo, repo_name):
130 130 """Adding doc to writer; this function itself fetches data from
131 131 the instance of vcs backend"""
132 132 node = self.get_node(repo, path)
133 133
134 134 #we just index the content of chosen files, and skip binary files
135 135 if node.extension in INDEX_EXTENSIONS and not node.is_binary:
136 136
137 137 u_content = node.content
138 138 if not isinstance(u_content, unicode):
139 139 log.warning(' >> %s Could not get this content as unicode '
140 140 '- replacing with empty content', path)
141 141 u_content = u''
142 142 else:
143 143 log.debug(' >> %s [WITH CONTENT]' % path)
144 144
145 145 else:
146 146 log.debug(' >> %s' % path)
147 147 #just index the file name without its content
148 148 u_content = u''
149 149
150 150 writer.add_document(owner=unicode(repo.contact),
151 151 repository=safe_unicode(repo_name),
152 152 path=safe_unicode(path),
153 153 content=u_content,
154 154 modtime=self.get_node_mtime(node),
155 155 extension=node.extension)
156 156
157 157 def build_index(self):
158 158 if os.path.exists(self.index_location):
159 159 log.debug('removing previous index')
160 160 rmtree(self.index_location)
161 161
162 162 if not os.path.exists(self.index_location):
163 163 os.mkdir(self.index_location)
164 164
165 165 idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
166 166 writer = idx.writer()
167 167
168 168 for repo_name, repo in self.repo_paths.items():
169 169 log.debug('building index @ %s' % repo.path)
170 170
171 171 for idx_path in self.get_paths(repo):
172 172 self.add_doc(writer, idx_path, repo, repo_name)
173 173
174 174 log.debug('>> COMMITTING CHANGES <<')
175 175 writer.commit(merge=True)
176 176 log.debug('>>> FINISHED BUILDING INDEX <<<')
177 177
178 178 def update_index(self):
179 179 log.debug('STARTING INCREMENTAL INDEXING UPDATE')
180 180
181 181 idx = open_dir(self.index_location, indexname=self.indexname)
182 182 # The set of all paths in the index
183 183 indexed_paths = set()
184 184 # The set of all paths we need to re-index
185 185 to_index = set()
186 186
187 187 reader = idx.reader()
188 188 writer = idx.writer()
189 189
190 190 # Loop over the stored fields in the index
191 191 for fields in reader.all_stored_fields():
192 192 indexed_path = fields['path']
193 193 indexed_paths.add(indexed_path)
194 194
195 195 repo = self.repo_paths[fields['repository']]
196 196
197 197 try:
198 198 node = self.get_node(repo, indexed_path)
199 199 except (ChangesetError, NodeDoesNotExistError):
200 200 # This file was deleted since it was indexed
201 201 log.debug('removing from index %s' % indexed_path)
202 202 writer.delete_by_term('path', indexed_path)
203 203
204 204 else:
205 205 # Check if this file was changed since it was indexed
206 206 indexed_time = fields['modtime']
207 207 mtime = self.get_node_mtime(node)
208 208 if mtime > indexed_time:
209 209 # The file has changed, delete it and add it to the list of
210 210 # files to reindex
211 211 log.debug('adding to reindex list %s' % indexed_path)
212 212 writer.delete_by_term('path', indexed_path)
213 213 to_index.add(indexed_path)
214 214
215 215 # Loop over the files in the filesystem
216 216 # Assume we have a function that gathers the filenames of the
217 217 # documents to be indexed
218 218 for repo_name, repo in self.repo_paths.items():
219 219 for path in self.get_paths(repo):
220 220 if path in to_index or path not in indexed_paths:
221 221 # This is either a file that's changed, or a new file
222 222 # that wasn't indexed before. So index it!
223 223 self.add_doc(writer, path, repo, repo_name)
224 224 log.debug('re indexing %s' % path)
225 225
226 226 log.debug('>> COMMITTING CHANGES <<')
227 227 writer.commit(merge=True)
228 228 log.debug('>>> FINISHED REBUILDING INDEX <<<')
229 229
230 230 def run(self, full_index=False):
231 231 """Run daemon"""
232 232 if full_index or self.initial:
233 233 self.build_index()
234 234 else:
235 235 self.update_index()
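
The incremental update above follows a common Whoosh pattern: delete the stale document by its unique term, then add the fresh one. A tiny, self-contained round trip (the schema and field values here are made up for the example, not the daemon's real SCHEMA):

import tempfile
from whoosh.index import create_in
from whoosh.fields import Schema, ID, TEXT

schema = Schema(path=ID(stored=True, unique=True), content=TEXT)
ix = create_in(tempfile.mkdtemp(), schema, indexname='demo')

writer = ix.writer()
writer.add_document(path=u'docs/readme.rst', content=u'hello rhodecode')
writer.commit(merge=True)

# re-index one changed file: drop the old document, add the new content
writer = ix.writer()
writer.delete_by_term('path', u'docs/readme.rst')
writer.add_document(path=u'docs/readme.rst', content=u'hello again')
writer.commit(merge=True)
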
@@ -1,248 +1,249 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.middleware.simplegit
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 SimpleGit middleware for handling git protocol requests (push/clone etc.)
7 7 It's implemented with a basic auth function
8 8
9 9 :created_on: Apr 28, 2010
10 10 :author: marcink
11 11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
12 12 :license: GPLv3, see COPYING for more details.
13 13 """
14 14 # This program is free software: you can redistribute it and/or modify
15 15 # it under the terms of the GNU General Public License as published by
16 16 # the Free Software Foundation, either version 3 of the License, or
17 17 # (at your option) any later version.
18 18 #
19 19 # This program is distributed in the hope that it will be useful,
20 20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 22 # GNU General Public License for more details.
23 23 #
24 24 # You should have received a copy of the GNU General Public License
25 25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
26 26
27 27 import os
28 28 import logging
29 29 import traceback
30 30
31 31 from dulwich import server as dulserver
32 32
33
33 34 class SimpleGitUploadPackHandler(dulserver.UploadPackHandler):
34 35
35 36 def handle(self):
36 37 write = lambda x: self.proto.write_sideband(1, x)
37 38
38 39 graph_walker = dulserver.ProtocolGraphWalker(self,
39 40 self.repo.object_store,
40 41 self.repo.get_peeled)
41 42 objects_iter = self.repo.fetch_objects(
42 43 graph_walker.determine_wants, graph_walker, self.progress,
43 44 get_tagged=self.get_tagged)
44 45
45 46 # Do they want any objects?
46 47 if objects_iter is None or len(objects_iter) == 0:
47 48 return
48 49
49 50 self.progress("counting objects: %d, done.\n" % len(objects_iter))
50 51 dulserver.write_pack_objects(dulserver.ProtocolFile(None, write),
51 52 objects_iter, len(objects_iter))
52 53 messages = []
53 54 messages.append('thank you for using rhodecode')
54 55
55 56 for msg in messages:
56 57 self.progress(msg + "\n")
57 58 # we are done
58 59 self.proto.write("0000")
59 60
60 61 dulserver.DEFAULT_HANDLERS = {
61 62 'git-upload-pack': SimpleGitUploadPackHandler,
62 63 'git-receive-pack': dulserver.ReceivePackHandler,
63 64 }
64 65
65 66 from dulwich.repo import Repo
66 67 from dulwich.web import HTTPGitApplication
67 68
68 69 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
69 70
70 71 from rhodecode.lib import safe_str
71 72 from rhodecode.lib.base import BaseVCSController
72 73 from rhodecode.lib.auth import get_container_username
73 74 from rhodecode.lib.utils import is_valid_repo
74 75 from rhodecode.model.db import User
75 76
76 77 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPInternalServerError
77 78
78 79 log = logging.getLogger(__name__)
79 80
80 81
81 82 def is_git(environ):
82 83 """Returns True if request's target is git server.
83 84 ``HTTP_USER_AGENT`` would then have git client version given.
84 85
85 86 :param environ:
86 87 """
87 88 http_user_agent = environ.get('HTTP_USER_AGENT')
88 89 if http_user_agent and http_user_agent.startswith('git'):
89 90 return True
90 91 return False
91 92
92 93
93 94 class SimpleGit(BaseVCSController):
94 95
95 96 def _handle_request(self, environ, start_response):
96 97 if not is_git(environ):
97 98 return self.application(environ, start_response)
98 99
99 100 proxy_key = 'HTTP_X_REAL_IP'
100 101 def_key = 'REMOTE_ADDR'
101 102 ipaddr = environ.get(proxy_key, environ.get(def_key, '0.0.0.0'))
102 103 username = None
103 104 # skip passing error to error controller
104 105 environ['pylons.status_code_redirect'] = True
105 106
106 107 #======================================================================
107 108 # EXTRACT REPOSITORY NAME FROM ENV
108 109 #======================================================================
109 110 try:
110 111 repo_name = self.__get_repository(environ)
111 112 log.debug('Extracted repo name is %s' % repo_name)
112 113 except:
113 114 return HTTPInternalServerError()(environ, start_response)
114 115
115 116 #======================================================================
116 117 # GET ACTION PULL or PUSH
117 118 #======================================================================
118 119 action = self.__get_action(environ)
119 120
120 121 #======================================================================
121 122 # CHECK ANONYMOUS PERMISSION
122 123 #======================================================================
123 124 if action in ['pull', 'push']:
124 125 anonymous_user = self.__get_user('default')
125 126 username = anonymous_user.username
126 127 anonymous_perm = self._check_permission(action,anonymous_user,
127 128 repo_name)
128 129
129 130 if anonymous_perm is not True or anonymous_user.active is False:
130 131 if anonymous_perm is not True:
131 132 log.debug('Not enough credentials to access this '
132 133 'repository as anonymous user')
133 134 if anonymous_user.active is False:
134 135 log.debug('Anonymous access is disabled, running '
135 136 'authentication')
136 137 #==============================================================
137 138 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
138 139 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
139 140 #==============================================================
140 141
141 142 # Attempting to retrieve username from the container
142 143 username = get_container_username(environ, self.config)
143 144
144 145 # If not authenticated by the container, running basic auth
145 146 if not username:
146 147 self.authenticate.realm = \
147 148 safe_str(self.config['rhodecode_realm'])
148 149 result = self.authenticate(environ)
149 150 if isinstance(result, str):
150 151 AUTH_TYPE.update(environ, 'basic')
151 152 REMOTE_USER.update(environ, result)
152 153 username = result
153 154 else:
154 155 return result.wsgi_application(environ, start_response)
155 156
156 157 #==============================================================
157 158 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
158 159 #==============================================================
159 160
160 161 if action in ['pull', 'push']:
161 162 try:
162 163 user = self.__get_user(username)
163 164 if user is None or not user.active:
164 165 return HTTPForbidden()(environ, start_response)
165 166 username = user.username
166 167 except:
167 168 log.error(traceback.format_exc())
168 169 return HTTPInternalServerError()(environ,
169 170 start_response)
170 171
171 172 #check permissions for this repository
172 173 perm = self._check_permission(action, user,
173 174 repo_name)
174 175 if perm is not True:
175 176 return HTTPForbidden()(environ, start_response)
176 177
177 178 #===================================================================
178 179 # GIT REQUEST HANDLING
179 180 #===================================================================
180 181
181 182 repo_path = safe_str(os.path.join(self.basepath, repo_name))
182 183 log.debug('Repository path is %s' % repo_path)
183 184
184 185 # quick check if that dir exists...
185 186 if is_valid_repo(repo_name, self.basepath) is False:
186 187 return HTTPNotFound()(environ, start_response)
187 188
188 189 try:
189 190 #invalidate cache on push
190 191 if action == 'push':
191 192 self._invalidate_cache(repo_name)
192 193
193 194 app = self.__make_app(repo_name, repo_path)
194 195 return app(environ, start_response)
195 196 except Exception:
196 197 log.error(traceback.format_exc())
197 198 return HTTPInternalServerError()(environ, start_response)
198 199
199 200 def __make_app(self, repo_name, repo_path):
200 201 """
201 202 Make a wsgi application using dulserver
202 203
203 204 :param repo_name: name of the repository
204 205 :param repo_path: full path to the repository
205 206 """
206 207
207 208 _d = {'/' + repo_name: Repo(repo_path)}
208 209 backend = dulserver.DictBackend(_d)
209 210 gitserve = HTTPGitApplication(backend)
210 211
211 212 return gitserve
212 213
213 214 def __get_repository(self, environ):
214 215 """
215 216 Gets repository name out of PATH_INFO header
216 217
217 218 :param environ: environ where PATH_INFO is stored
218 219 """
219 220 try:
220 221 environ['PATH_INFO'] = self._get_by_id(environ['PATH_INFO'])
221 222 repo_name = '/'.join(environ['PATH_INFO'].split('/')[1:])
222 223 if repo_name.endswith('/'):
223 224 repo_name = repo_name.rstrip('/')
224 225 except:
225 226 log.error(traceback.format_exc())
226 227 raise
227 228 repo_name = repo_name.split('/')[0]
228 229 return repo_name
229 230
230 231 def __get_user(self, username):
231 232 return User.get_by_username(username)
232 233
233 234 def __get_action(self, environ):
234 235 """Maps git request commands into a pull or push command.
235 236
236 237 :param environ:
237 238 """
238 239 service = environ['QUERY_STRING'].split('=')
239 240 if len(service) > 1:
240 241 service_cmd = service[1]
241 242 mapping = {'git-receive-pack': 'push',
242 243 'git-upload-pack': 'pull',
243 244 }
244 245
245 246 return mapping.get(service_cmd,
246 247 service_cmd if service_cmd else 'other')
247 248 else:
248 249 return 'other'
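
The action detection above keys off the smart-HTTP service name in QUERY_STRING; condensed into a standalone helper (the function name is made up):

def git_action(query_string):
    # git-upload-pack serves clones/fetches (pull), git-receive-pack accepts pushes
    mapping = {'git-receive-pack': 'push', 'git-upload-pack': 'pull'}
    parts = query_string.split('=')
    if len(parts) > 1:
        return mapping.get(parts[1], parts[1] or 'other')
    return 'other'

print(git_action('service=git-upload-pack'))    # pull
print(git_action('service=git-receive-pack'))   # push
print(git_action(''))                           # other
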
@@ -1,251 +1,249 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.middleware.simplehg
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 SimpleHG middleware for handling mercurial protocol requests
7 7 (push/clone etc.). It's implemented with a basic auth function
8 8
9 9 :created_on: Apr 28, 2010
10 10 :author: marcink
11 11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
12 12 :license: GPLv3, see COPYING for more details.
13 13 """
14 14 # This program is free software: you can redistribute it and/or modify
15 15 # it under the terms of the GNU General Public License as published by
16 16 # the Free Software Foundation, either version 3 of the License, or
17 17 # (at your option) any later version.
18 18 #
19 19 # This program is distributed in the hope that it will be useful,
20 20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 22 # GNU General Public License for more details.
23 23 #
24 24 # You should have received a copy of the GNU General Public License
25 25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
26 26
27 27 import os
28 28 import logging
29 29 import traceback
30 30
31 31 from mercurial.error import RepoError
32 32 from mercurial.hgweb import hgweb_mod
33 33
34 34 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
35 35
36 36 from rhodecode.lib import safe_str
37 37 from rhodecode.lib.base import BaseVCSController
38 38 from rhodecode.lib.auth import get_container_username
39 39 from rhodecode.lib.utils import make_ui, is_valid_repo, ui_sections
40 40 from rhodecode.model.db import User
41 41
42 42 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPInternalServerError
43 43
44 44 log = logging.getLogger(__name__)
45 45
46 46
47 47 def is_mercurial(environ):
48 48 """Returns True if request's target is a mercurial server - header
49 49 ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
50 50 """
51 51 http_accept = environ.get('HTTP_ACCEPT')
52 52 if http_accept and http_accept.startswith('application/mercurial'):
53 53 return True
54 54 return False
55 55
56 56
57 57 class SimpleHg(BaseVCSController):
58 58
59 59 def _handle_request(self, environ, start_response):
60 60 if not is_mercurial(environ):
61 61 return self.application(environ, start_response)
62 62
63 63 proxy_key = 'HTTP_X_REAL_IP'
64 64 def_key = 'REMOTE_ADDR'
65 65 ipaddr = environ.get(proxy_key, environ.get(def_key, '0.0.0.0'))
66 66
67 67 # skip passing error to error controller
68 68 environ['pylons.status_code_redirect'] = True
69 69
70 70 #======================================================================
71 71 # EXTRACT REPOSITORY NAME FROM ENV
72 72 #======================================================================
73 73 try:
74 74 repo_name = environ['REPO_NAME'] = self.__get_repository(environ)
75 75 log.debug('Extracted repo name is %s' % repo_name)
76 76 except:
77 77 return HTTPInternalServerError()(environ, start_response)
78 78
79 79 #======================================================================
80 80 # GET ACTION PULL or PUSH
81 81 #======================================================================
82 82 action = self.__get_action(environ)
83 83
84 84 #======================================================================
85 85 # CHECK ANONYMOUS PERMISSION
86 86 #======================================================================
87 87 if action in ['pull', 'push']:
88 88 anonymous_user = self.__get_user('default')
89 89
90 90 username = anonymous_user.username
91 91 anonymous_perm = self._check_permission(action,anonymous_user,
92 92 repo_name)
93 93
94 94 if anonymous_perm is not True or anonymous_user.active is False:
95 95 if anonymous_perm is not True:
96 96 log.debug('Not enough credentials to access this '
97 97 'repository as anonymous user')
98 98 if anonymous_user.active is False:
99 99 log.debug('Anonymous access is disabled, running '
100 100 'authentication')
101 101 #==============================================================
102 102 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
103 103 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
104 104 #==============================================================
105 105
106 106 # Attempting to retrieve username from the container
107 107 username = get_container_username(environ, self.config)
108 108
109 109 # If not authenticated by the container, running basic auth
110 110 if not username:
111 111 self.authenticate.realm = \
112 112 safe_str(self.config['rhodecode_realm'])
113 113 result = self.authenticate(environ)
114 114 if isinstance(result, str):
115 115 AUTH_TYPE.update(environ, 'basic')
116 116 REMOTE_USER.update(environ, result)
117 117 username = result
118 118 else:
119 119 return result.wsgi_application(environ, start_response)
120 120
121 121 #==============================================================
122 122 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
123 123 #==============================================================
124 124
125 125 if action in ['pull', 'push']:
126 126 try:
127 127 user = self.__get_user(username)
128 128 if user is None or not user.active:
129 129 return HTTPForbidden()(environ, start_response)
130 130 username = user.username
131 131 except:
132 132 log.error(traceback.format_exc())
133 133 return HTTPInternalServerError()(environ,
134 134 start_response)
135 135
136 136 #check permissions for this repository
137 137 perm = self._check_permission(action, user,
138 138 repo_name)
139 139 if perm is not True:
140 140 return HTTPForbidden()(environ, start_response)
141 141
142 142 extras = {'ip': ipaddr,
143 143 'username': username,
144 144 'action': action,
145 145 'repository': repo_name}
146 146
147 147 #======================================================================
148 148 # MERCURIAL REQUEST HANDLING
149 149 #======================================================================
150 150
151 151 repo_path = safe_str(os.path.join(self.basepath, repo_name))
152 152 log.debug('Repository path is %s' % repo_path)
153 153
154 154 baseui = make_ui('db')
155 155 self.__inject_extras(repo_path, baseui, extras)
156 156
157
158 157 # quick check if that dir exists...
159 158 if is_valid_repo(repo_name, self.basepath) is False:
160 159 return HTTPNotFound()(environ, start_response)
161 160
162 161 try:
163 162 # invalidate cache on push
164 163 if action == 'push':
165 164 self._invalidate_cache(repo_name)
166 165
167 166 app = self.__make_app(repo_path, baseui, extras)
168 167 return app(environ, start_response)
169 168 except RepoError, e:
170 169 if str(e).find('not found') != -1:
171 170 return HTTPNotFound()(environ, start_response)
172 171 except Exception:
173 172 log.error(traceback.format_exc())
174 173 return HTTPInternalServerError()(environ, start_response)
175 174
176 175 def __make_app(self, repo_name, baseui, extras):
177 176 """
178 177 Make a wsgi application using hgweb, and inject generated baseui
179 178 instance, additionally inject some extras into ui object
180 179 """
181 180 return hgweb_mod.hgweb(repo_name, name=repo_name, baseui=baseui)
182 181
183 182 def __get_repository(self, environ):
184 183 """
185 184 Gets repository name out of PATH_INFO header
186 185
187 186 :param environ: environ where PATH_INFO is stored
188 187 """
189 188 try:
190 189 environ['PATH_INFO'] = self._get_by_id(environ['PATH_INFO'])
191 190 repo_name = '/'.join(environ['PATH_INFO'].split('/')[1:])
192 191 if repo_name.endswith('/'):
193 192 repo_name = repo_name.rstrip('/')
194 193 except:
195 194 log.error(traceback.format_exc())
196 195 raise
197 196
198 197 return repo_name
199 198
200 199 def __get_user(self, username):
201 200 return User.get_by_username(username)
202 201
203 202 def __get_action(self, environ):
204 203 """
205 204 Maps mercurial request commands into a clone,pull or push command.
206 205 This should always return a valid command string
207 206
208 207 :param environ:
209 208 """
210 209 mapping = {'changegroup': 'pull',
211 210 'changegroupsubset': 'pull',
212 211 'stream_out': 'pull',
213 212 'listkeys': 'pull',
214 213 'unbundle': 'push',
215 214 'pushkey': 'push', }
216 215 for qry in environ['QUERY_STRING'].split('&'):
217 216 if qry.startswith('cmd'):
218 217 cmd = qry.split('=')[-1]
219 218 if cmd in mapping:
220 219 return mapping[cmd]
221 220 else:
222 221 return 'pull'
223 222
224
225 223 def __inject_extras(self, repo_path, baseui, extras={}):
226 224 """
227 225 Injects some extra params into baseui instance
228 226
229 227 also overwrites global settings with those taken from the local hgrc file
230 228
231 229 :param baseui: baseui instance
232 230 :param extras: dict with extra params to put into baseui
233 231 """
234 232
235 233 hgrc = os.path.join(repo_path, '.hg', 'hgrc')
236 234
237 235 # make our hgweb quiet so it doesn't print output
238 236 baseui.setconfig('ui', 'quiet', 'true')
239 237
240 238 #inject some additional parameters that will be available in ui
241 239 #for hooks
242 240 for k, v in extras.items():
243 241 baseui.setconfig('rhodecode_extras', k, v)
244 242
245 243 repoui = make_ui('file', hgrc, False)
246 244
247 245 if repoui:
248 246 #overwrite our ui instance with the section from hgrc file
249 247 for section in ui_sections:
250 248 for k, v in repoui.configitems(section):
251 249 baseui.setconfig(section, k, v)
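
The mercurial counterpart works the same way, keyed on the wire command in QUERY_STRING; a condensed standalone version (the helper name is made up, and treating a request without any cmd argument as a plain pull is an assumption):

def hg_action(query_string):
    mapping = {'changegroup': 'pull', 'changegroupsubset': 'pull',
               'stream_out': 'pull', 'listkeys': 'pull',
               'unbundle': 'push', 'pushkey': 'push'}
    for qry in query_string.split('&'):
        if qry.startswith('cmd'):
            # unknown commands default to 'pull', as in __get_action above
            return mapping.get(qry.split('=')[-1], 'pull')
    return 'pull'   # assumption: no cmd argument means a plain read

print(hg_action('cmd=unbundle&key=value'))   # push
print(hg_action('cmd=between'))              # pull
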
@@ -1,633 +1,633 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.utils
4 4 ~~~~~~~~~~~~~~~~~~~
5 5
6 6 Utilities library for RhodeCode
7 7
8 8 :created_on: Apr 18, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import logging
28 28 import datetime
29 29 import traceback
30 30 import paste
31 31 import beaker
32 32 import tarfile
33 33 import shutil
34 34 from os.path import abspath
35 35 from os.path import dirname as dn, join as jn
36 36
37 37 from paste.script.command import Command, BadCommand
38 38
39 39 from mercurial import ui, config
40 40
41 41 from webhelpers.text import collapse, remove_formatting, strip_tags
42 42
43 from vcs import get_backend
44 from vcs.backends.base import BaseChangeset
45 from vcs.utils.lazy import LazyProperty
46 from vcs.utils.helpers import get_scm
47 from vcs.exceptions import VCSError
43 from rhodecode.lib.vcs import get_backend
44 from rhodecode.lib.vcs.backends.base import BaseChangeset
45 from rhodecode.lib.vcs.utils.lazy import LazyProperty
46 from rhodecode.lib.vcs.utils.helpers import get_scm
47 from rhodecode.lib.vcs.exceptions import VCSError
48 48
49 49 from rhodecode.lib.caching_query import FromCache
50 50
51 51 from rhodecode.model import meta
52 52 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
53 53 UserLog, RepoGroup, RhodeCodeSetting, UserRepoGroupToPerm
54 54 from rhodecode.model.meta import Session
55 55 from rhodecode.model.repos_group import ReposGroupModel
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 def recursive_replace(str_, replace=' '):
61 61 """Recursively replaces repeated occurrences of the given sign with a single instance
62 62
63 63 :param str_: given string
64 64 :param replace: char to find and replace multiple instances
65 65
66 66 Examples::
67 67 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
68 68 'Mighty-Mighty-Bo-sstones'
69 69 """
70 70
71 71 if str_.find(replace * 2) == -1:
72 72 return str_
73 73 else:
74 74 str_ = str_.replace(replace * 2, replace)
75 75 return recursive_replace(str_, replace)
76 76
77 77
78 78 def repo_name_slug(value):
79 79 """Return a slug of the repository name.
80 80 This function is called on each creation/modification
81 81 of a repository to prevent bad names
82 82 """
83 83
84 84 slug = remove_formatting(value)
85 85 slug = strip_tags(slug)
86 86
87 87 for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
88 88 slug = slug.replace(c, '-')
89 89 slug = recursive_replace(slug, '-')
90 90 slug = collapse(slug, '-')
91 91 return slug
92 92
93 93
94 94 def get_repo_slug(request):
95 95 return request.environ['pylons.routes_dict'].get('repo_name')
96 96
97 97
98 98 def get_repos_group_slug(request):
99 99 return request.environ['pylons.routes_dict'].get('group_name')
100 100
101 101
102 102 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
103 103 """
104 104 Action logger for various actions made by users
105 105
106 106 :param user: user that made this action, can be a unique username string or
107 107 object containing user_id attribute
108 108 :param action: action to log, should be one of the predefined unique actions for
109 109 easy translations
110 110 :param repo: string name of repository or object containing repo_id,
111 111 that action was made on
112 112 :param ipaddr: optional ip address from what the action was made
113 113 :param sa: optional sqlalchemy session
114 114
115 115 """
116 116
117 117 if not sa:
118 118 sa = meta.Session
119 119
120 120 try:
121 121 if hasattr(user, 'user_id'):
122 122 user_obj = user
123 123 elif isinstance(user, basestring):
124 124 user_obj = User.get_by_username(user)
125 125 else:
126 126 raise Exception('You have to provide user object or username')
127 127
128 128 if hasattr(repo, 'repo_id'):
129 129 repo_obj = Repository.get(repo.repo_id)
130 130 repo_name = repo_obj.repo_name
131 131 elif isinstance(repo, basestring):
132 132 repo_name = repo.lstrip('/')
133 133 repo_obj = Repository.get_by_repo_name(repo_name)
134 134 else:
135 135 raise Exception('You have to provide repository to action logger')
136 136
137 137 user_log = UserLog()
138 138 user_log.user_id = user_obj.user_id
139 139 user_log.action = action
140 140
141 141 user_log.repository_id = repo_obj.repo_id
142 142 user_log.repository_name = repo_name
143 143
144 144 user_log.action_date = datetime.datetime.now()
145 145 user_log.user_ip = ipaddr
146 146 sa.add(user_log)
147 147
148 148 log.info('Adding user %s, action %s on %s' % (user_obj, action, repo))
149 149 if commit:
150 150 sa.commit()
151 151 except:
152 152 log.error(traceback.format_exc())
153 153 raise
154 154
155 155
156 156 def get_repos(path, recursive=False):
157 157 """
158 158 Scans the given path for repos and returns a (name, (type, path)) tuple
159 159
160 160 :param path: path to scan for repositories
161 161 :param recursive: recursive search and return names with subdirs in front
162 162 """
163 163
164 164 # remove ending slash for better results
165 165 path = path.rstrip(os.sep)
166 166
167 167 def _get_repos(p):
168 168 if not os.access(p, os.W_OK):
169 169 return
170 170 for dirpath in os.listdir(p):
171 171 if os.path.isfile(os.path.join(p, dirpath)):
172 172 continue
173 173 cur_path = os.path.join(p, dirpath)
174 174 try:
175 175 scm_info = get_scm(cur_path)
176 176 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
177 177 except VCSError:
178 178 if not recursive:
179 179 continue
180 180 #check if this dir contains other repos for recursive scan
181 181 rec_path = os.path.join(p, dirpath)
182 182 if os.path.isdir(rec_path):
183 183 for inner_scm in _get_repos(rec_path):
184 184 yield inner_scm
185 185
186 186 return _get_repos(path)
187 187
188 188
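
Without the vcs helpers, the scan above amounts to walking the directory and checking for a control directory; a rough equivalent for the common one-level case (the base path in the comment is made up):

import os

def find_repos(path):
    # yields (name, alias) for directories that look like hg/git working copies
    for name in os.listdir(path):
        full = os.path.join(path, name)
        if not os.path.isdir(full):
            continue
        if os.path.isdir(os.path.join(full, '.hg')):
            yield name, 'hg'
        elif os.path.isdir(os.path.join(full, '.git')):
            yield name, 'git'

# for name, alias in find_repos('/srv/repos'): print('%s (%s)' % (name, alias))
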
189 189 def is_valid_repo(repo_name, base_path):
190 190 """
191 191 Returns True if the given path is a valid repository, False otherwise
192 192 :param repo_name:
193 193 :param base_path:
194 194
195 195 :return True: if given path is a valid repository
196 196 """
197 197 full_path = os.path.join(base_path, repo_name)
198 198
199 199 try:
200 200 get_scm(full_path)
201 201 return True
202 202 except VCSError:
203 203 return False
204 204
205 205
206 206 def is_valid_repos_group(repos_group_name, base_path):
207 207 """
208 208 Returns True if the given path is a repos group, False otherwise
209 209
210 210 :param repo_name:
211 211 :param base_path:
212 212 """
213 213 full_path = os.path.join(base_path, repos_group_name)
214 214
215 215 # check if it's not a repo
216 216 if is_valid_repo(repos_group_name, base_path):
217 217 return False
218 218
219 219 # check if it's a valid path
220 220 if os.path.isdir(full_path):
221 221 return True
222 222
223 223 return False
224 224
225 225
226 226 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
227 227 while True:
228 228 ok = raw_input(prompt)
229 229 if ok in ('y', 'ye', 'yes'):
230 230 return True
231 231 if ok in ('n', 'no', 'nop', 'nope'):
232 232 return False
233 233 retries = retries - 1
234 234 if retries < 0:
235 235 raise IOError
236 236 print complaint
237 237
238 238 #propagated from mercurial documentation
239 239 ui_sections = ['alias', 'auth',
240 240 'decode/encode', 'defaults',
241 241 'diff', 'email',
242 242 'extensions', 'format',
243 243 'merge-patterns', 'merge-tools',
244 244 'hooks', 'http_proxy',
245 245 'smtp', 'patch',
246 246 'paths', 'profiling',
247 247 'server', 'trusted',
248 248 'ui', 'web', ]
249 249
250 250
251 251 def make_ui(read_from='file', path=None, checkpaths=True):
252 252 """A function that will read python rc files or database
253 253 and make a mercurial ui object from the read options
254 254
255 255 :param path: path to mercurial config file
256 256 :param checkpaths: check the path
257 257 :param read_from: read from 'file' or 'db'
258 258 """
259 259
260 260 baseui = ui.ui()
261 261
262 262 # clean the baseui object
263 263 baseui._ocfg = config.config()
264 264 baseui._ucfg = config.config()
265 265 baseui._tcfg = config.config()
266 266
267 267 if read_from == 'file':
268 268 if not os.path.isfile(path):
269 269 log.debug('hgrc file is not present at %s skipping...' % path)
270 270 return False
271 271 log.debug('reading hgrc from %s' % path)
272 272 cfg = config.config()
273 273 cfg.read(path)
274 274 for section in ui_sections:
275 275 for k, v in cfg.items(section):
276 276 log.debug('settings ui from file[%s]%s:%s' % (section, k, v))
277 277 baseui.setconfig(section, k, v)
278 278
279 279 elif read_from == 'db':
280 280 sa = meta.Session
281 281 ret = sa.query(RhodeCodeUi)\
282 282 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
283 283 .all()
284 284
285 285 hg_ui = ret
286 286 for ui_ in hg_ui:
287 287 if ui_.ui_active:
288 288 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
289 289 ui_.ui_key, ui_.ui_value)
290 290 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
291 291
292 292 meta.Session.remove()
293 293 return baseui
294 294
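A hedged sketch of calling make_ui; the hgrc path is illustrative, and the 'file' variant returns False when no hgrc exists at that path:

    from rhodecode.lib.utils import make_ui

    # build a mercurial ui object from an on-disk hgrc file
    baseui = make_ui(read_from='file', path='/etc/rhodecode/hgrc')

    # or from the RhodeCodeUi rows stored in the database
    baseui = make_ui(read_from='db')
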
295 295
296 296 def set_rhodecode_config(config):
297 297 """
298 298 Updates pylons config with new settings from database
299 299
300 300 :param config:
301 301 """
302 302 hgsettings = RhodeCodeSetting.get_app_settings()
303 303
304 304 for k, v in hgsettings.items():
305 305 config[k] = v
306 306
307 307
308 308 def invalidate_cache(cache_key, *args):
309 309 """
310 310 Puts cache invalidation task into db for
311 311 further global cache invalidation
312 312 """
313 313
314 314 from rhodecode.model.scm import ScmModel
315 315
316 316 if cache_key.startswith('get_repo_cached_'):
317 317 name = cache_key.split('get_repo_cached_')[-1]
318 318 ScmModel().mark_for_invalidation(name)
319 319
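A short sketch of the cache key convention handled above: only keys starting with 'get_repo_cached_' are acted on, and everything after that prefix is treated as the repository name (the name below is illustrative):

    from rhodecode.lib.utils import invalidate_cache

    invalidate_cache('get_repo_cached_myrepo')  # marks 'myrepo' for invalidation
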
320 320
321 321 class EmptyChangeset(BaseChangeset):
322 322 """
323 323 A dummy empty changeset. It's possible to pass a hash when creating
324 324 an EmptyChangeset
325 325 """
326 326
327 327 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
328 328 alias=None):
329 329 self._empty_cs = cs
330 330 self.revision = -1
331 331 self.message = ''
332 332 self.author = ''
333 333 self.date = ''
334 334 self.repository = repo
335 335 self.requested_revision = requested_revision
336 336 self.alias = alias
337 337
338 338 @LazyProperty
339 339 def raw_id(self):
340 340 """
341 341 Returns raw string identifying this changeset, useful for web
342 342 representation.
343 343 """
344 344
345 345 return self._empty_cs
346 346
347 347 @LazyProperty
348 348 def branch(self):
349 349 return get_backend(self.alias).DEFAULT_BRANCH_NAME
350 350
351 351 @LazyProperty
352 352 def short_id(self):
353 353 return self.raw_id[:12]
354 354
355 355 def get_file_changeset(self, path):
356 356 return self
357 357
358 358 def get_file_content(self, path):
359 359 return u''
360 360
361 361 def get_file_size(self, path):
362 362 return 0
363 363
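A hedged sketch of EmptyChangeset, which stands in for the tip of a repository that has no commits yet:

    from rhodecode.lib.utils import EmptyChangeset

    cs = EmptyChangeset(alias='hg')
    cs.raw_id    # '0' * 40
    cs.short_id  # first 12 characters of raw_id
    cs.revision  # -1
    cs.branch    # DEFAULT_BRANCH_NAME of the 'hg' backend
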
364 364
365 365 def map_groups(groups):
366 366 """
367 367 Checks for group existence, and creates group structures.
368 368 It returns the last group in the structure
369 369
370 370 :param groups: list of groups structure
371 371 """
372 372 sa = meta.Session
373 373
374 374 parent = None
375 375 group = None
376 376
377 377 # last element is repo in nested groups structure
378 378 groups = groups[:-1]
379 379 rgm = ReposGroupModel(sa)
380 380 for lvl, group_name in enumerate(groups):
381 381 log.debug('creating group level: %s group_name: %s' % (lvl, group_name))
382 382 group_name = '/'.join(groups[:lvl] + [group_name])
383 383 group = RepoGroup.get_by_group_name(group_name)
384 384 desc = '%s group' % group_name
385 385
386 386 # # WTF that doesn't work !?
387 387 # if group is None:
388 388 # group = rgm.create(group_name, desc, parent, just_db=True)
389 389 # sa.commit()
390 390
391 391 if group is None:
392 392 group = RepoGroup(group_name, parent)
393 393 group.group_description = desc
394 394 sa.add(group)
395 395 rgm._create_default_perms(group)
396 396 sa.commit()
397 397 parent = group
398 398 return group
399 399
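A sketch of how map_groups is meant to be called (the repository path is illustrative); the last path element is the repository itself and is skipped:

    from rhodecode.lib.utils import map_groups

    # creates the 'projects' and 'projects/web' groups if missing and
    # returns the 'projects/web' RepoGroup
    group = map_groups('projects/web/site1'.split('/'))
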
400 400
401 401 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
402 402 """
403 403 maps all repos given in initial_repo_list; non-existing repositories
404 404 are created. If remove_obsolete is True it also checks for db entries
405 405 that are not in initial_repo_list and removes them.
406 406
407 407 :param initial_repo_list: list of repositories found by scanning methods
408 408 :param remove_obsolete: check for obsolete entries in database
409 409 """
410 410 from rhodecode.model.repo import RepoModel
411 411 sa = meta.Session
412 412 rm = RepoModel()
413 413 user = sa.query(User).filter(User.admin == True).first()
414 414 if user is None:
415 415 raise Exception('Missing administrative account !')
416 416 added = []
417 417
418 418 # fixup group paths to new format on the fly. Helps with migration from
419 419 # old rhodecode versions; also sets permissions if they are not present
420 420 # TODO: remove this in future, before release
421 421 def_usr = User.get_by_username('default')
422 422 for g in RepoGroup.query().all():
423 423 g.group_name = g.get_new_name(g.name)
424 424 sa.add(g)
425 425 # get default perm
426 426 default = UserRepoGroupToPerm.query()\
427 427 .filter(UserRepoGroupToPerm.group == g)\
428 428 .filter(UserRepoGroupToPerm.user == def_usr)\
429 429 .scalar()
430 430
431 431 if default is None:
432 432 log.debug('missing default permission for group %s adding' % g)
433 433 ReposGroupModel()._create_default_perms(g)
434 434
435 435 for name, repo in initial_repo_list.items():
436 436 group = map_groups(name.split(Repository.url_sep()))
437 437 if not rm.get_by_repo_name(name, cache=False):
438 438 log.info('repository %s not found creating default' % name)
439 439 added.append(name)
440 440 form_data = {
441 441 'repo_name': name,
442 442 'repo_name_full': name,
443 443 'repo_type': repo.alias,
444 444 'description': repo.description \
445 445 if repo.description != 'unknown' else '%s repository' % name,
446 446 'private': False,
447 447 'group_id': getattr(group, 'group_id', None)
448 448 }
449 449 rm.create(form_data, user, just_db=True)
450 450 sa.commit()
451 451 removed = []
452 452 if remove_obsolete:
453 453 #remove from database those repositories that are not in the filesystem
454 454 for repo in sa.query(Repository).all():
455 455 if repo.repo_name not in initial_repo_list.keys():
456 456 removed.append(repo.repo_name)
457 457 sa.delete(repo)
458 458 sa.commit()
459 459
460 460 return added, removed
461 461
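A hedged call sketch; `scanned` is assumed to be a mapping of repository name to vcs repository object, e.g. the result of scanning the repositories root with the helpers earlier in this module:

    from rhodecode.lib.utils import repo2db_mapper

    added, removed = repo2db_mapper(scanned, remove_obsolete=True)
    print 'added: %s removed: %s' % (added, removed)
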
462 462
463 463 # set cache regions for beaker so celery can utilise it
464 464 def add_cache(settings):
465 465 cache_settings = {'regions': None}
466 466 for key in settings.keys():
467 467 for prefix in ['beaker.cache.', 'cache.']:
468 468 if key.startswith(prefix):
469 469 name = key.split(prefix)[1].strip()
470 470 cache_settings[name] = settings[key].strip()
471 471 if cache_settings['regions']:
472 472 for region in cache_settings['regions'].split(','):
473 473 region = region.strip()
474 474 region_settings = {}
475 475 for key, value in cache_settings.items():
476 476 if key.startswith(region):
477 477 region_settings[key.split('.')[1]] = value
478 478 region_settings['expire'] = int(region_settings.get('expire',
479 479 60))
480 480 region_settings.setdefault('lock_dir',
481 481 cache_settings.get('lock_dir'))
482 482 region_settings.setdefault('data_dir',
483 483 cache_settings.get('data_dir'))
484 484
485 485 if 'type' not in region_settings:
486 486 region_settings['type'] = cache_settings.get('type',
487 487 'memory')
488 488 beaker.cache.cache_regions[region] = region_settings
489 489
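A hedged sketch of the flat paste/beaker settings dict that add_cache expects; keys follow the 'beaker.cache.' prefix stripped above and all values are illustrative:

    from rhodecode.lib.utils import add_cache

    settings = {
        'beaker.cache.regions': 'short_term, long_term',
        'beaker.cache.short_term.type': 'memory',
        'beaker.cache.short_term.expire': '60',
        'beaker.cache.long_term.type': 'file',
        'beaker.cache.long_term.expire': '3600',
        'beaker.cache.lock_dir': '/tmp/cache/lock',
        'beaker.cache.data_dir': '/tmp/cache/data',
    }
    add_cache(settings)  # registers both regions with beaker.cache.cache_regions
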
490 490
491 491 #==============================================================================
492 492 # TEST FUNCTIONS AND CREATORS
493 493 #==============================================================================
494 494 def create_test_index(repo_location, config, full_index):
495 495 """
496 496 Makes default test index
497 497
498 498 :param config: test config
499 499 :param full_index:
500 500 """
501 501
502 502 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
503 503 from rhodecode.lib.pidlock import DaemonLock, LockHeld
504 504
505 505 repo_location = repo_location
506 506
507 507 index_location = os.path.join(config['app_conf']['index_dir'])
508 508 if not os.path.exists(index_location):
509 509 os.makedirs(index_location)
510 510
511 511 try:
512 512 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
513 513 WhooshIndexingDaemon(index_location=index_location,
514 514 repo_location=repo_location)\
515 515 .run(full_index=full_index)
516 516 l.release()
517 517 except LockHeld:
518 518 pass
519 519
520 520
521 521 def create_test_env(repos_test_path, config):
522 522 """
523 523 Makes a fresh database and
524 524 installs a test repository into a tmp dir
525 525 """
526 526 from rhodecode.lib.db_manage import DbManage
527 527 from rhodecode.tests import HG_REPO, TESTS_TMP_PATH
528 528
529 529 # PART ONE create db
530 530 dbconf = config['sqlalchemy.db1.url']
531 531 log.debug('making test db %s' % dbconf)
532 532
533 533 # create test dir if it doesn't exist
534 534 if not os.path.isdir(repos_test_path):
535 535 log.debug('Creating testdir %s' % repos_test_path)
536 536 os.makedirs(repos_test_path)
537 537
538 538 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
539 539 tests=True)
540 540 dbmanage.create_tables(override=True)
541 541 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
542 542 dbmanage.create_default_user()
543 543 dbmanage.admin_prompt()
544 544 dbmanage.create_permissions()
545 545 dbmanage.populate_default_permissions()
546 546 Session.commit()
547 547 # PART TWO make test repo
548 548 log.debug('making test vcs repositories')
549 549
550 550 idx_path = config['app_conf']['index_dir']
551 551 data_path = config['app_conf']['cache_dir']
552 552
553 553 #clean index and data
554 554 if idx_path and os.path.exists(idx_path):
555 555 log.debug('remove %s' % idx_path)
556 556 shutil.rmtree(idx_path)
557 557
558 558 if data_path and os.path.exists(data_path):
559 559 log.debug('remove %s' % data_path)
560 560 shutil.rmtree(data_path)
561 561
562 562 #CREATE DEFAULT HG REPOSITORY
563 563 cur_dir = dn(dn(abspath(__file__)))
564 564 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
565 565 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
566 566 tar.close()
567 567
568 568
569 569 #==============================================================================
570 570 # PASTER COMMANDS
571 571 #==============================================================================
572 572 class BasePasterCommand(Command):
573 573 """
574 574 Abstract Base Class for paster commands.
575 575
576 576 The celery commands are somewhat aggressive about loading
577 577 celery.conf, and since our module sets the `CELERY_LOADER`
578 578 environment variable to our loader, we have to bootstrap a bit and
579 579 make sure we've had a chance to load the pylons config off of the
580 580 command line, otherwise everything fails.
581 581 """
582 582 min_args = 1
583 583 min_args_error = "Please provide a paster config file as an argument."
584 584 takes_config_file = 1
585 585 requires_config_file = True
586 586
587 587 def notify_msg(self, msg, log=False):
588 588 """Makes a notification to the user; additionally, if a logger is passed
589 589 it logs this action using the given logger
590 590
591 591 :param msg: message that will be printed to user
592 592 :param log: logging instance, to use to additionally log this message
593 593
594 594 """
595 595 if log and isinstance(log, logging.Logger):
596 596 log.info(msg)
597 597
598 598 def run(self, args):
599 599 """
600 600 Overrides Command.run
601 601
602 602 Checks for a config file argument and loads it.
603 603 """
604 604 if len(args) < self.min_args:
605 605 raise BadCommand(
606 606 self.min_args_error % {'min_args': self.min_args,
607 607 'actual_args': len(args)})
608 608
609 609 # Decrement because we're going to lob off the first argument.
610 610 # @@ This is hacky
611 611 self.min_args -= 1
612 612 self.bootstrap_config(args[0])
613 613 self.update_parser()
614 614 return super(BasePasterCommand, self).run(args[1:])
615 615
616 616 def update_parser(self):
617 617 """
618 618 Abstract method. Allows for the class's parser to be updated
619 619 before the superclass's `run` method is called. Necessary to
620 620 allow options/arguments to be passed through to the underlying
621 621 celery command.
622 622 """
623 623 raise NotImplementedError("Abstract Method.")
624 624
625 625 def bootstrap_config(self, conf):
626 626 """
627 627 Loads the pylons configuration.
628 628 """
629 629 from pylons import config as pylonsconfig
630 630
631 631 path_to_ini_file = os.path.realpath(conf)
632 632 conf = paste.deploy.appconfig('config:' + path_to_ini_file)
633 633 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
@@ -1,1185 +1,1185 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.model.db
4 4 ~~~~~~~~~~~~~~~~~~
5 5
6 6 Database Models for RhodeCode
7 7
8 8 :created_on: Apr 08, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import logging
28 28 import datetime
29 29 import traceback
30 30 from collections import defaultdict
31 31
32 32 from sqlalchemy import *
33 33 from sqlalchemy.ext.hybrid import hybrid_property
34 34 from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
35 35 from beaker.cache import cache_region, region_invalidate
36 36
37 from vcs import get_backend
38 from vcs.utils.helpers import get_scm
39 from vcs.exceptions import VCSError
40 from vcs.utils.lazy import LazyProperty
37 from rhodecode.lib.vcs import get_backend
38 from rhodecode.lib.vcs.utils.helpers import get_scm
39 from rhodecode.lib.vcs.exceptions import VCSError
40 from rhodecode.lib.vcs.utils.lazy import LazyProperty
41 41
42 42 from rhodecode.lib import str2bool, safe_str, get_changeset_safe, safe_unicode
43 43 from rhodecode.lib.compat import json
44 44 from rhodecode.lib.caching_query import FromCache
45 45
46 46 from rhodecode.model.meta import Base, Session
47 47
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51 #==============================================================================
52 52 # BASE CLASSES
53 53 #==============================================================================
54 54
55 55
56 56 class ModelSerializer(json.JSONEncoder):
57 57 """
58 58 Simple Serializer for JSON,
59 59
60 60 usage::
61 61
62 62 to make object customized for serialization implement a __json__
63 63 method that will return a dict for serialization into json
64 64
65 65 example::
66 66
67 67 class Task(object):
68 68
69 69 def __init__(self, name, value):
70 70 self.name = name
71 71 self.value = value
72 72
73 73 def __json__(self):
74 74 return dict(name=self.name,
75 75 value=self.value)
76 76
77 77 """
78 78
79 79 def default(self, obj):
80 80
81 81 if hasattr(obj, '__json__'):
82 82 return obj.__json__()
83 83 else:
84 84 return json.JSONEncoder.default(self, obj)
85 85
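A hedged usage sketch of the serializer above: any model that implements __json__ (for instance User further down) can be passed straight to json.dumps; the username is illustrative:

    from rhodecode.lib.compat import json
    from rhodecode.model.db import ModelSerializer, User

    user = User.get_by_username('admin')
    print json.dumps(user, cls=ModelSerializer)  # uses User.__json__()
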
86 86
87 87 class BaseModel(object):
88 88 """
89 89 Base Model for all classes
90 90 """
91 91
92 92 @classmethod
93 93 def _get_keys(cls):
94 94 """return column names for this model """
95 95 return class_mapper(cls).c.keys()
96 96
97 97 def get_dict(self):
98 98 """
99 99 return dict with keys and values corresponding
100 100 to this model data """
101 101
102 102 d = {}
103 103 for k in self._get_keys():
104 104 d[k] = getattr(self, k)
105 105
106 106 # also use __json__() if present to get additional fields
107 107 for k, val in getattr(self, '__json__', lambda: {})().iteritems():
108 108 d[k] = val
109 109 return d
110 110
111 111 def get_appstruct(self):
112 112 """return list with keys and values tuples corresponding
113 113 to this model data """
114 114
115 115 l = []
116 116 for k in self._get_keys():
117 117 l.append((k, getattr(self, k),))
118 118 return l
119 119
120 120 def populate_obj(self, populate_dict):
121 121 """populate model with data from given populate_dict"""
122 122
123 123 for k in self._get_keys():
124 124 if k in populate_dict:
125 125 setattr(self, k, populate_dict[k])
126 126
127 127 @classmethod
128 128 def query(cls):
129 129 return Session.query(cls)
130 130
131 131 @classmethod
132 132 def get(cls, id_):
133 133 if id_:
134 134 return cls.query().get(id_)
135 135
136 136 @classmethod
137 137 def getAll(cls):
138 138 return cls.query().all()
139 139
140 140 @classmethod
141 141 def delete(cls, id_):
142 142 obj = cls.query().get(id_)
143 143 Session.delete(obj)
144 144
145 145
146 146 class RhodeCodeSetting(Base, BaseModel):
147 147 __tablename__ = 'rhodecode_settings'
148 148 __table_args__ = (
149 149 UniqueConstraint('app_settings_name'),
150 150 {'extend_existing': True}
151 151 )
152 152 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
153 153 app_settings_name = Column("app_settings_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
154 154 _app_settings_value = Column("app_settings_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
155 155
156 156 def __init__(self, k='', v=''):
157 157 self.app_settings_name = k
158 158 self.app_settings_value = v
159 159
160 160 @validates('_app_settings_value')
161 161 def validate_settings_value(self, key, val):
162 162 assert type(val) == unicode
163 163 return val
164 164
165 165 @hybrid_property
166 166 def app_settings_value(self):
167 167 v = self._app_settings_value
168 168 if v == 'ldap_active':
169 169 v = str2bool(v)
170 170 return v
171 171
172 172 @app_settings_value.setter
173 173 def app_settings_value(self, val):
174 174 """
175 175 Setter that will always make sure we use unicode in app_settings_value
176 176
177 177 :param val:
178 178 """
179 179 self._app_settings_value = safe_unicode(val)
180 180
181 181 def __repr__(self):
182 182 return "<%s('%s:%s')>" % (
183 183 self.__class__.__name__,
184 184 self.app_settings_name, self.app_settings_value
185 185 )
186 186
187 187 @classmethod
188 188 def get_by_name(cls, ldap_key):
189 189 return cls.query()\
190 190 .filter(cls.app_settings_name == ldap_key).scalar()
191 191
192 192 @classmethod
193 193 def get_app_settings(cls, cache=False):
194 194
195 195 ret = cls.query()
196 196
197 197 if cache:
198 198 ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
199 199
200 200 if not ret:
201 201 raise Exception('Could not get application settings !')
202 202 settings = {}
203 203 for each in ret:
204 204 settings['rhodecode_' + each.app_settings_name] = \
205 205 each.app_settings_value
206 206
207 207 return settings
208 208
209 209 @classmethod
210 210 def get_ldap_settings(cls, cache=False):
211 211 ret = cls.query()\
212 212 .filter(cls.app_settings_name.startswith('ldap_')).all()
213 213 fd = {}
214 214 for row in ret:
215 215 fd.update({row.app_settings_name:row.app_settings_value})
216 216
217 217 return fd
218 218
219 219
220 220 class RhodeCodeUi(Base, BaseModel):
221 221 __tablename__ = 'rhodecode_ui'
222 222 __table_args__ = (
223 223 UniqueConstraint('ui_key'),
224 224 {'extend_existing': True}
225 225 )
226 226
227 227 HOOK_UPDATE = 'changegroup.update'
228 228 HOOK_REPO_SIZE = 'changegroup.repo_size'
229 229 HOOK_PUSH = 'pretxnchangegroup.push_logger'
230 230 HOOK_PULL = 'preoutgoing.pull_logger'
231 231
232 232 ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
233 233 ui_section = Column("ui_section", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
234 234 ui_key = Column("ui_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
235 235 ui_value = Column("ui_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
236 236 ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
237 237
238 238 @classmethod
239 239 def get_by_key(cls, key):
240 240 return cls.query().filter(cls.ui_key == key)
241 241
242 242 @classmethod
243 243 def get_builtin_hooks(cls):
244 244 q = cls.query()
245 245 q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE,
246 246 cls.HOOK_REPO_SIZE,
247 247 cls.HOOK_PUSH, cls.HOOK_PULL]))
248 248 return q.all()
249 249
250 250 @classmethod
251 251 def get_custom_hooks(cls):
252 252 q = cls.query()
253 253 q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE,
254 254 cls.HOOK_REPO_SIZE,
255 255 cls.HOOK_PUSH, cls.HOOK_PULL]))
256 256 q = q.filter(cls.ui_section == 'hooks')
257 257 return q.all()
258 258
259 259 @classmethod
260 260 def create_or_update_hook(cls, key, val):
261 261 new_ui = cls.get_by_key(key).scalar() or cls()
262 262 new_ui.ui_section = 'hooks'
263 263 new_ui.ui_active = True
264 264 new_ui.ui_key = key
265 265 new_ui.ui_value = val
266 266
267 267 Session.add(new_ui)
268 268
269 269
270 270 class User(Base, BaseModel):
271 271 __tablename__ = 'users'
272 272 __table_args__ = (
273 273 UniqueConstraint('username'), UniqueConstraint('email'),
274 274 {'extend_existing': True}
275 275 )
276 276 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
277 277 username = Column("username", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
278 278 password = Column("password", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
279 279 active = Column("active", Boolean(), nullable=True, unique=None, default=None)
280 280 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
281 281 name = Column("name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
282 282 lastname = Column("lastname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
283 283 _email = Column("email", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
284 284 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
285 285 ldap_dn = Column("ldap_dn", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
286 286 api_key = Column("api_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
287 287
288 288 user_log = relationship('UserLog', cascade='all')
289 289 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
290 290
291 291 repositories = relationship('Repository')
292 292 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
293 293 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
294 294
295 295 group_member = relationship('UsersGroupMember', cascade='all')
296 296
297 297 notifications = relationship('UserNotification',)
298 298
299 299 @hybrid_property
300 300 def email(self):
301 301 return self._email
302 302
303 303 @email.setter
304 304 def email(self, val):
305 305 self._email = val.lower() if val else None
306 306
307 307 @property
308 308 def full_name(self):
309 309 return '%s %s' % (self.name, self.lastname)
310 310
311 311 @property
312 312 def full_name_or_username(self):
313 313 return ('%s %s' % (self.name, self.lastname)
314 314 if (self.name and self.lastname) else self.username)
315 315
316 316 @property
317 317 def full_contact(self):
318 318 return '%s %s <%s>' % (self.name, self.lastname, self.email)
319 319
320 320 @property
321 321 def short_contact(self):
322 322 return '%s %s' % (self.name, self.lastname)
323 323
324 324 @property
325 325 def is_admin(self):
326 326 return self.admin
327 327
328 328 def __repr__(self):
329 329 return "<%s('id:%s:%s')>" % (self.__class__.__name__,
330 330 self.user_id, self.username)
331 331
332 332 @classmethod
333 333 def get_by_username(cls, username, case_insensitive=False, cache=False):
334 334 if case_insensitive:
335 335 q = cls.query().filter(cls.username.ilike(username))
336 336 else:
337 337 q = cls.query().filter(cls.username == username)
338 338
339 339 if cache:
340 340 q = q.options(FromCache("sql_cache_short",
341 341 "get_user_%s" % username))
342 342 return q.scalar()
343 343
344 344 @classmethod
345 345 def get_by_api_key(cls, api_key, cache=False):
346 346 q = cls.query().filter(cls.api_key == api_key)
347 347
348 348 if cache:
349 349 q = q.options(FromCache("sql_cache_short",
350 350 "get_api_key_%s" % api_key))
351 351 return q.scalar()
352 352
353 353 @classmethod
354 354 def get_by_email(cls, email, case_insensitive=False, cache=False):
355 355 if case_insensitive:
356 356 q = cls.query().filter(cls.email.ilike(email))
357 357 else:
358 358 q = cls.query().filter(cls.email == email)
359 359
360 360 if cache:
361 361 q = q.options(FromCache("sql_cache_short",
362 362 "get_api_key_%s" % email))
363 363 return q.scalar()
364 364
365 365 def update_lastlogin(self):
366 366 """Update user lastlogin"""
367 367 self.last_login = datetime.datetime.now()
368 368 Session.add(self)
369 369 log.debug('updated user %s lastlogin' % self.username)
370 370
371 371 def __json__(self):
372 372 return dict(
373 373 email=self.email,
374 374 full_name=self.full_name,
375 375 full_name_or_username=self.full_name_or_username,
376 376 short_contact=self.short_contact,
377 377 full_contact=self.full_contact
378 378 )
379 379
380 380
381 381 class UserLog(Base, BaseModel):
382 382 __tablename__ = 'user_logs'
383 383 __table_args__ = {'extend_existing': True}
384 384 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
385 385 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
386 386 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
387 387 repository_name = Column("repository_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
388 388 user_ip = Column("user_ip", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
389 389 action = Column("action", UnicodeText(length=1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
390 390 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
391 391
392 392 @property
393 393 def action_as_day(self):
394 394 return datetime.date(*self.action_date.timetuple()[:3])
395 395
396 396 user = relationship('User')
397 397 repository = relationship('Repository',cascade='')
398 398
399 399
400 400 class UsersGroup(Base, BaseModel):
401 401 __tablename__ = 'users_groups'
402 402 __table_args__ = {'extend_existing': True}
403 403
404 404 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
405 405 users_group_name = Column("users_group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
406 406 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
407 407
408 408 members = relationship('UsersGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
409 409
410 410 def __repr__(self):
411 411 return '<userGroup(%s)>' % (self.users_group_name)
412 412
413 413 @classmethod
414 414 def get_by_group_name(cls, group_name, cache=False,
415 415 case_insensitive=False):
416 416 if case_insensitive:
417 417 q = cls.query().filter(cls.users_group_name.ilike(group_name))
418 418 else:
419 419 q = cls.query().filter(cls.users_group_name == group_name)
420 420 if cache:
421 421 q = q.options(FromCache("sql_cache_short",
422 422 "get_user_%s" % group_name))
423 423 return q.scalar()
424 424
425 425 @classmethod
426 426 def get(cls, users_group_id, cache=False):
427 427 users_group = cls.query()
428 428 if cache:
429 429 users_group = users_group.options(FromCache("sql_cache_short",
430 430 "get_users_group_%s" % users_group_id))
431 431 return users_group.get(users_group_id)
432 432
433 433
434 434 class UsersGroupMember(Base, BaseModel):
435 435 __tablename__ = 'users_groups_members'
436 436 __table_args__ = {'extend_existing': True}
437 437
438 438 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
439 439 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
440 440 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
441 441
442 442 user = relationship('User', lazy='joined')
443 443 users_group = relationship('UsersGroup')
444 444
445 445 def __init__(self, gr_id='', u_id=''):
446 446 self.users_group_id = gr_id
447 447 self.user_id = u_id
448 448
449 449 class Repository(Base, BaseModel):
450 450 __tablename__ = 'repositories'
451 451 __table_args__ = (
452 452 UniqueConstraint('repo_name'),
453 453 {'extend_existing': True},
454 454 )
455 455
456 456 repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
457 457 repo_name = Column("repo_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
458 458 clone_uri = Column("clone_uri", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
459 459 repo_type = Column("repo_type", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default='hg')
460 460 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
461 461 private = Column("private", Boolean(), nullable=True, unique=None, default=None)
462 462 enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
463 463 enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
464 464 description = Column("description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
465 465 created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
466 466
467 467 fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
468 468 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
469 469
470 470 user = relationship('User')
471 471 fork = relationship('Repository', remote_side=repo_id)
472 472 group = relationship('RepoGroup')
473 473 repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
474 474 users_group_to_perm = relationship('UsersGroupRepoToPerm', cascade='all')
475 475 stats = relationship('Statistics', cascade='all', uselist=False)
476 476
477 477 followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all')
478 478
479 479 logs = relationship('UserLog')
480 480
481 481 def __repr__(self):
482 482 return "<%s('%s:%s')>" % (self.__class__.__name__,
483 483 self.repo_id, self.repo_name)
484 484
485 485 @classmethod
486 486 def url_sep(cls):
487 487 return '/'
488 488
489 489 @classmethod
490 490 def get_by_repo_name(cls, repo_name):
491 491 q = Session.query(cls).filter(cls.repo_name == repo_name)
492 492 q = q.options(joinedload(Repository.fork))\
493 493 .options(joinedload(Repository.user))\
494 494 .options(joinedload(Repository.group))
495 495 return q.scalar()
496 496
497 497 @classmethod
498 498 def get_repo_forks(cls, repo_id):
499 499 return cls.query().filter(Repository.fork_id == repo_id)
500 500
501 501 @classmethod
502 502 def base_path(cls):
503 503 """
504 504 Returns base path where all repos are stored
505 505
506 506 :param cls:
507 507 """
508 508 q = Session.query(RhodeCodeUi)\
509 509 .filter(RhodeCodeUi.ui_key == cls.url_sep())
510 510 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
511 511 return q.one().ui_value
512 512
513 513 @property
514 514 def just_name(self):
515 515 return self.repo_name.split(Repository.url_sep())[-1]
516 516
517 517 @property
518 518 def groups_with_parents(self):
519 519 groups = []
520 520 if self.group is None:
521 521 return groups
522 522
523 523 cur_gr = self.group
524 524 groups.insert(0, cur_gr)
525 525 while 1:
526 526 gr = getattr(cur_gr, 'parent_group', None)
527 527 cur_gr = cur_gr.parent_group
528 528 if gr is None:
529 529 break
530 530 groups.insert(0, gr)
531 531
532 532 return groups
533 533
534 534 @property
535 535 def groups_and_repo(self):
536 536 return self.groups_with_parents, self.just_name
537 537
538 538 @LazyProperty
539 539 def repo_path(self):
540 540 """
541 541 Returns the full base path for this repository, i.e. where it actually
542 542 exists on the filesystem
543 543 """
544 544 q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
545 545 Repository.url_sep())
546 546 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
547 547 return q.one().ui_value
548 548
549 549 @property
550 550 def repo_full_path(self):
551 551 p = [self.repo_path]
552 552 # we need to split the name by / since this is how we store the
553 553 # names in the database, but that eventually needs to be converted
554 554 # into a valid system path
555 555 p += self.repo_name.split(Repository.url_sep())
556 556 return os.path.join(*p)
557 557
558 558 def get_new_name(self, repo_name):
559 559 """
560 560 returns new full repository name based on the assigned group and the new name
561 561
562 562 :param repo_name:
563 563 """
564 564 path_prefix = self.group.full_path_splitted if self.group else []
565 565 return Repository.url_sep().join(path_prefix + [repo_name])
566 566
567 567 @property
568 568 def _ui(self):
569 569 """
570 570 Creates a db-based ui object for this repository
571 571 """
572 572 from mercurial import ui
573 573 from mercurial import config
574 574 baseui = ui.ui()
575 575
576 576 #clean the baseui object
577 577 baseui._ocfg = config.config()
578 578 baseui._ucfg = config.config()
579 579 baseui._tcfg = config.config()
580 580
581 581 ret = RhodeCodeUi.query()\
582 582 .options(FromCache("sql_cache_short", "repository_repo_ui")).all()
583 583
584 584 hg_ui = ret
585 585 for ui_ in hg_ui:
586 586 if ui_.ui_active:
587 587 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
588 588 ui_.ui_key, ui_.ui_value)
589 589 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
590 590
591 591 return baseui
592 592
593 593 @classmethod
594 594 def is_valid(cls, repo_name):
595 595 """
596 596 returns True if given repo name is a valid filesystem repository
597 597
598 598 :param cls:
599 599 :param repo_name:
600 600 """
601 601 from rhodecode.lib.utils import is_valid_repo
602 602
603 603 return is_valid_repo(repo_name, cls.base_path())
604 604
605 605 #==========================================================================
606 606 # SCM PROPERTIES
607 607 #==========================================================================
608 608
609 609 def get_changeset(self, rev):
610 610 return get_changeset_safe(self.scm_instance, rev)
611 611
612 612 @property
613 613 def tip(self):
614 614 return self.get_changeset('tip')
615 615
616 616 @property
617 617 def author(self):
618 618 return self.tip.author
619 619
620 620 @property
621 621 def last_change(self):
622 622 return self.scm_instance.last_change
623 623
624 624 def comments(self, revisions=None):
625 625 """
626 626 Returns comments for this repository grouped by revisions
627 627
628 628 :param revisions: filter query by revisions only
629 629 """
630 630 cmts = ChangesetComment.query()\
631 631 .filter(ChangesetComment.repo == self)
632 632 if revisions:
633 633 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
634 634 grouped = defaultdict(list)
635 635 for cmt in cmts.all():
636 636 grouped[cmt.revision].append(cmt)
637 637 return grouped
638 638
639 639 #==========================================================================
640 640 # SCM CACHE INSTANCE
641 641 #==========================================================================
642 642
643 643 @property
644 644 def invalidate(self):
645 645 return CacheInvalidation.invalidate(self.repo_name)
646 646
647 647 def set_invalidate(self):
648 648 """
649 649 set a cache for invalidation for this instance
650 650 """
651 651 CacheInvalidation.set_invalidate(self.repo_name)
652 652
653 653 @LazyProperty
654 654 def scm_instance(self):
655 655 return self.__get_instance()
656 656
657 657 @property
658 658 def scm_instance_cached(self):
659 659 @cache_region('long_term')
660 660 def _c(repo_name):
661 661 return self.__get_instance()
662 662 rn = self.repo_name
663 663 log.debug('Getting cached instance of repo')
664 664 inv = self.invalidate
665 665 if inv is not None:
666 666 region_invalidate(_c, None, rn)
667 667 # update our cache
668 668 CacheInvalidation.set_valid(inv.cache_key)
669 669 return _c(rn)
670 670
671 671 def __get_instance(self):
672 672 repo_full_path = self.repo_full_path
673 673 try:
674 674 alias = get_scm(repo_full_path)[0]
675 675 log.debug('Creating instance of %s repository' % alias)
676 676 backend = get_backend(alias)
677 677 except VCSError:
678 678 log.error(traceback.format_exc())
679 679 log.error('Perhaps this repository is in db and not in '
680 680 'filesystem run rescan repositories with '
681 681 '"destroy old data " option from admin panel')
682 682 return
683 683
684 684 if alias == 'hg':
685 685
686 686 repo = backend(safe_str(repo_full_path), create=False,
687 687 baseui=self._ui)
688 688 # skip hidden web repository
689 689 if repo._get_hidden():
690 690 return
691 691 else:
692 692 repo = backend(repo_full_path, create=False)
693 693
694 694 return repo
695 695
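A hedged sketch of the typical read path through the Repository model; the repository name is illustrative:

    from rhodecode.model.db import Repository

    repo = Repository.get_by_repo_name('projects/web/site1')
    if repo is not None:
        print repo.repo_full_path   # filesystem location of the repository
        scm = repo.scm_instance     # uncached vcs backend instance
        print repo.tip.raw_id       # raw id of the 'tip' changeset
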
696 696
697 697 class RepoGroup(Base, BaseModel):
698 698 __tablename__ = 'groups'
699 699 __table_args__ = (
700 700 UniqueConstraint('group_name', 'group_parent_id'),
701 701 CheckConstraint('group_id != group_parent_id'),
702 702 {'extend_existing': True},
703 703 )
704 704 __mapper_args__ = {'order_by': 'group_name'}
705 705
706 706 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
707 707 group_name = Column("group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
708 708 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
709 709 group_description = Column("group_description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
710 710
711 711 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
712 712 users_group_to_perm = relationship('UsersGroupRepoGroupToPerm', cascade='all')
713 713
714 714 parent_group = relationship('RepoGroup', remote_side=group_id)
715 715
716 716 def __init__(self, group_name='', parent_group=None):
717 717 self.group_name = group_name
718 718 self.parent_group = parent_group
719 719
720 720 def __repr__(self):
721 721 return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
722 722 self.group_name)
723 723
724 724 @classmethod
725 725 def groups_choices(cls):
726 726 from webhelpers.html import literal as _literal
727 727 repo_groups = [('', '')]
728 728 sep = ' &raquo; '
729 729 _name = lambda k: _literal(sep.join(k))
730 730
731 731 repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
732 732 for x in cls.query().all()])
733 733
734 734 repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
735 735 return repo_groups
736 736
737 737 @classmethod
738 738 def url_sep(cls):
739 739 return '/'
740 740
741 741 @classmethod
742 742 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
743 743 if case_insensitive:
744 744 gr = cls.query()\
745 745 .filter(cls.group_name.ilike(group_name))
746 746 else:
747 747 gr = cls.query()\
748 748 .filter(cls.group_name == group_name)
749 749 if cache:
750 750 gr = gr.options(FromCache("sql_cache_short",
751 751 "get_group_%s" % group_name))
752 752 return gr.scalar()
753 753
754 754 @property
755 755 def parents(self):
756 756 parents_recursion_limit = 5
757 757 groups = []
758 758 if self.parent_group is None:
759 759 return groups
760 760 cur_gr = self.parent_group
761 761 groups.insert(0, cur_gr)
762 762 cnt = 0
763 763 while 1:
764 764 cnt += 1
765 765 gr = getattr(cur_gr, 'parent_group', None)
766 766 cur_gr = cur_gr.parent_group
767 767 if gr is None:
768 768 break
769 769 if cnt == parents_recursion_limit:
770 770 # this will prevent accidental infinite loops
771 771 log.error('group nested more than %s' %
772 772 parents_recursion_limit)
773 773 break
774 774
775 775 groups.insert(0, gr)
776 776 return groups
777 777
778 778 @property
779 779 def children(self):
780 780 return RepoGroup.query().filter(RepoGroup.parent_group == self)
781 781
782 782 @property
783 783 def name(self):
784 784 return self.group_name.split(RepoGroup.url_sep())[-1]
785 785
786 786 @property
787 787 def full_path(self):
788 788 return self.group_name
789 789
790 790 @property
791 791 def full_path_splitted(self):
792 792 return self.group_name.split(RepoGroup.url_sep())
793 793
794 794 @property
795 795 def repositories(self):
796 796 return Repository.query().filter(Repository.group == self)
797 797
798 798 @property
799 799 def repositories_recursive_count(self):
800 800 cnt = self.repositories.count()
801 801
802 802 def children_count(group):
803 803 cnt = 0
804 804 for child in group.children:
805 805 cnt += child.repositories.count()
806 806 cnt += children_count(child)
807 807 return cnt
808 808
809 809 return cnt + children_count(self)
810 810
811 811 def get_new_name(self, group_name):
812 812 """
813 813 returns new full group name based on parent and new name
814 814
815 815 :param group_name:
816 816 """
817 817 path_prefix = (self.parent_group.full_path_splitted if
818 818 self.parent_group else [])
819 819 return RepoGroup.url_sep().join(path_prefix + [group_name])
820 820
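A short sketch of the path helpers on RepoGroup; the group names are illustrative:

    gr = RepoGroup.get_by_group_name('projects/web')
    gr.name                     # 'web'
    gr.full_path_splitted       # ['projects', 'web']
    gr.get_new_name('backend')  # 'projects/backend', the parent prefix is kept
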
821 821
822 822 class Permission(Base, BaseModel):
823 823 __tablename__ = 'permissions'
824 824 __table_args__ = {'extend_existing': True}
825 825 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
826 826 permission_name = Column("permission_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
827 827 permission_longname = Column("permission_longname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
828 828
829 829 def __repr__(self):
830 830 return "<%s('%s:%s')>" % (
831 831 self.__class__.__name__, self.permission_id, self.permission_name
832 832 )
833 833
834 834 @classmethod
835 835 def get_by_key(cls, key):
836 836 return cls.query().filter(cls.permission_name == key).scalar()
837 837
838 838 @classmethod
839 839 def get_default_perms(cls, default_user_id):
840 840 q = Session.query(UserRepoToPerm, Repository, cls)\
841 841 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
842 842 .join((cls, UserRepoToPerm.permission_id == cls.permission_id))\
843 843 .filter(UserRepoToPerm.user_id == default_user_id)
844 844
845 845 return q.all()
846 846
847 847 @classmethod
848 848 def get_default_group_perms(cls, default_user_id):
849 849 q = Session.query(UserRepoGroupToPerm, RepoGroup, cls)\
850 850 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
851 851 .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id))\
852 852 .filter(UserRepoGroupToPerm.user_id == default_user_id)
853 853
854 854 return q.all()
855 855
856 856
857 857 class UserRepoToPerm(Base, BaseModel):
858 858 __tablename__ = 'repo_to_perm'
859 859 __table_args__ = (
860 860 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
861 861 {'extend_existing': True}
862 862 )
863 863 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
864 864 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
865 865 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
866 866 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
867 867
868 868 user = relationship('User')
869 869 repository = relationship('Repository')
870 870 permission = relationship('Permission')
871 871
872 872 @classmethod
873 873 def create(cls, user, repository, permission):
874 874 n = cls()
875 875 n.user = user
876 876 n.repository = repository
877 877 n.permission = permission
878 878 Session.add(n)
879 879 return n
880 880
881 881 def __repr__(self):
882 882 return '<user:%s => %s >' % (self.user, self.repository)
883 883
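A hedged sketch of granting a repository permission, assuming `some_user` and `some_repo` are already loaded User and Repository instances; the permission key is illustrative:

    perm = Permission.get_by_key('repository.write')
    UserRepoToPerm.create(user=some_user, repository=some_repo, permission=perm)
    Session.commit()
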
884 884
885 885 class UserToPerm(Base, BaseModel):
886 886 __tablename__ = 'user_to_perm'
887 887 __table_args__ = (
888 888 UniqueConstraint('user_id', 'permission_id'),
889 889 {'extend_existing': True}
890 890 )
891 891 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
892 892 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
893 893 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
894 894
895 895 user = relationship('User')
896 896 permission = relationship('Permission', lazy='joined')
897 897
898 898
899 899 class UsersGroupRepoToPerm(Base, BaseModel):
900 900 __tablename__ = 'users_group_repo_to_perm'
901 901 __table_args__ = (
902 902 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
903 903 {'extend_existing': True}
904 904 )
905 905 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
906 906 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
907 907 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
908 908 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
909 909
910 910 users_group = relationship('UsersGroup')
911 911 permission = relationship('Permission')
912 912 repository = relationship('Repository')
913 913
914 914 @classmethod
915 915 def create(cls, users_group, repository, permission):
916 916 n = cls()
917 917 n.users_group = users_group
918 918 n.repository = repository
919 919 n.permission = permission
920 920 Session.add(n)
921 921 return n
922 922
923 923 def __repr__(self):
924 924 return '<userGroup:%s => %s >' % (self.users_group, self.repository)
925 925
926 926
927 927 class UsersGroupToPerm(Base, BaseModel):
928 928 __tablename__ = 'users_group_to_perm'
929 929 __table_args__ = (
930 930 UniqueConstraint('users_group_id', 'permission_id',),
931 931 {'extend_existing': True}
932 932 )
933 933 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
934 934 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
935 935 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
936 936
937 937 users_group = relationship('UsersGroup')
938 938 permission = relationship('Permission')
939 939
940 940
941 941 class UserRepoGroupToPerm(Base, BaseModel):
942 942 __tablename__ = 'user_repo_group_to_perm'
943 943 __table_args__ = (
944 944 UniqueConstraint('user_id', 'group_id', 'permission_id'),
945 945 {'extend_existing': True}
946 946 )
947 947
948 948 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
949 949 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
950 950 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
951 951 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
952 952
953 953 user = relationship('User')
954 954 group = relationship('RepoGroup')
955 955 permission = relationship('Permission')
956 956
957 957
958 958 class UsersGroupRepoGroupToPerm(Base, BaseModel):
959 959 __tablename__ = 'users_group_repo_group_to_perm'
960 960 __table_args__ = (
961 961 UniqueConstraint('users_group_id', 'group_id'),
962 962 {'extend_existing': True}
963 963 )
964 964
965 965 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
966 966 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
967 967 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
968 968 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
969 969
970 970 users_group = relationship('UsersGroup')
971 971 permission = relationship('Permission')
972 972 group = relationship('RepoGroup')
973 973
974 974
975 975 class Statistics(Base, BaseModel):
976 976 __tablename__ = 'statistics'
977 977 __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing': True})
978 978 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
979 979 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
980 980 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
981 981 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
982 982 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
983 983 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
984 984
985 985 repository = relationship('Repository', single_parent=True)
986 986
987 987
988 988 class UserFollowing(Base, BaseModel):
989 989 __tablename__ = 'user_followings'
990 990 __table_args__ = (
991 991 UniqueConstraint('user_id', 'follows_repository_id'),
992 992 UniqueConstraint('user_id', 'follows_user_id'),
993 993 {'extend_existing': True}
994 994 )
995 995
996 996 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
997 997 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
998 998 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
999 999 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1000 1000 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
1001 1001
1002 1002 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
1003 1003
1004 1004 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
1005 1005 follows_repository = relationship('Repository', order_by='Repository.repo_name')
1006 1006
1007 1007 @classmethod
1008 1008 def get_repo_followers(cls, repo_id):
1009 1009 return cls.query().filter(cls.follows_repo_id == repo_id)
1010 1010
1011 1011
1012 1012 class CacheInvalidation(Base, BaseModel):
1013 1013 __tablename__ = 'cache_invalidation'
1014 1014 __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing': True})
1015 1015 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1016 1016 cache_key = Column("cache_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
1017 1017 cache_args = Column("cache_args", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
1018 1018 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
1019 1019
1020 1020 def __init__(self, cache_key, cache_args=''):
1021 1021 self.cache_key = cache_key
1022 1022 self.cache_args = cache_args
1023 1023 self.cache_active = False
1024 1024
1025 1025 def __repr__(self):
1026 1026 return "<%s('%s:%s')>" % (self.__class__.__name__,
1027 1027 self.cache_id, self.cache_key)
1028 1028
1029 1029 @classmethod
1030 1030 def invalidate(cls, key):
1031 1031 """
1032 1032 Returns Invalidation object if the given key should be invalidated,
1033 1033 None otherwise. `cache_active = False` means that this cache
1034 1034 state is not valid and needs to be invalidated
1035 1035
1036 1036 :param key:
1037 1037 """
1038 1038 return cls.query()\
1039 1039 .filter(CacheInvalidation.cache_key == key)\
1040 1040 .filter(CacheInvalidation.cache_active == False)\
1041 1041 .scalar()
1042 1042
1043 1043 @classmethod
1044 1044 def set_invalidate(cls, key):
1045 1045 """
1046 1046 Mark this Cache key for invalidation
1047 1047
1048 1048 :param key:
1049 1049 """
1050 1050
1051 1051 log.debug('marking %s for invalidation' % key)
1052 1052 inv_obj = Session.query(cls)\
1053 1053 .filter(cls.cache_key == key).scalar()
1054 1054 if inv_obj:
1055 1055 inv_obj.cache_active = False
1056 1056 else:
1057 1057 log.debug('cache key not found in invalidation db -> creating one')
1058 1058 inv_obj = CacheInvalidation(key)
1059 1059
1060 1060 try:
1061 1061 Session.add(inv_obj)
1062 1062 Session.commit()
1063 1063 except Exception:
1064 1064 log.error(traceback.format_exc())
1065 1065 Session.rollback()
1066 1066
1067 1067 @classmethod
1068 1068 def set_valid(cls, key):
1069 1069 """
1070 1070 Mark this cache key as active and currently cached
1071 1071
1072 1072 :param key:
1073 1073 """
1074 1074 inv_obj = CacheInvalidation.query()\
1075 1075 .filter(CacheInvalidation.cache_key == key).scalar()
1076 1076 inv_obj.cache_active = True
1077 1077 Session.add(inv_obj)
1078 1078 Session.commit()
1079 1079
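A sketch of the invalidate/set_valid cycle that Repository.scm_instance_cached relies on; the cache key is illustrative:

    inv = CacheInvalidation.invalidate('myrepo')  # None while the cache is still valid
    if inv is not None:
        # ...rebuild the cached data here, then mark the key as valid again
        CacheInvalidation.set_valid(inv.cache_key)
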
1080 1080
1081 1081 class ChangesetComment(Base, BaseModel):
1082 1082 __tablename__ = 'changeset_comments'
1083 1083 __table_args__ = ({'extend_existing': True},)
1084 1084 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
1085 1085 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
1086 1086 revision = Column('revision', String(40), nullable=False)
1087 1087 line_no = Column('line_no', Unicode(10), nullable=True)
1088 1088 f_path = Column('f_path', Unicode(1000), nullable=True)
1089 1089 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
1090 1090 text = Column('text', Unicode(25000), nullable=False)
1091 1091 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
1092 1092
1093 1093 author = relationship('User', lazy='joined')
1094 1094 repo = relationship('Repository')
1095 1095
1096 1096 @classmethod
1097 1097 def get_users(cls, revision):
1098 1098 """
1099 1099 Returns users associated with this ChangesetComment, i.e. those
1100 1100 who actually commented
1101 1101
1102 1102 :param cls:
1103 1103 :param revision:
1104 1104 """
1105 1105 return Session.query(User)\
1106 1106 .filter(cls.revision == revision)\
1107 1107 .join(ChangesetComment.author).all()
1108 1108
1109 1109
1110 1110 class Notification(Base, BaseModel):
1111 1111 __tablename__ = 'notifications'
1112 1112 __table_args__ = ({'extend_existing': True},)
1113 1113
1114 1114 TYPE_CHANGESET_COMMENT = u'cs_comment'
1115 1115 TYPE_MESSAGE = u'message'
1116 1116 TYPE_MENTION = u'mention'
1117 1117 TYPE_REGISTRATION = u'registration'
1118 1118
1119 1119 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
1120 1120 subject = Column('subject', Unicode(512), nullable=True)
1121 1121 body = Column('body', Unicode(50000), nullable=True)
1122 1122 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
1123 1123 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1124 1124 type_ = Column('type', Unicode(256))
1125 1125
1126 1126 created_by_user = relationship('User')
1127 1127 notifications_to_users = relationship('UserNotification', lazy='joined',
1128 1128 cascade="all, delete, delete-orphan")
1129 1129
1130 1130 @property
1131 1131 def recipients(self):
1132 1132 return [x.user for x in UserNotification.query()\
1133 1133 .filter(UserNotification.notification == self).all()]
1134 1134
1135 1135 @classmethod
1136 1136 def create(cls, created_by, subject, body, recipients, type_=None):
1137 1137 if type_ is None:
1138 1138 type_ = Notification.TYPE_MESSAGE
1139 1139
1140 1140 notification = cls()
1141 1141 notification.created_by_user = created_by
1142 1142 notification.subject = subject
1143 1143 notification.body = body
1144 1144 notification.type_ = type_
1145 1145 notification.created_on = datetime.datetime.now()
1146 1146
1147 1147 for u in recipients:
1148 1148 assoc = UserNotification()
1149 1149 assoc.notification = notification
1150 1150 u.notifications.append(assoc)
1151 1151 Session.add(notification)
1152 1152 return notification
1153 1153
1154 1154 @property
1155 1155 def description(self):
1156 1156 from rhodecode.model.notification import NotificationModel
1157 1157 return NotificationModel().make_description(self)
1158 1158
1159 1159
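# Illustrative usage sketch (not part of the original commit): creating a
# comment notification for several recipients. `admin_user` and `recipients`
# are hypothetical, previously loaded User instances.
notif = Notification.create(created_by=admin_user,
                            subject=u'New comment on changeset',
                            body=u'There is a new inline comment to review.',
                            recipients=recipients,
                            type_=Notification.TYPE_CHANGESET_COMMENT)
# create() only adds the objects to the session; committing is left to the caller
Session.commit()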
1160 1160 class UserNotification(Base, BaseModel):
1161 1161 __tablename__ = 'user_to_notification'
1162 1162 __table_args__ = (
1163 1163 UniqueConstraint('user_id', 'notification_id'),
1164 1164 {'extend_existing': True}
1165 1165 )
1166 1166 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
1167 1167 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
1168 1168 read = Column('read', Boolean, default=False)
1169 1169 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
1170 1170
1171 1171 user = relationship('User', lazy="joined")
1172 1172 notification = relationship('Notification', lazy="joined",
1173 1173 order_by=lambda: Notification.created_on.desc(),)
1174 1174
1175 1175 def mark_as_read(self):
1176 1176 self.read = True
1177 1177 Session.add(self)
1178 1178
1179 1179
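# Illustrative usage sketch (not part of the original commit): marking one
# user's copy of a notification as read. `user` and `notif` are hypothetical,
# previously loaded instances; the filter mirrors the table's composite key.
assoc = UserNotification.query()\
    .filter(UserNotification.user_id == user.user_id)\
    .filter(UserNotification.notification_id == notif.notification_id)\
    .scalar()
if assoc:
    assoc.mark_as_read()  # sets read = True and re-adds the row to the session
    Session.commit()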
1180 1180 class DbMigrateVersion(Base, BaseModel):
1181 1181 __tablename__ = 'db_migrate_version'
1182 1182 __table_args__ = {'extend_existing': True}
1183 1183 repository_id = Column('repository_id', String(250), primary_key=True)
1184 1184 repository_path = Column('repository_path', Text)
1185 1185 version = Column('version', Integer)
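# Illustrative note (not part of the original commit): this table appears to be
# the bookkeeping table used by sqlalchemy-migrate (its columns match the usual
# migrate_version schema), so the current schema version could be read roughly
# like this; the single-row assumption is not stated in the commit.
ver = DbMigrateVersion.query().first()
print(ver.version if ver else 'no migration record yet')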
(5 more modified files in this commit: diff content truncated because it was too big to display)