scm: change ._get_content() to .raw_bytes attribute to file nodes to...
dan -
r501:274fedc6 default
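The change below swaps a private helper for a public attribute when file contents are collected for archives. A minimal sketch of what that means for calling code, assuming `commit` is a `BaseCommit` instance obtained from a concrete backend (the variable names and the 'README.rst' path are illustrative only):

    file_node = commit.get_node('README.rst')
    # before this changeset the archive loop used the private method:
    #     content = file_node._get_content()
    # after it, the undecoded bytes come from the public attribute:
    content = file_node.raw_bytes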
@@ -1,1502 +1,1502 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24
24
25 import collections
25 import collections
26 import datetime
26 import datetime
27 import itertools
27 import itertools
28 import logging
28 import logging
29 import os
29 import os
30 import time
30 import time
31 import warnings
31 import warnings
32
32
33 from zope.cachedescriptors.property import Lazy as LazyProperty
33 from zope.cachedescriptors.property import Lazy as LazyProperty
34
34
35 from rhodecode.lib.utils2 import safe_str, safe_unicode
35 from rhodecode.lib.utils2 import safe_str, safe_unicode
36 from rhodecode.lib.vcs import connection
36 from rhodecode.lib.vcs import connection
37 from rhodecode.lib.vcs.utils import author_name, author_email
37 from rhodecode.lib.vcs.utils import author_name, author_email
38 from rhodecode.lib.vcs.conf import settings
38 from rhodecode.lib.vcs.conf import settings
39 from rhodecode.lib.vcs.exceptions import (
39 from rhodecode.lib.vcs.exceptions import (
40 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
40 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
41 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
41 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
42 NodeDoesNotExistError, NodeNotChangedError, VCSError,
42 NodeDoesNotExistError, NodeNotChangedError, VCSError,
43 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
43 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
44 RepositoryError)
44 RepositoryError)
45
45
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 FILEMODE_DEFAULT = 0100644
50 FILEMODE_DEFAULT = 0100644
51 FILEMODE_EXECUTABLE = 0100755
51 FILEMODE_EXECUTABLE = 0100755
52
52
53 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
53 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
54 MergeResponse = collections.namedtuple(
54 MergeResponse = collections.namedtuple(
55 'MergeResponse',
55 'MergeResponse',
56 ('possible', 'executed', 'merge_commit_id', 'failure_reason'))
56 ('possible', 'executed', 'merge_commit_id', 'failure_reason'))
57
57
58
58
59 class MergeFailureReason(object):
59 class MergeFailureReason(object):
60 """
60 """
61 Enumeration with all the reasons why the server side merge could fail.
61 Enumeration with all the reasons why the server side merge could fail.
62
62
63 DO NOT change the number of the reasons, as they may be stored in the
63 DO NOT change the number of the reasons, as they may be stored in the
64 database.
64 database.
65
65
66 Changing the name of a reason is acceptable and encouraged to deprecate old
66 Changing the name of a reason is acceptable and encouraged to deprecate old
67 reasons.
67 reasons.
68 """
68 """
69
69
70 # Everything went well.
70 # Everything went well.
71 NONE = 0
71 NONE = 0
72
72
73 # An unexpected exception was raised. Check the logs for more details.
73 # An unexpected exception was raised. Check the logs for more details.
74 UNKNOWN = 1
74 UNKNOWN = 1
75
75
76 # The merge was not successful, there are conflicts.
76 # The merge was not successful, there are conflicts.
77 MERGE_FAILED = 2
77 MERGE_FAILED = 2
78
78
79 # The merge succeeded but we could not push it to the target repository.
79 # The merge succeeded but we could not push it to the target repository.
80 PUSH_FAILED = 3
80 PUSH_FAILED = 3
81
81
82 # The specified target is not a head in the target repository.
82 # The specified target is not a head in the target repository.
83 TARGET_IS_NOT_HEAD = 4
83 TARGET_IS_NOT_HEAD = 4
84
84
85 # The source repository contains more branches than the target. Pushing
85 # The source repository contains more branches than the target. Pushing
86 # the merge will create additional branches in the target.
86 # the merge will create additional branches in the target.
87 HG_SOURCE_HAS_MORE_BRANCHES = 5
87 HG_SOURCE_HAS_MORE_BRANCHES = 5
88
88
89 # The target reference has multiple heads, which does not allow us to correctly
89 # The target reference has multiple heads, which does not allow us to correctly
90 # identify the target location. This could only happen for mercurial
90 # identify the target location. This could only happen for mercurial
91 # branches.
91 # branches.
92 HG_TARGET_HAS_MULTIPLE_HEADS = 6
92 HG_TARGET_HAS_MULTIPLE_HEADS = 6
93
93
94 # The target repository is locked
94 # The target repository is locked
95 TARGET_IS_LOCKED = 7
95 TARGET_IS_LOCKED = 7
96
96
97 # A involved commit could not be found.
97 # A involved commit could not be found.
98 MISSING_COMMIT = 8
98 MISSING_COMMIT = 8
99
99
100
100
101 class BaseRepository(object):
101 class BaseRepository(object):
102 """
102 """
103 Base Repository for final backends
103 Base Repository for final backends
104
104
105 .. attribute:: DEFAULT_BRANCH_NAME
105 .. attribute:: DEFAULT_BRANCH_NAME
106
106
107 name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
107 name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
108
108
109 .. attribute:: commit_ids
109 .. attribute:: commit_ids
110
110
111 list of all available commit ids, in ascending order
111 list of all available commit ids, in ascending order
112
112
113 .. attribute:: path
113 .. attribute:: path
114
114
115 absolute path to the repository
115 absolute path to the repository
116
116
117 .. attribute:: bookmarks
117 .. attribute:: bookmarks
118
118
119 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
119 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
120 there are no bookmarks or the backend implementation does not support
120 there are no bookmarks or the backend implementation does not support
121 bookmarks.
121 bookmarks.
122
122
123 .. attribute:: tags
123 .. attribute:: tags
124
124
125 Mapping from name to :term:`Commit ID` of the tag.
125 Mapping from name to :term:`Commit ID` of the tag.
126
126
127 """
127 """
128
128
129 DEFAULT_BRANCH_NAME = None
129 DEFAULT_BRANCH_NAME = None
130 DEFAULT_CONTACT = u"Unknown"
130 DEFAULT_CONTACT = u"Unknown"
131 DEFAULT_DESCRIPTION = u"unknown"
131 DEFAULT_DESCRIPTION = u"unknown"
132 EMPTY_COMMIT_ID = '0' * 40
132 EMPTY_COMMIT_ID = '0' * 40
133
133
134 path = None
134 path = None
135
135
136 def __init__(self, repo_path, config=None, create=False, **kwargs):
136 def __init__(self, repo_path, config=None, create=False, **kwargs):
137 """
137 """
138 Initializes repository. Raises RepositoryError if repository could
138 Initializes repository. Raises RepositoryError if repository could
139 not be found at the given ``repo_path`` or directory at ``repo_path``
139 not be found at the given ``repo_path`` or directory at ``repo_path``
140 exists and ``create`` is set to True.
140 exists and ``create`` is set to True.
141
141
142 :param repo_path: local path of the repository
142 :param repo_path: local path of the repository
143 :param config: repository configuration
143 :param config: repository configuration
144 :param create=False: if set to True, would try to create repository.
144 :param create=False: if set to True, would try to create repository.
145 :param src_url=None: if set, should be proper url from which repository
145 :param src_url=None: if set, should be proper url from which repository
146 would be cloned; requires ``create`` parameter to be set to True -
146 would be cloned; requires ``create`` parameter to be set to True -
147 raises RepositoryError if src_url is set and create evaluates to
147 raises RepositoryError if src_url is set and create evaluates to
148 False
148 False
149 """
149 """
150 raise NotImplementedError
150 raise NotImplementedError
151
151
152 def __repr__(self):
152 def __repr__(self):
153 return '<%s at %s>' % (self.__class__.__name__, self.path)
153 return '<%s at %s>' % (self.__class__.__name__, self.path)
154
154
155 def __len__(self):
155 def __len__(self):
156 return self.count()
156 return self.count()
157
157
158 def __eq__(self, other):
158 def __eq__(self, other):
159 same_instance = isinstance(other, self.__class__)
159 same_instance = isinstance(other, self.__class__)
160 return same_instance and other.path == self.path
160 return same_instance and other.path == self.path
161
161
162 def __ne__(self, other):
162 def __ne__(self, other):
163 return not self.__eq__(other)
163 return not self.__eq__(other)
164
164
165 @LazyProperty
165 @LazyProperty
166 def EMPTY_COMMIT(self):
166 def EMPTY_COMMIT(self):
167 return EmptyCommit(self.EMPTY_COMMIT_ID)
167 return EmptyCommit(self.EMPTY_COMMIT_ID)
168
168
169 @LazyProperty
169 @LazyProperty
170 def alias(self):
170 def alias(self):
171 for k, v in settings.BACKENDS.items():
171 for k, v in settings.BACKENDS.items():
172 if v.split('.')[-1] == str(self.__class__.__name__):
172 if v.split('.')[-1] == str(self.__class__.__name__):
173 return k
173 return k
174
174
175 @LazyProperty
175 @LazyProperty
176 def name(self):
176 def name(self):
177 return safe_unicode(os.path.basename(self.path))
177 return safe_unicode(os.path.basename(self.path))
178
178
179 @LazyProperty
179 @LazyProperty
180 def description(self):
180 def description(self):
181 raise NotImplementedError
181 raise NotImplementedError
182
182
183 def refs(self):
183 def refs(self):
184 """
184 """
185 returns a `dict` with branches, bookmarks, tags, and closed_branches
185 returns a `dict` with branches, bookmarks, tags, and closed_branches
186 for this repository
186 for this repository
187 """
187 """
188 raise NotImplementedError
188 raise NotImplementedError
189
189
190 @LazyProperty
190 @LazyProperty
191 def branches(self):
191 def branches(self):
192 """
192 """
193 A `dict` which maps branch names to commit ids.
193 A `dict` which maps branch names to commit ids.
194 """
194 """
195 raise NotImplementedError
195 raise NotImplementedError
196
196
197 @LazyProperty
197 @LazyProperty
198 def size(self):
198 def size(self):
199 """
199 """
200 Returns combined size in bytes for all repository files
200 Returns combined size in bytes for all repository files
201 """
201 """
202 tip = self.get_commit()
202 tip = self.get_commit()
203 return tip.size
203 return tip.size
204
204
205 def size_at_commit(self, commit_id):
205 def size_at_commit(self, commit_id):
206 commit = self.get_commit(commit_id)
206 commit = self.get_commit(commit_id)
207 return commit.size
207 return commit.size
208
208
209 def is_empty(self):
209 def is_empty(self):
210 return not bool(self.commit_ids)
210 return not bool(self.commit_ids)
211
211
212 @staticmethod
212 @staticmethod
213 def check_url(url, config):
213 def check_url(url, config):
214 """
214 """
215 Function will check the given url and try to verify if it's a valid
215 Function will check the given url and try to verify if it's a valid
216 link.
216 link.
217 """
217 """
218 raise NotImplementedError
218 raise NotImplementedError
219
219
220 @staticmethod
220 @staticmethod
221 def is_valid_repository(path):
221 def is_valid_repository(path):
222 """
222 """
223 Check if given `path` contains a valid repository of this backend
223 Check if given `path` contains a valid repository of this backend
224 """
224 """
225 raise NotImplementedError
225 raise NotImplementedError
226
226
227 # ==========================================================================
227 # ==========================================================================
228 # COMMITS
228 # COMMITS
229 # ==========================================================================
229 # ==========================================================================
230
230
231 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
231 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
232 """
232 """
233 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
233 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
234 are both None, most recent commit is returned.
234 are both None, most recent commit is returned.
235
235
236 :param pre_load: Optional. List of commit attributes to load.
236 :param pre_load: Optional. List of commit attributes to load.
237
237
238 :raises ``EmptyRepositoryError``: if there are no commits
238 :raises ``EmptyRepositoryError``: if there are no commits
239 """
239 """
240 raise NotImplementedError
240 raise NotImplementedError
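# [Editor's illustration, not part of this changeset] Typical calls to
# get_commit() on a concrete backend; `repo` and `some_id` are assumed:
#
#     tip = repo.get_commit()                      # most recent commit
#     by_id = repo.get_commit(commit_id=some_id)   # full commit id string
#     first = repo.get_commit(commit_idx=0,
#                             pre_load=['author', 'date', 'message'])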
241
241
242 def __iter__(self):
242 def __iter__(self):
243 for commit_id in self.commit_ids:
243 for commit_id in self.commit_ids:
244 yield self.get_commit(commit_id=commit_id)
244 yield self.get_commit(commit_id=commit_id)
245
245
246 def get_commits(
246 def get_commits(
247 self, start_id=None, end_id=None, start_date=None, end_date=None,
247 self, start_id=None, end_id=None, start_date=None, end_date=None,
248 branch_name=None, pre_load=None):
248 branch_name=None, pre_load=None):
249 """
249 """
250 Returns iterator of `BaseCommit` objects from start to end
250 Returns iterator of `BaseCommit` objects from start to end
251 not inclusive. This should behave just like a list, i.e. end is not
251 not inclusive. This should behave just like a list, i.e. end is not
252 inclusive.
252 inclusive.
253
253
254 :param start_id: None or str, must be a valid commit id
254 :param start_id: None or str, must be a valid commit id
255 :param end_id: None or str, must be a valid commit id
255 :param end_id: None or str, must be a valid commit id
256 :param start_date:
256 :param start_date:
257 :param end_date:
257 :param end_date:
258 :param branch_name:
258 :param branch_name:
259 :param pre_load:
259 :param pre_load:
260 """
260 """
261 raise NotImplementedError
261 raise NotImplementedError
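# [Editor's illustration, not part of this changeset] A hedged sketch of
# iterating commits; `repo`, the date and the pre_load list are assumptions:
#
#     import datetime
#     commits = repo.get_commits(
#         start_date=datetime.datetime(2016, 1, 1),
#         branch_name=repo.DEFAULT_BRANCH_NAME,
#         pre_load=['author', 'message'])
#     messages = [c.message for c in commits]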
262
262
263 def __getitem__(self, key):
263 def __getitem__(self, key):
264 """
264 """
265 Allows index based access to the commit objects of this repository.
265 Allows index based access to the commit objects of this repository.
266 """
266 """
267 pre_load = ["author", "branch", "date", "message", "parents"]
267 pre_load = ["author", "branch", "date", "message", "parents"]
268 if isinstance(key, slice):
268 if isinstance(key, slice):
269 return self._get_range(key, pre_load)
269 return self._get_range(key, pre_load)
270 return self.get_commit(commit_idx=key, pre_load=pre_load)
270 return self.get_commit(commit_idx=key, pre_load=pre_load)
271
271
272 def _get_range(self, slice_obj, pre_load):
272 def _get_range(self, slice_obj, pre_load):
273 for commit_id in self.commit_ids.__getitem__(slice_obj):
273 for commit_id in self.commit_ids.__getitem__(slice_obj):
274 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
274 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
275
275
276 def count(self):
276 def count(self):
277 return len(self.commit_ids)
277 return len(self.commit_ids)
278
278
279 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
279 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
280 """
280 """
281 Creates and returns a tag for the given ``commit_id``.
281 Creates and returns a tag for the given ``commit_id``.
282
282
283 :param name: name for new tag
283 :param name: name for new tag
284 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
284 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
285 :param commit_id: commit id for which new tag would be created
285 :param commit_id: commit id for which new tag would be created
286 :param message: message of the tag's commit
286 :param message: message of the tag's commit
287 :param date: date of tag's commit
287 :param date: date of tag's commit
288
288
289 :raises TagAlreadyExistError: if tag with same name already exists
289 :raises TagAlreadyExistError: if tag with same name already exists
290 """
290 """
291 raise NotImplementedError
291 raise NotImplementedError
292
292
293 def remove_tag(self, name, user, message=None, date=None):
293 def remove_tag(self, name, user, message=None, date=None):
294 """
294 """
295 Removes tag with the given ``name``.
295 Removes tag with the given ``name``.
296
296
297 :param name: name of the tag to be removed
297 :param name: name of the tag to be removed
298 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
298 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
299 :param message: message of the tag's removal commit
299 :param message: message of the tag's removal commit
300 :param date: date of tag's removal commit
300 :param date: date of tag's removal commit
301
301
302 :raises TagDoesNotExistError: if tag with given name does not exist
302 :raises TagDoesNotExistError: if tag with given name does not exist
303 """
303 """
304 raise NotImplementedError
304 raise NotImplementedError
305
305
306 def get_diff(
306 def get_diff(
307 self, commit1, commit2, path=None, ignore_whitespace=False,
307 self, commit1, commit2, path=None, ignore_whitespace=False,
308 context=3, path1=None):
308 context=3, path1=None):
309 """
309 """
310 Returns (git like) *diff*, as plain text. Shows changes introduced by
310 Returns (git like) *diff*, as plain text. Shows changes introduced by
311 `commit2` since `commit1`.
311 `commit2` since `commit1`.
312
312
313 :param commit1: Entry point from which diff is shown. Can be
313 :param commit1: Entry point from which diff is shown. Can be
314 ``self.EMPTY_COMMIT`` - in this case, patch showing all
314 ``self.EMPTY_COMMIT`` - in this case, patch showing all
315 the changes since empty state of the repository until `commit2`
315 the changes since empty state of the repository until `commit2`
316 :param commit2: Until which commit changes should be shown.
316 :param commit2: Until which commit changes should be shown.
317 :param path: Can be set to a path of a file to create a diff of that
317 :param path: Can be set to a path of a file to create a diff of that
318 file. If `path1` is also set, this value is only associated to
318 file. If `path1` is also set, this value is only associated to
319 `commit2`.
319 `commit2`.
320 :param ignore_whitespace: If set to ``True``, would not show whitespace
320 :param ignore_whitespace: If set to ``True``, would not show whitespace
321 changes. Defaults to ``False``.
321 changes. Defaults to ``False``.
322 :param context: How many lines before/after changed lines should be
322 :param context: How many lines before/after changed lines should be
323 shown. Defaults to ``3``.
323 shown. Defaults to ``3``.
324 :param path1: Can be set to a path to associate with `commit1`. This
324 :param path1: Can be set to a path to associate with `commit1`. This
325 parameter works only for backends which support diff generation for
325 parameter works only for backends which support diff generation for
326 different paths. Other backends will raise a `ValueError` if `path1`
326 different paths. Other backends will raise a `ValueError` if `path1`
327 is set and has a different value than `path`.
327 is set and has a different value than `path`.
328 """
328 """
329 raise NotImplementedError
329 raise NotImplementedError
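# [Editor's illustration, not part of this changeset] Producing a diff text
# between two commits of the same repository; `repo` and the path are assumed:
#
#     old = repo.get_commit(commit_idx=0)
#     new = repo.get_commit()
#     diff_text = repo.get_diff(old, new, path='setup.py',
#                               ignore_whitespace=True, context=5)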
330
330
331 def strip(self, commit_id, branch=None):
331 def strip(self, commit_id, branch=None):
332 """
332 """
333 Strip given commit_id from the repository
333 Strip given commit_id from the repository
334 """
334 """
335 raise NotImplementedError
335 raise NotImplementedError
336
336
337 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
337 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
338 """
338 """
339 Return a latest common ancestor commit if one exists for this repo
339 Return a latest common ancestor commit if one exists for this repo
340 `commit_id1` vs `commit_id2` from `repo2`.
340 `commit_id1` vs `commit_id2` from `repo2`.
341
341
342 :param commit_id1: Commit id from this repository to use as a
342 :param commit_id1: Commit id from this repository to use as a
343 target for the comparison.
343 target for the comparison.
344 :param commit_id2: Source commit id to use for comparison.
344 :param commit_id2: Source commit id to use for comparison.
345 :param repo2: Source repository to use for comparison.
345 :param repo2: Source repository to use for comparison.
346 """
346 """
347 raise NotImplementedError
347 raise NotImplementedError
348
348
349 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
349 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
350 """
350 """
351 Compare this repository's revision `commit_id1` with `commit_id2`.
351 Compare this repository's revision `commit_id1` with `commit_id2`.
352
352
353 Returns a tuple(commits, ancestor) that would be merged from
353 Returns a tuple(commits, ancestor) that would be merged from
354 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
354 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
355 will be returned as ancestor.
355 will be returned as ancestor.
356
356
357 :param commit_id1: Commit id from this repository to use as a
357 :param commit_id1: Commit id from this repository to use as a
358 target for the comparison.
358 target for the comparison.
359 :param commit_id2: Source commit id to use for comparison.
359 :param commit_id2: Source commit id to use for comparison.
360 :param repo2: Source repository to use for comparison.
360 :param repo2: Source repository to use for comparison.
361 :param merge: If set to ``True`` will do a merge compare which also
361 :param merge: If set to ``True`` will do a merge compare which also
362 returns the common ancestor.
362 returns the common ancestor.
363 :param pre_load: Optional. List of commit attributes to load.
363 :param pre_load: Optional. List of commit attributes to load.
364 """
364 """
365 raise NotImplementedError
365 raise NotImplementedError
366
366
367 def merge(self, target_ref, source_repo, source_ref, workspace_id,
367 def merge(self, target_ref, source_repo, source_ref, workspace_id,
368 user_name='', user_email='', message='', dry_run=False,
368 user_name='', user_email='', message='', dry_run=False,
369 use_rebase=False):
369 use_rebase=False):
370 """
370 """
371 Merge the revisions specified in `source_ref` from `source_repo`
371 Merge the revisions specified in `source_ref` from `source_repo`
372 onto the `target_ref` of this repository.
372 onto the `target_ref` of this repository.
373
373
374 `source_ref` and `target_ref` are named tuples with the following
374 `source_ref` and `target_ref` are named tuples with the following
375 fields `type`, `name` and `commit_id`.
375 fields `type`, `name` and `commit_id`.
376
376
377 Returns a MergeResponse named tuple with the following
377 Returns a MergeResponse named tuple with the following
378 fields: 'possible', 'executed', 'merge_commit_id',
378 fields: 'possible', 'executed', 'merge_commit_id',
379 'failure_reason'.
379 'failure_reason'.
380
380
381 :param target_ref: `target_ref` points to the commit on top of which
381 :param target_ref: `target_ref` points to the commit on top of which
382 the `source_ref` should be merged.
382 the `source_ref` should be merged.
383 :param source_repo: The repository that contains the commits to be
383 :param source_repo: The repository that contains the commits to be
384 merged.
384 merged.
385 :param source_ref: `source_ref` points to the topmost commit from
385 :param source_ref: `source_ref` points to the topmost commit from
386 the `source_repo` which should be merged.
386 the `source_repo` which should be merged.
387 :param workspace_id: `workspace_id` unique identifier.
387 :param workspace_id: `workspace_id` unique identifier.
388 :param user_name: Merge commit `user_name`.
388 :param user_name: Merge commit `user_name`.
389 :param user_email: Merge commit `user_email`.
389 :param user_email: Merge commit `user_email`.
390 :param message: Merge commit `message`.
390 :param message: Merge commit `message`.
391 :param dry_run: If `True` the merge will not take place.
391 :param dry_run: If `True` the merge will not take place.
392 :param use_rebase: If `True` commits from the source will be rebased
392 :param use_rebase: If `True` commits from the source will be rebased
393 on top of the target instead of being merged.
393 on top of the target instead of being merged.
394 """
394 """
395 if dry_run:
395 if dry_run:
396 message = message or 'sample_message'
396 message = message or 'sample_message'
397 user_email = user_email or 'user@email.com'
397 user_email = user_email or 'user@email.com'
398 user_name = user_name or 'user name'
398 user_name = user_name or 'user name'
399 else:
399 else:
400 if not user_name:
400 if not user_name:
401 raise ValueError('user_name cannot be empty')
401 raise ValueError('user_name cannot be empty')
402 if not user_email:
402 if not user_email:
403 raise ValueError('user_email cannot be empty')
403 raise ValueError('user_email cannot be empty')
404 if not message:
404 if not message:
405 raise ValueError('message cannot be empty')
405 raise ValueError('message cannot be empty')
406
406
407 shadow_repository_path = self._maybe_prepare_merge_workspace(
407 shadow_repository_path = self._maybe_prepare_merge_workspace(
408 workspace_id, target_ref)
408 workspace_id, target_ref)
409
409
410 try:
410 try:
411 return self._merge_repo(
411 return self._merge_repo(
412 shadow_repository_path, target_ref, source_repo,
412 shadow_repository_path, target_ref, source_repo,
413 source_ref, message, user_name, user_email, dry_run=dry_run,
413 source_ref, message, user_name, user_email, dry_run=dry_run,
414 use_rebase=use_rebase)
414 use_rebase=use_rebase)
415 except RepositoryError:
415 except RepositoryError:
416 log.exception(
416 log.exception(
417 'Unexpected failure when running merge, dry-run=%s',
417 'Unexpected failure when running merge, dry-run=%s',
418 dry_run)
418 dry_run)
419 return MergeResponse(
419 return MergeResponse(
420 False, False, None, MergeFailureReason.UNKNOWN)
420 False, False, None, MergeFailureReason.UNKNOWN)
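# [Editor's illustration, not part of this changeset] A dry-run merge built
# from the Reference/MergeResponse tuples defined above; the repositories,
# branch names, commit ids and workspace id are assumptions:
#
#     target_ref = Reference('branch', 'default', target_commit_id)
#     source_ref = Reference('branch', 'feature', source_commit_id)
#     resp = target_repo.merge(target_ref, source_repo, source_ref,
#                              workspace_id='pr-1', dry_run=True)
#     if not resp.possible:
#         log.warning('merge blocked, reason: %s', resp.failure_reason)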
421
421
422 def _merge_repo(self, shadow_repository_path, target_ref,
422 def _merge_repo(self, shadow_repository_path, target_ref,
423 source_repo, source_ref, merge_message,
423 source_repo, source_ref, merge_message,
424 merger_name, merger_email, dry_run=False):
424 merger_name, merger_email, dry_run=False):
425 """Internal implementation of merge."""
425 """Internal implementation of merge."""
426 raise NotImplementedError
426 raise NotImplementedError
427
427
428 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
428 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
429 """
429 """
430 Create the merge workspace.
430 Create the merge workspace.
431
431
432 :param workspace_id: `workspace_id` unique identifier.
432 :param workspace_id: `workspace_id` unique identifier.
433 """
433 """
434 raise NotImplementedError
434 raise NotImplementedError
435
435
436 def cleanup_merge_workspace(self, workspace_id):
436 def cleanup_merge_workspace(self, workspace_id):
437 """
437 """
438 Remove merge workspace.
438 Remove merge workspace.
439
439
440 This function MUST not fail in case there is no workspace associated to
440 This function MUST not fail in case there is no workspace associated to
441 the given `workspace_id`.
441 the given `workspace_id`.
442
442
443 :param workspace_id: `workspace_id` unique identifier.
443 :param workspace_id: `workspace_id` unique identifier.
444 """
444 """
445 raise NotImplementedError
445 raise NotImplementedError
446
446
447 # ========== #
447 # ========== #
448 # COMMIT API #
448 # COMMIT API #
449 # ========== #
449 # ========== #
450
450
451 @LazyProperty
451 @LazyProperty
452 def in_memory_commit(self):
452 def in_memory_commit(self):
453 """
453 """
454 Returns :class:`InMemoryCommit` object for this repository.
454 Returns :class:`InMemoryCommit` object for this repository.
455 """
455 """
456 raise NotImplementedError
456 raise NotImplementedError
457
457
458 # ======================== #
458 # ======================== #
459 # UTILITIES FOR SUBCLASSES #
459 # UTILITIES FOR SUBCLASSES #
460 # ======================== #
460 # ======================== #
461
461
462 def _validate_diff_commits(self, commit1, commit2):
462 def _validate_diff_commits(self, commit1, commit2):
463 """
463 """
464 Validates that the given commits are related to this repository.
464 Validates that the given commits are related to this repository.
465
465
466 Intended as a utility for subclasses to have a consistent validation
466 Intended as a utility for subclasses to have a consistent validation
467 of input parameters in methods like :meth:`get_diff`.
467 of input parameters in methods like :meth:`get_diff`.
468 """
468 """
469 self._validate_commit(commit1)
469 self._validate_commit(commit1)
470 self._validate_commit(commit2)
470 self._validate_commit(commit2)
471 if (isinstance(commit1, EmptyCommit) and
471 if (isinstance(commit1, EmptyCommit) and
472 isinstance(commit2, EmptyCommit)):
472 isinstance(commit2, EmptyCommit)):
473 raise ValueError("Cannot compare two empty commits")
473 raise ValueError("Cannot compare two empty commits")
474
474
475 def _validate_commit(self, commit):
475 def _validate_commit(self, commit):
476 if not isinstance(commit, BaseCommit):
476 if not isinstance(commit, BaseCommit):
477 raise TypeError(
477 raise TypeError(
478 "%s is not of type BaseCommit" % repr(commit))
478 "%s is not of type BaseCommit" % repr(commit))
479 if commit.repository != self and not isinstance(commit, EmptyCommit):
479 if commit.repository != self and not isinstance(commit, EmptyCommit):
480 raise ValueError(
480 raise ValueError(
481 "Commit %s must be a valid commit from this repository %s, "
481 "Commit %s must be a valid commit from this repository %s, "
482 "related to this repository instead %s." %
482 "related to this repository instead %s." %
483 (commit, self, commit.repository))
483 (commit, self, commit.repository))
484
484
485 def _validate_commit_id(self, commit_id):
485 def _validate_commit_id(self, commit_id):
486 if not isinstance(commit_id, basestring):
486 if not isinstance(commit_id, basestring):
487 raise TypeError("commit_id must be a string value")
487 raise TypeError("commit_id must be a string value")
488
488
489 def _validate_commit_idx(self, commit_idx):
489 def _validate_commit_idx(self, commit_idx):
490 if not isinstance(commit_idx, (int, long)):
490 if not isinstance(commit_idx, (int, long)):
491 raise TypeError("commit_idx must be a numeric value")
491 raise TypeError("commit_idx must be a numeric value")
492
492
493 def _validate_branch_name(self, branch_name):
493 def _validate_branch_name(self, branch_name):
494 if branch_name and branch_name not in self.branches_all:
494 if branch_name and branch_name not in self.branches_all:
495 msg = ("Branch %s not found in %s" % (branch_name, self))
495 msg = ("Branch %s not found in %s" % (branch_name, self))
496 raise BranchDoesNotExistError(msg)
496 raise BranchDoesNotExistError(msg)
497
497
498 #
498 #
499 # Supporting deprecated API parts
499 # Supporting deprecated API parts
500 # TODO: johbo: consider to move this into a mixin
500 # TODO: johbo: consider to move this into a mixin
501 #
501 #
502
502
503 @property
503 @property
504 def EMPTY_CHANGESET(self):
504 def EMPTY_CHANGESET(self):
505 warnings.warn(
505 warnings.warn(
506 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
506 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
507 return self.EMPTY_COMMIT_ID
507 return self.EMPTY_COMMIT_ID
508
508
509 @property
509 @property
510 def revisions(self):
510 def revisions(self):
511 warnings.warn("Use commits attribute instead", DeprecationWarning)
511 warnings.warn("Use commits attribute instead", DeprecationWarning)
512 return self.commit_ids
512 return self.commit_ids
513
513
514 @revisions.setter
514 @revisions.setter
515 def revisions(self, value):
515 def revisions(self, value):
516 warnings.warn("Use commits attribute instead", DeprecationWarning)
516 warnings.warn("Use commits attribute instead", DeprecationWarning)
517 self.commit_ids = value
517 self.commit_ids = value
518
518
519 def get_changeset(self, revision=None, pre_load=None):
519 def get_changeset(self, revision=None, pre_load=None):
520 warnings.warn("Use get_commit instead", DeprecationWarning)
520 warnings.warn("Use get_commit instead", DeprecationWarning)
521 commit_id = None
521 commit_id = None
522 commit_idx = None
522 commit_idx = None
523 if isinstance(revision, basestring):
523 if isinstance(revision, basestring):
524 commit_id = revision
524 commit_id = revision
525 else:
525 else:
526 commit_idx = revision
526 commit_idx = revision
527 return self.get_commit(
527 return self.get_commit(
528 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
528 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
529
529
530 def get_changesets(
530 def get_changesets(
531 self, start=None, end=None, start_date=None, end_date=None,
531 self, start=None, end=None, start_date=None, end_date=None,
532 branch_name=None, pre_load=None):
532 branch_name=None, pre_load=None):
533 warnings.warn("Use get_commits instead", DeprecationWarning)
533 warnings.warn("Use get_commits instead", DeprecationWarning)
534 start_id = self._revision_to_commit(start)
534 start_id = self._revision_to_commit(start)
535 end_id = self._revision_to_commit(end)
535 end_id = self._revision_to_commit(end)
536 return self.get_commits(
536 return self.get_commits(
537 start_id=start_id, end_id=end_id, start_date=start_date,
537 start_id=start_id, end_id=end_id, start_date=start_date,
538 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
538 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
539
539
540 def _revision_to_commit(self, revision):
540 def _revision_to_commit(self, revision):
541 """
541 """
542 Translates a revision to a commit_id
542 Translates a revision to a commit_id
543
543
544 Helps to support the old changeset-based API which allows using
544 Helps to support the old changeset-based API which allows using
545 commit ids and commit indices interchangeably.
545 commit ids and commit indices interchangeably.
546 """
546 """
547 if revision is None:
547 if revision is None:
548 return revision
548 return revision
549
549
550 if isinstance(revision, basestring):
550 if isinstance(revision, basestring):
551 commit_id = revision
551 commit_id = revision
552 else:
552 else:
553 commit_id = self.commit_ids[revision]
553 commit_id = self.commit_ids[revision]
554 return commit_id
554 return commit_id
555
555
556 @property
556 @property
557 def in_memory_changeset(self):
557 def in_memory_changeset(self):
558 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
558 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
559 return self.in_memory_commit
559 return self.in_memory_commit
560
560
561
561
562 class BaseCommit(object):
562 class BaseCommit(object):
563 """
563 """
564 Each backend should implement its commit representation.
564 Each backend should implement its commit representation.
565
565
566 **Attributes**
566 **Attributes**
567
567
568 ``repository``
568 ``repository``
569 repository object within which commit exists
569 repository object within which commit exists
570
570
571 ``id``
571 ``id``
572 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
572 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
573 just ``tip``.
573 just ``tip``.
574
574
575 ``raw_id``
575 ``raw_id``
576 raw commit representation (i.e. full 40 length sha for git
576 raw commit representation (i.e. full 40 length sha for git
577 backend)
577 backend)
578
578
579 ``short_id``
579 ``short_id``
580 shortened (if applicable) version of ``raw_id``; it would be a simple
580 shortened (if applicable) version of ``raw_id``; it would be a simple
581 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
581 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
582 as ``raw_id`` for subversion
582 as ``raw_id`` for subversion
583
583
584 ``idx``
584 ``idx``
585 commit index
585 commit index
586
586
587 ``files``
587 ``files``
588 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
588 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
589
589
590 ``dirs``
590 ``dirs``
591 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
591 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
592
592
593 ``nodes``
593 ``nodes``
594 combined list of ``Node`` objects
594 combined list of ``Node`` objects
595
595
596 ``author``
596 ``author``
597 author of the commit, as unicode
597 author of the commit, as unicode
598
598
599 ``message``
599 ``message``
600 message of the commit, as unicode
600 message of the commit, as unicode
601
601
602 ``parents``
602 ``parents``
603 list of parent commits
603 list of parent commits
604
604
605 """
605 """
606
606
607 branch = None
607 branch = None
608 """
608 """
609 Depending on the backend this should be set to the branch name of the
609 Depending on the backend this should be set to the branch name of the
610 commit. Backends not supporting branches on commits should leave this
610 commit. Backends not supporting branches on commits should leave this
611 value as ``None``.
611 value as ``None``.
612 """
612 """
613
613
614 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
614 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
615 """
615 """
616 This template is used to generate a default prefix for repository archives
616 This template is used to generate a default prefix for repository archives
617 if no prefix has been specified.
617 if no prefix has been specified.
618 """
618 """
619
619
620 def __str__(self):
620 def __str__(self):
621 return '<%s at %s:%s>' % (
621 return '<%s at %s:%s>' % (
622 self.__class__.__name__, self.idx, self.short_id)
622 self.__class__.__name__, self.idx, self.short_id)
623
623
624 def __repr__(self):
624 def __repr__(self):
625 return self.__str__()
625 return self.__str__()
626
626
627 def __unicode__(self):
627 def __unicode__(self):
628 return u'%s:%s' % (self.idx, self.short_id)
628 return u'%s:%s' % (self.idx, self.short_id)
629
629
630 def __eq__(self, other):
630 def __eq__(self, other):
631 return self.raw_id == other.raw_id
631 return self.raw_id == other.raw_id
632
632
633 def __json__(self):
633 def __json__(self):
634 parents = []
634 parents = []
635 try:
635 try:
636 for parent in self.parents:
636 for parent in self.parents:
637 parents.append({'raw_id': parent.raw_id})
637 parents.append({'raw_id': parent.raw_id})
638 except NotImplementedError:
638 except NotImplementedError:
639 # empty commit doesn't have parents implemented
639 # empty commit doesn't have parents implemented
640 pass
640 pass
641
641
642 return {
642 return {
643 'short_id': self.short_id,
643 'short_id': self.short_id,
644 'raw_id': self.raw_id,
644 'raw_id': self.raw_id,
645 'revision': self.idx,
645 'revision': self.idx,
646 'message': self.message,
646 'message': self.message,
647 'date': self.date,
647 'date': self.date,
648 'author': self.author,
648 'author': self.author,
649 'parents': parents,
649 'parents': parents,
650 'branch': self.branch
650 'branch': self.branch
651 }
651 }
652
652
653 @LazyProperty
653 @LazyProperty
654 def last(self):
654 def last(self):
655 """
655 """
656 ``True`` if this is the last commit in the repository, ``False``
656 ``True`` if this is the last commit in the repository, ``False``
657 otherwise; trying to access this attribute while there are no
657 otherwise; trying to access this attribute while there are no
658 commits would raise `EmptyRepositoryError`
658 commits would raise `EmptyRepositoryError`
659 """
659 """
660 if self.repository is None:
660 if self.repository is None:
661 raise CommitError("Cannot check if it's most recent commit")
661 raise CommitError("Cannot check if it's most recent commit")
662 return self.raw_id == self.repository.commit_ids[-1]
662 return self.raw_id == self.repository.commit_ids[-1]
663
663
664 @LazyProperty
664 @LazyProperty
665 def parents(self):
665 def parents(self):
666 """
666 """
667 Returns list of parent commits.
667 Returns list of parent commits.
668 """
668 """
669 raise NotImplementedError
669 raise NotImplementedError
670
670
671 @property
671 @property
672 def merge(self):
672 def merge(self):
673 """
673 """
674 Returns boolean if commit is a merge.
674 Returns boolean if commit is a merge.
675 """
675 """
676 return len(self.parents) > 1
676 return len(self.parents) > 1
677
677
678 @LazyProperty
678 @LazyProperty
679 def children(self):
679 def children(self):
680 """
680 """
681 Returns list of child commits.
681 Returns list of child commits.
682 """
682 """
683 raise NotImplementedError
683 raise NotImplementedError
684
684
685 @LazyProperty
685 @LazyProperty
686 def id(self):
686 def id(self):
687 """
687 """
688 Returns string identifying this commit.
688 Returns string identifying this commit.
689 """
689 """
690 raise NotImplementedError
690 raise NotImplementedError
691
691
692 @LazyProperty
692 @LazyProperty
693 def raw_id(self):
693 def raw_id(self):
694 """
694 """
695 Returns raw string identifying this commit.
695 Returns raw string identifying this commit.
696 """
696 """
697 raise NotImplementedError
697 raise NotImplementedError
698
698
699 @LazyProperty
699 @LazyProperty
700 def short_id(self):
700 def short_id(self):
701 """
701 """
702 Returns shortened version of ``raw_id`` attribute, as string,
702 Returns shortened version of ``raw_id`` attribute, as string,
703 identifying this commit, useful for presentation to users.
703 identifying this commit, useful for presentation to users.
704 """
704 """
705 raise NotImplementedError
705 raise NotImplementedError
706
706
707 @LazyProperty
707 @LazyProperty
708 def idx(self):
708 def idx(self):
709 """
709 """
710 Returns integer identifying this commit.
710 Returns integer identifying this commit.
711 """
711 """
712 raise NotImplementedError
712 raise NotImplementedError
713
713
714 @LazyProperty
714 @LazyProperty
715 def committer(self):
715 def committer(self):
716 """
716 """
717 Returns committer for this commit
717 Returns committer for this commit
718 """
718 """
719 raise NotImplementedError
719 raise NotImplementedError
720
720
721 @LazyProperty
721 @LazyProperty
722 def committer_name(self):
722 def committer_name(self):
723 """
723 """
724 Returns committer name for this commit
724 Returns committer name for this commit
725 """
725 """
726
726
727 return author_name(self.committer)
727 return author_name(self.committer)
728
728
729 @LazyProperty
729 @LazyProperty
730 def committer_email(self):
730 def committer_email(self):
731 """
731 """
732 Returns committer email address for this commit
732 Returns committer email address for this commit
733 """
733 """
734
734
735 return author_email(self.committer)
735 return author_email(self.committer)
736
736
737 @LazyProperty
737 @LazyProperty
738 def author(self):
738 def author(self):
739 """
739 """
740 Returns author for this commit
740 Returns author for this commit
741 """
741 """
742
742
743 raise NotImplementedError
743 raise NotImplementedError
744
744
745 @LazyProperty
745 @LazyProperty
746 def author_name(self):
746 def author_name(self):
747 """
747 """
748 Returns author name for this commit
748 Returns author name for this commit
749 """
749 """
750
750
751 return author_name(self.author)
751 return author_name(self.author)
752
752
753 @LazyProperty
753 @LazyProperty
754 def author_email(self):
754 def author_email(self):
755 """
755 """
756 Returns author email address for this commit
756 Returns author email address for this commit
757 """
757 """
758
758
759 return author_email(self.author)
759 return author_email(self.author)
760
760
761 def get_file_mode(self, path):
761 def get_file_mode(self, path):
762 """
762 """
763 Returns stat mode of the file at `path`.
763 Returns stat mode of the file at `path`.
764 """
764 """
765 raise NotImplementedError
765 raise NotImplementedError
766
766
767 def is_link(self, path):
767 def is_link(self, path):
768 """
768 """
769 Returns ``True`` if given `path` is a symlink
769 Returns ``True`` if given `path` is a symlink
770 """
770 """
771 raise NotImplementedError
771 raise NotImplementedError
772
772
773 def get_file_content(self, path):
773 def get_file_content(self, path):
774 """
774 """
775 Returns content of the file at the given `path`.
775 Returns content of the file at the given `path`.
776 """
776 """
777 raise NotImplementedError
777 raise NotImplementedError
778
778
779 def get_file_size(self, path):
779 def get_file_size(self, path):
780 """
780 """
781 Returns size of the file at the given `path`.
781 Returns size of the file at the given `path`.
782 """
782 """
783 raise NotImplementedError
783 raise NotImplementedError
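# [Editor's illustration, not part of this changeset] Reading file metadata
# and content straight from a commit; `commit` and the path are assumptions:
#
#     mode = commit.get_file_mode('setup.py')     # e.g. FILEMODE_DEFAULT
#     size = commit.get_file_size('setup.py')     # size in bytes
#     data = commit.get_file_content('setup.py')  # file content
#     link = commit.is_link('setup.py')           # True for symlinks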
784
784
785 def get_file_commit(self, path, pre_load=None):
785 def get_file_commit(self, path, pre_load=None):
786 """
786 """
787 Returns last commit of the file at the given `path`.
787 Returns last commit of the file at the given `path`.
788
788
789 :param pre_load: Optional. List of commit attributes to load.
789 :param pre_load: Optional. List of commit attributes to load.
790 """
790 """
791 return self.get_file_history(path, limit=1, pre_load=pre_load)[0]
791 return self.get_file_history(path, limit=1, pre_load=pre_load)[0]
792
792
793 def get_file_history(self, path, limit=None, pre_load=None):
793 def get_file_history(self, path, limit=None, pre_load=None):
794 """
794 """
795 Returns history of file as reversed list of :class:`BaseCommit`
795 Returns history of file as reversed list of :class:`BaseCommit`
796 objects for which file at given `path` has been modified.
796 objects for which file at given `path` has been modified.
797
797
798 :param limit: Optional. Allows to limit the size of the returned
798 :param limit: Optional. Allows to limit the size of the returned
799 history. This is intended as a hint to the underlying backend, so
799 history. This is intended as a hint to the underlying backend, so
800 that it can apply optimizations depending on the limit.
800 that it can apply optimizations depending on the limit.
801 :param pre_load: Optional. List of commit attributes to load.
801 :param pre_load: Optional. List of commit attributes to load.
802 """
802 """
803 raise NotImplementedError
803 raise NotImplementedError
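# [Editor's illustration, not part of this changeset] Commits that touched a
# file, newest first; `commit` and the path are assumptions:
#
#     history = commit.get_file_history('docs/index.rst', limit=5,
#                                       pre_load=['author', 'date'])
#     latest = history[0]   # same value get_file_commit() would return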
804
804
805 def get_file_annotate(self, path, pre_load=None):
805 def get_file_annotate(self, path, pre_load=None):
806 """
806 """
807 Returns a generator of four element tuples with
807 Returns a generator of four element tuples with
808 lineno, sha, commit lazy loader and line
808 lineno, sha, commit lazy loader and line
809
809
810 :param pre_load: Optional. List of commit attributes to load.
810 :param pre_load: Optional. List of commit attributes to load.
811 """
811 """
812 raise NotImplementedError
812 raise NotImplementedError
813
813
814 def get_nodes(self, path):
814 def get_nodes(self, path):
815 """
815 """
816 Returns combined ``DirNode`` and ``FileNode`` objects list representing
816 Returns combined ``DirNode`` and ``FileNode`` objects list representing
817 state of commit at the given ``path``.
817 state of commit at the given ``path``.
818
818
819 :raises ``CommitError``: if node at the given ``path`` is not
819 :raises ``CommitError``: if node at the given ``path`` is not
820 instance of ``DirNode``
820 instance of ``DirNode``
821 """
821 """
822 raise NotImplementedError
822 raise NotImplementedError
823
823
824 def get_node(self, path):
824 def get_node(self, path):
825 """
825 """
826 Returns ``Node`` object from the given ``path``.
826 Returns ``Node`` object from the given ``path``.
827
827
828 :raises ``NodeDoesNotExistError``: if there is no node at the given
828 :raises ``NodeDoesNotExistError``: if there is no node at the given
829 ``path``
829 ``path``
830 """
830 """
831 raise NotImplementedError
831 raise NotImplementedError
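# [Editor's illustration, not part of this changeset] Navigating the commit
# tree through nodes; the 'docs' paths are illustrative:
#
#     root = commit.get_node('')           # RootNode, same as commit.root
#     docs = commit.get_nodes('docs')      # children of the 'docs' DirNode
#     readme = commit.get_node('docs/README.rst')
#     raw = readme.raw_bytes               # attribute this changeset switches to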
832
832
833 def get_largefile_node(self, path):
833 def get_largefile_node(self, path):
834 """
834 """
835 Returns the path to largefile from Mercurial storage.
835 Returns the path to largefile from Mercurial storage.
836 """
836 """
837 raise NotImplementedError
837 raise NotImplementedError
838
838
839 def archive_repo(self, file_path, kind='tgz', subrepos=None,
839 def archive_repo(self, file_path, kind='tgz', subrepos=None,
840 prefix=None, write_metadata=False, mtime=None):
840 prefix=None, write_metadata=False, mtime=None):
841 """
841 """
842 Creates an archive containing the contents of the repository.
842 Creates an archive containing the contents of the repository.
843
843
844 :param file_path: path to the file in which to create the archive.
844 :param file_path: path to the file in which to create the archive.
845 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
845 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
846 :param prefix: name of root directory in archive.
846 :param prefix: name of root directory in archive.
847 Default is repository name and commit's short_id joined with dash:
847 Default is repository name and commit's short_id joined with dash:
848 ``"{repo_name}-{short_id}"``.
848 ``"{repo_name}-{short_id}"``.
849 :param write_metadata: write a metadata file into archive.
849 :param write_metadata: write a metadata file into archive.
850 :param mtime: custom modification time for archive creation, defaults
850 :param mtime: custom modification time for archive creation, defaults
851 to time.time() if not given.
851 to time.time() if not given.
852
852
853 :raise VCSError: If prefix has a problem.
853 :raise VCSError: If prefix has a problem.
854 """
854 """
855 allowed_kinds = settings.ARCHIVE_SPECS.keys()
855 allowed_kinds = settings.ARCHIVE_SPECS.keys()
856 if kind not in allowed_kinds:
856 if kind not in allowed_kinds:
857 raise ImproperArchiveTypeError(
857 raise ImproperArchiveTypeError(
858 'Archive kind (%s) not supported use one of %s' %
858 'Archive kind (%s) not supported use one of %s' %
859 (kind, allowed_kinds))
859 (kind, allowed_kinds))
860
860
861 prefix = self._validate_archive_prefix(prefix)
861 prefix = self._validate_archive_prefix(prefix)
862
862
863 mtime = mtime or time.time()
863 mtime = mtime or time.time()
864
864
865 file_info = []
865 file_info = []
866 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
866 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
867 for _r, _d, files in cur_rev.walk('/'):
867 for _r, _d, files in cur_rev.walk('/'):
868 for f in files:
868 for f in files:
869 f_path = os.path.join(prefix, f.path)
869 f_path = os.path.join(prefix, f.path)
870 file_info.append(
870 file_info.append(
871 - (f_path, f.mode, f.is_link(), f._get_content()))
871 + (f_path, f.mode, f.is_link(), f.raw_bytes))
872
872
873 if write_metadata:
873 if write_metadata:
874 metadata = [
874 metadata = [
875 ('repo_name', self.repository.name),
875 ('repo_name', self.repository.name),
876 ('rev', self.raw_id),
876 ('rev', self.raw_id),
877 ('create_time', mtime),
877 ('create_time', mtime),
878 ('branch', self.branch),
878 ('branch', self.branch),
879 ('tags', ','.join(self.tags)),
879 ('tags', ','.join(self.tags)),
880 ]
880 ]
881 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
881 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
882 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
882 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
883
883
884 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
884 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
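# [Editor's illustration, not part of this changeset] Creating an archive of
# this commit; the output path and prefix are assumptions:
#
#     commit.archive_repo('/tmp/repo.tar.gz', kind='tgz',
#                         prefix='myrepo-snapshot',
#                         write_metadata=True)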
885
885
886 def _validate_archive_prefix(self, prefix):
886 def _validate_archive_prefix(self, prefix):
887 if prefix is None:
887 if prefix is None:
888 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
888 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
889 repo_name=safe_str(self.repository.name),
889 repo_name=safe_str(self.repository.name),
890 short_id=self.short_id)
890 short_id=self.short_id)
891 elif not isinstance(prefix, str):
891 elif not isinstance(prefix, str):
892 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
892 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
893 elif prefix.startswith('/'):
893 elif prefix.startswith('/'):
894 raise VCSError("Prefix cannot start with leading slash")
894 raise VCSError("Prefix cannot start with leading slash")
895 elif prefix.strip() == '':
895 elif prefix.strip() == '':
896 raise VCSError("Prefix cannot be empty")
896 raise VCSError("Prefix cannot be empty")
897 return prefix
897 return prefix
898
898
899 @LazyProperty
899 @LazyProperty
900 def root(self):
900 def root(self):
901 """
901 """
902 Returns ``RootNode`` object for this commit.
902 Returns ``RootNode`` object for this commit.
903 """
903 """
904 return self.get_node('')
904 return self.get_node('')
905
905
906 def next(self, branch=None):
906 def next(self, branch=None):
907 """
907 """
908 Returns next commit from current, if branch is given it will return
908 Returns next commit from current, if branch is given it will return
909 next commit belonging to this branch
909 next commit belonging to this branch
910
910
911 :param branch: show commits within the given named branch
911 :param branch: show commits within the given named branch
912 """
912 """
913 indexes = xrange(self.idx + 1, self.repository.count())
913 indexes = xrange(self.idx + 1, self.repository.count())
914 return self._find_next(indexes, branch)
914 return self._find_next(indexes, branch)
915
915
916 def prev(self, branch=None):
916 def prev(self, branch=None):
917 """
917 """
918 Returns previous commit from current, if branch is given it will
918 Returns previous commit from current, if branch is given it will
919 return previous commit belonging to this branch
919 return previous commit belonging to this branch
920
920
921 :param branch: show commit within the given named branch
921 :param branch: show commit within the given named branch
922 """
922 """
923 indexes = xrange(self.idx - 1, -1, -1)
923 indexes = xrange(self.idx - 1, -1, -1)
924 return self._find_next(indexes, branch)
924 return self._find_next(indexes, branch)
925
925
926 def _find_next(self, indexes, branch=None):
926 def _find_next(self, indexes, branch=None):
927 if branch and self.branch != branch:
927 if branch and self.branch != branch:
928 raise VCSError('Branch option used on commit not belonging '
928 raise VCSError('Branch option used on commit not belonging '
929 'to that branch')
929 'to that branch')
930
930
931 for next_idx in indexes:
931 for next_idx in indexes:
932 commit = self.repository.get_commit(commit_idx=next_idx)
932 commit = self.repository.get_commit(commit_idx=next_idx)
933 if branch and branch != commit.branch:
933 if branch and branch != commit.branch:
934 continue
934 continue
935 return commit
935 return commit
936 raise CommitDoesNotExistError
936 raise CommitDoesNotExistError
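# Illustrative sketch (not part of the original module): walking history with
# ``next()``/``prev()``. It assumes ``repo`` is any backend repository object.
#
#     commit = repo.get_commit(commit_idx=0)
#     while True:
#         try:
#             commit = commit.next(branch=commit.branch)
#         except CommitDoesNotExistError:
#             break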
937
937
938 def diff(self, ignore_whitespace=True, context=3):
938 def diff(self, ignore_whitespace=True, context=3):
939 """
939 """
940 Returns a `Diff` object representing the change made by this commit.
940 Returns a `Diff` object representing the change made by this commit.
941 """
941 """
942 parent = (
942 parent = (
943 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
943 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
944 diff = self.repository.get_diff(
944 diff = self.repository.get_diff(
945 parent, self,
945 parent, self,
946 ignore_whitespace=ignore_whitespace,
946 ignore_whitespace=ignore_whitespace,
947 context=context)
947 context=context)
948 return diff
948 return diff
949
949
950 @LazyProperty
950 @LazyProperty
951 def added(self):
951 def added(self):
952 """
952 """
953 Returns list of added ``FileNode`` objects.
953 Returns list of added ``FileNode`` objects.
954 """
954 """
955 raise NotImplementedError
955 raise NotImplementedError
956
956
957 @LazyProperty
957 @LazyProperty
958 def changed(self):
958 def changed(self):
959 """
959 """
960 Returns list of modified ``FileNode`` objects.
960 Returns list of modified ``FileNode`` objects.
961 """
961 """
962 raise NotImplementedError
962 raise NotImplementedError
963
963
964 @LazyProperty
964 @LazyProperty
965 def removed(self):
965 def removed(self):
966 """
966 """
967 Returns list of removed ``FileNode`` objects.
967 Returns list of removed ``FileNode`` objects.
968 """
968 """
969 raise NotImplementedError
969 raise NotImplementedError
970
970
971 @LazyProperty
971 @LazyProperty
972 def size(self):
972 def size(self):
973 """
973 """
974 Returns total number of bytes from contents of all filenodes.
974 Returns total number of bytes from contents of all filenodes.
975 """
975 """
976 return sum((node.size for node in self.get_filenodes_generator()))
976 return sum((node.size for node in self.get_filenodes_generator()))
977
977
978 def walk(self, topurl=''):
978 def walk(self, topurl=''):
979 """
979 """
980 Similar to the os.walk method. Instead of the filesystem it walks
980 Similar to the os.walk method. Instead of the filesystem it walks
981 through the commit starting at the given ``topurl``. Returns a
981 through the commit starting at the given ``topurl``. Returns a
982 generator of (topnode, dirnodes, filenodes) tuples.
982 generator of (topnode, dirnodes, filenodes) tuples.
983 """
983 """
984 topnode = self.get_node(topurl)
984 topnode = self.get_node(topurl)
985 if not topnode.is_dir():
985 if not topnode.is_dir():
986 return
986 return
987 yield (topnode, topnode.dirs, topnode.files)
987 yield (topnode, topnode.dirs, topnode.files)
988 for dirnode in topnode.dirs:
988 for dirnode in topnode.dirs:
989 for tup in self.walk(dirnode.path):
989 for tup in self.walk(dirnode.path):
990 yield tup
990 yield tup
991
991
992 def get_filenodes_generator(self):
992 def get_filenodes_generator(self):
993 """
993 """
994 Returns generator that yields *all* file nodes.
994 Returns generator that yields *all* file nodes.
995 """
995 """
996 for topnode, dirs, files in self.walk():
996 for topnode, dirs, files in self.walk():
997 for node in files:
997 for node in files:
998 yield node
998 yield node
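# Illustrative sketch: ``walk()`` mirrors ``os.walk``, so collecting every
# file path of a commit could look like this (``commit`` is assumed to be
# any ``BaseCommit`` instance):
#
#     all_paths = []
#     for topnode, dirnodes, filenodes in commit.walk():
#         all_paths.extend(f.path for f in filenodes)
#
#     # equivalent to: [f.path for f in commit.get_filenodes_generator()]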
999
999
1000 #
1000 #
1001 # Utilities for sub classes to support consistent behavior
1001 # Utilities for sub classes to support consistent behavior
1002 #
1002 #
1003
1003
1004 def no_node_at_path(self, path):
1004 def no_node_at_path(self, path):
1005 return NodeDoesNotExistError(
1005 return NodeDoesNotExistError(
1006 "There is no file nor directory at the given path: "
1006 "There is no file nor directory at the given path: "
1007 "'%s' at commit %s" % (path, self.short_id))
1007 "'%s' at commit %s" % (path, self.short_id))
1008
1008
1009 def _fix_path(self, path):
1009 def _fix_path(self, path):
1010 """
1010 """
1011 Paths are stored without a trailing slash, so we need to get rid of
1011 Paths are stored without a trailing slash, so we need to get rid of
1012 it if needed.
1012 it if needed.
1013 """
1013 """
1014 return path.rstrip('/')
1014 return path.rstrip('/')
1015
1015
1016 #
1016 #
1017 # Deprecated API based on changesets
1017 # Deprecated API based on changesets
1018 #
1018 #
1019
1019
1020 @property
1020 @property
1021 def revision(self):
1021 def revision(self):
1022 warnings.warn("Use idx instead", DeprecationWarning)
1022 warnings.warn("Use idx instead", DeprecationWarning)
1023 return self.idx
1023 return self.idx
1024
1024
1025 @revision.setter
1025 @revision.setter
1026 def revision(self, value):
1026 def revision(self, value):
1027 warnings.warn("Use idx instead", DeprecationWarning)
1027 warnings.warn("Use idx instead", DeprecationWarning)
1028 self.idx = value
1028 self.idx = value
1029
1029
1030 def get_file_changeset(self, path):
1030 def get_file_changeset(self, path):
1031 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1031 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1032 return self.get_file_commit(path)
1032 return self.get_file_commit(path)
1033
1033
1034
1034
1035 class BaseChangesetClass(type):
1035 class BaseChangesetClass(type):
1036
1036
1037 def __instancecheck__(self, instance):
1037 def __instancecheck__(self, instance):
1038 return isinstance(instance, BaseCommit)
1038 return isinstance(instance, BaseCommit)
1039
1039
1040
1040
1041 class BaseChangeset(BaseCommit):
1041 class BaseChangeset(BaseCommit):
1042
1042
1043 __metaclass__ = BaseChangesetClass
1043 __metaclass__ = BaseChangesetClass
1044
1044
1045 def __new__(cls, *args, **kwargs):
1045 def __new__(cls, *args, **kwargs):
1046 warnings.warn(
1046 warnings.warn(
1047 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1047 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1048 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1048 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1049
1049
1050
1050
1051 class BaseInMemoryCommit(object):
1051 class BaseInMemoryCommit(object):
1052 """
1052 """
1053 Represents differences between repository's state (most recent head) and
1053 Represents differences between repository's state (most recent head) and
1054 changes made *in place*.
1054 changes made *in place*.
1055
1055
1056 **Attributes**
1056 **Attributes**
1057
1057
1058 ``repository``
1058 ``repository``
1059 repository object for this in-memory-commit
1059 repository object for this in-memory-commit
1060
1060
1061 ``added``
1061 ``added``
1062 list of ``FileNode`` objects marked as *added*
1062 list of ``FileNode`` objects marked as *added*
1063
1063
1064 ``changed``
1064 ``changed``
1065 list of ``FileNode`` objects marked as *changed*
1065 list of ``FileNode`` objects marked as *changed*
1066
1066
1067 ``removed``
1067 ``removed``
1068 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1068 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1069 *removed*
1069 *removed*
1070
1070
1071 ``parents``
1071 ``parents``
1072 list of :class:`BaseCommit` instances representing parents of
1072 list of :class:`BaseCommit` instances representing parents of
1073 the in-memory commit. Should always be a 2-element sequence.
1073 the in-memory commit. Should always be a 2-element sequence.
1074
1074
1075 """
1075 """
1076
1076
1077 def __init__(self, repository):
1077 def __init__(self, repository):
1078 self.repository = repository
1078 self.repository = repository
1079 self.added = []
1079 self.added = []
1080 self.changed = []
1080 self.changed = []
1081 self.removed = []
1081 self.removed = []
1082 self.parents = []
1082 self.parents = []
1083
1083
1084 def add(self, *filenodes):
1084 def add(self, *filenodes):
1085 """
1085 """
1086 Marks given ``FileNode`` objects as *to be committed*.
1086 Marks given ``FileNode`` objects as *to be committed*.
1087
1087
1088 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1088 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1089 latest commit
1089 latest commit
1090 :raises ``NodeAlreadyAddedError``: if node with same path is already
1090 :raises ``NodeAlreadyAddedError``: if node with same path is already
1091 marked as *added*
1091 marked as *added*
1092 """
1092 """
1093 # Check if not already marked as *added* first
1093 # Check if not already marked as *added* first
1094 for node in filenodes:
1094 for node in filenodes:
1095 if node.path in (n.path for n in self.added):
1095 if node.path in (n.path for n in self.added):
1096 raise NodeAlreadyAddedError(
1096 raise NodeAlreadyAddedError(
1097 "Such FileNode %s is already marked for addition"
1097 "Such FileNode %s is already marked for addition"
1098 % node.path)
1098 % node.path)
1099 for node in filenodes:
1099 for node in filenodes:
1100 self.added.append(node)
1100 self.added.append(node)
1101
1101
1102 def change(self, *filenodes):
1102 def change(self, *filenodes):
1103 """
1103 """
1104 Marks given ``FileNode`` objects to be *changed* in next commit.
1104 Marks given ``FileNode`` objects to be *changed* in next commit.
1105
1105
1106 :raises ``EmptyRepositoryError``: if there are no commits yet
1106 :raises ``EmptyRepositoryError``: if there are no commits yet
1107 :raises ``NodeAlreadyExistsError``: if node with same path is already
1107 :raises ``NodeAlreadyExistsError``: if node with same path is already
1108 marked to be *changed*
1108 marked to be *changed*
1109 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1109 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1110 marked to be *removed*
1110 marked to be *removed*
1111 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1111 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1112 commit
1112 commit
1113 :raises ``NodeNotChangedError``: if node hasn't really been changed
1113 :raises ``NodeNotChangedError``: if node hasn't really been changed
1114 """
1114 """
1115 for node in filenodes:
1115 for node in filenodes:
1116 if node.path in (n.path for n in self.removed):
1116 if node.path in (n.path for n in self.removed):
1117 raise NodeAlreadyRemovedError(
1117 raise NodeAlreadyRemovedError(
1118 "Node at %s is already marked as removed" % node.path)
1118 "Node at %s is already marked as removed" % node.path)
1119 try:
1119 try:
1120 self.repository.get_commit()
1120 self.repository.get_commit()
1121 except EmptyRepositoryError:
1121 except EmptyRepositoryError:
1122 raise EmptyRepositoryError(
1122 raise EmptyRepositoryError(
1123 "Nothing to change - try to *add* new nodes rather than "
1123 "Nothing to change - try to *add* new nodes rather than "
1124 "changing them")
1124 "changing them")
1125 for node in filenodes:
1125 for node in filenodes:
1126 if node.path in (n.path for n in self.changed):
1126 if node.path in (n.path for n in self.changed):
1127 raise NodeAlreadyChangedError(
1127 raise NodeAlreadyChangedError(
1128 "Node at '%s' is already marked as changed" % node.path)
1128 "Node at '%s' is already marked as changed" % node.path)
1129 self.changed.append(node)
1129 self.changed.append(node)
1130
1130
1131 def remove(self, *filenodes):
1131 def remove(self, *filenodes):
1132 """
1132 """
1133 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1133 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1134 *removed* in next commit.
1134 *removed* in next commit.
1135
1135
1136 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1136 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1137 be *removed*
1137 be *removed*
1138 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1138 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1139 be *changed*
1139 be *changed*
1140 """
1140 """
1141 for node in filenodes:
1141 for node in filenodes:
1142 if node.path in (n.path for n in self.removed):
1142 if node.path in (n.path for n in self.removed):
1143 raise NodeAlreadyRemovedError(
1143 raise NodeAlreadyRemovedError(
1144 "Node is already marked to for removal at %s" % node.path)
1144 "Node is already marked to for removal at %s" % node.path)
1145 if node.path in (n.path for n in self.changed):
1145 if node.path in (n.path for n in self.changed):
1146 raise NodeAlreadyChangedError(
1146 raise NodeAlreadyChangedError(
1147 "Node is already marked to be changed at %s" % node.path)
1147 "Node is already marked to be changed at %s" % node.path)
1148 # We only mark node as *removed* - real removal is done by
1148 # We only mark node as *removed* - real removal is done by
1149 # commit method
1149 # commit method
1150 self.removed.append(node)
1150 self.removed.append(node)
1151
1151
1152 def reset(self):
1152 def reset(self):
1153 """
1153 """
1154 Resets this instance to initial state (cleans ``added``, ``changed``
1154 Resets this instance to initial state (cleans ``added``, ``changed``
1155 and ``removed`` lists).
1155 and ``removed`` lists).
1156 """
1156 """
1157 self.added = []
1157 self.added = []
1158 self.changed = []
1158 self.changed = []
1159 self.removed = []
1159 self.removed = []
1160 self.parents = []
1160 self.parents = []
1161
1161
1162 def get_ipaths(self):
1162 def get_ipaths(self):
1163 """
1163 """
1164 Returns generator of paths from nodes marked as added, changed or
1164 Returns generator of paths from nodes marked as added, changed or
1165 removed.
1165 removed.
1166 """
1166 """
1167 for node in itertools.chain(self.added, self.changed, self.removed):
1167 for node in itertools.chain(self.added, self.changed, self.removed):
1168 yield node.path
1168 yield node.path
1169
1169
1170 def get_paths(self):
1170 def get_paths(self):
1171 """
1171 """
1172 Returns list of paths from nodes marked as added, changed or removed.
1172 Returns list of paths from nodes marked as added, changed or removed.
1173 """
1173 """
1174 return list(self.get_ipaths())
1174 return list(self.get_ipaths())
1175
1175
1176 def check_integrity(self, parents=None):
1176 def check_integrity(self, parents=None):
1177 """
1177 """
1178 Checks in-memory commit's integrity. Also, sets parents if not
1178 Checks in-memory commit's integrity. Also, sets parents if not
1179 already set.
1179 already set.
1180
1180
1181 :raises CommitError: if any error occurs (i.e.
1181 :raises CommitError: if any error occurs (i.e.
1182 ``NodeDoesNotExistError``).
1182 ``NodeDoesNotExistError``).
1183 """
1183 """
1184 if not self.parents:
1184 if not self.parents:
1185 parents = parents or []
1185 parents = parents or []
1186 if len(parents) == 0:
1186 if len(parents) == 0:
1187 try:
1187 try:
1188 parents = [self.repository.get_commit(), None]
1188 parents = [self.repository.get_commit(), None]
1189 except EmptyRepositoryError:
1189 except EmptyRepositoryError:
1190 parents = [None, None]
1190 parents = [None, None]
1191 elif len(parents) == 1:
1191 elif len(parents) == 1:
1192 parents += [None]
1192 parents += [None]
1193 self.parents = parents
1193 self.parents = parents
1194
1194
1195 # Local parents, only if not None
1195 # Local parents, only if not None
1196 parents = [p for p in self.parents if p]
1196 parents = [p for p in self.parents if p]
1197
1197
1198 # Check nodes marked as added
1198 # Check nodes marked as added
1199 for p in parents:
1199 for p in parents:
1200 for node in self.added:
1200 for node in self.added:
1201 try:
1201 try:
1202 p.get_node(node.path)
1202 p.get_node(node.path)
1203 except NodeDoesNotExistError:
1203 except NodeDoesNotExistError:
1204 pass
1204 pass
1205 else:
1205 else:
1206 raise NodeAlreadyExistsError(
1206 raise NodeAlreadyExistsError(
1207 "Node `%s` already exists at %s" % (node.path, p))
1207 "Node `%s` already exists at %s" % (node.path, p))
1208
1208
1209 # Check nodes marked as changed
1209 # Check nodes marked as changed
1210 missing = set(self.changed)
1210 missing = set(self.changed)
1211 not_changed = set(self.changed)
1211 not_changed = set(self.changed)
1212 if self.changed and not parents:
1212 if self.changed and not parents:
1213 raise NodeDoesNotExistError(str(self.changed[0].path))
1213 raise NodeDoesNotExistError(str(self.changed[0].path))
1214 for p in parents:
1214 for p in parents:
1215 for node in self.changed:
1215 for node in self.changed:
1216 try:
1216 try:
1217 old = p.get_node(node.path)
1217 old = p.get_node(node.path)
1218 missing.remove(node)
1218 missing.remove(node)
1219 # if content actually changed, remove node from not_changed
1219 # if content actually changed, remove node from not_changed
1220 if old.content != node.content:
1220 if old.content != node.content:
1221 not_changed.remove(node)
1221 not_changed.remove(node)
1222 except NodeDoesNotExistError:
1222 except NodeDoesNotExistError:
1223 pass
1223 pass
1224 if self.changed and missing:
1224 if self.changed and missing:
1225 raise NodeDoesNotExistError(
1225 raise NodeDoesNotExistError(
1226 "Node `%s` marked as modified but missing in parents: %s"
1226 "Node `%s` marked as modified but missing in parents: %s"
1227 % (node.path, parents))
1227 % (node.path, parents))
1228
1228
1229 if self.changed and not_changed:
1229 if self.changed and not_changed:
1230 raise NodeNotChangedError(
1230 raise NodeNotChangedError(
1231 "Node `%s` wasn't actually changed (parents: %s)"
1231 "Node `%s` wasn't actually changed (parents: %s)"
1232 % (not_changed.pop().path, parents))
1232 % (not_changed.pop().path, parents))
1233
1233
1234 # Check nodes marked as removed
1234 # Check nodes marked as removed
1235 if self.removed and not parents:
1235 if self.removed and not parents:
1236 raise NodeDoesNotExistError(
1236 raise NodeDoesNotExistError(
1237 "Cannot remove node at %s as there "
1237 "Cannot remove node at %s as there "
1238 "were no parents specified" % self.removed[0].path)
1238 "were no parents specified" % self.removed[0].path)
1239 really_removed = set()
1239 really_removed = set()
1240 for p in parents:
1240 for p in parents:
1241 for node in self.removed:
1241 for node in self.removed:
1242 try:
1242 try:
1243 p.get_node(node.path)
1243 p.get_node(node.path)
1244 really_removed.add(node)
1244 really_removed.add(node)
1245 except CommitError:
1245 except CommitError:
1246 pass
1246 pass
1247 not_removed = set(self.removed) - really_removed
1247 not_removed = set(self.removed) - really_removed
1248 if not_removed:
1248 if not_removed:
1249 # TODO: johbo: This code branch does not seem to be covered
1249 # TODO: johbo: This code branch does not seem to be covered
1250 raise NodeDoesNotExistError(
1250 raise NodeDoesNotExistError(
1251 "Cannot remove node at %s from "
1251 "Cannot remove node at %s from "
1252 "following parents: %s" % (not_removed, parents))
1252 "following parents: %s" % (not_removed, parents))
1253
1253
1254 def commit(
1254 def commit(
1255 self, message, author, parents=None, branch=None, date=None,
1255 self, message, author, parents=None, branch=None, date=None,
1256 **kwargs):
1256 **kwargs):
1257 """
1257 """
1258 Performs in-memory commit (doesn't check workdir in any way) and
1258 Performs in-memory commit (doesn't check workdir in any way) and
1259 returns newly created :class:`BaseCommit`. Updates repository's
1259 returns newly created :class:`BaseCommit`. Updates repository's
1260 attribute `commits`.
1260 attribute `commits`.
1261
1261
1262 .. note::
1262 .. note::
1263
1263
1264 While overriding this method each backend should call
1264 While overriding this method each backend should call
1265 ``self.check_integrity(parents)`` first.
1265 ``self.check_integrity(parents)`` first.
1266
1266
1267 :param message: message of the commit
1267 :param message: message of the commit
1268 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1268 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1269 :param parents: single parent or sequence of parents from which commit
1269 :param parents: single parent or sequence of parents from which commit
1270 would be derived
1270 would be derived
1271 :param date: ``datetime.datetime`` instance. Defaults to
1271 :param date: ``datetime.datetime`` instance. Defaults to
1272 ``datetime.datetime.now()``.
1272 ``datetime.datetime.now()``.
1273 :param branch: branch name, as string. If none given, the backend's
1273 :param branch: branch name, as string. If none given, the backend's
1274 default branch will be used.
1274 default branch will be used.
1275
1275
1276 :raises ``CommitError``: if any error occurs while committing
1276 :raises ``CommitError``: if any error occurs while committing
1277 """
1277 """
1278 raise NotImplementedError
1278 raise NotImplementedError
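# Illustrative sketch of the add/change/remove -> commit() workflow described
# above. It assumes the repository exposes an instance of this class (e.g. as
# ``repo.in_memory_commit``) and uses ``FileNode`` from rhodecode.lib.vcs.nodes;
# the path, content and author below are hypothetical example values.
#
#     imc = repo.in_memory_commit
#     imc.add(FileNode('docs/intro.rst', content='hello'))
#     imc.commit(
#         message=u'Add intro docs',
#         author=u'Joe Doe <joe.doe@example.com>')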
1279
1279
1280
1280
1281 class BaseInMemoryChangesetClass(type):
1281 class BaseInMemoryChangesetClass(type):
1282
1282
1283 def __instancecheck__(self, instance):
1283 def __instancecheck__(self, instance):
1284 return isinstance(instance, BaseInMemoryCommit)
1284 return isinstance(instance, BaseInMemoryCommit)
1285
1285
1286
1286
1287 class BaseInMemoryChangeset(BaseInMemoryCommit):
1287 class BaseInMemoryChangeset(BaseInMemoryCommit):
1288
1288
1289 __metaclass__ = BaseInMemoryChangesetClass
1289 __metaclass__ = BaseInMemoryChangesetClass
1290
1290
1291 def __new__(cls, *args, **kwargs):
1291 def __new__(cls, *args, **kwargs):
1292 warnings.warn(
1292 warnings.warn(
1293 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1293 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1294 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1294 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1295
1295
1296
1296
1297 class EmptyCommit(BaseCommit):
1297 class EmptyCommit(BaseCommit):
1298 """
1298 """
1299 A dummy empty commit. It's possible to pass a hash when creating
1299 A dummy empty commit. It's possible to pass a hash when creating
1300 an EmptyCommit.
1300 an EmptyCommit.
1301 """
1301 """
1302
1302
1303 def __init__(
1303 def __init__(
1304 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1304 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1305 message='', author='', date=None):
1305 message='', author='', date=None):
1306 self._empty_commit_id = commit_id
1306 self._empty_commit_id = commit_id
1307 # TODO: johbo: Solve idx parameter, default value does not make
1307 # TODO: johbo: Solve idx parameter, default value does not make
1308 # too much sense
1308 # too much sense
1309 self.idx = idx
1309 self.idx = idx
1310 self.message = message
1310 self.message = message
1311 self.author = author
1311 self.author = author
1312 self.date = date or datetime.datetime.fromtimestamp(0)
1312 self.date = date or datetime.datetime.fromtimestamp(0)
1313 self.repository = repo
1313 self.repository = repo
1314 self.alias = alias
1314 self.alias = alias
1315
1315
1316 @LazyProperty
1316 @LazyProperty
1317 def raw_id(self):
1317 def raw_id(self):
1318 """
1318 """
1319 Returns raw string identifying this commit, useful for web
1319 Returns raw string identifying this commit, useful for web
1320 representation.
1320 representation.
1321 """
1321 """
1322
1322
1323 return self._empty_commit_id
1323 return self._empty_commit_id
1324
1324
1325 @LazyProperty
1325 @LazyProperty
1326 def branch(self):
1326 def branch(self):
1327 if self.alias:
1327 if self.alias:
1328 from rhodecode.lib.vcs.backends import get_backend
1328 from rhodecode.lib.vcs.backends import get_backend
1329 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1329 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1330
1330
1331 @LazyProperty
1331 @LazyProperty
1332 def short_id(self):
1332 def short_id(self):
1333 return self.raw_id[:12]
1333 return self.raw_id[:12]
1334
1334
1335 def get_file_commit(self, path):
1335 def get_file_commit(self, path):
1336 return self
1336 return self
1337
1337
1338 def get_file_content(self, path):
1338 def get_file_content(self, path):
1339 return u''
1339 return u''
1340
1340
1341 def get_file_size(self, path):
1341 def get_file_size(self, path):
1342 return 0
1342 return 0
1343
1343
1344
1344
1345 class EmptyChangesetClass(type):
1345 class EmptyChangesetClass(type):
1346
1346
1347 def __instancecheck__(self, instance):
1347 def __instancecheck__(self, instance):
1348 return isinstance(instance, EmptyCommit)
1348 return isinstance(instance, EmptyCommit)
1349
1349
1350
1350
1351 class EmptyChangeset(EmptyCommit):
1351 class EmptyChangeset(EmptyCommit):
1352
1352
1353 __metaclass__ = EmptyChangesetClass
1353 __metaclass__ = EmptyChangesetClass
1354
1354
1355 def __new__(cls, *args, **kwargs):
1355 def __new__(cls, *args, **kwargs):
1356 warnings.warn(
1356 warnings.warn(
1357 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1357 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1358 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1358 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1359
1359
1360 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1360 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1361 alias=None, revision=-1, message='', author='', date=None):
1361 alias=None, revision=-1, message='', author='', date=None):
1362 if requested_revision is not None:
1362 if requested_revision is not None:
1363 warnings.warn(
1363 warnings.warn(
1364 "Parameter requested_revision not supported anymore",
1364 "Parameter requested_revision not supported anymore",
1365 DeprecationWarning)
1365 DeprecationWarning)
1366 super(EmptyChangeset, self).__init__(
1366 super(EmptyChangeset, self).__init__(
1367 commit_id=cs, repo=repo, alias=alias, idx=revision,
1367 commit_id=cs, repo=repo, alias=alias, idx=revision,
1368 message=message, author=author, date=date)
1368 message=message, author=author, date=date)
1369
1369
1370 @property
1370 @property
1371 def revision(self):
1371 def revision(self):
1372 warnings.warn("Use idx instead", DeprecationWarning)
1372 warnings.warn("Use idx instead", DeprecationWarning)
1373 return self.idx
1373 return self.idx
1374
1374
1375 @revision.setter
1375 @revision.setter
1376 def revision(self, value):
1376 def revision(self, value):
1377 warnings.warn("Use idx instead", DeprecationWarning)
1377 warnings.warn("Use idx instead", DeprecationWarning)
1378 self.idx = value
1378 self.idx = value
1379
1379
1380
1380
1381 class CollectionGenerator(object):
1381 class CollectionGenerator(object):
1382
1382
1383 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1383 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1384 self.repo = repo
1384 self.repo = repo
1385 self.commit_ids = commit_ids
1385 self.commit_ids = commit_ids
1386 # TODO: (oliver) this isn't currently hooked up
1386 # TODO: (oliver) this isn't currently hooked up
1387 self.collection_size = None
1387 self.collection_size = None
1388 self.pre_load = pre_load
1388 self.pre_load = pre_load
1389
1389
1390 def __len__(self):
1390 def __len__(self):
1391 if self.collection_size is not None:
1391 if self.collection_size is not None:
1392 return self.collection_size
1392 return self.collection_size
1393 return self.commit_ids.__len__()
1393 return self.commit_ids.__len__()
1394
1394
1395 def __iter__(self):
1395 def __iter__(self):
1396 for commit_id in self.commit_ids:
1396 for commit_id in self.commit_ids:
1397 # TODO: johbo: Mercurial passes in commit indices or commit ids
1397 # TODO: johbo: Mercurial passes in commit indices or commit ids
1398 yield self._commit_factory(commit_id)
1398 yield self._commit_factory(commit_id)
1399
1399
1400 def _commit_factory(self, commit_id):
1400 def _commit_factory(self, commit_id):
1401 """
1401 """
1402 Allows backends to override the way commits are generated.
1402 Allows backends to override the way commits are generated.
1403 """
1403 """
1404 return self.repo.get_commit(commit_id=commit_id,
1404 return self.repo.get_commit(commit_id=commit_id,
1405 pre_load=self.pre_load)
1405 pre_load=self.pre_load)
1406
1406
1407 def __getslice__(self, i, j):
1407 def __getslice__(self, i, j):
1408 """
1408 """
1409 Returns an iterator of sliced repository
1409 Returns an iterator of sliced repository
1410 """
1410 """
1411 commit_ids = self.commit_ids[i:j]
1411 commit_ids = self.commit_ids[i:j]
1412 return self.__class__(
1412 return self.__class__(
1413 self.repo, commit_ids, pre_load=self.pre_load)
1413 self.repo, commit_ids, pre_load=self.pre_load)
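# Illustrative sketch: a CollectionGenerator behaves like a lazy sequence of
# commits, so slicing only materializes the commits that are actually
# iterated (``repo.get_commits()`` returning such a generator is an
# assumption here):
#
#     commits = repo.get_commits()
#     for commit in commits[:10]:   # goes through __getslice__ above
#         print commit.raw_id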
1414
1414
1415 def __repr__(self):
1415 def __repr__(self):
1416 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1416 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1417
1417
1418
1418
1419 class Config(object):
1419 class Config(object):
1420 """
1420 """
1421 Represents the configuration for a repository.
1421 Represents the configuration for a repository.
1422
1422
1423 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1423 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1424 standard library. It implements only the needed subset.
1424 standard library. It implements only the needed subset.
1425 """
1425 """
1426
1426
1427 def __init__(self):
1427 def __init__(self):
1428 self._values = {}
1428 self._values = {}
1429
1429
1430 def copy(self):
1430 def copy(self):
1431 clone = Config()
1431 clone = Config()
1432 for section, values in self._values.items():
1432 for section, values in self._values.items():
1433 clone._values[section] = values.copy()
1433 clone._values[section] = values.copy()
1434 return clone
1434 return clone
1435
1435
1436 def __repr__(self):
1436 def __repr__(self):
1437 return '<Config(%s values) at %s>' % (len(self._values), hex(id(self)))
1437 return '<Config(%s values) at %s>' % (len(self._values), hex(id(self)))
1438
1438
1439 def items(self, section):
1439 def items(self, section):
1440 return self._values.get(section, {}).iteritems()
1440 return self._values.get(section, {}).iteritems()
1441
1441
1442 def get(self, section, option):
1442 def get(self, section, option):
1443 return self._values.get(section, {}).get(option)
1443 return self._values.get(section, {}).get(option)
1444
1444
1445 def set(self, section, option, value):
1445 def set(self, section, option, value):
1446 section_values = self._values.setdefault(section, {})
1446 section_values = self._values.setdefault(section, {})
1447 section_values[option] = value
1447 section_values[option] = value
1448
1448
1449 def clear_section(self, section):
1449 def clear_section(self, section):
1450 self._values[section] = {}
1450 self._values[section] = {}
1451
1451
1452 def serialize(self):
1452 def serialize(self):
1453 """
1453 """
1454 Creates a list of three-element tuples (section, key, value) representing
1454 Creates a list of three-element tuples (section, key, value) representing
1455 this config object.
1455 this config object.
1456 """
1456 """
1457 items = []
1457 items = []
1458 for section in self._values:
1458 for section in self._values:
1459 for option, value in self._values[section].items():
1459 for option, value in self._values[section].items():
1460 items.append(
1460 items.append(
1461 (safe_str(section), safe_str(option), safe_str(value)))
1461 (safe_str(section), safe_str(option), safe_str(value)))
1462 return items
1462 return items
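# Illustrative sketch of the Config API defined above; the section, option
# and value below are purely example data:
#
#     config = Config()
#     config.set('ui', 'username', 'Joe Doe <joe.doe@example.com>')
#     config.get('ui', 'username')   # -> 'Joe Doe <joe.doe@example.com>'
#     config.serialize()             # -> [('ui', 'username', 'Joe Doe <joe.doe@example.com>')]
#     clone = config.copy()          # per-section dicts are copied, not shared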
1463
1463
1464
1464
1465 class Diff(object):
1465 class Diff(object):
1466 """
1466 """
1467 Represents a diff result from a repository backend.
1467 Represents a diff result from a repository backend.
1468
1468
1469 Subclasses have to provide a backend specific value for :attr:`_header_re`.
1469 Subclasses have to provide a backend specific value for :attr:`_header_re`.
1470 """
1470 """
1471
1471
1472 _header_re = None
1472 _header_re = None
1473
1473
1474 def __init__(self, raw_diff):
1474 def __init__(self, raw_diff):
1475 self.raw = raw_diff
1475 self.raw = raw_diff
1476
1476
1477 def chunks(self):
1477 def chunks(self):
1478 """
1478 """
1479 Splits the diff into separate ``diff --git a/file b/file`` chunks.
1479 Splits the diff into separate ``diff --git a/file b/file`` chunks.
1480 To make diffs consistent we must prepend them with \n, and make sure
1480 To make diffs consistent we must prepend them with \n, and make sure
1481 we can detect the last chunk, as it also has a special rule
1481 we can detect the last chunk, as it also has a special rule
1482 """
1482 """
1483 chunks = ('\n' + self.raw).split('\ndiff --git')[1:]
1483 chunks = ('\n' + self.raw).split('\ndiff --git')[1:]
1484 total_chunks = len(chunks)
1484 total_chunks = len(chunks)
1485 return (DiffChunk(chunk, self, cur_chunk == total_chunks)
1485 return (DiffChunk(chunk, self, cur_chunk == total_chunks)
1486 for cur_chunk, chunk in enumerate(chunks, start=1))
1486 for cur_chunk, chunk in enumerate(chunks, start=1))
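# Illustrative sketch: ``Diff`` itself is abstract (each backend supplies its
# own ``_header_re``), so in practice chunks are consumed on the object
# returned by ``BaseCommit.diff()`` / ``repository.get_diff()``.
# ``handle_chunk`` below is a hypothetical callback:
#
#     diff = commit.diff()
#     for chunk in diff.chunks():
#         # chunk.header is the groupdict() of the backend's header regex,
#         # chunk.diff is the patch body that follows the header
#         handle_chunk(chunk.header, chunk.diff)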
1487
1487
1488
1488
1489 class DiffChunk(object):
1489 class DiffChunk(object):
1490
1490
1491 def __init__(self, chunk, diff, last_chunk):
1491 def __init__(self, chunk, diff, last_chunk):
1492 self._diff = diff
1492 self._diff = diff
1493
1493
1494 # since we split by \ndiff --git that part is lost from original diff
1494 # since we split by \ndiff --git that part is lost from original diff
1495 # we need to re-apply it at the end, EXCEPT if it's the last chunk
1495 # we need to re-apply it at the end, EXCEPT if it's the last chunk
1496 if not last_chunk:
1496 if not last_chunk:
1497 chunk += '\n'
1497 chunk += '\n'
1498
1498
1499 match = self._diff._header_re.match(chunk)
1499 match = self._diff._header_re.match(chunk)
1500 self.header = match.groupdict()
1500 self.header = match.groupdict()
1501 self.diff = chunk[match.end():]
1501 self.diff = chunk[match.end():]
1502 self.raw = chunk
1502 self.raw = chunk
@@ -1,738 +1,740 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Module holding everything related to vcs nodes, with vcs2 architecture.
22 Module holding everything related to vcs nodes, with vcs2 architecture.
23 """
23 """
24
24
25
25
26 import stat
26 import stat
27
27
28 from zope.cachedescriptors.property import Lazy as LazyProperty
28 from zope.cachedescriptors.property import Lazy as LazyProperty
29
29
30 from rhodecode.lib.utils import safe_unicode, safe_str
30 from rhodecode.lib.utils import safe_unicode, safe_str
31 from rhodecode.lib.utils2 import md5
31 from rhodecode.lib.vcs import path as vcspath
32 from rhodecode.lib.vcs import path as vcspath
32 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
33 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
33 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
34 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
34 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
35 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
35
36
36 LARGEFILE_PREFIX = '.hglf'
37 LARGEFILE_PREFIX = '.hglf'
37
38
38
39
39 class NodeKind:
40 class NodeKind:
40 SUBMODULE = -1
41 SUBMODULE = -1
41 DIR = 1
42 DIR = 1
42 FILE = 2
43 FILE = 2
43 LARGEFILE = 3
44 LARGEFILE = 3
44
45
45
46
46 class NodeState:
47 class NodeState:
47 ADDED = u'added'
48 ADDED = u'added'
48 CHANGED = u'changed'
49 CHANGED = u'changed'
49 NOT_CHANGED = u'not changed'
50 NOT_CHANGED = u'not changed'
50 REMOVED = u'removed'
51 REMOVED = u'removed'
51
52
52
53
53 class NodeGeneratorBase(object):
54 class NodeGeneratorBase(object):
54 """
55 """
55 Base class for removed, added and changed filenodes; it's a lazy generator
56 Base class for removed, added and changed filenodes; it's a lazy generator
56 class that will create filenodes only on iteration or call
57 class that will create filenodes only on iteration or call
57
58
58 The ``len`` method doesn't need to create filenodes at all
59 The ``len`` method doesn't need to create filenodes at all
59 """
60 """
60
61
61 def __init__(self, current_paths, cs):
62 def __init__(self, current_paths, cs):
62 self.cs = cs
63 self.cs = cs
63 self.current_paths = current_paths
64 self.current_paths = current_paths
64
65
65 def __call__(self):
66 def __call__(self):
66 return [n for n in self]
67 return [n for n in self]
67
68
68 def __getslice__(self, i, j):
69 def __getslice__(self, i, j):
69 for p in self.current_paths[i:j]:
70 for p in self.current_paths[i:j]:
70 yield self.cs.get_node(p)
71 yield self.cs.get_node(p)
71
72
72 def __len__(self):
73 def __len__(self):
73 return len(self.current_paths)
74 return len(self.current_paths)
74
75
75 def __iter__(self):
76 def __iter__(self):
76 for p in self.current_paths:
77 for p in self.current_paths:
77 yield self.cs.get_node(p)
78 yield self.cs.get_node(p)
78
79
79
80
80 class AddedFileNodesGenerator(NodeGeneratorBase):
81 class AddedFileNodesGenerator(NodeGeneratorBase):
81 """
82 """
82 Class holding added files for current commit
83 Class holding added files for current commit
83 """
84 """
84
85
85
86
86 class ChangedFileNodesGenerator(NodeGeneratorBase):
87 class ChangedFileNodesGenerator(NodeGeneratorBase):
87 """
88 """
88 Class holding changed files for current commit
89 Class holding changed files for current commit
89 """
90 """
90
91
91
92
92 class RemovedFileNodesGenerator(NodeGeneratorBase):
93 class RemovedFileNodesGenerator(NodeGeneratorBase):
93 """
94 """
94 Class holding removed files for current commit
95 Class holding removed files for current commit
95 """
96 """
96 def __iter__(self):
97 def __iter__(self):
97 for p in self.current_paths:
98 for p in self.current_paths:
98 yield RemovedFileNode(path=p)
99 yield RemovedFileNode(path=p)
99
100
100 def __getslice__(self, i, j):
101 def __getslice__(self, i, j):
101 for p in self.current_paths[i:j]:
102 for p in self.current_paths[i:j]:
102 yield RemovedFileNode(path=p)
103 yield RemovedFileNode(path=p)
103
104
104
105
105 class Node(object):
106 class Node(object):
106 """
107 """
107 Simplest class representing file or directory on repository. SCM backends
108 Simplest class representing file or directory on repository. SCM backends
108 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
109 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
109 directly.
110 directly.
110
111
111 Node's ``path`` cannot start with slash as we operate on *relative* paths
112 Node's ``path`` cannot start with slash as we operate on *relative* paths
112 only. Moreover, every single node is identified by the ``path`` attribute,
113 only. Moreover, every single node is identified by the ``path`` attribute,
113 so it cannot end with slash, too. Otherwise, path could lead to mistakes.
114 so it cannot end with slash, too. Otherwise, path could lead to mistakes.
114 """
115 """
115
116
116 commit = None
117 commit = None
117
118
118 def __init__(self, path, kind):
119 def __init__(self, path, kind):
119 self._validate_path(path) # can throw exception if path is invalid
120 self._validate_path(path) # can throw exception if path is invalid
120 self.path = safe_str(path.rstrip('/')) # we store paths as str
121 self.path = safe_str(path.rstrip('/')) # we store paths as str
121 if path == '' and kind != NodeKind.DIR:
122 if path == '' and kind != NodeKind.DIR:
122 raise NodeError("Only DirNode and its subclasses may be "
123 raise NodeError("Only DirNode and its subclasses may be "
123 "initialized with empty path")
124 "initialized with empty path")
124 self.kind = kind
125 self.kind = kind
125
126
126 if self.is_root() and not self.is_dir():
127 if self.is_root() and not self.is_dir():
127 raise NodeError("Root node cannot be FILE kind")
128 raise NodeError("Root node cannot be FILE kind")
128
129
129 def _validate_path(self, path):
130 def _validate_path(self, path):
130 if path.startswith('/'):
131 if path.startswith('/'):
131 raise NodeError(
132 raise NodeError(
132 "Cannot initialize Node objects with slash at "
133 "Cannot initialize Node objects with slash at "
133 "the beginning as only relative paths are supported. "
134 "the beginning as only relative paths are supported. "
134 "Got %s" % (path,))
135 "Got %s" % (path,))
135
136
136 @LazyProperty
137 @LazyProperty
137 def parent(self):
138 def parent(self):
138 parent_path = self.get_parent_path()
139 parent_path = self.get_parent_path()
139 if parent_path:
140 if parent_path:
140 if self.commit:
141 if self.commit:
141 return self.commit.get_node(parent_path)
142 return self.commit.get_node(parent_path)
142 return DirNode(parent_path)
143 return DirNode(parent_path)
143 return None
144 return None
144
145
145 @LazyProperty
146 @LazyProperty
146 def unicode_path(self):
147 def unicode_path(self):
147 return safe_unicode(self.path)
148 return safe_unicode(self.path)
148
149
149 @LazyProperty
150 @LazyProperty
150 def dir_path(self):
151 def dir_path(self):
151 """
152 """
152 Returns name of the directory from full path of this vcs node. Empty
153 Returns name of the directory from full path of this vcs node. Empty
153 string is returned if there's no directory in the path
154 string is returned if there's no directory in the path
154 """
155 """
155 _parts = self.path.rstrip('/').rsplit('/', 1)
156 _parts = self.path.rstrip('/').rsplit('/', 1)
156 if len(_parts) == 2:
157 if len(_parts) == 2:
157 return safe_unicode(_parts[0])
158 return safe_unicode(_parts[0])
158 return u''
159 return u''
159
160
160 @LazyProperty
161 @LazyProperty
161 def name(self):
162 def name(self):
162 """
163 """
163 Returns the name of the node, i.e. only the last part of its
164 Returns the name of the node, i.e. only the last part of its
164 path is returned.
165 path is returned.
165 """
166 """
166 return safe_unicode(self.path.rstrip('/').split('/')[-1])
167 return safe_unicode(self.path.rstrip('/').split('/')[-1])
167
168
168 @property
169 @property
169 def kind(self):
170 def kind(self):
170 return self._kind
171 return self._kind
171
172
172 @kind.setter
173 @kind.setter
173 def kind(self, kind):
174 def kind(self, kind):
174 if hasattr(self, '_kind'):
175 if hasattr(self, '_kind'):
175 raise NodeError("Cannot change node's kind")
176 raise NodeError("Cannot change node's kind")
176 else:
177 else:
177 self._kind = kind
178 self._kind = kind
178 # Post setter check (path's trailing slash)
179 # Post setter check (path's trailing slash)
179 if self.path.endswith('/'):
180 if self.path.endswith('/'):
180 raise NodeError("Node's path cannot end with slash")
181 raise NodeError("Node's path cannot end with slash")
181
182
182 def __cmp__(self, other):
183 def __cmp__(self, other):
183 """
184 """
184 Comparator using name of the node, needed for quick list sorting.
185 Comparator using name of the node, needed for quick list sorting.
185 """
186 """
186 kind_cmp = cmp(self.kind, other.kind)
187 kind_cmp = cmp(self.kind, other.kind)
187 if kind_cmp:
188 if kind_cmp:
188 return kind_cmp
189 return kind_cmp
189 return cmp(self.name, other.name)
190 return cmp(self.name, other.name)
190
191
191 def __eq__(self, other):
192 def __eq__(self, other):
192 for attr in ['name', 'path', 'kind']:
193 for attr in ['name', 'path', 'kind']:
193 if getattr(self, attr) != getattr(other, attr):
194 if getattr(self, attr) != getattr(other, attr):
194 return False
195 return False
195 if self.is_file():
196 if self.is_file():
196 if self.content != other.content:
197 if self.content != other.content:
197 return False
198 return False
198 else:
199 else:
199 # For DirNode's check without entering each dir
200 # For DirNode's check without entering each dir
200 self_nodes_paths = list(sorted(n.path for n in self.nodes))
201 self_nodes_paths = list(sorted(n.path for n in self.nodes))
201 other_nodes_paths = list(sorted(n.path for n in other.nodes))
202 other_nodes_paths = list(sorted(n.path for n in other.nodes))
202 if self_nodes_paths != other_nodes_paths:
203 if self_nodes_paths != other_nodes_paths:
203 return False
204 return False
204 return True
205 return True
205
206
206 def __ne__(self, other):
207 def __ne__(self, other):
207 return not self.__eq__(other)
208 return not self.__eq__(other)
208
209
209 def __repr__(self):
210 def __repr__(self):
210 return '<%s %r>' % (self.__class__.__name__, self.path)
211 return '<%s %r>' % (self.__class__.__name__, self.path)
211
212
212 def __str__(self):
213 def __str__(self):
213 return self.__repr__()
214 return self.__repr__()
214
215
215 def __unicode__(self):
216 def __unicode__(self):
216 return self.name
217 return self.name
217
218
218 def get_parent_path(self):
219 def get_parent_path(self):
219 """
220 """
220 Returns node's parent path or empty string if node is root.
221 Returns node's parent path or empty string if node is root.
221 """
222 """
222 if self.is_root():
223 if self.is_root():
223 return ''
224 return ''
224 return vcspath.dirname(self.path.rstrip('/')) + '/'
225 return vcspath.dirname(self.path.rstrip('/')) + '/'
225
226
226 def is_file(self):
227 def is_file(self):
227 """
228 """
228 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
229 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
229 otherwise.
230 otherwise.
230 """
231 """
231 return self.kind == NodeKind.FILE
232 return self.kind == NodeKind.FILE
232
233
233 def is_dir(self):
234 def is_dir(self):
234 """
235 """
235 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
236 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
236 otherwise.
237 otherwise.
237 """
238 """
238 return self.kind == NodeKind.DIR
239 return self.kind == NodeKind.DIR
239
240
240 def is_root(self):
241 def is_root(self):
241 """
242 """
242 Returns ``True`` if node is a root node and ``False`` otherwise.
243 Returns ``True`` if node is a root node and ``False`` otherwise.
243 """
244 """
244 return self.kind == NodeKind.DIR and self.path == ''
245 return self.kind == NodeKind.DIR and self.path == ''
245
246
246 def is_submodule(self):
247 def is_submodule(self):
247 """
248 """
248 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
249 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
249 otherwise.
250 otherwise.
250 """
251 """
251 return self.kind == NodeKind.SUBMODULE
252 return self.kind == NodeKind.SUBMODULE
252
253
253 def is_largefile(self):
254 def is_largefile(self):
254 """
255 """
255 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
256 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
256 otherwise
257 otherwise
257 """
258 """
258 return self.kind == NodeKind.LARGEFILE
259 return self.kind == NodeKind.LARGEFILE
259
260
260 def is_link(self):
261 def is_link(self):
261 if self.commit:
262 if self.commit:
262 return self.commit.is_link(self.path)
263 return self.commit.is_link(self.path)
263 return False
264 return False
264
265
265 @LazyProperty
266 @LazyProperty
266 def added(self):
267 def added(self):
267 return self.state is NodeState.ADDED
268 return self.state is NodeState.ADDED
268
269
269 @LazyProperty
270 @LazyProperty
270 def changed(self):
271 def changed(self):
271 return self.state is NodeState.CHANGED
272 return self.state is NodeState.CHANGED
272
273
273 @LazyProperty
274 @LazyProperty
274 def not_changed(self):
275 def not_changed(self):
275 return self.state is NodeState.NOT_CHANGED
276 return self.state is NodeState.NOT_CHANGED
276
277
277 @LazyProperty
278 @LazyProperty
278 def removed(self):
279 def removed(self):
279 return self.state is NodeState.REMOVED
280 return self.state is NodeState.REMOVED
280
281
281
282
282 class FileNode(Node):
283 class FileNode(Node):
283 """
284 """
284 Class representing file nodes.
285 Class representing file nodes.
285
286
286 :attribute: path: path to the node, relative to repository's root
287 :attribute: path: path to the node, relative to repository's root
287 :attribute: content: if given, sets the content of the file
288 :attribute: content: if given, sets the content of the file
288 :attribute: commit: if given, content is fetched lazily from that commit
289 :attribute: commit: if given, content is fetched lazily from that commit
289 :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
290 :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
290 """
291 """
291
292
292 def __init__(self, path, content=None, commit=None, mode=None):
293 def __init__(self, path, content=None, commit=None, mode=None):
293 """
294 """
294 Only one of ``content`` and ``commit`` may be given. Passing both
295 Only one of ``content`` and ``commit`` may be given. Passing both
295 would raise ``NodeError`` exception.
296 would raise ``NodeError`` exception.
296
297
297 :param path: relative path to the node
298 :param path: relative path to the node
298 :param content: content may be passed to constructor
299 :param content: content may be passed to constructor
299 :param commit: if given, will use it to lazily fetch content
300 :param commit: if given, will use it to lazily fetch content
300 :param mode: ST_MODE (e.g. 0100644)
301 :param mode: ST_MODE (e.g. 0100644)
301 """
302 """
302 if content and commit:
303 if content and commit:
303 raise NodeError("Cannot use both content and commit")
304 raise NodeError("Cannot use both content and commit")
304 super(FileNode, self).__init__(path, kind=NodeKind.FILE)
305 super(FileNode, self).__init__(path, kind=NodeKind.FILE)
305 self.commit = commit
306 self.commit = commit
306 self._content = content
307 self._content = content
307 self._mode = mode or FILEMODE_DEFAULT
308 self._mode = mode or FILEMODE_DEFAULT
308
309
309 @LazyProperty
310 @LazyProperty
310 def mode(self):
311 def mode(self):
311 """
312 """
312 Returns lazily mode of the FileNode. If `commit` is not set, would
313 Returns lazily mode of the FileNode. If `commit` is not set, would
313 use value given at initialization or `FILEMODE_DEFAULT` (default).
314 use value given at initialization or `FILEMODE_DEFAULT` (default).
314 """
315 """
315 if self.commit:
316 if self.commit:
316 mode = self.commit.get_file_mode(self.path)
317 mode = self.commit.get_file_mode(self.path)
317 else:
318 else:
318 mode = self._mode
319 mode = self._mode
319 return mode
320 return mode
320
321
321 def _get_content(self):
322 @LazyProperty
323 def raw_bytes(self):
324 """
325 Returns lazily the raw bytes of the FileNode.
326 """
322 if self.commit:
327 if self.commit:
323 content = self.commit.get_file_content(self.path)
328 if self._content is None:
329 self._content = self.commit.get_file_content(self.path)
330 content = self._content
324 else:
331 else:
325 content = self._content
332 content = self._content
326 return content
333 return content
327
334
328 @property
335 @LazyProperty
336 def md5(self):
337 """
338 Returns md5 of the file node.
339 """
340 return md5(self.raw_bytes)
341
342 @LazyProperty
329 def content(self):
343 def content(self):
330 """
344 """
331 Returns lazily content of the FileNode. If possible, would try to
345 Returns lazily content of the FileNode. If possible, would try to
332 decode content from UTF-8.
346 decode content from UTF-8.
333 """
347 """
334 content = self._get_content()
348 content = self.raw_bytes
335
349
336 if bool(content and '\0' in content):
350 if self.is_binary:
337 return content
351 return content
338 return safe_unicode(content)
352 return safe_unicode(content)
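# Illustrative sketch of the difference between ``raw_bytes`` and ``content``;
# the path and content below are hypothetical example values:
#
#     node = FileNode('docs/readme.rst', content='hello')
#     node.raw_bytes    # undecoded str, exactly as stored  -> 'hello'
#     node.content      # decoded to unicode for text files -> u'hello'
#     node.md5          # md5 digest computed over raw_bytes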
339
353
340 @LazyProperty
354 @LazyProperty
341 def size(self):
355 def size(self):
342 if self.commit:
356 if self.commit:
343 return self.commit.get_file_size(self.path)
357 return self.commit.get_file_size(self.path)
344 raise NodeError(
358 raise NodeError(
345 "Cannot retrieve size of the file without related "
359 "Cannot retrieve size of the file without related "
346 "commit attribute")
360 "commit attribute")
347
361
348 @LazyProperty
362 @LazyProperty
349 def message(self):
363 def message(self):
350 if self.commit:
364 if self.commit:
351 return self.last_commit.message
365 return self.last_commit.message
352 raise NodeError(
366 raise NodeError(
353 "Cannot retrieve message of the file without related "
367 "Cannot retrieve message of the file without related "
354 "commit attribute")
368 "commit attribute")
355
369
356 @LazyProperty
370 @LazyProperty
357 def last_commit(self):
371 def last_commit(self):
358 if self.commit:
372 if self.commit:
359 pre_load = ["author", "date", "message"]
373 pre_load = ["author", "date", "message"]
360 return self.commit.get_file_commit(self.path, pre_load=pre_load)
374 return self.commit.get_file_commit(self.path, pre_load=pre_load)
361 raise NodeError(
375 raise NodeError(
362 "Cannot retrieve last commit of the file without "
376 "Cannot retrieve last commit of the file without "
363 "related commit attribute")
377 "related commit attribute")
364
378
365 def get_mimetype(self):
379 def get_mimetype(self):
366 """
380 """
367 Mimetype is calculated based on the file's content. If ``_mimetype``
381 Mimetype is calculated based on the file's content. If ``_mimetype``
368 attribute is available, it will be returned (backends which store
382 attribute is available, it will be returned (backends which store
369 mimetypes or can easily recognize them, should set this private
383 mimetypes or can easily recognize them, should set this private
370 attribute to indicate that type should *NOT* be calculated).
384 attribute to indicate that type should *NOT* be calculated).
371 """
385 """
372
386
373 if hasattr(self, '_mimetype'):
387 if hasattr(self, '_mimetype'):
374 if (isinstance(self._mimetype, (tuple, list,)) and
388 if (isinstance(self._mimetype, (tuple, list,)) and
375 len(self._mimetype) == 2):
389 len(self._mimetype) == 2):
376 return self._mimetype
390 return self._mimetype
377 else:
391 else:
378 raise NodeError('given _mimetype attribute must be a 2-'
392 raise NodeError('given _mimetype attribute must be a 2-'
379 'element list or tuple')
393 'element list or tuple')
380
394
381 db = get_mimetypes_db()
395 db = get_mimetypes_db()
382 mtype, encoding = db.guess_type(self.name)
396 mtype, encoding = db.guess_type(self.name)
383
397
384 if mtype is None:
398 if mtype is None:
385 if self.is_binary:
399 if self.is_binary:
386 mtype = 'application/octet-stream'
400 mtype = 'application/octet-stream'
387 encoding = None
401 encoding = None
388 else:
402 else:
389 mtype = 'text/plain'
403 mtype = 'text/plain'
390 encoding = None
404 encoding = None
391
405
392 # try with pygments
406 # try with pygments
393 try:
407 try:
394 from pygments.lexers import get_lexer_for_filename
408 from pygments.lexers import get_lexer_for_filename
395 mt = get_lexer_for_filename(self.name).mimetypes
409 mt = get_lexer_for_filename(self.name).mimetypes
396 except Exception:
410 except Exception:
397 mt = None
411 mt = None
398
412
399 if mt:
413 if mt:
400 mtype = mt[0]
414 mtype = mt[0]
401
415
402 return mtype, encoding
416 return mtype, encoding
403
417
404 @LazyProperty
418 @LazyProperty
405 def mimetype(self):
419 def mimetype(self):
406 """
420 """
407 Wrapper around full mimetype info. It returns only the type of the
421 Wrapper around full mimetype info. It returns only the type of the
408 fetched mimetype without the encoding part. Use the get_mimetype
422 fetched mimetype without the encoding part. Use the get_mimetype
409 function to fetch the full (type, encoding) tuple.
423 function to fetch the full (type, encoding) tuple.
410 """
424 """
411 return self.get_mimetype()[0]
425 return self.get_mimetype()[0]
412
426
413 @LazyProperty
427 @LazyProperty
414 def mimetype_main(self):
428 def mimetype_main(self):
415 return self.mimetype.split('/')[0]
429 return self.mimetype.split('/')[0]
416
430
417 @LazyProperty
431 @LazyProperty
418 def lexer(self):
432 def lexer(self):
419 """
433 """
420 Returns pygment's lexer class. Would try to guess lexer taking file's
434 Returns pygment's lexer class. Would try to guess lexer taking file's
421 content, name and mimetype.
435 content, name and mimetype.
422 """
436 """
423 from pygments import lexers
437 from pygments import lexers
424 try:
438 try:
425 lexer = lexers.guess_lexer_for_filename(self.name, self.content, stripnl=False)
439 lexer = lexers.guess_lexer_for_filename(self.name, self.content, stripnl=False)
426 except lexers.ClassNotFound:
440 except lexers.ClassNotFound:
427 lexer = lexers.TextLexer(stripnl=False)
441 lexer = lexers.TextLexer(stripnl=False)
428 # returns first alias
442 # returns first alias
429 return lexer
443 return lexer
430
444
431 @LazyProperty
445 @LazyProperty
432 def lexer_alias(self):
446 def lexer_alias(self):
433 """
447 """
434 Returns first alias of the lexer guessed for this file.
448 Returns first alias of the lexer guessed for this file.
435 """
449 """
436 return self.lexer.aliases[0]
450 return self.lexer.aliases[0]
437
451
438 @LazyProperty
452 @LazyProperty
439 def history(self):
453 def history(self):
440 """
454 """
441 Returns a list of commits for this file in which the file was changed
455 Returns a list of commits for this file in which the file was changed
442 """
456 """
443 if self.commit is None:
457 if self.commit is None:
444 raise NodeError('Unable to get commit for this FileNode')
458 raise NodeError('Unable to get commit for this FileNode')
445 return self.commit.get_file_history(self.path)
459 return self.commit.get_file_history(self.path)
446
460
447 @LazyProperty
461 @LazyProperty
448 def annotate(self):
462 def annotate(self):
449 """
463 """
450 Returns a list of three element tuples with lineno, commit and line
464 Returns a list of three element tuples with lineno, commit and line
451 """
465 """
452 if self.commit is None:
466 if self.commit is None:
453 raise NodeError('Unable to get commit for this FileNode')
467 raise NodeError('Unable to get commit for this FileNode')
454 pre_load = ["author", "date", "message"]
468 pre_load = ["author", "date", "message"]
455 return self.commit.get_file_annotate(self.path, pre_load=pre_load)
469 return self.commit.get_file_annotate(self.path, pre_load=pre_load)
456
470
457 @LazyProperty
471 @LazyProperty
458 def state(self):
472 def state(self):
459 if not self.commit:
473 if not self.commit:
460 raise NodeError(
474 raise NodeError(
461 "Cannot check state of the node if it's not "
475 "Cannot check state of the node if it's not "
462 "linked with commit")
476 "linked with commit")
463 elif self.path in (node.path for node in self.commit.added):
477 elif self.path in (node.path for node in self.commit.added):
464 return NodeState.ADDED
478 return NodeState.ADDED
465 elif self.path in (node.path for node in self.commit.changed):
479 elif self.path in (node.path for node in self.commit.changed):
466 return NodeState.CHANGED
480 return NodeState.CHANGED
467 else:
481 else:
468 return NodeState.NOT_CHANGED
482 return NodeState.NOT_CHANGED
469
483
470 @property
484 @LazyProperty
471 def is_binary(self):
485 def is_binary(self):
472 """
486 """
473 Returns True if file has binary content.
487 Returns True if file has binary content.
474 """
488 """
475 _bin = '\0' in self._get_content()
489 _bin = self.raw_bytes and '\0' in self.raw_bytes
476 return _bin
490 return _bin
477
491
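A minimal illustrative sketch (not part of the diff) of what the reworked
``is_binary`` check does: it looks for a NUL byte in the undecoded
``raw_bytes`` instead of calling the removed ``_get_content()``. It assumes
``raw_bytes`` falls back to the in-memory content when no commit is attached,
as ``LargeFileNode.raw_bytes`` does further down; paths and contents here are
made up::

    from rhodecode.lib.vcs.nodes import FileNode

    text_node = FileNode('docs/readme.txt', content='hello\n')
    image_node = FileNode('img/logo.png', content='\x89PNG\x00\x1a')

    # a NUL byte in the raw bytes marks the node as binary
    assert not text_node.is_binary
    assert image_node.is_binary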
478 @LazyProperty
492 @LazyProperty
479 def extension(self):
493 def extension(self):
480 """Returns filenode extension"""
494 """Returns filenode extension"""
481 return self.name.split('.')[-1]
495 return self.name.split('.')[-1]
482
496
483 @property
497 @property
484 def is_executable(self):
498 def is_executable(self):
485 """
499 """
486 Returns ``True`` if file has executable flag turned on.
500 Returns ``True`` if file has executable flag turned on.
487 """
501 """
488 return bool(self.mode & stat.S_IXUSR)
502 return bool(self.mode & stat.S_IXUSR)
489
503
490 def get_largefile_node(self):
504 def get_largefile_node(self):
491 """
505 """
492 Try to return a Mercurial FileNode from this node. It does internal
506 Try to return a Mercurial FileNode from this node. It does internal
493 checks inside the largefile store; if the file exists there, it
507 checks inside the largefile store; if the file exists there, it
494 creates a special LargeFileNode instance which can get its content
508 creates a special LargeFileNode instance which can get its content
495 from the LF store.
509 from the LF store.
496 """
510 """
497 if self.commit and self.path.startswith(LARGEFILE_PREFIX):
511 if self.commit and self.path.startswith(LARGEFILE_PREFIX):
498 largefile_path = self.path.split(LARGEFILE_PREFIX)[-1].lstrip('/')
512 largefile_path = self.path.split(LARGEFILE_PREFIX)[-1].lstrip('/')
499 return self.commit.get_largefile_node(largefile_path)
513 return self.commit.get_largefile_node(largefile_path)
500
514
501 def lines(self, count_empty=False):
515 def lines(self, count_empty=False):
502 all_lines, empty_lines = 0, 0
516 all_lines, empty_lines = 0, 0
503
517
504 if not self.is_binary:
518 if not self.is_binary:
505 content = self._get_content()
519 content = self.content
506 if count_empty:
520 if count_empty:
507 all_lines = 0
521 all_lines = 0
508 empty_lines = 0
522 empty_lines = 0
509 for line in content.splitlines(True):
523 for line in content.splitlines(True):
510 if line == '\n':
524 if line == '\n':
511 empty_lines += 1
525 empty_lines += 1
512 all_lines += 1
526 all_lines += 1
513
527
514 return all_lines, all_lines - empty_lines
528 return all_lines, all_lines - empty_lines
515 else:
529 else:
516 # fast method
530 # fast method
517 empty_lines = all_lines = content.count('\n')
531 empty_lines = all_lines = content.count('\n')
518 if all_lines == 0 and content:
532 if all_lines == 0 and content:
519 # one-line without a newline
533 # one-line without a newline
520 empty_lines = all_lines = 1
534 empty_lines = all_lines = 1
521
535
522 return all_lines, empty_lines
536 return all_lines, empty_lines
523
537
524 def __repr__(self):
538 def __repr__(self):
525 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
539 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
526 getattr(self.commit, 'short_id', ''))
540 getattr(self.commit, 'short_id', ''))
527
541
528
542
529 class RemovedFileNode(FileNode):
543 class RemovedFileNode(FileNode):
530 """
544 """
531 Dummy FileNode class - trying to access any public attribute except path,
545 Dummy FileNode class - trying to access any public attribute except path,
532 name, kind or state (or methods/attributes checking those) would raise
546 name, kind or state (or methods/attributes checking those) would raise
533 RemovedFileNodeError.
547 RemovedFileNodeError.
534 """
548 """
535 ALLOWED_ATTRIBUTES = [
549 ALLOWED_ATTRIBUTES = [
536 'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
550 'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
537 'added', 'changed', 'not_changed', 'removed'
551 'added', 'changed', 'not_changed', 'removed'
538 ]
552 ]
539
553
540 def __init__(self, path):
554 def __init__(self, path):
541 """
555 """
542 :param path: relative path to the node
556 :param path: relative path to the node
543 """
557 """
544 super(RemovedFileNode, self).__init__(path=path)
558 super(RemovedFileNode, self).__init__(path=path)
545
559
546 def __getattribute__(self, attr):
560 def __getattribute__(self, attr):
547 if attr.startswith('_') or attr in RemovedFileNode.ALLOWED_ATTRIBUTES:
561 if attr.startswith('_') or attr in RemovedFileNode.ALLOWED_ATTRIBUTES:
548 return super(RemovedFileNode, self).__getattribute__(attr)
562 return super(RemovedFileNode, self).__getattribute__(attr)
549 raise RemovedFileNodeError(
563 raise RemovedFileNodeError(
550 "Cannot access attribute %s on RemovedFileNode" % attr)
564 "Cannot access attribute %s on RemovedFileNode" % attr)
551
565
552 @LazyProperty
566 @LazyProperty
553 def state(self):
567 def state(self):
554 return NodeState.REMOVED
568 return NodeState.REMOVED
555
569
556
570
557 class DirNode(Node):
571 class DirNode(Node):
558 """
572 """
559 DirNode stores list of files and directories within this node.
573 DirNode stores list of files and directories within this node.
560 Nodes may be used standalone but within repository context they
574 Nodes may be used standalone but within repository context they
561 lazily fetch data within the same repository's commit.
575 lazily fetch data within the same repository's commit.
562 """
576 """
563
577
564 def __init__(self, path, nodes=(), commit=None):
578 def __init__(self, path, nodes=(), commit=None):
565 """
579 """
566 Only one of ``nodes`` and ``commit`` may be given. Passing both
580 Only one of ``nodes`` and ``commit`` may be given. Passing both
567 would raise ``NodeError`` exception.
581 would raise ``NodeError`` exception.
568
582
569 :param path: relative path to the node
583 :param path: relative path to the node
570 :param nodes: content may be passed to constructor
584 :param nodes: content may be passed to constructor
571 :param commit: if given, will use it to lazily fetch content
585 :param commit: if given, will use it to lazily fetch content
572 """
586 """
573 if nodes and commit:
587 if nodes and commit:
574 raise NodeError("Cannot use both nodes and commit")
588 raise NodeError("Cannot use both nodes and commit")
575 super(DirNode, self).__init__(path, NodeKind.DIR)
589 super(DirNode, self).__init__(path, NodeKind.DIR)
576 self.commit = commit
590 self.commit = commit
577 self._nodes = nodes
591 self._nodes = nodes
578
592
579 @LazyProperty
593 @LazyProperty
580 def content(self):
594 def content(self):
581 raise NodeError(
595 raise NodeError(
582 "%s represents a dir and has no `content` attribute" % self)
596 "%s represents a dir and has no `content` attribute" % self)
583
597
584 @LazyProperty
598 @LazyProperty
585 def nodes(self):
599 def nodes(self):
586 if self.commit:
600 if self.commit:
587 nodes = self.commit.get_nodes(self.path)
601 nodes = self.commit.get_nodes(self.path)
588 else:
602 else:
589 nodes = self._nodes
603 nodes = self._nodes
590 self._nodes_dict = dict((node.path, node) for node in nodes)
604 self._nodes_dict = dict((node.path, node) for node in nodes)
591 return sorted(nodes)
605 return sorted(nodes)
592
606
593 @LazyProperty
607 @LazyProperty
594 def files(self):
608 def files(self):
595 return sorted((node for node in self.nodes if node.is_file()))
609 return sorted((node for node in self.nodes if node.is_file()))
596
610
597 @LazyProperty
611 @LazyProperty
598 def dirs(self):
612 def dirs(self):
599 return sorted((node for node in self.nodes if node.is_dir()))
613 return sorted((node for node in self.nodes if node.is_dir()))
600
614
601 def __iter__(self):
615 def __iter__(self):
602 for node in self.nodes:
616 for node in self.nodes:
603 yield node
617 yield node
604
618
605 def get_node(self, path):
619 def get_node(self, path):
606 """
620 """
607 Returns node from within this particular ``DirNode``, so it is now
621 Returns node from within this particular ``DirNode``, so it is now
608 allowed to fetch, i.e. node located at 'docs/api/index.rst' from node
622 allowed to fetch, i.e. node located at 'docs/api/index.rst' from node
609 'docs'. In order to access deeper nodes one must fetch nodes between
623 'docs'. In order to access deeper nodes one must fetch nodes between
610 them first - this would work::
624 them first - this would work::
611
625
612 docs = root.get_node('docs')
626 docs = root.get_node('docs')
613 docs.get_node('api').get_node('index.rst')
627 docs.get_node('api').get_node('index.rst')
614
628
615 :param: path - relative to the current node
629 :param: path - relative to the current node
616
630
617 .. note::
631 .. note::
618 To access nodes lazily (as in the example above) the node has to be
632 To access nodes lazily (as in the example above) the node has to be
619 initialized with a related commit object - without it the node is out
633 initialized with a related commit object - without it the node is out
620 of context and may know nothing about anything other than its nearest
634 of context and may know nothing about anything other than its nearest
621 (located at same level) nodes.
635 (located at same level) nodes.
622 """
636 """
623 try:
637 try:
624 path = path.rstrip('/')
638 path = path.rstrip('/')
625 if path == '':
639 if path == '':
626 raise NodeError("Cannot retrieve node without path")
640 raise NodeError("Cannot retrieve node without path")
627 self.nodes # access nodes first in order to set _nodes_dict
641 self.nodes # access nodes first in order to set _nodes_dict
628 paths = path.split('/')
642 paths = path.split('/')
629 if len(paths) == 1:
643 if len(paths) == 1:
630 if not self.is_root():
644 if not self.is_root():
631 path = '/'.join((self.path, paths[0]))
645 path = '/'.join((self.path, paths[0]))
632 else:
646 else:
633 path = paths[0]
647 path = paths[0]
634 return self._nodes_dict[path]
648 return self._nodes_dict[path]
635 elif len(paths) > 1:
649 elif len(paths) > 1:
636 if self.commit is None:
650 if self.commit is None:
637 raise NodeError(
651 raise NodeError(
638 "Cannot access deeper nodes without commit")
652 "Cannot access deeper nodes without commit")
639 else:
653 else:
640 path1, path2 = paths[0], '/'.join(paths[1:])
654 path1, path2 = paths[0], '/'.join(paths[1:])
641 return self.get_node(path1).get_node(path2)
655 return self.get_node(path1).get_node(path2)
642 else:
656 else:
643 raise KeyError
657 raise KeyError
644 except KeyError:
658 except KeyError:
645 raise NodeError("Node does not exist at %s" % path)
659 raise NodeError("Node does not exist at %s" % path)
646
660
647 @LazyProperty
661 @LazyProperty
648 def state(self):
662 def state(self):
649 raise NodeError("Cannot access state of DirNode")
663 raise NodeError("Cannot access state of DirNode")
650
664
651 @LazyProperty
665 @LazyProperty
652 def size(self):
666 def size(self):
653 size = 0
667 size = 0
654 for root, dirs, files in self.commit.walk(self.path):
668 for root, dirs, files in self.commit.walk(self.path):
655 for f in files:
669 for f in files:
656 size += f.size
670 size += f.size
657
671
658 return size
672 return size
659
673
660 def __repr__(self):
674 def __repr__(self):
661 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
675 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
662 getattr(self.commit, 'short_id', ''))
676 getattr(self.commit, 'short_id', ''))
663
677
664
678
665 class RootNode(DirNode):
679 class RootNode(DirNode):
666 """
680 """
667 DirNode being the root node of the repository.
681 DirNode being the root node of the repository.
668 """
682 """
669
683
670 def __init__(self, nodes=(), commit=None):
684 def __init__(self, nodes=(), commit=None):
671 super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)
685 super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)
672
686
673 def __repr__(self):
687 def __repr__(self):
674 return '<%s>' % self.__class__.__name__
688 return '<%s>' % self.__class__.__name__
675
689
676
690
677 class SubModuleNode(Node):
691 class SubModuleNode(Node):
678 """
692 """
679 represents a SubModule of Git or SubRepo of Mercurial
693 represents a SubModule of Git or SubRepo of Mercurial
680 """
694 """
681 is_binary = False
695 is_binary = False
682 size = 0
696 size = 0
683
697
684 def __init__(self, name, url=None, commit=None, alias=None):
698 def __init__(self, name, url=None, commit=None, alias=None):
685 self.path = name
699 self.path = name
686 self.kind = NodeKind.SUBMODULE
700 self.kind = NodeKind.SUBMODULE
687 self.alias = alias
701 self.alias = alias
688
702
689 # we have to use EmptyCommit here since this can point to svn/git/hg
703 # we have to use EmptyCommit here since this can point to svn/git/hg
690 # submodules we cannot get from repository
704 # submodules we cannot get from repository
691 self.commit = EmptyCommit(str(commit), alias=alias)
705 self.commit = EmptyCommit(str(commit), alias=alias)
692 self.url = url or self._extract_submodule_url()
706 self.url = url or self._extract_submodule_url()
693
707
694 def __repr__(self):
708 def __repr__(self):
695 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
709 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
696 getattr(self.commit, 'short_id', ''))
710 getattr(self.commit, 'short_id', ''))
697
711
698 def _extract_submodule_url(self):
712 def _extract_submodule_url(self):
699 # TODO: find a way to parse gits submodule file and extract the
713 # TODO: find a way to parse gits submodule file and extract the
700 # linking URL
714 # linking URL
701 return self.path
715 return self.path
702
716
703 @LazyProperty
717 @LazyProperty
704 def name(self):
718 def name(self):
705 """
719 """
706 Returns the name of the node, so if it is a path,
720 Returns the name of the node, so if it is a path,
707 only the last part is returned.
721 only the last part is returned.
708 """
722 """
709 org = safe_unicode(self.path.rstrip('/').split('/')[-1])
723 org = safe_unicode(self.path.rstrip('/').split('/')[-1])
710 return u'%s @ %s' % (org, self.commit.short_id)
724 return u'%s @ %s' % (org, self.commit.short_id)
711
725
712
726
713 class LargeFileNode(FileNode):
727 class LargeFileNode(FileNode):
714
728
715 def _validate_path(self, path):
729 def _validate_path(self, path):
716 """
730 """
717 we override the check since the LargeFileNode path is an absolute system path
731 we override the check since the LargeFileNode path is an absolute system path
718 """
732 """
719
733
720 def _get_content(self):
734 def raw_bytes(self):
721 if self.commit:
735 if self.commit:
722 with open(self.path, 'rb') as f:
736 with open(self.path, 'rb') as f:
723 content = f.read()
737 content = f.read()
724 else:
738 else:
725 content = self._content
739 content = self._content
726 return content
740 return content
\ No newline at end of file
727
728 @property
729 def content(self):
730 """
731 Returns lazily content of the `FileNode`. If possible, would try to
732 decode content from UTF-8.
733 """
734 content = self._get_content()
735
736 if bool(content and '\0' in content):
737 return content
738 return safe_unicode(content)
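The hunk above removes LargeFileNode's private ``_get_content()`` and its
``content`` override in favour of a single ``raw_bytes`` accessor that reads
straight from the largefile store. A rough sketch (not part of the changeset)
of the resulting split between the two public attributes on a file node,
assuming, as in the sketch further up, that ``raw_bytes`` resolves the
in-memory content when no commit is attached; names and content are
illustrative only::

    from rhodecode.lib.vcs.nodes import FileNode

    node = FileNode('data/notes.txt', content='caf\xc3\xa9\n')

    raw = node.raw_bytes    # undecoded bytes, exactly as stored
    text = node.content     # decoded via safe_unicode for non-binary nodes;
                            # binary nodes get the raw bytes back unchanged

    assert isinstance(raw, str)
    assert isinstance(text, unicode)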
@@ -1,1101 +1,1099 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import re
26 import re
27 import sys
27 import sys
28 import time
28 import time
29 import traceback
29 import traceback
30 import logging
30 import logging
31 import cStringIO
31 import cStringIO
32 import pkg_resources
32 import pkg_resources
33
33
34 import pylons
34 import pylons
35 from pylons.i18n.translation import _
35 from pylons.i18n.translation import _
36 from sqlalchemy import func
36 from sqlalchemy import func
37 from zope.cachedescriptors.property import Lazy as LazyProperty
37 from zope.cachedescriptors.property import Lazy as LazyProperty
38
38
39 import rhodecode
39 import rhodecode
40 from rhodecode.lib.vcs import get_backend
40 from rhodecode.lib.vcs import get_backend
41 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
41 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
42 from rhodecode.lib.vcs.nodes import FileNode
42 from rhodecode.lib.vcs.nodes import FileNode
43 from rhodecode.lib.vcs.backends.base import EmptyCommit
43 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.lib import helpers as h
44 from rhodecode.lib import helpers as h
45
45
46 from rhodecode.lib.auth import (
46 from rhodecode.lib.auth import (
47 HasRepoPermissionAny, HasRepoGroupPermissionAny,
47 HasRepoPermissionAny, HasRepoGroupPermissionAny,
48 HasUserGroupPermissionAny)
48 HasUserGroupPermissionAny)
49 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
49 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
50 from rhodecode.lib import hooks_utils, caches
50 from rhodecode.lib import hooks_utils, caches
51 from rhodecode.lib.utils import (
51 from rhodecode.lib.utils import (
52 get_filesystem_repos, action_logger, make_db_config)
52 get_filesystem_repos, action_logger, make_db_config)
53 from rhodecode.lib.utils2 import (
53 from rhodecode.lib.utils2 import (
54 safe_str, safe_unicode, get_server_url, md5)
54 safe_str, safe_unicode, get_server_url, md5)
55 from rhodecode.model import BaseModel
55 from rhodecode.model import BaseModel
56 from rhodecode.model.db import (
56 from rhodecode.model.db import (
57 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
57 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
58 PullRequest, DbMigrateVersion)
58 PullRequest, DbMigrateVersion)
59 from rhodecode.model.settings import VcsSettingsModel
59 from rhodecode.model.settings import VcsSettingsModel
60
60
61 log = logging.getLogger(__name__)
61 log = logging.getLogger(__name__)
62
62
63
63
64 class UserTemp(object):
64 class UserTemp(object):
65 def __init__(self, user_id):
65 def __init__(self, user_id):
66 self.user_id = user_id
66 self.user_id = user_id
67
67
68 def __repr__(self):
68 def __repr__(self):
69 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
69 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
70
70
71
71
72 class RepoTemp(object):
72 class RepoTemp(object):
73 def __init__(self, repo_id):
73 def __init__(self, repo_id):
74 self.repo_id = repo_id
74 self.repo_id = repo_id
75
75
76 def __repr__(self):
76 def __repr__(self):
77 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
77 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
78
78
79
79
80 class SimpleCachedRepoList(object):
80 class SimpleCachedRepoList(object):
81 """
81 """
82 Lighter version of iteration of repos without the scm initialisation,
82 Lighter version of iteration of repos without the scm initialisation,
83 and with cache usage
83 and with cache usage
84 """
84 """
85 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
85 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
86 self.db_repo_list = db_repo_list
86 self.db_repo_list = db_repo_list
87 self.repos_path = repos_path
87 self.repos_path = repos_path
88 self.order_by = order_by
88 self.order_by = order_by
89 self.reversed = (order_by or '').startswith('-')
89 self.reversed = (order_by or '').startswith('-')
90 if not perm_set:
90 if not perm_set:
91 perm_set = ['repository.read', 'repository.write',
91 perm_set = ['repository.read', 'repository.write',
92 'repository.admin']
92 'repository.admin']
93 self.perm_set = perm_set
93 self.perm_set = perm_set
94
94
95 def __len__(self):
95 def __len__(self):
96 return len(self.db_repo_list)
96 return len(self.db_repo_list)
97
97
98 def __repr__(self):
98 def __repr__(self):
99 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
99 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
100
100
101 def __iter__(self):
101 def __iter__(self):
102 for dbr in self.db_repo_list:
102 for dbr in self.db_repo_list:
103 # check permission at this level
103 # check permission at this level
104 has_perm = HasRepoPermissionAny(*self.perm_set)(
104 has_perm = HasRepoPermissionAny(*self.perm_set)(
105 dbr.repo_name, 'SimpleCachedRepoList check')
105 dbr.repo_name, 'SimpleCachedRepoList check')
106 if not has_perm:
106 if not has_perm:
107 continue
107 continue
108
108
109 tmp_d = {
109 tmp_d = {
110 'name': dbr.repo_name,
110 'name': dbr.repo_name,
111 'dbrepo': dbr.get_dict(),
111 'dbrepo': dbr.get_dict(),
112 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
112 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
113 }
113 }
114 yield tmp_d
114 yield tmp_d
115
115
116
116
117 class _PermCheckIterator(object):
117 class _PermCheckIterator(object):
118
118
119 def __init__(
119 def __init__(
120 self, obj_list, obj_attr, perm_set, perm_checker,
120 self, obj_list, obj_attr, perm_set, perm_checker,
121 extra_kwargs=None):
121 extra_kwargs=None):
122 """
122 """
123 Creates iterator from given list of objects, additionally
123 Creates iterator from given list of objects, additionally
124 checking permission for them from perm_set var
124 checking permission for them from perm_set var
125
125
126 :param obj_list: list of db objects
126 :param obj_list: list of db objects
127 :param obj_attr: attribute of object to pass into perm_checker
127 :param obj_attr: attribute of object to pass into perm_checker
128 :param perm_set: list of permissions to check
128 :param perm_set: list of permissions to check
129 :param perm_checker: callable to check permissions against
129 :param perm_checker: callable to check permissions against
130 """
130 """
131 self.obj_list = obj_list
131 self.obj_list = obj_list
132 self.obj_attr = obj_attr
132 self.obj_attr = obj_attr
133 self.perm_set = perm_set
133 self.perm_set = perm_set
134 self.perm_checker = perm_checker
134 self.perm_checker = perm_checker
135 self.extra_kwargs = extra_kwargs or {}
135 self.extra_kwargs = extra_kwargs or {}
136
136
137 def __len__(self):
137 def __len__(self):
138 return len(self.obj_list)
138 return len(self.obj_list)
139
139
140 def __repr__(self):
140 def __repr__(self):
141 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
141 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
142
142
143 def __iter__(self):
143 def __iter__(self):
144 checker = self.perm_checker(*self.perm_set)
144 checker = self.perm_checker(*self.perm_set)
145 for db_obj in self.obj_list:
145 for db_obj in self.obj_list:
146 # check permission at this level
146 # check permission at this level
147 name = getattr(db_obj, self.obj_attr, None)
147 name = getattr(db_obj, self.obj_attr, None)
148 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
148 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
149 continue
149 continue
150
150
151 yield db_obj
151 yield db_obj
152
152
153
153
154 class RepoList(_PermCheckIterator):
154 class RepoList(_PermCheckIterator):
155
155
156 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
156 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
157 if not perm_set:
157 if not perm_set:
158 perm_set = [
158 perm_set = [
159 'repository.read', 'repository.write', 'repository.admin']
159 'repository.read', 'repository.write', 'repository.admin']
160
160
161 super(RepoList, self).__init__(
161 super(RepoList, self).__init__(
162 obj_list=db_repo_list,
162 obj_list=db_repo_list,
163 obj_attr='repo_name', perm_set=perm_set,
163 obj_attr='repo_name', perm_set=perm_set,
164 perm_checker=HasRepoPermissionAny,
164 perm_checker=HasRepoPermissionAny,
165 extra_kwargs=extra_kwargs)
165 extra_kwargs=extra_kwargs)
166
166
167
167
168 class RepoGroupList(_PermCheckIterator):
168 class RepoGroupList(_PermCheckIterator):
169
169
170 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
170 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
171 if not perm_set:
171 if not perm_set:
172 perm_set = ['group.read', 'group.write', 'group.admin']
172 perm_set = ['group.read', 'group.write', 'group.admin']
173
173
174 super(RepoGroupList, self).__init__(
174 super(RepoGroupList, self).__init__(
175 obj_list=db_repo_group_list,
175 obj_list=db_repo_group_list,
176 obj_attr='group_name', perm_set=perm_set,
176 obj_attr='group_name', perm_set=perm_set,
177 perm_checker=HasRepoGroupPermissionAny,
177 perm_checker=HasRepoGroupPermissionAny,
178 extra_kwargs=extra_kwargs)
178 extra_kwargs=extra_kwargs)
179
179
180
180
181 class UserGroupList(_PermCheckIterator):
181 class UserGroupList(_PermCheckIterator):
182
182
183 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
183 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
184 if not perm_set:
184 if not perm_set:
185 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
185 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
186
186
187 super(UserGroupList, self).__init__(
187 super(UserGroupList, self).__init__(
188 obj_list=db_user_group_list,
188 obj_list=db_user_group_list,
189 obj_attr='users_group_name', perm_set=perm_set,
189 obj_attr='users_group_name', perm_set=perm_set,
190 perm_checker=HasUserGroupPermissionAny,
190 perm_checker=HasUserGroupPermissionAny,
191 extra_kwargs=extra_kwargs)
191 extra_kwargs=extra_kwargs)
192
192
193
193
194 class ScmModel(BaseModel):
194 class ScmModel(BaseModel):
195 """
195 """
196 Generic Scm Model
196 Generic Scm Model
197 """
197 """
198
198
199 @LazyProperty
199 @LazyProperty
200 def repos_path(self):
200 def repos_path(self):
201 """
201 """
202 Gets the repositories root path from database
202 Gets the repositories root path from database
203 """
203 """
204
204
205 settings_model = VcsSettingsModel(sa=self.sa)
205 settings_model = VcsSettingsModel(sa=self.sa)
206 return settings_model.get_repos_location()
206 return settings_model.get_repos_location()
207
207
208 def repo_scan(self, repos_path=None):
208 def repo_scan(self, repos_path=None):
209 """
209 """
210 Listing of repositories in given path. This path should not be a
210 Listing of repositories in given path. This path should not be a
211 repository itself. Return a dictionary of repository objects
211 repository itself. Return a dictionary of repository objects
212
212
213 :param repos_path: path to directory containing repositories
213 :param repos_path: path to directory containing repositories
214 """
214 """
215
215
216 if repos_path is None:
216 if repos_path is None:
217 repos_path = self.repos_path
217 repos_path = self.repos_path
218
218
219 log.info('scanning for repositories in %s', repos_path)
219 log.info('scanning for repositories in %s', repos_path)
220
220
221 config = make_db_config()
221 config = make_db_config()
222 config.set('extensions', 'largefiles', '')
222 config.set('extensions', 'largefiles', '')
223 repos = {}
223 repos = {}
224
224
225 for name, path in get_filesystem_repos(repos_path, recursive=True):
225 for name, path in get_filesystem_repos(repos_path, recursive=True):
226 # name needs to be decomposed and put back together using the /
226 # name needs to be decomposed and put back together using the /
227 # since this is internal storage separator for rhodecode
227 # since this is internal storage separator for rhodecode
228 name = Repository.normalize_repo_name(name)
228 name = Repository.normalize_repo_name(name)
229
229
230 try:
230 try:
231 if name in repos:
231 if name in repos:
232 raise RepositoryError('Duplicate repository name %s '
232 raise RepositoryError('Duplicate repository name %s '
233 'found in %s' % (name, path))
233 'found in %s' % (name, path))
234 elif path[0] in rhodecode.BACKENDS:
234 elif path[0] in rhodecode.BACKENDS:
235 klass = get_backend(path[0])
235 klass = get_backend(path[0])
236 repos[name] = klass(path[1], config=config)
236 repos[name] = klass(path[1], config=config)
237 except OSError:
237 except OSError:
238 continue
238 continue
239 log.debug('found %s paths with repositories', len(repos))
239 log.debug('found %s paths with repositories', len(repos))
240 return repos
240 return repos
241
241
242 def get_repos(self, all_repos=None, sort_key=None):
242 def get_repos(self, all_repos=None, sort_key=None):
243 """
243 """
244 Get all repositories from db and for each repo create its
244 Get all repositories from db and for each repo create its
245 backend instance and fill that backend with information from database
245 backend instance and fill that backend with information from database
246
246
247 :param all_repos: list of repository names as strings
247 :param all_repos: list of repository names as strings
248 give specific repositories list, good for filtering
248 give specific repositories list, good for filtering
249
249
250 :param sort_key: initial sorting of repositories
250 :param sort_key: initial sorting of repositories
251 """
251 """
252 if all_repos is None:
252 if all_repos is None:
253 all_repos = self.sa.query(Repository)\
253 all_repos = self.sa.query(Repository)\
254 .filter(Repository.group_id == None)\
254 .filter(Repository.group_id == None)\
255 .order_by(func.lower(Repository.repo_name)).all()
255 .order_by(func.lower(Repository.repo_name)).all()
256 repo_iter = SimpleCachedRepoList(
256 repo_iter = SimpleCachedRepoList(
257 all_repos, repos_path=self.repos_path, order_by=sort_key)
257 all_repos, repos_path=self.repos_path, order_by=sort_key)
258 return repo_iter
258 return repo_iter
259
259
260 def get_repo_groups(self, all_groups=None):
260 def get_repo_groups(self, all_groups=None):
261 if all_groups is None:
261 if all_groups is None:
262 all_groups = RepoGroup.query()\
262 all_groups = RepoGroup.query()\
263 .filter(RepoGroup.group_parent_id == None).all()
263 .filter(RepoGroup.group_parent_id == None).all()
264 return [x for x in RepoGroupList(all_groups)]
264 return [x for x in RepoGroupList(all_groups)]
265
265
266 def mark_for_invalidation(self, repo_name, delete=False):
266 def mark_for_invalidation(self, repo_name, delete=False):
267 """
267 """
268 Mark caches of this repo invalid in the database. `delete` flag
268 Mark caches of this repo invalid in the database. `delete` flag
269 removes the cache entries
269 removes the cache entries
270
270
271 :param repo_name: the repo_name for which caches should be marked
271 :param repo_name: the repo_name for which caches should be marked
272 invalid, or deleted
272 invalid, or deleted
273 :param delete: delete the entry keys instead of setting bool
273 :param delete: delete the entry keys instead of setting bool
274 flag on them
274 flag on them
275 """
275 """
276 CacheKey.set_invalidate(repo_name, delete=delete)
276 CacheKey.set_invalidate(repo_name, delete=delete)
277 repo = Repository.get_by_repo_name(repo_name)
277 repo = Repository.get_by_repo_name(repo_name)
278
278
279 if repo:
279 if repo:
280 config = repo._config
280 config = repo._config
281 config.set('extensions', 'largefiles', '')
281 config.set('extensions', 'largefiles', '')
282 repo.update_commit_cache(config=config, cs_cache=None)
282 repo.update_commit_cache(config=config, cs_cache=None)
283 caches.clear_repo_caches(repo_name)
283 caches.clear_repo_caches(repo_name)
284
284
285 def toggle_following_repo(self, follow_repo_id, user_id):
285 def toggle_following_repo(self, follow_repo_id, user_id):
286
286
287 f = self.sa.query(UserFollowing)\
287 f = self.sa.query(UserFollowing)\
288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
289 .filter(UserFollowing.user_id == user_id).scalar()
289 .filter(UserFollowing.user_id == user_id).scalar()
290
290
291 if f is not None:
291 if f is not None:
292 try:
292 try:
293 self.sa.delete(f)
293 self.sa.delete(f)
294 action_logger(UserTemp(user_id),
294 action_logger(UserTemp(user_id),
295 'stopped_following_repo',
295 'stopped_following_repo',
296 RepoTemp(follow_repo_id))
296 RepoTemp(follow_repo_id))
297 return
297 return
298 except Exception:
298 except Exception:
299 log.error(traceback.format_exc())
299 log.error(traceback.format_exc())
300 raise
300 raise
301
301
302 try:
302 try:
303 f = UserFollowing()
303 f = UserFollowing()
304 f.user_id = user_id
304 f.user_id = user_id
305 f.follows_repo_id = follow_repo_id
305 f.follows_repo_id = follow_repo_id
306 self.sa.add(f)
306 self.sa.add(f)
307
307
308 action_logger(UserTemp(user_id),
308 action_logger(UserTemp(user_id),
309 'started_following_repo',
309 'started_following_repo',
310 RepoTemp(follow_repo_id))
310 RepoTemp(follow_repo_id))
311 except Exception:
311 except Exception:
312 log.error(traceback.format_exc())
312 log.error(traceback.format_exc())
313 raise
313 raise
314
314
315 def toggle_following_user(self, follow_user_id, user_id):
315 def toggle_following_user(self, follow_user_id, user_id):
316 f = self.sa.query(UserFollowing)\
316 f = self.sa.query(UserFollowing)\
317 .filter(UserFollowing.follows_user_id == follow_user_id)\
317 .filter(UserFollowing.follows_user_id == follow_user_id)\
318 .filter(UserFollowing.user_id == user_id).scalar()
318 .filter(UserFollowing.user_id == user_id).scalar()
319
319
320 if f is not None:
320 if f is not None:
321 try:
321 try:
322 self.sa.delete(f)
322 self.sa.delete(f)
323 return
323 return
324 except Exception:
324 except Exception:
325 log.error(traceback.format_exc())
325 log.error(traceback.format_exc())
326 raise
326 raise
327
327
328 try:
328 try:
329 f = UserFollowing()
329 f = UserFollowing()
330 f.user_id = user_id
330 f.user_id = user_id
331 f.follows_user_id = follow_user_id
331 f.follows_user_id = follow_user_id
332 self.sa.add(f)
332 self.sa.add(f)
333 except Exception:
333 except Exception:
334 log.error(traceback.format_exc())
334 log.error(traceback.format_exc())
335 raise
335 raise
336
336
337 def is_following_repo(self, repo_name, user_id, cache=False):
337 def is_following_repo(self, repo_name, user_id, cache=False):
338 r = self.sa.query(Repository)\
338 r = self.sa.query(Repository)\
339 .filter(Repository.repo_name == repo_name).scalar()
339 .filter(Repository.repo_name == repo_name).scalar()
340
340
341 f = self.sa.query(UserFollowing)\
341 f = self.sa.query(UserFollowing)\
342 .filter(UserFollowing.follows_repository == r)\
342 .filter(UserFollowing.follows_repository == r)\
343 .filter(UserFollowing.user_id == user_id).scalar()
343 .filter(UserFollowing.user_id == user_id).scalar()
344
344
345 return f is not None
345 return f is not None
346
346
347 def is_following_user(self, username, user_id, cache=False):
347 def is_following_user(self, username, user_id, cache=False):
348 u = User.get_by_username(username)
348 u = User.get_by_username(username)
349
349
350 f = self.sa.query(UserFollowing)\
350 f = self.sa.query(UserFollowing)\
351 .filter(UserFollowing.follows_user == u)\
351 .filter(UserFollowing.follows_user == u)\
352 .filter(UserFollowing.user_id == user_id).scalar()
352 .filter(UserFollowing.user_id == user_id).scalar()
353
353
354 return f is not None
354 return f is not None
355
355
356 def get_followers(self, repo):
356 def get_followers(self, repo):
357 repo = self._get_repo(repo)
357 repo = self._get_repo(repo)
358
358
359 return self.sa.query(UserFollowing)\
359 return self.sa.query(UserFollowing)\
360 .filter(UserFollowing.follows_repository == repo).count()
360 .filter(UserFollowing.follows_repository == repo).count()
361
361
362 def get_forks(self, repo):
362 def get_forks(self, repo):
363 repo = self._get_repo(repo)
363 repo = self._get_repo(repo)
364 return self.sa.query(Repository)\
364 return self.sa.query(Repository)\
365 .filter(Repository.fork == repo).count()
365 .filter(Repository.fork == repo).count()
366
366
367 def get_pull_requests(self, repo):
367 def get_pull_requests(self, repo):
368 repo = self._get_repo(repo)
368 repo = self._get_repo(repo)
369 return self.sa.query(PullRequest)\
369 return self.sa.query(PullRequest)\
370 .filter(PullRequest.target_repo == repo)\
370 .filter(PullRequest.target_repo == repo)\
371 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
371 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
372
372
373 def mark_as_fork(self, repo, fork, user):
373 def mark_as_fork(self, repo, fork, user):
374 repo = self._get_repo(repo)
374 repo = self._get_repo(repo)
375 fork = self._get_repo(fork)
375 fork = self._get_repo(fork)
376 if fork and repo.repo_id == fork.repo_id:
376 if fork and repo.repo_id == fork.repo_id:
377 raise Exception("Cannot set repository as fork of itself")
377 raise Exception("Cannot set repository as fork of itself")
378
378
379 if fork and repo.repo_type != fork.repo_type:
379 if fork and repo.repo_type != fork.repo_type:
380 raise RepositoryError(
380 raise RepositoryError(
381 "Cannot set repository as fork of repository with other type")
381 "Cannot set repository as fork of repository with other type")
382
382
383 repo.fork = fork
383 repo.fork = fork
384 self.sa.add(repo)
384 self.sa.add(repo)
385 return repo
385 return repo
386
386
387 def pull_changes(self, repo, username):
387 def pull_changes(self, repo, username):
388 dbrepo = self._get_repo(repo)
388 dbrepo = self._get_repo(repo)
389 clone_uri = dbrepo.clone_uri
389 clone_uri = dbrepo.clone_uri
390 if not clone_uri:
390 if not clone_uri:
391 raise Exception("This repository doesn't have a clone uri")
391 raise Exception("This repository doesn't have a clone uri")
392
392
393 repo = dbrepo.scm_instance(cache=False)
393 repo = dbrepo.scm_instance(cache=False)
394 # TODO: marcink fix this and re-enable since we need common logic
394 # TODO: marcink fix this and re-enable since we need common logic
395 # for hg/git remove hooks so we don't trigger them on fetching
395 # for hg/git remove hooks so we don't trigger them on fetching
396 # commits from remote
396 # commits from remote
397 repo.config.clear_section('hooks')
397 repo.config.clear_section('hooks')
398
398
399 repo_name = dbrepo.repo_name
399 repo_name = dbrepo.repo_name
400 try:
400 try:
401 # TODO: we need to make sure those operations call proper hooks !
401 # TODO: we need to make sure those operations call proper hooks !
402 repo.pull(clone_uri)
402 repo.pull(clone_uri)
403
403
404 self.mark_for_invalidation(repo_name)
404 self.mark_for_invalidation(repo_name)
405 except Exception:
405 except Exception:
406 log.error(traceback.format_exc())
406 log.error(traceback.format_exc())
407 raise
407 raise
408
408
409 def commit_change(self, repo, repo_name, commit, user, author, message,
409 def commit_change(self, repo, repo_name, commit, user, author, message,
410 content, f_path):
410 content, f_path):
411 """
411 """
412 Commits changes
412 Commits changes
413
413
414 :param repo: SCM instance
414 :param repo: SCM instance
415
415
416 """
416 """
417 user = self._get_user(user)
417 user = self._get_user(user)
418
418
419 # decoding here will force that we have proper encoded values
419 # decoding here will force that we have proper encoded values
420 # in any other case this will throw exceptions and deny commit
420 # in any other case this will throw exceptions and deny commit
421 content = safe_str(content)
421 content = safe_str(content)
422 path = safe_str(f_path)
422 path = safe_str(f_path)
423 # message and author needs to be unicode
423 # message and author needs to be unicode
424 # proper backend should then translate that into required type
424 # proper backend should then translate that into required type
425 message = safe_unicode(message)
425 message = safe_unicode(message)
426 author = safe_unicode(author)
426 author = safe_unicode(author)
427 imc = repo.in_memory_commit
427 imc = repo.in_memory_commit
428 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
428 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
429 try:
429 try:
430 # TODO: handle pre-push action !
430 # TODO: handle pre-push action !
431 tip = imc.commit(
431 tip = imc.commit(
432 message=message, author=author, parents=[commit],
432 message=message, author=author, parents=[commit],
433 branch=commit.branch)
433 branch=commit.branch)
434 except Exception as e:
434 except Exception as e:
435 log.error(traceback.format_exc())
435 log.error(traceback.format_exc())
436 raise IMCCommitError(str(e))
436 raise IMCCommitError(str(e))
437 finally:
437 finally:
438 # always clear caches, if commit fails we want fresh object also
438 # always clear caches, if commit fails we want fresh object also
439 self.mark_for_invalidation(repo_name)
439 self.mark_for_invalidation(repo_name)
440
440
441 # We trigger the post-push action
441 # We trigger the post-push action
442 hooks_utils.trigger_post_push_hook(
442 hooks_utils.trigger_post_push_hook(
443 username=user.username, action='push_local', repo_name=repo_name,
443 username=user.username, action='push_local', repo_name=repo_name,
444 repo_alias=repo.alias, commit_ids=[tip.raw_id])
444 repo_alias=repo.alias, commit_ids=[tip.raw_id])
445 return tip
445 return tip
446
446
447 def _sanitize_path(self, f_path):
447 def _sanitize_path(self, f_path):
448 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
448 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
449 raise NonRelativePathError('%s is not a relative path' % f_path)
449 raise NonRelativePathError('%s is not a relative path' % f_path)
450 if f_path:
450 if f_path:
451 f_path = os.path.normpath(f_path)
451 f_path = os.path.normpath(f_path)
452 return f_path
452 return f_path
453
453
454 def get_dirnode_metadata(self, commit, dir_node):
454 def get_dirnode_metadata(self, commit, dir_node):
455 if not dir_node.is_dir():
455 if not dir_node.is_dir():
456 return []
456 return []
457
457
458 data = []
458 data = []
459 for node in dir_node:
459 for node in dir_node:
460 if not node.is_file():
460 if not node.is_file():
461 # we skip file-nodes
461 # we skip file-nodes
462 continue
462 continue
463
463
464 last_commit = node.last_commit
464 last_commit = node.last_commit
465 last_commit_date = last_commit.date
465 last_commit_date = last_commit.date
466 data.append({
466 data.append({
467 'name': node.name,
467 'name': node.name,
468 'size': h.format_byte_size_binary(node.size),
468 'size': h.format_byte_size_binary(node.size),
469 'modified_at': h.format_date(last_commit_date),
469 'modified_at': h.format_date(last_commit_date),
470 'modified_ts': last_commit_date.isoformat(),
470 'modified_ts': last_commit_date.isoformat(),
471 'revision': last_commit.revision,
471 'revision': last_commit.revision,
472 'short_id': last_commit.short_id,
472 'short_id': last_commit.short_id,
473 'message': h.escape(last_commit.message),
473 'message': h.escape(last_commit.message),
474 'author': h.escape(last_commit.author),
474 'author': h.escape(last_commit.author),
475 'user_profile': h.gravatar_with_user(last_commit.author),
475 'user_profile': h.gravatar_with_user(last_commit.author),
476 })
476 })
477
477
478 return data
478 return data
479
479
480 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
480 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
481 extended_info=False, content=False):
481 extended_info=False, content=False):
482 """
482 """
483 recursive walk in root dir and return a set of all path in that dir
483 recursive walk in root dir and return a set of all path in that dir
484 based on repository walk function
484 based on repository walk function
485
485
486 :param repo_name: name of repository
486 :param repo_name: name of repository
487 :param commit_id: commit id for which to list nodes
487 :param commit_id: commit id for which to list nodes
488 :param root_path: root path to list
488 :param root_path: root path to list
489 :param flat: return as a list, if False returns a dict with decription
489 :param flat: return as a list, if False returns a dict with description
490
490
491 """
491 """
492 _files = list()
492 _files = list()
493 _dirs = list()
493 _dirs = list()
494 try:
494 try:
495 _repo = self._get_repo(repo_name)
495 _repo = self._get_repo(repo_name)
496 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
496 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
497 root_path = root_path.lstrip('/')
497 root_path = root_path.lstrip('/')
498 for __, dirs, files in commit.walk(root_path):
498 for __, dirs, files in commit.walk(root_path):
499 for f in files:
499 for f in files:
500 _content = None
500 _content = None
501 _data = f.unicode_path
501 _data = f.unicode_path
502
502 if not flat:
503 if not flat:
503 _data = {
504 _data = {
504 "name": f.unicode_path,
505 "name": f.unicode_path,
505 "type": "file",
506 "type": "file",
506 }
507 }
507 if extended_info:
508 if extended_info:
508 _content = safe_str(f.content)
509 _data.update({
509 _data.update({
510 "md5": md5(_content),
510 "md5": f.md5,
511 "binary": f.is_binary,
511 "binary": f.is_binary,
512 "size": f.size,
512 "size": f.size,
513 "extension": f.extension,
513 "extension": f.extension,
514
515 "mimetype": f.mimetype,
514 "mimetype": f.mimetype,
516 "lines": f.lines()[0]
515 "lines": f.lines()[0]
517 })
516 })
517
518 if content:
518 if content:
519 full_content = None
519 full_content = None
520 if not f.is_binary:
520 if not f.is_binary:
521 # in case we loaded the _content already
521 full_content = safe_str(f.content)
522 # re-use it, or load from f[ile]
523 full_content = _content or safe_str(f.content)
524
522
525 _data.update({
523 _data.update({
526 "content": full_content
524 "content": full_content,
527 })
525 })
528 _files.append(_data)
526 _files.append(_data)
529 for d in dirs:
527 for d in dirs:
530 _data = d.unicode_path
528 _data = d.unicode_path
531 if not flat:
529 if not flat:
532 _data = {
530 _data = {
533 "name": d.unicode_path,
531 "name": d.unicode_path,
534 "type": "dir",
532 "type": "dir",
535 }
533 }
536 if extended_info:
534 if extended_info:
537 _data.update({
535 _data.update({
538 "md5": None,
536 "md5": None,
539 "binary": None,
537 "binary": None,
540 "size": None,
538 "size": None,
541 "extension": None,
539 "extension": None,
542 })
540 })
543 if content:
541 if content:
544 _data.update({
542 _data.update({
545 "content": None
543 "content": None
546 })
544 })
547 _dirs.append(_data)
545 _dirs.append(_data)
548 except RepositoryError:
546 except RepositoryError:
549 log.debug("Exception in get_nodes", exc_info=True)
547 log.debug("Exception in get_nodes", exc_info=True)
550 raise
548 raise
551
549
552 return _dirs, _files
550 return _dirs, _files
553
551
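The reworked hunk above stops decoding file content just to compute a
checksum: the md5 now comes from the node's own ``md5`` attribute, and the
content is decoded only when explicitly requested and only for non-binary
files. A rough usage sketch; the repository name and commit id below are
purely illustrative::

    from rhodecode.model.scm import ScmModel

    dirs, files = ScmModel().get_nodes(
        'some-repo', commit_id='tip', root_path='docs',
        flat=False, extended_info=True, content=True)

    for f in files:
        # each file entry carries md5, binary, size, extension, mimetype,
        # line count and (for non-binary files) the decoded content
        print f['name'], f['md5'], f['lines']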
554 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
552 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
555 author=None, trigger_push_hook=True):
553 author=None, trigger_push_hook=True):
556 """
554 """
557 Commits given multiple nodes into repo
555 Commits given multiple nodes into repo
558
556
559 :param user: RhodeCode User object or user_id, the committer
557 :param user: RhodeCode User object or user_id, the committer
560 :param repo: RhodeCode Repository object
558 :param repo: RhodeCode Repository object
561 :param message: commit message
559 :param message: commit message
562 :param nodes: mapping {filename:{'content':content},...}
560 :param nodes: mapping {filename:{'content':content},...}
563 :param parent_commit: parent commit, can be empty; then it's the
561 :param parent_commit: parent commit, can be empty; then it's the
564 initial commit
562 initial commit
565 :param author: author of commit, can be different than the committer,
563 :param author: author of commit, can be different than the committer,
566 only for git
564 only for git
567 :param trigger_push_hook: trigger push hooks
565 :param trigger_push_hook: trigger push hooks
568
566
569 :returns: newly committed commit
567 :returns: newly committed commit
570 """
568 """
571
569
572 user = self._get_user(user)
570 user = self._get_user(user)
573 scm_instance = repo.scm_instance(cache=False)
571 scm_instance = repo.scm_instance(cache=False)
574
572
575 processed_nodes = []
573 processed_nodes = []
576 for f_path in nodes:
574 for f_path in nodes:
577 f_path = self._sanitize_path(f_path)
575 f_path = self._sanitize_path(f_path)
578 content = nodes[f_path]['content']
576 content = nodes[f_path]['content']
579 f_path = safe_str(f_path)
577 f_path = safe_str(f_path)
580 # decoding here will force that we have proper encoded values
578 # decoding here will force that we have proper encoded values
581 # in any other case this will throw exceptions and deny commit
579 # in any other case this will throw exceptions and deny commit
582 if isinstance(content, (basestring,)):
580 if isinstance(content, (basestring,)):
583 content = safe_str(content)
581 content = safe_str(content)
584 elif isinstance(content, (file, cStringIO.OutputType,)):
582 elif isinstance(content, (file, cStringIO.OutputType,)):
585 content = content.read()
583 content = content.read()
586 else:
584 else:
587 raise Exception('Content is of unrecognized type %s' % (
585 raise Exception('Content is of unrecognized type %s' % (
588 type(content)
586 type(content)
589 ))
587 ))
590 processed_nodes.append((f_path, content))
588 processed_nodes.append((f_path, content))
591
589
592 message = safe_unicode(message)
590 message = safe_unicode(message)
593 commiter = user.full_contact
591 commiter = user.full_contact
594 author = safe_unicode(author) if author else commiter
592 author = safe_unicode(author) if author else commiter
595
593
596 imc = scm_instance.in_memory_commit
594 imc = scm_instance.in_memory_commit
597
595
598 if not parent_commit:
596 if not parent_commit:
599 parent_commit = EmptyCommit(alias=scm_instance.alias)
597 parent_commit = EmptyCommit(alias=scm_instance.alias)
600
598
601 if isinstance(parent_commit, EmptyCommit):
599 if isinstance(parent_commit, EmptyCommit):
602 # EmptyCommit means we're editing an empty repository
600 # EmptyCommit means we're editing an empty repository
603 parents = None
601 parents = None
604 else:
602 else:
605 parents = [parent_commit]
603 parents = [parent_commit]
606 # add multiple nodes
604 # add multiple nodes
607 for path, content in processed_nodes:
605 for path, content in processed_nodes:
608 imc.add(FileNode(path, content=content))
606 imc.add(FileNode(path, content=content))
609 # TODO: handle pre push scenario
607 # TODO: handle pre push scenario
610 tip = imc.commit(message=message,
608 tip = imc.commit(message=message,
611 author=author,
609 author=author,
612 parents=parents,
610 parents=parents,
613 branch=parent_commit.branch)
611 branch=parent_commit.branch)
614
612
615 self.mark_for_invalidation(repo.repo_name)
613 self.mark_for_invalidation(repo.repo_name)
616 if trigger_push_hook:
614 if trigger_push_hook:
617 hooks_utils.trigger_post_push_hook(
615 hooks_utils.trigger_post_push_hook(
618 username=user.username, action='push_local',
616 username=user.username, action='push_local',
619 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
617 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
620 commit_ids=[tip.raw_id])
618 commit_ids=[tip.raw_id])
621 return tip
619 return tip
622
620
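    # A minimal usage sketch for the node-creation method above (hypothetical
    # caller code; the method name `create_nodes`, the enclosing `ScmModel`
    # class, and `db_user`/`db_repo` are assumptions, not definitions shown
    # in this diff):
    #
    #   tip = ScmModel().create_nodes(
    #       user=db_user, repo=db_repo, message=u'Added readme and setup',
    #       nodes={
    #           'README.rst': {'content': 'project docs'},
    #           'setup.py': {'content': 'from setuptools import setup'},
    #       },
    #       trigger_push_hook=False)
    #
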
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one; also sanitize
            # the path against tricks with relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']

            filenode = FileNode(old_filename, content=content)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficiently, needs vcs lib
                    # changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario
            # commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches; if the commit fails we want a fresh object too
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])

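    # Illustrative shape of the `nodes` mapping consumed by update_nodes
    # above (file names and contents are made up). The keys read per entry
    # are 'op', 'filename' and 'content'; a 'mod' entry whose 'filename'
    # differs from the mapping key is handled as a rename (remove + add):
    #
    #   nodes = {
    #       'docs/old_name.rst': {'op': 'mod',
    #                             'filename': 'docs/new_name.rst',
    #                             'content': 'updated text'},
    #       'obsolete.txt': {'op': 'del',
    #                        'filename': 'obsolete.txt',
    #                        'content': ''},
    #   }
    #
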
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes the given multiple nodes from `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty, then it's the
            initial commit
        :param author: author of the commit, can be different than the
            committer, only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty, but for compatibility it allows the same
            # dict structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # remove multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

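    # Usage sketch for delete_nodes above (hypothetical caller code;
    # `ScmModel`, `db_user` and `db_repo` are assumptions as before). The
    # 'content' entry may be omitted, since only .get('content') is used:
    #
    #   tip = ScmModel().delete_nodes(
    #       user=db_user, repo=db_repo, message=u'Remove obsolete file',
    #       nodes={'obsolete.txt': {}})
    #
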
    def strip(self, repo, commit_id, branch):
        scm_instance = repo.scm_instance(cache=False)
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    def get_repo_landing_revs(self, repo=None):
        """
        Generates select options with tags, branches and bookmarks
        (bookmarks for hg only), grouped by type

        :param repo:
        """

        hist_l = []
        choices = []
        repo = self._get_repo(repo)
        hist_l.append(['rev:tip', _('latest tip')])
        choices.append('rev:tip')
        if not repo:
            return choices, hist_l

        repo = repo.scm_instance()

        branches_group = (
            [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
             for b in repo.branches],
            _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        if repo.alias == 'hg':
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l

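    # Illustrative result of get_repo_landing_revs above for a small hg repo
    # (names and labels are made up; the bookmarks group only appears for hg):
    #
    #   choices = ['rev:tip', u'branch:default', u'book:stable', u'tag:v1.0']
    #   hist_l = [
    #       ['rev:tip', u'latest tip'],
    #       ([(u'branch:default', u'default')], u'Branches'),
    #       ([(u'book:stable', u'stable')], u'Bookmarks'),
    #       ([(u'tag:v1.0', u'v1.0')], u'Tags'),
    #   ]
    #
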
    def install_git_hook(self, repo, force_create=False):
        """
        Creates a rhodecode hook inside a git repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if a hook with the same name exists
        """

        loc = os.path.join(repo.path, 'hooks')
        if not repo.bare:
            loc = os.path.join(repo.path, '.git', 'hooks')
        if not os.path.isdir(loc):
            os.makedirs(loc, mode=0777)

        tmpl_post = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
        tmpl_pre = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))

        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
            _hook_file = os.path.join(loc, '%s-receive' % h_type)
            log.debug('Installing git hook in repo %s', repo)
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)

            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', h_type)
                try:
                    with open(_hook_file, 'wb') as f:
                        tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
                        tmpl = tmpl.replace('_ENV_', sys.executable)
                        f.write(tmpl)
                    os.chmod(_hook_file, 0755)
                except IOError:
                    log.exception('error writing hook file %s', _hook_file)
            else:
                log.debug('skipping writing hook file')

    def install_svn_hooks(self, repo, force_create=False):
        """
        Creates rhodecode hooks inside an svn repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if a hook with the same name exists
        """
        hooks_path = os.path.join(repo.path, 'hooks')
        if not os.path.isdir(hooks_path):
            os.makedirs(hooks_path)
        post_commit_tmpl = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
        pre_commit_template = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
        templates = {
            'post-commit': post_commit_tmpl,
            'pre-commit': pre_commit_template
        }
        for filename in templates:
            _hook_file = os.path.join(hooks_path, filename)
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)
            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', filename)
                template = templates[filename]
                try:
                    with open(_hook_file, 'wb') as f:
                        template = template.replace(
                            '_TMPL_', rhodecode.__version__)
                        template = template.replace('_ENV_', sys.executable)
                        f.write(template)
                    os.chmod(_hook_file, 0755)
                except IOError:
                    log.exception('error writing hook file %s', filename)
            else:
                log.debug('skipping writing hook file')

    def install_hooks(self, repo, repo_type):
        if repo_type == 'git':
            self.install_git_hook(repo)
        elif repo_type == 'svn':
            self.install_svn_hooks(repo)

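    # Usage sketch for the hook installers above (hypothetical; `vcs_repo` is
    # assumed to be the low-level VCS repository instance, e.g. obtained via
    # repo.scm_instance() elsewhere in the model):
    #
    #   ScmModel().install_hooks(vcs_repo, repo_type='git')
    #   # or force re-creation of existing rhodecode git hooks:
    #   ScmModel().install_git_hook(vcs_repo, force_create=True)
    #
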
    def get_server_info(self, environ=None):
        import platform
        import rhodecode
        import pkg_resources
        from rhodecode.model.meta import Base as sql_base, Session
        from sqlalchemy.engine import url
        from rhodecode.lib.base import get_server_ip_addr, get_server_port
        from rhodecode.lib.vcs.backends.git import discover_git_version
        from rhodecode.model.gist import GIST_STORE_LOC

        try:
            # cygwin cannot yet have psutil support.
            import psutil
        except ImportError:
            psutil = None

        environ = environ or {}
        _NA = 'NOT AVAILABLE'
        _memory = _NA
        _uptime = _NA
        _boot_time = _NA
        _cpu = _NA
        _disk = dict(percent=0, used=0, total=0, error='')
        _load = {'1_min': _NA, '5_min': _NA, '15_min': _NA}

        model = VcsSettingsModel()
        storage_path = model.get_repos_location()
        gist_storage_path = os.path.join(storage_path, GIST_STORE_LOC)
        archive_storage_path = rhodecode.CONFIG.get('archive_cache_dir', '')
        search_index_storage_path = rhodecode.CONFIG.get('search.location', '')

        if psutil:
            # disk storage
            try:
                _disk = dict(psutil.disk_usage(storage_path)._asdict())
            except Exception as e:
                log.exception('Failed to fetch disk info')
                _disk = {'percent': 0, 'used': 0, 'total': 0, 'error': str(e)}

            # memory
            _memory = dict(psutil.virtual_memory()._asdict())
            _memory['percent2'] = psutil._common.usage_percent(
                (_memory['total'] - _memory['free']),
                _memory['total'], 1)

            # load averages
            if hasattr(psutil.os, 'getloadavg'):
                _load = dict(zip(
                    ['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
            _uptime = time.time() - psutil.boot_time()
            _boot_time = psutil.boot_time()
            _cpu = psutil.cpu_percent(0.5)

        mods = dict([(p.project_name, p.version)
                     for p in pkg_resources.working_set])

        def get_storage_size(storage_path):
            sizes = []
            for file_ in os.listdir(storage_path):
                storage_file = os.path.join(storage_path, file_)
                if os.path.isfile(storage_file):
                    try:
                        sizes.append(os.path.getsize(storage_file))
                    except OSError:
                        log.exception('Failed to get size of storage file %s',
                                      storage_file)
                        pass

            return sum(sizes)

        # archive cache storage
        _disk_archive = {'percent': 0, 'used': 0, 'total': 0}
        try:
            archive_storage_path_exists = os.path.isdir(
                archive_storage_path)
            if archive_storage_path and archive_storage_path_exists:
                used = get_storage_size(archive_storage_path)
                _disk_archive.update({
                    'used': used,
                    'total': used,
                })
        except Exception as e:
            log.exception('failed to fetch archive cache storage')
            _disk_archive['error'] = str(e)

        # search index storage
        _disk_index = {'percent': 0, 'used': 0, 'total': 0}
        try:
            search_index_storage_path_exists = os.path.isdir(
                search_index_storage_path)
            if search_index_storage_path_exists:
                used = get_storage_size(search_index_storage_path)
                _disk_index.update({
                    'percent': 100,
                    'used': used,
                    'total': used,
                })
        except Exception as e:
            log.exception('failed to fetch search index storage')
            _disk_index['error'] = str(e)

        # gist storage
        _disk_gist = {'percent': 0, 'used': 0, 'total': 0, 'items': 0}
        try:
            items_count = 0
            used = 0
            for root, dirs, files in os.walk(safe_str(gist_storage_path)):
                if root == gist_storage_path:
                    items_count = len(dirs)

                for f in files:
                    try:
                        used += os.path.getsize(os.path.join(root, f))
                    except OSError:
                        pass
            _disk_gist.update({
                'percent': 100,
                'used': used,
                'total': used,
                'items': items_count
            })
        except Exception as e:
            log.exception('failed to fetch gist storage items')
            _disk_gist['error'] = str(e)

        # GIT info
        git_ver = discover_git_version()

        # SVN info
        # TODO: johbo: Add discover_svn_version to replace this code.
        try:
            import svn.core
            svn_ver = svn.core.SVN_VERSION
        except ImportError:
            svn_ver = None

        # DB stuff
        db_info = url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
        db_type = db_info.__to_string__()
        try:
            engine = sql_base.metadata.bind
            db_server_info = engine.dialect._get_server_version_info(
                Session.connection(bind=engine))
            db_version = '%s %s' % (db_info.drivername,
                                    '.'.join(map(str, db_server_info)))
        except Exception:
            log.exception('failed to fetch db version')
            db_version = '%s %s' % (db_info.drivername, '?')

        db_migrate = DbMigrateVersion.query().filter(
            DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
        db_migrate_version = db_migrate.version

        info = {
            'py_version': ' '.join(platform._sys_version()),
            'py_path': sys.executable,
            'py_modules': sorted(mods.items(), key=lambda k: k[0].lower()),

            'platform': safe_unicode(platform.platform()),
            'storage': storage_path,
            'archive_storage': archive_storage_path,
            'index_storage': search_index_storage_path,
            'gist_storage': gist_storage_path,

            'db_type': db_type,
            'db_version': db_version,
            'db_migrate_version': db_migrate_version,

            'rhodecode_version': rhodecode.__version__,
            'rhodecode_config_ini': rhodecode.CONFIG.get('__file__'),
            'server_ip': '%s:%s' % (
                get_server_ip_addr(environ, log_errors=False),
                get_server_port(environ)
            ),
            'server_id': rhodecode.CONFIG.get('instance_id'),

            'git_version': safe_unicode(git_ver),
            'hg_version': mods.get('mercurial'),
            'svn_version': svn_ver,

            'uptime': _uptime,
            'boot_time': _boot_time,
            'load': _load,
            'cpu': _cpu,
            'memory': _memory,
            'disk': _disk,
            'disk_archive': _disk_archive,
            'disk_gist': _disk_gist,
            'disk_index': _disk_index,
        }
        return info
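    # Trimmed, illustrative sketch of the dict returned by get_server_info
    # above (all values are made up):
    #
    #   {'rhodecode_version': '4.1.0',
    #    'db_type': 'sqlite:////var/opt/rhodecode.db',
    #    'git_version': u'git version 2.7.4',
    #    'cpu': 3.0, 'uptime': 123456.7,
    #    'disk': {'percent': 41.2, 'used': 52613349376,
    #             'total': 128033222656}, ...}
    #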


def _check_rhodecode_hook(hook_path):
    """
    Check if the hook was created by RhodeCode
    """
    if not os.path.exists(hook_path):
        return True

    log.debug('hook exists, checking if it is from rhodecode')
    hook_content = _read_hook(hook_path)
    matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
    if matches:
        try:
            version = matches.groups()[0]
            log.debug('got %s, it is rhodecode', version)
            return True
        except Exception:
            log.exception("Exception while reading the hook version.")

    return False

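# _check_rhodecode_hook above decides whether an existing hook file may be
# overwritten by searching its content for a RC_HOOK_VER assignment; an
# installed RhodeCode hook is expected to contain a line like the following
# (the version string is illustrative):
#
#   RC_HOOK_VER = '4.1.0'
#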

def _read_hook(hook_path):
    with open(hook_path, 'rb') as f:
        content = f.read()
    return content