@@ -1,1837 +1,1840 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Base module for all VCS systems |
|
23 | 23 | """ |
|
24 | 24 | import os |
|
25 | 25 | import re |
|
26 | 26 | import time |
|
27 | 27 | import shutil |
|
28 | 28 | import datetime |
|
29 | 29 | import fnmatch |
|
30 | 30 | import itertools |
|
31 | 31 | import logging |
|
32 | 32 | import collections |
|
33 | 33 | import warnings |
|
34 | 34 | |
|
35 | 35 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
36 | 36 | |
|
37 | 37 | from rhodecode.translation import lazy_ugettext |
|
38 | 38 | from rhodecode.lib.utils2 import safe_str, safe_unicode |
|
39 | 39 | from rhodecode.lib.vcs import connection |
|
40 | 40 | from rhodecode.lib.vcs.utils import author_name, author_email |
|
41 | 41 | from rhodecode.lib.vcs.conf import settings |
|
42 | 42 | from rhodecode.lib.vcs.exceptions import ( |
|
43 | 43 | CommitError, EmptyRepositoryError, NodeAlreadyAddedError, |
|
44 | 44 | NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError, |
|
45 | 45 | NodeDoesNotExistError, NodeNotChangedError, VCSError, |
|
46 | 46 | ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError, |
|
47 | 47 | RepositoryError) |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | FILEMODE_DEFAULT = 0o100644 |
|
54 | 54 | FILEMODE_EXECUTABLE = 0o100755 |
|
55 | 55 | |
|
56 | 56 | Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id')) |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | class MergeFailureReason(object): |
|
60 | 60 | """ |
|
61 | 61 | Enumeration with all the reasons why the server side merge could fail. |
|
62 | 62 | |
|
63 | 63 | DO NOT change the number of the reasons, as they may be stored in the |
|
64 | 64 | database. |
|
65 | 65 | |
|
66 | 66 | Changing the name of a reason is acceptable and encouraged to deprecate old |
|
67 | 67 | reasons. |
|
68 | 68 | """ |
|
69 | 69 | |
|
70 | 70 | # Everything went well. |
|
71 | 71 | NONE = 0 |
|
72 | 72 | |
|
73 | 73 | # An unexpected exception was raised. Check the logs for more details. |
|
74 | 74 | UNKNOWN = 1 |
|
75 | 75 | |
|
76 | 76 | # The merge was not successful, there are conflicts. |
|
77 | 77 | MERGE_FAILED = 2 |
|
78 | 78 | |
|
79 | 79 | # The merge succeeded but we could not push it to the target repository. |
|
80 | 80 | PUSH_FAILED = 3 |
|
81 | 81 | |
|
82 | 82 | # The specified target is not a head in the target repository. |
|
83 | 83 | TARGET_IS_NOT_HEAD = 4 |
|
84 | 84 | |
|
85 | 85 | # The source repository contains more branches than the target. Pushing |
|
86 | 86 | # the merge will create additional branches in the target. |
|
87 | 87 | HG_SOURCE_HAS_MORE_BRANCHES = 5 |
|
88 | 88 | |
|
89 | 89 | # The target reference has multiple heads, which makes it impossible to correctly |
|
90 | 90 | # identify the target location. This could only happen for mercurial |
|
91 | 91 | # branches. |
|
92 | 92 | HG_TARGET_HAS_MULTIPLE_HEADS = 6 |
|
93 | 93 | |
|
94 | 94 | # The target repository is locked |
|
95 | 95 | TARGET_IS_LOCKED = 7 |
|
96 | 96 | |
|
97 | 97 | # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead. |
|
98 | 98 | # An involved commit could not be found. |
|
99 | 99 | _DEPRECATED_MISSING_COMMIT = 8 |
|
100 | 100 | |
|
101 | 101 | # The target repo reference is missing. |
|
102 | 102 | MISSING_TARGET_REF = 9 |
|
103 | 103 | |
|
104 | 104 | # The source repo reference is missing. |
|
105 | 105 | MISSING_SOURCE_REF = 10 |
|
106 | 106 | |
|
107 | 107 | # The merge was not successful, there are conflicts related to sub |
|
108 | 108 | # repositories. |
|
109 | 109 | SUBREPO_MERGE_FAILED = 11 |
|
110 | 110 | |
|
111 | 111 | |
|
112 | 112 | class UpdateFailureReason(object): |
|
113 | 113 | """ |
|
114 | 114 | Enumeration with all the reasons why the pull request update could fail. |
|
115 | 115 | |
|
116 | 116 | DO NOT change the number of the reasons, as they may be stored in the |
|
117 | 117 | database. |
|
118 | 118 | |
|
119 | 119 | Changing the name of a reason is acceptable and encouraged to deprecate old |
|
120 | 120 | reasons. |
|
121 | 121 | """ |
|
122 | 122 | |
|
123 | 123 | # Everything went well. |
|
124 | 124 | NONE = 0 |
|
125 | 125 | |
|
126 | 126 | # An unexpected exception was raised. Check the logs for more details. |
|
127 | 127 | UNKNOWN = 1 |
|
128 | 128 | |
|
129 | 129 | # The pull request is up to date. |
|
130 | 130 | NO_CHANGE = 2 |
|
131 | 131 | |
|
132 | 132 | # The pull request has a reference type that is not supported for update. |
|
133 | 133 | WRONG_REF_TYPE = 3 |
|
134 | 134 | |
|
135 | 135 | # Update failed because the target reference is missing. |
|
136 | 136 | MISSING_TARGET_REF = 4 |
|
137 | 137 | |
|
138 | 138 | # Update failed because the source reference is missing. |
|
139 | 139 | MISSING_SOURCE_REF = 5 |
|
140 | 140 | |
|
141 | 141 | |
|
142 | 142 | class MergeResponse(object): |
|
143 | 143 | |
|
144 | 144 | # uses .format(**metadata) for variables |
|
145 | 145 | MERGE_STATUS_MESSAGES = { |
|
146 | 146 | MergeFailureReason.NONE: lazy_ugettext( |
|
147 | 147 | u'This pull request can be automatically merged.'), |
|
148 | 148 | MergeFailureReason.UNKNOWN: lazy_ugettext( |
|
149 | 149 | u'This pull request cannot be merged because of an unhandled exception. ' |
|
150 | 150 | u'{exception}'), |
|
151 | 151 | MergeFailureReason.MERGE_FAILED: lazy_ugettext( |
|
152 | 152 | u'This pull request cannot be merged because of merge conflicts.'), |
|
153 | 153 | MergeFailureReason.PUSH_FAILED: lazy_ugettext( |
|
154 | 154 | u'This pull request could not be merged because push to ' |
|
155 | 155 | u'target:`{target}@{merge_commit}` failed.'), |
|
156 | 156 | MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext( |
|
157 | 157 | u'This pull request cannot be merged because the target ' |
|
158 | 158 | u'`{target_ref.name}` is not a head.'), |
|
159 | 159 | MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext( |
|
160 | 160 | u'This pull request cannot be merged because the source contains ' |
|
161 | 161 | u'more branches than the target.'), |
|
162 | 162 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext( |
|
163 | 163 | u'This pull request cannot be merged because the target ' |
|
164 | 164 | u'has multiple heads: `{heads}`.'), |
|
165 | 165 | MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext( |
|
166 | 166 | u'This pull request cannot be merged because the target repository is ' |
|
167 | 167 | u'locked by {locked_by}.'), |
|
168 | 168 | |
|
169 | 169 | MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
170 | 170 | u'This pull request cannot be merged because the target ' |
|
171 | 171 | u'reference `{target_ref.name}` is missing.'), |
|
172 | 172 | MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
173 | 173 | u'This pull request cannot be merged because the source ' |
|
174 | 174 | u'reference `{source_ref.name}` is missing.'), |
|
175 | 175 | MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext( |
|
176 | 176 | u'This pull request cannot be merged because of conflicts related ' |
|
177 | 177 | u'to sub repositories.'), |
|
178 | 178 | |
|
179 | 179 | # Deprecations |
|
180 | 180 | MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext( |
|
181 | 181 | u'This pull request cannot be merged because the target or the ' |
|
182 | 182 | u'source reference is missing.'), |
|
183 | 183 | |
|
184 | 184 | } |
|
185 | 185 | |
|
186 | 186 | def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None): |
|
187 | 187 | self.possible = possible |
|
188 | 188 | self.executed = executed |
|
189 | 189 | self.merge_ref = merge_ref |
|
190 | 190 | self.failure_reason = failure_reason |
|
191 | 191 | self.metadata = metadata or {} |
|
192 | 192 | |
|
193 | 193 | def __repr__(self): |
|
194 | 194 | return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason) |
|
195 | 195 | |
|
196 | 196 | def __eq__(self, other): |
|
197 | 197 | same_instance = isinstance(other, self.__class__) |
|
198 | 198 | return same_instance \ |
|
199 | 199 | and self.possible == other.possible \ |
|
200 | 200 | and self.executed == other.executed \ |
|
201 | 201 | and self.failure_reason == other.failure_reason |
|
202 | 202 | |
|
203 | 203 | @property |
|
204 | 204 | def label(self): |
|
205 | 205 | label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if |
|
206 | 206 | not k.startswith('_')) |
|
207 | 207 | return label_dict.get(self.failure_reason) |
|
208 | 208 | |
|
209 | 209 | @property |
|
210 | 210 | def merge_status_message(self): |
|
211 | 211 | """ |
|
212 | 212 | Return a human friendly error message for the given merge status code. |
|
213 | 213 | """ |
|
214 | 214 | msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason]) |
|
215 | 215 | try: |
|
216 | 216 | return msg.format(**self.metadata) |
|
217 | 217 | except Exception: |
|
218 | 218 | log.exception('Failed to format %s message', self) |
|
219 | 219 | return msg |
|
220 | 220 | |
|
221 | 221 | def asdict(self): |
|
222 | 222 | data = {} |
|
223 | 223 | for k in ['possible', 'executed', 'merge_ref', 'failure_reason']: |
|
224 | 224 | data[k] = getattr(self, k) |
|
225 | 225 | return data |
|
226 | 226 | |
|
227 | 227 | |
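For orientation, a minimal usage sketch of the response object defined above; the metadata keys match the PUSH_FAILED template, while the concrete values are made up for illustration:

    # hypothetical failure: the merge worked but the push to the target did not
    response = MergeResponse(
        False, False, None, MergeFailureReason.PUSH_FAILED,
        metadata={'target': 'upstream/my-repo', 'merge_commit': 'a' * 40})

    assert response.label == 'PUSH_FAILED'      # reverse lookup of the enum name
    print(response.merge_status_message)        # template with metadata interpolated
    print(response.asdict())                    # {'possible': False, 'executed': False, ...}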
|
228 | 228 | class BaseRepository(object): |
|
229 | 229 | """ |
|
230 | 230 | Base Repository for final backends |
|
231 | 231 | |
|
232 | 232 | .. attribute:: DEFAULT_BRANCH_NAME |
|
233 | 233 | |
|
234 | 234 | name of default branch (i.e. "trunk" for svn, "master" for git etc.) |
|
235 | 235 | |
|
236 | 236 | .. attribute:: commit_ids |
|
237 | 237 | |
|
238 | 238 | list of all available commit ids, in ascending order |
|
239 | 239 | |
|
240 | 240 | .. attribute:: path |
|
241 | 241 | |
|
242 | 242 | absolute path to the repository |
|
243 | 243 | |
|
244 | 244 | .. attribute:: bookmarks |
|
245 | 245 | |
|
246 | 246 | Mapping from name to :term:`Commit ID` of the bookmark. Empty in case |
|
247 | 247 | there are no bookmarks or the backend implementation does not support |
|
248 | 248 | bookmarks. |
|
249 | 249 | |
|
250 | 250 | .. attribute:: tags |
|
251 | 251 | |
|
252 | 252 | Mapping from name to :term:`Commit ID` of the tag. |
|
253 | 253 | |
|
254 | 254 | """ |
|
255 | 255 | |
|
256 | 256 | DEFAULT_BRANCH_NAME = None |
|
257 | 257 | DEFAULT_CONTACT = u"Unknown" |
|
258 | 258 | DEFAULT_DESCRIPTION = u"unknown" |
|
259 | 259 | EMPTY_COMMIT_ID = '0' * 40 |
|
260 | 260 | |
|
261 | 261 | path = None |
|
262 | 262 | |
|
263 | 263 | def __init__(self, repo_path, config=None, create=False, **kwargs): |
|
264 | 264 | """ |
|
265 | 265 | Initializes repository. Raises RepositoryError if repository could |
|
266 | 266 | not be found at the given ``repo_path`` or directory at ``repo_path`` |
|
267 | 267 | exists and ``create`` is set to True. |
|
268 | 268 | |
|
269 | 269 | :param repo_path: local path of the repository |
|
270 | 270 | :param config: repository configuration |
|
271 | 271 | :param create=False: if set to True, would try to create repository. |
|
272 | 272 | :param src_url=None: if set, should be proper url from which repository |
|
273 | 273 | would be cloned; requires ``create`` parameter to be set to True - |
|
274 | 274 | raises RepositoryError if src_url is set and create evaluates to |
|
275 | 275 | False |
|
276 | 276 | """ |
|
277 | 277 | raise NotImplementedError |
|
278 | 278 | |
|
279 | 279 | def __repr__(self): |
|
280 | 280 | return '<%s at %s>' % (self.__class__.__name__, self.path) |
|
281 | 281 | |
|
282 | 282 | def __len__(self): |
|
283 | 283 | return self.count() |
|
284 | 284 | |
|
285 | 285 | def __eq__(self, other): |
|
286 | 286 | same_instance = isinstance(other, self.__class__) |
|
287 | 287 | return same_instance and other.path == self.path |
|
288 | 288 | |
|
289 | 289 | def __ne__(self, other): |
|
290 | 290 | return not self.__eq__(other) |
|
291 | 291 | |
|
292 | 292 | def get_create_shadow_cache_pr_path(self, db_repo): |
|
293 | 293 | path = db_repo.cached_diffs_dir |
|
294 | 294 | if not os.path.exists(path): |
|
295 | 295 | os.makedirs(path, 0o755) |
|
296 | 296 | return path |
|
297 | 297 | |
|
298 | 298 | @classmethod |
|
299 | 299 | def get_default_config(cls, default=None): |
|
300 | 300 | config = Config() |
|
301 | 301 | if default and isinstance(default, list): |
|
302 | 302 | for section, key, val in default: |
|
303 | 303 | config.set(section, key, val) |
|
304 | 304 | return config |
|
305 | 305 | |
|
306 | 306 | @LazyProperty |
|
307 | 307 | def _remote(self): |
|
308 | 308 | raise NotImplementedError |
|
309 | 309 | |
|
310 | 310 | @LazyProperty |
|
311 | 311 | def EMPTY_COMMIT(self): |
|
312 | 312 | return EmptyCommit(self.EMPTY_COMMIT_ID) |
|
313 | 313 | |
|
314 | 314 | @LazyProperty |
|
315 | 315 | def alias(self): |
|
316 | 316 | for k, v in settings.BACKENDS.items(): |
|
317 | 317 | if v.split('.')[-1] == str(self.__class__.__name__): |
|
318 | 318 | return k |
|
319 | 319 | |
|
320 | 320 | @LazyProperty |
|
321 | 321 | def name(self): |
|
322 | 322 | return safe_unicode(os.path.basename(self.path)) |
|
323 | 323 | |
|
324 | 324 | @LazyProperty |
|
325 | 325 | def description(self): |
|
326 | 326 | raise NotImplementedError |
|
327 | 327 | |
|
328 | 328 | def refs(self): |
|
329 | 329 | """ |
|
330 | 330 | returns a `dict` with branches, bookmarks, tags, and closed_branches |
|
331 | 331 | for this repository |
|
332 | 332 | """ |
|
333 | 333 | return dict( |
|
334 | 334 | branches=self.branches, |
|
335 | 335 | branches_closed=self.branches_closed, |
|
336 | 336 | tags=self.tags, |
|
337 | 337 | bookmarks=self.bookmarks |
|
338 | 338 | ) |
|
339 | 339 | |
|
340 | 340 | @LazyProperty |
|
341 | 341 | def branches(self): |
|
342 | 342 | """ |
|
343 | 343 | A `dict` which maps branch names to commit ids. |
|
344 | 344 | """ |
|
345 | 345 | raise NotImplementedError |
|
346 | 346 | |
|
347 | 347 | @LazyProperty |
|
348 | 348 | def branches_closed(self): |
|
349 | 349 | """ |
|
350 | 350 | A `dict` which maps closed branch names to commit ids. |
|
351 | 351 | """ |
|
352 | 352 | raise NotImplementedError |
|
353 | 353 | |
|
354 | 354 | @LazyProperty |
|
355 | 355 | def bookmarks(self): |
|
356 | 356 | """ |
|
357 | 357 | A `dict` which maps bookmark names to commit ids. |
|
358 | 358 | """ |
|
359 | 359 | raise NotImplementedError |
|
360 | 360 | |
|
361 | 361 | @LazyProperty |
|
362 | 362 | def tags(self): |
|
363 | 363 | """ |
|
364 | 364 | A `dict` which maps tag names to commit ids. |
|
365 | 365 | """ |
|
366 | 366 | raise NotImplementedError |
|
367 | 367 | |
|
368 | 368 | @LazyProperty |
|
369 | 369 | def size(self): |
|
370 | 370 | """ |
|
371 | 371 | Returns combined size in bytes for all repository files |
|
372 | 372 | """ |
|
373 | 373 | tip = self.get_commit() |
|
374 | 374 | return tip.size |
|
375 | 375 | |
|
376 | 376 | def size_at_commit(self, commit_id): |
|
377 | 377 | commit = self.get_commit(commit_id) |
|
378 | 378 | return commit.size |
|
379 | 379 | |
|
380 | 380 | def is_empty(self): |
|
381 | 381 | return not bool(self.commit_ids) |
|
382 | 382 | |
|
383 | 383 | @staticmethod |
|
384 | 384 | def check_url(url, config): |
|
385 | 385 | """ |
|
386 | 386 | Function will check given url and try to verify if it's a valid |
|
387 | 387 | link. |
|
388 | 388 | """ |
|
389 | 389 | raise NotImplementedError |
|
390 | 390 | |
|
391 | 391 | @staticmethod |
|
392 | 392 | def is_valid_repository(path): |
|
393 | 393 | """ |
|
394 | 394 | Check if given `path` contains a valid repository of this backend |
|
395 | 395 | """ |
|
396 | 396 | raise NotImplementedError |
|
397 | 397 | |
|
398 | 398 | # ========================================================================== |
|
399 | 399 | # COMMITS |
|
400 | 400 | # ========================================================================== |
|
401 | 401 | |
|
402 | 402 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None): |
|
403 | 403 | """ |
|
404 | 404 | Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx` |
|
405 | 405 | are both None, most recent commit is returned. |
|
406 | 406 | |
|
407 | 407 | :param pre_load: Optional. List of commit attributes to load. |
|
408 | 408 | |
|
409 | 409 | :raises ``EmptyRepositoryError``: if there are no commits |
|
410 | 410 | """ |
|
411 | 411 | raise NotImplementedError |
|
412 | 412 | |
|
413 | 413 | def __iter__(self): |
|
414 | 414 | for commit_id in self.commit_ids: |
|
415 | 415 | yield self.get_commit(commit_id=commit_id) |
|
416 | 416 | |
|
417 | 417 | def get_commits( |
|
418 | 418 | self, start_id=None, end_id=None, start_date=None, end_date=None, |
|
419 | 419 | branch_name=None, show_hidden=False, pre_load=None): |
|
420 | 420 | """ |
|
421 | 421 | Returns iterator of `BaseCommit` objects from start to end |
|
422 | 422 | not inclusive. This should behave just like a list, i.e. end is not |
|
423 | 423 | inclusive. |
|
424 | 424 | |
|
425 | 425 | :param start_id: None or str, must be a valid commit id |
|
426 | 426 | :param end_id: None or str, must be a valid commit id |
|
427 | 427 | :param start_date: |
|
428 | 428 | :param end_date: |
|
429 | 429 | :param branch_name: |
|
430 | 430 | :param show_hidden: |
|
431 | 431 | :param pre_load: |
|
432 | 432 | """ |
|
433 | 433 | raise NotImplementedError |
|
434 | 434 | |
|
435 | 435 | def __getitem__(self, key): |
|
436 | 436 | """ |
|
437 | 437 | Allows index based access to the commit objects of this repository. |
|
438 | 438 | """ |
|
439 | 439 | pre_load = ["author", "branch", "date", "message", "parents"] |
|
440 | 440 | if isinstance(key, slice): |
|
441 | 441 | return self._get_range(key, pre_load) |
|
442 | 442 | return self.get_commit(commit_idx=key, pre_load=pre_load) |
|
443 | 443 | |
|
444 | 444 | def _get_range(self, slice_obj, pre_load): |
|
445 | 445 | for commit_id in self.commit_ids.__getitem__(slice_obj): |
|
446 | 446 | yield self.get_commit(commit_id=commit_id, pre_load=pre_load) |
|
447 | 447 | |
|
448 | 448 | def count(self): |
|
449 | 449 | return len(self.commit_ids) |
|
450 | 450 | |
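A short sketch of the indexing API above, assuming `repo` is an instance of a concrete backend (Git, Mercurial or Subversion):

    first = repo[0]                      # get_commit(commit_idx=0) with pre-loaded attributes
    for commit in repo[:5]:              # slices go through _get_range() and yield commits lazily
        print(commit.short_id, commit.message)

    assert len(repo) == repo.count()     # both defer to len(self.commit_ids)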
|
451 | 451 | def tag(self, name, user, commit_id=None, message=None, date=None, **opts): |
|
452 | 452 | """ |
|
453 | 453 | Creates and returns a tag for the given ``commit_id``. |
|
454 | 454 | |
|
455 | 455 | :param name: name for new tag |
|
456 | 456 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
457 | 457 | :param commit_id: commit id for which new tag would be created |
|
458 | 458 | :param message: message of the tag's commit |
|
459 | 459 | :param date: date of tag's commit |
|
460 | 460 | |
|
461 | 461 | :raises TagAlreadyExistError: if tag with same name already exists |
|
462 | 462 | """ |
|
463 | 463 | raise NotImplementedError |
|
464 | 464 | |
|
465 | 465 | def remove_tag(self, name, user, message=None, date=None): |
|
466 | 466 | """ |
|
467 | 467 | Removes tag with the given ``name``. |
|
468 | 468 | |
|
469 | 469 | :param name: name of the tag to be removed |
|
470 | 470 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
471 | 471 | :param message: message of the tag's removal commit |
|
472 | 472 | :param date: date of tag's removal commit |
|
473 | 473 | |
|
474 | 474 | :raises TagDoesNotExistError: if tag with given name does not exists |
|
475 | 475 | """ |
|
476 | 476 | raise NotImplementedError |
|
477 | 477 | |
|
478 | 478 | def get_diff( |
|
479 | 479 | self, commit1, commit2, path=None, ignore_whitespace=False, |
|
480 | 480 | context=3, path1=None): |
|
481 | 481 | """ |
|
482 | 482 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
483 | 483 | `commit2` since `commit1`. |
|
484 | 484 | |
|
485 | 485 | :param commit1: Entry point from which diff is shown. Can be |
|
486 | 486 | ``self.EMPTY_COMMIT`` - in this case, patch showing all |
|
487 | 487 | the changes since empty state of the repository until `commit2` |
|
488 | 488 | :param commit2: Until which commit changes should be shown. |
|
489 | 489 | :param path: Can be set to a path of a file to create a diff of that |
|
490 | 490 | file. If `path1` is also set, this value is only associated to |
|
491 | 491 | `commit2`. |
|
492 | 492 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
493 | 493 | changes. Defaults to ``False``. |
|
494 | 494 | :param context: How many lines before/after changed lines should be |
|
495 | 495 | shown. Defaults to ``3``. |
|
496 | 496 | :param path1: Can be set to a path to associate with `commit1`. This |
|
497 | 497 | parameter works only for backends which support diff generation for |
|
498 | 498 | different paths. Other backends will raise a `ValueError` if `path1` |
|
499 | 499 | is set and has a different value than `path`. |
|
500 | 500 | :param file_path: filter this diff by given path pattern |
|
501 | 501 | """ |
|
502 | 502 | raise NotImplementedError |
|
503 | 503 | |
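As a sketch of the intended call pattern for `get_diff` (backends provide the implementation; the file path below is a placeholder):

    old = repo.get_commit(commit_idx=10)
    new = repo.get_commit(commit_idx=12)

    # full textual diff between two commits, three context lines
    patch = repo.get_diff(old, new, context=3)

    # changes of a single file since the empty state of the repository
    file_patch = repo.get_diff(repo.EMPTY_COMMIT, new, path='setup.py')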
|
504 | 504 | def strip(self, commit_id, branch=None): |
|
505 | 505 | """ |
|
506 | 506 | Strip given commit_id from the repository |
|
507 | 507 | """ |
|
508 | 508 | raise NotImplementedError |
|
509 | 509 | |
|
510 | 510 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): |
|
511 | 511 | """ |
|
512 | 512 | Return a latest common ancestor commit if one exists for this repo |
|
513 | 513 | `commit_id1` vs `commit_id2` from `repo2`. |
|
514 | 514 | |
|
515 | 515 | :param commit_id1: Commit id from this repository to use as a |
|
516 | 516 | target for the comparison. |
|
517 | 517 | :param commit_id2: Source commit id to use for comparison. |
|
518 | 518 | :param repo2: Source repository to use for comparison. |
|
519 | 519 | """ |
|
520 | 520 | raise NotImplementedError |
|
521 | 521 | |
|
522 | 522 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): |
|
523 | 523 | """ |
|
524 | 524 | Compare this repository's revision `commit_id1` with `commit_id2`. |
|
525 | 525 | |
|
526 | 526 | Returns a tuple(commits, ancestor) that would be merged from |
|
527 | 527 | `commit_id2`. Doing a normal compare (``merge=False``), ``None`` |
|
528 | 528 | will be returned as ancestor. |
|
529 | 529 | |
|
530 | 530 | :param commit_id1: Commit id from this repository to use as a |
|
531 | 531 | target for the comparison. |
|
532 | 532 | :param commit_id2: Source commit id to use for comparison. |
|
533 | 533 | :param repo2: Source repository to use for comparison. |
|
534 | 534 | :param merge: If set to ``True`` will do a merge compare which also |
|
535 | 535 | returns the common ancestor. |
|
536 | 536 | :param pre_load: Optional. List of commit attributes to load. |
|
537 | 537 | """ |
|
538 | 538 | raise NotImplementedError |
|
539 | 539 | |
|
540 | 540 | def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref, |
|
541 | 541 | user_name='', user_email='', message='', dry_run=False, |
|
542 | 542 | use_rebase=False, close_branch=False): |
|
543 | 543 | """ |
|
544 | 544 | Merge the revisions specified in `source_ref` from `source_repo` |
|
545 | 545 | onto the `target_ref` of this repository. |
|
546 | 546 | |
|
547 | 547 | `source_ref` and `target_ref` are named tuples with the following |
|
548 | 548 | fields `type`, `name` and `commit_id`. |
|
549 | 549 | |
|
550 | 550 | Returns a MergeResponse named tuple with the following fields |
|
551 | 551 | 'possible', 'executed', 'source_commit', 'target_commit', |
|
552 | 552 | 'merge_commit'. |
|
553 | 553 | |
|
554 | 554 | :param repo_id: `repo_id` target repo id. |
|
555 | 555 | :param workspace_id: `workspace_id` unique identifier. |
|
556 | 556 | :param target_ref: `target_ref` points to the commit on top of which |
|
557 | 557 | the `source_ref` should be merged. |
|
558 | 558 | :param source_repo: The repository that contains the commits to be |
|
559 | 559 | merged. |
|
560 | 560 | :param source_ref: `source_ref` points to the topmost commit from |
|
561 | 561 | the `source_repo` which should be merged. |
|
562 | 562 | :param user_name: Merge commit `user_name`. |
|
563 | 563 | :param user_email: Merge commit `user_email`. |
|
564 | 564 | :param message: Merge commit `message`. |
|
565 | 565 | :param dry_run: If `True` the merge will not take place. |
|
566 | 566 | :param use_rebase: If `True` commits from the source will be rebased |
|
567 | 567 | on top of the target instead of being merged. |
|
568 | 568 | :param close_branch: If `True` branch will be closed before merging it |
|
569 | 569 | """ |
|
570 | 570 | if dry_run: |
|
571 | 571 | message = message or settings.MERGE_DRY_RUN_MESSAGE |
|
572 | 572 | user_email = user_email or settings.MERGE_DRY_RUN_EMAIL |
|
573 | 573 | user_name = user_name or settings.MERGE_DRY_RUN_USER |
|
574 | 574 | else: |
|
575 | 575 | if not user_name: |
|
576 | 576 | raise ValueError('user_name cannot be empty') |
|
577 | 577 | if not user_email: |
|
578 | 578 | raise ValueError('user_email cannot be empty') |
|
579 | 579 | if not message: |
|
580 | 580 | raise ValueError('message cannot be empty') |
|
581 | 581 | |
|
582 | 582 | try: |
|
583 | 583 | return self._merge_repo( |
|
584 | 584 | repo_id, workspace_id, target_ref, source_repo, |
|
585 | 585 | source_ref, message, user_name, user_email, dry_run=dry_run, |
|
586 | 586 | use_rebase=use_rebase, close_branch=close_branch) |
|
587 | 587 | except RepositoryError as exc: |
|
588 | 588 | log.exception('Unexpected failure when running merge, dry-run=%s', dry_run) |
|
589 | 589 | return MergeResponse( |
|
590 | 590 | False, False, None, MergeFailureReason.UNKNOWN, |
|
591 | 591 | metadata={'exception': str(exc)}) |
|
592 | 592 | |
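A hedged sketch of a `merge` call using the `Reference` named tuple defined at the top of this module; repository objects, ids and ref names are placeholders:

    target_ref = Reference('branch', 'master', 'a' * 40)
    source_ref = Reference('branch', 'feature-x', 'b' * 40)

    response = target_repo.merge(
        repo_id=42, workspace_id='pr-17',
        target_ref=target_ref, source_repo=source_repo, source_ref=source_ref,
        user_name='Joe Doe', user_email='joe.doe@example.com',
        message='Merge feature-x into master', dry_run=False)

    if not response.possible:
        log.warning('Merge not possible: %s', response.merge_status_message)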
|
593 | 593 | def _merge_repo(self, repo_id, workspace_id, target_ref, |
|
594 | 594 | source_repo, source_ref, merge_message, |
|
595 | 595 | merger_name, merger_email, dry_run=False, |
|
596 | 596 | use_rebase=False, close_branch=False): |
|
597 | 597 | """Internal implementation of merge.""" |
|
598 | 598 | raise NotImplementedError |
|
599 | 599 | |
|
600 | 600 | def _maybe_prepare_merge_workspace( |
|
601 | 601 | self, repo_id, workspace_id, target_ref, source_ref): |
|
602 | 602 | """ |
|
603 | 603 | Create the merge workspace. |
|
604 | 604 | |
|
605 | 605 | :param workspace_id: `workspace_id` unique identifier. |
|
606 | 606 | """ |
|
607 | 607 | raise NotImplementedError |
|
608 | 608 | |
|
609 | 609 | def _get_legacy_shadow_repository_path(self, workspace_id): |
|
610 | 610 | """ |
|
611 | 611 | Legacy version that was used before. We still need it for |
|
612 | 612 | backward compat |
|
613 | 613 | """ |
|
614 | 614 | return os.path.join( |
|
615 | 615 | os.path.dirname(self.path), |
|
616 | 616 | '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id)) |
|
617 | 617 | |
|
618 | 618 | def _get_shadow_repository_path(self, repo_id, workspace_id): |
|
619 | 619 | # The name of the shadow repository must start with '.', so it is |
|
620 | 620 | # skipped by 'rhodecode.lib.utils.get_filesystem_repos'. |
|
621 | 621 | legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id) |
|
622 | 622 | if os.path.exists(legacy_repository_path): |
|
623 | 623 | return legacy_repository_path |
|
624 | 624 | else: |
|
625 | 625 | return os.path.join( |
|
626 | 626 | os.path.dirname(self.path), |
|
627 | 627 | '.__shadow_repo_%s_%s' % (repo_id, workspace_id)) |
|
628 | 628 | |
|
629 | 629 | def cleanup_merge_workspace(self, repo_id, workspace_id): |
|
630 | 630 | """ |
|
631 | 631 | Remove merge workspace. |
|
632 | 632 | |
|
633 | 633 | This function MUST not fail in case there is no workspace associated with |
|
634 | 634 | the given `workspace_id`. |
|
635 | 635 | |
|
636 | 636 | :param workspace_id: `workspace_id` unique identifier. |
|
637 | 637 | """ |
|
638 | 638 | shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id) |
|
639 | 639 | shadow_repository_path_del = '{}.{}.delete'.format( |
|
640 | 640 | shadow_repository_path, time.time()) |
|
641 | 641 | |
|
642 | 642 | # move the shadow repo, so it never conflicts with the one used. |
|
643 | 643 | # we use this method because shutil.rmtree had some edge case problems |
|
644 | 644 | # removing symlinked repositories |
|
645 | 645 | if not os.path.isdir(shadow_repository_path): |
|
646 | 646 | return |
|
647 | 647 | |
|
648 | 648 | shutil.move(shadow_repository_path, shadow_repository_path_del) |
|
649 | 649 | try: |
|
650 | 650 | shutil.rmtree(shadow_repository_path_del, ignore_errors=False) |
|
651 | 651 | except Exception: |
|
652 | 652 | log.exception('Failed to gracefully remove shadow repo under %s', |
|
653 | 653 | shadow_repository_path_del) |
|
654 | 654 | shutil.rmtree(shadow_repository_path_del, ignore_errors=True) |
|
655 | 655 | |
|
656 | 656 | # ========== # |
|
657 | 657 | # COMMIT API # |
|
658 | 658 | # ========== # |
|
659 | 659 | |
|
660 | 660 | @LazyProperty |
|
661 | 661 | def in_memory_commit(self): |
|
662 | 662 | """ |
|
663 | 663 | Returns :class:`InMemoryCommit` object for this repository. |
|
664 | 664 | """ |
|
665 | 665 | raise NotImplementedError |
|
666 | 666 | |
|
667 | 667 | # ======================== # |
|
668 | 668 | # UTILITIES FOR SUBCLASSES # |
|
669 | 669 | # ======================== # |
|
670 | 670 | |
|
671 | 671 | def _validate_diff_commits(self, commit1, commit2): |
|
672 | 672 | """ |
|
673 | 673 | Validates that the given commits are related to this repository. |
|
674 | 674 | |
|
675 | 675 | Intended as a utility for subclasses to have a consistent validation |
|
676 | 676 | of input parameters in methods like :meth:`get_diff`. |
|
677 | 677 | """ |
|
678 | 678 | self._validate_commit(commit1) |
|
679 | 679 | self._validate_commit(commit2) |
|
680 | 680 | if (isinstance(commit1, EmptyCommit) and |
|
681 | 681 | isinstance(commit2, EmptyCommit)): |
|
682 | 682 | raise ValueError("Cannot compare two empty commits") |
|
683 | 683 | |
|
684 | 684 | def _validate_commit(self, commit): |
|
685 | 685 | if not isinstance(commit, BaseCommit): |
|
686 | 686 | raise TypeError( |
|
687 | 687 | "%s is not of type BaseCommit" % repr(commit)) |
|
688 | 688 | if commit.repository != self and not isinstance(commit, EmptyCommit): |
|
689 | 689 | raise ValueError( |
|
690 | 690 | "Commit %s must be a valid commit from this repository %s, " |
|
691 | 691 | "related to this repository instead %s." % |
|
692 | 692 | (commit, self, commit.repository)) |
|
693 | 693 | |
|
694 | 694 | def _validate_commit_id(self, commit_id): |
|
695 | 695 | if not isinstance(commit_id, basestring): |
|
696 | 696 | raise TypeError("commit_id must be a string value") |
|
697 | 697 | |
|
698 | 698 | def _validate_commit_idx(self, commit_idx): |
|
699 | 699 | if not isinstance(commit_idx, (int, long)): |
|
700 | 700 | raise TypeError("commit_idx must be a numeric value") |
|
701 | 701 | |
|
702 | 702 | def _validate_branch_name(self, branch_name): |
|
703 | 703 | if branch_name and branch_name not in self.branches_all: |
|
704 | 704 | msg = ("Branch %s not found in %s" % (branch_name, self)) |
|
705 | 705 | raise BranchDoesNotExistError(msg) |
|
706 | 706 | |
|
707 | 707 | # |
|
708 | 708 | # Supporting deprecated API parts |
|
709 | 709 | # TODO: johbo: consider to move this into a mixin |
|
710 | 710 | # |
|
711 | 711 | |
|
712 | 712 | @property |
|
713 | 713 | def EMPTY_CHANGESET(self): |
|
714 | 714 | warnings.warn( |
|
715 | 715 | "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning) |
|
716 | 716 | return self.EMPTY_COMMIT_ID |
|
717 | 717 | |
|
718 | 718 | @property |
|
719 | 719 | def revisions(self): |
|
720 | 720 | warnings.warn("Use commits attribute instead", DeprecationWarning) |
|
721 | 721 | return self.commit_ids |
|
722 | 722 | |
|
723 | 723 | @revisions.setter |
|
724 | 724 | def revisions(self, value): |
|
725 | 725 | warnings.warn("Use commits attribute instead", DeprecationWarning) |
|
726 | 726 | self.commit_ids = value |
|
727 | 727 | |
|
728 | 728 | def get_changeset(self, revision=None, pre_load=None): |
|
729 | 729 | warnings.warn("Use get_commit instead", DeprecationWarning) |
|
730 | 730 | commit_id = None |
|
731 | 731 | commit_idx = None |
|
732 | 732 | if isinstance(revision, basestring): |
|
733 | 733 | commit_id = revision |
|
734 | 734 | else: |
|
735 | 735 | commit_idx = revision |
|
736 | 736 | return self.get_commit( |
|
737 | 737 | commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load) |
|
738 | 738 | |
|
739 | 739 | def get_changesets( |
|
740 | 740 | self, start=None, end=None, start_date=None, end_date=None, |
|
741 | 741 | branch_name=None, pre_load=None): |
|
742 | 742 | warnings.warn("Use get_commits instead", DeprecationWarning) |
|
743 | 743 | start_id = self._revision_to_commit(start) |
|
744 | 744 | end_id = self._revision_to_commit(end) |
|
745 | 745 | return self.get_commits( |
|
746 | 746 | start_id=start_id, end_id=end_id, start_date=start_date, |
|
747 | 747 | end_date=end_date, branch_name=branch_name, pre_load=pre_load) |
|
748 | 748 | |
|
749 | 749 | def _revision_to_commit(self, revision): |
|
750 | 750 | """ |
|
751 | 751 | Translates a revision to a commit_id |
|
752 | 752 | |
|
753 | 753 | Helps to support the old changeset based API which allows to use |
|
754 | 754 | commit ids and commit indices interchangeably. |
|
755 | 755 | """ |
|
756 | 756 | if revision is None: |
|
757 | 757 | return revision |
|
758 | 758 | |
|
759 | 759 | if isinstance(revision, basestring): |
|
760 | 760 | commit_id = revision |
|
761 | 761 | else: |
|
762 | 762 | commit_id = self.commit_ids[revision] |
|
763 | 763 | return commit_id |
|
764 | 764 | |
|
765 | 765 | @property |
|
766 | 766 | def in_memory_changeset(self): |
|
767 | 767 | warnings.warn("Use in_memory_commit instead", DeprecationWarning) |
|
768 | 768 | return self.in_memory_commit |
|
769 | 769 | |
|
770 | 770 | def get_path_permissions(self, username): |
|
771 | 771 | """ |
|
772 | 772 | Returns a path permission checker or None if not supported |
|
773 | 773 | |
|
774 | 774 | :param username: session user name |
|
775 | 775 | :return: an instance of BasePathPermissionChecker or None |
|
776 | 776 | """ |
|
777 | 777 | return None |
|
778 | 778 | |
|
779 | 779 | def install_hooks(self, force=False): |
|
780 | 780 | return self._remote.install_hooks(force) |
|
781 | 781 | |
|
782 | def get_hooks_info(self): | |
|
783 | return self._remote.get_hooks_info() | |
|
784 | ||
|
782 | 785 | |
|
783 | 786 | class BaseCommit(object): |
|
784 | 787 | """ |
|
785 | 788 | Each backend should implement its commit representation. |
|
786 | 789 | |
|
787 | 790 | **Attributes** |
|
788 | 791 | |
|
789 | 792 | ``repository`` |
|
790 | 793 | repository object within which commit exists |
|
791 | 794 | |
|
792 | 795 | ``id`` |
|
793 | 796 | The commit id; may be ``raw_id`` or, e.g. for mercurial's tip, |
|
794 | 797 | just ``tip``. |
|
795 | 798 | |
|
796 | 799 | ``raw_id`` |
|
797 | 800 | raw commit representation (i.e. full 40 length sha for git |
|
798 | 801 | backend) |
|
799 | 802 | |
|
800 | 803 | ``short_id`` |
|
801 | 804 | shortened (if applicable) version of ``raw_id``; it would be a simple |
|
802 | 805 | shortcut for ``raw_id[:12]`` for git/mercurial backends or same |
|
803 | 806 | as ``raw_id`` for subversion |
|
804 | 807 | |
|
805 | 808 | ``idx`` |
|
806 | 809 | commit index |
|
807 | 810 | |
|
808 | 811 | ``files`` |
|
809 | 812 | list of ``FileNode`` (``Node`` with NodeKind.FILE) objects |
|
810 | 813 | |
|
811 | 814 | ``dirs`` |
|
812 | 815 | list of ``DirNode`` (``Node`` with NodeKind.DIR) objects |
|
813 | 816 | |
|
814 | 817 | ``nodes`` |
|
815 | 818 | combined list of ``Node`` objects |
|
816 | 819 | |
|
817 | 820 | ``author`` |
|
818 | 821 | author of the commit, as unicode |
|
819 | 822 | |
|
820 | 823 | ``message`` |
|
821 | 824 | message of the commit, as unicode |
|
822 | 825 | |
|
823 | 826 | ``parents`` |
|
824 | 827 | list of parent commits |
|
825 | 828 | |
|
826 | 829 | """ |
|
827 | 830 | |
|
828 | 831 | branch = None |
|
829 | 832 | """ |
|
830 | 833 | Depending on the backend this should be set to the branch name of the |
|
831 | 834 | commit. Backends not supporting branches on commits should leave this |
|
832 | 835 | value as ``None``. |
|
833 | 836 | """ |
|
834 | 837 | |
|
835 | 838 | _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}' |
|
836 | 839 | """ |
|
837 | 840 | This template is used to generate a default prefix for repository archives |
|
838 | 841 | if no prefix has been specified. |
|
839 | 842 | """ |
|
840 | 843 | |
|
841 | 844 | def __str__(self): |
|
842 | 845 | return '<%s at %s:%s>' % ( |
|
843 | 846 | self.__class__.__name__, self.idx, self.short_id) |
|
844 | 847 | |
|
845 | 848 | def __repr__(self): |
|
846 | 849 | return self.__str__() |
|
847 | 850 | |
|
848 | 851 | def __unicode__(self): |
|
849 | 852 | return u'%s:%s' % (self.idx, self.short_id) |
|
850 | 853 | |
|
851 | 854 | def __eq__(self, other): |
|
852 | 855 | same_instance = isinstance(other, self.__class__) |
|
853 | 856 | return same_instance and self.raw_id == other.raw_id |
|
854 | 857 | |
|
855 | 858 | def __json__(self): |
|
856 | 859 | parents = [] |
|
857 | 860 | try: |
|
858 | 861 | for parent in self.parents: |
|
859 | 862 | parents.append({'raw_id': parent.raw_id}) |
|
860 | 863 | except NotImplementedError: |
|
861 | 864 | # empty commit doesn't have parents implemented |
|
862 | 865 | pass |
|
863 | 866 | |
|
864 | 867 | return { |
|
865 | 868 | 'short_id': self.short_id, |
|
866 | 869 | 'raw_id': self.raw_id, |
|
867 | 870 | 'revision': self.idx, |
|
868 | 871 | 'message': self.message, |
|
869 | 872 | 'date': self.date, |
|
870 | 873 | 'author': self.author, |
|
871 | 874 | 'parents': parents, |
|
872 | 875 | 'branch': self.branch |
|
873 | 876 | } |
|
874 | 877 | |
|
875 | 878 | def __getstate__(self): |
|
876 | 879 | d = self.__dict__.copy() |
|
877 | 880 | d.pop('_remote', None) |
|
878 | 881 | d.pop('repository', None) |
|
879 | 882 | return d |
|
880 | 883 | |
|
881 | 884 | def _get_refs(self): |
|
882 | 885 | return { |
|
883 | 886 | 'branches': [self.branch] if self.branch else [], |
|
884 | 887 | 'bookmarks': getattr(self, 'bookmarks', []), |
|
885 | 888 | 'tags': self.tags |
|
886 | 889 | } |
|
887 | 890 | |
|
888 | 891 | @LazyProperty |
|
889 | 892 | def last(self): |
|
890 | 893 | """ |
|
891 | 894 | ``True`` if this is last commit in repository, ``False`` |
|
892 | 895 | otherwise; trying to access this attribute while there is no |
|
893 | 896 | commits would raise `EmptyRepositoryError` |
|
894 | 897 | """ |
|
895 | 898 | if self.repository is None: |
|
896 | 899 | raise CommitError("Cannot check if it's most recent commit") |
|
897 | 900 | return self.raw_id == self.repository.commit_ids[-1] |
|
898 | 901 | |
|
899 | 902 | @LazyProperty |
|
900 | 903 | def parents(self): |
|
901 | 904 | """ |
|
902 | 905 | Returns list of parent commits. |
|
903 | 906 | """ |
|
904 | 907 | raise NotImplementedError |
|
905 | 908 | |
|
906 | 909 | @LazyProperty |
|
907 | 910 | def first_parent(self): |
|
908 | 911 | """ |
|
909 | 912 | Returns the first parent commit, or an EmptyCommit if there are no parents. |
|
910 | 913 | """ |
|
911 | 914 | return self.parents[0] if self.parents else EmptyCommit() |
|
912 | 915 | |
|
913 | 916 | @property |
|
914 | 917 | def merge(self): |
|
915 | 918 | """ |
|
916 | 919 | Returns ``True`` if the commit is a merge commit (has more than one parent). |
|
917 | 920 | """ |
|
918 | 921 | return len(self.parents) > 1 |
|
919 | 922 | |
|
920 | 923 | @LazyProperty |
|
921 | 924 | def children(self): |
|
922 | 925 | """ |
|
923 | 926 | Returns list of child commits. |
|
924 | 927 | """ |
|
925 | 928 | raise NotImplementedError |
|
926 | 929 | |
|
927 | 930 | @LazyProperty |
|
928 | 931 | def id(self): |
|
929 | 932 | """ |
|
930 | 933 | Returns string identifying this commit. |
|
931 | 934 | """ |
|
932 | 935 | raise NotImplementedError |
|
933 | 936 | |
|
934 | 937 | @LazyProperty |
|
935 | 938 | def raw_id(self): |
|
936 | 939 | """ |
|
937 | 940 | Returns raw string identifying this commit. |
|
938 | 941 | """ |
|
939 | 942 | raise NotImplementedError |
|
940 | 943 | |
|
941 | 944 | @LazyProperty |
|
942 | 945 | def short_id(self): |
|
943 | 946 | """ |
|
944 | 947 | Returns shortened version of ``raw_id`` attribute, as string, |
|
945 | 948 | identifying this commit, useful for presentation to users. |
|
946 | 949 | """ |
|
947 | 950 | raise NotImplementedError |
|
948 | 951 | |
|
949 | 952 | @LazyProperty |
|
950 | 953 | def idx(self): |
|
951 | 954 | """ |
|
952 | 955 | Returns integer identifying this commit. |
|
953 | 956 | """ |
|
954 | 957 | raise NotImplementedError |
|
955 | 958 | |
|
956 | 959 | @LazyProperty |
|
957 | 960 | def committer(self): |
|
958 | 961 | """ |
|
959 | 962 | Returns committer for this commit |
|
960 | 963 | """ |
|
961 | 964 | raise NotImplementedError |
|
962 | 965 | |
|
963 | 966 | @LazyProperty |
|
964 | 967 | def committer_name(self): |
|
965 | 968 | """ |
|
966 | 969 | Returns committer name for this commit |
|
967 | 970 | """ |
|
968 | 971 | |
|
969 | 972 | return author_name(self.committer) |
|
970 | 973 | |
|
971 | 974 | @LazyProperty |
|
972 | 975 | def committer_email(self): |
|
973 | 976 | """ |
|
974 | 977 | Returns committer email address for this commit |
|
975 | 978 | """ |
|
976 | 979 | |
|
977 | 980 | return author_email(self.committer) |
|
978 | 981 | |
|
979 | 982 | @LazyProperty |
|
980 | 983 | def author(self): |
|
981 | 984 | """ |
|
982 | 985 | Returns author for this commit |
|
983 | 986 | """ |
|
984 | 987 | |
|
985 | 988 | raise NotImplementedError |
|
986 | 989 | |
|
987 | 990 | @LazyProperty |
|
988 | 991 | def author_name(self): |
|
989 | 992 | """ |
|
990 | 993 | Returns author name for this commit |
|
991 | 994 | """ |
|
992 | 995 | |
|
993 | 996 | return author_name(self.author) |
|
994 | 997 | |
|
995 | 998 | @LazyProperty |
|
996 | 999 | def author_email(self): |
|
997 | 1000 | """ |
|
998 | 1001 | Returns author email address for this commit |
|
999 | 1002 | """ |
|
1000 | 1003 | |
|
1001 | 1004 | return author_email(self.author) |
|
1002 | 1005 | |
|
1003 | 1006 | def get_file_mode(self, path): |
|
1004 | 1007 | """ |
|
1005 | 1008 | Returns stat mode of the file at `path`. |
|
1006 | 1009 | """ |
|
1007 | 1010 | raise NotImplementedError |
|
1008 | 1011 | |
|
1009 | 1012 | def is_link(self, path): |
|
1010 | 1013 | """ |
|
1011 | 1014 | Returns ``True`` if given `path` is a symlink |
|
1012 | 1015 | """ |
|
1013 | 1016 | raise NotImplementedError |
|
1014 | 1017 | |
|
1015 | 1018 | def get_file_content(self, path): |
|
1016 | 1019 | """ |
|
1017 | 1020 | Returns content of the file at the given `path`. |
|
1018 | 1021 | """ |
|
1019 | 1022 | raise NotImplementedError |
|
1020 | 1023 | |
|
1021 | 1024 | def get_file_size(self, path): |
|
1022 | 1025 | """ |
|
1023 | 1026 | Returns size of the file at the given `path`. |
|
1024 | 1027 | """ |
|
1025 | 1028 | raise NotImplementedError |
|
1026 | 1029 | |
|
1027 | 1030 | def get_path_commit(self, path, pre_load=None): |
|
1028 | 1031 | """ |
|
1029 | 1032 | Returns last commit of the file at the given `path`. |
|
1030 | 1033 | |
|
1031 | 1034 | :param pre_load: Optional. List of commit attributes to load. |
|
1032 | 1035 | """ |
|
1033 | 1036 | commits = self.get_path_history(path, limit=1, pre_load=pre_load) |
|
1034 | 1037 | if not commits: |
|
1035 | 1038 | raise RepositoryError( |
|
1036 | 1039 | 'Failed to fetch history for path {}. ' |
|
1037 | 1040 | 'Please check if such path exists in your repository'.format( |
|
1038 | 1041 | path)) |
|
1039 | 1042 | return commits[0] |
|
1040 | 1043 | |
|
1041 | 1044 | def get_path_history(self, path, limit=None, pre_load=None): |
|
1042 | 1045 | """ |
|
1043 | 1046 | Returns history of file as reversed list of :class:`BaseCommit` |
|
1044 | 1047 | objects for which file at given `path` has been modified. |
|
1045 | 1048 | |
|
1046 | 1049 | :param limit: Optional. Allows to limit the size of the returned |
|
1047 | 1050 | history. This is intended as a hint to the underlying backend, so |
|
1048 | 1051 | that it can apply optimizations depending on the limit. |
|
1049 | 1052 | :param pre_load: Optional. List of commit attributes to load. |
|
1050 | 1053 | """ |
|
1051 | 1054 | raise NotImplementedError |
|
1052 | 1055 | |
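A small usage sketch of the two path-history helpers above; the file path is hypothetical and a concrete backend must implement `get_path_history`:

    # last commit that touched the file, with a couple of attributes pre-loaded
    last_change = commit.get_path_commit('docs/index.rst', pre_load=['author', 'date'])

    # up to ten most recent commits that modified the same file
    history = commit.get_path_history('docs/index.rst', limit=10)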
|
1053 | 1056 | def get_file_annotate(self, path, pre_load=None): |
|
1054 | 1057 | """ |
|
1055 | 1058 | Returns a generator of four element tuples with |
|
1056 | 1059 | lineno, sha, commit lazy loader and line |
|
1057 | 1060 | |
|
1058 | 1061 | :param pre_load: Optional. List of commit attributes to load. |
|
1059 | 1062 | """ |
|
1060 | 1063 | raise NotImplementedError |
|
1061 | 1064 | |
|
1062 | 1065 | def get_nodes(self, path): |
|
1063 | 1066 | """ |
|
1064 | 1067 | Returns combined ``DirNode`` and ``FileNode`` objects list representing |
|
1065 | 1068 | state of commit at the given ``path``. |
|
1066 | 1069 | |
|
1067 | 1070 | :raises ``CommitError``: if node at the given ``path`` is not |
|
1068 | 1071 | instance of ``DirNode`` |
|
1069 | 1072 | """ |
|
1070 | 1073 | raise NotImplementedError |
|
1071 | 1074 | |
|
1072 | 1075 | def get_node(self, path): |
|
1073 | 1076 | """ |
|
1074 | 1077 | Returns ``Node`` object from the given ``path``. |
|
1075 | 1078 | |
|
1076 | 1079 | :raises ``NodeDoesNotExistError``: if there is no node at the given |
|
1077 | 1080 | ``path`` |
|
1078 | 1081 | """ |
|
1079 | 1082 | raise NotImplementedError |
|
1080 | 1083 | |
|
1081 | 1084 | def get_largefile_node(self, path): |
|
1082 | 1085 | """ |
|
1083 | 1086 | Returns the path to largefile from Mercurial/Git-lfs storage, |
|
1084 | 1087 | or None if it's not a largefile node |
|
1085 | 1088 | """ |
|
1086 | 1089 | return None |
|
1087 | 1090 | |
|
1088 | 1091 | def archive_repo(self, file_path, kind='tgz', subrepos=None, |
|
1089 | 1092 | prefix=None, write_metadata=False, mtime=None): |
|
1090 | 1093 | """ |
|
1091 | 1094 | Creates an archive containing the contents of the repository. |
|
1092 | 1095 | |
|
1093 | 1096 | :param file_path: path of the file in which to create the archive. |
|
1094 | 1097 | :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``. |
|
1095 | 1098 | :param prefix: name of root directory in archive. |
|
1096 | 1099 | Default is repository name and commit's short_id joined with dash: |
|
1097 | 1100 | ``"{repo_name}-{short_id}"``. |
|
1098 | 1101 | :param write_metadata: write a metadata file into archive. |
|
1099 | 1102 | :param mtime: custom modification time for archive creation, defaults |
|
1100 | 1103 | to time.time() if not given. |
|
1101 | 1104 | |
|
1102 | 1105 | :raise VCSError: If prefix has a problem. |
|
1103 | 1106 | """ |
|
1104 | 1107 | allowed_kinds = settings.ARCHIVE_SPECS.keys() |
|
1105 | 1108 | if kind not in allowed_kinds: |
|
1106 | 1109 | raise ImproperArchiveTypeError( |
|
1107 | 1110 | 'Archive kind (%s) not supported use one of %s' % |
|
1108 | 1111 | (kind, allowed_kinds)) |
|
1109 | 1112 | |
|
1110 | 1113 | prefix = self._validate_archive_prefix(prefix) |
|
1111 | 1114 | |
|
1112 | 1115 | mtime = mtime or time.mktime(self.date.timetuple()) |
|
1113 | 1116 | |
|
1114 | 1117 | file_info = [] |
|
1115 | 1118 | cur_rev = self.repository.get_commit(commit_id=self.raw_id) |
|
1116 | 1119 | for _r, _d, files in cur_rev.walk('/'): |
|
1117 | 1120 | for f in files: |
|
1118 | 1121 | f_path = os.path.join(prefix, f.path) |
|
1119 | 1122 | file_info.append( |
|
1120 | 1123 | (f_path, f.mode, f.is_link(), f.raw_bytes)) |
|
1121 | 1124 | |
|
1122 | 1125 | if write_metadata: |
|
1123 | 1126 | metadata = [ |
|
1124 | 1127 | ('repo_name', self.repository.name), |
|
1125 | 1128 | ('rev', self.raw_id), |
|
1126 | 1129 | ('create_time', mtime), |
|
1127 | 1130 | ('branch', self.branch), |
|
1128 | 1131 | ('tags', ','.join(self.tags)), |
|
1129 | 1132 | ] |
|
1130 | 1133 | meta = ["%s:%s" % (f_name, value) for f_name, value in metadata] |
|
1131 | 1134 | file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta))) |
|
1132 | 1135 | |
|
1133 | 1136 | connection.Hg.archive_repo(file_path, mtime, file_info, kind) |
|
1134 | 1137 | |
|
1135 | 1138 | def _validate_archive_prefix(self, prefix): |
|
1136 | 1139 | if prefix is None: |
|
1137 | 1140 | prefix = self._ARCHIVE_PREFIX_TEMPLATE.format( |
|
1138 | 1141 | repo_name=safe_str(self.repository.name), |
|
1139 | 1142 | short_id=self.short_id) |
|
1140 | 1143 | elif not isinstance(prefix, str): |
|
1141 | 1144 | raise ValueError("prefix not a bytes object: %s" % repr(prefix)) |
|
1142 | 1145 | elif prefix.startswith('/'): |
|
1143 | 1146 | raise VCSError("Prefix cannot start with leading slash") |
|
1144 | 1147 | elif prefix.strip() == '': |
|
1145 | 1148 | raise VCSError("Prefix cannot be empty") |
|
1146 | 1149 | return prefix |
|
1147 | 1150 | |
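A usage sketch of `archive_repo` under the validation rules above; output paths are placeholders and `kind` must be a key of `settings.ARCHIVE_SPECS` (e.g. tgz, tbz2, zip):

    # default prefix becomes "<repo_name>-<short_id>"; metadata file is optional
    commit.archive_repo('/tmp/my-repo.tar.gz', kind='tgz', write_metadata=True)

    # explicit prefix; a leading slash or an empty prefix raises VCSError
    commit.archive_repo('/tmp/my-repo.zip', kind='zip', prefix='my-repo-export')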
|
1148 | 1151 | @LazyProperty |
|
1149 | 1152 | def root(self): |
|
1150 | 1153 | """ |
|
1151 | 1154 | Returns ``RootNode`` object for this commit. |
|
1152 | 1155 | """ |
|
1153 | 1156 | return self.get_node('') |
|
1154 | 1157 | |
|
1155 | 1158 | def next(self, branch=None): |
|
1156 | 1159 | """ |
|
1157 | 1160 | Returns next commit from current, if branch is given it will return |
|
1158 | 1161 | next commit belonging to this branch |
|
1159 | 1162 | |
|
1160 | 1163 | :param branch: show commits within the given named branch |
|
1161 | 1164 | """ |
|
1162 | 1165 | indexes = xrange(self.idx + 1, self.repository.count()) |
|
1163 | 1166 | return self._find_next(indexes, branch) |
|
1164 | 1167 | |
|
1165 | 1168 | def prev(self, branch=None): |
|
1166 | 1169 | """ |
|
1167 | 1170 | Returns previous commit from current, if branch is given it will |
|
1168 | 1171 | return previous commit belonging to this branch |
|
1169 | 1172 | |
|
1170 | 1173 | :param branch: show commit within the given named branch |
|
1171 | 1174 | """ |
|
1172 | 1175 | indexes = xrange(self.idx - 1, -1, -1) |
|
1173 | 1176 | return self._find_next(indexes, branch) |
|
1174 | 1177 | |
|
1175 | 1178 | def _find_next(self, indexes, branch=None): |
|
1176 | 1179 | if branch and self.branch != branch: |
|
1177 | 1180 | raise VCSError('Branch option used on commit not belonging ' |
|
1178 | 1181 | 'to that branch') |
|
1179 | 1182 | |
|
1180 | 1183 | for next_idx in indexes: |
|
1181 | 1184 | commit = self.repository.get_commit(commit_idx=next_idx) |
|
1182 | 1185 | if branch and branch != commit.branch: |
|
1183 | 1186 | continue |
|
1184 | 1187 | return commit |
|
1185 | 1188 | raise CommitDoesNotExistError |
|
1186 | 1189 | |
|
1187 | 1190 | def diff(self, ignore_whitespace=True, context=3): |
|
1188 | 1191 | """ |
|
1189 | 1192 | Returns a `Diff` object representing the change made by this commit. |
|
1190 | 1193 | """ |
|
1191 | 1194 | parent = self.first_parent |
|
1192 | 1195 | diff = self.repository.get_diff( |
|
1193 | 1196 | parent, self, |
|
1194 | 1197 | ignore_whitespace=ignore_whitespace, |
|
1195 | 1198 | context=context) |
|
1196 | 1199 | return diff |
|
1197 | 1200 | |
|
1198 | 1201 | @LazyProperty |
|
1199 | 1202 | def added(self): |
|
1200 | 1203 | """ |
|
1201 | 1204 | Returns list of added ``FileNode`` objects. |
|
1202 | 1205 | """ |
|
1203 | 1206 | raise NotImplementedError |
|
1204 | 1207 | |
|
1205 | 1208 | @LazyProperty |
|
1206 | 1209 | def changed(self): |
|
1207 | 1210 | """ |
|
1208 | 1211 | Returns list of modified ``FileNode`` objects. |
|
1209 | 1212 | """ |
|
1210 | 1213 | raise NotImplementedError |
|
1211 | 1214 | |
|
1212 | 1215 | @LazyProperty |
|
1213 | 1216 | def removed(self): |
|
1214 | 1217 | """ |
|
1215 | 1218 | Returns list of removed ``FileNode`` objects. |
|
1216 | 1219 | """ |
|
1217 | 1220 | raise NotImplementedError |
|
1218 | 1221 | |
|
1219 | 1222 | @LazyProperty |
|
1220 | 1223 | def size(self): |
|
1221 | 1224 | """ |
|
1222 | 1225 | Returns total number of bytes from contents of all filenodes. |
|
1223 | 1226 | """ |
|
1224 | 1227 | return sum((node.size for node in self.get_filenodes_generator())) |
|
1225 | 1228 | |
|
1226 | 1229 | def walk(self, topurl=''): |
|
1227 | 1230 | """ |
|
1228 | 1231 | Similar to os.walk method. Instead of the filesystem it walks through |
|
1229 | 1232 | commit starting at given ``topurl``. Returns generator of tuples |
|
1230 | 1233 | (topnode, dirnodes, filenodes). |
|
1231 | 1234 | """ |
|
1232 | 1235 | topnode = self.get_node(topurl) |
|
1233 | 1236 | if not topnode.is_dir(): |
|
1234 | 1237 | return |
|
1235 | 1238 | yield (topnode, topnode.dirs, topnode.files) |
|
1236 | 1239 | for dirnode in topnode.dirs: |
|
1237 | 1240 | for tup in self.walk(dirnode.path): |
|
1238 | 1241 | yield tup |
|
1239 | 1242 | |
|
1240 | 1243 | def get_filenodes_generator(self): |
|
1241 | 1244 | """ |
|
1242 | 1245 | Returns generator that yields *all* file nodes. |
|
1243 | 1246 | """ |
|
1244 | 1247 | for topnode, dirs, files in self.walk(): |
|
1245 | 1248 | for node in files: |
|
1246 | 1249 | yield node |
|
1247 | 1250 | |
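To illustrate the traversal helpers above, a brief sketch that walks a commit's tree the same way the `size` property does:

    for topnode, dirnodes, filenodes in commit.walk(''):
        for f in filenodes:
            print(f.path, f.size)

    total_bytes = sum(node.size for node in commit.get_filenodes_generator())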
|
1248 | 1251 | # |
|
1249 | 1252 | # Utilities for sub classes to support consistent behavior |
|
1250 | 1253 | # |
|
1251 | 1254 | |
|
1252 | 1255 | def no_node_at_path(self, path): |
|
1253 | 1256 | return NodeDoesNotExistError( |
|
1254 | 1257 | u"There is no file nor directory at the given path: " |
|
1255 | 1258 | u"`%s` at commit %s" % (safe_unicode(path), self.short_id)) |
|
1256 | 1259 | |
|
1257 | 1260 | def _fix_path(self, path): |
|
1258 | 1261 | """ |
|
1259 | 1262 | Paths are stored without trailing slash so we need to get rid of it if |
|
1260 | 1263 | needed. |
|
1261 | 1264 | """ |
|
1262 | 1265 | return path.rstrip('/') |
|
1263 | 1266 | |
|
1264 | 1267 | # |
|
1265 | 1268 | # Deprecated API based on changesets |
|
1266 | 1269 | # |
|
1267 | 1270 | |
|
1268 | 1271 | @property |
|
1269 | 1272 | def revision(self): |
|
1270 | 1273 | warnings.warn("Use idx instead", DeprecationWarning) |
|
1271 | 1274 | return self.idx |
|
1272 | 1275 | |
|
1273 | 1276 | @revision.setter |
|
1274 | 1277 | def revision(self, value): |
|
1275 | 1278 | warnings.warn("Use idx instead", DeprecationWarning) |
|
1276 | 1279 | self.idx = value |
|
1277 | 1280 | |
|
1278 | 1281 | def get_file_changeset(self, path): |
|
1279 | 1282 | warnings.warn("Use get_path_commit instead", DeprecationWarning) |
|
1280 | 1283 | return self.get_path_commit(path) |
|
1281 | 1284 | |
|
1282 | 1285 | |
|
1283 | 1286 | class BaseChangesetClass(type): |
|
1284 | 1287 | |
|
1285 | 1288 | def __instancecheck__(self, instance): |
|
1286 | 1289 | return isinstance(instance, BaseCommit) |
|
1287 | 1290 | |
|
1288 | 1291 | |
|
1289 | 1292 | class BaseChangeset(BaseCommit): |
|
1290 | 1293 | |
|
1291 | 1294 | __metaclass__ = BaseChangesetClass |
|
1292 | 1295 | |
|
1293 | 1296 | def __new__(cls, *args, **kwargs): |
|
1294 | 1297 | warnings.warn( |
|
1295 | 1298 | "Use BaseCommit instead of BaseChangeset", DeprecationWarning) |
|
1296 | 1299 | return super(BaseChangeset, cls).__new__(cls, *args, **kwargs) |
|
1297 | 1300 | |
|
1298 | 1301 | |
|
1299 | 1302 | class BaseInMemoryCommit(object): |
|
1300 | 1303 | """ |
|
1301 | 1304 | Represents differences between repository's state (most recent head) and |
|
1302 | 1305 | changes made *in place*. |
|
1303 | 1306 | |
|
1304 | 1307 | **Attributes** |
|
1305 | 1308 | |
|
1306 | 1309 | ``repository`` |
|
1307 | 1310 | repository object for this in-memory-commit |
|
1308 | 1311 | |
|
1309 | 1312 | ``added`` |
|
1310 | 1313 | list of ``FileNode`` objects marked as *added* |
|
1311 | 1314 | |
|
1312 | 1315 | ``changed`` |
|
1313 | 1316 | list of ``FileNode`` objects marked as *changed* |
|
1314 | 1317 | |
|
1315 | 1318 | ``removed`` |
|
1316 | 1319 | list of ``FileNode`` or ``RemovedFileNode`` objects marked to be |
|
1317 | 1320 | *removed* |
|
1318 | 1321 | |
|
1319 | 1322 | ``parents`` |
|
1320 | 1323 | list of :class:`BaseCommit` instances representing parents of |
|
1321 | 1324 | in-memory commit. Should always be 2-element sequence. |
|
1322 | 1325 | |
|
1323 | 1326 | """ |
|
1324 | 1327 | |
|
1325 | 1328 | def __init__(self, repository): |
|
1326 | 1329 | self.repository = repository |
|
1327 | 1330 | self.added = [] |
|
1328 | 1331 | self.changed = [] |
|
1329 | 1332 | self.removed = [] |
|
1330 | 1333 | self.parents = [] |
|
1331 | 1334 | |
|
1332 | 1335 | def add(self, *filenodes): |
|
1333 | 1336 | """ |
|
1334 | 1337 | Marks given ``FileNode`` objects as *to be committed*. |
|
1335 | 1338 | |
|
1336 | 1339 | :raises ``NodeAlreadyExistsError``: if node with same path exists at |
|
1337 | 1340 | latest commit |
|
1338 | 1341 | :raises ``NodeAlreadyAddedError``: if node with same path is already |
|
1339 | 1342 | marked as *added* |
|
1340 | 1343 | """ |
|
1341 | 1344 | # Check if not already marked as *added* first |
|
1342 | 1345 | for node in filenodes: |
|
1343 | 1346 | if node.path in (n.path for n in self.added): |
|
1344 | 1347 | raise NodeAlreadyAddedError( |
|
1345 | 1348 | "FileNode %s is already marked for addition"
|
1346 | 1349 | % node.path) |
|
1347 | 1350 | for node in filenodes: |
|
1348 | 1351 | self.added.append(node) |
|
1349 | 1352 | |
|
1350 | 1353 | def change(self, *filenodes): |
|
1351 | 1354 | """ |
|
1352 | 1355 | Marks given ``FileNode`` objects to be *changed* in next commit. |
|
1353 | 1356 | |
|
1354 | 1357 | :raises ``EmptyRepositoryError``: if there are no commits yet |
|
1355 | 1358 | :raises ``NodeAlreadyChangedError``: if node with same path is already
|
1356 | 1359 | marked to be *changed* |
|
1357 | 1360 | :raises ``NodeAlreadyRemovedError``: if node with same path is already |
|
1358 | 1361 | marked to be *removed* |
|
1359 | 1362 | :raises ``NodeDoesNotExistError``: if node doesn't exist in latest |
|
1360 | 1363 | commit |
|
1361 | 1364 | :raises ``NodeNotChangedError``: if node hasn't really been changed
|
1362 | 1365 | """ |
|
1363 | 1366 | for node in filenodes: |
|
1364 | 1367 | if node.path in (n.path for n in self.removed): |
|
1365 | 1368 | raise NodeAlreadyRemovedError( |
|
1366 | 1369 | "Node at %s is already marked as removed" % node.path) |
|
1367 | 1370 | try: |
|
1368 | 1371 | self.repository.get_commit() |
|
1369 | 1372 | except EmptyRepositoryError: |
|
1370 | 1373 | raise EmptyRepositoryError( |
|
1371 | 1374 | "Nothing to change - try to *add* new nodes rather than " |
|
1372 | 1375 | "changing them") |
|
1373 | 1376 | for node in filenodes: |
|
1374 | 1377 | if node.path in (n.path for n in self.changed): |
|
1375 | 1378 | raise NodeAlreadyChangedError( |
|
1376 | 1379 | "Node at '%s' is already marked as changed" % node.path) |
|
1377 | 1380 | self.changed.append(node) |
|
1378 | 1381 | |
|
1379 | 1382 | def remove(self, *filenodes): |
|
1380 | 1383 | """ |
|
1381 | 1384 | Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be |
|
1382 | 1385 | *removed* in next commit. |
|
1383 | 1386 | |
|
1384 | 1387 | :raises ``NodeAlreadyRemovedError``: if node has already been marked to
|
1385 | 1388 | be *removed* |
|
1386 | 1389 | :raises ``NodeAlreadyChangedError``: if node has already been marked to
|
1387 | 1390 | be *changed* |
|
1388 | 1391 | """ |
|
1389 | 1392 | for node in filenodes: |
|
1390 | 1393 | if node.path in (n.path for n in self.removed): |
|
1391 | 1394 | raise NodeAlreadyRemovedError( |
|
1392 | 1395 | "Node is already marked for removal at %s" % node.path)
|
1393 | 1396 | if node.path in (n.path for n in self.changed): |
|
1394 | 1397 | raise NodeAlreadyChangedError( |
|
1395 | 1398 | "Node is already marked to be changed at %s" % node.path) |
|
1396 | 1399 | # We only mark node as *removed* - real removal is done by |
|
1397 | 1400 | # commit method |
|
1398 | 1401 | self.removed.append(node) |
|
1399 | 1402 | |
|
1400 | 1403 | def reset(self): |
|
1401 | 1404 | """ |
|
1402 | 1405 | Resets this instance to initial state (cleans ``added``, ``changed`` |
|
1403 | 1406 | and ``removed`` lists). |
|
1404 | 1407 | """ |
|
1405 | 1408 | self.added = [] |
|
1406 | 1409 | self.changed = [] |
|
1407 | 1410 | self.removed = [] |
|
1408 | 1411 | self.parents = [] |
|
1409 | 1412 | |
|
1410 | 1413 | def get_ipaths(self): |
|
1411 | 1414 | """ |
|
1412 | 1415 | Returns generator of paths from nodes marked as added, changed or |
|
1413 | 1416 | removed. |
|
1414 | 1417 | """ |
|
1415 | 1418 | for node in itertools.chain(self.added, self.changed, self.removed): |
|
1416 | 1419 | yield node.path |
|
1417 | 1420 | |
|
1418 | 1421 | def get_paths(self): |
|
1419 | 1422 | """ |
|
1420 | 1423 | Returns list of paths from nodes marked as added, changed or removed. |
|
1421 | 1424 | """ |
|
1422 | 1425 | return list(self.get_ipaths()) |
|
1423 | 1426 | |
|
1424 | 1427 | def check_integrity(self, parents=None): |
|
1425 | 1428 | """ |
|
1426 | 1429 | Checks in-memory commit's integrity. Also, sets parents if not |
|
1427 | 1430 | already set. |
|
1428 | 1431 | |
|
1429 | 1432 | :raises CommitError: if any error occurs (e.g.
|
1430 | 1433 | ``NodeDoesNotExistError``). |
|
1431 | 1434 | """ |
|
1432 | 1435 | if not self.parents: |
|
1433 | 1436 | parents = parents or [] |
|
1434 | 1437 | if len(parents) == 0: |
|
1435 | 1438 | try: |
|
1436 | 1439 | parents = [self.repository.get_commit(), None] |
|
1437 | 1440 | except EmptyRepositoryError: |
|
1438 | 1441 | parents = [None, None] |
|
1439 | 1442 | elif len(parents) == 1: |
|
1440 | 1443 | parents += [None] |
|
1441 | 1444 | self.parents = parents |
|
1442 | 1445 | |
|
1443 | 1446 | # Local parents, only if not None |
|
1444 | 1447 | parents = [p for p in self.parents if p] |
|
1445 | 1448 | |
|
1446 | 1449 | # Check nodes marked as added |
|
1447 | 1450 | for p in parents: |
|
1448 | 1451 | for node in self.added: |
|
1449 | 1452 | try: |
|
1450 | 1453 | p.get_node(node.path) |
|
1451 | 1454 | except NodeDoesNotExistError: |
|
1452 | 1455 | pass |
|
1453 | 1456 | else: |
|
1454 | 1457 | raise NodeAlreadyExistsError( |
|
1455 | 1458 | "Node `%s` already exists at %s" % (node.path, p)) |
|
1456 | 1459 | |
|
1457 | 1460 | # Check nodes marked as changed |
|
1458 | 1461 | missing = set(self.changed) |
|
1459 | 1462 | not_changed = set(self.changed) |
|
1460 | 1463 | if self.changed and not parents: |
|
1461 | 1464 | raise NodeDoesNotExistError(str(self.changed[0].path)) |
|
1462 | 1465 | for p in parents: |
|
1463 | 1466 | for node in self.changed: |
|
1464 | 1467 | try: |
|
1465 | 1468 | old = p.get_node(node.path) |
|
1466 | 1469 | missing.remove(node) |
|
1467 | 1470 | # if content actually changed, remove node from not_changed |
|
1468 | 1471 | if old.content != node.content: |
|
1469 | 1472 | not_changed.remove(node) |
|
1470 | 1473 | except NodeDoesNotExistError: |
|
1471 | 1474 | pass |
|
1472 | 1475 | if self.changed and missing: |
|
1473 | 1476 | raise NodeDoesNotExistError( |
|
1474 | 1477 | "Node `%s` marked as modified but missing in parents: %s" |
|
1475 | 1478 | % (node.path, parents)) |
|
1476 | 1479 | |
|
1477 | 1480 | if self.changed and not_changed: |
|
1478 | 1481 | raise NodeNotChangedError( |
|
1479 | 1482 | "Node `%s` wasn't actually changed (parents: %s)" |
|
1480 | 1483 | % (not_changed.pop().path, parents)) |
|
1481 | 1484 | |
|
1482 | 1485 | # Check nodes marked as removed |
|
1483 | 1486 | if self.removed and not parents: |
|
1484 | 1487 | raise NodeDoesNotExistError( |
|
1485 | 1488 | "Cannot remove node at %s as there " |
|
1486 | 1489 | "were no parents specified" % self.removed[0].path) |
|
1487 | 1490 | really_removed = set() |
|
1488 | 1491 | for p in parents: |
|
1489 | 1492 | for node in self.removed: |
|
1490 | 1493 | try: |
|
1491 | 1494 | p.get_node(node.path) |
|
1492 | 1495 | really_removed.add(node) |
|
1493 | 1496 | except CommitError: |
|
1494 | 1497 | pass |
|
1495 | 1498 | not_removed = set(self.removed) - really_removed |
|
1496 | 1499 | if not_removed: |
|
1497 | 1500 | # TODO: johbo: This code branch does not seem to be covered |
|
1498 | 1501 | raise NodeDoesNotExistError( |
|
1499 | 1502 | "Cannot remove node at %s from " |
|
1500 | 1503 | "following parents: %s" % (not_removed, parents)) |
|
1501 | 1504 | |
|
1502 | 1505 | def commit( |
|
1503 | 1506 | self, message, author, parents=None, branch=None, date=None, |
|
1504 | 1507 | **kwargs): |
|
1505 | 1508 | """ |
|
1506 | 1509 | Performs in-memory commit (doesn't check workdir in any way) and |
|
1507 | 1510 | returns newly created :class:`BaseCommit`. Updates repository's |
|
1508 | 1511 | attribute `commits`. |
|
1509 | 1512 | |
|
1510 | 1513 | .. note:: |
|
1511 | 1514 | |
|
1512 | 1515 | While overriding this method, each backend should call

1513 | 1516 | ``self.check_integrity(parents)`` first.
|
1514 | 1517 | |
|
1515 | 1518 | :param message: message of the commit |
|
1516 | 1519 | :param author: full username, i.e. "Joe Doe <joe.doe@example.com>" |
|
1517 | 1520 | :param parents: single parent or sequence of parents from which commit |
|
1518 | 1521 | would be derived |
|
1519 | 1522 | :param date: ``datetime.datetime`` instance. Defaults to |
|
1520 | 1523 | ``datetime.datetime.now()``. |
|
1521 | 1524 | :param branch: branch name, as string. If none given, the backend's

1522 | 1525 | default branch is used.
|
1523 | 1526 | |
|
1524 | 1527 | :raises ``CommitError``: if any error occurs while committing |
|
1525 | 1528 | """ |
|
1526 | 1529 | raise NotImplementedError |
|
1527 | 1530 | |
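
A hedged sketch of the add/commit flow documented above; the FileNode import path, the hypothetical backend subclass and the `repository` object are assumptions made for illustration, since commit() itself is only implemented by concrete backends:

    from rhodecode.lib.vcs.nodes import FileNode      # assumed location of the node class

    imc = SomeBackendInMemoryCommit(repository)       # hypothetical concrete subclass
    imc.add(FileNode('docs/index.rst', content='index\n'))
    new_commit = imc.commit(
        message=u'Add docs index',
        author=u'Joe Doe <joe.doe@example.com>')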
|
1528 | 1531 | |
|
1529 | 1532 | class BaseInMemoryChangesetClass(type): |
|
1530 | 1533 | |
|
1531 | 1534 | def __instancecheck__(self, instance): |
|
1532 | 1535 | return isinstance(instance, BaseInMemoryCommit) |
|
1533 | 1536 | |
|
1534 | 1537 | |
|
1535 | 1538 | class BaseInMemoryChangeset(BaseInMemoryCommit): |
|
1536 | 1539 | |
|
1537 | 1540 | __metaclass__ = BaseInMemoryChangesetClass |
|
1538 | 1541 | |
|
1539 | 1542 | def __new__(cls, *args, **kwargs): |
|
1540 | 1543 | warnings.warn( |
|
1541 | 1544 | "Use BaseInMemoryCommit instead of BaseInMemoryChangeset", DeprecationWarning)
|
1542 | 1545 | return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs) |
|
1543 | 1546 | |
|
1544 | 1547 | |
|
1545 | 1548 | class EmptyCommit(BaseCommit): |
|
1546 | 1549 | """ |
|
1547 | 1550 | A dummy empty commit. It's possible to pass a hash when creating

1548 | 1551 | an EmptyCommit.
|
1549 | 1552 | """ |
|
1550 | 1553 | |
|
1551 | 1554 | def __init__( |
|
1552 | 1555 | self, commit_id='0' * 40, repo=None, alias=None, idx=-1, |
|
1553 | 1556 | message='', author='', date=None): |
|
1554 | 1557 | self._empty_commit_id = commit_id |
|
1555 | 1558 | # TODO: johbo: Solve idx parameter, default value does not make |
|
1556 | 1559 | # too much sense |
|
1557 | 1560 | self.idx = idx |
|
1558 | 1561 | self.message = message |
|
1559 | 1562 | self.author = author |
|
1560 | 1563 | self.date = date or datetime.datetime.fromtimestamp(0) |
|
1561 | 1564 | self.repository = repo |
|
1562 | 1565 | self.alias = alias |
|
1563 | 1566 | |
|
1564 | 1567 | @LazyProperty |
|
1565 | 1568 | def raw_id(self): |
|
1566 | 1569 | """ |
|
1567 | 1570 | Returns raw string identifying this commit, useful for web |
|
1568 | 1571 | representation. |
|
1569 | 1572 | """ |
|
1570 | 1573 | |
|
1571 | 1574 | return self._empty_commit_id |
|
1572 | 1575 | |
|
1573 | 1576 | @LazyProperty |
|
1574 | 1577 | def branch(self): |
|
1575 | 1578 | if self.alias: |
|
1576 | 1579 | from rhodecode.lib.vcs.backends import get_backend |
|
1577 | 1580 | return get_backend(self.alias).DEFAULT_BRANCH_NAME |
|
1578 | 1581 | |
|
1579 | 1582 | @LazyProperty |
|
1580 | 1583 | def short_id(self): |
|
1581 | 1584 | return self.raw_id[:12] |
|
1582 | 1585 | |
|
1583 | 1586 | @LazyProperty |
|
1584 | 1587 | def id(self): |
|
1585 | 1588 | return self.raw_id |
|
1586 | 1589 | |
|
1587 | 1590 | def get_path_commit(self, path): |
|
1588 | 1591 | return self |
|
1589 | 1592 | |
|
1590 | 1593 | def get_file_content(self, path): |
|
1591 | 1594 | return u'' |
|
1592 | 1595 | |
|
1593 | 1596 | def get_file_size(self, path): |
|
1594 | 1597 | return 0 |
|
1595 | 1598 | |
|
1596 | 1599 | |
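
EmptyCommit acts as a null object for repositories without commits; the values below follow directly from the defaults defined above (the 'git' alias is only an example):

    placeholder = EmptyCommit(alias='git')
    placeholder.raw_id                        # '0' * 40, the default commit_id
    placeholder.short_id                      # first 12 characters of raw_id
    placeholder.get_file_content('any/path')  # u''
    placeholder.get_file_size('any/path')     # 0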
|
1597 | 1600 | class EmptyChangesetClass(type): |
|
1598 | 1601 | |
|
1599 | 1602 | def __instancecheck__(self, instance): |
|
1600 | 1603 | return isinstance(instance, EmptyCommit) |
|
1601 | 1604 | |
|
1602 | 1605 | |
|
1603 | 1606 | class EmptyChangeset(EmptyCommit): |
|
1604 | 1607 | |
|
1605 | 1608 | __metaclass__ = EmptyChangesetClass |
|
1606 | 1609 | |
|
1607 | 1610 | def __new__(cls, *args, **kwargs): |
|
1608 | 1611 | warnings.warn( |
|
1609 | 1612 | "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning) |
|
1610 | 1613 | return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)
|
1611 | 1614 | |
|
1612 | 1615 | def __init__(self, cs='0' * 40, repo=None, requested_revision=None, |
|
1613 | 1616 | alias=None, revision=-1, message='', author='', date=None): |
|
1614 | 1617 | if requested_revision is not None: |
|
1615 | 1618 | warnings.warn( |
|
1616 | 1619 | "Parameter requested_revision not supported anymore", |
|
1617 | 1620 | DeprecationWarning) |
|
1618 | 1621 | super(EmptyChangeset, self).__init__( |
|
1619 | 1622 | commit_id=cs, repo=repo, alias=alias, idx=revision, |
|
1620 | 1623 | message=message, author=author, date=date) |
|
1621 | 1624 | |
|
1622 | 1625 | @property |
|
1623 | 1626 | def revision(self): |
|
1624 | 1627 | warnings.warn("Use idx instead", DeprecationWarning) |
|
1625 | 1628 | return self.idx |
|
1626 | 1629 | |
|
1627 | 1630 | @revision.setter |
|
1628 | 1631 | def revision(self, value): |
|
1629 | 1632 | warnings.warn("Use idx instead", DeprecationWarning) |
|
1630 | 1633 | self.idx = value |
|
1631 | 1634 | |
|
1632 | 1635 | |
|
1633 | 1636 | class EmptyRepository(BaseRepository): |
|
1634 | 1637 | def __init__(self, repo_path=None, config=None, create=False, **kwargs): |
|
1635 | 1638 | pass |
|
1636 | 1639 | |
|
1637 | 1640 | def get_diff(self, *args, **kwargs): |
|
1638 | 1641 | from rhodecode.lib.vcs.backends.git.diff import GitDiff |
|
1639 | 1642 | return GitDiff('') |
|
1640 | 1643 | |
|
1641 | 1644 | |
|
1642 | 1645 | class CollectionGenerator(object): |
|
1643 | 1646 | |
|
1644 | 1647 | def __init__(self, repo, commit_ids, collection_size=None, pre_load=None): |
|
1645 | 1648 | self.repo = repo |
|
1646 | 1649 | self.commit_ids = commit_ids |
|
1647 | 1650 | # TODO: (oliver) this isn't currently hooked up |
|
1648 | 1651 | self.collection_size = None |
|
1649 | 1652 | self.pre_load = pre_load |
|
1650 | 1653 | |
|
1651 | 1654 | def __len__(self): |
|
1652 | 1655 | if self.collection_size is not None: |
|
1653 | 1656 | return self.collection_size |
|
1654 | 1657 | return self.commit_ids.__len__() |
|
1655 | 1658 | |
|
1656 | 1659 | def __iter__(self): |
|
1657 | 1660 | for commit_id in self.commit_ids: |
|
1658 | 1661 | # TODO: johbo: Mercurial passes in commit indices or commit ids |
|
1659 | 1662 | yield self._commit_factory(commit_id) |
|
1660 | 1663 | |
|
1661 | 1664 | def _commit_factory(self, commit_id): |
|
1662 | 1665 | """ |
|
1663 | 1666 | Allows backends to override the way commits are generated. |
|
1664 | 1667 | """ |
|
1665 | 1668 | return self.repo.get_commit(commit_id=commit_id, |
|
1666 | 1669 | pre_load=self.pre_load) |
|
1667 | 1670 | |
|
1668 | 1671 | def __getslice__(self, i, j): |
|
1669 | 1672 | """ |
|
1670 | 1673 | Returns a sliced CollectionGenerator over the selected commit ids.
|
1671 | 1674 | """ |
|
1672 | 1675 | commit_ids = self.commit_ids[i:j] |
|
1673 | 1676 | return self.__class__( |
|
1674 | 1677 | self.repo, commit_ids, pre_load=self.pre_load) |
|
1675 | 1678 | |
|
1676 | 1679 | def __repr__(self): |
|
1677 | 1680 | return '<CollectionGenerator[len:%s]>' % (self.__len__()) |
|
1678 | 1681 | |
|
1679 | 1682 | |
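
A short sketch of how CollectionGenerator is meant to be used; `repo` and the commit ids are illustrative, and the slicing goes through the Python 2 __getslice__ hook defined above:

    commits = CollectionGenerator(repo, commit_ids=['a' * 40, 'b' * 40, 'c' * 40])
    len(commits)             # 3, taken straight from the commit_ids list
    newest = commits[0:2]    # a new, smaller CollectionGenerator; nothing is loaded yet
    for commit in newest:    # commits are materialised lazily via repo.get_commit()
        print(commit.raw_id)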
|
1680 | 1683 | class Config(object): |
|
1681 | 1684 | """ |
|
1682 | 1685 | Represents the configuration for a repository. |
|
1683 | 1686 | |
|
1684 | 1687 | The API is inspired by :class:`ConfigParser.ConfigParser` from the |
|
1685 | 1688 | standard library. It implements only the needed subset. |
|
1686 | 1689 | """ |
|
1687 | 1690 | |
|
1688 | 1691 | def __init__(self): |
|
1689 | 1692 | self._values = {} |
|
1690 | 1693 | |
|
1691 | 1694 | def copy(self): |
|
1692 | 1695 | clone = Config() |
|
1693 | 1696 | for section, values in self._values.items(): |
|
1694 | 1697 | clone._values[section] = values.copy() |
|
1695 | 1698 | return clone |
|
1696 | 1699 | |
|
1697 | 1700 | def __repr__(self): |
|
1698 | 1701 | return '<Config(%s sections) at %s>' % ( |
|
1699 | 1702 | len(self._values), hex(id(self))) |
|
1700 | 1703 | |
|
1701 | 1704 | def items(self, section): |
|
1702 | 1705 | return self._values.get(section, {}).iteritems() |
|
1703 | 1706 | |
|
1704 | 1707 | def get(self, section, option): |
|
1705 | 1708 | return self._values.get(section, {}).get(option) |
|
1706 | 1709 | |
|
1707 | 1710 | def set(self, section, option, value): |
|
1708 | 1711 | section_values = self._values.setdefault(section, {}) |
|
1709 | 1712 | section_values[option] = value |
|
1710 | 1713 | |
|
1711 | 1714 | def clear_section(self, section): |
|
1712 | 1715 | self._values[section] = {} |
|
1713 | 1716 | |
|
1714 | 1717 | def serialize(self): |
|
1715 | 1718 | """ |
|
1716 | 1719 | Creates a list of 3-tuples (section, key, value) representing
|
1717 | 1720 | this config object. |
|
1718 | 1721 | """ |
|
1719 | 1722 | items = [] |
|
1720 | 1723 | for section in self._values: |
|
1721 | 1724 | for option, value in self._values[section].items(): |
|
1722 | 1725 | items.append( |
|
1723 | 1726 | (safe_str(section), safe_str(option), safe_str(value))) |
|
1724 | 1727 | return items |
|
1725 | 1728 | |
|
1726 | 1729 | |
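
The Config object above is a thin wrapper around nested dicts; a minimal, self-contained illustration of its API:

    config = Config()
    config.set('ui', 'username', 'Joe Doe <joe.doe@example.com>')
    config.get('ui', 'username')   # 'Joe Doe <joe.doe@example.com>'
    config.get('ui', 'missing')    # None, unknown options are not an error
    config.serialize()             # [('ui', 'username', 'Joe Doe <joe.doe@example.com>')]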
|
1727 | 1730 | class Diff(object): |
|
1728 | 1731 | """ |
|
1729 | 1732 | Represents a diff result from a repository backend. |
|
1730 | 1733 | |
|
1731 | 1734 | Subclasses have to provide a backend specific value for |
|
1732 | 1735 | :attr:`_header_re` and :attr:`_meta_re`. |
|
1733 | 1736 | """ |
|
1734 | 1737 | _meta_re = None |
|
1735 | 1738 | _header_re = None |
|
1736 | 1739 | |
|
1737 | 1740 | def __init__(self, raw_diff): |
|
1738 | 1741 | self.raw = raw_diff |
|
1739 | 1742 | |
|
1740 | 1743 | def chunks(self): |
|
1741 | 1744 | """ |
|
1742 | 1745 | Split the diff into separate "diff --git a/file b/file" chunks.

1743 | 1746 | To keep chunks consistent we must prepend the raw diff with "\n", and make

1744 | 1747 | sure we can detect the last chunk, as it also has a special rule.
|
1745 | 1748 | """ |
|
1746 | 1749 | |
|
1747 | 1750 | diff_parts = ('\n' + self.raw).split('\ndiff --git') |
|
1748 | 1751 | header = diff_parts[0] |
|
1749 | 1752 | |
|
1750 | 1753 | if self._meta_re: |
|
1751 | 1754 | match = self._meta_re.match(header) |
|
1752 | 1755 | |
|
1753 | 1756 | chunks = diff_parts[1:] |
|
1754 | 1757 | total_chunks = len(chunks) |
|
1755 | 1758 | |
|
1756 | 1759 | return ( |
|
1757 | 1760 | DiffChunk(chunk, self, cur_chunk == total_chunks) |
|
1758 | 1761 | for cur_chunk, chunk in enumerate(chunks, start=1)) |
|
1759 | 1762 | |
|
1760 | 1763 | |
|
1761 | 1764 | class DiffChunk(object): |
|
1762 | 1765 | |
|
1763 | 1766 | def __init__(self, chunk, diff, last_chunk): |
|
1764 | 1767 | self._diff = diff |
|
1765 | 1768 | |
|
1766 | 1769 | # since we split by \ndiff --git that part is lost from original diff |
|
1767 | 1770 | # we need to re-apply it at the end, EXCEPT ! if it's last chunk |
|
1768 | 1771 | if not last_chunk: |
|
1769 | 1772 | chunk += '\n' |
|
1770 | 1773 | |
|
1771 | 1774 | match = self._diff._header_re.match(chunk) |
|
1772 | 1775 | self.header = match.groupdict() |
|
1773 | 1776 | self.diff = chunk[match.end():] |
|
1774 | 1777 | self.raw = chunk |
|
1775 | 1778 | |
|
1776 | 1779 | |
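
To show how Diff and DiffChunk cooperate, a hedged sketch with a hypothetical subclass; the simplistic _header_re is an assumption made for this example only, real backends define a much richer pattern:

    import re

    class ExampleDiff(Diff):
        # assumed, minimal header pattern -- not the real backend regex
        _header_re = re.compile(r' a/(?P<a_path>.+?) b/(?P<b_path>.+?)\n')

    raw = 'diff --git a/foo.txt b/foo.txt\n--- a/foo.txt\n+++ b/foo.txt\n'
    for chunk in ExampleDiff(raw).chunks():
        print(chunk.header['a_path'], chunk.diff.splitlines()[0])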
|
1777 | 1780 | class BasePathPermissionChecker(object): |
|
1778 | 1781 | |
|
1779 | 1782 | @staticmethod |
|
1780 | 1783 | def create_from_patterns(includes, excludes): |
|
1781 | 1784 | if includes and '*' in includes and not excludes: |
|
1782 | 1785 | return AllPathPermissionChecker() |
|
1783 | 1786 | elif excludes and '*' in excludes: |
|
1784 | 1787 | return NonePathPermissionChecker() |
|
1785 | 1788 | else: |
|
1786 | 1789 | return PatternPathPermissionChecker(includes, excludes) |
|
1787 | 1790 | |
|
1788 | 1791 | @property |
|
1789 | 1792 | def has_full_access(self): |
|
1790 | 1793 | raise NotImplementedError()
|
1791 | 1794 | |
|
1792 | 1795 | def has_access(self, path): |
|
1793 | 1796 | raise NotImplementedError()
|
1794 | 1797 | |
|
1795 | 1798 | |
|
1796 | 1799 | class AllPathPermissionChecker(BasePathPermissionChecker): |
|
1797 | 1800 | |
|
1798 | 1801 | @property |
|
1799 | 1802 | def has_full_access(self): |
|
1800 | 1803 | return True |
|
1801 | 1804 | |
|
1802 | 1805 | def has_access(self, path): |
|
1803 | 1806 | return True |
|
1804 | 1807 | |
|
1805 | 1808 | |
|
1806 | 1809 | class NonePathPermissionChecker(BasePathPermissionChecker): |
|
1807 | 1810 | |
|
1808 | 1811 | @property |
|
1809 | 1812 | def has_full_access(self): |
|
1810 | 1813 | return False |
|
1811 | 1814 | |
|
1812 | 1815 | def has_access(self, path): |
|
1813 | 1816 | return False |
|
1814 | 1817 | |
|
1815 | 1818 | |
|
1816 | 1819 | class PatternPathPermissionChecker(BasePathPermissionChecker): |
|
1817 | 1820 | |
|
1818 | 1821 | def __init__(self, includes, excludes): |
|
1819 | 1822 | self.includes = includes |
|
1820 | 1823 | self.excludes = excludes |
|
1821 | 1824 | self.includes_re = [] if not includes else [ |
|
1822 | 1825 | re.compile(fnmatch.translate(pattern)) for pattern in includes] |
|
1823 | 1826 | self.excludes_re = [] if not excludes else [ |
|
1824 | 1827 | re.compile(fnmatch.translate(pattern)) for pattern in excludes] |
|
1825 | 1828 | |
|
1826 | 1829 | @property |
|
1827 | 1830 | def has_full_access(self): |
|
1828 | 1831 | return '*' in self.includes and not self.excludes |
|
1829 | 1832 | |
|
1830 | 1833 | def has_access(self, path): |
|
1831 | 1834 | for regex in self.excludes_re: |
|
1832 | 1835 | if regex.match(path): |
|
1833 | 1836 | return False |
|
1834 | 1837 | for regex in self.includes_re: |
|
1835 | 1838 | if regex.match(path): |
|
1836 | 1839 | return True |
|
1837 | 1840 | return False |
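
Finally, a sketch of the create_from_patterns() factory defined above; the fnmatch-style patterns and paths are made up for illustration:

    checker = BasePathPermissionChecker.create_from_patterns(
        includes=['docs/*', 'README.rst'], excludes=['docs/internal/*'])
    checker.has_full_access                    # False, includes is not just '*'
    checker.has_access('README.rst')           # True, matches an include pattern
    checker.has_access('docs/internal/a.txt')  # False, excludes are checked first
    checker.has_access('src/app.py')           # False, matches no include pattern
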
@@ -1,255 +1,286 b'' | |||
|
1 | 1 | <%namespace name="base" file="/base/base.mako"/> |
|
2 | 2 | |
|
3 | 3 | <% |
|
4 | 4 | elems = [ |
|
5 | 5 | (_('Repository ID'), c.rhodecode_db_repo.repo_id, '', ''), |
|
6 | 6 | (_('Owner'), lambda:base.gravatar_with_user(c.rhodecode_db_repo.user.email), '', ''), |
|
7 | 7 | (_('Created on'), h.format_date(c.rhodecode_db_repo.created_on), '', ''), |
|
8 | 8 | (_('Updated on'), h.format_date(c.rhodecode_db_repo.updated_on), '', ''), |
|
9 | 9 | (_('Cached Commit id'), lambda: h.link_to(c.rhodecode_db_repo.changeset_cache.get('short_id'), h.route_path('repo_commit',repo_name=c.repo_name,commit_id=c.rhodecode_db_repo.changeset_cache.get('raw_id'))), '', ''), |
|
10 | 10 | (_('Attached scoped tokens'), len(c.rhodecode_db_repo.scoped_tokens), '', [x.user for x in c.rhodecode_db_repo.scoped_tokens]), |
|
11 | 11 | (_('Pull requests source'), len(c.rhodecode_db_repo.pull_requests_source), '', ['pr_id:{}, repo:{}'.format(x.pull_request_id,x.source_repo.repo_name) for x in c.rhodecode_db_repo.pull_requests_source]), |
|
12 | 12 | (_('Pull requests target'), len(c.rhodecode_db_repo.pull_requests_target), '', ['pr_id:{}, repo:{}'.format(x.pull_request_id,x.target_repo.repo_name) for x in c.rhodecode_db_repo.pull_requests_target]), |
|
13 | 13 | ] |
|
14 | 14 | %> |
|
15 | 15 | |
|
16 | 16 | <div class="panel panel-default"> |
|
17 | 17 | <div class="panel-heading" id="advanced-info" > |
|
18 | 18 | <h3 class="panel-title">${_('Repository: %s') % c.rhodecode_db_repo.repo_name} <a class="permalink" href="#advanced-info"> ¶</a></h3>
|
19 | 19 | </div> |
|
20 | 20 | <div class="panel-body"> |
|
21 | 21 | ${base.dt_info_panel(elems)} |
|
22 | 22 | </div> |
|
23 | 23 | </div> |
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | <div class="panel panel-default"> |
|
27 | 27 | <div class="panel-heading" id="advanced-fork"> |
|
28 | 28 | <h3 class="panel-title">${_('Fork Reference')} <a class="permalink" href="#advanced-fork"> ¶</a></h3>
|
29 | 29 | </div> |
|
30 | 30 | <div class="panel-body"> |
|
31 | 31 | ${h.secure_form(h.route_path('edit_repo_advanced_fork', repo_name=c.rhodecode_db_repo.repo_name), request=request)} |
|
32 | 32 | |
|
33 | 33 | % if c.rhodecode_db_repo.fork: |
|
34 | 34 | <div class="panel-body-title-text">${h.literal(_('This repository is a fork of %(repo_link)s') % {'repo_link': h.link_to_if(c.has_origin_repo_read_perm,c.rhodecode_db_repo.fork.repo_name, h.route_path('repo_summary', repo_name=c.rhodecode_db_repo.fork.repo_name))})} |
|
35 | 35 | | <button class="btn btn-link btn-danger" type="submit">Remove fork reference</button></div> |
|
36 | 36 | % endif |
|
37 | 37 | |
|
38 | 38 | <div class="field"> |
|
39 | 39 | ${h.hidden('id_fork_of')} |
|
40 | 40 | ${h.submit('set_as_fork_%s' % c.rhodecode_db_repo.repo_name,_('Set'),class_="btn btn-small",)} |
|
41 | 41 | </div> |
|
42 | 42 | <div class="field"> |
|
43 | 43 | <span class="help-block">${_('Manually set this repository as a fork of another from the list')}</span> |
|
44 | 44 | </div> |
|
45 | 45 | ${h.end_form()} |
|
46 | 46 | </div> |
|
47 | 47 | </div> |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | <div class="panel panel-default"> |
|
51 | 51 | <div class="panel-heading" id="advanced-journal"> |
|
52 | 52 | <h3 class="panel-title">${_('Public Journal Visibility')} <a class="permalink" href="#advanced-journal"> ¶</a></h3>
|
53 | 53 | </div> |
|
54 | 54 | <div class="panel-body"> |
|
55 | 55 | ${h.secure_form(h.route_path('edit_repo_advanced_journal', repo_name=c.rhodecode_db_repo.repo_name), request=request)} |
|
56 | 56 | <div class="field"> |
|
57 | 57 | %if c.in_public_journal: |
|
58 | 58 | <button class="btn btn-small" type="submit"> |
|
59 | 59 | ${_('Remove from Public Journal')} |
|
60 | 60 | </button> |
|
61 | 61 | %else: |
|
62 | 62 | <button class="btn btn-small" type="submit"> |
|
63 | 63 | ${_('Add to Public Journal')} |
|
64 | 64 | </button> |
|
65 | 65 | %endif |
|
66 | 66 | </div> |
|
67 | 67 | <div class="field" > |
|
68 | 68 | <span class="help-block">${_('All actions made on this repository will be visible to everyone following the public journal.')}</span> |
|
69 | 69 | </div> |
|
70 | 70 | ${h.end_form()} |
|
71 | 71 | </div> |
|
72 | 72 | </div> |
|
73 | 73 | |
|
74 | 74 | |
|
75 | 75 | <div class="panel panel-default"> |
|
76 | 76 | <div class="panel-heading" id="advanced-locking"> |
|
77 | 77 | <h3 class="panel-title">${_('Locking state')} <a class="permalink" href="#advanced-locking"> ¶</a></h3>
|
78 | 78 | </div> |
|
79 | 79 | <div class="panel-body"> |
|
80 | 80 | ${h.secure_form(h.route_path('edit_repo_advanced_locking', repo_name=c.rhodecode_db_repo.repo_name), request=request)} |
|
81 | 81 | |
|
82 | 82 | %if c.rhodecode_db_repo.locked[0]: |
|
83 | 83 | <div class="panel-body-title-text">${'Locked by %s on %s. Lock reason: %s' % (h.person_by_id(c.rhodecode_db_repo.locked[0]), |
|
84 | 84 | h.format_date(h.time_to_datetime(c.rhodecode_db_repo.locked[1])), c.rhodecode_db_repo.locked[2])}</div>
|
85 | 85 | %else: |
|
86 | 86 | <div class="panel-body-title-text">${_('This Repository is not currently locked.')}</div> |
|
87 | 87 | %endif |
|
88 | 88 | |
|
89 | 89 | <div class="field" > |
|
90 | 90 | %if c.rhodecode_db_repo.locked[0]: |
|
91 | 91 | ${h.hidden('set_unlock', '1')} |
|
92 | 92 | <button class="btn btn-small" type="submit" |
|
93 | 93 | onclick="return confirm('${_('Confirm to unlock repository.')}');"> |
|
94 | 94 | <i class="icon-unlock"></i> |
|
95 | 95 | ${_('Unlock repository')} |
|
96 | 96 | </button> |
|
97 | 97 | %else: |
|
98 | 98 | ${h.hidden('set_lock', '1')} |
|
99 | 99 | <button class="btn btn-small" type="submit" |
|
100 | 100 | onclick="return confirm('${_('Confirm to lock repository.')}');"> |
|
101 | 101 | <i class="icon-lock"></i> |
|
102 | 102 | ${_('Lock repository')} |
|
103 | 103 | </button> |
|
104 | 104 | %endif |
|
105 | 105 | </div> |
|
106 | 106 | <div class="field" > |
|
107 | 107 | <span class="help-block"> |
|
108 | 108 | ${_('Force repository locking. This only works when anonymous access is disabled. Pulling from the repository locks the repository to that user until the same user pushes to that repository again.')} |
|
109 | 109 | </span> |
|
110 | 110 | </div> |
|
111 | 111 | ${h.end_form()} |
|
112 | 112 | </div> |
|
113 | 113 | </div> |
|
114 | 114 | |
|
115 | 115 | |
|
116 | <div class="panel panel-default"> | |
|
117 | <div class="panel-heading" id="advanced-hooks"> | |
|
118 | <h3 class="panel-title">${_('Hooks')} <a class="permalink" href="#advanced-hooks"> ¶</a></h3> | |
|
119 | </div> | |
|
120 | <div class="panel-body"> | |
|
121 | <% ver_info_dict = c.rhodecode_db_repo.scm_instance().get_hooks_info() %> | |
|
122 | ||
|
123 | <table class="rctable"> | |
|
124 | <th>${_('Hook type')}</th> | |
|
125 | <th>${_('Hook version')}</th> | |
|
126 | <th>${_('Current version')}</th> | |
|
127 | ||
|
128 | <tr> | |
|
129 | <td>${_('PRE HOOK')}</td> | |
|
130 | <td>${ver_info_dict['pre_version']}</td> | |
|
131 | <td>${c.rhodecode_version}</td> | |
|
132 | </tr> | |
|
133 | <tr> | |
|
134 | <td>${_('POST HOOK')}</td> | |
|
135 | <td>${ver_info_dict['post_version']}</td> | |
|
136 | <td>${c.rhodecode_version}</td> | |
|
137 | </tr> | |
|
138 | </table> | |
|
139 | ||
|
140 | <a href="${h.route_path('edit_repo_advanced_hooks', repo_name=c.repo_name)}" | |
|
141 | onclick="return confirm('${_('Confirm to reinstall hooks for this repository.')}');"> | |
|
142 | ${_('Update Hooks')} | |
|
143 | </a> | |
|
144 | </div> | |
|
145 | </div> | |
|
146 | ||
|
116 | 147 | <div class="panel panel-warning"> |
|
117 | 148 | <div class="panel-heading" id="advanced-archive"> |
|
118 | 149 | <h3 class="panel-title">${_('Archive repository')} <a class="permalink" href="#advanced-archive"> ¶</a></h3>
|
119 | 150 | </div> |
|
120 | 151 | <div class="panel-body"> |
|
121 | 152 | ${h.secure_form(h.route_path('edit_repo_advanced_archive', repo_name=c.repo_name), request=request)} |
|
122 | 153 | |
|
123 | 154 | <div style="margin: 0 0 20px 0" class="fake-space"></div> |
|
124 | 155 | |
|
125 | 156 | <div class="field"> |
|
126 | 157 | <button class="btn btn-small btn-danger" type="submit" |
|
127 | 158 | onclick="return confirm('${_('Confirm to archive this repository: %s') % c.repo_name}');"> |
|
128 | 159 | <i class="icon-remove-sign"></i> |
|
129 | 160 | ${_('Archive this repository')} |
|
130 | 161 | </button> |
|
131 | 162 | </div> |
|
132 | 163 | <div class="field"> |
|
133 | 164 | <span class="help-block"> |
|
134 | 165 | ${_('Archiving the repository will make it entirely read-only. The repository cannot be committed to. '

135 | 166 | 'It is hidden from the search results and dashboard.')}
|
136 | 167 | </span> |
|
137 | 168 | </div> |
|
138 | 169 | |
|
139 | 170 | ${h.end_form()} |
|
140 | 171 | </div> |
|
141 | 172 | </div> |
|
142 | 173 | |
|
143 | 174 | |
|
144 | 175 | <div class="panel panel-danger"> |
|
145 | 176 | <div class="panel-heading" id="advanced-delete"> |
|
146 | 177 | <h3 class="panel-title">${_('Delete repository')} <a class="permalink" href="#advanced-delete"> ¶</a></h3>
|
147 | 178 | </div> |
|
148 | 179 | <div class="panel-body"> |
|
149 | 180 | ${h.secure_form(h.route_path('edit_repo_advanced_delete', repo_name=c.repo_name), request=request)} |
|
150 | 181 | <table class="display"> |
|
151 | 182 | <tr> |
|
152 | 183 | <td> |
|
153 | 184 | ${_ungettext('This repository has %s fork.', 'This repository has %s forks.', c.rhodecode_db_repo.forks.count()) % c.rhodecode_db_repo.forks.count()} |
|
154 | 185 | </td> |
|
155 | 186 | <td> |
|
156 | 187 | %if c.rhodecode_db_repo.forks.count(): |
|
157 | 188 | <input type="radio" name="forks" value="detach_forks" checked="checked"/> <label for="forks">${_('Detach forks')}</label> |
|
158 | 189 | %endif |
|
159 | 190 | </td> |
|
160 | 191 | <td> |
|
161 | 192 | %if c.rhodecode_db_repo.forks.count(): |
|
162 | 193 | <input type="radio" name="forks" value="delete_forks"/> <label for="forks">${_('Delete forks')}</label> |
|
163 | 194 | %endif |
|
164 | 195 | </td> |
|
165 | 196 | </tr> |
|
166 | 197 | <% attached_prs = len(c.rhodecode_db_repo.pull_requests_source + c.rhodecode_db_repo.pull_requests_target) %> |
|
167 | 198 | % if c.rhodecode_db_repo.pull_requests_source or c.rhodecode_db_repo.pull_requests_target: |
|
168 | 199 | <tr> |
|
169 | 200 | <td> |
|
170 | 201 | ${_ungettext('This repository has %s attached pull request.', 'This repository has %s attached pull requests.', attached_prs) % attached_prs} |
|
171 | 202 | <br/> |
|
172 | 203 | ${_('Consider archiving this repository instead.')}
|
173 | 204 | </td> |
|
174 | 205 | <td></td> |
|
175 | 206 | <td></td> |
|
176 | 207 | </tr> |
|
177 | 208 | % endif |
|
178 | 209 | </table> |
|
179 | 210 | <div style="margin: 0 0 20px 0" class="fake-space"></div> |
|
180 | 211 | |
|
181 | 212 | <div class="field"> |
|
182 | 213 | <button class="btn btn-small btn-danger" type="submit" |
|
183 | 214 | onclick="return confirm('${_('Confirm to delete this repository: %s') % c.repo_name}');"> |
|
184 | 215 | <i class="icon-remove-sign"></i> |
|
185 | 216 | ${_('Delete this repository')} |
|
186 | 217 | </button> |
|
187 | 218 | </div> |
|
188 | 219 | <div class="field"> |
|
189 | 220 | <span class="help-block"> |
|
190 | 221 | ${_('This repository will be renamed in a special way in order to make it inaccessible to RhodeCode Enterprise and its VCS systems. If you need to fully delete it from the file system, please do it manually, or with rhodecode-cleanup-repos command available in rhodecode-tools.')} |
|
191 | 222 | </span> |
|
192 | 223 | </div> |
|
193 | 224 | |
|
194 | 225 | ${h.end_form()} |
|
195 | 226 | </div> |
|
196 | 227 | </div> |
|
197 | 228 | |
|
198 | 229 | |
|
199 | 230 | <script> |
|
200 | 231 | |
|
201 | 232 | var currentRepoId = ${c.rhodecode_db_repo.repo_id}; |
|
202 | 233 | |
|
203 | 234 | var repoTypeFilter = function(data) { |
|
204 | 235 | var results = []; |
|
205 | 236 | |
|
206 | 237 | if (!data.results[0]) { |
|
207 | 238 | return data |
|
208 | 239 | } |
|
209 | 240 | |
|
210 | 241 | $.each(data.results[0].children, function() { |
|
211 | 242 | // filter out the SAME repo, it cannot be used as fork of itself |
|
212 | 243 | if (this.repo_id != currentRepoId) { |
|
213 | 244 | this.id = this.repo_id; |
|
214 | 245 | results.push(this) |
|
215 | 246 | } |
|
216 | 247 | }); |
|
217 | 248 | data.results[0].children = results; |
|
218 | 249 | return data; |
|
219 | 250 | }; |
|
220 | 251 | |
|
221 | 252 | $("#id_fork_of").select2({ |
|
222 | 253 | cachedDataSource: {}, |
|
223 | 254 | minimumInputLength: 2, |
|
224 | 255 | placeholder: "${_('Change repository') if c.rhodecode_db_repo.fork else _('Pick repository')}", |
|
225 | 256 | dropdownAutoWidth: true, |
|
226 | 257 | containerCssClass: "drop-menu", |
|
227 | 258 | dropdownCssClass: "drop-menu-dropdown", |
|
228 | 259 | formatResult: formatRepoResult, |
|
229 | 260 | query: $.debounce(250, function(query){ |
|
230 | 261 | self = this; |
|
231 | 262 | var cacheKey = query.term; |
|
232 | 263 | var cachedData = self.cachedDataSource[cacheKey]; |
|
233 | 264 | |
|
234 | 265 | if (cachedData) { |
|
235 | 266 | query.callback({results: cachedData.results}); |
|
236 | 267 | } else { |
|
237 | 268 | $.ajax({ |
|
238 | 269 | url: pyroutes.url('repo_list_data'), |
|
239 | 270 | data: {'query': query.term, repo_type: '${c.rhodecode_db_repo.repo_type}'}, |
|
240 | 271 | dataType: 'json', |
|
241 | 272 | type: 'GET', |
|
242 | 273 | success: function(data) { |
|
243 | 274 | data = repoTypeFilter(data); |
|
244 | 275 | self.cachedDataSource[cacheKey] = data; |
|
245 | 276 | query.callback({results: data.results}); |
|
246 | 277 | }, |
|
247 | 278 | error: function(data, textStatus, errorThrown) { |
|
248 | 279 | alert("Error while fetching entries.\nError code {0} ({1}).".format(data.status, data.statusText)); |
|
249 | 280 | } |
|
250 | 281 | }) |
|
251 | 282 | } |
|
252 | 283 | }) |
|
253 | 284 | }); |
|
254 | 285 | </script> |
|
255 | 286 |