caches: make gevent curl connection cache friendly....
marcink - r2946:193b4eb7 default
@@ -1,1746 +1,1749 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24
24
25 import collections
25 import collections
26 import datetime
26 import datetime
27 import fnmatch
27 import fnmatch
28 import itertools
28 import itertools
29 import logging
29 import logging
30 import os
30 import os
31 import re
31 import re
32 import time
32 import time
33 import warnings
33 import warnings
34 import shutil
34 import shutil
35
35
36 from zope.cachedescriptors.property import Lazy as LazyProperty
36 from zope.cachedescriptors.property import Lazy as LazyProperty
37
37
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
39 from rhodecode.lib.vcs import connection
39 from rhodecode.lib.vcs import connection
40 from rhodecode.lib.vcs.utils import author_name, author_email
40 from rhodecode.lib.vcs.utils import author_name, author_email
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 RepositoryError)
47 RepositoryError)
48
48
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 FILEMODE_DEFAULT = 0100644
53 FILEMODE_DEFAULT = 0100644
54 FILEMODE_EXECUTABLE = 0100755
54 FILEMODE_EXECUTABLE = 0100755
55
55
56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
57 MergeResponse = collections.namedtuple(
57 MergeResponse = collections.namedtuple(
58 'MergeResponse',
58 'MergeResponse',
59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
60
60
61
61
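Reviewer note: both of the tuples above are plain namedtuples, so a caller can build them directly; a minimal usage sketch (the branch name and commit id below are illustrative values, not taken from this change):

    # Illustrative values only.
    ref = Reference(type='branch', name='default', commit_id='a' * 40)
    ok = MergeResponse(
        possible=True, executed=True, merge_ref=ref,
        failure_reason=MergeFailureReason.NONE)
    assert ok.merge_ref.name == 'default'
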
62 class MergeFailureReason(object):
62 class MergeFailureReason(object):
63 """
63 """
64 Enumeration with all the reasons why the server side merge could fail.
64 Enumeration with all the reasons why the server side merge could fail.
65
65
66 DO NOT change the number of the reasons, as they may be stored in the
66 DO NOT change the number of the reasons, as they may be stored in the
67 database.
67 database.
68
68
69 Changing the name of a reason is acceptable and encouraged to deprecate old
69 Changing the name of a reason is acceptable and encouraged to deprecate old
70 reasons.
70 reasons.
71 """
71 """
72
72
73 # Everything went well.
73 # Everything went well.
74 NONE = 0
74 NONE = 0
75
75
76 # An unexpected exception was raised. Check the logs for more details.
76 # An unexpected exception was raised. Check the logs for more details.
77 UNKNOWN = 1
77 UNKNOWN = 1
78
78
79 # The merge was not successful, there are conflicts.
79 # The merge was not successful, there are conflicts.
80 MERGE_FAILED = 2
80 MERGE_FAILED = 2
81
81
82 # The merge succeeded but we could not push it to the target repository.
82 # The merge succeeded but we could not push it to the target repository.
83 PUSH_FAILED = 3
83 PUSH_FAILED = 3
84
84
85 # The specified target is not a head in the target repository.
85 # The specified target is not a head in the target repository.
86 TARGET_IS_NOT_HEAD = 4
86 TARGET_IS_NOT_HEAD = 4
87
87
88 # The source repository contains more branches than the target. Pushing
88 # The source repository contains more branches than the target. Pushing
89 # the merge will create additional branches in the target.
89 # the merge will create additional branches in the target.
90 HG_SOURCE_HAS_MORE_BRANCHES = 5
90 HG_SOURCE_HAS_MORE_BRANCHES = 5
91
91
92 # The target reference has multiple heads, which prevents correctly
92 # The target reference has multiple heads, which prevents correctly
93 # identifying the target location. This can only happen for mercurial
93 # identifying the target location. This can only happen for mercurial
94 # branches.
94 # branches.
95 HG_TARGET_HAS_MULTIPLE_HEADS = 6
95 HG_TARGET_HAS_MULTIPLE_HEADS = 6
96
96
97 # The target repository is locked
97 # The target repository is locked
98 TARGET_IS_LOCKED = 7
98 TARGET_IS_LOCKED = 7
99
99
100 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
100 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
101 # An involved commit could not be found.
101 # An involved commit could not be found.
102 _DEPRECATED_MISSING_COMMIT = 8
102 _DEPRECATED_MISSING_COMMIT = 8
103
103
104 # The target repo reference is missing.
104 # The target repo reference is missing.
105 MISSING_TARGET_REF = 9
105 MISSING_TARGET_REF = 9
106
106
107 # The source repo reference is missing.
107 # The source repo reference is missing.
108 MISSING_SOURCE_REF = 10
108 MISSING_SOURCE_REF = 10
109
109
110 # The merge was not successful, there are conflicts related to sub
110 # The merge was not successful, there are conflicts related to sub
111 # repositories.
111 # repositories.
112 SUBREPO_MERGE_FAILED = 11
112 SUBREPO_MERGE_FAILED = 11
113
113
114
114
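Reviewer note: since the numeric values may end up in the database, callers should compare against the named constants and only translate stored integers back to names for display; a small sketch using the class above:

    # Build a reverse map from stored integer to the current constant name.
    reason_names = {
        value: name for name, value in vars(MergeFailureReason).items()
        if isinstance(value, int)}
    assert reason_names[MergeFailureReason.PUSH_FAILED] == 'PUSH_FAILED'
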
115 class UpdateFailureReason(object):
115 class UpdateFailureReason(object):
116 """
116 """
117 Enumeration with all the reasons why the pull request update could fail.
117 Enumeration with all the reasons why the pull request update could fail.
118
118
119 DO NOT change the number of the reasons, as they may be stored in the
119 DO NOT change the number of the reasons, as they may be stored in the
120 database.
120 database.
121
121
122 Changing the name of a reason is acceptable and encouraged to deprecate old
122 Changing the name of a reason is acceptable and encouraged to deprecate old
123 reasons.
123 reasons.
124 """
124 """
125
125
126 # Everything went well.
126 # Everything went well.
127 NONE = 0
127 NONE = 0
128
128
129 # An unexpected exception was raised. Check the logs for more details.
129 # An unexpected exception was raised. Check the logs for more details.
130 UNKNOWN = 1
130 UNKNOWN = 1
131
131
132 # The pull request is up to date.
132 # The pull request is up to date.
133 NO_CHANGE = 2
133 NO_CHANGE = 2
134
134
135 # The pull request has a reference type that is not supported for update.
135 # The pull request has a reference type that is not supported for update.
136 WRONG_REF_TYPE = 3
136 WRONG_REF_TYPE = 3
137
137
138 # Update failed because the target reference is missing.
138 # Update failed because the target reference is missing.
139 MISSING_TARGET_REF = 4
139 MISSING_TARGET_REF = 4
140
140
141 # Update failed because the source reference is missing.
141 # Update failed because the source reference is missing.
142 MISSING_SOURCE_REF = 5
142 MISSING_SOURCE_REF = 5
143
143
144
144
145 class BaseRepository(object):
145 class BaseRepository(object):
146 """
146 """
147 Base Repository for final backends
147 Base Repository for final backends
148
148
149 .. attribute:: DEFAULT_BRANCH_NAME
149 .. attribute:: DEFAULT_BRANCH_NAME
150
150
151 name of the default branch (i.e. "trunk" for svn, "master" for git, etc.)
151 name of the default branch (i.e. "trunk" for svn, "master" for git, etc.)
152
152
153 .. attribute:: commit_ids
153 .. attribute:: commit_ids
154
154
155 list of all available commit ids, in ascending order
155 list of all available commit ids, in ascending order
156
156
157 .. attribute:: path
157 .. attribute:: path
158
158
159 absolute path to the repository
159 absolute path to the repository
160
160
161 .. attribute:: bookmarks
161 .. attribute:: bookmarks
162
162
163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
164 there are no bookmarks or the backend implementation does not support
164 there are no bookmarks or the backend implementation does not support
165 bookmarks.
165 bookmarks.
166
166
167 .. attribute:: tags
167 .. attribute:: tags
168
168
169 Mapping from name to :term:`Commit ID` of the tag.
169 Mapping from name to :term:`Commit ID` of the tag.
170
170
171 """
171 """
172
172
173 DEFAULT_BRANCH_NAME = None
173 DEFAULT_BRANCH_NAME = None
174 DEFAULT_CONTACT = u"Unknown"
174 DEFAULT_CONTACT = u"Unknown"
175 DEFAULT_DESCRIPTION = u"unknown"
175 DEFAULT_DESCRIPTION = u"unknown"
176 EMPTY_COMMIT_ID = '0' * 40
176 EMPTY_COMMIT_ID = '0' * 40
177
177
178 path = None
178 path = None
179 _remote = None
180
179
181 def __init__(self, repo_path, config=None, create=False, **kwargs):
180 def __init__(self, repo_path, config=None, create=False, **kwargs):
182 """
181 """
183 Initializes repository. Raises RepositoryError if repository could
182 Initializes repository. Raises RepositoryError if repository could
184 not be found at the given ``repo_path``, or a directory at ``repo_path``
183 not be found at the given ``repo_path``, or a directory at ``repo_path``
185 exists and ``create`` is set to True.
184 exists and ``create`` is set to True.
186
185
187 :param repo_path: local path of the repository
186 :param repo_path: local path of the repository
188 :param config: repository configuration
187 :param config: repository configuration
189 :param create=False: if set to True, would try to create repository.
188 :param create=False: if set to True, would try to create repository.
190 :param src_url=None: if set, should be proper url from which repository
189 :param src_url=None: if set, should be proper url from which repository
191 would be cloned; requires ``create`` parameter to be set to True -
190 would be cloned; requires ``create`` parameter to be set to True -
192 raises RepositoryError if src_url is set and create evaluates to
191 raises RepositoryError if src_url is set and create evaluates to
193 False
192 False
194 """
193 """
195 raise NotImplementedError
194 raise NotImplementedError
196
195
197 def __repr__(self):
196 def __repr__(self):
198 return '<%s at %s>' % (self.__class__.__name__, self.path)
197 return '<%s at %s>' % (self.__class__.__name__, self.path)
199
198
200 def __len__(self):
199 def __len__(self):
201 return self.count()
200 return self.count()
202
201
203 def __eq__(self, other):
202 def __eq__(self, other):
204 same_instance = isinstance(other, self.__class__)
203 same_instance = isinstance(other, self.__class__)
205 return same_instance and other.path == self.path
204 return same_instance and other.path == self.path
206
205
207 def __ne__(self, other):
206 def __ne__(self, other):
208 return not self.__eq__(other)
207 return not self.__eq__(other)
209
208
210 def get_create_shadow_cache_pr_path(self, db_repo):
209 def get_create_shadow_cache_pr_path(self, db_repo):
211 path = db_repo.cached_diffs_dir
210 path = db_repo.cached_diffs_dir
212 if not os.path.exists(path):
211 if not os.path.exists(path):
213 os.makedirs(path, 0755)
212 os.makedirs(path, 0755)
214 return path
213 return path
215
214
216 @classmethod
215 @classmethod
217 def get_default_config(cls, default=None):
216 def get_default_config(cls, default=None):
218 config = Config()
217 config = Config()
219 if default and isinstance(default, list):
218 if default and isinstance(default, list):
220 for section, key, val in default:
219 for section, key, val in default:
221 config.set(section, key, val)
220 config.set(section, key, val)
222 return config
221 return config
223
222
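Reviewer note: a sketch of how the ``default`` argument to ``get_default_config`` is consumed; ``SomeBackendRepository`` stands in for a concrete backend class and the section/key/value triple is purely illustrative:

    # `SomeBackendRepository` is a stand-in for a concrete backend class.
    config = SomeBackendRepository.get_default_config(
        default=[('ui', 'username', 'RhodeCode')])
    # Each (section, key, value) triple is applied via config.set().
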
224 @LazyProperty
223 @LazyProperty
224 def _remote(self):
225 raise NotImplementedError
226
227 @LazyProperty
225 def EMPTY_COMMIT(self):
228 def EMPTY_COMMIT(self):
226 return EmptyCommit(self.EMPTY_COMMIT_ID)
229 return EmptyCommit(self.EMPTY_COMMIT_ID)
227
230
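Reviewer note: the hunk above is the core of this change. The class-level ``_remote = None`` attribute (removed earlier in the diff) becomes a lazily evaluated property that concrete backends must provide, so each repository instance can defer building its remote connection until it is first accessed; presumably this is what keeps the gevent curl connection cache friendly, per the commit message. A rough sketch of an override, with ``make_remote_connection`` as an assumed placeholder rather than an API shown in this diff:

    # Hypothetical backend override; `make_remote_connection` stands in for
    # whatever connection factory a concrete backend actually uses.
    class SomeBackendRepository(BaseRepository):

        @LazyProperty
        def _remote(self):
            return make_remote_connection(self.path)
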
228 @LazyProperty
231 @LazyProperty
229 def alias(self):
232 def alias(self):
230 for k, v in settings.BACKENDS.items():
233 for k, v in settings.BACKENDS.items():
231 if v.split('.')[-1] == str(self.__class__.__name__):
234 if v.split('.')[-1] == str(self.__class__.__name__):
232 return k
235 return k
233
236
234 @LazyProperty
237 @LazyProperty
235 def name(self):
238 def name(self):
236 return safe_unicode(os.path.basename(self.path))
239 return safe_unicode(os.path.basename(self.path))
237
240
238 @LazyProperty
241 @LazyProperty
239 def description(self):
242 def description(self):
240 raise NotImplementedError
243 raise NotImplementedError
241
244
242 def refs(self):
245 def refs(self):
243 """
246 """
244 returns a `dict` with branches, bookmarks, tags, and closed_branches
247 returns a `dict` with branches, bookmarks, tags, and closed_branches
245 for this repository
248 for this repository
246 """
249 """
247 return dict(
250 return dict(
248 branches=self.branches,
251 branches=self.branches,
249 branches_closed=self.branches_closed,
252 branches_closed=self.branches_closed,
250 tags=self.tags,
253 tags=self.tags,
251 bookmarks=self.bookmarks
254 bookmarks=self.bookmarks
252 )
255 )
253
256
254 @LazyProperty
257 @LazyProperty
255 def branches(self):
258 def branches(self):
256 """
259 """
257 A `dict` which maps branch names to commit ids.
260 A `dict` which maps branch names to commit ids.
258 """
261 """
259 raise NotImplementedError
262 raise NotImplementedError
260
263
261 @LazyProperty
264 @LazyProperty
262 def branches_closed(self):
265 def branches_closed(self):
263 """
266 """
264 A `dict` which maps closed branch names to commit ids.
267 A `dict` which maps closed branch names to commit ids.
265 """
268 """
266 raise NotImplementedError
269 raise NotImplementedError
267
270
268 @LazyProperty
271 @LazyProperty
269 def bookmarks(self):
272 def bookmarks(self):
270 """
273 """
271 A `dict` which maps bookmark names to commit ids.
274 A `dict` which maps bookmark names to commit ids.
272 """
275 """
273 raise NotImplementedError
276 raise NotImplementedError
274
277
275 @LazyProperty
278 @LazyProperty
276 def tags(self):
279 def tags(self):
277 """
280 """
278 A `dict` which maps tag names to commit ids.
281 A `dict` which maps tag names to commit ids.
279 """
282 """
280 raise NotImplementedError
283 raise NotImplementedError
281
284
282 @LazyProperty
285 @LazyProperty
283 def size(self):
286 def size(self):
284 """
287 """
285 Returns combined size in bytes for all repository files
288 Returns combined size in bytes for all repository files
286 """
289 """
287 tip = self.get_commit()
290 tip = self.get_commit()
288 return tip.size
291 return tip.size
289
292
290 def size_at_commit(self, commit_id):
293 def size_at_commit(self, commit_id):
291 commit = self.get_commit(commit_id)
294 commit = self.get_commit(commit_id)
292 return commit.size
295 return commit.size
293
296
294 def is_empty(self):
297 def is_empty(self):
295 return not bool(self.commit_ids)
298 return not bool(self.commit_ids)
296
299
297 @staticmethod
300 @staticmethod
298 def check_url(url, config):
301 def check_url(url, config):
299 """
302 """
300 Checks the given url and tries to verify that it's a valid
303 Checks the given url and tries to verify that it's a valid
301 link.
304 link.
302 """
305 """
303 raise NotImplementedError
306 raise NotImplementedError
304
307
305 @staticmethod
308 @staticmethod
306 def is_valid_repository(path):
309 def is_valid_repository(path):
307 """
310 """
308 Check if given `path` contains a valid repository of this backend
311 Check if given `path` contains a valid repository of this backend
309 """
312 """
310 raise NotImplementedError
313 raise NotImplementedError
311
314
312 # ==========================================================================
315 # ==========================================================================
313 # COMMITS
316 # COMMITS
314 # ==========================================================================
317 # ==========================================================================
315
318
316 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
319 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
317 """
320 """
318 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
321 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
319 are both None, most recent commit is returned.
322 are both None, most recent commit is returned.
320
323
321 :param pre_load: Optional. List of commit attributes to load.
324 :param pre_load: Optional. List of commit attributes to load.
322
325
323 :raises ``EmptyRepositoryError``: if there are no commits
326 :raises ``EmptyRepositoryError``: if there are no commits
324 """
327 """
325 raise NotImplementedError
328 raise NotImplementedError
326
329
327 def __iter__(self):
330 def __iter__(self):
328 for commit_id in self.commit_ids:
331 for commit_id in self.commit_ids:
329 yield self.get_commit(commit_id=commit_id)
332 yield self.get_commit(commit_id=commit_id)
330
333
331 def get_commits(
334 def get_commits(
332 self, start_id=None, end_id=None, start_date=None, end_date=None,
335 self, start_id=None, end_id=None, start_date=None, end_date=None,
333 branch_name=None, show_hidden=False, pre_load=None):
336 branch_name=None, show_hidden=False, pre_load=None):
334 """
337 """
335 Returns iterator of `BaseCommit` objects from start to end
338 Returns iterator of `BaseCommit` objects from start to end
336 not inclusive. This should behave just like a list, i.e. end is not
339 not inclusive. This should behave just like a list, i.e. end is not
337 inclusive.
340 inclusive.
338
341
339 :param start_id: None or str, must be a valid commit id
342 :param start_id: None or str, must be a valid commit id
340 :param end_id: None or str, must be a valid commit id
343 :param end_id: None or str, must be a valid commit id
341 :param start_date:
344 :param start_date:
342 :param end_date:
345 :param end_date:
343 :param branch_name:
346 :param branch_name:
344 :param show_hidden:
347 :param show_hidden:
345 :param pre_load:
348 :param pre_load:
346 """
349 """
347 raise NotImplementedError
350 raise NotImplementedError
348
351
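Reviewer note: a caller-side sketch of ``get_commits``, assuming ``repo`` is an instance of a concrete backend (the Git, Mercurial or Subversion subclass); the date and pre_load values are illustrative:

    import datetime

    # `repo` is a hypothetical concrete backend instance.
    since = datetime.datetime(2018, 1, 1)
    for commit in repo.get_commits(start_date=since,
                                   pre_load=['author', 'date', 'message']):
        print('%s %s' % (commit.short_id, commit.author_name))
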
349 def __getitem__(self, key):
352 def __getitem__(self, key):
350 """
353 """
351 Allows index based access to the commit objects of this repository.
354 Allows index based access to the commit objects of this repository.
352 """
355 """
353 pre_load = ["author", "branch", "date", "message", "parents"]
356 pre_load = ["author", "branch", "date", "message", "parents"]
354 if isinstance(key, slice):
357 if isinstance(key, slice):
355 return self._get_range(key, pre_load)
358 return self._get_range(key, pre_load)
356 return self.get_commit(commit_idx=key, pre_load=pre_load)
359 return self.get_commit(commit_idx=key, pre_load=pre_load)
357
360
358 def _get_range(self, slice_obj, pre_load):
361 def _get_range(self, slice_obj, pre_load):
359 for commit_id in self.commit_ids.__getitem__(slice_obj):
362 for commit_id in self.commit_ids.__getitem__(slice_obj):
360 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
363 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
361
364
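Reviewer note: index and slice access are thin wrappers over ``get_commit``/``_get_range``; assuming ``repo`` is a concrete backend instance:

    # `repo` is a hypothetical concrete backend instance.
    first = repo[0]              # same as repo.get_commit(commit_idx=0, pre_load=...)
    last_five = list(repo[-5:])  # slices yield commits lazily via _get_range()
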
362 def count(self):
365 def count(self):
363 return len(self.commit_ids)
366 return len(self.commit_ids)
364
367
365 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
368 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
366 """
369 """
367 Creates and returns a tag for the given ``commit_id``.
370 Creates and returns a tag for the given ``commit_id``.
368
371
369 :param name: name for new tag
372 :param name: name for new tag
370 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
373 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
371 :param commit_id: commit id for which new tag would be created
374 :param commit_id: commit id for which new tag would be created
372 :param message: message of the tag's commit
375 :param message: message of the tag's commit
373 :param date: date of tag's commit
376 :param date: date of tag's commit
374
377
375 :raises TagAlreadyExistError: if tag with same name already exists
378 :raises TagAlreadyExistError: if tag with same name already exists
376 """
379 """
377 raise NotImplementedError
380 raise NotImplementedError
378
381
379 def remove_tag(self, name, user, message=None, date=None):
382 def remove_tag(self, name, user, message=None, date=None):
380 """
383 """
381 Removes tag with the given ``name``.
384 Removes tag with the given ``name``.
382
385
383 :param name: name of the tag to be removed
386 :param name: name of the tag to be removed
384 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
385 :param message: message of the tag's removal commit
388 :param message: message of the tag's removal commit
386 :param date: date of tag's removal commit
389 :param date: date of tag's removal commit
387
390
388 :raises TagDoesNotExistError: if tag with given name does not exist
391 :raises TagDoesNotExistError: if tag with given name does not exist
389 """
392 """
390 raise NotImplementedError
393 raise NotImplementedError
391
394
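Reviewer note: a tagging sketch, assuming ``repo`` is a concrete backend instance; the tag name and user string are illustrative, with the user following the documented "Full Name <email>" form:

    # Hypothetical values throughout.
    user = 'Joe Doe <joe.doe@example.com>'
    tip = repo.get_commit()
    repo.tag('v1.0.0', user, commit_id=tip.raw_id, message='tag v1.0.0')
    # ...and later, removal (raises TagDoesNotExistError for unknown names):
    repo.remove_tag('v1.0.0', user, message='remove v1.0.0')
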
392 def get_diff(
395 def get_diff(
393 self, commit1, commit2, path=None, ignore_whitespace=False,
396 self, commit1, commit2, path=None, ignore_whitespace=False,
394 context=3, path1=None):
397 context=3, path1=None):
395 """
398 """
396 Returns (git like) *diff*, as plain text. Shows changes introduced by
399 Returns (git like) *diff*, as plain text. Shows changes introduced by
397 `commit2` since `commit1`.
400 `commit2` since `commit1`.
398
401
399 :param commit1: Entry point from which diff is shown. Can be
402 :param commit1: Entry point from which diff is shown. Can be
400 ``self.EMPTY_COMMIT`` - in this case, patch showing all
403 ``self.EMPTY_COMMIT`` - in this case, patch showing all
401 the changes since empty state of the repository until `commit2`
404 the changes since empty state of the repository until `commit2`
402 :param commit2: Until which commit changes should be shown.
405 :param commit2: Until which commit changes should be shown.
403 :param path: Can be set to a path of a file to create a diff of that
406 :param path: Can be set to a path of a file to create a diff of that
404 file. If `path1` is also set, this value is only associated to
407 file. If `path1` is also set, this value is only associated to
405 `commit2`.
408 `commit2`.
406 :param ignore_whitespace: If set to ``True``, would not show whitespace
409 :param ignore_whitespace: If set to ``True``, would not show whitespace
407 changes. Defaults to ``False``.
410 changes. Defaults to ``False``.
408 :param context: How many lines before/after changed lines should be
411 :param context: How many lines before/after changed lines should be
409 shown. Defaults to ``3``.
412 shown. Defaults to ``3``.
410 :param path1: Can be set to a path to associate with `commit1`. This
413 :param path1: Can be set to a path to associate with `commit1`. This
411 parameter works only for backends which support diff generation for
414 parameter works only for backends which support diff generation for
412 different paths. Other backends will raise a `ValueError` if `path1`
415 different paths. Other backends will raise a `ValueError` if `path1`
413 is set and has a different value than `path`.
416 is set and has a different value than `path`.
414 :param file_path: filter this diff by given path pattern
417 :param file_path: filter this diff by given path pattern
415 """
418 """
416 raise NotImplementedError
419 raise NotImplementedError
417
420
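Reviewer note: a diff sketch, assuming ``repo`` is a concrete backend instance; ``EMPTY_COMMIT`` can be used as the left side to diff against the empty repository state, and the path below is illustrative:

    # `repo` is a hypothetical concrete backend instance.
    commit1 = repo.get_commit(commit_idx=0)
    commit2 = repo.get_commit()  # most recent commit
    patch = repo.get_diff(commit1, commit2,
                          path='README.rst',  # illustrative path
                          ignore_whitespace=True, context=5)
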
418 def strip(self, commit_id, branch=None):
421 def strip(self, commit_id, branch=None):
419 """
422 """
420 Strip given commit_id from the repository
423 Strip given commit_id from the repository
421 """
424 """
422 raise NotImplementedError
425 raise NotImplementedError
423
426
424 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
427 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
425 """
428 """
426 Return the latest common ancestor commit, if one exists, for this repo's
429 Return the latest common ancestor commit, if one exists, for this repo's
427 `commit_id1` vs `commit_id2` from `repo2`.
430 `commit_id1` vs `commit_id2` from `repo2`.
428
431
429 :param commit_id1: Commit id from this repository to use as a
432 :param commit_id1: Commit id from this repository to use as a
430 target for the comparison.
433 target for the comparison.
431 :param commit_id2: Source commit id to use for comparison.
434 :param commit_id2: Source commit id to use for comparison.
432 :param repo2: Source repository to use for comparison.
435 :param repo2: Source repository to use for comparison.
433 """
436 """
434 raise NotImplementedError
437 raise NotImplementedError
435
438
436 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
439 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
437 """
440 """
438 Compare this repository's revision `commit_id1` with `commit_id2`.
441 Compare this repository's revision `commit_id1` with `commit_id2`.
439
442
440 Returns a tuple(commits, ancestor) that would be merged from
443 Returns a tuple(commits, ancestor) that would be merged from
441 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
444 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
442 will be returned as ancestor.
445 will be returned as ancestor.
443
446
444 :param commit_id1: Commit id from this repository to use as a
447 :param commit_id1: Commit id from this repository to use as a
445 target for the comparison.
448 target for the comparison.
446 :param commit_id2: Source commit id to use for comparison.
449 :param commit_id2: Source commit id to use for comparison.
447 :param repo2: Source repository to use for comparison.
450 :param repo2: Source repository to use for comparison.
448 :param merge: If set to ``True`` will do a merge compare which also
451 :param merge: If set to ``True`` will do a merge compare which also
449 returns the common ancestor.
452 returns the common ancestor.
450 :param pre_load: Optional. List of commit attributes to load.
453 :param pre_load: Optional. List of commit attributes to load.
451 """
454 """
452 raise NotImplementedError
455 raise NotImplementedError
453
456
454 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
457 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
455 user_name='', user_email='', message='', dry_run=False,
458 user_name='', user_email='', message='', dry_run=False,
456 use_rebase=False, close_branch=False):
459 use_rebase=False, close_branch=False):
457 """
460 """
458 Merge the revisions specified in `source_ref` from `source_repo`
461 Merge the revisions specified in `source_ref` from `source_repo`
459 onto the `target_ref` of this repository.
462 onto the `target_ref` of this repository.
460
463
461 `source_ref` and `target_ref` are named tuples with the following
464 `source_ref` and `target_ref` are named tuples with the following
462 fields `type`, `name` and `commit_id`.
465 fields `type`, `name` and `commit_id`.
463
466
464 Returns a MergeResponse named tuple with the following fields
467 Returns a MergeResponse named tuple with the following fields
465 'possible', 'executed', 'source_commit', 'target_commit',
468 'possible', 'executed', 'source_commit', 'target_commit',
466 'merge_commit'.
469 'merge_commit'.
467
470
468 :param repo_id: `repo_id` target repo id.
471 :param repo_id: `repo_id` target repo id.
469 :param workspace_id: `workspace_id` unique identifier.
472 :param workspace_id: `workspace_id` unique identifier.
470 :param target_ref: `target_ref` points to the commit on top of which
473 :param target_ref: `target_ref` points to the commit on top of which
471 the `source_ref` should be merged.
474 the `source_ref` should be merged.
472 :param source_repo: The repository that contains the commits to be
475 :param source_repo: The repository that contains the commits to be
473 merged.
476 merged.
474 :param source_ref: `source_ref` points to the topmost commit from
477 :param source_ref: `source_ref` points to the topmost commit from
475 the `source_repo` which should be merged.
478 the `source_repo` which should be merged.
476 :param user_name: Merge commit `user_name`.
479 :param user_name: Merge commit `user_name`.
477 :param user_email: Merge commit `user_email`.
480 :param user_email: Merge commit `user_email`.
478 :param message: Merge commit `message`.
481 :param message: Merge commit `message`.
479 :param dry_run: If `True` the merge will not take place.
482 :param dry_run: If `True` the merge will not take place.
480 :param use_rebase: If `True` commits from the source will be rebased
483 :param use_rebase: If `True` commits from the source will be rebased
481 on top of the target instead of being merged.
484 on top of the target instead of being merged.
482 :param close_branch: If `True` the branch will be closed before merging it
485 :param close_branch: If `True` the branch will be closed before merging it
483 """
486 """
484 if dry_run:
487 if dry_run:
485 message = message or 'dry_run_merge_message'
488 message = message or 'dry_run_merge_message'
486 user_email = user_email or 'dry-run-merge@rhodecode.com'
489 user_email = user_email or 'dry-run-merge@rhodecode.com'
487 user_name = user_name or 'Dry-Run User'
490 user_name = user_name or 'Dry-Run User'
488 else:
491 else:
489 if not user_name:
492 if not user_name:
490 raise ValueError('user_name cannot be empty')
493 raise ValueError('user_name cannot be empty')
491 if not user_email:
494 if not user_email:
492 raise ValueError('user_email cannot be empty')
495 raise ValueError('user_email cannot be empty')
493 if not message:
496 if not message:
494 raise ValueError('message cannot be empty')
497 raise ValueError('message cannot be empty')
495
498
496 try:
499 try:
497 return self._merge_repo(
500 return self._merge_repo(
498 repo_id, workspace_id, target_ref, source_repo,
501 repo_id, workspace_id, target_ref, source_repo,
499 source_ref, message, user_name, user_email, dry_run=dry_run,
502 source_ref, message, user_name, user_email, dry_run=dry_run,
500 use_rebase=use_rebase, close_branch=close_branch)
503 use_rebase=use_rebase, close_branch=close_branch)
501 except RepositoryError:
504 except RepositoryError:
502 log.exception(
505 log.exception(
503 'Unexpected failure when running merge, dry-run=%s',
506 'Unexpected failure when running merge, dry-run=%s',
504 dry_run)
507 dry_run)
505 return MergeResponse(
508 return MergeResponse(
506 False, False, None, MergeFailureReason.UNKNOWN)
509 False, False, None, MergeFailureReason.UNKNOWN)
507
510
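Reviewer note: a merge sketch, assuming ``target_repo`` and ``source_repo`` are concrete backend instances; all ids, names and refs below are illustrative. With ``dry_run=True`` the user/message placeholders above are filled in automatically:

    # All names and ids below are illustrative.
    target_ref = Reference('branch', 'default', 'a' * 40)
    source_ref = Reference('branch', 'feature', 'b' * 40)
    response = target_repo.merge(
        repo_id=1, workspace_id='pr-1', target_ref=target_ref,
        source_repo=source_repo, source_ref=source_ref, dry_run=True)
    if not response.possible:
        print('merge blocked: %s' % response.failure_reason)
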
508 def _merge_repo(self, repo_id, workspace_id, target_ref,
511 def _merge_repo(self, repo_id, workspace_id, target_ref,
509 source_repo, source_ref, merge_message,
512 source_repo, source_ref, merge_message,
510 merger_name, merger_email, dry_run=False,
513 merger_name, merger_email, dry_run=False,
511 use_rebase=False, close_branch=False):
514 use_rebase=False, close_branch=False):
512 """Internal implementation of merge."""
515 """Internal implementation of merge."""
513 raise NotImplementedError
516 raise NotImplementedError
514
517
515 def _maybe_prepare_merge_workspace(
518 def _maybe_prepare_merge_workspace(
516 self, repo_id, workspace_id, target_ref, source_ref):
519 self, repo_id, workspace_id, target_ref, source_ref):
517 """
520 """
518 Create the merge workspace.
521 Create the merge workspace.
519
522
520 :param workspace_id: `workspace_id` unique identifier.
523 :param workspace_id: `workspace_id` unique identifier.
521 """
524 """
522 raise NotImplementedError
525 raise NotImplementedError
523
526
524 def _get_legacy_shadow_repository_path(self, workspace_id):
527 def _get_legacy_shadow_repository_path(self, workspace_id):
525 """
528 """
526 Legacy version that was used before. We still need it for
529 Legacy version that was used before. We still need it for
527 backward compat
530 backward compat
528 """
531 """
529 return os.path.join(
532 return os.path.join(
530 os.path.dirname(self.path),
533 os.path.dirname(self.path),
531 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
534 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
532
535
533 def _get_shadow_repository_path(self, repo_id, workspace_id):
536 def _get_shadow_repository_path(self, repo_id, workspace_id):
534 # The name of the shadow repository must start with '.', so it is
537 # The name of the shadow repository must start with '.', so it is
535 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
538 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
536 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
539 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
537 if os.path.exists(legacy_repository_path):
540 if os.path.exists(legacy_repository_path):
538 return legacy_repository_path
541 return legacy_repository_path
539 else:
542 else:
540 return os.path.join(
543 return os.path.join(
541 os.path.dirname(self.path),
544 os.path.dirname(self.path),
542 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
545 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
543
546
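Reviewer note: for illustration, the two shadow-repository naming schemes produce paths like the following; the repository location, repo id and workspace id are made up:

    import os

    repo_path = '/srv/repos/project'    # hypothetical
    repo_id, workspace_id = 42, 'pr-7'  # hypothetical
    legacy = os.path.join(
        os.path.dirname(repo_path),
        '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
    current = os.path.join(
        os.path.dirname(repo_path),
        '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
    # legacy  -> /srv/repos/.__shadow_project_pr-7
    # current -> /srv/repos/.__shadow_repo_42_pr-7
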
544 def cleanup_merge_workspace(self, repo_id, workspace_id):
547 def cleanup_merge_workspace(self, repo_id, workspace_id):
545 """
548 """
546 Remove merge workspace.
549 Remove merge workspace.
547
550
548 This function MUST not fail in case there is no workspace associated to
551 This function MUST not fail in case there is no workspace associated to
549 the given `workspace_id`.
552 the given `workspace_id`.
550
553
551 :param workspace_id: `workspace_id` unique identifier.
554 :param workspace_id: `workspace_id` unique identifier.
552 """
555 """
553 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
556 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
554 shadow_repository_path_del = '{}.{}.delete'.format(
557 shadow_repository_path_del = '{}.{}.delete'.format(
555 shadow_repository_path, time.time())
558 shadow_repository_path, time.time())
556
559
557 # move the shadow repo, so it never conflicts with the one used.
560 # move the shadow repo, so it never conflicts with the one used.
558 # we use this method because shutil.rmtree had some edge case problems
561 # we use this method because shutil.rmtree had some edge case problems
559 # removing symlinked repositories
562 # removing symlinked repositories
560 if not os.path.isdir(shadow_repository_path):
563 if not os.path.isdir(shadow_repository_path):
561 return
564 return
562
565
563 shutil.move(shadow_repository_path, shadow_repository_path_del)
566 shutil.move(shadow_repository_path, shadow_repository_path_del)
564 try:
567 try:
565 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
568 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
566 except Exception:
569 except Exception:
567 log.exception('Failed to gracefully remove shadow repo under %s',
570 log.exception('Failed to gracefully remove shadow repo under %s',
568 shadow_repository_path_del)
571 shadow_repository_path_del)
569 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
572 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
570
573
571 # ========== #
574 # ========== #
572 # COMMIT API #
575 # COMMIT API #
573 # ========== #
576 # ========== #
574
577
575 @LazyProperty
578 @LazyProperty
576 def in_memory_commit(self):
579 def in_memory_commit(self):
577 """
580 """
578 Returns :class:`InMemoryCommit` object for this repository.
581 Returns :class:`InMemoryCommit` object for this repository.
579 """
582 """
580 raise NotImplementedError
583 raise NotImplementedError
581
584
582 # ======================== #
585 # ======================== #
583 # UTILITIES FOR SUBCLASSES #
586 # UTILITIES FOR SUBCLASSES #
584 # ======================== #
587 # ======================== #
585
588
586 def _validate_diff_commits(self, commit1, commit2):
589 def _validate_diff_commits(self, commit1, commit2):
587 """
590 """
588 Validates that the given commits are related to this repository.
591 Validates that the given commits are related to this repository.
589
592
590 Intended as a utility for subclasses to have a consistent validation
593 Intended as a utility for subclasses to have a consistent validation
591 of input parameters in methods like :meth:`get_diff`.
594 of input parameters in methods like :meth:`get_diff`.
592 """
595 """
593 self._validate_commit(commit1)
596 self._validate_commit(commit1)
594 self._validate_commit(commit2)
597 self._validate_commit(commit2)
595 if (isinstance(commit1, EmptyCommit) and
598 if (isinstance(commit1, EmptyCommit) and
596 isinstance(commit2, EmptyCommit)):
599 isinstance(commit2, EmptyCommit)):
597 raise ValueError("Cannot compare two empty commits")
600 raise ValueError("Cannot compare two empty commits")
598
601
599 def _validate_commit(self, commit):
602 def _validate_commit(self, commit):
600 if not isinstance(commit, BaseCommit):
603 if not isinstance(commit, BaseCommit):
601 raise TypeError(
604 raise TypeError(
602 "%s is not of type BaseCommit" % repr(commit))
605 "%s is not of type BaseCommit" % repr(commit))
603 if commit.repository != self and not isinstance(commit, EmptyCommit):
606 if commit.repository != self and not isinstance(commit, EmptyCommit):
604 raise ValueError(
607 raise ValueError(
605 "Commit %s must be a valid commit from this repository %s, "
608 "Commit %s must be a valid commit from this repository %s, "
606 "related to this repository instead %s." %
609 "related to this repository instead %s." %
607 (commit, self, commit.repository))
610 (commit, self, commit.repository))
608
611
609 def _validate_commit_id(self, commit_id):
612 def _validate_commit_id(self, commit_id):
610 if not isinstance(commit_id, basestring):
613 if not isinstance(commit_id, basestring):
611 raise TypeError("commit_id must be a string value")
614 raise TypeError("commit_id must be a string value")
612
615
613 def _validate_commit_idx(self, commit_idx):
616 def _validate_commit_idx(self, commit_idx):
614 if not isinstance(commit_idx, (int, long)):
617 if not isinstance(commit_idx, (int, long)):
615 raise TypeError("commit_idx must be a numeric value")
618 raise TypeError("commit_idx must be a numeric value")
616
619
617 def _validate_branch_name(self, branch_name):
620 def _validate_branch_name(self, branch_name):
618 if branch_name and branch_name not in self.branches_all:
621 if branch_name and branch_name not in self.branches_all:
619 msg = ("Branch %s not found in %s" % (branch_name, self))
622 msg = ("Branch %s not found in %s" % (branch_name, self))
620 raise BranchDoesNotExistError(msg)
623 raise BranchDoesNotExistError(msg)
621
624
622 #
625 #
623 # Supporting deprecated API parts
626 # Supporting deprecated API parts
624 # TODO: johbo: consider to move this into a mixin
627 # TODO: johbo: consider to move this into a mixin
625 #
628 #
626
629
627 @property
630 @property
628 def EMPTY_CHANGESET(self):
631 def EMPTY_CHANGESET(self):
629 warnings.warn(
632 warnings.warn(
630 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
633 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
631 return self.EMPTY_COMMIT_ID
634 return self.EMPTY_COMMIT_ID
632
635
633 @property
636 @property
634 def revisions(self):
637 def revisions(self):
635 warnings.warn("Use commits attribute instead", DeprecationWarning)
638 warnings.warn("Use commits attribute instead", DeprecationWarning)
636 return self.commit_ids
639 return self.commit_ids
637
640
638 @revisions.setter
641 @revisions.setter
639 def revisions(self, value):
642 def revisions(self, value):
640 warnings.warn("Use commits attribute instead", DeprecationWarning)
643 warnings.warn("Use commits attribute instead", DeprecationWarning)
641 self.commit_ids = value
644 self.commit_ids = value
642
645
643 def get_changeset(self, revision=None, pre_load=None):
646 def get_changeset(self, revision=None, pre_load=None):
644 warnings.warn("Use get_commit instead", DeprecationWarning)
647 warnings.warn("Use get_commit instead", DeprecationWarning)
645 commit_id = None
648 commit_id = None
646 commit_idx = None
649 commit_idx = None
647 if isinstance(revision, basestring):
650 if isinstance(revision, basestring):
648 commit_id = revision
651 commit_id = revision
649 else:
652 else:
650 commit_idx = revision
653 commit_idx = revision
651 return self.get_commit(
654 return self.get_commit(
652 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
655 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
653
656
654 def get_changesets(
657 def get_changesets(
655 self, start=None, end=None, start_date=None, end_date=None,
658 self, start=None, end=None, start_date=None, end_date=None,
656 branch_name=None, pre_load=None):
659 branch_name=None, pre_load=None):
657 warnings.warn("Use get_commits instead", DeprecationWarning)
660 warnings.warn("Use get_commits instead", DeprecationWarning)
658 start_id = self._revision_to_commit(start)
661 start_id = self._revision_to_commit(start)
659 end_id = self._revision_to_commit(end)
662 end_id = self._revision_to_commit(end)
660 return self.get_commits(
663 return self.get_commits(
661 start_id=start_id, end_id=end_id, start_date=start_date,
664 start_id=start_id, end_id=end_id, start_date=start_date,
662 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
665 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
663
666
664 def _revision_to_commit(self, revision):
667 def _revision_to_commit(self, revision):
665 """
668 """
666 Translates a revision to a commit_id
669 Translates a revision to a commit_id
667
670
668 Helps to support the old changeset-based API, which allows using
671 Helps to support the old changeset-based API, which allows using
669 commit ids and commit indices interchangeably.
672 commit ids and commit indices interchangeably.
670 """
673 """
671 if revision is None:
674 if revision is None:
672 return revision
675 return revision
673
676
674 if isinstance(revision, basestring):
677 if isinstance(revision, basestring):
675 commit_id = revision
678 commit_id = revision
676 else:
679 else:
677 commit_id = self.commit_ids[revision]
680 commit_id = self.commit_ids[revision]
678 return commit_id
681 return commit_id
679
682
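Reviewer note: the deprecated changeset API simply forwards to the commit API; assuming ``repo`` is a concrete backend instance with enough commits, the two calls below are equivalent (the first also emits a DeprecationWarning):

    # `repo` is a hypothetical concrete backend instance.
    old_style = repo.get_changeset(5)       # index is routed to commit_idx
    new_style = repo.get_commit(commit_idx=5)
    assert old_style.raw_id == new_style.raw_id
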
680 @property
683 @property
681 def in_memory_changeset(self):
684 def in_memory_changeset(self):
682 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
685 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
683 return self.in_memory_commit
686 return self.in_memory_commit
684
687
685 def get_path_permissions(self, username):
688 def get_path_permissions(self, username):
686 """
689 """
687 Returns a path permission checker or None if not supported
690 Returns a path permission checker or None if not supported
688
691
689 :param username: session user name
692 :param username: session user name
690 :return: an instance of BasePathPermissionChecker or None
693 :return: an instance of BasePathPermissionChecker or None
691 """
694 """
692 return None
695 return None
693
696
694 def install_hooks(self, force=False):
697 def install_hooks(self, force=False):
695 return self._remote.install_hooks(force)
698 return self._remote.install_hooks(force)
696
699
697
700
698 class BaseCommit(object):
701 class BaseCommit(object):
699 """
702 """
700 Each backend should implement its own commit representation.
703 Each backend should implement its own commit representation.
701
704
702 **Attributes**
705 **Attributes**
703
706
704 ``repository``
707 ``repository``
705 repository object within which commit exists
708 repository object within which commit exists
706
709
707 ``id``
710 ``id``
708 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
711 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
709 just ``tip``.
712 just ``tip``.
710
713
711 ``raw_id``
714 ``raw_id``
712 raw commit representation (i.e. full 40 length sha for git
715 raw commit representation (i.e. full 40 length sha for git
713 backend)
716 backend)
714
717
715 ``short_id``
718 ``short_id``
716 shortened (if applicable) version of ``raw_id``; it would be a simple
719 shortened (if applicable) version of ``raw_id``; it would be a simple
717 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
720 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
718 as ``raw_id`` for subversion
721 as ``raw_id`` for subversion
719
722
720 ``idx``
723 ``idx``
721 commit index
724 commit index
722
725
723 ``files``
726 ``files``
724 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
727 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
725
728
726 ``dirs``
729 ``dirs``
727 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
730 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
728
731
729 ``nodes``
732 ``nodes``
730 combined list of ``Node`` objects
733 combined list of ``Node`` objects
731
734
732 ``author``
735 ``author``
733 author of the commit, as unicode
736 author of the commit, as unicode
734
737
735 ``message``
738 ``message``
736 message of the commit, as unicode
739 message of the commit, as unicode
737
740
738 ``parents``
741 ``parents``
739 list of parent commits
742 list of parent commits
740
743
741 """
744 """
742
745
743 branch = None
746 branch = None
744 """
747 """
745 Depending on the backend this should be set to the branch name of the
748 Depending on the backend this should be set to the branch name of the
746 commit. Backends not supporting branches on commits should leave this
749 commit. Backends not supporting branches on commits should leave this
747 value as ``None``.
750 value as ``None``.
748 """
751 """
749
752
750 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
753 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
751 """
754 """
752 This template is used to generate a default prefix for repository archives
755 This template is used to generate a default prefix for repository archives
753 if no prefix has been specified.
756 if no prefix has been specified.
754 """
757 """
755
758
756 def __str__(self):
759 def __str__(self):
757 return '<%s at %s:%s>' % (
760 return '<%s at %s:%s>' % (
758 self.__class__.__name__, self.idx, self.short_id)
761 self.__class__.__name__, self.idx, self.short_id)
759
762
760 def __repr__(self):
763 def __repr__(self):
761 return self.__str__()
764 return self.__str__()
762
765
763 def __unicode__(self):
766 def __unicode__(self):
764 return u'%s:%s' % (self.idx, self.short_id)
767 return u'%s:%s' % (self.idx, self.short_id)
765
768
766 def __eq__(self, other):
769 def __eq__(self, other):
767 same_instance = isinstance(other, self.__class__)
770 same_instance = isinstance(other, self.__class__)
768 return same_instance and self.raw_id == other.raw_id
771 return same_instance and self.raw_id == other.raw_id
769
772
770 def __json__(self):
773 def __json__(self):
771 parents = []
774 parents = []
772 try:
775 try:
773 for parent in self.parents:
776 for parent in self.parents:
774 parents.append({'raw_id': parent.raw_id})
777 parents.append({'raw_id': parent.raw_id})
775 except NotImplementedError:
778 except NotImplementedError:
776 # empty commit doesn't have parents implemented
779 # empty commit doesn't have parents implemented
777 pass
780 pass
778
781
779 return {
782 return {
780 'short_id': self.short_id,
783 'short_id': self.short_id,
781 'raw_id': self.raw_id,
784 'raw_id': self.raw_id,
782 'revision': self.idx,
785 'revision': self.idx,
783 'message': self.message,
786 'message': self.message,
784 'date': self.date,
787 'date': self.date,
785 'author': self.author,
788 'author': self.author,
786 'parents': parents,
789 'parents': parents,
787 'branch': self.branch
790 'branch': self.branch
788 }
791 }
789
792
790 def __getstate__(self):
793 def __getstate__(self):
791 d = self.__dict__.copy()
794 d = self.__dict__.copy()
792 d.pop('_remote', None)
795 d.pop('_remote', None)
793 d.pop('repository', None)
796 d.pop('repository', None)
794 return d
797 return d
795
798
796 def _get_refs(self):
799 def _get_refs(self):
797 return {
800 return {
798 'branches': [self.branch] if self.branch else [],
801 'branches': [self.branch] if self.branch else [],
799 'bookmarks': getattr(self, 'bookmarks', []),
802 'bookmarks': getattr(self, 'bookmarks', []),
800 'tags': self.tags
803 'tags': self.tags
801 }
804 }
802
805
803 @LazyProperty
806 @LazyProperty
804 def last(self):
807 def last(self):
805 """
808 """
806 ``True`` if this is last commit in repository, ``False``
809 ``True`` if this is last commit in repository, ``False``
807 otherwise; trying to access this attribute while there is no
810 otherwise; trying to access this attribute while there is no
808 commits would raise `EmptyRepositoryError`
811 commits would raise `EmptyRepositoryError`
809 """
812 """
810 if self.repository is None:
813 if self.repository is None:
811 raise CommitError("Cannot check if it's most recent commit")
814 raise CommitError("Cannot check if it's most recent commit")
812 return self.raw_id == self.repository.commit_ids[-1]
815 return self.raw_id == self.repository.commit_ids[-1]
813
816
814 @LazyProperty
817 @LazyProperty
815 def parents(self):
818 def parents(self):
816 """
819 """
817 Returns list of parent commits.
820 Returns list of parent commits.
818 """
821 """
819 raise NotImplementedError
822 raise NotImplementedError
820
823
821 @property
824 @property
822 def merge(self):
825 def merge(self):
823 """
826 """
824 Returns boolean if commit is a merge.
827 Returns boolean if commit is a merge.
825 """
828 """
826 return len(self.parents) > 1
829 return len(self.parents) > 1
827
830
828 @LazyProperty
831 @LazyProperty
829 def children(self):
832 def children(self):
830 """
833 """
831 Returns list of child commits.
834 Returns list of child commits.
832 """
835 """
833 raise NotImplementedError
836 raise NotImplementedError
834
837
835 @LazyProperty
838 @LazyProperty
836 def id(self):
839 def id(self):
837 """
840 """
838 Returns string identifying this commit.
841 Returns string identifying this commit.
839 """
842 """
840 raise NotImplementedError
843 raise NotImplementedError
841
844
842 @LazyProperty
845 @LazyProperty
843 def raw_id(self):
846 def raw_id(self):
844 """
847 """
845 Returns raw string identifying this commit.
848 Returns raw string identifying this commit.
846 """
849 """
847 raise NotImplementedError
850 raise NotImplementedError
848
851
849 @LazyProperty
852 @LazyProperty
850 def short_id(self):
853 def short_id(self):
851 """
854 """
852 Returns shortened version of ``raw_id`` attribute, as string,
855 Returns shortened version of ``raw_id`` attribute, as string,
853 identifying this commit, useful for presentation to users.
856 identifying this commit, useful for presentation to users.
854 """
857 """
855 raise NotImplementedError
858 raise NotImplementedError
856
859
857 @LazyProperty
860 @LazyProperty
858 def idx(self):
861 def idx(self):
859 """
862 """
860 Returns integer identifying this commit.
863 Returns integer identifying this commit.
861 """
864 """
862 raise NotImplementedError
865 raise NotImplementedError
863
866
864 @LazyProperty
867 @LazyProperty
865 def committer(self):
868 def committer(self):
866 """
869 """
867 Returns committer for this commit
870 Returns committer for this commit
868 """
871 """
869 raise NotImplementedError
872 raise NotImplementedError
870
873
871 @LazyProperty
874 @LazyProperty
872 def committer_name(self):
875 def committer_name(self):
873 """
876 """
874 Returns committer name for this commit
877 Returns committer name for this commit
875 """
878 """
876
879
877 return author_name(self.committer)
880 return author_name(self.committer)
878
881
879 @LazyProperty
882 @LazyProperty
880 def committer_email(self):
883 def committer_email(self):
881 """
884 """
882 Returns committer email address for this commit
885 Returns committer email address for this commit
883 """
886 """
884
887
885 return author_email(self.committer)
888 return author_email(self.committer)
886
889
887 @LazyProperty
890 @LazyProperty
888 def author(self):
891 def author(self):
889 """
892 """
890 Returns author for this commit
893 Returns author for this commit
891 """
894 """
892
895
893 raise NotImplementedError
896 raise NotImplementedError
894
897
895 @LazyProperty
898 @LazyProperty
896 def author_name(self):
899 def author_name(self):
897 """
900 """
898 Returns author name for this commit
901 Returns author name for this commit
899 """
902 """
900
903
901 return author_name(self.author)
904 return author_name(self.author)
902
905
903 @LazyProperty
906 @LazyProperty
904 def author_email(self):
907 def author_email(self):
905 """
908 """
906 Returns author email address for this commit
909 Returns author email address for this commit
907 """
910 """
908
911
909 return author_email(self.author)
912 return author_email(self.author)
910
913
911 def get_file_mode(self, path):
914 def get_file_mode(self, path):
912 """
915 """
913 Returns stat mode of the file at `path`.
916 Returns stat mode of the file at `path`.
914 """
917 """
915 raise NotImplementedError
918 raise NotImplementedError
916
919
917 def is_link(self, path):
920 def is_link(self, path):
918 """
921 """
919 Returns ``True`` if given `path` is a symlink
922 Returns ``True`` if given `path` is a symlink
920 """
923 """
921 raise NotImplementedError
924 raise NotImplementedError
922
925
923 def get_file_content(self, path):
926 def get_file_content(self, path):
924 """
927 """
925 Returns content of the file at the given `path`.
928 Returns content of the file at the given `path`.
926 """
929 """
927 raise NotImplementedError
930 raise NotImplementedError
928
931
929 def get_file_size(self, path):
932 def get_file_size(self, path):
930 """
933 """
931 Returns size of the file at the given `path`.
934 Returns size of the file at the given `path`.
932 """
935 """
933 raise NotImplementedError
936 raise NotImplementedError
934
937
935 def get_file_commit(self, path, pre_load=None):
938 def get_file_commit(self, path, pre_load=None):
936 """
939 """
937 Returns last commit of the file at the given `path`.
940 Returns last commit of the file at the given `path`.
938
941
939 :param pre_load: Optional. List of commit attributes to load.
942 :param pre_load: Optional. List of commit attributes to load.
940 """
943 """
941 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
944 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
942 if not commits:
945 if not commits:
943 raise RepositoryError(
946 raise RepositoryError(
944 'Failed to fetch history for path {}. '
947 'Failed to fetch history for path {}. '
945 'Please check if such path exists in your repository'.format(
948 'Please check if such path exists in your repository'.format(
946 path))
949 path))
947 return commits[0]
950 return commits[0]
948
951
949 def get_file_history(self, path, limit=None, pre_load=None):
952 def get_file_history(self, path, limit=None, pre_load=None):
950 """
953 """
951 Returns history of file as reversed list of :class:`BaseCommit`
954 Returns history of file as reversed list of :class:`BaseCommit`
952 objects for which file at given `path` has been modified.
955 objects for which file at given `path` has been modified.
953
956
954 :param limit: Optional. Allows limiting the size of the returned
957 :param limit: Optional. Allows limiting the size of the returned
955 history. This is intended as a hint to the underlying backend, so
958 history. This is intended as a hint to the underlying backend, so
956 that it can apply optimizations depending on the limit.
959 that it can apply optimizations depending on the limit.
957 :param pre_load: Optional. List of commit attributes to load.
960 :param pre_load: Optional. List of commit attributes to load.
958 """
961 """
959 raise NotImplementedError
962 raise NotImplementedError
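# Editor's note (illustrative sketch, not part of the changed file): typical use of the
# file-history API documented above. `repo` stands for any initialised concrete backend
# (e.g. a GitRepository); the file path and pre_load attribute names are made up.
def print_file_history(repo, path='setup.py'):
    tip = repo.get_commit()
    last_change = tip.get_file_commit(path, pre_load=['author', 'date'])
    print 'last touched in %s by %s' % (last_change.short_id, last_change.author_name)
    for commit in tip.get_file_history(path, limit=10, pre_load=['message']):
        print commit.short_id, commit.message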
960
963
961 def get_file_annotate(self, path, pre_load=None):
964 def get_file_annotate(self, path, pre_load=None):
962 """
965 """
963 Returns a generator of four-element tuples with
966 Returns a generator of four-element tuples with
964 lineno, sha, a lazy commit loader and the line
967 lineno, sha, a lazy commit loader and the line
965
968
966 :param pre_load: Optional. List of commit attributes to load.
969 :param pre_load: Optional. List of commit attributes to load.
967 """
970 """
968 raise NotImplementedError
971 raise NotImplementedError
969
972
970 def get_nodes(self, path):
973 def get_nodes(self, path):
971 """
974 """
972 Returns a combined list of ``DirNode`` and ``FileNode`` objects representing
975 Returns a combined list of ``DirNode`` and ``FileNode`` objects representing
973 the state of the commit at the given ``path``.
976 the state of the commit at the given ``path``.
974
977
975 :raises ``CommitError``: if node at the given ``path`` is not
978 :raises ``CommitError``: if node at the given ``path`` is not
976 instance of ``DirNode``
979 instance of ``DirNode``
977 """
980 """
978 raise NotImplementedError
981 raise NotImplementedError
979
982
980 def get_node(self, path):
983 def get_node(self, path):
981 """
984 """
982 Returns ``Node`` object from the given ``path``.
985 Returns ``Node`` object from the given ``path``.
983
986
984 :raises ``NodeDoesNotExistError``: if there is no node at the given
987 :raises ``NodeDoesNotExistError``: if there is no node at the given
985 ``path``
988 ``path``
986 """
989 """
987 raise NotImplementedError
990 raise NotImplementedError
988
991
989 def get_largefile_node(self, path):
992 def get_largefile_node(self, path):
990 """
993 """
991 Returns the path to the largefile from Mercurial/Git-lfs storage,
994 Returns the path to the largefile from Mercurial/Git-lfs storage,
992 or None if it's not a largefile node.
995 or None if it's not a largefile node.
993 """
996 """
994 return None
997 return None
995
998
996 def archive_repo(self, file_path, kind='tgz', subrepos=None,
999 def archive_repo(self, file_path, kind='tgz', subrepos=None,
997 prefix=None, write_metadata=False, mtime=None):
1000 prefix=None, write_metadata=False, mtime=None):
998 """
1001 """
999 Creates an archive containing the contents of the repository.
1002 Creates an archive containing the contents of the repository.
1000
1003
1001 :param file_path: path of the archive file to create.
1004 :param file_path: path of the archive file to create.
1002 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1005 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1003 :param prefix: name of root directory in archive.
1006 :param prefix: name of root directory in archive.
1004 Default is repository name and commit's short_id joined with dash:
1007 Default is repository name and commit's short_id joined with dash:
1005 ``"{repo_name}-{short_id}"``.
1008 ``"{repo_name}-{short_id}"``.
1006 :param write_metadata: write a metadata file into archive.
1009 :param write_metadata: write a metadata file into archive.
1007 :param mtime: custom modification time for archive creation, defaults
1010 :param mtime: custom modification time for archive creation, defaults
1008 to the commit's date (as a timestamp) if not given.
1011 to the commit's date (as a timestamp) if not given.
1009
1012
1010 :raise VCSError: If prefix has a problem.
1013 :raise VCSError: If prefix has a problem.
1011 """
1014 """
1012 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1015 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1013 if kind not in allowed_kinds:
1016 if kind not in allowed_kinds:
1014 raise ImproperArchiveTypeError(
1017 raise ImproperArchiveTypeError(
1015 'Archive kind (%s) not supported, use one of %s' %
1018 'Archive kind (%s) not supported, use one of %s' %
1016 (kind, allowed_kinds))
1019 (kind, allowed_kinds))
1017
1020
1018 prefix = self._validate_archive_prefix(prefix)
1021 prefix = self._validate_archive_prefix(prefix)
1019
1022
1020 mtime = mtime or time.mktime(self.date.timetuple())
1023 mtime = mtime or time.mktime(self.date.timetuple())
1021
1024
1022 file_info = []
1025 file_info = []
1023 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1026 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1024 for _r, _d, files in cur_rev.walk('/'):
1027 for _r, _d, files in cur_rev.walk('/'):
1025 for f in files:
1028 for f in files:
1026 f_path = os.path.join(prefix, f.path)
1029 f_path = os.path.join(prefix, f.path)
1027 file_info.append(
1030 file_info.append(
1028 (f_path, f.mode, f.is_link(), f.raw_bytes))
1031 (f_path, f.mode, f.is_link(), f.raw_bytes))
1029
1032
1030 if write_metadata:
1033 if write_metadata:
1031 metadata = [
1034 metadata = [
1032 ('repo_name', self.repository.name),
1035 ('repo_name', self.repository.name),
1033 ('rev', self.raw_id),
1036 ('rev', self.raw_id),
1034 ('create_time', mtime),
1037 ('create_time', mtime),
1035 ('branch', self.branch),
1038 ('branch', self.branch),
1036 ('tags', ','.join(self.tags)),
1039 ('tags', ','.join(self.tags)),
1037 ]
1040 ]
1038 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1041 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1039 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
1042 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
1040
1043
1041 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1044 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
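# Editor's note (illustrative sketch, not part of the changed file): an example call of
# archive_repo() as documented above; the target path and prefix are invented, only the
# keyword names come from the signature.
def make_tarball(commit, target='/tmp/example-archive.tgz'):
    commit.archive_repo(
        target, kind='tgz',
        prefix='%s-%s' % (commit.repository.name, commit.short_id),
        write_metadata=True)
    return target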
1042
1045
1043 def _validate_archive_prefix(self, prefix):
1046 def _validate_archive_prefix(self, prefix):
1044 if prefix is None:
1047 if prefix is None:
1045 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1048 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1046 repo_name=safe_str(self.repository.name),
1049 repo_name=safe_str(self.repository.name),
1047 short_id=self.short_id)
1050 short_id=self.short_id)
1048 elif not isinstance(prefix, str):
1051 elif not isinstance(prefix, str):
1049 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1052 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1050 elif prefix.startswith('/'):
1053 elif prefix.startswith('/'):
1051 raise VCSError("Prefix cannot start with leading slash")
1054 raise VCSError("Prefix cannot start with leading slash")
1052 elif prefix.strip() == '':
1055 elif prefix.strip() == '':
1053 raise VCSError("Prefix cannot be empty")
1056 raise VCSError("Prefix cannot be empty")
1054 return prefix
1057 return prefix
1055
1058
1056 @LazyProperty
1059 @LazyProperty
1057 def root(self):
1060 def root(self):
1058 """
1061 """
1059 Returns ``RootNode`` object for this commit.
1062 Returns ``RootNode`` object for this commit.
1060 """
1063 """
1061 return self.get_node('')
1064 return self.get_node('')
1062
1065
1063 def next(self, branch=None):
1066 def next(self, branch=None):
1064 """
1067 """
1065 Returns the next commit from the current one; if a branch is given it will return
1068 Returns the next commit from the current one; if a branch is given it will return
1066 the next commit belonging to that branch
1069 the next commit belonging to that branch
1067
1070
1068 :param branch: show commits within the given named branch
1071 :param branch: show commits within the given named branch
1069 """
1072 """
1070 indexes = xrange(self.idx + 1, self.repository.count())
1073 indexes = xrange(self.idx + 1, self.repository.count())
1071 return self._find_next(indexes, branch)
1074 return self._find_next(indexes, branch)
1072
1075
1073 def prev(self, branch=None):
1076 def prev(self, branch=None):
1074 """
1077 """
1075 Returns the previous commit from the current one; if a branch is given it will
1078 Returns the previous commit from the current one; if a branch is given it will
1076 return the previous commit belonging to that branch
1079 return the previous commit belonging to that branch
1077
1080
1078 :param branch: show commit within the given named branch
1081 :param branch: show commit within the given named branch
1079 """
1082 """
1080 indexes = xrange(self.idx - 1, -1, -1)
1083 indexes = xrange(self.idx - 1, -1, -1)
1081 return self._find_next(indexes, branch)
1084 return self._find_next(indexes, branch)
1082
1085
1083 def _find_next(self, indexes, branch=None):
1086 def _find_next(self, indexes, branch=None):
1084 if branch and self.branch != branch:
1087 if branch and self.branch != branch:
1085 raise VCSError('Branch option used on commit not belonging '
1088 raise VCSError('Branch option used on commit not belonging '
1086 'to that branch')
1089 'to that branch')
1087
1090
1088 for next_idx in indexes:
1091 for next_idx in indexes:
1089 commit = self.repository.get_commit(commit_idx=next_idx)
1092 commit = self.repository.get_commit(commit_idx=next_idx)
1090 if branch and branch != commit.branch:
1093 if branch and branch != commit.branch:
1091 continue
1094 continue
1092 return commit
1095 return commit
1093 raise CommitDoesNotExistError
1096 raise CommitDoesNotExistError
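# Editor's note (illustrative sketch, not part of the changed file): commit navigation
# with next()/prev(); it assumes the commit has neighbours, otherwise
# CommitDoesNotExistError is raised as in _find_next() above.
def neighbours(commit):
    newer = commit.next()
    older = commit.prev(branch=commit.branch)
    return older, newer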
1094
1097
1095 def diff(self, ignore_whitespace=True, context=3):
1098 def diff(self, ignore_whitespace=True, context=3):
1096 """
1099 """
1097 Returns a `Diff` object representing the change made by this commit.
1100 Returns a `Diff` object representing the change made by this commit.
1098 """
1101 """
1099 parent = (
1102 parent = (
1100 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1103 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1101 diff = self.repository.get_diff(
1104 diff = self.repository.get_diff(
1102 parent, self,
1105 parent, self,
1103 ignore_whitespace=ignore_whitespace,
1106 ignore_whitespace=ignore_whitespace,
1104 context=context)
1107 context=context)
1105 return diff
1108 return diff
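# Editor's note (illustrative sketch, not part of the changed file): use of diff();
# the DiffChunk objects and their header/diff attributes are defined later in this module.
def summarize_change(commit):
    diff = commit.diff(ignore_whitespace=False, context=5)
    for chunk in diff.chunks():
        print chunk.header, len(chunk.diff.splitlines())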
1106
1109
1107 @LazyProperty
1110 @LazyProperty
1108 def added(self):
1111 def added(self):
1109 """
1112 """
1110 Returns list of added ``FileNode`` objects.
1113 Returns list of added ``FileNode`` objects.
1111 """
1114 """
1112 raise NotImplementedError
1115 raise NotImplementedError
1113
1116
1114 @LazyProperty
1117 @LazyProperty
1115 def changed(self):
1118 def changed(self):
1116 """
1119 """
1117 Returns list of modified ``FileNode`` objects.
1120 Returns list of modified ``FileNode`` objects.
1118 """
1121 """
1119 raise NotImplementedError
1122 raise NotImplementedError
1120
1123
1121 @LazyProperty
1124 @LazyProperty
1122 def removed(self):
1125 def removed(self):
1123 """
1126 """
1124 Returns list of removed ``FileNode`` objects.
1127 Returns list of removed ``FileNode`` objects.
1125 """
1128 """
1126 raise NotImplementedError
1129 raise NotImplementedError
1127
1130
1128 @LazyProperty
1131 @LazyProperty
1129 def size(self):
1132 def size(self):
1130 """
1133 """
1131 Returns total number of bytes from contents of all filenodes.
1134 Returns total number of bytes from contents of all filenodes.
1132 """
1135 """
1133 return sum((node.size for node in self.get_filenodes_generator()))
1136 return sum((node.size for node in self.get_filenodes_generator()))
1134
1137
1135 def walk(self, topurl=''):
1138 def walk(self, topurl=''):
1136 """
1139 """
1137 Similar to the os.walk method. Instead of a filesystem it walks through
1140 Similar to the os.walk method. Instead of a filesystem it walks through
1138 the commit starting at the given ``topurl``. Returns a generator of tuples
1141 the commit starting at the given ``topurl``. Returns a generator of tuples
1139 (topnode, dirnodes, filenodes).
1142 (topnode, dirnodes, filenodes).
1140 """
1143 """
1141 topnode = self.get_node(topurl)
1144 topnode = self.get_node(topurl)
1142 if not topnode.is_dir():
1145 if not topnode.is_dir():
1143 return
1146 return
1144 yield (topnode, topnode.dirs, topnode.files)
1147 yield (topnode, topnode.dirs, topnode.files)
1145 for dirnode in topnode.dirs:
1148 for dirnode in topnode.dirs:
1146 for tup in self.walk(dirnode.path):
1149 for tup in self.walk(dirnode.path):
1147 yield tup
1150 yield tup
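# Editor's note (illustrative sketch, not part of the changed file): walking a commit's
# tree with walk(), analogous to os.walk(); the starting path is illustrative and the
# per-file size attribute mirrors its use in the `size` property above.
def total_size(commit, topurl=''):
    return sum(
        f.size
        for _topnode, _dirs, files in commit.walk(topurl)
        for f in files)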
1148
1151
1149 def get_filenodes_generator(self):
1152 def get_filenodes_generator(self):
1150 """
1153 """
1151 Returns generator that yields *all* file nodes.
1154 Returns generator that yields *all* file nodes.
1152 """
1155 """
1153 for topnode, dirs, files in self.walk():
1156 for topnode, dirs, files in self.walk():
1154 for node in files:
1157 for node in files:
1155 yield node
1158 yield node
1156
1159
1157 #
1160 #
1158 # Utilities for subclasses to support consistent behavior
1161 # Utilities for subclasses to support consistent behavior
1159 #
1162 #
1160
1163
1161 def no_node_at_path(self, path):
1164 def no_node_at_path(self, path):
1162 return NodeDoesNotExistError(
1165 return NodeDoesNotExistError(
1163 u"There is no file nor directory at the given path: "
1166 u"There is no file nor directory at the given path: "
1164 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1167 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1165
1168
1166 def _fix_path(self, path):
1169 def _fix_path(self, path):
1167 """
1170 """
1168 Paths are stored without a trailing slash so we need to get rid of it if
1171 Paths are stored without a trailing slash so we need to get rid of it if
1169 needed.
1172 needed.
1170 """
1173 """
1171 return path.rstrip('/')
1174 return path.rstrip('/')
1172
1175
1173 #
1176 #
1174 # Deprecated API based on changesets
1177 # Deprecated API based on changesets
1175 #
1178 #
1176
1179
1177 @property
1180 @property
1178 def revision(self):
1181 def revision(self):
1179 warnings.warn("Use idx instead", DeprecationWarning)
1182 warnings.warn("Use idx instead", DeprecationWarning)
1180 return self.idx
1183 return self.idx
1181
1184
1182 @revision.setter
1185 @revision.setter
1183 def revision(self, value):
1186 def revision(self, value):
1184 warnings.warn("Use idx instead", DeprecationWarning)
1187 warnings.warn("Use idx instead", DeprecationWarning)
1185 self.idx = value
1188 self.idx = value
1186
1189
1187 def get_file_changeset(self, path):
1190 def get_file_changeset(self, path):
1188 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1191 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1189 return self.get_file_commit(path)
1192 return self.get_file_commit(path)
1190
1193
1191
1194
1192 class BaseChangesetClass(type):
1195 class BaseChangesetClass(type):
1193
1196
1194 def __instancecheck__(self, instance):
1197 def __instancecheck__(self, instance):
1195 return isinstance(instance, BaseCommit)
1198 return isinstance(instance, BaseCommit)
1196
1199
1197
1200
1198 class BaseChangeset(BaseCommit):
1201 class BaseChangeset(BaseCommit):
1199
1202
1200 __metaclass__ = BaseChangesetClass
1203 __metaclass__ = BaseChangesetClass
1201
1204
1202 def __new__(cls, *args, **kwargs):
1205 def __new__(cls, *args, **kwargs):
1203 warnings.warn(
1206 warnings.warn(
1204 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1207 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1205 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1208 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1206
1209
1207
1210
1208 class BaseInMemoryCommit(object):
1211 class BaseInMemoryCommit(object):
1209 """
1212 """
1210 Represents differences between repository's state (most recent head) and
1213 Represents differences between repository's state (most recent head) and
1211 changes made *in place*.
1214 changes made *in place*.
1212
1215
1213 **Attributes**
1216 **Attributes**
1214
1217
1215 ``repository``
1218 ``repository``
1216 repository object for this in-memory-commit
1219 repository object for this in-memory-commit
1217
1220
1218 ``added``
1221 ``added``
1219 list of ``FileNode`` objects marked as *added*
1222 list of ``FileNode`` objects marked as *added*
1220
1223
1221 ``changed``
1224 ``changed``
1222 list of ``FileNode`` objects marked as *changed*
1225 list of ``FileNode`` objects marked as *changed*
1223
1226
1224 ``removed``
1227 ``removed``
1225 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1228 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1226 *removed*
1229 *removed*
1227
1230
1228 ``parents``
1231 ``parents``
1229 list of :class:`BaseCommit` instances representing parents of
1232 list of :class:`BaseCommit` instances representing parents of
1230 in-memory commit. Should always be 2-element sequence.
1233 in-memory commit. Should always be 2-element sequence.
1231
1234
1232 """
1235 """
1233
1236
1234 def __init__(self, repository):
1237 def __init__(self, repository):
1235 self.repository = repository
1238 self.repository = repository
1236 self.added = []
1239 self.added = []
1237 self.changed = []
1240 self.changed = []
1238 self.removed = []
1241 self.removed = []
1239 self.parents = []
1242 self.parents = []
1240
1243
1241 def add(self, *filenodes):
1244 def add(self, *filenodes):
1242 """
1245 """
1243 Marks given ``FileNode`` objects as *to be committed*.
1246 Marks given ``FileNode`` objects as *to be committed*.
1244
1247
1245 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1248 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1246 latest commit
1249 latest commit
1247 :raises ``NodeAlreadyAddedError``: if node with same path is already
1250 :raises ``NodeAlreadyAddedError``: if node with same path is already
1248 marked as *added*
1251 marked as *added*
1249 """
1252 """
1250 # Check if not already marked as *added* first
1253 # Check if not already marked as *added* first
1251 for node in filenodes:
1254 for node in filenodes:
1252 if node.path in (n.path for n in self.added):
1255 if node.path in (n.path for n in self.added):
1253 raise NodeAlreadyAddedError(
1256 raise NodeAlreadyAddedError(
1254 "Such FileNode %s is already marked for addition"
1257 "Such FileNode %s is already marked for addition"
1255 % node.path)
1258 % node.path)
1256 for node in filenodes:
1259 for node in filenodes:
1257 self.added.append(node)
1260 self.added.append(node)
1258
1261
1259 def change(self, *filenodes):
1262 def change(self, *filenodes):
1260 """
1263 """
1261 Marks given ``FileNode`` objects to be *changed* in next commit.
1264 Marks given ``FileNode`` objects to be *changed* in next commit.
1262
1265
1263 :raises ``EmptyRepositoryError``: if there are no commits yet
1266 :raises ``EmptyRepositoryError``: if there are no commits yet
1264 :raises ``NodeAlreadyExistsError``: if node with same path is already
1267 :raises ``NodeAlreadyExistsError``: if node with same path is already
1265 marked to be *changed*
1268 marked to be *changed*
1266 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1269 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1267 marked to be *removed*
1270 marked to be *removed*
1268 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1271 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1269 commit
1272 commit
1270 :raises ``NodeNotChangedError``: if node hasn't really been changed
1273 :raises ``NodeNotChangedError``: if node hasn't really been changed
1271 """
1274 """
1272 for node in filenodes:
1275 for node in filenodes:
1273 if node.path in (n.path for n in self.removed):
1276 if node.path in (n.path for n in self.removed):
1274 raise NodeAlreadyRemovedError(
1277 raise NodeAlreadyRemovedError(
1275 "Node at %s is already marked as removed" % node.path)
1278 "Node at %s is already marked as removed" % node.path)
1276 try:
1279 try:
1277 self.repository.get_commit()
1280 self.repository.get_commit()
1278 except EmptyRepositoryError:
1281 except EmptyRepositoryError:
1279 raise EmptyRepositoryError(
1282 raise EmptyRepositoryError(
1280 "Nothing to change - try to *add* new nodes rather than "
1283 "Nothing to change - try to *add* new nodes rather than "
1281 "changing them")
1284 "changing them")
1282 for node in filenodes:
1285 for node in filenodes:
1283 if node.path in (n.path for n in self.changed):
1286 if node.path in (n.path for n in self.changed):
1284 raise NodeAlreadyChangedError(
1287 raise NodeAlreadyChangedError(
1285 "Node at '%s' is already marked as changed" % node.path)
1288 "Node at '%s' is already marked as changed" % node.path)
1286 self.changed.append(node)
1289 self.changed.append(node)
1287
1290
1288 def remove(self, *filenodes):
1291 def remove(self, *filenodes):
1289 """
1292 """
1290 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1293 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1291 *removed* in next commit.
1294 *removed* in next commit.
1292
1295
1293 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1296 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1294 be *removed*
1297 be *removed*
1295 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1298 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1296 be *changed*
1299 be *changed*
1297 """
1300 """
1298 for node in filenodes:
1301 for node in filenodes:
1299 if node.path in (n.path for n in self.removed):
1302 if node.path in (n.path for n in self.removed):
1300 raise NodeAlreadyRemovedError(
1303 raise NodeAlreadyRemovedError(
1301 "Node is already marked to for removal at %s" % node.path)
1304 "Node is already marked to for removal at %s" % node.path)
1302 if node.path in (n.path for n in self.changed):
1305 if node.path in (n.path for n in self.changed):
1303 raise NodeAlreadyChangedError(
1306 raise NodeAlreadyChangedError(
1304 "Node is already marked to be changed at %s" % node.path)
1307 "Node is already marked to be changed at %s" % node.path)
1305 # We only mark node as *removed* - real removal is done by
1308 # We only mark node as *removed* - real removal is done by
1306 # commit method
1309 # commit method
1307 self.removed.append(node)
1310 self.removed.append(node)
1308
1311
1309 def reset(self):
1312 def reset(self):
1310 """
1313 """
1311 Resets this instance to initial state (cleans ``added``, ``changed``
1314 Resets this instance to initial state (cleans ``added``, ``changed``
1312 and ``removed`` lists).
1315 and ``removed`` lists).
1313 """
1316 """
1314 self.added = []
1317 self.added = []
1315 self.changed = []
1318 self.changed = []
1316 self.removed = []
1319 self.removed = []
1317 self.parents = []
1320 self.parents = []
1318
1321
1319 def get_ipaths(self):
1322 def get_ipaths(self):
1320 """
1323 """
1321 Returns generator of paths from nodes marked as added, changed or
1324 Returns generator of paths from nodes marked as added, changed or
1322 removed.
1325 removed.
1323 """
1326 """
1324 for node in itertools.chain(self.added, self.changed, self.removed):
1327 for node in itertools.chain(self.added, self.changed, self.removed):
1325 yield node.path
1328 yield node.path
1326
1329
1327 def get_paths(self):
1330 def get_paths(self):
1328 """
1331 """
1329 Returns list of paths from nodes marked as added, changed or removed.
1332 Returns list of paths from nodes marked as added, changed or removed.
1330 """
1333 """
1331 return list(self.get_ipaths())
1334 return list(self.get_ipaths())
1332
1335
1333 def check_integrity(self, parents=None):
1336 def check_integrity(self, parents=None):
1334 """
1337 """
1335 Checks in-memory commit's integrity. Also, sets parents if not
1338 Checks in-memory commit's integrity. Also, sets parents if not
1336 already set.
1339 already set.
1337
1340
1338 :raises CommitError: if any error occurs (e.g.
1341 :raises CommitError: if any error occurs (e.g.
1339 ``NodeDoesNotExistError``).
1342 ``NodeDoesNotExistError``).
1340 """
1343 """
1341 if not self.parents:
1344 if not self.parents:
1342 parents = parents or []
1345 parents = parents or []
1343 if len(parents) == 0:
1346 if len(parents) == 0:
1344 try:
1347 try:
1345 parents = [self.repository.get_commit(), None]
1348 parents = [self.repository.get_commit(), None]
1346 except EmptyRepositoryError:
1349 except EmptyRepositoryError:
1347 parents = [None, None]
1350 parents = [None, None]
1348 elif len(parents) == 1:
1351 elif len(parents) == 1:
1349 parents += [None]
1352 parents += [None]
1350 self.parents = parents
1353 self.parents = parents
1351
1354
1352 # Local parents, only if not None
1355 # Local parents, only if not None
1353 parents = [p for p in self.parents if p]
1356 parents = [p for p in self.parents if p]
1354
1357
1355 # Check nodes marked as added
1358 # Check nodes marked as added
1356 for p in parents:
1359 for p in parents:
1357 for node in self.added:
1360 for node in self.added:
1358 try:
1361 try:
1359 p.get_node(node.path)
1362 p.get_node(node.path)
1360 except NodeDoesNotExistError:
1363 except NodeDoesNotExistError:
1361 pass
1364 pass
1362 else:
1365 else:
1363 raise NodeAlreadyExistsError(
1366 raise NodeAlreadyExistsError(
1364 "Node `%s` already exists at %s" % (node.path, p))
1367 "Node `%s` already exists at %s" % (node.path, p))
1365
1368
1366 # Check nodes marked as changed
1369 # Check nodes marked as changed
1367 missing = set(self.changed)
1370 missing = set(self.changed)
1368 not_changed = set(self.changed)
1371 not_changed = set(self.changed)
1369 if self.changed and not parents:
1372 if self.changed and not parents:
1370 raise NodeDoesNotExistError(str(self.changed[0].path))
1373 raise NodeDoesNotExistError(str(self.changed[0].path))
1371 for p in parents:
1374 for p in parents:
1372 for node in self.changed:
1375 for node in self.changed:
1373 try:
1376 try:
1374 old = p.get_node(node.path)
1377 old = p.get_node(node.path)
1375 missing.remove(node)
1378 missing.remove(node)
1376 # if content actually changed, remove node from not_changed
1379 # if content actually changed, remove node from not_changed
1377 if old.content != node.content:
1380 if old.content != node.content:
1378 not_changed.remove(node)
1381 not_changed.remove(node)
1379 except NodeDoesNotExistError:
1382 except NodeDoesNotExistError:
1380 pass
1383 pass
1381 if self.changed and missing:
1384 if self.changed and missing:
1382 raise NodeDoesNotExistError(
1385 raise NodeDoesNotExistError(
1383 "Node `%s` marked as modified but missing in parents: %s"
1386 "Node `%s` marked as modified but missing in parents: %s"
1384 % (node.path, parents))
1387 % (node.path, parents))
1385
1388
1386 if self.changed and not_changed:
1389 if self.changed and not_changed:
1387 raise NodeNotChangedError(
1390 raise NodeNotChangedError(
1388 "Node `%s` wasn't actually changed (parents: %s)"
1391 "Node `%s` wasn't actually changed (parents: %s)"
1389 % (not_changed.pop().path, parents))
1392 % (not_changed.pop().path, parents))
1390
1393
1391 # Check nodes marked as removed
1394 # Check nodes marked as removed
1392 if self.removed and not parents:
1395 if self.removed and not parents:
1393 raise NodeDoesNotExistError(
1396 raise NodeDoesNotExistError(
1394 "Cannot remove node at %s as there "
1397 "Cannot remove node at %s as there "
1395 "were no parents specified" % self.removed[0].path)
1398 "were no parents specified" % self.removed[0].path)
1396 really_removed = set()
1399 really_removed = set()
1397 for p in parents:
1400 for p in parents:
1398 for node in self.removed:
1401 for node in self.removed:
1399 try:
1402 try:
1400 p.get_node(node.path)
1403 p.get_node(node.path)
1401 really_removed.add(node)
1404 really_removed.add(node)
1402 except CommitError:
1405 except CommitError:
1403 pass
1406 pass
1404 not_removed = set(self.removed) - really_removed
1407 not_removed = set(self.removed) - really_removed
1405 if not_removed:
1408 if not_removed:
1406 # TODO: johbo: This code branch does not seem to be covered
1409 # TODO: johbo: This code branch does not seem to be covered
1407 raise NodeDoesNotExistError(
1410 raise NodeDoesNotExistError(
1408 "Cannot remove node at %s from "
1411 "Cannot remove node at %s from "
1409 "following parents: %s" % (not_removed, parents))
1412 "following parents: %s" % (not_removed, parents))
1410
1413
1411 def commit(
1414 def commit(
1412 self, message, author, parents=None, branch=None, date=None,
1415 self, message, author, parents=None, branch=None, date=None,
1413 **kwargs):
1416 **kwargs):
1414 """
1417 """
1415 Performs in-memory commit (doesn't check workdir in any way) and
1418 Performs in-memory commit (doesn't check workdir in any way) and
1416 returns newly created :class:`BaseCommit`. Updates repository's
1419 returns newly created :class:`BaseCommit`. Updates repository's
1417 attribute `commits`.
1420 attribute `commits`.
1418
1421
1419 .. note::
1422 .. note::
1420
1423
1421 While overriding this method each backend should call
1424 While overriding this method each backend should call
1422 ``self.check_integrity(parents)`` in the first place.
1425 ``self.check_integrity(parents)`` in the first place.
1423
1426
1424 :param message: message of the commit
1427 :param message: message of the commit
1425 :param author: full username, e.g. "Joe Doe <joe.doe@example.com>"
1428 :param author: full username, e.g. "Joe Doe <joe.doe@example.com>"
1426 :param parents: single parent or sequence of parents from which commit
1429 :param parents: single parent or sequence of parents from which commit
1427 would be derived
1430 would be derived
1428 :param date: ``datetime.datetime`` instance. Defaults to
1431 :param date: ``datetime.datetime`` instance. Defaults to
1429 ``datetime.datetime.now()``.
1432 ``datetime.datetime.now()``.
1430 :param branch: branch name, as a string. If none is given, the backend's
1433 :param branch: branch name, as a string. If none is given, the backend's
1431 default branch is used.
1434 default branch is used.
1432
1435
1433 :raises ``CommitError``: if any error occurs while committing
1436 :raises ``CommitError``: if any error occurs while committing
1434 """
1437 """
1435 raise NotImplementedError
1438 raise NotImplementedError
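# Editor's note (illustrative sketch, not part of the changed file): the in-memory commit
# workflow described above. It assumes the concrete repository exposes an
# ``in_memory_commit`` instance of a BaseInMemoryCommit subclass and that FileNode is
# importable from rhodecode.lib.vcs.nodes; file names and messages are invented.
def add_readme(repo):
    from rhodecode.lib.vcs.nodes import FileNode
    imc = repo.in_memory_commit
    imc.add(FileNode('README.rst', content='Hello'))
    return imc.commit(
        message=u'Add README', author=u'Joe Doe <joe.doe@example.com>')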
1436
1439
1437
1440
1438 class BaseInMemoryChangesetClass(type):
1441 class BaseInMemoryChangesetClass(type):
1439
1442
1440 def __instancecheck__(self, instance):
1443 def __instancecheck__(self, instance):
1441 return isinstance(instance, BaseInMemoryCommit)
1444 return isinstance(instance, BaseInMemoryCommit)
1442
1445
1443
1446
1444 class BaseInMemoryChangeset(BaseInMemoryCommit):
1447 class BaseInMemoryChangeset(BaseInMemoryCommit):
1445
1448
1446 __metaclass__ = BaseInMemoryChangesetClass
1449 __metaclass__ = BaseInMemoryChangesetClass
1447
1450
1448 def __new__(cls, *args, **kwargs):
1451 def __new__(cls, *args, **kwargs):
1449 warnings.warn(
1452 warnings.warn(
1450 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1453 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1451 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1454 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1452
1455
1453
1456
1454 class EmptyCommit(BaseCommit):
1457 class EmptyCommit(BaseCommit):
1455 """
1458 """
1456 A dummy empty commit. It's possible to pass a hash when creating
1459 A dummy empty commit. It's possible to pass a hash when creating
1457 an EmptyCommit
1460 an EmptyCommit
1458 """
1461 """
1459
1462
1460 def __init__(
1463 def __init__(
1461 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1464 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1462 message='', author='', date=None):
1465 message='', author='', date=None):
1463 self._empty_commit_id = commit_id
1466 self._empty_commit_id = commit_id
1464 # TODO: johbo: Solve idx parameter, default value does not make
1467 # TODO: johbo: Solve idx parameter, default value does not make
1465 # too much sense
1468 # too much sense
1466 self.idx = idx
1469 self.idx = idx
1467 self.message = message
1470 self.message = message
1468 self.author = author
1471 self.author = author
1469 self.date = date or datetime.datetime.fromtimestamp(0)
1472 self.date = date or datetime.datetime.fromtimestamp(0)
1470 self.repository = repo
1473 self.repository = repo
1471 self.alias = alias
1474 self.alias = alias
1472
1475
1473 @LazyProperty
1476 @LazyProperty
1474 def raw_id(self):
1477 def raw_id(self):
1475 """
1478 """
1476 Returns raw string identifying this commit, useful for web
1479 Returns raw string identifying this commit, useful for web
1477 representation.
1480 representation.
1478 """
1481 """
1479
1482
1480 return self._empty_commit_id
1483 return self._empty_commit_id
1481
1484
1482 @LazyProperty
1485 @LazyProperty
1483 def branch(self):
1486 def branch(self):
1484 if self.alias:
1487 if self.alias:
1485 from rhodecode.lib.vcs.backends import get_backend
1488 from rhodecode.lib.vcs.backends import get_backend
1486 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1489 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1487
1490
1488 @LazyProperty
1491 @LazyProperty
1489 def short_id(self):
1492 def short_id(self):
1490 return self.raw_id[:12]
1493 return self.raw_id[:12]
1491
1494
1492 @LazyProperty
1495 @LazyProperty
1493 def id(self):
1496 def id(self):
1494 return self.raw_id
1497 return self.raw_id
1495
1498
1496 def get_file_commit(self, path):
1499 def get_file_commit(self, path):
1497 return self
1500 return self
1498
1501
1499 def get_file_content(self, path):
1502 def get_file_content(self, path):
1500 return u''
1503 return u''
1501
1504
1502 def get_file_size(self, path):
1505 def get_file_size(self, path):
1503 return 0
1506 return 0
1504
1507
1505
1508
1506 class EmptyChangesetClass(type):
1509 class EmptyChangesetClass(type):
1507
1510
1508 def __instancecheck__(self, instance):
1511 def __instancecheck__(self, instance):
1509 return isinstance(instance, EmptyCommit)
1512 return isinstance(instance, EmptyCommit)
1510
1513
1511
1514
1512 class EmptyChangeset(EmptyCommit):
1515 class EmptyChangeset(EmptyCommit):
1513
1516
1514 __metaclass__ = EmptyChangesetClass
1517 __metaclass__ = EmptyChangesetClass
1515
1518
1516 def __new__(cls, *args, **kwargs):
1519 def __new__(cls, *args, **kwargs):
1517 warnings.warn(
1520 warnings.warn(
1518 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1521 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1519 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1522 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1520
1523
1521 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1524 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1522 alias=None, revision=-1, message='', author='', date=None):
1525 alias=None, revision=-1, message='', author='', date=None):
1523 if requested_revision is not None:
1526 if requested_revision is not None:
1524 warnings.warn(
1527 warnings.warn(
1525 "Parameter requested_revision not supported anymore",
1528 "Parameter requested_revision not supported anymore",
1526 DeprecationWarning)
1529 DeprecationWarning)
1527 super(EmptyChangeset, self).__init__(
1530 super(EmptyChangeset, self).__init__(
1528 commit_id=cs, repo=repo, alias=alias, idx=revision,
1531 commit_id=cs, repo=repo, alias=alias, idx=revision,
1529 message=message, author=author, date=date)
1532 message=message, author=author, date=date)
1530
1533
1531 @property
1534 @property
1532 def revision(self):
1535 def revision(self):
1533 warnings.warn("Use idx instead", DeprecationWarning)
1536 warnings.warn("Use idx instead", DeprecationWarning)
1534 return self.idx
1537 return self.idx
1535
1538
1536 @revision.setter
1539 @revision.setter
1537 def revision(self, value):
1540 def revision(self, value):
1538 warnings.warn("Use idx instead", DeprecationWarning)
1541 warnings.warn("Use idx instead", DeprecationWarning)
1539 self.idx = value
1542 self.idx = value
1540
1543
1541
1544
1542 class EmptyRepository(BaseRepository):
1545 class EmptyRepository(BaseRepository):
1543 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1546 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1544 pass
1547 pass
1545
1548
1546 def get_diff(self, *args, **kwargs):
1549 def get_diff(self, *args, **kwargs):
1547 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1550 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1548 return GitDiff('')
1551 return GitDiff('')
1549
1552
1550
1553
1551 class CollectionGenerator(object):
1554 class CollectionGenerator(object):
1552
1555
1553 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1556 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1554 self.repo = repo
1557 self.repo = repo
1555 self.commit_ids = commit_ids
1558 self.commit_ids = commit_ids
1556 # TODO: (oliver) this isn't currently hooked up
1559 # TODO: (oliver) this isn't currently hooked up
1557 self.collection_size = None
1560 self.collection_size = None
1558 self.pre_load = pre_load
1561 self.pre_load = pre_load
1559
1562
1560 def __len__(self):
1563 def __len__(self):
1561 if self.collection_size is not None:
1564 if self.collection_size is not None:
1562 return self.collection_size
1565 return self.collection_size
1563 return self.commit_ids.__len__()
1566 return self.commit_ids.__len__()
1564
1567
1565 def __iter__(self):
1568 def __iter__(self):
1566 for commit_id in self.commit_ids:
1569 for commit_id in self.commit_ids:
1567 # TODO: johbo: Mercurial passes in commit indices or commit ids
1570 # TODO: johbo: Mercurial passes in commit indices or commit ids
1568 yield self._commit_factory(commit_id)
1571 yield self._commit_factory(commit_id)
1569
1572
1570 def _commit_factory(self, commit_id):
1573 def _commit_factory(self, commit_id):
1571 """
1574 """
1572 Allows backends to override the way commits are generated.
1575 Allows backends to override the way commits are generated.
1573 """
1576 """
1574 return self.repo.get_commit(commit_id=commit_id,
1577 return self.repo.get_commit(commit_id=commit_id,
1575 pre_load=self.pre_load)
1578 pre_load=self.pre_load)
1576
1579
1577 def __getslice__(self, i, j):
1580 def __getslice__(self, i, j):
1578 """
1581 """
1579 Returns a new CollectionGenerator over the sliced range of commits
1582 Returns a new CollectionGenerator over the sliced range of commits
1580 """
1583 """
1581 commit_ids = self.commit_ids[i:j]
1584 commit_ids = self.commit_ids[i:j]
1582 return self.__class__(
1585 return self.__class__(
1583 self.repo, commit_ids, pre_load=self.pre_load)
1586 self.repo, commit_ids, pre_load=self.pre_load)
1584
1587
1585 def __repr__(self):
1588 def __repr__(self):
1586 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1589 return '<CollectionGenerator[len:%s]>' % (self.__len__())
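# Editor's note (illustrative sketch, not part of the changed file): iterating and slicing
# a CollectionGenerator; it assumes the concrete backend provides a get_commits() method
# returning one, which is not shown in this diff.
def first_ids(repo, count=5):
    commits = repo.get_commits()        # assumed to return a CollectionGenerator
    return [commit.raw_id for commit in commits[:count]]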
1587
1590
1588
1591
1589 class Config(object):
1592 class Config(object):
1590 """
1593 """
1591 Represents the configuration for a repository.
1594 Represents the configuration for a repository.
1592
1595
1593 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1596 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1594 standard library. It implements only the needed subset.
1597 standard library. It implements only the needed subset.
1595 """
1598 """
1596
1599
1597 def __init__(self):
1600 def __init__(self):
1598 self._values = {}
1601 self._values = {}
1599
1602
1600 def copy(self):
1603 def copy(self):
1601 clone = Config()
1604 clone = Config()
1602 for section, values in self._values.items():
1605 for section, values in self._values.items():
1603 clone._values[section] = values.copy()
1606 clone._values[section] = values.copy()
1604 return clone
1607 return clone
1605
1608
1606 def __repr__(self):
1609 def __repr__(self):
1607 return '<Config(%s sections) at %s>' % (
1610 return '<Config(%s sections) at %s>' % (
1608 len(self._values), hex(id(self)))
1611 len(self._values), hex(id(self)))
1609
1612
1610 def items(self, section):
1613 def items(self, section):
1611 return self._values.get(section, {}).iteritems()
1614 return self._values.get(section, {}).iteritems()
1612
1615
1613 def get(self, section, option):
1616 def get(self, section, option):
1614 return self._values.get(section, {}).get(option)
1617 return self._values.get(section, {}).get(option)
1615
1618
1616 def set(self, section, option, value):
1619 def set(self, section, option, value):
1617 section_values = self._values.setdefault(section, {})
1620 section_values = self._values.setdefault(section, {})
1618 section_values[option] = value
1621 section_values[option] = value
1619
1622
1620 def clear_section(self, section):
1623 def clear_section(self, section):
1621 self._values[section] = {}
1624 self._values[section] = {}
1622
1625
1623 def serialize(self):
1626 def serialize(self):
1624 """
1627 """
1625 Creates a list of three-element tuples (section, key, value) representing
1628 Creates a list of three-element tuples (section, key, value) representing
1626 this config object.
1629 this config object.
1627 """
1630 """
1628 items = []
1631 items = []
1629 for section in self._values:
1632 for section in self._values:
1630 for option, value in self._values[section].items():
1633 for option, value in self._values[section].items():
1631 items.append(
1634 items.append(
1632 (safe_str(section), safe_str(option), safe_str(value)))
1635 (safe_str(section), safe_str(option), safe_str(value)))
1633 return items
1636 return items
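# Editor's note (illustrative sketch, not part of the changed file): minimal use of the
# Config helper above; the section and option names are made up.
def example_config():
    config = Config()
    config.set('hooks', 'changegroup.update', 'hg update >&2')
    assert config.get('hooks', 'changegroup.update') == 'hg update >&2'
    return config.serialize()   # [('hooks', 'changegroup.update', 'hg update >&2')]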
1634
1637
1635
1638
1636 class Diff(object):
1639 class Diff(object):
1637 """
1640 """
1638 Represents a diff result from a repository backend.
1641 Represents a diff result from a repository backend.
1639
1642
1640 Subclasses have to provide a backend specific value for
1643 Subclasses have to provide a backend specific value for
1641 :attr:`_header_re` and :attr:`_meta_re`.
1644 :attr:`_header_re` and :attr:`_meta_re`.
1642 """
1645 """
1643 _meta_re = None
1646 _meta_re = None
1644 _header_re = None
1647 _header_re = None
1645
1648
1646 def __init__(self, raw_diff):
1649 def __init__(self, raw_diff):
1647 self.raw = raw_diff
1650 self.raw = raw_diff
1648
1651
1649 def chunks(self):
1652 def chunks(self):
1650 """
1653 """
1651 Splits the diff into chunks, one per 'diff --git a/file b/file' section.
1654 Splits the diff into chunks, one per 'diff --git a/file b/file' section.
1652 To keep diffs consistent we must prepend them with \n, and make sure
1655 To keep diffs consistent we must prepend them with \n, and make sure
1653 we can detect the last chunk, as it also has a special rule.
1656 we can detect the last chunk, as it also has a special rule.
1654 """
1657 """
1655
1658
1656 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1659 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1657 header = diff_parts[0]
1660 header = diff_parts[0]
1658
1661
1659 if self._meta_re:
1662 if self._meta_re:
1660 match = self._meta_re.match(header)
1663 match = self._meta_re.match(header)
1661
1664
1662 chunks = diff_parts[1:]
1665 chunks = diff_parts[1:]
1663 total_chunks = len(chunks)
1666 total_chunks = len(chunks)
1664
1667
1665 return (
1668 return (
1666 DiffChunk(chunk, self, cur_chunk == total_chunks)
1669 DiffChunk(chunk, self, cur_chunk == total_chunks)
1667 for cur_chunk, chunk in enumerate(chunks, start=1))
1670 for cur_chunk, chunk in enumerate(chunks, start=1))
1668
1671
1669
1672
1670 class DiffChunk(object):
1673 class DiffChunk(object):
1671
1674
1672 def __init__(self, chunk, diff, last_chunk):
1675 def __init__(self, chunk, diff, last_chunk):
1673 self._diff = diff
1676 self._diff = diff
1674
1677
1675 # since we split by \ndiff --git that part is lost from original diff
1678 # since we split by \ndiff --git that part is lost from original diff
1676 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1679 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1677 if not last_chunk:
1680 if not last_chunk:
1678 chunk += '\n'
1681 chunk += '\n'
1679
1682
1680 match = self._diff._header_re.match(chunk)
1683 match = self._diff._header_re.match(chunk)
1681 self.header = match.groupdict()
1684 self.header = match.groupdict()
1682 self.diff = chunk[match.end():]
1685 self.diff = chunk[match.end():]
1683 self.raw = chunk
1686 self.raw = chunk
1684
1687
1685
1688
1686 class BasePathPermissionChecker(object):
1689 class BasePathPermissionChecker(object):
1687
1690
1688 @staticmethod
1691 @staticmethod
1689 def create_from_patterns(includes, excludes):
1692 def create_from_patterns(includes, excludes):
1690 if includes and '*' in includes and not excludes:
1693 if includes and '*' in includes and not excludes:
1691 return AllPathPermissionChecker()
1694 return AllPathPermissionChecker()
1692 elif excludes and '*' in excludes:
1695 elif excludes and '*' in excludes:
1693 return NonePathPermissionChecker()
1696 return NonePathPermissionChecker()
1694 else:
1697 else:
1695 return PatternPathPermissionChecker(includes, excludes)
1698 return PatternPathPermissionChecker(includes, excludes)
1696
1699
1697 @property
1700 @property
1698 def has_full_access(self):
1701 def has_full_access(self):
1699 raise NotImplementedError()
1702 raise NotImplementedError()
1700
1703
1701 def has_access(self, path):
1704 def has_access(self, path):
1702 raise NotImplementedError()
1705 raise NotImplementedError()
1703
1706
1704
1707
1705 class AllPathPermissionChecker(BasePathPermissionChecker):
1708 class AllPathPermissionChecker(BasePathPermissionChecker):
1706
1709
1707 @property
1710 @property
1708 def has_full_access(self):
1711 def has_full_access(self):
1709 return True
1712 return True
1710
1713
1711 def has_access(self, path):
1714 def has_access(self, path):
1712 return True
1715 return True
1713
1716
1714
1717
1715 class NonePathPermissionChecker(BasePathPermissionChecker):
1718 class NonePathPermissionChecker(BasePathPermissionChecker):
1716
1719
1717 @property
1720 @property
1718 def has_full_access(self):
1721 def has_full_access(self):
1719 return False
1722 return False
1720
1723
1721 def has_access(self, path):
1724 def has_access(self, path):
1722 return False
1725 return False
1723
1726
1724
1727
1725 class PatternPathPermissionChecker(BasePathPermissionChecker):
1728 class PatternPathPermissionChecker(BasePathPermissionChecker):
1726
1729
1727 def __init__(self, includes, excludes):
1730 def __init__(self, includes, excludes):
1728 self.includes = includes
1731 self.includes = includes
1729 self.excludes = excludes
1732 self.excludes = excludes
1730 self.includes_re = [] if not includes else [
1733 self.includes_re = [] if not includes else [
1731 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1734 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1732 self.excludes_re = [] if not excludes else [
1735 self.excludes_re = [] if not excludes else [
1733 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1736 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1734
1737
1735 @property
1738 @property
1736 def has_full_access(self):
1739 def has_full_access(self):
1737 return '*' in self.includes and not self.excludes
1740 return '*' in self.includes and not self.excludes
1738
1741
1739 def has_access(self, path):
1742 def has_access(self, path):
1740 for regex in self.excludes_re:
1743 for regex in self.excludes_re:
1741 if regex.match(path):
1744 if regex.match(path):
1742 return False
1745 return False
1743 for regex in self.includes_re:
1746 for regex in self.includes_re:
1744 if regex.match(path):
1747 if regex.match(path):
1745 return True
1748 return True
1746 return False
1749 return False
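# Editor's note (illustrative sketch, not part of the changed file): use of the path
# permission checkers above; the include/exclude patterns are fnmatch-style, as used by
# PatternPathPermissionChecker.
def example_checker():
    checker = BasePathPermissionChecker.create_from_patterns(
        includes=['docs/*', 'README.rst'], excludes=['docs/private/*'])
    assert checker.has_access('docs/index.rst')
    assert not checker.has_access('docs/private/secret.rst')
    return checker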
@@ -1,1006 +1,1009 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference)
38 MergeFailureReason, Reference)
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError,
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45
45
46
46
47 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
47 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
52 class GitRepository(BaseRepository):
52 class GitRepository(BaseRepository):
53 """
53 """
54 Git repository backend.
54 Git repository backend.
55 """
55 """
56 DEFAULT_BRANCH_NAME = 'master'
56 DEFAULT_BRANCH_NAME = 'master'
57
57
58 contact = BaseRepository.DEFAULT_CONTACT
58 contact = BaseRepository.DEFAULT_CONTACT
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 update_after_clone=False, with_wire=None, bare=False):
61 update_after_clone=False, with_wire=None, bare=False):
62
62
63 self.path = safe_str(os.path.abspath(repo_path))
63 self.path = safe_str(os.path.abspath(repo_path))
64 self.config = config if config else self.get_default_config()
64 self.config = config if config else self.get_default_config()
65 self._remote = connection.Git(
65 self.with_wire = with_wire
66 self.path, self.config, with_wire=with_wire)
67
66
68 self._init_repo(create, src_url, update_after_clone, bare)
67 self._init_repo(create, src_url, update_after_clone, bare)
69
68
70 # caches
69 # caches
71 self._commit_ids = {}
70 self._commit_ids = {}
72
71
73 @LazyProperty
72 @LazyProperty
73 def _remote(self):
74 return connection.Git(self.path, self.config, with_wire=self.with_wire)
75
76 @LazyProperty
74 def bare(self):
77 def bare(self):
75 return self._remote.bare()
78 return self._remote.bare()
76
79
77 @LazyProperty
80 @LazyProperty
78 def head(self):
81 def head(self):
79 return self._remote.head()
82 return self._remote.head()
80
83
81 @LazyProperty
84 @LazyProperty
82 def commit_ids(self):
85 def commit_ids(self):
83 """
86 """
84 Returns a list of commit ids, in ascending order. Being a lazy
87 Returns a list of commit ids, in ascending order. Being a lazy
85 attribute, it allows external tools to inject commit ids from a cache.
88 attribute, it allows external tools to inject commit ids from a cache.
86 """
89 """
87 commit_ids = self._get_all_commit_ids()
90 commit_ids = self._get_all_commit_ids()
88 self._rebuild_cache(commit_ids)
91 self._rebuild_cache(commit_ids)
89 return commit_ids
92 return commit_ids
90
93
91 def _rebuild_cache(self, commit_ids):
94 def _rebuild_cache(self, commit_ids):
92 self._commit_ids = dict((commit_id, index)
95 self._commit_ids = dict((commit_id, index)
93 for index, commit_id in enumerate(commit_ids))
96 for index, commit_id in enumerate(commit_ids))
94
97
95 def run_git_command(self, cmd, **opts):
98 def run_git_command(self, cmd, **opts):
96 """
99 """
97 Runs given ``cmd`` as git command and returns tuple
100 Runs given ``cmd`` as git command and returns tuple
98 (stdout, stderr).
101 (stdout, stderr).
99
102
100 :param cmd: git command to be executed
103 :param cmd: git command to be executed
101 :param opts: env options to pass into Subprocess command
104 :param opts: env options to pass into Subprocess command
102 """
105 """
103 if not isinstance(cmd, list):
106 if not isinstance(cmd, list):
104 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
105
108
106 skip_stderr_log = opts.pop('skip_stderr_log', False)
109 skip_stderr_log = opts.pop('skip_stderr_log', False)
107 out, err = self._remote.run_git_command(cmd, **opts)
110 out, err = self._remote.run_git_command(cmd, **opts)
108 if err and not skip_stderr_log:
111 if err and not skip_stderr_log:
109 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
112 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
110 return out, err
113 return out, err
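# Editor's note (illustrative sketch, not part of the changed file): an example call of
# run_git_command(); the command list is just an example, only the method and its
# skip_stderr_log handling come from the code above.
def count_commits(repo):
    stdout, _stderr = repo.run_git_command(
        ['rev-list', '--count', 'HEAD'], skip_stderr_log=True)
    return int(stdout.strip())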
111
114
112 @staticmethod
115 @staticmethod
113 def check_url(url, config):
116 def check_url(url, config):
114 """
117 """
115 Checks the given url and tries to verify that it is a valid
118 Checks the given url and tries to verify that it is a valid
116 link. Sometimes it may happen that git will issue a basic
119 link. Sometimes it may happen that git will issue a basic
117 auth request that can cause the whole API to hang when used from python
120 auth request that can cause the whole API to hang when used from python
118 or other external calls.
121 or other external calls.
119
122
120 On failure it raises urllib2.HTTPError; the exception is also thrown
123 On failure it raises urllib2.HTTPError; the exception is also thrown
121 when the return code is not 200.
124 when the return code is not 200.
122 """
125 """
123 # check first if it's not an url
126 # check first if it's not an url
124 if os.path.isdir(url) or url.startswith('file:'):
127 if os.path.isdir(url) or url.startswith('file:'):
125 return True
128 return True
126
129
127 if '+' in url.split('://', 1)[0]:
130 if '+' in url.split('://', 1)[0]:
128 url = url.split('+', 1)[1]
131 url = url.split('+', 1)[1]
129
132
130 # Request the _remote to verify the url
133 # Request the _remote to verify the url
131 return connection.Git.check_url(url, config.serialize())
134 return connection.Git.check_url(url, config.serialize())
132
135
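# Note: scheme prefixes such as 'git+https://host/repo' (url is a placeholder)
# are reduced to 'https://host/repo' before being handed to the remote for
# verification, while local directories and 'file:' urls are accepted without
# any check.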
133 @staticmethod
136 @staticmethod
134 def is_valid_repository(path):
137 def is_valid_repository(path):
135 if os.path.isdir(os.path.join(path, '.git')):
138 if os.path.isdir(os.path.join(path, '.git')):
136 return True
139 return True
137 # check case of bare repository
140 # check case of bare repository
138 try:
141 try:
139 GitRepository(path)
142 GitRepository(path)
140 return True
143 return True
141 except VCSError:
144 except VCSError:
142 pass
145 pass
143 return False
146 return False
144
147
145 def _init_repo(self, create, src_url=None, update_after_clone=False,
148 def _init_repo(self, create, src_url=None, update_after_clone=False,
146 bare=False):
149 bare=False):
147 if create and os.path.exists(self.path):
150 if create and os.path.exists(self.path):
148 raise RepositoryError(
151 raise RepositoryError(
149 "Cannot create repository at %s, location already exist"
152 "Cannot create repository at %s, location already exist"
150 % self.path)
153 % self.path)
151
154
152 try:
155 try:
153 if create and src_url:
156 if create and src_url:
154 GitRepository.check_url(src_url, self.config)
157 GitRepository.check_url(src_url, self.config)
155 self.clone(src_url, update_after_clone, bare)
158 self.clone(src_url, update_after_clone, bare)
156 elif create:
159 elif create:
157 os.makedirs(self.path, mode=0755)
160 os.makedirs(self.path, mode=0755)
158
161
159 if bare:
162 if bare:
160 self._remote.init_bare()
163 self._remote.init_bare()
161 else:
164 else:
162 self._remote.init()
165 self._remote.init()
163 else:
166 else:
164 if not self._remote.assert_correct_path():
167 if not self._remote.assert_correct_path():
165 raise RepositoryError(
168 raise RepositoryError(
166 'Path "%s" does not contain a Git repository' %
169 'Path "%s" does not contain a Git repository' %
167 (self.path,))
170 (self.path,))
168
171
169 # TODO: johbo: check if we have to translate the OSError here
172 # TODO: johbo: check if we have to translate the OSError here
170 except OSError as err:
173 except OSError as err:
171 raise RepositoryError(err)
174 raise RepositoryError(err)
172
175
173 def _get_all_commit_ids(self, filters=None):
176 def _get_all_commit_ids(self, filters=None):
174 # we must check that this repo is not empty, since the later command
177 # we must check that this repo is not empty, since the later command
175 # fails if it is. And it's cheaper to ask than to handle the subprocess
178 # fails if it is. And it's cheaper to ask than to handle the subprocess
176 # errors
179 # errors
177 try:
180 try:
178 self._remote.head()
181 self._remote.head()
179 except KeyError:
182 except KeyError:
180 return []
183 return []
181
184
182 rev_filter = ['--branches', '--tags']
185 rev_filter = ['--branches', '--tags']
183 extra_filter = []
186 extra_filter = []
184
187
185 if filters:
188 if filters:
186 if filters.get('since'):
189 if filters.get('since'):
187 extra_filter.append('--since=%s' % (filters['since']))
190 extra_filter.append('--since=%s' % (filters['since']))
188 if filters.get('until'):
191 if filters.get('until'):
189 extra_filter.append('--until=%s' % (filters['until']))
192 extra_filter.append('--until=%s' % (filters['until']))
190 if filters.get('branch_name'):
193 if filters.get('branch_name'):
191 rev_filter = ['--tags']
194 rev_filter = ['--tags']
192 extra_filter.append(filters['branch_name'])
195 extra_filter.append(filters['branch_name'])
193 rev_filter.extend(extra_filter)
196 rev_filter.extend(extra_filter)
194
197
195 # if filters.get('start') or filters.get('end'):
198 # if filters.get('start') or filters.get('end'):
196 # # skip is offset, max-count is limit
199 # # skip is offset, max-count is limit
197 # if filters.get('start'):
200 # if filters.get('start'):
198 # extra_filter += ' --skip=%s' % filters['start']
201 # extra_filter += ' --skip=%s' % filters['start']
199 # if filters.get('end'):
202 # if filters.get('end'):
200 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
203 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
201
204
202 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
205 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
203 try:
206 try:
204 output, __ = self.run_git_command(cmd)
207 output, __ = self.run_git_command(cmd)
205 except RepositoryError:
208 except RepositoryError:
206 # Can be raised for empty repositories
209 # Can be raised for empty repositories
207 return []
210 return []
208 return output.splitlines()
211 return output.splitlines()
209
212
210 def _get_commit_id(self, commit_id_or_idx):
213 def _get_commit_id(self, commit_id_or_idx):
211 def is_null(value):
214 def is_null(value):
212 return len(value) == commit_id_or_idx.count('0')
215 return len(value) == commit_id_or_idx.count('0')
213
216
214 if self.is_empty():
217 if self.is_empty():
215 raise EmptyRepositoryError("There are no commits yet")
218 raise EmptyRepositoryError("There are no commits yet")
216
219
217 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
220 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
218 return self.commit_ids[-1]
221 return self.commit_ids[-1]
219
222
220 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
223 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
221 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
224 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
222 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
225 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
223 try:
226 try:
224 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
227 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
225 except Exception:
228 except Exception:
226 msg = "Commit %s does not exist for %s" % (
229 msg = "Commit %s does not exist for %s" % (
227 commit_id_or_idx, self)
230 commit_id_or_idx, self)
228 raise CommitDoesNotExistError(msg)
231 raise CommitDoesNotExistError(msg)
229
232
230 elif is_bstr:
233 elif is_bstr:
231 # check full path ref, eg. refs/heads/master
234 # check full path ref, eg. refs/heads/master
232 ref_id = self._refs.get(commit_id_or_idx)
235 ref_id = self._refs.get(commit_id_or_idx)
233 if ref_id:
236 if ref_id:
234 return ref_id
237 return ref_id
235
238
236 # check branch name
239 # check branch name
237 branch_ids = self.branches.values()
240 branch_ids = self.branches.values()
238 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
241 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
239 if ref_id:
242 if ref_id:
240 return ref_id
243 return ref_id
241
244
242 # check tag name
245 # check tag name
243 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
246 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
244 if ref_id:
247 if ref_id:
245 return ref_id
248 return ref_id
246
249
247 if (not SHA_PATTERN.match(commit_id_or_idx) or
250 if (not SHA_PATTERN.match(commit_id_or_idx) or
248 commit_id_or_idx not in self.commit_ids):
251 commit_id_or_idx not in self.commit_ids):
249 msg = "Commit %s does not exist for %s" % (
252 msg = "Commit %s does not exist for %s" % (
250 commit_id_or_idx, self)
253 commit_id_or_idx, self)
251 raise CommitDoesNotExistError(msg)
254 raise CommitDoesNotExistError(msg)
252
255
253 # Ensure we return full id
256 # Ensure we return full id
254 if not SHA_PATTERN.match(str(commit_id_or_idx)):
257 if not SHA_PATTERN.match(str(commit_id_or_idx)):
255 raise CommitDoesNotExistError(
258 raise CommitDoesNotExistError(
256 "Given commit id %s not recognized" % commit_id_or_idx)
259 "Given commit id %s not recognized" % commit_id_or_idx)
257 return commit_id_or_idx
260 return commit_id_or_idx
258
261
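# Resolution order used above: None/''/'tip'/'HEAD'/'head'/-1 -> latest commit;
# short digit strings and ints -> index into ``commit_ids``; then full ref
# paths (e.g. refs/heads/master), branch names and tag names; finally a full
# sha, which must already be known in ``commit_ids``.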
259 def get_hook_location(self):
262 def get_hook_location(self):
260 """
263 """
261 returns absolute path to location where hooks are stored
264 returns absolute path to location where hooks are stored
262 """
265 """
263 loc = os.path.join(self.path, 'hooks')
266 loc = os.path.join(self.path, 'hooks')
264 if not self.bare:
267 if not self.bare:
265 loc = os.path.join(self.path, '.git', 'hooks')
268 loc = os.path.join(self.path, '.git', 'hooks')
266 return loc
269 return loc
267
270
268 @LazyProperty
271 @LazyProperty
269 def last_change(self):
272 def last_change(self):
270 """
273 """
271 Returns last change made on this repository as
274 Returns last change made on this repository as
272 `datetime.datetime` object.
275 `datetime.datetime` object.
273 """
276 """
274 try:
277 try:
275 return self.get_commit().date
278 return self.get_commit().date
276 except RepositoryError:
279 except RepositoryError:
277 tzoffset = makedate()[1]
280 tzoffset = makedate()[1]
278 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
281 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
279
282
280 def _get_fs_mtime(self):
283 def _get_fs_mtime(self):
281 idx_loc = '' if self.bare else '.git'
284 idx_loc = '' if self.bare else '.git'
282 # fallback to filesystem
285 # fallback to filesystem
283 in_path = os.path.join(self.path, idx_loc, "index")
286 in_path = os.path.join(self.path, idx_loc, "index")
284 he_path = os.path.join(self.path, idx_loc, "HEAD")
287 he_path = os.path.join(self.path, idx_loc, "HEAD")
285 if os.path.exists(in_path):
288 if os.path.exists(in_path):
286 return os.stat(in_path).st_mtime
289 return os.stat(in_path).st_mtime
287 else:
290 else:
288 return os.stat(he_path).st_mtime
291 return os.stat(he_path).st_mtime
289
292
290 @LazyProperty
293 @LazyProperty
291 def description(self):
294 def description(self):
292 description = self._remote.get_description()
295 description = self._remote.get_description()
293 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
296 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
294
297
295 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
298 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
296 if self.is_empty():
299 if self.is_empty():
297 return OrderedDict()
300 return OrderedDict()
298
301
299 result = []
302 result = []
300 for ref, sha in self._refs.iteritems():
303 for ref, sha in self._refs.iteritems():
301 if ref.startswith(prefix):
304 if ref.startswith(prefix):
302 ref_name = ref
305 ref_name = ref
303 if strip_prefix:
306 if strip_prefix:
304 ref_name = ref[len(prefix):]
307 ref_name = ref[len(prefix):]
305 result.append((safe_unicode(ref_name), sha))
308 result.append((safe_unicode(ref_name), sha))
306
309
307 def get_name(entry):
310 def get_name(entry):
308 return entry[0]
311 return entry[0]
309
312
310 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
313 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
311
314
312 def _get_branches(self):
315 def _get_branches(self):
313 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
316 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
314
317
315 @LazyProperty
318 @LazyProperty
316 def branches(self):
319 def branches(self):
317 return self._get_branches()
320 return self._get_branches()
318
321
319 @LazyProperty
322 @LazyProperty
320 def branches_closed(self):
323 def branches_closed(self):
321 return {}
324 return {}
322
325
323 @LazyProperty
326 @LazyProperty
324 def bookmarks(self):
327 def bookmarks(self):
325 return {}
328 return {}
326
329
327 @LazyProperty
330 @LazyProperty
328 def branches_all(self):
331 def branches_all(self):
329 all_branches = {}
332 all_branches = {}
330 all_branches.update(self.branches)
333 all_branches.update(self.branches)
331 all_branches.update(self.branches_closed)
334 all_branches.update(self.branches_closed)
332 return all_branches
335 return all_branches
333
336
334 @LazyProperty
337 @LazyProperty
335 def tags(self):
338 def tags(self):
336 return self._get_tags()
339 return self._get_tags()
337
340
338 def _get_tags(self):
341 def _get_tags(self):
339 return self._get_refs_entries(
342 return self._get_refs_entries(
340 prefix='refs/tags/', strip_prefix=True, reverse=True)
343 prefix='refs/tags/', strip_prefix=True, reverse=True)
341
344
342 def tag(self, name, user, commit_id=None, message=None, date=None,
345 def tag(self, name, user, commit_id=None, message=None, date=None,
343 **kwargs):
346 **kwargs):
344 # TODO: fix this method to apply annotated tags correctly with a message
347 # TODO: fix this method to apply annotated tags correctly with a message
345 """
348 """
346 Creates and returns a tag for the given ``commit_id``.
349 Creates and returns a tag for the given ``commit_id``.
347
350
348 :param name: name for new tag
351 :param name: name for new tag
349 :param user: full username, e.g. "Joe Doe <joe.doe@example.com>"
352 :param user: full username, e.g. "Joe Doe <joe.doe@example.com>"
350 :param commit_id: commit id for which new tag would be created
353 :param commit_id: commit id for which new tag would be created
351 :param message: message of the tag's commit
354 :param message: message of the tag's commit
352 :param date: date of tag's commit
355 :param date: date of tag's commit
353
356
354 :raises TagAlreadyExistError: if tag with same name already exists
357 :raises TagAlreadyExistError: if tag with same name already exists
355 """
358 """
356 if name in self.tags:
359 if name in self.tags:
357 raise TagAlreadyExistError("Tag %s already exists" % name)
360 raise TagAlreadyExistError("Tag %s already exists" % name)
358 commit = self.get_commit(commit_id=commit_id)
361 commit = self.get_commit(commit_id=commit_id)
359 message = message or "Added tag %s for commit %s" % (
362 message = message or "Added tag %s for commit %s" % (
360 name, commit.raw_id)
363 name, commit.raw_id)
361 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
364 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
362
365
363 self._refs = self._get_refs()
366 self._refs = self._get_refs()
364 self.tags = self._get_tags()
367 self.tags = self._get_tags()
365 return commit
368 return commit
366
369
367 def remove_tag(self, name, user, message=None, date=None):
370 def remove_tag(self, name, user, message=None, date=None):
368 """
371 """
369 Removes tag with the given ``name``.
372 Removes tag with the given ``name``.
370
373
371 :param name: name of the tag to be removed
374 :param name: name of the tag to be removed
372 :param user: full username, e.g. "Joe Doe <joe.doe@example.com>"
375 :param user: full username, e.g. "Joe Doe <joe.doe@example.com>"
373 :param message: message of the tag's removal commit
376 :param message: message of the tag's removal commit
374 :param date: date of tag's removal commit
377 :param date: date of tag's removal commit
375
378
376 :raises TagDoesNotExistError: if tag with given name does not exist
379 :raises TagDoesNotExistError: if tag with given name does not exist
377 """
380 """
378 if name not in self.tags:
381 if name not in self.tags:
379 raise TagDoesNotExistError("Tag %s does not exist" % name)
382 raise TagDoesNotExistError("Tag %s does not exist" % name)
380 tagpath = vcspath.join(
383 tagpath = vcspath.join(
381 self._remote.get_refs_path(), 'refs', 'tags', name)
384 self._remote.get_refs_path(), 'refs', 'tags', name)
382 try:
385 try:
383 os.remove(tagpath)
386 os.remove(tagpath)
384 self._refs = self._get_refs()
387 self._refs = self._get_refs()
385 self.tags = self._get_tags()
388 self.tags = self._get_tags()
386 except OSError as e:
389 except OSError as e:
387 raise RepositoryError(e.strerror)
390 raise RepositoryError(e.strerror)
388
391
389 def _get_refs(self):
392 def _get_refs(self):
390 return self._remote.get_refs()
393 return self._remote.get_refs()
391
394
392 @LazyProperty
395 @LazyProperty
393 def _refs(self):
396 def _refs(self):
394 return self._get_refs()
397 return self._get_refs()
395
398
396 @property
399 @property
397 def _ref_tree(self):
400 def _ref_tree(self):
398 node = tree = {}
401 node = tree = {}
399 for ref, sha in self._refs.iteritems():
402 for ref, sha in self._refs.iteritems():
400 path = ref.split('/')
403 path = ref.split('/')
401 for bit in path[:-1]:
404 for bit in path[:-1]:
402 node = node.setdefault(bit, {})
405 node = node.setdefault(bit, {})
403 node[path[-1]] = sha
406 node[path[-1]] = sha
404 node = tree
407 node = tree
405 return tree
408 return tree
406
409
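# Illustrative shape of ``_ref_tree`` for refs 'refs/heads/master' and
# 'refs/tags/v1.0' (names are placeholders):
#   {'refs': {'heads': {'master': '<sha>'}, 'tags': {'v1.0': '<sha>'}}}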
407 def get_remote_ref(self, ref_name):
410 def get_remote_ref(self, ref_name):
408 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
411 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
409 try:
412 try:
410 return self._refs[ref_key]
413 return self._refs[ref_key]
411 except Exception:
414 except Exception:
412 return
415 return
413
416
414 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
417 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
415 """
418 """
416 Returns `GitCommit` object representing commit from git repository
419 Returns `GitCommit` object representing commit from git repository
417 at the given `commit_id` or head (most recent commit) if None given.
420 at the given `commit_id` or head (most recent commit) if None given.
418 """
421 """
419 if commit_id is not None:
422 if commit_id is not None:
420 self._validate_commit_id(commit_id)
423 self._validate_commit_id(commit_id)
421 elif commit_idx is not None:
424 elif commit_idx is not None:
422 self._validate_commit_idx(commit_idx)
425 self._validate_commit_idx(commit_idx)
423 commit_id = commit_idx
426 commit_id = commit_idx
424 commit_id = self._get_commit_id(commit_id)
427 commit_id = self._get_commit_id(commit_id)
425 try:
428 try:
426 # Need to call remote to translate id for tagging scenario
429 # Need to call remote to translate id for tagging scenario
427 commit_id = self._remote.get_object(commit_id)["commit_id"]
430 commit_id = self._remote.get_object(commit_id)["commit_id"]
428 idx = self._commit_ids[commit_id]
431 idx = self._commit_ids[commit_id]
429 except KeyError:
432 except KeyError:
430 raise RepositoryError("Cannot get object with id %s" % commit_id)
433 raise RepositoryError("Cannot get object with id %s" % commit_id)
431
434
432 return GitCommit(self, commit_id, idx, pre_load=pre_load)
435 return GitCommit(self, commit_id, idx, pre_load=pre_load)
433
436
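# Usage sketch (assuming ``repo`` is a non-empty GitRepository; names are
# placeholders):
#   tip = repo.get_commit()                       # most recent commit
#   first = repo.get_commit(commit_idx=0)         # by index
#   named = repo.get_commit(commit_id='master')   # branch/tag/ref/sha also work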
434 def get_commits(
437 def get_commits(
435 self, start_id=None, end_id=None, start_date=None, end_date=None,
438 self, start_id=None, end_id=None, start_date=None, end_date=None,
436 branch_name=None, show_hidden=False, pre_load=None):
439 branch_name=None, show_hidden=False, pre_load=None):
437 """
440 """
438 Returns generator of `GitCommit` objects from start to end (both
441 Returns generator of `GitCommit` objects from start to end (both
439 are inclusive), in ascending date order.
442 are inclusive), in ascending date order.
440
443
441 :param start_id: None, str(commit_id)
444 :param start_id: None, str(commit_id)
442 :param end_id: None, str(commit_id)
445 :param end_id: None, str(commit_id)
443 :param start_date: if specified, commits with commit date less than
446 :param start_date: if specified, commits with commit date less than
444 ``start_date`` would be filtered out from returned set
447 ``start_date`` would be filtered out from returned set
445 :param end_date: if specified, commits with commit date greater than
448 :param end_date: if specified, commits with commit date greater than
446 ``end_date`` would be filtered out from returned set
449 ``end_date`` would be filtered out from returned set
447 :param branch_name: if specified, commits not reachable from given
450 :param branch_name: if specified, commits not reachable from given
448 branch would be filtered out from returned set
451 branch would be filtered out from returned set
449 :param show_hidden: Show hidden commits such as obsolete or hidden from
452 :param show_hidden: Show hidden commits such as obsolete or hidden from
450 Mercurial evolve
453 Mercurial evolve
451 :raise BranchDoesNotExistError: If given `branch_name` does not
454 :raise BranchDoesNotExistError: If given `branch_name` does not
452 exist.
455 exist.
453 :raise CommitDoesNotExistError: If commits for given `start` or
456 :raise CommitDoesNotExistError: If commits for given `start` or
454 `end` could not be found.
457 `end` could not be found.
455
458
456 """
459 """
457 if self.is_empty():
460 if self.is_empty():
458 raise EmptyRepositoryError("There are no commits yet")
461 raise EmptyRepositoryError("There are no commits yet")
459 self._validate_branch_name(branch_name)
462 self._validate_branch_name(branch_name)
460
463
461 if start_id is not None:
464 if start_id is not None:
462 self._validate_commit_id(start_id)
465 self._validate_commit_id(start_id)
463 if end_id is not None:
466 if end_id is not None:
464 self._validate_commit_id(end_id)
467 self._validate_commit_id(end_id)
465
468
466 start_raw_id = self._get_commit_id(start_id)
469 start_raw_id = self._get_commit_id(start_id)
467 start_pos = self._commit_ids[start_raw_id] if start_id else None
470 start_pos = self._commit_ids[start_raw_id] if start_id else None
468 end_raw_id = self._get_commit_id(end_id)
471 end_raw_id = self._get_commit_id(end_id)
469 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
472 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
470
473
471 if None not in [start_id, end_id] and start_pos > end_pos:
474 if None not in [start_id, end_id] and start_pos > end_pos:
472 raise RepositoryError(
475 raise RepositoryError(
473 "Start commit '%s' cannot be after end commit '%s'" %
476 "Start commit '%s' cannot be after end commit '%s'" %
474 (start_id, end_id))
477 (start_id, end_id))
475
478
476 if end_pos is not None:
479 if end_pos is not None:
477 end_pos += 1
480 end_pos += 1
478
481
479 filter_ = []
482 filter_ = []
480 if branch_name:
483 if branch_name:
481 filter_.append({'branch_name': branch_name})
484 filter_.append({'branch_name': branch_name})
482 if start_date and not end_date:
485 if start_date and not end_date:
483 filter_.append({'since': start_date})
486 filter_.append({'since': start_date})
484 if end_date and not start_date:
487 if end_date and not start_date:
485 filter_.append({'until': end_date})
488 filter_.append({'until': end_date})
486 if start_date and end_date:
489 if start_date and end_date:
487 filter_.append({'since': start_date})
490 filter_.append({'since': start_date})
488 filter_.append({'until': end_date})
491 filter_.append({'until': end_date})
489
492
490 # if start_pos or end_pos:
493 # if start_pos or end_pos:
491 # filter_.append({'start': start_pos})
494 # filter_.append({'start': start_pos})
492 # filter_.append({'end': end_pos})
495 # filter_.append({'end': end_pos})
493
496
494 if filter_:
497 if filter_:
495 revfilters = {
498 revfilters = {
496 'branch_name': branch_name,
499 'branch_name': branch_name,
497 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
500 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
498 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
501 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
499 'start': start_pos,
502 'start': start_pos,
500 'end': end_pos,
503 'end': end_pos,
501 }
504 }
502 commit_ids = self._get_all_commit_ids(filters=revfilters)
505 commit_ids = self._get_all_commit_ids(filters=revfilters)
503
506
504 # pure python stuff, it's slow due to walker walking whole repo
507 # pure python stuff, it's slow due to walker walking whole repo
505 # def get_revs(walker):
508 # def get_revs(walker):
506 # for walker_entry in walker:
509 # for walker_entry in walker:
507 # yield walker_entry.commit.id
510 # yield walker_entry.commit.id
508 # revfilters = {}
511 # revfilters = {}
509 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
512 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
510 else:
513 else:
511 commit_ids = self.commit_ids
514 commit_ids = self.commit_ids
512
515
513 if start_pos or end_pos:
516 if start_pos or end_pos:
514 commit_ids = commit_ids[start_pos: end_pos]
517 commit_ids = commit_ids[start_pos: end_pos]
515
518
516 return CollectionGenerator(self, commit_ids, pre_load=pre_load)
519 return CollectionGenerator(self, commit_ids, pre_load=pre_load)
517
520
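# Usage sketch (branch name and date are placeholders):
#   for commit in repo.get_commits(branch_name='master',
#                                  start_date=datetime.datetime(2018, 1, 1)):
#       print(commit.raw_id)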
518 def get_diff(
521 def get_diff(
519 self, commit1, commit2, path='', ignore_whitespace=False,
522 self, commit1, commit2, path='', ignore_whitespace=False,
520 context=3, path1=None):
523 context=3, path1=None):
521 """
524 """
522 Returns (git like) *diff*, as plain text. Shows changes introduced by
525 Returns (git like) *diff*, as plain text. Shows changes introduced by
523 ``commit2`` since ``commit1``.
526 ``commit2`` since ``commit1``.
524
527
525 :param commit1: Entry point from which diff is shown. Can be
528 :param commit1: Entry point from which diff is shown. Can be
526 ``self.EMPTY_COMMIT`` - in this case, a patch showing all
529 ``self.EMPTY_COMMIT`` - in this case, a patch showing all
527 the changes from the empty state of the repository up to ``commit2``
530 the changes from the empty state of the repository up to ``commit2``
528 :param commit2: Commit until which the changes should be shown.
531 :param commit2: Commit until which the changes should be shown.
529 :param ignore_whitespace: If set to ``True``, would not show whitespace
532 :param ignore_whitespace: If set to ``True``, would not show whitespace
530 changes. Defaults to ``False``.
533 changes. Defaults to ``False``.
531 :param context: How many lines before/after changed lines should be
534 :param context: How many lines before/after changed lines should be
532 shown. Defaults to ``3``.
535 shown. Defaults to ``3``.
533 """
536 """
534 self._validate_diff_commits(commit1, commit2)
537 self._validate_diff_commits(commit1, commit2)
535 if path1 is not None and path1 != path:
538 if path1 is not None and path1 != path:
536 raise ValueError("Diff of two different paths not supported.")
539 raise ValueError("Diff of two different paths not supported.")
537
540
538 flags = [
541 flags = [
539 '-U%s' % context, '--full-index', '--binary', '-p',
542 '-U%s' % context, '--full-index', '--binary', '-p',
540 '-M', '--abbrev=40']
543 '-M', '--abbrev=40']
541 if ignore_whitespace:
544 if ignore_whitespace:
542 flags.append('-w')
545 flags.append('-w')
543
546
544 if commit1 == self.EMPTY_COMMIT:
547 if commit1 == self.EMPTY_COMMIT:
545 cmd = ['show'] + flags + [commit2.raw_id]
548 cmd = ['show'] + flags + [commit2.raw_id]
546 else:
549 else:
547 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
550 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
548
551
549 if path:
552 if path:
550 cmd.extend(['--', path])
553 cmd.extend(['--', path])
551
554
552 stdout, __ = self.run_git_command(cmd)
555 stdout, __ = self.run_git_command(cmd)
553 # If we used 'show' command, strip first few lines (until actual diff
556 # If we used 'show' command, strip first few lines (until actual diff
554 # starts)
557 # starts)
555 if commit1 == self.EMPTY_COMMIT:
558 if commit1 == self.EMPTY_COMMIT:
556 lines = stdout.splitlines()
559 lines = stdout.splitlines()
557 x = 0
560 x = 0
558 for line in lines:
561 for line in lines:
559 if line.startswith('diff'):
562 if line.startswith('diff'):
560 break
563 break
561 x += 1
564 x += 1
562 # Append a new line just like the 'diff' command does
565 # Append a new line just like the 'diff' command does
563 stdout = '\n'.join(lines[x:]) + '\n'
566 stdout = '\n'.join(lines[x:]) + '\n'
564 return GitDiff(stdout)
567 return GitDiff(stdout)
565
568
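# Usage sketch (path is a placeholder):
#   old = repo.get_commit(commit_idx=0)
#   new = repo.get_commit()
#   diff = repo.get_diff(old, new, path='setup.py', ignore_whitespace=True)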
566 def strip(self, commit_id, branch_name):
569 def strip(self, commit_id, branch_name):
567 commit = self.get_commit(commit_id=commit_id)
570 commit = self.get_commit(commit_id=commit_id)
568 if commit.merge:
571 if commit.merge:
569 raise Exception('Cannot reset to merge commit')
572 raise Exception('Cannot reset to merge commit')
570
573
571 # parent is going to be the new head now
574 # parent is going to be the new head now
572 commit = commit.parents[0]
575 commit = commit.parents[0]
573 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
576 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
574
577
575 self.commit_ids = self._get_all_commit_ids()
578 self.commit_ids = self._get_all_commit_ids()
576 self._rebuild_cache(self.commit_ids)
579 self._rebuild_cache(self.commit_ids)
577
580
578 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
581 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
579 if commit_id1 == commit_id2:
582 if commit_id1 == commit_id2:
580 return commit_id1
583 return commit_id1
581
584
582 if self != repo2:
585 if self != repo2:
583 commits = self._remote.get_missing_revs(
586 commits = self._remote.get_missing_revs(
584 commit_id1, commit_id2, repo2.path)
587 commit_id1, commit_id2, repo2.path)
585 if commits:
588 if commits:
586 commit = repo2.get_commit(commits[-1])
589 commit = repo2.get_commit(commits[-1])
587 if commit.parents:
590 if commit.parents:
588 ancestor_id = commit.parents[0].raw_id
591 ancestor_id = commit.parents[0].raw_id
589 else:
592 else:
590 ancestor_id = None
593 ancestor_id = None
591 else:
594 else:
592 # no commits from other repo, ancestor_id is the commit_id2
595 # no commits from other repo, ancestor_id is the commit_id2
593 ancestor_id = commit_id2
596 ancestor_id = commit_id2
594 else:
597 else:
595 output, __ = self.run_git_command(
598 output, __ = self.run_git_command(
596 ['merge-base', commit_id1, commit_id2])
599 ['merge-base', commit_id1, commit_id2])
597 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
600 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
598
601
599 return ancestor_id
602 return ancestor_id
600
603
601 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
604 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
602 repo1 = self
605 repo1 = self
603 ancestor_id = None
606 ancestor_id = None
604
607
605 if commit_id1 == commit_id2:
608 if commit_id1 == commit_id2:
606 commits = []
609 commits = []
607 elif repo1 != repo2:
610 elif repo1 != repo2:
608 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
611 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
609 repo2.path)
612 repo2.path)
610 commits = [
613 commits = [
611 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
614 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
612 for commit_id in reversed(missing_ids)]
615 for commit_id in reversed(missing_ids)]
613 else:
616 else:
614 output, __ = repo1.run_git_command(
617 output, __ = repo1.run_git_command(
615 ['log', '--reverse', '--pretty=format: %H', '-s',
618 ['log', '--reverse', '--pretty=format: %H', '-s',
616 '%s..%s' % (commit_id1, commit_id2)])
619 '%s..%s' % (commit_id1, commit_id2)])
617 commits = [
620 commits = [
618 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
621 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
619 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
622 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
620
623
621 return commits
624 return commits
622
625
623 @LazyProperty
626 @LazyProperty
624 def in_memory_commit(self):
627 def in_memory_commit(self):
625 """
628 """
626 Returns ``GitInMemoryCommit`` object for this repository.
629 Returns ``GitInMemoryCommit`` object for this repository.
627 """
630 """
628 return GitInMemoryCommit(self)
631 return GitInMemoryCommit(self)
629
632
630 def clone(self, url, update_after_clone=True, bare=False):
633 def clone(self, url, update_after_clone=True, bare=False):
631 """
634 """
632 Tries to clone commits from external location.
635 Tries to clone commits from external location.
633
636
634 :param update_after_clone: If set to ``False``, git won't checkout
637 :param update_after_clone: If set to ``False``, git won't checkout
635 working directory
638 working directory
636 :param bare: If set to ``True``, repository would be cloned into
639 :param bare: If set to ``True``, repository would be cloned into
637 *bare* git repository (no working directory at all).
640 *bare* git repository (no working directory at all).
638 """
641 """
639 # init_bare and init expect an already-created empty dir in order to proceed
642 # init_bare and init expect an already-created empty dir in order to proceed
640 if not os.path.exists(self.path):
643 if not os.path.exists(self.path):
641 os.mkdir(self.path)
644 os.mkdir(self.path)
642
645
643 if bare:
646 if bare:
644 self._remote.init_bare()
647 self._remote.init_bare()
645 else:
648 else:
646 self._remote.init()
649 self._remote.init()
647
650
648 deferred = '^{}'
651 deferred = '^{}'
649 valid_refs = ('refs/heads', 'refs/tags', 'HEAD')
652 valid_refs = ('refs/heads', 'refs/tags', 'HEAD')
650
653
651 return self._remote.clone(
654 return self._remote.clone(
652 url, deferred, valid_refs, update_after_clone)
655 url, deferred, valid_refs, update_after_clone)
653
656
654 def pull(self, url, commit_ids=None):
657 def pull(self, url, commit_ids=None):
655 """
658 """
656 Tries to pull changes from external location. We use fetch here since
659 Tries to pull changes from external location. We use fetch here since
657 pull in git does merges and we want to be compatible with the hg backend, so
660 pull in git does merges and we want to be compatible with the hg backend, so
658 pull == fetch in this case
661 pull == fetch in this case
659 """
662 """
660 self.fetch(url, commit_ids=commit_ids)
663 self.fetch(url, commit_ids=commit_ids)
661
664
662 def fetch(self, url, commit_ids=None):
665 def fetch(self, url, commit_ids=None):
663 """
666 """
664 Tries to fetch changes from external location.
667 Tries to fetch changes from external location.
665 """
668 """
666 refs = None
669 refs = None
667
670
668 if commit_ids is not None:
671 if commit_ids is not None:
669 remote_refs = self._remote.get_remote_refs(url)
672 remote_refs = self._remote.get_remote_refs(url)
670 refs = [
673 refs = [
671 ref for ref in remote_refs if remote_refs[ref] in commit_ids]
674 ref for ref in remote_refs if remote_refs[ref] in commit_ids]
672 self._remote.fetch(url, refs=refs)
675 self._remote.fetch(url, refs=refs)
673
676
674 def push(self, url):
677 def push(self, url):
675 refs = None
678 refs = None
676 self._remote.sync_push(url, refs=refs)
679 self._remote.sync_push(url, refs=refs)
677
680
678 def set_refs(self, ref_name, commit_id):
681 def set_refs(self, ref_name, commit_id):
679 self._remote.set_refs(ref_name, commit_id)
682 self._remote.set_refs(ref_name, commit_id)
680
683
681 def remove_ref(self, ref_name):
684 def remove_ref(self, ref_name):
682 self._remote.remove_ref(ref_name)
685 self._remote.remove_ref(ref_name)
683
686
684 def _update_server_info(self):
687 def _update_server_info(self):
685 """
688 """
686 runs git's update-server-info command in this repo instance
689 runs git's update-server-info command in this repo instance
687 """
690 """
688 self._remote.update_server_info()
691 self._remote.update_server_info()
689
692
690 def _current_branch(self):
693 def _current_branch(self):
691 """
694 """
692 Return the name of the current branch.
695 Return the name of the current branch.
693
696
694 It only works for non bare repositories (i.e. repositories with a
697 It only works for non bare repositories (i.e. repositories with a
695 working copy)
698 working copy)
696 """
699 """
697 if self.bare:
700 if self.bare:
698 raise RepositoryError('Bare git repos do not have active branches')
701 raise RepositoryError('Bare git repos do not have active branches')
699
702
700 if self.is_empty():
703 if self.is_empty():
701 return None
704 return None
702
705
703 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
706 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
704 return stdout.strip()
707 return stdout.strip()
705
708
706 def _checkout(self, branch_name, create=False, force=False):
709 def _checkout(self, branch_name, create=False, force=False):
707 """
710 """
708 Checkout a branch in the working directory.
711 Checkout a branch in the working directory.
709
712
710 It tries to create the branch if create is True, failing if the branch
713 It tries to create the branch if create is True, failing if the branch
711 already exists.
714 already exists.
712
715
713 It only works for non bare repositories (i.e. repositories with a
716 It only works for non bare repositories (i.e. repositories with a
714 working copy)
717 working copy)
715 """
718 """
716 if self.bare:
719 if self.bare:
717 raise RepositoryError('Cannot checkout branches in a bare git repo')
720 raise RepositoryError('Cannot checkout branches in a bare git repo')
718
721
719 cmd = ['checkout']
722 cmd = ['checkout']
720 if force:
723 if force:
721 cmd.append('-f')
724 cmd.append('-f')
722 if create:
725 if create:
723 cmd.append('-b')
726 cmd.append('-b')
724 cmd.append(branch_name)
727 cmd.append(branch_name)
725 self.run_git_command(cmd, fail_on_stderr=False)
728 self.run_git_command(cmd, fail_on_stderr=False)
726
729
727 def _identify(self):
730 def _identify(self):
728 """
731 """
729 Return the current state of the working directory.
732 Return the current state of the working directory.
730 """
733 """
731 if self.bare:
734 if self.bare:
732 raise RepositoryError('Bare git repos do not have active branches')
735 raise RepositoryError('Bare git repos do not have active branches')
733
736
734 if self.is_empty():
737 if self.is_empty():
735 return None
738 return None
736
739
737 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
740 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
738 return stdout.strip()
741 return stdout.strip()
739
742
740 def _local_clone(self, clone_path, branch_name, source_branch=None):
743 def _local_clone(self, clone_path, branch_name, source_branch=None):
741 """
744 """
742 Create a local clone of the current repo.
745 Create a local clone of the current repo.
743 """
746 """
744 # N.B.(skreft): the --branch option is required as otherwise the shallow
747 # N.B.(skreft): the --branch option is required as otherwise the shallow
745 # clone will only fetch the active branch.
748 # clone will only fetch the active branch.
746 cmd = ['clone', '--branch', branch_name,
749 cmd = ['clone', '--branch', branch_name,
747 self.path, os.path.abspath(clone_path)]
750 self.path, os.path.abspath(clone_path)]
748
751
749 self.run_git_command(cmd, fail_on_stderr=False)
752 self.run_git_command(cmd, fail_on_stderr=False)
750
753
751 # if we get a different source branch, make sure we also fetch it for
754 # if we get a different source branch, make sure we also fetch it for
752 # merge conditions
755 # merge conditions
753 if source_branch and source_branch != branch_name:
756 if source_branch and source_branch != branch_name:
754 # check if the ref exists.
757 # check if the ref exists.
755 shadow_repo = GitRepository(os.path.abspath(clone_path))
758 shadow_repo = GitRepository(os.path.abspath(clone_path))
756 if shadow_repo.get_remote_ref(source_branch):
759 if shadow_repo.get_remote_ref(source_branch):
757 cmd = ['fetch', self.path, source_branch]
760 cmd = ['fetch', self.path, source_branch]
758 self.run_git_command(cmd, fail_on_stderr=False)
761 self.run_git_command(cmd, fail_on_stderr=False)
759
762
760 def _local_fetch(self, repository_path, branch_name, use_origin=False):
763 def _local_fetch(self, repository_path, branch_name, use_origin=False):
761 """
764 """
762 Fetch a branch from a local repository.
765 Fetch a branch from a local repository.
763 """
766 """
764 repository_path = os.path.abspath(repository_path)
767 repository_path = os.path.abspath(repository_path)
765 if repository_path == self.path:
768 if repository_path == self.path:
766 raise ValueError('Cannot fetch from the same repository')
769 raise ValueError('Cannot fetch from the same repository')
767
770
768 if use_origin:
771 if use_origin:
769 branch_name = '+{branch}:refs/heads/{branch}'.format(
772 branch_name = '+{branch}:refs/heads/{branch}'.format(
770 branch=branch_name)
773 branch=branch_name)
771
774
772 cmd = ['fetch', '--no-tags', '--update-head-ok',
775 cmd = ['fetch', '--no-tags', '--update-head-ok',
773 repository_path, branch_name]
776 repository_path, branch_name]
774 self.run_git_command(cmd, fail_on_stderr=False)
777 self.run_git_command(cmd, fail_on_stderr=False)
775
778
776 def _local_reset(self, branch_name):
779 def _local_reset(self, branch_name):
777 branch_name = '{}'.format(branch_name)
780 branch_name = '{}'.format(branch_name)
778 cmd = ['reset', '--hard', branch_name]
781 cmd = ['reset', '--hard', branch_name]
779 self.run_git_command(cmd, fail_on_stderr=False)
782 self.run_git_command(cmd, fail_on_stderr=False)
780
783
781 def _last_fetch_heads(self):
784 def _last_fetch_heads(self):
782 """
785 """
783 Return the last fetched heads that need merging.
786 Return the last fetched heads that need merging.
784
787
785 The algorithm is defined at
788 The algorithm is defined at
786 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
789 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
787 """
790 """
788 if not self.bare:
791 if not self.bare:
789 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
792 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
790 else:
793 else:
791 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
794 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
792
795
793 heads = []
796 heads = []
794 with open(fetch_heads_path) as f:
797 with open(fetch_heads_path) as f:
795 for line in f:
798 for line in f:
796 if ' not-for-merge ' in line:
799 if ' not-for-merge ' in line:
797 continue
800 continue
798 line = re.sub('\t.*', '', line, flags=re.DOTALL)
801 line = re.sub('\t.*', '', line, flags=re.DOTALL)
799 heads.append(line)
802 heads.append(line)
800
803
801 return heads
804 return heads
802
805
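# FETCH_HEAD entries look roughly like (illustrative):
#   <sha>\t\tbranch 'master' of /path/to/source
#   <sha>\tnot-for-merge\tbranch 'feature' of /path/to/source
# not-for-merge entries are skipped and everything from the first tab onwards
# is stripped, so only the shas of heads that still need merging remain.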
803 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
806 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
804 return GitRepository(shadow_repository_path)
807 return GitRepository(shadow_repository_path)
805
808
806 def _local_pull(self, repository_path, branch_name, ff_only=True):
809 def _local_pull(self, repository_path, branch_name, ff_only=True):
807 """
810 """
808 Pull a branch from a local repository.
811 Pull a branch from a local repository.
809 """
812 """
810 if self.bare:
813 if self.bare:
811 raise RepositoryError('Cannot pull into a bare git repository')
814 raise RepositoryError('Cannot pull into a bare git repository')
812 # N.B.(skreft): The --ff-only option is to make sure this is a
815 # N.B.(skreft): The --ff-only option is to make sure this is a
813 # fast-forward (i.e., we are only pulling new changes and there are no
816 # fast-forward (i.e., we are only pulling new changes and there are no
814 # conflicts with our current branch)
817 # conflicts with our current branch)
815 # Additionally, that option needs to go before --no-tags, otherwise git
818 # Additionally, that option needs to go before --no-tags, otherwise git
816 # pull complains about it being an unknown flag.
819 # pull complains about it being an unknown flag.
817 cmd = ['pull']
820 cmd = ['pull']
818 if ff_only:
821 if ff_only:
819 cmd.append('--ff-only')
822 cmd.append('--ff-only')
820 cmd.extend(['--no-tags', repository_path, branch_name])
823 cmd.extend(['--no-tags', repository_path, branch_name])
821 self.run_git_command(cmd, fail_on_stderr=False)
824 self.run_git_command(cmd, fail_on_stderr=False)
822
825
823 def _local_merge(self, merge_message, user_name, user_email, heads):
826 def _local_merge(self, merge_message, user_name, user_email, heads):
824 """
827 """
825 Merge the given head into the checked out branch.
828 Merge the given head into the checked out branch.
826
829
827 It will force a merge commit.
830 It will force a merge commit.
828
831
829 Currently it raises an error if the repo is empty, as it is not possible
832 Currently it raises an error if the repo is empty, as it is not possible
830 to create a merge commit in an empty repo.
833 to create a merge commit in an empty repo.
831
834
832 :param merge_message: The message to use for the merge commit.
835 :param merge_message: The message to use for the merge commit.
833 :param heads: the heads to merge.
836 :param heads: the heads to merge.
834 """
837 """
835 if self.bare:
838 if self.bare:
836 raise RepositoryError('Cannot merge into a bare git repository')
839 raise RepositoryError('Cannot merge into a bare git repository')
837
840
838 if not heads:
841 if not heads:
839 return
842 return
840
843
841 if self.is_empty():
844 if self.is_empty():
842 # TODO(skreft): do something more robust in this case.
845 # TODO(skreft): do something more robust in this case.
843 raise RepositoryError(
846 raise RepositoryError(
844 'Do not know how to merge into empty repositories yet')
847 'Do not know how to merge into empty repositories yet')
845
848
846 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
849 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
847 # merge commit. We also specify the user who is doing the merge.
850 # merge commit. We also specify the user who is doing the merge.
848 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
851 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
849 '-c', 'user.email=%s' % safe_str(user_email),
852 '-c', 'user.email=%s' % safe_str(user_email),
850 'merge', '--no-ff', '-m', safe_str(merge_message)]
853 'merge', '--no-ff', '-m', safe_str(merge_message)]
851 cmd.extend(heads)
854 cmd.extend(heads)
852 try:
855 try:
853 output = self.run_git_command(cmd, fail_on_stderr=False)
856 output = self.run_git_command(cmd, fail_on_stderr=False)
854 except RepositoryError:
857 except RepositoryError:
855 # Cleanup any merge leftovers
858 # Cleanup any merge leftovers
856 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
859 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
857 raise
860 raise
858
861
859 def _local_push(
862 def _local_push(
860 self, source_branch, repository_path, target_branch,
863 self, source_branch, repository_path, target_branch,
861 enable_hooks=False, rc_scm_data=None):
864 enable_hooks=False, rc_scm_data=None):
862 """
865 """
863 Push the source_branch to the given repository and target_branch.
866 Push the source_branch to the given repository and target_branch.
864
867
865 Currently, if the target_branch is not master and the target repo is
868 Currently, if the target_branch is not master and the target repo is
866 empty, the push will work, but then GitRepository won't be able to find
869 empty, the push will work, but then GitRepository won't be able to find
867 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
870 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
868 pointing to master, which does not exist).
871 pointing to master, which does not exist).
869
872
870 It does not run the hooks in the target repo.
873 It does not run the hooks in the target repo.
871 """
874 """
872 # TODO(skreft): deal with the case in which the target repo is empty,
875 # TODO(skreft): deal with the case in which the target repo is empty,
873 # and the target_branch is not master.
876 # and the target_branch is not master.
874 target_repo = GitRepository(repository_path)
877 target_repo = GitRepository(repository_path)
875 if (not target_repo.bare and
878 if (not target_repo.bare and
876 target_repo._current_branch() == target_branch):
879 target_repo._current_branch() == target_branch):
877 # Git prevents pushing to the checked out branch, so simulate it by
880 # Git prevents pushing to the checked out branch, so simulate it by
878 # pulling into the target repository.
881 # pulling into the target repository.
879 target_repo._local_pull(self.path, source_branch)
882 target_repo._local_pull(self.path, source_branch)
880 else:
883 else:
881 cmd = ['push', os.path.abspath(repository_path),
884 cmd = ['push', os.path.abspath(repository_path),
882 '%s:%s' % (source_branch, target_branch)]
885 '%s:%s' % (source_branch, target_branch)]
883 gitenv = {}
886 gitenv = {}
884 if rc_scm_data:
887 if rc_scm_data:
885 gitenv.update({'RC_SCM_DATA': rc_scm_data})
888 gitenv.update({'RC_SCM_DATA': rc_scm_data})
886
889
887 if not enable_hooks:
890 if not enable_hooks:
888 gitenv['RC_SKIP_HOOKS'] = '1'
891 gitenv['RC_SKIP_HOOKS'] = '1'
889 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
892 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
890
893
891 def _get_new_pr_branch(self, source_branch, target_branch):
894 def _get_new_pr_branch(self, source_branch, target_branch):
892 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
895 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
893 pr_branches = []
896 pr_branches = []
894 for branch in self.branches:
897 for branch in self.branches:
895 if branch.startswith(prefix):
898 if branch.startswith(prefix):
896 pr_branches.append(int(branch[len(prefix):]))
899 pr_branches.append(int(branch[len(prefix):]))
897
900
898 if not pr_branches:
901 if not pr_branches:
899 branch_id = 0
902 branch_id = 0
900 else:
903 else:
901 branch_id = max(pr_branches) + 1
904 branch_id = max(pr_branches) + 1
902
905
903 return '%s%d' % (prefix, branch_id)
906 return '%s%d' % (prefix, branch_id)
904
907
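# Generated names follow 'pr_<source>-<target>_<n>', e.g. the first merge
# branch for 'feature' into 'master' would be 'pr_feature-master_0'
# (branch names here are placeholders).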
905 def _maybe_prepare_merge_workspace(
908 def _maybe_prepare_merge_workspace(
906 self, repo_id, workspace_id, target_ref, source_ref):
909 self, repo_id, workspace_id, target_ref, source_ref):
907 shadow_repository_path = self._get_shadow_repository_path(
910 shadow_repository_path = self._get_shadow_repository_path(
908 repo_id, workspace_id)
911 repo_id, workspace_id)
909 if not os.path.exists(shadow_repository_path):
912 if not os.path.exists(shadow_repository_path):
910 self._local_clone(
913 self._local_clone(
911 shadow_repository_path, target_ref.name, source_ref.name)
914 shadow_repository_path, target_ref.name, source_ref.name)
912 log.debug(
915 log.debug(
913 'Prepared shadow repository in %s', shadow_repository_path)
916 'Prepared shadow repository in %s', shadow_repository_path)
914
917
915 return shadow_repository_path
918 return shadow_repository_path
916
919
917 def _merge_repo(self, repo_id, workspace_id, target_ref,
920 def _merge_repo(self, repo_id, workspace_id, target_ref,
918 source_repo, source_ref, merge_message,
921 source_repo, source_ref, merge_message,
919 merger_name, merger_email, dry_run=False,
922 merger_name, merger_email, dry_run=False,
920 use_rebase=False, close_branch=False):
923 use_rebase=False, close_branch=False):
921 if target_ref.commit_id != self.branches[target_ref.name]:
924 if target_ref.commit_id != self.branches[target_ref.name]:
922 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
925 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
923 target_ref.commit_id, self.branches[target_ref.name])
926 target_ref.commit_id, self.branches[target_ref.name])
924 return MergeResponse(
927 return MergeResponse(
925 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
928 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
926
929
927 shadow_repository_path = self._maybe_prepare_merge_workspace(
930 shadow_repository_path = self._maybe_prepare_merge_workspace(
928 repo_id, workspace_id, target_ref, source_ref)
931 repo_id, workspace_id, target_ref, source_ref)
929 shadow_repo = self._get_shadow_instance(shadow_repository_path)
932 shadow_repo = self._get_shadow_instance(shadow_repository_path)
930
933
931 # checkout source, if it's different. Otherwise we could not
934 # checkout source, if it's different. Otherwise we could not
932 # fetch proper commits for merge testing
935 # fetch proper commits for merge testing
933 if source_ref.name != target_ref.name:
936 if source_ref.name != target_ref.name:
934 if shadow_repo.get_remote_ref(source_ref.name):
937 if shadow_repo.get_remote_ref(source_ref.name):
935 shadow_repo._checkout(source_ref.name, force=True)
938 shadow_repo._checkout(source_ref.name, force=True)
936
939
937 # checkout target, and fetch changes
940 # checkout target, and fetch changes
938 shadow_repo._checkout(target_ref.name, force=True)
941 shadow_repo._checkout(target_ref.name, force=True)
939
942
940 # fetch/reset the target, in case it has changed;
943 # fetch/reset the target, in case it has changed;
941 # this handles even forced changes
944 # this handles even forced changes
942 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
945 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
943 shadow_repo._local_reset(target_ref.name)
946 shadow_repo._local_reset(target_ref.name)
944
947
945 # Need to reload repo to invalidate the cache, or otherwise we cannot
948 # Need to reload repo to invalidate the cache, or otherwise we cannot
946 # retrieve the last target commit.
949 # retrieve the last target commit.
947 shadow_repo = self._get_shadow_instance(shadow_repository_path)
950 shadow_repo = self._get_shadow_instance(shadow_repository_path)
948 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
951 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
949 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
952 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
950 target_ref, target_ref.commit_id,
953 target_ref, target_ref.commit_id,
951 shadow_repo.branches[target_ref.name])
954 shadow_repo.branches[target_ref.name])
952 return MergeResponse(
955 return MergeResponse(
953 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
956 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
954
957
955 # calculate new branch
958 # calculate new branch
956 pr_branch = shadow_repo._get_new_pr_branch(
959 pr_branch = shadow_repo._get_new_pr_branch(
957 source_ref.name, target_ref.name)
960 source_ref.name, target_ref.name)
958 log.debug('using pull-request merge branch: `%s`', pr_branch)
961 log.debug('using pull-request merge branch: `%s`', pr_branch)
959 # checkout to temp branch, and fetch changes
962 # checkout to temp branch, and fetch changes
960 shadow_repo._checkout(pr_branch, create=True)
963 shadow_repo._checkout(pr_branch, create=True)
961 try:
964 try:
962 shadow_repo._local_fetch(source_repo.path, source_ref.name)
965 shadow_repo._local_fetch(source_repo.path, source_ref.name)
963 except RepositoryError:
966 except RepositoryError:
964 log.exception('Failure when doing local fetch on git shadow repo')
967 log.exception('Failure when doing local fetch on git shadow repo')
965 return MergeResponse(
968 return MergeResponse(
966 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
969 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
967
970
968 merge_ref = None
971 merge_ref = None
969 merge_failure_reason = MergeFailureReason.NONE
972 merge_failure_reason = MergeFailureReason.NONE
970 try:
973 try:
971 shadow_repo._local_merge(merge_message, merger_name, merger_email,
974 shadow_repo._local_merge(merge_message, merger_name, merger_email,
972 [source_ref.commit_id])
975 [source_ref.commit_id])
973 merge_possible = True
976 merge_possible = True
974
977
975 # Need to reload repo to invalidate the cache, or otherwise we
978 # Need to reload repo to invalidate the cache, or otherwise we
976 # cannot retrieve the merge commit.
979 # cannot retrieve the merge commit.
977 shadow_repo = GitRepository(shadow_repository_path)
980 shadow_repo = GitRepository(shadow_repository_path)
978 merge_commit_id = shadow_repo.branches[pr_branch]
981 merge_commit_id = shadow_repo.branches[pr_branch]
979
982
980 # Set a reference pointing to the merge commit. This reference may
983 # Set a reference pointing to the merge commit. This reference may
981 # be used to easily identify the last successful merge commit in
984 # be used to easily identify the last successful merge commit in
982 # the shadow repository.
985 # the shadow repository.
983 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
986 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
984 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
987 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
985 except RepositoryError:
988 except RepositoryError:
986 log.exception('Failure when doing local merge on git shadow repo')
989 log.exception('Failure when doing local merge on git shadow repo')
987 merge_possible = False
990 merge_possible = False
988 merge_failure_reason = MergeFailureReason.MERGE_FAILED
991 merge_failure_reason = MergeFailureReason.MERGE_FAILED
989
992
990 if merge_possible and not dry_run:
993 if merge_possible and not dry_run:
991 try:
994 try:
992 shadow_repo._local_push(
995 shadow_repo._local_push(
993 pr_branch, self.path, target_ref.name, enable_hooks=True,
996 pr_branch, self.path, target_ref.name, enable_hooks=True,
994 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
997 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
995 merge_succeeded = True
998 merge_succeeded = True
996 except RepositoryError:
999 except RepositoryError:
997 log.exception(
1000 log.exception(
998 'Failure when doing local push on git shadow repo')
1001 'Failure when doing local push on git shadow repo')
999 merge_succeeded = False
1002 merge_succeeded = False
1000 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1003 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1001 else:
1004 else:
1002 merge_succeeded = False
1005 merge_succeeded = False
1003
1006
1004 return MergeResponse(
1007 return MergeResponse(
1005 merge_possible, merge_succeeded, merge_ref,
1008 merge_possible, merge_succeeded, merge_ref,
1006 merge_failure_reason)
1009 merge_failure_reason)
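
For readers following the Git merge flow above, a hedged sketch of how a caller could unpack the response object. The field order (possible, executed, merge reference, failure reason) is inferred from the positional MergeResponse(...) calls visible in this file; the helper function itself is hypothetical.

def summarize_merge(merge_response):
    # assumes MergeResponse unpacks in the same order it is constructed above
    possible, executed, merge_ref, failure_reason = merge_response
    if not possible:
        return 'merge not possible, reason code: %s' % (failure_reason,)
    if not executed:
        return 'dry run only, merge would produce %s' % (merge_ref.commit_id,)
    return 'merged as %s' % (merge_ref.commit_id,)
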
@@ -1,915 +1,917 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, exceptions
35 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference, BasePathPermissionChecker)
38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 from rhodecode.lib.vcs.compat import configparser
45 from rhodecode.lib.vcs.compat import configparser
46
46
47 hexlify = binascii.hexlify
47 hexlify = binascii.hexlify
48 nullid = "\0" * 20
48 nullid = "\0" * 20
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class MercurialRepository(BaseRepository):
53 class MercurialRepository(BaseRepository):
54 """
54 """
55 Mercurial repository backend
55 Mercurial repository backend
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'default'
57 DEFAULT_BRANCH_NAME = 'default'
58
58
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 update_after_clone=False, with_wire=None):
60 update_after_clone=False, with_wire=None):
61 """
61 """
62 Raises RepositoryError if repository could not be find at the given
62 Raises RepositoryError if repository could not be find at the given
63 ``repo_path``.
63 ``repo_path``.
64
64
65 :param repo_path: local path of the repository
65 :param repo_path: local path of the repository
66 :param config: config object containing the repo configuration
66 :param config: config object containing the repo configuration
67 :param create=False: if set to True, would try to create repository if
67 :param create=False: if set to True, would try to create repository if
68 it does not exist rather than raising exception
68 it does not exist rather than raising exception
69 :param src_url=None: would try to clone repository from given location
69 :param src_url=None: would try to clone repository from given location
70 :param update_after_clone=False: sets update of working copy after
70 :param update_after_clone=False: sets update of working copy after
71 making a clone
71 making a clone
72 """
72 """
73
73
74 self.path = safe_str(os.path.abspath(repo_path))
74 self.path = safe_str(os.path.abspath(repo_path))
75 # mercurial since 4.4.X requires certain configuration to be present
75 # mercurial since 4.4.X requires certain configuration to be present
76 # because sometimes we init the repos with config we need to meet
76 # because sometimes we init the repos with config we need to meet
77 # special requirements
77 # special requirements
78 self.config = config if config else self.get_default_config(
78 self.config = config if config else self.get_default_config(
79 default=[('extensions', 'largefiles', '1')])
79 default=[('extensions', 'largefiles', '1')])
80
80 self.with_wire = with_wire
81 self._remote = connection.Hg(
82 self.path, self.config, with_wire=with_wire)
83
81
84 self._init_repo(create, src_url, update_after_clone)
82 self._init_repo(create, src_url, update_after_clone)
85
83
86 # caches
84 # caches
87 self._commit_ids = {}
85 self._commit_ids = {}
88
86
89 @LazyProperty
87 @LazyProperty
88 def _remote(self):
89 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
90
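
The hunk above is the heart of this commit: instead of opening the Hg connection eagerly in __init__, the repository now only stores with_wire and builds _remote lazily on first access, which keeps cached repository objects from dragging a live gevent/curl connection around. A minimal, self-contained sketch of the memoizing behaviour that zope's Lazy (aliased to LazyProperty) provides, written here only for illustration:

class LazyPropertySketch(object):
    # non-data descriptor: compute once, then cache the value on the instance
    # so later attribute lookups bypass the descriptor entirely
    def __init__(self, func):
        self._func = func
        self.__name__ = func.__name__

    def __get__(self, obj, owner=None):
        if obj is None:
            return self
        value = self._func(obj)
        obj.__dict__[self.__name__] = value
        return value


class RepoSketch(object):
    def __init__(self, path, with_wire=None):
        # only cheap, cache-friendly state is set up front
        self.path = path
        self.with_wire = with_wire

    @LazyPropertySketch
    def _remote(self):
        # the expensive connection object is created on first use
        return ('connection for', self.path, self.with_wire)
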
91 @LazyProperty
90 def commit_ids(self):
92 def commit_ids(self):
91 """
93 """
92 Returns list of commit ids, in ascending order. Being lazy
94 Returns list of commit ids, in ascending order. Being lazy
93 attribute allows external tools to inject shas from cache.
95 attribute allows external tools to inject shas from cache.
94 """
96 """
95 commit_ids = self._get_all_commit_ids()
97 commit_ids = self._get_all_commit_ids()
96 self._rebuild_cache(commit_ids)
98 self._rebuild_cache(commit_ids)
97 return commit_ids
99 return commit_ids
98
100
99 def _rebuild_cache(self, commit_ids):
101 def _rebuild_cache(self, commit_ids):
100 self._commit_ids = dict((commit_id, index)
102 self._commit_ids = dict((commit_id, index)
101 for index, commit_id in enumerate(commit_ids))
103 for index, commit_id in enumerate(commit_ids))
102
104
103 @LazyProperty
105 @LazyProperty
104 def branches(self):
106 def branches(self):
105 return self._get_branches()
107 return self._get_branches()
106
108
107 @LazyProperty
109 @LazyProperty
108 def branches_closed(self):
110 def branches_closed(self):
109 return self._get_branches(active=False, closed=True)
111 return self._get_branches(active=False, closed=True)
110
112
111 @LazyProperty
113 @LazyProperty
112 def branches_all(self):
114 def branches_all(self):
113 all_branches = {}
115 all_branches = {}
114 all_branches.update(self.branches)
116 all_branches.update(self.branches)
115 all_branches.update(self.branches_closed)
117 all_branches.update(self.branches_closed)
116 return all_branches
118 return all_branches
117
119
118 def _get_branches(self, active=True, closed=False):
120 def _get_branches(self, active=True, closed=False):
119 """
121 """
120 Gets branches for this repository
122 Gets branches for this repository
121 By default, returns only active branches that are not closed
123 By default, returns only active branches that are not closed
122
124
123 :param active: return also active branches
125 :param active: return also active branches
124 :param closed: return also closed branches
126 :param closed: return also closed branches
125
127
126 """
128 """
127 if self.is_empty():
129 if self.is_empty():
128 return {}
130 return {}
129
131
130 def get_name(ctx):
132 def get_name(ctx):
131 return ctx[0]
133 return ctx[0]
132
134
133 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
135 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
134 self._remote.branches(active, closed).items()]
136 self._remote.branches(active, closed).items()]
135
137
136 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
138 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
137
139
138 @LazyProperty
140 @LazyProperty
139 def tags(self):
141 def tags(self):
140 """
142 """
141 Gets tags for this repository
143 Gets tags for this repository
142 """
144 """
143 return self._get_tags()
145 return self._get_tags()
144
146
145 def _get_tags(self):
147 def _get_tags(self):
146 if self.is_empty():
148 if self.is_empty():
147 return {}
149 return {}
148
150
149 def get_name(ctx):
151 def get_name(ctx):
150 return ctx[0]
152 return ctx[0]
151
153
152 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
154 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
153 self._remote.tags().items()]
155 self._remote.tags().items()]
154
156
155 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
157 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
156
158
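
A hedged usage sketch of the two mappings built above; the repository path and printed values are made up, and the import is assumed to be re-exported by the backends.hg package:

from rhodecode.lib.vcs.backends.hg import MercurialRepository

repo = MercurialRepository('/srv/repos/example-hg')    # hypothetical path
for name, commit_id in repo.branches.items():          # OrderedDict, sorted by name
    print('branch %s -> %s' % (name, commit_id))
for name, commit_id in repo.tags.items():              # sorted by name, reversed
    print('tag    %s -> %s' % (name, commit_id))
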
157 def tag(self, name, user, commit_id=None, message=None, date=None,
159 def tag(self, name, user, commit_id=None, message=None, date=None,
158 **kwargs):
160 **kwargs):
159 """
161 """
160 Creates and returns a tag for the given ``commit_id``.
162 Creates and returns a tag for the given ``commit_id``.
161
163
162 :param name: name for new tag
164 :param name: name for new tag
163 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
165 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
164 :param commit_id: commit id for which new tag would be created
166 :param commit_id: commit id for which new tag would be created
165 :param message: message of the tag's commit
167 :param message: message of the tag's commit
166 :param date: date of tag's commit
168 :param date: date of tag's commit
167
169
168 :raises TagAlreadyExistError: if tag with same name already exists
170 :raises TagAlreadyExistError: if tag with same name already exists
169 """
171 """
170 if name in self.tags:
172 if name in self.tags:
171 raise TagAlreadyExistError("Tag %s already exists" % name)
173 raise TagAlreadyExistError("Tag %s already exists" % name)
172 commit = self.get_commit(commit_id=commit_id)
174 commit = self.get_commit(commit_id=commit_id)
173 local = kwargs.setdefault('local', False)
175 local = kwargs.setdefault('local', False)
174
176
175 if message is None:
177 if message is None:
176 message = "Added tag %s for commit %s" % (name, commit.short_id)
178 message = "Added tag %s for commit %s" % (name, commit.short_id)
177
179
178 date, tz = date_to_timestamp_plus_offset(date)
180 date, tz = date_to_timestamp_plus_offset(date)
179
181
180 self._remote.tag(
182 self._remote.tag(
181 name, commit.raw_id, message, local, user, date, tz)
183 name, commit.raw_id, message, local, user, date, tz)
182 self._remote.invalidate_vcs_cache()
184 self._remote.invalidate_vcs_cache()
183
185
184 # Reinitialize tags
186 # Reinitialize tags
185 self.tags = self._get_tags()
187 self.tags = self._get_tags()
186 tag_id = self.tags[name]
188 tag_id = self.tags[name]
187
189
188 return self.get_commit(commit_id=tag_id)
190 return self.get_commit(commit_id=tag_id)
189
191
190 def remove_tag(self, name, user, message=None, date=None):
192 def remove_tag(self, name, user, message=None, date=None):
191 """
193 """
192 Removes tag with the given `name`.
194 Removes tag with the given `name`.
193
195
194 :param name: name of the tag to be removed
196 :param name: name of the tag to be removed
195 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
197 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
196 :param message: message of the tag's removal commit
198 :param message: message of the tag's removal commit
197 :param date: date of tag's removal commit
199 :param date: date of tag's removal commit
198
200
199 :raises TagDoesNotExistError: if a tag with the given name does not exist
201 :raises TagDoesNotExistError: if a tag with the given name does not exist
200 """
202 """
201 if name not in self.tags:
203 if name not in self.tags:
202 raise TagDoesNotExistError("Tag %s does not exist" % name)
204 raise TagDoesNotExistError("Tag %s does not exist" % name)
203 if message is None:
205 if message is None:
204 message = "Removed tag %s" % name
206 message = "Removed tag %s" % name
205 local = False
207 local = False
206
208
207 date, tz = date_to_timestamp_plus_offset(date)
209 date, tz = date_to_timestamp_plus_offset(date)
208
210
209 self._remote.tag(name, nullid, message, local, user, date, tz)
211 self._remote.tag(name, nullid, message, local, user, date, tz)
210 self._remote.invalidate_vcs_cache()
212 self._remote.invalidate_vcs_cache()
211 self.tags = self._get_tags()
213 self.tags = self._get_tags()
212
214
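
Usage sketch for the tag API above, reusing the repo instance from the earlier sketch; the tag name, user string and messages are illustrative:

tip = repo.get_commit()                                  # defaults to "tip"
repo.tag('v1.0.0', user='Joe Doe <joe.doe@example.com>',
         commit_id=tip.raw_id, message='Tagging release 1.0.0')
assert 'v1.0.0' in repo.tags
repo.remove_tag('v1.0.0', user='Joe Doe <joe.doe@example.com>',
                message='Removing test tag')
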
213 @LazyProperty
215 @LazyProperty
214 def bookmarks(self):
216 def bookmarks(self):
215 """
217 """
216 Gets bookmarks for this repository
218 Gets bookmarks for this repository
217 """
219 """
218 return self._get_bookmarks()
220 return self._get_bookmarks()
219
221
220 def _get_bookmarks(self):
222 def _get_bookmarks(self):
221 if self.is_empty():
223 if self.is_empty():
222 return {}
224 return {}
223
225
224 def get_name(ctx):
226 def get_name(ctx):
225 return ctx[0]
227 return ctx[0]
226
228
227 _bookmarks = [
229 _bookmarks = [
228 (safe_unicode(n), hexlify(h)) for n, h in
230 (safe_unicode(n), hexlify(h)) for n, h in
229 self._remote.bookmarks().items()]
231 self._remote.bookmarks().items()]
230
232
231 return OrderedDict(sorted(_bookmarks, key=get_name))
233 return OrderedDict(sorted(_bookmarks, key=get_name))
232
234
233 def _get_all_commit_ids(self):
235 def _get_all_commit_ids(self):
234 return self._remote.get_all_commit_ids('visible')
236 return self._remote.get_all_commit_ids('visible')
235
237
236 def get_diff(
238 def get_diff(
237 self, commit1, commit2, path='', ignore_whitespace=False,
239 self, commit1, commit2, path='', ignore_whitespace=False,
238 context=3, path1=None):
240 context=3, path1=None):
239 """
241 """
240 Returns (git like) *diff*, as plain text. Shows changes introduced by
242 Returns (git like) *diff*, as plain text. Shows changes introduced by
241 `commit2` since `commit1`.
243 `commit2` since `commit1`.
242
244
243 :param commit1: Entry point from which diff is shown. Can be
245 :param commit1: Entry point from which diff is shown. Can be
244 ``self.EMPTY_COMMIT`` - in this case, patch showing all
246 ``self.EMPTY_COMMIT`` - in this case, patch showing all
245 the changes since empty state of the repository until `commit2`
247 the changes since empty state of the repository until `commit2`
246 :param commit2: Until which commit changes should be shown.
248 :param commit2: Until which commit changes should be shown.
247 :param ignore_whitespace: If set to ``True``, would not show whitespace
249 :param ignore_whitespace: If set to ``True``, would not show whitespace
248 changes. Defaults to ``False``.
250 changes. Defaults to ``False``.
249 :param context: How many lines before/after changed lines should be
251 :param context: How many lines before/after changed lines should be
250 shown. Defaults to ``3``.
252 shown. Defaults to ``3``.
251 """
253 """
252 self._validate_diff_commits(commit1, commit2)
254 self._validate_diff_commits(commit1, commit2)
253 if path1 is not None and path1 != path:
255 if path1 is not None and path1 != path:
254 raise ValueError("Diff of two different paths not supported.")
256 raise ValueError("Diff of two different paths not supported.")
255
257
256 if path:
258 if path:
257 file_filter = [self.path, path]
259 file_filter = [self.path, path]
258 else:
260 else:
259 file_filter = None
261 file_filter = None
260
262
261 diff = self._remote.diff(
263 diff = self._remote.diff(
262 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
264 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
263 opt_git=True, opt_ignorews=ignore_whitespace,
265 opt_git=True, opt_ignorews=ignore_whitespace,
264 context=context)
266 context=context)
265 return MercurialDiff(diff)
267 return MercurialDiff(diff)
266
268
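
A hedged example of the diff call above, again reusing the repo instance from the earlier sketches; the path filter is illustrative:

old = repo.get_commit(commit_idx=0)          # first commit in the repository
new = repo.get_commit()                      # tip
diff = repo.get_diff(old, new, path='setup.py',
                     ignore_whitespace=True, context=5)   # MercurialDiff instance
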
267 def strip(self, commit_id, branch=None):
269 def strip(self, commit_id, branch=None):
268 self._remote.strip(commit_id, update=False, backup="none")
270 self._remote.strip(commit_id, update=False, backup="none")
269
271
270 self._remote.invalidate_vcs_cache()
272 self._remote.invalidate_vcs_cache()
271 self.commit_ids = self._get_all_commit_ids()
273 self.commit_ids = self._get_all_commit_ids()
272 self._rebuild_cache(self.commit_ids)
274 self._rebuild_cache(self.commit_ids)
273
275
274 def verify(self):
276 def verify(self):
275 verify = self._remote.verify()
277 verify = self._remote.verify()
276
278
277 self._remote.invalidate_vcs_cache()
279 self._remote.invalidate_vcs_cache()
278 return verify
280 return verify
279
281
280 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
282 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
281 if commit_id1 == commit_id2:
283 if commit_id1 == commit_id2:
282 return commit_id1
284 return commit_id1
283
285
284 ancestors = self._remote.revs_from_revspec(
286 ancestors = self._remote.revs_from_revspec(
285 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
287 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
286 other_path=repo2.path)
288 other_path=repo2.path)
287 return repo2[ancestors[0]].raw_id if ancestors else None
289 return repo2[ancestors[0]].raw_id if ancestors else None
288
290
289 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
291 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
290 if commit_id1 == commit_id2:
292 if commit_id1 == commit_id2:
291 commits = []
293 commits = []
292 else:
294 else:
293 if merge:
295 if merge:
294 indexes = self._remote.revs_from_revspec(
296 indexes = self._remote.revs_from_revspec(
295 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
297 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
296 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
298 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
297 else:
299 else:
298 indexes = self._remote.revs_from_revspec(
300 indexes = self._remote.revs_from_revspec(
299 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
301 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
300 commit_id1, other_path=repo2.path)
302 commit_id1, other_path=repo2.path)
301
303
302 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
304 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
303 for idx in indexes]
305 for idx in indexes]
304
306
305 return commits
307 return commits
306
308
307 @staticmethod
309 @staticmethod
308 def check_url(url, config):
310 def check_url(url, config):
309 """
311 """
310 Function will check the given url and try to verify if it's a valid
312 Function will check the given url and try to verify if it's a valid
311 link. Sometimes it may happen that mercurial will issue a basic
313 link. Sometimes it may happen that mercurial will issue a basic
312 auth request that can cause the whole API to hang when used from python
314 auth request that can cause the whole API to hang when used from python
313 or other external calls.
315 or other external calls.
314
316
315 On failures it'll raise urllib2.HTTPError; an exception is also thrown
317 On failures it'll raise urllib2.HTTPError; an exception is also thrown
316 when the return code is not 200
318 when the return code is not 200
317 """
319 """
318 # check first if it's not a local url
320 # check first if it's not a local url
319 if os.path.isdir(url) or url.startswith('file:'):
321 if os.path.isdir(url) or url.startswith('file:'):
320 return True
322 return True
321
323
322 # Request the _remote to verify the url
324 # Request the _remote to verify the url
323 return connection.Hg.check_url(url, config.serialize())
325 return connection.Hg.check_url(url, config.serialize())
324
326
325 @staticmethod
327 @staticmethod
326 def is_valid_repository(path):
328 def is_valid_repository(path):
327 return os.path.isdir(os.path.join(path, '.hg'))
329 return os.path.isdir(os.path.join(path, '.hg'))
328
330
329 def _init_repo(self, create, src_url=None, update_after_clone=False):
331 def _init_repo(self, create, src_url=None, update_after_clone=False):
330 """
332 """
331 Function will check for mercurial repository in given path. If there
333 Function will check for mercurial repository in given path. If there
332 is no repository in that path it will raise an exception unless
334 is no repository in that path it will raise an exception unless
333 `create` parameter is set to True - in that case repository would
335 `create` parameter is set to True - in that case repository would
334 be created.
336 be created.
335
337
336 If `src_url` is given, would try to clone repository from the
338 If `src_url` is given, would try to clone repository from the
337 location at the given clone_point. Additionally it'll update the
339 location at the given clone_point. Additionally it'll update the
338 working copy according to the `update_after_clone` flag.
340 working copy according to the `update_after_clone` flag.
339 """
341 """
340 if create and os.path.exists(self.path):
342 if create and os.path.exists(self.path):
341 raise RepositoryError(
343 raise RepositoryError(
342 "Cannot create repository at %s, location already exist"
344 "Cannot create repository at %s, location already exist"
343 % self.path)
345 % self.path)
344
346
345 if src_url:
347 if src_url:
346 url = str(self._get_url(src_url))
348 url = str(self._get_url(src_url))
347 MercurialRepository.check_url(url, self.config)
349 MercurialRepository.check_url(url, self.config)
348
350
349 self._remote.clone(url, self.path, update_after_clone)
351 self._remote.clone(url, self.path, update_after_clone)
350
352
351 # Don't try to create if we've already cloned repo
353 # Don't try to create if we've already cloned repo
352 create = False
354 create = False
353
355
354 if create:
356 if create:
355 os.makedirs(self.path, mode=0755)
357 os.makedirs(self.path, mode=0755)
356
358
357 self._remote.localrepository(create)
359 self._remote.localrepository(create)
358
360
359 @LazyProperty
361 @LazyProperty
360 def in_memory_commit(self):
362 def in_memory_commit(self):
361 return MercurialInMemoryCommit(self)
363 return MercurialInMemoryCommit(self)
362
364
363 @LazyProperty
365 @LazyProperty
364 def description(self):
366 def description(self):
365 description = self._remote.get_config_value(
367 description = self._remote.get_config_value(
366 'web', 'description', untrusted=True)
368 'web', 'description', untrusted=True)
367 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
369 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
368
370
369 @LazyProperty
371 @LazyProperty
370 def contact(self):
372 def contact(self):
371 contact = (
373 contact = (
372 self._remote.get_config_value("web", "contact") or
374 self._remote.get_config_value("web", "contact") or
373 self._remote.get_config_value("ui", "username"))
375 self._remote.get_config_value("ui", "username"))
374 return safe_unicode(contact or self.DEFAULT_CONTACT)
376 return safe_unicode(contact or self.DEFAULT_CONTACT)
375
377
376 @LazyProperty
378 @LazyProperty
377 def last_change(self):
379 def last_change(self):
378 """
380 """
379 Returns last change made on this repository as
381 Returns last change made on this repository as
380 `datetime.datetime` object.
382 `datetime.datetime` object.
381 """
383 """
382 try:
384 try:
383 return self.get_commit().date
385 return self.get_commit().date
384 except RepositoryError:
386 except RepositoryError:
385 tzoffset = makedate()[1]
387 tzoffset = makedate()[1]
386 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
388 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
387
389
388 def _get_fs_mtime(self):
390 def _get_fs_mtime(self):
389 # fallback to filesystem
391 # fallback to filesystem
390 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
392 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
391 st_path = os.path.join(self.path, '.hg', "store")
393 st_path = os.path.join(self.path, '.hg', "store")
392 if os.path.exists(cl_path):
394 if os.path.exists(cl_path):
393 return os.stat(cl_path).st_mtime
395 return os.stat(cl_path).st_mtime
394 else:
396 else:
395 return os.stat(st_path).st_mtime
397 return os.stat(st_path).st_mtime
396
398
397 def _get_url(self, url):
399 def _get_url(self, url):
398 """
400 """
399 Returns the normalized url. If no scheme is given, it falls back
401 Returns the normalized url. If no scheme is given, it falls back
400 to the filesystem
402 to the filesystem
401 (``file:///``) scheme.
403 (``file:///``) scheme.
402 """
404 """
403 url = url.encode('utf8')
405 url = url.encode('utf8')
404 if url != 'default' and '://' not in url:
406 if url != 'default' and '://' not in url:
405 url = "file:" + urllib.pathname2url(url)
407 url = "file:" + urllib.pathname2url(url)
406 return url
408 return url
407
409
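
For reference, the normalization performed by _get_url above, restated as a standalone sketch with a concrete POSIX example (Python 2 urllib, as imported at the top of this module):

import urllib

def to_hg_url(url):
    # plain filesystem paths become file: urls; anything with a scheme,
    # or the literal 'default', passes through unchanged
    url = url.encode('utf8')
    if url != 'default' and '://' not in url:
        url = 'file:' + urllib.pathname2url(url)
    return url

# to_hg_url('/srv/repos/example-hg') == 'file:/srv/repos/example-hg'
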
408 def get_hook_location(self):
410 def get_hook_location(self):
409 """
411 """
410 returns absolute path to location where hooks are stored
412 returns absolute path to location where hooks are stored
411 """
413 """
412 return os.path.join(self.path, '.hg', '.hgrc')
414 return os.path.join(self.path, '.hg', '.hgrc')
413
415
414 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
416 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
415 """
417 """
416 Returns ``MercurialCommit`` object representing repository's
418 Returns ``MercurialCommit`` object representing repository's
417 commit at the given `commit_id` or `commit_idx`.
419 commit at the given `commit_id` or `commit_idx`.
418 """
420 """
419 if self.is_empty():
421 if self.is_empty():
420 raise EmptyRepositoryError("There are no commits yet")
422 raise EmptyRepositoryError("There are no commits yet")
421
423
422 if commit_id is not None:
424 if commit_id is not None:
423 self._validate_commit_id(commit_id)
425 self._validate_commit_id(commit_id)
424 try:
426 try:
425 idx = self._commit_ids[commit_id]
427 idx = self._commit_ids[commit_id]
426 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
428 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
427 except KeyError:
429 except KeyError:
428 pass
430 pass
429 elif commit_idx is not None:
431 elif commit_idx is not None:
430 self._validate_commit_idx(commit_idx)
432 self._validate_commit_idx(commit_idx)
431 try:
433 try:
432 id_ = self.commit_ids[commit_idx]
434 id_ = self.commit_ids[commit_idx]
433 if commit_idx < 0:
435 if commit_idx < 0:
434 commit_idx += len(self.commit_ids)
436 commit_idx += len(self.commit_ids)
435 return MercurialCommit(
437 return MercurialCommit(
436 self, id_, commit_idx, pre_load=pre_load)
438 self, id_, commit_idx, pre_load=pre_load)
437 except IndexError:
439 except IndexError:
438 commit_id = commit_idx
440 commit_id = commit_idx
439 else:
441 else:
440 commit_id = "tip"
442 commit_id = "tip"
441
443
442 if isinstance(commit_id, unicode):
444 if isinstance(commit_id, unicode):
443 commit_id = safe_str(commit_id)
445 commit_id = safe_str(commit_id)
444
446
445 try:
447 try:
446 raw_id, idx = self._remote.lookup(commit_id, both=True)
448 raw_id, idx = self._remote.lookup(commit_id, both=True)
447 except CommitDoesNotExistError:
449 except CommitDoesNotExistError:
448 msg = "Commit %s does not exist for %s" % (
450 msg = "Commit %s does not exist for %s" % (
449 commit_id, self)
451 commit_id, self)
450 raise CommitDoesNotExistError(msg)
452 raise CommitDoesNotExistError(msg)
451
453
452 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
454 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
453
455
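
Hedged lookup examples for get_commit(), reusing the repo instance from the earlier sketches; the abbreviated hash is made up and is resolved by the remote lookup() call above:

tip = repo.get_commit()                              # no arguments -> "tip"
first = repo.get_commit(commit_idx=0)                # index into commit_ids
by_hash = repo.get_commit(commit_id='1f0c7ee9a2f3')  # full or abbreviated hash
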
454 def get_commits(
456 def get_commits(
455 self, start_id=None, end_id=None, start_date=None, end_date=None,
457 self, start_id=None, end_id=None, start_date=None, end_date=None,
456 branch_name=None, show_hidden=False, pre_load=None):
458 branch_name=None, show_hidden=False, pre_load=None):
457 """
459 """
458 Returns generator of ``MercurialCommit`` objects from start to end
460 Returns generator of ``MercurialCommit`` objects from start to end
459 (both are inclusive)
461 (both are inclusive)
460
462
461 :param start_id: None, str(commit_id)
463 :param start_id: None, str(commit_id)
462 :param end_id: None, str(commit_id)
464 :param end_id: None, str(commit_id)
463 :param start_date: if specified, commits with commit date less than
465 :param start_date: if specified, commits with commit date less than
464 ``start_date`` would be filtered out from returned set
466 ``start_date`` would be filtered out from returned set
465 :param end_date: if specified, commits with commit date greater than
467 :param end_date: if specified, commits with commit date greater than
466 ``end_date`` would be filtered out from returned set
468 ``end_date`` would be filtered out from returned set
467 :param branch_name: if specified, commits not reachable from given
469 :param branch_name: if specified, commits not reachable from given
468 branch would be filtered out from returned set
470 branch would be filtered out from returned set
469 :param show_hidden: Show hidden commits such as obsolete or hidden from
471 :param show_hidden: Show hidden commits such as obsolete or hidden from
470 Mercurial evolve
472 Mercurial evolve
471 :raise BranchDoesNotExistError: If given ``branch_name`` does not
473 :raise BranchDoesNotExistError: If given ``branch_name`` does not
472 exist.
474 exist.
473 :raise CommitDoesNotExistError: If commit for given ``start`` or
475 :raise CommitDoesNotExistError: If commit for given ``start`` or
474 ``end`` could not be found.
476 ``end`` could not be found.
475 """
477 """
476 # actually we should check now if it's not an empty repo
478 # actually we should check now if it's not an empty repo
477 branch_ancestors = False
479 branch_ancestors = False
478 if self.is_empty():
480 if self.is_empty():
479 raise EmptyRepositoryError("There are no commits yet")
481 raise EmptyRepositoryError("There are no commits yet")
480 self._validate_branch_name(branch_name)
482 self._validate_branch_name(branch_name)
481
483
482 if start_id is not None:
484 if start_id is not None:
483 self._validate_commit_id(start_id)
485 self._validate_commit_id(start_id)
484 c_start = self.get_commit(commit_id=start_id)
486 c_start = self.get_commit(commit_id=start_id)
485 start_pos = self._commit_ids[c_start.raw_id]
487 start_pos = self._commit_ids[c_start.raw_id]
486 else:
488 else:
487 start_pos = None
489 start_pos = None
488
490
489 if end_id is not None:
491 if end_id is not None:
490 self._validate_commit_id(end_id)
492 self._validate_commit_id(end_id)
491 c_end = self.get_commit(commit_id=end_id)
493 c_end = self.get_commit(commit_id=end_id)
492 end_pos = max(0, self._commit_ids[c_end.raw_id])
494 end_pos = max(0, self._commit_ids[c_end.raw_id])
493 else:
495 else:
494 end_pos = None
496 end_pos = None
495
497
496 if None not in [start_id, end_id] and start_pos > end_pos:
498 if None not in [start_id, end_id] and start_pos > end_pos:
497 raise RepositoryError(
499 raise RepositoryError(
498 "Start commit '%s' cannot be after end commit '%s'" %
500 "Start commit '%s' cannot be after end commit '%s'" %
499 (start_id, end_id))
501 (start_id, end_id))
500
502
501 if end_pos is not None:
503 if end_pos is not None:
502 end_pos += 1
504 end_pos += 1
503
505
504 commit_filter = []
506 commit_filter = []
505
507
506 if branch_name and not branch_ancestors:
508 if branch_name and not branch_ancestors:
507 commit_filter.append('branch("%s")' % (branch_name,))
509 commit_filter.append('branch("%s")' % (branch_name,))
508 elif branch_name and branch_ancestors:
510 elif branch_name and branch_ancestors:
509 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
511 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
510
512
511 if start_date and not end_date:
513 if start_date and not end_date:
512 commit_filter.append('date(">%s")' % (start_date,))
514 commit_filter.append('date(">%s")' % (start_date,))
513 if end_date and not start_date:
515 if end_date and not start_date:
514 commit_filter.append('date("<%s")' % (end_date,))
516 commit_filter.append('date("<%s")' % (end_date,))
515 if start_date and end_date:
517 if start_date and end_date:
516 commit_filter.append(
518 commit_filter.append(
517 'date(">%s") and date("<%s")' % (start_date, end_date))
519 'date(">%s") and date("<%s")' % (start_date, end_date))
518
520
519 if not show_hidden:
521 if not show_hidden:
520 commit_filter.append('not obsolete()')
522 commit_filter.append('not obsolete()')
521 commit_filter.append('not hidden()')
523 commit_filter.append('not hidden()')
522
524
523 # TODO: johbo: Figure out a simpler way for this solution
525 # TODO: johbo: Figure out a simpler way for this solution
524 collection_generator = CollectionGenerator
526 collection_generator = CollectionGenerator
525 if commit_filter:
527 if commit_filter:
526 commit_filter = ' and '.join(map(safe_str, commit_filter))
528 commit_filter = ' and '.join(map(safe_str, commit_filter))
527 revisions = self._remote.rev_range([commit_filter])
529 revisions = self._remote.rev_range([commit_filter])
528 collection_generator = MercurialIndexBasedCollectionGenerator
530 collection_generator = MercurialIndexBasedCollectionGenerator
529 else:
531 else:
530 revisions = self.commit_ids
532 revisions = self.commit_ids
531
533
532 if start_pos or end_pos:
534 if start_pos or end_pos:
533 revisions = revisions[start_pos:end_pos]
535 revisions = revisions[start_pos:end_pos]
534
536
535 return collection_generator(self, revisions, pre_load=pre_load)
537 return collection_generator(self, revisions, pre_load=pre_load)
536
538
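
To make the filter assembly above concrete, a hedged example call and the revset it would produce (branch name and dates are illustrative):

commits = repo.get_commits(branch_name='default',
                           start_date='2018-01-01', end_date='2018-03-01')
# commit_filter, joined with ' and ', would read:
#   branch("default") and date(">2018-01-01") and date("<2018-03-01")
#   and not obsolete() and not hidden()
for commit in commits:
    print(commit.raw_id)
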
537 def pull(self, url, commit_ids=None):
539 def pull(self, url, commit_ids=None):
538 """
540 """
539 Tries to pull changes from external location.
541 Tries to pull changes from external location.
540
542
541 :param commit_ids: Optional. Can be set to a list of commit ids
543 :param commit_ids: Optional. Can be set to a list of commit ids
542 which shall be pulled from the other repository.
544 which shall be pulled from the other repository.
543 """
545 """
544 url = self._get_url(url)
546 url = self._get_url(url)
545 self._remote.pull(url, commit_ids=commit_ids)
547 self._remote.pull(url, commit_ids=commit_ids)
546 self._remote.invalidate_vcs_cache()
548 self._remote.invalidate_vcs_cache()
547
549
548 def push(self, url):
550 def push(self, url):
549 url = self._get_url(url)
551 url = self._get_url(url)
550 self._remote.sync_push(url)
552 self._remote.sync_push(url)
551
553
552 def _local_clone(self, clone_path):
554 def _local_clone(self, clone_path):
553 """
555 """
554 Create a local clone of the current repo.
556 Create a local clone of the current repo.
555 """
557 """
556 self._remote.clone(self.path, clone_path, update_after_clone=True,
558 self._remote.clone(self.path, clone_path, update_after_clone=True,
557 hooks=False)
559 hooks=False)
558
560
559 def _update(self, revision, clean=False):
561 def _update(self, revision, clean=False):
560 """
562 """
561 Update the working copy to the specified revision.
563 Update the working copy to the specified revision.
562 """
564 """
563 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
565 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
564 self._remote.update(revision, clean=clean)
566 self._remote.update(revision, clean=clean)
565
567
566 def _identify(self):
568 def _identify(self):
567 """
569 """
568 Return the current state of the working directory.
570 Return the current state of the working directory.
569 """
571 """
570 return self._remote.identify().strip().rstrip('+')
572 return self._remote.identify().strip().rstrip('+')
571
573
572 def _heads(self, branch=None):
574 def _heads(self, branch=None):
573 """
575 """
574 Return the commit ids of the repository heads.
576 Return the commit ids of the repository heads.
575 """
577 """
576 return self._remote.heads(branch=branch).strip().split(' ')
578 return self._remote.heads(branch=branch).strip().split(' ')
577
579
578 def _ancestor(self, revision1, revision2):
580 def _ancestor(self, revision1, revision2):
579 """
581 """
580 Return the common ancestor of the two revisions.
582 Return the common ancestor of the two revisions.
581 """
583 """
582 return self._remote.ancestor(revision1, revision2)
584 return self._remote.ancestor(revision1, revision2)
583
585
584 def _local_push(
586 def _local_push(
585 self, revision, repository_path, push_branches=False,
587 self, revision, repository_path, push_branches=False,
586 enable_hooks=False):
588 enable_hooks=False):
587 """
589 """
588 Push the given revision to the specified repository.
590 Push the given revision to the specified repository.
589
591
590 :param push_branches: allow to create branches in the target repo.
592 :param push_branches: allow to create branches in the target repo.
591 """
593 """
592 self._remote.push(
594 self._remote.push(
593 [revision], repository_path, hooks=enable_hooks,
595 [revision], repository_path, hooks=enable_hooks,
594 push_branches=push_branches)
596 push_branches=push_branches)
595
597
596 def _local_merge(self, target_ref, merge_message, user_name, user_email,
598 def _local_merge(self, target_ref, merge_message, user_name, user_email,
597 source_ref, use_rebase=False, dry_run=False):
599 source_ref, use_rebase=False, dry_run=False):
598 """
600 """
599 Merge the given source_revision into the checked out revision.
601 Merge the given source_revision into the checked out revision.
600
602
601 Returns the commit id of the merge and a boolean indicating if the
603 Returns the commit id of the merge and a boolean indicating if the
602 commit needs to be pushed.
604 commit needs to be pushed.
603 """
605 """
604 self._update(target_ref.commit_id)
606 self._update(target_ref.commit_id)
605
607
606 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
608 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
607 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
609 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
608
610
609 if ancestor == source_ref.commit_id:
611 if ancestor == source_ref.commit_id:
610 # Nothing to do, the changes were already integrated
612 # Nothing to do, the changes were already integrated
611 return target_ref.commit_id, False
613 return target_ref.commit_id, False
612
614
613 elif ancestor == target_ref.commit_id and is_the_same_branch:
615 elif ancestor == target_ref.commit_id and is_the_same_branch:
614 # In this case we should force a commit message
616 # In this case we should force a commit message
615 return source_ref.commit_id, True
617 return source_ref.commit_id, True
616
618
617 if use_rebase:
619 if use_rebase:
618 try:
620 try:
619 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
621 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
620 target_ref.commit_id)
622 target_ref.commit_id)
621 self.bookmark(bookmark_name, revision=source_ref.commit_id)
623 self.bookmark(bookmark_name, revision=source_ref.commit_id)
622 self._remote.rebase(
624 self._remote.rebase(
623 source=source_ref.commit_id, dest=target_ref.commit_id)
625 source=source_ref.commit_id, dest=target_ref.commit_id)
624 self._remote.invalidate_vcs_cache()
626 self._remote.invalidate_vcs_cache()
625 self._update(bookmark_name)
627 self._update(bookmark_name)
626 return self._identify(), True
628 return self._identify(), True
627 except RepositoryError:
629 except RepositoryError:
628 # The rebase-abort may raise another exception which 'hides'
630 # The rebase-abort may raise another exception which 'hides'
629 # the original one, therefore we log it here.
631 # the original one, therefore we log it here.
630 log.exception('Error while rebasing shadow repo during merge.')
632 log.exception('Error while rebasing shadow repo during merge.')
631
633
632 # Cleanup any rebase leftovers
634 # Cleanup any rebase leftovers
633 self._remote.invalidate_vcs_cache()
635 self._remote.invalidate_vcs_cache()
634 self._remote.rebase(abort=True)
636 self._remote.rebase(abort=True)
635 self._remote.invalidate_vcs_cache()
637 self._remote.invalidate_vcs_cache()
636 self._remote.update(clean=True)
638 self._remote.update(clean=True)
637 raise
639 raise
638 else:
640 else:
639 try:
641 try:
640 self._remote.merge(source_ref.commit_id)
642 self._remote.merge(source_ref.commit_id)
641 self._remote.invalidate_vcs_cache()
643 self._remote.invalidate_vcs_cache()
642 self._remote.commit(
644 self._remote.commit(
643 message=safe_str(merge_message),
645 message=safe_str(merge_message),
644 username=safe_str('%s <%s>' % (user_name, user_email)))
646 username=safe_str('%s <%s>' % (user_name, user_email)))
645 self._remote.invalidate_vcs_cache()
647 self._remote.invalidate_vcs_cache()
646 return self._identify(), True
648 return self._identify(), True
647 except RepositoryError:
649 except RepositoryError:
648 # Cleanup any merge leftovers
650 # Cleanup any merge leftovers
649 self._remote.update(clean=True)
651 self._remote.update(clean=True)
650 raise
652 raise
651
653
652 def _local_close(self, target_ref, user_name, user_email,
654 def _local_close(self, target_ref, user_name, user_email,
653 source_ref, close_message=''):
655 source_ref, close_message=''):
654 """
656 """
655 Close the branch of the given source_revision
657 Close the branch of the given source_revision
656
658
657 Returns the commit id of the close and a boolean indicating if the
659 Returns the commit id of the close and a boolean indicating if the
658 commit needs to be pushed.
660 commit needs to be pushed.
659 """
661 """
660 self._update(source_ref.commit_id)
662 self._update(source_ref.commit_id)
661 message = close_message or "Closing branch: `{}`".format(source_ref.name)
663 message = close_message or "Closing branch: `{}`".format(source_ref.name)
662 try:
664 try:
663 self._remote.commit(
665 self._remote.commit(
664 message=safe_str(message),
666 message=safe_str(message),
665 username=safe_str('%s <%s>' % (user_name, user_email)),
667 username=safe_str('%s <%s>' % (user_name, user_email)),
666 close_branch=True)
668 close_branch=True)
667 self._remote.invalidate_vcs_cache()
669 self._remote.invalidate_vcs_cache()
668 return self._identify(), True
670 return self._identify(), True
669 except RepositoryError:
671 except RepositoryError:
670 # Cleanup any commit leftovers
672 # Cleanup any commit leftovers
671 self._remote.update(clean=True)
673 self._remote.update(clean=True)
672 raise
674 raise
673
675
674 def _is_the_same_branch(self, target_ref, source_ref):
676 def _is_the_same_branch(self, target_ref, source_ref):
675 return (
677 return (
676 self._get_branch_name(target_ref) ==
678 self._get_branch_name(target_ref) ==
677 self._get_branch_name(source_ref))
679 self._get_branch_name(source_ref))
678
680
679 def _get_branch_name(self, ref):
681 def _get_branch_name(self, ref):
680 if ref.type == 'branch':
682 if ref.type == 'branch':
681 return ref.name
683 return ref.name
682 return self._remote.ctx_branch(ref.commit_id)
684 return self._remote.ctx_branch(ref.commit_id)
683
685
684 def _maybe_prepare_merge_workspace(
686 def _maybe_prepare_merge_workspace(
685 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
687 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
686 shadow_repository_path = self._get_shadow_repository_path(
688 shadow_repository_path = self._get_shadow_repository_path(
687 repo_id, workspace_id)
689 repo_id, workspace_id)
688 if not os.path.exists(shadow_repository_path):
690 if not os.path.exists(shadow_repository_path):
689 self._local_clone(shadow_repository_path)
691 self._local_clone(shadow_repository_path)
690 log.debug(
692 log.debug(
691 'Prepared shadow repository in %s', shadow_repository_path)
693 'Prepared shadow repository in %s', shadow_repository_path)
692
694
693 return shadow_repository_path
695 return shadow_repository_path
694
696
695 def _merge_repo(self, repo_id, workspace_id, target_ref,
697 def _merge_repo(self, repo_id, workspace_id, target_ref,
696 source_repo, source_ref, merge_message,
698 source_repo, source_ref, merge_message,
697 merger_name, merger_email, dry_run=False,
699 merger_name, merger_email, dry_run=False,
698 use_rebase=False, close_branch=False):
700 use_rebase=False, close_branch=False):
699
701
700 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
702 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
701 'rebase' if use_rebase else 'merge', dry_run)
703 'rebase' if use_rebase else 'merge', dry_run)
702 if target_ref.commit_id not in self._heads():
704 if target_ref.commit_id not in self._heads():
703 return MergeResponse(
705 return MergeResponse(
704 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
706 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
705
707
706 try:
708 try:
707 if (target_ref.type == 'branch' and
709 if (target_ref.type == 'branch' and
708 len(self._heads(target_ref.name)) != 1):
710 len(self._heads(target_ref.name)) != 1):
709 return MergeResponse(
711 return MergeResponse(
710 False, False, None,
712 False, False, None,
711 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
713 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
712 except CommitDoesNotExistError:
714 except CommitDoesNotExistError:
713 log.exception('Failure when looking up branch heads on hg target')
715 log.exception('Failure when looking up branch heads on hg target')
714 return MergeResponse(
716 return MergeResponse(
715 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
717 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
716
718
717 shadow_repository_path = self._maybe_prepare_merge_workspace(
719 shadow_repository_path = self._maybe_prepare_merge_workspace(
718 repo_id, workspace_id, target_ref, source_ref)
720 repo_id, workspace_id, target_ref, source_ref)
719 shadow_repo = self._get_shadow_instance(shadow_repository_path)
721 shadow_repo = self._get_shadow_instance(shadow_repository_path)
720
722
721 log.debug('Pulling in target reference %s', target_ref)
723 log.debug('Pulling in target reference %s', target_ref)
722 self._validate_pull_reference(target_ref)
724 self._validate_pull_reference(target_ref)
723 shadow_repo._local_pull(self.path, target_ref)
725 shadow_repo._local_pull(self.path, target_ref)
724 try:
726 try:
725 log.debug('Pulling in source reference %s', source_ref)
727 log.debug('Pulling in source reference %s', source_ref)
726 source_repo._validate_pull_reference(source_ref)
728 source_repo._validate_pull_reference(source_ref)
727 shadow_repo._local_pull(source_repo.path, source_ref)
729 shadow_repo._local_pull(source_repo.path, source_ref)
728 except CommitDoesNotExistError:
730 except CommitDoesNotExistError:
729 log.exception('Failure when doing local pull on hg shadow repo')
731 log.exception('Failure when doing local pull on hg shadow repo')
730 return MergeResponse(
732 return MergeResponse(
731 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
733 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
732
734
733 merge_ref = None
735 merge_ref = None
734 merge_commit_id = None
736 merge_commit_id = None
735 close_commit_id = None
737 close_commit_id = None
736 merge_failure_reason = MergeFailureReason.NONE
738 merge_failure_reason = MergeFailureReason.NONE
737
739
738 # enforce that close branch should be used only in case we source from
740 # enforce that close branch should be used only in case we source from
739 # an actual Branch
741 # an actual Branch
740 close_branch = close_branch and source_ref.type == 'branch'
742 close_branch = close_branch and source_ref.type == 'branch'
741
743
742 # don't allow to close branch if source and target are the same
744 # don't allow to close branch if source and target are the same
743 close_branch = close_branch and source_ref.name != target_ref.name
745 close_branch = close_branch and source_ref.name != target_ref.name
744
746
745 needs_push_on_close = False
747 needs_push_on_close = False
746 if close_branch and not use_rebase and not dry_run:
748 if close_branch and not use_rebase and not dry_run:
747 try:
749 try:
748 close_commit_id, needs_push_on_close = shadow_repo._local_close(
750 close_commit_id, needs_push_on_close = shadow_repo._local_close(
749 target_ref, merger_name, merger_email, source_ref)
751 target_ref, merger_name, merger_email, source_ref)
750 merge_possible = True
752 merge_possible = True
751 except RepositoryError:
753 except RepositoryError:
752 log.exception(
754 log.exception(
753 'Failure when doing close branch on hg shadow repo')
755 'Failure when doing close branch on hg shadow repo')
754 merge_possible = False
756 merge_possible = False
755 merge_failure_reason = MergeFailureReason.MERGE_FAILED
757 merge_failure_reason = MergeFailureReason.MERGE_FAILED
756 else:
758 else:
757 merge_possible = True
759 merge_possible = True
758
760
759 needs_push = False
761 needs_push = False
760 if merge_possible:
762 if merge_possible:
761 try:
763 try:
762 merge_commit_id, needs_push = shadow_repo._local_merge(
764 merge_commit_id, needs_push = shadow_repo._local_merge(
763 target_ref, merge_message, merger_name, merger_email,
765 target_ref, merge_message, merger_name, merger_email,
764 source_ref, use_rebase=use_rebase, dry_run=dry_run)
766 source_ref, use_rebase=use_rebase, dry_run=dry_run)
765 merge_possible = True
767 merge_possible = True
766
768
767 # read the state of the close action, in case it
769 # read the state of the close action, in case it
768 # required a push
770 # required a push
769 needs_push = needs_push or needs_push_on_close
771 needs_push = needs_push or needs_push_on_close
770
772
771 # Set a bookmark pointing to the merge commit. This bookmark
773 # Set a bookmark pointing to the merge commit. This bookmark
772 # may be used to easily identify the last successful merge
774 # may be used to easily identify the last successful merge
773 # commit in the shadow repository.
775 # commit in the shadow repository.
774 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
776 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
775 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
777 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
776 except SubrepoMergeError:
778 except SubrepoMergeError:
777 log.exception(
779 log.exception(
778 'Subrepo merge error during local merge on hg shadow repo.')
780 'Subrepo merge error during local merge on hg shadow repo.')
779 merge_possible = False
781 merge_possible = False
780 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
782 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
781 needs_push = False
783 needs_push = False
782 except RepositoryError:
784 except RepositoryError:
783 log.exception('Failure when doing local merge on hg shadow repo')
785 log.exception('Failure when doing local merge on hg shadow repo')
784 merge_possible = False
786 merge_possible = False
785 merge_failure_reason = MergeFailureReason.MERGE_FAILED
787 merge_failure_reason = MergeFailureReason.MERGE_FAILED
786 needs_push = False
788 needs_push = False
787
789
788 if merge_possible and not dry_run:
790 if merge_possible and not dry_run:
789 if needs_push:
791 if needs_push:
790 # In case the target is a bookmark, update it, so after pushing
792 # In case the target is a bookmark, update it, so after pushing
791 # the bookmark is also updated in the target.
793 # the bookmark is also updated in the target.
792 if target_ref.type == 'book':
794 if target_ref.type == 'book':
793 shadow_repo.bookmark(
795 shadow_repo.bookmark(
794 target_ref.name, revision=merge_commit_id)
796 target_ref.name, revision=merge_commit_id)
795 try:
797 try:
796 shadow_repo_with_hooks = self._get_shadow_instance(
798 shadow_repo_with_hooks = self._get_shadow_instance(
797 shadow_repository_path,
799 shadow_repository_path,
798 enable_hooks=True)
800 enable_hooks=True)
799 # This is the actual merge action, we push from shadow
801 # This is the actual merge action, we push from shadow
800 # into origin.
802 # into origin.
801 # Note: the push_branches option will push any new branch
803 # Note: the push_branches option will push any new branch
802 # defined in the source repository to the target. This may
804 # defined in the source repository to the target. This may
803 # be dangerous as branches are permanent in Mercurial.
805 # be dangerous as branches are permanent in Mercurial.
804 # This feature was requested in issue #441.
806 # This feature was requested in issue #441.
805 shadow_repo_with_hooks._local_push(
807 shadow_repo_with_hooks._local_push(
806 merge_commit_id, self.path, push_branches=True,
808 merge_commit_id, self.path, push_branches=True,
807 enable_hooks=True)
809 enable_hooks=True)
808
810
809 # maybe we also need to push the close_commit_id
811 # maybe we also need to push the close_commit_id
810 if close_commit_id:
812 if close_commit_id:
811 shadow_repo_with_hooks._local_push(
813 shadow_repo_with_hooks._local_push(
812 close_commit_id, self.path, push_branches=True,
814 close_commit_id, self.path, push_branches=True,
813 enable_hooks=True)
815 enable_hooks=True)
814 merge_succeeded = True
816 merge_succeeded = True
815 except RepositoryError:
817 except RepositoryError:
816 log.exception(
818 log.exception(
817 'Failure when doing local push from the shadow '
819 'Failure when doing local push from the shadow '
818 'repository to the target repository.')
820 'repository to the target repository.')
819 merge_succeeded = False
821 merge_succeeded = False
820 merge_failure_reason = MergeFailureReason.PUSH_FAILED
822 merge_failure_reason = MergeFailureReason.PUSH_FAILED
821 else:
823 else:
822 merge_succeeded = True
824 merge_succeeded = True
823 else:
825 else:
824 merge_succeeded = False
826 merge_succeeded = False
825
827
826 return MergeResponse(
828 return MergeResponse(
827 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
829 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
828
830
829 def _get_shadow_instance(
831 def _get_shadow_instance(
830 self, shadow_repository_path, enable_hooks=False):
832 self, shadow_repository_path, enable_hooks=False):
831 config = self.config.copy()
833 config = self.config.copy()
832 if not enable_hooks:
834 if not enable_hooks:
833 config.clear_section('hooks')
835 config.clear_section('hooks')
834 return MercurialRepository(shadow_repository_path, config)
836 return MercurialRepository(shadow_repository_path, config)
835
837
836 def _validate_pull_reference(self, reference):
838 def _validate_pull_reference(self, reference):
837 if not (reference.name in self.bookmarks or
839 if not (reference.name in self.bookmarks or
838 reference.name in self.branches or
840 reference.name in self.branches or
839 self.get_commit(reference.commit_id)):
841 self.get_commit(reference.commit_id)):
840 raise CommitDoesNotExistError(
842 raise CommitDoesNotExistError(
841 'Unknown branch, bookmark or commit id')
843 'Unknown branch, bookmark or commit id')
842
844
843 def _local_pull(self, repository_path, reference):
845 def _local_pull(self, repository_path, reference):
844 """
846 """
845 Fetch a branch, bookmark or commit from a local repository.
847 Fetch a branch, bookmark or commit from a local repository.
846 """
848 """
847 repository_path = os.path.abspath(repository_path)
849 repository_path = os.path.abspath(repository_path)
848 if repository_path == self.path:
850 if repository_path == self.path:
849 raise ValueError('Cannot pull from the same repository')
851 raise ValueError('Cannot pull from the same repository')
850
852
851 reference_type_to_option_name = {
853 reference_type_to_option_name = {
852 'book': 'bookmark',
854 'book': 'bookmark',
853 'branch': 'branch',
855 'branch': 'branch',
854 }
856 }
855 option_name = reference_type_to_option_name.get(
857 option_name = reference_type_to_option_name.get(
856 reference.type, 'revision')
858 reference.type, 'revision')
857
859
858 if option_name == 'revision':
860 if option_name == 'revision':
859 ref = reference.commit_id
861 ref = reference.commit_id
860 else:
862 else:
861 ref = reference.name
863 ref = reference.name
862
864
863 options = {option_name: [ref]}
865 options = {option_name: [ref]}
864 self._remote.pull_cmd(repository_path, hooks=False, **options)
866 self._remote.pull_cmd(repository_path, hooks=False, **options)
865 self._remote.invalidate_vcs_cache()
867 self._remote.invalidate_vcs_cache()
866
868
867 def bookmark(self, bookmark, revision=None):
869 def bookmark(self, bookmark, revision=None):
868 if isinstance(bookmark, unicode):
870 if isinstance(bookmark, unicode):
869 bookmark = safe_str(bookmark)
871 bookmark = safe_str(bookmark)
870 self._remote.bookmark(bookmark, revision=revision)
872 self._remote.bookmark(bookmark, revision=revision)
871 self._remote.invalidate_vcs_cache()
873 self._remote.invalidate_vcs_cache()
872
874
873 def get_path_permissions(self, username):
875 def get_path_permissions(self, username):
874 hgacl_file = os.path.join(self.path, '.hg/hgacl')
876 hgacl_file = os.path.join(self.path, '.hg/hgacl')
875
877
876 def read_patterns(suffix):
878 def read_patterns(suffix):
877 svalue = None
879 svalue = None
878 try:
880 try:
879 svalue = hgacl.get('narrowhgacl', username + suffix)
881 svalue = hgacl.get('narrowhgacl', username + suffix)
880 except configparser.NoOptionError:
882 except configparser.NoOptionError:
881 try:
883 try:
882 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
884 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
883 except configparser.NoOptionError:
885 except configparser.NoOptionError:
884 pass
886 pass
885 if not svalue:
887 if not svalue:
886 return None
888 return None
887 result = ['/']
889 result = ['/']
888 for pattern in svalue.split():
890 for pattern in svalue.split():
889 result.append(pattern)
891 result.append(pattern)
890 if '*' not in pattern and '?' not in pattern:
892 if '*' not in pattern and '?' not in pattern:
891 result.append(pattern + '/*')
893 result.append(pattern + '/*')
892 return result
894 return result
893
895
894 if os.path.exists(hgacl_file):
896 if os.path.exists(hgacl_file):
895 try:
897 try:
896 hgacl = configparser.RawConfigParser()
898 hgacl = configparser.RawConfigParser()
897 hgacl.read(hgacl_file)
899 hgacl.read(hgacl_file)
898
900
899 includes = read_patterns('.includes')
901 includes = read_patterns('.includes')
900 excludes = read_patterns('.excludes')
902 excludes = read_patterns('.excludes')
901 return BasePathPermissionChecker.create_from_patterns(
903 return BasePathPermissionChecker.create_from_patterns(
902 includes, excludes)
904 includes, excludes)
903 except BaseException as e:
905 except BaseException as e:
904 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
906 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
905 hgacl_file, self.name, e)
907 hgacl_file, self.name, e)
906 raise exceptions.RepositoryRequirementError(msg)
908 raise exceptions.RepositoryRequirementError(msg)
907 else:
909 else:
908 return None
910 return None
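# Illustrative note, not part of this changeset: the ACL file read above is a
# plain INI file stored at .hg/hgacl inside the repository. A hypothetical
# example that read_patterns() would accept:
#
#   [narrowhgacl]
#   default.includes = docs/* setup.py
#   john.includes = src/app
#   john.excludes = src/app/secrets
#
# Patterns without '*' or '?' also match their whole subtree, because
# read_patterns() appends "pattern + '/*'" for them.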
909
911
910
912
911 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
913 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
912
914
913 def _commit_factory(self, commit_id):
915 def _commit_factory(self, commit_id):
914 return self.repo.get_commit(
916 return self.repo.get_commit(
915 commit_idx=commit_id, pre_load=self.pre_load)
917 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,341 +1,343 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SVN repository module
22 SVN repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import date_astimestamp
32 from rhodecode.lib.datelib import date_astimestamp
33 from rhodecode.lib.utils import safe_str, safe_unicode
33 from rhodecode.lib.utils import safe_str, safe_unicode
34 from rhodecode.lib.vcs import connection, path as vcspath
34 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs.backends import base
35 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends.svn.commit import (
36 from rhodecode.lib.vcs.backends.svn.commit import (
37 SubversionCommit, _date_from_svn_properties)
37 SubversionCommit, _date_from_svn_properties)
38 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
38 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
39 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 from rhodecode.lib.vcs.conf import settings
40 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
42 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
42 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 VCSError, NodeDoesNotExistError)
43 VCSError, NodeDoesNotExistError)
44
44
45
45
46 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
47
47
48
48
49 class SubversionRepository(base.BaseRepository):
49 class SubversionRepository(base.BaseRepository):
50 """
50 """
51 Subversion backend implementation
51 Subversion backend implementation
52
52
53 .. important::
53 .. important::
54
54
55 It is very important to distinguish the commit index and the commit id
55 It is very important to distinguish the commit index and the commit id
56 which is assigned by Subversion. The first one is always handled as an
56 which is assigned by Subversion. The first one is always handled as an
57 `int` by this implementation. The commit id assigned by Subversion on
57 `int` by this implementation. The commit id assigned by Subversion on
58 the other hand will always be a `str`.
58 the other hand will always be a `str`.
59
59
60 There is a specific trap since the first commit will have the index
60 There is a specific trap since the first commit will have the index
61 ``0`` but the svn id will be ``"1"``.
61 ``0`` but the svn id will be ``"1"``.
62
62
63 """
63 """
64
64
65 # Note: Subversion does not really have a default branch name.
65 # Note: Subversion does not really have a default branch name.
66 DEFAULT_BRANCH_NAME = None
66 DEFAULT_BRANCH_NAME = None
67
67
68 contact = base.BaseRepository.DEFAULT_CONTACT
68 contact = base.BaseRepository.DEFAULT_CONTACT
69 description = base.BaseRepository.DEFAULT_DESCRIPTION
69 description = base.BaseRepository.DEFAULT_DESCRIPTION
70
70
71 def __init__(self, repo_path, config=None, create=False, src_url=None,
71 def __init__(self, repo_path, config=None, create=False, src_url=None,
72 **kwargs):
72 **kwargs):
73 self.path = safe_str(os.path.abspath(repo_path))
73 self.path = safe_str(os.path.abspath(repo_path))
74 self.config = config if config else self.get_default_config()
74 self.config = config if config else self.get_default_config()
75 self._remote = connection.Svn(
76 self.path, self.config)
77
75
78 self._init_repo(create, src_url)
76 self._init_repo(create, src_url)
79
77
78 @LazyProperty
79 def _remote(self):
80 return connection.Svn(self.path, self.config)
81
80 def _init_repo(self, create, src_url):
82 def _init_repo(self, create, src_url):
81 if create and os.path.exists(self.path):
83 if create and os.path.exists(self.path):
82 raise RepositoryError(
84 raise RepositoryError(
83 "Cannot create repository at %s, location already exist"
85 "Cannot create repository at %s, location already exist"
84 % self.path)
86 % self.path)
85
87
86 if create:
88 if create:
87 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
89 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
88 if src_url:
90 if src_url:
89 src_url = _sanitize_url(src_url)
91 src_url = _sanitize_url(src_url)
90 self._remote.import_remote_repository(src_url)
92 self._remote.import_remote_repository(src_url)
91 else:
93 else:
92 self._check_path()
94 self._check_path()
93
95
94 @LazyProperty
96 @LazyProperty
95 def commit_ids(self):
97 def commit_ids(self):
96 head = self._remote.lookup(None)
98 head = self._remote.lookup(None)
97 return [str(r) for r in xrange(1, head + 1)]
99 return [str(r) for r in xrange(1, head + 1)]
98
100
99 @LazyProperty
101 @LazyProperty
100 def branches(self):
102 def branches(self):
101 return self._tags_or_branches('vcs_svn_branch')
103 return self._tags_or_branches('vcs_svn_branch')
102
104
103 @LazyProperty
105 @LazyProperty
104 def branches_closed(self):
106 def branches_closed(self):
105 return {}
107 return {}
106
108
107 @LazyProperty
109 @LazyProperty
108 def bookmarks(self):
110 def bookmarks(self):
109 return {}
111 return {}
110
112
111 @LazyProperty
113 @LazyProperty
112 def branches_all(self):
114 def branches_all(self):
113 # TODO: johbo: Implement proper branch support
115 # TODO: johbo: Implement proper branch support
114 all_branches = {}
116 all_branches = {}
115 all_branches.update(self.branches)
117 all_branches.update(self.branches)
116 all_branches.update(self.branches_closed)
118 all_branches.update(self.branches_closed)
117 return all_branches
119 return all_branches
118
120
119 @LazyProperty
121 @LazyProperty
120 def tags(self):
122 def tags(self):
121 return self._tags_or_branches('vcs_svn_tag')
123 return self._tags_or_branches('vcs_svn_tag')
122
124
123 def _tags_or_branches(self, config_section):
125 def _tags_or_branches(self, config_section):
124 found_items = {}
126 found_items = {}
125
127
126 if self.is_empty():
128 if self.is_empty():
127 return {}
129 return {}
128
130
129 for pattern in self._patterns_from_section(config_section):
131 for pattern in self._patterns_from_section(config_section):
130 pattern = vcspath.sanitize(pattern)
132 pattern = vcspath.sanitize(pattern)
131 tip = self.get_commit()
133 tip = self.get_commit()
132 try:
134 try:
133 if pattern.endswith('*'):
135 if pattern.endswith('*'):
134 basedir = tip.get_node(vcspath.dirname(pattern))
136 basedir = tip.get_node(vcspath.dirname(pattern))
135 directories = basedir.dirs
137 directories = basedir.dirs
136 else:
138 else:
137 directories = (tip.get_node(pattern), )
139 directories = (tip.get_node(pattern), )
138 except NodeDoesNotExistError:
140 except NodeDoesNotExistError:
139 continue
141 continue
140 found_items.update(
142 found_items.update(
141 (safe_unicode(n.path),
143 (safe_unicode(n.path),
142 self.commit_ids[-1])
144 self.commit_ids[-1])
143 for n in directories)
145 for n in directories)
144
146
145 def get_name(item):
147 def get_name(item):
146 return item[0]
148 return item[0]
147
149
148 return OrderedDict(sorted(found_items.items(), key=get_name))
150 return OrderedDict(sorted(found_items.items(), key=get_name))
149
151
150 def _patterns_from_section(self, section):
152 def _patterns_from_section(self, section):
151 return (pattern for key, pattern in self.config.items(section))
153 return (pattern for key, pattern in self.config.items(section))
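# Illustrative note, not part of this changeset: branch and tag detection is
# driven by path patterns read from the 'vcs_svn_branch' and 'vcs_svn_tag'
# config sections. A hypothetical configuration could look like:
#
#   [vcs_svn_branch]
#   trunk = trunk
#   branches = branches/*
#
#   [vcs_svn_tag]
#   tags = tags/*
#
# A pattern ending in '*' makes _tags_or_branches() list every directory below
# the pattern's parent, while a plain path is looked up as a single node.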
152
154
153 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
155 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
154 if self != repo2:
156 if self != repo2:
155 raise ValueError(
157 raise ValueError(
156 "Subversion does not support getting common ancestor of"
158 "Subversion does not support getting common ancestor of"
157 " different repositories.")
159 " different repositories.")
158
160
159 if int(commit_id1) < int(commit_id2):
161 if int(commit_id1) < int(commit_id2):
160 return commit_id1
162 return commit_id1
161 return commit_id2
163 return commit_id2
162
164
163 def verify(self):
165 def verify(self):
164 verify = self._remote.verify()
166 verify = self._remote.verify()
165
167
166 self._remote.invalidate_vcs_cache()
168 self._remote.invalidate_vcs_cache()
167 return verify
169 return verify
168
170
169 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
171 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
170 # TODO: johbo: Implement better comparison, this is a very naive
172 # TODO: johbo: Implement better comparison, this is a very naive
171 # version which does not allow to compare branches, tags or folders
173 # version which does not allow to compare branches, tags or folders
172 # at all.
174 # at all.
173 if repo2 != self:
175 if repo2 != self:
174 raise ValueError(
176 raise ValueError(
175 "Subversion does not support comparison of of different "
177 "Subversion does not support comparison of of different "
176 "repositories.")
178 "repositories.")
177
179
178 if commit_id1 == commit_id2:
180 if commit_id1 == commit_id2:
179 return []
181 return []
180
182
181 commit_idx1 = self._get_commit_idx(commit_id1)
183 commit_idx1 = self._get_commit_idx(commit_id1)
182 commit_idx2 = self._get_commit_idx(commit_id2)
184 commit_idx2 = self._get_commit_idx(commit_id2)
183
185
184 commits = [
186 commits = [
185 self.get_commit(commit_idx=idx)
187 self.get_commit(commit_idx=idx)
186 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
188 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
187
189
188 return commits
190 return commits
189
191
190 def _get_commit_idx(self, commit_id):
192 def _get_commit_idx(self, commit_id):
191 try:
193 try:
192 svn_rev = int(commit_id)
194 svn_rev = int(commit_id)
193 except:
195 except:
194 # TODO: johbo: this might be only one case, HEAD, check this
196 # TODO: johbo: this might be only one case, HEAD, check this
195 svn_rev = self._remote.lookup(commit_id)
197 svn_rev = self._remote.lookup(commit_id)
196 commit_idx = svn_rev - 1
198 commit_idx = svn_rev - 1
197 if commit_idx >= len(self.commit_ids):
199 if commit_idx >= len(self.commit_ids):
198 raise CommitDoesNotExistError(
200 raise CommitDoesNotExistError(
199 "Commit at index %s does not exist." % (commit_idx, ))
201 "Commit at index %s does not exist." % (commit_idx, ))
200 return commit_idx
202 return commit_idx
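# Illustrative note, not part of this changeset: Subversion revisions start at
# 1 while the commit index used by this class starts at 0, so svn revision "1"
# maps to commit_idx 0 and revision "3" to commit_idx 2 (commit_idx = svn_rev - 1).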
201
203
202 @staticmethod
204 @staticmethod
203 def check_url(url, config):
205 def check_url(url, config):
204 """
206 """
205 Check if `url` is a valid source to import a Subversion repository.
207 Check if `url` is a valid source to import a Subversion repository.
206 """
208 """
207 # convert to URL if it's a local directory
209 # convert to URL if it's a local directory
208 if os.path.isdir(url):
210 if os.path.isdir(url):
209 url = 'file://' + urllib.pathname2url(url)
211 url = 'file://' + urllib.pathname2url(url)
210 return connection.Svn.check_url(url, config.serialize())
212 return connection.Svn.check_url(url, config.serialize())
211
213
212 @staticmethod
214 @staticmethod
213 def is_valid_repository(path):
215 def is_valid_repository(path):
214 try:
216 try:
215 SubversionRepository(path)
217 SubversionRepository(path)
216 return True
218 return True
217 except VCSError:
219 except VCSError:
218 pass
220 pass
219 return False
221 return False
220
222
221 def _check_path(self):
223 def _check_path(self):
222 if not os.path.exists(self.path):
224 if not os.path.exists(self.path):
223 raise VCSError('Path "%s" does not exist!' % (self.path, ))
225 raise VCSError('Path "%s" does not exist!' % (self.path, ))
224 if not self._remote.is_path_valid_repository(self.path):
226 if not self._remote.is_path_valid_repository(self.path):
225 raise VCSError(
227 raise VCSError(
226 'Path "%s" does not contain a Subversion repository' %
228 'Path "%s" does not contain a Subversion repository' %
227 (self.path, ))
229 (self.path, ))
228
230
229 @LazyProperty
231 @LazyProperty
230 def last_change(self):
232 def last_change(self):
231 """
233 """
232 Returns last change made on this repository as
234 Returns last change made on this repository as
233 `datetime.datetime` object.
235 `datetime.datetime` object.
234 """
236 """
235 # Subversion always has a first commit which has id "0" and contains
237 # Subversion always has a first commit which has id "0" and contains
236 # what we are looking for.
238 # what we are looking for.
237 last_id = len(self.commit_ids)
239 last_id = len(self.commit_ids)
238 properties = self._remote.revision_properties(last_id)
240 properties = self._remote.revision_properties(last_id)
239 return _date_from_svn_properties(properties)
241 return _date_from_svn_properties(properties)
240
242
241 @LazyProperty
243 @LazyProperty
242 def in_memory_commit(self):
244 def in_memory_commit(self):
243 return SubversionInMemoryCommit(self)
245 return SubversionInMemoryCommit(self)
244
246
245 def get_hook_location(self):
247 def get_hook_location(self):
246 """
248 """
247 returns absolute path to location where hooks are stored
249 returns absolute path to location where hooks are stored
248 """
250 """
249 return os.path.join(self.path, 'hooks')
251 return os.path.join(self.path, 'hooks')
250
252
251 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
253 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
252 if self.is_empty():
254 if self.is_empty():
253 raise EmptyRepositoryError("There are no commits yet")
255 raise EmptyRepositoryError("There are no commits yet")
254 if commit_id is not None:
256 if commit_id is not None:
255 self._validate_commit_id(commit_id)
257 self._validate_commit_id(commit_id)
256 elif commit_idx is not None:
258 elif commit_idx is not None:
257 self._validate_commit_idx(commit_idx)
259 self._validate_commit_idx(commit_idx)
258 try:
260 try:
259 commit_id = self.commit_ids[commit_idx]
261 commit_id = self.commit_ids[commit_idx]
260 except IndexError:
262 except IndexError:
261 raise CommitDoesNotExistError
263 raise CommitDoesNotExistError
262
264
263 commit_id = self._sanitize_commit_id(commit_id)
265 commit_id = self._sanitize_commit_id(commit_id)
264 commit = SubversionCommit(repository=self, commit_id=commit_id)
266 commit = SubversionCommit(repository=self, commit_id=commit_id)
265 return commit
267 return commit
266
268
267 def get_commits(
269 def get_commits(
268 self, start_id=None, end_id=None, start_date=None, end_date=None,
270 self, start_id=None, end_id=None, start_date=None, end_date=None,
269 branch_name=None, show_hidden=False, pre_load=None):
271 branch_name=None, show_hidden=False, pre_load=None):
270 if self.is_empty():
272 if self.is_empty():
271 raise EmptyRepositoryError("There are no commit_ids yet")
273 raise EmptyRepositoryError("There are no commit_ids yet")
272 self._validate_branch_name(branch_name)
274 self._validate_branch_name(branch_name)
273
275
274 if start_id is not None:
276 if start_id is not None:
275 self._validate_commit_id(start_id)
277 self._validate_commit_id(start_id)
276 if end_id is not None:
278 if end_id is not None:
277 self._validate_commit_id(end_id)
279 self._validate_commit_id(end_id)
278
280
279 start_raw_id = self._sanitize_commit_id(start_id)
281 start_raw_id = self._sanitize_commit_id(start_id)
280 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
282 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
281 end_raw_id = self._sanitize_commit_id(end_id)
283 end_raw_id = self._sanitize_commit_id(end_id)
282 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
284 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
283
285
284 if None not in [start_id, end_id] and start_pos > end_pos:
286 if None not in [start_id, end_id] and start_pos > end_pos:
285 raise RepositoryError(
287 raise RepositoryError(
286 "Start commit '%s' cannot be after end commit '%s'" %
288 "Start commit '%s' cannot be after end commit '%s'" %
287 (start_id, end_id))
289 (start_id, end_id))
288 if end_pos is not None:
290 if end_pos is not None:
289 end_pos += 1
291 end_pos += 1
290
292
291 # Date based filtering
293 # Date based filtering
292 if start_date or end_date:
294 if start_date or end_date:
293 start_raw_id, end_raw_id = self._remote.lookup_interval(
295 start_raw_id, end_raw_id = self._remote.lookup_interval(
294 date_astimestamp(start_date) if start_date else None,
296 date_astimestamp(start_date) if start_date else None,
295 date_astimestamp(end_date) if end_date else None)
297 date_astimestamp(end_date) if end_date else None)
296 start_pos = start_raw_id - 1
298 start_pos = start_raw_id - 1
297 end_pos = end_raw_id
299 end_pos = end_raw_id
298
300
299 commit_ids = self.commit_ids
301 commit_ids = self.commit_ids
300
302
301 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
303 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
302 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
304 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
303 svn_rev = long(self.commit_ids[-1])
305 svn_rev = long(self.commit_ids[-1])
304 commit_ids = self._remote.node_history(
306 commit_ids = self._remote.node_history(
305 path=branch_name, revision=svn_rev, limit=None)
307 path=branch_name, revision=svn_rev, limit=None)
306 commit_ids = [str(i) for i in reversed(commit_ids)]
308 commit_ids = [str(i) for i in reversed(commit_ids)]
307
309
308 if start_pos or end_pos:
310 if start_pos or end_pos:
309 commit_ids = commit_ids[start_pos:end_pos]
311 commit_ids = commit_ids[start_pos:end_pos]
310 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
312 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
311
313
312 def _sanitize_commit_id(self, commit_id):
314 def _sanitize_commit_id(self, commit_id):
313 if commit_id and commit_id.isdigit():
315 if commit_id and commit_id.isdigit():
314 if int(commit_id) <= len(self.commit_ids):
316 if int(commit_id) <= len(self.commit_ids):
315 return commit_id
317 return commit_id
316 else:
318 else:
317 raise CommitDoesNotExistError(
319 raise CommitDoesNotExistError(
318 "Commit %s does not exist." % (commit_id, ))
320 "Commit %s does not exist." % (commit_id, ))
319 if commit_id not in [
321 if commit_id not in [
320 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
322 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
321 raise CommitDoesNotExistError(
323 raise CommitDoesNotExistError(
322 "Commit id %s not understood." % (commit_id, ))
324 "Commit id %s not understood." % (commit_id, ))
323 svn_rev = self._remote.lookup('HEAD')
325 svn_rev = self._remote.lookup('HEAD')
324 return str(svn_rev)
326 return str(svn_rev)
325
327
326 def get_diff(
328 def get_diff(
327 self, commit1, commit2, path=None, ignore_whitespace=False,
329 self, commit1, commit2, path=None, ignore_whitespace=False,
328 context=3, path1=None):
330 context=3, path1=None):
329 self._validate_diff_commits(commit1, commit2)
331 self._validate_diff_commits(commit1, commit2)
330 svn_rev1 = long(commit1.raw_id)
332 svn_rev1 = long(commit1.raw_id)
331 svn_rev2 = long(commit2.raw_id)
333 svn_rev2 = long(commit2.raw_id)
332 diff = self._remote.diff(
334 diff = self._remote.diff(
333 svn_rev1, svn_rev2, path1=path1, path2=path,
335 svn_rev1, svn_rev2, path1=path1, path2=path,
334 ignore_whitespace=ignore_whitespace, context=context)
336 ignore_whitespace=ignore_whitespace, context=context)
335 return SubversionDiff(diff)
337 return SubversionDiff(diff)
336
338
337
339
338 def _sanitize_url(url):
340 def _sanitize_url(url):
339 if '://' not in url:
341 if '://' not in url:
340 url = 'file://' + urllib.pathname2url(url)
342 url = 'file://' + urllib.pathname2url(url)
341 return url
343 return url
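# Illustrative note, not part of this changeset: _sanitize_url() leaves real
# URLs untouched and turns bare filesystem paths into file:// URLs; the paths
# below are hypothetical, shown for a POSIX system:
#
#   _sanitize_url('/srv/svn/myrepo')             -> 'file:///srv/svn/myrepo'
#   _sanitize_url('http://svn.example.com/repo') -> 'http://svn.example.com/repo'
#
# check_url() applies the same conversion to existing local directories before
# asking the remote Svn backend to validate the URL.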
@@ -1,246 +1,253 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2018 RhodeCode GmbH
3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 This serves as a drop in replacement for pycurl. It implements the pycurl Curl
22 This serves as a drop in replacement for pycurl. It implements the pycurl Curl
23 class in a way that is compatible with gevent.
23 class in a way that is compatible with gevent.
24 """
24 """
25
25
26
26
27 import logging
27 import logging
28 import gevent
28 import gevent
29 import pycurl
29 import pycurl
30 import greenlet
30
31
31 # Import everything from pycurl.
32 # Import everything from pycurl.
32 # This allows us to use this module as a drop in replacement of pycurl.
33 # This allows us to use this module as a drop in replacement of pycurl.
33 from pycurl import * # noqa
34 from pycurl import * # noqa
34
35
35 from gevent import core
36 from gevent import core
36 from gevent.hub import Waiter
37 from gevent.hub import Waiter
37
38
38
39
39 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
40
41
41
42
42 class GeventCurlMulti(object):
43 class GeventCurlMulti(object):
43 """
44 """
44 Wrapper around pycurl.CurlMulti that integrates it into gevent's event
45 Wrapper around pycurl.CurlMulti that integrates it into gevent's event
45 loop.
46 loop.
46
47
47 Parts of this class are a modified version of code copied from the Tornado
48 Parts of this class are a modified version of code copied from the Tornado
48 Web Server project which is licensed under the Apache License, Version 2.0
49 Web Server project which is licensed under the Apache License, Version 2.0
49 (the "License"). To be more specific the code originates from this file:
50 (the "License"). To be more specific the code originates from this file:
50 https://github.com/tornadoweb/tornado/blob/stable/tornado/curl_httpclient.py
51 https://github.com/tornadoweb/tornado/blob/stable/tornado/curl_httpclient.py
51
52
52 This is the original license header of the origin:
53 This is the original license header of the origin:
53
54
54 Copyright 2009 Facebook
55 Copyright 2009 Facebook
55
56
56 Licensed under the Apache License, Version 2.0 (the "License"); you may
57 Licensed under the Apache License, Version 2.0 (the "License"); you may
57 not use this file except in compliance with the License. You may obtain
58 not use this file except in compliance with the License. You may obtain
58 a copy of the License at
59 a copy of the License at
59
60
60 http://www.apache.org/licenses/LICENSE-2.0
61 http://www.apache.org/licenses/LICENSE-2.0
61
62
62 Unless required by applicable law or agreed to in writing, software
63 Unless required by applicable law or agreed to in writing, software
63 distributed under the License is distributed on an "AS IS" BASIS,
64 distributed under the License is distributed on an "AS IS" BASIS,
64 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
65 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
65 implied. See the License for the specific language governing
66 implied. See the License for the specific language governing
66 permissions and limitations under the License.
67 permissions and limitations under the License.
67 """
68 """
68
69
69 def __init__(self, loop=None):
70 def __init__(self, loop=None):
70 self._watchers = {}
71 self._watchers = {}
71 self._timeout = None
72 self._timeout = None
72 self.loop = loop or gevent.get_hub().loop
73 self.loop = loop or gevent.get_hub().loop
73
74
74 # Setup curl's multi instance.
75 # Setup curl's multi instance.
75 self._curl_multi = pycurl.CurlMulti()
76 self._curl_multi = pycurl.CurlMulti()
76 self.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
77 self.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
77 self.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
78 self.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
78
79
79 def __getattr__(self, item):
80 def __getattr__(self, item):
80 """
81 """
81 The pycurl.CurlMulti class is final and we cannot subclass it.
82 The pycurl.CurlMulti class is final and we cannot subclass it.
82 Therefore we are wrapping it and forwarding everything to it here.
83 Therefore we are wrapping it and forwarding everything to it here.
83 """
84 """
84 return getattr(self._curl_multi, item)
85 return getattr(self._curl_multi, item)
85
86
86 def add_handle(self, curl):
87 def add_handle(self, curl):
87 """
88 """
88 Add handle variant that also takes care of the initial invocation of the
89 Add handle variant that also takes care of the initial invocation of the
89 socket action method. This is done by setting an immediate timeout.
90 socket action method. This is done by setting an immediate timeout.
90 """
91 """
91 result = self._curl_multi.add_handle(curl)
92 result = self._curl_multi.add_handle(curl)
92 self._set_timeout(0)
93 self._set_timeout(0)
93 return result
94 return result
94
95
95 def _handle_socket(self, event, fd, multi, data):
96 def _handle_socket(self, event, fd, multi, data):
96 """
97 """
97 Called by libcurl when it wants to change the file descriptors it cares
98 Called by libcurl when it wants to change the file descriptors it cares
98 about.
99 about.
99 """
100 """
100 event_map = {
101 event_map = {
101 pycurl.POLL_NONE: core.NONE,
102 pycurl.POLL_NONE: core.NONE,
102 pycurl.POLL_IN: core.READ,
103 pycurl.POLL_IN: core.READ,
103 pycurl.POLL_OUT: core.WRITE,
104 pycurl.POLL_OUT: core.WRITE,
104 pycurl.POLL_INOUT: core.READ | core.WRITE
105 pycurl.POLL_INOUT: core.READ | core.WRITE
105 }
106 }
106
107
107 if event == pycurl.POLL_REMOVE:
108 if event == pycurl.POLL_REMOVE:
108 watcher = self._watchers.pop(fd, None)
109 watcher = self._watchers.pop(fd, None)
109 if watcher is not None:
110 if watcher is not None:
110 watcher.stop()
111 watcher.stop()
111 else:
112 else:
112 gloop_event = event_map[event]
113 gloop_event = event_map[event]
113 watcher = self._watchers.get(fd)
114 watcher = self._watchers.get(fd)
114 if watcher is None:
115 if watcher is None:
115 watcher = self.loop.io(fd, gloop_event)
116 watcher = self.loop.io(fd, gloop_event)
116 watcher.start(self._handle_events, fd, pass_events=True)
117 watcher.start(self._handle_events, fd, pass_events=True)
117 self._watchers[fd] = watcher
118 self._watchers[fd] = watcher
118 else:
119 else:
119 if watcher.events != gloop_event:
120 if watcher.events != gloop_event:
120 watcher.stop()
121 watcher.stop()
121 watcher.events = gloop_event
122 watcher.events = gloop_event
122 watcher.start(self._handle_events, fd, pass_events=True)
123 watcher.start(self._handle_events, fd, pass_events=True)
123
124
124 def _set_timeout(self, msecs):
125 def _set_timeout(self, msecs):
125 """
126 """
126 Called by libcurl to schedule a timeout.
127 Called by libcurl to schedule a timeout.
127 """
128 """
128 if self._timeout is not None:
129 if self._timeout is not None:
129 self._timeout.stop()
130 self._timeout.stop()
130 self._timeout = self.loop.timer(msecs/1000.0)
131 self._timeout = self.loop.timer(msecs/1000.0)
131 self._timeout.start(self._handle_timeout)
132 self._timeout.start(self._handle_timeout)
132
133
133 def _handle_events(self, events, fd):
134 def _handle_events(self, events, fd):
134 action = 0
135 action = 0
135 if events & core.READ:
136 if events & core.READ:
136 action |= pycurl.CSELECT_IN
137 action |= pycurl.CSELECT_IN
137 if events & core.WRITE:
138 if events & core.WRITE:
138 action |= pycurl.CSELECT_OUT
139 action |= pycurl.CSELECT_OUT
139 while True:
140 while True:
140 try:
141 try:
141 ret, num_handles = self._curl_multi.socket_action(fd, action)
142 ret, num_handles = self._curl_multi.socket_action(fd, action)
142 except pycurl.error as e:
143 except pycurl.error as e:
143 ret = e.args[0]
144 ret = e.args[0]
144 if ret != pycurl.E_CALL_MULTI_PERFORM:
145 if ret != pycurl.E_CALL_MULTI_PERFORM:
145 break
146 break
146 self._finish_pending_requests()
147 self._finish_pending_requests()
147
148
148 def _handle_timeout(self):
149 def _handle_timeout(self):
149 """
150 """
150 Called by IOLoop when the requested timeout has passed.
151 Called by IOLoop when the requested timeout has passed.
151 """
152 """
152 if self._timeout is not None:
153 if self._timeout is not None:
153 self._timeout.stop()
154 self._timeout.stop()
154 self._timeout = None
155 self._timeout = None
155 while True:
156 while True:
156 try:
157 try:
157 ret, num_handles = self._curl_multi.socket_action(
158 ret, num_handles = self._curl_multi.socket_action(
158 pycurl.SOCKET_TIMEOUT, 0)
159 pycurl.SOCKET_TIMEOUT, 0)
159 except pycurl.error as e:
160 except pycurl.error as e:
160 ret = e.args[0]
161 ret = e.args[0]
161 if ret != pycurl.E_CALL_MULTI_PERFORM:
162 if ret != pycurl.E_CALL_MULTI_PERFORM:
162 break
163 break
163 self._finish_pending_requests()
164 self._finish_pending_requests()
164
165
165 # In theory, we shouldn't have to do this because curl will call
166 # In theory, we shouldn't have to do this because curl will call
166 # _set_timeout whenever the timeout changes. However, sometimes after
167 # _set_timeout whenever the timeout changes. However, sometimes after
167 # _handle_timeout we will need to reschedule immediately even though
168 # _handle_timeout we will need to reschedule immediately even though
168 # nothing has changed from curl's perspective. This is because when
169 # nothing has changed from curl's perspective. This is because when
169 # socket_action is called with SOCKET_TIMEOUT, libcurl decides
170 # socket_action is called with SOCKET_TIMEOUT, libcurl decides
170 # internally which timeouts need to be processed by using a monotonic
171 # internally which timeouts need to be processed by using a monotonic
171 # clock (where available) while tornado uses python's time.time() to
172 # clock (where available) while tornado uses python's time.time() to
172 # decide when timeouts have occurred. When those clocks disagree on
173 # decide when timeouts have occurred. When those clocks disagree on
173 # elapsed time (as they will whenever there is an NTP adjustment),
174 # elapsed time (as they will whenever there is an NTP adjustment),
174 # tornado might call _handle_timeout before libcurl is ready. After
175 # tornado might call _handle_timeout before libcurl is ready. After
175 # each timeout, resync the scheduled timeout with libcurl's current
176 # each timeout, resync the scheduled timeout with libcurl's current
176 # state.
177 # state.
177 new_timeout = self._curl_multi.timeout()
178 new_timeout = self._curl_multi.timeout()
178 if new_timeout >= 0:
179 if new_timeout >= 0:
179 self._set_timeout(new_timeout)
180 self._set_timeout(new_timeout)
180
181
181 def _finish_pending_requests(self):
182 def _finish_pending_requests(self):
182 """
183 """
183 Process any requests that were completed by the last call to
184 Process any requests that were completed by the last call to
184 multi.socket_action.
185 multi.socket_action.
185 """
186 """
186 while True:
187 while True:
187 num_q, ok_list, err_list = self._curl_multi.info_read()
188 num_q, ok_list, err_list = self._curl_multi.info_read()
188 for curl in ok_list:
189 for curl in ok_list:
189 curl.waiter.switch(None)
190 curl.waiter.switch(None)
190 for curl, errnum, errmsg in err_list:
191 for curl, errnum, errmsg in err_list:
191 curl.waiter.throw(Exception('%s %s' % (errnum, errmsg)))
192 curl.waiter.throw(Exception('%s %s' % (errnum, errmsg)))
192 if num_q == 0:
193 if num_q == 0:
193 break
194 break
194
195
195
196
196 class GeventCurl(object):
197 class GeventCurl(object):
197 """
198 """
198 Gevent compatible implementation of the pycurl.Curl class. Essentially a
199 Gevent compatible implementation of the pycurl.Curl class. Essentially a
199 wrapper around pycurl.Curl with a customized perform method. It uses the
200 wrapper around pycurl.Curl with a customized perform method. It uses the
200 GeventCurlMulti class to implement a blocking API to libcurl's "easy"
201 GeventCurlMulti class to implement a blocking API to libcurl's "easy"
201 interface.
202 interface.
202 """
203 """
203
204
204 # Reference to the GeventCurlMulti instance.
205 # Reference to the GeventCurlMulti instance.
205 _multi_instance = None
206 _multi_instance = None
206
207
207 def __init__(self):
208 def __init__(self):
208 self._curl = pycurl.Curl()
209 self._curl = pycurl.Curl()
209
210
210 def __getattr__(self, item):
211 def __getattr__(self, item):
211 """
212 """
212 The pycurl.Curl class is final and we cannot subclass it. Therefore we
213 The pycurl.Curl class is final and we cannot subclass it. Therefore we
213 are wrapping it and forwarding everything to it here.
214 are wrapping it and forwarding everything to it here.
214 """
215 """
215 return getattr(self._curl, item)
216 return getattr(self._curl, item)
216
217
217 @property
218 @property
218 def _multi(self):
219 def _multi(self):
219 """
220 """
220 Lazy property that returns the GeventCurlMulti instance. The value is
221 Lazy property that returns the GeventCurlMulti instance. The value is
221 cached as a class attribute. Therefore only one instance per process
222 cached as a class attribute. Therefore only one instance per process
222 exists.
223 exists.
223 """
224 """
224 if GeventCurl._multi_instance is None:
225 if GeventCurl._multi_instance is None:
225 GeventCurl._multi_instance = GeventCurlMulti()
226 GeventCurl._multi_instance = GeventCurlMulti()
226 return GeventCurl._multi_instance
227 return GeventCurl._multi_instance
227
228
228 def perform(self):
229 def perform(self):
229 """
230 """
230 This perform method is compatible with gevent because it uses gevent
231 This perform method is compatible with gevent because it uses gevent
231 synchronization mechanisms to wait for the request to finish.
232 synchronization mechanisms to wait for the request to finish.
232 """
233 """
234 if getattr(self._curl, 'waiter', None) is not None:
235 current = greenlet.getcurrent()
236 msg = 'This curl object is already used by another greenlet, {}, \n' \
237 'this is {}'.format(self._curl.waiter, current)
238 raise Exception(msg)
239
233 waiter = self._curl.waiter = Waiter()
240 waiter = self._curl.waiter = Waiter()
234 try:
241 try:
235 self._multi.add_handle(self._curl)
242 self._multi.add_handle(self._curl)
236 try:
243 try:
237 return waiter.get()
244 return waiter.get()
238 finally:
245 finally:
239 self._multi.remove_handle(self._curl)
246 self._multi.remove_handle(self._curl)
240 finally:
247 finally:
241 del self._curl.waiter
248 del self._curl.waiter
242
249
243
250
244 # Curl is originally imported from pycurl. At this point we override it with
251 # Curl is originally imported from pycurl. At this point we override it with
245 # our custom implementation.
252 # our custom implementation.
246 Curl = GeventCurl
253 Curl = GeventCurl
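# A minimal usage sketch, not part of this changeset. It assumes this module is
# importable as `gevent_pycurl`; the real import path is not shown in this diff.
# Because `Curl` is overridden above, code written against pycurl keeps working,
# but perform() now yields to the gevent hub instead of blocking the process.
#
#   import io
#   import gevent
#   import gevent_pycurl as pycurl  # assumption: module path of the file above
#
#   def fetch(url):
#       buf = io.BytesIO()
#       curl = pycurl.Curl()                      # actually a GeventCurl
#       curl.setopt(pycurl.URL, url)
#       curl.setopt(pycurl.WRITEFUNCTION, buf.write)
#       curl.perform()                            # waits on a gevent Waiter
#       return buf.getvalue()
#
#   # Several fetches can now run concurrently inside one process:
#   jobs = [gevent.spawn(fetch, 'http://localhost:9900/ping') for _ in range(3)]
#   gevent.joinall(jobs)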
@@ -1,4511 +1,4514 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Database Models for RhodeCode Enterprise
22 Database Models for RhodeCode Enterprise
23 """
23 """
24
24
25 import re
25 import re
26 import os
26 import os
27 import time
27 import time
28 import hashlib
28 import hashlib
29 import logging
29 import logging
30 import datetime
30 import datetime
31 import warnings
31 import warnings
32 import ipaddress
32 import ipaddress
33 import functools
33 import functools
34 import traceback
34 import traceback
35 import collections
35 import collections
36
36
37 from sqlalchemy import (
37 from sqlalchemy import (
38 or_, and_, not_, func, TypeDecorator, event,
38 or_, and_, not_, func, TypeDecorator, event,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 Text, Float, PickleType)
41 Text, Float, PickleType)
42 from sqlalchemy.sql.expression import true, false
42 from sqlalchemy.sql.expression import true, false
43 from sqlalchemy.sql.functions import coalesce, count # noqa
43 from sqlalchemy.sql.functions import coalesce, count # noqa
44 from sqlalchemy.orm import (
44 from sqlalchemy.orm import (
45 relationship, joinedload, class_mapper, validates, aliased)
45 relationship, joinedload, class_mapper, validates, aliased)
46 from sqlalchemy.ext.declarative import declared_attr
46 from sqlalchemy.ext.declarative import declared_attr
47 from sqlalchemy.ext.hybrid import hybrid_property
47 from sqlalchemy.ext.hybrid import hybrid_property
48 from sqlalchemy.exc import IntegrityError # noqa
48 from sqlalchemy.exc import IntegrityError # noqa
49 from sqlalchemy.dialects.mysql import LONGTEXT
49 from sqlalchemy.dialects.mysql import LONGTEXT
50 from zope.cachedescriptors.property import Lazy as LazyProperty
50 from zope.cachedescriptors.property import Lazy as LazyProperty
51
51
52 from pyramid.threadlocal import get_current_request
52 from pyramid.threadlocal import get_current_request
53
53
54 from rhodecode.translation import _
54 from rhodecode.translation import _
55 from rhodecode.lib.vcs import get_vcs_instance
55 from rhodecode.lib.vcs import get_vcs_instance
56 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
56 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
57 from rhodecode.lib.utils2 import (
57 from rhodecode.lib.utils2 import (
58 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
58 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
60 glob2re, StrictAttributeDict, cleaned_uri)
60 glob2re, StrictAttributeDict, cleaned_uri)
61 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
61 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
62 JsonRaw
62 JsonRaw
63 from rhodecode.lib.ext_json import json
63 from rhodecode.lib.ext_json import json
64 from rhodecode.lib.caching_query import FromCache
64 from rhodecode.lib.caching_query import FromCache
65 from rhodecode.lib.encrypt import AESCipher
65 from rhodecode.lib.encrypt import AESCipher
66
66
67 from rhodecode.model.meta import Base, Session
67 from rhodecode.model.meta import Base, Session
68
68
69 URL_SEP = '/'
69 URL_SEP = '/'
70 log = logging.getLogger(__name__)
70 log = logging.getLogger(__name__)
71
71
72 # =============================================================================
72 # =============================================================================
73 # BASE CLASSES
73 # BASE CLASSES
74 # =============================================================================
74 # =============================================================================
75
75
76 # this is propagated from .ini file rhodecode.encrypted_values.secret or
76 # this is propagated from .ini file rhodecode.encrypted_values.secret or
77 # beaker.session.secret if first is not set.
77 # beaker.session.secret if first is not set.
78 # and initialized at environment.py
78 # and initialized at environment.py
79 ENCRYPTION_KEY = None
79 ENCRYPTION_KEY = None
80
80
81 # used to sort permissions by types, '#' used here is not allowed to be in
81 # used to sort permissions by types, '#' used here is not allowed to be in
82 # usernames, and it's very early in sorted string.printable table.
82 # usernames, and it's very early in sorted string.printable table.
83 PERMISSION_TYPE_SORT = {
83 PERMISSION_TYPE_SORT = {
84 'admin': '####',
84 'admin': '####',
85 'write': '###',
85 'write': '###',
86 'read': '##',
86 'read': '##',
87 'none': '#',
87 'none': '#',
88 }
88 }
89
89
90
90
91 def display_user_sort(obj):
91 def display_user_sort(obj):
92 """
92 """
93 Sort function used to sort permissions in .permissions() function of
93 Sort function used to sort permissions in .permissions() function of
94 Repository, RepoGroup, UserGroup. Also it puts the default user in front
94 Repository, RepoGroup, UserGroup. Also it puts the default user in front
95 of all other resources
95 of all other resources
96 """
96 """
97
97
98 if obj.username == User.DEFAULT_USER:
98 if obj.username == User.DEFAULT_USER:
99 return '#####'
99 return '#####'
100 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
100 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
101 return prefix + obj.username
101 return prefix + obj.username
102
102
103
103
104 def display_user_group_sort(obj):
104 def display_user_group_sort(obj):
105 """
105 """
106 Sort function used to sort permissions in .permissions() function of
106 Sort function used to sort permissions in .permissions() function of
107 Repository, RepoGroup, UserGroup. Also it puts the default user in front
107 Repository, RepoGroup, UserGroup. Also it puts the default user in front
108 of all other resources
108 of all other resources
109 """
109 """
110
110
111 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
111 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
112 return prefix + obj.users_group_name
112 return prefix + obj.users_group_name
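# Illustrative note, not part of this changeset: with the '#' prefixes above a
# plain sorted() over the generated keys yields (usernames are hypothetical):
#
#   '#####'        default user, always first
#   '####alice'    admin
#   '###bob'       write
#   '##carol'      read
#   '#dave'        none
#
# '#' sorts before every character allowed in usernames, so more '#' characters
# means higher in the list.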
113
113
114
114
115 def _hash_key(k):
115 def _hash_key(k):
116 return sha1_safe(k)
116 return sha1_safe(k)
117
117
118
118
119 def in_filter_generator(qry, items, limit=500):
119 def in_filter_generator(qry, items, limit=500):
120 """
120 """
121 Splits IN() into multiple with OR
121 Splits IN() into multiple with OR
122 e.g.::
122 e.g.::
123 cnt = Repository.query().filter(
123 cnt = Repository.query().filter(
124 or_(
124 or_(
125 *in_filter_generator(Repository.repo_id, range(100000))
125 *in_filter_generator(Repository.repo_id, range(100000))
126 )).count()
126 )).count()
127 """
127 """
128 if not items:
128 if not items:
129 # empty list will cause empty query which might cause security issues
129 # empty list will cause empty query which might cause security issues
130 # this can lead to hidden unpleasant results
130 # this can lead to hidden unpleasant results
131 items = [-1]
131 items = [-1]
132
132
133 parts = []
133 parts = []
134 for chunk in xrange(0, len(items), limit):
134 for chunk in xrange(0, len(items), limit):
135 parts.append(
135 parts.append(
136 qry.in_(items[chunk: chunk + limit])
136 qry.in_(items[chunk: chunk + limit])
137 )
137 )
138
138
139 return parts
139 return parts
140
140
141
141
142 base_table_args = {
142 base_table_args = {
143 'extend_existing': True,
143 'extend_existing': True,
144 'mysql_engine': 'InnoDB',
144 'mysql_engine': 'InnoDB',
145 'mysql_charset': 'utf8',
145 'mysql_charset': 'utf8',
146 'sqlite_autoincrement': True
146 'sqlite_autoincrement': True
147 }
147 }
148
148
149
149
150 class EncryptedTextValue(TypeDecorator):
150 class EncryptedTextValue(TypeDecorator):
151 """
151 """
152 Special column for encrypted long text data, use like::
152 Special column for encrypted long text data, use like::
153
153
154 value = Column("encrypted_value", EncryptedValue(), nullable=False)
154 value = Column("encrypted_value", EncryptedValue(), nullable=False)
155
155
156 This column is intelligent: if the value is in unencrypted form it returns the
156 This column is intelligent: if the value is in unencrypted form it returns the
157 unencrypted form, but on save it always encrypts
157 unencrypted form, but on save it always encrypts
158 """
158 """
159 impl = Text
159 impl = Text
160
160
161 def process_bind_param(self, value, dialect):
161 def process_bind_param(self, value, dialect):
162 if not value:
162 if not value:
163 return value
163 return value
164 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
164 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
165 # protect against double encrypting if someone manually starts
165 # protect against double encrypting if someone manually starts
166 # doing it
166 # doing it
167 raise ValueError('value needs to be in unencrypted format, ie. '
167 raise ValueError('value needs to be in unencrypted format, ie. '
168 'not starting with enc$aes')
168 'not starting with enc$aes')
169 return 'enc$aes_hmac$%s' % AESCipher(
169 return 'enc$aes_hmac$%s' % AESCipher(
170 ENCRYPTION_KEY, hmac=True).encrypt(value)
170 ENCRYPTION_KEY, hmac=True).encrypt(value)
171
171
172 def process_result_value(self, value, dialect):
172 def process_result_value(self, value, dialect):
173 import rhodecode
173 import rhodecode
174
174
175 if not value:
175 if not value:
176 return value
176 return value
177
177
178 parts = value.split('$', 3)
178 parts = value.split('$', 3)
179 if not len(parts) == 3:
179 if not len(parts) == 3:
180 # probably not encrypted values
180 # probably not encrypted values
181 return value
181 return value
182 else:
182 else:
183 if parts[0] != 'enc':
183 if parts[0] != 'enc':
184 # parts ok but without our header ?
184 # parts ok but without our header ?
185 return value
185 return value
186 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
186 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
187 'rhodecode.encrypted_values.strict') or True)
187 'rhodecode.encrypted_values.strict') or True)
188 # at that stage we know it's our encryption
188 # at that stage we know it's our encryption
189 if parts[1] == 'aes':
189 if parts[1] == 'aes':
190 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
190 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
191 elif parts[1] == 'aes_hmac':
191 elif parts[1] == 'aes_hmac':
192 decrypted_data = AESCipher(
192 decrypted_data = AESCipher(
193 ENCRYPTION_KEY, hmac=True,
193 ENCRYPTION_KEY, hmac=True,
194 strict_verification=enc_strict_mode).decrypt(parts[2])
194 strict_verification=enc_strict_mode).decrypt(parts[2])
195 else:
195 else:
196 raise ValueError(
196 raise ValueError(
197 'Encryption type part is wrong, must be `aes` '
197 'Encryption type part is wrong, must be `aes` '
198 'or `aes_hmac`, got `%s` instead' % (parts[1]))
198 'or `aes_hmac`, got `%s` instead' % (parts[1]))
199 return decrypted_data
199 return decrypted_data
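# Illustrative round trip for the column type above (a sketch, not part of
# the schema): process_bind_param() refuses values that already carry an
# enc$aes prefix and always writes the HMAC variant, while
# process_result_value() passes plain values through and decrypts
# enc$aes$ / enc$aes_hmac$ payloads.
#
#   col = EncryptedTextValue()
#   stored = col.process_bind_param(u'secret', None)    # -> 'enc$aes_hmac$...'
#   plain = col.process_result_value(stored, None)       # -> u'secret'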
200
200
201
201
202 class BaseModel(object):
202 class BaseModel(object):
203 """
203 """
204 Base Model for all classes
204 Base Model for all classes
205 """
205 """
206
206
207 @classmethod
207 @classmethod
208 def _get_keys(cls):
208 def _get_keys(cls):
209 """return column names for this model """
209 """return column names for this model """
210 return class_mapper(cls).c.keys()
210 return class_mapper(cls).c.keys()
211
211
212 def get_dict(self):
212 def get_dict(self):
213 """
213 """
214 return dict with keys and values corresponding
214 return dict with keys and values corresponding
215 to this model data """
215 to this model data """
216
216
217 d = {}
217 d = {}
218 for k in self._get_keys():
218 for k in self._get_keys():
219 d[k] = getattr(self, k)
219 d[k] = getattr(self, k)
220
220
221 # also use __json__() if present to get additional fields
221 # also use __json__() if present to get additional fields
222 _json_attr = getattr(self, '__json__', None)
222 _json_attr = getattr(self, '__json__', None)
223 if _json_attr:
223 if _json_attr:
224 # update with attributes from __json__
224 # update with attributes from __json__
225 if callable(_json_attr):
225 if callable(_json_attr):
226 _json_attr = _json_attr()
226 _json_attr = _json_attr()
227 for k, val in _json_attr.iteritems():
227 for k, val in _json_attr.iteritems():
228 d[k] = val
228 d[k] = val
229 return d
229 return d
230
230
231 def get_appstruct(self):
231 def get_appstruct(self):
232 """return list with keys and values tuples corresponding
232 """return list with keys and values tuples corresponding
233 to this model data """
233 to this model data """
234
234
235 lst = []
235 lst = []
236 for k in self._get_keys():
236 for k in self._get_keys():
237 lst.append((k, getattr(self, k),))
237 lst.append((k, getattr(self, k),))
238 return lst
238 return lst
239
239
240 def populate_obj(self, populate_dict):
240 def populate_obj(self, populate_dict):
241 """populate model with data from given populate_dict"""
241 """populate model with data from given populate_dict"""
242
242
243 for k in self._get_keys():
243 for k in self._get_keys():
244 if k in populate_dict:
244 if k in populate_dict:
245 setattr(self, k, populate_dict[k])
245 setattr(self, k, populate_dict[k])
246
246
247 @classmethod
247 @classmethod
248 def query(cls):
248 def query(cls):
249 return Session().query(cls)
249 return Session().query(cls)
250
250
251 @classmethod
251 @classmethod
252 def get(cls, id_):
252 def get(cls, id_):
253 if id_:
253 if id_:
254 return cls.query().get(id_)
254 return cls.query().get(id_)
255
255
256 @classmethod
256 @classmethod
257 def get_or_404(cls, id_):
257 def get_or_404(cls, id_):
258 from pyramid.httpexceptions import HTTPNotFound
258 from pyramid.httpexceptions import HTTPNotFound
259
259
260 try:
260 try:
261 id_ = int(id_)
261 id_ = int(id_)
262 except (TypeError, ValueError):
262 except (TypeError, ValueError):
263 raise HTTPNotFound()
263 raise HTTPNotFound()
264
264
265 res = cls.query().get(id_)
265 res = cls.query().get(id_)
266 if not res:
266 if not res:
267 raise HTTPNotFound()
267 raise HTTPNotFound()
268 return res
268 return res
269
269
270 @classmethod
270 @classmethod
271 def getAll(cls):
271 def getAll(cls):
272 # deprecated and left for backward compatibility
272 # deprecated and left for backward compatibility
273 return cls.get_all()
273 return cls.get_all()
274
274
275 @classmethod
275 @classmethod
276 def get_all(cls):
276 def get_all(cls):
277 return cls.query().all()
277 return cls.query().all()
278
278
279 @classmethod
279 @classmethod
280 def delete(cls, id_):
280 def delete(cls, id_):
281 obj = cls.query().get(id_)
281 obj = cls.query().get(id_)
282 Session().delete(obj)
282 Session().delete(obj)
283
283
284 @classmethod
284 @classmethod
285 def identity_cache(cls, session, attr_name, value):
285 def identity_cache(cls, session, attr_name, value):
286 exist_in_session = []
286 exist_in_session = []
287 for (item_cls, pkey), instance in session.identity_map.items():
287 for (item_cls, pkey), instance in session.identity_map.items():
288 if cls == item_cls and getattr(instance, attr_name) == value:
288 if cls == item_cls and getattr(instance, attr_name) == value:
289 exist_in_session.append(instance)
289 exist_in_session.append(instance)
290 if exist_in_session:
290 if exist_in_session:
291 if len(exist_in_session) == 1:
291 if len(exist_in_session) == 1:
292 return exist_in_session[0]
292 return exist_in_session[0]
293 log.exception(
293 log.exception(
294 'multiple objects with attr %s and '
294 'multiple objects with attr %s and '
295 'value %s found with same name: %r',
295 'value %s found with same name: %r',
296 attr_name, value, exist_in_session)
296 attr_name, value, exist_in_session)
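# Usage sketch (the attribute/value pair is only illustrative): identity_cache()
# scans the SQLAlchemy identity map so an already-loaded instance can be reused
# without another query; multiple hits are logged because they point at
# inconsistent session state.
#
#   cached_user = User.identity_cache(Session(), 'username', 'admin')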
297
297
298 def __repr__(self):
298 def __repr__(self):
299 if hasattr(self, '__unicode__'):
299 if hasattr(self, '__unicode__'):
300 # python repr needs to return str
300 # python repr needs to return str
301 try:
301 try:
302 return safe_str(self.__unicode__())
302 return safe_str(self.__unicode__())
303 except UnicodeDecodeError:
303 except UnicodeDecodeError:
304 pass
304 pass
305 return '<DB:%s>' % (self.__class__.__name__)
305 return '<DB:%s>' % (self.__class__.__name__)
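# A short usage sketch for the helpers above (model and keys are only
# placeholders): get_or_404() turns a bad or missing id into HTTPNotFound,
# get_dict() merges mapped columns with any __json__() extras, and
# populate_obj() copies only keys that are real columns.
#
#   repo = Repository.get_or_404(repo_id)
#   data = repo.get_dict()
#   repo.populate_obj({'repo_name': 'renamed', 'not_a_column': 'ignored'})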
306
306
307
307
308 class RhodeCodeSetting(Base, BaseModel):
308 class RhodeCodeSetting(Base, BaseModel):
309 __tablename__ = 'rhodecode_settings'
309 __tablename__ = 'rhodecode_settings'
310 __table_args__ = (
310 __table_args__ = (
311 UniqueConstraint('app_settings_name'),
311 UniqueConstraint('app_settings_name'),
312 base_table_args
312 base_table_args
313 )
313 )
314
314
315 SETTINGS_TYPES = {
315 SETTINGS_TYPES = {
316 'str': safe_str,
316 'str': safe_str,
317 'int': safe_int,
317 'int': safe_int,
318 'unicode': safe_unicode,
318 'unicode': safe_unicode,
319 'bool': str2bool,
319 'bool': str2bool,
320 'list': functools.partial(aslist, sep=',')
320 'list': functools.partial(aslist, sep=',')
321 }
321 }
322 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
322 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
323 GLOBAL_CONF_KEY = 'app_settings'
323 GLOBAL_CONF_KEY = 'app_settings'
324
324
325 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
325 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
326 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
326 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
327 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
327 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
328 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
328 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
329
329
330 def __init__(self, key='', val='', type='unicode'):
330 def __init__(self, key='', val='', type='unicode'):
331 self.app_settings_name = key
331 self.app_settings_name = key
332 self.app_settings_type = type
332 self.app_settings_type = type
333 self.app_settings_value = val
333 self.app_settings_value = val
334
334
335 @validates('_app_settings_value')
335 @validates('_app_settings_value')
336 def validate_settings_value(self, key, val):
336 def validate_settings_value(self, key, val):
337 assert type(val) == unicode
337 assert type(val) == unicode
338 return val
338 return val
339
339
340 @hybrid_property
340 @hybrid_property
341 def app_settings_value(self):
341 def app_settings_value(self):
342 v = self._app_settings_value
342 v = self._app_settings_value
343 _type = self.app_settings_type
343 _type = self.app_settings_type
344 if _type:
344 if _type:
345 _type = self.app_settings_type.split('.')[0]
345 _type = self.app_settings_type.split('.')[0]
346 # decode the encrypted value
346 # decode the encrypted value
347 if 'encrypted' in self.app_settings_type:
347 if 'encrypted' in self.app_settings_type:
348 cipher = EncryptedTextValue()
348 cipher = EncryptedTextValue()
349 v = safe_unicode(cipher.process_result_value(v, None))
349 v = safe_unicode(cipher.process_result_value(v, None))
350
350
351 converter = self.SETTINGS_TYPES.get(_type) or \
351 converter = self.SETTINGS_TYPES.get(_type) or \
352 self.SETTINGS_TYPES['unicode']
352 self.SETTINGS_TYPES['unicode']
353 return converter(v)
353 return converter(v)
354
354
355 @app_settings_value.setter
355 @app_settings_value.setter
356 def app_settings_value(self, val):
356 def app_settings_value(self, val):
357 """
357 """
358 Setter that will always make sure we use unicode in app_settings_value
358 Setter that will always make sure we use unicode in app_settings_value
359
359
360 :param val:
360 :param val:
361 """
361 """
362 val = safe_unicode(val)
362 val = safe_unicode(val)
363 # encode the encrypted value
363 # encode the encrypted value
364 if 'encrypted' in self.app_settings_type:
364 if 'encrypted' in self.app_settings_type:
365 cipher = EncryptedTextValue()
365 cipher = EncryptedTextValue()
366 val = safe_unicode(cipher.process_bind_param(val, None))
366 val = safe_unicode(cipher.process_bind_param(val, None))
367 self._app_settings_value = val
367 self._app_settings_value = val
368
368
369 @hybrid_property
369 @hybrid_property
370 def app_settings_type(self):
370 def app_settings_type(self):
371 return self._app_settings_type
371 return self._app_settings_type
372
372
373 @app_settings_type.setter
373 @app_settings_type.setter
374 def app_settings_type(self, val):
374 def app_settings_type(self, val):
375 if val.split('.')[0] not in self.SETTINGS_TYPES:
375 if val.split('.')[0] not in self.SETTINGS_TYPES:
376 raise Exception('type must be one of %s got %s'
376 raise Exception('type must be one of %s got %s'
377 % (self.SETTINGS_TYPES.keys(), val))
377 % (self.SETTINGS_TYPES.keys(), val))
378 self._app_settings_type = val
378 self._app_settings_type = val
379
379
380 def __unicode__(self):
380 def __unicode__(self):
381 return u"<%s('%s:%s[%s]')>" % (
381 return u"<%s('%s:%s[%s]')>" % (
382 self.__class__.__name__,
382 self.__class__.__name__,
383 self.app_settings_name, self.app_settings_value,
383 self.app_settings_name, self.app_settings_value,
384 self.app_settings_type
384 self.app_settings_type
385 )
385 )
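# Sketch of how typed settings round-trip (setting names below are only
# illustrative): the part of `type` before the first '.' selects the
# SETTINGS_TYPES converter, and an '.encrypted' suffix routes the raw value
# through EncryptedTextValue on both read and write.
#
#   s = RhodeCodeSetting('items_per_page', '100', type='int')
#   s.app_settings_value                  # -> 100 (int)
#   s = RhodeCodeSetting('secret_token', 'abc', type='unicode.encrypted')
#   s._app_settings_value                 # stored as 'enc$aes_hmac$...'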
386
386
387
387
388 class RhodeCodeUi(Base, BaseModel):
388 class RhodeCodeUi(Base, BaseModel):
389 __tablename__ = 'rhodecode_ui'
389 __tablename__ = 'rhodecode_ui'
390 __table_args__ = (
390 __table_args__ = (
391 UniqueConstraint('ui_key'),
391 UniqueConstraint('ui_key'),
392 base_table_args
392 base_table_args
393 )
393 )
394
394
395 HOOK_REPO_SIZE = 'changegroup.repo_size'
395 HOOK_REPO_SIZE = 'changegroup.repo_size'
396 # HG
396 # HG
397 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
397 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
398 HOOK_PULL = 'outgoing.pull_logger'
398 HOOK_PULL = 'outgoing.pull_logger'
399 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
399 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
400 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
400 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
401 HOOK_PUSH = 'changegroup.push_logger'
401 HOOK_PUSH = 'changegroup.push_logger'
402 HOOK_PUSH_KEY = 'pushkey.key_push'
402 HOOK_PUSH_KEY = 'pushkey.key_push'
403
403
404 # TODO: johbo: Unify way how hooks are configured for git and hg,
404 # TODO: johbo: Unify way how hooks are configured for git and hg,
405 # git part is currently hardcoded.
405 # git part is currently hardcoded.
406
406
407 # SVN PATTERNS
407 # SVN PATTERNS
408 SVN_BRANCH_ID = 'vcs_svn_branch'
408 SVN_BRANCH_ID = 'vcs_svn_branch'
409 SVN_TAG_ID = 'vcs_svn_tag'
409 SVN_TAG_ID = 'vcs_svn_tag'
410
410
411 ui_id = Column(
411 ui_id = Column(
412 "ui_id", Integer(), nullable=False, unique=True, default=None,
412 "ui_id", Integer(), nullable=False, unique=True, default=None,
413 primary_key=True)
413 primary_key=True)
414 ui_section = Column(
414 ui_section = Column(
415 "ui_section", String(255), nullable=True, unique=None, default=None)
415 "ui_section", String(255), nullable=True, unique=None, default=None)
416 ui_key = Column(
416 ui_key = Column(
417 "ui_key", String(255), nullable=True, unique=None, default=None)
417 "ui_key", String(255), nullable=True, unique=None, default=None)
418 ui_value = Column(
418 ui_value = Column(
419 "ui_value", String(255), nullable=True, unique=None, default=None)
419 "ui_value", String(255), nullable=True, unique=None, default=None)
420 ui_active = Column(
420 ui_active = Column(
421 "ui_active", Boolean(), nullable=True, unique=None, default=True)
421 "ui_active", Boolean(), nullable=True, unique=None, default=True)
422
422
423 def __repr__(self):
423 def __repr__(self):
424         return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.ui_section,
424         return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.ui_section,
425 self.ui_key, self.ui_value)
425 self.ui_key, self.ui_value)
426
426
427
427
428 class RepoRhodeCodeSetting(Base, BaseModel):
428 class RepoRhodeCodeSetting(Base, BaseModel):
429 __tablename__ = 'repo_rhodecode_settings'
429 __tablename__ = 'repo_rhodecode_settings'
430 __table_args__ = (
430 __table_args__ = (
431 UniqueConstraint(
431 UniqueConstraint(
432 'app_settings_name', 'repository_id',
432 'app_settings_name', 'repository_id',
433 name='uq_repo_rhodecode_setting_name_repo_id'),
433 name='uq_repo_rhodecode_setting_name_repo_id'),
434 base_table_args
434 base_table_args
435 )
435 )
436
436
437 repository_id = Column(
437 repository_id = Column(
438 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
438 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
439 nullable=False)
439 nullable=False)
440 app_settings_id = Column(
440 app_settings_id = Column(
441 "app_settings_id", Integer(), nullable=False, unique=True,
441 "app_settings_id", Integer(), nullable=False, unique=True,
442 default=None, primary_key=True)
442 default=None, primary_key=True)
443 app_settings_name = Column(
443 app_settings_name = Column(
444 "app_settings_name", String(255), nullable=True, unique=None,
444 "app_settings_name", String(255), nullable=True, unique=None,
445 default=None)
445 default=None)
446 _app_settings_value = Column(
446 _app_settings_value = Column(
447 "app_settings_value", String(4096), nullable=True, unique=None,
447 "app_settings_value", String(4096), nullable=True, unique=None,
448 default=None)
448 default=None)
449 _app_settings_type = Column(
449 _app_settings_type = Column(
450 "app_settings_type", String(255), nullable=True, unique=None,
450 "app_settings_type", String(255), nullable=True, unique=None,
451 default=None)
451 default=None)
452
452
453 repository = relationship('Repository')
453 repository = relationship('Repository')
454
454
455 def __init__(self, repository_id, key='', val='', type='unicode'):
455 def __init__(self, repository_id, key='', val='', type='unicode'):
456 self.repository_id = repository_id
456 self.repository_id = repository_id
457 self.app_settings_name = key
457 self.app_settings_name = key
458 self.app_settings_type = type
458 self.app_settings_type = type
459 self.app_settings_value = val
459 self.app_settings_value = val
460
460
461 @validates('_app_settings_value')
461 @validates('_app_settings_value')
462 def validate_settings_value(self, key, val):
462 def validate_settings_value(self, key, val):
463 assert type(val) == unicode
463 assert type(val) == unicode
464 return val
464 return val
465
465
466 @hybrid_property
466 @hybrid_property
467 def app_settings_value(self):
467 def app_settings_value(self):
468 v = self._app_settings_value
468 v = self._app_settings_value
469 type_ = self.app_settings_type
469 type_ = self.app_settings_type
470 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
470 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
471 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
471 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
472 return converter(v)
472 return converter(v)
473
473
474 @app_settings_value.setter
474 @app_settings_value.setter
475 def app_settings_value(self, val):
475 def app_settings_value(self, val):
476 """
476 """
477 Setter that will always make sure we use unicode in app_settings_value
477 Setter that will always make sure we use unicode in app_settings_value
478
478
479 :param val:
479 :param val:
480 """
480 """
481 self._app_settings_value = safe_unicode(val)
481 self._app_settings_value = safe_unicode(val)
482
482
483 @hybrid_property
483 @hybrid_property
484 def app_settings_type(self):
484 def app_settings_type(self):
485 return self._app_settings_type
485 return self._app_settings_type
486
486
487 @app_settings_type.setter
487 @app_settings_type.setter
488 def app_settings_type(self, val):
488 def app_settings_type(self, val):
489 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
489 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
490 if val not in SETTINGS_TYPES:
490 if val not in SETTINGS_TYPES:
491 raise Exception('type must be one of %s got %s'
491 raise Exception('type must be one of %s got %s'
492 % (SETTINGS_TYPES.keys(), val))
492 % (SETTINGS_TYPES.keys(), val))
493 self._app_settings_type = val
493 self._app_settings_type = val
494
494
495 def __unicode__(self):
495 def __unicode__(self):
496 return u"<%s('%s:%s:%s[%s]')>" % (
496 return u"<%s('%s:%s:%s[%s]')>" % (
497 self.__class__.__name__, self.repository.repo_name,
497 self.__class__.__name__, self.repository.repo_name,
498 self.app_settings_name, self.app_settings_value,
498 self.app_settings_name, self.app_settings_value,
499 self.app_settings_type
499 self.app_settings_type
500 )
500 )
501
501
502
502
503 class RepoRhodeCodeUi(Base, BaseModel):
503 class RepoRhodeCodeUi(Base, BaseModel):
504 __tablename__ = 'repo_rhodecode_ui'
504 __tablename__ = 'repo_rhodecode_ui'
505 __table_args__ = (
505 __table_args__ = (
506 UniqueConstraint(
506 UniqueConstraint(
507 'repository_id', 'ui_section', 'ui_key',
507 'repository_id', 'ui_section', 'ui_key',
508 name='uq_repo_rhodecode_ui_repository_id_section_key'),
508 name='uq_repo_rhodecode_ui_repository_id_section_key'),
509 base_table_args
509 base_table_args
510 )
510 )
511
511
512 repository_id = Column(
512 repository_id = Column(
513 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
513 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
514 nullable=False)
514 nullable=False)
515 ui_id = Column(
515 ui_id = Column(
516 "ui_id", Integer(), nullable=False, unique=True, default=None,
516 "ui_id", Integer(), nullable=False, unique=True, default=None,
517 primary_key=True)
517 primary_key=True)
518 ui_section = Column(
518 ui_section = Column(
519 "ui_section", String(255), nullable=True, unique=None, default=None)
519 "ui_section", String(255), nullable=True, unique=None, default=None)
520 ui_key = Column(
520 ui_key = Column(
521 "ui_key", String(255), nullable=True, unique=None, default=None)
521 "ui_key", String(255), nullable=True, unique=None, default=None)
522 ui_value = Column(
522 ui_value = Column(
523 "ui_value", String(255), nullable=True, unique=None, default=None)
523 "ui_value", String(255), nullable=True, unique=None, default=None)
524 ui_active = Column(
524 ui_active = Column(
525 "ui_active", Boolean(), nullable=True, unique=None, default=True)
525 "ui_active", Boolean(), nullable=True, unique=None, default=True)
526
526
527 repository = relationship('Repository')
527 repository = relationship('Repository')
528
528
529 def __repr__(self):
529 def __repr__(self):
530         return '<%s[%s:%s]%s=>%s>' % (
530         return '<%s[%s:%s]%s=>%s>' % (
531 self.__class__.__name__, self.repository.repo_name,
531 self.__class__.__name__, self.repository.repo_name,
532 self.ui_section, self.ui_key, self.ui_value)
532 self.ui_section, self.ui_key, self.ui_value)
533
533
534
534
535 class User(Base, BaseModel):
535 class User(Base, BaseModel):
536 __tablename__ = 'users'
536 __tablename__ = 'users'
537 __table_args__ = (
537 __table_args__ = (
538 UniqueConstraint('username'), UniqueConstraint('email'),
538 UniqueConstraint('username'), UniqueConstraint('email'),
539 Index('u_username_idx', 'username'),
539 Index('u_username_idx', 'username'),
540 Index('u_email_idx', 'email'),
540 Index('u_email_idx', 'email'),
541 base_table_args
541 base_table_args
542 )
542 )
543
543
544 DEFAULT_USER = 'default'
544 DEFAULT_USER = 'default'
545 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
545 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
546 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
546 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
547
547
548 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
548 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
549 username = Column("username", String(255), nullable=True, unique=None, default=None)
549 username = Column("username", String(255), nullable=True, unique=None, default=None)
550 password = Column("password", String(255), nullable=True, unique=None, default=None)
550 password = Column("password", String(255), nullable=True, unique=None, default=None)
551 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
551 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
552 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
552 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
553 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
553 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
554 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
554 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
555 _email = Column("email", String(255), nullable=True, unique=None, default=None)
555 _email = Column("email", String(255), nullable=True, unique=None, default=None)
556 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
556 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
557 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
557 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
558
558
559 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
559 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
560 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
560 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
561 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
561 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
562 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
562 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
563 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
563 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
564 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
564 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
565
565
566 user_log = relationship('UserLog')
566 user_log = relationship('UserLog')
567 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
567 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
568
568
569 repositories = relationship('Repository')
569 repositories = relationship('Repository')
570 repository_groups = relationship('RepoGroup')
570 repository_groups = relationship('RepoGroup')
571 user_groups = relationship('UserGroup')
571 user_groups = relationship('UserGroup')
572
572
573 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
573 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
574 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
574 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
575
575
576 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
576 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
577 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
577 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
578 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
578 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
579
579
580 group_member = relationship('UserGroupMember', cascade='all')
580 group_member = relationship('UserGroupMember', cascade='all')
581
581
582 notifications = relationship('UserNotification', cascade='all')
582 notifications = relationship('UserNotification', cascade='all')
583 # notifications assigned to this user
583 # notifications assigned to this user
584 user_created_notifications = relationship('Notification', cascade='all')
584 user_created_notifications = relationship('Notification', cascade='all')
585 # comments created by this user
585 # comments created by this user
586 user_comments = relationship('ChangesetComment', cascade='all')
586 user_comments = relationship('ChangesetComment', cascade='all')
587 # user profile extra info
587 # user profile extra info
588 user_emails = relationship('UserEmailMap', cascade='all')
588 user_emails = relationship('UserEmailMap', cascade='all')
589 user_ip_map = relationship('UserIpMap', cascade='all')
589 user_ip_map = relationship('UserIpMap', cascade='all')
590 user_auth_tokens = relationship('UserApiKeys', cascade='all')
590 user_auth_tokens = relationship('UserApiKeys', cascade='all')
591 user_ssh_keys = relationship('UserSshKeys', cascade='all')
591 user_ssh_keys = relationship('UserSshKeys', cascade='all')
592
592
593 # gists
593 # gists
594 user_gists = relationship('Gist', cascade='all')
594 user_gists = relationship('Gist', cascade='all')
595 # user pull requests
595 # user pull requests
596 user_pull_requests = relationship('PullRequest', cascade='all')
596 user_pull_requests = relationship('PullRequest', cascade='all')
597 # external identities
597 # external identities
598 extenal_identities = relationship(
598 extenal_identities = relationship(
599 'ExternalIdentity',
599 'ExternalIdentity',
600 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
600 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
601 cascade='all')
601 cascade='all')
602 # review rules
602 # review rules
603 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
603 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
604
604
605 def __unicode__(self):
605 def __unicode__(self):
606 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
606 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
607 self.user_id, self.username)
607 self.user_id, self.username)
608
608
609 @hybrid_property
609 @hybrid_property
610 def email(self):
610 def email(self):
611 return self._email
611 return self._email
612
612
613 @email.setter
613 @email.setter
614 def email(self, val):
614 def email(self, val):
615 self._email = val.lower() if val else None
615 self._email = val.lower() if val else None
616
616
617 @hybrid_property
617 @hybrid_property
618 def first_name(self):
618 def first_name(self):
619 from rhodecode.lib import helpers as h
619 from rhodecode.lib import helpers as h
620 if self.name:
620 if self.name:
621 return h.escape(self.name)
621 return h.escape(self.name)
622 return self.name
622 return self.name
623
623
624 @hybrid_property
624 @hybrid_property
625 def last_name(self):
625 def last_name(self):
626 from rhodecode.lib import helpers as h
626 from rhodecode.lib import helpers as h
627 if self.lastname:
627 if self.lastname:
628 return h.escape(self.lastname)
628 return h.escape(self.lastname)
629 return self.lastname
629 return self.lastname
630
630
631 @hybrid_property
631 @hybrid_property
632 def api_key(self):
632 def api_key(self):
633 """
633 """
634         Fetch the auth-token with role ALL connected to this user, if one exists
634         Fetch the auth-token with role ALL connected to this user, if one exists
635 """
635 """
636 user_auth_token = UserApiKeys.query()\
636 user_auth_token = UserApiKeys.query()\
637 .filter(UserApiKeys.user_id == self.user_id)\
637 .filter(UserApiKeys.user_id == self.user_id)\
638 .filter(or_(UserApiKeys.expires == -1,
638 .filter(or_(UserApiKeys.expires == -1,
639 UserApiKeys.expires >= time.time()))\
639 UserApiKeys.expires >= time.time()))\
640 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
640 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
641 if user_auth_token:
641 if user_auth_token:
642 user_auth_token = user_auth_token.api_key
642 user_auth_token = user_auth_token.api_key
643
643
644 return user_auth_token
644 return user_auth_token
645
645
646 @api_key.setter
646 @api_key.setter
647 def api_key(self, val):
647 def api_key(self, val):
648         # don't allow setting the API key; this is deprecated for now
648         # don't allow setting the API key; this is deprecated for now
649 self._api_key = None
649 self._api_key = None
650
650
651 @property
651 @property
652 def reviewer_pull_requests(self):
652 def reviewer_pull_requests(self):
653 return PullRequestReviewers.query() \
653 return PullRequestReviewers.query() \
654 .options(joinedload(PullRequestReviewers.pull_request)) \
654 .options(joinedload(PullRequestReviewers.pull_request)) \
655 .filter(PullRequestReviewers.user_id == self.user_id) \
655 .filter(PullRequestReviewers.user_id == self.user_id) \
656 .all()
656 .all()
657
657
658 @property
658 @property
659 def firstname(self):
659 def firstname(self):
660 # alias for future
660 # alias for future
661 return self.name
661 return self.name
662
662
663 @property
663 @property
664 def emails(self):
664 def emails(self):
665 other = UserEmailMap.query()\
665 other = UserEmailMap.query()\
666 .filter(UserEmailMap.user == self) \
666 .filter(UserEmailMap.user == self) \
667 .order_by(UserEmailMap.email_id.asc()) \
667 .order_by(UserEmailMap.email_id.asc()) \
668 .all()
668 .all()
669 return [self.email] + [x.email for x in other]
669 return [self.email] + [x.email for x in other]
670
670
671 @property
671 @property
672 def auth_tokens(self):
672 def auth_tokens(self):
673 auth_tokens = self.get_auth_tokens()
673 auth_tokens = self.get_auth_tokens()
674 return [x.api_key for x in auth_tokens]
674 return [x.api_key for x in auth_tokens]
675
675
676 def get_auth_tokens(self):
676 def get_auth_tokens(self):
677 return UserApiKeys.query()\
677 return UserApiKeys.query()\
678 .filter(UserApiKeys.user == self)\
678 .filter(UserApiKeys.user == self)\
679 .order_by(UserApiKeys.user_api_key_id.asc())\
679 .order_by(UserApiKeys.user_api_key_id.asc())\
680 .all()
680 .all()
681
681
682 @LazyProperty
682 @LazyProperty
683 def feed_token(self):
683 def feed_token(self):
684 return self.get_feed_token()
684 return self.get_feed_token()
685
685
686 def get_feed_token(self, cache=True):
686 def get_feed_token(self, cache=True):
687 feed_tokens = UserApiKeys.query()\
687 feed_tokens = UserApiKeys.query()\
688 .filter(UserApiKeys.user == self)\
688 .filter(UserApiKeys.user == self)\
689 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
689 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
690 if cache:
690 if cache:
691 feed_tokens = feed_tokens.options(
691 feed_tokens = feed_tokens.options(
692 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
692 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
693
693
694 feed_tokens = feed_tokens.all()
694 feed_tokens = feed_tokens.all()
695 if feed_tokens:
695 if feed_tokens:
696 return feed_tokens[0].api_key
696 return feed_tokens[0].api_key
697 return 'NO_FEED_TOKEN_AVAILABLE'
697 return 'NO_FEED_TOKEN_AVAILABLE'
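# Token helper sketch (the username is illustrative): `auth_tokens` exposes the
# plain api_key strings, while get_feed_token() can cache its lookup in the
# "sql_cache_short" region keyed by user_id and falls back to a sentinel string
# when no feed token exists.
#
#   user = User.get_by_username('admin')
#   tokens = user.auth_tokens                  # list of api_key strings
#   feed = user.get_feed_token(cache=False)    # bypass the FromCache region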
698
698
699 @classmethod
699 @classmethod
700 def get(cls, user_id, cache=False):
700 def get(cls, user_id, cache=False):
701 if not user_id:
701 if not user_id:
702 return
702 return
703
703
704 user = cls.query()
704 user = cls.query()
705 if cache:
705 if cache:
706 user = user.options(
706 user = user.options(
707 FromCache("sql_cache_short", "get_users_%s" % user_id))
707 FromCache("sql_cache_short", "get_users_%s" % user_id))
708 return user.get(user_id)
708 return user.get(user_id)
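# Cache sketch: passing cache=True attaches the FromCache('sql_cache_short',
# 'get_users_<id>') option, so repeated lookups of the same user id are served
# from the query cache region.
#
#   user = User.get(some_user_id, cache=True)   # some_user_id is illustrative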
709
709
710 @classmethod
710 @classmethod
711 def extra_valid_auth_tokens(cls, user, role=None):
711 def extra_valid_auth_tokens(cls, user, role=None):
712 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
712 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
713 .filter(or_(UserApiKeys.expires == -1,
713 .filter(or_(UserApiKeys.expires == -1,
714 UserApiKeys.expires >= time.time()))
714 UserApiKeys.expires >= time.time()))
715 if role:
715 if role:
716 tokens = tokens.filter(or_(UserApiKeys.role == role,
716 tokens = tokens.filter(or_(UserApiKeys.role == role,
717 UserApiKeys.role == UserApiKeys.ROLE_ALL))
717 UserApiKeys.role == UserApiKeys.ROLE_ALL))
718 return tokens.all()
718 return tokens.all()
719
719
720 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
720 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
721 from rhodecode.lib import auth
721 from rhodecode.lib import auth
722
722
723 log.debug('Trying to authenticate user: %s via auth-token, '
723 log.debug('Trying to authenticate user: %s via auth-token, '
724 'and roles: %s', self, roles)
724 'and roles: %s', self, roles)
725
725
726 if not auth_token:
726 if not auth_token:
727 return False
727 return False
728
728
729 crypto_backend = auth.crypto_backend()
729 crypto_backend = auth.crypto_backend()
730
730
731 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
731 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
732 tokens_q = UserApiKeys.query()\
732 tokens_q = UserApiKeys.query()\
733 .filter(UserApiKeys.user_id == self.user_id)\
733 .filter(UserApiKeys.user_id == self.user_id)\
734 .filter(or_(UserApiKeys.expires == -1,
734 .filter(or_(UserApiKeys.expires == -1,
735 UserApiKeys.expires >= time.time()))
735 UserApiKeys.expires >= time.time()))
736
736
737 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
737 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
738
738
739 plain_tokens = []
739 plain_tokens = []
740 hash_tokens = []
740 hash_tokens = []
741
741
742 for token in tokens_q.all():
742 for token in tokens_q.all():
743 # verify scope first
743 # verify scope first
744 if token.repo_id:
744 if token.repo_id:
745 # token has a scope, we need to verify it
745 # token has a scope, we need to verify it
746 if scope_repo_id != token.repo_id:
746 if scope_repo_id != token.repo_id:
747 log.debug(
747 log.debug(
748 'Scope mismatch: token has a set repo scope: %s, '
748 'Scope mismatch: token has a set repo scope: %s, '
749                         'and calling scope is: %s, skipping further checks',
749                         'and calling scope is: %s, skipping further checks',
750 token.repo, scope_repo_id)
750 token.repo, scope_repo_id)
751 # token has a scope, and it doesn't match, skip token
751 # token has a scope, and it doesn't match, skip token
752 continue
752 continue
753
753
754 if token.api_key.startswith(crypto_backend.ENC_PREF):
754 if token.api_key.startswith(crypto_backend.ENC_PREF):
755 hash_tokens.append(token.api_key)
755 hash_tokens.append(token.api_key)
756 else:
756 else:
757 plain_tokens.append(token.api_key)
757 plain_tokens.append(token.api_key)
758
758
759 is_plain_match = auth_token in plain_tokens
759 is_plain_match = auth_token in plain_tokens
760 if is_plain_match:
760 if is_plain_match:
761 return True
761 return True
762
762
763 for hashed in hash_tokens:
763 for hashed in hash_tokens:
764 # TODO(marcink): this is expensive to calculate, but most secure
764 # TODO(marcink): this is expensive to calculate, but most secure
765 match = crypto_backend.hash_check(auth_token, hashed)
765 match = crypto_backend.hash_check(auth_token, hashed)
766 if match:
766 if match:
767 return True
767 return True
768
768
769 return False
769 return False
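# Hedged usage sketch (repo and submitted_token are illustrative): plain tokens
# are compared directly, hashed tokens (crypto_backend.ENC_PREF prefix) go
# through the slower hash_check(), and repo-scoped tokens only match when
# scope_repo_id equals their repo_id.
#
#   ok = user.authenticate_by_token(
#       submitted_token,
#       roles=[UserApiKeys.ROLE_VCS],
#       scope_repo_id=repo.repo_id)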
770
770
771 @property
771 @property
772 def ip_addresses(self):
772 def ip_addresses(self):
773 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
773 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
774 return [x.ip_addr for x in ret]
774 return [x.ip_addr for x in ret]
775
775
776 @property
776 @property
777 def username_and_name(self):
777 def username_and_name(self):
778 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
778 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
779
779
780 @property
780 @property
781 def username_or_name_or_email(self):
781 def username_or_name_or_email(self):
782         full_name = self.full_name if self.full_name != ' ' else None
782         full_name = self.full_name if self.full_name != ' ' else None
783 return self.username or full_name or self.email
783 return self.username or full_name or self.email
784
784
785 @property
785 @property
786 def full_name(self):
786 def full_name(self):
787 return '%s %s' % (self.first_name, self.last_name)
787 return '%s %s' % (self.first_name, self.last_name)
788
788
789 @property
789 @property
790 def full_name_or_username(self):
790 def full_name_or_username(self):
791 return ('%s %s' % (self.first_name, self.last_name)
791 return ('%s %s' % (self.first_name, self.last_name)
792 if (self.first_name and self.last_name) else self.username)
792 if (self.first_name and self.last_name) else self.username)
793
793
794 @property
794 @property
795 def full_contact(self):
795 def full_contact(self):
796 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
796 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
797
797
798 @property
798 @property
799 def short_contact(self):
799 def short_contact(self):
800 return '%s %s' % (self.first_name, self.last_name)
800 return '%s %s' % (self.first_name, self.last_name)
801
801
802 @property
802 @property
803 def is_admin(self):
803 def is_admin(self):
804 return self.admin
804 return self.admin
805
805
806 def AuthUser(self, **kwargs):
806 def AuthUser(self, **kwargs):
807 """
807 """
808 Returns instance of AuthUser for this user
808 Returns instance of AuthUser for this user
809 """
809 """
810 from rhodecode.lib.auth import AuthUser
810 from rhodecode.lib.auth import AuthUser
811 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
811 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
812
812
813 @hybrid_property
813 @hybrid_property
814 def user_data(self):
814 def user_data(self):
815 if not self._user_data:
815 if not self._user_data:
816 return {}
816 return {}
817
817
818 try:
818 try:
819 return json.loads(self._user_data)
819 return json.loads(self._user_data)
820 except TypeError:
820 except TypeError:
821 return {}
821 return {}
822
822
823 @user_data.setter
823 @user_data.setter
824 def user_data(self, val):
824 def user_data(self, val):
825 if not isinstance(val, dict):
825 if not isinstance(val, dict):
826 raise Exception('user_data must be dict, got %s' % type(val))
826 raise Exception('user_data must be dict, got %s' % type(val))
827 try:
827 try:
828 self._user_data = json.dumps(val)
828 self._user_data = json.dumps(val)
829 except Exception:
829 except Exception:
830 log.error(traceback.format_exc())
830 log.error(traceback.format_exc())
831
831
832 @classmethod
832 @classmethod
833 def get_by_username(cls, username, case_insensitive=False,
833 def get_by_username(cls, username, case_insensitive=False,
834 cache=False, identity_cache=False):
834 cache=False, identity_cache=False):
835 session = Session()
835 session = Session()
836
836
837 if case_insensitive:
837 if case_insensitive:
838 q = cls.query().filter(
838 q = cls.query().filter(
839 func.lower(cls.username) == func.lower(username))
839 func.lower(cls.username) == func.lower(username))
840 else:
840 else:
841 q = cls.query().filter(cls.username == username)
841 q = cls.query().filter(cls.username == username)
842
842
843 if cache:
843 if cache:
844 if identity_cache:
844 if identity_cache:
845 val = cls.identity_cache(session, 'username', username)
845 val = cls.identity_cache(session, 'username', username)
846 if val:
846 if val:
847 return val
847 return val
848 else:
848 else:
849 cache_key = "get_user_by_name_%s" % _hash_key(username)
849 cache_key = "get_user_by_name_%s" % _hash_key(username)
850 q = q.options(
850 q = q.options(
851 FromCache("sql_cache_short", cache_key))
851 FromCache("sql_cache_short", cache_key))
852
852
853 return q.scalar()
853 return q.scalar()
854
854
855 @classmethod
855 @classmethod
856 def get_by_auth_token(cls, auth_token, cache=False):
856 def get_by_auth_token(cls, auth_token, cache=False):
857 q = UserApiKeys.query()\
857 q = UserApiKeys.query()\
858 .filter(UserApiKeys.api_key == auth_token)\
858 .filter(UserApiKeys.api_key == auth_token)\
859 .filter(or_(UserApiKeys.expires == -1,
859 .filter(or_(UserApiKeys.expires == -1,
860 UserApiKeys.expires >= time.time()))
860 UserApiKeys.expires >= time.time()))
861 if cache:
861 if cache:
862 q = q.options(
862 q = q.options(
863 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
863 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
864
864
865 match = q.first()
865 match = q.first()
866 if match:
866 if match:
867 return match.user
867 return match.user
868
868
869 @classmethod
869 @classmethod
870 def get_by_email(cls, email, case_insensitive=False, cache=False):
870 def get_by_email(cls, email, case_insensitive=False, cache=False):
871
871
872 if case_insensitive:
872 if case_insensitive:
873 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
873 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
874
874
875 else:
875 else:
876 q = cls.query().filter(cls.email == email)
876 q = cls.query().filter(cls.email == email)
877
877
878 email_key = _hash_key(email)
878 email_key = _hash_key(email)
879 if cache:
879 if cache:
880 q = q.options(
880 q = q.options(
881 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
881 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
882
882
883 ret = q.scalar()
883 ret = q.scalar()
884 if ret is None:
884 if ret is None:
885 q = UserEmailMap.query()
885 q = UserEmailMap.query()
886 # try fetching in alternate email map
886 # try fetching in alternate email map
887 if case_insensitive:
887 if case_insensitive:
888 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
888 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
889 else:
889 else:
890 q = q.filter(UserEmailMap.email == email)
890 q = q.filter(UserEmailMap.email == email)
891 q = q.options(joinedload(UserEmailMap.user))
891 q = q.options(joinedload(UserEmailMap.user))
892 if cache:
892 if cache:
893 q = q.options(
893 q = q.options(
894 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
894 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
895 ret = getattr(q.scalar(), 'user', None)
895 ret = getattr(q.scalar(), 'user', None)
896
896
897 return ret
897 return ret
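# Lookup sketch (the address is illustrative): the primary users.email column
# is tried first and, on a miss, the alternate UserEmailMap entries are
# consulted; both branches can be cached in "sql_cache_short".
#
#   user = User.get_by_email('someone@example.com', case_insensitive=True)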
898
898
899 @classmethod
899 @classmethod
900 def get_from_cs_author(cls, author):
900 def get_from_cs_author(cls, author):
901 """
901 """
902         Tries to resolve a User object from a commit author string
902         Tries to resolve a User object from a commit author string
903
903
904 :param author:
904 :param author:
905 """
905 """
906 from rhodecode.lib.helpers import email, author_name
906 from rhodecode.lib.helpers import email, author_name
907 # Valid email in the attribute passed, see if they're in the system
907 # Valid email in the attribute passed, see if they're in the system
908 _email = email(author)
908 _email = email(author)
909 if _email:
909 if _email:
910 user = cls.get_by_email(_email, case_insensitive=True)
910 user = cls.get_by_email(_email, case_insensitive=True)
911 if user:
911 if user:
912 return user
912 return user
913 # Maybe we can match by username?
913 # Maybe we can match by username?
914 _author = author_name(author)
914 _author = author_name(author)
915 user = cls.get_by_username(_author, case_insensitive=True)
915 user = cls.get_by_username(_author, case_insensitive=True)
916 if user:
916 if user:
917 return user
917 return user
918
918
919 def update_userdata(self, **kwargs):
919 def update_userdata(self, **kwargs):
920 usr = self
920 usr = self
921 old = usr.user_data
921 old = usr.user_data
922 old.update(**kwargs)
922 old.update(**kwargs)
923 usr.user_data = old
923 usr.user_data = old
924 Session().add(usr)
924 Session().add(usr)
925         log.debug('updated userdata with %s', kwargs)
925         log.debug('updated userdata with %s', kwargs)
926
926
927 def update_lastlogin(self):
927 def update_lastlogin(self):
928 """Update user lastlogin"""
928 """Update user lastlogin"""
929 self.last_login = datetime.datetime.now()
929 self.last_login = datetime.datetime.now()
930 Session().add(self)
930 Session().add(self)
931 log.debug('updated user %s lastlogin', self.username)
931 log.debug('updated user %s lastlogin', self.username)
932
932
933 def update_password(self, new_password):
933 def update_password(self, new_password):
934 from rhodecode.lib.auth import get_crypt_password
934 from rhodecode.lib.auth import get_crypt_password
935
935
936 self.password = get_crypt_password(new_password)
936 self.password = get_crypt_password(new_password)
937 Session().add(self)
937 Session().add(self)
938
938
939 @classmethod
939 @classmethod
940 def get_first_super_admin(cls):
940 def get_first_super_admin(cls):
941 user = User.query().filter(User.admin == true()).first()
941 user = User.query().filter(User.admin == true()).first()
942 if user is None:
942 if user is None:
943 raise Exception('FATAL: Missing administrative account!')
943 raise Exception('FATAL: Missing administrative account!')
944 return user
944 return user
945
945
946 @classmethod
946 @classmethod
947 def get_all_super_admins(cls):
947 def get_all_super_admins(cls):
948 """
948 """
949 Returns all admin accounts sorted by username
949 Returns all admin accounts sorted by username
950 """
950 """
951 return User.query().filter(User.admin == true())\
951 return User.query().filter(User.admin == true())\
952 .order_by(User.username.asc()).all()
952 .order_by(User.username.asc()).all()
953
953
954 @classmethod
954 @classmethod
955 def get_default_user(cls, cache=False, refresh=False):
955 def get_default_user(cls, cache=False, refresh=False):
956 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
956 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
957 if user is None:
957 if user is None:
958 raise Exception('FATAL: Missing default account!')
958 raise Exception('FATAL: Missing default account!')
959 if refresh:
959 if refresh:
960 # The default user might be based on outdated state which
960 # The default user might be based on outdated state which
961 # has been loaded from the cache.
961 # has been loaded from the cache.
962 # A call to refresh() ensures that the
962 # A call to refresh() ensures that the
963 # latest state from the database is used.
963 # latest state from the database is used.
964 Session().refresh(user)
964 Session().refresh(user)
965 return user
965 return user
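# Sketch: the default (anonymous) account is read very often, so callers may
# pass cache=True; refresh=True forces SQLAlchemy to reload the row in case
# the cached copy is stale.
#
#   anon = User.get_default_user(cache=True, refresh=True)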
966
966
967 def _get_default_perms(self, user, suffix=''):
967 def _get_default_perms(self, user, suffix=''):
968 from rhodecode.model.permission import PermissionModel
968 from rhodecode.model.permission import PermissionModel
969 return PermissionModel().get_default_perms(user.user_perms, suffix)
969 return PermissionModel().get_default_perms(user.user_perms, suffix)
970
970
971 def get_default_perms(self, suffix=''):
971 def get_default_perms(self, suffix=''):
972 return self._get_default_perms(self, suffix)
972 return self._get_default_perms(self, suffix)
973
973
974 def get_api_data(self, include_secrets=False, details='full'):
974 def get_api_data(self, include_secrets=False, details='full'):
975 """
975 """
976 Common function for generating user related data for API
976 Common function for generating user related data for API
977
977
978 :param include_secrets: By default secrets in the API data will be replaced
978 :param include_secrets: By default secrets in the API data will be replaced
979 by a placeholder value to prevent exposing this data by accident. In case
979 by a placeholder value to prevent exposing this data by accident. In case
980 this data shall be exposed, set this flag to ``True``.
980 this data shall be exposed, set this flag to ``True``.
981
981
982         :param details: can be 'basic' or 'full'; 'basic' gives only a subset of
982         :param details: can be 'basic' or 'full'; 'basic' gives only a subset of
983         the available user information: user_id, name and emails.
983         the available user information: user_id, name and emails.
984 """
984 """
985 user = self
985 user = self
986 user_data = self.user_data
986 user_data = self.user_data
987 data = {
987 data = {
988 'user_id': user.user_id,
988 'user_id': user.user_id,
989 'username': user.username,
989 'username': user.username,
990 'firstname': user.name,
990 'firstname': user.name,
991 'lastname': user.lastname,
991 'lastname': user.lastname,
992 'email': user.email,
992 'email': user.email,
993 'emails': user.emails,
993 'emails': user.emails,
994 }
994 }
995 if details == 'basic':
995 if details == 'basic':
996 return data
996 return data
997
997
998 auth_token_length = 40
998 auth_token_length = 40
999 auth_token_replacement = '*' * auth_token_length
999 auth_token_replacement = '*' * auth_token_length
1000
1000
1001 extras = {
1001 extras = {
1002 'auth_tokens': [auth_token_replacement],
1002 'auth_tokens': [auth_token_replacement],
1003 'active': user.active,
1003 'active': user.active,
1004 'admin': user.admin,
1004 'admin': user.admin,
1005 'extern_type': user.extern_type,
1005 'extern_type': user.extern_type,
1006 'extern_name': user.extern_name,
1006 'extern_name': user.extern_name,
1007 'last_login': user.last_login,
1007 'last_login': user.last_login,
1008 'last_activity': user.last_activity,
1008 'last_activity': user.last_activity,
1009 'ip_addresses': user.ip_addresses,
1009 'ip_addresses': user.ip_addresses,
1010 'language': user_data.get('language')
1010 'language': user_data.get('language')
1011 }
1011 }
1012 data.update(extras)
1012 data.update(extras)
1013
1013
1014 if include_secrets:
1014 if include_secrets:
1015 data['auth_tokens'] = user.auth_tokens
1015 data['auth_tokens'] = user.auth_tokens
1016 return data
1016 return data
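# Sketch: with the default include_secrets=False the 'auth_tokens' entry is a
# 40-character '*' placeholder; details='basic' trims the payload to user_id,
# username, names and emails.
#
#   public_view = user.get_api_data()                       # tokens masked
#   admin_view = user.get_api_data(include_secrets=True)    # real tokens
#   summary = user.get_api_data(details='basic')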
1017
1017
1018 def __json__(self):
1018 def __json__(self):
1019 data = {
1019 data = {
1020 'full_name': self.full_name,
1020 'full_name': self.full_name,
1021 'full_name_or_username': self.full_name_or_username,
1021 'full_name_or_username': self.full_name_or_username,
1022 'short_contact': self.short_contact,
1022 'short_contact': self.short_contact,
1023 'full_contact': self.full_contact,
1023 'full_contact': self.full_contact,
1024 }
1024 }
1025 data.update(self.get_api_data())
1025 data.update(self.get_api_data())
1026 return data
1026 return data
1027
1027
1028
1028
1029 class UserApiKeys(Base, BaseModel):
1029 class UserApiKeys(Base, BaseModel):
1030 __tablename__ = 'user_api_keys'
1030 __tablename__ = 'user_api_keys'
1031 __table_args__ = (
1031 __table_args__ = (
1032 Index('uak_api_key_idx', 'api_key', unique=True),
1032 Index('uak_api_key_idx', 'api_key', unique=True),
1033 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1033 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1034 base_table_args
1034 base_table_args
1035 )
1035 )
1036 __mapper_args__ = {}
1036 __mapper_args__ = {}
1037
1037
1038 # ApiKey role
1038 # ApiKey role
1039 ROLE_ALL = 'token_role_all'
1039 ROLE_ALL = 'token_role_all'
1040 ROLE_HTTP = 'token_role_http'
1040 ROLE_HTTP = 'token_role_http'
1041 ROLE_VCS = 'token_role_vcs'
1041 ROLE_VCS = 'token_role_vcs'
1042 ROLE_API = 'token_role_api'
1042 ROLE_API = 'token_role_api'
1043 ROLE_FEED = 'token_role_feed'
1043 ROLE_FEED = 'token_role_feed'
1044 ROLE_PASSWORD_RESET = 'token_password_reset'
1044 ROLE_PASSWORD_RESET = 'token_password_reset'
1045
1045
1046 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1046 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1047
1047
1048 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1048 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1049 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1049 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1050 api_key = Column("api_key", String(255), nullable=False, unique=True)
1050 api_key = Column("api_key", String(255), nullable=False, unique=True)
1051 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1051 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1052 expires = Column('expires', Float(53), nullable=False)
1052 expires = Column('expires', Float(53), nullable=False)
1053 role = Column('role', String(255), nullable=True)
1053 role = Column('role', String(255), nullable=True)
1054 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1054 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1055
1055
1056 # scope columns
1056 # scope columns
1057 repo_id = Column(
1057 repo_id = Column(
1058 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1058 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1059 nullable=True, unique=None, default=None)
1059 nullable=True, unique=None, default=None)
1060 repo = relationship('Repository', lazy='joined')
1060 repo = relationship('Repository', lazy='joined')
1061
1061
1062 repo_group_id = Column(
1062 repo_group_id = Column(
1063 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1063 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1064 nullable=True, unique=None, default=None)
1064 nullable=True, unique=None, default=None)
1065 repo_group = relationship('RepoGroup', lazy='joined')
1065 repo_group = relationship('RepoGroup', lazy='joined')
1066
1066
1067 user = relationship('User', lazy='joined')
1067 user = relationship('User', lazy='joined')
1068
1068
1069 def __unicode__(self):
1069 def __unicode__(self):
1070 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1070 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1071
1071
1072 def __json__(self):
1072 def __json__(self):
1073 data = {
1073 data = {
1074 'auth_token': self.api_key,
1074 'auth_token': self.api_key,
1075 'role': self.role,
1075 'role': self.role,
1076 'scope': self.scope_humanized,
1076 'scope': self.scope_humanized,
1077 'expired': self.expired
1077 'expired': self.expired
1078 }
1078 }
1079 return data
1079 return data
1080
1080
1081 def get_api_data(self, include_secrets=False):
1081 def get_api_data(self, include_secrets=False):
1082 data = self.__json__()
1082 data = self.__json__()
1083 if include_secrets:
1083 if include_secrets:
1084 return data
1084 return data
1085 else:
1085 else:
1086 data['auth_token'] = self.token_obfuscated
1086 data['auth_token'] = self.token_obfuscated
1087 return data
1087 return data
1088
1088
1089 @hybrid_property
1089 @hybrid_property
1090 def description_safe(self):
1090 def description_safe(self):
1091 from rhodecode.lib import helpers as h
1091 from rhodecode.lib import helpers as h
1092 return h.escape(self.description)
1092 return h.escape(self.description)
1093
1093
1094 @property
1094 @property
1095 def expired(self):
1095 def expired(self):
1096 if self.expires == -1:
1096 if self.expires == -1:
1097 return False
1097 return False
1098 return time.time() > self.expires
1098 return time.time() > self.expires
1099
1099
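# --- Illustrative sketch, not part of the original source ---------------
# The `expired` property above treats `expires` as a unix timestamp stored
# as a float, with -1 meaning "never expires". A minimal standalone version
# of the same check:

import time

def token_expired(expires):
    # -1 is the sentinel for tokens without an expiry date
    if expires == -1:
        return False
    # otherwise the token is expired once the current time passes it
    return time.time() > expires

# token_expired(-1)               -> False (never expires)
# token_expired(time.time() - 10) -> True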
1100 @classmethod
1100 @classmethod
1101 def _get_role_name(cls, role):
1101 def _get_role_name(cls, role):
1102 return {
1102 return {
1103 cls.ROLE_ALL: _('all'),
1103 cls.ROLE_ALL: _('all'),
1104 cls.ROLE_HTTP: _('http/web interface'),
1104 cls.ROLE_HTTP: _('http/web interface'),
1105 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1105 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1106 cls.ROLE_API: _('api calls'),
1106 cls.ROLE_API: _('api calls'),
1107 cls.ROLE_FEED: _('feed access'),
1107 cls.ROLE_FEED: _('feed access'),
1108 }.get(role, role)
1108 }.get(role, role)
1109
1109
1110 @property
1110 @property
1111 def role_humanized(self):
1111 def role_humanized(self):
1112 return self._get_role_name(self.role)
1112 return self._get_role_name(self.role)
1113
1113
1114 def _get_scope(self):
1114 def _get_scope(self):
1115 if self.repo:
1115 if self.repo:
1116 return repr(self.repo)
1116 return repr(self.repo)
1117 if self.repo_group:
1117 if self.repo_group:
1118 return repr(self.repo_group) + ' (recursive)'
1118 return repr(self.repo_group) + ' (recursive)'
1119 return 'global'
1119 return 'global'
1120
1120
1121 @property
1121 @property
1122 def scope_humanized(self):
1122 def scope_humanized(self):
1123 return self._get_scope()
1123 return self._get_scope()
1124
1124
1125 @property
1125 @property
1126 def token_obfuscated(self):
1126 def token_obfuscated(self):
1127 if self.api_key:
1127 if self.api_key:
1128 return self.api_key[:4] + "****"
1128 return self.api_key[:4] + "****"
1129
1129
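# --- Illustrative sketch, not part of the original source ---------------
# `token_obfuscated` above exposes only the first four characters of the
# auth token when secrets must not be shown. A standalone equivalent:

def obfuscate_token(api_key):
    # implicitly returns None when there is no token, mirroring the property
    if api_key:
        return api_key[:4] + "****"

# obfuscate_token('abcdef1234567890') -> 'abcd****'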
1130
1130
1131 class UserEmailMap(Base, BaseModel):
1131 class UserEmailMap(Base, BaseModel):
1132 __tablename__ = 'user_email_map'
1132 __tablename__ = 'user_email_map'
1133 __table_args__ = (
1133 __table_args__ = (
1134 Index('uem_email_idx', 'email'),
1134 Index('uem_email_idx', 'email'),
1135 UniqueConstraint('email'),
1135 UniqueConstraint('email'),
1136 base_table_args
1136 base_table_args
1137 )
1137 )
1138 __mapper_args__ = {}
1138 __mapper_args__ = {}
1139
1139
1140 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1140 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1141 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1141 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1142 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1142 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1143 user = relationship('User', lazy='joined')
1143 user = relationship('User', lazy='joined')
1144
1144
1145 @validates('_email')
1145 @validates('_email')
1146 def validate_email(self, key, email):
1146 def validate_email(self, key, email):
1147 # check that this email is not already used as a user's main email
1147 # check that this email is not already used as a user's main email
1148 main_email = Session().query(User).filter(User.email == email).scalar()
1148 main_email = Session().query(User).filter(User.email == email).scalar()
1149 if main_email is not None:
1149 if main_email is not None:
1150 raise AttributeError('email %s is already present in the user table' % email)
1150 raise AttributeError('email %s is already present in the user table' % email)
1151 return email
1151 return email
1152
1152
1153 @hybrid_property
1153 @hybrid_property
1154 def email(self):
1154 def email(self):
1155 return self._email
1155 return self._email
1156
1156
1157 @email.setter
1157 @email.setter
1158 def email(self, val):
1158 def email(self, val):
1159 self._email = val.lower() if val else None
1159 self._email = val.lower() if val else None
1160
1160
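# --- Illustrative sketch, not part of the original source ---------------
# The `email` hybrid property above normalizes addresses to lower case on
# assignment. The same pattern with a plain Python property:

class EmailHolder(object):
    def __init__(self):
        self._email = None

    @property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # store a lower-cased copy, or None for empty values
        self._email = val.lower() if val else None

# holder = EmailHolder(); holder.email = 'Foo@Example.COM'
# holder.email -> 'foo@example.com'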
1161
1161
1162 class UserIpMap(Base, BaseModel):
1162 class UserIpMap(Base, BaseModel):
1163 __tablename__ = 'user_ip_map'
1163 __tablename__ = 'user_ip_map'
1164 __table_args__ = (
1164 __table_args__ = (
1165 UniqueConstraint('user_id', 'ip_addr'),
1165 UniqueConstraint('user_id', 'ip_addr'),
1166 base_table_args
1166 base_table_args
1167 )
1167 )
1168 __mapper_args__ = {}
1168 __mapper_args__ = {}
1169
1169
1170 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1170 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1171 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1171 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1172 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1172 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1173 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1173 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1174 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1174 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1175 user = relationship('User', lazy='joined')
1175 user = relationship('User', lazy='joined')
1176
1176
1177 @hybrid_property
1177 @hybrid_property
1178 def description_safe(self):
1178 def description_safe(self):
1179 from rhodecode.lib import helpers as h
1179 from rhodecode.lib import helpers as h
1180 return h.escape(self.description)
1180 return h.escape(self.description)
1181
1181
1182 @classmethod
1182 @classmethod
1183 def _get_ip_range(cls, ip_addr):
1183 def _get_ip_range(cls, ip_addr):
1184 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1184 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1185 return [str(net.network_address), str(net.broadcast_address)]
1185 return [str(net.network_address), str(net.broadcast_address)]
1186
1186
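# --- Illustrative sketch, not part of the original source ---------------
# `_get_ip_range` above expands an address or CIDR block into its first and
# last address with the `ipaddress` module (stdlib in Python 3, a backport
# on Python 2 that only accepts unicode -- hence the safe_unicode call):

import ipaddress

def ip_range(ip_addr):
    # strict=False tolerates host bits being set, e.g. '192.168.1.17/24'
    net = ipaddress.ip_network(u'%s' % ip_addr, strict=False)
    return [str(net.network_address), str(net.broadcast_address)]

# ip_range('192.168.1.0/24') -> ['192.168.1.0', '192.168.1.255']
# ip_range('10.0.0.5')       -> ['10.0.0.5', '10.0.0.5']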
1187 def __json__(self):
1187 def __json__(self):
1188 return {
1188 return {
1189 'ip_addr': self.ip_addr,
1189 'ip_addr': self.ip_addr,
1190 'ip_range': self._get_ip_range(self.ip_addr),
1190 'ip_range': self._get_ip_range(self.ip_addr),
1191 }
1191 }
1192
1192
1193 def __unicode__(self):
1193 def __unicode__(self):
1194 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1194 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1195 self.user_id, self.ip_addr)
1195 self.user_id, self.ip_addr)
1196
1196
1197
1197
1198 class UserSshKeys(Base, BaseModel):
1198 class UserSshKeys(Base, BaseModel):
1199 __tablename__ = 'user_ssh_keys'
1199 __tablename__ = 'user_ssh_keys'
1200 __table_args__ = (
1200 __table_args__ = (
1201 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1201 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1202
1202
1203 UniqueConstraint('ssh_key_fingerprint'),
1203 UniqueConstraint('ssh_key_fingerprint'),
1204
1204
1205 base_table_args
1205 base_table_args
1206 )
1206 )
1207 __mapper_args__ = {}
1207 __mapper_args__ = {}
1208
1208
1209 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1209 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1210 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1210 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1211 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1211 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1212
1212
1213 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1213 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1214
1214
1215 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1215 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1216 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1216 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1217 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1217 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1218
1218
1219 user = relationship('User', lazy='joined')
1219 user = relationship('User', lazy='joined')
1220
1220
1221 def __json__(self):
1221 def __json__(self):
1222 data = {
1222 data = {
1223 'ssh_fingerprint': self.ssh_key_fingerprint,
1223 'ssh_fingerprint': self.ssh_key_fingerprint,
1224 'description': self.description,
1224 'description': self.description,
1225 'created_on': self.created_on
1225 'created_on': self.created_on
1226 }
1226 }
1227 return data
1227 return data
1228
1228
1229 def get_api_data(self):
1229 def get_api_data(self):
1230 data = self.__json__()
1230 data = self.__json__()
1231 return data
1231 return data
1232
1232
1233
1233
1234 class UserLog(Base, BaseModel):
1234 class UserLog(Base, BaseModel):
1235 __tablename__ = 'user_logs'
1235 __tablename__ = 'user_logs'
1236 __table_args__ = (
1236 __table_args__ = (
1237 base_table_args,
1237 base_table_args,
1238 )
1238 )
1239
1239
1240 VERSION_1 = 'v1'
1240 VERSION_1 = 'v1'
1241 VERSION_2 = 'v2'
1241 VERSION_2 = 'v2'
1242 VERSIONS = [VERSION_1, VERSION_2]
1242 VERSIONS = [VERSION_1, VERSION_2]
1243
1243
1244 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1244 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1245 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1245 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1246 username = Column("username", String(255), nullable=True, unique=None, default=None)
1246 username = Column("username", String(255), nullable=True, unique=None, default=None)
1247 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1247 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1248 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1248 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1249 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1249 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1250 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1250 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1251 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1251 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1252
1252
1253 version = Column("version", String(255), nullable=True, default=VERSION_1)
1253 version = Column("version", String(255), nullable=True, default=VERSION_1)
1254 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1254 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1255 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1255 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1256
1256
1257 def __unicode__(self):
1257 def __unicode__(self):
1258 return u"<%s('id:%s:%s')>" % (
1258 return u"<%s('id:%s:%s')>" % (
1259 self.__class__.__name__, self.repository_name, self.action)
1259 self.__class__.__name__, self.repository_name, self.action)
1260
1260
1261 def __json__(self):
1261 def __json__(self):
1262 return {
1262 return {
1263 'user_id': self.user_id,
1263 'user_id': self.user_id,
1264 'username': self.username,
1264 'username': self.username,
1265 'repository_id': self.repository_id,
1265 'repository_id': self.repository_id,
1266 'repository_name': self.repository_name,
1266 'repository_name': self.repository_name,
1267 'user_ip': self.user_ip,
1267 'user_ip': self.user_ip,
1268 'action_date': self.action_date,
1268 'action_date': self.action_date,
1269 'action': self.action,
1269 'action': self.action,
1270 }
1270 }
1271
1271
1272 @hybrid_property
1272 @hybrid_property
1273 def entry_id(self):
1273 def entry_id(self):
1274 return self.user_log_id
1274 return self.user_log_id
1275
1275
1276 @property
1276 @property
1277 def action_as_day(self):
1277 def action_as_day(self):
1278 return datetime.date(*self.action_date.timetuple()[:3])
1278 return datetime.date(*self.action_date.timetuple()[:3])
1279
1279
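# --- Illustrative sketch, not part of the original source ---------------
# `action_as_day` above truncates the action timestamp to a calendar date
# by passing the first three fields of timetuple() (year, month, day) to
# datetime.date:

import datetime

ts = datetime.datetime(2018, 7, 26, 13, 45, 12)
day = datetime.date(*ts.timetuple()[:3])
# day -> datetime.date(2018, 7, 26)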
1280 user = relationship('User')
1280 user = relationship('User')
1281 repository = relationship('Repository', cascade='')
1281 repository = relationship('Repository', cascade='')
1282
1282
1283
1283
1284 class UserGroup(Base, BaseModel):
1284 class UserGroup(Base, BaseModel):
1285 __tablename__ = 'users_groups'
1285 __tablename__ = 'users_groups'
1286 __table_args__ = (
1286 __table_args__ = (
1287 base_table_args,
1287 base_table_args,
1288 )
1288 )
1289
1289
1290 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1290 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1291 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1291 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1292 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1292 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1293 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1293 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1294 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1294 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1295 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1295 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1296 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1296 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1297 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1297 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1298
1298
1299 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1299 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1300 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1300 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1301 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1301 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1302 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1302 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1303 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1303 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1304 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1304 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1305
1305
1306 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1306 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1307 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1307 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1308
1308
1309 @classmethod
1309 @classmethod
1310 def _load_group_data(cls, column):
1310 def _load_group_data(cls, column):
1311 if not column:
1311 if not column:
1312 return {}
1312 return {}
1313
1313
1314 try:
1314 try:
1315 return json.loads(column) or {}
1315 return json.loads(column) or {}
1316 except TypeError:
1316 except TypeError:
1317 return {}
1317 return {}
1318
1318
1319 @hybrid_property
1319 @hybrid_property
1320 def description_safe(self):
1320 def description_safe(self):
1321 from rhodecode.lib import helpers as h
1321 from rhodecode.lib import helpers as h
1322 return h.escape(self.user_group_description)
1322 return h.escape(self.user_group_description)
1323
1323
1324 @hybrid_property
1324 @hybrid_property
1325 def group_data(self):
1325 def group_data(self):
1326 return self._load_group_data(self._group_data)
1326 return self._load_group_data(self._group_data)
1327
1327
1328 @group_data.expression
1328 @group_data.expression
1329 def group_data(self, **kwargs):
1329 def group_data(self, **kwargs):
1330 return self._group_data
1330 return self._group_data
1331
1331
1332 @group_data.setter
1332 @group_data.setter
1333 def group_data(self, val):
1333 def group_data(self, val):
1334 try:
1334 try:
1335 self._group_data = json.dumps(val)
1335 self._group_data = json.dumps(val)
1336 except Exception:
1336 except Exception:
1337 log.error(traceback.format_exc())
1337 log.error(traceback.format_exc())
1338
1338
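# --- Illustrative sketch, not part of the original source ---------------
# `group_data` above persists a dict as JSON text in the LargeBinary
# `_group_data` column, and the loader falls back to an empty dict when the
# column is empty or unreadable. A standalone round trip (slightly broader
# than the model, which only catches TypeError):

import json

def dump_group_data(val):
    return json.dumps(val)

def load_group_data(column):
    if not column:
        return {}
    try:
        return json.loads(column) or {}
    except (TypeError, ValueError):
        return {}

# load_group_data(dump_group_data({'extern_type': 'ldap'}))
#   -> {'extern_type': 'ldap'}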
1339 @classmethod
1339 @classmethod
1340 def _load_sync(cls, group_data):
1340 def _load_sync(cls, group_data):
1341 if group_data:
1341 if group_data:
1342 return group_data.get('extern_type')
1342 return group_data.get('extern_type')
1343
1343
1344 @property
1344 @property
1345 def sync(self):
1345 def sync(self):
1346 return self._load_sync(self.group_data)
1346 return self._load_sync(self.group_data)
1347
1347
1348 def __unicode__(self):
1348 def __unicode__(self):
1349 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1349 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1350 self.users_group_id,
1350 self.users_group_id,
1351 self.users_group_name)
1351 self.users_group_name)
1352
1352
1353 @classmethod
1353 @classmethod
1354 def get_by_group_name(cls, group_name, cache=False,
1354 def get_by_group_name(cls, group_name, cache=False,
1355 case_insensitive=False):
1355 case_insensitive=False):
1356 if case_insensitive:
1356 if case_insensitive:
1357 q = cls.query().filter(func.lower(cls.users_group_name) ==
1357 q = cls.query().filter(func.lower(cls.users_group_name) ==
1358 func.lower(group_name))
1358 func.lower(group_name))
1359
1359
1360 else:
1360 else:
1361 q = cls.query().filter(cls.users_group_name == group_name)
1361 q = cls.query().filter(cls.users_group_name == group_name)
1362 if cache:
1362 if cache:
1363 q = q.options(
1363 q = q.options(
1364 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1364 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1365 return q.scalar()
1365 return q.scalar()
1366
1366
1367 @classmethod
1367 @classmethod
1368 def get(cls, user_group_id, cache=False):
1368 def get(cls, user_group_id, cache=False):
1369 if not user_group_id:
1369 if not user_group_id:
1370 return
1370 return
1371
1371
1372 user_group = cls.query()
1372 user_group = cls.query()
1373 if cache:
1373 if cache:
1374 user_group = user_group.options(
1374 user_group = user_group.options(
1375 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1375 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1376 return user_group.get(user_group_id)
1376 return user_group.get(user_group_id)
1377
1377
1378 def permissions(self, with_admins=True, with_owner=True):
1378 def permissions(self, with_admins=True, with_owner=True):
1379 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1379 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1380 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1380 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1381 joinedload(UserUserGroupToPerm.user),
1381 joinedload(UserUserGroupToPerm.user),
1382 joinedload(UserUserGroupToPerm.permission),)
1382 joinedload(UserUserGroupToPerm.permission),)
1383
1383
1384 # get owners, admins and their permissions. We rewrite the sqlalchemy
1384 # get owners, admins and their permissions. We rewrite the sqlalchemy
1385 # objects into named-tuple-like AttributeDicts because the sqlalchemy
1385 # objects into named-tuple-like AttributeDicts because the sqlalchemy
1386 # session holds a global reference, so changing one object would
1386 # session holds a global reference, so changing one object would
1387 # propagate to all others. This means that if an admin is also the
1387 # propagate to all others. This means that if an admin is also the
1388 # owner, a change to the admin_row would otherwise affect both objects
1388 # owner, a change to the admin_row would otherwise affect both objects
1389 perm_rows = []
1389 perm_rows = []
1390 for _usr in q.all():
1390 for _usr in q.all():
1391 usr = AttributeDict(_usr.user.get_dict())
1391 usr = AttributeDict(_usr.user.get_dict())
1392 usr.permission = _usr.permission.permission_name
1392 usr.permission = _usr.permission.permission_name
1393 perm_rows.append(usr)
1393 perm_rows.append(usr)
1394
1394
1395 # sort the perm rows: the 'default' user first, then by
1395 # sort the perm rows: the 'default' user first, then by
1396 # admin/write/read/none permission, and alphabetically within
1396 # admin/write/read/none permission, and alphabetically within
1397 # each permission group
1397 # each permission group
1398 perm_rows = sorted(perm_rows, key=display_user_sort)
1398 perm_rows = sorted(perm_rows, key=display_user_sort)
1399
1399
1400 _admin_perm = 'usergroup.admin'
1400 _admin_perm = 'usergroup.admin'
1401 owner_row = []
1401 owner_row = []
1402 if with_owner:
1402 if with_owner:
1403 usr = AttributeDict(self.user.get_dict())
1403 usr = AttributeDict(self.user.get_dict())
1404 usr.owner_row = True
1404 usr.owner_row = True
1405 usr.permission = _admin_perm
1405 usr.permission = _admin_perm
1406 owner_row.append(usr)
1406 owner_row.append(usr)
1407
1407
1408 super_admin_rows = []
1408 super_admin_rows = []
1409 if with_admins:
1409 if with_admins:
1410 for usr in User.get_all_super_admins():
1410 for usr in User.get_all_super_admins():
1411 # if this admin is also owner, don't double the record
1411 # if this admin is also owner, don't double the record
1412 if usr.user_id == owner_row[0].user_id:
1412 if usr.user_id == owner_row[0].user_id:
1413 owner_row[0].admin_row = True
1413 owner_row[0].admin_row = True
1414 else:
1414 else:
1415 usr = AttributeDict(usr.get_dict())
1415 usr = AttributeDict(usr.get_dict())
1416 usr.admin_row = True
1416 usr.admin_row = True
1417 usr.permission = _admin_perm
1417 usr.permission = _admin_perm
1418 super_admin_rows.append(usr)
1418 super_admin_rows.append(usr)
1419
1419
1420 return super_admin_rows + owner_row + perm_rows
1420 return super_admin_rows + owner_row + perm_rows
1421
1421
1422 def permission_user_groups(self):
1422 def permission_user_groups(self):
1423 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1423 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1424 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1424 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1425 joinedload(UserGroupUserGroupToPerm.target_user_group),
1425 joinedload(UserGroupUserGroupToPerm.target_user_group),
1426 joinedload(UserGroupUserGroupToPerm.permission),)
1426 joinedload(UserGroupUserGroupToPerm.permission),)
1427
1427
1428 perm_rows = []
1428 perm_rows = []
1429 for _user_group in q.all():
1429 for _user_group in q.all():
1430 usr = AttributeDict(_user_group.user_group.get_dict())
1430 usr = AttributeDict(_user_group.user_group.get_dict())
1431 usr.permission = _user_group.permission.permission_name
1431 usr.permission = _user_group.permission.permission_name
1432 perm_rows.append(usr)
1432 perm_rows.append(usr)
1433
1433
1434 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1434 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1435 return perm_rows
1435 return perm_rows
1436
1436
1437 def _get_default_perms(self, user_group, suffix=''):
1437 def _get_default_perms(self, user_group, suffix=''):
1438 from rhodecode.model.permission import PermissionModel
1438 from rhodecode.model.permission import PermissionModel
1439 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1439 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1440
1440
1441 def get_default_perms(self, suffix=''):
1441 def get_default_perms(self, suffix=''):
1442 return self._get_default_perms(self, suffix)
1442 return self._get_default_perms(self, suffix)
1443
1443
1444 def get_api_data(self, with_group_members=True, include_secrets=False):
1444 def get_api_data(self, with_group_members=True, include_secrets=False):
1445 """
1445 """
1446 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1446 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1447 basically forwarded.
1447 basically forwarded.
1448
1448
1449 """
1449 """
1450 user_group = self
1450 user_group = self
1451 data = {
1451 data = {
1452 'users_group_id': user_group.users_group_id,
1452 'users_group_id': user_group.users_group_id,
1453 'group_name': user_group.users_group_name,
1453 'group_name': user_group.users_group_name,
1454 'group_description': user_group.user_group_description,
1454 'group_description': user_group.user_group_description,
1455 'active': user_group.users_group_active,
1455 'active': user_group.users_group_active,
1456 'owner': user_group.user.username,
1456 'owner': user_group.user.username,
1457 'sync': user_group.sync,
1457 'sync': user_group.sync,
1458 'owner_email': user_group.user.email,
1458 'owner_email': user_group.user.email,
1459 }
1459 }
1460
1460
1461 if with_group_members:
1461 if with_group_members:
1462 users = []
1462 users = []
1463 for user in user_group.members:
1463 for user in user_group.members:
1464 user = user.user
1464 user = user.user
1465 users.append(user.get_api_data(include_secrets=include_secrets))
1465 users.append(user.get_api_data(include_secrets=include_secrets))
1466 data['users'] = users
1466 data['users'] = users
1467
1467
1468 return data
1468 return data
1469
1469
1470
1470
1471 class UserGroupMember(Base, BaseModel):
1471 class UserGroupMember(Base, BaseModel):
1472 __tablename__ = 'users_groups_members'
1472 __tablename__ = 'users_groups_members'
1473 __table_args__ = (
1473 __table_args__ = (
1474 base_table_args,
1474 base_table_args,
1475 )
1475 )
1476
1476
1477 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1477 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1478 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1478 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1479 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1479 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1480
1480
1481 user = relationship('User', lazy='joined')
1481 user = relationship('User', lazy='joined')
1482 users_group = relationship('UserGroup')
1482 users_group = relationship('UserGroup')
1483
1483
1484 def __init__(self, gr_id='', u_id=''):
1484 def __init__(self, gr_id='', u_id=''):
1485 self.users_group_id = gr_id
1485 self.users_group_id = gr_id
1486 self.user_id = u_id
1486 self.user_id = u_id
1487
1487
1488
1488
1489 class RepositoryField(Base, BaseModel):
1489 class RepositoryField(Base, BaseModel):
1490 __tablename__ = 'repositories_fields'
1490 __tablename__ = 'repositories_fields'
1491 __table_args__ = (
1491 __table_args__ = (
1492 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1492 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1493 base_table_args,
1493 base_table_args,
1494 )
1494 )
1495
1495
1496 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1496 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1497
1497
1498 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1498 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1499 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1499 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1500 field_key = Column("field_key", String(250))
1500 field_key = Column("field_key", String(250))
1501 field_label = Column("field_label", String(1024), nullable=False)
1501 field_label = Column("field_label", String(1024), nullable=False)
1502 field_value = Column("field_value", String(10000), nullable=False)
1502 field_value = Column("field_value", String(10000), nullable=False)
1503 field_desc = Column("field_desc", String(1024), nullable=False)
1503 field_desc = Column("field_desc", String(1024), nullable=False)
1504 field_type = Column("field_type", String(255), nullable=False, unique=None)
1504 field_type = Column("field_type", String(255), nullable=False, unique=None)
1505 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1505 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1506
1506
1507 repository = relationship('Repository')
1507 repository = relationship('Repository')
1508
1508
1509 @property
1509 @property
1510 def field_key_prefixed(self):
1510 def field_key_prefixed(self):
1511 return 'ex_%s' % self.field_key
1511 return 'ex_%s' % self.field_key
1512
1512
1513 @classmethod
1513 @classmethod
1514 def un_prefix_key(cls, key):
1514 def un_prefix_key(cls, key):
1515 if key.startswith(cls.PREFIX):
1515 if key.startswith(cls.PREFIX):
1516 return key[len(cls.PREFIX):]
1516 return key[len(cls.PREFIX):]
1517 return key
1517 return key
1518
1518
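# --- Illustrative sketch, not part of the original source ---------------
# Custom repository fields are exposed in forms with the 'ex_' PREFIX so
# they cannot clash with built-in field names; `un_prefix_key` strips it
# again when reading the form back:

PREFIX = 'ex_'

def un_prefix_key(key):
    if key.startswith(PREFIX):
        return key[len(PREFIX):]
    return key

# un_prefix_key('ex_ticket_url') -> 'ticket_url'
# un_prefix_key('ticket_url')    -> 'ticket_url'  (unchanged)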
1519 @classmethod
1519 @classmethod
1520 def get_by_key_name(cls, key, repo):
1520 def get_by_key_name(cls, key, repo):
1521 row = cls.query()\
1521 row = cls.query()\
1522 .filter(cls.repository == repo)\
1522 .filter(cls.repository == repo)\
1523 .filter(cls.field_key == key).scalar()
1523 .filter(cls.field_key == key).scalar()
1524 return row
1524 return row
1525
1525
1526
1526
1527 class Repository(Base, BaseModel):
1527 class Repository(Base, BaseModel):
1528 __tablename__ = 'repositories'
1528 __tablename__ = 'repositories'
1529 __table_args__ = (
1529 __table_args__ = (
1530 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1530 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1531 base_table_args,
1531 base_table_args,
1532 )
1532 )
1533 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1533 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1534 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1534 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1535 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1535 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1536
1536
1537 STATE_CREATED = 'repo_state_created'
1537 STATE_CREATED = 'repo_state_created'
1538 STATE_PENDING = 'repo_state_pending'
1538 STATE_PENDING = 'repo_state_pending'
1539 STATE_ERROR = 'repo_state_error'
1539 STATE_ERROR = 'repo_state_error'
1540
1540
1541 LOCK_AUTOMATIC = 'lock_auto'
1541 LOCK_AUTOMATIC = 'lock_auto'
1542 LOCK_API = 'lock_api'
1542 LOCK_API = 'lock_api'
1543 LOCK_WEB = 'lock_web'
1543 LOCK_WEB = 'lock_web'
1544 LOCK_PULL = 'lock_pull'
1544 LOCK_PULL = 'lock_pull'
1545
1545
1546 NAME_SEP = URL_SEP
1546 NAME_SEP = URL_SEP
1547
1547
1548 repo_id = Column(
1548 repo_id = Column(
1549 "repo_id", Integer(), nullable=False, unique=True, default=None,
1549 "repo_id", Integer(), nullable=False, unique=True, default=None,
1550 primary_key=True)
1550 primary_key=True)
1551 _repo_name = Column(
1551 _repo_name = Column(
1552 "repo_name", Text(), nullable=False, default=None)
1552 "repo_name", Text(), nullable=False, default=None)
1553 _repo_name_hash = Column(
1553 _repo_name_hash = Column(
1554 "repo_name_hash", String(255), nullable=False, unique=True)
1554 "repo_name_hash", String(255), nullable=False, unique=True)
1555 repo_state = Column("repo_state", String(255), nullable=True)
1555 repo_state = Column("repo_state", String(255), nullable=True)
1556
1556
1557 clone_uri = Column(
1557 clone_uri = Column(
1558 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1558 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1559 default=None)
1559 default=None)
1560 push_uri = Column(
1560 push_uri = Column(
1561 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1561 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1562 default=None)
1562 default=None)
1563 repo_type = Column(
1563 repo_type = Column(
1564 "repo_type", String(255), nullable=False, unique=False, default=None)
1564 "repo_type", String(255), nullable=False, unique=False, default=None)
1565 user_id = Column(
1565 user_id = Column(
1566 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1566 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1567 unique=False, default=None)
1567 unique=False, default=None)
1568 private = Column(
1568 private = Column(
1569 "private", Boolean(), nullable=True, unique=None, default=None)
1569 "private", Boolean(), nullable=True, unique=None, default=None)
1570 enable_statistics = Column(
1570 enable_statistics = Column(
1571 "statistics", Boolean(), nullable=True, unique=None, default=True)
1571 "statistics", Boolean(), nullable=True, unique=None, default=True)
1572 enable_downloads = Column(
1572 enable_downloads = Column(
1573 "downloads", Boolean(), nullable=True, unique=None, default=True)
1573 "downloads", Boolean(), nullable=True, unique=None, default=True)
1574 description = Column(
1574 description = Column(
1575 "description", String(10000), nullable=True, unique=None, default=None)
1575 "description", String(10000), nullable=True, unique=None, default=None)
1576 created_on = Column(
1576 created_on = Column(
1577 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1577 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1578 default=datetime.datetime.now)
1578 default=datetime.datetime.now)
1579 updated_on = Column(
1579 updated_on = Column(
1580 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1580 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1581 default=datetime.datetime.now)
1581 default=datetime.datetime.now)
1582 _landing_revision = Column(
1582 _landing_revision = Column(
1583 "landing_revision", String(255), nullable=False, unique=False,
1583 "landing_revision", String(255), nullable=False, unique=False,
1584 default=None)
1584 default=None)
1585 enable_locking = Column(
1585 enable_locking = Column(
1586 "enable_locking", Boolean(), nullable=False, unique=None,
1586 "enable_locking", Boolean(), nullable=False, unique=None,
1587 default=False)
1587 default=False)
1588 _locked = Column(
1588 _locked = Column(
1589 "locked", String(255), nullable=True, unique=False, default=None)
1589 "locked", String(255), nullable=True, unique=False, default=None)
1590 _changeset_cache = Column(
1590 _changeset_cache = Column(
1591 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1591 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1592
1592
1593 fork_id = Column(
1593 fork_id = Column(
1594 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1594 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1595 nullable=True, unique=False, default=None)
1595 nullable=True, unique=False, default=None)
1596 group_id = Column(
1596 group_id = Column(
1597 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1597 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1598 unique=False, default=None)
1598 unique=False, default=None)
1599
1599
1600 user = relationship('User', lazy='joined')
1600 user = relationship('User', lazy='joined')
1601 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1601 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1602 group = relationship('RepoGroup', lazy='joined')
1602 group = relationship('RepoGroup', lazy='joined')
1603 repo_to_perm = relationship(
1603 repo_to_perm = relationship(
1604 'UserRepoToPerm', cascade='all',
1604 'UserRepoToPerm', cascade='all',
1605 order_by='UserRepoToPerm.repo_to_perm_id')
1605 order_by='UserRepoToPerm.repo_to_perm_id')
1606 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1606 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1607 stats = relationship('Statistics', cascade='all', uselist=False)
1607 stats = relationship('Statistics', cascade='all', uselist=False)
1608
1608
1609 followers = relationship(
1609 followers = relationship(
1610 'UserFollowing',
1610 'UserFollowing',
1611 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1611 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1612 cascade='all')
1612 cascade='all')
1613 extra_fields = relationship(
1613 extra_fields = relationship(
1614 'RepositoryField', cascade="all, delete, delete-orphan")
1614 'RepositoryField', cascade="all, delete, delete-orphan")
1615 logs = relationship('UserLog')
1615 logs = relationship('UserLog')
1616 comments = relationship(
1616 comments = relationship(
1617 'ChangesetComment', cascade="all, delete, delete-orphan")
1617 'ChangesetComment', cascade="all, delete, delete-orphan")
1618 pull_requests_source = relationship(
1618 pull_requests_source = relationship(
1619 'PullRequest',
1619 'PullRequest',
1620 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1620 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1621 cascade="all, delete, delete-orphan")
1621 cascade="all, delete, delete-orphan")
1622 pull_requests_target = relationship(
1622 pull_requests_target = relationship(
1623 'PullRequest',
1623 'PullRequest',
1624 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1624 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1625 cascade="all, delete, delete-orphan")
1625 cascade="all, delete, delete-orphan")
1626 ui = relationship('RepoRhodeCodeUi', cascade="all")
1626 ui = relationship('RepoRhodeCodeUi', cascade="all")
1627 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1627 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1628 integrations = relationship('Integration',
1628 integrations = relationship('Integration',
1629 cascade="all, delete, delete-orphan")
1629 cascade="all, delete, delete-orphan")
1630
1630
1631 scoped_tokens = relationship('UserApiKeys', cascade="all")
1631 scoped_tokens = relationship('UserApiKeys', cascade="all")
1632
1632
1633 def __unicode__(self):
1633 def __unicode__(self):
1634 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1634 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1635 safe_unicode(self.repo_name))
1635 safe_unicode(self.repo_name))
1636
1636
1637 @hybrid_property
1637 @hybrid_property
1638 def description_safe(self):
1638 def description_safe(self):
1639 from rhodecode.lib import helpers as h
1639 from rhodecode.lib import helpers as h
1640 return h.escape(self.description)
1640 return h.escape(self.description)
1641
1641
1642 @hybrid_property
1642 @hybrid_property
1643 def landing_rev(self):
1643 def landing_rev(self):
1644 # always should return [rev_type, rev]
1644 # always should return [rev_type, rev]
1645 if self._landing_revision:
1645 if self._landing_revision:
1646 _rev_info = self._landing_revision.split(':')
1646 _rev_info = self._landing_revision.split(':')
1647 if len(_rev_info) < 2:
1647 if len(_rev_info) < 2:
1648 _rev_info.insert(0, 'rev')
1648 _rev_info.insert(0, 'rev')
1649 return [_rev_info[0], _rev_info[1]]
1649 return [_rev_info[0], _rev_info[1]]
1650 return [None, None]
1650 return [None, None]
1651
1651
1652 @landing_rev.setter
1652 @landing_rev.setter
1653 def landing_rev(self, val):
1653 def landing_rev(self, val):
1654 if ':' not in val:
1654 if ':' not in val:
1655 raise ValueError('value must be delimited with `:` and consist '
1655 raise ValueError('value must be delimited with `:` and consist '
1656 'of <rev_type>:<rev>, got %s instead' % val)
1656 'of <rev_type>:<rev>, got %s instead' % val)
1657 self._landing_revision = val
1657 self._landing_revision = val
1658
1658
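# --- Illustrative sketch, not part of the original source ---------------
# The landing revision is stored as a single '<rev_type>:<rev>' string; the
# getter above splits it back into a two element list and tolerates legacy
# values that lack a type prefix:

def parse_landing_rev(value):
    if value:
        rev_info = value.split(':')
        if len(rev_info) < 2:
            # legacy value without a type, assume a plain revision
            rev_info.insert(0, 'rev')
        return [rev_info[0], rev_info[1]]
    return [None, None]

# parse_landing_rev('branch:default') -> ['branch', 'default']
# parse_landing_rev('tip')            -> ['rev', 'tip']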
1659 @hybrid_property
1659 @hybrid_property
1660 def locked(self):
1660 def locked(self):
1661 if self._locked:
1661 if self._locked:
1662 user_id, timelocked, reason = self._locked.split(':')
1662 user_id, timelocked, reason = self._locked.split(':')
1663 lock_values = int(user_id), timelocked, reason
1663 lock_values = int(user_id), timelocked, reason
1664 else:
1664 else:
1665 lock_values = [None, None, None]
1665 lock_values = [None, None, None]
1666 return lock_values
1666 return lock_values
1667
1667
1668 @locked.setter
1668 @locked.setter
1669 def locked(self, val):
1669 def locked(self, val):
1670 if val and isinstance(val, (list, tuple)):
1670 if val and isinstance(val, (list, tuple)):
1671 self._locked = ':'.join(map(str, val))
1671 self._locked = ':'.join(map(str, val))
1672 else:
1672 else:
1673 self._locked = None
1673 self._locked = None
1674
1674
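# --- Illustrative sketch, not part of the original source ---------------
# A repository lock is stored as one 'user_id:timestamp:reason' string; the
# `locked` hybrid property above packs and unpacks it. Standalone helpers
# (like the original, the reason must not itself contain ':'):

def pack_lock(user_id, timestamp, reason):
    return ':'.join(map(str, (user_id, timestamp, reason)))

def unpack_lock(locked):
    if locked:
        user_id, timelocked, reason = locked.split(':')
        return int(user_id), timelocked, reason
    return [None, None, None]

# unpack_lock(pack_lock(4, 1532609460, 'lock_web'))
#   -> (4, '1532609460', 'lock_web')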
1675 @hybrid_property
1675 @hybrid_property
1676 def changeset_cache(self):
1676 def changeset_cache(self):
1677 from rhodecode.lib.vcs.backends.base import EmptyCommit
1677 from rhodecode.lib.vcs.backends.base import EmptyCommit
1678 dummy = EmptyCommit().__json__()
1678 dummy = EmptyCommit().__json__()
1679 if not self._changeset_cache:
1679 if not self._changeset_cache:
1680 return dummy
1680 return dummy
1681 try:
1681 try:
1682 return json.loads(self._changeset_cache)
1682 return json.loads(self._changeset_cache)
1683 except TypeError:
1683 except TypeError:
1684 return dummy
1684 return dummy
1685 except Exception:
1685 except Exception:
1686 log.error(traceback.format_exc())
1686 log.error(traceback.format_exc())
1687 return dummy
1687 return dummy
1688
1688
1689 @changeset_cache.setter
1689 @changeset_cache.setter
1690 def changeset_cache(self, val):
1690 def changeset_cache(self, val):
1691 try:
1691 try:
1692 self._changeset_cache = json.dumps(val)
1692 self._changeset_cache = json.dumps(val)
1693 except Exception:
1693 except Exception:
1694 log.error(traceback.format_exc())
1694 log.error(traceback.format_exc())
1695
1695
1696 @hybrid_property
1696 @hybrid_property
1697 def repo_name(self):
1697 def repo_name(self):
1698 return self._repo_name
1698 return self._repo_name
1699
1699
1700 @repo_name.setter
1700 @repo_name.setter
1701 def repo_name(self, value):
1701 def repo_name(self, value):
1702 self._repo_name = value
1702 self._repo_name = value
1703 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1703 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1704
1704
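# --- Illustrative sketch, not part of the original source ---------------
# Setting `repo_name` above also stores a SHA1 of the name in
# `_repo_name_hash`, which backs the fixed-length, unique `repo_name_hash`
# column (the name itself lives in an unbounded Text column):

import hashlib

def repo_name_hash(repo_name):
    # the model runs the name through safe_str to get bytes; encoding
    # explicitly here keeps the sketch Python 3 friendly
    return hashlib.sha1(repo_name.encode('utf-8')).hexdigest()

# repo_name_hash('group/my-repo') -> a stable 40 character hex digest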
1705 @classmethod
1705 @classmethod
1706 def normalize_repo_name(cls, repo_name):
1706 def normalize_repo_name(cls, repo_name):
1707 """
1707 """
1708 Normalizes an OS-specific repo_name into the format stored internally in the
1708 Normalizes an OS-specific repo_name into the format stored internally in the
1709 database, using URL_SEP
1709 database, using URL_SEP
1710
1710
1711 :param cls:
1711 :param cls:
1712 :param repo_name:
1712 :param repo_name:
1713 """
1713 """
1714 return cls.NAME_SEP.join(repo_name.split(os.sep))
1714 return cls.NAME_SEP.join(repo_name.split(os.sep))
1715
1715
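# --- Illustrative sketch, not part of the original source ---------------
# Repository names are stored with URL_SEP ('/') regardless of the OS path
# separator; `normalize_repo_name` converts filesystem-style names:

import os

URL_SEP = '/'

def normalize_repo_name(repo_name):
    return URL_SEP.join(repo_name.split(os.sep))

# on Windows: normalize_repo_name('group\\my-repo') -> 'group/my-repo'
# on POSIX the name is already in the stored form and passes through as-is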
1716 @classmethod
1716 @classmethod
1717 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1717 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1718 session = Session()
1718 session = Session()
1719 q = session.query(cls).filter(cls.repo_name == repo_name)
1719 q = session.query(cls).filter(cls.repo_name == repo_name)
1720
1720
1721 if cache:
1721 if cache:
1722 if identity_cache:
1722 if identity_cache:
1723 val = cls.identity_cache(session, 'repo_name', repo_name)
1723 val = cls.identity_cache(session, 'repo_name', repo_name)
1724 if val:
1724 if val:
1725 return val
1725 return val
1726 else:
1726 else:
1727 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1727 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1728 q = q.options(
1728 q = q.options(
1729 FromCache("sql_cache_short", cache_key))
1729 FromCache("sql_cache_short", cache_key))
1730
1730
1731 return q.scalar()
1731 return q.scalar()
1732
1732
1733 @classmethod
1733 @classmethod
1734 def get_by_id_or_repo_name(cls, repoid):
1734 def get_by_id_or_repo_name(cls, repoid):
1735 if isinstance(repoid, (int, long)):
1735 if isinstance(repoid, (int, long)):
1736 try:
1736 try:
1737 repo = cls.get(repoid)
1737 repo = cls.get(repoid)
1738 except ValueError:
1738 except ValueError:
1739 repo = None
1739 repo = None
1740 else:
1740 else:
1741 repo = cls.get_by_repo_name(repoid)
1741 repo = cls.get_by_repo_name(repoid)
1742 return repo
1742 return repo
1743
1743
1744 @classmethod
1744 @classmethod
1745 def get_by_full_path(cls, repo_full_path):
1745 def get_by_full_path(cls, repo_full_path):
1746 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1746 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1747 repo_name = cls.normalize_repo_name(repo_name)
1747 repo_name = cls.normalize_repo_name(repo_name)
1748 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1748 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1749
1749
1750 @classmethod
1750 @classmethod
1751 def get_repo_forks(cls, repo_id):
1751 def get_repo_forks(cls, repo_id):
1752 return cls.query().filter(Repository.fork_id == repo_id)
1752 return cls.query().filter(Repository.fork_id == repo_id)
1753
1753
1754 @classmethod
1754 @classmethod
1755 def base_path(cls):
1755 def base_path(cls):
1756 """
1756 """
1757 Returns the base path where all repos are stored
1757 Returns the base path where all repos are stored
1758
1758
1759 :param cls:
1759 :param cls:
1760 """
1760 """
1761 q = Session().query(RhodeCodeUi)\
1761 q = Session().query(RhodeCodeUi)\
1762 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1762 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1763 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1763 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1764 return q.one().ui_value
1764 return q.one().ui_value
1765
1765
1766 @classmethod
1766 @classmethod
1767 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1767 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1768 case_insensitive=True):
1768 case_insensitive=True):
1769 q = Repository.query()
1769 q = Repository.query()
1770
1770
1771 if not isinstance(user_id, Optional):
1771 if not isinstance(user_id, Optional):
1772 q = q.filter(Repository.user_id == user_id)
1772 q = q.filter(Repository.user_id == user_id)
1773
1773
1774 if not isinstance(group_id, Optional):
1774 if not isinstance(group_id, Optional):
1775 q = q.filter(Repository.group_id == group_id)
1775 q = q.filter(Repository.group_id == group_id)
1776
1776
1777 if case_insensitive:
1777 if case_insensitive:
1778 q = q.order_by(func.lower(Repository.repo_name))
1778 q = q.order_by(func.lower(Repository.repo_name))
1779 else:
1779 else:
1780 q = q.order_by(Repository.repo_name)
1780 q = q.order_by(Repository.repo_name)
1781 return q.all()
1781 return q.all()
1782
1782
1783 @property
1783 @property
1784 def forks(self):
1784 def forks(self):
1785 """
1785 """
1786 Return forks of this repo
1786 Return forks of this repo
1787 """
1787 """
1788 return Repository.get_repo_forks(self.repo_id)
1788 return Repository.get_repo_forks(self.repo_id)
1789
1789
1790 @property
1790 @property
1791 def parent(self):
1791 def parent(self):
1792 """
1792 """
1793 Returns fork parent
1793 Returns fork parent
1794 """
1794 """
1795 return self.fork
1795 return self.fork
1796
1796
1797 @property
1797 @property
1798 def just_name(self):
1798 def just_name(self):
1799 return self.repo_name.split(self.NAME_SEP)[-1]
1799 return self.repo_name.split(self.NAME_SEP)[-1]
1800
1800
1801 @property
1801 @property
1802 def groups_with_parents(self):
1802 def groups_with_parents(self):
1803 groups = []
1803 groups = []
1804 if self.group is None:
1804 if self.group is None:
1805 return groups
1805 return groups
1806
1806
1807 cur_gr = self.group
1807 cur_gr = self.group
1808 groups.insert(0, cur_gr)
1808 groups.insert(0, cur_gr)
1809 while 1:
1809 while 1:
1810 gr = getattr(cur_gr, 'parent_group', None)
1810 gr = getattr(cur_gr, 'parent_group', None)
1811 cur_gr = cur_gr.parent_group
1811 cur_gr = cur_gr.parent_group
1812 if gr is None:
1812 if gr is None:
1813 break
1813 break
1814 groups.insert(0, gr)
1814 groups.insert(0, gr)
1815
1815
1816 return groups
1816 return groups
1817
1817
1818 @property
1818 @property
1819 def groups_and_repo(self):
1819 def groups_and_repo(self):
1820 return self.groups_with_parents, self
1820 return self.groups_with_parents, self
1821
1821
1822 @LazyProperty
1822 @LazyProperty
1823 def repo_path(self):
1823 def repo_path(self):
1824 """
1824 """
1825 Returns the full base path for this repository, i.e. where it actually
1825 Returns the full base path for this repository, i.e. where it actually
1826 exists on the filesystem
1826 exists on the filesystem
1827 """
1827 """
1828 q = Session().query(RhodeCodeUi).filter(
1828 q = Session().query(RhodeCodeUi).filter(
1829 RhodeCodeUi.ui_key == self.NAME_SEP)
1829 RhodeCodeUi.ui_key == self.NAME_SEP)
1830 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1830 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1831 return q.one().ui_value
1831 return q.one().ui_value
1832
1832
1833 @property
1833 @property
1834 def repo_full_path(self):
1834 def repo_full_path(self):
1835 p = [self.repo_path]
1835 p = [self.repo_path]
1836 # we need to split the name by / since this is how we store the
1836 # we need to split the name by / since this is how we store the
1837 # names in the database, but that eventually needs to be converted
1837 # names in the database, but that eventually needs to be converted
1838 # into a valid system path
1838 # into a valid system path
1839 p += self.repo_name.split(self.NAME_SEP)
1839 p += self.repo_name.split(self.NAME_SEP)
1840 return os.path.join(*map(safe_unicode, p))
1840 return os.path.join(*map(safe_unicode, p))
1841
1841
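# --- Illustrative sketch, not part of the original source ---------------
# `repo_full_path` above rebuilds the on-disk location by splitting the
# stored name on NAME_SEP ('/') and joining the pieces onto the base store
# path with the platform separator:

import os

def full_path(base_path, repo_name, name_sep='/'):
    parts = [base_path] + repo_name.split(name_sep)
    return os.path.join(*parts)

# full_path('/srv/repos', 'group/my-repo')
#   -> '/srv/repos/group/my-repo' on POSIX (backslashes on Windows)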
1842 @property
1842 @property
1843 def cache_keys(self):
1843 def cache_keys(self):
1844 """
1844 """
1845 Returns associated cache keys for that repo
1845 Returns associated cache keys for that repo
1846 """
1846 """
1847 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1847 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1848 repo_id=self.repo_id)
1848 repo_id=self.repo_id)
1849 return CacheKey.query()\
1849 return CacheKey.query()\
1850 .filter(CacheKey.cache_args == invalidation_namespace)\
1850 .filter(CacheKey.cache_args == invalidation_namespace)\
1851 .order_by(CacheKey.cache_key)\
1851 .order_by(CacheKey.cache_key)\
1852 .all()
1852 .all()
1853
1853
1854 @property
1854 @property
1855 def cached_diffs_relative_dir(self):
1855 def cached_diffs_relative_dir(self):
1856 """
1856 """
1857 Return the cached diffs directory as a path relative to the repository
1857 Return the cached diffs directory as a path relative to the repository
1858 store, safe to display to users who should not learn the absolute
1858 store, safe to display to users who should not learn the absolute
1859 store path
1859 store path
1860 """
1860 """
1861 return os.path.join(
1861 return os.path.join(
1862 os.path.dirname(self.repo_name),
1862 os.path.dirname(self.repo_name),
1863 self.cached_diffs_dir.split(os.path.sep)[-1])
1863 self.cached_diffs_dir.split(os.path.sep)[-1])
1864
1864
1865 @property
1865 @property
1866 def cached_diffs_dir(self):
1866 def cached_diffs_dir(self):
1867 path = self.repo_full_path
1867 path = self.repo_full_path
1868 return os.path.join(
1868 return os.path.join(
1869 os.path.dirname(path),
1869 os.path.dirname(path),
1870 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1870 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1871
1871
1872 def cached_diffs(self):
1872 def cached_diffs(self):
1873 diff_cache_dir = self.cached_diffs_dir
1873 diff_cache_dir = self.cached_diffs_dir
1874 if os.path.isdir(diff_cache_dir):
1874 if os.path.isdir(diff_cache_dir):
1875 return os.listdir(diff_cache_dir)
1875 return os.listdir(diff_cache_dir)
1876 return []
1876 return []
1877
1877
1878 def shadow_repos(self):
1878 def shadow_repos(self):
1879 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1879 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1880 return [
1880 return [
1881 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1881 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1882 if x.startswith(shadow_repos_pattern)]
1882 if x.startswith(shadow_repos_pattern)]
1883
1883
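# --- Illustrative sketch, not part of the original source ---------------
# Shadow repositories and the diff cache live next to the repository on
# disk, in hidden directories derived from the repo id; `shadow_repos`
# above lists siblings whose names start with that prefix:

import os

def list_shadow_repos(repo_full_path, repo_id):
    pattern = '.__shadow_repo_{}'.format(repo_id)
    parent = os.path.dirname(repo_full_path)
    return [name for name in os.listdir(parent) if name.startswith(pattern)]

# for repo_id=42 this matches directory names beginning with
# '.__shadow_repo_42'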
1884 def get_new_name(self, repo_name):
1884 def get_new_name(self, repo_name):
1885 """
1885 """
1886 returns the new full repository name based on the assigned group and the new name
1886 returns the new full repository name based on the assigned group and the new name
1887
1887
1888 :param repo_name:
1888 :param repo_name:
1889 """
1889 """
1890 path_prefix = self.group.full_path_splitted if self.group else []
1890 path_prefix = self.group.full_path_splitted if self.group else []
1891 return self.NAME_SEP.join(path_prefix + [repo_name])
1891 return self.NAME_SEP.join(path_prefix + [repo_name])
1892
1892
1893 @property
1893 @property
1894 def _config(self):
1894 def _config(self):
1895 """
1895 """
1896 Returns db based config object.
1896 Returns db based config object.
1897 """
1897 """
1898 from rhodecode.lib.utils import make_db_config
1898 from rhodecode.lib.utils import make_db_config
1899 return make_db_config(clear_session=False, repo=self)
1899 return make_db_config(clear_session=False, repo=self)
1900
1900
1901 def permissions(self, with_admins=True, with_owner=True):
1901 def permissions(self, with_admins=True, with_owner=True):
1902 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1902 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1903 q = q.options(joinedload(UserRepoToPerm.repository),
1903 q = q.options(joinedload(UserRepoToPerm.repository),
1904 joinedload(UserRepoToPerm.user),
1904 joinedload(UserRepoToPerm.user),
1905 joinedload(UserRepoToPerm.permission),)
1905 joinedload(UserRepoToPerm.permission),)
1906
1906
1907 # get owners, admins and their permissions. We rewrite the sqlalchemy
1907 # get owners, admins and their permissions. We rewrite the sqlalchemy
1908 # objects into named tuples because the sqlalchemy session keeps a
1908 # objects into named tuples because the sqlalchemy session keeps a
1909 # global reference, so changing one object would propagate to all
1909 # global reference, so changing one object would propagate to all
1910 # others. This means that if an admin is also the owner, an admin_row
1910 # others. This means that if an admin is also the owner, an admin_row
1911 # change would propagate to both objects
1911 # change would propagate to both objects
1912 perm_rows = []
1912 perm_rows = []
1913 for _usr in q.all():
1913 for _usr in q.all():
1914 usr = AttributeDict(_usr.user.get_dict())
1914 usr = AttributeDict(_usr.user.get_dict())
1915 usr.permission = _usr.permission.permission_name
1915 usr.permission = _usr.permission.permission_name
1916 perm_rows.append(usr)
1916 perm_rows.append(usr)
1917
1917
1918 # filter the perm rows by 'default' first and then sort them by
1918 # filter the perm rows by 'default' first and then sort them by
1919 # admin,write,read,none permissions sorted again alphabetically in
1919 # admin,write,read,none permissions sorted again alphabetically in
1920 # each group
1920 # each group
1921 perm_rows = sorted(perm_rows, key=display_user_sort)
1921 perm_rows = sorted(perm_rows, key=display_user_sort)
1922
1922
1923 _admin_perm = 'repository.admin'
1923 _admin_perm = 'repository.admin'
1924 owner_row = []
1924 owner_row = []
1925 if with_owner:
1925 if with_owner:
1926 usr = AttributeDict(self.user.get_dict())
1926 usr = AttributeDict(self.user.get_dict())
1927 usr.owner_row = True
1927 usr.owner_row = True
1928 usr.permission = _admin_perm
1928 usr.permission = _admin_perm
1929 owner_row.append(usr)
1929 owner_row.append(usr)
1930
1930
1931 super_admin_rows = []
1931 super_admin_rows = []
1932 if with_admins:
1932 if with_admins:
1933 for usr in User.get_all_super_admins():
1933 for usr in User.get_all_super_admins():
1934 # if this admin is also owner, don't double the record
1934 # if this admin is also owner, don't double the record
1935 if usr.user_id == owner_row[0].user_id:
1935 if usr.user_id == owner_row[0].user_id:
1936 owner_row[0].admin_row = True
1936 owner_row[0].admin_row = True
1937 else:
1937 else:
1938 usr = AttributeDict(usr.get_dict())
1938 usr = AttributeDict(usr.get_dict())
1939 usr.admin_row = True
1939 usr.admin_row = True
1940 usr.permission = _admin_perm
1940 usr.permission = _admin_perm
1941 super_admin_rows.append(usr)
1941 super_admin_rows.append(usr)
1942
1942
1943 return super_admin_rows + owner_row + perm_rows
1943 return super_admin_rows + owner_row + perm_rows
1944
1944
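The comment inside permissions() explains why rows are copied out of the SQLAlchemy session before flags are attached. A minimal illustrative sketch of that idea, using a stand-in AttributeDict (the real helper lives in rhodecode.lib.utils2; user objects are assumed to expose get_dict() as above):

class AttributeDict(dict):
    # stand-in for the helper used above: dict keys readable/writable as attributes
    __getattr__ = dict.get
    __setattr__ = dict.__setitem__

def detach_row(orm_user, permission_name):
    # copy the ORM row into a plain object, so per-row flags such as
    # admin_row/owner_row never touch the shared, session-tracked instance
    row = AttributeDict(orm_user.get_dict())
    row.permission = permission_name
    return row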
1945 def permission_user_groups(self):
1945 def permission_user_groups(self):
1946 q = UserGroupRepoToPerm.query().filter(
1946 q = UserGroupRepoToPerm.query().filter(
1947 UserGroupRepoToPerm.repository == self)
1947 UserGroupRepoToPerm.repository == self)
1948 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1948 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1949 joinedload(UserGroupRepoToPerm.users_group),
1949 joinedload(UserGroupRepoToPerm.users_group),
1950 joinedload(UserGroupRepoToPerm.permission),)
1950 joinedload(UserGroupRepoToPerm.permission),)
1951
1951
1952 perm_rows = []
1952 perm_rows = []
1953 for _user_group in q.all():
1953 for _user_group in q.all():
1954 usr = AttributeDict(_user_group.users_group.get_dict())
1954 usr = AttributeDict(_user_group.users_group.get_dict())
1955 usr.permission = _user_group.permission.permission_name
1955 usr.permission = _user_group.permission.permission_name
1956 perm_rows.append(usr)
1956 perm_rows.append(usr)
1957
1957
1958 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1958 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1959 return perm_rows
1959 return perm_rows
1960
1960
1961 def get_api_data(self, include_secrets=False):
1961 def get_api_data(self, include_secrets=False):
1962 """
1962 """
1963 Common function for generating repo api data
1963 Common function for generating repo api data
1964
1964
1965 :param include_secrets: See :meth:`User.get_api_data`.
1965 :param include_secrets: See :meth:`User.get_api_data`.
1966
1966
1967 """
1967 """
1968 # TODO: mikhail: there is an anti-pattern here, we probably need to
1968 # TODO: mikhail: there is an anti-pattern here, we probably need to
1969 # move these methods to the models level.
1969 # move these methods to the models level.
1970 from rhodecode.model.settings import SettingsModel
1970 from rhodecode.model.settings import SettingsModel
1971 from rhodecode.model.repo import RepoModel
1971 from rhodecode.model.repo import RepoModel
1972
1972
1973 repo = self
1973 repo = self
1974 _user_id, _time, _reason = self.locked
1974 _user_id, _time, _reason = self.locked
1975
1975
1976 data = {
1976 data = {
1977 'repo_id': repo.repo_id,
1977 'repo_id': repo.repo_id,
1978 'repo_name': repo.repo_name,
1978 'repo_name': repo.repo_name,
1979 'repo_type': repo.repo_type,
1979 'repo_type': repo.repo_type,
1980 'clone_uri': repo.clone_uri or '',
1980 'clone_uri': repo.clone_uri or '',
1981 'push_uri': repo.push_uri or '',
1981 'push_uri': repo.push_uri or '',
1982 'url': RepoModel().get_url(self),
1982 'url': RepoModel().get_url(self),
1983 'private': repo.private,
1983 'private': repo.private,
1984 'created_on': repo.created_on,
1984 'created_on': repo.created_on,
1985 'description': repo.description_safe,
1985 'description': repo.description_safe,
1986 'landing_rev': repo.landing_rev,
1986 'landing_rev': repo.landing_rev,
1987 'owner': repo.user.username,
1987 'owner': repo.user.username,
1988 'fork_of': repo.fork.repo_name if repo.fork else None,
1988 'fork_of': repo.fork.repo_name if repo.fork else None,
1989 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1989 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1990 'enable_statistics': repo.enable_statistics,
1990 'enable_statistics': repo.enable_statistics,
1991 'enable_locking': repo.enable_locking,
1991 'enable_locking': repo.enable_locking,
1992 'enable_downloads': repo.enable_downloads,
1992 'enable_downloads': repo.enable_downloads,
1993 'last_changeset': repo.changeset_cache,
1993 'last_changeset': repo.changeset_cache,
1994 'locked_by': User.get(_user_id).get_api_data(
1994 'locked_by': User.get(_user_id).get_api_data(
1995 include_secrets=include_secrets) if _user_id else None,
1995 include_secrets=include_secrets) if _user_id else None,
1996 'locked_date': time_to_datetime(_time) if _time else None,
1996 'locked_date': time_to_datetime(_time) if _time else None,
1997 'lock_reason': _reason if _reason else None,
1997 'lock_reason': _reason if _reason else None,
1998 }
1998 }
1999
1999
2000 # TODO: mikhail: should be per-repo settings here
2000 # TODO: mikhail: should be per-repo settings here
2001 rc_config = SettingsModel().get_all_settings()
2001 rc_config = SettingsModel().get_all_settings()
2002 repository_fields = str2bool(
2002 repository_fields = str2bool(
2003 rc_config.get('rhodecode_repository_fields'))
2003 rc_config.get('rhodecode_repository_fields'))
2004 if repository_fields:
2004 if repository_fields:
2005 for f in self.extra_fields:
2005 for f in self.extra_fields:
2006 data[f.field_key_prefixed] = f.field_value
2006 data[f.field_key_prefixed] = f.field_value
2007
2007
2008 return data
2008 return data
2009
2009
2010 @classmethod
2010 @classmethod
2011 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2011 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2012 if not lock_time:
2012 if not lock_time:
2013 lock_time = time.time()
2013 lock_time = time.time()
2014 if not lock_reason:
2014 if not lock_reason:
2015 lock_reason = cls.LOCK_AUTOMATIC
2015 lock_reason = cls.LOCK_AUTOMATIC
2016 repo.locked = [user_id, lock_time, lock_reason]
2016 repo.locked = [user_id, lock_time, lock_reason]
2017 Session().add(repo)
2017 Session().add(repo)
2018 Session().commit()
2018 Session().commit()
2019
2019
2020 @classmethod
2020 @classmethod
2021 def unlock(cls, repo):
2021 def unlock(cls, repo):
2022 repo.locked = None
2022 repo.locked = None
2023 Session().add(repo)
2023 Session().add(repo)
2024 Session().commit()
2024 Session().commit()
2025
2025
2026 @classmethod
2026 @classmethod
2027 def getlock(cls, repo):
2027 def getlock(cls, repo):
2028 return repo.locked
2028 return repo.locked
2029
2029
2030 def is_user_lock(self, user_id):
2030 def is_user_lock(self, user_id):
2031 if self.lock[0]:
2031 if self.lock[0]:
2032 lock_user_id = safe_int(self.lock[0])
2032 lock_user_id = safe_int(self.lock[0])
2033 user_id = safe_int(user_id)
2033 user_id = safe_int(user_id)
2034 # both are ints, and they are equal
2034 # both are ints, and they are equal
2035 return all([lock_user_id, user_id]) and lock_user_id == user_id
2035 return all([lock_user_id, user_id]) and lock_user_id == user_id
2036
2036
2037 return False
2037 return False
2038
2038
2039 def get_locking_state(self, action, user_id, only_when_enabled=True):
2039 def get_locking_state(self, action, user_id, only_when_enabled=True):
2040 """
2040 """
2041 Checks locking on this repository. If locking is enabled and a lock is
2041 Checks locking on this repository. If locking is enabled and a lock is
2042 present, returns a tuple of make_lock, locked, locked_by.
2042 present, returns a tuple of make_lock, locked, locked_by.
2043 make_lock can have 3 states: None (do nothing), True (make the lock),
2043 make_lock can have 3 states: None (do nothing), True (make the lock),
2044 False (release the lock). This value is later propagated to the hooks,
2044 False (release the lock). This value is later propagated to the hooks,
2045 which do the locking. Think of it as a signal telling the hooks what to do.
2045 which do the locking. Think of it as a signal telling the hooks what to do.
2046
2046
2047 """
2047 """
2048 # TODO: johbo: This is part of the business logic and should be moved
2048 # TODO: johbo: This is part of the business logic and should be moved
2049 # into the RepositoryModel.
2049 # into the RepositoryModel.
2050
2050
2051 if action not in ('push', 'pull'):
2051 if action not in ('push', 'pull'):
2052 raise ValueError("Invalid action value: %s" % repr(action))
2052 raise ValueError("Invalid action value: %s" % repr(action))
2053
2053
2054 # defines if locked error should be thrown to user
2054 # defines if locked error should be thrown to user
2055 currently_locked = False
2055 currently_locked = False
2056 # defines if new lock should be made, tri-state
2056 # defines if new lock should be made, tri-state
2057 make_lock = None
2057 make_lock = None
2058 repo = self
2058 repo = self
2059 user = User.get(user_id)
2059 user = User.get(user_id)
2060
2060
2061 lock_info = repo.locked
2061 lock_info = repo.locked
2062
2062
2063 if repo and (repo.enable_locking or not only_when_enabled):
2063 if repo and (repo.enable_locking or not only_when_enabled):
2064 if action == 'push':
2064 if action == 'push':
2065 # check if it's already locked; if it is, compare users
2065 # check if it's already locked; if it is, compare users
2066 locked_by_user_id = lock_info[0]
2066 locked_by_user_id = lock_info[0]
2067 if user.user_id == locked_by_user_id:
2067 if user.user_id == locked_by_user_id:
2068 log.debug(
2068 log.debug(
2069 'Got `push` action from user %s, now unlocking', user)
2069 'Got `push` action from user %s, now unlocking', user)
2070 # unlock if we have push from user who locked
2070 # unlock if we have push from user who locked
2071 make_lock = False
2071 make_lock = False
2072 else:
2072 else:
2073 # we're not the same user who locked, ban with
2073 # we're not the same user who locked, ban with
2074 # code defined in settings (default is 423 HTTP Locked) !
2074 # code defined in settings (default is 423 HTTP Locked) !
2075 log.debug('Repo %s is currently locked by %s', repo, user)
2075 log.debug('Repo %s is currently locked by %s', repo, user)
2076 currently_locked = True
2076 currently_locked = True
2077 elif action == 'pull':
2077 elif action == 'pull':
2078 # [0] user [1] date
2078 # [0] user [1] date
2079 if lock_info[0] and lock_info[1]:
2079 if lock_info[0] and lock_info[1]:
2080 log.debug('Repo %s is currently locked by %s', repo, user)
2080 log.debug('Repo %s is currently locked by %s', repo, user)
2081 currently_locked = True
2081 currently_locked = True
2082 else:
2082 else:
2083 log.debug('Setting lock on repo %s by %s', repo, user)
2083 log.debug('Setting lock on repo %s by %s', repo, user)
2084 make_lock = True
2084 make_lock = True
2085
2085
2086 else:
2086 else:
2087 log.debug('Repository %s does not have locking enabled', repo)
2087 log.debug('Repository %s does not have locking enabled', repo)
2088
2088
2089 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2089 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2090 make_lock, currently_locked, lock_info)
2090 make_lock, currently_locked, lock_info)
2091
2091
2092 from rhodecode.lib.auth import HasRepoPermissionAny
2092 from rhodecode.lib.auth import HasRepoPermissionAny
2093 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2093 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2094 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2094 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2095 # if we don't have at least write permission we cannot make a lock
2095 # if we don't have at least write permission we cannot make a lock
2096 log.debug('lock state reset back to FALSE due to lack '
2096 log.debug('lock state reset back to FALSE due to lack '
2097 'of at least write permission')
2097 'of at least write permission')
2098 make_lock = False
2098 make_lock = False
2099
2099
2100 return make_lock, currently_locked, lock_info
2100 return make_lock, currently_locked, lock_info
2101
2101
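A short, hedged usage sketch of the tri-state value documented above; `repo` is assumed to be an existing Repository row and `user_id` the acting user's id:

action = 'push'  # or 'pull'
make_lock, currently_locked, lock_info = repo.get_locking_state(action, user_id)

if make_lock is True:
    # signalled on 'pull' when locking is on and no lock is held yet
    Repository.lock(repo, user_id, lock_reason=Repository.LOCK_AUTOMATIC)
elif make_lock is False:
    # signalled on 'push' by the user who already holds the lock
    Repository.unlock(repo)
# make_lock is None: leave the locking state untouched

if currently_locked:
    locked_by_user_id, lock_time, reason = lock_info  # as stored by Repository.lock()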
2102 @property
2102 @property
2103 def last_db_change(self):
2103 def last_db_change(self):
2104 return self.updated_on
2104 return self.updated_on
2105
2105
2106 @property
2106 @property
2107 def clone_uri_hidden(self):
2107 def clone_uri_hidden(self):
2108 clone_uri = self.clone_uri
2108 clone_uri = self.clone_uri
2109 if clone_uri:
2109 if clone_uri:
2110 import urlobject
2110 import urlobject
2111 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2111 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2112 if url_obj.password:
2112 if url_obj.password:
2113 clone_uri = url_obj.with_password('*****')
2113 clone_uri = url_obj.with_password('*****')
2114 return clone_uri
2114 return clone_uri
2115
2115
2116 @property
2116 @property
2117 def push_uri_hidden(self):
2117 def push_uri_hidden(self):
2118 push_uri = self.push_uri
2118 push_uri = self.push_uri
2119 if push_uri:
2119 if push_uri:
2120 import urlobject
2120 import urlobject
2121 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2121 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2122 if url_obj.password:
2122 if url_obj.password:
2123 push_uri = url_obj.with_password('*****')
2123 push_uri = url_obj.with_password('*****')
2124 return push_uri
2124 return push_uri
2125
2125
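Both *_hidden properties above mask credentials via urlobject; a tiny stand-alone example of that same call (the URL is made up):

import urlobject

url_obj = urlobject.URLObject('https://bob:s3cret@code.example.com/some/repo')
if url_obj.password:
    url_obj = url_obj.with_password('*****')
# str(url_obj) should now read 'https://bob:*****@code.example.com/some/repo'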
2126 def clone_url(self, **override):
2126 def clone_url(self, **override):
2127 from rhodecode.model.settings import SettingsModel
2127 from rhodecode.model.settings import SettingsModel
2128
2128
2129 uri_tmpl = None
2129 uri_tmpl = None
2130 if 'with_id' in override:
2130 if 'with_id' in override:
2131 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2131 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2132 del override['with_id']
2132 del override['with_id']
2133
2133
2134 if 'uri_tmpl' in override:
2134 if 'uri_tmpl' in override:
2135 uri_tmpl = override['uri_tmpl']
2135 uri_tmpl = override['uri_tmpl']
2136 del override['uri_tmpl']
2136 del override['uri_tmpl']
2137
2137
2138 ssh = False
2138 ssh = False
2139 if 'ssh' in override:
2139 if 'ssh' in override:
2140 ssh = True
2140 ssh = True
2141 del override['ssh']
2141 del override['ssh']
2142
2142
2143 # we didn't override our tmpl from **override
2143 # we didn't override our tmpl from **override
2144 if not uri_tmpl:
2144 if not uri_tmpl:
2145 rc_config = SettingsModel().get_all_settings(cache=True)
2145 rc_config = SettingsModel().get_all_settings(cache=True)
2146 if ssh:
2146 if ssh:
2147 uri_tmpl = rc_config.get(
2147 uri_tmpl = rc_config.get(
2148 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2148 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2149 else:
2149 else:
2150 uri_tmpl = rc_config.get(
2150 uri_tmpl = rc_config.get(
2151 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2151 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2152
2152
2153 request = get_current_request()
2153 request = get_current_request()
2154 return get_clone_url(request=request,
2154 return get_clone_url(request=request,
2155 uri_tmpl=uri_tmpl,
2155 uri_tmpl=uri_tmpl,
2156 repo_name=self.repo_name,
2156 repo_name=self.repo_name,
2157 repo_id=self.repo_id, **override)
2157 repo_id=self.repo_id, **override)
2158
2158
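A hedged sketch of the override keywords handled in clone_url() above; the template placeholders in the last call are illustrative, not the exact DEFAULT_CLONE_URI format:

http_url = repo.clone_url()                 # name-based template from settings
id_url = repo.clone_url(with_id=True)       # switch to the id-based template
ssh_url = repo.clone_url(ssh=True)          # use rhodecode_clone_uri_ssh_tmpl
custom = repo.clone_url(uri_tmpl='{scheme}://{netloc}/{repo}')  # explicit template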
2159 def set_state(self, state):
2159 def set_state(self, state):
2160 self.repo_state = state
2160 self.repo_state = state
2161 Session().add(self)
2161 Session().add(self)
2162 #==========================================================================
2162 #==========================================================================
2163 # SCM PROPERTIES
2163 # SCM PROPERTIES
2164 #==========================================================================
2164 #==========================================================================
2165
2165
2166 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2166 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2167 return get_commit_safe(
2167 return get_commit_safe(
2168 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2168 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2169
2169
2170 def get_changeset(self, rev=None, pre_load=None):
2170 def get_changeset(self, rev=None, pre_load=None):
2171 warnings.warn("Use get_commit", DeprecationWarning)
2171 warnings.warn("Use get_commit", DeprecationWarning)
2172 commit_id = None
2172 commit_id = None
2173 commit_idx = None
2173 commit_idx = None
2174 if isinstance(rev, basestring):
2174 if isinstance(rev, basestring):
2175 commit_id = rev
2175 commit_id = rev
2176 else:
2176 else:
2177 commit_idx = rev
2177 commit_idx = rev
2178 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2178 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2179 pre_load=pre_load)
2179 pre_load=pre_load)
2180
2180
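The deprecated wrapper above only maps its argument onto get_commit(); a quick migration note:

# old calls (emit DeprecationWarning)
commit = repo.get_changeset('a1b2c3d4')   # strings are treated as commit ids
commit = repo.get_changeset(42)           # anything else as a commit index

# preferred equivalents
commit = repo.get_commit(commit_id='a1b2c3d4')
commit = repo.get_commit(commit_idx=42)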
2181 def get_landing_commit(self):
2181 def get_landing_commit(self):
2182 """
2182 """
2183 Returns landing commit, or if that doesn't exist returns the tip
2183 Returns landing commit, or if that doesn't exist returns the tip
2184 """
2184 """
2185 _rev_type, _rev = self.landing_rev
2185 _rev_type, _rev = self.landing_rev
2186 commit = self.get_commit(_rev)
2186 commit = self.get_commit(_rev)
2187 if isinstance(commit, EmptyCommit):
2187 if isinstance(commit, EmptyCommit):
2188 return self.get_commit()
2188 return self.get_commit()
2189 return commit
2189 return commit
2190
2190
2191 def update_commit_cache(self, cs_cache=None, config=None):
2191 def update_commit_cache(self, cs_cache=None, config=None):
2192 """
2192 """
2193 Update cache of last changeset for repository, keys should be::
2193 Update cache of last changeset for repository, keys should be::
2194
2194
2195 short_id
2195 short_id
2196 raw_id
2196 raw_id
2197 revision
2197 revision
2198 parents
2198 parents
2199 message
2199 message
2200 date
2200 date
2201 author
2201 author
2202
2202
2203 :param cs_cache:
2203 :param cs_cache:
2204 """
2204 """
2205 from rhodecode.lib.vcs.backends.base import BaseChangeset
2205 from rhodecode.lib.vcs.backends.base import BaseChangeset
2206 if cs_cache is None:
2206 if cs_cache is None:
2207 # use no-cache version here
2207 # use no-cache version here
2208 scm_repo = self.scm_instance(cache=False, config=config)
2208 scm_repo = self.scm_instance(cache=False, config=config)
2209 if scm_repo:
2209 if scm_repo:
2210 cs_cache = scm_repo.get_commit(
2210 cs_cache = scm_repo.get_commit(
2211 pre_load=["author", "date", "message", "parents"])
2211 pre_load=["author", "date", "message", "parents"])
2212 else:
2212 else:
2213 cs_cache = EmptyCommit()
2213 cs_cache = EmptyCommit()
2214
2214
2215 if isinstance(cs_cache, BaseChangeset):
2215 if isinstance(cs_cache, BaseChangeset):
2216 cs_cache = cs_cache.__json__()
2216 cs_cache = cs_cache.__json__()
2217
2217
2218 def is_outdated(new_cs_cache):
2218 def is_outdated(new_cs_cache):
2219 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2219 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2220 new_cs_cache['revision'] != self.changeset_cache['revision']):
2220 new_cs_cache['revision'] != self.changeset_cache['revision']):
2221 return True
2221 return True
2222 return False
2222 return False
2223
2223
2224 # check if we maybe already have the latest cached revision
2224 # check if we maybe already have the latest cached revision
2225 if is_outdated(cs_cache) or not self.changeset_cache:
2225 if is_outdated(cs_cache) or not self.changeset_cache:
2226 _default = datetime.datetime.utcnow()
2226 _default = datetime.datetime.utcnow()
2227 last_change = cs_cache.get('date') or _default
2227 last_change = cs_cache.get('date') or _default
2228 if self.updated_on and self.updated_on > last_change:
2228 if self.updated_on and self.updated_on > last_change:
2229 # we check if last update is newer than the new value
2229 # we check if last update is newer than the new value
2230 # if yes, we use the current timestamp instead. Imagine you get
2230 # if yes, we use the current timestamp instead. Imagine you get
2231 # an old commit pushed 1y ago; we'd set the last update to 1y ago.
2231 # an old commit pushed 1y ago; we'd set the last update to 1y ago.
2232 last_change = _default
2232 last_change = _default
2233 log.debug('updated repo %s with new cs cache %s',
2233 log.debug('updated repo %s with new cs cache %s',
2234 self.repo_name, cs_cache)
2234 self.repo_name, cs_cache)
2235 self.updated_on = last_change
2235 self.updated_on = last_change
2236 self.changeset_cache = cs_cache
2236 self.changeset_cache = cs_cache
2237 Session().add(self)
2237 Session().add(self)
2238 Session().commit()
2238 Session().commit()
2239 else:
2239 else:
2240 log.debug('Skipping update_commit_cache for repo:`%s` '
2240 log.debug('Skipping update_commit_cache for repo:`%s` '
2241 'commit already with latest changes', self.repo_name)
2241 'commit already with latest changes', self.repo_name)
2242
2242
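A hedged sketch of passing a precomputed cs_cache into update_commit_cache(), using the keys listed in its docstring; the exact value shapes (notably parents) are assumptions for illustration:

import datetime

cs_cache = {
    'short_id': 'deadbeefcafe',
    'raw_id': 'deadbeefcafe' * 3 + 'dead',     # made-up 40-char sha
    'revision': 42,
    'parents': [],
    'message': 'caches: make gevent curl connection cache friendly',
    'date': datetime.datetime.utcnow(),
    'author': 'Jane Doe <jane@example.com>',
}
repo.update_commit_cache(cs_cache=cs_cache)

# with no argument the method re-reads the tip from a non-cached scm instance
repo.update_commit_cache()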
2243 @property
2243 @property
2244 def tip(self):
2244 def tip(self):
2245 return self.get_commit('tip')
2245 return self.get_commit('tip')
2246
2246
2247 @property
2247 @property
2248 def author(self):
2248 def author(self):
2249 return self.tip.author
2249 return self.tip.author
2250
2250
2251 @property
2251 @property
2252 def last_change(self):
2252 def last_change(self):
2253 return self.scm_instance().last_change
2253 return self.scm_instance().last_change
2254
2254
2255 def get_comments(self, revisions=None):
2255 def get_comments(self, revisions=None):
2256 """
2256 """
2257 Returns comments for this repository grouped by revisions
2257 Returns comments for this repository grouped by revisions
2258
2258
2259 :param revisions: filter query by revisions only
2259 :param revisions: filter query by revisions only
2260 """
2260 """
2261 cmts = ChangesetComment.query()\
2261 cmts = ChangesetComment.query()\
2262 .filter(ChangesetComment.repo == self)
2262 .filter(ChangesetComment.repo == self)
2263 if revisions:
2263 if revisions:
2264 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2264 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2265 grouped = collections.defaultdict(list)
2265 grouped = collections.defaultdict(list)
2266 for cmt in cmts.all():
2266 for cmt in cmts.all():
2267 grouped[cmt.revision].append(cmt)
2267 grouped[cmt.revision].append(cmt)
2268 return grouped
2268 return grouped
2269
2269
2270 def statuses(self, revisions=None):
2270 def statuses(self, revisions=None):
2271 """
2271 """
2272 Returns statuses for this repository
2272 Returns statuses for this repository
2273
2273
2274 :param revisions: list of revisions to get statuses for
2274 :param revisions: list of revisions to get statuses for
2275 """
2275 """
2276 statuses = ChangesetStatus.query()\
2276 statuses = ChangesetStatus.query()\
2277 .filter(ChangesetStatus.repo == self)\
2277 .filter(ChangesetStatus.repo == self)\
2278 .filter(ChangesetStatus.version == 0)
2278 .filter(ChangesetStatus.version == 0)
2279
2279
2280 if revisions:
2280 if revisions:
2281 # Try doing the filtering in chunks to avoid hitting limits
2281 # Try doing the filtering in chunks to avoid hitting limits
2282 size = 500
2282 size = 500
2283 status_results = []
2283 status_results = []
2284 for chunk in xrange(0, len(revisions), size):
2284 for chunk in xrange(0, len(revisions), size):
2285 status_results += statuses.filter(
2285 status_results += statuses.filter(
2286 ChangesetStatus.revision.in_(
2286 ChangesetStatus.revision.in_(
2287 revisions[chunk: chunk+size])
2287 revisions[chunk: chunk+size])
2288 ).all()
2288 ).all()
2289 else:
2289 else:
2290 status_results = statuses.all()
2290 status_results = statuses.all()
2291
2291
2292 grouped = {}
2292 grouped = {}
2293
2293
2294 # maybe we have a newly opened pull request without a status yet?
2294 # maybe we have a newly opened pull request without a status yet?
2295 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2295 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2296 status_lbl = ChangesetStatus.get_status_lbl(stat)
2296 status_lbl = ChangesetStatus.get_status_lbl(stat)
2297 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2297 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2298 for rev in pr.revisions:
2298 for rev in pr.revisions:
2299 pr_id = pr.pull_request_id
2299 pr_id = pr.pull_request_id
2300 pr_repo = pr.target_repo.repo_name
2300 pr_repo = pr.target_repo.repo_name
2301 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2301 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2302
2302
2303 for stat in status_results:
2303 for stat in status_results:
2304 pr_id = pr_repo = None
2304 pr_id = pr_repo = None
2305 if stat.pull_request:
2305 if stat.pull_request:
2306 pr_id = stat.pull_request.pull_request_id
2306 pr_id = stat.pull_request.pull_request_id
2307 pr_repo = stat.pull_request.target_repo.repo_name
2307 pr_repo = stat.pull_request.target_repo.repo_name
2308 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2308 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2309 pr_id, pr_repo]
2309 pr_id, pr_repo]
2310 return grouped
2310 return grouped
2311
2311
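The chunked filtering in statuses() keeps the SQL IN clause bounded; a generic sketch of the same pattern (the helper name and the example call are placeholders, not part of the model):

def query_in_chunks(base_query, column, values, size=500):
    # apply `column IN (...)` in slices to stay under backend parameter limits
    results = []
    for start in xrange(0, len(values), size):
        results.extend(base_query.filter(column.in_(values[start:start + size])).all())
    return results

# e.g. status rows for a long revision list:
# rows = query_in_chunks(ChangesetStatus.query(), ChangesetStatus.revision, revisions)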
2312 # ==========================================================================
2312 # ==========================================================================
2313 # SCM CACHE INSTANCE
2313 # SCM CACHE INSTANCE
2314 # ==========================================================================
2314 # ==========================================================================
2315
2315
2316 def scm_instance(self, **kwargs):
2316 def scm_instance(self, **kwargs):
2317 import rhodecode
2317 import rhodecode
2318
2318
2319 # Passing a config will not hit the cache; currently this is only used
2319 # Passing a config will not hit the cache; currently this is only used
2320 # for repo2dbmapper
2320 # for repo2dbmapper
2321 config = kwargs.pop('config', None)
2321 config = kwargs.pop('config', None)
2322 cache = kwargs.pop('cache', None)
2322 cache = kwargs.pop('cache', None)
2323 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2323 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2324 # if cache is NOT defined use the global default, else we have full
2324 # if cache is NOT defined use the global default, else we have full
2325 # control over the cache behaviour
2325 # control over the cache behaviour
2326 if cache is None and full_cache and not config:
2326 if cache is None and full_cache and not config:
2327 return self._get_instance_cached()
2327 return self._get_instance_cached()
2328 return self._get_instance(cache=bool(cache), config=config)
2328 return self._get_instance(cache=bool(cache), config=config)
2329
2329
2330 def _get_instance_cached(self):
2330 def _get_instance_cached(self):
2331 from rhodecode.lib import rc_cache
2331 from rhodecode.lib import rc_cache
2332
2332
2333 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2333 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2334 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2334 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2335 repo_id=self.repo_id)
2335 repo_id=self.repo_id)
2336 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2336 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2337
2337
2338 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2338 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2339 def get_instance_cached(repo_id):
2339 def get_instance_cached(repo_id):
2340 return self._get_instance()
2340 return self._get_instance()
2341
2341
2342 # we must use thread scoped cache here,
2343 # because each thread of gevent needs its own connection and cache
2342 inv_context_manager = rc_cache.InvalidationContext(
2344 inv_context_manager = rc_cache.InvalidationContext(
2343 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
2345 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2346 thread_scoped=True)
2344 with inv_context_manager as invalidation_context:
2347 with inv_context_manager as invalidation_context:
2345 args = (self.repo_id,)
2348 args = (self.repo_id,)
2346 # re-compute and store cache if we get invalidate signal
2349 # re-compute and store cache if we get invalidate signal
2347 if invalidation_context.should_invalidate():
2350 if invalidation_context.should_invalidate():
2348 instance = get_instance_cached.refresh(*args)
2351 instance = get_instance_cached.refresh(*args)
2349 else:
2352 else:
2350 instance = get_instance_cached(*args)
2353 instance = get_instance_cached(*args)
2351
2354
2352 log.debug(
2355 log.debug(
2353 'Repo instance fetched in %.3fs', inv_context_manager.compute_time)
2356 'Repo instance fetched in %.3fs', inv_context_manager.compute_time)
2354 return instance
2357 return instance
2355
2358
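This is the method the commit actually changes: the invalidation context is now created with thread_scoped=True so that, under gevent, each green thread keeps its own connection and cache. A hedged sketch of the same caching pattern applied to some other expensive per-repo computation, reusing only the helpers already visible above (compute_stats and its return value are made up):

from rhodecode.lib import rc_cache

cache_namespace_uid = 'cache_repo_instance.{}'.format(repo.repo_id)
invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo.repo_id)
region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

@region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
def compute_stats(repo_id):
    # placeholder for an expensive, cacheable per-repo computation
    return {'repo_id': repo_id}

inv_context_manager = rc_cache.InvalidationContext(
    uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
    thread_scoped=True)  # one cache (and backend connection) per thread/greenlet
with inv_context_manager as invalidation_context:
    if invalidation_context.should_invalidate():
        stats = compute_stats.refresh(repo.repo_id)
    else:
        stats = compute_stats(repo.repo_id)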
2356 def _get_instance(self, cache=True, config=None):
2359 def _get_instance(self, cache=True, config=None):
2357 config = config or self._config
2360 config = config or self._config
2358 custom_wire = {
2361 custom_wire = {
2359 'cache': cache # controls the vcs.remote cache
2362 'cache': cache # controls the vcs.remote cache
2360 }
2363 }
2361 repo = get_vcs_instance(
2364 repo = get_vcs_instance(
2362 repo_path=safe_str(self.repo_full_path),
2365 repo_path=safe_str(self.repo_full_path),
2363 config=config,
2366 config=config,
2364 with_wire=custom_wire,
2367 with_wire=custom_wire,
2365 create=False,
2368 create=False,
2366 _vcs_alias=self.repo_type)
2369 _vcs_alias=self.repo_type)
2367
2370
2368 return repo
2371 return repo
2369
2372
2370 def __json__(self):
2373 def __json__(self):
2371 return {'landing_rev': self.landing_rev}
2374 return {'landing_rev': self.landing_rev}
2372
2375
2373 def get_dict(self):
2376 def get_dict(self):
2374
2377
2375 # Since we transformed `repo_name` to a hybrid property, we need to
2378 # Since we transformed `repo_name` to a hybrid property, we need to
2376 # keep compatibility with the code which uses `repo_name` field.
2379 # keep compatibility with the code which uses `repo_name` field.
2377
2380
2378 result = super(Repository, self).get_dict()
2381 result = super(Repository, self).get_dict()
2379 result['repo_name'] = result.pop('_repo_name', None)
2382 result['repo_name'] = result.pop('_repo_name', None)
2380 return result
2383 return result
2381
2384
2382
2385
2383 class RepoGroup(Base, BaseModel):
2386 class RepoGroup(Base, BaseModel):
2384 __tablename__ = 'groups'
2387 __tablename__ = 'groups'
2385 __table_args__ = (
2388 __table_args__ = (
2386 UniqueConstraint('group_name', 'group_parent_id'),
2389 UniqueConstraint('group_name', 'group_parent_id'),
2387 CheckConstraint('group_id != group_parent_id'),
2390 CheckConstraint('group_id != group_parent_id'),
2388 base_table_args,
2391 base_table_args,
2389 )
2392 )
2390 __mapper_args__ = {'order_by': 'group_name'}
2393 __mapper_args__ = {'order_by': 'group_name'}
2391
2394
2392 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2395 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2393
2396
2394 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2397 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2395 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2398 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2396 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2399 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2397 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2400 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2398 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2401 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2399 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2402 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2400 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2403 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2401 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2404 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2402 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2405 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2403
2406
2404 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2407 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2405 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2408 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2406 parent_group = relationship('RepoGroup', remote_side=group_id)
2409 parent_group = relationship('RepoGroup', remote_side=group_id)
2407 user = relationship('User')
2410 user = relationship('User')
2408 integrations = relationship('Integration',
2411 integrations = relationship('Integration',
2409 cascade="all, delete, delete-orphan")
2412 cascade="all, delete, delete-orphan")
2410
2413
2411 def __init__(self, group_name='', parent_group=None):
2414 def __init__(self, group_name='', parent_group=None):
2412 self.group_name = group_name
2415 self.group_name = group_name
2413 self.parent_group = parent_group
2416 self.parent_group = parent_group
2414
2417
2415 def __unicode__(self):
2418 def __unicode__(self):
2416 return u"<%s('id:%s:%s')>" % (
2419 return u"<%s('id:%s:%s')>" % (
2417 self.__class__.__name__, self.group_id, self.group_name)
2420 self.__class__.__name__, self.group_id, self.group_name)
2418
2421
2419 @hybrid_property
2422 @hybrid_property
2420 def description_safe(self):
2423 def description_safe(self):
2421 from rhodecode.lib import helpers as h
2424 from rhodecode.lib import helpers as h
2422 return h.escape(self.group_description)
2425 return h.escape(self.group_description)
2423
2426
2424 @classmethod
2427 @classmethod
2425 def _generate_choice(cls, repo_group):
2428 def _generate_choice(cls, repo_group):
2426 from webhelpers.html import literal as _literal
2429 from webhelpers.html import literal as _literal
2427 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2430 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2428 return repo_group.group_id, _name(repo_group.full_path_splitted)
2431 return repo_group.group_id, _name(repo_group.full_path_splitted)
2429
2432
2430 @classmethod
2433 @classmethod
2431 def groups_choices(cls, groups=None, show_empty_group=True):
2434 def groups_choices(cls, groups=None, show_empty_group=True):
2432 if not groups:
2435 if not groups:
2433 groups = cls.query().all()
2436 groups = cls.query().all()
2434
2437
2435 repo_groups = []
2438 repo_groups = []
2436 if show_empty_group:
2439 if show_empty_group:
2437 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2440 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2438
2441
2439 repo_groups.extend([cls._generate_choice(x) for x in groups])
2442 repo_groups.extend([cls._generate_choice(x) for x in groups])
2440
2443
2441 repo_groups = sorted(
2444 repo_groups = sorted(
2442 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2445 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2443 return repo_groups
2446 return repo_groups
2444
2447
2445 @classmethod
2448 @classmethod
2446 def url_sep(cls):
2449 def url_sep(cls):
2447 return URL_SEP
2450 return URL_SEP
2448
2451
2449 @classmethod
2452 @classmethod
2450 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2453 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2451 if case_insensitive:
2454 if case_insensitive:
2452 gr = cls.query().filter(func.lower(cls.group_name)
2455 gr = cls.query().filter(func.lower(cls.group_name)
2453 == func.lower(group_name))
2456 == func.lower(group_name))
2454 else:
2457 else:
2455 gr = cls.query().filter(cls.group_name == group_name)
2458 gr = cls.query().filter(cls.group_name == group_name)
2456 if cache:
2459 if cache:
2457 name_key = _hash_key(group_name)
2460 name_key = _hash_key(group_name)
2458 gr = gr.options(
2461 gr = gr.options(
2459 FromCache("sql_cache_short", "get_group_%s" % name_key))
2462 FromCache("sql_cache_short", "get_group_%s" % name_key))
2460 return gr.scalar()
2463 return gr.scalar()
2461
2464
2462 @classmethod
2465 @classmethod
2463 def get_user_personal_repo_group(cls, user_id):
2466 def get_user_personal_repo_group(cls, user_id):
2464 user = User.get(user_id)
2467 user = User.get(user_id)
2465 if user.username == User.DEFAULT_USER:
2468 if user.username == User.DEFAULT_USER:
2466 return None
2469 return None
2467
2470
2468 return cls.query()\
2471 return cls.query()\
2469 .filter(cls.personal == true()) \
2472 .filter(cls.personal == true()) \
2470 .filter(cls.user == user).scalar()
2473 .filter(cls.user == user).scalar()
2471
2474
2472 @classmethod
2475 @classmethod
2473 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2476 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2474 case_insensitive=True):
2477 case_insensitive=True):
2475 q = RepoGroup.query()
2478 q = RepoGroup.query()
2476
2479
2477 if not isinstance(user_id, Optional):
2480 if not isinstance(user_id, Optional):
2478 q = q.filter(RepoGroup.user_id == user_id)
2481 q = q.filter(RepoGroup.user_id == user_id)
2479
2482
2480 if not isinstance(group_id, Optional):
2483 if not isinstance(group_id, Optional):
2481 q = q.filter(RepoGroup.group_parent_id == group_id)
2484 q = q.filter(RepoGroup.group_parent_id == group_id)
2482
2485
2483 if case_insensitive:
2486 if case_insensitive:
2484 q = q.order_by(func.lower(RepoGroup.group_name))
2487 q = q.order_by(func.lower(RepoGroup.group_name))
2485 else:
2488 else:
2486 q = q.order_by(RepoGroup.group_name)
2489 q = q.order_by(RepoGroup.group_name)
2487 return q.all()
2490 return q.all()
2488
2491
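The filters above use an Optional() sentinel as the default, so even an explicit None would be applied as a filter; a short sketch of the intended calls (some_user_id and parent_group_id are placeholders):

all_groups = RepoGroup.get_all_repo_groups()                         # everything
owned = RepoGroup.get_all_repo_groups(user_id=some_user_id)          # owned by one user
children = RepoGroup.get_all_repo_groups(group_id=parent_group_id)   # direct children only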
2489 @property
2492 @property
2490 def parents(self):
2493 def parents(self):
2491 parents_recursion_limit = 10
2494 parents_recursion_limit = 10
2492 groups = []
2495 groups = []
2493 if self.parent_group is None:
2496 if self.parent_group is None:
2494 return groups
2497 return groups
2495 cur_gr = self.parent_group
2498 cur_gr = self.parent_group
2496 groups.insert(0, cur_gr)
2499 groups.insert(0, cur_gr)
2497 cnt = 0
2500 cnt = 0
2498 while 1:
2501 while 1:
2499 cnt += 1
2502 cnt += 1
2500 gr = getattr(cur_gr, 'parent_group', None)
2503 gr = getattr(cur_gr, 'parent_group', None)
2501 cur_gr = cur_gr.parent_group
2504 cur_gr = cur_gr.parent_group
2502 if gr is None:
2505 if gr is None:
2503 break
2506 break
2504 if cnt == parents_recursion_limit:
2507 if cnt == parents_recursion_limit:
2505 # this will prevent accidental infinite loops
2508 # this will prevent accidental infinite loops
2506 log.error(('more than %s parents found for group %s, stopping '
2509 log.error(('more than %s parents found for group %s, stopping '
2507 'recursive parent fetching' % (parents_recursion_limit, self)))
2510 'recursive parent fetching' % (parents_recursion_limit, self)))
2508 break
2511 break
2509
2512
2510 groups.insert(0, gr)
2513 groups.insert(0, gr)
2511 return groups
2514 return groups
2512
2515
2513 @property
2516 @property
2514 def last_db_change(self):
2517 def last_db_change(self):
2515 return self.updated_on
2518 return self.updated_on
2516
2519
2517 @property
2520 @property
2518 def children(self):
2521 def children(self):
2519 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2522 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2520
2523
2521 @property
2524 @property
2522 def name(self):
2525 def name(self):
2523 return self.group_name.split(RepoGroup.url_sep())[-1]
2526 return self.group_name.split(RepoGroup.url_sep())[-1]
2524
2527
2525 @property
2528 @property
2526 def full_path(self):
2529 def full_path(self):
2527 return self.group_name
2530 return self.group_name
2528
2531
2529 @property
2532 @property
2530 def full_path_splitted(self):
2533 def full_path_splitted(self):
2531 return self.group_name.split(RepoGroup.url_sep())
2534 return self.group_name.split(RepoGroup.url_sep())
2532
2535
2533 @property
2536 @property
2534 def repositories(self):
2537 def repositories(self):
2535 return Repository.query()\
2538 return Repository.query()\
2536 .filter(Repository.group == self)\
2539 .filter(Repository.group == self)\
2537 .order_by(Repository.repo_name)
2540 .order_by(Repository.repo_name)
2538
2541
2539 @property
2542 @property
2540 def repositories_recursive_count(self):
2543 def repositories_recursive_count(self):
2541 cnt = self.repositories.count()
2544 cnt = self.repositories.count()
2542
2545
2543 def children_count(group):
2546 def children_count(group):
2544 cnt = 0
2547 cnt = 0
2545 for child in group.children:
2548 for child in group.children:
2546 cnt += child.repositories.count()
2549 cnt += child.repositories.count()
2547 cnt += children_count(child)
2550 cnt += children_count(child)
2548 return cnt
2551 return cnt
2549
2552
2550 return cnt + children_count(self)
2553 return cnt + children_count(self)
2551
2554
2552 def _recursive_objects(self, include_repos=True):
2555 def _recursive_objects(self, include_repos=True):
2553 all_ = []
2556 all_ = []
2554
2557
2555 def _get_members(root_gr):
2558 def _get_members(root_gr):
2556 if include_repos:
2559 if include_repos:
2557 for r in root_gr.repositories:
2560 for r in root_gr.repositories:
2558 all_.append(r)
2561 all_.append(r)
2559 childs = root_gr.children.all()
2562 childs = root_gr.children.all()
2560 if childs:
2563 if childs:
2561 for gr in childs:
2564 for gr in childs:
2562 all_.append(gr)
2565 all_.append(gr)
2563 _get_members(gr)
2566 _get_members(gr)
2564
2567
2565 _get_members(self)
2568 _get_members(self)
2566 return [self] + all_
2569 return [self] + all_
2567
2570
2568 def recursive_groups_and_repos(self):
2571 def recursive_groups_and_repos(self):
2569 """
2572 """
2570 Recursively returns all groups, with the repositories in those groups
2573 Recursively returns all groups, with the repositories in those groups
2571 """
2574 """
2572 return self._recursive_objects()
2575 return self._recursive_objects()
2573
2576
2574 def recursive_groups(self):
2577 def recursive_groups(self):
2575 """
2578 """
2576 Returns this group and all its children groups, including children of children
2579 Returns this group and all its children groups, including children of children
2577 """
2580 """
2578 return self._recursive_objects(include_repos=False)
2581 return self._recursive_objects(include_repos=False)
2579
2582
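A brief usage note for the two recursive helpers above; repo_group is assumed to be an existing RepoGroup row:

# the group itself plus every nested sub-group and, for the first call, their repositories
groups_and_repos = repo_group.recursive_groups_and_repos()
groups_only = repo_group.recursive_groups()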
2580 def get_new_name(self, group_name):
2583 def get_new_name(self, group_name):
2581 """
2584 """
2582 returns new full group name based on parent and new name
2585 returns new full group name based on parent and new name
2583
2586
2584 :param group_name:
2587 :param group_name:
2585 """
2588 """
2586 path_prefix = (self.parent_group.full_path_splitted if
2589 path_prefix = (self.parent_group.full_path_splitted if
2587 self.parent_group else [])
2590 self.parent_group else [])
2588 return RepoGroup.url_sep().join(path_prefix + [group_name])
2591 return RepoGroup.url_sep().join(path_prefix + [group_name])
2589
2592
2590 def permissions(self, with_admins=True, with_owner=True):
2593 def permissions(self, with_admins=True, with_owner=True):
2591 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2594 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2592 q = q.options(joinedload(UserRepoGroupToPerm.group),
2595 q = q.options(joinedload(UserRepoGroupToPerm.group),
2593 joinedload(UserRepoGroupToPerm.user),
2596 joinedload(UserRepoGroupToPerm.user),
2594 joinedload(UserRepoGroupToPerm.permission),)
2597 joinedload(UserRepoGroupToPerm.permission),)
2595
2598
2596 # get owners, admins and their permissions. We rewrite the sqlalchemy
2599 # get owners, admins and their permissions. We rewrite the sqlalchemy
2597 # objects into named tuples because the sqlalchemy session keeps a
2600 # objects into named tuples because the sqlalchemy session keeps a
2598 # global reference, so changing one object would propagate to all
2601 # global reference, so changing one object would propagate to all
2599 # others. This means that if an admin is also the owner, an admin_row
2602 # others. This means that if an admin is also the owner, an admin_row
2600 # change would propagate to both objects
2603 # change would propagate to both objects
2601 perm_rows = []
2604 perm_rows = []
2602 for _usr in q.all():
2605 for _usr in q.all():
2603 usr = AttributeDict(_usr.user.get_dict())
2606 usr = AttributeDict(_usr.user.get_dict())
2604 usr.permission = _usr.permission.permission_name
2607 usr.permission = _usr.permission.permission_name
2605 perm_rows.append(usr)
2608 perm_rows.append(usr)
2606
2609
2607 # filter the perm rows by 'default' first and then sort them by
2610 # filter the perm rows by 'default' first and then sort them by
2608 # admin,write,read,none permissions sorted again alphabetically in
2611 # admin,write,read,none permissions sorted again alphabetically in
2609 # each group
2612 # each group
2610 perm_rows = sorted(perm_rows, key=display_user_sort)
2613 perm_rows = sorted(perm_rows, key=display_user_sort)
2611
2614
2612 _admin_perm = 'group.admin'
2615 _admin_perm = 'group.admin'
2613 owner_row = []
2616 owner_row = []
2614 if with_owner:
2617 if with_owner:
2615 usr = AttributeDict(self.user.get_dict())
2618 usr = AttributeDict(self.user.get_dict())
2616 usr.owner_row = True
2619 usr.owner_row = True
2617 usr.permission = _admin_perm
2620 usr.permission = _admin_perm
2618 owner_row.append(usr)
2621 owner_row.append(usr)
2619
2622
2620 super_admin_rows = []
2623 super_admin_rows = []
2621 if with_admins:
2624 if with_admins:
2622 for usr in User.get_all_super_admins():
2625 for usr in User.get_all_super_admins():
2623 # if this admin is also owner, don't double the record
2626 # if this admin is also owner, don't double the record
2624 if usr.user_id == owner_row[0].user_id:
2627 if usr.user_id == owner_row[0].user_id:
2625 owner_row[0].admin_row = True
2628 owner_row[0].admin_row = True
2626 else:
2629 else:
2627 usr = AttributeDict(usr.get_dict())
2630 usr = AttributeDict(usr.get_dict())
2628 usr.admin_row = True
2631 usr.admin_row = True
2629 usr.permission = _admin_perm
2632 usr.permission = _admin_perm
2630 super_admin_rows.append(usr)
2633 super_admin_rows.append(usr)
2631
2634
2632 return super_admin_rows + owner_row + perm_rows
2635 return super_admin_rows + owner_row + perm_rows
2633
2636
2634 def permission_user_groups(self):
2637 def permission_user_groups(self):
2635 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2638 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2636 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2639 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2637 joinedload(UserGroupRepoGroupToPerm.users_group),
2640 joinedload(UserGroupRepoGroupToPerm.users_group),
2638 joinedload(UserGroupRepoGroupToPerm.permission),)
2641 joinedload(UserGroupRepoGroupToPerm.permission),)
2639
2642
2640 perm_rows = []
2643 perm_rows = []
2641 for _user_group in q.all():
2644 for _user_group in q.all():
2642 usr = AttributeDict(_user_group.users_group.get_dict())
2645 usr = AttributeDict(_user_group.users_group.get_dict())
2643 usr.permission = _user_group.permission.permission_name
2646 usr.permission = _user_group.permission.permission_name
2644 perm_rows.append(usr)
2647 perm_rows.append(usr)
2645
2648
2646 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2649 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2647 return perm_rows
2650 return perm_rows
2648
2651
2649 def get_api_data(self):
2652 def get_api_data(self):
2650 """
2653 """
2651 Common function for generating api data
2654 Common function for generating api data
2652
2655
2653 """
2656 """
2654 group = self
2657 group = self
2655 data = {
2658 data = {
2656 'group_id': group.group_id,
2659 'group_id': group.group_id,
2657 'group_name': group.group_name,
2660 'group_name': group.group_name,
2658 'group_description': group.description_safe,
2661 'group_description': group.description_safe,
2659 'parent_group': group.parent_group.group_name if group.parent_group else None,
2662 'parent_group': group.parent_group.group_name if group.parent_group else None,
2660 'repositories': [x.repo_name for x in group.repositories],
2663 'repositories': [x.repo_name for x in group.repositories],
2661 'owner': group.user.username,
2664 'owner': group.user.username,
2662 }
2665 }
2663 return data
2666 return data
2664
2667
2665
2668
2666 class Permission(Base, BaseModel):
2669 class Permission(Base, BaseModel):
2667 __tablename__ = 'permissions'
2670 __tablename__ = 'permissions'
2668 __table_args__ = (
2671 __table_args__ = (
2669 Index('p_perm_name_idx', 'permission_name'),
2672 Index('p_perm_name_idx', 'permission_name'),
2670 base_table_args,
2673 base_table_args,
2671 )
2674 )
2672
2675
2673 PERMS = [
2676 PERMS = [
2674 ('hg.admin', _('RhodeCode Super Administrator')),
2677 ('hg.admin', _('RhodeCode Super Administrator')),
2675
2678
2676 ('repository.none', _('Repository no access')),
2679 ('repository.none', _('Repository no access')),
2677 ('repository.read', _('Repository read access')),
2680 ('repository.read', _('Repository read access')),
2678 ('repository.write', _('Repository write access')),
2681 ('repository.write', _('Repository write access')),
2679 ('repository.admin', _('Repository admin access')),
2682 ('repository.admin', _('Repository admin access')),
2680
2683
2681 ('group.none', _('Repository group no access')),
2684 ('group.none', _('Repository group no access')),
2682 ('group.read', _('Repository group read access')),
2685 ('group.read', _('Repository group read access')),
2683 ('group.write', _('Repository group write access')),
2686 ('group.write', _('Repository group write access')),
2684 ('group.admin', _('Repository group admin access')),
2687 ('group.admin', _('Repository group admin access')),
2685
2688
2686 ('usergroup.none', _('User group no access')),
2689 ('usergroup.none', _('User group no access')),
2687 ('usergroup.read', _('User group read access')),
2690 ('usergroup.read', _('User group read access')),
2688 ('usergroup.write', _('User group write access')),
2691 ('usergroup.write', _('User group write access')),
2689 ('usergroup.admin', _('User group admin access')),
2692 ('usergroup.admin', _('User group admin access')),
2690
2693
2691 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2694 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2692 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2695 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2693
2696
2694 ('hg.usergroup.create.false', _('User Group creation disabled')),
2697 ('hg.usergroup.create.false', _('User Group creation disabled')),
2695 ('hg.usergroup.create.true', _('User Group creation enabled')),
2698 ('hg.usergroup.create.true', _('User Group creation enabled')),
2696
2699
2697 ('hg.create.none', _('Repository creation disabled')),
2700 ('hg.create.none', _('Repository creation disabled')),
2698 ('hg.create.repository', _('Repository creation enabled')),
2701 ('hg.create.repository', _('Repository creation enabled')),
2699 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2702 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2700 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2703 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2701
2704
2702 ('hg.fork.none', _('Repository forking disabled')),
2705 ('hg.fork.none', _('Repository forking disabled')),
2703 ('hg.fork.repository', _('Repository forking enabled')),
2706 ('hg.fork.repository', _('Repository forking enabled')),
2704
2707
2705 ('hg.register.none', _('Registration disabled')),
2708 ('hg.register.none', _('Registration disabled')),
2706 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2709 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2707 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2710 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2708
2711
2709 ('hg.password_reset.enabled', _('Password reset enabled')),
2712 ('hg.password_reset.enabled', _('Password reset enabled')),
2710 ('hg.password_reset.hidden', _('Password reset hidden')),
2713 ('hg.password_reset.hidden', _('Password reset hidden')),
2711 ('hg.password_reset.disabled', _('Password reset disabled')),
2714 ('hg.password_reset.disabled', _('Password reset disabled')),
2712
2715
2713 ('hg.extern_activate.manual', _('Manual activation of external account')),
2716 ('hg.extern_activate.manual', _('Manual activation of external account')),
2714 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2717 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2715
2718
2716 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2719 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2717 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2720 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2718 ]
2721 ]
2719
2722
2720 # definition of system default permissions for DEFAULT user
2723 # definition of system default permissions for DEFAULT user
2721 DEFAULT_USER_PERMISSIONS = [
2724 DEFAULT_USER_PERMISSIONS = [
2722 'repository.read',
2725 'repository.read',
2723 'group.read',
2726 'group.read',
2724 'usergroup.read',
2727 'usergroup.read',
2725 'hg.create.repository',
2728 'hg.create.repository',
2726 'hg.repogroup.create.false',
2729 'hg.repogroup.create.false',
2727 'hg.usergroup.create.false',
2730 'hg.usergroup.create.false',
2728 'hg.create.write_on_repogroup.true',
2731 'hg.create.write_on_repogroup.true',
2729 'hg.fork.repository',
2732 'hg.fork.repository',
2730 'hg.register.manual_activate',
2733 'hg.register.manual_activate',
2731 'hg.password_reset.enabled',
2734 'hg.password_reset.enabled',
2732 'hg.extern_activate.auto',
2735 'hg.extern_activate.auto',
2733 'hg.inherit_default_perms.true',
2736 'hg.inherit_default_perms.true',
2734 ]
2737 ]
2735
2738
2736 # defines which permissions are more important higher the more important
2739 # defines which permissions are more important higher the more important
2737 # Weight defines which permissions are more important.
2740 # Weight defines which permissions are more important.
2738 # The higher number the more important.
2741 # The higher number the more important.
2739 PERM_WEIGHTS = {
2742 PERM_WEIGHTS = {
2740 'repository.none': 0,
2743 'repository.none': 0,
2741 'repository.read': 1,
2744 'repository.read': 1,
2742 'repository.write': 3,
2745 'repository.write': 3,
2743 'repository.admin': 4,
2746 'repository.admin': 4,
2744
2747
2745 'group.none': 0,
2748 'group.none': 0,
2746 'group.read': 1,
2749 'group.read': 1,
2747 'group.write': 3,
2750 'group.write': 3,
2748 'group.admin': 4,
2751 'group.admin': 4,
2749
2752
2750 'usergroup.none': 0,
2753 'usergroup.none': 0,
2751 'usergroup.read': 1,
2754 'usergroup.read': 1,
2752 'usergroup.write': 3,
2755 'usergroup.write': 3,
2753 'usergroup.admin': 4,
2756 'usergroup.admin': 4,
2754
2757
2755 'hg.repogroup.create.false': 0,
2758 'hg.repogroup.create.false': 0,
2756 'hg.repogroup.create.true': 1,
2759 'hg.repogroup.create.true': 1,
2757
2760
2758 'hg.usergroup.create.false': 0,
2761 'hg.usergroup.create.false': 0,
2759 'hg.usergroup.create.true': 1,
2762 'hg.usergroup.create.true': 1,
2760
2763
2761 'hg.fork.none': 0,
2764 'hg.fork.none': 0,
2762 'hg.fork.repository': 1,
2765 'hg.fork.repository': 1,
2763 'hg.create.none': 0,
2766 'hg.create.none': 0,
2764 'hg.create.repository': 1
2767 'hg.create.repository': 1
2765 }
2768 }
2766
2769
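# --- Illustrative sketch (not part of the original file) -----------------
# The PERM_WEIGHTS map above is what lets callers resolve conflicts between
# several granted permission names: the higher weight wins. A minimal,
# hypothetical resolver could look like this:
#
#   def strongest_permission(perm_names):
#       return max(perm_names, key=lambda name: Permission.PERM_WEIGHTS[name])
#
#   strongest_permission(['repository.read', 'repository.write'])
#   # -> 'repository.write' (weight 3 beats weight 1)
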
2767 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2770 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2768 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2771 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2769 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2772 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2770
2773
2771 def __unicode__(self):
2774 def __unicode__(self):
2772 return u"<%s('%s:%s')>" % (
2775 return u"<%s('%s:%s')>" % (
2773 self.__class__.__name__, self.permission_id, self.permission_name
2776 self.__class__.__name__, self.permission_id, self.permission_name
2774 )
2777 )
2775
2778
2776 @classmethod
2779 @classmethod
2777 def get_by_key(cls, key):
2780 def get_by_key(cls, key):
2778 return cls.query().filter(cls.permission_name == key).scalar()
2781 return cls.query().filter(cls.permission_name == key).scalar()
2779
2782
2780 @classmethod
2783 @classmethod
2781 def get_default_repo_perms(cls, user_id, repo_id=None):
2784 def get_default_repo_perms(cls, user_id, repo_id=None):
2782 q = Session().query(UserRepoToPerm, Repository, Permission)\
2785 q = Session().query(UserRepoToPerm, Repository, Permission)\
2783 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2786 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2784 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2787 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2785 .filter(UserRepoToPerm.user_id == user_id)
2788 .filter(UserRepoToPerm.user_id == user_id)
2786 if repo_id:
2789 if repo_id:
2787 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2790 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2788 return q.all()
2791 return q.all()
2789
2792
2790 @classmethod
2793 @classmethod
2791 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2794 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2792 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2795 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2793 .join(
2796 .join(
2794 Permission,
2797 Permission,
2795 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2798 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2796 .join(
2799 .join(
2797 Repository,
2800 Repository,
2798 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2801 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2799 .join(
2802 .join(
2800 UserGroup,
2803 UserGroup,
2801 UserGroupRepoToPerm.users_group_id ==
2804 UserGroupRepoToPerm.users_group_id ==
2802 UserGroup.users_group_id)\
2805 UserGroup.users_group_id)\
2803 .join(
2806 .join(
2804 UserGroupMember,
2807 UserGroupMember,
2805 UserGroupRepoToPerm.users_group_id ==
2808 UserGroupRepoToPerm.users_group_id ==
2806 UserGroupMember.users_group_id)\
2809 UserGroupMember.users_group_id)\
2807 .filter(
2810 .filter(
2808 UserGroupMember.user_id == user_id,
2811 UserGroupMember.user_id == user_id,
2809 UserGroup.users_group_active == true())
2812 UserGroup.users_group_active == true())
2810 if repo_id:
2813 if repo_id:
2811 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2814 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2812 return q.all()
2815 return q.all()
2813
2816
2814 @classmethod
2817 @classmethod
2815 def get_default_group_perms(cls, user_id, repo_group_id=None):
2818 def get_default_group_perms(cls, user_id, repo_group_id=None):
2816 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2819 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2817 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2820 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2818 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2821 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2819 .filter(UserRepoGroupToPerm.user_id == user_id)
2822 .filter(UserRepoGroupToPerm.user_id == user_id)
2820 if repo_group_id:
2823 if repo_group_id:
2821 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2824 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2822 return q.all()
2825 return q.all()
2823
2826
2824 @classmethod
2827 @classmethod
2825 def get_default_group_perms_from_user_group(
2828 def get_default_group_perms_from_user_group(
2826 cls, user_id, repo_group_id=None):
2829 cls, user_id, repo_group_id=None):
2827 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2830 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2828 .join(
2831 .join(
2829 Permission,
2832 Permission,
2830 UserGroupRepoGroupToPerm.permission_id ==
2833 UserGroupRepoGroupToPerm.permission_id ==
2831 Permission.permission_id)\
2834 Permission.permission_id)\
2832 .join(
2835 .join(
2833 RepoGroup,
2836 RepoGroup,
2834 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2837 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2835 .join(
2838 .join(
2836 UserGroup,
2839 UserGroup,
2837 UserGroupRepoGroupToPerm.users_group_id ==
2840 UserGroupRepoGroupToPerm.users_group_id ==
2838 UserGroup.users_group_id)\
2841 UserGroup.users_group_id)\
2839 .join(
2842 .join(
2840 UserGroupMember,
2843 UserGroupMember,
2841 UserGroupRepoGroupToPerm.users_group_id ==
2844 UserGroupRepoGroupToPerm.users_group_id ==
2842 UserGroupMember.users_group_id)\
2845 UserGroupMember.users_group_id)\
2843 .filter(
2846 .filter(
2844 UserGroupMember.user_id == user_id,
2847 UserGroupMember.user_id == user_id,
2845 UserGroup.users_group_active == true())
2848 UserGroup.users_group_active == true())
2846 if repo_group_id:
2849 if repo_group_id:
2847 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2850 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2848 return q.all()
2851 return q.all()
2849
2852
2850 @classmethod
2853 @classmethod
2851 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2854 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2852 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2855 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2853 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2856 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2854 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2857 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2855 .filter(UserUserGroupToPerm.user_id == user_id)
2858 .filter(UserUserGroupToPerm.user_id == user_id)
2856 if user_group_id:
2859 if user_group_id:
2857 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2860 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2858 return q.all()
2861 return q.all()
2859
2862
2860 @classmethod
2863 @classmethod
2861 def get_default_user_group_perms_from_user_group(
2864 def get_default_user_group_perms_from_user_group(
2862 cls, user_id, user_group_id=None):
2865 cls, user_id, user_group_id=None):
2863 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2866 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2864 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2867 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2865 .join(
2868 .join(
2866 Permission,
2869 Permission,
2867 UserGroupUserGroupToPerm.permission_id ==
2870 UserGroupUserGroupToPerm.permission_id ==
2868 Permission.permission_id)\
2871 Permission.permission_id)\
2869 .join(
2872 .join(
2870 TargetUserGroup,
2873 TargetUserGroup,
2871 UserGroupUserGroupToPerm.target_user_group_id ==
2874 UserGroupUserGroupToPerm.target_user_group_id ==
2872 TargetUserGroup.users_group_id)\
2875 TargetUserGroup.users_group_id)\
2873 .join(
2876 .join(
2874 UserGroup,
2877 UserGroup,
2875 UserGroupUserGroupToPerm.user_group_id ==
2878 UserGroupUserGroupToPerm.user_group_id ==
2876 UserGroup.users_group_id)\
2879 UserGroup.users_group_id)\
2877 .join(
2880 .join(
2878 UserGroupMember,
2881 UserGroupMember,
2879 UserGroupUserGroupToPerm.user_group_id ==
2882 UserGroupUserGroupToPerm.user_group_id ==
2880 UserGroupMember.users_group_id)\
2883 UserGroupMember.users_group_id)\
2881 .filter(
2884 .filter(
2882 UserGroupMember.user_id == user_id,
2885 UserGroupMember.user_id == user_id,
2883 UserGroup.users_group_active == true())
2886 UserGroup.users_group_active == true())
2884 if user_group_id:
2887 if user_group_id:
2885 q = q.filter(
2888 q = q.filter(
2886 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2889 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2887
2890
2888 return q.all()
2891 return q.all()
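
# --- Illustrative usage sketch (not part of the original file) -----------
# How the classmethods above are typically called; assumes a configured
# SQLAlchemy session and existing user/repository rows, the ids below are
# hypothetical.
def _example_permission_lookup():
    # fetch a single Permission row by its name
    write_perm = Permission.get_by_key('repository.write')
    # (UserRepoToPerm, Repository, Permission) tuples granted directly
    # to user 2 on repository 7
    direct_perms = Permission.get_default_repo_perms(user_id=2, repo_id=7)
    # the same, but granted through user group membership
    group_perms = Permission.get_default_repo_perms_from_user_group(
        user_id=2, repo_id=7)
    return write_perm, direct_perms, group_perms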
2889
2892
2890
2893
2891 class UserRepoToPerm(Base, BaseModel):
2894 class UserRepoToPerm(Base, BaseModel):
2892 __tablename__ = 'repo_to_perm'
2895 __tablename__ = 'repo_to_perm'
2893 __table_args__ = (
2896 __table_args__ = (
2894 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2897 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2895 base_table_args
2898 base_table_args
2896 )
2899 )
2897
2900
2898 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2901 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2899 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2902 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2900 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2903 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2901 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2904 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2902
2905
2903 user = relationship('User')
2906 user = relationship('User')
2904 repository = relationship('Repository')
2907 repository = relationship('Repository')
2905 permission = relationship('Permission')
2908 permission = relationship('Permission')
2906
2909
2907 @classmethod
2910 @classmethod
2908 def create(cls, user, repository, permission):
2911 def create(cls, user, repository, permission):
2909 n = cls()
2912 n = cls()
2910 n.user = user
2913 n.user = user
2911 n.repository = repository
2914 n.repository = repository
2912 n.permission = permission
2915 n.permission = permission
2913 Session().add(n)
2916 Session().add(n)
2914 return n
2917 return n
2915
2918
2916 def __unicode__(self):
2919 def __unicode__(self):
2917 return u'<%s => %s >' % (self.user, self.repository)
2920 return u'<%s => %s >' % (self.user, self.repository)
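
# --- Illustrative usage sketch (not part of the original file) -----------
# Granting a user read access on a repository with the create() helper
# above; `user` and `repo` are assumed to come from the surrounding
# application code.
def _example_grant_read(user, repo):
    read_perm = Permission.get_by_key('repository.read')
    UserRepoToPerm.create(user, repo, read_perm)
    Session().commit()  # create() only add()s the row, the caller commits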
2918
2921
2919
2922
2920 class UserUserGroupToPerm(Base, BaseModel):
2923 class UserUserGroupToPerm(Base, BaseModel):
2921 __tablename__ = 'user_user_group_to_perm'
2924 __tablename__ = 'user_user_group_to_perm'
2922 __table_args__ = (
2925 __table_args__ = (
2923 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2926 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2924 base_table_args
2927 base_table_args
2925 )
2928 )
2926
2929
2927 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2930 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2928 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2931 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2929 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2932 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2930 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2933 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2931
2934
2932 user = relationship('User')
2935 user = relationship('User')
2933 user_group = relationship('UserGroup')
2936 user_group = relationship('UserGroup')
2934 permission = relationship('Permission')
2937 permission = relationship('Permission')
2935
2938
2936 @classmethod
2939 @classmethod
2937 def create(cls, user, user_group, permission):
2940 def create(cls, user, user_group, permission):
2938 n = cls()
2941 n = cls()
2939 n.user = user
2942 n.user = user
2940 n.user_group = user_group
2943 n.user_group = user_group
2941 n.permission = permission
2944 n.permission = permission
2942 Session().add(n)
2945 Session().add(n)
2943 return n
2946 return n
2944
2947
2945 def __unicode__(self):
2948 def __unicode__(self):
2946 return u'<%s => %s >' % (self.user, self.user_group)
2949 return u'<%s => %s >' % (self.user, self.user_group)
2947
2950
2948
2951
2949 class UserToPerm(Base, BaseModel):
2952 class UserToPerm(Base, BaseModel):
2950 __tablename__ = 'user_to_perm'
2953 __tablename__ = 'user_to_perm'
2951 __table_args__ = (
2954 __table_args__ = (
2952 UniqueConstraint('user_id', 'permission_id'),
2955 UniqueConstraint('user_id', 'permission_id'),
2953 base_table_args
2956 base_table_args
2954 )
2957 )
2955
2958
2956 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2959 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2957 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2960 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2958 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2961 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2959
2962
2960 user = relationship('User')
2963 user = relationship('User')
2961 permission = relationship('Permission', lazy='joined')
2964 permission = relationship('Permission', lazy='joined')
2962
2965
2963 def __unicode__(self):
2966 def __unicode__(self):
2964 return u'<%s => %s >' % (self.user, self.permission)
2967 return u'<%s => %s >' % (self.user, self.permission)
2965
2968
2966
2969
2967 class UserGroupRepoToPerm(Base, BaseModel):
2970 class UserGroupRepoToPerm(Base, BaseModel):
2968 __tablename__ = 'users_group_repo_to_perm'
2971 __tablename__ = 'users_group_repo_to_perm'
2969 __table_args__ = (
2972 __table_args__ = (
2970 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2973 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2971 base_table_args
2974 base_table_args
2972 )
2975 )
2973
2976
2974 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2977 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2975 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2978 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2976 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2979 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2977 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2980 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2978
2981
2979 users_group = relationship('UserGroup')
2982 users_group = relationship('UserGroup')
2980 permission = relationship('Permission')
2983 permission = relationship('Permission')
2981 repository = relationship('Repository')
2984 repository = relationship('Repository')
2982
2985
2983 @classmethod
2986 @classmethod
2984 def create(cls, users_group, repository, permission):
2987 def create(cls, users_group, repository, permission):
2985 n = cls()
2988 n = cls()
2986 n.users_group = users_group
2989 n.users_group = users_group
2987 n.repository = repository
2990 n.repository = repository
2988 n.permission = permission
2991 n.permission = permission
2989 Session().add(n)
2992 Session().add(n)
2990 return n
2993 return n
2991
2994
2992 def __unicode__(self):
2995 def __unicode__(self):
2993 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2996 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2994
2997
2995
2998
2996 class UserGroupUserGroupToPerm(Base, BaseModel):
2999 class UserGroupUserGroupToPerm(Base, BaseModel):
2997 __tablename__ = 'user_group_user_group_to_perm'
3000 __tablename__ = 'user_group_user_group_to_perm'
2998 __table_args__ = (
3001 __table_args__ = (
2999 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3002 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3000 CheckConstraint('target_user_group_id != user_group_id'),
3003 CheckConstraint('target_user_group_id != user_group_id'),
3001 base_table_args
3004 base_table_args
3002 )
3005 )
3003
3006
3004 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3007 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3005 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3008 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3006 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3009 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3007 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3010 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3008
3011
3009 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3012 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3010 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3013 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3011 permission = relationship('Permission')
3014 permission = relationship('Permission')
3012
3015
3013 @classmethod
3016 @classmethod
3014 def create(cls, target_user_group, user_group, permission):
3017 def create(cls, target_user_group, user_group, permission):
3015 n = cls()
3018 n = cls()
3016 n.target_user_group = target_user_group
3019 n.target_user_group = target_user_group
3017 n.user_group = user_group
3020 n.user_group = user_group
3018 n.permission = permission
3021 n.permission = permission
3019 Session().add(n)
3022 Session().add(n)
3020 return n
3023 return n
3021
3024
3022 def __unicode__(self):
3025 def __unicode__(self):
3023 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3026 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3024
3027
3025
3028
3026 class UserGroupToPerm(Base, BaseModel):
3029 class UserGroupToPerm(Base, BaseModel):
3027 __tablename__ = 'users_group_to_perm'
3030 __tablename__ = 'users_group_to_perm'
3028 __table_args__ = (
3031 __table_args__ = (
3029 UniqueConstraint('users_group_id', 'permission_id',),
3032 UniqueConstraint('users_group_id', 'permission_id',),
3030 base_table_args
3033 base_table_args
3031 )
3034 )
3032
3035
3033 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3036 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3034 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3037 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3035 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3038 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3036
3039
3037 users_group = relationship('UserGroup')
3040 users_group = relationship('UserGroup')
3038 permission = relationship('Permission')
3041 permission = relationship('Permission')
3039
3042
3040
3043
3041 class UserRepoGroupToPerm(Base, BaseModel):
3044 class UserRepoGroupToPerm(Base, BaseModel):
3042 __tablename__ = 'user_repo_group_to_perm'
3045 __tablename__ = 'user_repo_group_to_perm'
3043 __table_args__ = (
3046 __table_args__ = (
3044 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3047 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3045 base_table_args
3048 base_table_args
3046 )
3049 )
3047
3050
3048 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3051 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3049 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3052 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3050 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3053 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3051 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3054 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3052
3055
3053 user = relationship('User')
3056 user = relationship('User')
3054 group = relationship('RepoGroup')
3057 group = relationship('RepoGroup')
3055 permission = relationship('Permission')
3058 permission = relationship('Permission')
3056
3059
3057 @classmethod
3060 @classmethod
3058 def create(cls, user, repository_group, permission):
3061 def create(cls, user, repository_group, permission):
3059 n = cls()
3062 n = cls()
3060 n.user = user
3063 n.user = user
3061 n.group = repository_group
3064 n.group = repository_group
3062 n.permission = permission
3065 n.permission = permission
3063 Session().add(n)
3066 Session().add(n)
3064 return n
3067 return n
3065
3068
3066
3069
3067 class UserGroupRepoGroupToPerm(Base, BaseModel):
3070 class UserGroupRepoGroupToPerm(Base, BaseModel):
3068 __tablename__ = 'users_group_repo_group_to_perm'
3071 __tablename__ = 'users_group_repo_group_to_perm'
3069 __table_args__ = (
3072 __table_args__ = (
3070 UniqueConstraint('users_group_id', 'group_id'),
3073 UniqueConstraint('users_group_id', 'group_id'),
3071 base_table_args
3074 base_table_args
3072 )
3075 )
3073
3076
3074 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3077 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3075 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3078 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3076 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3079 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3077 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3080 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3078
3081
3079 users_group = relationship('UserGroup')
3082 users_group = relationship('UserGroup')
3080 permission = relationship('Permission')
3083 permission = relationship('Permission')
3081 group = relationship('RepoGroup')
3084 group = relationship('RepoGroup')
3082
3085
3083 @classmethod
3086 @classmethod
3084 def create(cls, user_group, repository_group, permission):
3087 def create(cls, user_group, repository_group, permission):
3085 n = cls()
3088 n = cls()
3086 n.users_group = user_group
3089 n.users_group = user_group
3087 n.group = repository_group
3090 n.group = repository_group
3088 n.permission = permission
3091 n.permission = permission
3089 Session().add(n)
3092 Session().add(n)
3090 return n
3093 return n
3091
3094
3092 def __unicode__(self):
3095 def __unicode__(self):
3093 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3096 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3094
3097
3095
3098
3096 class Statistics(Base, BaseModel):
3099 class Statistics(Base, BaseModel):
3097 __tablename__ = 'statistics'
3100 __tablename__ = 'statistics'
3098 __table_args__ = (
3101 __table_args__ = (
3099 base_table_args
3102 base_table_args
3100 )
3103 )
3101
3104
3102 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3105 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3103 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3106 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3104 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3107 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3105 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
3108 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
3106 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
3109 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
3107 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
3110 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
3108
3111
3109 repository = relationship('Repository', single_parent=True)
3112 repository = relationship('Repository', single_parent=True)
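
# --- Illustrative sketch (not part of the original file) -----------------
# The LargeBinary columns above hold JSON-serialized data; a hedged example
# of preparing such a payload with the standard library (the real code may
# use a different serializer):
def _example_commit_activity_payload(day_counts):
    import json
    # e.g. day_counts = {'2018-06-01': 4, '2018-06-02': 9}
    return json.dumps(day_counts)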
3110
3113
3111
3114
3112 class UserFollowing(Base, BaseModel):
3115 class UserFollowing(Base, BaseModel):
3113 __tablename__ = 'user_followings'
3116 __tablename__ = 'user_followings'
3114 __table_args__ = (
3117 __table_args__ = (
3115 UniqueConstraint('user_id', 'follows_repository_id'),
3118 UniqueConstraint('user_id', 'follows_repository_id'),
3116 UniqueConstraint('user_id', 'follows_user_id'),
3119 UniqueConstraint('user_id', 'follows_user_id'),
3117 base_table_args
3120 base_table_args
3118 )
3121 )
3119
3122
3120 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3123 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3121 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3124 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3122 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3125 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3123 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3126 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3124 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3127 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3125
3128
3126 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3129 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3127
3130
3128 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3131 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3129 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3132 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3130
3133
3131 @classmethod
3134 @classmethod
3132 def get_repo_followers(cls, repo_id):
3135 def get_repo_followers(cls, repo_id):
3133 return cls.query().filter(cls.follows_repo_id == repo_id)
3136 return cls.query().filter(cls.follows_repo_id == repo_id)
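
# --- Illustrative usage sketch (not part of the original file) -----------
# get_repo_followers() returns a query object, so callers can keep
# filtering or simply count; repository id 7 is hypothetical.
def _example_follower_count():
    followers_q = UserFollowing.get_repo_followers(7)
    return followers_q.count()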
3134
3137
3135
3138
3136 class CacheKey(Base, BaseModel):
3139 class CacheKey(Base, BaseModel):
3137 __tablename__ = 'cache_invalidation'
3140 __tablename__ = 'cache_invalidation'
3138 __table_args__ = (
3141 __table_args__ = (
3139 UniqueConstraint('cache_key'),
3142 UniqueConstraint('cache_key'),
3140 Index('key_idx', 'cache_key'),
3143 Index('key_idx', 'cache_key'),
3141 base_table_args,
3144 base_table_args,
3142 )
3145 )
3143
3146
3144 CACHE_TYPE_FEED = 'FEED'
3147 CACHE_TYPE_FEED = 'FEED'
3145 CACHE_TYPE_README = 'README'
3148 CACHE_TYPE_README = 'README'
3146 # namespaces used to register process/thread aware caches
3149 # namespaces used to register process/thread aware caches
3147 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3150 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3148 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3151 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3149
3152
3150 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3153 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3151 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3154 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3152 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3155 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3153 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3156 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3154
3157
3155 def __init__(self, cache_key, cache_args=''):
3158 def __init__(self, cache_key, cache_args=''):
3156 self.cache_key = cache_key
3159 self.cache_key = cache_key
3157 self.cache_args = cache_args
3160 self.cache_args = cache_args
3158 self.cache_active = False
3161 self.cache_active = False
3159
3162
3160 def __unicode__(self):
3163 def __unicode__(self):
3161 return u"<%s('%s:%s[%s]')>" % (
3164 return u"<%s('%s:%s[%s]')>" % (
3162 self.__class__.__name__,
3165 self.__class__.__name__,
3163 self.cache_id, self.cache_key, self.cache_active)
3166 self.cache_id, self.cache_key, self.cache_active)
3164
3167
3165 def _cache_key_partition(self):
3168 def _cache_key_partition(self):
3166 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3169 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3167 return prefix, repo_name, suffix
3170 return prefix, repo_name, suffix
3168
3171
3169 def get_prefix(self):
3172 def get_prefix(self):
3170 """
3173 """
3171 Try to extract prefix from existing cache key. The key could consist
3174 Try to extract prefix from existing cache key. The key could consist
3172 of prefix, repo_name, suffix
3175 of prefix, repo_name, suffix
3173 """
3176 """
3174 # this returns prefix, repo_name, suffix
3177 # this returns prefix, repo_name, suffix
3175 return self._cache_key_partition()[0]
3178 return self._cache_key_partition()[0]
3176
3179
3177 def get_suffix(self):
3180 def get_suffix(self):
3178 """
3181 """
3179 Get the suffix that might have been used in _get_cache_key to
3182 Get the suffix that might have been used in _get_cache_key to
3180 generate self.cache_key. Only used for informational purposes
3183 generate self.cache_key. Only used for informational purposes
3181 in repo_edit.mako.
3184 in repo_edit.mako.
3182 """
3185 """
3183 # prefix, repo_name, suffix
3186 # prefix, repo_name, suffix
3184 return self._cache_key_partition()[2]
3187 return self._cache_key_partition()[2]
3185
3188
3186 @classmethod
3189 @classmethod
3187 def delete_all_cache(cls):
3190 def delete_all_cache(cls):
3188 """
3191 """
3189 Delete all cache keys from database.
3192 Delete all cache keys from database.
3190 Should only be run when all instances are down and all entries
3193 Should only be run when all instances are down and all entries
3191 thus stale.
3194 thus stale.
3192 """
3195 """
3193 cls.query().delete()
3196 cls.query().delete()
3194 Session().commit()
3197 Session().commit()
3195
3198
3196 @classmethod
3199 @classmethod
3197 def set_invalidate(cls, cache_uid, delete=False):
3200 def set_invalidate(cls, cache_uid, delete=False):
3198 """
3201 """
3199 Mark all caches of a repo as invalid in the database.
3202 Mark all caches of a repo as invalid in the database.
3200 """
3203 """
3201
3204
3202 try:
3205 try:
3203 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3206 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3204 if delete:
3207 if delete:
3205 qry.delete()
3208 qry.delete()
3206 log.debug('cache objects deleted for cache args %s',
3209 log.debug('cache objects deleted for cache args %s',
3207 safe_str(cache_uid))
3210 safe_str(cache_uid))
3208 else:
3211 else:
3209 qry.update({"cache_active": False})
3212 qry.update({"cache_active": False})
3210 log.debug('cache objects marked as invalid for cache args %s',
3213 log.debug('cache objects marked as invalid for cache args %s',
3211 safe_str(cache_uid))
3214 safe_str(cache_uid))
3212
3215
3213 Session().commit()
3216 Session().commit()
3214 except Exception:
3217 except Exception:
3215 log.exception(
3218 log.exception(
3216 'Cache key invalidation failed for cache args %s',
3219 'Cache key invalidation failed for cache args %s',
3217 safe_str(cache_uid))
3220 safe_str(cache_uid))
3218 Session().rollback()
3221 Session().rollback()
3219
3222
3220 @classmethod
3223 @classmethod
3221 def get_active_cache(cls, cache_key):
3224 def get_active_cache(cls, cache_key):
3222 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3225 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3223 if inv_obj:
3226 if inv_obj:
3224 return inv_obj
3227 return inv_obj
3225 return None
3228 return None
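
# --- Illustrative usage sketch (not part of the original file) -----------
# Building a per-repository invalidation key from the namespace template
# above and flipping it via set_invalidate(); repo id 7 and the lookup key
# below are hypothetical.
def _example_invalidate_repo_cache(repo_id=7):
    namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
    # mark every cache entry registered under this namespace as inactive
    CacheKey.set_invalidate(namespace, delete=False)
    # later, a reader can check whether a concrete key is still active
    return CacheKey.get_active_cache('some-cache-key')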
3226
3229
3227
3230
3228 class ChangesetComment(Base, BaseModel):
3231 class ChangesetComment(Base, BaseModel):
3229 __tablename__ = 'changeset_comments'
3232 __tablename__ = 'changeset_comments'
3230 __table_args__ = (
3233 __table_args__ = (
3231 Index('cc_revision_idx', 'revision'),
3234 Index('cc_revision_idx', 'revision'),
3232 base_table_args,
3235 base_table_args,
3233 )
3236 )
3234
3237
3235 COMMENT_OUTDATED = u'comment_outdated'
3238 COMMENT_OUTDATED = u'comment_outdated'
3236 COMMENT_TYPE_NOTE = u'note'
3239 COMMENT_TYPE_NOTE = u'note'
3237 COMMENT_TYPE_TODO = u'todo'
3240 COMMENT_TYPE_TODO = u'todo'
3238 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3241 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3239
3242
3240 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3243 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3241 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3244 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3242 revision = Column('revision', String(40), nullable=True)
3245 revision = Column('revision', String(40), nullable=True)
3243 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3246 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3244 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3247 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3245 line_no = Column('line_no', Unicode(10), nullable=True)
3248 line_no = Column('line_no', Unicode(10), nullable=True)
3246 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3249 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3247 f_path = Column('f_path', Unicode(1000), nullable=True)
3250 f_path = Column('f_path', Unicode(1000), nullable=True)
3248 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3251 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3249 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3252 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3250 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3253 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3251 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3254 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3252 renderer = Column('renderer', Unicode(64), nullable=True)
3255 renderer = Column('renderer', Unicode(64), nullable=True)
3253 display_state = Column('display_state', Unicode(128), nullable=True)
3256 display_state = Column('display_state', Unicode(128), nullable=True)
3254
3257
3255 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3258 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3256 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3259 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3257 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3260 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3258 author = relationship('User', lazy='joined')
3261 author = relationship('User', lazy='joined')
3259 repo = relationship('Repository')
3262 repo = relationship('Repository')
3260 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3263 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3261 pull_request = relationship('PullRequest', lazy='joined')
3264 pull_request = relationship('PullRequest', lazy='joined')
3262 pull_request_version = relationship('PullRequestVersion')
3265 pull_request_version = relationship('PullRequestVersion')
3263
3266
3264 @classmethod
3267 @classmethod
3265 def get_users(cls, revision=None, pull_request_id=None):
3268 def get_users(cls, revision=None, pull_request_id=None):
3266 """
3269 """
3267 Returns users associated with this ChangesetComment, i.e. those
3270 Returns users associated with this ChangesetComment, i.e. those
3268 who actually commented.
3271 who actually commented.
3269
3272
3270 :param cls:
3273 :param cls:
3271 :param revision:
3274 :param revision:
3272 """
3275 """
3273 q = Session().query(User)\
3276 q = Session().query(User)\
3274 .join(ChangesetComment.author)
3277 .join(ChangesetComment.author)
3275 if revision:
3278 if revision:
3276 q = q.filter(cls.revision == revision)
3279 q = q.filter(cls.revision == revision)
3277 elif pull_request_id:
3280 elif pull_request_id:
3278 q = q.filter(cls.pull_request_id == pull_request_id)
3281 q = q.filter(cls.pull_request_id == pull_request_id)
3279 return q.all()
3282 return q.all()
3280
3283
3281 @classmethod
3284 @classmethod
3282 def get_index_from_version(cls, pr_version, versions):
3285 def get_index_from_version(cls, pr_version, versions):
3283 num_versions = [x.pull_request_version_id for x in versions]
3286 num_versions = [x.pull_request_version_id for x in versions]
3284 try:
3287 try:
3285 return num_versions.index(pr_version) + 1
3288 return num_versions.index(pr_version) + 1
3286 except (IndexError, ValueError):
3289 except (IndexError, ValueError):
3287 return
3290 return
3288
3291
3289 @property
3292 @property
3290 def outdated(self):
3293 def outdated(self):
3291 return self.display_state == self.COMMENT_OUTDATED
3294 return self.display_state == self.COMMENT_OUTDATED
3292
3295
3293 def outdated_at_version(self, version):
3296 def outdated_at_version(self, version):
3294 """
3297 """
3295 Checks if comment is outdated for given pull request version
3298 Checks if comment is outdated for given pull request version
3296 """
3299 """
3297 return self.outdated and self.pull_request_version_id != version
3300 return self.outdated and self.pull_request_version_id != version
3298
3301
3299 def older_than_version(self, version):
3302 def older_than_version(self, version):
3300 """
3303 """
3301 Checks if comment is made from previous version than given
3304 Checks if comment is made from previous version than given
3302 """
3305 """
3303 if version is None:
3306 if version is None:
3304 return self.pull_request_version_id is not None
3307 return self.pull_request_version_id is not None
3305
3308
3306 return self.pull_request_version_id < version
3309 return self.pull_request_version_id < version
3307
3310
3308 @property
3311 @property
3309 def resolved(self):
3312 def resolved(self):
3310 return self.resolved_by[0] if self.resolved_by else None
3313 return self.resolved_by[0] if self.resolved_by else None
3311
3314
3312 @property
3315 @property
3313 def is_todo(self):
3316 def is_todo(self):
3314 return self.comment_type == self.COMMENT_TYPE_TODO
3317 return self.comment_type == self.COMMENT_TYPE_TODO
3315
3318
3316 @property
3319 @property
3317 def is_inline(self):
3320 def is_inline(self):
3318 return self.line_no and self.f_path
3321 return self.line_no and self.f_path
3319
3322
3320 def get_index_version(self, versions):
3323 def get_index_version(self, versions):
3321 return self.get_index_from_version(
3324 return self.get_index_from_version(
3322 self.pull_request_version_id, versions)
3325 self.pull_request_version_id, versions)
3323
3326
3324 def __repr__(self):
3327 def __repr__(self):
3325 if self.comment_id:
3328 if self.comment_id:
3326 return '<DB:Comment #%s>' % self.comment_id
3329 return '<DB:Comment #%s>' % self.comment_id
3327 else:
3330 else:
3328 return '<DB:Comment at %#x>' % id(self)
3331 return '<DB:Comment at %#x>' % id(self)
3329
3332
3330 def get_api_data(self):
3333 def get_api_data(self):
3331 comment = self
3334 comment = self
3332 data = {
3335 data = {
3333 'comment_id': comment.comment_id,
3336 'comment_id': comment.comment_id,
3334 'comment_type': comment.comment_type,
3337 'comment_type': comment.comment_type,
3335 'comment_text': comment.text,
3338 'comment_text': comment.text,
3336 'comment_status': comment.status_change,
3339 'comment_status': comment.status_change,
3337 'comment_f_path': comment.f_path,
3340 'comment_f_path': comment.f_path,
3338 'comment_lineno': comment.line_no,
3341 'comment_lineno': comment.line_no,
3339 'comment_author': comment.author,
3342 'comment_author': comment.author,
3340 'comment_created_on': comment.created_on
3343 'comment_created_on': comment.created_on
3341 }
3344 }
3342 return data
3345 return data
3343
3346
3344 def __json__(self):
3347 def __json__(self):
3345 data = dict()
3348 data = dict()
3346 data.update(self.get_api_data())
3349 data.update(self.get_api_data())
3347 return data
3350 return data
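
# --- Illustrative usage sketch (not part of the original file) -----------
# A comment is "inline" when it carries both a file path and a line number;
# get_index_version() maps the comment's pull request version id to its
# 1-based position in a list of versions. The objects passed in below are
# assumed to exist in the surrounding code.
def _example_comment_info(comment, pr_versions):
    if comment.is_inline:
        location = (comment.f_path, comment.line_no)
    else:
        location = None
    version_index = comment.get_index_version(pr_versions)
    return location, version_index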
3348
3351
3349
3352
3350 class ChangesetStatus(Base, BaseModel):
3353 class ChangesetStatus(Base, BaseModel):
3351 __tablename__ = 'changeset_statuses'
3354 __tablename__ = 'changeset_statuses'
3352 __table_args__ = (
3355 __table_args__ = (
3353 Index('cs_revision_idx', 'revision'),
3356 Index('cs_revision_idx', 'revision'),
3354 Index('cs_version_idx', 'version'),
3357 Index('cs_version_idx', 'version'),
3355 UniqueConstraint('repo_id', 'revision', 'version'),
3358 UniqueConstraint('repo_id', 'revision', 'version'),
3356 base_table_args
3359 base_table_args
3357 )
3360 )
3358
3361
3359 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3362 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3360 STATUS_APPROVED = 'approved'
3363 STATUS_APPROVED = 'approved'
3361 STATUS_REJECTED = 'rejected'
3364 STATUS_REJECTED = 'rejected'
3362 STATUS_UNDER_REVIEW = 'under_review'
3365 STATUS_UNDER_REVIEW = 'under_review'
3363
3366
3364 STATUSES = [
3367 STATUSES = [
3365 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3368 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3366 (STATUS_APPROVED, _("Approved")),
3369 (STATUS_APPROVED, _("Approved")),
3367 (STATUS_REJECTED, _("Rejected")),
3370 (STATUS_REJECTED, _("Rejected")),
3368 (STATUS_UNDER_REVIEW, _("Under Review")),
3371 (STATUS_UNDER_REVIEW, _("Under Review")),
3369 ]
3372 ]
3370
3373
3371 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3374 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3372 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3375 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3373 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3376 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3374 revision = Column('revision', String(40), nullable=False)
3377 revision = Column('revision', String(40), nullable=False)
3375 status = Column('status', String(128), nullable=False, default=DEFAULT)
3378 status = Column('status', String(128), nullable=False, default=DEFAULT)
3376 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3379 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3377 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3380 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3378 version = Column('version', Integer(), nullable=False, default=0)
3381 version = Column('version', Integer(), nullable=False, default=0)
3379 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3382 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3380
3383
3381 author = relationship('User', lazy='joined')
3384 author = relationship('User', lazy='joined')
3382 repo = relationship('Repository')
3385 repo = relationship('Repository')
3383 comment = relationship('ChangesetComment', lazy='joined')
3386 comment = relationship('ChangesetComment', lazy='joined')
3384 pull_request = relationship('PullRequest', lazy='joined')
3387 pull_request = relationship('PullRequest', lazy='joined')
3385
3388
3386 def __unicode__(self):
3389 def __unicode__(self):
3387 return u"<%s('%s[v%s]:%s')>" % (
3390 return u"<%s('%s[v%s]:%s')>" % (
3388 self.__class__.__name__,
3391 self.__class__.__name__,
3389 self.status, self.version, self.author
3392 self.status, self.version, self.author
3390 )
3393 )
3391
3394
3392 @classmethod
3395 @classmethod
3393 def get_status_lbl(cls, value):
3396 def get_status_lbl(cls, value):
3394 return dict(cls.STATUSES).get(value)
3397 return dict(cls.STATUSES).get(value)
3395
3398
3396 @property
3399 @property
3397 def status_lbl(self):
3400 def status_lbl(self):
3398 return ChangesetStatus.get_status_lbl(self.status)
3401 return ChangesetStatus.get_status_lbl(self.status)
3399
3402
3400 def get_api_data(self):
3403 def get_api_data(self):
3401 status = self
3404 status = self
3402 data = {
3405 data = {
3403 'status_id': status.changeset_status_id,
3406 'status_id': status.changeset_status_id,
3404 'status': status.status,
3407 'status': status.status,
3405 }
3408 }
3406 return data
3409 return data
3407
3410
3408 def __json__(self):
3411 def __json__(self):
3409 data = dict()
3412 data = dict()
3410 data.update(self.get_api_data())
3413 data.update(self.get_api_data())
3411 return data
3414 return data
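
# --- Illustrative usage sketch (not part of the original file) -----------
# STATUSES doubles as a value -> label mapping, which get_status_lbl()
# exposes; a minimal example:
def _example_status_label():
    # -> the translated label for 'approved', e.g. u'Approved'
    return ChangesetStatus.get_status_lbl(ChangesetStatus.STATUS_APPROVED)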
3412
3415
3413
3416
3414 class _PullRequestBase(BaseModel):
3417 class _PullRequestBase(BaseModel):
3415 """
3418 """
3416 Common attributes of pull request and version entries.
3419 Common attributes of pull request and version entries.
3417 """
3420 """
3418
3421
3419 # .status values
3422 # .status values
3420 STATUS_NEW = u'new'
3423 STATUS_NEW = u'new'
3421 STATUS_OPEN = u'open'
3424 STATUS_OPEN = u'open'
3422 STATUS_CLOSED = u'closed'
3425 STATUS_CLOSED = u'closed'
3423
3426
3424 title = Column('title', Unicode(255), nullable=True)
3427 title = Column('title', Unicode(255), nullable=True)
3425 description = Column(
3428 description = Column(
3426 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3429 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3427 nullable=True)
3430 nullable=True)
3428 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3431 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3429
3432
3430 # new/open/closed status of pull request (not approve/reject/etc)
3433 # new/open/closed status of pull request (not approve/reject/etc)
3431 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3434 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3432 created_on = Column(
3435 created_on = Column(
3433 'created_on', DateTime(timezone=False), nullable=False,
3436 'created_on', DateTime(timezone=False), nullable=False,
3434 default=datetime.datetime.now)
3437 default=datetime.datetime.now)
3435 updated_on = Column(
3438 updated_on = Column(
3436 'updated_on', DateTime(timezone=False), nullable=False,
3439 'updated_on', DateTime(timezone=False), nullable=False,
3437 default=datetime.datetime.now)
3440 default=datetime.datetime.now)
3438
3441
3439 @declared_attr
3442 @declared_attr
3440 def user_id(cls):
3443 def user_id(cls):
3441 return Column(
3444 return Column(
3442 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3445 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3443 unique=None)
3446 unique=None)
3444
3447
3445 # 500 revisions max
3448 # 500 revisions max
3446 _revisions = Column(
3449 _revisions = Column(
3447 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3450 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3448
3451
3449 @declared_attr
3452 @declared_attr
3450 def source_repo_id(cls):
3453 def source_repo_id(cls):
3451 # TODO: dan: rename column to source_repo_id
3454 # TODO: dan: rename column to source_repo_id
3452 return Column(
3455 return Column(
3453 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3456 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3454 nullable=False)
3457 nullable=False)
3455
3458
3456 source_ref = Column('org_ref', Unicode(255), nullable=False)
3459 source_ref = Column('org_ref', Unicode(255), nullable=False)
3457
3460
3458 @declared_attr
3461 @declared_attr
3459 def target_repo_id(cls):
3462 def target_repo_id(cls):
3460 # TODO: dan: rename column to target_repo_id
3463 # TODO: dan: rename column to target_repo_id
3461 return Column(
3464 return Column(
3462 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3465 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3463 nullable=False)
3466 nullable=False)
3464
3467
3465 target_ref = Column('other_ref', Unicode(255), nullable=False)
3468 target_ref = Column('other_ref', Unicode(255), nullable=False)
3466 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3469 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3467
3470
3468 # TODO: dan: rename column to last_merge_source_rev
3471 # TODO: dan: rename column to last_merge_source_rev
3469 _last_merge_source_rev = Column(
3472 _last_merge_source_rev = Column(
3470 'last_merge_org_rev', String(40), nullable=True)
3473 'last_merge_org_rev', String(40), nullable=True)
3471 # TODO: dan: rename column to last_merge_target_rev
3474 # TODO: dan: rename column to last_merge_target_rev
3472 _last_merge_target_rev = Column(
3475 _last_merge_target_rev = Column(
3473 'last_merge_other_rev', String(40), nullable=True)
3476 'last_merge_other_rev', String(40), nullable=True)
3474 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3477 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3475 merge_rev = Column('merge_rev', String(40), nullable=True)
3478 merge_rev = Column('merge_rev', String(40), nullable=True)
3476
3479
3477 reviewer_data = Column(
3480 reviewer_data = Column(
3478 'reviewer_data_json', MutationObj.as_mutable(
3481 'reviewer_data_json', MutationObj.as_mutable(
3479 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3482 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3480
3483
3481 @property
3484 @property
3482 def reviewer_data_json(self):
3485 def reviewer_data_json(self):
3483 return json.dumps(self.reviewer_data)
3486 return json.dumps(self.reviewer_data)
3484
3487
3485 @hybrid_property
3488 @hybrid_property
3486 def description_safe(self):
3489 def description_safe(self):
3487 from rhodecode.lib import helpers as h
3490 from rhodecode.lib import helpers as h
3488 return h.escape(self.description)
3491 return h.escape(self.description)
3489
3492
3490 @hybrid_property
3493 @hybrid_property
3491 def revisions(self):
3494 def revisions(self):
3492 return self._revisions.split(':') if self._revisions else []
3495 return self._revisions.split(':') if self._revisions else []
3493
3496
3494 @revisions.setter
3497 @revisions.setter
3495 def revisions(self, val):
3498 def revisions(self, val):
3496 self._revisions = ':'.join(val)
3499 self._revisions = ':'.join(val)
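    # Illustrative sketch (not part of the original model): the `revisions`
    # hybrid property stores the commit ids as a single colon-separated
    # unicode string in the `revisions` column, e.g.
    #
    #   >>> pull_request.revisions = ['aaa111', 'bbb222']  # stored as u'aaa111:bbb222'
    #   >>> pull_request.revisions
    #   ['aaa111', 'bbb222']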
3497
3500
3498 @hybrid_property
3501 @hybrid_property
3499 def last_merge_status(self):
3502 def last_merge_status(self):
3500 return safe_int(self._last_merge_status)
3503 return safe_int(self._last_merge_status)
3501
3504
3502 @last_merge_status.setter
3505 @last_merge_status.setter
3503 def last_merge_status(self, val):
3506 def last_merge_status(self, val):
3504 self._last_merge_status = val
3507 self._last_merge_status = val
3505
3508
3506 @declared_attr
3509 @declared_attr
3507 def author(cls):
3510 def author(cls):
3508 return relationship('User', lazy='joined')
3511 return relationship('User', lazy='joined')
3509
3512
3510 @declared_attr
3513 @declared_attr
3511 def source_repo(cls):
3514 def source_repo(cls):
3512 return relationship(
3515 return relationship(
3513 'Repository',
3516 'Repository',
3514 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3517 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3515
3518
3516 @property
3519 @property
3517 def source_ref_parts(self):
3520 def source_ref_parts(self):
3518 return self.unicode_to_reference(self.source_ref)
3521 return self.unicode_to_reference(self.source_ref)
3519
3522
3520 @declared_attr
3523 @declared_attr
3521 def target_repo(cls):
3524 def target_repo(cls):
3522 return relationship(
3525 return relationship(
3523 'Repository',
3526 'Repository',
3524 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3527 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3525
3528
3526 @property
3529 @property
3527 def target_ref_parts(self):
3530 def target_ref_parts(self):
3528 return self.unicode_to_reference(self.target_ref)
3531 return self.unicode_to_reference(self.target_ref)
3529
3532
3530 @property
3533 @property
3531 def shadow_merge_ref(self):
3534 def shadow_merge_ref(self):
3532 return self.unicode_to_reference(self._shadow_merge_ref)
3535 return self.unicode_to_reference(self._shadow_merge_ref)
3533
3536
3534 @shadow_merge_ref.setter
3537 @shadow_merge_ref.setter
3535 def shadow_merge_ref(self, ref):
3538 def shadow_merge_ref(self, ref):
3536 self._shadow_merge_ref = self.reference_to_unicode(ref)
3539 self._shadow_merge_ref = self.reference_to_unicode(ref)
3537
3540
3538 def unicode_to_reference(self, raw):
3541 def unicode_to_reference(self, raw):
3539 """
3542 """
3540 Convert a unicode (or plain) string to a reference object.
3543 Convert a unicode (or plain) string to a reference object.
3541 If the value evaluates to False, None is returned.
3544 If the value evaluates to False, None is returned.
3542 """
3545 """
3543 if raw:
3546 if raw:
3544 refs = raw.split(':')
3547 refs = raw.split(':')
3545 return Reference(*refs)
3548 return Reference(*refs)
3546 else:
3549 else:
3547 return None
3550 return None
3548
3551
3549 def reference_to_unicode(self, ref):
3552 def reference_to_unicode(self, ref):
3550 """
3553 """
3551 Convert a reference object to unicode.
3554 Convert a reference object to unicode.
3552 If the reference is empty or None, None is returned.
3555 If the reference is empty or None, None is returned.
3553 """
3556 """
3554 if ref:
3557 if ref:
3555 return u':'.join(ref)
3558 return u':'.join(ref)
3556 else:
3559 else:
3557 return None
3560 return None
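    # Illustrative sketch (assumption: Reference is a namedtuple of
    # (type, name, commit_id), as used elsewhere in this module). The two
    # helpers above round-trip between the stored unicode form and the
    # Reference object:
    #
    #   >>> pull_request.unicode_to_reference(u'branch:default:deadbeef')
    #   Reference(type=u'branch', name=u'default', commit_id=u'deadbeef')
    #   >>> pull_request.reference_to_unicode(Reference(u'branch', u'default', u'deadbeef'))
    #   u'branch:default:deadbeef'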
3558
3561
3559 def get_api_data(self, with_merge_state=True):
3562 def get_api_data(self, with_merge_state=True):
3560 from rhodecode.model.pull_request import PullRequestModel
3563 from rhodecode.model.pull_request import PullRequestModel
3561
3564
3562 pull_request = self
3565 pull_request = self
3563 if with_merge_state:
3566 if with_merge_state:
3564 merge_status = PullRequestModel().merge_status(pull_request)
3567 merge_status = PullRequestModel().merge_status(pull_request)
3565 merge_state = {
3568 merge_state = {
3566 'status': merge_status[0],
3569 'status': merge_status[0],
3567 'message': safe_unicode(merge_status[1]),
3570 'message': safe_unicode(merge_status[1]),
3568 }
3571 }
3569 else:
3572 else:
3570 merge_state = {'status': 'not_available',
3573 merge_state = {'status': 'not_available',
3571 'message': 'not_available'}
3574 'message': 'not_available'}
3572
3575
3573 merge_data = {
3576 merge_data = {
3574 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3577 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3575 'reference': (
3578 'reference': (
3576 pull_request.shadow_merge_ref._asdict()
3579 pull_request.shadow_merge_ref._asdict()
3577 if pull_request.shadow_merge_ref else None),
3580 if pull_request.shadow_merge_ref else None),
3578 }
3581 }
3579
3582
3580 data = {
3583 data = {
3581 'pull_request_id': pull_request.pull_request_id,
3584 'pull_request_id': pull_request.pull_request_id,
3582 'url': PullRequestModel().get_url(pull_request),
3585 'url': PullRequestModel().get_url(pull_request),
3583 'title': pull_request.title,
3586 'title': pull_request.title,
3584 'description': pull_request.description,
3587 'description': pull_request.description,
3585 'status': pull_request.status,
3588 'status': pull_request.status,
3586 'created_on': pull_request.created_on,
3589 'created_on': pull_request.created_on,
3587 'updated_on': pull_request.updated_on,
3590 'updated_on': pull_request.updated_on,
3588 'commit_ids': pull_request.revisions,
3591 'commit_ids': pull_request.revisions,
3589 'review_status': pull_request.calculated_review_status(),
3592 'review_status': pull_request.calculated_review_status(),
3590 'mergeable': merge_state,
3593 'mergeable': merge_state,
3591 'source': {
3594 'source': {
3592 'clone_url': pull_request.source_repo.clone_url(),
3595 'clone_url': pull_request.source_repo.clone_url(),
3593 'repository': pull_request.source_repo.repo_name,
3596 'repository': pull_request.source_repo.repo_name,
3594 'reference': {
3597 'reference': {
3595 'name': pull_request.source_ref_parts.name,
3598 'name': pull_request.source_ref_parts.name,
3596 'type': pull_request.source_ref_parts.type,
3599 'type': pull_request.source_ref_parts.type,
3597 'commit_id': pull_request.source_ref_parts.commit_id,
3600 'commit_id': pull_request.source_ref_parts.commit_id,
3598 },
3601 },
3599 },
3602 },
3600 'target': {
3603 'target': {
3601 'clone_url': pull_request.target_repo.clone_url(),
3604 'clone_url': pull_request.target_repo.clone_url(),
3602 'repository': pull_request.target_repo.repo_name,
3605 'repository': pull_request.target_repo.repo_name,
3603 'reference': {
3606 'reference': {
3604 'name': pull_request.target_ref_parts.name,
3607 'name': pull_request.target_ref_parts.name,
3605 'type': pull_request.target_ref_parts.type,
3608 'type': pull_request.target_ref_parts.type,
3606 'commit_id': pull_request.target_ref_parts.commit_id,
3609 'commit_id': pull_request.target_ref_parts.commit_id,
3607 },
3610 },
3608 },
3611 },
3609 'merge': merge_data,
3612 'merge': merge_data,
3610 'author': pull_request.author.get_api_data(include_secrets=False,
3613 'author': pull_request.author.get_api_data(include_secrets=False,
3611 details='basic'),
3614 details='basic'),
3612 'reviewers': [
3615 'reviewers': [
3613 {
3616 {
3614 'user': reviewer.get_api_data(include_secrets=False,
3617 'user': reviewer.get_api_data(include_secrets=False,
3615 details='basic'),
3618 details='basic'),
3616 'reasons': reasons,
3619 'reasons': reasons,
3617 'review_status': st[0][1].status if st else 'not_reviewed',
3620 'review_status': st[0][1].status if st else 'not_reviewed',
3618 }
3621 }
3619 for obj, reviewer, reasons, mandatory, st in
3622 for obj, reviewer, reasons, mandatory, st in
3620 pull_request.reviewers_statuses()
3623 pull_request.reviewers_statuses()
3621 ]
3624 ]
3622 }
3625 }
3623
3626
3624 return data
3627 return data
3625
3628
3626
3629
3627 class PullRequest(Base, _PullRequestBase):
3630 class PullRequest(Base, _PullRequestBase):
3628 __tablename__ = 'pull_requests'
3631 __tablename__ = 'pull_requests'
3629 __table_args__ = (
3632 __table_args__ = (
3630 base_table_args,
3633 base_table_args,
3631 )
3634 )
3632
3635
3633 pull_request_id = Column(
3636 pull_request_id = Column(
3634 'pull_request_id', Integer(), nullable=False, primary_key=True)
3637 'pull_request_id', Integer(), nullable=False, primary_key=True)
3635
3638
3636 def __repr__(self):
3639 def __repr__(self):
3637 if self.pull_request_id:
3640 if self.pull_request_id:
3638 return '<DB:PullRequest #%s>' % self.pull_request_id
3641 return '<DB:PullRequest #%s>' % self.pull_request_id
3639 else:
3642 else:
3640 return '<DB:PullRequest at %#x>' % id(self)
3643 return '<DB:PullRequest at %#x>' % id(self)
3641
3644
3642 reviewers = relationship('PullRequestReviewers',
3645 reviewers = relationship('PullRequestReviewers',
3643 cascade="all, delete, delete-orphan")
3646 cascade="all, delete, delete-orphan")
3644 statuses = relationship('ChangesetStatus',
3647 statuses = relationship('ChangesetStatus',
3645 cascade="all, delete, delete-orphan")
3648 cascade="all, delete, delete-orphan")
3646 comments = relationship('ChangesetComment',
3649 comments = relationship('ChangesetComment',
3647 cascade="all, delete, delete-orphan")
3650 cascade="all, delete, delete-orphan")
3648 versions = relationship('PullRequestVersion',
3651 versions = relationship('PullRequestVersion',
3649 cascade="all, delete, delete-orphan",
3652 cascade="all, delete, delete-orphan",
3650 lazy='dynamic')
3653 lazy='dynamic')
3651
3654
3652 @classmethod
3655 @classmethod
3653 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3656 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3654 internal_methods=None):
3657 internal_methods=None):
3655
3658
3656 class PullRequestDisplay(object):
3659 class PullRequestDisplay(object):
3657 """
3660 """
3658 Special object wrapper for showing PullRequest data via Versions
3661 Special object wrapper for showing PullRequest data via Versions
3659 It mimics the PR object as closely as possible. This is a read-only
3662 It mimics the PR object as closely as possible. This is a read-only
3660 object, intended for display only
3663 object, intended for display only
3661 """
3664 """
3662
3665
3663 def __init__(self, attrs, internal=None):
3666 def __init__(self, attrs, internal=None):
3664 self.attrs = attrs
3667 self.attrs = attrs
3665 # internal attributes have priority over the ones given via attrs
3668 # internal attributes have priority over the ones given via attrs
3666 self.internal = internal or ['versions']
3669 self.internal = internal or ['versions']
3667
3670
3668 def __getattr__(self, item):
3671 def __getattr__(self, item):
3669 if item in self.internal:
3672 if item in self.internal:
3670 return getattr(self, item)
3673 return getattr(self, item)
3671 try:
3674 try:
3672 return self.attrs[item]
3675 return self.attrs[item]
3673 except KeyError:
3676 except KeyError:
3674 raise AttributeError(
3677 raise AttributeError(
3675 '%s object has no attribute %s' % (self, item))
3678 '%s object has no attribute %s' % (self, item))
3676
3679
3677 def __repr__(self):
3680 def __repr__(self):
3678 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3681 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3679
3682
3680 def versions(self):
3683 def versions(self):
3681 return pull_request_obj.versions.order_by(
3684 return pull_request_obj.versions.order_by(
3682 PullRequestVersion.pull_request_version_id).all()
3685 PullRequestVersion.pull_request_version_id).all()
3683
3686
3684 def is_closed(self):
3687 def is_closed(self):
3685 return pull_request_obj.is_closed()
3688 return pull_request_obj.is_closed()
3686
3689
3687 @property
3690 @property
3688 def pull_request_version_id(self):
3691 def pull_request_version_id(self):
3689 return getattr(pull_request_obj, 'pull_request_version_id', None)
3692 return getattr(pull_request_obj, 'pull_request_version_id', None)
3690
3693
3691 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3694 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3692
3695
3693 attrs.author = StrictAttributeDict(
3696 attrs.author = StrictAttributeDict(
3694 pull_request_obj.author.get_api_data())
3697 pull_request_obj.author.get_api_data())
3695 if pull_request_obj.target_repo:
3698 if pull_request_obj.target_repo:
3696 attrs.target_repo = StrictAttributeDict(
3699 attrs.target_repo = StrictAttributeDict(
3697 pull_request_obj.target_repo.get_api_data())
3700 pull_request_obj.target_repo.get_api_data())
3698 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3701 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3699
3702
3700 if pull_request_obj.source_repo:
3703 if pull_request_obj.source_repo:
3701 attrs.source_repo = StrictAttributeDict(
3704 attrs.source_repo = StrictAttributeDict(
3702 pull_request_obj.source_repo.get_api_data())
3705 pull_request_obj.source_repo.get_api_data())
3703 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3706 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3704
3707
3705 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3708 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3706 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3709 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3707 attrs.revisions = pull_request_obj.revisions
3710 attrs.revisions = pull_request_obj.revisions
3708
3711
3709 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3712 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3710 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3713 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3711 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3714 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3712
3715
3713 return PullRequestDisplay(attrs, internal=internal_methods)
3716 return PullRequestDisplay(attrs, internal=internal_methods)
3714
3717
3715 def is_closed(self):
3718 def is_closed(self):
3716 return self.status == self.STATUS_CLOSED
3719 return self.status == self.STATUS_CLOSED
3717
3720
3718 def __json__(self):
3721 def __json__(self):
3719 return {
3722 return {
3720 'revisions': self.revisions,
3723 'revisions': self.revisions,
3721 }
3724 }
3722
3725
3723 def calculated_review_status(self):
3726 def calculated_review_status(self):
3724 from rhodecode.model.changeset_status import ChangesetStatusModel
3727 from rhodecode.model.changeset_status import ChangesetStatusModel
3725 return ChangesetStatusModel().calculated_review_status(self)
3728 return ChangesetStatusModel().calculated_review_status(self)
3726
3729
3727 def reviewers_statuses(self):
3730 def reviewers_statuses(self):
3728 from rhodecode.model.changeset_status import ChangesetStatusModel
3731 from rhodecode.model.changeset_status import ChangesetStatusModel
3729 return ChangesetStatusModel().reviewers_statuses(self)
3732 return ChangesetStatusModel().reviewers_statuses(self)
3730
3733
3731 @property
3734 @property
3732 def workspace_id(self):
3735 def workspace_id(self):
3733 from rhodecode.model.pull_request import PullRequestModel
3736 from rhodecode.model.pull_request import PullRequestModel
3734 return PullRequestModel()._workspace_id(self)
3737 return PullRequestModel()._workspace_id(self)
3735
3738
3736 def get_shadow_repo(self):
3739 def get_shadow_repo(self):
3737 workspace_id = self.workspace_id
3740 workspace_id = self.workspace_id
3738 vcs_obj = self.target_repo.scm_instance()
3741 vcs_obj = self.target_repo.scm_instance()
3739 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3742 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3740 self.target_repo.repo_id, workspace_id)
3743 self.target_repo.repo_id, workspace_id)
3741 if os.path.isdir(shadow_repository_path):
3744 if os.path.isdir(shadow_repository_path):
3742 return vcs_obj._get_shadow_instance(shadow_repository_path)
3745 return vcs_obj._get_shadow_instance(shadow_repository_path)
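    # Note: when the shadow repository directory does not exist yet (e.g. no
    # merge simulation has run for this pull request), the method above falls
    # through and implicitly returns None, so callers should handle that case.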
3743
3746
3744
3747
3745 class PullRequestVersion(Base, _PullRequestBase):
3748 class PullRequestVersion(Base, _PullRequestBase):
3746 __tablename__ = 'pull_request_versions'
3749 __tablename__ = 'pull_request_versions'
3747 __table_args__ = (
3750 __table_args__ = (
3748 base_table_args,
3751 base_table_args,
3749 )
3752 )
3750
3753
3751 pull_request_version_id = Column(
3754 pull_request_version_id = Column(
3752 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3755 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3753 pull_request_id = Column(
3756 pull_request_id = Column(
3754 'pull_request_id', Integer(),
3757 'pull_request_id', Integer(),
3755 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3758 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3756 pull_request = relationship('PullRequest')
3759 pull_request = relationship('PullRequest')
3757
3760
3758 def __repr__(self):
3761 def __repr__(self):
3759 if self.pull_request_version_id:
3762 if self.pull_request_version_id:
3760 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3763 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3761 else:
3764 else:
3762 return '<DB:PullRequestVersion at %#x>' % id(self)
3765 return '<DB:PullRequestVersion at %#x>' % id(self)
3763
3766
3764 @property
3767 @property
3765 def reviewers(self):
3768 def reviewers(self):
3766 return self.pull_request.reviewers
3769 return self.pull_request.reviewers
3767
3770
3768 @property
3771 @property
3769 def versions(self):
3772 def versions(self):
3770 return self.pull_request.versions
3773 return self.pull_request.versions
3771
3774
3772 def is_closed(self):
3775 def is_closed(self):
3773 # calculate from original
3776 # calculate from original
3774 return self.pull_request.status == self.STATUS_CLOSED
3777 return self.pull_request.status == self.STATUS_CLOSED
3775
3778
3776 def calculated_review_status(self):
3779 def calculated_review_status(self):
3777 return self.pull_request.calculated_review_status()
3780 return self.pull_request.calculated_review_status()
3778
3781
3779 def reviewers_statuses(self):
3782 def reviewers_statuses(self):
3780 return self.pull_request.reviewers_statuses()
3783 return self.pull_request.reviewers_statuses()
3781
3784
3782
3785
3783 class PullRequestReviewers(Base, BaseModel):
3786 class PullRequestReviewers(Base, BaseModel):
3784 __tablename__ = 'pull_request_reviewers'
3787 __tablename__ = 'pull_request_reviewers'
3785 __table_args__ = (
3788 __table_args__ = (
3786 base_table_args,
3789 base_table_args,
3787 )
3790 )
3788
3791
3789 @hybrid_property
3792 @hybrid_property
3790 def reasons(self):
3793 def reasons(self):
3791 if not self._reasons:
3794 if not self._reasons:
3792 return []
3795 return []
3793 return self._reasons
3796 return self._reasons
3794
3797
3795 @reasons.setter
3798 @reasons.setter
3796 def reasons(self, val):
3799 def reasons(self, val):
3797 val = val or []
3800 val = val or []
3798 if any(not isinstance(x, basestring) for x in val):
3801 if any(not isinstance(x, basestring) for x in val):
3799 raise Exception('invalid reasons type, must be list of strings')
3802 raise Exception('invalid reasons type, must be list of strings')
3800 self._reasons = val
3803 self._reasons = val
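    # Illustrative sketch (not part of the original model): `reasons` is kept
    # as a JSON list of human-readable strings in the `reason` column, e.g.
    #
    #   >>> reviewer.reasons = [u'added as a code owner', u'default reviewer']
    #   >>> reviewer.reasons
    #   [u'added as a code owner', u'default reviewer']
    #
    # Assigning anything other than a list of strings raises an Exception.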
3801
3804
3802 pull_requests_reviewers_id = Column(
3805 pull_requests_reviewers_id = Column(
3803 'pull_requests_reviewers_id', Integer(), nullable=False,
3806 'pull_requests_reviewers_id', Integer(), nullable=False,
3804 primary_key=True)
3807 primary_key=True)
3805 pull_request_id = Column(
3808 pull_request_id = Column(
3806 "pull_request_id", Integer(),
3809 "pull_request_id", Integer(),
3807 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3810 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3808 user_id = Column(
3811 user_id = Column(
3809 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3812 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3810 _reasons = Column(
3813 _reasons = Column(
3811 'reason', MutationList.as_mutable(
3814 'reason', MutationList.as_mutable(
3812 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3815 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3813
3816
3814 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3817 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3815 user = relationship('User')
3818 user = relationship('User')
3816 pull_request = relationship('PullRequest')
3819 pull_request = relationship('PullRequest')
3817
3820
3818 rule_data = Column(
3821 rule_data = Column(
3819 'rule_data_json',
3822 'rule_data_json',
3820 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
3823 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
3821
3824
3822 def rule_user_group_data(self):
3825 def rule_user_group_data(self):
3823 """
3826 """
3824 Returns the voting user group rule data for this reviewer
3827 Returns the voting user group rule data for this reviewer
3825 """
3828 """
3826
3829
3827 if self.rule_data and 'vote_rule' in self.rule_data:
3830 if self.rule_data and 'vote_rule' in self.rule_data:
3828 user_group_data = {}
3831 user_group_data = {}
3829 if 'rule_user_group_entry_id' in self.rule_data:
3832 if 'rule_user_group_entry_id' in self.rule_data:
3830 # means this reviewer comes from a user group with voting rules
3833 # means this reviewer comes from a user group with voting rules
3831 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
3834 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
3832 user_group_data['name'] = self.rule_data['rule_name']
3835 user_group_data['name'] = self.rule_data['rule_name']
3833 user_group_data['vote_rule'] = self.rule_data['vote_rule']
3836 user_group_data['vote_rule'] = self.rule_data['vote_rule']
3834
3837
3835 return user_group_data
3838 return user_group_data
3836
3839
3837 def __unicode__(self):
3840 def __unicode__(self):
3838 return u"<%s('id:%s')>" % (self.__class__.__name__,
3841 return u"<%s('id:%s')>" % (self.__class__.__name__,
3839 self.pull_requests_reviewers_id)
3842 self.pull_requests_reviewers_id)
3840
3843
3841
3844
3842 class Notification(Base, BaseModel):
3845 class Notification(Base, BaseModel):
3843 __tablename__ = 'notifications'
3846 __tablename__ = 'notifications'
3844 __table_args__ = (
3847 __table_args__ = (
3845 Index('notification_type_idx', 'type'),
3848 Index('notification_type_idx', 'type'),
3846 base_table_args,
3849 base_table_args,
3847 )
3850 )
3848
3851
3849 TYPE_CHANGESET_COMMENT = u'cs_comment'
3852 TYPE_CHANGESET_COMMENT = u'cs_comment'
3850 TYPE_MESSAGE = u'message'
3853 TYPE_MESSAGE = u'message'
3851 TYPE_MENTION = u'mention'
3854 TYPE_MENTION = u'mention'
3852 TYPE_REGISTRATION = u'registration'
3855 TYPE_REGISTRATION = u'registration'
3853 TYPE_PULL_REQUEST = u'pull_request'
3856 TYPE_PULL_REQUEST = u'pull_request'
3854 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3857 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3855
3858
3856 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3859 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3857 subject = Column('subject', Unicode(512), nullable=True)
3860 subject = Column('subject', Unicode(512), nullable=True)
3858 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3861 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3859 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3862 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3860 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3863 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3861 type_ = Column('type', Unicode(255))
3864 type_ = Column('type', Unicode(255))
3862
3865
3863 created_by_user = relationship('User')
3866 created_by_user = relationship('User')
3864 notifications_to_users = relationship('UserNotification', lazy='joined',
3867 notifications_to_users = relationship('UserNotification', lazy='joined',
3865 cascade="all, delete, delete-orphan")
3868 cascade="all, delete, delete-orphan")
3866
3869
3867 @property
3870 @property
3868 def recipients(self):
3871 def recipients(self):
3869 return [x.user for x in UserNotification.query()\
3872 return [x.user for x in UserNotification.query()\
3870 .filter(UserNotification.notification == self)\
3873 .filter(UserNotification.notification == self)\
3871 .order_by(UserNotification.user_id.asc()).all()]
3874 .order_by(UserNotification.user_id.asc()).all()]
3872
3875
3873 @classmethod
3876 @classmethod
3874 def create(cls, created_by, subject, body, recipients, type_=None):
3877 def create(cls, created_by, subject, body, recipients, type_=None):
3875 if type_ is None:
3878 if type_ is None:
3876 type_ = Notification.TYPE_MESSAGE
3879 type_ = Notification.TYPE_MESSAGE
3877
3880
3878 notification = cls()
3881 notification = cls()
3879 notification.created_by_user = created_by
3882 notification.created_by_user = created_by
3880 notification.subject = subject
3883 notification.subject = subject
3881 notification.body = body
3884 notification.body = body
3882 notification.type_ = type_
3885 notification.type_ = type_
3883 notification.created_on = datetime.datetime.now()
3886 notification.created_on = datetime.datetime.now()
3884
3887
3888 # For each recipient, link the created notification to their account
3891 # For each recipient, link the created notification to their account
3886 for u in recipients:
3889 for u in recipients:
3887 assoc = UserNotification()
3890 assoc = UserNotification()
3888 assoc.user_id = u.user_id
3891 assoc.user_id = u.user_id
3889 assoc.notification = notification
3892 assoc.notification = notification
3890
3893
3894 # if created_by is among the recipients, mark their notification
3897 # if created_by is among the recipients, mark their notification
3892 # as read
3895 # as read
3893 if u.user_id == created_by.user_id:
3896 if u.user_id == created_by.user_id:
3894 assoc.read = True
3897 assoc.read = True
3895 Session().add(assoc)
3898 Session().add(assoc)
3896
3899
3897 Session().add(notification)
3900 Session().add(notification)
3898
3901
3899 return notification
3902 return notification
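    # Illustrative sketch (hypothetical usage; the user variables are assumed,
    # not part of this module): creating a notification links it to every
    # recipient via UserNotification, and the author's own copy is pre-marked
    # as read:
    #
    #   >>> notification = Notification.create(
    #   ...     created_by=admin_user, subject=u'New PR', body=u'please review',
    #   ...     recipients=[reviewer_a, reviewer_b],
    #   ...     type_=Notification.TYPE_PULL_REQUEST)
    #   >>> Session().commit()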
3900
3903
3901
3904
3902 class UserNotification(Base, BaseModel):
3905 class UserNotification(Base, BaseModel):
3903 __tablename__ = 'user_to_notification'
3906 __tablename__ = 'user_to_notification'
3904 __table_args__ = (
3907 __table_args__ = (
3905 UniqueConstraint('user_id', 'notification_id'),
3908 UniqueConstraint('user_id', 'notification_id'),
3906 base_table_args
3909 base_table_args
3907 )
3910 )
3908
3911
3909 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3912 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3910 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3913 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3911 read = Column('read', Boolean, default=False)
3914 read = Column('read', Boolean, default=False)
3912 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3915 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3913
3916
3914 user = relationship('User', lazy="joined")
3917 user = relationship('User', lazy="joined")
3915 notification = relationship('Notification', lazy="joined",
3918 notification = relationship('Notification', lazy="joined",
3916 order_by=lambda: Notification.created_on.desc(),)
3919 order_by=lambda: Notification.created_on.desc(),)
3917
3920
3918 def mark_as_read(self):
3921 def mark_as_read(self):
3919 self.read = True
3922 self.read = True
3920 Session().add(self)
3923 Session().add(self)
3921
3924
3922
3925
3923 class Gist(Base, BaseModel):
3926 class Gist(Base, BaseModel):
3924 __tablename__ = 'gists'
3927 __tablename__ = 'gists'
3925 __table_args__ = (
3928 __table_args__ = (
3926 Index('g_gist_access_id_idx', 'gist_access_id'),
3929 Index('g_gist_access_id_idx', 'gist_access_id'),
3927 Index('g_created_on_idx', 'created_on'),
3930 Index('g_created_on_idx', 'created_on'),
3928 base_table_args
3931 base_table_args
3929 )
3932 )
3930
3933
3931 GIST_PUBLIC = u'public'
3934 GIST_PUBLIC = u'public'
3932 GIST_PRIVATE = u'private'
3935 GIST_PRIVATE = u'private'
3933 DEFAULT_FILENAME = u'gistfile1.txt'
3936 DEFAULT_FILENAME = u'gistfile1.txt'
3934
3937
3935 ACL_LEVEL_PUBLIC = u'acl_public'
3938 ACL_LEVEL_PUBLIC = u'acl_public'
3936 ACL_LEVEL_PRIVATE = u'acl_private'
3939 ACL_LEVEL_PRIVATE = u'acl_private'
3937
3940
3938 gist_id = Column('gist_id', Integer(), primary_key=True)
3941 gist_id = Column('gist_id', Integer(), primary_key=True)
3939 gist_access_id = Column('gist_access_id', Unicode(250))
3942 gist_access_id = Column('gist_access_id', Unicode(250))
3940 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3943 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3941 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3944 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3942 gist_expires = Column('gist_expires', Float(53), nullable=False)
3945 gist_expires = Column('gist_expires', Float(53), nullable=False)
3943 gist_type = Column('gist_type', Unicode(128), nullable=False)
3946 gist_type = Column('gist_type', Unicode(128), nullable=False)
3944 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3947 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3945 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3948 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3946 acl_level = Column('acl_level', Unicode(128), nullable=True)
3949 acl_level = Column('acl_level', Unicode(128), nullable=True)
3947
3950
3948 owner = relationship('User')
3951 owner = relationship('User')
3949
3952
3950 def __repr__(self):
3953 def __repr__(self):
3951 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3954 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3952
3955
3953 @hybrid_property
3956 @hybrid_property
3954 def description_safe(self):
3957 def description_safe(self):
3955 from rhodecode.lib import helpers as h
3958 from rhodecode.lib import helpers as h
3956 return h.escape(self.gist_description)
3959 return h.escape(self.gist_description)
3957
3960
3958 @classmethod
3961 @classmethod
3959 def get_or_404(cls, id_):
3962 def get_or_404(cls, id_):
3960 from pyramid.httpexceptions import HTTPNotFound
3963 from pyramid.httpexceptions import HTTPNotFound
3961
3964
3962 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3965 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3963 if not res:
3966 if not res:
3964 raise HTTPNotFound()
3967 raise HTTPNotFound()
3965 return res
3968 return res
3966
3969
3967 @classmethod
3970 @classmethod
3968 def get_by_access_id(cls, gist_access_id):
3971 def get_by_access_id(cls, gist_access_id):
3969 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3972 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3970
3973
3971 def gist_url(self):
3974 def gist_url(self):
3972 from rhodecode.model.gist import GistModel
3975 from rhodecode.model.gist import GistModel
3973 return GistModel().get_url(self)
3976 return GistModel().get_url(self)
3974
3977
3975 @classmethod
3978 @classmethod
3976 def base_path(cls):
3979 def base_path(cls):
3977 """
3980 """
3978 Returns the base path where all gists are stored
3981 Returns the base path where all gists are stored
3979
3982
3980 :param cls:
3983 :param cls:
3981 """
3984 """
3982 from rhodecode.model.gist import GIST_STORE_LOC
3985 from rhodecode.model.gist import GIST_STORE_LOC
3983 q = Session().query(RhodeCodeUi)\
3986 q = Session().query(RhodeCodeUi)\
3984 .filter(RhodeCodeUi.ui_key == URL_SEP)
3987 .filter(RhodeCodeUi.ui_key == URL_SEP)
3985 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3988 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3986 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3989 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3987
3990
3988 def get_api_data(self):
3991 def get_api_data(self):
3989 """
3992 """
3990 Common function for generating gist related data for API
3993 Common function for generating gist related data for API
3991 """
3994 """
3992 gist = self
3995 gist = self
3993 data = {
3996 data = {
3994 'gist_id': gist.gist_id,
3997 'gist_id': gist.gist_id,
3995 'type': gist.gist_type,
3998 'type': gist.gist_type,
3996 'access_id': gist.gist_access_id,
3999 'access_id': gist.gist_access_id,
3997 'description': gist.gist_description,
4000 'description': gist.gist_description,
3998 'url': gist.gist_url(),
4001 'url': gist.gist_url(),
3999 'expires': gist.gist_expires,
4002 'expires': gist.gist_expires,
4000 'created_on': gist.created_on,
4003 'created_on': gist.created_on,
4001 'modified_at': gist.modified_at,
4004 'modified_at': gist.modified_at,
4002 'content': None,
4005 'content': None,
4003 'acl_level': gist.acl_level,
4006 'acl_level': gist.acl_level,
4004 }
4007 }
4005 return data
4008 return data
4006
4009
4007 def __json__(self):
4010 def __json__(self):
4008 data = dict(
4011 data = dict(
4009 )
4012 )
4010 data.update(self.get_api_data())
4013 data.update(self.get_api_data())
4011 return data
4014 return data
4012 # SCM functions
4015 # SCM functions
4013
4016
4014 def scm_instance(self, **kwargs):
4017 def scm_instance(self, **kwargs):
4015 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4018 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4016 return get_vcs_instance(
4019 return get_vcs_instance(
4017 repo_path=safe_str(full_repo_path), create=False)
4020 repo_path=safe_str(full_repo_path), create=False)
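    # Illustrative note: gist repositories live on disk under
    # <repo store>/GIST_STORE_LOC/<gist_access_id>, so scm_instance() above
    # simply opens that path as a regular VCS repository (create=False).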
4018
4021
4019
4022
4020 class ExternalIdentity(Base, BaseModel):
4023 class ExternalIdentity(Base, BaseModel):
4021 __tablename__ = 'external_identities'
4024 __tablename__ = 'external_identities'
4022 __table_args__ = (
4025 __table_args__ = (
4023 Index('local_user_id_idx', 'local_user_id'),
4026 Index('local_user_id_idx', 'local_user_id'),
4024 Index('external_id_idx', 'external_id'),
4027 Index('external_id_idx', 'external_id'),
4025 base_table_args
4028 base_table_args
4026 )
4029 )
4027
4030
4028 external_id = Column('external_id', Unicode(255), default=u'',
4031 external_id = Column('external_id', Unicode(255), default=u'',
4029 primary_key=True)
4032 primary_key=True)
4030 external_username = Column('external_username', Unicode(1024), default=u'')
4033 external_username = Column('external_username', Unicode(1024), default=u'')
4031 local_user_id = Column('local_user_id', Integer(),
4034 local_user_id = Column('local_user_id', Integer(),
4032 ForeignKey('users.user_id'), primary_key=True)
4035 ForeignKey('users.user_id'), primary_key=True)
4033 provider_name = Column('provider_name', Unicode(255), default=u'',
4036 provider_name = Column('provider_name', Unicode(255), default=u'',
4034 primary_key=True)
4037 primary_key=True)
4035 access_token = Column('access_token', String(1024), default=u'')
4038 access_token = Column('access_token', String(1024), default=u'')
4036 alt_token = Column('alt_token', String(1024), default=u'')
4039 alt_token = Column('alt_token', String(1024), default=u'')
4037 token_secret = Column('token_secret', String(1024), default=u'')
4040 token_secret = Column('token_secret', String(1024), default=u'')
4038
4041
4039 @classmethod
4042 @classmethod
4040 def by_external_id_and_provider(cls, external_id, provider_name,
4043 def by_external_id_and_provider(cls, external_id, provider_name,
4041 local_user_id=None):
4044 local_user_id=None):
4042 """
4045 """
4043 Returns ExternalIdentity instance based on search params
4046 Returns ExternalIdentity instance based on search params
4044
4047
4045 :param external_id:
4048 :param external_id:
4046 :param provider_name:
4049 :param provider_name:
4047 :return: ExternalIdentity
4050 :return: ExternalIdentity
4048 """
4051 """
4049 query = cls.query()
4052 query = cls.query()
4050 query = query.filter(cls.external_id == external_id)
4053 query = query.filter(cls.external_id == external_id)
4051 query = query.filter(cls.provider_name == provider_name)
4054 query = query.filter(cls.provider_name == provider_name)
4052 if local_user_id:
4055 if local_user_id:
4053 query = query.filter(cls.local_user_id == local_user_id)
4056 query = query.filter(cls.local_user_id == local_user_id)
4054 return query.first()
4057 return query.first()
4055
4058
4056 @classmethod
4059 @classmethod
4057 def user_by_external_id_and_provider(cls, external_id, provider_name):
4060 def user_by_external_id_and_provider(cls, external_id, provider_name):
4058 """
4061 """
4059 Returns User instance based on search params
4062 Returns User instance based on search params
4060
4063
4061 :param external_id:
4064 :param external_id:
4062 :param provider_name:
4065 :param provider_name:
4063 :return: User
4066 :return: User
4064 """
4067 """
4065 query = User.query()
4068 query = User.query()
4066 query = query.filter(cls.external_id == external_id)
4069 query = query.filter(cls.external_id == external_id)
4067 query = query.filter(cls.provider_name == provider_name)
4070 query = query.filter(cls.provider_name == provider_name)
4068 query = query.filter(User.user_id == cls.local_user_id)
4071 query = query.filter(User.user_id == cls.local_user_id)
4069 return query.first()
4072 return query.first()
4070
4073
4071 @classmethod
4074 @classmethod
4072 def by_local_user_id(cls, local_user_id):
4075 def by_local_user_id(cls, local_user_id):
4073 """
4076 """
4074 Returns all tokens for user
4077 Returns all tokens for user
4075
4078
4076 :param local_user_id:
4079 :param local_user_id:
4077 :return: ExternalIdentity
4080 :return: ExternalIdentity
4078 """
4081 """
4079 query = cls.query()
4082 query = cls.query()
4080 query = query.filter(cls.local_user_id == local_user_id)
4083 query = query.filter(cls.local_user_id == local_user_id)
4081 return query
4084 return query
4082
4085
4083
4086
4084 class Integration(Base, BaseModel):
4087 class Integration(Base, BaseModel):
4085 __tablename__ = 'integrations'
4088 __tablename__ = 'integrations'
4086 __table_args__ = (
4089 __table_args__ = (
4087 base_table_args
4090 base_table_args
4088 )
4091 )
4089
4092
4090 integration_id = Column('integration_id', Integer(), primary_key=True)
4093 integration_id = Column('integration_id', Integer(), primary_key=True)
4091 integration_type = Column('integration_type', String(255))
4094 integration_type = Column('integration_type', String(255))
4092 enabled = Column('enabled', Boolean(), nullable=False)
4095 enabled = Column('enabled', Boolean(), nullable=False)
4093 name = Column('name', String(255), nullable=False)
4096 name = Column('name', String(255), nullable=False)
4094 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4097 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4095 default=False)
4098 default=False)
4096
4099
4097 settings = Column(
4100 settings = Column(
4098 'settings_json', MutationObj.as_mutable(
4101 'settings_json', MutationObj.as_mutable(
4099 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4102 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4100 repo_id = Column(
4103 repo_id = Column(
4101 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4104 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4102 nullable=True, unique=None, default=None)
4105 nullable=True, unique=None, default=None)
4103 repo = relationship('Repository', lazy='joined')
4106 repo = relationship('Repository', lazy='joined')
4104
4107
4105 repo_group_id = Column(
4108 repo_group_id = Column(
4106 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4109 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4107 nullable=True, unique=None, default=None)
4110 nullable=True, unique=None, default=None)
4108 repo_group = relationship('RepoGroup', lazy='joined')
4111 repo_group = relationship('RepoGroup', lazy='joined')
4109
4112
4110 @property
4113 @property
4111 def scope(self):
4114 def scope(self):
4112 if self.repo:
4115 if self.repo:
4113 return repr(self.repo)
4116 return repr(self.repo)
4114 if self.repo_group:
4117 if self.repo_group:
4115 if self.child_repos_only:
4118 if self.child_repos_only:
4116 return repr(self.repo_group) + ' (child repos only)'
4119 return repr(self.repo_group) + ' (child repos only)'
4117 else:
4120 else:
4118 return repr(self.repo_group) + ' (recursive)'
4121 return repr(self.repo_group) + ' (recursive)'
4119 if self.child_repos_only:
4122 if self.child_repos_only:
4120 return 'root_repos'
4123 return 'root_repos'
4121 return 'global'
4124 return 'global'
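    # Note: the scope above is resolved in priority order: a bound repository
    # wins over a repository group, and with neither set the integration is
    # either 'root_repos' (child_repos_only) or 'global'.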
4122
4125
4123 def __repr__(self):
4126 def __repr__(self):
4124 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4127 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4125
4128
4126
4129
4127 class RepoReviewRuleUser(Base, BaseModel):
4130 class RepoReviewRuleUser(Base, BaseModel):
4128 __tablename__ = 'repo_review_rules_users'
4131 __tablename__ = 'repo_review_rules_users'
4129 __table_args__ = (
4132 __table_args__ = (
4130 base_table_args
4133 base_table_args
4131 )
4134 )
4132
4135
4133 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4136 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4134 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4137 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4135 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4138 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4136 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4139 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4137 user = relationship('User')
4140 user = relationship('User')
4138
4141
4139 def rule_data(self):
4142 def rule_data(self):
4140 return {
4143 return {
4141 'mandatory': self.mandatory
4144 'mandatory': self.mandatory
4142 }
4145 }
4143
4146
4144
4147
4145 class RepoReviewRuleUserGroup(Base, BaseModel):
4148 class RepoReviewRuleUserGroup(Base, BaseModel):
4146 __tablename__ = 'repo_review_rules_users_groups'
4149 __tablename__ = 'repo_review_rules_users_groups'
4147 __table_args__ = (
4150 __table_args__ = (
4148 base_table_args
4151 base_table_args
4149 )
4152 )
4150
4153
4151 VOTE_RULE_ALL = -1
4154 VOTE_RULE_ALL = -1
4152
4155
4153 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4156 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4154 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4157 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4155 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4158 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4156 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4159 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4157 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4160 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4158 users_group = relationship('UserGroup')
4161 users_group = relationship('UserGroup')
4159
4162
4160 def rule_data(self):
4163 def rule_data(self):
4161 return {
4164 return {
4162 'mandatory': self.mandatory,
4165 'mandatory': self.mandatory,
4163 'vote_rule': self.vote_rule
4166 'vote_rule': self.vote_rule
4164 }
4167 }
4165
4168
4166 @property
4169 @property
4167 def vote_rule_label(self):
4170 def vote_rule_label(self):
4168 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4171 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4169 return 'all must vote'
4172 return 'all must vote'
4170 else:
4173 else:
4171 return 'min. vote {}'.format(self.vote_rule)
4174 return 'min. vote {}'.format(self.vote_rule)
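    # Illustrative sketch (not part of the original model): vote_rule uses
    # VOTE_RULE_ALL (-1) as a sentinel meaning every group member must vote;
    # any positive number is the minimum number of votes required, e.g.
    #
    #   >>> rule_group.vote_rule = 2
    #   >>> rule_group.vote_rule_label
    #   'min. vote 2'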
4172
4175
4173
4176
4174 class RepoReviewRule(Base, BaseModel):
4177 class RepoReviewRule(Base, BaseModel):
4175 __tablename__ = 'repo_review_rules'
4178 __tablename__ = 'repo_review_rules'
4176 __table_args__ = (
4179 __table_args__ = (
4177 base_table_args
4180 base_table_args
4178 )
4181 )
4179
4182
4180 repo_review_rule_id = Column(
4183 repo_review_rule_id = Column(
4181 'repo_review_rule_id', Integer(), primary_key=True)
4184 'repo_review_rule_id', Integer(), primary_key=True)
4182 repo_id = Column(
4185 repo_id = Column(
4183 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4186 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4184 repo = relationship('Repository', backref='review_rules')
4187 repo = relationship('Repository', backref='review_rules')
4185
4188
4186 review_rule_name = Column('review_rule_name', String(255))
4189 review_rule_name = Column('review_rule_name', String(255))
4187 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4190 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4188 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4191 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4189 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4192 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4190
4193
4191 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4194 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4192 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4195 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4193 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4196 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4194 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4197 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4195
4198
4196 rule_users = relationship('RepoReviewRuleUser')
4199 rule_users = relationship('RepoReviewRuleUser')
4197 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4200 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4198
4201
4199 def _validate_pattern(self, value):
4202 def _validate_pattern(self, value):
4200 re.compile('^' + glob2re(value) + '$')
4203 re.compile('^' + glob2re(value) + '$')
4201
4204
4202 @hybrid_property
4205 @hybrid_property
4203 def source_branch_pattern(self):
4206 def source_branch_pattern(self):
4204 return self._branch_pattern or '*'
4207 return self._branch_pattern or '*'
4205
4208
4206 @source_branch_pattern.setter
4209 @source_branch_pattern.setter
4207 def source_branch_pattern(self, value):
4210 def source_branch_pattern(self, value):
4208 self._validate_pattern(value)
4211 self._validate_pattern(value)
4209 self._branch_pattern = value or '*'
4212 self._branch_pattern = value or '*'
4210
4213
4211 @hybrid_property
4214 @hybrid_property
4212 def target_branch_pattern(self):
4215 def target_branch_pattern(self):
4213 return self._target_branch_pattern or '*'
4216 return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        if source_branch or target_branch:
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                if self.source_branch_pattern.startswith('re:'):
                    source_pattern = self.source_branch_pattern[3:]
                else:
                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
                source_branch_regex = re.compile(source_pattern)
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                if self.target_branch_pattern.startswith('re:'):
                    target_pattern = self.target_branch_pattern[3:]
                else:
                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
                target_branch_regex = re.compile(target_pattern)
                target_branch_match = bool(target_branch_regex.search(target_branch))

            branch_matches = source_branch_match and target_branch_match

        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for filename in files_changed:
                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

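    # Editor's illustration (hypothetical values): with source_branch_pattern='dev/*',
    # target_branch_pattern='*' and file_pattern='re:.*\.py$', a pull request from
    # 'dev/fix-1' into 'default' that touches ['rhodecode/model/db.py'] would match this
    # rule, while one that only touches ['README.rst'] would not. Bare patterns are
    # translated with glob2re(); a 're:' prefix means the remainder is used as a raw
    # regular expression, and '*' matches everything.
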
    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        users = collections.OrderedDict()

        for rule_user in self.rule_users:
            if rule_user.user.active:
                if rule_user.user.username not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member, we already have them; this prevents
                        # overriding the first matched user with duplicates
                        # coming from multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

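    # Editor's note: the mapping above is keyed by username and preserves insertion
    # order; each value is a dict of the form {'user': <User>, 'source': 'user' or
    # 'user_group', 'source_data': {...}, 'data': <per-rule voting data>}. Directly
    # assigned users take precedence over members contributed by user groups.
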
    def user_group_vote_rule(self):
        rules = []
        if self.rule_user_groups:
            for user_group in self.rule_user_groups:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
            self.repo_review_rule_id, self.repo)


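# Editor's sketch (hypothetical helper, not part of the model): the pattern semantics
# used by the review-rule matching above, reduced to a standalone function. It only
# relies on `re` and `glob2re`, which are already used in this module.
def _example_branch_pattern_match(pattern, branch_name):
    # '*' matches everything, a 're:' prefix marks a raw regular expression,
    # anything else is a glob translated via glob2re() and anchored.
    if pattern == '*':
        return True
    if pattern.startswith('re:'):
        regex = re.compile(pattern[3:])
    else:
        regex = re.compile('^' + glob2re(pattern) + '$')
    return bool(regex.search(branch_name))

# e.g. _example_branch_pattern_match('dev/*', 'dev/feature-1') would be expected to
# return True, while _example_branch_pattern_match(r're:^release/[0-9]+$', 'release/x')
# returns False.

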
class ScheduleEntry(Base, BaseModel):
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )

    schedule_types = ['crontab', 'timedelta', 'integer']
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        if val not in self.schedule_types:
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        args = obj.task_args
        kwargs = obj.task_kwargs
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return hashlib.sha1(val).hexdigest()

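    # Editor's note: get_uid() fingerprints a schedule entry as a SHA1 over the task
    # dot notation plus its args and sorted kwargs. Note that sorted() over the
    # dot-notation string sorts its characters, so the joined value is only meaningful
    # as a stable hash input. Changing the task path or its arguments therefore yields
    # a different task_uid, which is kept unique by `s_task_uid_idx` above.
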
    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        return self.task_dot_notation

    @property
    def schedule(self):
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val):
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
            if val:
                val = json.dumps(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    @property
    def args_raw(self):
        return self._as_raw(self.task_args)

    @property
    def kwargs_raw(self):
        return self._as_raw(self.task_kwargs)

    def __repr__(self):
        return '<DB:ScheduleEntry({}:{})>'.format(
            self.schedule_entry_id, self.schedule_name)


@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    target.task_uid = ScheduleEntry.get_uid(target)


@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    target.task_uid = ScheduleEntry.get_uid(target)


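# Editor's sketch (hypothetical, values are illustrative only): the two SQLAlchemy
# event listeners above recompute `task_uid` on every INSERT and UPDATE, so callers
# never set it by hand. Creating an entry could therefore look roughly like this,
# with the uid filled in automatically at flush time:
#
#     entry = ScheduleEntry()
#     entry.schedule_name = 'daily-repo-maintenance'
#     entry.schedule_type = 'crontab'
#     entry.schedule_definition = {'minute': '0', 'hour': '4'}
#     entry.task_dot_notation = 'rhodecode.lib.celerylib.tasks.some_task'
#     Session().add(entry)
#     Session().commit()  # before_insert fires and sets entry.task_uid

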
class DbMigrateVersion(Base, BaseModel):
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.
        """
        ver = DbMigrateVersion.query().first()
        ver.version = version
        Session().commit()


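# Editor's note (illustrative): as its docstring says, set_version() is intended for
# debugging from an interactive shell, e.g. `DbMigrateVersion.set_version(80)` to force
# the stored schema version; the number here is a made-up example.

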
class DbSession(Base, BaseModel):
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return '<DB:DbSession({})>'.format(self.id)

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    data = Column('data', PickleType, nullable=False)


class BeakerCache(Base, BaseModel):
    __tablename__ = 'beaker_cache'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return '<DB:BeakerCache({})>'.format(self.id)

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    data = Column('data', PickleType, nullable=False)