@@ -1,453 +1,453 b''

# -*- coding: utf-8 -*-

# Copyright (C) 2014-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
JSON RPC utils
"""

import collections
import logging

from rhodecode.api.exc import JSONRPCError
from rhodecode.lib.auth import (
    HasPermissionAnyApi, HasRepoPermissionAnyApi, HasRepoGroupPermissionAnyApi)
from rhodecode.lib.utils import safe_unicode
from rhodecode.lib.vcs.exceptions import RepositoryError
from rhodecode.lib.view_utils import get_commit_from_ref_name
from rhodecode.lib.utils2 import str2bool

log = logging.getLogger(__name__)


class OAttr(object):
    """
    Special Option that refers to another attribute, and can default to it

    Example::

        def test(apiuser, userid=Optional(OAttr('apiuser'))):
            user = Optional.extract(userid, evaluate_locals=locals())
            # if we pass in userid, we get it, else it will default to the
            # apiuser attribute
    """

    def __init__(self, attr_name):
        self.attr_name = attr_name

    def __repr__(self):
        return '<OptionalAttr:%s>' % self.attr_name

    def __call__(self):
        return self


class Optional(object):
    """
    Defines an optional parameter::

        param = param.getval() if isinstance(param, Optional) else param
        param = param() if isinstance(param, Optional) else param

    is equivalent to::

        param = Optional.extract(param)

    """

    def __init__(self, type_):
        self.type_ = type_

    def __repr__(self):
        return '<Optional:%s>' % self.type_.__repr__()

    def __call__(self):
        return self.getval()

    def getval(self, evaluate_locals=None):
        """
        returns value from this Optional instance
        """
        if isinstance(self.type_, OAttr):
            param_name = self.type_.attr_name
            if evaluate_locals:
                return evaluate_locals[param_name]
            # use params name
            return param_name
        return self.type_

    @classmethod
    def extract(cls, val, evaluate_locals=None, binary=None):
        """
        Extracts value from Optional() instance

        :param val:
        :return: original value if it's not Optional instance else
            value of instance
        """
        if isinstance(val, cls):
            val = val.getval(evaluate_locals)

        if binary:
            val = str2bool(val)

        return val


def parse_args(cli_args, key_prefix=''):
    from rhodecode.lib.utils2 import (escape_split)
    kwargs = collections.defaultdict(dict)
    for el in escape_split(cli_args, ','):
        kv = escape_split(el, '=', 1)
        if len(kv) == 2:
            k, v = kv
            kwargs[key_prefix + k] = v
    return kwargs


def get_origin(obj):
    """
    Get origin of permission from object.

    :param obj:
    """
    origin = 'permission'

    if getattr(obj, 'owner_row', '') and getattr(obj, 'admin_row', ''):
        # admin and owner case, maybe we should use dual string ?
        origin = 'owner'
    elif getattr(obj, 'owner_row', ''):
        origin = 'owner'
    elif getattr(obj, 'admin_row', ''):
        origin = 'super-admin'
    return origin


def store_update(updates, attr, name):
    """
    Stores param in updates dict if it's not instance of Optional
    allows easy updates of passed in params
    """
    if not isinstance(attr, Optional):
        updates[name] = attr


def has_superadmin_permission(apiuser):
    """
    Return True if apiuser is admin or return False

    :param apiuser:
    """
    if HasPermissionAnyApi('hg.admin')(user=apiuser):
        return True
    return False


def validate_repo_permissions(apiuser, repoid, repo, perms):
    """
    Raise JsonRPCError if apiuser is not authorized or return True

    :param apiuser:
    :param repoid:
    :param repo:
    :param perms:
    """
    if not HasRepoPermissionAnyApi(*perms)(
            user=apiuser, repo_name=repo.repo_name):
        raise JSONRPCError(
            'repository `%s` does not exist' % repoid)

    return True


def validate_repo_group_permissions(apiuser, repogroupid, repo_group, perms):
    """
    Raise JsonRPCError if apiuser is not authorized or return True

    :param apiuser:
    :param repogroupid: just the id of repository group
    :param repo_group: instance of repo_group
    :param perms:
    """
    if not HasRepoGroupPermissionAnyApi(*perms)(
            user=apiuser, group_name=repo_group.group_name):
        raise JSONRPCError(
            'repository group `%s` does not exist' % repogroupid)

    return True


def validate_set_owner_permissions(apiuser, owner):
    if isinstance(owner, Optional):
        owner = get_user_or_error(apiuser.user_id)
    else:
        if has_superadmin_permission(apiuser):
            owner = get_user_or_error(owner)
        else:
            # forbid setting owner for non-admins
            raise JSONRPCError(
                'Only RhodeCode super-admin can specify `owner` param')
    return owner


def get_user_or_error(userid):
    """
    Get user by id or name or return JsonRPCError if not found

    :param userid:
    """
    from rhodecode.model.user import UserModel
    user_model = UserModel()

    if isinstance(userid, (int, long)):
        try:
            user = user_model.get_user(userid)
        except ValueError:
            user = None
    else:
        user = user_model.get_by_username(userid)

    if user is None:
        raise JSONRPCError(
            'user `%s` does not exist' % (userid,))
    return user


def get_repo_or_error(repoid):
    """
    Get repo by id or name or return JsonRPCError if not found

    :param repoid:
    """
    from rhodecode.model.repo import RepoModel
    repo_model = RepoModel()

    if isinstance(repoid, (int, long)):
        try:
            repo = repo_model.get_repo(repoid)
        except ValueError:
            repo = None
    else:
        repo = repo_model.get_by_repo_name(repoid)

    if repo is None:
        raise JSONRPCError(
            'repository `%s` does not exist' % (repoid,))
    return repo


def get_repo_group_or_error(repogroupid):
    """
    Get repo group by id or name or return JsonRPCError if not found

    :param repogroupid:
    """
    from rhodecode.model.repo_group import RepoGroupModel
    repo_group_model = RepoGroupModel()

    if isinstance(repogroupid, (int, long)):
        try:
            repo_group = repo_group_model._get_repo_group(repogroupid)
        except ValueError:
            repo_group = None
    else:
        repo_group = repo_group_model.get_by_group_name(repogroupid)

    if repo_group is None:
        raise JSONRPCError(
            'repository group `%s` does not exist' % (repogroupid,))
    return repo_group


def get_user_group_or_error(usergroupid):
    """
    Get user group by id or name or return JsonRPCError if not found

    :param usergroupid:
    """
    from rhodecode.model.user_group import UserGroupModel
    user_group_model = UserGroupModel()

    if isinstance(usergroupid, (int, long)):
        try:
            user_group = user_group_model.get_group(usergroupid)
        except ValueError:
            user_group = None
    else:
        user_group = user_group_model.get_by_name(usergroupid)

    if user_group is None:
        raise JSONRPCError(
            'user group `%s` does not exist' % (usergroupid,))
    return user_group


def get_perm_or_error(permid, prefix=None):
    """
    Get permission by id or name or return JsonRPCError if not found

    :param permid:
    """
    from rhodecode.model.permission import PermissionModel

    perm = PermissionModel.cls.get_by_key(permid)
    if perm is None:
        msg = 'permission `{}` does not exist.'.format(permid)
        if prefix:
            msg += ' Permission should start with prefix: `{}`'.format(prefix)
        raise JSONRPCError(msg)

    if prefix:
        if not perm.permission_name.startswith(prefix):
            raise JSONRPCError('permission `%s` is invalid, '
                               'should start with %s' % (permid, prefix))
    return perm


def get_gist_or_error(gistid):
    """
    Get gist by id or gist_access_id or return JsonRPCError if not found

    :param gistid:
    """
    from rhodecode.model.gist import GistModel

    gist = GistModel.cls.get_by_access_id(gistid)
    if gist is None:
        raise JSONRPCError('gist `%s` does not exist' % (gistid,))
    return gist


def get_pull_request_or_error(pullrequestid):
    """
    Get pull request by id or return JsonRPCError if not found

    :param pullrequestid:
    """
    from rhodecode.model.pull_request import PullRequestModel

    try:
        pull_request = PullRequestModel().get(int(pullrequestid))
    except ValueError:
        raise JSONRPCError('pullrequestid must be an integer')
    if not pull_request:
        raise JSONRPCError('pull request `%s` does not exist' % (
            pullrequestid,))
    return pull_request


def build_commit_data(commit, detail_level):
    parsed_diff = []
    if detail_level == 'extended':
-        for f in commit.added:
+        for f_path in commit.added_paths:
-            parsed_diff.append(_get_commit_dict(filename=f, op='A'))
+            parsed_diff.append(_get_commit_dict(filename=f_path, op='A'))
-        for f in commit.changed:
+        for f_path in commit.changed_paths:
-            parsed_diff.append(_get_commit_dict(filename=f, op='M'))
+            parsed_diff.append(_get_commit_dict(filename=f_path, op='M'))
-        for f in commit.removed:
+        for f_path in commit.removed_paths:
-            parsed_diff.append(_get_commit_dict(filename=f, op='D'))
+            parsed_diff.append(_get_commit_dict(filename=f_path, op='D'))

    elif detail_level == 'full':
        from rhodecode.lib.diffs import DiffProcessor
        diff_processor = DiffProcessor(commit.diff())
        for dp in diff_processor.prepare():
            del dp['stats']['ops']
            _stats = dp['stats']
            parsed_diff.append(_get_commit_dict(
                filename=dp['filename'], op=dp['operation'],
                new_revision=dp['new_revision'],
                old_revision=dp['old_revision'],
                raw_diff=dp['raw_diff'], stats=_stats))

    return parsed_diff


def get_commit_or_error(ref, repo):
    try:
        ref_type, _, ref_hash = ref.split(':')
    except ValueError:
        raise JSONRPCError(
            'Ref `{ref}` given in a wrong format. Please check the API'
            ' documentation for more details'.format(ref=ref))
    try:
        # TODO: dan: refactor this to use repo.scm_instance().get_commit()
        # once get_commit supports ref_types
        return get_commit_from_ref_name(repo, ref_hash)
    except RepositoryError:
        raise JSONRPCError('Ref `{ref}` does not exist'.format(ref=ref))


def _get_ref_hash(repo, type_, name):
    vcs_repo = repo.scm_instance()
    if type_ in ['branch'] and vcs_repo.alias in ('hg', 'git'):
        return vcs_repo.branches[name]
    elif type_ in ['bookmark', 'book'] and vcs_repo.alias == 'hg':
        return vcs_repo.bookmarks[name]
    else:
        raise ValueError()


def resolve_ref_or_error(ref, repo, allowed_ref_types=None):
    allowed_ref_types = allowed_ref_types or ['bookmark', 'book', 'tag', 'branch']

    def _parse_ref(type_, name, hash_=None):
        return type_, name, hash_

    try:
        ref_type, ref_name, ref_hash = _parse_ref(*ref.split(':'))
    except TypeError:
        raise JSONRPCError(
            'Ref `{ref}` given in a wrong format. Please check the API'
            ' documentation for more details'.format(ref=ref))

    if ref_type not in allowed_ref_types:
        raise JSONRPCError(
            'Ref `{ref}` type is not allowed. '
            'Only:{allowed_refs} are possible.'.format(
                ref=ref, allowed_refs=allowed_ref_types))

    try:
        ref_hash = ref_hash or _get_ref_hash(repo, ref_type, ref_name)
    except (KeyError, ValueError):
        raise JSONRPCError(
            'The specified value:{type}:`{name}` does not exist, or is not allowed.'.format(
                type=ref_type, name=ref_name))

    return ':'.join([ref_type, ref_name, ref_hash])


def _get_commit_dict(
        filename, op, new_revision=None, old_revision=None,
        raw_diff=None, stats=None):
    if stats is None:
        stats = {
            "added": None,
            "binary": None,
            "deleted": None
        }
    return {
        "filename": safe_unicode(filename),
        "op": op,

        # extra details
        "new_revision": new_revision,
        "old_revision": old_revision,

        "raw_diff": raw_diff,
        "stats": stats
    }
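
For review context only: a minimal sketch (not part of the changeset) of the shape of one entry that build_commit_data() produces at the 'extended' detail level, based on the _get_commit_dict() helper above. The path and the `entry` name are made up for illustration.

# Illustrative sketch, not part of the diff. At the 'extended' level only
# `filename` and `op` are populated; the extra fields keep their defaults.
entry = {
    "filename": u"docs/example.rst",  # hypothetical path taken from commit.added_paths
    "op": "A",                        # 'A' added, 'M' changed, 'D' removed

    # extra details, filled in only at the 'full' detail level
    "new_revision": None,
    "old_revision": None,

    "raw_diff": None,
    "stats": {"added": None, "binary": None, "deleted": None},
}
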
@@ -1,484 +1,493 b''

# -*- coding: utf-8 -*-

# Copyright (C) 2014-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
GIT commit module
"""

import re
import stat
from itertools import chain
from StringIO import StringIO

from zope.cachedescriptors.property import Lazy as LazyProperty

from rhodecode.lib.datelib import utcdate_fromtimestamp
from rhodecode.lib.utils import safe_unicode, safe_str
from rhodecode.lib.utils2 import safe_int
from rhodecode.lib.vcs.conf import settings
from rhodecode.lib.vcs.backends import base
from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
from rhodecode.lib.vcs.nodes import (
    FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
    ChangedFileNodesGenerator, AddedFileNodesGenerator,
    RemovedFileNodesGenerator, LargeFileNode)
from rhodecode.lib.vcs.compat import configparser

class GitCommit(base.BaseCommit):
    """
    Represents state of the repository at single commit id.
    """

    _filter_pre_load = [
        # done through a more complex tree walk on parents
        "affected_files",
        # done through subprocess not remote call
        "children",
        # done through a more complex tree walk on parents
        "status",
        # mercurial specific property not supported here
        "_file_paths",
        # mercurial specific property not supported here
        'obsolete',
        # mercurial specific property not supported here
        'phase',
        # mercurial specific property not supported here
        'hidden'
    ]

    def __init__(self, repository, raw_id, idx, pre_load=None):
        self.repository = repository
        self._remote = repository._remote
        # TODO: johbo: Tweak of raw_id should not be necessary
        self.raw_id = safe_str(raw_id)
        self.idx = idx

        self._set_bulk_properties(pre_load)

        # caches
        self._stat_modes = {}  # stat info for paths
        self._paths = {}  # path processed with parse_tree
        self.nodes = {}
        self._submodules = None

    def _set_bulk_properties(self, pre_load):

        if not pre_load:
            return
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        result = self._remote.bulk_request(self.raw_id, pre_load)
        for attr, value in result.items():
            if attr in ["author", "message"]:
                if value:
                    value = safe_unicode(value)
            elif attr == "date":
                value = utcdate_fromtimestamp(*value)
            elif attr == "parents":
                value = self._make_commits(value)
            elif attr == "branch":
                value = value[0] if value else None
            self.__dict__[attr] = value

    @LazyProperty
    def _commit(self):
        return self._remote[self.raw_id]

    @LazyProperty
    def _tree_id(self):
        return self._remote[self._commit['tree']]['id']

    @LazyProperty
    def id(self):
        return self.raw_id

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def message(self):
        return safe_unicode(self._remote.message(self.id))

    @LazyProperty
    def committer(self):
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def author(self):
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def date(self):
        unix_ts, tz = self._remote.date(self.raw_id)
        return utcdate_fromtimestamp(unix_ts, tz)

    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed, deleted files for current commit
        """
        return self.changed, self.added, self.removed

    @LazyProperty
    def tags(self):
        tags = [safe_unicode(name) for name,
                commit_id in self.repository.tags.iteritems()
                if commit_id == self.raw_id]
        return tags

    @LazyProperty
    def commit_branches(self):
        branches = []
        for name, commit_id in self.repository.branches.iteritems():
            if commit_id == self.raw_id:
                branches.append(name)
        return branches

    @LazyProperty
    def branch(self):
        branches = self._remote.branch(self.raw_id)

        if branches:
            # actually commit can have multiple branches in git
            return safe_unicode(branches[0])

    def _get_tree_id_for_path(self, path):
        path = safe_str(path)
        if path in self._paths:
            return self._paths[path]

        tree_id = self._tree_id

        path = path.strip('/')
        if path == '':
            data = [tree_id, "tree"]
            self._paths[''] = data
            return data

        tree_id, tree_type, tree_mode = \
            self._remote.tree_and_type_for_path(self.raw_id, path)
        if tree_id is None:
            raise self.no_node_at_path(path)

        self._paths[path] = [tree_id, tree_type]
        self._stat_modes[path] = tree_mode

        if path not in self._paths:
            raise self.no_node_at_path(path)

        return self._paths[path]

    def _get_kind(self, path):
        tree_id, type_ = self._get_tree_id_for_path(path)
        if type_ == 'blob':
            return NodeKind.FILE
        elif type_ == 'tree':
            return NodeKind.DIR
        elif type_ == 'link':
            return NodeKind.SUBMODULE
        return None

    def _get_filectx(self, path):
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise CommitError(
                "File does not exist for commit %s at '%s'" % (self.raw_id, path))
        return path

    def _get_file_nodes(self):
        return chain(*(t[2] for t in self.walk()))

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parent_ids = self._remote.parents(self.id)
        return self._make_commits(parent_ids)

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """

        children = self._remote.children(self.raw_id)
        return self._make_commits(children)

    def _make_commits(self, commit_ids):
        def commit_maker(_commit_id):
            return self.repository.get_commit(commit_id=commit_id)

        return [commit_maker(commit_id) for commit_id in commit_ids]

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at the given `path`.
        """
        path = safe_str(path)
        # ensure path is traversed
        self._get_tree_id_for_path(path)
        return self._stat_modes[path]

    def is_link(self, path):
        return stat.S_ISLNK(self.get_file_mode(path))

    def is_node_binary(self, path):
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.is_binary(tree_id)

    def get_file_content(self, path):
        """
        Returns content of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_as_pretty_string(tree_id)

    def get_file_content_streamed(self, path):
        tree_id, _ = self._get_tree_id_for_path(path)
        stream_method = getattr(self._remote, 'stream:blob_as_pretty_string')
        return stream_method(tree_id)

    def get_file_size(self, path):
        """
        Returns size of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_raw_length(tree_id)

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of `GitCommit` objects for
        which file at given `path` has been modified.
        """

        path = self._get_filectx(path)
        hist = self._remote.node_history(self.raw_id, path, limit)
        return [
            self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in hist]

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, commit_id, commit lazy loader and line
        """

        result = self._remote.node_annotate(self.raw_id, path)

        for ln_no, commit_id, content in result:
            yield (
                ln_no, commit_id,
                lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
                content)

297 | def get_nodes(self, path): |
|
297 | def get_nodes(self, path): | |
298 |
|
298 | |||
299 | if self._get_kind(path) != NodeKind.DIR: |
|
299 | if self._get_kind(path) != NodeKind.DIR: | |
300 | raise CommitError( |
|
300 | raise CommitError( | |
301 | "Directory does not exist for commit %s at '%s'" % (self.raw_id, path)) |
|
301 | "Directory does not exist for commit %s at '%s'" % (self.raw_id, path)) | |
302 | path = self._fix_path(path) |
|
302 | path = self._fix_path(path) | |
303 |
|
303 | |||
304 | tree_id, _ = self._get_tree_id_for_path(path) |
|
304 | tree_id, _ = self._get_tree_id_for_path(path) | |
305 |
|
305 | |||
306 | dirnodes = [] |
|
306 | dirnodes = [] | |
307 | filenodes = [] |
|
307 | filenodes = [] | |
308 |
|
308 | |||
309 | # extracted tree ID gives us our files... |
|
309 | # extracted tree ID gives us our files... | |
310 | for name, stat_, id_, type_ in self._remote.tree_items(tree_id): |
|
310 | for name, stat_, id_, type_ in self._remote.tree_items(tree_id): | |
311 | if type_ == 'link': |
|
311 | if type_ == 'link': | |
312 | url = self._get_submodule_url('/'.join((path, name))) |
|
312 | url = self._get_submodule_url('/'.join((path, name))) | |
313 | dirnodes.append(SubModuleNode( |
|
313 | dirnodes.append(SubModuleNode( | |
314 | name, url=url, commit=id_, alias=self.repository.alias)) |
|
314 | name, url=url, commit=id_, alias=self.repository.alias)) | |
315 | continue |
|
315 | continue | |
316 |
|
316 | |||
317 | if path != '': |
|
317 | if path != '': | |
318 | obj_path = '/'.join((path, name)) |
|
318 | obj_path = '/'.join((path, name)) | |
319 | else: |
|
319 | else: | |
320 | obj_path = name |
|
320 | obj_path = name | |
321 | if obj_path not in self._stat_modes: |
|
321 | if obj_path not in self._stat_modes: | |
322 | self._stat_modes[obj_path] = stat_ |
|
322 | self._stat_modes[obj_path] = stat_ | |
323 |
|
323 | |||
324 | if type_ == 'tree': |
|
324 | if type_ == 'tree': | |
325 | dirnodes.append(DirNode(obj_path, commit=self)) |
|
325 | dirnodes.append(DirNode(obj_path, commit=self)) | |
326 | elif type_ == 'blob': |
|
326 | elif type_ == 'blob': | |
327 | filenodes.append(FileNode(obj_path, commit=self, mode=stat_)) |
|
327 | filenodes.append(FileNode(obj_path, commit=self, mode=stat_)) | |
328 | else: |
|
328 | else: | |
329 | raise CommitError( |
|
329 | raise CommitError( | |
330 | "Requested object should be Tree or Blob, is %s", type_) |
|
330 | "Requested object should be Tree or Blob, is %s", type_) | |
331 |
|
331 | |||
332 | nodes = dirnodes + filenodes |
|
332 | nodes = dirnodes + filenodes | |
333 | for node in nodes: |
|
333 | for node in nodes: | |
334 | if node.path not in self.nodes: |
|
334 | if node.path not in self.nodes: | |
335 | self.nodes[node.path] = node |
|
335 | self.nodes[node.path] = node | |
336 | nodes.sort() |
|
336 | nodes.sort() | |
337 | return nodes |
|
337 | return nodes | |
338 |
|
338 | |||
339 | def get_node(self, path, pre_load=None): |
|
339 | def get_node(self, path, pre_load=None): | |
340 | if isinstance(path, unicode): |
|
340 | if isinstance(path, unicode): | |
341 | path = path.encode('utf-8') |
|
341 | path = path.encode('utf-8') | |
342 | path = self._fix_path(path) |
|
342 | path = self._fix_path(path) | |
343 | if path not in self.nodes: |
|
343 | if path not in self.nodes: | |
344 | try: |
|
344 | try: | |
345 | tree_id, type_ = self._get_tree_id_for_path(path) |
|
345 | tree_id, type_ = self._get_tree_id_for_path(path) | |
346 | except CommitError: |
|
346 | except CommitError: | |
347 | raise NodeDoesNotExistError( |
|
347 | raise NodeDoesNotExistError( | |
348 | "Cannot find one of parents' directories for a given " |
|
348 | "Cannot find one of parents' directories for a given " | |
349 | "path: %s" % path) |
|
349 | "path: %s" % path) | |
350 |
|
350 | |||
351 | if type_ in ['link', 'commit']: |
|
351 | if type_ in ['link', 'commit']: | |
352 | url = self._get_submodule_url(path) |
|
352 | url = self._get_submodule_url(path) | |
353 | node = SubModuleNode(path, url=url, commit=tree_id, |
|
353 | node = SubModuleNode(path, url=url, commit=tree_id, | |
354 | alias=self.repository.alias) |
|
354 | alias=self.repository.alias) | |
355 | elif type_ == 'tree': |
|
355 | elif type_ == 'tree': | |
356 | if path == '': |
|
356 | if path == '': | |
357 | node = RootNode(commit=self) |
|
357 | node = RootNode(commit=self) | |
358 | else: |
|
358 | else: | |
359 | node = DirNode(path, commit=self) |
|
359 | node = DirNode(path, commit=self) | |
360 | elif type_ == 'blob': |
|
360 | elif type_ == 'blob': | |
361 | node = FileNode(path, commit=self, pre_load=pre_load) |
|
361 | node = FileNode(path, commit=self, pre_load=pre_load) | |
362 | self._stat_modes[path] = node.mode |
|
362 | self._stat_modes[path] = node.mode | |
363 | else: |
|
363 | else: | |
364 | raise self.no_node_at_path(path) |
|
364 | raise self.no_node_at_path(path) | |
365 |
|
365 | |||
366 | # cache node |
|
366 | # cache node | |
367 | self.nodes[path] = node |
|
367 | self.nodes[path] = node | |
368 |
|
368 | |||
369 | return self.nodes[path] |
|
369 | return self.nodes[path] | |
370 |
|
370 | |||
371 | def get_largefile_node(self, path): |
|
371 | def get_largefile_node(self, path): | |
372 | tree_id, _ = self._get_tree_id_for_path(path) |
|
372 | tree_id, _ = self._get_tree_id_for_path(path) | |
373 | pointer_spec = self._remote.is_large_file(tree_id) |
|
373 | pointer_spec = self._remote.is_large_file(tree_id) | |
374 |
|
374 | |||
375 | if pointer_spec: |
|
375 | if pointer_spec: | |
376 | # the content of that file's regular FileNode is the hash of the largefile |
|
376 | # the content of that file's regular FileNode is the hash of the largefile | |
377 | file_id = pointer_spec.get('oid_hash') |
|
377 | file_id = pointer_spec.get('oid_hash') | |
378 | if self._remote.in_largefiles_store(file_id): |
|
378 | if self._remote.in_largefiles_store(file_id): | |
379 | lf_path = self._remote.store_path(file_id) |
|
379 | lf_path = self._remote.store_path(file_id) | |
380 | return LargeFileNode(lf_path, commit=self, org_path=path) |
|
380 | return LargeFileNode(lf_path, commit=self, org_path=path) | |
381 |
|
381 | |||
382 | @LazyProperty |
|
382 | @LazyProperty | |
383 | def affected_files(self): |
|
383 | def affected_files(self): | |
384 | """ |
|
384 | """ | |
385 | Gets fast-accessible file changes for the given commit |
|
385 | Gets fast-accessible file changes for the given commit | |
386 | """ |
|
386 | """ | |
387 | added, modified, deleted = self._changes_cache |
|
387 | added, modified, deleted = self._changes_cache | |
388 | return list(added.union(modified).union(deleted)) |
|
388 | return list(added.union(modified).union(deleted)) | |
389 |
|
389 | |||
390 | @LazyProperty |
|
390 | @LazyProperty | |
391 | def _changes_cache(self): |
|
391 | def _changes_cache(self): | |
392 | added = set() |
|
392 | added = set() | |
393 | modified = set() |
|
393 | modified = set() | |
394 | deleted = set() |
|
394 | deleted = set() | |
395 | _r = self._remote |
|
395 | _r = self._remote | |
396 |
|
396 | |||
397 | parents = self.parents |
|
397 | parents = self.parents | |
398 | if not self.parents: |
|
398 | if not self.parents: | |
399 | parents = [base.EmptyCommit()] |
|
399 | parents = [base.EmptyCommit()] | |
400 | for parent in parents: |
|
400 | for parent in parents: | |
401 | if isinstance(parent, base.EmptyCommit): |
|
401 | if isinstance(parent, base.EmptyCommit): | |
402 | oid = None |
|
402 | oid = None | |
403 | else: |
|
403 | else: | |
404 | oid = parent.raw_id |
|
404 | oid = parent.raw_id | |
405 | changes = _r.tree_changes(oid, self.raw_id) |
|
405 | changes = _r.tree_changes(oid, self.raw_id) | |
406 | for (oldpath, newpath), (_, _), (_, _) in changes: |
|
406 | for (oldpath, newpath), (_, _), (_, _) in changes: | |
407 | if newpath and oldpath: |
|
407 | if newpath and oldpath: | |
408 | modified.add(newpath) |
|
408 | modified.add(newpath) | |
409 | elif newpath and not oldpath: |
|
409 | elif newpath and not oldpath: | |
410 | added.add(newpath) |
|
410 | added.add(newpath) | |
411 | elif not newpath and oldpath: |
|
411 | elif not newpath and oldpath: | |
412 | deleted.add(oldpath) |
|
412 | deleted.add(oldpath) | |
413 | return added, modified, deleted |
|
413 | return added, modified, deleted | |
414 |
|
414 | |||
415 | def _get_paths_for_status(self, status): |
|
415 | def _get_paths_for_status(self, status): | |
416 | """ |
|
416 | """ | |
417 | Returns sorted list of paths for given ``status``. |
|
417 | Returns sorted list of paths for given ``status``. | |
418 |
|
418 | |||
419 | :param status: one of: *added*, *modified* or *deleted* |
|
419 | :param status: one of: *added*, *modified* or *deleted* | |
420 | """ |
|
420 | """ | |
421 | added, modified, deleted = self._changes_cache |
|
421 | added, modified, deleted = self._changes_cache | |
422 | return sorted({ |
|
422 | return sorted({ | |
423 | 'added': list(added), |
|
423 | 'added': list(added), | |
424 | 'modified': list(modified), |
|
424 | 'modified': list(modified), | |
425 | 'deleted': list(deleted)}[status] |
|
425 | 'deleted': list(deleted)}[status] | |
426 | ) |
|
426 | ) | |
427 |
|
427 | |||
428 | @LazyProperty |
|
428 | @LazyProperty | |
429 | def added(self): |
|
429 | def added(self): | |
430 | """ |
|
430 | """ | |
431 | Returns list of added ``FileNode`` objects. |
|
431 | Returns list of added ``FileNode`` objects. | |
432 | """ |
|
432 | """ | |
433 | if not self.parents: |
|
433 | if not self.parents: | |
434 | return list(self._get_file_nodes()) |
|
434 | return list(self._get_file_nodes()) | |
435 | return AddedFileNodesGenerator( |
|
435 | return AddedFileNodesGenerator(self.added_paths, self) | |
436 | [n for n in self._get_paths_for_status('added')], self) |
|
436 | ||
|
437 | @LazyProperty | |||
|
438 | def added_paths(self): | |||
|
439 | return [n for n in self._get_paths_for_status('added')] | |||
437 |
|
440 | |||
438 | @LazyProperty |
|
441 | @LazyProperty | |
439 | def changed(self): |
|
442 | def changed(self): | |
440 | """ |
|
443 | """ | |
441 | Returns list of modified ``FileNode`` objects. |
|
444 | Returns list of modified ``FileNode`` objects. | |
442 | """ |
|
445 | """ | |
443 | if not self.parents: |
|
446 | if not self.parents: | |
444 | return [] |
|
447 | return [] | |
445 | return ChangedFileNodesGenerator( |
|
448 | return ChangedFileNodesGenerator(self.changed_paths, self) | |
446 | [n for n in self._get_paths_for_status('modified')], self) |
|
449 | ||
|
450 | @LazyProperty | |||
|
451 | def changed_paths(self): | |||
|
452 | return [n for n in self._get_paths_for_status('modified')] | |||
447 |
|
453 | |||
448 | @LazyProperty |
|
454 | @LazyProperty | |
449 | def removed(self): |
|
455 | def removed(self): | |
450 | """ |
|
456 | """ | |
451 | Returns list of removed ``FileNode`` objects. |
|
457 | Returns list of removed ``FileNode`` objects. | |
452 | """ |
|
458 | """ | |
453 | if not self.parents: |
|
459 | if not self.parents: | |
454 | return [] |
|
460 | return [] | |
455 | return RemovedFileNodesGenerator( |
|
461 | return RemovedFileNodesGenerator(self.removed_paths, self) | |
456 | [n for n in self._get_paths_for_status('deleted')], self) |
|
462 | ||
|
463 | @LazyProperty | |||
|
464 | def removed_paths(self): | |||
|
465 | return [n for n in self._get_paths_for_status('deleted')] | |||
457 |
|
466 | |||
458 | def _get_submodule_url(self, submodule_path): |
|
467 | def _get_submodule_url(self, submodule_path): | |
459 | git_modules_path = '.gitmodules' |
|
468 | git_modules_path = '.gitmodules' | |
460 |
|
469 | |||
461 | if self._submodules is None: |
|
470 | if self._submodules is None: | |
462 | self._submodules = {} |
|
471 | self._submodules = {} | |
463 |
|
472 | |||
464 | try: |
|
473 | try: | |
465 | submodules_node = self.get_node(git_modules_path) |
|
474 | submodules_node = self.get_node(git_modules_path) | |
466 | except NodeDoesNotExistError: |
|
475 | except NodeDoesNotExistError: | |
467 | return None |
|
476 | return None | |
468 |
|
477 | |||
469 | # ConfigParser fails if there is whitespace; it also needs an iterable, |
|
478 | # ConfigParser fails if there is whitespace; it also needs an iterable, | |
470 | # file-like content |
|
479 | # file-like content | |
471 | def iter_content(_content): |
|
480 | def iter_content(_content): | |
472 | for line in _content.splitlines(): |
|
481 | for line in _content.splitlines(): | |
473 | yield line |
|
482 | yield line | |
474 |
|
483 | |||
475 | parser = configparser.RawConfigParser() |
|
484 | parser = configparser.RawConfigParser() | |
476 | parser.read_file(iter_content(submodules_node.content)) |
|
485 | parser.read_file(iter_content(submodules_node.content)) | |
477 |
|
486 | |||
478 | for section in parser.sections(): |
|
487 | for section in parser.sections(): | |
479 | path = parser.get(section, 'path') |
|
488 | path = parser.get(section, 'path') | |
480 | url = parser.get(section, 'url') |
|
489 | url = parser.get(section, 'url') | |
481 | if path and url: |
|
490 | if path and url: | |
482 | self._submodules[path.strip('/')] = url |
|
491 | self._submodules[path.strip('/')] = url | |
483 |
|
492 | |||
484 | return self._submodules.get(submodule_path.strip('/')) |
|
493 | return self._submodules.get(submodule_path.strip('/')) |
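The pattern introduced in this hunk moves the per-status path list out of the generator call and into cached properties (added_paths, changed_paths, removed_paths), so the paths are computed once and the node generators receive a plain list. A rough, standalone sketch of that caching idea follows; lazy_property and StubCommit are illustrative stand-ins, not part of the RhodeCode API.

class lazy_property(object):
    """Cache-on-first-access descriptor, similar in spirit to LazyProperty."""
    def __init__(self, func):
        self.func = func

    def __get__(self, obj, owner):
        if obj is None:
            return self
        value = self.func(obj)
        # replace the descriptor with the computed value on the instance
        obj.__dict__[self.func.__name__] = value
        return value


class StubCommit(object):
    """Stand-in commit exposing added/modified/deleted path sets."""
    def __init__(self, added, modified, deleted):
        self._changes_cache = (set(added), set(modified), set(deleted))

    def _get_paths_for_status(self, status):
        added, modified, deleted = self._changes_cache
        return sorted(
            {'added': added, 'modified': modified, 'deleted': deleted}[status])

    @lazy_property
    def added_paths(self):
        # computed once, then served from the instance __dict__
        return self._get_paths_for_status('added')


commit = StubCommit(added={'docs/new.rst'}, modified={'setup.py'}, deleted=set())
print(commit.added_paths)  # ['docs/new.rst']
print(commit.added_paths)  # second access hits the cached value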
@@ -1,389 +1,401 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2014-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2014-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | HG commit module |
|
22 | HG commit module | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import os |
|
25 | import os | |
26 |
|
26 | |||
27 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
27 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
28 |
|
28 | |||
29 | from rhodecode.lib.datelib import utcdate_fromtimestamp |
|
29 | from rhodecode.lib.datelib import utcdate_fromtimestamp | |
30 | from rhodecode.lib.utils import safe_str, safe_unicode |
|
30 | from rhodecode.lib.utils import safe_str, safe_unicode | |
31 | from rhodecode.lib.vcs import path as vcspath |
|
31 | from rhodecode.lib.vcs import path as vcspath | |
32 | from rhodecode.lib.vcs.backends import base |
|
32 | from rhodecode.lib.vcs.backends import base | |
33 | from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff |
|
33 | from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff | |
34 | from rhodecode.lib.vcs.exceptions import CommitError |
|
34 | from rhodecode.lib.vcs.exceptions import CommitError | |
35 | from rhodecode.lib.vcs.nodes import ( |
|
35 | from rhodecode.lib.vcs.nodes import ( | |
36 | AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, |
|
36 | AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, | |
37 | NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode, |
|
37 | NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode, | |
38 | LargeFileNode, LARGEFILE_PREFIX) |
|
38 | LargeFileNode, LARGEFILE_PREFIX) | |
39 | from rhodecode.lib.vcs.utils.paths import get_dirs_for_path |
|
39 | from rhodecode.lib.vcs.utils.paths import get_dirs_for_path | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | class MercurialCommit(base.BaseCommit): |
|
42 | class MercurialCommit(base.BaseCommit): | |
43 | """ |
|
43 | """ | |
44 | Represents the state of the repository at a single commit. |
|
44 | Represents the state of the repository at a single commit. | |
45 | """ |
|
45 | """ | |
46 |
|
46 | |||
47 | _filter_pre_load = [ |
|
47 | _filter_pre_load = [ | |
48 | # git specific property not supported here |
|
48 | # git specific property not supported here | |
49 | "_commit", |
|
49 | "_commit", | |
50 | ] |
|
50 | ] | |
51 |
|
51 | |||
52 | def __init__(self, repository, raw_id, idx, pre_load=None): |
|
52 | def __init__(self, repository, raw_id, idx, pre_load=None): | |
53 | raw_id = safe_str(raw_id) |
|
53 | raw_id = safe_str(raw_id) | |
54 |
|
54 | |||
55 | self.repository = repository |
|
55 | self.repository = repository | |
56 | self._remote = repository._remote |
|
56 | self._remote = repository._remote | |
57 |
|
57 | |||
58 | self.raw_id = raw_id |
|
58 | self.raw_id = raw_id | |
59 | self.idx = idx |
|
59 | self.idx = idx | |
60 |
|
60 | |||
61 | self._set_bulk_properties(pre_load) |
|
61 | self._set_bulk_properties(pre_load) | |
62 |
|
62 | |||
63 | # caches |
|
63 | # caches | |
64 | self.nodes = {} |
|
64 | self.nodes = {} | |
65 |
|
65 | |||
66 | def _set_bulk_properties(self, pre_load): |
|
66 | def _set_bulk_properties(self, pre_load): | |
67 | if not pre_load: |
|
67 | if not pre_load: | |
68 | return |
|
68 | return | |
69 | pre_load = [entry for entry in pre_load |
|
69 | pre_load = [entry for entry in pre_load | |
70 | if entry not in self._filter_pre_load] |
|
70 | if entry not in self._filter_pre_load] | |
71 | if not pre_load: |
|
71 | if not pre_load: | |
72 | return |
|
72 | return | |
73 |
|
73 | |||
74 | result = self._remote.bulk_request(self.raw_id, pre_load) |
|
74 | result = self._remote.bulk_request(self.raw_id, pre_load) | |
75 | for attr, value in result.items(): |
|
75 | for attr, value in result.items(): | |
76 | if attr in ["author", "branch", "message"]: |
|
76 | if attr in ["author", "branch", "message"]: | |
77 | value = safe_unicode(value) |
|
77 | value = safe_unicode(value) | |
78 | elif attr == "affected_files": |
|
78 | elif attr == "affected_files": | |
79 | value = map(safe_unicode, value) |
|
79 | value = map(safe_unicode, value) | |
80 | elif attr == "date": |
|
80 | elif attr == "date": | |
81 | value = utcdate_fromtimestamp(*value) |
|
81 | value = utcdate_fromtimestamp(*value) | |
82 | elif attr in ["children", "parents"]: |
|
82 | elif attr in ["children", "parents"]: | |
83 | value = self._make_commits(value) |
|
83 | value = self._make_commits(value) | |
84 | elif attr in ["phase"]: |
|
84 | elif attr in ["phase"]: | |
85 | value = self._get_phase_text(value) |
|
85 | value = self._get_phase_text(value) | |
86 | self.__dict__[attr] = value |
|
86 | self.__dict__[attr] = value | |
87 |
|
87 | |||
88 | @LazyProperty |
|
88 | @LazyProperty | |
89 | def tags(self): |
|
89 | def tags(self): | |
90 | tags = [name for name, commit_id in self.repository.tags.iteritems() |
|
90 | tags = [name for name, commit_id in self.repository.tags.iteritems() | |
91 | if commit_id == self.raw_id] |
|
91 | if commit_id == self.raw_id] | |
92 | return tags |
|
92 | return tags | |
93 |
|
93 | |||
94 | @LazyProperty |
|
94 | @LazyProperty | |
95 | def branch(self): |
|
95 | def branch(self): | |
96 | return safe_unicode(self._remote.ctx_branch(self.raw_id)) |
|
96 | return safe_unicode(self._remote.ctx_branch(self.raw_id)) | |
97 |
|
97 | |||
98 | @LazyProperty |
|
98 | @LazyProperty | |
99 | def bookmarks(self): |
|
99 | def bookmarks(self): | |
100 | bookmarks = [ |
|
100 | bookmarks = [ | |
101 | name for name, commit_id in self.repository.bookmarks.iteritems() |
|
101 | name for name, commit_id in self.repository.bookmarks.iteritems() | |
102 | if commit_id == self.raw_id] |
|
102 | if commit_id == self.raw_id] | |
103 | return bookmarks |
|
103 | return bookmarks | |
104 |
|
104 | |||
105 | @LazyProperty |
|
105 | @LazyProperty | |
106 | def message(self): |
|
106 | def message(self): | |
107 | return safe_unicode(self._remote.ctx_description(self.raw_id)) |
|
107 | return safe_unicode(self._remote.ctx_description(self.raw_id)) | |
108 |
|
108 | |||
109 | @LazyProperty |
|
109 | @LazyProperty | |
110 | def committer(self): |
|
110 | def committer(self): | |
111 | return safe_unicode(self.author) |
|
111 | return safe_unicode(self.author) | |
112 |
|
112 | |||
113 | @LazyProperty |
|
113 | @LazyProperty | |
114 | def author(self): |
|
114 | def author(self): | |
115 | return safe_unicode(self._remote.ctx_user(self.raw_id)) |
|
115 | return safe_unicode(self._remote.ctx_user(self.raw_id)) | |
116 |
|
116 | |||
117 | @LazyProperty |
|
117 | @LazyProperty | |
118 | def date(self): |
|
118 | def date(self): | |
119 | return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id)) |
|
119 | return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id)) | |
120 |
|
120 | |||
121 | @LazyProperty |
|
121 | @LazyProperty | |
122 | def status(self): |
|
122 | def status(self): | |
123 | """ |
|
123 | """ | |
124 | Returns modified, added, removed, deleted files for current commit |
|
124 | Returns modified, added, removed, deleted files for current commit | |
125 | """ |
|
125 | """ | |
126 | return self._remote.ctx_status(self.raw_id) |
|
126 | return self._remote.ctx_status(self.raw_id) | |
127 |
|
127 | |||
128 | @LazyProperty |
|
128 | @LazyProperty | |
129 | def _file_paths(self): |
|
129 | def _file_paths(self): | |
130 | return self._remote.ctx_list(self.raw_id) |
|
130 | return self._remote.ctx_list(self.raw_id) | |
131 |
|
131 | |||
132 | @LazyProperty |
|
132 | @LazyProperty | |
133 | def _dir_paths(self): |
|
133 | def _dir_paths(self): | |
134 | p = list(set(get_dirs_for_path(*self._file_paths))) |
|
134 | p = list(set(get_dirs_for_path(*self._file_paths))) | |
135 | p.insert(0, '') |
|
135 | p.insert(0, '') | |
136 | return p |
|
136 | return p | |
137 |
|
137 | |||
138 | @LazyProperty |
|
138 | @LazyProperty | |
139 | def _paths(self): |
|
139 | def _paths(self): | |
140 | return self._dir_paths + self._file_paths |
|
140 | return self._dir_paths + self._file_paths | |
141 |
|
141 | |||
142 | @LazyProperty |
|
142 | @LazyProperty | |
143 | def id(self): |
|
143 | def id(self): | |
144 | if self.last: |
|
144 | if self.last: | |
145 | return u'tip' |
|
145 | return u'tip' | |
146 | return self.short_id |
|
146 | return self.short_id | |
147 |
|
147 | |||
148 | @LazyProperty |
|
148 | @LazyProperty | |
149 | def short_id(self): |
|
149 | def short_id(self): | |
150 | return self.raw_id[:12] |
|
150 | return self.raw_id[:12] | |
151 |
|
151 | |||
152 | def _make_commits(self, commit_ids, pre_load=None): |
|
152 | def _make_commits(self, commit_ids, pre_load=None): | |
153 | return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) |
|
153 | return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) | |
154 | for commit_id in commit_ids] |
|
154 | for commit_id in commit_ids] | |
155 |
|
155 | |||
156 | @LazyProperty |
|
156 | @LazyProperty | |
157 | def parents(self): |
|
157 | def parents(self): | |
158 | """ |
|
158 | """ | |
159 | Returns list of parent commits. |
|
159 | Returns list of parent commits. | |
160 | """ |
|
160 | """ | |
161 | parents = self._remote.ctx_parents(self.raw_id) |
|
161 | parents = self._remote.ctx_parents(self.raw_id) | |
162 | return self._make_commits(parents) |
|
162 | return self._make_commits(parents) | |
163 |
|
163 | |||
164 | def _get_phase_text(self, phase_id): |
|
164 | def _get_phase_text(self, phase_id): | |
165 | return { |
|
165 | return { | |
166 | 0: 'public', |
|
166 | 0: 'public', | |
167 | 1: 'draft', |
|
167 | 1: 'draft', | |
168 | 2: 'secret', |
|
168 | 2: 'secret', | |
169 | }.get(phase_id) or '' |
|
169 | }.get(phase_id) or '' | |
170 |
|
170 | |||
171 | @LazyProperty |
|
171 | @LazyProperty | |
172 | def phase(self): |
|
172 | def phase(self): | |
173 | phase_id = self._remote.ctx_phase(self.raw_id) |
|
173 | phase_id = self._remote.ctx_phase(self.raw_id) | |
174 | phase_text = self._get_phase_text(phase_id) |
|
174 | phase_text = self._get_phase_text(phase_id) | |
175 |
|
175 | |||
176 | return safe_unicode(phase_text) |
|
176 | return safe_unicode(phase_text) | |
177 |
|
177 | |||
178 | @LazyProperty |
|
178 | @LazyProperty | |
179 | def obsolete(self): |
|
179 | def obsolete(self): | |
180 | obsolete = self._remote.ctx_obsolete(self.raw_id) |
|
180 | obsolete = self._remote.ctx_obsolete(self.raw_id) | |
181 | return obsolete |
|
181 | return obsolete | |
182 |
|
182 | |||
183 | @LazyProperty |
|
183 | @LazyProperty | |
184 | def hidden(self): |
|
184 | def hidden(self): | |
185 | hidden = self._remote.ctx_hidden(self.raw_id) |
|
185 | hidden = self._remote.ctx_hidden(self.raw_id) | |
186 | return hidden |
|
186 | return hidden | |
187 |
|
187 | |||
188 | @LazyProperty |
|
188 | @LazyProperty | |
189 | def children(self): |
|
189 | def children(self): | |
190 | """ |
|
190 | """ | |
191 | Returns list of child commits. |
|
191 | Returns list of child commits. | |
192 | """ |
|
192 | """ | |
193 | children = self._remote.ctx_children(self.raw_id) |
|
193 | children = self._remote.ctx_children(self.raw_id) | |
194 | return self._make_commits(children) |
|
194 | return self._make_commits(children) | |
195 |
|
195 | |||
196 | def _fix_path(self, path): |
|
196 | def _fix_path(self, path): | |
197 | """ |
|
197 | """ | |
198 | Mercurial keeps filenodes as str so we need to encode from unicode |
|
198 | Mercurial keeps filenodes as str so we need to encode from unicode | |
199 | to str. |
|
199 | to str. | |
200 | """ |
|
200 | """ | |
201 | return safe_str(super(MercurialCommit, self)._fix_path(path)) |
|
201 | return safe_str(super(MercurialCommit, self)._fix_path(path)) | |
202 |
|
202 | |||
203 | def _get_kind(self, path): |
|
203 | def _get_kind(self, path): | |
204 | path = self._fix_path(path) |
|
204 | path = self._fix_path(path) | |
205 | if path in self._file_paths: |
|
205 | if path in self._file_paths: | |
206 | return NodeKind.FILE |
|
206 | return NodeKind.FILE | |
207 | elif path in self._dir_paths: |
|
207 | elif path in self._dir_paths: | |
208 | return NodeKind.DIR |
|
208 | return NodeKind.DIR | |
209 | else: |
|
209 | else: | |
210 | raise CommitError( |
|
210 | raise CommitError( | |
211 | "Node does not exist at the given path '%s'" % (path, )) |
|
211 | "Node does not exist at the given path '%s'" % (path, )) | |
212 |
|
212 | |||
213 | def _get_filectx(self, path): |
|
213 | def _get_filectx(self, path): | |
214 | path = self._fix_path(path) |
|
214 | path = self._fix_path(path) | |
215 | if self._get_kind(path) != NodeKind.FILE: |
|
215 | if self._get_kind(path) != NodeKind.FILE: | |
216 | raise CommitError( |
|
216 | raise CommitError( | |
217 | "File does not exist for idx %s at '%s'" % (self.raw_id, path)) |
|
217 | "File does not exist for idx %s at '%s'" % (self.raw_id, path)) | |
218 | return path |
|
218 | return path | |
219 |
|
219 | |||
220 | def get_file_mode(self, path): |
|
220 | def get_file_mode(self, path): | |
221 | """ |
|
221 | """ | |
222 | Returns stat mode of the file at the given ``path``. |
|
222 | Returns stat mode of the file at the given ``path``. | |
223 | """ |
|
223 | """ | |
224 | path = self._get_filectx(path) |
|
224 | path = self._get_filectx(path) | |
225 | if 'x' in self._remote.fctx_flags(self.raw_id, path): |
|
225 | if 'x' in self._remote.fctx_flags(self.raw_id, path): | |
226 | return base.FILEMODE_EXECUTABLE |
|
226 | return base.FILEMODE_EXECUTABLE | |
227 | else: |
|
227 | else: | |
228 | return base.FILEMODE_DEFAULT |
|
228 | return base.FILEMODE_DEFAULT | |
229 |
|
229 | |||
230 | def is_link(self, path): |
|
230 | def is_link(self, path): | |
231 | path = self._get_filectx(path) |
|
231 | path = self._get_filectx(path) | |
232 | return 'l' in self._remote.fctx_flags(self.raw_id, path) |
|
232 | return 'l' in self._remote.fctx_flags(self.raw_id, path) | |
233 |
|
233 | |||
234 | def is_node_binary(self, path): |
|
234 | def is_node_binary(self, path): | |
235 | path = self._get_filectx(path) |
|
235 | path = self._get_filectx(path) | |
236 | return self._remote.is_binary(self.raw_id, path) |
|
236 | return self._remote.is_binary(self.raw_id, path) | |
237 |
|
237 | |||
238 | def get_file_content(self, path): |
|
238 | def get_file_content(self, path): | |
239 | """ |
|
239 | """ | |
240 | Returns content of the file at given ``path``. |
|
240 | Returns content of the file at given ``path``. | |
241 | """ |
|
241 | """ | |
242 | path = self._get_filectx(path) |
|
242 | path = self._get_filectx(path) | |
243 | return self._remote.fctx_node_data(self.raw_id, path) |
|
243 | return self._remote.fctx_node_data(self.raw_id, path) | |
244 |
|
244 | |||
245 | def get_file_content_streamed(self, path): |
|
245 | def get_file_content_streamed(self, path): | |
246 | path = self._get_filectx(path) |
|
246 | path = self._get_filectx(path) | |
247 | stream_method = getattr(self._remote, 'stream:fctx_node_data') |
|
247 | stream_method = getattr(self._remote, 'stream:fctx_node_data') | |
248 | return stream_method(self.raw_id, path) |
|
248 | return stream_method(self.raw_id, path) | |
249 |
|
249 | |||
250 | def get_file_size(self, path): |
|
250 | def get_file_size(self, path): | |
251 | """ |
|
251 | """ | |
252 | Returns size of the file at given ``path``. |
|
252 | Returns size of the file at given ``path``. | |
253 | """ |
|
253 | """ | |
254 | path = self._get_filectx(path) |
|
254 | path = self._get_filectx(path) | |
255 | return self._remote.fctx_size(self.raw_id, path) |
|
255 | return self._remote.fctx_size(self.raw_id, path) | |
256 |
|
256 | |||
257 | def get_path_history(self, path, limit=None, pre_load=None): |
|
257 | def get_path_history(self, path, limit=None, pre_load=None): | |
258 | """ |
|
258 | """ | |
259 | Returns the history of a file as a reversed list of `MercurialCommit` objects |
|
259 | Returns the history of a file as a reversed list of `MercurialCommit` objects | |
260 | in which the file at the given ``path`` was modified. |
|
260 | in which the file at the given ``path`` was modified. | |
261 | """ |
|
261 | """ | |
262 | path = self._get_filectx(path) |
|
262 | path = self._get_filectx(path) | |
263 | hist = self._remote.node_history(self.raw_id, path, limit) |
|
263 | hist = self._remote.node_history(self.raw_id, path, limit) | |
264 | return [ |
|
264 | return [ | |
265 | self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) |
|
265 | self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) | |
266 | for commit_id in hist] |
|
266 | for commit_id in hist] | |
267 |
|
267 | |||
268 | def get_file_annotate(self, path, pre_load=None): |
|
268 | def get_file_annotate(self, path, pre_load=None): | |
269 | """ |
|
269 | """ | |
270 | Returns a generator of four-element tuples with |
|
270 | Returns a generator of four-element tuples with | |
271 | lineno, commit_id, a commit lazy loader and the line content |
|
271 | lineno, commit_id, a commit lazy loader and the line content | |
272 | """ |
|
272 | """ | |
273 | result = self._remote.fctx_annotate(self.raw_id, path) |
|
273 | result = self._remote.fctx_annotate(self.raw_id, path) | |
274 |
|
274 | |||
275 | for ln_no, commit_id, content in result: |
|
275 | for ln_no, commit_id, content in result: | |
276 | yield ( |
|
276 | yield ( | |
277 | ln_no, commit_id, |
|
277 | ln_no, commit_id, | |
278 | lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load), |
|
278 | lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load), | |
279 | content) |
|
279 | content) | |
280 |
|
280 | |||
281 | def get_nodes(self, path): |
|
281 | def get_nodes(self, path): | |
282 | """ |
|
282 | """ | |
283 | Returns a combined list of ``DirNode`` and ``FileNode`` objects representing |
|
283 | Returns a combined list of ``DirNode`` and ``FileNode`` objects representing | |
284 | the state of the commit at the given ``path``. If the node at the given ``path`` |
|
284 | the state of the commit at the given ``path``. If the node at the given ``path`` | |
285 | is not an instance of ``DirNode``, a CommitError is raised. |
|
285 | is not an instance of ``DirNode``, a CommitError is raised. | |
286 | """ |
|
286 | """ | |
287 |
|
287 | |||
288 | if self._get_kind(path) != NodeKind.DIR: |
|
288 | if self._get_kind(path) != NodeKind.DIR: | |
289 | raise CommitError( |
|
289 | raise CommitError( | |
290 | "Directory does not exist for idx %s at '%s'" % (self.raw_id, path)) |
|
290 | "Directory does not exist for idx %s at '%s'" % (self.raw_id, path)) | |
291 | path = self._fix_path(path) |
|
291 | path = self._fix_path(path) | |
292 |
|
292 | |||
293 | filenodes = [ |
|
293 | filenodes = [ | |
294 | FileNode(f, commit=self) for f in self._file_paths |
|
294 | FileNode(f, commit=self) for f in self._file_paths | |
295 | if os.path.dirname(f) == path] |
|
295 | if os.path.dirname(f) == path] | |
296 | # TODO: johbo: Check if this can be done in a more obvious way |
|
296 | # TODO: johbo: Check if this can be done in a more obvious way | |
297 | dirs = path == '' and '' or [ |
|
297 | dirs = path == '' and '' or [ | |
298 | d for d in self._dir_paths |
|
298 | d for d in self._dir_paths | |
299 | if d and vcspath.dirname(d) == path] |
|
299 | if d and vcspath.dirname(d) == path] | |
300 | dirnodes = [ |
|
300 | dirnodes = [ | |
301 | DirNode(d, commit=self) for d in dirs |
|
301 | DirNode(d, commit=self) for d in dirs | |
302 | if os.path.dirname(d) == path] |
|
302 | if os.path.dirname(d) == path] | |
303 |
|
303 | |||
304 | alias = self.repository.alias |
|
304 | alias = self.repository.alias | |
305 | for k, vals in self._submodules.iteritems(): |
|
305 | for k, vals in self._submodules.iteritems(): | |
306 | if vcspath.dirname(k) == path: |
|
306 | if vcspath.dirname(k) == path: | |
307 | loc = vals[0] |
|
307 | loc = vals[0] | |
308 | commit = vals[1] |
|
308 | commit = vals[1] | |
309 | dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias)) |
|
309 | dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias)) | |
310 |
|
310 | |||
311 | nodes = dirnodes + filenodes |
|
311 | nodes = dirnodes + filenodes | |
312 | for node in nodes: |
|
312 | for node in nodes: | |
313 | if node.path not in self.nodes: |
|
313 | if node.path not in self.nodes: | |
314 | self.nodes[node.path] = node |
|
314 | self.nodes[node.path] = node | |
315 | nodes.sort() |
|
315 | nodes.sort() | |
316 |
|
316 | |||
317 | return nodes |
|
317 | return nodes | |
318 |
|
318 | |||
319 | def get_node(self, path, pre_load=None): |
|
319 | def get_node(self, path, pre_load=None): | |
320 | """ |
|
320 | """ | |
321 | Returns `Node` object from the given `path`. If there is no node at |
|
321 | Returns `Node` object from the given `path`. If there is no node at | |
322 | the given `path`, `NodeDoesNotExistError` would be raised. |
|
322 | the given `path`, `NodeDoesNotExistError` would be raised. | |
323 | """ |
|
323 | """ | |
324 | path = self._fix_path(path) |
|
324 | path = self._fix_path(path) | |
325 |
|
325 | |||
326 | if path not in self.nodes: |
|
326 | if path not in self.nodes: | |
327 | if path in self._file_paths: |
|
327 | if path in self._file_paths: | |
328 | node = FileNode(path, commit=self, pre_load=pre_load) |
|
328 | node = FileNode(path, commit=self, pre_load=pre_load) | |
329 | elif path in self._dir_paths: |
|
329 | elif path in self._dir_paths: | |
330 | if path == '': |
|
330 | if path == '': | |
331 | node = RootNode(commit=self) |
|
331 | node = RootNode(commit=self) | |
332 | else: |
|
332 | else: | |
333 | node = DirNode(path, commit=self) |
|
333 | node = DirNode(path, commit=self) | |
334 | else: |
|
334 | else: | |
335 | raise self.no_node_at_path(path) |
|
335 | raise self.no_node_at_path(path) | |
336 |
|
336 | |||
337 | # cache node |
|
337 | # cache node | |
338 | self.nodes[path] = node |
|
338 | self.nodes[path] = node | |
339 | return self.nodes[path] |
|
339 | return self.nodes[path] | |
340 |
|
340 | |||
341 | def get_largefile_node(self, path): |
|
341 | def get_largefile_node(self, path): | |
342 | pointer_spec = self._remote.is_large_file(self.raw_id, path) |
|
342 | pointer_spec = self._remote.is_large_file(self.raw_id, path) | |
343 | if pointer_spec: |
|
343 | if pointer_spec: | |
344 | # the content of that file's regular FileNode is the hash of the largefile |
|
344 | # the content of that file's regular FileNode is the hash of the largefile | |
345 | file_id = self.get_file_content(path).strip() |
|
345 | file_id = self.get_file_content(path).strip() | |
346 |
|
346 | |||
347 | if self._remote.in_largefiles_store(file_id): |
|
347 | if self._remote.in_largefiles_store(file_id): | |
348 | lf_path = self._remote.store_path(file_id) |
|
348 | lf_path = self._remote.store_path(file_id) | |
349 | return LargeFileNode(lf_path, commit=self, org_path=path) |
|
349 | return LargeFileNode(lf_path, commit=self, org_path=path) | |
350 | elif self._remote.in_user_cache(file_id): |
|
350 | elif self._remote.in_user_cache(file_id): | |
351 | lf_path = self._remote.store_path(file_id) |
|
351 | lf_path = self._remote.store_path(file_id) | |
352 | self._remote.link(file_id, path) |
|
352 | self._remote.link(file_id, path) | |
353 | return LargeFileNode(lf_path, commit=self, org_path=path) |
|
353 | return LargeFileNode(lf_path, commit=self, org_path=path) | |
354 |
|
354 | |||
355 | @LazyProperty |
|
355 | @LazyProperty | |
356 | def _submodules(self): |
|
356 | def _submodules(self): | |
357 | """ |
|
357 | """ | |
358 | Returns a dictionary with submodule information from substate file |
|
358 | Returns a dictionary with submodule information from substate file | |
359 | of hg repository. |
|
359 | of hg repository. | |
360 | """ |
|
360 | """ | |
361 | return self._remote.ctx_substate(self.raw_id) |
|
361 | return self._remote.ctx_substate(self.raw_id) | |
362 |
|
362 | |||
363 | @LazyProperty |
|
363 | @LazyProperty | |
364 | def affected_files(self): |
|
364 | def affected_files(self): | |
365 | """ |
|
365 | """ | |
366 | Gets fast-accessible file changes for the given commit |
|
366 | Gets fast-accessible file changes for the given commit | |
367 | """ |
|
367 | """ | |
368 | return self._remote.ctx_files(self.raw_id) |
|
368 | return self._remote.ctx_files(self.raw_id) | |
369 |
|
369 | |||
370 | @property |
|
370 | @property | |
371 | def added(self): |
|
371 | def added(self): | |
372 | """ |
|
372 | """ | |
373 | Returns list of added ``FileNode`` objects. |
|
373 | Returns list of added ``FileNode`` objects. | |
374 | """ |
|
374 | """ | |
375 | return AddedFileNodesGenerator([n for n in self.status[1]], self) |
|
375 | return AddedFileNodesGenerator(self.added_paths, self) | |
|
376 | ||||
|
377 | @LazyProperty | |||
|
378 | def added_paths(self): | |||
|
379 | return [n for n in self.status[1]] | |||
376 |
|
380 | |||
377 | @property |
|
381 | @property | |
378 | def changed(self): |
|
382 | def changed(self): | |
379 | """ |
|
383 | """ | |
380 | Returns list of modified ``FileNode`` objects. |
|
384 | Returns list of modified ``FileNode`` objects. | |
381 | """ |
|
385 | """ | |
382 | return ChangedFileNodesGenerator([n for n in self.status[0]], self) |
|
386 | return ChangedFileNodesGenerator(self.changed_paths, self) | |
|
387 | ||||
|
388 | @LazyProperty | |||
|
389 | def changed_paths(self): | |||
|
390 | return [n for n in self.status[0]] | |||
383 |
|
391 | |||
384 | @property |
|
392 | @property | |
385 | def removed(self): |
|
393 | def removed(self): | |
386 | """ |
|
394 | """ | |
387 | Returns list of removed ``FileNode`` objects. |
|
395 | Returns list of removed ``FileNode`` objects. | |
388 | """ |
|
396 | """ | |
389 | return RemovedFileNodesGenerator([n for n in self.status[2]], self) |
|
397 | return RemovedFileNodesGenerator(self.removed_paths, self) | |
|
398 | ||||
|
399 | @LazyProperty | |||
|
400 | def removed_paths(self): | |||
|
401 | return [n for n in self.status[2]] |
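The Mercurial variant of the same refactoring reads the new *_paths properties straight out of self.status, which (per the docstring earlier in this file) carries modified, added and removed files. Assuming the usual Mercurial ordering of that tuple, the index mapping can be sketched like this; split_status and the sample tuple are illustrative only.

def split_status(status):
    """Map a Mercurial-style status tuple onto the names used here."""
    modified, added, removed = status[0], status[1], status[2]
    return {
        'changed_paths': list(modified),
        'added_paths': list(added),
        'removed_paths': list(removed),
    }


# made-up sample: one modified, one added, one removed file
print(split_status((['setup.py'], ['docs/new.rst'], ['old.txt'])))
# -> changed_paths: ['setup.py'], added_paths: ['docs/new.rst'], removed_paths: ['old.txt']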
@@ -1,245 +1,254 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2014-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2014-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | SVN commit module |
|
22 | SVN commit module | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 |
|
25 | |||
26 | import dateutil.parser |
|
26 | import dateutil.parser | |
27 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
27 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
28 |
|
28 | |||
29 | from rhodecode.lib.utils import safe_str, safe_unicode |
|
29 | from rhodecode.lib.utils import safe_str, safe_unicode | |
30 | from rhodecode.lib.vcs import nodes, path as vcspath |
|
30 | from rhodecode.lib.vcs import nodes, path as vcspath | |
31 | from rhodecode.lib.vcs.backends import base |
|
31 | from rhodecode.lib.vcs.backends import base | |
32 | from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError |
|
32 | from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError | |
33 |
|
33 | |||
34 |
|
34 | |||
35 | _SVN_PROP_TRUE = '*' |
|
35 | _SVN_PROP_TRUE = '*' | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | class SubversionCommit(base.BaseCommit): |
|
38 | class SubversionCommit(base.BaseCommit): | |
39 | """ |
|
39 | """ | |
40 | Subversion specific implementation of commits |
|
40 | Subversion specific implementation of commits | |
41 |
|
41 | |||
42 | .. attribute:: branch |
|
42 | .. attribute:: branch | |
43 |
|
43 | |||
44 | The Subversion backend does not support assigning branches to |
|
44 | The Subversion backend does not support assigning branches to | |
45 | specific commits. This attribute always has the value `None`. |
|
45 | specific commits. This attribute always has the value `None`. | |
46 |
|
46 | |||
47 | """ |
|
47 | """ | |
48 |
|
48 | |||
49 | def __init__(self, repository, commit_id): |
|
49 | def __init__(self, repository, commit_id): | |
50 | self.repository = repository |
|
50 | self.repository = repository | |
51 | self.idx = self.repository._get_commit_idx(commit_id) |
|
51 | self.idx = self.repository._get_commit_idx(commit_id) | |
52 | self._svn_rev = self.idx + 1 |
|
52 | self._svn_rev = self.idx + 1 | |
53 | self._remote = repository._remote |
|
53 | self._remote = repository._remote | |
54 | # TODO: handling of raw_id should be a method on repository itself, |
|
54 | # TODO: handling of raw_id should be a method on repository itself, | |
55 | # which knows how to translate commit index and commit id |
|
55 | # which knows how to translate commit index and commit id | |
56 | self.raw_id = commit_id |
|
56 | self.raw_id = commit_id | |
57 | self.short_id = commit_id |
|
57 | self.short_id = commit_id | |
58 | self.id = 'r%s' % (commit_id, ) |
|
58 | self.id = 'r%s' % (commit_id, ) | |
59 |
|
59 | |||
60 | # TODO: Implement the following placeholder attributes |
|
60 | # TODO: Implement the following placeholder attributes | |
61 | self.nodes = {} |
|
61 | self.nodes = {} | |
62 | self.tags = [] |
|
62 | self.tags = [] | |
63 |
|
63 | |||
64 | @property |
|
64 | @property | |
65 | def author(self): |
|
65 | def author(self): | |
66 | return safe_unicode(self._properties.get('svn:author')) |
|
66 | return safe_unicode(self._properties.get('svn:author')) | |
67 |
|
67 | |||
68 | @property |
|
68 | @property | |
69 | def date(self): |
|
69 | def date(self): | |
70 | return _date_from_svn_properties(self._properties) |
|
70 | return _date_from_svn_properties(self._properties) | |
71 |
|
71 | |||
72 | @property |
|
72 | @property | |
73 | def message(self): |
|
73 | def message(self): | |
74 | return safe_unicode(self._properties.get('svn:log')) |
|
74 | return safe_unicode(self._properties.get('svn:log')) | |
75 |
|
75 | |||
76 | @LazyProperty |
|
76 | @LazyProperty | |
77 | def _properties(self): |
|
77 | def _properties(self): | |
78 | return self._remote.revision_properties(self._svn_rev) |
|
78 | return self._remote.revision_properties(self._svn_rev) | |
79 |
|
79 | |||
80 | @LazyProperty |
|
80 | @LazyProperty | |
81 | def parents(self): |
|
81 | def parents(self): | |
82 | parent_idx = self.idx - 1 |
|
82 | parent_idx = self.idx - 1 | |
83 | if parent_idx >= 0: |
|
83 | if parent_idx >= 0: | |
84 | parent = self.repository.get_commit(commit_idx=parent_idx) |
|
84 | parent = self.repository.get_commit(commit_idx=parent_idx) | |
85 | return [parent] |
|
85 | return [parent] | |
86 | return [] |
|
86 | return [] | |
87 |
|
87 | |||
88 | @LazyProperty |
|
88 | @LazyProperty | |
89 | def children(self): |
|
89 | def children(self): | |
90 | child_idx = self.idx + 1 |
|
90 | child_idx = self.idx + 1 | |
91 | if child_idx < len(self.repository.commit_ids): |
|
91 | if child_idx < len(self.repository.commit_ids): | |
92 | child = self.repository.get_commit(commit_idx=child_idx) |
|
92 | child = self.repository.get_commit(commit_idx=child_idx) | |
93 | return [child] |
|
93 | return [child] | |
94 | return [] |
|
94 | return [] | |
95 |
|
95 | |||
96 | def get_file_mode(self, path): |
|
96 | def get_file_mode(self, path): | |
97 | # Note: Subversion flags files which are executable with a special |
|
97 | # Note: Subversion flags files which are executable with a special | |
98 | # property `svn:executable` which is set to the value ``"*"``. |
|
98 | # property `svn:executable` which is set to the value ``"*"``. | |
99 | if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE: |
|
99 | if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE: | |
100 | return base.FILEMODE_EXECUTABLE |
|
100 | return base.FILEMODE_EXECUTABLE | |
101 | else: |
|
101 | else: | |
102 | return base.FILEMODE_DEFAULT |
|
102 | return base.FILEMODE_DEFAULT | |
103 |
|
103 | |||
104 | def is_link(self, path): |
|
104 | def is_link(self, path): | |
105 | # Note: Subversion has a flag for special files, the content of the |
|
105 | # Note: Subversion has a flag for special files, the content of the | |
106 | # file contains the type of that file. |
|
106 | # file contains the type of that file. | |
107 | if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE: |
|
107 | if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE: | |
108 | return self.get_file_content(path).startswith('link') |
|
108 | return self.get_file_content(path).startswith('link') | |
109 | return False |
|
109 | return False | |
110 |
|
110 | |||
111 | def is_node_binary(self, path): |
|
111 | def is_node_binary(self, path): | |
112 | path = self._fix_path(path) |
|
112 | path = self._fix_path(path) | |
113 | return self._remote.is_binary(self._svn_rev, safe_str(path)) |
|
113 | return self._remote.is_binary(self._svn_rev, safe_str(path)) | |
114 |
|
114 | |||
115 | def _get_file_property(self, path, name): |
|
115 | def _get_file_property(self, path, name): | |
116 | file_properties = self._remote.node_properties( |
|
116 | file_properties = self._remote.node_properties( | |
117 | safe_str(path), self._svn_rev) |
|
117 | safe_str(path), self._svn_rev) | |
118 | return file_properties.get(name) |
|
118 | return file_properties.get(name) | |
119 |
|
119 | |||
120 | def get_file_content(self, path): |
|
120 | def get_file_content(self, path): | |
121 | path = self._fix_path(path) |
|
121 | path = self._fix_path(path) | |
122 | return self._remote.get_file_content(safe_str(path), self._svn_rev) |
|
122 | return self._remote.get_file_content(safe_str(path), self._svn_rev) | |
123 |
|
123 | |||
124 | def get_file_content_streamed(self, path): |
|
124 | def get_file_content_streamed(self, path): | |
125 | path = self._fix_path(path) |
|
125 | path = self._fix_path(path) | |
126 | stream_method = getattr(self._remote, 'stream:get_file_content') |
|
126 | stream_method = getattr(self._remote, 'stream:get_file_content') | |
127 | return stream_method(safe_str(path), self._svn_rev) |
|
127 | return stream_method(safe_str(path), self._svn_rev) | |
128 |
|
128 | |||
129 | def get_file_size(self, path): |
|
129 | def get_file_size(self, path): | |
130 | path = self._fix_path(path) |
|
130 | path = self._fix_path(path) | |
131 | return self._remote.get_file_size(safe_str(path), self._svn_rev) |
|
131 | return self._remote.get_file_size(safe_str(path), self._svn_rev) | |
132 |
|
132 | |||
133 | def get_path_history(self, path, limit=None, pre_load=None): |
|
133 | def get_path_history(self, path, limit=None, pre_load=None): | |
134 | path = safe_str(self._fix_path(path)) |
|
134 | path = safe_str(self._fix_path(path)) | |
135 | history = self._remote.node_history(path, self._svn_rev, limit) |
|
135 | history = self._remote.node_history(path, self._svn_rev, limit) | |
136 | return [ |
|
136 | return [ | |
137 | self.repository.get_commit(commit_id=str(svn_rev)) |
|
137 | self.repository.get_commit(commit_id=str(svn_rev)) | |
138 | for svn_rev in history] |
|
138 | for svn_rev in history] | |
139 |
|
139 | |||
140 | def get_file_annotate(self, path, pre_load=None): |
|
140 | def get_file_annotate(self, path, pre_load=None): | |
141 | result = self._remote.file_annotate(safe_str(path), self._svn_rev) |
|
141 | result = self._remote.file_annotate(safe_str(path), self._svn_rev) | |
142 |
|
142 | |||
143 | for zero_based_line_no, svn_rev, content in result: |
|
143 | for zero_based_line_no, svn_rev, content in result: | |
144 | commit_id = str(svn_rev) |
|
144 | commit_id = str(svn_rev) | |
145 | line_no = zero_based_line_no + 1 |
|
145 | line_no = zero_based_line_no + 1 | |
146 | yield ( |
|
146 | yield ( | |
147 | line_no, |
|
147 | line_no, | |
148 | commit_id, |
|
148 | commit_id, | |
149 | lambda: self.repository.get_commit(commit_id=commit_id), |
|
149 | lambda: self.repository.get_commit(commit_id=commit_id), | |
150 | content) |
|
150 | content) | |
151 |
|
151 | |||
152 | def get_node(self, path, pre_load=None): |
|
152 | def get_node(self, path, pre_load=None): | |
153 | path = self._fix_path(path) |
|
153 | path = self._fix_path(path) | |
154 | if path not in self.nodes: |
|
154 | if path not in self.nodes: | |
155 |
|
155 | |||
156 | if path == '': |
|
156 | if path == '': | |
157 | node = nodes.RootNode(commit=self) |
|
157 | node = nodes.RootNode(commit=self) | |
158 | else: |
|
158 | else: | |
159 | node_type = self._remote.get_node_type( |
|
159 | node_type = self._remote.get_node_type( | |
160 | safe_str(path), self._svn_rev) |
|
160 | safe_str(path), self._svn_rev) | |
161 | if node_type == 'dir': |
|
161 | if node_type == 'dir': | |
162 | node = nodes.DirNode(path, commit=self) |
|
162 | node = nodes.DirNode(path, commit=self) | |
163 | elif node_type == 'file': |
|
163 | elif node_type == 'file': | |
164 | node = nodes.FileNode(path, commit=self, pre_load=pre_load) |
|
164 | node = nodes.FileNode(path, commit=self, pre_load=pre_load) | |
165 | else: |
|
165 | else: | |
166 | raise self.no_node_at_path(path) |
|
166 | raise self.no_node_at_path(path) | |
167 |
|
167 | |||
168 | self.nodes[path] = node |
|
168 | self.nodes[path] = node | |
169 | return self.nodes[path] |
|
169 | return self.nodes[path] | |
170 |
|
170 | |||
171 | def get_nodes(self, path): |
|
171 | def get_nodes(self, path): | |
172 | if self._get_kind(path) != nodes.NodeKind.DIR: |
|
172 | if self._get_kind(path) != nodes.NodeKind.DIR: | |
173 | raise CommitError( |
|
173 | raise CommitError( | |
174 | "Directory does not exist for commit %s at " |
|
174 | "Directory does not exist for commit %s at " | |
175 | " '%s'" % (self.raw_id, path)) |
|
175 | " '%s'" % (self.raw_id, path)) | |
176 | path = self._fix_path(path) |
|
176 | path = self._fix_path(path) | |
177 |
|
177 | |||
178 | path_nodes = [] |
|
178 | path_nodes = [] | |
179 | for name, kind in self._remote.get_nodes( |
|
179 | for name, kind in self._remote.get_nodes( | |
180 | safe_str(path), revision=self._svn_rev): |
|
180 | safe_str(path), revision=self._svn_rev): | |
181 | node_path = vcspath.join(path, name) |
|
181 | node_path = vcspath.join(path, name) | |
182 | if kind == 'dir': |
|
182 | if kind == 'dir': | |
183 | node = nodes.DirNode(node_path, commit=self) |
|
183 | node = nodes.DirNode(node_path, commit=self) | |
184 | elif kind == 'file': |
|
184 | elif kind == 'file': | |
185 | node = nodes.FileNode(node_path, commit=self) |
|
185 | node = nodes.FileNode(node_path, commit=self) | |
186 | else: |
|
186 | else: | |
187 | raise ValueError("Node kind %s not supported." % (kind, )) |
|
187 | raise ValueError("Node kind %s not supported." % (kind, )) | |
188 | self.nodes[node_path] = node |
|
188 | self.nodes[node_path] = node | |
189 | path_nodes.append(node) |
|
189 | path_nodes.append(node) | |
190 |
|
190 | |||
191 | return path_nodes |
|
191 | return path_nodes | |
192 |
|
192 | |||
193 | def _get_kind(self, path): |
|
193 | def _get_kind(self, path): | |
194 | path = self._fix_path(path) |
|
194 | path = self._fix_path(path) | |
195 | kind = self._remote.get_node_type(path, self._svn_rev) |
|
195 | kind = self._remote.get_node_type(path, self._svn_rev) | |
196 | if kind == 'file': |
|
196 | if kind == 'file': | |
197 | return nodes.NodeKind.FILE |
|
197 | return nodes.NodeKind.FILE | |
198 | elif kind == 'dir': |
|
198 | elif kind == 'dir': | |
199 | return nodes.NodeKind.DIR |
|
199 | return nodes.NodeKind.DIR | |
200 | else: |
|
200 | else: | |
201 | raise CommitError( |
|
201 | raise CommitError( | |
202 | "Node does not exist at the given path '%s'" % (path, )) |
|
202 | "Node does not exist at the given path '%s'" % (path, )) | |
203 |
|
203 | |||
204 | @LazyProperty |
|
204 | @LazyProperty | |
205 | def _changes_cache(self): |
|
205 | def _changes_cache(self): | |
206 | return self._remote.revision_changes(self._svn_rev) |
|
206 | return self._remote.revision_changes(self._svn_rev) | |
207 |
|
207 | |||
208 | @LazyProperty |
|
208 | @LazyProperty | |
209 | def affected_files(self): |
|
209 | def affected_files(self): | |
210 | changed_files = set() |
|
210 | changed_files = set() | |
211 | for files in self._changes_cache.itervalues(): |
|
211 | for files in self._changes_cache.itervalues(): | |
212 | changed_files.update(files) |
|
212 | changed_files.update(files) | |
213 | return list(changed_files) |
|
213 | return list(changed_files) | |
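affected_files above simply flattens every status bucket of the revision_changes dict into one list; a tiny illustration with a made-up dict (not a real revision_changes payload):

changes = {'added': ['trunk/new.c'], 'changed': ['trunk/main.c'], 'removed': []}
changed_files = set()
for files in changes.values():  # the code above uses .itervalues() (Python 2)
    changed_files.update(files)
print(sorted(changed_files))    # ['trunk/main.c', 'trunk/new.c']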
214 |
|
214 | |||
215 | @LazyProperty |
|
215 | @LazyProperty | |
216 | def id(self): |
|
216 | def id(self): | |
217 | return self.raw_id |
|
217 | return self.raw_id | |
218 |
|
218 | |||
219 | @property |
|
219 | @property | |
220 | def added(self): |
|
220 | def added(self): | |
221 | return nodes.AddedFileNodesGenerator( |
|
221 | return nodes.AddedFileNodesGenerator(self.added_paths, self) | |
222 | self._changes_cache['added'], self) |
|
222 | ||
|
223 | @LazyProperty | |||
|
224 | def added_paths(self): | |||
|
225 | return [n for n in self._changes_cache['added']] | |||
223 |
|
226 | |||
224 | @property |
|
227 | @property | |
225 | def changed(self): |
|
228 | def changed(self): | |
226 | return nodes.ChangedFileNodesGenerator( |
|
229 | return nodes.ChangedFileNodesGenerator(self.changed_paths, self) | |
227 | self._changes_cache['changed'], self) |
|
230 | ||
|
231 | @LazyProperty | |||
|
232 | def changed_paths(self): | |||
|
233 | return [n for n in self._changes_cache['changed']] | |||
228 |
|
234 | |||
229 | @property |
|
235 | @property | |
230 | def removed(self): |
|
236 | def removed(self): | |
231 | return nodes.RemovedFileNodesGenerator( |
|
237 | return nodes.RemovedFileNodesGenerator(self.removed_paths, self) | |
232 | self._changes_cache['removed'], self) |
|
238 | ||
|
239 | @LazyProperty | |||
|
240 | def removed_paths(self): | |||
|
241 | return [n for n in self._changes_cache['removed']] | |||
233 |
|
242 | |||
234 |
|
243 | |||
235 | def _date_from_svn_properties(properties): |
|
244 | def _date_from_svn_properties(properties): | |
236 | """ |
|
245 | """ | |
237 | Parses the date out of given svn properties. |
|
246 | Parses the date out of given svn properties. | |
238 |
|
247 | |||
239 | :return: :class:`datetime.datetime` instance. The object is naive. |
|
248 | :return: :class:`datetime.datetime` instance. The object is naive. | |
240 | """ |
|
249 | """ | |
241 |
|
250 | |||
242 | aware_date = dateutil.parser.parse(properties.get('svn:date')) |
|
251 | aware_date = dateutil.parser.parse(properties.get('svn:date')) | |
243 | # final_date = aware_date.astimezone(dateutil.tz.tzlocal()) |
|
252 | # final_date = aware_date.astimezone(dateutil.tz.tzlocal()) | |
244 | final_date = aware_date |
|
253 | final_date = aware_date | |
245 | return final_date.replace(tzinfo=None) |
|
254 | return final_date.replace(tzinfo=None) |
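_date_from_svn_properties() relies on dateutil to parse the svn:date property and then drops the timezone information, leaving a naive datetime. A small, self-contained example of the same parsing; the property value below is invented:

import dateutil.parser

properties = {'svn:date': '2019-06-01T12:34:56.789012Z'}  # invented sample value
aware_date = dateutil.parser.parse(properties.get('svn:date'))
naive_date = aware_date.replace(tzinfo=None)  # same "drop tzinfo" step as above
print(naive_date)  # 2019-06-01 12:34:56.789012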