@@ -1,504 +1,502 @@
 # -*- coding: utf-8 -*-
 """
     rhodecode.controllers.files
     ~~~~~~~~~~~~~~~~~~~~~~~~~~~

     Files controller for RhodeCode

     :created_on: Apr 21, 2010
     :author: marcink
     :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
     :license: GPLv3, see COPYING for more details.
 """
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 from __future__ import with_statement
 import os
 import logging
 import traceback
 import tempfile

 from pylons import request, response, tmpl_context as c, url
 from pylons.i18n.translation import _
 from pylons.controllers.util import redirect
 from pylons.decorators import jsonify
-from paste.fileapp import FileApp, _FileIter

 from rhodecode.lib import diffs
 from rhodecode.lib import helpers as h

 from rhodecode.lib.compat import OrderedDict
 from rhodecode.lib.utils2 import convert_line_endings, detect_mode, safe_str
 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
 from rhodecode.lib.base import BaseRepoController, render
 from rhodecode.lib.utils import EmptyChangeset
 from rhodecode.lib.vcs.conf import settings
 from rhodecode.lib.vcs.exceptions import RepositoryError, \
     ChangesetDoesNotExistError, EmptyRepositoryError, \
     ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError
 from rhodecode.lib.vcs.nodes import FileNode

 from rhodecode.model.repo import RepoModel
 from rhodecode.model.scm import ScmModel
 from rhodecode.model.db import Repository

 from rhodecode.controllers.changeset import anchor_url, _ignorews_url,\
     _context_url, get_line_ctx, get_ignore_ws


 log = logging.getLogger(__name__)


 class FilesController(BaseRepoController):

-
     def __before__(self):
         super(FilesController, self).__before__()
         c.cut_off_limit = self.cut_off_limit

     def __get_cs_or_redirect(self, rev, repo_name, redirect_after=True):
         """
         Safe way to get changeset if error occur it redirects to tip with
         proper message

         :param rev: revision to fetch
         :param repo_name: repo name to redirect after
         """

         try:
             return c.rhodecode_repo.get_changeset(rev)
         except EmptyRepositoryError, e:
             if not redirect_after:
                 return None
             url_ = url('files_add_home',
                        repo_name=c.repo_name,
                        revision=0, f_path='')
             add_new = '<a href="%s">[%s]</a>' % (url_, _('add new'))
             h.flash(h.literal(_('There are no files yet %s' % add_new)),
                     category='warning')
             redirect(h.url('summary_home', repo_name=repo_name))

         except RepositoryError, e:
             h.flash(str(e), category='warning')
             redirect(h.url('files_home', repo_name=repo_name, revision='tip'))

     def __get_filenode_or_redirect(self, repo_name, cs, path):
         """
         Returns file_node, if error occurs or given path is directory,
         it'll redirect to top level path

         :param repo_name: repo_name
         :param cs: given changeset
         :param path: path to lookup
         """

         try:
             file_node = cs.get_node(path)
             if file_node.is_dir():
                 raise RepositoryError('given path is a directory')
         except RepositoryError, e:
             h.flash(str(e), category='warning')
             redirect(h.url('files_home', repo_name=repo_name,
                            revision=cs.raw_id))

         return file_node

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                    'repository.admin')
     def index(self, repo_name, revision, f_path, annotate=False):
         # redirect to given revision from form if given
         post_revision = request.POST.get('at_rev', None)
         if post_revision:
             cs = self.__get_cs_or_redirect(post_revision, repo_name)
             redirect(url('files_home', repo_name=c.repo_name,
                      revision=cs.raw_id, f_path=f_path))

         c.changeset = self.__get_cs_or_redirect(revision, repo_name)
         c.branch = request.GET.get('branch', None)
         c.f_path = f_path
         c.annotate = annotate
         cur_rev = c.changeset.revision

         # prev link
         try:
             prev_rev = c.rhodecode_repo.get_changeset(cur_rev).prev(c.branch)
             c.url_prev = url('files_home', repo_name=c.repo_name,
                          revision=prev_rev.raw_id, f_path=f_path)
             if c.branch:
                 c.url_prev += '?branch=%s' % c.branch
         except (ChangesetDoesNotExistError, VCSError):
             c.url_prev = '#'

         # next link
         try:
             next_rev = c.rhodecode_repo.get_changeset(cur_rev).next(c.branch)
             c.url_next = url('files_home', repo_name=c.repo_name,
                          revision=next_rev.raw_id, f_path=f_path)
             if c.branch:
                 c.url_next += '?branch=%s' % c.branch
         except (ChangesetDoesNotExistError, VCSError):
             c.url_next = '#'

         # files or dirs
         try:
             c.file = c.changeset.get_node(f_path)

             if c.file.is_file():
                 _hist = c.changeset.get_file_history(f_path)
                 c.file_history = self._get_node_history(c.changeset, f_path,
                                                         _hist)
                 c.authors = []
                 for a in set([x.author for x in _hist]):
                     c.authors.append((h.email(a), h.person(a)))
             else:
                 c.authors = c.file_history = []
         except RepositoryError, e:
             h.flash(str(e), category='warning')
             redirect(h.url('files_home', repo_name=repo_name,
                            revision='tip'))

         return render('files/files.html')

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                    'repository.admin')
     def rawfile(self, repo_name, revision, f_path):
         cs = self.__get_cs_or_redirect(revision, repo_name)
         file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path)

         response.content_disposition = 'attachment; filename=%s' % \
             safe_str(f_path.split(Repository.url_sep())[-1])

         response.content_type = file_node.mimetype
         return file_node.content

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                    'repository.admin')
     def raw(self, repo_name, revision, f_path):
         cs = self.__get_cs_or_redirect(revision, repo_name)
         file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path)

         raw_mimetype_mapping = {
             # map original mimetype to a mimetype used for "show as raw"
             # you can also provide a content-disposition to override the
             # default "attachment" disposition.
             # orig_type: (new_type, new_dispo)

             # show images inline:
             'image/x-icon': ('image/x-icon', 'inline'),
             'image/png': ('image/png', 'inline'),
             'image/gif': ('image/gif', 'inline'),
             'image/jpeg': ('image/jpeg', 'inline'),
             'image/svg+xml': ('image/svg+xml', 'inline'),
         }

         mimetype = file_node.mimetype
         try:
             mimetype, dispo = raw_mimetype_mapping[mimetype]
         except KeyError:
             # we don't know anything special about this, handle it safely
             if file_node.is_binary:
                 # do same as download raw for binary files
                 mimetype, dispo = 'application/octet-stream', 'attachment'
             else:
                 # do not just use the original mimetype, but force text/plain,
                 # otherwise it would serve text/html and that might be unsafe.
                 # Note: underlying vcs library fakes text/plain mimetype if the
                 # mimetype can not be determined and it thinks it is not
                 # binary.This might lead to erroneous text display in some
                 # cases, but helps in other cases, like with text files
                 # without extension.
                 mimetype, dispo = 'text/plain', 'inline'

         if dispo == 'attachment':
             dispo = 'attachment; filename=%s' % \
                 safe_str(f_path.split(os.sep)[-1])

         response.content_disposition = dispo
         response.content_type = mimetype
         return file_node.content

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
     def edit(self, repo_name, revision, f_path):
         r_post = request.POST

         c.cs = self.__get_cs_or_redirect(revision, repo_name)
         c.file = self.__get_filenode_or_redirect(repo_name, c.cs, f_path)

         if c.file.is_binary:
             return redirect(url('files_home', repo_name=c.repo_name,
                             revision=c.cs.raw_id, f_path=f_path))

         c.f_path = f_path

         if r_post:

             old_content = c.file.content
             sl = old_content.splitlines(1)
             first_line = sl[0] if sl else ''
             # modes: 0 - Unix, 1 - Mac, 2 - DOS
             mode = detect_mode(first_line, 0)
             content = convert_line_endings(r_post.get('content'), mode)

             message = r_post.get('message') or (_('Edited %s via RhodeCode')
                                                 % (f_path))
             author = self.rhodecode_user.full_contact

             if content == old_content:
                 h.flash(_('No changes'),
                         category='warning')
                 return redirect(url('changeset_home', repo_name=c.repo_name,
                                     revision='tip'))

             try:
                 self.scm_model.commit_change(repo=c.rhodecode_repo,
                                              repo_name=repo_name, cs=c.cs,
                                              user=self.rhodecode_user,
                                              author=author, message=message,
                                              content=content, f_path=f_path)
                 h.flash(_('Successfully committed to %s' % f_path),
                         category='success')

             except Exception:
                 log.error(traceback.format_exc())
                 h.flash(_('Error occurred during commit'), category='error')
             return redirect(url('changeset_home',
                                 repo_name=c.repo_name, revision='tip'))

         return render('files/files_edit.html')

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
     def add(self, repo_name, revision, f_path):
         r_post = request.POST
         c.cs = self.__get_cs_or_redirect(revision, repo_name,
                                          redirect_after=False)
         if c.cs is None:
             c.cs = EmptyChangeset(alias=c.rhodecode_repo.alias)

         c.f_path = f_path

         if r_post:
             unix_mode = 0
             content = convert_line_endings(r_post.get('content'), unix_mode)

             message = r_post.get('message') or (_('Added %s via RhodeCode')
                                                 % (f_path))
             location = r_post.get('location')
             filename = r_post.get('filename')
             file_obj = r_post.get('upload_file', None)

             if file_obj is not None and hasattr(file_obj, 'filename'):
                 filename = file_obj.filename
                 content = file_obj.file

             node_path = os.path.join(location, filename)
             author = self.rhodecode_user.full_contact

             if not content:
                 h.flash(_('No content'), category='warning')
                 return redirect(url('changeset_home', repo_name=c.repo_name,
                                     revision='tip'))
             if not filename:
                 h.flash(_('No filename'), category='warning')
                 return redirect(url('changeset_home', repo_name=c.repo_name,
                                     revision='tip'))

             try:
                 self.scm_model.create_node(repo=c.rhodecode_repo,
                                            repo_name=repo_name, cs=c.cs,
                                            user=self.rhodecode_user,
                                            author=author, message=message,
                                            content=content, f_path=node_path)
                 h.flash(_('Successfully committed to %s' % node_path),
                         category='success')
             except NodeAlreadyExistsError, e:
                 h.flash(_(e), category='error')
             except Exception:
                 log.error(traceback.format_exc())
                 h.flash(_('Error occurred during commit'), category='error')
             return redirect(url('changeset_home',
                                 repo_name=c.repo_name, revision='tip'))

         return render('files/files_add.html')

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                    'repository.admin')
     def archivefile(self, repo_name, fname):

         fileformat = None
         revision = None
         ext = None
         subrepos = request.GET.get('subrepos') == 'true'

         for a_type, ext_data in settings.ARCHIVE_SPECS.items():
             archive_spec = fname.split(ext_data[1])
             if len(archive_spec) == 2 and archive_spec[1] == '':
                 fileformat = a_type or ext_data[1]
                 revision = archive_spec[0]
                 ext = ext_data[1]

         try:
             dbrepo = RepoModel().get_by_repo_name(repo_name)
             if dbrepo.enable_downloads is False:
                 return _('downloads disabled')

             if c.rhodecode_repo.alias == 'hg':
                 # patch and reset hooks section of UI config to not run any
                 # hooks on fetching archives with subrepos
                 for k, v in c.rhodecode_repo._repo.ui.configitems('hooks'):
                     c.rhodecode_repo._repo.ui.setconfig('hooks', k, None)

             cs = c.rhodecode_repo.get_changeset(revision)
             content_type = settings.ARCHIVE_SPECS[fileformat][0]
         except ChangesetDoesNotExistError:
             return _('Unknown revision %s') % revision
         except EmptyRepositoryError:
             return _('Empty repository')
         except (ImproperArchiveTypeError, KeyError):
             return _('Unknown archive type')

         fd, archive = tempfile.mkstemp()
         t = open(archive, 'wb')
         cs.fill_archive(stream=t, kind=fileformat, subrepos=subrepos)
         t.close()

         def get_chunked_archive(archive):
             stream = open(archive, 'rb')
             while True:
                 data = stream.read(16 * 1024)
                 if not data:
                     stream.close()
                     os.close(fd)
                     os.remove(archive)
                     break
                 yield data

         response.content_disposition = str('attachment; filename=%s-%s%s' \
                                            % (repo_name, revision[:12], ext))
         response.content_type = str(content_type)
         return get_chunked_archive(archive)

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                    'repository.admin')
     def diff(self, repo_name, f_path):
         ignore_whitespace = request.GET.get('ignorews') == '1'
         line_context = request.GET.get('context', 3)
         diff1 = request.GET.get('diff1', '')
         diff2 = request.GET.get('diff2', '')
         c.action = request.GET.get('diff')
         c.no_changes = diff1 == diff2
         c.f_path = f_path
         c.big_diff = False
         c.anchor_url = anchor_url
         c.ignorews_url = _ignorews_url
         c.context_url = _context_url
         c.changes = OrderedDict()
         c.changes[diff2] = []
         try:
             if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
                 c.changeset_1 = c.rhodecode_repo.get_changeset(diff1)
                 node1 = c.changeset_1.get_node(f_path)
             else:
                 c.changeset_1 = EmptyChangeset(repo=c.rhodecode_repo)
                 node1 = FileNode('.', '', changeset=c.changeset_1)

             if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
                 c.changeset_2 = c.rhodecode_repo.get_changeset(diff2)
                 node2 = c.changeset_2.get_node(f_path)
             else:
                 c.changeset_2 = EmptyChangeset(repo=c.rhodecode_repo)
                 node2 = FileNode('.', '', changeset=c.changeset_2)
         except RepositoryError:
             return redirect(url('files_home', repo_name=c.repo_name,
                                 f_path=f_path))

         if c.action == 'download':
             _diff = diffs.get_gitdiff(node1, node2,
                                       ignore_whitespace=ignore_whitespace,
                                       context=line_context)
             diff = diffs.DiffProcessor(_diff, format='gitdiff')

             diff_name = '%s_vs_%s.diff' % (diff1, diff2)
             response.content_type = 'text/plain'
             response.content_disposition = (
                 'attachment; filename=%s' % diff_name
             )
             return diff.raw_diff()

         elif c.action == 'raw':
             _diff = diffs.get_gitdiff(node1, node2,
                                       ignore_whitespace=ignore_whitespace,
                                       context=line_context)
             diff = diffs.DiffProcessor(_diff, format='gitdiff')
             response.content_type = 'text/plain'
             return diff.raw_diff()

         else:
             fid = h.FID(diff2, node2.path)
             line_context_lcl = get_line_ctx(fid, request.GET)
             ign_whitespace_lcl = get_ignore_ws(fid, request.GET)

             lim = request.GET.get('fulldiff') or self.cut_off_limit
             _, cs1, cs2, diff, st = diffs.wrapped_diff(filenode_old=node1,
                                          filenode_new=node2,
                                          cut_off_limit=lim,
                                          ignore_whitespace=ign_whitespace_lcl,
                                          line_context=line_context_lcl,
                                          enable_comments=False)

             c.changes = [('', node2, diff, cs1, cs2, st,)]

         return render('files/file_diff.html')

     def _get_node_history(self, cs, f_path, changesets=None):
         if changesets is None:
             changesets = cs.get_file_history(f_path)
         hist_l = []

         changesets_group = ([], _("Changesets"))
         branches_group = ([], _("Branches"))
         tags_group = ([], _("Tags"))
         _hg = cs.repository.alias == 'hg'
         for chs in changesets:
             _branch = '(%s)' % chs.branch if _hg else ''
             n_desc = 'r%s:%s %s' % (chs.revision, chs.short_id, _branch)
             changesets_group[0].append((chs.raw_id, n_desc,))

         hist_l.append(changesets_group)

         for name, chs in c.rhodecode_repo.branches.items():
             branches_group[0].append((chs, name),)
         hist_l.append(branches_group)

         for name, chs in c.rhodecode_repo.tags.items():
             tags_group[0].append((chs, name),)
         hist_l.append(tags_group)

         return hist_l

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                    'repository.admin')
     @jsonify
     def nodelist(self, repo_name, revision, f_path):
         if request.environ.get('HTTP_X_PARTIAL_XHR'):
             cs = self.__get_cs_or_redirect(revision, repo_name)
             _d, _f = ScmModel().get_nodes(repo_name, cs.raw_id, f_path,
                                           flat=False)
             return {'nodes': _d + _f}
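
Context for the hunk that follows: when dulwich resolves a SHA it can hand back either a Commit object or, for an annotated tag, a Tag object, and the two expose equivalent metadata under different attribute names ('committer'/'commit_time'/'commit_timezone' versus 'tagger'/'tag_time'/'tag_timezone'). That is why the GitChangeset constructor below records which attribute names it will later read via getattr(). A minimal sketch of that distinction; changeset_metadata, repo_path and sha are illustrative names only, not part of the diff:

from dulwich.repo import Repo
from dulwich.objects import Commit, Tag

def changeset_metadata(repo_path, sha):
    """Return (committer-ish, timestamp, timezone) for a commit or annotated tag."""
    obj = Repo(repo_path).get_object(sha)
    if isinstance(obj, Commit):
        # plain commit: metadata lives on committer / commit_time / commit_timezone
        return obj.committer, obj.commit_time, obj.commit_timezone
    if isinstance(obj, Tag):
        # annotated tag: the same kind of metadata lives on tagger / tag_time / tag_timezone
        return obj.tagger, obj.tag_time, obj.tag_timezone
    raise TypeError('unexpected git object: %r' % obj)
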
@@ -1,462 +1,472 @@
 import re
 from itertools import chain
 from dulwich import objects
 from subprocess import Popen, PIPE
 from rhodecode.lib.vcs.conf import settings
 from rhodecode.lib.vcs.exceptions import RepositoryError
 from rhodecode.lib.vcs.exceptions import ChangesetError
 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
 from rhodecode.lib.vcs.exceptions import VCSError
 from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
 from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError
 from rhodecode.lib.vcs.backends.base import BaseChangeset
 from rhodecode.lib.vcs.nodes import FileNode, DirNode, NodeKind, RootNode, \
     RemovedFileNode, SubModuleNode
 from rhodecode.lib.vcs.utils import safe_unicode
 from rhodecode.lib.vcs.utils import date_fromtimestamp
 from rhodecode.lib.vcs.utils.lazy import LazyProperty
+from dulwich.objects import Commit, Tag


 class GitChangeset(BaseChangeset):
     """
     Represents state of the repository at single revision.
     """

     def __init__(self, repository, revision):
         self._stat_modes = {}
         self.repository = repository
         self.raw_id = revision
-        self.revision = repository.revisions.index(revision)
-
         self.short_id = self.raw_id[:12]
         self.id = self.raw_id
         try:
             commit = self.repository._repo.get_object(self.raw_id)
         except KeyError:
             raise RepositoryError("Cannot get object with id %s" % self.raw_id)
         self._commit = commit
-        self._tree_id = commit.tree
+
+        if isinstance(commit, Commit):
+            self._tree_id = commit.tree
+            self._commiter_property = 'committer'
+            self._date_property = 'commit_time'
+            self._date_tz_property = 'commit_timezone'
+            self.revision = repository.revisions.index(revision)
+        elif isinstance(commit, Tag):
+            self._commiter_property = 'tagger'
+            self._tree_id = commit.id
+            self._date_property = 'tag_time'
+            self._date_tz_property = 'tag_timezone'
+            self.revision = 'tag'

         self.message = safe_unicode(commit.message)
         #self.branch = None
         self.tags = []
         self.nodes = {}
         self._paths = {}

     @LazyProperty
     def author(self):
-        return safe_unicode(self._commit.committer)
+        return safe_unicode(getattr(self._commit, self._commiter_property))

     @LazyProperty
     def date(self):
-        return date_fromtimestamp(self._commit.commit_time,
-                                  self._commit.commit_timezone)
+        return date_fromtimestamp(getattr(self._commit, self._date_property),
+                                  getattr(self._commit, self._date_tz_property))

     @LazyProperty
     def status(self):
         """
         Returns modified, added, removed, deleted files for current changeset
         """
         return self.changed, self.added, self.removed

     @LazyProperty
     def branch(self):

         heads = self.repository._heads(reverse=False)

         ref = heads.get(self.raw_id)
         if ref:
             return safe_unicode(ref)

     def _fix_path(self, path):
         """
         Paths are stored without trailing slash so we need to get rid off it if
         needed.
         """
         if path.endswith('/'):
             path = path.rstrip('/')
         return path

     def _get_id_for_path(self, path):

         # FIXME: Please, spare a couple of minutes and make those codes cleaner;
         if not path in self._paths:
             path = path.strip('/')
             # set root tree
-            tree = self.repository._repo[self._commit.tree]
+            tree = self.repository._repo[self._tree_id]
             if path == '':
                 self._paths[''] = tree.id
                 return tree.id
             splitted = path.split('/')
             dirs, name = splitted[:-1], splitted[-1]
             curdir = ''

             # initially extract things from root dir
             for item, stat, id in tree.iteritems():
                 if curdir:
                     name = '/'.join((curdir, item))
                 else:
                     name = item
                 self._paths[name] = id
                 self._stat_modes[name] = stat

             for dir in dirs:
                 if curdir:
                     curdir = '/'.join((curdir, dir))
                 else:
                     curdir = dir
                 dir_id = None
                 for item, stat, id in tree.iteritems():
                     if dir == item:
                         dir_id = id
                 if dir_id:
                     # Update tree
                     tree = self.repository._repo[dir_id]
                     if not isinstance(tree, objects.Tree):
                         raise ChangesetError('%s is not a directory' % curdir)
                 else:
                     raise ChangesetError('%s have not been found' % curdir)

             # cache all items from the given traversed tree
             for item, stat, id in tree.iteritems():
                 if curdir:
                     name = '/'.join((curdir, item))
                 else:
                     name = item
                 self._paths[name] = id
                 self._stat_modes[name] = stat
             if not path in self._paths:
                 raise NodeDoesNotExistError("There is no file nor directory "
                     "at the given path %r at revision %r"
                     % (path, self.short_id))
         return self._paths[path]

|
145 | def _get_kind(self, path): | |
135 |
|
|
146 | obj = self.repository._repo[self._get_id_for_path(path)] | |
136 | obj = self.repository._repo[id] |
|
|||
137 | if isinstance(obj, objects.Blob): |
|
147 | if isinstance(obj, objects.Blob): | |
138 | return NodeKind.FILE |
|
148 | return NodeKind.FILE | |
139 | elif isinstance(obj, objects.Tree): |
|
149 | elif isinstance(obj, objects.Tree): | |
140 | return NodeKind.DIR |
|
150 | return NodeKind.DIR | |
141 |
|
151 | |||
142 | def _get_file_nodes(self): |
|
152 | def _get_file_nodes(self): | |
143 | return chain(*(t[2] for t in self.walk())) |
|
153 | return chain(*(t[2] for t in self.walk())) | |
144 |
|
154 | |||
145 | @LazyProperty |
|
155 | @LazyProperty | |
146 | def parents(self): |
|
156 | def parents(self): | |
147 | """ |
|
157 | """ | |
148 | Returns list of parents changesets. |
|
158 | Returns list of parents changesets. | |
149 | """ |
|
159 | """ | |
150 | return [self.repository.get_changeset(parent) |
|
160 | return [self.repository.get_changeset(parent) | |
151 | for parent in self._commit.parents] |
|
161 | for parent in self._commit.parents] | |
152 |
|
162 | |||
153 | def next(self, branch=None): |
|
163 | def next(self, branch=None): | |
154 |
|
164 | |||
155 | if branch and self.branch != branch: |
|
165 | if branch and self.branch != branch: | |
156 | raise VCSError('Branch option used on changeset not belonging ' |
|
166 | raise VCSError('Branch option used on changeset not belonging ' | |
157 | 'to that branch') |
|
167 | 'to that branch') | |
158 |
|
168 | |||
159 | def _next(changeset, branch): |
|
169 | def _next(changeset, branch): | |
160 | try: |
|
170 | try: | |
161 | next_ = changeset.revision + 1 |
|
171 | next_ = changeset.revision + 1 | |
162 | next_rev = changeset.repository.revisions[next_] |
|
172 | next_rev = changeset.repository.revisions[next_] | |
163 | except IndexError: |
|
173 | except IndexError: | |
164 | raise ChangesetDoesNotExistError |
|
174 | raise ChangesetDoesNotExistError | |
165 | cs = changeset.repository.get_changeset(next_rev) |
|
175 | cs = changeset.repository.get_changeset(next_rev) | |
166 |
|
176 | |||
167 | if branch and branch != cs.branch: |
|
177 | if branch and branch != cs.branch: | |
168 | return _next(cs, branch) |
|
178 | return _next(cs, branch) | |
169 |
|
179 | |||
170 | return cs |
|
180 | return cs | |
171 |
|
181 | |||
172 | return _next(self, branch) |
|
182 | return _next(self, branch) | |
173 |
|
183 | |||
174 | def prev(self, branch=None): |
|
184 | def prev(self, branch=None): | |
175 | if branch and self.branch != branch: |
|
185 | if branch and self.branch != branch: | |
176 | raise VCSError('Branch option used on changeset not belonging ' |
|
186 | raise VCSError('Branch option used on changeset not belonging ' | |
177 | 'to that branch') |
|
187 | 'to that branch') | |
178 |
|
188 | |||
179 | def _prev(changeset, branch): |
|
189 | def _prev(changeset, branch): | |
180 | try: |
|
190 | try: | |
181 | prev_ = changeset.revision - 1 |
|
191 | prev_ = changeset.revision - 1 | |
182 | if prev_ < 0: |
|
192 | if prev_ < 0: | |
183 | raise IndexError |
|
193 | raise IndexError | |
184 | prev_rev = changeset.repository.revisions[prev_] |
|
194 | prev_rev = changeset.repository.revisions[prev_] | |
185 | except IndexError: |
|
195 | except IndexError: | |
186 | raise ChangesetDoesNotExistError |
|
196 | raise ChangesetDoesNotExistError | |
187 |
|
197 | |||
188 | cs = changeset.repository.get_changeset(prev_rev) |
|
198 | cs = changeset.repository.get_changeset(prev_rev) | |
189 |
|
199 | |||
190 | if branch and branch != cs.branch: |
|
200 | if branch and branch != cs.branch: | |
191 | return _prev(cs, branch) |
|
201 | return _prev(cs, branch) | |
192 |
|
202 | |||
193 | return cs |
|
203 | return cs | |
194 |
|
204 | |||
195 | return _prev(self, branch) |
|
205 | return _prev(self, branch) | |
196 |
|
206 | |||
197 | def diff(self, ignore_whitespace=True, context=3): |
|
207 | def diff(self, ignore_whitespace=True, context=3): | |
198 | rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET |
|
208 | rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET | |
199 | rev2 = self |
|
209 | rev2 = self | |
200 | return ''.join(self.repository.get_diff(rev1, rev2, |
|
210 | return ''.join(self.repository.get_diff(rev1, rev2, | |
201 | ignore_whitespace=ignore_whitespace, |
|
211 | ignore_whitespace=ignore_whitespace, | |
202 | context=context)) |
|
212 | context=context)) | |
203 |
|
213 | |||
204 | def get_file_mode(self, path): |
|
214 | def get_file_mode(self, path): | |
205 | """ |
|
215 | """ | |
206 | Returns stat mode of the file at the given ``path``. |
|
216 | Returns stat mode of the file at the given ``path``. | |
207 | """ |
|
217 | """ | |
208 | # ensure path is traversed |
|
218 | # ensure path is traversed | |
209 | self._get_id_for_path(path) |
|
219 | self._get_id_for_path(path) | |
210 | return self._stat_modes[path] |
|
220 | return self._stat_modes[path] | |
211 |
|
221 | |||
212 | def get_file_content(self, path): |
|
222 | def get_file_content(self, path): | |
213 | """ |
|
223 | """ | |
214 | Returns content of the file at given ``path``. |
|
224 | Returns content of the file at given ``path``. | |
215 | """ |
|
225 | """ | |
216 | id = self._get_id_for_path(path) |
|
226 | id = self._get_id_for_path(path) | |
217 | blob = self.repository._repo[id] |
|
227 | blob = self.repository._repo[id] | |
218 | return blob.as_pretty_string() |
|
228 | return blob.as_pretty_string() | |
219 |
|
229 | |||
220 | def get_file_size(self, path): |
|
230 | def get_file_size(self, path): | |
221 | """ |
|
231 | """ | |
222 | Returns size of the file at given ``path``. |
|
232 | Returns size of the file at given ``path``. | |
223 | """ |
|
233 | """ | |
224 | id = self._get_id_for_path(path) |
|
234 | id = self._get_id_for_path(path) | |
225 | blob = self.repository._repo[id] |
|
235 | blob = self.repository._repo[id] | |
226 | return blob.raw_length() |
|
236 | return blob.raw_length() | |
227 |
|
237 | |||
228 | def get_file_changeset(self, path): |
|
238 | def get_file_changeset(self, path): | |
229 | """ |
|
239 | """ | |
230 | Returns last commit of the file at the given ``path``. |
|
240 | Returns last commit of the file at the given ``path``. | |
231 | """ |
|
241 | """ | |
232 | node = self.get_node(path) |
|
242 | node = self.get_node(path) | |
233 | return node.history[0] |
|
243 | return node.history[0] | |
234 |
|
244 | |||
235 | def get_file_history(self, path): |
|
245 | def get_file_history(self, path): | |
236 | """ |
|
246 | """ | |
237 | Returns history of file as reversed list of ``Changeset`` objects for |
|
247 | Returns history of file as reversed list of ``Changeset`` objects for | |
238 | which file at given ``path`` has been modified. |
|
248 | which file at given ``path`` has been modified. | |
239 |
|
249 | |||
240 | TODO: This function now shells out to the underlying 'git' and 'grep' |
|
250 | TODO: This function now shells out to the underlying 'git' and 'grep' | |
241 | commands, which is generally not good. Should be replaced with an |
|
251 | commands, which is generally not good. Should be replaced with an | |
242 | algorithm iterating over commits. |
|
252 | algorithm iterating over commits. | |
243 | """ |
|
253 | """ | |
244 | cmd = 'log --pretty="format: %%H" -s -p %s -- "%s"' % ( |
|
254 | cmd = 'log --pretty="format: %%H" -s -p %s -- "%s"' % ( | |
245 | self.id, path |
|
255 | self.id, path | |
246 | ) |
|
256 | ) | |
247 | so, se = self.repository.run_git_command(cmd) |
|
257 | so, se = self.repository.run_git_command(cmd) | |
248 | ids = re.findall(r'[0-9a-fA-F]{40}', so) |
|
258 | ids = re.findall(r'[0-9a-fA-F]{40}', so) | |
249 | return [self.repository.get_changeset(id) for id in ids] |
|
259 | return [self.repository.get_changeset(id) for id in ids] | |
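
A usage sketch of the log-based history lookup above (the repository path and file name are placeholders, not part of the original code):

# Illustrative only: '/tmp/example-repo' and 'setup.py' are assumed to exist.
from rhodecode.lib.vcs.backends.git import GitRepository

repo = GitRepository('/tmp/example-repo')
tip = repo.get_changeset()                   # most recent commit
for cs in tip.get_file_history('setup.py'):  # changesets touching the file
    print cs.raw_id, cs.author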
250 |
|
260 | |||
251 | def get_file_annotate(self, path): |
|
261 | def get_file_annotate(self, path): | |
252 | """ |
|
262 | """ | |
253 | Returns a list of three-element tuples of (lineno, changeset, line) |
|
263 | Returns a list of three-element tuples of (lineno, changeset, line) | |
254 |
|
264 | |||
255 | TODO: This function now shells out to the underlying 'git' command, which |
|
265 | TODO: This function now shells out to the underlying 'git' command, which | |
256 | is generally not good. Should be replaced with an algorithm iterating |
|
266 | is generally not good. Should be replaced with an algorithm iterating | |
257 | over commits. |
|
267 | over commits. | |
258 | """ |
|
268 | """ | |
259 | cmd = 'blame -l --root -r %s -- "%s"' % (self.id, path) |
|
269 | cmd = 'blame -l --root -r %s -- "%s"' % (self.id, path) | |
260 | # -l ==> outputs long shas (and we need all 40 characters) |
|
270 | # -l ==> outputs long shas (and we need all 40 characters) | |
261 | # --root ==> doesn't put '^' character for boundaries |
|
271 | # --root ==> doesn't put '^' character for boundaries | |
262 | # -r sha ==> blames for the given revision |
|
272 | # -r sha ==> blames for the given revision | |
263 | so, se = self.repository.run_git_command(cmd) |
|
273 | so, se = self.repository.run_git_command(cmd) | |
264 |
|
274 | |||
265 | annotate = [] |
|
275 | annotate = [] | |
266 | for i, blame_line in enumerate(so.split('\n')[:-1]): |
|
276 | for i, blame_line in enumerate(so.split('\n')[:-1]): | |
267 | ln_no = i + 1 |
|
277 | ln_no = i + 1 | |
268 | id, line = re.split(r' ', blame_line, 1) |
|
278 | id, line = re.split(r' ', blame_line, 1) | |
269 | annotate.append((ln_no, self.repository.get_changeset(id), line)) |
|
279 | annotate.append((ln_no, self.repository.get_changeset(id), line)) | |
270 | return annotate |
|
280 | return annotate | |
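
A hedged sketch of consuming the blame tuples returned above; the paths used are placeholders:

# Illustrative only: paths are placeholders.
from rhodecode.lib.vcs.backends.git import GitRepository

repo = GitRepository('/tmp/example-repo')
cs = repo.get_changeset()
for ln_no, changeset, line in cs.get_file_annotate('setup.py'):
    # line number, changeset that last touched the line, line content
    print '%4d %s %s' % (ln_no, changeset.short_id, line.rstrip())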
271 |
|
281 | |||
272 | def fill_archive(self, stream=None, kind='tgz', prefix=None, |
|
282 | def fill_archive(self, stream=None, kind='tgz', prefix=None, | |
273 | subrepos=False): |
|
283 | subrepos=False): | |
274 | """ |
|
284 | """ | |
275 | Fills up given stream. |
|
285 | Fills up given stream. | |
276 |
|
286 | |||
277 | :param stream: file like object. |
|
287 | :param stream: file like object. | |
278 | :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``. |
|
288 | :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``. | |
279 | Default: ``tgz``. |
|
289 | Default: ``tgz``. | |
280 | :param prefix: name of root directory in archive. |
|
290 | :param prefix: name of root directory in archive. | |
281 | Default is the repository name and changeset's short_id joined with a dash |
|
291 | Default is the repository name and changeset's short_id joined with a dash | |
282 | (``repo-tip.<KIND>``). |
|
292 | (``repo-tip.<KIND>``). | |
283 | :param subrepos: include subrepos in this archive. |
|
293 | :param subrepos: include subrepos in this archive. | |
284 |
|
294 | |||
285 | :raise ImproperArchiveTypeError: If given kind is wrong. |
|
295 | :raise ImproperArchiveTypeError: If given kind is wrong. | |
286 | :raise VCSError: If given stream is None |
|
296 | :raise VCSError: If given stream is None | |
287 |
|
297 | |||
288 | """ |
|
298 | """ | |
289 | allowed_kinds = settings.ARCHIVE_SPECS.keys() |
|
299 | allowed_kinds = settings.ARCHIVE_SPECS.keys() | |
290 | if kind not in allowed_kinds: |
|
300 | if kind not in allowed_kinds: | |
291 | raise ImproperArchiveTypeError('Archive kind not supported, use one ' |
|
301 | raise ImproperArchiveTypeError('Archive kind not supported, use one ' | |
292 | 'of %s', allowed_kinds) |
|
302 | 'of %s', allowed_kinds) | |
293 |
|
303 | |||
294 | if prefix is None: |
|
304 | if prefix is None: | |
295 | prefix = '%s-%s' % (self.repository.name, self.short_id) |
|
305 | prefix = '%s-%s' % (self.repository.name, self.short_id) | |
296 | elif prefix.startswith('/'): |
|
306 | elif prefix.startswith('/'): | |
297 | raise VCSError("Prefix cannot start with leading slash") |
|
307 | raise VCSError("Prefix cannot start with leading slash") | |
298 | elif prefix.strip() == '': |
|
308 | elif prefix.strip() == '': | |
299 | raise VCSError("Prefix cannot be empty") |
|
309 | raise VCSError("Prefix cannot be empty") | |
300 |
|
310 | |||
301 | if kind == 'zip': |
|
311 | if kind == 'zip': | |
302 | frmt = 'zip' |
|
312 | frmt = 'zip' | |
303 | else: |
|
313 | else: | |
304 | frmt = 'tar' |
|
314 | frmt = 'tar' | |
305 | cmd = 'git archive --format=%s --prefix=%s/ %s' % (frmt, prefix, |
|
315 | cmd = 'git archive --format=%s --prefix=%s/ %s' % (frmt, prefix, | |
306 | self.raw_id) |
|
316 | self.raw_id) | |
307 | if kind == 'tgz': |
|
317 | if kind == 'tgz': | |
308 | cmd += ' | gzip -9' |
|
318 | cmd += ' | gzip -9' | |
309 | elif kind == 'tbz2': |
|
319 | elif kind == 'tbz2': | |
310 | cmd += ' | bzip2 -9' |
|
320 | cmd += ' | bzip2 -9' | |
311 |
|
321 | |||
312 | if stream is None: |
|
322 | if stream is None: | |
313 | raise VCSError('You need to pass in a valid stream for filling' |
|
323 | raise VCSError('You need to pass in a valid stream for filling' | |
314 | ' with archival data') |
|
324 | ' with archival data') | |
315 | popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True, |
|
325 | popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True, | |
316 | cwd=self.repository.path) |
|
326 | cwd=self.repository.path) | |
317 |
|
327 | |||
318 | buffer_size = 1024 * 8 |
|
328 | buffer_size = 1024 * 8 | |
319 | chunk = popen.stdout.read(buffer_size) |
|
329 | chunk = popen.stdout.read(buffer_size) | |
320 | while chunk: |
|
330 | while chunk: | |
321 | stream.write(chunk) |
|
331 | stream.write(chunk) | |
322 | chunk = popen.stdout.read(buffer_size) |
|
332 | chunk = popen.stdout.read(buffer_size) | |
323 | # Make sure all descriptors would be read |
|
333 | # Make sure all descriptors would be read | |
324 | popen.communicate() |
|
334 | popen.communicate() | |
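
A minimal sketch of streaming an archive to disk with fill_archive(); the output path and prefix are assumptions:

# Illustrative only: output path and prefix are placeholders.
from rhodecode.lib.vcs.backends.git import GitRepository

repo = GitRepository('/tmp/example-repo')
cs = repo.get_changeset()
stream = open('/tmp/example-repo.tar.gz', 'wb')
try:
    # pipes `git archive` (optionally through gzip/bzip2) into the stream
    cs.fill_archive(stream=stream, kind='tgz', prefix='example-repo-tip')
finally:
    stream.close()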
325 |
|
335 | |||
326 | def get_nodes(self, path): |
|
336 | def get_nodes(self, path): | |
327 | if self._get_kind(path) != NodeKind.DIR: |
|
337 | if self._get_kind(path) != NodeKind.DIR: | |
328 | raise ChangesetError("Directory does not exist for revision %r at " |
|
338 | raise ChangesetError("Directory does not exist for revision %r at " | |
329 | " %r" % (self.revision, path)) |
|
339 | " %r" % (self.revision, path)) | |
330 | path = self._fix_path(path) |
|
340 | path = self._fix_path(path) | |
331 | id = self._get_id_for_path(path) |
|
341 | id = self._get_id_for_path(path) | |
332 | tree = self.repository._repo[id] |
|
342 | tree = self.repository._repo[id] | |
333 | dirnodes = [] |
|
343 | dirnodes = [] | |
334 | filenodes = [] |
|
344 | filenodes = [] | |
335 | als = self.repository.alias |
|
345 | als = self.repository.alias | |
336 | for name, stat, id in tree.iteritems(): |
|
346 | for name, stat, id in tree.iteritems(): | |
337 | if objects.S_ISGITLINK(stat): |
|
347 | if objects.S_ISGITLINK(stat): | |
338 | dirnodes.append(SubModuleNode(name, url=None, changeset=id, |
|
348 | dirnodes.append(SubModuleNode(name, url=None, changeset=id, | |
339 | alias=als)) |
|
349 | alias=als)) | |
340 | continue |
|
350 | continue | |
341 |
|
351 | |||
342 | obj = self.repository._repo.get_object(id) |
|
352 | obj = self.repository._repo.get_object(id) | |
343 | if path != '': |
|
353 | if path != '': | |
344 | obj_path = '/'.join((path, name)) |
|
354 | obj_path = '/'.join((path, name)) | |
345 | else: |
|
355 | else: | |
346 | obj_path = name |
|
356 | obj_path = name | |
347 | if obj_path not in self._stat_modes: |
|
357 | if obj_path not in self._stat_modes: | |
348 | self._stat_modes[obj_path] = stat |
|
358 | self._stat_modes[obj_path] = stat | |
349 | if isinstance(obj, objects.Tree): |
|
359 | if isinstance(obj, objects.Tree): | |
350 | dirnodes.append(DirNode(obj_path, changeset=self)) |
|
360 | dirnodes.append(DirNode(obj_path, changeset=self)) | |
351 | elif isinstance(obj, objects.Blob): |
|
361 | elif isinstance(obj, objects.Blob): | |
352 | filenodes.append(FileNode(obj_path, changeset=self, mode=stat)) |
|
362 | filenodes.append(FileNode(obj_path, changeset=self, mode=stat)) | |
353 | else: |
|
363 | else: | |
354 | raise ChangesetError("Requested object should be Tree " |
|
364 | raise ChangesetError("Requested object should be Tree " | |
355 | "or Blob, is %r" % type(obj)) |
|
365 | "or Blob, is %r" % type(obj)) | |
356 | nodes = dirnodes + filenodes |
|
366 | nodes = dirnodes + filenodes | |
357 | for node in nodes: |
|
367 | for node in nodes: | |
358 | if not node.path in self.nodes: |
|
368 | if not node.path in self.nodes: | |
359 | self.nodes[node.path] = node |
|
369 | self.nodes[node.path] = node | |
360 | nodes.sort() |
|
370 | nodes.sort() | |
361 | return nodes |
|
371 | return nodes | |
362 |
|
372 | |||
363 | def get_node(self, path): |
|
373 | def get_node(self, path): | |
364 | if isinstance(path, unicode): |
|
374 | if isinstance(path, unicode): | |
365 | path = path.encode('utf-8') |
|
375 | path = path.encode('utf-8') | |
366 | path = self._fix_path(path) |
|
376 | path = self._fix_path(path) | |
367 | if not path in self.nodes: |
|
377 | if not path in self.nodes: | |
368 | try: |
|
378 | try: | |
369 | id_ = self._get_id_for_path(path) |
|
379 | id_ = self._get_id_for_path(path) | |
370 | except ChangesetError: |
|
380 | except ChangesetError: | |
371 | raise NodeDoesNotExistError("Cannot find one of parents' " |
|
381 | raise NodeDoesNotExistError("Cannot find one of parents' " | |
372 | "directories for a given path: %s" % path) |
|
382 | "directories for a given path: %s" % path) | |
373 |
|
383 | |||
374 | als = self.repository.alias |
|
|||
375 | _GL = lambda m: m and objects.S_ISGITLINK(m) |
|
384 | _GL = lambda m: m and objects.S_ISGITLINK(m) | |
376 | if _GL(self._stat_modes.get(path)): |
|
385 | if _GL(self._stat_modes.get(path)): | |
377 |
node = SubModuleNode(path, url=None, changeset=id_, |
|
386 | node = SubModuleNode(path, url=None, changeset=id_, | |
|
387 | alias=self.repository.alias) | |||
378 | else: |
|
388 | else: | |
379 | obj = self.repository._repo.get_object(id_) |
|
389 | obj = self.repository._repo.get_object(id_) | |
380 |
|
390 | |||
381 | if isinstance(obj, objects.Tree): |
|
391 | if isinstance(obj, (objects.Tree, objects.Tag)): | |
382 | if path == '': |
|
392 | if path == '': | |
383 | node = RootNode(changeset=self) |
|
393 | node = RootNode(changeset=self) | |
384 | else: |
|
394 | else: | |
385 | node = DirNode(path, changeset=self) |
|
395 | node = DirNode(path, changeset=self) | |
386 | node._tree = obj |
|
396 | node._tree = obj | |
387 | elif isinstance(obj, objects.Blob): |
|
397 | elif isinstance(obj, objects.Blob): | |
388 | node = FileNode(path, changeset=self) |
|
398 | node = FileNode(path, changeset=self) | |
389 | node._blob = obj |
|
399 | node._blob = obj | |
390 | else: |
|
400 | else: | |
391 | raise NodeDoesNotExistError("There is no file nor directory " |
|
401 | raise NodeDoesNotExistError("There is no file nor directory " | |
392 | "at the given path %r at revision %r" |
|
402 | "at the given path %r at revision %r" | |
393 | % (path, self.short_id)) |
|
403 | % (path, self.short_id)) | |
394 | # cache node |
|
404 | # cache node | |
395 | self.nodes[path] = node |
|
405 | self.nodes[path] = node | |
396 | return self.nodes[path] |
|
406 | return self.nodes[path] | |
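
A short usage sketch of the tree traversal helpers above; the repository path is a placeholder:

# Illustrative only: the repository path is a placeholder.
from rhodecode.lib.vcs.backends.git import GitRepository

repo = GitRepository('/tmp/example-repo')
cs = repo.get_changeset()
root = cs.get_node('')           # RootNode for this revision
for node in cs.get_nodes(''):    # direct children: DirNode / FileNode entries
    print node.path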
397 |
|
407 | |||
398 | @LazyProperty |
|
408 | @LazyProperty | |
399 | def affected_files(self): |
|
409 | def affected_files(self): | |
400 | """ |
|
410 | """ | |
401 | Gets a fast-accessible list of file changes for the given changeset |
|
411 | Gets a fast-accessible list of file changes for the given changeset | |
402 | """ |
|
412 | """ | |
403 |
|
413 | |||
404 | return self.added + self.changed |
|
414 | return self.added + self.changed | |
405 |
|
415 | |||
406 | @LazyProperty |
|
416 | @LazyProperty | |
407 | def _diff_name_status(self): |
|
417 | def _diff_name_status(self): | |
408 | output = [] |
|
418 | output = [] | |
409 | for parent in self.parents: |
|
419 | for parent in self.parents: | |
410 | cmd = 'diff --name-status %s %s --encoding=utf8' % (parent.raw_id, self.raw_id) |
|
420 | cmd = 'diff --name-status %s %s --encoding=utf8' % (parent.raw_id, self.raw_id) | |
411 | so, se = self.repository.run_git_command(cmd) |
|
421 | so, se = self.repository.run_git_command(cmd) | |
412 | output.append(so.strip()) |
|
422 | output.append(so.strip()) | |
413 | return '\n'.join(output) |
|
423 | return '\n'.join(output) | |
414 |
|
424 | |||
415 | def _get_paths_for_status(self, status): |
|
425 | def _get_paths_for_status(self, status): | |
416 | """ |
|
426 | """ | |
417 | Returns sorted list of paths for given ``status``. |
|
427 | Returns sorted list of paths for given ``status``. | |
418 |
|
428 | |||
419 | :param status: one of: *added*, *modified* or *deleted* |
|
429 | :param status: one of: *added*, *modified* or *deleted* | |
420 | """ |
|
430 | """ | |
421 | paths = set() |
|
431 | paths = set() | |
422 | char = status[0].upper() |
|
432 | char = status[0].upper() | |
423 | for line in self._diff_name_status.splitlines(): |
|
433 | for line in self._diff_name_status.splitlines(): | |
424 | if not line: |
|
434 | if not line: | |
425 | continue |
|
435 | continue | |
426 |
|
436 | |||
427 | if line.startswith(char): |
|
437 | if line.startswith(char): | |
428 | splitted = line.split(char, 1) |
|
438 | splitted = line.split(char, 1) | |
429 | if not len(splitted) == 2: |
|
439 | if not len(splitted) == 2: | |
430 | raise VCSError("Couldn't parse diff result:\n%s\n\n and " |
|
440 | raise VCSError("Couldn't parse diff result:\n%s\n\n and " | |
431 | "particularly that line: %s" % (self._diff_name_status, |
|
441 | "particularly that line: %s" % (self._diff_name_status, | |
432 | line)) |
|
442 | line)) | |
433 | _path = splitted[1].strip() |
|
443 | _path = splitted[1].strip() | |
434 | paths.add(_path) |
|
444 | paths.add(_path) | |
435 | return sorted(paths) |
|
445 | return sorted(paths) | |
436 |
|
446 | |||
437 | @LazyProperty |
|
447 | @LazyProperty | |
438 | def added(self): |
|
448 | def added(self): | |
439 | """ |
|
449 | """ | |
440 | Returns list of added ``FileNode`` objects. |
|
450 | Returns list of added ``FileNode`` objects. | |
441 | """ |
|
451 | """ | |
442 | if not self.parents: |
|
452 | if not self.parents: | |
443 | return list(self._get_file_nodes()) |
|
453 | return list(self._get_file_nodes()) | |
444 | return [self.get_node(path) for path in self._get_paths_for_status('added')] |
|
454 | return [self.get_node(path) for path in self._get_paths_for_status('added')] | |
445 |
|
455 | |||
446 | @LazyProperty |
|
456 | @LazyProperty | |
447 | def changed(self): |
|
457 | def changed(self): | |
448 | """ |
|
458 | """ | |
449 | Returns list of modified ``FileNode`` objects. |
|
459 | Returns list of modified ``FileNode`` objects. | |
450 | """ |
|
460 | """ | |
451 | if not self.parents: |
|
461 | if not self.parents: | |
452 | return [] |
|
462 | return [] | |
453 | return [self.get_node(path) for path in self._get_paths_for_status('modified')] |
|
463 | return [self.get_node(path) for path in self._get_paths_for_status('modified')] | |
454 |
|
464 | |||
455 | @LazyProperty |
|
465 | @LazyProperty | |
456 | def removed(self): |
|
466 | def removed(self): | |
457 | """ |
|
467 | """ | |
458 | Returns list of removed ``FileNode`` objects. |
|
468 | Returns list of removed ``FileNode`` objects. | |
459 | """ |
|
469 | """ | |
460 | if not self.parents: |
|
470 | if not self.parents: | |
461 | return [] |
|
471 | return [] | |
462 | return [RemovedFileNode(path) for path in self._get_paths_for_status('deleted')] |
|
472 | return [RemovedFileNode(path) for path in self._get_paths_for_status('deleted')] |
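
The three status properties above combine into a simple change summary; a hedged usage sketch (the repository path is a placeholder):

# Illustrative only: the repository path is a placeholder.
from rhodecode.lib.vcs.backends.git import GitRepository

repo = GitRepository('/tmp/example-repo')
cs = repo.get_changeset()
print 'added:   %d' % len(cs.added)
print 'changed: %d' % len(cs.changed)
print 'removed: %d' % len(cs.removed)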
@@ -1,593 +1,599 | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | vcs.backends.git |
|
3 | vcs.backends.git | |
4 | ~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Git backend implementation. |
|
6 | Git backend implementation. | |
7 |
|
7 | |||
8 | :created_on: Apr 8, 2010 |
|
8 | :created_on: Apr 8, 2010 | |
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. |
|
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. | |
10 | """ |
|
10 | """ | |
11 |
|
11 | |||
12 | import os |
|
12 | import os | |
13 | import re |
|
13 | import re | |
14 | import time |
|
14 | import time | |
15 | import posixpath |
|
15 | import posixpath | |
16 | from dulwich.repo import Repo, NotGitRepository |
|
16 | from dulwich.repo import Repo, NotGitRepository | |
17 | #from dulwich.config import ConfigFile |
|
17 | #from dulwich.config import ConfigFile | |
18 | from string import Template |
|
18 | from string import Template | |
19 | from subprocess import Popen, PIPE |
|
19 | from subprocess import Popen, PIPE | |
20 | from rhodecode.lib.vcs.backends.base import BaseRepository |
|
20 | from rhodecode.lib.vcs.backends.base import BaseRepository | |
21 | from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError |
|
21 | from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError | |
22 | from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError |
|
22 | from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError | |
23 | from rhodecode.lib.vcs.exceptions import EmptyRepositoryError |
|
23 | from rhodecode.lib.vcs.exceptions import EmptyRepositoryError | |
24 | from rhodecode.lib.vcs.exceptions import RepositoryError |
|
24 | from rhodecode.lib.vcs.exceptions import RepositoryError | |
25 | from rhodecode.lib.vcs.exceptions import TagAlreadyExistError |
|
25 | from rhodecode.lib.vcs.exceptions import TagAlreadyExistError | |
26 | from rhodecode.lib.vcs.exceptions import TagDoesNotExistError |
|
26 | from rhodecode.lib.vcs.exceptions import TagDoesNotExistError | |
27 | from rhodecode.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp |
|
27 | from rhodecode.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp | |
28 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
28 | from rhodecode.lib.vcs.utils.lazy import LazyProperty | |
29 | from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict |
|
29 | from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict | |
30 | from rhodecode.lib.vcs.utils.paths import abspath |
|
30 | from rhodecode.lib.vcs.utils.paths import abspath | |
31 | from rhodecode.lib.vcs.utils.paths import get_user_home |
|
31 | from rhodecode.lib.vcs.utils.paths import get_user_home | |
32 | from .workdir import GitWorkdir |
|
32 | from .workdir import GitWorkdir | |
33 | from .changeset import GitChangeset |
|
33 | from .changeset import GitChangeset | |
34 | from .inmemory import GitInMemoryChangeset |
|
34 | from .inmemory import GitInMemoryChangeset | |
35 | from .config import ConfigFile |
|
35 | from .config import ConfigFile | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | class GitRepository(BaseRepository): |
|
38 | class GitRepository(BaseRepository): | |
39 | """ |
|
39 | """ | |
40 | Git repository backend. |
|
40 | Git repository backend. | |
41 | """ |
|
41 | """ | |
42 | DEFAULT_BRANCH_NAME = 'master' |
|
42 | DEFAULT_BRANCH_NAME = 'master' | |
43 | scm = 'git' |
|
43 | scm = 'git' | |
44 |
|
44 | |||
45 | def __init__(self, repo_path, create=False, src_url=None, |
|
45 | def __init__(self, repo_path, create=False, src_url=None, | |
46 | update_after_clone=False, bare=False): |
|
46 | update_after_clone=False, bare=False): | |
47 |
|
47 | |||
48 | self.path = abspath(repo_path) |
|
48 | self.path = abspath(repo_path) | |
49 | self._repo = self._get_repo(create, src_url, update_after_clone, bare) |
|
49 | self._repo = self._get_repo(create, src_url, update_after_clone, bare) | |
50 | #temporary set that to now at later we will move it to constructor |
|
50 | #temporary set that to now at later we will move it to constructor | |
51 | baseui = None |
|
51 | baseui = None | |
52 | if baseui is None: |
|
52 | if baseui is None: | |
53 | from mercurial.ui import ui |
|
53 | from mercurial.ui import ui | |
54 | baseui = ui() |
|
54 | baseui = ui() | |
55 | # patch the GitRepo instance with a "fake" ui object to add a |
|
55 | # patch the GitRepo instance with a "fake" ui object to add a | |
56 | # compatibility layer with Mercurial |
|
56 | # compatibility layer with Mercurial | |
57 | setattr(self._repo, 'ui', baseui) |
|
57 | setattr(self._repo, 'ui', baseui) | |
58 |
|
58 | |||
59 | try: |
|
59 | try: | |
60 | self.head = self._repo.head() |
|
60 | self.head = self._repo.head() | |
61 | except KeyError: |
|
61 | except KeyError: | |
62 | self.head = None |
|
62 | self.head = None | |
63 |
|
63 | |||
64 | self._config_files = [ |
|
64 | self._config_files = [ | |
65 | bare and abspath(self.path, 'config') or abspath(self.path, '.git', |
|
65 | bare and abspath(self.path, 'config') or abspath(self.path, '.git', | |
66 | 'config'), |
|
66 | 'config'), | |
67 | abspath(get_user_home(), '.gitconfig'), |
|
67 | abspath(get_user_home(), '.gitconfig'), | |
68 | ] |
|
68 | ] | |
69 | self.bare = self._repo.bare |
|
69 | self.bare = self._repo.bare | |
70 |
|
70 | |||
71 | @LazyProperty |
|
71 | @LazyProperty | |
72 | def revisions(self): |
|
72 | def revisions(self): | |
73 | """ |
|
73 | """ | |
74 | Returns a list of revision ids, in ascending order. Being a lazy |
|
74 | Returns a list of revision ids, in ascending order. Being a lazy | |
75 | attribute allows external tools to inject shas from cache. |
|
75 | attribute allows external tools to inject shas from cache. | |
76 | """ |
|
76 | """ | |
77 | return self._get_all_revisions() |
|
77 | return self._get_all_revisions() | |
78 |
|
78 | |||
79 | def run_git_command(self, cmd): |
|
79 | def run_git_command(self, cmd): | |
80 | """ |
|
80 | """ | |
81 | Runs given ``cmd`` as a git command and returns a tuple of |
|
81 | Runs given ``cmd`` as a git command and returns a tuple of | |
82 | (stdout, stderr). |
|
82 | (stdout, stderr). | |
83 |
|
83 | |||
84 | .. note:: |
|
84 | .. note:: | |
85 | This method exists only until log/blame functionality is implemented |
|
85 | This method exists only until log/blame functionality is implemented | |
86 | at Dulwich (see https://bugs.launchpad.net/bugs/645142). Parsing |
|
86 | at Dulwich (see https://bugs.launchpad.net/bugs/645142). Parsing | |
87 | an os command's output is a road to hell... |
|
87 | an os command's output is a road to hell... | |
88 |
|
88 | |||
89 | :param cmd: git command to be executed |
|
89 | :param cmd: git command to be executed | |
90 | """ |
|
90 | """ | |
91 |
|
91 | |||
92 | _copts = ['-c', 'core.quotepath=false', ] |
|
92 | _copts = ['-c', 'core.quotepath=false', ] | |
93 | _str_cmd = False |
|
93 | _str_cmd = False | |
94 | if isinstance(cmd, basestring): |
|
94 | if isinstance(cmd, basestring): | |
95 | cmd = [cmd] |
|
95 | cmd = [cmd] | |
96 | _str_cmd = True |
|
96 | _str_cmd = True | |
97 |
|
97 | |||
98 | gitenv = os.environ |
|
98 | gitenv = os.environ | |
99 | gitenv['GIT_CONFIG_NOGLOBAL'] = '1' |
|
99 | gitenv['GIT_CONFIG_NOGLOBAL'] = '1' | |
100 |
|
100 | |||
101 | cmd = ['git'] + _copts + cmd |
|
101 | cmd = ['git'] + _copts + cmd | |
102 | if _str_cmd: |
|
102 | if _str_cmd: | |
103 | cmd = ' '.join(cmd) |
|
103 | cmd = ' '.join(cmd) | |
104 | try: |
|
104 | try: | |
105 | opts = dict( |
|
105 | opts = dict( | |
106 | shell=isinstance(cmd, basestring), |
|
106 | shell=isinstance(cmd, basestring), | |
107 | stdout=PIPE, |
|
107 | stdout=PIPE, | |
108 | stderr=PIPE, |
|
108 | stderr=PIPE, | |
109 | env=gitenv, |
|
109 | env=gitenv, | |
110 | ) |
|
110 | ) | |
111 | if os.path.isdir(self.path): |
|
111 | if os.path.isdir(self.path): | |
112 | opts['cwd'] = self.path |
|
112 | opts['cwd'] = self.path | |
113 | p = Popen(cmd, **opts) |
|
113 | p = Popen(cmd, **opts) | |
114 | except OSError, err: |
|
114 | except OSError, err: | |
115 | raise RepositoryError("Couldn't run git command (%s).\n" |
|
115 | raise RepositoryError("Couldn't run git command (%s).\n" | |
116 | "Original error was:%s" % (cmd, err)) |
|
116 | "Original error was:%s" % (cmd, err)) | |
117 | so, se = p.communicate() |
|
117 | so, se = p.communicate() | |
118 | if not se.startswith("fatal: bad default revision 'HEAD'") and \ |
|
118 | if not se.startswith("fatal: bad default revision 'HEAD'") and \ | |
119 | p.returncode != 0: |
|
119 | p.returncode != 0: | |
120 | raise RepositoryError("Couldn't run git command (%s).\n" |
|
120 | raise RepositoryError("Couldn't run git command (%s).\n" | |
121 | "stderr:\n%s" % (cmd, se)) |
|
121 | "stderr:\n%s" % (cmd, se)) | |
122 | return so, se |
|
122 | return so, se | |
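
A hedged example of the two call styles the wrapper accepts (a plain string is joined and run through a shell, a list is passed to Popen directly); the repository path is a placeholder:

# Illustrative only: the repository path is a placeholder.
from rhodecode.lib.vcs.backends.git import GitRepository

repo = GitRepository('/tmp/example-repo')
so, se = repo.run_git_command('rev-parse HEAD')       # string form, shell=True
so, se = repo.run_git_command(['rev-parse', 'HEAD'])  # list form, no shell
print so.strip()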
123 |
|
123 | |||
124 | def _check_url(self, url): |
|
124 | def _check_url(self, url): | |
125 | """ |
|
125 | """ | |
126 | Function will check the given url and try to verify if it's a valid |
|
126 | Function will check the given url and try to verify if it's a valid | |
127 | link. Sometimes it may happen that mercurial will issue a basic |
|
127 | link. Sometimes it may happen that mercurial will issue a basic | |
128 | auth request that can cause the whole API to hang when used from python |
|
128 | auth request that can cause the whole API to hang when used from python | |
129 | or other external calls. |
|
129 | or other external calls. | |
130 |
|
130 | |||
131 | On failures it'll raise urllib2.HTTPError |
|
131 | On failures it'll raise urllib2.HTTPError | |
132 | """ |
|
132 | """ | |
133 |
|
133 | |||
134 | #TODO: implement this |
|
134 | #TODO: implement this | |
135 | pass |
|
135 | pass | |
136 |
|
136 | |||
137 | def _get_repo(self, create, src_url=None, update_after_clone=False, |
|
137 | def _get_repo(self, create, src_url=None, update_after_clone=False, | |
138 | bare=False): |
|
138 | bare=False): | |
139 | if create and os.path.exists(self.path): |
|
139 | if create and os.path.exists(self.path): | |
140 | raise RepositoryError("Location already exists") |
|
140 | raise RepositoryError("Location already exists") | |
141 | if src_url and not create: |
|
141 | if src_url and not create: | |
142 | raise RepositoryError("Create should be set to True if src_url is " |
|
142 | raise RepositoryError("Create should be set to True if src_url is " | |
143 | "given (clone operation creates repository)") |
|
143 | "given (clone operation creates repository)") | |
144 | try: |
|
144 | try: | |
145 | if create and src_url: |
|
145 | if create and src_url: | |
146 | self._check_url(src_url) |
|
146 | self._check_url(src_url) | |
147 | self.clone(src_url, update_after_clone, bare) |
|
147 | self.clone(src_url, update_after_clone, bare) | |
148 | return Repo(self.path) |
|
148 | return Repo(self.path) | |
149 | elif create: |
|
149 | elif create: | |
150 | os.mkdir(self.path) |
|
150 | os.mkdir(self.path) | |
151 | if bare: |
|
151 | if bare: | |
152 | return Repo.init_bare(self.path) |
|
152 | return Repo.init_bare(self.path) | |
153 | else: |
|
153 | else: | |
154 | return Repo.init(self.path) |
|
154 | return Repo.init(self.path) | |
155 | else: |
|
155 | else: | |
156 | return Repo(self.path) |
|
156 | return Repo(self.path) | |
157 | except (NotGitRepository, OSError), err: |
|
157 | except (NotGitRepository, OSError), err: | |
158 | raise RepositoryError(err) |
|
158 | raise RepositoryError(err) | |
159 |
|
159 | |||
160 | def _get_all_revisions(self): |
|
160 | def _get_all_revisions(self): | |
161 | cmd = 'rev-list --all --reverse --date-order' |
|
161 | cmd = 'rev-list --all --reverse --date-order' | |
162 | try: |
|
162 | try: | |
163 | so, se = self.run_git_command(cmd) |
|
163 | so, se = self.run_git_command(cmd) | |
164 | except RepositoryError: |
|
164 | except RepositoryError: | |
165 | # Can be raised for empty repositories |
|
165 | # Can be raised for empty repositories | |
166 | return [] |
|
166 | return [] | |
167 | return so.splitlines() |
|
167 | return so.splitlines() | |
168 |
|
168 | |||
169 | def _get_all_revisions2(self): |
|
169 | def _get_all_revisions2(self): | |
170 | #alternate implementation using dulwich |
|
170 | #alternate implementation using dulwich | |
171 | includes = [x[1][0] for x in self._parsed_refs.iteritems() |
|
171 | includes = [x[1][0] for x in self._parsed_refs.iteritems() | |
172 | if x[1][1] != 'T'] |
|
172 | if x[1][1] != 'T'] | |
173 | return [c.commit.id for c in self._repo.get_walker(include=includes)] |
|
173 | return [c.commit.id for c in self._repo.get_walker(include=includes)] | |
174 |
|
174 | |||
175 | def _get_revision(self, revision): |
|
175 | def _get_revision(self, revision): | |
176 | """ |
|
176 | """ | |
177 | For git backend we always return integer here. This way we ensure |
|
177 | For git backend we always return integer here. This way we ensure | |
178 | that the changeset's revision attribute would become an integer. |
|
178 | that the changeset's revision attribute would become an integer. | |
179 | """ |
|
179 | """ | |
180 | pattern = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$') |
|
180 | pattern = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$') | |
181 | is_bstr = lambda o: isinstance(o, (str, unicode)) |
|
181 | is_bstr = lambda o: isinstance(o, (str, unicode)) | |
182 | is_null = lambda o: len(o) == revision.count('0') |
|
182 | is_null = lambda o: len(o) == revision.count('0') | |
183 |
|
183 | |||
184 | if len(self.revisions) == 0: |
|
184 | if len(self.revisions) == 0: | |
185 | raise EmptyRepositoryError("There are no changesets yet") |
|
185 | raise EmptyRepositoryError("There are no changesets yet") | |
186 |
|
186 | |||
187 | if revision in (None, '', 'tip', 'HEAD', 'head', -1): |
|
187 | if revision in (None, '', 'tip', 'HEAD', 'head', -1): | |
188 | revision = self.revisions[-1] |
|
188 | revision = self.revisions[-1] | |
189 |
|
189 | |||
190 | if ((is_bstr(revision) and revision.isdigit() and len(revision) < 12) |
|
190 | if ((is_bstr(revision) and revision.isdigit() and len(revision) < 12) | |
191 | or isinstance(revision, int) or is_null(revision)): |
|
191 | or isinstance(revision, int) or is_null(revision)): | |
192 | try: |
|
192 | try: | |
193 | revision = self.revisions[int(revision)] |
|
193 | revision = self.revisions[int(revision)] | |
194 | except: |
|
194 | except: | |
195 | raise ChangesetDoesNotExistError("Revision %r does not exist " |
|
195 | raise ChangesetDoesNotExistError("Revision %r does not exist " | |
196 | "for this repository %s" % (revision, self)) |
|
196 | "for this repository %s" % (revision, self)) | |
197 |
|
197 | |||
198 | elif is_bstr(revision): |
|
198 | elif is_bstr(revision): | |
|
199 | # get by branch/tag name | |||
199 | _ref_revision = self._parsed_refs.get(revision) |
|
200 | _ref_revision = self._parsed_refs.get(revision) | |
|
201 | _tags_shas = self.tags.values() | |||
200 | if _ref_revision: # and _ref_revision[1] in ['H', 'RH', 'T']: |
|
202 | if _ref_revision: # and _ref_revision[1] in ['H', 'RH', 'T']: | |
201 | return _ref_revision[0] |
|
203 | return _ref_revision[0] | |
202 |
|
204 | |||
|
205 | # maybe it's a tag ? we don't have them in self.revisions | |||
|
206 | elif revision in _tags_shas: | |||
|
207 | return _tags_shas[_tags_shas.index(revision)] | |||
|
208 | ||||
203 | elif not pattern.match(revision) or revision not in self.revisions: |
|
209 | elif not pattern.match(revision) or revision not in self.revisions: | |
204 | raise ChangesetDoesNotExistError("Revision %r does not exist " |
|
210 | raise ChangesetDoesNotExistError("Revision %r does not exist " | |
205 | "for this repository %s" % (revision, self)) |
|
211 | "for this repository %s" % (revision, self)) | |
206 |
|
212 | |||
207 | # Ensure we return full id |
|
213 | # Ensure we return full id | |
208 | if not pattern.match(str(revision)): |
|
214 | if not pattern.match(str(revision)): | |
209 | raise ChangesetDoesNotExistError("Given revision %r not recognized" |
|
215 | raise ChangesetDoesNotExistError("Given revision %r not recognized" | |
210 | % revision) |
|
216 | % revision) | |
211 | return revision |
|
217 | return revision | |
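
The resolution order above accepts several revision spellings; a hedged sketch (all values are placeholders):

# Illustrative only: all values are placeholders.
from rhodecode.lib.vcs.backends.git import GitRepository

repo = GitRepository('/tmp/example-repo')
repo.get_changeset(0)            # numeric index into self.revisions
repo.get_changeset('master')     # branch/tag name resolved via _parsed_refs
repo.get_changeset()             # None/'tip'/'HEAD' -> most recent commit
# a full 40-character sha is also accepted and returned as-is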
212 |
|
218 | |||
213 | def _get_archives(self, archive_name='tip'): |
|
219 | def _get_archives(self, archive_name='tip'): | |
214 |
|
220 | |||
215 | for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]: |
|
221 | for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]: | |
216 | yield {"type": i[0], "extension": i[1], "node": archive_name} |
|
222 | yield {"type": i[0], "extension": i[1], "node": archive_name} | |
217 |
|
223 | |||
218 | def _get_url(self, url): |
|
224 | def _get_url(self, url): | |
219 | """ |
|
225 | """ | |
220 | Returns normalized url. If schema is not given, would fall back to |
|
226 | Returns normalized url. If schema is not given, would fall back to | |
221 | filesystem (``file:///``) schema. |
|
227 | filesystem (``file:///``) schema. | |
222 | """ |
|
228 | """ | |
223 | url = str(url) |
|
229 | url = str(url) | |
224 | if url != 'default' and not '://' in url: |
|
230 | if url != 'default' and not '://' in url: | |
225 | url = ':///'.join(('file', url)) |
|
231 | url = ':///'.join(('file', url)) | |
226 | return url |
|
232 | return url | |
227 |
|
233 | |||
228 | @LazyProperty |
|
234 | @LazyProperty | |
229 | def name(self): |
|
235 | def name(self): | |
230 | return os.path.basename(self.path) |
|
236 | return os.path.basename(self.path) | |
231 |
|
237 | |||
232 | @LazyProperty |
|
238 | @LazyProperty | |
233 | def last_change(self): |
|
239 | def last_change(self): | |
234 | """ |
|
240 | """ | |
235 | Returns last change made on this repository as datetime object |
|
241 | Returns last change made on this repository as datetime object | |
236 | """ |
|
242 | """ | |
237 | return date_fromtimestamp(self._get_mtime(), makedate()[1]) |
|
243 | return date_fromtimestamp(self._get_mtime(), makedate()[1]) | |
238 |
|
244 | |||
239 | def _get_mtime(self): |
|
245 | def _get_mtime(self): | |
240 | try: |
|
246 | try: | |
241 | return time.mktime(self.get_changeset().date.timetuple()) |
|
247 | return time.mktime(self.get_changeset().date.timetuple()) | |
242 | except RepositoryError: |
|
248 | except RepositoryError: | |
243 | idx_loc = '' if self.bare else '.git' |
|
249 | idx_loc = '' if self.bare else '.git' | |
244 | # fallback to filesystem |
|
250 | # fallback to filesystem | |
245 | in_path = os.path.join(self.path, idx_loc, "index") |
|
251 | in_path = os.path.join(self.path, idx_loc, "index") | |
246 | he_path = os.path.join(self.path, idx_loc, "HEAD") |
|
252 | he_path = os.path.join(self.path, idx_loc, "HEAD") | |
247 | if os.path.exists(in_path): |
|
253 | if os.path.exists(in_path): | |
248 | return os.stat(in_path).st_mtime |
|
254 | return os.stat(in_path).st_mtime | |
249 | else: |
|
255 | else: | |
250 | return os.stat(he_path).st_mtime |
|
256 | return os.stat(he_path).st_mtime | |
251 |
|
257 | |||
252 | @LazyProperty |
|
258 | @LazyProperty | |
253 | def description(self): |
|
259 | def description(self): | |
254 | idx_loc = '' if self.bare else '.git' |
|
260 | idx_loc = '' if self.bare else '.git' | |
255 | undefined_description = u'unknown' |
|
261 | undefined_description = u'unknown' | |
256 | description_path = os.path.join(self.path, idx_loc, 'description') |
|
262 | description_path = os.path.join(self.path, idx_loc, 'description') | |
257 | if os.path.isfile(description_path): |
|
263 | if os.path.isfile(description_path): | |
258 | return safe_unicode(open(description_path).read()) |
|
264 | return safe_unicode(open(description_path).read()) | |
259 | else: |
|
265 | else: | |
260 | return undefined_description |
|
266 | return undefined_description | |
261 |
|
267 | |||
262 | @LazyProperty |
|
268 | @LazyProperty | |
263 | def contact(self): |
|
269 | def contact(self): | |
264 | undefined_contact = u'Unknown' |
|
270 | undefined_contact = u'Unknown' | |
265 | return undefined_contact |
|
271 | return undefined_contact | |
266 |
|
272 | |||
267 | @property |
|
273 | @property | |
268 | def branches(self): |
|
274 | def branches(self): | |
269 | if not self.revisions: |
|
275 | if not self.revisions: | |
270 | return {} |
|
276 | return {} | |
271 | sortkey = lambda ctx: ctx[0] |
|
277 | sortkey = lambda ctx: ctx[0] | |
272 | _branches = [(x[0], x[1][0]) |
|
278 | _branches = [(x[0], x[1][0]) | |
273 | for x in self._parsed_refs.iteritems() if x[1][1] == 'H'] |
|
279 | for x in self._parsed_refs.iteritems() if x[1][1] == 'H'] | |
274 | return OrderedDict(sorted(_branches, key=sortkey, reverse=False)) |
|
280 | return OrderedDict(sorted(_branches, key=sortkey, reverse=False)) | |
275 |
|
281 | |||
276 | @LazyProperty |
|
282 | @LazyProperty | |
277 | def tags(self): |
|
283 | def tags(self): | |
278 | return self._get_tags() |
|
284 | return self._get_tags() | |
279 |
|
285 | |||
280 | def _get_tags(self): |
|
286 | def _get_tags(self): | |
281 | if not self.revisions: |
|
287 | if not self.revisions: | |
282 | return {} |
|
288 | return {} | |
283 |
|
289 | |||
284 | sortkey = lambda ctx: ctx[0] |
|
290 | sortkey = lambda ctx: ctx[0] | |
285 | _tags = [(x[0], x[1][0]) |
|
291 | _tags = [(x[0], x[1][0]) | |
286 | for x in self._parsed_refs.iteritems() if x[1][1] == 'T'] |
|
292 | for x in self._parsed_refs.iteritems() if x[1][1] == 'T'] | |
287 | return OrderedDict(sorted(_tags, key=sortkey, reverse=True)) |
|
293 | return OrderedDict(sorted(_tags, key=sortkey, reverse=True)) | |
288 |
|
294 | |||
289 | def tag(self, name, user, revision=None, message=None, date=None, |
|
295 | def tag(self, name, user, revision=None, message=None, date=None, | |
290 | **kwargs): |
|
296 | **kwargs): | |
291 | """ |
|
297 | """ | |
292 | Creates and returns a tag for the given ``revision``. |
|
298 | Creates and returns a tag for the given ``revision``. | |
293 |
|
299 | |||
294 | :param name: name for new tag |
|
300 | :param name: name for new tag | |
295 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
301 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
296 | :param revision: changeset id for which new tag would be created |
|
302 | :param revision: changeset id for which new tag would be created | |
297 | :param message: message of the tag's commit |
|
303 | :param message: message of the tag's commit | |
298 | :param date: date of tag's commit |
|
304 | :param date: date of tag's commit | |
299 |
|
305 | |||
300 | :raises TagAlreadyExistError: if tag with same name already exists |
|
306 | :raises TagAlreadyExistError: if tag with same name already exists | |
301 | """ |
|
307 | """ | |
302 | if name in self.tags: |
|
308 | if name in self.tags: | |
303 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
309 | raise TagAlreadyExistError("Tag %s already exists" % name) | |
304 | changeset = self.get_changeset(revision) |
|
310 | changeset = self.get_changeset(revision) | |
305 | message = message or "Added tag %s for commit %s" % (name, |
|
311 | message = message or "Added tag %s for commit %s" % (name, | |
306 | changeset.raw_id) |
|
312 | changeset.raw_id) | |
307 | self._repo.refs["refs/tags/%s" % name] = changeset._commit.id |
|
313 | self._repo.refs["refs/tags/%s" % name] = changeset._commit.id | |
308 |
|
314 | |||
309 | self.tags = self._get_tags() |
|
315 | self.tags = self._get_tags() | |
310 | return changeset |
|
316 | return changeset | |
311 |
|
317 | |||
312 | def remove_tag(self, name, user, message=None, date=None): |
|
318 | def remove_tag(self, name, user, message=None, date=None): | |
313 | """ |
|
319 | """ | |
314 | Removes tag with the given ``name``. |
|
320 | Removes tag with the given ``name``. | |
315 |
|
321 | |||
316 | :param name: name of the tag to be removed |
|
322 | :param name: name of the tag to be removed | |
317 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
323 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
318 | :param message: message of the tag's removal commit |
|
324 | :param message: message of the tag's removal commit | |
319 | :param date: date of tag's removal commit |
|
325 | :param date: date of tag's removal commit | |
320 |
|
326 | |||
321 | :raises TagDoesNotExistError: if tag with given name does not exist |
|
327 | :raises TagDoesNotExistError: if tag with given name does not exist | |
322 | """ |
|
328 | """ | |
323 | if name not in self.tags: |
|
329 | if name not in self.tags: | |
324 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
330 | raise TagDoesNotExistError("Tag %s does not exist" % name) | |
325 | tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name) |
|
331 | tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name) | |
326 | try: |
|
332 | try: | |
327 | os.remove(tagpath) |
|
333 | os.remove(tagpath) | |
328 | self.tags = self._get_tags() |
|
334 | self.tags = self._get_tags() | |
329 | except OSError, e: |
|
335 | except OSError, e: | |
330 | raise RepositoryError(e.strerror) |
|
336 | raise RepositoryError(e.strerror) | |
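
Creating and removing a tag with the two methods above; the tag name and user string are placeholders:

# Illustrative only: tag name and user are placeholders.
from rhodecode.lib.vcs.backends.git import GitRepository

repo = GitRepository('/tmp/example-repo')
tip = repo.get_changeset()
repo.tag('v0.1', 'Joe Doe <joe.doe@example.com>', revision=tip.raw_id)
# ... later on
repo.remove_tag('v0.1', 'Joe Doe <joe.doe@example.com>')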
331 |
|
337 | |||
332 | @LazyProperty |
|
338 | @LazyProperty | |
333 | def _parsed_refs(self): |
|
339 | def _parsed_refs(self): | |
334 | refs = self._repo.get_refs() |
|
340 | refs = self._repo.get_refs() | |
335 | keys = [('refs/heads/', 'H'), |
|
341 | keys = [('refs/heads/', 'H'), | |
336 | ('refs/remotes/origin/', 'RH'), |
|
342 | ('refs/remotes/origin/', 'RH'), | |
337 | ('refs/tags/', 'T')] |
|
343 | ('refs/tags/', 'T')] | |
338 | _refs = {} |
|
344 | _refs = {} | |
339 | for ref, sha in refs.iteritems(): |
|
345 | for ref, sha in refs.iteritems(): | |
340 | for k, type_ in keys: |
|
346 | for k, type_ in keys: | |
341 | if ref.startswith(k): |
|
347 | if ref.startswith(k): | |
342 | _key = ref[len(k):] |
|
348 | _key = ref[len(k):] | |
343 | _refs[_key] = [sha, type_] |
|
349 | _refs[_key] = [sha, type_] | |
344 | break |
|
350 | break | |
345 | return _refs |
|
351 | return _refs | |
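
The parsed refs map above keys each short ref name to a ``[sha, type]`` pair, where type is 'H' (local head), 'RH' (remote head) or 'T' (tag); an illustrative shape (shas are placeholders):

# Illustrative shape only; shas are placeholders.
# repo._parsed_refs == {
#     'master':  ['0123456789abcdef0123456789abcdef01234567', 'H'],
#     'develop': ['89abcdef0123456789abcdef0123456789abcdef', 'RH'],
#     'v0.1':    ['456789abcdef0123456789abcdef0123456789ab', 'T'],
# }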
346 |
|
352 | |||
347 | def _heads(self, reverse=False): |
|
353 | def _heads(self, reverse=False): | |
348 | refs = self._repo.get_refs() |
|
354 | refs = self._repo.get_refs() | |
349 | heads = {} |
|
355 | heads = {} | |
350 |
|
356 | |||
351 | for key, val in refs.items(): |
|
357 | for key, val in refs.items(): | |
352 | for ref_key in ['refs/heads/', 'refs/remotes/origin/']: |
|
358 | for ref_key in ['refs/heads/', 'refs/remotes/origin/']: | |
353 | if key.startswith(ref_key): |
|
359 | if key.startswith(ref_key): | |
354 | n = key[len(ref_key):] |
|
360 | n = key[len(ref_key):] | |
355 | if n not in ['HEAD']: |
|
361 | if n not in ['HEAD']: | |
356 | heads[n] = val |
|
362 | heads[n] = val | |
357 |
|
363 | |||
358 | return heads if reverse else dict((y, x) for x, y in heads.iteritems()) |
|
364 | return heads if reverse else dict((y, x) for x, y in heads.iteritems()) | |
359 |
|
365 | |||
360 | def get_changeset(self, revision=None): |
|
366 | def get_changeset(self, revision=None): | |
361 | """ |
|
367 | """ | |
362 | Returns ``GitChangeset`` object representing commit from git repository |
|
368 | Returns ``GitChangeset`` object representing commit from git repository | |
363 | at the given revision or head (most recent commit) if None given. |
|
369 | at the given revision or head (most recent commit) if None given. | |
364 | """ |
|
370 | """ | |
365 | if isinstance(revision, GitChangeset): |
|
371 | if isinstance(revision, GitChangeset): | |
366 | return revision |
|
372 | return revision | |
367 | revision = self._get_revision(revision) |
|
373 | revision = self._get_revision(revision) | |
368 | changeset = GitChangeset(repository=self, revision=revision) |
|
374 | changeset = GitChangeset(repository=self, revision=revision) | |
369 | return changeset |
|
375 | return changeset | |
370 |
|
376 | |||
371 | def get_changesets(self, start=None, end=None, start_date=None, |
|
377 | def get_changesets(self, start=None, end=None, start_date=None, | |
372 | end_date=None, branch_name=None, reverse=False): |
|
378 | end_date=None, branch_name=None, reverse=False): | |
373 | """ |
|
379 | """ | |
374 | Returns iterator of ``GitChangeset`` objects from start to end (both |
|
380 | Returns iterator of ``GitChangeset`` objects from start to end (both | |
375 | are inclusive), in ascending date order (unless ``reverse`` is set). |
|
381 | are inclusive), in ascending date order (unless ``reverse`` is set). | |
376 |
|
382 | |||
377 | :param start: changeset ID, as str; first returned changeset |
|
383 | :param start: changeset ID, as str; first returned changeset | |
378 | :param end: changeset ID, as str; last returned changeset |
|
384 | :param end: changeset ID, as str; last returned changeset | |
379 | :param start_date: if specified, changesets with commit date less than |
|
385 | :param start_date: if specified, changesets with commit date less than | |
380 | ``start_date`` would be filtered out from returned set |
|
386 | ``start_date`` would be filtered out from returned set | |
381 | :param end_date: if specified, changesets with commit date greater than |
|
387 | :param end_date: if specified, changesets with commit date greater than | |
382 | ``end_date`` would be filtered out from returned set |
|
388 | ``end_date`` would be filtered out from returned set | |
383 | :param branch_name: if specified, changesets not reachable from given |
|
389 | :param branch_name: if specified, changesets not reachable from given | |
384 | branch would be filtered out from returned set |
|
390 | branch would be filtered out from returned set | |
385 | :param reverse: if ``True``, returned generator would be reversed |
|
391 | :param reverse: if ``True``, returned generator would be reversed | |
386 | (meaning that returned changesets would have descending date order) |
|
392 | (meaning that returned changesets would have descending date order) | |
387 |
|
393 | |||
388 | :raise BranchDoesNotExistError: If given ``branch_name`` does not |
|
394 | :raise BranchDoesNotExistError: If given ``branch_name`` does not | |
389 | exist. |
|
395 | exist. | |
390 | :raise ChangesetDoesNotExistError: If changeset for given ``start`` or |
|
396 | :raise ChangesetDoesNotExistError: If changeset for given ``start`` or | |
391 | ``end`` could not be found. |
|
397 | ``end`` could not be found. | |
392 |
|
398 | |||
393 | """ |
|
399 | """ | |
394 | if branch_name and branch_name not in self.branches: |
|
400 | if branch_name and branch_name not in self.branches: | |
395 | raise BranchDoesNotExistError("Branch '%s' not found" \ |
|
401 | raise BranchDoesNotExistError("Branch '%s' not found" \ | |
396 | % branch_name) |
|
402 | % branch_name) | |
397 | # %H at format means (full) commit hash, initial hashes are retrieved |
|
403 | # %H at format means (full) commit hash, initial hashes are retrieved | |
398 | # in ascending date order |
|
404 | # in ascending date order | |
399 | cmd_template = 'log --date-order --reverse --pretty=format:"%H"' |
|
405 | cmd_template = 'log --date-order --reverse --pretty=format:"%H"' | |
400 | cmd_params = {} |
|
406 | cmd_params = {} | |
401 | if start_date: |
|
407 | if start_date: | |
402 | cmd_template += ' --since "$since"' |
|
408 | cmd_template += ' --since "$since"' | |
403 | cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S') |
|
409 | cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S') | |
404 | if end_date: |
|
410 | if end_date: | |
405 | cmd_template += ' --until "$until"' |
|
411 | cmd_template += ' --until "$until"' | |
406 | cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S') |
|
412 | cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S') | |
407 | if branch_name: |
|
413 | if branch_name: | |
408 | cmd_template += ' $branch_name' |
|
414 | cmd_template += ' $branch_name' | |
409 | cmd_params['branch_name'] = branch_name |
|
415 | cmd_params['branch_name'] = branch_name | |
410 | else: |
|
416 | else: | |
411 | cmd_template += ' --all' |
|
417 | cmd_template += ' --all' | |
412 |
|
418 | |||
413 | cmd = Template(cmd_template).safe_substitute(**cmd_params) |
|
419 | cmd = Template(cmd_template).safe_substitute(**cmd_params) | |
414 | revs = self.run_git_command(cmd)[0].splitlines() |
|
420 | revs = self.run_git_command(cmd)[0].splitlines() | |
415 | start_pos = 0 |
|
421 | start_pos = 0 | |
416 | end_pos = len(revs) |
|
422 | end_pos = len(revs) | |
417 | if start: |
|
423 | if start: | |
418 | _start = self._get_revision(start) |
|
424 | _start = self._get_revision(start) | |
419 | try: |
|
425 | try: | |
420 | start_pos = revs.index(_start) |
|
426 | start_pos = revs.index(_start) | |
421 | except ValueError: |
|
427 | except ValueError: | |
422 | pass |
|
428 | pass | |
423 |
|
429 | |||
424 | if end is not None: |
|
430 | if end is not None: | |
425 | _end = self._get_revision(end) |
|
431 | _end = self._get_revision(end) | |
426 | try: |
|
432 | try: | |
427 | end_pos = revs.index(_end) |
|
433 | end_pos = revs.index(_end) | |
428 | except ValueError: |
|
434 | except ValueError: | |
429 | pass |
|
435 | pass | |
430 |
|
436 | |||
431 | if None not in [start, end] and start_pos > end_pos: |
|
437 | if None not in [start, end] and start_pos > end_pos: | |
432 | raise RepositoryError('start cannot be after end') |
|
438 | raise RepositoryError('start cannot be after end') | |
433 |
|
439 | |||
434 | if end_pos is not None: |
|
440 | if end_pos is not None: | |
435 | end_pos += 1 |
|
441 | end_pos += 1 | |
436 |
|
442 | |||
437 | revs = revs[start_pos:end_pos] |
|
443 | revs = revs[start_pos:end_pos] | |
438 | if reverse: |
|
444 | if reverse: | |
439 | revs = reversed(revs) |
|
445 | revs = reversed(revs) | |
440 | for rev in revs: |
|
446 | for rev in revs: | |
441 | yield self.get_changeset(rev) |
|
447 | yield self.get_changeset(rev) | |
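
A hedged sketch of driving the generator above with a date window and branch filter; dates and branch name are illustrative:

# Illustrative only: dates and branch name are placeholders.
import datetime
from rhodecode.lib.vcs.backends.git import GitRepository

repo = GitRepository('/tmp/example-repo')
since = datetime.datetime(2012, 1, 1)
for cs in repo.get_changesets(start_date=since, branch_name='master',
                              reverse=True):
    print cs.raw_id, cs.date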
442 |
|
448 | |||
443 | def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False, |
|
449 | def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False, | |
444 | context=3): |
|
450 | context=3): | |
445 | """ |
|
451 | """ | |
446 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
452 | Returns (git like) *diff*, as plain text. Shows changes introduced by | |
447 | ``rev2`` since ``rev1``. |
|
453 | ``rev2`` since ``rev1``. | |
448 |
|
454 | |||
449 | :param rev1: Entry point from which diff is shown. Can be |
|
455 | :param rev1: Entry point from which diff is shown. Can be | |
450 | ``self.EMPTY_CHANGESET`` - in this case, patch showing all |
|
456 | ``self.EMPTY_CHANGESET`` - in this case, patch showing all | |
451 | the changes since empty state of the repository until ``rev2`` |
|
457 | the changes since empty state of the repository until ``rev2`` | |
452 | :param rev2: Until which revision changes should be shown. |
|
458 | :param rev2: Until which revision changes should be shown. | |
453 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
459 | :param ignore_whitespace: If set to ``True``, would not show whitespace | |
454 | changes. Defaults to ``False``. |
|
460 | changes. Defaults to ``False``. | |
455 | :param context: How many lines before/after changed lines should be |
|
461 | :param context: How many lines before/after changed lines should be | |
456 | shown. Defaults to ``3``. |
|
462 | shown. Defaults to ``3``. | |
457 | """ |
|
463 | """ | |
458 | flags = ['-U%s' % context] |
|
464 | flags = ['-U%s' % context] | |
459 | if ignore_whitespace: |
|
465 | if ignore_whitespace: | |
460 | flags.append('-w') |
|
466 | flags.append('-w') | |
461 |
|
467 | |||
462 | if hasattr(rev1, 'raw_id'): |
|
468 | if hasattr(rev1, 'raw_id'): | |
463 | rev1 = getattr(rev1, 'raw_id') |
|
469 | rev1 = getattr(rev1, 'raw_id') | |
464 |
|
470 | |||
465 | if hasattr(rev2, 'raw_id'): |
|
471 | if hasattr(rev2, 'raw_id'): | |
466 | rev2 = getattr(rev2, 'raw_id') |
|
472 | rev2 = getattr(rev2, 'raw_id') | |
467 |
|
473 | |||
468 | if rev1 == self.EMPTY_CHANGESET: |
|
474 | if rev1 == self.EMPTY_CHANGESET: | |
469 | rev2 = self.get_changeset(rev2).raw_id |
|
475 | rev2 = self.get_changeset(rev2).raw_id | |
470 | cmd = ' '.join(['show'] + flags + [rev2]) |
|
476 | cmd = ' '.join(['show'] + flags + [rev2]) | |
471 | else: |
|
477 | else: | |
472 | rev1 = self.get_changeset(rev1).raw_id |
|
478 | rev1 = self.get_changeset(rev1).raw_id | |
473 | rev2 = self.get_changeset(rev2).raw_id |
|
479 | rev2 = self.get_changeset(rev2).raw_id | |
474 | cmd = ' '.join(['diff'] + flags + [rev1, rev2]) |
|
480 | cmd = ' '.join(['diff'] + flags + [rev1, rev2]) | |
475 |
|
481 | |||
476 | if path: |
|
482 | if path: | |
477 | cmd += ' -- "%s"' % path |
|
483 | cmd += ' -- "%s"' % path | |
478 | stdout, stderr = self.run_git_command(cmd) |
|
484 | stdout, stderr = self.run_git_command(cmd) | |
479 | # If we used 'show' command, strip first few lines (until actual diff |
|
485 | # If we used 'show' command, strip first few lines (until actual diff | |
480 | # starts) |
|
486 | # starts) | |
481 | if rev1 == self.EMPTY_CHANGESET: |
|
487 | if rev1 == self.EMPTY_CHANGESET: | |
482 | lines = stdout.splitlines() |
|
488 | lines = stdout.splitlines() | |
483 | x = 0 |
|
489 | x = 0 | |
484 | for line in lines: |
|
490 | for line in lines: | |
485 | if line.startswith('diff'): |
|
491 | if line.startswith('diff'): | |
486 | break |
|
492 | break | |
487 | x += 1 |
|
493 | x += 1 | |
488 | # Append new line just like 'diff' command do |
|
494 | # Append new line just like 'diff' command do | |
489 | stdout = '\n'.join(lines[x:]) + '\n' |
|
495 | stdout = '\n'.join(lines[x:]) + '\n' | |
490 | return stdout |
|
496 | return stdout | |
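
A usage sketch for the diff helper, including the empty-changeset case handled above; revision values are placeholders:

# Illustrative only: revision values are placeholders.
from rhodecode.lib.vcs.backends.git import GitRepository

repo = GitRepository('/tmp/example-repo')
tip = repo.get_changeset()
parent = tip.parents[0] if tip.parents else repo.EMPTY_CHANGESET
raw_diff = repo.get_diff(parent, tip, ignore_whitespace=True, context=5)
print raw_diff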
491 |
|
497 | |||
492 | @LazyProperty |
|
498 | @LazyProperty | |
493 | def in_memory_changeset(self): |
|
499 | def in_memory_changeset(self): | |
494 | """ |
|
500 | """ | |
495 | Returns ``GitInMemoryChangeset`` object for this repository. |
|
501 | Returns ``GitInMemoryChangeset`` object for this repository. | |
496 | """ |
|
502 | """ | |
497 | return GitInMemoryChangeset(self) |
|
503 | return GitInMemoryChangeset(self) | |
498 |
|
504 | |||
499 | def clone(self, url, update_after_clone=True, bare=False): |
|
505 | def clone(self, url, update_after_clone=True, bare=False): | |
500 | """ |
|
506 | """ | |
501 | Tries to clone changes from external location. |
|
507 | Tries to clone changes from external location. | |
502 |
|
508 | |||
503 | :param update_after_clone: If set to ``False``, git won't checkout |
|
509 | :param update_after_clone: If set to ``False``, git won't checkout | |
504 | working directory |
|
510 | working directory | |
505 | :param bare: If set to ``True``, repository would be cloned into |
|
511 | :param bare: If set to ``True``, repository would be cloned into | |
506 | *bare* git repository (no working directory at all). |
|
512 | *bare* git repository (no working directory at all). | |
507 | """ |
|
513 | """ | |
508 | url = self._get_url(url) |
|
514 | url = self._get_url(url) | |
509 | cmd = ['clone'] |
|
515 | cmd = ['clone'] | |
510 | if bare: |
|
516 | if bare: | |
511 | cmd.append('--bare') |
|
517 | cmd.append('--bare') | |
512 | elif not update_after_clone: |
|
518 | elif not update_after_clone: | |
513 | cmd.append('--no-checkout') |
|
519 | cmd.append('--no-checkout') | |
514 | cmd += ['--', '"%s"' % url, '"%s"' % self.path] |
|
520 | cmd += ['--', '"%s"' % url, '"%s"' % self.path] | |
515 | cmd = ' '.join(cmd) |
|
521 | cmd = ' '.join(cmd) | |
516 | # If error occurs run_git_command raises RepositoryError already |
|
522 | # If error occurs run_git_command raises RepositoryError already | |
517 | self.run_git_command(cmd) |
|
523 | self.run_git_command(cmd) | |
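
A hedged sketch of creating a repository by cloning; per the constructor, ``create=True`` together with ``src_url`` triggers the clone path above (url and target path are placeholders):

# Illustrative only: url and target path are placeholders.
from rhodecode.lib.vcs.backends.git import GitRepository

repo = GitRepository('/tmp/example-clone', create=True, bare=True,
                     src_url='file:///tmp/example-repo')
# later, incremental updates go through pull()/fetch()
repo.fetch('file:///tmp/example-repo')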
518 |
|
524 | |||
    def pull(self, url):
        """
        Tries to pull changes from an external location.
        """
        url = self._get_url(url)
        cmd = ['pull']
        cmd.append("--ff-only")
        cmd.append(url)
        cmd = ' '.join(cmd)
        # If an error occurs, run_git_command already raises RepositoryError
        self.run_git_command(cmd)

    def fetch(self, url):
        """
        Tries to fetch changes from an external location.
        """
        url = self._get_url(url)
        cmd = ['fetch']
        cmd.append(url)
        cmd = ' '.join(cmd)
        # If an error occurs, run_git_command already raises RepositoryError
        self.run_git_command(cmd)

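pull() and fetch() differ exactly as the underlying git commands do: pull merges the fetched head into the checkout, but only as a fast-forward because of --ff-only, while fetch just updates the remote refs. A hedged sketch of calling them defensively; the exceptions import path and the fallback-to-fetch behaviour are assumptions for illustration, not something the reviewed code prescribes:

# Sketch only: because of --ff-only, pulling a diverged remote makes git exit
# non-zero, which run_git_command turns into a RepositoryError (per the
# comments above). The import path is assumed; `repo` stands for an instance
# of this repository class and the URL is invented.
from rhodecode.lib.vcs.exceptions import RepositoryError


def safe_pull(repo, url='https://example.com/project.git'):
    try:
        repo.pull(url)
    except RepositoryError:
        # Histories diverged (or the remote is unreachable); fall back to a
        # plain fetch so the remote refs still get updated.
        repo.fetch(url)
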
    @LazyProperty
    def workdir(self):
        """
        Returns ``Workdir`` instance for this repository.
        """
        return GitWorkdir(self)

    def get_config_value(self, section, name, config_file=None):
        """
        Returns configuration value for a given [``section``] and ``name``.

        :param section: Section we want to retrieve value from
        :param name: Name of configuration we want to retrieve
        :param config_file: A path to the file which should be used to
            retrieve configuration from (might also be a list of file paths)
        """
        if config_file is None:
            config_file = []
        elif isinstance(config_file, basestring):
            config_file = [config_file]

        def gen_configs():
            for path in config_file + self._config_files:
                try:
                    yield ConfigFile.from_path(path)
                except (IOError, OSError, ValueError):
                    continue

        for config in gen_configs():
            try:
                return config.get(section, name)
            except KeyError:
                continue
        return None

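The lookup walks the explicitly passed files first, then the repository's own config files, and the first file that knows the key wins. A sketch of the same lookup-with-fallback idea, assuming ``ConfigFile`` is dulwich.config.ConfigFile (which matches the from_path()/get()/KeyError usage above); the paths passed in at the bottom are invented:

# Sketch under the stated dulwich assumption; paths are invented.
import os

from dulwich.config import ConfigFile


def first_config_value(section, name, paths):
    for path in paths:
        try:
            config = ConfigFile.from_path(path)
        except (IOError, OSError, ValueError):
            continue  # unreadable or malformed file, try the next one
        try:
            return config.get(section, name)
        except KeyError:
            continue  # key not set in this file, try the next one
    return None


# A per-repository config wins over the user's global one simply because it
# is listed first:
email = first_config_value('user', 'email',
                           ['/srv/repos/project/.git/config',
                            os.path.expanduser('~/.gitconfig')])
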
    def get_user_name(self, config_file=None):
        """
        Returns user's name from global configuration file.

        :param config_file: A path to the file which should be used to
            retrieve configuration from (might also be a list of file paths)
        """
        return self.get_config_value('user', 'name', config_file)

    def get_user_email(self, config_file=None):
        """
        Returns user's email from global configuration file.

        :param config_file: A path to the file which should be used to
            retrieve configuration from (might also be a list of file paths)
        """
        return self.get_config_value('user', 'email', config_file)
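
Both wrappers only fix the section/name pair and inherit the fallback behaviour of get_config_value, so they return None when nothing is configured. A small usage sketch, where ``repo`` is assumed to be an instance of this repository class:

# Usage sketch; `repo` is an assumed instance of the repository class above,
# and both helpers may return None when git has no user configured.
def author_line(repo):
    name = repo.get_user_name() or 'unknown'
    email = repo.get_user_email() or 'unknown@example.com'
    return '%s <%s>' % (name, email)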