@@ -1,827 +1,832 @@
 # -*- coding: utf-8 -*-
 """
 vcs.backends.git.repository
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 Git repository implementation.
 
 :created_on: Apr 8, 2010
 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 """
 
 import errno
 import logging
 import os
 import re
 import time
 import urllib.error
 import urllib.parse
 import urllib.request
 from collections import OrderedDict
 
-import mercurial.util  # import url as hg_url
+
+try:
+    from mercurial.utils.urlutil import url as hg_url
+except ImportError:  # urlutil was introduced in Mercurial 5.8
+    from mercurial.util import url as hg_url
+
 from dulwich.client import SubprocessGitClient
 from dulwich.config import ConfigFile
 from dulwich.objects import Tag
 from dulwich.repo import NotGitRepository, Repo
 from dulwich.server import update_server_info
 
 from kallithea.lib.vcs import subprocessio
 from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
 from kallithea.lib.vcs.conf import settings
 from kallithea.lib.vcs.exceptions import (BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
                                            TagDoesNotExistError)
 from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, makedate, safe_bytes, safe_str
 from kallithea.lib.vcs.utils.helpers import get_urllib_request_handlers
 from kallithea.lib.vcs.utils.lazy import LazyProperty
 from kallithea.lib.vcs.utils.paths import abspath, get_user_home
 
 from . import changeset, inmemory, workdir
 
 
 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
 
 log = logging.getLogger(__name__)
 
 
 class GitRepository(BaseRepository):
     """
     Git repository backend.
     """
     DEFAULT_BRANCH_NAME = 'master'
     scm = 'git'
 
     def __init__(self, repo_path, create=False, src_url=None,
                  update_after_clone=False, bare=False, baseui=None):
         baseui  # unused
         self.path = abspath(repo_path)
         self.repo = self._get_repo(create, src_url, update_after_clone, bare)
         self.bare = self.repo.bare
 
     @property
     def _config_files(self):
         return [
             self.bare and abspath(self.path, 'config')
             or abspath(self.path, '.git', 'config'),
             abspath(get_user_home(), '.gitconfig'),
         ]
 
     @property
     def _repo(self):
         return self.repo
 
     @property
     def head(self):
         try:
             return self._repo.head()
         except KeyError:
             return None
 
     @property
     def _empty(self):
         """
         Checks if repository is empty ie. without any changesets
         """
 
         try:
             self.revisions[0]
         except (KeyError, IndexError):
             return True
         return False
 
     @LazyProperty
     def revisions(self):
         """
         Returns list of revisions' ids, in ascending order. Being lazy
         attribute allows external tools to inject shas from cache.
         """
         return self._get_all_revisions()
 
     @classmethod
     def _run_git_command(cls, cmd, cwd=None):
         """
         Runs given ``cmd`` as git command and returns output bytes in a tuple
         (stdout, stderr) ... or raise RepositoryError.
 
         :param cmd: git command to be executed
         :param cwd: passed directly to subprocess
         """
         # need to clean fix GIT_DIR !
         gitenv = dict(os.environ)
         gitenv.pop('GIT_DIR', None)
         gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
 
         assert isinstance(cmd, list), cmd
         cmd = [settings.GIT_EXECUTABLE_PATH, '-c', 'core.quotepath=false'] + cmd
         try:
             p = subprocessio.SubprocessIOChunker(cmd, cwd=cwd, env=gitenv, shell=False)
         except (EnvironmentError, OSError) as err:
             # output from the failing process is in str(EnvironmentError)
             msg = ("Couldn't run git command %s.\n"
                    "Subprocess failed with '%s': %s\n" %
                    (cmd, type(err).__name__, err)
                    ).strip()
             log.error(msg)
             raise RepositoryError(msg)
 
         try:
             stdout = b''.join(p.output)
             stderr = b''.join(p.error)
         finally:
             p.close()
         # TODO: introduce option to make commands fail if they have any stderr output?
         if stderr:
             log.debug('stderr from %s:\n%s', cmd, stderr)
         else:
             log.debug('stderr from %s: None', cmd)
         return stdout, stderr
 
     def run_git_command(self, cmd):
         """
         Runs given ``cmd`` as git command with cwd set to current repo.
         Returns stdout as unicode str ... or raise RepositoryError.
         """
         cwd = None
         if os.path.isdir(self.path):
             cwd = self.path
         stdout, _stderr = self._run_git_command(cmd, cwd=cwd)
         return safe_str(stdout)
 
     @staticmethod
     def _check_url(url):
         r"""
         Raise URLError if url doesn't seem like a valid safe Git URL. We
         only allow http, https, git, and ssh URLs.
 
         For http and https URLs, make a connection and probe to see if it is valid.
 
         >>> GitRepository._check_url('git://example.com/my%20fine repo')
 
         >>> GitRepository._check_url('http://example.com:65537/repo')
         Traceback (most recent call last):
         ...
         urllib.error.URLError: <urlopen error Error parsing URL: 'http://example.com:65537/repo'>
         >>> GitRepository._check_url('foo')
         Traceback (most recent call last):
         ...
         urllib.error.URLError: <urlopen error Unsupported protocol in URL 'foo'>
         >>> GitRepository._check_url('file:///repo')
         Traceback (most recent call last):
         ...
         urllib.error.URLError: <urlopen error Unsupported protocol in URL 'file:///repo'>
         >>> GitRepository._check_url('git+http://example.com/repo')
         Traceback (most recent call last):
         ...
         urllib.error.URLError: <urlopen error Unsupported protocol in URL 'git+http://example.com/repo'>
         >>> GitRepository._check_url('git://example.com/%09')
         Traceback (most recent call last):
         ...
         urllib.error.URLError: <urlopen error Invalid escape character in path: '%'>
         >>> GitRepository._check_url('git://example.com/%x00')
         Traceback (most recent call last):
         ...
         urllib.error.URLError: <urlopen error Invalid escape character in path: '%'>
         >>> GitRepository._check_url(r'git://example.com/\u0009')
         Traceback (most recent call last):
         ...
         urllib.error.URLError: <urlopen error Invalid escape character in path: '\'>
         >>> GitRepository._check_url(r'git://example.com/\t')
         Traceback (most recent call last):
         ...
         urllib.error.URLError: <urlopen error Invalid escape character in path: '\'>
         >>> GitRepository._check_url('git://example.com/\t')
         Traceback (most recent call last):
         ...
         urllib.error.URLError: <urlopen error Invalid ...>
 
         The failure above will be one of, depending on the level of WhatWG support:
         urllib.error.URLError: <urlopen error Invalid whitespace character in path: '\t'>
         urllib.error.URLError: <urlopen error Invalid url: 'git://example.com/ ' normalizes to 'git://example.com/'>
         """
         try:
             parsed_url = urllib.parse.urlparse(url)
             parsed_url.port  # trigger netloc parsing which might raise ValueError
         except ValueError:
             raise urllib.error.URLError("Error parsing URL: %r" % url)
 
         # check first if it's not an local url
         if os.path.isabs(url) and os.path.isdir(url):
             return
 
         unparsed_url = urllib.parse.urlunparse(parsed_url)
         if unparsed_url != url:
             raise urllib.error.URLError("Invalid url: '%s' normalizes to '%s'" % (url, unparsed_url))
 
         if parsed_url.scheme == 'git':
             # Mitigate problems elsewhere with incorrect handling of encoded paths.
             # Don't trust urllib.parse.unquote but be prepared for more flexible implementations elsewhere.
             # Space is the only allowed whitespace character - directly or % encoded. No other % or \ is allowed.
             for c in parsed_url.path.replace('%20', ' '):
                 if c in '%\\':
                     raise urllib.error.URLError("Invalid escape character in path: '%s'" % c)
                 if c.isspace() and c != ' ':
                     raise urllib.error.URLError("Invalid whitespace character in path: %r" % c)
             return
 
         if parsed_url.scheme not in ['http', 'https']:
             raise urllib.error.URLError("Unsupported protocol in URL %r" % url)
 
-        url_obj = mercurial.util.url(safe_bytes(url))
+        url_obj = hg_url(safe_bytes(url))
         test_uri, handlers = get_urllib_request_handlers(url_obj)
         if not test_uri.endswith(b'info/refs'):
             test_uri = test_uri.rstrip(b'/') + b'/info/refs'
 
         url_obj.passwd = b'*****'
         cleaned_uri = str(url_obj)
 
         o = urllib.request.build_opener(*handlers)
         o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git
 
         req = urllib.request.Request(
             "%s?%s" % (
                 safe_str(test_uri),
                 urllib.parse.urlencode({"service": 'git-upload-pack'})
             ))
 
         try:
             resp = o.open(req)
             if resp.code != 200:
                 raise Exception('Return Code is not 200')
         except Exception as e:
             # means it cannot be cloned
             raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
 
         # now detect if it's proper git repo
         gitdata = resp.read()
         if b'service=git-upload-pack' not in gitdata:
             raise urllib.error.URLError(
                 "url [%s] does not look like an git" % cleaned_uri)
 
     def _get_repo(self, create, src_url=None, update_after_clone=False,
                   bare=False):
         if create and os.path.exists(self.path):
             raise RepositoryError("Location already exist")
         if src_url and not create:
             raise RepositoryError("Create should be set to True if src_url is "
                                   "given (clone operation creates repository)")
         try:
             if create and src_url:
                 GitRepository._check_url(src_url)
                 self.clone(src_url, update_after_clone, bare)
                 return Repo(self.path)
             elif create:
                 os.makedirs(self.path)
                 if bare:
                     return Repo.init_bare(self.path)
                 else:
                     return Repo.init(self.path)
             else:
                 return Repo(self.path)
         except (NotGitRepository, OSError) as err:
             raise RepositoryError(err)
 
     def _get_all_revisions(self):
         # we must check if this repo is not empty, since later command
         # fails if it is. And it's cheaper to ask than throw the subprocess
         # errors
         try:
             self._repo.head()
         except KeyError:
             return []
 
         rev_filter = settings.GIT_REV_FILTER
         cmd = ['rev-list', rev_filter, '--reverse', '--date-order']
         try:
             so = self.run_git_command(cmd)
         except RepositoryError:
             # Can be raised for empty repositories
             return []
         return so.splitlines()
 
     def _get_all_revisions2(self):
         # alternate implementation using dulwich
         includes = [ascii_str(sha) for key, (sha, type_) in self._parsed_refs.items()
                     if type_ != b'T']
         return [c.commit.id for c in self._repo.get_walker(include=includes)]
 
     def _get_revision(self, revision):
         """
         Given any revision identifier, returns a 40 char string with revision hash.
         """
         if self._empty:
             raise EmptyRepositoryError("There are no changesets yet")
 
         if revision in (None, '', 'tip', 'HEAD', 'head', -1):
             revision = -1
 
         if isinstance(revision, int):
             try:
                 return self.revisions[revision]
             except IndexError:
                 msg = "Revision %r does not exist for %s" % (revision, self.name)
                 raise ChangesetDoesNotExistError(msg)
 
         if isinstance(revision, str):
             if revision.isdigit() and (len(revision) < 12 or len(revision) == revision.count('0')):
                 try:
                     return self.revisions[int(revision)]
                 except IndexError:
                     msg = "Revision %r does not exist for %s" % (revision, self)
                     raise ChangesetDoesNotExistError(msg)
 
             # get by branch/tag name
             _ref_revision = self._parsed_refs.get(safe_bytes(revision))
             if _ref_revision:  # and _ref_revision[1] in [b'H', b'RH', b'T']:
                 return ascii_str(_ref_revision[0])
 
             if revision in self.revisions:
                 return revision
 
             # maybe it's a tag ? we don't have them in self.revisions
             if revision in self.tags.values():
                 return revision
 
             if SHA_PATTERN.match(revision):
                 msg = "Revision %r does not exist for %s" % (revision, self.name)
                 raise ChangesetDoesNotExistError(msg)
 
         raise ChangesetDoesNotExistError("Given revision %r not recognized" % revision)
 
     def get_ref_revision(self, ref_type, ref_name):
         """
         Returns ``GitChangeset`` object representing repository's
         changeset at the given ``revision``.
         """
         return self._get_revision(ref_name)
 
     def _get_archives(self, archive_name='tip'):
 
         for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
             yield {"type": i[0], "extension": i[1], "node": archive_name}
 
     def _get_url(self, url):
         """
         Returns normalized url. If schema is not given, would fall to
         filesystem (``file:///``) schema.
         """
         if url != 'default' and '://' not in url:
             url = ':///'.join(('file', url))
         return url
 
     @LazyProperty
     def name(self):
         return os.path.basename(self.path)
 
     @LazyProperty
     def last_change(self):
         """
         Returns last change made on this repository as datetime object
         """
         return date_fromtimestamp(self._get_mtime(), makedate()[1])
 
     def _get_mtime(self):
         try:
             return time.mktime(self.get_changeset().date.timetuple())
         except RepositoryError:
             idx_loc = '' if self.bare else '.git'
             # fallback to filesystem
             in_path = os.path.join(self.path, idx_loc, "index")
             he_path = os.path.join(self.path, idx_loc, "HEAD")
             if os.path.exists(in_path):
                 return os.stat(in_path).st_mtime
             else:
                 return os.stat(he_path).st_mtime
 
     @LazyProperty
     def description(self):
         return safe_str(self._repo.get_description() or b'unknown')
 
     @property
     def branches(self):
         if not self.revisions:
             return {}
         _branches = [(safe_str(key), ascii_str(sha))
                      for key, (sha, type_) in self._parsed_refs.items() if type_ == b'H']
         return OrderedDict(sorted(_branches, key=(lambda ctx: ctx[0]), reverse=False))
 
     @LazyProperty
     def closed_branches(self):
         return {}
 
     @LazyProperty
     def tags(self):
         return self._get_tags()
 
     def _get_tags(self):
         if not self.revisions:
             return {}
         _tags = [(safe_str(key), ascii_str(sha))
                  for key, (sha, type_) in self._parsed_refs.items() if type_ == b'T']
         return OrderedDict(sorted(_tags, key=(lambda ctx: ctx[0]), reverse=True))
 
     def tag(self, name, user, revision=None, message=None, date=None,
             **kwargs):
         """
         Creates and returns a tag for the given ``revision``.
 
         :param name: name for new tag
         :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
         :param revision: changeset id for which new tag would be created
         :param message: message of the tag's commit
         :param date: date of tag's commit
 
         :raises TagAlreadyExistError: if tag with same name already exists
         """
         if name in self.tags:
             raise TagAlreadyExistError("Tag %s already exists" % name)
         changeset = self.get_changeset(revision)
         message = message or "Added tag %s for commit %s" % (name,
                                                              changeset.raw_id)
         self._repo.refs[b"refs/tags/%s" % safe_bytes(name)] = changeset._commit.id
 
         self._parsed_refs = self._get_parsed_refs()
         self.tags = self._get_tags()
         return changeset
 
     def remove_tag(self, name, user, message=None, date=None):
         """
         Removes tag with the given ``name``.
 
         :param name: name of the tag to be removed
         :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
         :param message: message of the tag's removal commit
         :param date: date of tag's removal commit
 
         :raises TagDoesNotExistError: if tag with given name does not exists
         """
         if name not in self.tags:
             raise TagDoesNotExistError("Tag %s does not exist" % name)
         # self._repo.refs is a DiskRefsContainer, and .path gives the full absolute path of '.git'
         tagpath = os.path.join(safe_str(self._repo.refs.path), 'refs', 'tags', name)
         try:
             os.remove(tagpath)
             self._parsed_refs = self._get_parsed_refs()
             self.tags = self._get_tags()
         except OSError as e:
             raise RepositoryError(e.strerror)
 
     @LazyProperty
     def bookmarks(self):
         """
         Gets bookmarks for this repository
         """
         return {}
 
     @LazyProperty
     def _parsed_refs(self):
         return self._get_parsed_refs()
 
     def _get_parsed_refs(self):
         """Return refs as a dict, like:
         { b'v0.2.0': [b'599ba911aa24d2981225f3966eb659dfae9e9f30', b'T'] }
         """
         _repo = self._repo
         refs = _repo.get_refs()
         keys = [(b'refs/heads/', b'H'),
                 (b'refs/remotes/origin/', b'RH'),
                 (b'refs/tags/', b'T')]
         _refs = {}
         for ref, sha in refs.items():
             for k, type_ in keys:
                 if ref.startswith(k):
                     _key = ref[len(k):]
                     if type_ == b'T':
                         obj = _repo.get_object(sha)
                         if isinstance(obj, Tag):
                             sha = _repo.get_object(sha).object[1]
                     _refs[_key] = [sha, type_]
                     break
         return _refs
 
     def _heads(self, reverse=False):
         refs = self._repo.get_refs()
         heads = {}
 
         for key, val in refs.items():
             for ref_key in [b'refs/heads/', b'refs/remotes/origin/']:
                 if key.startswith(ref_key):
                     n = key[len(ref_key):]
                     if n not in [b'HEAD']:
                         heads[n] = val
 
         return heads if reverse else dict((y, x) for x, y in heads.items())
 
     def get_changeset(self, revision=None):
         """
         Returns ``GitChangeset`` object representing commit from git repository
         at the given revision or head (most recent commit) if None given.
         """
         if isinstance(revision, changeset.GitChangeset):
             return revision
         return changeset.GitChangeset(repository=self, revision=self._get_revision(revision))
 
     def get_changesets(self, start=None, end=None, start_date=None,
                        end_date=None, branch_name=None, reverse=False, max_revisions=None):
         """
         Returns iterator of ``GitChangeset`` objects from start to end (both
         are inclusive), in ascending date order (unless ``reverse`` is set).
 
         :param start: changeset ID, as str; first returned changeset
         :param end: changeset ID, as str; last returned changeset
         :param start_date: if specified, changesets with commit date less than
             ``start_date`` would be filtered out from returned set
         :param end_date: if specified, changesets with commit date greater than
             ``end_date`` would be filtered out from returned set
         :param branch_name: if specified, changesets not reachable from given
             branch would be filtered out from returned set
         :param reverse: if ``True``, returned generator would be reversed
             (meaning that returned changesets would have descending date order)
 
         :raise BranchDoesNotExistError: If given ``branch_name`` does not
             exist.
         :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
             ``end`` could not be found.
 
         """
         if branch_name and branch_name not in self.branches:
             raise BranchDoesNotExistError("Branch '%s' not found"
                                           % branch_name)
         # actually we should check now if it's not an empty repo to not spaw
         # subprocess commands
         if self._empty:
             raise EmptyRepositoryError("There are no changesets yet")
 
         # %H at format means (full) commit hash, initial hashes are retrieved
         # in ascending date order
         cmd = ['log', '--date-order', '--reverse', '--pretty=format:%H']
         if max_revisions:
             cmd += ['--max-count=%s' % max_revisions]
         if start_date:
             cmd += ['--since', start_date.strftime('%m/%d/%y %H:%M:%S')]
         if end_date:
             cmd += ['--until', end_date.strftime('%m/%d/%y %H:%M:%S')]
         if branch_name:
             cmd.append(branch_name)
         else:
             cmd.append(settings.GIT_REV_FILTER)
 
         revs = self.run_git_command(cmd).splitlines()
         start_pos = 0
         end_pos = len(revs)
         if start:
             _start = self._get_revision(start)
             try:
                 start_pos = revs.index(_start)
             except ValueError:
                 pass
 
         if end is not None:
             _end = self._get_revision(end)
             try:
                 end_pos = revs.index(_end)
             except ValueError:
                 pass
 
         if None not in [start, end] and start_pos > end_pos:
             raise RepositoryError('start cannot be after end')
 
         if end_pos is not None:
             end_pos += 1
 
         revs = revs[start_pos:end_pos]
         if reverse:
             revs.reverse()
 
         return CollectionGenerator(self, revs)
 
     def get_diff_changesets(self, org_rev, other_repo, other_rev):
         """
         Returns lists of changesets that can be merged from this repo @org_rev
         to other_repo @other_rev
         ... and the other way
         ... and the ancestors that would be used for merge
 
         :param org_rev: the revision we want our compare to be made
         :param other_repo: repo object, most likely the fork of org_repo. It has
             all changesets that we need to obtain
         :param other_rev: revision we want out compare to be made on other_repo
         """
         org_changesets = []
         ancestors = None
         if org_rev == other_rev:
             other_changesets = []
         elif self != other_repo:
             gitrepo = Repo(self.path)
             SubprocessGitClient(thin_packs=False).fetch(other_repo.path, gitrepo)
 
             gitrepo_remote = Repo(other_repo.path)
             SubprocessGitClient(thin_packs=False).fetch(self.path, gitrepo_remote)
 
             revs = [
                 ascii_str(x.commit.id)
                 for x in gitrepo_remote.get_walker(include=[ascii_bytes(other_rev)],
                                                    exclude=[ascii_bytes(org_rev)])
             ]
             other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)]
             if other_changesets:
                 ancestors = [other_changesets[0].parents[0].raw_id]
             else:
                 # no changesets from other repo, ancestor is the other_rev
                 ancestors = [other_rev]
 
             gitrepo.close()
             gitrepo_remote.close()
 
         else:
             so = self.run_git_command(
                 ['log', '--reverse', '--pretty=format:%H',
                  '-s', '%s..%s' % (org_rev, other_rev)]
             )
             other_changesets = [self.get_changeset(cs)
                                 for cs in re.findall(r'[0-9a-fA-F]{40}', so)]
             so = self.run_git_command(
                 ['merge-base', org_rev, other_rev]
             )
             ancestors = [re.findall(r'[0-9a-fA-F]{40}', so)[0]]
 
         return other_changesets, org_changesets, ancestors
 
     def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
                  context=3):
         """
         Returns (git like) *diff*, as plain bytes text. Shows changes
         introduced by ``rev2`` since ``rev1``.
 
         :param rev1: Entry point from which diff is shown. Can be
             ``self.EMPTY_CHANGESET`` - in this case, patch showing all
             the changes since empty state of the repository until ``rev2``
         :param rev2: Until which revision changes should be shown.
         :param ignore_whitespace: If set to ``True``, would not show whitespace
             changes. Defaults to ``False``.
         :param context: How many lines before/after changed lines should be
             shown. Defaults to ``3``. Due to limitations in Git, if
             value passed-in is greater than ``2**31-1``
             (``2147483647``), it will be set to ``2147483647``
             instead. If negative value is passed-in, it will be set to
             ``0`` instead.
         """
 
         # Git internally uses a signed long int for storing context
         # size (number of lines to show before and after the
         # differences). This can result in integer overflow, so we
         # ensure the requested context is smaller by one than the
         # number that would cause the overflow. It is highly unlikely
         # that a single file will contain that many lines, so this
         # kind of change should not cause any realistic consequences.
         overflowed_long_int = 2**31
 
         if context >= overflowed_long_int:
             context = overflowed_long_int - 1
 
         # Negative context values make no sense, and will result in
         # errors. Ensure this does not happen.
         if context < 0:
             context = 0
 
686 | flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40'] |
|
691 | flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40'] | |
687 | if ignore_whitespace: |
|
692 | if ignore_whitespace: | |
688 | flags.append('-w') |
|
693 | flags.append('-w') | |
689 |
|
694 | |||
690 | if hasattr(rev1, 'raw_id'): |
|
695 | if hasattr(rev1, 'raw_id'): | |
691 | rev1 = getattr(rev1, 'raw_id') |
|
696 | rev1 = getattr(rev1, 'raw_id') | |
692 |
|
697 | |||
693 | if hasattr(rev2, 'raw_id'): |
|
698 | if hasattr(rev2, 'raw_id'): | |
694 | rev2 = getattr(rev2, 'raw_id') |
|
699 | rev2 = getattr(rev2, 'raw_id') | |
695 |
|
700 | |||
696 | if rev1 == self.EMPTY_CHANGESET: |
|
701 | if rev1 == self.EMPTY_CHANGESET: | |
697 | rev2 = self.get_changeset(rev2).raw_id |
|
702 | rev2 = self.get_changeset(rev2).raw_id | |
698 | cmd = ['show'] + flags + [rev2] |
|
703 | cmd = ['show'] + flags + [rev2] | |
699 | else: |
|
704 | else: | |
700 | rev1 = self.get_changeset(rev1).raw_id |
|
705 | rev1 = self.get_changeset(rev1).raw_id | |
701 | rev2 = self.get_changeset(rev2).raw_id |
|
706 | rev2 = self.get_changeset(rev2).raw_id | |
702 | cmd = ['diff'] + flags + [rev1, rev2] |
|
707 | cmd = ['diff'] + flags + [rev1, rev2] | |
703 |
|
708 | |||
704 | if path: |
|
709 | if path: | |
705 | cmd += ['--', path] |
|
710 | cmd += ['--', path] | |
706 |
|
711 | |||
707 | stdout, stderr = self._run_git_command(cmd, cwd=self.path) |
|
712 | stdout, stderr = self._run_git_command(cmd, cwd=self.path) | |
708 | # If we used 'show' command, strip first few lines (until actual diff |
|
713 | # If we used 'show' command, strip first few lines (until actual diff | |
709 | # starts) |
|
714 | # starts) | |
710 | if rev1 == self.EMPTY_CHANGESET: |
|
715 | if rev1 == self.EMPTY_CHANGESET: | |
711 | parts = stdout.split(b'\ndiff ', 1) |
|
716 | parts = stdout.split(b'\ndiff ', 1) | |
712 | if len(parts) > 1: |
|
717 | if len(parts) > 1: | |
713 | stdout = b'diff ' + parts[1] |
|
718 | stdout = b'diff ' + parts[1] | |
714 | return stdout |
|
719 | return stdout | |
715 |
|
720 | |||
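A minimal usage sketch of get_diff as documented above; `repo` and the two 40-character revision ids are assumed values, not taken from the diff:

    # Diff between two commits, limited to one file, ignoring whitespace
    diff_bytes = repo.get_diff(old_rev, new_rev, path='setup.py',
                               ignore_whitespace=True, context=5)
    # Diff against the empty state ('git show' is used internally in that case)
    initial = repo.get_diff(repo.EMPTY_CHANGESET, new_rev)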
716 | @LazyProperty |
|
721 | @LazyProperty | |
717 | def in_memory_changeset(self): |
|
722 | def in_memory_changeset(self): | |
718 | """ |
|
723 | """ | |
719 | Returns ``GitInMemoryChangeset`` object for this repository. |
|
724 | Returns ``GitInMemoryChangeset`` object for this repository. | |
720 | """ |
|
725 | """ | |
721 | return inmemory.GitInMemoryChangeset(self) |
|
726 | return inmemory.GitInMemoryChangeset(self) | |
722 |
|
727 | |||
723 | def clone(self, url, update_after_clone=True, bare=False): |
|
728 | def clone(self, url, update_after_clone=True, bare=False): | |
724 | """ |
|
729 | """ | |
725 | Tries to clone changes from external location. |
|
730 | Tries to clone changes from external location. | |
726 |
|
731 | |||
727 | :param update_after_clone: If set to ``False``, git won't checkout |
|
732 | :param update_after_clone: If set to ``False``, git won't checkout | |
728 | working directory |
|
733 | working directory | |
729 | :param bare: If set to ``True``, repository would be cloned into |
|
734 | :param bare: If set to ``True``, repository would be cloned into | |
730 | *bare* git repository (no working directory at all). |
|
735 | *bare* git repository (no working directory at all). | |
731 | """ |
|
736 | """ | |
732 | url = self._get_url(url) |
|
737 | url = self._get_url(url) | |
733 | cmd = ['clone', '-q'] |
|
738 | cmd = ['clone', '-q'] | |
734 | if bare: |
|
739 | if bare: | |
735 | cmd.append('--bare') |
|
740 | cmd.append('--bare') | |
736 | elif not update_after_clone: |
|
741 | elif not update_after_clone: | |
737 | cmd.append('--no-checkout') |
|
742 | cmd.append('--no-checkout') | |
738 | cmd += ['--', url, self.path] |
|
743 | cmd += ['--', url, self.path] | |
739 | # If error occurs run_git_command raises RepositoryError already |
|
744 | # If error occurs run_git_command raises RepositoryError already | |
740 | self.run_git_command(cmd) |
|
745 | self.run_git_command(cmd) | |
741 |
|
746 | |||
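A hedged usage sketch of clone; the URL is hypothetical and `repo` is assumed to be a GitRepository whose `path` points at a not-yet-populated target directory:

    # Mirror an upstream repository without a working directory
    repo.clone('https://git.example.com/upstream.git', bare=True)
    # Or keep a working directory but skip the checkout
    # repo.clone('https://git.example.com/upstream.git', update_after_clone=False)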
742 | def pull(self, url): |
|
747 | def pull(self, url): | |
743 | """ |
|
748 | """ | |
744 | Tries to pull changes from external location. |
|
749 | Tries to pull changes from external location. | |
745 | """ |
|
750 | """ | |
746 | url = self._get_url(url) |
|
751 | url = self._get_url(url) | |
747 | cmd = ['pull', '--ff-only', url] |
|
752 | cmd = ['pull', '--ff-only', url] | |
748 | # If error occurs run_git_command raises RepositoryError already |
|
753 | # If error occurs run_git_command raises RepositoryError already | |
749 | self.run_git_command(cmd) |
|
754 | self.run_git_command(cmd) | |
750 |
|
755 | |||
751 | def fetch(self, url): |
|
756 | def fetch(self, url): | |
752 | """ |
|
757 | """ | |
753 | Tries to fetch changes from an external location. |
|
758 | Tries to fetch changes from an external location. | |
754 | """ |
|
759 | """ | |
755 | url = self._get_url(url) |
|
760 | url = self._get_url(url) | |
756 | so = self.run_git_command(['ls-remote', '-h', url]) |
|
761 | so = self.run_git_command(['ls-remote', '-h', url]) | |
757 | cmd = ['fetch', url, '--'] |
|
762 | cmd = ['fetch', url, '--'] | |
758 | for line in so.splitlines(): |
|
763 | for line in so.splitlines(): | |
759 | sha, ref = line.split('\t') |
|
764 | sha, ref = line.split('\t') | |
760 | cmd.append('+%s:%s' % (ref, ref)) |
|
765 | cmd.append('+%s:%s' % (ref, ref)) | |
761 | self.run_git_command(cmd) |
|
766 | self.run_git_command(cmd) | |
762 |
|
767 | |||
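The sketch below (hypothetical URL) contrasts the two update paths implemented above: pull performs a fast-forward-only merge into the checked-out branch, while fetch lists the remote heads via ls-remote and force-updates the matching local refs, which also works for bare repositories:

    repo.pull('https://git.example.com/upstream.git')   # ff-only update of the current branch
    repo.fetch('https://git.example.com/upstream.git')  # '+ref:ref' force-update of each remote head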
763 | def _update_server_info(self): |
|
768 | def _update_server_info(self): | |
764 | """ |
|
769 | """ | |
765 | Runs git's update-server-info command in this repo instance |
|
770 | Runs git's update-server-info command in this repo instance | |
766 | """ |
|
771 | """ | |
767 | try: |
|
772 | try: | |
768 | update_server_info(self._repo) |
|
773 | update_server_info(self._repo) | |
769 | except OSError as e: |
|
774 | except OSError as e: | |
770 | if e.errno not in [errno.ENOENT, errno.EROFS]: |
|
775 | if e.errno not in [errno.ENOENT, errno.EROFS]: | |
771 | raise |
|
776 | raise | |
772 | # Workaround for dulwich crashing on for example its own dulwich/tests/data/repos/simple_merge.git/info/refs.lock |
|
777 | # Workaround for dulwich crashing on for example its own dulwich/tests/data/repos/simple_merge.git/info/refs.lock | |
773 | log.error('Ignoring %s running update-server-info: %s', type(e).__name__, e) |
|
778 | log.error('Ignoring %s running update-server-info: %s', type(e).__name__, e) | |
774 |
|
779 | |||
775 | @LazyProperty |
|
780 | @LazyProperty | |
776 | def workdir(self): |
|
781 | def workdir(self): | |
777 | """ |
|
782 | """ | |
778 | Returns ``Workdir`` instance for this repository. |
|
783 | Returns ``Workdir`` instance for this repository. | |
779 | """ |
|
784 | """ | |
780 | return workdir.GitWorkdir(self) |
|
785 | return workdir.GitWorkdir(self) | |
781 |
|
786 | |||
782 | def get_config_value(self, section, name, config_file=None): |
|
787 | def get_config_value(self, section, name, config_file=None): | |
783 | """ |
|
788 | """ | |
784 | Returns configuration value for a given [``section``] and ``name``. |
|
789 | Returns configuration value for a given [``section``] and ``name``. | |
785 |
|
790 | |||
786 | :param section: Section we want to retrieve value from |
|
791 | :param section: Section we want to retrieve value from | |
787 | :param name: Name of configuration we want to retrieve |
|
792 | :param name: Name of configuration we want to retrieve | |
788 | :param config_file: A path to file which should be used to retrieve |
|
793 | :param config_file: A path to file which should be used to retrieve | |
789 | configuration from (might also be a list of file paths) |
|
794 | configuration from (might also be a list of file paths) | |
790 | """ |
|
795 | """ | |
791 | if config_file is None: |
|
796 | if config_file is None: | |
792 | config_file = [] |
|
797 | config_file = [] | |
793 | elif isinstance(config_file, str): |
|
798 | elif isinstance(config_file, str): | |
794 | config_file = [config_file] |
|
799 | config_file = [config_file] | |
795 |
|
800 | |||
796 | def gen_configs(): |
|
801 | def gen_configs(): | |
797 | for path in config_file + self._config_files: |
|
802 | for path in config_file + self._config_files: | |
798 | try: |
|
803 | try: | |
799 | yield ConfigFile.from_path(path) |
|
804 | yield ConfigFile.from_path(path) | |
800 | except (IOError, OSError, ValueError): |
|
805 | except (IOError, OSError, ValueError): | |
801 | continue |
|
806 | continue | |
802 |
|
807 | |||
803 | for config in gen_configs(): |
|
808 | for config in gen_configs(): | |
804 | try: |
|
809 | try: | |
805 | value = config.get(section, name) |
|
810 | value = config.get(section, name) | |
806 | except KeyError: |
|
811 | except KeyError: | |
807 | continue |
|
812 | continue | |
808 | return None if value is None else safe_str(value) |
|
813 | return None if value is None else safe_str(value) | |
809 | return None |
|
814 | return None | |
810 |
|
815 | |||
811 | def get_user_name(self, config_file=None): |
|
816 | def get_user_name(self, config_file=None): | |
812 | """ |
|
817 | """ | |
813 | Returns user's name from global configuration file. |
|
818 | Returns user's name from global configuration file. | |
814 |
|
819 | |||
815 | :param config_file: A path to file which should be used to retrieve |
|
820 | :param config_file: A path to file which should be used to retrieve | |
816 | configuration from (might also be a list of file paths) |
|
821 | configuration from (might also be a list of file paths) | |
817 | """ |
|
822 | """ | |
818 | return self.get_config_value('user', 'name', config_file) |
|
823 | return self.get_config_value('user', 'name', config_file) | |
819 |
|
824 | |||
820 | def get_user_email(self, config_file=None): |
|
825 | def get_user_email(self, config_file=None): | |
821 | """ |
|
826 | """ | |
822 | Returns user's email from global configuration file. |
|
827 | Returns user's email from global configuration file. | |
823 |
|
828 | |||
824 | :param config_file: A path to file which should be used to retrieve |
|
829 | :param config_file: A path to file which should be used to retrieve | |
825 | configuration from (might also be a list of file paths) |
|
830 | configuration from (might also be a list of file paths) | |
826 | """ |
|
831 | """ | |
827 | return self.get_config_value('user', 'email', config_file) |
|
832 | return self.get_config_value('user', 'email', config_file) |
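A short usage sketch of the configuration helpers above; the explicit config path is an assumption:

    author = repo.get_user_name()                                # from the usual git config files
    email = repo.get_user_email(config_file='/etc/gitconfig')    # or consult an explicit file first
    editor = repo.get_config_value('core', 'editor')             # None when the key is not set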
@@ -1,685 +1,690 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | vcs.backends.hg.repository |
|
3 | vcs.backends.hg.repository | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Mercurial repository implementation. |
|
6 | Mercurial repository implementation. | |
7 |
|
7 | |||
8 | :created_on: Apr 8, 2010 |
|
8 | :created_on: Apr 8, 2010 | |
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. |
|
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. | |
10 | """ |
|
10 | """ | |
11 |
|
11 | |||
12 | import datetime |
|
12 | import datetime | |
13 | import logging |
|
13 | import logging | |
14 | import os |
|
14 | import os | |
15 | import time |
|
15 | import time | |
16 | import urllib.error |
|
16 | import urllib.error | |
17 | import urllib.parse |
|
17 | import urllib.parse | |
18 | import urllib.request |
|
18 | import urllib.request | |
19 | from collections import OrderedDict |
|
19 | from collections import OrderedDict | |
20 |
|
20 | |||
21 | import mercurial.commands |
|
21 | import mercurial.commands | |
22 | import mercurial.error |
|
22 | import mercurial.error | |
23 | import mercurial.exchange |
|
23 | import mercurial.exchange | |
24 | import mercurial.hg |
|
24 | import mercurial.hg | |
25 | import mercurial.hgweb |
|
25 | import mercurial.hgweb | |
26 | import mercurial.httppeer |
|
26 | import mercurial.httppeer | |
27 | import mercurial.localrepo |
|
27 | import mercurial.localrepo | |
28 | import mercurial.match |
|
28 | import mercurial.match | |
29 | import mercurial.mdiff |
|
29 | import mercurial.mdiff | |
30 | import mercurial.node |
|
30 | import mercurial.node | |
31 | import mercurial.patch |
|
31 | import mercurial.patch | |
32 | import mercurial.scmutil |
|
32 | import mercurial.scmutil | |
33 | import mercurial.sshpeer |
|
33 | import mercurial.sshpeer | |
34 | import mercurial.tags |
|
34 | import mercurial.tags | |
35 | import mercurial.ui |
|
35 | import mercurial.ui | |
36 | import mercurial.unionrepo |
|
36 | import mercurial.unionrepo | |
37 | import mercurial.util |
|
37 | ||
|
38 | ||||
|
39 | try: | |||
|
40 | from mercurial.utils.urlutil import url as hg_url | |||
|
41 | except ImportError: # urlutil was introduced in Mercurial 5.8 | |||
|
42 | from mercurial.util import url as hg_url | |||
38 |
|
43 | |||
39 | from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator |
|
44 | from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator | |
40 | from kallithea.lib.vcs.exceptions import (BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, |
|
45 | from kallithea.lib.vcs.exceptions import (BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, | |
41 | TagDoesNotExistError, VCSError) |
|
46 | TagDoesNotExistError, VCSError) | |
42 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str |
|
47 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str | |
43 | from kallithea.lib.vcs.utils.helpers import get_urllib_request_handlers |
|
48 | from kallithea.lib.vcs.utils.helpers import get_urllib_request_handlers | |
44 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
49 | from kallithea.lib.vcs.utils.lazy import LazyProperty | |
45 | from kallithea.lib.vcs.utils.paths import abspath |
|
50 | from kallithea.lib.vcs.utils.paths import abspath | |
46 |
|
51 | |||
47 | from . import changeset, inmemory, workdir |
|
52 | from . import changeset, inmemory, workdir | |
48 |
|
53 | |||
49 |
|
54 | |||
50 | log = logging.getLogger(__name__) |
|
55 | log = logging.getLogger(__name__) | |
51 |
|
56 | |||
52 |
|
57 | |||
53 | class MercurialRepository(BaseRepository): |
|
58 | class MercurialRepository(BaseRepository): | |
54 | """ |
|
59 | """ | |
55 | Mercurial repository backend |
|
60 | Mercurial repository backend | |
56 | """ |
|
61 | """ | |
57 | DEFAULT_BRANCH_NAME = 'default' |
|
62 | DEFAULT_BRANCH_NAME = 'default' | |
58 | scm = 'hg' |
|
63 | scm = 'hg' | |
59 |
|
64 | |||
60 | def __init__(self, repo_path, create=False, baseui=None, src_url=None, |
|
65 | def __init__(self, repo_path, create=False, baseui=None, src_url=None, | |
61 | update_after_clone=False): |
|
66 | update_after_clone=False): | |
62 | """ |
|
67 | """ | |
63 | Raises RepositoryError if repository cannot be found at the given |
|
68 | Raises RepositoryError if repository cannot be found at the given | |
64 | ``repo_path``. |
|
69 | ``repo_path``. | |
65 |
|
70 | |||
66 | :param repo_path: local path of the repository |
|
71 | :param repo_path: local path of the repository | |
67 | :param create=False: if set to True, would try to create repository if |
|
72 | :param create=False: if set to True, would try to create repository if | |
68 | it does not exist rather than raising exception |
|
73 | it does not exist rather than raising exception | |
69 | :param baseui=None: user data |
|
74 | :param baseui=None: user data | |
70 | :param src_url=None: would try to clone repository from given location |
|
75 | :param src_url=None: would try to clone repository from given location | |
71 | :param update_after_clone=False: sets update of working copy after |
|
76 | :param update_after_clone=False: sets update of working copy after | |
72 | making a clone |
|
77 | making a clone | |
73 | """ |
|
78 | """ | |
74 |
|
79 | |||
75 | if not isinstance(repo_path, str): |
|
80 | if not isinstance(repo_path, str): | |
76 | raise VCSError('Mercurial backend requires repository path to ' |
|
81 | raise VCSError('Mercurial backend requires repository path to ' | |
77 | 'be instance of <str> got %s instead' % |
|
82 | 'be instance of <str> got %s instead' % | |
78 | type(repo_path)) |
|
83 | type(repo_path)) | |
79 | self.path = abspath(repo_path) |
|
84 | self.path = abspath(repo_path) | |
80 | self.baseui = baseui or mercurial.ui.ui() |
|
85 | self.baseui = baseui or mercurial.ui.ui() | |
81 | # We've set path and ui, now we can set _repo itself |
|
86 | # We've set path and ui, now we can set _repo itself | |
82 | self._repo = self._get_repo(create, src_url, update_after_clone) |
|
87 | self._repo = self._get_repo(create, src_url, update_after_clone) | |
83 |
|
88 | |||
84 | @property |
|
89 | @property | |
85 | def _empty(self): |
|
90 | def _empty(self): | |
86 | """ |
|
91 | """ | |
87 | Checks if repository is empty, i.e. without any changesets |
|
92 | Checks if repository is empty, i.e. without any changesets | |
88 | """ |
|
93 | """ | |
89 | # TODO: Following raises errors when using InMemoryChangeset... |
|
94 | # TODO: Following raises errors when using InMemoryChangeset... | |
90 | # return len(self._repo.changelog) == 0 |
|
95 | # return len(self._repo.changelog) == 0 | |
91 | return len(self.revisions) == 0 |
|
96 | return len(self.revisions) == 0 | |
92 |
|
97 | |||
93 | @LazyProperty |
|
98 | @LazyProperty | |
94 | def revisions(self): |
|
99 | def revisions(self): | |
95 | """ |
|
100 | """ | |
96 | Returns list of revisions' ids, in ascending order. Being a lazy |
|
101 | Returns list of revisions' ids, in ascending order. Being a lazy | |
97 | attribute allows external tools to inject shas from cache. |
|
102 | attribute allows external tools to inject shas from cache. | |
98 | """ |
|
103 | """ | |
99 | return self._get_all_revisions() |
|
104 | return self._get_all_revisions() | |
100 |
|
105 | |||
101 | @LazyProperty |
|
106 | @LazyProperty | |
102 | def name(self): |
|
107 | def name(self): | |
103 | return os.path.basename(self.path) |
|
108 | return os.path.basename(self.path) | |
104 |
|
109 | |||
105 | @LazyProperty |
|
110 | @LazyProperty | |
106 | def branches(self): |
|
111 | def branches(self): | |
107 | return self._get_branches() |
|
112 | return self._get_branches() | |
108 |
|
113 | |||
109 | @LazyProperty |
|
114 | @LazyProperty | |
110 | def closed_branches(self): |
|
115 | def closed_branches(self): | |
111 | return self._get_branches(normal=False, closed=True) |
|
116 | return self._get_branches(normal=False, closed=True) | |
112 |
|
117 | |||
113 | @LazyProperty |
|
118 | @LazyProperty | |
114 | def allbranches(self): |
|
119 | def allbranches(self): | |
115 | """ |
|
120 | """ | |
116 | List all branches, including closed branches. |
|
121 | List all branches, including closed branches. | |
117 | """ |
|
122 | """ | |
118 | return self._get_branches(closed=True) |
|
123 | return self._get_branches(closed=True) | |
119 |
|
124 | |||
120 | def _get_branches(self, normal=True, closed=False): |
|
125 | def _get_branches(self, normal=True, closed=False): | |
121 | """ |
|
126 | """ | |
122 | Gets branches for this repository |
|
127 | Gets branches for this repository | |
123 | Returns only non-closed branches by default |
|
128 | Returns only non-closed branches by default | |
124 |
|
129 | |||
125 | :param closed: return also closed branches for mercurial |
|
130 | :param closed: return also closed branches for mercurial | |
126 | :param normal: return also normal branches |
|
131 | :param normal: return also normal branches | |
127 | """ |
|
132 | """ | |
128 |
|
133 | |||
129 | if self._empty: |
|
134 | if self._empty: | |
130 | return {} |
|
135 | return {} | |
131 |
|
136 | |||
132 | bt = OrderedDict() |
|
137 | bt = OrderedDict() | |
133 | for bn, _heads, node, isclosed in sorted(self._repo.branchmap().iterbranches()): |
|
138 | for bn, _heads, node, isclosed in sorted(self._repo.branchmap().iterbranches()): | |
134 | if isclosed: |
|
139 | if isclosed: | |
135 | if closed: |
|
140 | if closed: | |
136 | bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node)) |
|
141 | bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node)) | |
137 | else: |
|
142 | else: | |
138 | if normal: |
|
143 | if normal: | |
139 | bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node)) |
|
144 | bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node)) | |
140 | return bt |
|
145 | return bt | |
141 |
|
146 | |||
142 | @LazyProperty |
|
147 | @LazyProperty | |
143 | def tags(self): |
|
148 | def tags(self): | |
144 | """ |
|
149 | """ | |
145 | Gets tags for this repository |
|
150 | Gets tags for this repository | |
146 | """ |
|
151 | """ | |
147 | return self._get_tags() |
|
152 | return self._get_tags() | |
148 |
|
153 | |||
149 | def _get_tags(self): |
|
154 | def _get_tags(self): | |
150 | if self._empty: |
|
155 | if self._empty: | |
151 | return {} |
|
156 | return {} | |
152 |
|
157 | |||
153 | return OrderedDict(sorted( |
|
158 | return OrderedDict(sorted( | |
154 | ((safe_str(n), ascii_str(mercurial.node.hex(h))) for n, h in self._repo.tags().items()), |
|
159 | ((safe_str(n), ascii_str(mercurial.node.hex(h))) for n, h in self._repo.tags().items()), | |
155 | reverse=True, |
|
160 | reverse=True, | |
156 | key=lambda x: x[0], # sort by name |
|
161 | key=lambda x: x[0], # sort by name | |
157 | )) |
|
162 | )) | |
158 |
|
163 | |||
159 | def tag(self, name, user, revision=None, message=None, date=None, |
|
164 | def tag(self, name, user, revision=None, message=None, date=None, | |
160 | **kwargs): |
|
165 | **kwargs): | |
161 | """ |
|
166 | """ | |
162 | Creates and returns a tag for the given ``revision``. |
|
167 | Creates and returns a tag for the given ``revision``. | |
163 |
|
168 | |||
164 | :param name: name for new tag |
|
169 | :param name: name for new tag | |
165 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
170 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
166 | :param revision: changeset id for which new tag would be created |
|
171 | :param revision: changeset id for which new tag would be created | |
167 | :param message: message of the tag's commit |
|
172 | :param message: message of the tag's commit | |
168 | :param date: date of tag's commit |
|
173 | :param date: date of tag's commit | |
169 |
|
174 | |||
170 | :raises TagAlreadyExistError: if tag with same name already exists |
|
175 | :raises TagAlreadyExistError: if tag with same name already exists | |
171 | """ |
|
176 | """ | |
172 | if name in self.tags: |
|
177 | if name in self.tags: | |
173 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
178 | raise TagAlreadyExistError("Tag %s already exists" % name) | |
174 | changeset = self.get_changeset(revision) |
|
179 | changeset = self.get_changeset(revision) | |
175 | local = kwargs.setdefault('local', False) |
|
180 | local = kwargs.setdefault('local', False) | |
176 |
|
181 | |||
177 | if message is None: |
|
182 | if message is None: | |
178 | message = "Added tag %s for changeset %s" % (name, |
|
183 | message = "Added tag %s for changeset %s" % (name, | |
179 | changeset.short_id) |
|
184 | changeset.short_id) | |
180 |
|
185 | |||
181 | if date is None: |
|
186 | if date is None: | |
182 | date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S')) |
|
187 | date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S')) | |
183 |
|
188 | |||
184 | try: |
|
189 | try: | |
185 | mercurial.tags.tag(self._repo, safe_bytes(name), changeset._ctx.node(), safe_bytes(message), local, safe_bytes(user), date) |
|
190 | mercurial.tags.tag(self._repo, safe_bytes(name), changeset._ctx.node(), safe_bytes(message), local, safe_bytes(user), date) | |
186 | except mercurial.error.Abort as e: |
|
191 | except mercurial.error.Abort as e: | |
187 | raise RepositoryError(e.args[0]) |
|
192 | raise RepositoryError(e.args[0]) | |
188 |
|
193 | |||
189 | # Reinitialize tags |
|
194 | # Reinitialize tags | |
190 | self.tags = self._get_tags() |
|
195 | self.tags = self._get_tags() | |
191 | tag_id = self.tags[name] |
|
196 | tag_id = self.tags[name] | |
192 |
|
197 | |||
193 | return self.get_changeset(revision=tag_id) |
|
198 | return self.get_changeset(revision=tag_id) | |
194 |
|
199 | |||
195 | def remove_tag(self, name, user, message=None, date=None): |
|
200 | def remove_tag(self, name, user, message=None, date=None): | |
196 | """ |
|
201 | """ | |
197 | Removes tag with the given ``name``. |
|
202 | Removes tag with the given ``name``. | |
198 |
|
203 | |||
199 | :param name: name of the tag to be removed |
|
204 | :param name: name of the tag to be removed | |
200 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
205 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
201 | :param message: message of the tag's removal commit |
|
206 | :param message: message of the tag's removal commit | |
202 | :param date: date of tag's removal commit |
|
207 | :param date: date of tag's removal commit | |
203 |
|
208 | |||
204 | :raises TagDoesNotExistError: if tag with given name does not exist |
|
209 | :raises TagDoesNotExistError: if tag with given name does not exist | |
205 | """ |
|
210 | """ | |
206 | if name not in self.tags: |
|
211 | if name not in self.tags: | |
207 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
212 | raise TagDoesNotExistError("Tag %s does not exist" % name) | |
208 | if message is None: |
|
213 | if message is None: | |
209 | message = "Removed tag %s" % name |
|
214 | message = "Removed tag %s" % name | |
210 | if date is None: |
|
215 | if date is None: | |
211 | date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S')) |
|
216 | date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S')) | |
212 | local = False |
|
217 | local = False | |
213 |
|
218 | |||
214 | try: |
|
219 | try: | |
215 | mercurial.tags.tag(self._repo, safe_bytes(name), mercurial.node.nullid, safe_bytes(message), local, safe_bytes(user), date) |
|
220 | mercurial.tags.tag(self._repo, safe_bytes(name), mercurial.node.nullid, safe_bytes(message), local, safe_bytes(user), date) | |
216 | self.tags = self._get_tags() |
|
221 | self.tags = self._get_tags() | |
217 | except mercurial.error.Abort as e: |
|
222 | except mercurial.error.Abort as e: | |
218 | raise RepositoryError(e.args[0]) |
|
223 | raise RepositoryError(e.args[0]) | |
219 |
|
224 | |||
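A usage sketch for the two tag helpers above; the tag name, user string and revision are placeholder values:

    cs = repo.tag('v1.2.0', 'Joe Doe <joe.doe@example.com>', revision='tip')
    # ...and later, dropping it again (recorded as a new tagging changeset):
    repo.remove_tag('v1.2.0', 'Joe Doe <joe.doe@example.com>',
                    message='Removed tag v1.2.0')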
220 | @LazyProperty |
|
225 | @LazyProperty | |
221 | def bookmarks(self): |
|
226 | def bookmarks(self): | |
222 | """ |
|
227 | """ | |
223 | Gets bookmarks for this repository |
|
228 | Gets bookmarks for this repository | |
224 | """ |
|
229 | """ | |
225 | return self._get_bookmarks() |
|
230 | return self._get_bookmarks() | |
226 |
|
231 | |||
227 | def _get_bookmarks(self): |
|
232 | def _get_bookmarks(self): | |
228 | if self._empty: |
|
233 | if self._empty: | |
229 | return {} |
|
234 | return {} | |
230 |
|
235 | |||
231 | return OrderedDict(sorted( |
|
236 | return OrderedDict(sorted( | |
232 | ((safe_str(n), ascii_str(mercurial.node.hex(h))) for n, h in self._repo._bookmarks.items()), |
|
237 | ((safe_str(n), ascii_str(mercurial.node.hex(h))) for n, h in self._repo._bookmarks.items()), | |
233 | reverse=True, |
|
238 | reverse=True, | |
234 | key=lambda x: x[0], # sort by name |
|
239 | key=lambda x: x[0], # sort by name | |
235 | )) |
|
240 | )) | |
236 |
|
241 | |||
237 | def _get_all_revisions(self): |
|
242 | def _get_all_revisions(self): | |
238 | return [ascii_str(self._repo[x].hex()) for x in self._repo.filtered(b'visible').changelog.revs()] |
|
243 | return [ascii_str(self._repo[x].hex()) for x in self._repo.filtered(b'visible').changelog.revs()] | |
239 |
|
244 | |||
240 | def get_diff(self, rev1, rev2, path='', ignore_whitespace=False, |
|
245 | def get_diff(self, rev1, rev2, path='', ignore_whitespace=False, | |
241 | context=3): |
|
246 | context=3): | |
242 | """ |
|
247 | """ | |
243 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
248 | Returns (git like) *diff*, as plain text. Shows changes introduced by | |
244 | ``rev2`` since ``rev1``. |
|
249 | ``rev2`` since ``rev1``. | |
245 |
|
250 | |||
246 | :param rev1: Entry point from which diff is shown. Can be |
|
251 | :param rev1: Entry point from which diff is shown. Can be | |
247 | ``self.EMPTY_CHANGESET`` - in this case, patch showing all |
|
252 | ``self.EMPTY_CHANGESET`` - in this case, patch showing all | |
248 | the changes since empty state of the repository until ``rev2`` |
|
253 | the changes since empty state of the repository until ``rev2`` | |
249 | :param rev2: Until which revision changes should be shown. |
|
254 | :param rev2: Until which revision changes should be shown. | |
250 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
255 | :param ignore_whitespace: If set to ``True``, would not show whitespace | |
251 | changes. Defaults to ``False``. |
|
256 | changes. Defaults to ``False``. | |
252 | :param context: How many lines before/after changed lines should be |
|
257 | :param context: How many lines before/after changed lines should be | |
253 | shown. Defaults to ``3``. If negative value is passed-in, it will be |
|
258 | shown. Defaults to ``3``. If negative value is passed-in, it will be | |
254 | set to ``0`` instead. |
|
259 | set to ``0`` instead. | |
255 | """ |
|
260 | """ | |
256 |
|
261 | |||
257 | # Negative context values make no sense, and will result in |
|
262 | # Negative context values make no sense, and will result in | |
258 | # errors. Ensure this does not happen. |
|
263 | # errors. Ensure this does not happen. | |
259 | if context < 0: |
|
264 | if context < 0: | |
260 | context = 0 |
|
265 | context = 0 | |
261 |
|
266 | |||
262 | if hasattr(rev1, 'raw_id'): |
|
267 | if hasattr(rev1, 'raw_id'): | |
263 | rev1 = getattr(rev1, 'raw_id') |
|
268 | rev1 = getattr(rev1, 'raw_id') | |
264 |
|
269 | |||
265 | if hasattr(rev2, 'raw_id'): |
|
270 | if hasattr(rev2, 'raw_id'): | |
266 | rev2 = getattr(rev2, 'raw_id') |
|
271 | rev2 = getattr(rev2, 'raw_id') | |
267 |
|
272 | |||
268 | # Check if given revisions are present at repository (may raise |
|
273 | # Check if given revisions are present at repository (may raise | |
269 | # ChangesetDoesNotExistError) |
|
274 | # ChangesetDoesNotExistError) | |
270 | if rev1 != self.EMPTY_CHANGESET: |
|
275 | if rev1 != self.EMPTY_CHANGESET: | |
271 | self.get_changeset(rev1) |
|
276 | self.get_changeset(rev1) | |
272 | self.get_changeset(rev2) |
|
277 | self.get_changeset(rev2) | |
273 | if path: |
|
278 | if path: | |
274 | file_filter = mercurial.match.exact([safe_bytes(path)]) |
|
279 | file_filter = mercurial.match.exact([safe_bytes(path)]) | |
275 | else: |
|
280 | else: | |
276 | file_filter = None |
|
281 | file_filter = None | |
277 |
|
282 | |||
278 | return b''.join(mercurial.patch.diff(self._repo, rev1, rev2, match=file_filter, |
|
283 | return b''.join(mercurial.patch.diff(self._repo, rev1, rev2, match=file_filter, | |
279 | opts=mercurial.mdiff.diffopts(git=True, |
|
284 | opts=mercurial.mdiff.diffopts(git=True, | |
280 | showfunc=True, |
|
285 | showfunc=True, | |
281 | ignorews=ignore_whitespace, |
|
286 | ignorews=ignore_whitespace, | |
282 | context=context))) |
|
287 | context=context))) | |
283 |
|
288 | |||
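A minimal sketch of how the documented parameters of the Mercurial get_diff fit together; `old_rev` and `new_rev` stand for full 40-character changeset ids and are assumptions:

    diff_bytes = repo.get_diff(old_rev, new_rev, path='setup.py',
                               ignore_whitespace=True, context=5)
    # against the empty repository state:
    full = repo.get_diff(repo.EMPTY_CHANGESET, new_rev)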
284 | @staticmethod |
|
289 | @staticmethod | |
285 | def _check_url(url, repoui=None): |
|
290 | def _check_url(url, repoui=None): | |
286 | r""" |
|
291 | r""" | |
287 | Raise URLError if url doesn't seem like a valid safe Hg URL. We |
|
292 | Raise URLError if url doesn't seem like a valid safe Hg URL. We | |
288 | only allow http, https, ssh, and hg-git URLs. |
|
293 | only allow http, https, ssh, and hg-git URLs. | |
289 |
|
294 | |||
290 | For http, https and git URLs, make a connection and probe to see if it is valid. |
|
295 | For http, https and git URLs, make a connection and probe to see if it is valid. | |
291 |
|
296 | |||
292 | On failure it raises urllib.error.URLError; an exception is also |
|
297 | On failure it raises urllib.error.URLError; an exception is also | |
293 | raised when the return code is not 200 |
|
298 | raised when the return code is not 200 | |
294 |
|
299 | |||
295 | >>> MercurialRepository._check_url('file:///repo') |
|
300 | >>> MercurialRepository._check_url('file:///repo') | |
296 |
|
301 | |||
297 | >>> MercurialRepository._check_url('http://example.com:65537/repo') |
|
302 | >>> MercurialRepository._check_url('http://example.com:65537/repo') | |
298 | Traceback (most recent call last): |
|
303 | Traceback (most recent call last): | |
299 | ... |
|
304 | ... | |
300 | urllib.error.URLError: <urlopen error Error parsing URL: 'http://example.com:65537/repo'> |
|
305 | urllib.error.URLError: <urlopen error Error parsing URL: 'http://example.com:65537/repo'> | |
301 | >>> MercurialRepository._check_url('foo') |
|
306 | >>> MercurialRepository._check_url('foo') | |
302 | Traceback (most recent call last): |
|
307 | Traceback (most recent call last): | |
303 | ... |
|
308 | ... | |
304 | urllib.error.URLError: <urlopen error Unsupported protocol in URL 'foo'> |
|
309 | urllib.error.URLError: <urlopen error Unsupported protocol in URL 'foo'> | |
305 | >>> MercurialRepository._check_url('git+ssh://example.com/my%20fine repo') |
|
310 | >>> MercurialRepository._check_url('git+ssh://example.com/my%20fine repo') | |
306 | Traceback (most recent call last): |
|
311 | Traceback (most recent call last): | |
307 | ... |
|
312 | ... | |
308 | urllib.error.URLError: <urlopen error Unsupported protocol in URL 'git+ssh://example.com/my%20fine repo'> |
|
313 | urllib.error.URLError: <urlopen error Unsupported protocol in URL 'git+ssh://example.com/my%20fine repo'> | |
309 | >>> MercurialRepository._check_url('svn+http://example.com/repo') |
|
314 | >>> MercurialRepository._check_url('svn+http://example.com/repo') | |
310 | Traceback (most recent call last): |
|
315 | Traceback (most recent call last): | |
311 | ... |
|
316 | ... | |
312 | urllib.error.URLError: <urlopen error Unsupported protocol in URL 'svn+http://example.com/repo'> |
|
317 | urllib.error.URLError: <urlopen error Unsupported protocol in URL 'svn+http://example.com/repo'> | |
313 | """ |
|
318 | """ | |
314 | try: |
|
319 | try: | |
315 | parsed_url = urllib.parse.urlparse(url) |
|
320 | parsed_url = urllib.parse.urlparse(url) | |
316 | parsed_url.port # trigger netloc parsing which might raise ValueError |
|
321 | parsed_url.port # trigger netloc parsing which might raise ValueError | |
317 | except ValueError: |
|
322 | except ValueError: | |
318 | raise urllib.error.URLError("Error parsing URL: %r" % url) |
|
323 | raise urllib.error.URLError("Error parsing URL: %r" % url) | |
319 |
|
324 | |||
320 | # check first whether it is a local url |
|
325 | # check first whether it is a local url | |
321 | if os.path.isabs(url) and os.path.isdir(url) or parsed_url.scheme == 'file': |
|
326 | if os.path.isabs(url) and os.path.isdir(url) or parsed_url.scheme == 'file': | |
322 | # When creating repos, _get_url will use file protocol for local paths |
|
327 | # When creating repos, _get_url will use file protocol for local paths | |
323 | return |
|
328 | return | |
324 |
|
329 | |||
325 | if parsed_url.scheme not in ['http', 'https', 'ssh', 'git+http', 'git+https']: |
|
330 | if parsed_url.scheme not in ['http', 'https', 'ssh', 'git+http', 'git+https']: | |
326 | raise urllib.error.URLError("Unsupported protocol in URL %r" % url) |
|
331 | raise urllib.error.URLError("Unsupported protocol in URL %r" % url) | |
327 |
|
332 | |||
328 | url = safe_bytes(url) |
|
333 | url = safe_bytes(url) | |
329 |
|
334 | |||
330 | if parsed_url.scheme == 'ssh': |
|
335 | if parsed_url.scheme == 'ssh': | |
331 | # in case of invalid uri or authentication issues, sshpeer will |
|
336 | # in case of invalid uri or authentication issues, sshpeer will | |
332 | # throw an exception. |
|
337 | # throw an exception. | |
333 | mercurial.sshpeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip') |
|
338 | mercurial.sshpeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip') | |
334 | return |
|
339 | return | |
335 |
|
340 | |||
336 | if '+' in parsed_url.scheme: # strip 'git+' for hg-git URLs |
|
341 | if '+' in parsed_url.scheme: # strip 'git+' for hg-git URLs | |
337 | url = url.split(b'+', 1)[1] |
|
342 | url = url.split(b'+', 1)[1] | |
338 |
|
343 | |||
339 | url_obj = mercurial.util.url(url) |
|
344 | url_obj = hg_url(url) | |
340 | test_uri, handlers = get_urllib_request_handlers(url_obj) |
|
345 | test_uri, handlers = get_urllib_request_handlers(url_obj) | |
341 |
|
346 | |||
342 | url_obj.passwd = b'*****' |
|
347 | url_obj.passwd = b'*****' | |
343 | cleaned_uri = str(url_obj) |
|
348 | cleaned_uri = str(url_obj) | |
344 |
|
349 | |||
345 | o = urllib.request.build_opener(*handlers) |
|
350 | o = urllib.request.build_opener(*handlers) | |
346 | o.addheaders = [('Content-Type', 'application/mercurial-0.1'), |
|
351 | o.addheaders = [('Content-Type', 'application/mercurial-0.1'), | |
347 | ('Accept', 'application/mercurial-0.1')] |
|
352 | ('Accept', 'application/mercurial-0.1')] | |
348 |
|
353 | |||
349 | req = urllib.request.Request( |
|
354 | req = urllib.request.Request( | |
350 | "%s?%s" % ( |
|
355 | "%s?%s" % ( | |
351 | safe_str(test_uri), |
|
356 | safe_str(test_uri), | |
352 | urllib.parse.urlencode({ |
|
357 | urllib.parse.urlencode({ | |
353 | 'cmd': 'between', |
|
358 | 'cmd': 'between', | |
354 | 'pairs': "%s-%s" % ('0' * 40, '0' * 40), |
|
359 | 'pairs': "%s-%s" % ('0' * 40, '0' * 40), | |
355 | }) |
|
360 | }) | |
356 | )) |
|
361 | )) | |
357 |
|
362 | |||
358 | try: |
|
363 | try: | |
359 | resp = o.open(req) |
|
364 | resp = o.open(req) | |
360 | if resp.code != 200: |
|
365 | if resp.code != 200: | |
361 | raise Exception('Return Code is not 200') |
|
366 | raise Exception('Return Code is not 200') | |
362 | except Exception as e: |
|
367 | except Exception as e: | |
363 | # means it cannot be cloned |
|
368 | # means it cannot be cloned | |
364 | raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e)) |
|
369 | raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e)) | |
365 |
|
370 | |||
366 | if parsed_url.scheme in ['http', 'https']: # skip git+http://... etc |
|
371 | if parsed_url.scheme in ['http', 'https']: # skip git+http://... etc | |
367 | # now check if it's a proper hg repo |
|
372 | # now check if it's a proper hg repo | |
368 | try: |
|
373 | try: | |
369 | mercurial.httppeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip') |
|
374 | mercurial.httppeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip') | |
370 | except Exception as e: |
|
375 | except Exception as e: | |
371 | raise urllib.error.URLError( |
|
376 | raise urllib.error.URLError( | |
372 | "url [%s] does not look like an hg repo org_exc: %s" |
|
377 | "url [%s] does not look like an hg repo org_exc: %s" | |
373 | % (cleaned_uri, e)) |
|
378 | % (cleaned_uri, e)) | |
374 |
|
379 | |||
375 | def _get_repo(self, create, src_url=None, update_after_clone=False): |
|
380 | def _get_repo(self, create, src_url=None, update_after_clone=False): | |
376 | """ |
|
381 | """ | |
377 | Function will check for mercurial repository in given path and return |
|
382 | Function will check for mercurial repository in given path and return | |
378 | a localrepo object. If there is no repository in that path it will |
|
383 | a localrepo object. If there is no repository in that path it will | |
379 | raise an exception unless ``create`` parameter is set to True - in |
|
384 | raise an exception unless ``create`` parameter is set to True - in | |
380 | that case repository would be created and returned. |
|
385 | that case repository would be created and returned. | |
381 | If ``src_url`` is given, would try to clone repository from the |
|
386 | If ``src_url`` is given, would try to clone repository from the | |
382 | location at given clone_point. Additionally it'll update the |
|
387 | location at given clone_point. Additionally it'll update the | |
383 | working copy according to the ``update_after_clone`` flag |
|
388 | working copy according to the ``update_after_clone`` flag | |
384 | """ |
|
389 | """ | |
385 | try: |
|
390 | try: | |
386 | if src_url: |
|
391 | if src_url: | |
387 | url = self._get_url(src_url) |
|
392 | url = self._get_url(src_url) | |
388 | opts = {} |
|
393 | opts = {} | |
389 | if not update_after_clone: |
|
394 | if not update_after_clone: | |
390 | opts.update({'noupdate': True}) |
|
395 | opts.update({'noupdate': True}) | |
391 | MercurialRepository._check_url(url, self.baseui) |
|
396 | MercurialRepository._check_url(url, self.baseui) | |
392 | mercurial.commands.clone(self.baseui, safe_bytes(url), safe_bytes(self.path), **opts) |
|
397 | mercurial.commands.clone(self.baseui, safe_bytes(url), safe_bytes(self.path), **opts) | |
393 |
|
398 | |||
394 | # Don't try to create if we've already cloned repo |
|
399 | # Don't try to create if we've already cloned repo | |
395 | create = False |
|
400 | create = False | |
396 | return mercurial.localrepo.instance(self.baseui, safe_bytes(self.path), create=create) |
|
401 | return mercurial.localrepo.instance(self.baseui, safe_bytes(self.path), create=create) | |
397 | except (mercurial.error.Abort, mercurial.error.RepoError) as err: |
|
402 | except (mercurial.error.Abort, mercurial.error.RepoError) as err: | |
398 | if create: |
|
403 | if create: | |
399 | msg = "Cannot create repository at %s. Original error was %s" \ |
|
404 | msg = "Cannot create repository at %s. Original error was %s" \ | |
400 | % (self.name, err) |
|
405 | % (self.name, err) | |
401 | else: |
|
406 | else: | |
402 | msg = "Not valid repository at %s. Original error was %s" \ |
|
407 | msg = "Not valid repository at %s. Original error was %s" \ | |
403 | % (self.name, err) |
|
408 | % (self.name, err) | |
404 | raise RepositoryError(msg) |
|
409 | raise RepositoryError(msg) | |
405 |
|
410 | |||
406 | @LazyProperty |
|
411 | @LazyProperty | |
407 | def in_memory_changeset(self): |
|
412 | def in_memory_changeset(self): | |
408 | return inmemory.MercurialInMemoryChangeset(self) |
|
413 | return inmemory.MercurialInMemoryChangeset(self) | |
409 |
|
414 | |||
410 | @LazyProperty |
|
415 | @LazyProperty | |
411 | def description(self): |
|
416 | def description(self): | |
412 | _desc = self._repo.ui.config(b'web', b'description', None, untrusted=True) |
|
417 | _desc = self._repo.ui.config(b'web', b'description', None, untrusted=True) | |
413 | return safe_str(_desc or b'unknown') |
|
418 | return safe_str(_desc or b'unknown') | |
414 |
|
419 | |||
415 | @LazyProperty |
|
420 | @LazyProperty | |
416 | def last_change(self): |
|
421 | def last_change(self): | |
417 | """ |
|
422 | """ | |
418 | Returns last change made on this repository as datetime object |
|
423 | Returns last change made on this repository as datetime object | |
419 | """ |
|
424 | """ | |
420 | return date_fromtimestamp(self._get_mtime(), makedate()[1]) |
|
425 | return date_fromtimestamp(self._get_mtime(), makedate()[1]) | |
421 |
|
426 | |||
422 | def _get_mtime(self): |
|
427 | def _get_mtime(self): | |
423 | try: |
|
428 | try: | |
424 | return time.mktime(self.get_changeset().date.timetuple()) |
|
429 | return time.mktime(self.get_changeset().date.timetuple()) | |
425 | except RepositoryError: |
|
430 | except RepositoryError: | |
426 | # fallback to filesystem |
|
431 | # fallback to filesystem | |
427 | cl_path = os.path.join(self.path, '.hg', "00changelog.i") |
|
432 | cl_path = os.path.join(self.path, '.hg', "00changelog.i") | |
428 | st_path = os.path.join(self.path, '.hg', "store") |
|
433 | st_path = os.path.join(self.path, '.hg', "store") | |
429 | if os.path.exists(cl_path): |
|
434 | if os.path.exists(cl_path): | |
430 | return os.stat(cl_path).st_mtime |
|
435 | return os.stat(cl_path).st_mtime | |
431 | else: |
|
436 | else: | |
432 | return os.stat(st_path).st_mtime |
|
437 | return os.stat(st_path).st_mtime | |
433 |
|
438 | |||
434 | def _get_revision(self, revision): |
|
439 | def _get_revision(self, revision): | |
435 | """ |
|
440 | """ | |
436 | Given any revision identifier, returns a 40 char string with revision hash. |
|
441 | Given any revision identifier, returns a 40 char string with revision hash. | |
437 |
|
442 | |||
438 | :param revision: str or int or None |
|
443 | :param revision: str or int or None | |
439 | """ |
|
444 | """ | |
440 | if self._empty: |
|
445 | if self._empty: | |
441 | raise EmptyRepositoryError("There are no changesets yet") |
|
446 | raise EmptyRepositoryError("There are no changesets yet") | |
442 |
|
447 | |||
443 | if revision in [-1, None]: |
|
448 | if revision in [-1, None]: | |
444 | revision = b'tip' |
|
449 | revision = b'tip' | |
445 | elif isinstance(revision, str): |
|
450 | elif isinstance(revision, str): | |
446 | revision = safe_bytes(revision) |
|
451 | revision = safe_bytes(revision) | |
447 |
|
452 | |||
448 | try: |
|
453 | try: | |
449 | if isinstance(revision, int): |
|
454 | if isinstance(revision, int): | |
450 | return ascii_str(self._repo[revision].hex()) |
|
455 | return ascii_str(self._repo[revision].hex()) | |
451 | return ascii_str(mercurial.scmutil.revsymbol(self._repo, revision).hex()) |
|
456 | return ascii_str(mercurial.scmutil.revsymbol(self._repo, revision).hex()) | |
452 | except (IndexError, ValueError, mercurial.error.RepoLookupError, TypeError): |
|
457 | except (IndexError, ValueError, mercurial.error.RepoLookupError, TypeError): | |
453 | msg = "Revision %r does not exist for %s" % (safe_str(revision), self.name) |
|
458 | msg = "Revision %r does not exist for %s" % (safe_str(revision), self.name) | |
454 | raise ChangesetDoesNotExistError(msg) |
|
459 | raise ChangesetDoesNotExistError(msg) | |
455 | except (LookupError, ): |
|
460 | except (LookupError, ): | |
456 | msg = "Ambiguous identifier `%s` for %s" % (safe_str(revision), self.name) |
|
461 | msg = "Ambiguous identifier `%s` for %s" % (safe_str(revision), self.name) | |
457 | raise ChangesetDoesNotExistError(msg) |
|
462 | raise ChangesetDoesNotExistError(msg) | |
458 |
|
463 | |||
459 | def get_ref_revision(self, ref_type, ref_name): |
|
464 | def get_ref_revision(self, ref_type, ref_name): | |
460 | """ |
|
465 | """ | |
461 | Returns revision number for the given reference. |
|
466 | Returns revision number for the given reference. | |
462 | """ |
|
467 | """ | |
463 | if ref_type == 'rev' and not ref_name.strip('0'): |
|
468 | if ref_type == 'rev' and not ref_name.strip('0'): | |
464 | return self.EMPTY_CHANGESET |
|
469 | return self.EMPTY_CHANGESET | |
465 | # look up the exact node id |
|
470 | # look up the exact node id | |
466 | _revset_predicates = { |
|
471 | _revset_predicates = { | |
467 | 'branch': 'branch', |
|
472 | 'branch': 'branch', | |
468 | 'book': 'bookmark', |
|
473 | 'book': 'bookmark', | |
469 | 'tag': 'tag', |
|
474 | 'tag': 'tag', | |
470 | 'rev': 'id', |
|
475 | 'rev': 'id', | |
471 | } |
|
476 | } | |
472 | # avoid expensive branch(x) iteration over whole repo |
|
477 | # avoid expensive branch(x) iteration over whole repo | |
473 | rev_spec = "%%s & %s(%%s)" % _revset_predicates[ref_type] |
|
478 | rev_spec = "%%s & %s(%%s)" % _revset_predicates[ref_type] | |
474 | try: |
|
479 | try: | |
475 | revs = self._repo.revs(rev_spec, ref_name, ref_name) |
|
480 | revs = self._repo.revs(rev_spec, ref_name, ref_name) | |
476 | except LookupError: |
|
481 | except LookupError: | |
477 | msg = "Ambiguous identifier %s:%s for %s" % (ref_type, ref_name, self.name) |
|
482 | msg = "Ambiguous identifier %s:%s for %s" % (ref_type, ref_name, self.name) | |
478 | raise ChangesetDoesNotExistError(msg) |
|
483 | raise ChangesetDoesNotExistError(msg) | |
479 | except mercurial.error.RepoLookupError: |
|
484 | except mercurial.error.RepoLookupError: | |
480 | msg = "Revision %s:%s does not exist for %s" % (ref_type, ref_name, self.name) |
|
485 | msg = "Revision %s:%s does not exist for %s" % (ref_type, ref_name, self.name) | |
481 | raise ChangesetDoesNotExistError(msg) |
|
486 | raise ChangesetDoesNotExistError(msg) | |
482 | if revs: |
|
487 | if revs: | |
483 | revision = revs.last() |
|
488 | revision = revs.last() | |
484 | else: |
|
489 | else: | |
485 | # TODO: just report 'not found'? |
|
490 | # TODO: just report 'not found'? | |
486 | revision = ref_name |
|
491 | revision = ref_name | |
487 |
|
492 | |||
488 | return self._get_revision(revision) |
|
493 | return self._get_revision(revision) | |
489 |
|
494 | |||
490 | def _get_archives(self, archive_name='tip'): |
|
495 | def _get_archives(self, archive_name='tip'): | |
491 | allowed = self.baseui.configlist(b"web", b"allow_archive", |
|
496 | allowed = self.baseui.configlist(b"web", b"allow_archive", | |
492 | untrusted=True) |
|
497 | untrusted=True) | |
493 | for name, ext in [(b'zip', '.zip'), (b'gz', '.tar.gz'), (b'bz2', '.tar.bz2')]: |
|
498 | for name, ext in [(b'zip', '.zip'), (b'gz', '.tar.gz'), (b'bz2', '.tar.bz2')]: | |
494 | if name in allowed or self._repo.ui.configbool(b"web", |
|
499 | if name in allowed or self._repo.ui.configbool(b"web", | |
495 | b"allow" + name, |
|
500 | b"allow" + name, | |
496 | untrusted=True): |
|
501 | untrusted=True): | |
497 | yield {"type": safe_str(name), "extension": ext, "node": archive_name} |
|
502 | yield {"type": safe_str(name), "extension": ext, "node": archive_name} | |
498 |
|
503 | |||
499 | def _get_url(self, url): |
|
504 | def _get_url(self, url): | |
500 | """ |
|
505 | """ | |
501 | Returns normalized url. If no scheme is given, fall back to the |
|
506 | Returns normalized url. If no scheme is given, fall back to the | |
502 | filesystem (``file:///``) scheme. |
|
507 | filesystem (``file:///``) scheme. | |
503 | """ |
|
508 | """ | |
504 | if url != 'default' and '://' not in url: |
|
509 | if url != 'default' and '://' not in url: | |
505 | url = "file:" + urllib.request.pathname2url(url) |
|
510 | url = "file:" + urllib.request.pathname2url(url) | |
506 | return url |
|
511 | return url | |
507 |
|
512 | |||
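A small illustration of the normalization above (paths and URL are hypothetical):

    repo._get_url('/srv/repos/project')          # local path gets a file: scheme prefix
    repo._get_url('https://hg.example.com/p')    # anything with a scheme passes through unchanged
    repo._get_url('default')                     # the 'default' path alias also passes through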
508 | def get_changeset(self, revision=None): |
|
513 | def get_changeset(self, revision=None): | |
509 | """ |
|
514 | """ | |
510 | Returns ``MercurialChangeset`` object representing repository's |
|
515 | Returns ``MercurialChangeset`` object representing repository's | |
511 | changeset at the given ``revision``. |
|
516 | changeset at the given ``revision``. | |
512 | """ |
|
517 | """ | |
513 | return changeset.MercurialChangeset(repository=self, revision=self._get_revision(revision)) |
|
518 | return changeset.MercurialChangeset(repository=self, revision=self._get_revision(revision)) | |
514 |
|
519 | |||
515 | def get_changesets(self, start=None, end=None, start_date=None, |
|
520 | def get_changesets(self, start=None, end=None, start_date=None, | |
516 | end_date=None, branch_name=None, reverse=False, max_revisions=None): |
|
521 | end_date=None, branch_name=None, reverse=False, max_revisions=None): | |
517 | """ |
|
522 | """ | |
518 | Returns iterator of ``MercurialChangeset`` objects from start to end |
|
523 | Returns iterator of ``MercurialChangeset`` objects from start to end | |
519 | (both are inclusive) |
|
524 | (both are inclusive) | |
520 |
|
525 | |||
521 | :param start: None, str, int or mercurial lookup format |
|
526 | :param start: None, str, int or mercurial lookup format | |
522 | :param end: None, str, int or mercurial lookup format |
|
527 | :param end: None, str, int or mercurial lookup format | |
523 | :param start_date: |
|
528 | :param start_date: | |
524 | :param end_date: |
|
529 | :param end_date: | |
525 | :param branch_name: |
|
530 | :param branch_name: | |
526 | :param reverse: return changesets in reversed order |
|
531 | :param reverse: return changesets in reversed order | |
527 | """ |
|
532 | """ | |
528 | start_raw_id = self._get_revision(start) |
|
533 | start_raw_id = self._get_revision(start) | |
529 | start_pos = None if start is None else self.revisions.index(start_raw_id) |
|
534 | start_pos = None if start is None else self.revisions.index(start_raw_id) | |
530 | end_raw_id = self._get_revision(end) |
|
535 | end_raw_id = self._get_revision(end) | |
531 | end_pos = None if end is None else self.revisions.index(end_raw_id) |
|
536 | end_pos = None if end is None else self.revisions.index(end_raw_id) | |
532 |
|
537 | |||
533 | if start_pos is not None and end_pos is not None and start_pos > end_pos: |
|
538 | if start_pos is not None and end_pos is not None and start_pos > end_pos: | |
534 | raise RepositoryError("Start revision '%s' cannot be " |
|
539 | raise RepositoryError("Start revision '%s' cannot be " | |
535 | "after end revision '%s'" % (start, end)) |
|
540 | "after end revision '%s'" % (start, end)) | |
536 |
|
541 | |||
537 | if branch_name and branch_name not in self.allbranches: |
|
542 | if branch_name and branch_name not in self.allbranches: | |
538 | msg = "Branch %r not found in %s" % (branch_name, self.name) |
|
543 | msg = "Branch %r not found in %s" % (branch_name, self.name) | |
539 | raise BranchDoesNotExistError(msg) |
|
544 | raise BranchDoesNotExistError(msg) | |
540 | if end_pos is not None: |
|
545 | if end_pos is not None: | |
541 | end_pos += 1 |
|
546 | end_pos += 1 | |
542 | # filter branches |
|
547 | # filter branches | |
543 | filter_ = [] |
|
548 | filter_ = [] | |
544 | if branch_name: |
|
549 | if branch_name: | |
545 | filter_.append(b'branch("%s")' % safe_bytes(branch_name)) |
|
550 | filter_.append(b'branch("%s")' % safe_bytes(branch_name)) | |
546 | if start_date: |
|
551 | if start_date: | |
547 | filter_.append(b'date(">%s")' % safe_bytes(str(start_date))) |
|
552 | filter_.append(b'date(">%s")' % safe_bytes(str(start_date))) | |
548 | if end_date: |
|
553 | if end_date: | |
549 | filter_.append(b'date("<%s")' % safe_bytes(str(end_date))) |
|
554 | filter_.append(b'date("<%s")' % safe_bytes(str(end_date))) | |
550 | if filter_ or max_revisions: |
|
555 | if filter_ or max_revisions: | |
551 | if filter_: |
|
556 | if filter_: | |
552 | revspec = b' and '.join(filter_) |
|
557 | revspec = b' and '.join(filter_) | |
553 | else: |
|
558 | else: | |
554 | revspec = b'all()' |
|
559 | revspec = b'all()' | |
555 | if max_revisions: |
|
560 | if max_revisions: | |
556 | revspec = b'limit(%s, %d)' % (revspec, max_revisions) |
|
561 | revspec = b'limit(%s, %d)' % (revspec, max_revisions) | |
557 | revisions = mercurial.scmutil.revrange(self._repo, [revspec]) |
|
562 | revisions = mercurial.scmutil.revrange(self._repo, [revspec]) | |
558 | else: |
|
563 | else: | |
559 | revisions = self.revisions |
|
564 | revisions = self.revisions | |
560 |
|
565 | |||
561 | # this is very much a hack to turn this into a list; a better solution |
|
566 | # this is very much a hack to turn this into a list; a better solution | |
562 | # would be to get rid of this function entirely and use revsets |
|
567 | # would be to get rid of this function entirely and use revsets | |
563 | revs = list(revisions)[start_pos:end_pos] |
|
568 | revs = list(revisions)[start_pos:end_pos] | |
564 | if reverse: |
|
569 | if reverse: | |
565 | revs.reverse() |
|
570 | revs.reverse() | |
566 |
|
571 | |||
567 | return CollectionGenerator(self, revs) |
|
572 | return CollectionGenerator(self, revs) | |
568 |
|
573 | |||
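A usage sketch combining the filters above; the branch name, dates and limit are placeholder values:

    for cs in repo.get_changesets(branch_name='default',
                                  start_date='2020-01-01', end_date='2021-01-01',
                                  max_revisions=50, reverse=True):
        print(cs.short_id)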
569 | def get_diff_changesets(self, org_rev, other_repo, other_rev): |
|
574 | def get_diff_changesets(self, org_rev, other_repo, other_rev): | |
570 | """ |
|
575 | """ | |
571 | Returns lists of changesets that can be merged from this repo @org_rev |
|
576 | Returns lists of changesets that can be merged from this repo @org_rev | |
572 | to other_repo @other_rev |
|
577 | to other_repo @other_rev | |
573 | ... and the other way |
|
578 | ... and the other way | |
574 | ... and the ancestors that would be used for merge |
|
579 | ... and the ancestors that would be used for merge | |
575 |
|
580 | |||
576 | :param org_rev: the revision from which we want our compare to be made |
|
581 | :param org_rev: the revision from which we want our compare to be made | |
577 | :param other_repo: repo object, most likely the fork of org_repo. It has |
|
582 | :param other_repo: repo object, most likely the fork of org_repo. It has | |
578 | all changesets that we need to obtain |
|
583 | all changesets that we need to obtain | |
579 | :param other_rev: revision we want our compare to be made on other_repo |
|
584 | :param other_rev: revision we want our compare to be made on other_repo | |
580 | """ |
|
585 | """ | |
581 | ancestors = None |
|
586 | ancestors = None | |
582 | if org_rev == other_rev: |
|
587 | if org_rev == other_rev: | |
583 | org_changesets = [] |
|
588 | org_changesets = [] | |
584 | other_changesets = [] |
|
589 | other_changesets = [] | |
585 |
|
590 | |||
586 | else: |
|
591 | else: | |
587 | # case two independent repos |
|
592 | # case two independent repos | |
588 | if self != other_repo: |
|
593 | if self != other_repo: | |
589 | hgrepo = mercurial.unionrepo.makeunionrepository(other_repo.baseui, |
|
594 | hgrepo = mercurial.unionrepo.makeunionrepository(other_repo.baseui, | |
590 | safe_bytes(other_repo.path), |
|
595 | safe_bytes(other_repo.path), | |
591 | safe_bytes(self.path)) |
|
596 | safe_bytes(self.path)) | |
592 | # all ancestors of other_rev will be in other_repo and |
|
597 | # all ancestors of other_rev will be in other_repo and | |
593 | # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot |
|
598 | # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot | |
594 |
|
599 | |||
595 | # no remote compare; do it on the same repository |
|
600 | # no remote compare; do it on the same repository | |
596 | else: |
|
601 | else: | |
597 | hgrepo = other_repo._repo |
|
602 | hgrepo = other_repo._repo | |
598 |
|
603 | |||
599 | ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in |
|
604 | ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in | |
600 | hgrepo.revs(b"id(%s) & ::id(%s)", ascii_bytes(other_rev), ascii_bytes(org_rev))] |
|
605 | hgrepo.revs(b"id(%s) & ::id(%s)", ascii_bytes(other_rev), ascii_bytes(org_rev))] | |
601 | if ancestors: |
|
606 | if ancestors: | |
602 | log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev) |
|
607 | log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev) | |
603 | else: |
|
608 | else: | |
604 | log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev) |
|
609 | log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev) | |
605 | ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in |
|
610 | ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in | |
606 | hgrepo.revs(b"heads(::id(%s) & ::id(%s))", ascii_bytes(org_rev), ascii_bytes(other_rev))] # FIXME: expensive! |
|
611 | hgrepo.revs(b"heads(::id(%s) & ::id(%s))", ascii_bytes(org_rev), ascii_bytes(other_rev))] # FIXME: expensive! | |
607 |
|
612 | |||
608 | other_changesets = [ |
|
613 | other_changesets = [ | |
609 | other_repo.get_changeset(rev) |
|
614 | other_repo.get_changeset(rev) | |
610 | for rev in hgrepo.revs( |
|
615 | for rev in hgrepo.revs( | |
611 | b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", |
|
616 | b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", | |
612 | ascii_bytes(other_rev), ascii_bytes(org_rev), ascii_bytes(org_rev)) |
|
617 | ascii_bytes(other_rev), ascii_bytes(org_rev), ascii_bytes(org_rev)) | |
613 | ] |
|
618 | ] | |
614 | org_changesets = [ |
|
619 | org_changesets = [ | |
615 | self.get_changeset(ascii_str(hgrepo[rev].hex())) |
|
620 | self.get_changeset(ascii_str(hgrepo[rev].hex())) | |
616 | for rev in hgrepo.revs( |
|
621 | for rev in hgrepo.revs( | |
617 | b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", |
|
622 | b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", | |
618 | ascii_bytes(org_rev), ascii_bytes(other_rev), ascii_bytes(other_rev)) |
|
623 | ascii_bytes(org_rev), ascii_bytes(other_rev), ascii_bytes(other_rev)) | |
619 | ] |
|
624 | ] | |
620 |
|
625 | |||
621 | return other_changesets, org_changesets, ancestors |
|
626 | return other_changesets, org_changesets, ancestors | |
622 |
|
627 | |||
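A minimal sketch of how get_diff_changesets might be driven when comparing a fork against its parent. The repository paths and revision hashes below are hypothetical; the repositories are assumed to be Mercurial ones obtained through backends.get_backend, the same registry used by the helpers later in this changeset:

    from kallithea.lib.vcs import backends

    # hypothetical repository paths; get_backend('hg') returns the Mercurial backend class
    org_repo = backends.get_backend('hg')('/srv/repos/project')
    other_repo = backends.get_backend('hg')('/srv/repos/project-fork')

    # hypothetical short revision hashes
    other_changesets, org_changesets, ancestors = org_repo.get_diff_changesets(
        '4f2b8a1c9e3d', other_repo, '7c1d0e5b2a9f')
    # other_changesets: changesets reachable from other_rev but not from org_rev
    # org_changesets:   changesets reachable from org_rev but not from other_rev
    # ancestors:        candidate merge base(s), as hex changeset ids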
623 | def pull(self, url): |
|
628 | def pull(self, url): | |
624 | """ |
|
629 | """ | |
625 | Tries to pull changes from external location. |
|
630 | Tries to pull changes from external location. | |
626 | """ |
|
631 | """ | |
627 | other = mercurial.hg.peer(self._repo, {}, safe_bytes(self._get_url(url))) |
|
632 | other = mercurial.hg.peer(self._repo, {}, safe_bytes(self._get_url(url))) | |
628 | try: |
|
633 | try: | |
629 | mercurial.exchange.pull(self._repo, other, heads=None, force=None) |
|
634 | mercurial.exchange.pull(self._repo, other, heads=None, force=None) | |
630 | except mercurial.error.Abort as err: |
|
635 | except mercurial.error.Abort as err: | |
631 | # Propagate error but with vcs's type |
|
636 | # Propagate error but with vcs's type | |
632 | raise RepositoryError(str(err)) |
|
637 | raise RepositoryError(str(err)) | |
633 |
|
638 | |||
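A hedged sketch of pulling from a remote clone; repo is assumed to be an instance of this Mercurial repository class and the URL is hypothetical:

    from kallithea.lib.vcs.exceptions import RepositoryError

    try:
        repo.pull('https://hg.example.com/upstream')   # hypothetical remote URL
    except RepositoryError as e:
        # mercurial.error.Abort is re-raised as the vcs-level RepositoryError
        print('pull failed: %s' % e)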
634 | @LazyProperty |
|
639 | @LazyProperty | |
635 | def workdir(self): |
|
640 | def workdir(self): | |
636 | """ |
|
641 | """ | |
637 | Returns ``Workdir`` instance for this repository. |
|
642 | Returns ``Workdir`` instance for this repository. | |
638 | """ |
|
643 | """ | |
639 | return workdir.MercurialWorkdir(self) |
|
644 | return workdir.MercurialWorkdir(self) | |
640 |
|
645 | |||
641 | def get_config_value(self, section, name=None, config_file=None): |
|
646 | def get_config_value(self, section, name=None, config_file=None): | |
642 | """ |
|
647 | """ | |
643 | Returns configuration value for a given [``section``] and ``name``. |
|
648 | Returns configuration value for a given [``section``] and ``name``. | |
644 |
|
649 | |||
645 | :param section: Section we want to retrieve value from |
|
650 | :param section: Section we want to retrieve value from | |
646 | :param name: Name of configuration we want to retrieve |
|
651 | :param name: Name of configuration we want to retrieve | |
647 | :param config_file: A path to file which should be used to retrieve |
|
652 | :param config_file: A path to file which should be used to retrieve | |
648 | configuration from (might also be a list of file paths) |
|
653 | configuration from (might also be a list of file paths) | |
649 | """ |
|
654 | """ | |
650 | if config_file is None: |
|
655 | if config_file is None: | |
651 | config_file = [] |
|
656 | config_file = [] | |
652 | elif isinstance(config_file, str): |
|
657 | elif isinstance(config_file, str): | |
653 | config_file = [config_file] |
|
658 | config_file = [config_file] | |
654 |
|
659 | |||
655 | config = self._repo.ui |
|
660 | config = self._repo.ui | |
656 | if config_file: |
|
661 | if config_file: | |
657 | config = mercurial.ui.ui() |
|
662 | config = mercurial.ui.ui() | |
658 | for path in config_file: |
|
663 | for path in config_file: | |
659 | config.readconfig(safe_bytes(path)) |
|
664 | config.readconfig(safe_bytes(path)) | |
660 | value = config.config(safe_bytes(section), safe_bytes(name)) |
|
665 | value = config.config(safe_bytes(section), safe_bytes(name)) | |
661 | return value if value is None else safe_str(value) |
|
666 | return value if value is None else safe_str(value) | |
662 |
|
667 | |||
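Roughly how get_config_value is expected to be used; when config_file is omitted the repository's own ui configuration is consulted, otherwise the listed hgrc files are read instead. The file paths are hypothetical:

    # consult the repository's effective configuration
    push_ssl = repo.get_config_value('web', 'push_ssl')

    # consult explicit hgrc files instead (hypothetical paths)
    username = repo.get_config_value('ui', 'username',
                                     config_file=['/etc/mercurial/hgrc',
                                                  '/home/alice/.hgrc'])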
663 | def get_user_name(self, config_file=None): |
|
668 | def get_user_name(self, config_file=None): | |
664 | """ |
|
669 | """ | |
665 | Returns user's name from global configuration file. |
|
670 | Returns user's name from global configuration file. | |
666 |
|
671 | |||
667 | :param config_file: A path to file which should be used to retrieve |
|
672 | :param config_file: A path to file which should be used to retrieve | |
668 | configuration from (might also be a list of file paths) |
|
673 | configuration from (might also be a list of file paths) | |
669 | """ |
|
674 | """ | |
670 | username = self.get_config_value('ui', 'username', config_file=config_file) |
|
675 | username = self.get_config_value('ui', 'username', config_file=config_file) | |
671 | if username: |
|
676 | if username: | |
672 | return author_name(username) |
|
677 | return author_name(username) | |
673 | return None |
|
678 | return None | |
674 |
|
679 | |||
675 | def get_user_email(self, config_file=None): |
|
680 | def get_user_email(self, config_file=None): | |
676 | """ |
|
681 | """ | |
677 | Returns user's email from global configuration file. |
|
682 | Returns user's email from global configuration file. | |
678 |
|
683 | |||
679 | :param config_file: A path to file which should be used to retrieve |
|
684 | :param config_file: A path to file which should be used to retrieve | |
680 | configuration from (might also be a list of file paths) |
|
685 | configuration from (might also be a list of file paths) | |
681 | """ |
|
686 | """ | |
682 | username = self.get_config_value('ui', 'username', config_file=config_file) |
|
687 | username = self.get_config_value('ui', 'username', config_file=config_file) | |
683 | if username: |
|
688 | if username: | |
684 | return author_email(username) |
|
689 | return author_email(username) | |
685 | return None |
|
690 | return None |
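A small sketch of the two helpers above, assuming author_name()/author_email() split a conventional "Full Name <address>" value; the configured username is hypothetical:

    # assuming the consulted hgrc contains:  [ui] username = Alice <alice@example.com>
    repo.get_user_name()    # -> 'Alice'
    repo.get_user_email()   # -> 'alice@example.com'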
@@ -1,265 +1,265 b'' | |||||
1 | """ |
|
1 | """ | |
2 | Utilities aimed to help achieve mostly basic tasks. |
|
2 | Utilities aimed to help achieve mostly basic tasks. | |
3 | """ |
|
3 | """ | |
4 |
|
4 | |||
5 | import datetime |
|
5 | import datetime | |
6 | import logging |
|
6 | import logging | |
7 | import os |
|
7 | import os | |
8 | import re |
|
8 | import re | |
9 | import time |
|
9 | import time | |
10 | import urllib.request |
|
10 | import urllib.request | |
11 |
|
11 | |||
12 | import mercurial.url |
|
12 | import mercurial.url | |
13 | from pygments import highlight |
|
13 | from pygments import highlight | |
14 | from pygments.formatters import TerminalFormatter |
|
14 | from pygments.formatters import TerminalFormatter | |
15 | from pygments.lexers import ClassNotFound, guess_lexer_for_filename |
|
15 | from pygments.lexers import ClassNotFound, guess_lexer_for_filename | |
16 |
|
16 | |||
17 | from kallithea.lib.vcs import backends |
|
17 | from kallithea.lib.vcs import backends | |
18 | from kallithea.lib.vcs.exceptions import RepositoryError, VCSError |
|
18 | from kallithea.lib.vcs.exceptions import RepositoryError, VCSError | |
19 | from kallithea.lib.vcs.utils import safe_str |
|
19 | from kallithea.lib.vcs.utils import safe_str | |
20 | from kallithea.lib.vcs.utils.paths import abspath |
|
20 | from kallithea.lib.vcs.utils.paths import abspath | |
21 |
|
21 | |||
22 |
|
22 | |||
23 | ALIASES = ['hg', 'git'] |
|
23 | ALIASES = ['hg', 'git'] | |
24 |
|
24 | |||
25 |
|
25 | |||
26 | def get_scm(path, search_up=False, explicit_alias=None): |
|
26 | def get_scm(path, search_up=False, explicit_alias=None): | |
27 | """ |
|
27 | """ | |
28 | Returns one of alias from ``ALIASES`` (in order of precedence same as |
|
28 | Returns one of alias from ``ALIASES`` (in order of precedence same as | |
29 | shortcuts given in ``ALIASES``) and top working dir path for the given |
|
29 | shortcuts given in ``ALIASES``) and top working dir path for the given | |
30 | argument. If no scm-specific directory is found or more than one scm is |
|
30 | argument. If no scm-specific directory is found or more than one scm is | |
31 | found at that directory, ``VCSError`` is raised. |
|
31 | found at that directory, ``VCSError`` is raised. | |
32 |
|
32 | |||
33 | :param search_up: if set to ``True``, this function would try to |
|
33 | :param search_up: if set to ``True``, this function would try to | |
34 | move up to parent directory every time no scm is recognized for the |
|
34 | move up to parent directory every time no scm is recognized for the | |
35 | currently checked path. Default: ``False``. |
|
35 | currently checked path. Default: ``False``. | |
36 | :param explicit_alias: can be one of available backend aliases, when given |
|
36 | :param explicit_alias: can be one of available backend aliases, when given | |
37 | it will return given explicit alias in repositories under more than one |
|
37 | it will return given explicit alias in repositories under more than one | |
38 | version control, if explicit_alias is different than found it will raise |
|
38 | version control, if explicit_alias is different than found it will raise | |
39 | VCSError |
|
39 | VCSError | |
40 | """ |
|
40 | """ | |
41 | if not os.path.isdir(path): |
|
41 | if not os.path.isdir(path): | |
42 | raise VCSError("Given path %s is not a directory" % path) |
|
42 | raise VCSError("Given path %s is not a directory" % path) | |
43 |
|
43 | |||
44 | while True: |
|
44 | while True: | |
45 | found_scms = [(scm, path) for scm in get_scms_for_path(path)] |
|
45 | found_scms = [(scm, path) for scm in get_scms_for_path(path)] | |
46 | if found_scms or not search_up: |
|
46 | if found_scms or not search_up: | |
47 | break |
|
47 | break | |
48 | newpath = abspath(path, '..') |
|
48 | newpath = abspath(path, '..') | |
49 | if newpath == path: |
|
49 | if newpath == path: | |
50 | break |
|
50 | break | |
51 | path = newpath |
|
51 | path = newpath | |
52 |
|
52 | |||
53 | if len(found_scms) > 1: |
|
53 | if len(found_scms) > 1: | |
54 | for scm in found_scms: |
|
54 | for scm in found_scms: | |
55 | if scm[0] == explicit_alias: |
|
55 | if scm[0] == explicit_alias: | |
56 | return scm |
|
56 | return scm | |
57 | raise VCSError('More than one [%s] scm found at given path %s' |
|
57 | raise VCSError('More than one [%s] scm found at given path %s' | |
58 | % (', '.join((x[0] for x in found_scms)), path)) |
|
58 | % (', '.join((x[0] for x in found_scms)), path)) | |
59 |
|
59 | |||
60 | if len(found_scms) == 0: |
|
60 | if len(found_scms) == 0: | |
61 | raise VCSError('No scm found at given path %s' % path) |
|
61 | raise VCSError('No scm found at given path %s' % path) | |
62 |
|
62 | |||
63 | return found_scms[0] |
|
63 | return found_scms[0] | |
64 |
|
64 | |||
65 |
|
65 | |||
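A short usage sketch for get_scm; the module path is assumed to be the helpers module being diffed here, and the filesystem paths are hypothetical:

    from kallithea.lib.vcs.utils.helpers import get_scm   # assumed module path

    # detect the backend directly at a repository root
    alias, top_path = get_scm('/srv/repos/project')              # e.g. ('hg', '/srv/repos/project')

    # start in a subdirectory and walk up until a repository is found
    alias, top_path = get_scm('/srv/repos/project/docs', search_up=True)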
66 | def get_scms_for_path(path): |
|
66 | def get_scms_for_path(path): | |
67 | """ |
|
67 | """ | |
68 | Returns all scm's found at the given path. If no scm is recognized |
|
68 | Returns all scm's found at the given path. If no scm is recognized | |
69 | - empty list is returned. |
|
69 | - empty list is returned. | |
70 |
|
70 | |||
71 | :param path: path to directory which should be checked. May be callable. |
|
71 | :param path: path to directory which should be checked. May be callable. | |
72 |
|
72 | |||
73 | :raises VCSError: if given ``path`` is not a directory |
|
73 | :raises VCSError: if given ``path`` is not a directory | |
74 | """ |
|
74 | """ | |
75 | if hasattr(path, '__call__'): |
|
75 | if hasattr(path, '__call__'): | |
76 | path = path() |
|
76 | path = path() | |
77 | if not os.path.isdir(path): |
|
77 | if not os.path.isdir(path): | |
78 | raise VCSError("Given path %r is not a directory" % path) |
|
78 | raise VCSError("Given path %r is not a directory" % path) | |
79 |
|
79 | |||
80 | result = [] |
|
80 | result = [] | |
81 | for key in ALIASES: |
|
81 | for key in ALIASES: | |
82 | # find .hg / .git |
|
82 | # find .hg / .git | |
83 | dirname = os.path.join(path, '.' + key) |
|
83 | dirname = os.path.join(path, '.' + key) | |
84 | if os.path.isdir(dirname): |
|
84 | if os.path.isdir(dirname): | |
85 | result.append(key) |
|
85 | result.append(key) | |
86 | continue |
|
86 | continue | |
87 | # find rm__.hg / rm__.git too - left overs from old method for deleting |
|
87 | # find rm__.hg / rm__.git too - left overs from old method for deleting | |
88 | dirname = os.path.join(path, 'rm__.' + key) |
|
88 | dirname = os.path.join(path, 'rm__.' + key) | |
89 | if os.path.isdir(dirname): |
|
89 | if os.path.isdir(dirname): | |
90 | return result |
|
90 | return result | |
91 | # We still need to check if it's not bare repository as |
|
91 | # We still need to check if it's not bare repository as | |
92 | # bare repos don't have working directories |
|
92 | # bare repos don't have working directories | |
93 | try: |
|
93 | try: | |
94 | backends.get_backend(key)(path) |
|
94 | backends.get_backend(key)(path) | |
95 | result.append(key) |
|
95 | result.append(key) | |
96 | continue |
|
96 | continue | |
97 | except RepositoryError: |
|
97 | except RepositoryError: | |
98 | # Wrong backend |
|
98 | # Wrong backend | |
99 | pass |
|
99 | pass | |
100 | except VCSError: |
|
100 | except VCSError: | |
101 | # No backend at all |
|
101 | # No backend at all | |
102 | pass |
|
102 | pass | |
103 | return result |
|
103 | return result | |
104 |
|
104 | |||
105 |
|
105 | |||
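For comparison, get_scms_for_path returns a plain list of aliases rather than a single match; the directories are hypothetical:

    from kallithea.lib.vcs.utils.helpers import get_scms_for_path   # assumed module path

    get_scms_for_path('/srv/repos/dual')   # dir with both .hg and .git -> ['hg', 'git']
    get_scms_for_path('/tmp')              # no .hg/.git and not a bare repo -> []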
106 | def get_scm_size(alias, root_path): |
|
106 | def get_scm_size(alias, root_path): | |
107 | if not alias.startswith('.'): |
|
107 | if not alias.startswith('.'): | |
108 | alias += '.' |
|
108 | alias += '.' | |
109 |
|
109 | |||
110 | size_scm, size_root = 0, 0 |
|
110 | size_scm, size_root = 0, 0 | |
111 | for path, dirs, files in os.walk(root_path): |
|
111 | for path, dirs, files in os.walk(root_path): | |
112 | if path.find(alias) != -1: |
|
112 | if path.find(alias) != -1: | |
113 | for f in files: |
|
113 | for f in files: | |
114 | try: |
|
114 | try: | |
115 | size_scm += os.path.getsize(os.path.join(path, f)) |
|
115 | size_scm += os.path.getsize(os.path.join(path, f)) | |
116 | except OSError: |
|
116 | except OSError: | |
117 | pass |
|
117 | pass | |
118 | else: |
|
118 | else: | |
119 | for f in files: |
|
119 | for f in files: | |
120 | try: |
|
120 | try: | |
121 | size_root += os.path.getsize(os.path.join(path, f)) |
|
121 | size_root += os.path.getsize(os.path.join(path, f)) | |
122 | except OSError: |
|
122 | except OSError: | |
123 | pass |
|
123 | pass | |
124 |
|
124 | |||
125 | return size_scm, size_root |
|
125 | return size_scm, size_root | |
126 |
|
126 | |||
127 |
|
127 | |||
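A hedged example of get_scm_size; note that the first argument is the control-directory name, so '.hg' (or '.git') is passed, and the repository path is hypothetical:

    size_scm, size_root = get_scm_size('.hg', '/srv/repos/project')
    # size_scm:  bytes stored under the .hg control directory
    # size_root: bytes taken by the working files outside .hg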
128 | def get_highlighted_code(name, code, type='terminal'): |
|
128 | def get_highlighted_code(name, code, type='terminal'): | |
129 | """ |
|
129 | """ | |
130 | If a lexer can be guessed for the given file name, |
|
130 | If a lexer can be guessed for the given file name, | |
131 | the returned output is colored for the terminal. Otherwise |
|
131 | the returned output is colored for the terminal. Otherwise | |
132 | the content is returned unchanged. |
|
132 | the content is returned unchanged. | |
133 | """ |
|
133 | """ | |
134 | try: |
|
134 | try: | |
135 | lexer = guess_lexer_for_filename(name, code) |
|
135 | lexer = guess_lexer_for_filename(name, code) | |
136 | formatter = TerminalFormatter() |
|
136 | formatter = TerminalFormatter() | |
137 | content = highlight(code, lexer, formatter) |
|
137 | content = highlight(code, lexer, formatter) | |
138 | except ClassNotFound: |
|
138 | except ClassNotFound: | |
139 | logging.debug("Couldn't guess Lexer, will not use pygments.") |
|
139 | logging.debug("Couldn't guess Lexer, will not use pygments.") | |
140 | content = code |
|
140 | content = code | |
141 | return content |
|
141 | return content | |
142 |
|
142 | |||
143 |
|
143 | |||
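A small sketch of get_highlighted_code under the assumption that a setup.py exists in the current directory; the second call uses a file name with an extension pygments does not know, so the content comes back unchanged:

    code = open('setup.py').read()
    print(get_highlighted_code('setup.py', code))        # ANSI-colored via the terminal formatter
    print(get_highlighted_code('notes.unknown', code))   # no lexer guessed -> returned unchanged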
144 | def parse_changesets(text): |
|
144 | def parse_changesets(text): | |
145 | """ |
|
145 | """ | |
146 | Returns dictionary with *start*, *main* and *end* ids. |
|
146 | Returns dictionary with *start*, *main* and *end* ids. | |
147 |
|
147 | |||
148 | Examples:: |
|
148 | Examples:: | |
149 |
|
149 | |||
150 | >>> parse_changesets('aaabbb') |
|
150 | >>> parse_changesets('aaabbb') | |
151 | {'start': None, 'main': 'aaabbb', 'end': None} |
|
151 | {'start': None, 'main': 'aaabbb', 'end': None} | |
152 | >>> parse_changesets('aaabbb..cccddd') |
|
152 | >>> parse_changesets('aaabbb..cccddd') | |
153 | {'start': 'aaabbb', 'end': 'cccddd', 'main': None} |
|
153 | {'start': 'aaabbb', 'end': 'cccddd', 'main': None} | |
154 |
|
154 | |||
155 | """ |
|
155 | """ | |
156 | text = text.strip() |
|
156 | text = text.strip() | |
157 | CID_RE = r'[a-zA-Z0-9]+' |
|
157 | CID_RE = r'[a-zA-Z0-9]+' | |
158 | if '..' not in text: |
|
158 | if '..' not in text: | |
159 | m = re.match(r'^(?P<cid>%s)$' % CID_RE, text) |
|
159 | m = re.match(r'^(?P<cid>%s)$' % CID_RE, text) | |
160 | if m: |
|
160 | if m: | |
161 | return { |
|
161 | return { | |
162 | 'start': None, |
|
162 | 'start': None, | |
163 | 'main': text, |
|
163 | 'main': text, | |
164 | 'end': None, |
|
164 | 'end': None, | |
165 | } |
|
165 | } | |
166 | else: |
|
166 | else: | |
167 | RE = r'^(?P<start>%s)?\.{2,3}(?P<end>%s)?$' % (CID_RE, CID_RE) |
|
167 | RE = r'^(?P<start>%s)?\.{2,3}(?P<end>%s)?$' % (CID_RE, CID_RE) | |
168 | m = re.match(RE, text) |
|
168 | m = re.match(RE, text) | |
169 | if m: |
|
169 | if m: | |
170 | result = m.groupdict() |
|
170 | result = m.groupdict() | |
171 | result['main'] = None |
|
171 | result['main'] = None | |
172 | return result |
|
172 | return result | |
173 | raise ValueError("IDs not recognized") |
|
173 | raise ValueError("IDs not recognized") | |
174 |
|
174 | |||
175 |
|
175 | |||
176 | def parse_datetime(text): |
|
176 | def parse_datetime(text): | |
177 | """ |
|
177 | """ | |
178 | Parses given text and returns ``datetime.datetime`` instance or raises |
|
178 | Parses given text and returns ``datetime.datetime`` instance or raises | |
179 | ``ValueError``. |
|
179 | ``ValueError``. | |
180 |
|
180 | |||
181 | :param text: string of desired date/datetime or something more verbose, |
|
181 | :param text: string of desired date/datetime or something more verbose, | |
182 | like *yesterday*, *2weeks 3days*, etc. |
|
182 | like *yesterday*, *2weeks 3days*, etc. | |
183 | """ |
|
183 | """ | |
184 |
|
184 | |||
185 | text = text.strip().lower() |
|
185 | text = text.strip().lower() | |
186 |
|
186 | |||
187 | INPUT_FORMATS = ( |
|
187 | INPUT_FORMATS = ( | |
188 | '%Y-%m-%d %H:%M:%S', |
|
188 | '%Y-%m-%d %H:%M:%S', | |
189 | '%Y-%m-%d %H:%M', |
|
189 | '%Y-%m-%d %H:%M', | |
190 | '%Y-%m-%d', |
|
190 | '%Y-%m-%d', | |
191 | '%m/%d/%Y %H:%M:%S', |
|
191 | '%m/%d/%Y %H:%M:%S', | |
192 | '%m/%d/%Y %H:%M', |
|
192 | '%m/%d/%Y %H:%M', | |
193 | '%m/%d/%Y', |
|
193 | '%m/%d/%Y', | |
194 | '%m/%d/%y %H:%M:%S', |
|
194 | '%m/%d/%y %H:%M:%S', | |
195 | '%m/%d/%y %H:%M', |
|
195 | '%m/%d/%y %H:%M', | |
196 | '%m/%d/%y', |
|
196 | '%m/%d/%y', | |
197 | ) |
|
197 | ) | |
198 | for format in INPUT_FORMATS: |
|
198 | for format in INPUT_FORMATS: | |
199 | try: |
|
199 | try: | |
200 | return datetime.datetime(*time.strptime(text, format)[:6]) |
|
200 | return datetime.datetime(*time.strptime(text, format)[:6]) | |
201 | except ValueError: |
|
201 | except ValueError: | |
202 | pass |
|
202 | pass | |
203 |
|
203 | |||
204 | # Try descriptive texts |
|
204 | # Try descriptive texts | |
205 | if text == 'tomorrow': |
|
205 | if text == 'tomorrow': | |
206 | future = datetime.datetime.now() + datetime.timedelta(days=1) |
|
206 | future = datetime.datetime.now() + datetime.timedelta(days=1) | |
207 | args = future.timetuple()[:3] + (23, 59, 59) |
|
207 | args = future.timetuple()[:3] + (23, 59, 59) | |
208 | return datetime.datetime(*args) |
|
208 | return datetime.datetime(*args) | |
209 | elif text == 'today': |
|
209 | elif text == 'today': | |
210 | return datetime.datetime(*datetime.datetime.today().timetuple()[:3]) |
|
210 | return datetime.datetime(*datetime.datetime.today().timetuple()[:3]) | |
211 | elif text == 'now': |
|
211 | elif text == 'now': | |
212 | return datetime.datetime.now() |
|
212 | return datetime.datetime.now() | |
213 | elif text == 'yesterday': |
|
213 | elif text == 'yesterday': | |
214 | past = datetime.datetime.now() - datetime.timedelta(days=1) |
|
214 | past = datetime.datetime.now() - datetime.timedelta(days=1) | |
215 | return datetime.datetime(*past.timetuple()[:3]) |
|
215 | return datetime.datetime(*past.timetuple()[:3]) | |
216 | else: |
|
216 | else: | |
217 | days = 0 |
|
217 | days = 0 | |
218 | matched = re.match( |
|
218 | matched = re.match( | |
219 | r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text) |
|
219 | r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text) | |
220 | if matched: |
|
220 | if matched: | |
221 | groupdict = matched.groupdict() |
|
221 | groupdict = matched.groupdict() | |
222 | if groupdict['days']: |
|
222 | if groupdict['days']: | |
223 | days += int(matched.groupdict()['days']) |
|
223 | days += int(matched.groupdict()['days']) | |
224 | if groupdict['weeks']: |
|
224 | if groupdict['weeks']: | |
225 | days += int(matched.groupdict()['weeks']) * 7 |
|
225 | days += int(matched.groupdict()['weeks']) * 7 | |
226 | past = datetime.datetime.now() - datetime.timedelta(days=days) |
|
226 | past = datetime.datetime.now() - datetime.timedelta(days=days) | |
227 | return datetime.datetime(*past.timetuple()[:3]) |
|
227 | return datetime.datetime(*past.timetuple()[:3]) | |
228 |
|
228 | |||
229 | raise ValueError('Wrong date: "%s"' % text) |
|
229 | raise ValueError('Wrong date: "%s"' % text) | |
230 |
|
230 | |||
231 |
|
231 | |||
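A few representative calls to parse_datetime, following directly from the formats and keywords handled above:

    parse_datetime('2010-04-08 21:29:00')   # -> datetime.datetime(2010, 4, 8, 21, 29)
    parse_datetime('yesterday')             # midnight at the start of the previous day
    parse_datetime('2weeks 3days')          # midnight, 17 days before now
    parse_datetime('not a date')            # raises ValueError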
232 | def get_dict_for_attrs(obj, attrs): |
|
232 | def get_dict_for_attrs(obj, attrs): | |
233 | """ |
|
233 | """ | |
234 | Returns dictionary for each attribute from given ``obj``. |
|
234 | Returns dictionary for each attribute from given ``obj``. | |
235 | """ |
|
235 | """ | |
236 | data = {} |
|
236 | data = {} | |
237 | for attr in attrs: |
|
237 | for attr in attrs: | |
238 | data[attr] = getattr(obj, attr) |
|
238 | data[attr] = getattr(obj, attr) | |
239 | return data |
|
239 | return data | |
240 |
|
240 | |||
241 | def get_urllib_request_handlers(url_obj): |
|
241 | def get_urllib_request_handlers(url_obj): | |
242 | handlers = [] |
|
242 | handlers = [] | |
243 | test_uri, authinfo = url_obj.authinfo() |
|
243 | test_uri, authinfo = url_obj.authinfo() | |
244 |
|
244 | |||
245 | if authinfo: |
|
245 | if authinfo: | |
246 | # authinfo is a tuple (realm, uris, user, password) where 'uris' itself |
|
246 | # authinfo is a tuple (realm, uris, user, password) where 'uris' itself | |
247 | # is a tuple of URIs. |
|
247 | # is a tuple of URIs. | |
248 | # If url_obj is obtained via mercurial |
|
248 | # If url_obj is obtained via mercurial urlutil, the obtained authinfo | |
249 | # values will be bytes, e.g. |
|
249 | # values will be bytes, e.g. | |
250 | # (None, (b'http://127.0.0.1/repo', b'127.0.0.1'), b'user', b'pass') |
|
250 | # (None, (b'http://127.0.0.1/repo', b'127.0.0.1'), b'user', b'pass') | |
251 | # However, urllib expects strings, not bytes, so we must convert them. |
|
251 | # However, urllib expects strings, not bytes, so we must convert them. | |
252 |
|
252 | |||
253 | # create a password manager |
|
253 | # create a password manager | |
254 | passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() |
|
254 | passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() | |
255 | passmgr.add_password( |
|
255 | passmgr.add_password( | |
256 | safe_str(authinfo[0]) if authinfo[0] else None, # realm |
|
256 | safe_str(authinfo[0]) if authinfo[0] else None, # realm | |
257 | tuple(safe_str(x) for x in authinfo[1]), # uris |
|
257 | tuple(safe_str(x) for x in authinfo[1]), # uris | |
258 | safe_str(authinfo[2]), # user |
|
258 | safe_str(authinfo[2]), # user | |
259 | safe_str(authinfo[3]), # password |
|
259 | safe_str(authinfo[3]), # password | |
260 | ) |
|
260 | ) | |
261 |
|
261 | |||
262 | handlers.extend((mercurial.url.httpbasicauthhandler(passmgr), |
|
262 | handlers.extend((mercurial.url.httpbasicauthhandler(passmgr), | |
263 | mercurial.url.httpdigestauthhandler(passmgr))) |
|
263 | mercurial.url.httpdigestauthhandler(passmgr))) | |
264 |
|
264 | |||
265 | return test_uri, handlers |
|
265 | return test_uri, handlers |
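A hedged sketch of wiring the returned handlers into a urllib opener. url_obj is assumed to be a mercurial url object describing the remote (for example one carrying embedded credentials), and safe_str is the helper already imported in this module:

    import urllib.request

    # url_obj: assumed mercurial url object for the remote repository
    test_uri, handlers = get_urllib_request_handlers(url_obj)

    opener = urllib.request.build_opener(*handlers)
    # test_uri comes straight from url_obj.authinfo() and may therefore be bytes
    response = opener.open(safe_str(test_uri))
    response.read()   # credentials, if any, are supplied by the basic/digest handlers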