##// END OF EJS Templates
vcs-lib: bulk of changes for python3 support
super-admin -
r5074:09a42e1d default
parent child Browse files
Show More
@@ -1,182 +1,190 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Various version Control System version lib (vcs) management abstraction layer
22 Various version Control System version lib (vcs) management abstraction layer
23 for Python. Build with server client architecture.
23 for Python. Build with server client architecture.
24 """
24 """
25 import io
25 import io
26 import atexit
26 import atexit
27 import logging
27 import logging
28
28
29 import rhodecode
29 import rhodecode
30 from rhodecode.lib.str_utils import safe_bytes
30 from rhodecode.lib.str_utils import safe_bytes
31 from rhodecode.lib.vcs.conf import settings
31 from rhodecode.lib.vcs.conf import settings
32 from rhodecode.lib.vcs.backends import get_vcs_instance, get_backend
32 from rhodecode.lib.vcs.backends import get_vcs_instance, get_backend
33 from rhodecode.lib.vcs.exceptions import (
33 from rhodecode.lib.vcs.exceptions import (
34 VCSError, RepositoryError, CommitError, VCSCommunicationError)
34 VCSError, RepositoryError, CommitError, VCSCommunicationError)
35
35
36 __all__ = [
36 __all__ = [
37 'get_vcs_instance', 'get_backend',
37 'get_vcs_instance', 'get_backend',
38 'VCSError', 'RepositoryError', 'CommitError', 'VCSCommunicationError'
38 'VCSError', 'RepositoryError', 'CommitError', 'VCSCommunicationError'
39 ]
39 ]
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43 # The pycurl library directly accesses C API functions and is not patched by
43 # The pycurl library directly accesses C API functions and is not patched by
44 # gevent. This will potentially lead to deadlocks due to incompatibility to
44 # gevent. This will potentially lead to deadlocks due to incompatibility to
45 # gevent. Therefore we check if gevent is active and import a gevent compatible
45 # gevent. Therefore we check if gevent is active and import a gevent compatible
46 # wrapper in that case.
46 # wrapper in that case.
47 try:
47 try:
48 from gevent import monkey
48 from gevent import monkey
49 if monkey.is_module_patched('__builtin__'):
49 if monkey.is_module_patched('__builtin__'):
50 import geventcurl as pycurl
50 import geventcurl as pycurl
51 log.debug('Using gevent comapatible pycurl: %s', pycurl)
51 log.debug('Using gevent comapatible pycurl: %s', pycurl)
52 else:
52 else:
53 import pycurl
53 import pycurl
54 except ImportError:
54 except ImportError:
55 import pycurl
55 import pycurl
56
56
57
57
58 def connect_http(server_and_port):
58 def connect_http(server_and_port):
59 log.debug('Initialized VCSServer connections to %s.', server_and_port)
59 log.debug('Initialized VCSServer connections to %s.', server_and_port)
60
60
61 from rhodecode.lib.vcs import connection, client_http
61 from rhodecode.lib.vcs import connection, client_http
62 from rhodecode.lib.middleware.utils import scm_app
62 from rhodecode.lib.middleware.utils import scm_app
63
63
64 session_factory = client_http.ThreadlocalSessionFactory()
64 session_factory = client_http.ThreadlocalSessionFactory()
65
65
66 connection.Git = client_http.RemoteVCSMaker(
66 connection.Git = client_http.RemoteVCSMaker(
67 server_and_port, '/git', 'git', session_factory)
67 server_and_port, '/git', 'git', session_factory)
68 connection.Hg = client_http.RemoteVCSMaker(
68 connection.Hg = client_http.RemoteVCSMaker(
69 server_and_port, '/hg', 'hg', session_factory)
69 server_and_port, '/hg', 'hg', session_factory)
70 connection.Svn = client_http.RemoteVCSMaker(
70 connection.Svn = client_http.RemoteVCSMaker(
71 server_and_port, '/svn', 'svn', session_factory)
71 server_and_port, '/svn', 'svn', session_factory)
72 connection.Service = client_http.ServiceConnection(
72 connection.Service = client_http.ServiceConnection(
73 server_and_port, '/_service', session_factory)
73 server_and_port, '/_service', session_factory)
74
74
75 scm_app.HG_REMOTE_WSGI = client_http.VcsHttpProxy(
75 scm_app.HG_REMOTE_WSGI = client_http.VcsHttpProxy(
76 server_and_port, '/proxy/hg')
76 server_and_port, '/proxy/hg')
77 scm_app.GIT_REMOTE_WSGI = client_http.VcsHttpProxy(
77 scm_app.GIT_REMOTE_WSGI = client_http.VcsHttpProxy(
78 server_and_port, '/proxy/git')
78 server_and_port, '/proxy/git')
79
79
80 @atexit.register
80 @atexit.register
81 def free_connection_resources():
81 def free_connection_resources():
82 connection.Git = None
82 connection.Git = None
83 connection.Hg = None
83 connection.Hg = None
84 connection.Svn = None
84 connection.Svn = None
85 connection.Service = None
85 connection.Service = None
86
86
87
87
88 def connect_vcs(server_and_port, protocol):
88 def connect_vcs(server_and_port, protocol):
89 """
89 """
90 Initializes the connection to the vcs server.
90 Initializes the connection to the vcs server.
91
91
92 :param server_and_port: str, e.g. "localhost:9900"
92 :param server_and_port: str, e.g. "localhost:9900"
93 :param protocol: str or "http"
93 :param protocol: str or "http"
94 """
94 """
95 if protocol == 'http':
95 if protocol == 'http':
96 connect_http(server_and_port)
96 connect_http(server_and_port)
97 else:
97 else:
98 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
98 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
99
99
100
100
101 class CurlSession(object):
101 class CurlSession(object):
102 """
102 """
103 Modeled so that it provides a subset of the requests interface.
103 Modeled so that it provides a subset of the requests interface.
104
104
105 This has been created so that it does only provide a minimal API for our
105 This has been created so that it does only provide a minimal API for our
106 needs. The parts which it provides are based on the API of the library
106 needs. The parts which it provides are based on the API of the library
107 `requests` which allows us to easily benchmark against it.
107 `requests` which allows us to easily benchmark against it.
108
108
109 Please have a look at the class :class:`requests.Session` when you extend
109 Please have a look at the class :class:`requests.Session` when you extend
110 it.
110 it.
111 """
111 """
112 CURL_UA = f'RhodeCode HTTP {rhodecode.__version__}'
112 CURL_UA = f'RhodeCode HTTP {rhodecode.__version__}'
113
113
114 def __init__(self):
114 def __init__(self):
115 curl = pycurl.Curl()
115 curl = pycurl.Curl()
116 # TODO: johbo: I did test with 7.19 of libcurl. This version has
116 # TODO: johbo: I did test with 7.19 of libcurl. This version has
117 # trouble with 100 - continue being set in the expect header. This
117 # trouble with 100 - continue being set in the expect header. This
118 # can lead to massive performance drops, switching it off here.
118 # can lead to massive performance drops, switching it off here.
119
119
120 curl.setopt(curl.TCP_NODELAY, True)
120 curl.setopt(curl.TCP_NODELAY, True)
121 curl.setopt(curl.PROTOCOLS, curl.PROTO_HTTP)
121 curl.setopt(curl.PROTOCOLS, curl.PROTO_HTTP)
122 curl.setopt(curl.USERAGENT, safe_bytes(self.CURL_UA))
122 curl.setopt(curl.USERAGENT, safe_bytes(self.CURL_UA))
123 curl.setopt(curl.SSL_VERIFYPEER, 0)
123 curl.setopt(curl.SSL_VERIFYPEER, 0)
124 curl.setopt(curl.SSL_VERIFYHOST, 0)
124 curl.setopt(curl.SSL_VERIFYHOST, 0)
125 self._curl = curl
125 self._curl = curl
126
126
127 def post(self, url, data, allow_redirects=False, headers=None):
127 def post(self, url, data, allow_redirects=False, headers=None):
128 headers = headers or {}
128 headers = headers or {}
129 # format is ['header_name1: header_value1', 'header_name2: header_value2'])
129 # format is ['header_name1: header_value1', 'header_name2: header_value2'])
130 headers_list = [b"Expect:"] + [safe_bytes('{}: {}'.format(k, v)) for k, v in headers.items()]
130 headers_list = [b"Expect:"] + [safe_bytes('{}: {}'.format(k, v)) for k, v in headers.items()]
131 response_buffer = io.BytesIO()
131 response_buffer = io.BytesIO()
132
132
133 curl = self._curl
133 curl = self._curl
134 curl.setopt(curl.URL, url)
134 curl.setopt(curl.URL, url)
135 curl.setopt(curl.POST, True)
135 curl.setopt(curl.POST, True)
136 curl.setopt(curl.POSTFIELDS, data)
136 curl.setopt(curl.POSTFIELDS, data)
137 curl.setopt(curl.FOLLOWLOCATION, allow_redirects)
137 curl.setopt(curl.FOLLOWLOCATION, allow_redirects)
138 curl.setopt(curl.WRITEDATA, response_buffer)
138 curl.setopt(curl.WRITEDATA, response_buffer)
139 curl.setopt(curl.HTTPHEADER, headers_list)
139 curl.setopt(curl.HTTPHEADER, headers_list)
140 curl.perform()
140 curl.perform()
141
141
142 status_code = curl.getinfo(pycurl.HTTP_CODE)
142 status_code = curl.getinfo(pycurl.HTTP_CODE)
143
143 content_type = curl.getinfo(pycurl.CONTENT_TYPE)
144 return CurlResponse(response_buffer, status_code)
144 return CurlResponse(response_buffer, status_code, content_type)
145
145
146
146
147 class CurlResponse(object):
147 class CurlResponse(object):
148 """
148 """
149 The response of a request, modeled after the requests API.
149 The response of a request, modeled after the requests API.
150
150
151 This class provides a subset of the response interface known from the
151 This class provides a subset of the response interface known from the
152 library `requests`. It is intentionally kept similar, so that we can use
152 library `requests`. It is intentionally kept similar, so that we can use
153 `requests` as a drop in replacement for benchmarking purposes.
153 `requests` as a drop in replacement for benchmarking purposes.
154 """
154 """
155
155
156 def __init__(self, response_buffer, status_code):
156 def __init__(self, response_buffer, status_code, content_type=''):
157 self._response_buffer = response_buffer
157 self._response_buffer = response_buffer
158 self._status_code = status_code
158 self._status_code = status_code
159 self._content_type = content_type
160
161 def __repr__(self):
162 return f'CurlResponse(code={self._status_code}, content_type={self._content_type})'
159
163
160 @property
164 @property
161 def content(self):
165 def content(self):
162 try:
166 try:
163 return self._response_buffer.getvalue()
167 return self._response_buffer.getvalue()
164 finally:
168 finally:
165 self._response_buffer.close()
169 self._response_buffer.close()
166
170
167 @property
171 @property
168 def status_code(self):
172 def status_code(self):
169 return self._status_code
173 return self._status_code
170
174
175 @property
176 def content_type(self):
177 return self._content_type
178
171 def iter_content(self, chunk_size):
179 def iter_content(self, chunk_size):
172 self._response_buffer.seek(0)
180 self._response_buffer.seek(0)
173 while 1:
181 while 1:
174 chunk = self._response_buffer.read(chunk_size)
182 chunk = self._response_buffer.read(chunk_size)
175 if not chunk:
183 if not chunk:
176 break
184 break
177 yield chunk
185 yield chunk
178
186
179
187
180 def _create_http_rpc_session():
188 def _create_http_rpc_session():
181 session = CurlSession()
189 session = CurlSession()
182 return session
190 return session
@@ -1,95 +1,95 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 VCS Backends module
22 VCS Backends module
23 """
23 """
24
24
25 import os
25 import os
26 import logging
26 import logging
27
27
28 from pprint import pformat
28 from rhodecode import typing
29
29
30 from rhodecode.lib.vcs.conf import settings
30 from rhodecode.lib.vcs.conf import settings
31 from rhodecode.lib.vcs.exceptions import VCSError
31 from rhodecode.lib.vcs.exceptions import VCSError
32 from rhodecode.lib.vcs.utils.helpers import get_scm
32 from rhodecode.lib.vcs.utils.helpers import get_scm
33 from rhodecode.lib.vcs.utils.imports import import_class
33 from rhodecode.lib.vcs.utils.imports import import_class
34
34
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38
38
39 def get_vcs_instance(repo_path, *args, **kwargs):
39 def get_vcs_instance(repo_path, *args, **kwargs) -> typing.VCSRepo | None:
40 """
40 """
41 Given a path to a repository an instance of the corresponding vcs backend
41 Given a path to a repository an instance of the corresponding vcs backend
42 repository class is created and returned. If no repository can be found
42 repository class is created and returned. If no repository can be found
43 for the path it returns None. Arguments and keyword arguments are passed
43 for the path it returns None. Arguments and keyword arguments are passed
44 to the vcs backend repository class.
44 to the vcs backend repository class.
45 """
45 """
46 from rhodecode.lib.utils2 import safe_str
46 from rhodecode.lib.utils2 import safe_str
47
47
48 explicit_vcs_alias = kwargs.pop('_vcs_alias', None)
48 explicit_vcs_alias = kwargs.pop('_vcs_alias', None)
49 try:
49 try:
50 vcs_alias = safe_str(explicit_vcs_alias or get_scm(repo_path)[0])
50 vcs_alias = safe_str(explicit_vcs_alias or get_scm(repo_path)[0])
51 log.debug(
51 log.debug(
52 'Creating instance of %s repository from %s', vcs_alias,
52 'Creating instance of %s repository from %s', vcs_alias,
53 safe_str(repo_path))
53 safe_str(repo_path))
54 backend = get_backend(vcs_alias)
54 backend = get_backend(vcs_alias)
55
55
56 if explicit_vcs_alias:
56 if explicit_vcs_alias:
57 # do final verification of existance of the path, this does the
57 # do final verification of existence of the path, this does the
58 # same as get_scm() call which we skip in explicit_vcs_alias
58 # same as get_scm() call which we skip in explicit_vcs_alias
59 if not os.path.isdir(repo_path):
59 if not os.path.isdir(repo_path):
60 raise VCSError("Given path %s is not a directory" % repo_path)
60 raise VCSError(f"Given path {repo_path} is not a directory")
61 except VCSError:
61 except VCSError:
62 log.exception(
62 log.exception(
63 'Perhaps this repository is in db and not in '
63 'Perhaps this repository is in db and not in '
64 'filesystem run rescan repositories with '
64 'filesystem run rescan repositories with '
65 '"destroy old data" option from admin panel')
65 '"destroy old data" option from admin panel')
66 return None
66 return None
67
67
68 return backend(repo_path=repo_path, *args, **kwargs)
68 return backend(repo_path=repo_path, *args, **kwargs)
69
69
70
70
71 def get_backend(alias):
71 def get_backend(alias) -> typing.VCSRepoClass:
72 """
72 """
73 Returns ``Repository`` class identified by the given alias or raises
73 Returns ``Repository`` class identified by the given alias or raises
74 VCSError if alias is not recognized or backend class cannot be imported.
74 VCSError if alias is not recognized or backend class cannot be imported.
75 """
75 """
76 if alias not in settings.BACKENDS:
76 if alias not in settings.BACKENDS:
77 raise VCSError(
77 raise VCSError(
78 "Given alias '%s' is not recognized! Allowed aliases:\n%s" %
78 f"Given alias '{alias}' is not recognized! "
79 (alias, pformat(settings.BACKENDS.keys())))
79 f"Allowed aliases:{settings.BACKENDS.keys()}")
80 backend_path = settings.BACKENDS[alias]
80 backend_path = settings.BACKENDS[alias]
81 klass = import_class(backend_path)
81 klass = import_class(backend_path)
82 return klass
82 return klass
83
83
84
84
85 def get_supported_backends():
85 def get_supported_backends():
86 """
86 """
87 Returns list of aliases of supported backends.
87 Returns list of aliases of supported backends.
88 """
88 """
89 return settings.BACKENDS.keys()
89 return settings.BACKENDS.keys()
90
90
91
91
92 def get_vcsserver_service_data():
92 def get_vcsserver_service_data():
93 from rhodecode.lib.vcs import connection
93 from rhodecode.lib.vcs import connection
94 return connection.Service.get_vcsserver_service_data()
94 return connection.Service.get_vcsserver_service_data()
95
95
@@ -1,1935 +1,1989 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24 import os
24 import os
25 import re
25 import re
26 import time
26 import time
27 import shutil
27 import shutil
28 import datetime
28 import datetime
29 import fnmatch
29 import fnmatch
30 import itertools
30 import itertools
31 import logging
31 import logging
32 import collections
32 import dataclasses
33 import warnings
33 import warnings
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37
37
38 import rhodecode
38 import rhodecode
39 from rhodecode.translation import lazy_ugettext
39 from rhodecode.translation import lazy_ugettext
40 from rhodecode.lib.utils2 import safe_str, CachedProperty
40 from rhodecode.lib.utils2 import safe_str, CachedProperty
41 from rhodecode.lib.vcs.utils import author_name, author_email
41 from rhodecode.lib.vcs.utils import author_name, author_email
42 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.conf import settings
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
45 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
46 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 NodeDoesNotExistError, NodeNotChangedError, VCSError,
47 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
48 RepositoryError)
48 RepositoryError)
49
49
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 FILEMODE_DEFAULT = 0o100644
54 FILEMODE_DEFAULT = 0o100644
55 FILEMODE_EXECUTABLE = 0o100755
55 FILEMODE_EXECUTABLE = 0o100755
56 EMPTY_COMMIT_ID = '0' * 40
56 EMPTY_COMMIT_ID = '0' * 40
57
57
58 _Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
59
58
59 @dataclasses.dataclass
60 class Reference:
61 type: str
62 name: str
63 commit_id: str
60
64
61 class Reference(_Reference):
65 def __iter__(self):
66 yield self.type
67 yield self.name
68 yield self.commit_id
62
69
63 @property
70 @property
64 def branch(self):
71 def branch(self):
65 if self.type == 'branch':
72 if self.type == 'branch':
66 return self.name
73 return self.name
67
74
68 @property
75 @property
69 def bookmark(self):
76 def bookmark(self):
70 if self.type == 'book':
77 if self.type == 'book':
71 return self.name
78 return self.name
72
79
73 @property
80 @property
74 def to_str(self):
81 def to_str(self):
75 return reference_to_unicode(self)
82 return reference_to_unicode(self)
76
83
84 def asdict(self):
85 return dict(
86 type=self.type,
87 name=self.name,
88 commit_id=self.commit_id
89 )
77
90
78 def unicode_to_reference(raw):
91
92 def unicode_to_reference(raw: str):
79 """
93 """
80 Convert a unicode (or string) to a reference object.
94 Convert a unicode (or string) to a reference object.
81 If unicode evaluates to False it returns None.
95 If unicode evaluates to False it returns None.
82 """
96 """
83 if raw:
97 if raw:
84 refs = raw.split(':')
98 refs = raw.split(':')
85 return Reference(*refs)
99 return Reference(*refs)
86 else:
100 else:
87 return None
101 return None
88
102
89
103
90 def reference_to_unicode(ref: Reference):
104 def reference_to_unicode(ref: Reference):
91 """
105 """
92 Convert a reference object to unicode.
106 Convert a reference object to unicode.
93 If reference is None it returns None.
107 If reference is None it returns None.
94 """
108 """
95 if ref:
109 if ref:
96 return ':'.join(ref)
110 return ':'.join(ref)
97 else:
111 else:
98 return None
112 return None
99
113
100
114
101 class MergeFailureReason(object):
115 class MergeFailureReason(object):
102 """
116 """
103 Enumeration with all the reasons why the server side merge could fail.
117 Enumeration with all the reasons why the server side merge could fail.
104
118
105 DO NOT change the number of the reasons, as they may be stored in the
119 DO NOT change the number of the reasons, as they may be stored in the
106 database.
120 database.
107
121
108 Changing the name of a reason is acceptable and encouraged to deprecate old
122 Changing the name of a reason is acceptable and encouraged to deprecate old
109 reasons.
123 reasons.
110 """
124 """
111
125
112 # Everything went well.
126 # Everything went well.
113 NONE = 0
127 NONE = 0
114
128
115 # An unexpected exception was raised. Check the logs for more details.
129 # An unexpected exception was raised. Check the logs for more details.
116 UNKNOWN = 1
130 UNKNOWN = 1
117
131
118 # The merge was not successful, there are conflicts.
132 # The merge was not successful, there are conflicts.
119 MERGE_FAILED = 2
133 MERGE_FAILED = 2
120
134
121 # The merge succeeded but we could not push it to the target repository.
135 # The merge succeeded but we could not push it to the target repository.
122 PUSH_FAILED = 3
136 PUSH_FAILED = 3
123
137
124 # The specified target is not a head in the target repository.
138 # The specified target is not a head in the target repository.
125 TARGET_IS_NOT_HEAD = 4
139 TARGET_IS_NOT_HEAD = 4
126
140
127 # The source repository contains more branches than the target. Pushing
141 # The source repository contains more branches than the target. Pushing
128 # the merge will create additional branches in the target.
142 # the merge will create additional branches in the target.
129 HG_SOURCE_HAS_MORE_BRANCHES = 5
143 HG_SOURCE_HAS_MORE_BRANCHES = 5
130
144
131 # The target reference has multiple heads. That does not allow to correctly
145 # The target reference has multiple heads. That does not allow to correctly
132 # identify the target location. This could only happen for mercurial
146 # identify the target location. This could only happen for mercurial
133 # branches.
147 # branches.
134 HG_TARGET_HAS_MULTIPLE_HEADS = 6
148 HG_TARGET_HAS_MULTIPLE_HEADS = 6
135
149
136 # The target repository is locked
150 # The target repository is locked
137 TARGET_IS_LOCKED = 7
151 TARGET_IS_LOCKED = 7
138
152
139 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
153 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
140 # A involved commit could not be found.
154 # A involved commit could not be found.
141 _DEPRECATED_MISSING_COMMIT = 8
155 _DEPRECATED_MISSING_COMMIT = 8
142
156
143 # The target repo reference is missing.
157 # The target repo reference is missing.
144 MISSING_TARGET_REF = 9
158 MISSING_TARGET_REF = 9
145
159
146 # The source repo reference is missing.
160 # The source repo reference is missing.
147 MISSING_SOURCE_REF = 10
161 MISSING_SOURCE_REF = 10
148
162
149 # The merge was not successful, there are conflicts related to sub
163 # The merge was not successful, there are conflicts related to sub
150 # repositories.
164 # repositories.
151 SUBREPO_MERGE_FAILED = 11
165 SUBREPO_MERGE_FAILED = 11
152
166
153
167
154 class UpdateFailureReason(object):
168 class UpdateFailureReason(object):
155 """
169 """
156 Enumeration with all the reasons why the pull request update could fail.
170 Enumeration with all the reasons why the pull request update could fail.
157
171
158 DO NOT change the number of the reasons, as they may be stored in the
172 DO NOT change the number of the reasons, as they may be stored in the
159 database.
173 database.
160
174
161 Changing the name of a reason is acceptable and encouraged to deprecate old
175 Changing the name of a reason is acceptable and encouraged to deprecate old
162 reasons.
176 reasons.
163 """
177 """
164
178
165 # Everything went well.
179 # Everything went well.
166 NONE = 0
180 NONE = 0
167
181
168 # An unexpected exception was raised. Check the logs for more details.
182 # An unexpected exception was raised. Check the logs for more details.
169 UNKNOWN = 1
183 UNKNOWN = 1
170
184
171 # The pull request is up to date.
185 # The pull request is up to date.
172 NO_CHANGE = 2
186 NO_CHANGE = 2
173
187
174 # The pull request has a reference type that is not supported for update.
188 # The pull request has a reference type that is not supported for update.
175 WRONG_REF_TYPE = 3
189 WRONG_REF_TYPE = 3
176
190
177 # Update failed because the target reference is missing.
191 # Update failed because the target reference is missing.
178 MISSING_TARGET_REF = 4
192 MISSING_TARGET_REF = 4
179
193
180 # Update failed because the source reference is missing.
194 # Update failed because the source reference is missing.
181 MISSING_SOURCE_REF = 5
195 MISSING_SOURCE_REF = 5
182
196
183
197
184 class MergeResponse(object):
class MergeResponse(object):
    """
    Value object describing the outcome of a (possibly dry-run) merge.

    Carries whether a merge is `possible`, whether it was `executed`, the
    resulting `merge_ref`, a `failure_reason` code from `MergeFailureReason`,
    and a free-form `metadata` dict used to interpolate the human readable
    status message.
    """

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            'This pull request cannot be merged because of an unhandled exception. '
            '{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            'This pull request could not be merged because push to '
            'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            'This pull request cannot be merged because the target '
            '`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            'This pull request cannot be merged because the source contains '
            'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            'This pull request cannot be merged because the target `{target_ref.name}` '
            'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            'This pull request cannot be merged because the target repository is '
            'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be merged because the target '
            'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be merged because the source '
            'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts related '
            'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            'This pull request cannot be merged because the target or the '
            'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return f'<MergeResponse:{self.label} {self.failure_reason}>'

    def __eq__(self, other):
        same_instance = isinstance(other, self.__class__)
        return same_instance \
            and self.possible == other.possible \
            and self.executed == other.executed \
            and self.failure_reason == other.failure_reason

    @property
    def label(self):
        """Constant name in MergeFailureReason matching `failure_reason`."""
        # reverse-map code -> name, skipping private/deprecated members
        label_dict = {v: k for k, v in MergeFailureReason.__dict__.items()
                      if not k.startswith('_')}
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_str(self.MERGE_STATUS_MESSAGES[self.failure_reason])

        try:
            return msg.format(**self.metadata)
        except Exception:
            # never let a template/metadata mismatch escape; fall back to
            # the raw template text
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        """Plain-dict representation (e.g. for JSON serialization)."""
        keys = ('possible', 'executed', 'merge_ref', 'failure_reason',
                'merge_status_message')
        return {k: getattr(self, k) for k in keys}
270
284
271
285
class TargetRefMissing(ValueError):
    """Raised when the target reference of an operation cannot be resolved."""
274
288
275
289
class SourceRefMissing(ValueError):
    """Raised when the source reference of an operation cannot be resolved."""
278
292
279
293
280 class BaseRepository(object):
294 class BaseRepository(object):
281 """
295 """
282 Base Repository for final backends
296 Base Repository for final backends
283
297
284 .. attribute:: DEFAULT_BRANCH_NAME
298 .. attribute:: DEFAULT_BRANCH_NAME
285
299
286 name of default branch (i.e. "trunk" for svn, "master" for git etc.
300 name of default branch (i.e. "trunk" for svn, "master" for git etc.
287
301
288 .. attribute:: commit_ids
302 .. attribute:: commit_ids
289
303
290 list of all available commit ids, in ascending order
304 list of all available commit ids, in ascending order
291
305
292 .. attribute:: path
306 .. attribute:: path
293
307
294 absolute path to the repository
308 absolute path to the repository
295
309
296 .. attribute:: bookmarks
310 .. attribute:: bookmarks
297
311
298 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
312 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
299 there are no bookmarks or the backend implementation does not support
313 there are no bookmarks or the backend implementation does not support
300 bookmarks.
314 bookmarks.
301
315
302 .. attribute:: tags
316 .. attribute:: tags
303
317
304 Mapping from name to :term:`Commit ID` of the tag.
318 Mapping from name to :term:`Commit ID` of the tag.
305
319
306 """
320 """
307
321
308 DEFAULT_BRANCH_NAME = None
322 DEFAULT_BRANCH_NAME = None
309 DEFAULT_CONTACT = u"Unknown"
323 DEFAULT_CONTACT = u"Unknown"
310 DEFAULT_DESCRIPTION = u"unknown"
324 DEFAULT_DESCRIPTION = u"unknown"
311 EMPTY_COMMIT_ID = '0' * 40
325 EMPTY_COMMIT_ID = '0' * 40
312 COMMIT_ID_PAT = re.compile(r'[0-9a-fA-F]{40}')
326 COMMIT_ID_PAT = re.compile(r'[0-9a-fA-F]{40}')
313
327
314 path = None
328 path = None
315
329
316 _is_empty = None
330 _is_empty = None
317 _commit_ids = {}
331 _commit_ids = {}
318
332
319 def __init__(self, repo_path, config=None, create=False, **kwargs):
333 def __init__(self, repo_path, config=None, create=False, **kwargs):
320 """
334 """
321 Initializes repository. Raises RepositoryError if repository could
335 Initializes repository. Raises RepositoryError if repository could
322 not be find at the given ``repo_path`` or directory at ``repo_path``
336 not be find at the given ``repo_path`` or directory at ``repo_path``
323 exists and ``create`` is set to True.
337 exists and ``create`` is set to True.
324
338
325 :param repo_path: local path of the repository
339 :param repo_path: local path of the repository
326 :param config: repository configuration
340 :param config: repository configuration
327 :param create=False: if set to True, would try to create repository.
341 :param create=False: if set to True, would try to create repository.
328 :param src_url=None: if set, should be proper url from which repository
342 :param src_url=None: if set, should be proper url from which repository
329 would be cloned; requires ``create`` parameter to be set to True -
343 would be cloned; requires ``create`` parameter to be set to True -
330 raises RepositoryError if src_url is set and create evaluates to
344 raises RepositoryError if src_url is set and create evaluates to
331 False
345 False
332 """
346 """
333 raise NotImplementedError
347 raise NotImplementedError
334
348
335 def __repr__(self):
349 def __repr__(self):
336 return '<%s at %s>' % (self.__class__.__name__, self.path)
350 return '<%s at %s>' % (self.__class__.__name__, self.path)
337
351
338 def __len__(self):
352 def __len__(self):
339 return self.count()
353 return self.count()
340
354
341 def __eq__(self, other):
355 def __eq__(self, other):
342 same_instance = isinstance(other, self.__class__)
356 same_instance = isinstance(other, self.__class__)
343 return same_instance and other.path == self.path
357 return same_instance and other.path == self.path
344
358
345 def __ne__(self, other):
359 def __ne__(self, other):
346 return not self.__eq__(other)
360 return not self.__eq__(other)
347
361
348 def get_create_shadow_cache_pr_path(self, db_repo):
362 def get_create_shadow_cache_pr_path(self, db_repo):
349 path = db_repo.cached_diffs_dir
363 path = db_repo.cached_diffs_dir
350 if not os.path.exists(path):
364 if not os.path.exists(path):
351 os.makedirs(path, 0o755)
365 os.makedirs(path, 0o755)
352 return path
366 return path
353
367
354 @classmethod
368 @classmethod
355 def get_default_config(cls, default=None):
369 def get_default_config(cls, default=None):
356 config = Config()
370 config = Config()
357 if default and isinstance(default, list):
371 if default and isinstance(default, list):
358 for section, key, val in default:
372 for section, key, val in default:
359 config.set(section, key, val)
373 config.set(section, key, val)
360 return config
374 return config
361
375
362 @LazyProperty
376 @LazyProperty
363 def _remote(self):
377 def _remote(self):
364 raise NotImplementedError
378 raise NotImplementedError
365
379
366 def _heads(self, branch=None):
380 def _heads(self, branch=None):
367 return []
381 return []
368
382
369 @LazyProperty
383 @LazyProperty
370 def EMPTY_COMMIT(self):
384 def EMPTY_COMMIT(self):
371 return EmptyCommit(self.EMPTY_COMMIT_ID)
385 return EmptyCommit(self.EMPTY_COMMIT_ID)
372
386
373 @LazyProperty
387 @LazyProperty
374 def alias(self):
388 def alias(self):
375 for k, v in settings.BACKENDS.items():
389 for k, v in settings.BACKENDS.items():
376 if v.split('.')[-1] == str(self.__class__.__name__):
390 if v.split('.')[-1] == str(self.__class__.__name__):
377 return k
391 return k
378
392
379 @LazyProperty
393 @LazyProperty
380 def name(self):
394 def name(self):
381 return safe_str(os.path.basename(self.path))
395 return safe_str(os.path.basename(self.path))
382
396
383 @LazyProperty
397 @LazyProperty
384 def description(self):
398 def description(self):
385 raise NotImplementedError
399 raise NotImplementedError
386
400
387 def refs(self):
401 def refs(self):
388 """
402 """
389 returns a `dict` with branches, bookmarks, tags, and closed_branches
403 returns a `dict` with branches, bookmarks, tags, and closed_branches
390 for this repository
404 for this repository
391 """
405 """
392 return dict(
406 return dict(
393 branches=self.branches,
407 branches=self.branches,
394 branches_closed=self.branches_closed,
408 branches_closed=self.branches_closed,
395 tags=self.tags,
409 tags=self.tags,
396 bookmarks=self.bookmarks
410 bookmarks=self.bookmarks
397 )
411 )
398
412
399 @LazyProperty
413 @LazyProperty
400 def branches(self):
414 def branches(self):
401 """
415 """
402 A `dict` which maps branch names to commit ids.
416 A `dict` which maps branch names to commit ids.
403 """
417 """
404 raise NotImplementedError
418 raise NotImplementedError
405
419
406 @LazyProperty
420 @LazyProperty
407 def branches_closed(self):
421 def branches_closed(self):
408 """
422 """
409 A `dict` which maps tags names to commit ids.
423 A `dict` which maps tags names to commit ids.
410 """
424 """
411 raise NotImplementedError
425 raise NotImplementedError
412
426
413 @LazyProperty
427 @LazyProperty
414 def bookmarks(self):
428 def bookmarks(self):
415 """
429 """
416 A `dict` which maps tags names to commit ids.
430 A `dict` which maps tags names to commit ids.
417 """
431 """
418 raise NotImplementedError
432 raise NotImplementedError
419
433
420 @LazyProperty
434 @LazyProperty
421 def tags(self):
435 def tags(self):
422 """
436 """
423 A `dict` which maps tags names to commit ids.
437 A `dict` which maps tags names to commit ids.
424 """
438 """
425 raise NotImplementedError
439 raise NotImplementedError
426
440
427 @LazyProperty
441 @LazyProperty
428 def size(self):
442 def size(self):
429 """
443 """
430 Returns combined size in bytes for all repository files
444 Returns combined size in bytes for all repository files
431 """
445 """
432 tip = self.get_commit()
446 tip = self.get_commit()
433 return tip.size
447 return tip.size
434
448
435 def size_at_commit(self, commit_id):
449 def size_at_commit(self, commit_id):
436 commit = self.get_commit(commit_id)
450 commit = self.get_commit(commit_id)
437 return commit.size
451 return commit.size
438
452
439 def _check_for_empty(self):
453 def _check_for_empty(self):
440 no_commits = len(self._commit_ids) == 0
454 no_commits = len(self._commit_ids) == 0
441 if no_commits:
455 if no_commits:
442 # check on remote to be sure
456 # check on remote to be sure
443 return self._remote.is_empty()
457 return self._remote.is_empty()
444 else:
458 else:
445 return False
459 return False
446
460
447 def is_empty(self):
461 def is_empty(self):
448 if rhodecode.is_test:
462 if rhodecode.is_test:
449 return self._check_for_empty()
463 return self._check_for_empty()
450
464
451 if self._is_empty is None:
465 if self._is_empty is None:
452 # cache empty for production, but not tests
466 # cache empty for production, but not tests
453 self._is_empty = self._check_for_empty()
467 self._is_empty = self._check_for_empty()
454
468
455 return self._is_empty
469 return self._is_empty
456
470
457 @staticmethod
471 @staticmethod
458 def check_url(url, config):
472 def check_url(url, config):
459 """
473 """
460 Function will check given url and try to verify if it's a valid
474 Function will check given url and try to verify if it's a valid
461 link.
475 link.
462 """
476 """
463 raise NotImplementedError
477 raise NotImplementedError
464
478
465 @staticmethod
479 @staticmethod
466 def is_valid_repository(path):
480 def is_valid_repository(path):
467 """
481 """
468 Check if given `path` contains a valid repository of this backend
482 Check if given `path` contains a valid repository of this backend
469 """
483 """
470 raise NotImplementedError
484 raise NotImplementedError
471
485
472 # ==========================================================================
486 # ==========================================================================
473 # COMMITS
487 # COMMITS
474 # ==========================================================================
488 # ==========================================================================
475
489
476 @CachedProperty
490 @CachedProperty
477 def commit_ids(self):
491 def commit_ids(self):
478 raise NotImplementedError
492 raise NotImplementedError
479
493
480 def append_commit_id(self, commit_id):
494 def append_commit_id(self, commit_id):
481 if commit_id not in self.commit_ids:
495 if commit_id not in self.commit_ids:
482 self._rebuild_cache(self.commit_ids + [commit_id])
496 self._rebuild_cache(self.commit_ids + [commit_id])
483
497
484 # clear cache
498 # clear cache
485 self._invalidate_prop_cache('commit_ids')
499 self._invalidate_prop_cache('commit_ids')
486 self._is_empty = False
500 self._is_empty = False
487
501
488 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
502 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
489 translate_tag=None, maybe_unreachable=False, reference_obj=None):
503 translate_tag=None, maybe_unreachable=False, reference_obj=None):
490 """
504 """
491 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
505 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
492 are both None, most recent commit is returned.
506 are both None, most recent commit is returned.
493
507
494 :param pre_load: Optional. List of commit attributes to load.
508 :param pre_load: Optional. List of commit attributes to load.
495
509
496 :raises ``EmptyRepositoryError``: if there are no commits
510 :raises ``EmptyRepositoryError``: if there are no commits
497 """
511 """
498 raise NotImplementedError
512 raise NotImplementedError
499
513
500 def __iter__(self):
514 def __iter__(self):
501 for commit_id in self.commit_ids:
515 for commit_id in self.commit_ids:
502 yield self.get_commit(commit_id=commit_id)
516 yield self.get_commit(commit_id=commit_id)
503
517
504 def get_commits(
518 def get_commits(
505 self, start_id=None, end_id=None, start_date=None, end_date=None,
519 self, start_id=None, end_id=None, start_date=None, end_date=None,
506 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
520 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
507 """
521 """
508 Returns iterator of `BaseCommit` objects from start to end
522 Returns iterator of `BaseCommit` objects from start to end
509 not inclusive. This should behave just like a list, ie. end is not
523 not inclusive. This should behave just like a list, ie. end is not
510 inclusive.
524 inclusive.
511
525
512 :param start_id: None or str, must be a valid commit id
526 :param start_id: None or str, must be a valid commit id
513 :param end_id: None or str, must be a valid commit id
527 :param end_id: None or str, must be a valid commit id
514 :param start_date:
528 :param start_date:
515 :param end_date:
529 :param end_date:
516 :param branch_name:
530 :param branch_name:
517 :param show_hidden:
531 :param show_hidden:
518 :param pre_load:
532 :param pre_load:
519 :param translate_tags:
533 :param translate_tags:
520 """
534 """
521 raise NotImplementedError
535 raise NotImplementedError
522
536
523 def __getitem__(self, key):
537 def __getitem__(self, key):
524 """
538 """
525 Allows index based access to the commit objects of this repository.
539 Allows index based access to the commit objects of this repository.
526 """
540 """
527 pre_load = ["author", "branch", "date", "message", "parents"]
541 pre_load = ["author", "branch", "date", "message", "parents"]
528 if isinstance(key, slice):
542 if isinstance(key, slice):
529 return self._get_range(key, pre_load)
543 return self._get_range(key, pre_load)
530 return self.get_commit(commit_idx=key, pre_load=pre_load)
544 return self.get_commit(commit_idx=key, pre_load=pre_load)
531
545
532 def _get_range(self, slice_obj, pre_load):
546 def _get_range(self, slice_obj, pre_load):
533 for commit_id in self.commit_ids.__getitem__(slice_obj):
547 for commit_id in self.commit_ids.__getitem__(slice_obj):
534 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
548 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
535
549
536 def count(self):
550 def count(self):
537 return len(self.commit_ids)
551 return len(self.commit_ids)
538
552
539 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
553 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
540 """
554 """
541 Creates and returns a tag for the given ``commit_id``.
555 Creates and returns a tag for the given ``commit_id``.
542
556
543 :param name: name for new tag
557 :param name: name for new tag
544 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
558 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
545 :param commit_id: commit id for which new tag would be created
559 :param commit_id: commit id for which new tag would be created
546 :param message: message of the tag's commit
560 :param message: message of the tag's commit
547 :param date: date of tag's commit
561 :param date: date of tag's commit
548
562
549 :raises TagAlreadyExistError: if tag with same name already exists
563 :raises TagAlreadyExistError: if tag with same name already exists
550 """
564 """
551 raise NotImplementedError
565 raise NotImplementedError
552
566
553 def remove_tag(self, name, user, message=None, date=None):
567 def remove_tag(self, name, user, message=None, date=None):
554 """
568 """
555 Removes tag with the given ``name``.
569 Removes tag with the given ``name``.
556
570
557 :param name: name of the tag to be removed
571 :param name: name of the tag to be removed
558 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
572 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
559 :param message: message of the tag's removal commit
573 :param message: message of the tag's removal commit
560 :param date: date of tag's removal commit
574 :param date: date of tag's removal commit
561
575
562 :raises TagDoesNotExistError: if tag with given name does not exists
576 :raises TagDoesNotExistError: if tag with given name does not exists
563 """
577 """
564 raise NotImplementedError
578 raise NotImplementedError
565
579
566 def get_diff(
580 def get_diff(
567 self, commit1, commit2, path=None, ignore_whitespace=False,
581 self, commit1, commit2, path=None, ignore_whitespace=False,
568 context=3, path1=None):
582 context=3, path1=None):
569 """
583 """
570 Returns (git like) *diff*, as plain text. Shows changes introduced by
584 Returns (git like) *diff*, as plain text. Shows changes introduced by
571 `commit2` since `commit1`.
585 `commit2` since `commit1`.
572
586
573 :param commit1: Entry point from which diff is shown. Can be
587 :param commit1: Entry point from which diff is shown. Can be
574 ``self.EMPTY_COMMIT`` - in this case, patch showing all
588 ``self.EMPTY_COMMIT`` - in this case, patch showing all
575 the changes since empty state of the repository until `commit2`
589 the changes since empty state of the repository until `commit2`
576 :param commit2: Until which commit changes should be shown.
590 :param commit2: Until which commit changes should be shown.
577 :param path: Can be set to a path of a file to create a diff of that
591 :param path: Can be set to a path of a file to create a diff of that
578 file. If `path1` is also set, this value is only associated to
592 file. If `path1` is also set, this value is only associated to
579 `commit2`.
593 `commit2`.
580 :param ignore_whitespace: If set to ``True``, would not show whitespace
594 :param ignore_whitespace: If set to ``True``, would not show whitespace
581 changes. Defaults to ``False``.
595 changes. Defaults to ``False``.
582 :param context: How many lines before/after changed lines should be
596 :param context: How many lines before/after changed lines should be
583 shown. Defaults to ``3``.
597 shown. Defaults to ``3``.
584 :param path1: Can be set to a path to associate with `commit1`. This
598 :param path1: Can be set to a path to associate with `commit1`. This
585 parameter works only for backends which support diff generation for
599 parameter works only for backends which support diff generation for
586 different paths. Other backends will raise a `ValueError` if `path1`
600 different paths. Other backends will raise a `ValueError` if `path1`
587 is set and has a different value than `path`.
601 is set and has a different value than `path`.
588 :param file_path: filter this diff by given path pattern
602 :param file_path: filter this diff by given path pattern
589 """
603 """
590 raise NotImplementedError
604 raise NotImplementedError
591
605
592 def strip(self, commit_id, branch=None):
606 def strip(self, commit_id, branch=None):
593 """
607 """
594 Strip given commit_id from the repository
608 Strip given commit_id from the repository
595 """
609 """
596 raise NotImplementedError
610 raise NotImplementedError
597
611
598 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
612 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
599 """
613 """
600 Return a latest common ancestor commit if one exists for this repo
614 Return a latest common ancestor commit if one exists for this repo
601 `commit_id1` vs `commit_id2` from `repo2`.
615 `commit_id1` vs `commit_id2` from `repo2`.
602
616
603 :param commit_id1: Commit it from this repository to use as a
617 :param commit_id1: Commit it from this repository to use as a
604 target for the comparison.
618 target for the comparison.
605 :param commit_id2: Source commit id to use for comparison.
619 :param commit_id2: Source commit id to use for comparison.
606 :param repo2: Source repository to use for comparison.
620 :param repo2: Source repository to use for comparison.
607 """
621 """
608 raise NotImplementedError
622 raise NotImplementedError
609
623
610 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
624 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
611 """
625 """
612 Compare this repository's revision `commit_id1` with `commit_id2`.
626 Compare this repository's revision `commit_id1` with `commit_id2`.
613
627
614 Returns a tuple(commits, ancestor) that would be merged from
628 Returns a tuple(commits, ancestor) that would be merged from
615 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
629 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
616 will be returned as ancestor.
630 will be returned as ancestor.
617
631
618 :param commit_id1: Commit it from this repository to use as a
632 :param commit_id1: Commit it from this repository to use as a
619 target for the comparison.
633 target for the comparison.
620 :param commit_id2: Source commit id to use for comparison.
634 :param commit_id2: Source commit id to use for comparison.
621 :param repo2: Source repository to use for comparison.
635 :param repo2: Source repository to use for comparison.
622 :param merge: If set to ``True`` will do a merge compare which also
636 :param merge: If set to ``True`` will do a merge compare which also
623 returns the common ancestor.
637 returns the common ancestor.
624 :param pre_load: Optional. List of commit attributes to load.
638 :param pre_load: Optional. List of commit attributes to load.
625 """
639 """
626 raise NotImplementedError
640 raise NotImplementedError
627
641
628 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
642 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
629 user_name='', user_email='', message='', dry_run=False,
643 user_name='', user_email='', message='', dry_run=False,
630 use_rebase=False, close_branch=False):
644 use_rebase=False, close_branch=False):
631 """
645 """
632 Merge the revisions specified in `source_ref` from `source_repo`
646 Merge the revisions specified in `source_ref` from `source_repo`
633 onto the `target_ref` of this repository.
647 onto the `target_ref` of this repository.
634
648
635 `source_ref` and `target_ref` are named tupls with the following
649 `source_ref` and `target_ref` are named tupls with the following
636 fields `type`, `name` and `commit_id`.
650 fields `type`, `name` and `commit_id`.
637
651
638 Returns a MergeResponse named tuple with the following fields
652 Returns a MergeResponse named tuple with the following fields
639 'possible', 'executed', 'source_commit', 'target_commit',
653 'possible', 'executed', 'source_commit', 'target_commit',
640 'merge_commit'.
654 'merge_commit'.
641
655
642 :param repo_id: `repo_id` target repo id.
656 :param repo_id: `repo_id` target repo id.
643 :param workspace_id: `workspace_id` unique identifier.
657 :param workspace_id: `workspace_id` unique identifier.
644 :param target_ref: `target_ref` points to the commit on top of which
658 :param target_ref: `target_ref` points to the commit on top of which
645 the `source_ref` should be merged.
659 the `source_ref` should be merged.
646 :param source_repo: The repository that contains the commits to be
660 :param source_repo: The repository that contains the commits to be
647 merged.
661 merged.
648 :param source_ref: `source_ref` points to the topmost commit from
662 :param source_ref: `source_ref` points to the topmost commit from
649 the `source_repo` which should be merged.
663 the `source_repo` which should be merged.
650 :param user_name: Merge commit `user_name`.
664 :param user_name: Merge commit `user_name`.
651 :param user_email: Merge commit `user_email`.
665 :param user_email: Merge commit `user_email`.
652 :param message: Merge commit `message`.
666 :param message: Merge commit `message`.
653 :param dry_run: If `True` the merge will not take place.
667 :param dry_run: If `True` the merge will not take place.
654 :param use_rebase: If `True` commits from the source will be rebased
668 :param use_rebase: If `True` commits from the source will be rebased
655 on top of the target instead of being merged.
669 on top of the target instead of being merged.
656 :param close_branch: If `True` branch will be close before merging it
670 :param close_branch: If `True` branch will be close before merging it
657 """
671 """
658 if dry_run:
672 if dry_run:
659 message = message or settings.MERGE_DRY_RUN_MESSAGE
673 message = message or settings.MERGE_DRY_RUN_MESSAGE
660 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
674 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
661 user_name = user_name or settings.MERGE_DRY_RUN_USER
675 user_name = user_name or settings.MERGE_DRY_RUN_USER
662 else:
676 else:
663 if not user_name:
677 if not user_name:
664 raise ValueError('user_name cannot be empty')
678 raise ValueError('user_name cannot be empty')
665 if not user_email:
679 if not user_email:
666 raise ValueError('user_email cannot be empty')
680 raise ValueError('user_email cannot be empty')
667 if not message:
681 if not message:
668 raise ValueError('message cannot be empty')
682 raise ValueError('message cannot be empty')
669
683
670 try:
684 try:
671 return self._merge_repo(
685 return self._merge_repo(
672 repo_id, workspace_id, target_ref, source_repo,
686 repo_id, workspace_id, target_ref, source_repo,
673 source_ref, message, user_name, user_email, dry_run=dry_run,
687 source_ref, message, user_name, user_email, dry_run=dry_run,
674 use_rebase=use_rebase, close_branch=close_branch)
688 use_rebase=use_rebase, close_branch=close_branch)
675 except RepositoryError as exc:
689 except RepositoryError as exc:
676 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
690 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
677 return MergeResponse(
691 return MergeResponse(
678 False, False, None, MergeFailureReason.UNKNOWN,
692 False, False, None, MergeFailureReason.UNKNOWN,
679 metadata={'exception': str(exc)})
693 metadata={'exception': str(exc)})
680
694
681 def _merge_repo(self, repo_id, workspace_id, target_ref,
695 def _merge_repo(self, repo_id, workspace_id, target_ref,
682 source_repo, source_ref, merge_message,
696 source_repo, source_ref, merge_message,
683 merger_name, merger_email, dry_run=False,
697 merger_name, merger_email, dry_run=False,
684 use_rebase=False, close_branch=False):
698 use_rebase=False, close_branch=False):
685 """Internal implementation of merge."""
699 """Internal implementation of merge."""
686 raise NotImplementedError
700 raise NotImplementedError
687
701
688 def _maybe_prepare_merge_workspace(
702 def _maybe_prepare_merge_workspace(
689 self, repo_id, workspace_id, target_ref, source_ref):
703 self, repo_id, workspace_id, target_ref, source_ref):
690 """
704 """
691 Create the merge workspace.
705 Create the merge workspace.
692
706
693 :param workspace_id: `workspace_id` unique identifier.
707 :param workspace_id: `workspace_id` unique identifier.
694 """
708 """
695 raise NotImplementedError
709 raise NotImplementedError
696
710
697 @classmethod
711 @classmethod
698 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
712 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
699 """
713 """
700 Legacy version that was used before. We still need it for
714 Legacy version that was used before. We still need it for
701 backward compat
715 backward compat
702 """
716 """
703 return os.path.join(
717 return os.path.join(
704 os.path.dirname(repo_path),
718 os.path.dirname(repo_path),
705 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
719 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
706
720
707 @classmethod
721 @classmethod
708 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
722 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
709 # The name of the shadow repository must start with '.', so it is
723 # The name of the shadow repository must start with '.', so it is
710 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
724 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
711 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
725 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
712 if os.path.exists(legacy_repository_path):
726 if os.path.exists(legacy_repository_path):
713 return legacy_repository_path
727 return legacy_repository_path
714 else:
728 else:
715 return os.path.join(
729 return os.path.join(
716 os.path.dirname(repo_path),
730 os.path.dirname(repo_path),
717 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
731 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
718
732
719 def cleanup_merge_workspace(self, repo_id, workspace_id):
733 def cleanup_merge_workspace(self, repo_id, workspace_id):
720 """
734 """
721 Remove merge workspace.
735 Remove merge workspace.
722
736
723 This function MUST not fail in case there is no workspace associated to
737 This function MUST not fail in case there is no workspace associated to
724 the given `workspace_id`.
738 the given `workspace_id`.
725
739
726 :param workspace_id: `workspace_id` unique identifier.
740 :param workspace_id: `workspace_id` unique identifier.
727 """
741 """
728 shadow_repository_path = self._get_shadow_repository_path(
742 shadow_repository_path = self._get_shadow_repository_path(
729 self.path, repo_id, workspace_id)
743 self.path, repo_id, workspace_id)
730 shadow_repository_path_del = '{}.{}.delete'.format(
744 shadow_repository_path_del = '{}.{}.delete'.format(
731 shadow_repository_path, time.time())
745 shadow_repository_path, time.time())
732
746
733 # move the shadow repo, so it never conflicts with the one used.
747 # move the shadow repo, so it never conflicts with the one used.
734 # we use this method because shutil.rmtree had some edge case problems
748 # we use this method because shutil.rmtree had some edge case problems
735 # removing symlinked repositories
749 # removing symlinked repositories
736 if not os.path.isdir(shadow_repository_path):
750 if not os.path.isdir(shadow_repository_path):
737 return
751 return
738
752
739 shutil.move(shadow_repository_path, shadow_repository_path_del)
753 shutil.move(shadow_repository_path, shadow_repository_path_del)
740 try:
754 try:
741 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
755 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
742 except Exception:
756 except Exception:
743 log.exception('Failed to gracefully remove shadow repo under %s',
757 log.exception('Failed to gracefully remove shadow repo under %s',
744 shadow_repository_path_del)
758 shadow_repository_path_del)
745 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
759 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
746
760
747 # ========== #
761 # ========== #
748 # COMMIT API #
762 # COMMIT API #
749 # ========== #
763 # ========== #
750
764
751 @LazyProperty
765 @LazyProperty
752 def in_memory_commit(self):
766 def in_memory_commit(self):
753 """
767 """
754 Returns :class:`InMemoryCommit` object for this repository.
768 Returns :class:`InMemoryCommit` object for this repository.
755 """
769 """
756 raise NotImplementedError
770 raise NotImplementedError
757
771
758 # ======================== #
772 # ======================== #
759 # UTILITIES FOR SUBCLASSES #
773 # UTILITIES FOR SUBCLASSES #
760 # ======================== #
774 # ======================== #
761
775
762 def _validate_diff_commits(self, commit1, commit2):
776 def _validate_diff_commits(self, commit1, commit2):
763 """
777 """
764 Validates that the given commits are related to this repository.
778 Validates that the given commits are related to this repository.
765
779
766 Intended as a utility for sub classes to have a consistent validation
780 Intended as a utility for sub classes to have a consistent validation
767 of input parameters in methods like :meth:`get_diff`.
781 of input parameters in methods like :meth:`get_diff`.
768 """
782 """
769 self._validate_commit(commit1)
783 self._validate_commit(commit1)
770 self._validate_commit(commit2)
784 self._validate_commit(commit2)
771 if (isinstance(commit1, EmptyCommit) and
785 if (isinstance(commit1, EmptyCommit) and
772 isinstance(commit2, EmptyCommit)):
786 isinstance(commit2, EmptyCommit)):
773 raise ValueError("Cannot compare two empty commits")
787 raise ValueError("Cannot compare two empty commits")
774
788
775 def _validate_commit(self, commit):
789 def _validate_commit(self, commit):
776 if not isinstance(commit, BaseCommit):
790 if not isinstance(commit, BaseCommit):
777 raise TypeError(
791 raise TypeError(
778 "%s is not of type BaseCommit" % repr(commit))
792 "%s is not of type BaseCommit" % repr(commit))
779 if commit.repository != self and not isinstance(commit, EmptyCommit):
793 if commit.repository != self and not isinstance(commit, EmptyCommit):
780 raise ValueError(
794 raise ValueError(
781 "Commit %s must be a valid commit from this repository %s, "
795 "Commit %s must be a valid commit from this repository %s, "
782 "related to this repository instead %s." %
796 "related to this repository instead %s." %
783 (commit, self, commit.repository))
797 (commit, self, commit.repository))
784
798
785 def _validate_commit_id(self, commit_id):
799 def _validate_commit_id(self, commit_id):
786 if not isinstance(commit_id, str):
800 if not isinstance(commit_id, str):
787 raise TypeError(f"commit_id must be a string value got {type(commit_id)} instead")
801 raise TypeError(f"commit_id must be a string value got {type(commit_id)} instead")
788
802
789 def _validate_commit_idx(self, commit_idx):
803 def _validate_commit_idx(self, commit_idx):
790 if not isinstance(commit_idx, int):
804 if not isinstance(commit_idx, int):
791 raise TypeError(f"commit_idx must be a numeric value, got {type(commit_idx)}")
805 raise TypeError(f"commit_idx must be a numeric value, got {type(commit_idx)}")
792
806
793 def _validate_branch_name(self, branch_name):
807 def _validate_branch_name(self, branch_name):
794 if branch_name and branch_name not in self.branches_all:
808 if branch_name and branch_name not in self.branches_all:
795 msg = ("Branch %s not found in %s" % (branch_name, self))
809 msg = ("Branch %s not found in %s" % (branch_name, self))
796 raise BranchDoesNotExistError(msg)
810 raise BranchDoesNotExistError(msg)
797
811
798 #
812 #
799 # Supporting deprecated API parts
813 # Supporting deprecated API parts
800 # TODO: johbo: consider to move this into a mixin
814 # TODO: johbo: consider to move this into a mixin
801 #
815 #
802
816
803 @property
817 @property
804 def EMPTY_CHANGESET(self):
818 def EMPTY_CHANGESET(self):
805 warnings.warn(
819 warnings.warn(
806 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
820 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
807 return self.EMPTY_COMMIT_ID
821 return self.EMPTY_COMMIT_ID
808
822
809 @property
823 @property
810 def revisions(self):
824 def revisions(self):
811 warnings.warn("Use commits attribute instead", DeprecationWarning)
825 warnings.warn("Use commits attribute instead", DeprecationWarning)
812 return self.commit_ids
826 return self.commit_ids
813
827
814 @revisions.setter
828 @revisions.setter
815 def revisions(self, value):
829 def revisions(self, value):
816 warnings.warn("Use commits attribute instead", DeprecationWarning)
830 warnings.warn("Use commits attribute instead", DeprecationWarning)
817 self.commit_ids = value
831 self.commit_ids = value
818
832
819 def get_changeset(self, revision=None, pre_load=None):
833 def get_changeset(self, revision=None, pre_load=None):
820 warnings.warn("Use get_commit instead", DeprecationWarning)
834 warnings.warn("Use get_commit instead", DeprecationWarning)
821 commit_id = None
835 commit_id = None
822 commit_idx = None
836 commit_idx = None
823 if isinstance(revision, str):
837 if isinstance(revision, str):
824 commit_id = revision
838 commit_id = revision
825 else:
839 else:
826 commit_idx = revision
840 commit_idx = revision
827 return self.get_commit(
841 return self.get_commit(
828 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
842 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
829
843
830 def get_changesets(
844 def get_changesets(
831 self, start=None, end=None, start_date=None, end_date=None,
845 self, start=None, end=None, start_date=None, end_date=None,
832 branch_name=None, pre_load=None):
846 branch_name=None, pre_load=None):
833 warnings.warn("Use get_commits instead", DeprecationWarning)
847 warnings.warn("Use get_commits instead", DeprecationWarning)
834 start_id = self._revision_to_commit(start)
848 start_id = self._revision_to_commit(start)
835 end_id = self._revision_to_commit(end)
849 end_id = self._revision_to_commit(end)
836 return self.get_commits(
850 return self.get_commits(
837 start_id=start_id, end_id=end_id, start_date=start_date,
851 start_id=start_id, end_id=end_id, start_date=start_date,
838 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
852 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
839
853
840 def _revision_to_commit(self, revision):
854 def _revision_to_commit(self, revision):
841 """
855 """
842 Translates a revision to a commit_id
856 Translates a revision to a commit_id
843
857
844 Helps to support the old changeset based API which allows to use
858 Helps to support the old changeset based API which allows to use
845 commit ids and commit indices interchangeable.
859 commit ids and commit indices interchangeable.
846 """
860 """
847 if revision is None:
861 if revision is None:
848 return revision
862 return revision
849
863
850 if isinstance(revision, str):
864 if isinstance(revision, str):
851 commit_id = revision
865 commit_id = revision
852 else:
866 else:
853 commit_id = self.commit_ids[revision]
867 commit_id = self.commit_ids[revision]
854 return commit_id
868 return commit_id
855
869
856 @property
870 @property
857 def in_memory_changeset(self):
871 def in_memory_changeset(self):
858 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
872 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
859 return self.in_memory_commit
873 return self.in_memory_commit
860
874
861 def get_path_permissions(self, username):
875 def get_path_permissions(self, username):
862 """
876 """
863 Returns a path permission checker or None if not supported
877 Returns a path permission checker or None if not supported
864
878
865 :param username: session user name
879 :param username: session user name
866 :return: an instance of BasePathPermissionChecker or None
880 :return: an instance of BasePathPermissionChecker or None
867 """
881 """
868 return None
882 return None
869
883
870 def install_hooks(self, force=False):
884 def install_hooks(self, force=False):
871 return self._remote.install_hooks(force)
885 return self._remote.install_hooks(force)
872
886
873 def get_hooks_info(self):
887 def get_hooks_info(self):
874 return self._remote.get_hooks_info()
888 return self._remote.get_hooks_info()
875
889
876 def vcsserver_invalidate_cache(self, delete=False):
890 def vcsserver_invalidate_cache(self, delete=False):
877 return self._remote.vcsserver_invalidate_cache(delete)
891 return self._remote.vcsserver_invalidate_cache(delete)
878
892
879
893
880 class BaseCommit(object):
894 class BaseCommit(object):
881 """
895 """
882 Each backend should implement it's commit representation.
896 Each backend should implement it's commit representation.
883
897
884 **Attributes**
898 **Attributes**
885
899
886 ``repository``
900 ``repository``
887 repository object within which commit exists
901 repository object within which commit exists
888
902
889 ``id``
903 ``id``
890 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
904 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
891 just ``tip``.
905 just ``tip``.
892
906
893 ``raw_id``
907 ``raw_id``
894 raw commit representation (i.e. full 40 length sha for git
908 raw commit representation (i.e. full 40 length sha for git
895 backend)
909 backend)
896
910
897 ``short_id``
911 ``short_id``
898 shortened (if apply) version of ``raw_id``; it would be simple
912 shortened (if apply) version of ``raw_id``; it would be simple
899 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
913 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
900 as ``raw_id`` for subversion
914 as ``raw_id`` for subversion
901
915
902 ``idx``
916 ``idx``
903 commit index
917 commit index
904
918
905 ``files``
919 ``files``
906 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
920 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
907
921
908 ``dirs``
922 ``dirs``
909 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
923 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
910
924
911 ``nodes``
925 ``nodes``
912 combined list of ``Node`` objects
926 combined list of ``Node`` objects
913
927
914 ``author``
928 ``author``
915 author of the commit, as unicode
929 author of the commit, as unicode
916
930
917 ``message``
931 ``message``
918 message of the commit, as unicode
932 message of the commit, as unicode
919
933
920 ``parents``
934 ``parents``
921 list of parent commits
935 list of parent commits
922
936
923 """
937 """
924 repository = None
938 repository = None
925 branch = None
939 branch = None
926
940
927 """
941 """
928 Depending on the backend this should be set to the branch name of the
942 Depending on the backend this should be set to the branch name of the
929 commit. Backends not supporting branches on commits should leave this
943 commit. Backends not supporting branches on commits should leave this
930 value as ``None``.
944 value as ``None``.
931 """
945 """
932
946
933 _ARCHIVE_PREFIX_TEMPLATE = '{repo_name}-{short_id}'
947 _ARCHIVE_PREFIX_TEMPLATE = '{repo_name}-{short_id}'
934 """
948 """
935 This template is used to generate a default prefix for repository archives
949 This template is used to generate a default prefix for repository archives
936 if no prefix has been specified.
950 if no prefix has been specified.
937 """
951 """
938
952
939 def __repr__(self):
953 def __repr__(self):
940 return self.__str__()
954 return self.__str__()
941
955
942 def __str__(self):
956 def __str__(self):
943 return f'<{self.__class__.__name__} at {self.idx}:{self.short_id}>'
957 return f'<{self.__class__.__name__} at {self.idx}:{self.short_id}>'
944
958
945 def __eq__(self, other):
959 def __eq__(self, other):
946 same_instance = isinstance(other, self.__class__)
960 same_instance = isinstance(other, self.__class__)
947 return same_instance and self.raw_id == other.raw_id
961 return same_instance and self.raw_id == other.raw_id
948
962
949 def __json__(self):
963 def __json__(self):
950 parents = []
964 parents = []
951 try:
965 try:
952 for parent in self.parents:
966 for parent in self.parents:
953 parents.append({'raw_id': parent.raw_id})
967 parents.append({'raw_id': parent.raw_id})
954 except NotImplementedError:
968 except NotImplementedError:
955 # empty commit doesn't have parents implemented
969 # empty commit doesn't have parents implemented
956 pass
970 pass
957
971
958 return {
972 return {
959 'short_id': self.short_id,
973 'short_id': self.short_id,
960 'raw_id': self.raw_id,
974 'raw_id': self.raw_id,
961 'revision': self.idx,
975 'revision': self.idx,
962 'message': self.message,
976 'message': self.message,
963 'date': self.date,
977 'date': self.date,
964 'author': self.author,
978 'author': self.author,
965 'parents': parents,
979 'parents': parents,
966 'branch': self.branch
980 'branch': self.branch
967 }
981 }
968
982
969 def __getstate__(self):
983 def __getstate__(self):
970 d = self.__dict__.copy()
984 d = self.__dict__.copy()
971 d.pop('_remote', None)
985 d.pop('_remote', None)
972 d.pop('repository', None)
986 d.pop('repository', None)
973 return d
987 return d
974
988
989 def get_remote(self):
990 return self._remote
991
975 def serialize(self):
992 def serialize(self):
976 return self.__json__()
993 return self.__json__()
977
994
978 def _get_refs(self):
995 def _get_refs(self):
979 return {
996 return {
980 'branches': [self.branch] if self.branch else [],
997 'branches': [self.branch] if self.branch else [],
981 'bookmarks': getattr(self, 'bookmarks', []),
998 'bookmarks': getattr(self, 'bookmarks', []),
982 'tags': self.tags
999 'tags': self.tags
983 }
1000 }
984
1001
985 @LazyProperty
1002 @LazyProperty
986 def last(self):
1003 def last(self):
987 """
1004 """
988 ``True`` if this is last commit in repository, ``False``
1005 ``True`` if this is last commit in repository, ``False``
989 otherwise; trying to access this attribute while there is no
1006 otherwise; trying to access this attribute while there is no
990 commits would raise `EmptyRepositoryError`
1007 commits would raise `EmptyRepositoryError`
991 """
1008 """
992 if self.repository is None:
1009 if self.repository is None:
993 raise CommitError("Cannot check if it's most recent commit")
1010 raise CommitError("Cannot check if it's most recent commit")
994 return self.raw_id == self.repository.commit_ids[-1]
1011 return self.raw_id == self.repository.commit_ids[-1]
995
1012
996 @LazyProperty
1013 @LazyProperty
997 def parents(self):
1014 def parents(self):
998 """
1015 """
999 Returns list of parent commits.
1016 Returns list of parent commits.
1000 """
1017 """
1001 raise NotImplementedError
1018 raise NotImplementedError
1002
1019
1003 @LazyProperty
1020 @LazyProperty
1004 def first_parent(self):
1021 def first_parent(self):
1005 """
1022 """
1006 Returns list of parent commits.
1023 Returns list of parent commits.
1007 """
1024 """
1008 return self.parents[0] if self.parents else EmptyCommit()
1025 return self.parents[0] if self.parents else EmptyCommit()
1009
1026
1010 @property
1027 @property
1011 def merge(self):
1028 def merge(self):
1012 """
1029 """
1013 Returns boolean if commit is a merge.
1030 Returns boolean if commit is a merge.
1014 """
1031 """
1015 return len(self.parents) > 1
1032 return len(self.parents) > 1
1016
1033
1017 @LazyProperty
1034 @LazyProperty
1018 def children(self):
1035 def children(self):
1019 """
1036 """
1020 Returns list of child commits.
1037 Returns list of child commits.
1021 """
1038 """
1022 raise NotImplementedError
1039 raise NotImplementedError
1023
1040
1024 @LazyProperty
1041 @LazyProperty
1025 def id(self):
1042 def id(self):
1026 """
1043 """
1027 Returns string identifying this commit.
1044 Returns string identifying this commit.
1028 """
1045 """
1029 raise NotImplementedError
1046 raise NotImplementedError
1030
1047
1031 @LazyProperty
1048 @LazyProperty
1032 def raw_id(self):
1049 def raw_id(self):
1033 """
1050 """
1034 Returns raw string identifying this commit.
1051 Returns raw string identifying this commit.
1035 """
1052 """
1036 raise NotImplementedError
1053 raise NotImplementedError
1037
1054
1038 @LazyProperty
1055 @LazyProperty
1039 def short_id(self):
1056 def short_id(self):
1040 """
1057 """
1041 Returns shortened version of ``raw_id`` attribute, as string,
1058 Returns shortened version of ``raw_id`` attribute, as string,
1042 identifying this commit, useful for presentation to users.
1059 identifying this commit, useful for presentation to users.
1043 """
1060 """
1044 raise NotImplementedError
1061 raise NotImplementedError
1045
1062
1046 @LazyProperty
1063 @LazyProperty
1047 def idx(self):
1064 def idx(self):
1048 """
1065 """
1049 Returns integer identifying this commit.
1066 Returns integer identifying this commit.
1050 """
1067 """
1051 raise NotImplementedError
1068 raise NotImplementedError
1052
1069
1053 @LazyProperty
1070 @LazyProperty
1054 def committer(self):
1071 def committer(self):
1055 """
1072 """
1056 Returns committer for this commit
1073 Returns committer for this commit
1057 """
1074 """
1058 raise NotImplementedError
1075 raise NotImplementedError
1059
1076
1060 @LazyProperty
1077 @LazyProperty
1061 def committer_name(self):
1078 def committer_name(self):
1062 """
1079 """
1063 Returns committer name for this commit
1080 Returns committer name for this commit
1064 """
1081 """
1065
1082
1066 return author_name(self.committer)
1083 return author_name(self.committer)
1067
1084
1068 @LazyProperty
1085 @LazyProperty
1069 def committer_email(self):
1086 def committer_email(self):
1070 """
1087 """
1071 Returns committer email address for this commit
1088 Returns committer email address for this commit
1072 """
1089 """
1073
1090
1074 return author_email(self.committer)
1091 return author_email(self.committer)
1075
1092
1076 @LazyProperty
1093 @LazyProperty
1077 def author(self):
1094 def author(self):
1078 """
1095 """
1079 Returns author for this commit
1096 Returns author for this commit
1080 """
1097 """
1081
1098
1082 raise NotImplementedError
1099 raise NotImplementedError
1083
1100
1084 @LazyProperty
1101 @LazyProperty
1085 def author_name(self):
1102 def author_name(self):
1086 """
1103 """
1087 Returns author name for this commit
1104 Returns author name for this commit
1088 """
1105 """
1089
1106
1090 return author_name(self.author)
1107 return author_name(self.author)
1091
1108
1092 @LazyProperty
1109 @LazyProperty
1093 def author_email(self):
1110 def author_email(self):
1094 """
1111 """
1095 Returns author email address for this commit
1112 Returns author email address for this commit
1096 """
1113 """
1097
1114
1098 return author_email(self.author)
1115 return author_email(self.author)
1099
1116
1100 def get_file_mode(self, path):
1117 def get_file_mode(self, path: bytes):
1101 """
1118 """
1102 Returns stat mode of the file at `path`.
1119 Returns stat mode of the file at `path`.
1103 """
1120 """
1104 raise NotImplementedError
1121 raise NotImplementedError
1105
1122
1106 def is_link(self, path):
1123 def is_link(self, path):
1107 """
1124 """
1108 Returns ``True`` if given `path` is a symlink
1125 Returns ``True`` if given `path` is a symlink
1109 """
1126 """
1110 raise NotImplementedError
1127 raise NotImplementedError
1111
1128
1112 def is_node_binary(self, path):
1129 def is_node_binary(self, path):
1113 """
1130 """
1114 Returns ``True`` is given path is a binary file
1131 Returns ``True`` is given path is a binary file
1115 """
1132 """
1116 raise NotImplementedError
1133 raise NotImplementedError
1117
1134
1118 def get_file_content(self, path):
1135 def node_md5_hash(self, path):
1136 """
1137 Returns md5 hash of a node data
1138 """
1139 raise NotImplementedError
1140
1141 def get_file_content(self, path) -> bytes:
1119 """
1142 """
1120 Returns content of the file at the given `path`.
1143 Returns content of the file at the given `path`.
1121 """
1144 """
1122 raise NotImplementedError
1145 raise NotImplementedError
1123
1146
1124 def get_file_content_streamed(self, path):
1147 def get_file_content_streamed(self, path):
1125 """
1148 """
1126 returns a streaming response from vcsserver with file content
1149 returns a streaming response from vcsserver with file content
1127 """
1150 """
1128 raise NotImplementedError
1151 raise NotImplementedError
1129
1152
1130 def get_file_size(self, path):
1153 def get_file_size(self, path):
1131 """
1154 """
1132 Returns size of the file at the given `path`.
1155 Returns size of the file at the given `path`.
1133 """
1156 """
1134 raise NotImplementedError
1157 raise NotImplementedError
1135
1158
1136 def get_path_commit(self, path, pre_load=None):
1159 def get_path_commit(self, path, pre_load=None):
1137 """
1160 """
1138 Returns last commit of the file at the given `path`.
1161 Returns last commit of the file at the given `path`.
1139
1162
1140 :param pre_load: Optional. List of commit attributes to load.
1163 :param pre_load: Optional. List of commit attributes to load.
1141 """
1164 """
1142 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1165 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1143 if not commits:
1166 if not commits:
1144 raise RepositoryError(
1167 raise RepositoryError(
1145 'Failed to fetch history for path {}. '
1168 'Failed to fetch history for path {}. '
1146 'Please check if such path exists in your repository'.format(
1169 'Please check if such path exists in your repository'.format(
1147 path))
1170 path))
1148 return commits[0]
1171 return commits[0]
1149
1172
1150 def get_path_history(self, path, limit=None, pre_load=None):
1173 def get_path_history(self, path, limit=None, pre_load=None):
1151 """
1174 """
1152 Returns history of file as reversed list of :class:`BaseCommit`
1175 Returns history of file as reversed list of :class:`BaseCommit`
1153 objects for which file at given `path` has been modified.
1176 objects for which file at given `path` has been modified.
1154
1177
1155 :param limit: Optional. Allows to limit the size of the returned
1178 :param limit: Optional. Allows to limit the size of the returned
1156 history. This is intended as a hint to the underlying backend, so
1179 history. This is intended as a hint to the underlying backend, so
1157 that it can apply optimizations depending on the limit.
1180 that it can apply optimizations depending on the limit.
1158 :param pre_load: Optional. List of commit attributes to load.
1181 :param pre_load: Optional. List of commit attributes to load.
1159 """
1182 """
1160 raise NotImplementedError
1183 raise NotImplementedError
1161
1184
1162 def get_file_annotate(self, path, pre_load=None):
1185 def get_file_annotate(self, path, pre_load=None):
1163 """
1186 """
1164 Returns a generator of four element tuples with
1187 Returns a generator of four element tuples with
1165 lineno, sha, commit lazy loader and line
1188 lineno, sha, commit lazy loader and line
1166
1189
1167 :param pre_load: Optional. List of commit attributes to load.
1190 :param pre_load: Optional. List of commit attributes to load.
1168 """
1191 """
1169 raise NotImplementedError
1192 raise NotImplementedError
1170
1193
1171 def get_nodes(self, path):
1194 def get_nodes(self, path, pre_load=None):
1172 """
1195 """
1173 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1196 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1174 state of commit at the given ``path``.
1197 state of commit at the given ``path``.
1175
1198
1176 :raises ``CommitError``: if node at the given ``path`` is not
1199 :raises ``CommitError``: if node at the given ``path`` is not
1177 instance of ``DirNode``
1200 instance of ``DirNode``
1178 """
1201 """
1179 raise NotImplementedError
1202 raise NotImplementedError
1180
1203
1181 def get_node(self, path):
1204 def get_node(self, path):
1182 """
1205 """
1183 Returns ``Node`` object from the given ``path``.
1206 Returns ``Node`` object from the given ``path``.
1184
1207
1185 :raises ``NodeDoesNotExistError``: if there is no node at the given
1208 :raises ``NodeDoesNotExistError``: if there is no node at the given
1186 ``path``
1209 ``path``
1187 """
1210 """
1188 raise NotImplementedError
1211 raise NotImplementedError
1189
1212
1190 def get_largefile_node(self, path):
1213 def get_largefile_node(self, path):
1191 """
1214 """
1192 Returns the path to largefile from Mercurial/Git-lfs storage.
1215 Returns the path to largefile from Mercurial/Git-lfs storage.
1193 or None if it's not a largefile node
1216 or None if it's not a largefile node
1194 """
1217 """
1195 return None
1218 return None
1196
1219
1197 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1220 def archive_repo(self, archive_name_key, kind='tgz', subrepos=None,
1198 archive_dir_name=None, write_metadata=False, mtime=None,
1221 archive_dir_name=None, write_metadata=False, mtime=None,
1199 archive_at_path='/'):
1222 archive_at_path='/', cache_config=None):
1200 """
1223 """
1201 Creates an archive containing the contents of the repository.
1224 Creates an archive containing the contents of the repository.
1202
1225
1203 :param archive_dest_path: path to the file which to create the archive.
1226 :param archive_name_key: unique key under this archive should be generated
1204 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1227 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1205 :param archive_dir_name: name of root directory in archive.
1228 :param archive_dir_name: name of root directory in archive.
1206 Default is repository name and commit's short_id joined with dash:
1229 Default is repository name and commit's short_id joined with dash:
1207 ``"{repo_name}-{short_id}"``.
1230 ``"{repo_name}-{short_id}"``.
1208 :param write_metadata: write a metadata file into archive.
1231 :param write_metadata: write a metadata file into archive.
1209 :param mtime: custom modification time for archive creation, defaults
1232 :param mtime: custom modification time for archive creation, defaults
1210 to time.time() if not given.
1233 to time.time() if not given.
1211 :param archive_at_path: pack files at this path (default '/')
1234 :param archive_at_path: pack files at this path (default '/')
1235 :param cache_config: config spec to send to vcsserver to configure the backend to store files
1212
1236
1213 :raise VCSError: If prefix has a problem.
1237 :raise VCSError: If prefix has a problem.
1214 """
1238 """
1239 cache_config = cache_config or {}
1215 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1240 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1216 if kind not in allowed_kinds:
1241 if kind not in allowed_kinds:
1217 raise ImproperArchiveTypeError(
1242 raise ImproperArchiveTypeError(
1218 'Archive kind (%s) not supported use one of %s' %
1243 'Archive kind (%s) not supported use one of %s' %
1219 (kind, allowed_kinds))
1244 (kind, allowed_kinds))
1220
1245
1221 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1246 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1222 mtime = mtime is not None or time.mktime(self.date.timetuple())
1247 mtime = mtime is not None or time.mktime(self.date.timetuple())
1223 commit_id = self.raw_id
1248 commit_id = self.raw_id
1224
1249
1225 return self.repository._remote.archive_repo(
1250 return self.repository._remote.archive_repo(
1226 archive_dest_path, kind, mtime, archive_at_path,
1251 archive_name_key, kind, mtime, archive_at_path,
1227 archive_dir_name, commit_id)
1252 archive_dir_name, commit_id, cache_config)
1228
1253
def _validate_archive_prefix(self, archive_dir_name):
    """
    Normalize and validate the directory prefix used inside an archive.

    Returns a default repo-name/short-id based prefix when ``None`` is
    given; otherwise validates the caller-supplied value.

    :raises ValueError: when the prefix is not a ``str``
    :raises VCSError: when the prefix is empty, absolute, or non-ascii
    """
    if archive_dir_name is None:
        # derive a default prefix from the repo name and short commit id
        return self._ARCHIVE_PREFIX_TEMPLATE.format(
            repo_name=safe_str(self.repository.name),
            short_id=self.short_id)
    if not isinstance(archive_dir_name, str):
        raise ValueError(f"archive_dir_name is not str object but: {type(archive_dir_name)}")
    if archive_dir_name.startswith('/'):
        raise VCSError("Prefix cannot start with leading slash")
    if archive_dir_name.strip() == '':
        raise VCSError("Prefix cannot be empty")
    if not archive_dir_name.isascii():
        raise VCSError("Prefix cannot contain non ascii characters")
    return archive_dir_name
1241
1268
@LazyProperty
def root(self):
    """Top-level ``RootNode`` of this commit's file tree."""
    # the empty path addresses the repository root
    return self.get_node('')
1248
1275
def next(self, branch=None):
    """
    Return the commit following this one; with ``branch`` given, the
    next commit that belongs to that named branch.

    :param branch: show commits within the given named branch
    """
    following = range(self.idx + 1, self.repository.count())
    return self._find_next(following, branch)
1258
1285
def prev(self, branch=None):
    """
    Return the commit preceding this one; with ``branch`` given, the
    previous commit that belongs to that named branch.

    :param branch: show commit within the given named branch
    """
    preceding = range(self.idx - 1, -1, -1)
    return self._find_next(preceding, branch)
1268
1295
def _find_next(self, indexes, branch=None):
    """Return the first commit from ``indexes`` (optionally on ``branch``)."""
    # a branch filter only makes sense when this commit is on that branch
    if branch and self.branch != branch:
        raise VCSError('Branch option used on commit not belonging '
                       'to that branch')

    for candidate_idx in indexes:
        candidate = self.repository.get_commit(commit_idx=candidate_idx)
        if branch and branch != candidate.branch:
            continue
        return candidate
    raise CommitDoesNotExistError
1280
1307
def diff(self, ignore_whitespace=True, context=3):
    """Return a `Diff` describing the change introduced by this commit."""
    # diff is computed against the first parent (empty tree when rootless)
    return self.repository.get_diff(
        self.first_parent, self,
        ignore_whitespace=ignore_whitespace,
        context=context)
1291
1318
@LazyProperty
def added(self):
    """List of added ``FileNode`` objects; implemented per backend."""
    raise NotImplementedError

@LazyProperty
def changed(self):
    """List of modified ``FileNode`` objects; implemented per backend."""
    raise NotImplementedError

@LazyProperty
def removed(self):
    """List of removed ``FileNode`` objects; implemented per backend."""
    raise NotImplementedError

@LazyProperty
def size(self):
    """Total size in bytes of all file nodes' contents."""
    return sum(node.size for node in self.get_filenodes_generator())
1319
1346
def walk(self, topurl=''):
    """
    os.walk-like traversal of the commit's tree starting at ``topurl``
    (a path or an already-resolved ``DirNode``). Yields tuples of
    (top_node, dirnodes, filenodes).
    """
    from rhodecode.lib.vcs.nodes import DirNode

    top_node = topurl if isinstance(topurl, DirNode) else self.get_node(topurl)

    # propagate the starting node's pre_load defaults into every
    # directory we descend into, so lazy loading stays consistent
    default_pre_load = None
    has_default_pre_load = isinstance(top_node, DirNode)
    if has_default_pre_load:
        default_pre_load = top_node.default_pre_load

    if not top_node.is_dir():
        return
    yield top_node, top_node.dirs, top_node.files
    for child_dir in top_node.dirs:
        if has_default_pre_load:
            child_dir.default_pre_load = default_pre_load
        yield from self.walk(child_dir)
1333
1374
def get_filenodes_generator(self):
    """Yield every file node reachable from the commit root."""
    for _top, _dirs, files in self.walk():
        yield from files
1341
1382
1342 #
1383 #
1343 # Utilities for sub classes to support consistent behavior
1384 # Utilities for sub classes to support consistent behavior
1344 #
1385 #
1345
1386
def no_node_at_path(self, path):
    """Build (do not raise) the standard missing-node error for ``path``."""
    msg = (f"There is no file nor directory at the given path: "
           f"`{safe_str(path)}` at commit {self.short_id}")
    return NodeDoesNotExistError(msg)
1350
1391
def _fix_path(self, path: str) -> str:
    """
    Strip any trailing slash from ``path``; stored paths never carry one.
    """
    return safe_str(path).rstrip('/')
1357
1398
1358 #
1399 #
1359 # Deprecated API based on changesets
1400 # Deprecated API based on changesets
1360 #
1401 #
1361
1402
@property
def revision(self):
    """Deprecated alias of :attr:`idx`."""
    warnings.warn("Use idx instead", DeprecationWarning)
    return self.idx

@revision.setter
def revision(self, value):
    warnings.warn("Use idx instead", DeprecationWarning)
    self.idx = value

def get_file_changeset(self, path):
    """Deprecated alias of :meth:`get_path_commit`."""
    warnings.warn("Use get_path_commit instead", DeprecationWarning)
    return self.get_path_commit(path)
1375
1416
1376
1417
class BaseChangesetClass(type):
    # metaclass making isinstance(x, BaseChangeset) accept any BaseCommit,
    # so legacy changeset-based callers keep working
    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1381
1422
1382
1423
class BaseChangeset(BaseCommit, metaclass=BaseChangesetClass):
    """Deprecated alias kept for the old changeset-based API."""

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super().__new__(cls, *args, **kwargs)
1391
1430
1392
1431
1393 class BaseInMemoryCommit(object):
1432 class BaseInMemoryCommit(object):
1394 """
1433 """
1395 Represents differences between repository's state (most recent head) and
1434 Represents differences between repository's state (most recent head) and
1396 changes made *in place*.
1435 changes made *in place*.
1397
1436
1398 **Attributes**
1437 **Attributes**
1399
1438
1400 ``repository``
1439 ``repository``
1401 repository object for this in-memory-commit
1440 repository object for this in-memory-commit
1402
1441
1403 ``added``
1442 ``added``
1404 list of ``FileNode`` objects marked as *added*
1443 list of ``FileNode`` objects marked as *added*
1405
1444
1406 ``changed``
1445 ``changed``
1407 list of ``FileNode`` objects marked as *changed*
1446 list of ``FileNode`` objects marked as *changed*
1408
1447
1409 ``removed``
1448 ``removed``
1410 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1449 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1411 *removed*
1450 *removed*
1412
1451
1413 ``parents``
1452 ``parents``
1414 list of :class:`BaseCommit` instances representing parents of
1453 list of :class:`BaseCommit` instances representing parents of
1415 in-memory commit. Should always be 2-element sequence.
1454 in-memory commit. Should always be 2-element sequence.
1416
1455
1417 """
1456 """
1418
1457
def __init__(self, repository):
    """Start an empty staging area bound to ``repository``."""
    self.repository = repository
    # nodes staged for the next commit, grouped by operation
    self.added = []
    self.changed = []
    self.removed = []
    self.parents = []
1425
1464
def add(self, *filenodes):
    """
    Marks given ``FileNode`` objects as *to be committed*.

    :raises ``NodeAlreadyExistsError``: if node with same path exists at
        latest commit
    :raises ``NodeAlreadyAddedError``: if node with same path is already
        marked as *added*
    """
    # validate the whole batch first so nothing is staged on failure
    staged_paths = {n.path for n in self.added}
    for node in filenodes:
        if node.path in staged_paths:
            raise NodeAlreadyAddedError(
                "Such FileNode %s is already marked for addition"
                % node.path)
    self.added.extend(filenodes)
1443
1482
def change(self, *filenodes):
    """
    Marks given ``FileNode`` objects to be *changed* in next commit.

    :raises ``EmptyRepositoryError``: if there are no commits yet
    :raises ``NodeAlreadyExistsError``: if node with same path is already
        marked to be *changed*
    :raises ``NodeAlreadyRemovedError``: if node with same path is already
        marked to be *removed*
    :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
        commit
    :raises ``NodeNotChangedError``: if node hasn't really be changed
    """
    # reject anything that is already staged for removal
    removed_paths = [n.path for n in self.removed]
    for node in filenodes:
        if node.path in removed_paths:
            raise NodeAlreadyRemovedError(
                "Node at %s is already marked as removed" % node.path)
    # a change requires at least one existing commit
    try:
        self.repository.get_commit()
    except EmptyRepositoryError:
        raise EmptyRepositoryError(
            "Nothing to change - try to *add* new nodes rather than "
            "changing them")
    for node in filenodes:
        if node.path in (n.path for n in self.changed):
            raise NodeAlreadyChangedError(
                "Node at '%s' is already marked as changed" % node.path)
        self.changed.append(node)
1472
1511
def remove(self, *filenodes):
    """
    Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
    *removed* in next commit.

    :raises ``NodeAlreadyRemovedError``: if node has been already marked to
        be *removed*
    :raises ``NodeAlreadyChangedError``: if node has been already marked to
        be *changed*
    """
    for node in filenodes:
        if node.path in (n.path for n in self.removed):
            raise NodeAlreadyRemovedError(
                "Node is already marked to for removal at %s" % node.path)
        if node.path in (n.path for n in self.changed):
            raise NodeAlreadyChangedError(
                "Node is already marked to be changed at %s" % node.path)
        # only mark here -- the actual removal is performed by commit()
        self.removed.append(node)
1493
1532
def reset(self):
    """Drop all staged nodes and parents, returning to a clean slate."""
    self.added, self.changed = [], []
    self.removed, self.parents = [], []
1503
1542
def get_ipaths(self):
    """
    Generator of paths from nodes marked as added, changed or removed.
    """
    for staged in (self.added, self.changed, self.removed):
        yield from (node.path for node in staged)

def get_paths(self):
    """
    List of paths from nodes marked as added, changed or removed.
    """
    return [path for path in self.get_ipaths()]
1517
1556
def check_integrity(self, parents=None):
    """
    Checks in-memory commit's integrity. Also, sets parents if not
    already set.

    :raises CommitError: if any error occurs (i.e.
        ``NodeDoesNotExistError``).
    """
    if not self.parents:
        parents = parents or []
        if len(parents) == 0:
            try:
                parents = [self.repository.get_commit(), None]
            except EmptyRepositoryError:
                parents = [None, None]
        elif len(parents) == 1:
            parents += [None]
        self.parents = parents

    # Local parents, only if not None
    parents = [p for p in self.parents if p]

    # Check nodes marked as added: they must not already exist in any parent
    for p in parents:
        for node in self.added:
            try:
                p.get_node(node.path)
            except NodeDoesNotExistError:
                pass
            else:
                raise NodeAlreadyExistsError(
                    "Node `%s` already exists at %s" % (node.path, p))

    # Check nodes marked as changed
    missing = set(self.changed)
    not_changed = set(self.changed)
    if self.changed and not parents:
        raise NodeDoesNotExistError(str(self.changed[0].path))
    for p in parents:
        for node in self.changed:
            try:
                old = p.get_node(node.path)
                # use discard, not remove: a node can be present in several
                # parents and must not raise KeyError the second time around
                missing.discard(node)
                # if content actually changed, drop node from not_changed
                if old.content != node.content:
                    not_changed.discard(node)
            except NodeDoesNotExistError:
                pass
    if self.changed and missing:
        # report an actually-missing node, not the stale loop variable
        raise NodeDoesNotExistError(
            "Node `%s` marked as modified but missing in parents: %s"
            % (missing.pop().path, parents))

    if self.changed and not_changed:
        raise NodeNotChangedError(
            "Node `%s` wasn't actually changed (parents: %s)"
            % (not_changed.pop().path, parents))

    # Check nodes marked as removed: they must exist in at least one parent
    if self.removed and not parents:
        raise NodeDoesNotExistError(
            "Cannot remove node at %s as there "
            "were no parents specified" % self.removed[0].path)
    really_removed = set()
    for p in parents:
        for node in self.removed:
            try:
                p.get_node(node.path)
                really_removed.add(node)
            except CommitError:
                pass
    not_removed = set(self.removed) - really_removed
    if not_removed:
        # TODO: johbo: This code branch does not seem to be covered
        raise NodeDoesNotExistError(
            "Cannot remove node at %s from "
            "following parents: %s" % (not_removed, parents))
1595
1634
def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
    """
    Create a new commit from the staged changes (purely in-memory; the
    working directory is never consulted) and return the resulting
    :class:`BaseCommit`. The repository's ``commits`` attribute is
    updated as a side effect.

    .. note::

        Backend implementations overriding this method should call
        ``self.check_integrity(parents)`` first.

    :param message: message of the commit
    :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
    :param parents: single parent or sequence of parents from which commit
        would be derived
    :param date: ``datetime.datetime`` instance. Defaults to
        ``datetime.datetime.now()``.
    :param branch: branch name, as string. If none given, default backend's
        branch would be used.

    :raises ``CommitError``: if any error occurs while committing
    """
    raise NotImplementedError
1619
1658
1620
1659
class BaseInMemoryChangesetClass(type):
    # metaclass making isinstance(x, BaseInMemoryChangeset) accept any
    # BaseInMemoryCommit, keeping the deprecated alias interchangeable
    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1625
1664
1626
1665
class BaseInMemoryChangeset(BaseInMemoryCommit, metaclass=BaseInMemoryChangesetClass):
    """Deprecated alias kept for the old changeset-based API."""

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
        return super().__new__(cls, *args, **kwargs)
1635
1672
1636
1673
class EmptyCommit(BaseCommit):
    """
    A dummy "no commit yet" commit. A custom commit hash may be supplied
    at creation time.
    """

    def __init__(
            self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        # epoch timestamp as the neutral default date
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """Raw string id of this commit, useful for web representation."""
        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # without a backend alias the default branch name is unknowable
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_path_commit(self, path, pre_load=None):
        # an empty commit is its own last-change commit for any path
        return self

    def get_file_content(self, path) -> bytes:
        return b''

    def get_file_content_streamed(self, path):
        yield self.get_file_content(path)

    def get_file_size(self, path):
        return 0
1690
1727
1691
1728
class EmptyChangesetClass(type):
    # metaclass making isinstance(x, EmptyChangeset) accept any EmptyCommit
    def __instancecheck__(self, instance):
        return isinstance(instance, EmptyCommit)
1696
1733
1697
1734
class EmptyChangeset(EmptyCommit, metaclass=EmptyChangesetClass):
    """Deprecated alias of :class:`EmptyCommit`."""

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # was super(EmptyCommit, cls) -- that started the MRO search one
        # class too high; the zero-arg form matches the other deprecated
        # aliases and resolves to the same __new__ today
        return super().__new__(cls, *args, **kwargs)

    def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super().__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        """Deprecated alias of :attr:`idx`."""
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1726
1761
1727
1762
class EmptyRepository(BaseRepository):
    """Null-object repository satisfying the interface with empty data."""

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        pass

    def get_diff(self, *args, **kwargs):
        # an empty repository always produces an empty diff
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff(b'')
1735
1770
1736
1771
class CollectionGenerator(object):
    """
    Lazy collection of commits of ``repo`` identified by ``commit_ids``.

    Commit objects are only instantiated on iteration/indexing via
    ``repo.get_commit``.

    :param collection_size: optional explicit length; overrides
        ``len(commit_ids)`` when given.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None,
                 translate_tag=None):
        self.repo = repo
        self.commit_ids = commit_ids
        self.collection_size = collection_size
        self.pre_load = pre_load
        self.translate_tag = translate_tag

    def __len__(self):
        if self.collection_size is not None:
            return self.collection_size
        return len(self.commit_ids)

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __getitem__(self, key):
        """Return either a single element by index, or a sliced collection."""
        # delegate to the underlying sequence; unlike the former
        # [key.start:key.stop] form this also honors the slice *step*
        commit_ids = self.commit_ids[key]

        return self.__class__(
            self.repo, commit_ids, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (self.__len__())
1776
1815
1777
1816
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        # mapping: section name -> {option name: value}
        self._values = {}

    def copy(self):
        """Return a copy with freshly copied per-section dicts."""
        duplicate = Config()
        for section_name, section_data in self._values.items():
            duplicate._values[section_name] = section_data.copy()
        return duplicate

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Return ``(option, value)`` pairs of *section* (empty if absent)."""
        return self._values.get(section, {}).items()

    def get(self, section, option):
        """Return the value for *section*/*option*, or ``None`` if missing."""
        section_data = self._values.get(section, {})
        return section_data.get(option)

    def set(self, section, option, value):
        """Store *value* under *section*/*option*, creating the section."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop all options of *section*; the section entry itself remains."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        serialized = []
        for section_name, section_data in self._values.items():
            for option_name, option_value in section_data.items():
                serialized.append(
                    (safe_str(section_name), safe_str(option_name), safe_str(option_value)))
        return serialized
1823
1862
1824
1863
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    # BUGFIX: the annotation previously claimed ``bytes``, but the value is
    # a compiled bytes regex pattern, not a bytes object.
    _header_re: re.Pattern = re.compile(br"")

    def __init__(self, raw_diff: bytes):
        """
        :param raw_diff: full diff output as bytes; anything else raises.
        """
        if not isinstance(raw_diff, bytes):
            raise Exception(f'raw_diff must be bytes - got {type(raw_diff)}')

        # keep a zero-copy view on the raw diff data
        self.raw = memoryview(raw_diff)

    def get_header_re(self):
        """Return the compiled per-backend chunk header regex."""
        return self._header_re

    def chunks(self):
        """
        split the diff in chunks of separate --git a/file b/file chunks
        to make diffs consistent we must prepend with \n, and make sure
        we can detect last chunk as this was also has special rule
        """

        diff_parts = (b'\n' + bytes(self.raw)).split(b'\ndiff --git')

        chunks = diff_parts[1:]
        total_chunks = len(chunks)

        def diff_iter(_chunks):
            # the last chunk needs special handling (no re-appended newline),
            # hence the 1-based counter compared against total_chunks
            for cur_chunk, chunk in enumerate(_chunks, start=1):
                yield DiffChunk(chunk, self, cur_chunk == total_chunks)
        return diff_iter(chunks)
1857
1899
1858
1900
class DiffChunk(object):
    """One ``diff --git a/file b/file`` section of a :class:`Diff`."""

    def __init__(self, chunk: bytes, diff_obj: Diff, is_last_chunk: bool):
        self.diff_obj = diff_obj

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not is_last_chunk:
            chunk += b'\n'

        header_re = self.diff_obj.get_header_re()
        header_match = header_re.match(chunk)
        self.header = header_match.groupdict()
        self.diff = chunk[header_match.end():]
        self.raw = chunk

    @property
    def header_as_str(self):
        # returns None implicitly when there is no header
        if self.header:
            decoded = {}
            for key, val in self.header.items():
                decoded[safe_str(key)] = safe_str(val) if isinstance(val, bytes) else val
            return decoded

    def __repr__(self):
        return f'DiffChunk({self.header_as_str})'
1927
1874
1928
class BasePathPermissionChecker(object):
    """Decides per-path access from include/exclude glob patterns."""

    @staticmethod
    def create_from_patterns(includes, excludes):
        """Pick the cheapest checker implementation for the given patterns."""
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        if excludes and '*' in excludes:
            return NonePathPermissionChecker()
        return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        raise NotImplementedError()

    def has_access(self, path):
        raise NotImplementedError()


class AllPathPermissionChecker(BasePathPermissionChecker):
    """Grants access to every path."""

    @property
    def has_full_access(self):
        return True

    def has_access(self, path):
        return True


class NonePathPermissionChecker(BasePathPermissionChecker):
    """Denies access to every path."""

    @property
    def has_full_access(self):
        return False

    def has_access(self, path):
        return False


class PatternPathPermissionChecker(BasePathPermissionChecker):
    """Matches paths against fnmatch-style include/exclude patterns."""

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        self.includes_re = [
            re.compile(fnmatch.translate(pattern)) for pattern in includes
        ] if includes else []
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern)) for pattern in excludes
        ] if excludes else []

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        # excludes win over includes; no include match at all means no access
        for pattern in self.excludes_re:
            if pattern.match(path):
                return False
        return any(pattern.match(path) for pattern in self.includes_re)
@@ -1,494 +1,491 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT commit module
22 GIT commit module
23 """
23 """
24
24
25 import re
26 import io
27 import stat
25 import stat
28 import configparser
26 import configparser
29 from itertools import chain
27 from itertools import chain
30
28
31 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
32
30
33 from rhodecode.lib.datelib import utcdate_fromtimestamp
31 from rhodecode.lib.datelib import utcdate_fromtimestamp
34 from rhodecode.lib.utils import safe_unicode, safe_str
32 from rhodecode.lib.str_utils import safe_bytes, safe_str
35 from rhodecode.lib.utils2 import safe_int
36 from rhodecode.lib.vcs.conf import settings
37 from rhodecode.lib.vcs.backends import base
33 from rhodecode.lib.vcs.backends import base
38 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
34 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
39 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
40 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
36 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
41 ChangedFileNodesGenerator, AddedFileNodesGenerator,
37 ChangedFileNodesGenerator, AddedFileNodesGenerator,
42 RemovedFileNodesGenerator, LargeFileNode)
38 RemovedFileNodesGenerator, LargeFileNode)
43
39
44
40
45 class GitCommit(base.BaseCommit):
41 class GitCommit(base.BaseCommit):
46 """
42 """
47 Represents state of the repository at single commit id.
43 Represents state of the repository at single commit id.
48 """
44 """
49
45
50 _filter_pre_load = [
46 _filter_pre_load = [
51 # done through a more complex tree walk on parents
47 # done through a more complex tree walk on parents
52 "affected_files",
48 "affected_files",
53 # done through subprocess not remote call
49 # done through subprocess not remote call
54 "children",
50 "children",
55 # done through a more complex tree walk on parents
51 # done through a more complex tree walk on parents
56 "status",
52 "status",
57 # mercurial specific property not supported here
53 # mercurial specific property not supported here
58 "_file_paths",
54 "_file_paths",
59 # mercurial specific property not supported here
55 # mercurial specific property not supported here
60 'obsolete',
56 'obsolete',
61 # mercurial specific property not supported here
57 # mercurial specific property not supported here
62 'phase',
58 'phase',
63 # mercurial specific property not supported here
59 # mercurial specific property not supported here
64 'hidden'
60 'hidden'
65 ]
61 ]
66
62
67 def __init__(self, repository, raw_id, idx, pre_load=None):
63 def __init__(self, repository, raw_id, idx, pre_load=None):
68 self.repository = repository
64 self.repository = repository
69 self._remote = repository._remote
65 self._remote = repository._remote
70 # TODO: johbo: Tweak of raw_id should not be necessary
66 # TODO: johbo: Tweak of raw_id should not be necessary
71 self.raw_id = safe_str(raw_id)
67 self.raw_id = safe_str(raw_id)
72 self.idx = idx
68 self.idx = idx
73
69
74 self._set_bulk_properties(pre_load)
70 self._set_bulk_properties(pre_load)
75
71
76 # caches
72 # caches
77 self._stat_modes = {} # stat info for paths
73 self._stat_modes = {} # stat info for paths
78 self._paths = {} # path processed with parse_tree
74 self._paths = {} # path processed with parse_tree
79 self.nodes = {}
75 self.nodes = {}
80 self._submodules = None
76 self._submodules = None
81
77
82 def _set_bulk_properties(self, pre_load):
78 def _set_bulk_properties(self, pre_load):
83
79
84 if not pre_load:
80 if not pre_load:
85 return
81 return
86 pre_load = [entry for entry in pre_load
82 pre_load = [entry for entry in pre_load
87 if entry not in self._filter_pre_load]
83 if entry not in self._filter_pre_load]
88 if not pre_load:
84 if not pre_load:
89 return
85 return
90
86
91 result = self._remote.bulk_request(self.raw_id, pre_load)
87 result = self._remote.bulk_request(self.raw_id, pre_load)
92 for attr, value in result.items():
88 for attr, value in result.items():
93 if attr in ["author", "message"]:
89 if attr in ["author", "message"]:
94 if value:
90 if value:
95 value = safe_unicode(value)
91 value = safe_str(value)
96 elif attr == "date":
92 elif attr == "date":
97 value = utcdate_fromtimestamp(*value)
93 value = utcdate_fromtimestamp(*value)
98 elif attr == "parents":
94 elif attr == "parents":
99 value = self._make_commits(value)
95 value = self._make_commits(value)
100 elif attr == "branch":
96 elif attr == "branch":
101 value = self._set_branch(value)
97 value = self._set_branch(value)
102 self.__dict__[attr] = value
98 self.__dict__[attr] = value
103
99
104 @LazyProperty
100 @LazyProperty
105 def _commit(self):
101 def _commit(self):
106 return self._remote[self.raw_id]
102 return self._remote[self.raw_id]
107
103
108 @LazyProperty
104 @LazyProperty
109 def _tree_id(self):
105 def _tree_id(self):
110 return self._remote[self._commit['tree']]['id']
106 return self._remote[self._commit['tree']]['id']
111
107
112 @LazyProperty
108 @LazyProperty
113 def id(self):
109 def id(self):
114 return self.raw_id
110 return self.raw_id
115
111
116 @LazyProperty
112 @LazyProperty
117 def short_id(self):
113 def short_id(self):
118 return self.raw_id[:12]
114 return self.raw_id[:12]
119
115
120 @LazyProperty
116 @LazyProperty
121 def message(self):
117 def message(self):
122 return safe_unicode(self._remote.message(self.id))
118 return safe_str(self._remote.message(self.id))
123
119
124 @LazyProperty
120 @LazyProperty
125 def committer(self):
121 def committer(self):
126 return safe_unicode(self._remote.author(self.id))
122 return safe_str(self._remote.author(self.id))
127
123
128 @LazyProperty
124 @LazyProperty
129 def author(self):
125 def author(self):
130 return safe_unicode(self._remote.author(self.id))
126 return safe_str(self._remote.author(self.id))
131
127
132 @LazyProperty
128 @LazyProperty
133 def date(self):
129 def date(self):
134 unix_ts, tz = self._remote.date(self.raw_id)
130 unix_ts, tz = self._remote.date(self.raw_id)
135 return utcdate_fromtimestamp(unix_ts, tz)
131 return utcdate_fromtimestamp(unix_ts, tz)
136
132
137 @LazyProperty
133 @LazyProperty
138 def status(self):
134 def status(self):
139 """
135 """
140 Returns modified, added, removed, deleted files for current commit
136 Returns modified, added, removed, deleted files for current commit
141 """
137 """
142 return self.changed, self.added, self.removed
138 return self.changed, self.added, self.removed
143
139
144 @LazyProperty
140 @LazyProperty
145 def tags(self):
141 def tags(self):
146 tags = [safe_unicode(name) for name,
142 tags = [safe_str(name) for name,
147 commit_id in self.repository.tags.items()
143 commit_id in self.repository.tags.items()
148 if commit_id == self.raw_id]
144 if commit_id == self.raw_id]
149 return tags
145 return tags
150
146
151 @LazyProperty
147 @LazyProperty
152 def commit_branches(self):
148 def commit_branches(self):
153 branches = []
149 branches = []
154 for name, commit_id in self.repository.branches.items():
150 for name, commit_id in self.repository.branches.items():
155 if commit_id == self.raw_id:
151 if commit_id == self.raw_id:
156 branches.append(name)
152 branches.append(name)
157 return branches
153 return branches
158
154
159 def _set_branch(self, branches):
155 def _set_branch(self, branches):
160 if branches:
156 if branches:
161 # actually commit can have multiple branches in git
157 # actually commit can have multiple branches in git
162 return safe_unicode(branches[0])
158 return safe_str(branches[0])
163
159
164 @LazyProperty
160 @LazyProperty
165 def branch(self):
161 def branch(self):
166 branches = self._remote.branch(self.raw_id)
162 branches = self._remote.branch(self.raw_id)
167 return self._set_branch(branches)
163 return self._set_branch(branches)
168
164
169 def _get_tree_id_for_path(self, path):
165 def _get_tree_id_for_path(self, path):
166
170 path = safe_str(path)
167 path = safe_str(path)
171 if path in self._paths:
168 if path in self._paths:
172 return self._paths[path]
169 return self._paths[path]
173
170
174 tree_id = self._tree_id
171 tree_id = self._tree_id
175
172
176 path = path.strip('/')
173 path = path.strip('/')
177 if path == '':
174 if path == '':
178 data = [tree_id, "tree"]
175 data = [tree_id, "tree"]
179 self._paths[''] = data
176 self._paths[''] = data
180 return data
177 return data
181
178
182 tree_id, tree_type, tree_mode = \
179 tree_id, tree_type, tree_mode = \
183 self._remote.tree_and_type_for_path(self.raw_id, path)
180 self._remote.tree_and_type_for_path(self.raw_id, path)
184 if tree_id is None:
181 if tree_id is None:
185 raise self.no_node_at_path(path)
182 raise self.no_node_at_path(path)
186
183
187 self._paths[path] = [tree_id, tree_type]
184 self._paths[path] = [tree_id, tree_type]
188 self._stat_modes[path] = tree_mode
185 self._stat_modes[path] = tree_mode
189
186
190 if path not in self._paths:
187 if path not in self._paths:
191 raise self.no_node_at_path(path)
188 raise self.no_node_at_path(path)
192
189
193 return self._paths[path]
190 return self._paths[path]
194
191
195 def _get_kind(self, path):
192 def _get_kind(self, path):
196 tree_id, type_ = self._get_tree_id_for_path(path)
193 tree_id, type_ = self._get_tree_id_for_path(path)
197 if type_ == 'blob':
194 if type_ == 'blob':
198 return NodeKind.FILE
195 return NodeKind.FILE
199 elif type_ == 'tree':
196 elif type_ == 'tree':
200 return NodeKind.DIR
197 return NodeKind.DIR
201 elif type_ == 'link':
198 elif type_ == 'link':
202 return NodeKind.SUBMODULE
199 return NodeKind.SUBMODULE
203 return None
200 return None
204
201
205 def _get_filectx(self, path):
202 def _assert_is_path(self, path):
206 path = self._fix_path(path)
203 path = self._fix_path(path)
207 if self._get_kind(path) != NodeKind.FILE:
204 if self._get_kind(path) != NodeKind.FILE:
208 raise CommitError(
205 raise CommitError(f"File does not exist for commit {self.raw_id} at '{path}'")
209 "File does not exist for commit %s at '%s'" % (self.raw_id, path))
210 return path
206 return path
211
207
212 def _get_file_nodes(self):
208 def _get_file_nodes(self):
213 return chain(*(t[2] for t in self.walk()))
209 return chain(*(t[2] for t in self.walk()))
214
210
215 @LazyProperty
211 @LazyProperty
216 def parents(self):
212 def parents(self):
217 """
213 """
218 Returns list of parent commits.
214 Returns list of parent commits.
219 """
215 """
220 parent_ids = self._remote.parents(self.id)
216 parent_ids = self._remote.parents(self.id)
221 return self._make_commits(parent_ids)
217 return self._make_commits(parent_ids)
222
218
223 @LazyProperty
219 @LazyProperty
224 def children(self):
220 def children(self):
225 """
221 """
226 Returns list of child commits.
222 Returns list of child commits.
227 """
223 """
228
224
229 children = self._remote.children(self.raw_id)
225 children = self._remote.children(self.raw_id)
230 return self._make_commits(children)
226 return self._make_commits(children)
231
227
232 def _make_commits(self, commit_ids):
228 def _make_commits(self, commit_ids):
233 def commit_maker(_commit_id):
229 def commit_maker(_commit_id):
234 return self.repository.get_commit(commit_id=commit_id)
230 return self.repository.get_commit(commit_id=_commit_id)
235
231
236 return [commit_maker(commit_id) for commit_id in commit_ids]
232 return [commit_maker(commit_id) for commit_id in commit_ids]
237
233
238 def get_file_mode(self, path):
234 def get_file_mode(self, path: bytes):
239 """
235 """
240 Returns stat mode of the file at the given `path`.
236 Returns stat mode of the file at the given `path`.
241 """
237 """
242 path = safe_str(path)
238 path = self._assert_is_path(path)
239
243 # ensure path is traversed
240 # ensure path is traversed
244 self._get_tree_id_for_path(path)
241 self._get_tree_id_for_path(path)
242
245 return self._stat_modes[path]
243 return self._stat_modes[path]
246
244
247 def is_link(self, path):
245 def is_link(self, path):
248 return stat.S_ISLNK(self.get_file_mode(path))
246 return stat.S_ISLNK(self.get_file_mode(path))
249
247
250 def is_node_binary(self, path):
248 def is_node_binary(self, path):
251 tree_id, _ = self._get_tree_id_for_path(path)
249 tree_id, _ = self._get_tree_id_for_path(path)
252 return self._remote.is_binary(tree_id)
250 return self._remote.is_binary(tree_id)
253
251
252 def node_md5_hash(self, path):
253 path = self._assert_is_path(path)
254 return self._remote.md5_hash(self.raw_id, path)
255
254 def get_file_content(self, path):
256 def get_file_content(self, path):
255 """
257 """
256 Returns content of the file at given `path`.
258 Returns content of the file at given `path`.
257 """
259 """
258 tree_id, _ = self._get_tree_id_for_path(path)
260 tree_id, _ = self._get_tree_id_for_path(path)
259 return self._remote.blob_as_pretty_string(tree_id)
261 return self._remote.blob_as_pretty_string(tree_id)
260
262
261 def get_file_content_streamed(self, path):
263 def get_file_content_streamed(self, path):
262 tree_id, _ = self._get_tree_id_for_path(path)
264 tree_id, _ = self._get_tree_id_for_path(path)
263 stream_method = getattr(self._remote, 'stream:blob_as_pretty_string')
265 stream_method = getattr(self._remote, 'stream:blob_as_pretty_string')
264 return stream_method(tree_id)
266 return stream_method(tree_id)
265
267
266 def get_file_size(self, path):
268 def get_file_size(self, path):
267 """
269 """
268 Returns size of the file at given `path`.
270 Returns size of the file at given `path`.
269 """
271 """
270 tree_id, _ = self._get_tree_id_for_path(path)
272 tree_id, _ = self._get_tree_id_for_path(path)
271 return self._remote.blob_raw_length(tree_id)
273 return self._remote.blob_raw_length(tree_id)
272
274
273 def get_path_history(self, path, limit=None, pre_load=None):
275 def get_path_history(self, path, limit=None, pre_load=None):
274 """
276 """
275 Returns history of file as reversed list of `GitCommit` objects for
277 Returns history of file as reversed list of `GitCommit` objects for
276 which file at given `path` has been modified.
278 which file at given `path` has been modified.
277 """
279 """
278
280
279 path = self._get_filectx(path)
281 path = self._assert_is_path(path)
280 hist = self._remote.node_history(self.raw_id, path, limit)
282 hist = self._remote.node_history(self.raw_id, path, limit)
281 return [
283 return [
282 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
284 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
283 for commit_id in hist]
285 for commit_id in hist]
284
286
285 def get_file_annotate(self, path, pre_load=None):
287 def get_file_annotate(self, path, pre_load=None):
286 """
288 """
287 Returns a generator of four element tuples with
289 Returns a generator of four element tuples with
288 lineno, commit_id, commit lazy loader and line
290 lineno, commit_id, commit lazy loader and line
289 """
291 """
290
292
291 result = self._remote.node_annotate(self.raw_id, path)
293 result = self._remote.node_annotate(self.raw_id, path)
292
294
293 for ln_no, commit_id, content in result:
295 for ln_no, commit_id, content in result:
294 yield (
296 yield (
295 ln_no, commit_id,
297 ln_no, commit_id,
296 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
298 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
297 content)
299 content)
298
300
299 def get_nodes(self, path):
301 def get_nodes(self, path, pre_load=None):
300
302
301 if self._get_kind(path) != NodeKind.DIR:
303 if self._get_kind(path) != NodeKind.DIR:
302 raise CommitError(
304 raise CommitError(
303 "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
305 f"Directory does not exist for commit {self.raw_id} at '{path}'")
304 path = self._fix_path(path)
306 path = self._fix_path(path)
305
307
306 tree_id, _ = self._get_tree_id_for_path(path)
308 tree_id, _ = self._get_tree_id_for_path(path)
307
309
308 dirnodes = []
310 dirnodes = []
309 filenodes = []
311 filenodes = []
310
312
311 # extracted tree ID gives us our files...
313 # extracted tree ID gives us our files...
312 bytes_path = safe_str(path) # libgit operates on bytes
314 bytes_path = safe_str(path) # libgit operates on bytes
313 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
315 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
314 if type_ == 'link':
316 if type_ == 'link':
315 url = self._get_submodule_url('/'.join((bytes_path, name)))
317 url = self._get_submodule_url('/'.join((bytes_path, name)))
316 dirnodes.append(SubModuleNode(
318 dirnodes.append(SubModuleNode(
317 name, url=url, commit=id_, alias=self.repository.alias))
319 name, url=url, commit=id_, alias=self.repository.alias))
318 continue
320 continue
319
321
320 if bytes_path != '':
322 if bytes_path != '':
321 obj_path = '/'.join((bytes_path, name))
323 obj_path = '/'.join((bytes_path, name))
322 else:
324 else:
323 obj_path = name
325 obj_path = name
324 if obj_path not in self._stat_modes:
326 if obj_path not in self._stat_modes:
325 self._stat_modes[obj_path] = stat_
327 self._stat_modes[obj_path] = stat_
326
328
327 if type_ == 'tree':
329 if type_ == 'tree':
328 dirnodes.append(DirNode(obj_path, commit=self))
330 dirnodes.append(DirNode(safe_bytes(obj_path), commit=self))
329 elif type_ == 'blob':
331 elif type_ == 'blob':
330 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
332 filenodes.append(FileNode(safe_bytes(obj_path), commit=self, mode=stat_, pre_load=pre_load))
331 else:
333 else:
332 raise CommitError(
334 raise CommitError(f"Requested object should be Tree or Blob, is {type_}")
333 "Requested object should be Tree or Blob, is %s", type_)
334
335
335 nodes = dirnodes + filenodes
336 nodes = dirnodes + filenodes
336 for node in nodes:
337 for node in nodes:
337 if node.path not in self.nodes:
338 if node.path not in self.nodes:
338 self.nodes[node.path] = node
339 self.nodes[node.path] = node
339 nodes.sort()
340 nodes.sort()
340 return nodes
341 return nodes
341
342
342 def get_node(self, path, pre_load=None):
343 def get_node(self, path, pre_load=None):
343 path = self._fix_path(path)
344 path = self._fix_path(path)
344 if path not in self.nodes:
345 if path not in self.nodes:
345 try:
346 try:
346 tree_id, type_ = self._get_tree_id_for_path(path)
347 tree_id, type_ = self._get_tree_id_for_path(path)
347 except CommitError:
348 except CommitError:
348 raise NodeDoesNotExistError(
349 raise NodeDoesNotExistError(
349 "Cannot find one of parents' directories for a given "
350 f"Cannot find one of parents' directories for a given "
350 "path: %s" % path)
351 f"path: {path}")
351
352
352 if type_ in ['link', 'commit']:
353 if type_ in ['link', 'commit']:
353 url = self._get_submodule_url(path)
354 url = self._get_submodule_url(path)
354 node = SubModuleNode(path, url=url, commit=tree_id,
355 node = SubModuleNode(path, url=url, commit=tree_id,
355 alias=self.repository.alias)
356 alias=self.repository.alias)
356 elif type_ == 'tree':
357 elif type_ == 'tree':
357 if path == '':
358 if path == '':
358 node = RootNode(commit=self)
359 node = RootNode(commit=self)
359 else:
360 else:
360 node = DirNode(path, commit=self)
361 node = DirNode(safe_bytes(path), commit=self)
361 elif type_ == 'blob':
362 elif type_ == 'blob':
362 node = FileNode(path, commit=self, pre_load=pre_load)
363 node = FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
363 self._stat_modes[path] = node.mode
364 self._stat_modes[path] = node.mode
364 else:
365 else:
365 raise self.no_node_at_path(path)
366 raise self.no_node_at_path(path)
366
367
367 # cache node
368 # cache node
368 self.nodes[path] = node
369 self.nodes[path] = node
369
370
370 return self.nodes[path]
371 return self.nodes[path]
371
372
372 def get_largefile_node(self, path):
373 def get_largefile_node(self, path):
373 tree_id, _ = self._get_tree_id_for_path(path)
374 tree_id, _ = self._get_tree_id_for_path(path)
374 pointer_spec = self._remote.is_large_file(tree_id)
375 pointer_spec = self._remote.is_large_file(tree_id)
375
376
376 if pointer_spec:
377 if pointer_spec:
377 # content of that file regular FileNode is the hash of largefile
378 # content of that file regular FileNode is the hash of largefile
378 file_id = pointer_spec.get('oid_hash')
379 file_id = pointer_spec.get('oid_hash')
379 if self._remote.in_largefiles_store(file_id):
380 if self._remote.in_largefiles_store(file_id):
380 lf_path = self._remote.store_path(file_id)
381 lf_path = self._remote.store_path(file_id)
381 return LargeFileNode(lf_path, commit=self, org_path=path)
382 return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)
382
383
383 @LazyProperty
384 @LazyProperty
384 def affected_files(self):
385 def affected_files(self):
385 """
386 """
386 Gets a fast accessible file changes for given commit
387 Gets a fast accessible file changes for given commit
387 """
388 """
388 added, modified, deleted = self._changes_cache
389 added, modified, deleted = self._changes_cache
389 return list(added.union(modified).union(deleted))
390 return list(added.union(modified).union(deleted))
390
391
391 @LazyProperty
392 @LazyProperty
392 def _changes_cache(self):
393 def _changes_cache(self):
393 added = set()
394 added = set()
394 modified = set()
395 modified = set()
395 deleted = set()
396 deleted = set()
396 _r = self._remote
397
397
398 parents = self.parents
398 parents = self.parents
399 if not self.parents:
399 if not self.parents:
400 parents = [base.EmptyCommit()]
400 parents = [base.EmptyCommit()]
401 for parent in parents:
401 for parent in parents:
402 if isinstance(parent, base.EmptyCommit):
402 if isinstance(parent, base.EmptyCommit):
403 oid = None
403 oid = None
404 else:
404 else:
405 oid = parent.raw_id
405 oid = parent.raw_id
406 changes = _r.tree_changes(oid, self.raw_id)
406 _added, _modified, _deleted = self._remote.tree_changes(oid, self.raw_id)
407 for (oldpath, newpath), (_, _), (_, _) in changes:
407 added = added | set(_added)
408 if newpath and oldpath:
408 modified = modified | set(_modified)
409 modified.add(newpath)
409 deleted = deleted | set(_deleted)
410 elif newpath and not oldpath:
410
411 added.add(newpath)
412 elif not newpath and oldpath:
413 deleted.add(oldpath)
414 return added, modified, deleted
411 return added, modified, deleted
415
412
416 def _get_paths_for_status(self, status):
413 def _get_paths_for_status(self, status):
417 """
414 """
418 Returns sorted list of paths for given ``status``.
415 Returns sorted list of paths for given ``status``.
419
416
420 :param status: one of: *added*, *modified* or *deleted*
417 :param status: one of: *added*, *modified* or *deleted*
421 """
418 """
422 added, modified, deleted = self._changes_cache
419 added, modified, deleted = self._changes_cache
423 return sorted({
420 return sorted({
424 'added': list(added),
421 'added': list(added),
425 'modified': list(modified),
422 'modified': list(modified),
426 'deleted': list(deleted)}[status]
423 'deleted': list(deleted)}[status]
427 )
424 )
428
425
429 @LazyProperty
426 @LazyProperty
430 def added(self):
427 def added(self):
431 """
428 """
432 Returns list of added ``FileNode`` objects.
429 Returns list of added ``FileNode`` objects.
433 """
430 """
434 if not self.parents:
431 if not self.parents:
435 return list(self._get_file_nodes())
432 return list(self._get_file_nodes())
436 return AddedFileNodesGenerator(self.added_paths, self)
433 return AddedFileNodesGenerator(self.added_paths, self)
437
434
438 @LazyProperty
435 @LazyProperty
439 def added_paths(self):
436 def added_paths(self):
440 return [n for n in self._get_paths_for_status('added')]
437 return [n for n in self._get_paths_for_status('added')]
441
438
442 @LazyProperty
439 @LazyProperty
443 def changed(self):
440 def changed(self):
444 """
441 """
445 Returns list of modified ``FileNode`` objects.
442 Returns list of modified ``FileNode`` objects.
446 """
443 """
447 if not self.parents:
444 if not self.parents:
448 return []
445 return []
449 return ChangedFileNodesGenerator(self.changed_paths, self)
446 return ChangedFileNodesGenerator(self.changed_paths, self)
450
447
451 @LazyProperty
448 @LazyProperty
452 def changed_paths(self):
449 def changed_paths(self):
453 return [n for n in self._get_paths_for_status('modified')]
450 return [n for n in self._get_paths_for_status('modified')]
454
451
455 @LazyProperty
452 @LazyProperty
456 def removed(self):
453 def removed(self):
457 """
454 """
458 Returns list of removed ``FileNode`` objects.
455 Returns list of removed ``FileNode`` objects.
459 """
456 """
460 if not self.parents:
457 if not self.parents:
461 return []
458 return []
462 return RemovedFileNodesGenerator(self.removed_paths, self)
459 return RemovedFileNodesGenerator(self.removed_paths, self)
463
460
464 @LazyProperty
461 @LazyProperty
465 def removed_paths(self):
462 def removed_paths(self):
466 return [n for n in self._get_paths_for_status('deleted')]
463 return [n for n in self._get_paths_for_status('deleted')]
467
464
468 def _get_submodule_url(self, submodule_path):
465 def _get_submodule_url(self, submodule_path):
469 git_modules_path = '.gitmodules'
466 git_modules_path = '.gitmodules'
470
467
471 if self._submodules is None:
468 if self._submodules is None:
472 self._submodules = {}
469 self._submodules = {}
473
470
474 try:
471 try:
475 submodules_node = self.get_node(git_modules_path)
472 submodules_node = self.get_node(git_modules_path)
476 except NodeDoesNotExistError:
473 except NodeDoesNotExistError:
477 return None
474 return None
478
475
479 # ConfigParser fails if there are whitespaces, also it needs an iterable
476 # ConfigParser fails if there are whitespaces, also it needs an iterable
480 # file like content
477 # file like content
481 def iter_content(_content):
478 def iter_content(_content):
482 for line in _content.splitlines():
479 for line in _content.splitlines():
483 yield line
480 yield line
484
481
485 parser = configparser.RawConfigParser()
482 parser = configparser.RawConfigParser()
486 parser.read_file(iter_content(submodules_node.content))
483 parser.read_file(iter_content(submodules_node.content))
487
484
488 for section in parser.sections():
485 for section in parser.sections():
489 path = parser.get(section, 'path')
486 path = parser.get(section, 'path')
490 url = parser.get(section, 'url')
487 url = parser.get(section, 'url')
491 if path and url:
488 if path and url:
492 self._submodules[path.strip('/')] = url
489 self._submodules[path.strip('/')] = url
493
490
494 return self._submodules.get(submodule_path.strip('/'))
491 return self._submodules.get(submodule_path.strip('/'))
@@ -1,49 +1,49 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT diff module
22 GIT diff module
23 """
23 """
24
24
25 import re
25 import re
26
26
27 from rhodecode.lib.vcs.backends import base
27 from rhodecode.lib.vcs.backends import base
28
28
29
29
30 class GitDiff(base.Diff):
30 class GitDiff(base.Diff):
31
31
32 _header_re = re.compile(r"""
32 _header_re = re.compile(br"""
33 #^diff[ ]--git
33 #^diff[ ]--git
34 [ ]"?a/(?P<a_path>.+?)"?[ ]"?b/(?P<b_path>.+?)"?\n
34 [ ]"?a/(?P<a_path>.+?)"?[ ]"?b/(?P<b_path>.+?)"?\n
35 (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
35 (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
36 ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
36 ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
37 (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%(?:\n|$))?
37 (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%(?:\n|$))?
38 (?:^rename[ ]from[ ](?P<rename_from>[^\r\n]+)\n
38 (?:^rename[ ]from[ ](?P<rename_from>[^\r\n]+)\n
39 ^rename[ ]to[ ](?P<rename_to>[^\r\n]+)(?:\n|$))?
39 ^rename[ ]to[ ](?P<rename_to>[^\r\n]+)(?:\n|$))?
40 (?:^copy[ ]from[ ](?P<copy_from>[^\r\n]+)\n
40 (?:^copy[ ]from[ ](?P<copy_from>[^\r\n]+)\n
41 ^copy[ ]to[ ](?P<copy_to>[^\r\n]+)(?:\n|$))?
41 ^copy[ ]to[ ](?P<copy_to>[^\r\n]+)(?:\n|$))?
42 (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
42 (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
43 (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
43 (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
44 (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
44 (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
45 \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
45 \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
46 (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
46 (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
47 (?:^---[ ]("?a/(?P<a_file>.+)|/dev/null)(?:\n|$))?
47 (?:^---[ ]("?a/(?P<a_file>.+)|/dev/null)(?:\n|$))?
48 (?:^\+\+\+[ ]("?b/(?P<b_file>.+)|/dev/null)(?:\n|$))?
48 (?:^\+\+\+[ ]("?b/(?P<b_file>.+)|/dev/null)(?:\n|$))?
49 """, re.VERBOSE | re.MULTILINE)
49 """, re.VERBOSE | re.MULTILINE)
@@ -1,106 +1,107 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT inmemory module
22 GIT inmemory module
23 """
23 """
24
24
25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
26 from rhodecode.lib.utils import safe_str
26 from rhodecode.lib.str_utils import safe_str, get_default_encodings
27 from rhodecode.lib.vcs.backends import base
27 from rhodecode.lib.vcs.backends import base
28
28
29
29
30 class GitInMemoryCommit(base.BaseInMemoryCommit):
30 class GitInMemoryCommit(base.BaseInMemoryCommit):
31
31
32 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
32 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
33 """
33 """
34 Performs in-memory commit (doesn't check workdir in any way) and
34 Performs in-memory commit (doesn't check workdir in any way) and
35 returns newly created `GitCommit`. Updates repository's
35 returns newly created `GitCommit`. Updates repository's
36 `commit_ids`.
36 `commit_ids`.
37
37
38 :param message: message of the commit
38 :param message: message of the commit
39 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
39 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
40 :param parents: single parent or sequence of parents from which commit
40 :param parents: single parent or sequence of parents from which commit
41 would be derived
41 would be derived
42 :param date: `datetime.datetime` instance. Defaults to
42 :param date: `datetime.datetime` instance. Defaults to
43 ``datetime.datetime.now()``.
43 ``datetime.datetime.now()``.
44 :param branch: branch name, as string. If none given, default backend's
44 :param branch: branch name, as string. If none given, default backend's
45 branch would be used.
45 branch would be used.
46
46
47 :raises `CommitError`: if any error occurs while committing
47 :raises `CommitError`: if any error occurs while committing
48 """
48 """
49 self.check_integrity(parents)
49 self.check_integrity(parents)
50 if branch is None:
50 if branch is None:
51 branch = self.repository.DEFAULT_BRANCH_NAME
51 branch = self.repository.DEFAULT_BRANCH_NAME
52
52
53 ENCODING = "UTF-8"
54
55 commit_tree = None
53 commit_tree = None
56 if self.parents[0]:
54 if self.parents[0]:
57 commit_tree = self.parents[0]._commit['tree']
55 commit_tree = self.parents[0]._commit['tree']
58
56
57 encoding = get_default_encodings()[0]
59 updated = []
58 updated = []
60 for node in self.added + self.changed:
59 for node in self.added + self.changed:
61
60 content = node.content
62 if node.is_binary:
61 # TODO: left for reference pre py3 migration, probably need to be removed
63 content = node.content
62 # if node.is_binary:
64 else:
63 # content = node.content
65 content = node.content.encode(ENCODING)
64 # else:
65 # content = node.content.encode(ENCODING)
66
66
67 updated.append({
67 updated.append({
68 'path': node.path,
68 'path': node.path,
69 'node_path': node.name.encode(ENCODING),
69 'node_path': node.name,
70 'content': content,
70 'content': content,
71 'mode': node.mode,
71 'mode': node.mode,
72 })
72 })
73
73
74 removed = [node.path for node in self.removed]
74 removed = [node.path for node in self.removed]
75
75
76 date, tz = date_to_timestamp_plus_offset(date)
76 date, tz = date_to_timestamp_plus_offset(date)
77
77
78 # TODO: johbo: Make kwargs explicit and check if this is needed.
79 author_time = kwargs.pop('author_time', date)
78 author_time = kwargs.pop('author_time', date)
80 author_tz = kwargs.pop('author_timezone', tz)
79 author_tz = kwargs.pop('author_timezone', tz)
81
80
82 commit_data = {
81 commit_data = {
83 'parents': [p._commit['id'] for p in self.parents if p],
82 'parents': [p._commit['id'] for p in self.parents if p],
84 'author': safe_str(author),
83 'author': safe_str(author),
85 'committer': safe_str(author),
84 'committer': safe_str(author),
86 'encoding': ENCODING,
85 'encoding': encoding,
87 'message': safe_str(message),
86 'message': safe_str(message),
87
88 'commit_time': int(date),
88 'commit_time': int(date),
89 'commit_timezone': tz,
90
89 'author_time': int(author_time),
91 'author_time': int(author_time),
90 'commit_timezone': tz,
91 'author_timezone': author_tz,
92 'author_timezone': author_tz,
92 }
93 }
93
94
94 commit_id = self.repository._remote.commit(
95 commit_id = self.repository._remote.commit(
95 commit_data, branch, commit_tree, updated, removed)
96 commit_data, branch, commit_tree, updated, removed)
96
97
97 # Update vcs repository object
98 # Update vcs repository object
98 self.repository.append_commit_id(commit_id)
99 self.repository.append_commit_id(commit_id)
99
100
100 # invalidate parsed refs after commit
101 # invalidate parsed refs after commit
101 self.repository._refs = self.repository._get_refs()
102 self.repository._refs = self.repository._get_refs()
102 self.repository.branches = self.repository._get_branches()
103 self.repository.branches = self.repository._get_branches()
103 tip = self.repository.get_commit(commit_id)
104 tip = self.repository.get_commit(commit_id)
104
105
105 self.reset()
106 self.reset()
106 return tip
107 return tip
@@ -1,1052 +1,1055 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from collections import OrderedDict
31 from collections import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.hash_utils import safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference)
39 MergeFailureReason, Reference)
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 CommitDoesNotExistError, EmptyRepositoryError,
44 CommitDoesNotExistError, EmptyRepositoryError,
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46
46
47
47
48 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
48 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class GitRepository(BaseRepository):
53 class GitRepository(BaseRepository):
54 """
54 """
55 Git repository backend.
55 Git repository backend.
56 """
56 """
57 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
57 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
58 DEFAULT_REF = 'branch:{}'.format(DEFAULT_BRANCH_NAME)
58 DEFAULT_REF = 'branch:{}'.format(DEFAULT_BRANCH_NAME)
59
59
60 contact = BaseRepository.DEFAULT_CONTACT
60 contact = BaseRepository.DEFAULT_CONTACT
61
61
62 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 def __init__(self, repo_path, config=None, create=False, src_url=None,
63 do_workspace_checkout=False, with_wire=None, bare=False):
63 do_workspace_checkout=False, with_wire=None, bare=False):
64
64
65 self.path = safe_str(os.path.abspath(repo_path))
65 self.path = safe_str(os.path.abspath(repo_path))
66 self.config = config if config else self.get_default_config()
66 self.config = config if config else self.get_default_config()
67 self.with_wire = with_wire or {"cache": False} # default should not use cache
67 self.with_wire = with_wire or {"cache": False} # default should not use cache
68
68
69 self._init_repo(create, src_url, do_workspace_checkout, bare)
69 self._init_repo(create, src_url, do_workspace_checkout, bare)
70
70
71 # caches
71 # caches
72 self._commit_ids = {}
72 self._commit_ids = {}
73
73
74 @LazyProperty
74 @LazyProperty
75 def _remote(self):
75 def _remote(self):
76 repo_id = self.path
76 repo_id = self.path
77 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
78
78
79 @LazyProperty
79 @LazyProperty
80 def bare(self):
80 def bare(self):
81 return self._remote.bare()
81 return self._remote.bare()
82
82
83 @LazyProperty
83 @LazyProperty
84 def head(self):
84 def head(self):
85 return self._remote.head()
85 return self._remote.head()
86
86
87 @CachedProperty
87 @CachedProperty
88 def commit_ids(self):
88 def commit_ids(self):
89 """
89 """
90 Returns list of commit ids, in ascending order. Being lazy
90 Returns list of commit ids, in ascending order. Being lazy
91 attribute allows external tools to inject commit ids from cache.
91 attribute allows external tools to inject commit ids from cache.
92 """
92 """
93 commit_ids = self._get_all_commit_ids()
93 commit_ids = self._get_all_commit_ids()
94 self._rebuild_cache(commit_ids)
94 self._rebuild_cache(commit_ids)
95 return commit_ids
95 return commit_ids
96
96
97 def _rebuild_cache(self, commit_ids):
97 def _rebuild_cache(self, commit_ids):
98 self._commit_ids = dict((commit_id, index)
98 self._commit_ids = dict((commit_id, index)
99 for index, commit_id in enumerate(commit_ids))
99 for index, commit_id in enumerate(commit_ids))
100
100
101 def run_git_command(self, cmd, **opts):
101 def run_git_command(self, cmd, **opts):
102 """
102 """
103 Runs given ``cmd`` as git command and returns tuple
103 Runs given ``cmd`` as git command and returns tuple
104 (stdout, stderr).
104 (stdout, stderr).
105
105
106 :param cmd: git command to be executed
106 :param cmd: git command to be executed
107 :param opts: env options to pass into Subprocess command
107 :param opts: env options to pass into Subprocess command
108 """
108 """
109 if not isinstance(cmd, list):
109 if not isinstance(cmd, list):
110 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
111
111
112 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 skip_stderr_log = opts.pop('skip_stderr_log', False)
113 out, err = self._remote.run_git_command(cmd, **opts)
113 out, err = self._remote.run_git_command(cmd, **opts)
114 if err and not skip_stderr_log:
114 if err and not skip_stderr_log:
115 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
116 return out, err
116 return out, err
117
117
118 @staticmethod
118 @staticmethod
119 def check_url(url, config):
119 def check_url(url, config):
120 """
120 """
121 Function will check given url and try to verify if it's a valid
121 Function will check given url and try to verify if it's a valid
122 link. Sometimes it may happened that git will issue basic
122 link. Sometimes it may happened that git will issue basic
123 auth request that can cause whole API to hang when used from python
123 auth request that can cause whole API to hang when used from python
124 or other external calls.
124 or other external calls.
125
125
126 On failures it'll raise urllib2.HTTPError, exception is also thrown
126 On failures it'll raise urllib2.HTTPError, exception is also thrown
127 when the return code is non 200
127 when the return code is non 200
128 """
128 """
129 # check first if it's not an url
129 # check first if it's not an url
130 if os.path.isdir(url) or url.startswith('file:'):
130 if os.path.isdir(url) or url.startswith('file:'):
131 return True
131 return True
132
132
133 if '+' in url.split('://', 1)[0]:
133 if '+' in url.split('://', 1)[0]:
134 url = url.split('+', 1)[1]
134 url = url.split('+', 1)[1]
135
135
136 # Request the _remote to verify the url
136 # Request the _remote to verify the url
137 return connection.Git.check_url(url, config.serialize())
137 return connection.Git.check_url(url, config.serialize())
138
138
139 @staticmethod
139 @staticmethod
140 def is_valid_repository(path):
140 def is_valid_repository(path):
141 if os.path.isdir(os.path.join(path, '.git')):
141 if os.path.isdir(os.path.join(path, '.git')):
142 return True
142 return True
143 # check case of bare repository
143 # check case of bare repository
144 try:
144 try:
145 GitRepository(path)
145 GitRepository(path)
146 return True
146 return True
147 except VCSError:
147 except VCSError:
148 pass
148 pass
149 return False
149 return False
150
150
151 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
152 bare=False):
152 bare=False):
153 if create and os.path.exists(self.path):
153 if create and os.path.exists(self.path):
154 raise RepositoryError(
154 raise RepositoryError(
155 "Cannot create repository at %s, location already exist"
155 "Cannot create repository at %s, location already exist"
156 % self.path)
156 % self.path)
157
157
158 if bare and do_workspace_checkout:
158 if bare and do_workspace_checkout:
159 raise RepositoryError("Cannot update a bare repository")
159 raise RepositoryError("Cannot update a bare repository")
160 try:
160 try:
161
161
162 if src_url:
162 if src_url:
163 # check URL before any actions
163 # check URL before any actions
164 GitRepository.check_url(src_url, self.config)
164 GitRepository.check_url(src_url, self.config)
165
165
166 if create:
166 if create:
167 os.makedirs(self.path, mode=0o755)
167 os.makedirs(self.path, mode=0o755)
168
168
169 if bare:
169 if bare:
170 self._remote.init_bare()
170 self._remote.init_bare()
171 else:
171 else:
172 self._remote.init()
172 self._remote.init()
173
173
174 if src_url and bare:
174 if src_url and bare:
175 # bare repository only allows a fetch and checkout is not allowed
175 # bare repository only allows a fetch and checkout is not allowed
176 self.fetch(src_url, commit_ids=None)
176 self.fetch(src_url, commit_ids=None)
177 elif src_url:
177 elif src_url:
178 self.pull(src_url, commit_ids=None,
178 self.pull(src_url, commit_ids=None,
179 update_after=do_workspace_checkout)
179 update_after=do_workspace_checkout)
180
180
181 else:
181 else:
182 if not self._remote.assert_correct_path():
182 if not self._remote.assert_correct_path():
183 raise RepositoryError(
183 raise RepositoryError(
184 'Path "%s" does not contain a Git repository' %
184 'Path "%s" does not contain a Git repository' %
185 (self.path,))
185 (self.path,))
186
186
187 # TODO: johbo: check if we have to translate the OSError here
187 # TODO: johbo: check if we have to translate the OSError here
188 except OSError as err:
188 except OSError as err:
189 raise RepositoryError(err)
189 raise RepositoryError(err)
190
190
191 def _get_all_commit_ids(self):
191 def _get_all_commit_ids(self):
192 return self._remote.get_all_commit_ids()
192 return self._remote.get_all_commit_ids()
193
193
194 def _get_commit_ids(self, filters=None):
194 def _get_commit_ids(self, filters=None):
195 # we must check if this repo is not empty, since later command
195 # we must check if this repo is not empty, since later command
196 # fails if it is. And it's cheaper to ask than throw the subprocess
196 # fails if it is. And it's cheaper to ask than throw the subprocess
197 # errors
197 # errors
198
198
199 head = self._remote.head(show_exc=False)
199 head = self._remote.head(show_exc=False)
200
200
201 if not head:
201 if not head:
202 return []
202 return []
203
203
204 rev_filter = ['--branches', '--tags']
204 rev_filter = ['--branches', '--tags']
205 extra_filter = []
205 extra_filter = []
206
206
207 if filters:
207 if filters:
208 if filters.get('since'):
208 if filters.get('since'):
209 extra_filter.append('--since=%s' % (filters['since']))
209 extra_filter.append('--since=%s' % (filters['since']))
210 if filters.get('until'):
210 if filters.get('until'):
211 extra_filter.append('--until=%s' % (filters['until']))
211 extra_filter.append('--until=%s' % (filters['until']))
212 if filters.get('branch_name'):
212 if filters.get('branch_name'):
213 rev_filter = []
213 rev_filter = []
214 extra_filter.append(filters['branch_name'])
214 extra_filter.append(filters['branch_name'])
215 rev_filter.extend(extra_filter)
215 rev_filter.extend(extra_filter)
216
216
217 # if filters.get('start') or filters.get('end'):
217 # if filters.get('start') or filters.get('end'):
218 # # skip is offset, max-count is limit
218 # # skip is offset, max-count is limit
219 # if filters.get('start'):
219 # if filters.get('start'):
220 # extra_filter += ' --skip=%s' % filters['start']
220 # extra_filter += ' --skip=%s' % filters['start']
221 # if filters.get('end'):
221 # if filters.get('end'):
222 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
223
223
224 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
225 try:
225 try:
226 output, __ = self.run_git_command(cmd)
226 output, __ = self.run_git_command(cmd)
227 except RepositoryError:
227 except RepositoryError:
228 # Can be raised for empty repositories
228 # Can be raised for empty repositories
229 return []
229 return []
230 return output.splitlines()
230 return output.splitlines()
231
231
    def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
        """
        Resolve ``commit_id_or_idx`` (full/short sha, ref name, numeric index,
        or a tip alias) to a full commit sha.

        :raises CommitDoesNotExistError: when nothing matches.
        """

        def is_null(value):
            # heuristic for the all-zero "null" sha: every character is '0'
            return len(value) == commit_id_or_idx.count('0')

        # tip aliases resolve to the most recent commit
        if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
            return self.commit_ids[-1]

        commit_missing_err = "Commit {} does not exist for `{}`".format(
            *map(safe_str, [commit_id_or_idx, self.name]))

        is_bstr = isinstance(commit_id_or_idx, str)
        is_branch = reference_obj and reference_obj.branch

        lookup_ok = False
        if is_bstr:
            # Need to call remote to translate id for tagging scenarios,
            # or branch that are numeric
            try:
                remote_data = self._remote.get_object(commit_id_or_idx,
                                                      maybe_unreachable=maybe_unreachable)
                commit_id_or_idx = remote_data["commit_id"]
                lookup_ok = True
            except (CommitDoesNotExistError,):
                lookup_ok = False

        if lookup_ok is False:
            # fall back to treating the value as a numeric index into commit_ids
            is_numeric_idx = \
                (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
                or isinstance(commit_id_or_idx, int)
            if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
                try:
                    commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
                    lookup_ok = True
                except Exception:
                    raise CommitDoesNotExistError(commit_missing_err)

        # we failed regular lookup, and by integer number lookup
        if lookup_ok is False:
            raise CommitDoesNotExistError(commit_missing_err)

        # Ensure we return full id
        if not SHA_PATTERN.match(str(commit_id_or_idx)):
            raise CommitDoesNotExistError(
                "Given commit id %s not recognized" % commit_id_or_idx)
        return commit_id_or_idx
278
278
279 def get_hook_location(self):
279 def get_hook_location(self):
280 """
280 """
281 returns absolute path to location where hooks are stored
281 returns absolute path to location where hooks are stored
282 """
282 """
283 loc = os.path.join(self.path, 'hooks')
283 loc = os.path.join(self.path, 'hooks')
284 if not self.bare:
284 if not self.bare:
285 loc = os.path.join(self.path, '.git', 'hooks')
285 loc = os.path.join(self.path, '.git', 'hooks')
286 return loc
286 return loc
287
287
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # no commits yet - fall back to the control files' mtime on disk
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
299
299
300 def _get_fs_mtime(self):
300 def _get_fs_mtime(self):
301 idx_loc = '' if self.bare else '.git'
301 idx_loc = '' if self.bare else '.git'
302 # fallback to filesystem
302 # fallback to filesystem
303 in_path = os.path.join(self.path, idx_loc, "index")
303 in_path = os.path.join(self.path, idx_loc, "index")
304 he_path = os.path.join(self.path, idx_loc, "HEAD")
304 he_path = os.path.join(self.path, idx_loc, "HEAD")
305 if os.path.exists(in_path):
305 if os.path.exists(in_path):
306 return os.stat(in_path).st_mtime
306 return os.stat(in_path).st_mtime
307 else:
307 else:
308 return os.stat(he_path).st_mtime
308 return os.stat(he_path).st_mtime
309
309
    @LazyProperty
    def description(self):
        # repository description as stored by git; falls back to the default text
        description = self._remote.get_description()
        return safe_str(description or self.DEFAULT_DESCRIPTION)
314
314
315 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
315 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
316 if self.is_empty():
316 if self.is_empty():
317 return OrderedDict()
317 return OrderedDict()
318
318
319 result = []
319 result = []
320 for ref, sha in self._refs.items():
320 for ref, sha in self._refs.items():
321 if ref.startswith(prefix):
321 if ref.startswith(prefix):
322 ref_name = ref
322 ref_name = ref
323 if strip_prefix:
323 if strip_prefix:
324 ref_name = ref[len(prefix):]
324 ref_name = ref[len(prefix):]
325 result.append((safe_unicode(ref_name), sha))
325 result.append((safe_str(ref_name), sha))
326
326
327 def get_name(entry):
327 def get_name(entry):
328 return entry[0]
328 return entry[0]
329
329
330 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
330 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
331
331
    def _get_branches(self):
        # branch heads live under refs/heads/
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
334
334
    @CachedProperty
    def branches(self):
        # mapping of branch name -> sha, cached until explicitly invalidated
        return self._get_branches()
338
338
    @CachedProperty
    def branches_closed(self):
        # git has no notion of closed branches (a Mercurial concept)
        return {}
342
342
    @CachedProperty
    def bookmarks(self):
        # git has no bookmarks (a Mercurial concept)
        return {}
346
346
347 @CachedProperty
347 @CachedProperty
348 def branches_all(self):
348 def branches_all(self):
349 all_branches = {}
349 all_branches = {}
350 all_branches.update(self.branches)
350 all_branches.update(self.branches)
351 all_branches.update(self.branches_closed)
351 all_branches.update(self.branches_closed)
352 return all_branches
352 return all_branches
353
353
    @CachedProperty
    def tags(self):
        # mapping of tag name -> sha, cached until explicitly invalidated
        return self._get_tags()
357
357
    def _get_tags(self):
        # tags live under refs/tags/, returned in reverse name order
        return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
360
360
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        # TODO: fix this method to apply annotated tags correct with message
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        message = message or "Added tag %s for commit %s" % (name, commit.raw_id)

        # lightweight tag: just point the ref at the commit
        # (message/date are currently not applied - see TODO above)
        self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)

        # drop cached tag/ref listings so they are re-read on next access
        self._invalidate_prop_cache('tags')
        self._invalidate_prop_cache('_refs')

        return commit
386
386
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit (currently unused)
        :param date: date of tag's removal commit (currently unused)

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        self._remote.tag_remove(name)
        # drop cached tag/ref listings so they are re-read on next access
        self._invalidate_prop_cache('tags')
        self._invalidate_prop_cache('_refs')
404
404
    def _get_refs(self):
        # full ref-name -> sha mapping straight from the remote backend
        return self._remote.get_refs()
407
407
    @CachedProperty
    def _refs(self):
        # cached ref mapping; invalidated via _invalidate_prop_cache('_refs')
        return self._get_refs()
411
411
    @property
    def _ref_tree(self):
        # Build a nested dict from flat ref names, e.g.
        # 'refs/heads/main' -> {'refs': {'heads': {'main': sha}}}
        node = tree = {}
        for ref, sha in self._refs.items():
            path = ref.split('/')
            # descend, creating intermediate dicts as needed
            for bit in path[:-1]:
                node = node.setdefault(bit, {})
            node[path[-1]] = sha
            node = tree  # reset to the root before processing the next ref
        return tree
422
422
    def get_remote_ref(self, ref_name):
        """Return the sha the origin remote-tracking ref points at, or None."""
        ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
        try:
            return self._refs[ref_key]
        except Exception:
            # deliberately best-effort: any lookup failure means "not found"
            return
429
429
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=True, maybe_unreachable=False, reference_obj=None):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.

        :raises EmptyRepositoryError: when the repository has no commits.
        """

        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return GitCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize a negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)
                return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                # fall through to the generic lookup below
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if translate_tag:
            # resolve tags/branches/short ids/indexes to a full sha
            commit_id = self._lookup_commit(
                commit_id, maybe_unreachable=maybe_unreachable,
                reference_obj=reference_obj)

        try:
            idx = self._commit_ids[commit_id]
        except KeyError:
            idx = -1

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
472
472
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
            `end` could not be found.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # translate the boundary ids into positions in the full commit list
        start_raw_id = self._lookup_commit(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._lookup_commit(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            end_pos += 1  # make the end boundary inclusive when slicing below

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        if filter_:
            # delegate date/branch filtering to `git rev-list`
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_commit_ids(filters=revfilters)
        else:
            commit_ids = self.commit_ids

        # NOTE(review): start_pos == 0 falls through without slicing, which is
        # harmless since slicing from 0 would be a no-op anyway
        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                                   translate_tag=translate_tags)
552
552
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        ``commit2`` since ``commit1``.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until ``commit2``
        :param commit2: Until which commits changes should be shown.
        :param path: optional file path to narrow the diff to
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: second path; must equal ``path`` when given (diffing two
          different paths is not supported by this backend)
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        # limit the diff to a single path when one is given
        if path:
            file_filter = path
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_ignorews=ignore_whitespace,
            context=context)

        return GitDiff(diff)
583
586
584 def strip(self, commit_id, branch_name):
587 def strip(self, commit_id, branch_name):
585 commit = self.get_commit(commit_id=commit_id)
588 commit = self.get_commit(commit_id=commit_id)
586 if commit.merge:
589 if commit.merge:
587 raise Exception('Cannot reset to merge commit')
590 raise Exception('Cannot reset to merge commit')
588
591
589 # parent is going to be the new head now
592 # parent is going to be the new head now
590 commit = commit.parents[0]
593 commit = commit.parents[0]
591 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
594 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
592
595
593 # clear cached properties
596 # clear cached properties
594 self._invalidate_prop_cache('commit_ids')
597 self._invalidate_prop_cache('commit_ids')
595 self._invalidate_prop_cache('_refs')
598 self._invalidate_prop_cache('_refs')
596 self._invalidate_prop_cache('branches')
599 self._invalidate_prop_cache('branches')
597
600
598 return len(self.commit_ids)
601 return len(self.commit_ids)
599
602
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return the sha of the common ancestor between ``commit_id1`` (in this
        repo) and ``commit_id2`` (in ``repo2``), or None when none is found.
        """
        log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
                  self, commit_id1, repo2, commit_id2)

        if commit_id1 == commit_id2:
            return commit_id1

        if self != repo2:
            # cross-repo: derive the ancestor from the revs missing on our side
            commits = self._remote.get_missing_revs(
                commit_id1, commit_id2, repo2.path)
            if commits:
                commit = repo2.get_commit(commits[-1])
                if commit.parents:
                    ancestor_id = commit.parents[0].raw_id
                else:
                    # oldest missing commit has no parent -> no common ancestor
                    ancestor_id = None
            else:
                # no commits from other repo, ancestor_id is the commit_id2
                ancestor_id = commit_id2
        else:
            # same repo: git merge-base answers directly
            output, __ = self.run_git_command(
                ['merge-base', commit_id1, commit_id2])
            ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]

        log.debug('Found common ancestor with sha: %s', ancestor_id)

        return ancestor_id
627
630
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the list of commits reachable from ``commit_id2`` but not from
        ``commit_id1``, in ascending order.

        NOTE(review): the ``merge`` parameter is unused here - presumably kept
        for signature parity with other backends; confirm before removing.
        """
        repo1 = self
        ancestor_id = None  # NOTE(review): assigned but never used in this method

        if commit_id1 == commit_id2:
            commits = []
        elif repo1 != repo2:
            # cross-repo comparison: ask the remote which revs are missing
            missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
                                                        repo2.path)
            commits = [
                repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in reversed(missing_ids)]
        else:
            # same repo: use `git log` over the exclusive revision range
            output, __ = repo1.run_git_command(
                ['log', '--reverse', '--pretty=format: %H', '-s',
                 '%s..%s' % (commit_id1, commit_id2)])
            commits = [
                repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in self.COMMIT_ID_PAT.findall(output)]

        return commits
649
652
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        return GitInMemoryCommit(self)
656
659
657 def pull(self, url, commit_ids=None, update_after=False):
660 def pull(self, url, commit_ids=None, update_after=False):
658 """
661 """
659 Pull changes from external location. Pull is different in GIT
662 Pull changes from external location. Pull is different in GIT
660 that fetch since it's doing a checkout
663 that fetch since it's doing a checkout
661
664
662 :param commit_ids: Optional. Can be set to a list of commit ids
665 :param commit_ids: Optional. Can be set to a list of commit ids
663 which shall be pulled from the other repository.
666 which shall be pulled from the other repository.
664 """
667 """
665 refs = None
668 refs = None
666 if commit_ids is not None:
669 if commit_ids is not None:
667 remote_refs = self._remote.get_remote_refs(url)
670 remote_refs = self._remote.get_remote_refs(url)
668 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
671 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
669 self._remote.pull(url, refs=refs, update_after=update_after)
672 self._remote.pull(url, refs=refs, update_after=update_after)
670 self._remote.invalidate_vcs_cache()
673 self._remote.invalidate_vcs_cache()
671
674
    def fetch(self, url, commit_ids=None):
        """
        Fetch all git objects from external location.

        :param url: location to fetch from
        :param commit_ids: optional refs/commit ids to restrict the fetch to
        """
        self._remote.sync_fetch(url, refs=commit_ids)
        self._remote.invalidate_vcs_cache()
678
681
    def push(self, url):
        """Push to ``url``; refs is always None here, i.e. all refs are pushed."""
        refs = None
        self._remote.sync_push(url, refs=refs)
682
685
    def set_refs(self, ref_name, commit_id):
        # point ref_name at commit_id, then drop the cached ref listing
        self._remote.set_refs(ref_name, commit_id)
        self._invalidate_prop_cache('_refs')
686
689
    def remove_ref(self, ref_name):
        # delete the ref, then drop the cached ref listing
        self._remote.remove_ref(ref_name)
        self._invalidate_prop_cache('_refs')
690
693
691 def run_gc(self, prune=True):
694 def run_gc(self, prune=True):
692 cmd = ['gc', '--aggressive']
695 cmd = ['gc', '--aggressive']
693 if prune:
696 if prune:
694 cmd += ['--prune=now']
697 cmd += ['--prune=now']
695 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
698 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
696 return stderr
699 return stderr
697
700
    def _update_server_info(self):
        """
        runs gits update-server-info command in this repo instance
        """
        self._remote.update_server_info()
703
706
    def _current_branch(self):
        """
        Return the name of the current branch.

        It only works for non bare repositories (i.e. repositories with a
        working copy)

        :returns: branch name, or None for an empty repository
        :raises RepositoryError: for bare repositories
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
        return stdout.strip()
719
722
720 def _checkout(self, branch_name, create=False, force=False):
723 def _checkout(self, branch_name, create=False, force=False):
721 """
724 """
722 Checkout a branch in the working directory.
725 Checkout a branch in the working directory.
723
726
724 It tries to create the branch if create is True, failing if the branch
727 It tries to create the branch if create is True, failing if the branch
725 already exists.
728 already exists.
726
729
727 It only works for non bare repositories (i.e. repositories with a
730 It only works for non bare repositories (i.e. repositories with a
728 working copy)
731 working copy)
729 """
732 """
730 if self.bare:
733 if self.bare:
731 raise RepositoryError('Cannot checkout branches in a bare git repo')
734 raise RepositoryError('Cannot checkout branches in a bare git repo')
732
735
733 cmd = ['checkout']
736 cmd = ['checkout']
734 if force:
737 if force:
735 cmd.append('-f')
738 cmd.append('-f')
736 if create:
739 if create:
737 cmd.append('-b')
740 cmd.append('-b')
738 cmd.append(branch_name)
741 cmd.append(branch_name)
739 self.run_git_command(cmd, fail_on_stderr=False)
742 self.run_git_command(cmd, fail_on_stderr=False)
740
743
741 def _create_branch(self, branch_name, commit_id):
744 def _create_branch(self, branch_name, commit_id):
742 """
745 """
743 creates a branch in a GIT repo
746 creates a branch in a GIT repo
744 """
747 """
745 self._remote.create_branch(branch_name, commit_id)
748 self._remote.create_branch(branch_name, commit_id)
746
749
747 def _identify(self):
750 def _identify(self):
748 """
751 """
749 Return the current state of the working directory.
752 Return the current state of the working directory.
750 """
753 """
751 if self.bare:
754 if self.bare:
752 raise RepositoryError('Bare git repos do not have active branches')
755 raise RepositoryError('Bare git repos do not have active branches')
753
756
754 if self.is_empty():
757 if self.is_empty():
755 return None
758 return None
756
759
757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
760 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
758 return stdout.strip()
761 return stdout.strip()
759
762
760 def _local_clone(self, clone_path, branch_name, source_branch=None):
763 def _local_clone(self, clone_path, branch_name, source_branch=None):
761 """
764 """
762 Create a local clone of the current repo.
765 Create a local clone of the current repo.
763 """
766 """
764 # N.B.(skreft): the --branch option is required as otherwise the shallow
767 # N.B.(skreft): the --branch option is required as otherwise the shallow
765 # clone will only fetch the active branch.
768 # clone will only fetch the active branch.
766 cmd = ['clone', '--branch', branch_name,
769 cmd = ['clone', '--branch', branch_name,
767 self.path, os.path.abspath(clone_path)]
770 self.path, os.path.abspath(clone_path)]
768
771
769 self.run_git_command(cmd, fail_on_stderr=False)
772 self.run_git_command(cmd, fail_on_stderr=False)
770
773
771 # if we get the different source branch, make sure we also fetch it for
774 # if we get the different source branch, make sure we also fetch it for
772 # merge conditions
775 # merge conditions
773 if source_branch and source_branch != branch_name:
776 if source_branch and source_branch != branch_name:
774 # check if the ref exists.
777 # check if the ref exists.
775 shadow_repo = GitRepository(os.path.abspath(clone_path))
778 shadow_repo = GitRepository(os.path.abspath(clone_path))
776 if shadow_repo.get_remote_ref(source_branch):
779 if shadow_repo.get_remote_ref(source_branch):
777 cmd = ['fetch', self.path, source_branch]
780 cmd = ['fetch', self.path, source_branch]
778 self.run_git_command(cmd, fail_on_stderr=False)
781 self.run_git_command(cmd, fail_on_stderr=False)
779
782
780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
783 def _local_fetch(self, repository_path, branch_name, use_origin=False):
781 """
784 """
782 Fetch a branch from a local repository.
785 Fetch a branch from a local repository.
783 """
786 """
784 repository_path = os.path.abspath(repository_path)
787 repository_path = os.path.abspath(repository_path)
785 if repository_path == self.path:
788 if repository_path == self.path:
786 raise ValueError('Cannot fetch from the same repository')
789 raise ValueError('Cannot fetch from the same repository')
787
790
788 if use_origin:
791 if use_origin:
789 branch_name = '+{branch}:refs/heads/{branch}'.format(
792 branch_name = '+{branch}:refs/heads/{branch}'.format(
790 branch=branch_name)
793 branch=branch_name)
791
794
792 cmd = ['fetch', '--no-tags', '--update-head-ok',
795 cmd = ['fetch', '--no-tags', '--update-head-ok',
793 repository_path, branch_name]
796 repository_path, branch_name]
794 self.run_git_command(cmd, fail_on_stderr=False)
797 self.run_git_command(cmd, fail_on_stderr=False)
795
798
796 def _local_reset(self, branch_name):
799 def _local_reset(self, branch_name):
797 branch_name = '{}'.format(branch_name)
800 branch_name = '{}'.format(branch_name)
798 cmd = ['reset', '--hard', branch_name, '--']
801 cmd = ['reset', '--hard', branch_name, '--']
799 self.run_git_command(cmd, fail_on_stderr=False)
802 self.run_git_command(cmd, fail_on_stderr=False)
800
803
801 def _last_fetch_heads(self):
804 def _last_fetch_heads(self):
802 """
805 """
803 Return the last fetched heads that need merging.
806 Return the last fetched heads that need merging.
804
807
805 The algorithm is defined at
808 The algorithm is defined at
806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
809 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
807 """
810 """
808 if not self.bare:
811 if not self.bare:
809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
812 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
810 else:
813 else:
811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
814 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
812
815
813 heads = []
816 heads = []
814 with open(fetch_heads_path) as f:
817 with open(fetch_heads_path) as f:
815 for line in f:
818 for line in f:
816 if ' not-for-merge ' in line:
819 if ' not-for-merge ' in line:
817 continue
820 continue
818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
821 line = re.sub('\t.*', '', line, flags=re.DOTALL)
819 heads.append(line)
822 heads.append(line)
820
823
821 return heads
824 return heads
822
825
823 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
826 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
824 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
827 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
825
828
826 def _local_pull(self, repository_path, branch_name, ff_only=True):
829 def _local_pull(self, repository_path, branch_name, ff_only=True):
827 """
830 """
828 Pull a branch from a local repository.
831 Pull a branch from a local repository.
829 """
832 """
830 if self.bare:
833 if self.bare:
831 raise RepositoryError('Cannot pull into a bare git repository')
834 raise RepositoryError('Cannot pull into a bare git repository')
832 # N.B.(skreft): The --ff-only option is to make sure this is a
835 # N.B.(skreft): The --ff-only option is to make sure this is a
833 # fast-forward (i.e., we are only pulling new changes and there are no
836 # fast-forward (i.e., we are only pulling new changes and there are no
834 # conflicts with our current branch)
837 # conflicts with our current branch)
835 # Additionally, that option needs to go before --no-tags, otherwise git
838 # Additionally, that option needs to go before --no-tags, otherwise git
836 # pull complains about it being an unknown flag.
839 # pull complains about it being an unknown flag.
837 cmd = ['pull']
840 cmd = ['pull']
838 if ff_only:
841 if ff_only:
839 cmd.append('--ff-only')
842 cmd.append('--ff-only')
840 cmd.extend(['--no-tags', repository_path, branch_name])
843 cmd.extend(['--no-tags', repository_path, branch_name])
841 self.run_git_command(cmd, fail_on_stderr=False)
844 self.run_git_command(cmd, fail_on_stderr=False)
842
845
843 def _local_merge(self, merge_message, user_name, user_email, heads):
846 def _local_merge(self, merge_message, user_name, user_email, heads):
844 """
847 """
845 Merge the given head into the checked out branch.
848 Merge the given head into the checked out branch.
846
849
847 It will force a merge commit.
850 It will force a merge commit.
848
851
849 Currently it raises an error if the repo is empty, as it is not possible
852 Currently it raises an error if the repo is empty, as it is not possible
850 to create a merge commit in an empty repo.
853 to create a merge commit in an empty repo.
851
854
852 :param merge_message: The message to use for the merge commit.
855 :param merge_message: The message to use for the merge commit.
853 :param heads: the heads to merge.
856 :param heads: the heads to merge.
854 """
857 """
855 if self.bare:
858 if self.bare:
856 raise RepositoryError('Cannot merge into a bare git repository')
859 raise RepositoryError('Cannot merge into a bare git repository')
857
860
858 if not heads:
861 if not heads:
859 return
862 return
860
863
861 if self.is_empty():
864 if self.is_empty():
862 # TODO(skreft): do something more robust in this case.
865 # TODO(skreft): do something more robust in this case.
863 raise RepositoryError('Do not know how to merge into empty repositories yet')
866 raise RepositoryError('Do not know how to merge into empty repositories yet')
864 unresolved = None
867 unresolved = None
865
868
866 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
869 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
867 # commit message. We also specify the user who is doing the merge.
870 # commit message. We also specify the user who is doing the merge.
868 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
871 cmd = ['-c', f'user.name="{user_name}"',
869 '-c', 'user.email=%s' % safe_str(user_email),
872 '-c', f'user.email={user_email}',
870 'merge', '--no-ff', '-m', safe_str(merge_message)]
873 'merge', '--no-ff', '-m', safe_str(merge_message)]
871
874
872 merge_cmd = cmd + heads
875 merge_cmd = cmd + heads
873
876
874 try:
877 try:
875 self.run_git_command(merge_cmd, fail_on_stderr=False)
878 self.run_git_command(merge_cmd, fail_on_stderr=False)
876 except RepositoryError:
879 except RepositoryError:
877 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
880 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
878 fail_on_stderr=False)[0].splitlines()
881 fail_on_stderr=False)[0].splitlines()
879 # NOTE(marcink): we add U notation for consistent with HG backend output
882 # NOTE(marcink): we add U notation for consistent with HG backend output
880 unresolved = ['U {}'.format(f) for f in files]
883 unresolved = ['U {}'.format(f) for f in files]
881
884
882 # Cleanup any merge leftovers
885 # Cleanup any merge leftovers
883 self._remote.invalidate_vcs_cache()
886 self._remote.invalidate_vcs_cache()
884 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
887 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
885
888
886 if unresolved:
889 if unresolved:
887 raise UnresolvedFilesInRepo(unresolved)
890 raise UnresolvedFilesInRepo(unresolved)
888 else:
891 else:
889 raise
892 raise
890
893
891 def _local_push(
894 def _local_push(
892 self, source_branch, repository_path, target_branch,
895 self, source_branch, repository_path, target_branch,
893 enable_hooks=False, rc_scm_data=None):
896 enable_hooks=False, rc_scm_data=None):
894 """
897 """
895 Push the source_branch to the given repository and target_branch.
898 Push the source_branch to the given repository and target_branch.
896
899
897 Currently it if the target_branch is not master and the target repo is
900 Currently it if the target_branch is not master and the target repo is
898 empty, the push will work, but then GitRepository won't be able to find
901 empty, the push will work, but then GitRepository won't be able to find
899 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
902 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
900 pointing to master, which does not exist).
903 pointing to master, which does not exist).
901
904
902 It does not run the hooks in the target repo.
905 It does not run the hooks in the target repo.
903 """
906 """
904 # TODO(skreft): deal with the case in which the target repo is empty,
907 # TODO(skreft): deal with the case in which the target repo is empty,
905 # and the target_branch is not master.
908 # and the target_branch is not master.
906 target_repo = GitRepository(repository_path)
909 target_repo = GitRepository(repository_path)
907 if (not target_repo.bare and
910 if (not target_repo.bare and
908 target_repo._current_branch() == target_branch):
911 target_repo._current_branch() == target_branch):
909 # Git prevents pushing to the checked out branch, so simulate it by
912 # Git prevents pushing to the checked out branch, so simulate it by
910 # pulling into the target repository.
913 # pulling into the target repository.
911 target_repo._local_pull(self.path, source_branch)
914 target_repo._local_pull(self.path, source_branch)
912 else:
915 else:
913 cmd = ['push', os.path.abspath(repository_path),
916 cmd = ['push', os.path.abspath(repository_path),
914 '%s:%s' % (source_branch, target_branch)]
917 '%s:%s' % (source_branch, target_branch)]
915 gitenv = {}
918 gitenv = {}
916 if rc_scm_data:
919 if rc_scm_data:
917 gitenv.update({'RC_SCM_DATA': rc_scm_data})
920 gitenv.update({'RC_SCM_DATA': rc_scm_data})
918
921
919 if not enable_hooks:
922 if not enable_hooks:
920 gitenv['RC_SKIP_HOOKS'] = '1'
923 gitenv['RC_SKIP_HOOKS'] = '1'
921 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
924 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
922
925
923 def _get_new_pr_branch(self, source_branch, target_branch):
926 def _get_new_pr_branch(self, source_branch, target_branch):
924 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
927 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
925 pr_branches = []
928 pr_branches = []
926 for branch in self.branches:
929 for branch in self.branches:
927 if branch.startswith(prefix):
930 if branch.startswith(prefix):
928 pr_branches.append(int(branch[len(prefix):]))
931 pr_branches.append(int(branch[len(prefix):]))
929
932
930 if not pr_branches:
933 if not pr_branches:
931 branch_id = 0
934 branch_id = 0
932 else:
935 else:
933 branch_id = max(pr_branches) + 1
936 branch_id = max(pr_branches) + 1
934
937
935 return '%s%d' % (prefix, branch_id)
938 return '%s%d' % (prefix, branch_id)
936
939
937 def _maybe_prepare_merge_workspace(
940 def _maybe_prepare_merge_workspace(
938 self, repo_id, workspace_id, target_ref, source_ref):
941 self, repo_id, workspace_id, target_ref, source_ref):
939 shadow_repository_path = self._get_shadow_repository_path(
942 shadow_repository_path = self._get_shadow_repository_path(
940 self.path, repo_id, workspace_id)
943 self.path, repo_id, workspace_id)
941 if not os.path.exists(shadow_repository_path):
944 if not os.path.exists(shadow_repository_path):
942 self._local_clone(
945 self._local_clone(
943 shadow_repository_path, target_ref.name, source_ref.name)
946 shadow_repository_path, target_ref.name, source_ref.name)
944 log.debug('Prepared %s shadow repository in %s',
947 log.debug('Prepared %s shadow repository in %s',
945 self.alias, shadow_repository_path)
948 self.alias, shadow_repository_path)
946
949
947 return shadow_repository_path
950 return shadow_repository_path
948
951
949 def _merge_repo(self, repo_id, workspace_id, target_ref,
952 def _merge_repo(self, repo_id, workspace_id, target_ref,
950 source_repo, source_ref, merge_message,
953 source_repo, source_ref, merge_message,
951 merger_name, merger_email, dry_run=False,
954 merger_name, merger_email, dry_run=False,
952 use_rebase=False, close_branch=False):
955 use_rebase=False, close_branch=False):
953
956
954 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
957 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
955 'rebase' if use_rebase else 'merge', dry_run)
958 'rebase' if use_rebase else 'merge', dry_run)
956 if target_ref.commit_id != self.branches[target_ref.name]:
959 if target_ref.commit_id != self.branches[target_ref.name]:
957 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
960 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
958 target_ref.commit_id, self.branches[target_ref.name])
961 target_ref.commit_id, self.branches[target_ref.name])
959 return MergeResponse(
962 return MergeResponse(
960 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
963 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
961 metadata={'target_ref': target_ref})
964 metadata={'target_ref': target_ref})
962
965
963 shadow_repository_path = self._maybe_prepare_merge_workspace(
966 shadow_repository_path = self._maybe_prepare_merge_workspace(
964 repo_id, workspace_id, target_ref, source_ref)
967 repo_id, workspace_id, target_ref, source_ref)
965 shadow_repo = self.get_shadow_instance(shadow_repository_path)
968 shadow_repo = self.get_shadow_instance(shadow_repository_path)
966
969
967 # checkout source, if it's different. Otherwise we could not
970 # checkout source, if it's different. Otherwise we could not
968 # fetch proper commits for merge testing
971 # fetch proper commits for merge testing
969 if source_ref.name != target_ref.name:
972 if source_ref.name != target_ref.name:
970 if shadow_repo.get_remote_ref(source_ref.name):
973 if shadow_repo.get_remote_ref(source_ref.name):
971 shadow_repo._checkout(source_ref.name, force=True)
974 shadow_repo._checkout(source_ref.name, force=True)
972
975
973 # checkout target, and fetch changes
976 # checkout target, and fetch changes
974 shadow_repo._checkout(target_ref.name, force=True)
977 shadow_repo._checkout(target_ref.name, force=True)
975
978
976 # fetch/reset pull the target, in case it is changed
979 # fetch/reset pull the target, in case it is changed
977 # this handles even force changes
980 # this handles even force changes
978 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
981 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
979 shadow_repo._local_reset(target_ref.name)
982 shadow_repo._local_reset(target_ref.name)
980
983
981 # Need to reload repo to invalidate the cache, or otherwise we cannot
984 # Need to reload repo to invalidate the cache, or otherwise we cannot
982 # retrieve the last target commit.
985 # retrieve the last target commit.
983 shadow_repo = self.get_shadow_instance(shadow_repository_path)
986 shadow_repo = self.get_shadow_instance(shadow_repository_path)
984 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
987 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
985 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
988 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
986 target_ref, target_ref.commit_id,
989 target_ref, target_ref.commit_id,
987 shadow_repo.branches[target_ref.name])
990 shadow_repo.branches[target_ref.name])
988 return MergeResponse(
991 return MergeResponse(
989 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
992 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
990 metadata={'target_ref': target_ref})
993 metadata={'target_ref': target_ref})
991
994
992 # calculate new branch
995 # calculate new branch
993 pr_branch = shadow_repo._get_new_pr_branch(
996 pr_branch = shadow_repo._get_new_pr_branch(
994 source_ref.name, target_ref.name)
997 source_ref.name, target_ref.name)
995 log.debug('using pull-request merge branch: `%s`', pr_branch)
998 log.debug('using pull-request merge branch: `%s`', pr_branch)
996 # checkout to temp branch, and fetch changes
999 # checkout to temp branch, and fetch changes
997 shadow_repo._checkout(pr_branch, create=True)
1000 shadow_repo._checkout(pr_branch, create=True)
998 try:
1001 try:
999 shadow_repo._local_fetch(source_repo.path, source_ref.name)
1002 shadow_repo._local_fetch(source_repo.path, source_ref.name)
1000 except RepositoryError:
1003 except RepositoryError:
1001 log.exception('Failure when doing local fetch on '
1004 log.exception('Failure when doing local fetch on '
1002 'shadow repo: %s', shadow_repo)
1005 'shadow repo: %s', shadow_repo)
1003 return MergeResponse(
1006 return MergeResponse(
1004 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1007 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1005 metadata={'source_ref': source_ref})
1008 metadata={'source_ref': source_ref})
1006
1009
1007 merge_ref = None
1010 merge_ref = None
1008 merge_failure_reason = MergeFailureReason.NONE
1011 merge_failure_reason = MergeFailureReason.NONE
1009 metadata = {}
1012 metadata = {}
1010 try:
1013 try:
1011 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1014 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1012 [source_ref.commit_id])
1015 [source_ref.commit_id])
1013 merge_possible = True
1016 merge_possible = True
1014
1017
1015 # Need to invalidate the cache, or otherwise we
1018 # Need to invalidate the cache, or otherwise we
1016 # cannot retrieve the merge commit.
1019 # cannot retrieve the merge commit.
1017 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1020 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1018 merge_commit_id = shadow_repo.branches[pr_branch]
1021 merge_commit_id = shadow_repo.branches[pr_branch]
1019
1022
1020 # Set a reference pointing to the merge commit. This reference may
1023 # Set a reference pointing to the merge commit. This reference may
1021 # be used to easily identify the last successful merge commit in
1024 # be used to easily identify the last successful merge commit in
1022 # the shadow repository.
1025 # the shadow repository.
1023 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1026 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1024 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1027 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1025 except RepositoryError as e:
1028 except RepositoryError as e:
1026 log.exception('Failure when doing local merge on git shadow repo')
1029 log.exception('Failure when doing local merge on git shadow repo')
1027 if isinstance(e, UnresolvedFilesInRepo):
1030 if isinstance(e, UnresolvedFilesInRepo):
1028 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1031 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1029
1032
1030 merge_possible = False
1033 merge_possible = False
1031 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1034 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1032
1035
1033 if merge_possible and not dry_run:
1036 if merge_possible and not dry_run:
1034 try:
1037 try:
1035 shadow_repo._local_push(
1038 shadow_repo._local_push(
1036 pr_branch, self.path, target_ref.name, enable_hooks=True,
1039 pr_branch, self.path, target_ref.name, enable_hooks=True,
1037 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1040 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1038 merge_succeeded = True
1041 merge_succeeded = True
1039 except RepositoryError:
1042 except RepositoryError:
1040 log.exception(
1043 log.exception(
1041 'Failure when doing local push from the shadow '
1044 'Failure when doing local push from the shadow '
1042 'repository to the target repository at %s.', self.path)
1045 'repository to the target repository at %s.', self.path)
1043 merge_succeeded = False
1046 merge_succeeded = False
1044 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1047 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1045 metadata['target'] = 'git shadow repo'
1048 metadata['target'] = 'git shadow repo'
1046 metadata['merge_commit'] = pr_branch
1049 metadata['merge_commit'] = pr_branch
1047 else:
1050 else:
1048 merge_succeeded = False
1051 merge_succeeded = False
1049
1052
1050 return MergeResponse(
1053 return MergeResponse(
1051 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1054 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1052 metadata=metadata)
1055 metadata=metadata)
@@ -1,402 +1,405 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG commit module
22 HG commit module
23 """
23 """
24
24
25 import os
25 import os
26
26
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from rhodecode.lib.datelib import utcdate_fromtimestamp
29 from rhodecode.lib.datelib import utcdate_fromtimestamp
30 from rhodecode.lib.utils import safe_str, safe_unicode
30 from rhodecode.lib.str_utils import safe_bytes, safe_str
31 from rhodecode.lib.vcs import path as vcspath
31 from rhodecode.lib.vcs import path as vcspath
32 from rhodecode.lib.vcs.backends import base
32 from rhodecode.lib.vcs.backends import base
33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
34 from rhodecode.lib.vcs.exceptions import CommitError
33 from rhodecode.lib.vcs.exceptions import CommitError
35 from rhodecode.lib.vcs.nodes import (
34 from rhodecode.lib.vcs.nodes import (
36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
35 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
36 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
38 LargeFileNode, LARGEFILE_PREFIX)
37 LargeFileNode)
39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
38 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
40
39
41
40
42 class MercurialCommit(base.BaseCommit):
41 class MercurialCommit(base.BaseCommit):
43 """
42 """
44 Represents state of the repository at the single commit.
43 Represents state of the repository at the single commit.
45 """
44 """
46
45
47 _filter_pre_load = [
46 _filter_pre_load = [
48 # git specific property not supported here
47 # git specific property not supported here
49 "_commit",
48 "_commit",
50 ]
49 ]
51
50
52 def __init__(self, repository, raw_id, idx, pre_load=None):
51 def __init__(self, repository, raw_id, idx, pre_load=None):
53 raw_id = safe_str(raw_id)
52 raw_id = safe_str(raw_id)
54
53
55 self.repository = repository
54 self.repository = repository
56 self._remote = repository._remote
55 self._remote = repository._remote
57
56
58 self.raw_id = raw_id
57 self.raw_id = raw_id
59 self.idx = idx
58 self.idx = idx
60
59
61 self._set_bulk_properties(pre_load)
60 self._set_bulk_properties(pre_load)
62
61
63 # caches
62 # caches
64 self.nodes = {}
63 self.nodes = {}
64 self._stat_modes = {} # stat info for paths
65
65
66 def _set_bulk_properties(self, pre_load):
66 def _set_bulk_properties(self, pre_load):
67 if not pre_load:
67 if not pre_load:
68 return
68 return
69 pre_load = [entry for entry in pre_load
69 pre_load = [entry for entry in pre_load
70 if entry not in self._filter_pre_load]
70 if entry not in self._filter_pre_load]
71 if not pre_load:
71 if not pre_load:
72 return
72 return
73
73
74 result = self._remote.bulk_request(self.raw_id, pre_load)
74 result = self._remote.bulk_request(self.raw_id, pre_load)
75
75
76 for attr, value in result.items():
76 for attr, value in result.items():
77 if attr in ["author", "branch", "message"]:
77 if attr in ["author", "branch", "message"]:
78 value = safe_unicode(value)
78 value = safe_str(value)
79 elif attr == "affected_files":
79 elif attr == "affected_files":
80 value = map(safe_unicode, value)
80 value = list(map(safe_str, value))
81 elif attr == "date":
81 elif attr == "date":
82 value = utcdate_fromtimestamp(*value)
82 value = utcdate_fromtimestamp(*value)
83 elif attr in ["children", "parents"]:
83 elif attr in ["children", "parents"]:
84 value = self._make_commits(value)
84 value = self._make_commits(value)
85 elif attr in ["phase"]:
85 elif attr in ["phase"]:
86 value = self._get_phase_text(value)
86 value = self._get_phase_text(value)
87 self.__dict__[attr] = value
87 self.__dict__[attr] = value
88
88
89 @LazyProperty
89 @LazyProperty
90 def tags(self):
90 def tags(self):
91 tags = [name for name, commit_id in self.repository.tags.items()
91 tags = [name for name, commit_id in self.repository.tags.items()
92 if commit_id == self.raw_id]
92 if commit_id == self.raw_id]
93 return tags
93 return tags
94
94
95 @LazyProperty
95 @LazyProperty
96 def branch(self):
96 def branch(self):
97 return safe_unicode(self._remote.ctx_branch(self.raw_id))
97 return safe_str(self._remote.ctx_branch(self.raw_id))
98
98
99 @LazyProperty
99 @LazyProperty
100 def bookmarks(self):
100 def bookmarks(self):
101 bookmarks = [
101 bookmarks = [
102 name for name, commit_id in self.repository.bookmarks.items()
102 name for name, commit_id in self.repository.bookmarks.items()
103 if commit_id == self.raw_id]
103 if commit_id == self.raw_id]
104 return bookmarks
104 return bookmarks
105
105
106 @LazyProperty
106 @LazyProperty
107 def message(self):
107 def message(self):
108 return safe_unicode(self._remote.ctx_description(self.raw_id))
108 return safe_str(self._remote.ctx_description(self.raw_id))
109
109
110 @LazyProperty
110 @LazyProperty
111 def committer(self):
111 def committer(self):
112 return safe_unicode(self.author)
112 return safe_str(self.author)
113
113
114 @LazyProperty
114 @LazyProperty
115 def author(self):
115 def author(self):
116 return safe_unicode(self._remote.ctx_user(self.raw_id))
116 return safe_str(self._remote.ctx_user(self.raw_id))
117
117
118 @LazyProperty
118 @LazyProperty
119 def date(self):
119 def date(self):
120 return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id))
120 return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id))
121
121
122 @LazyProperty
122 @LazyProperty
123 def status(self):
123 def status(self):
124 """
124 """
125 Returns modified, added, removed, deleted files for current commit
125 Returns modified, added, removed, deleted files for current commit
126 """
126 """
127 return self._remote.ctx_status(self.raw_id)
127 return self._remote.ctx_status(self.raw_id)
128
128
129 @LazyProperty
129 @LazyProperty
130 def _file_paths(self):
130 def _file_paths(self):
131 return self._remote.ctx_list(self.raw_id)
131 return self._remote.ctx_list(self.raw_id)
132
132
133 @LazyProperty
133 @LazyProperty
134 def _dir_paths(self):
134 def _dir_paths(self):
135 p = list(set(get_dirs_for_path(*self._file_paths)))
135 dir_paths = ['']
136 p.insert(0, '')
136 dir_paths.extend(list(set(get_dirs_for_path(*self._file_paths))))
137 return p
137
138 return dir_paths
138
139
139 @LazyProperty
140 @LazyProperty
140 def _paths(self):
141 def _paths(self):
141 return self._dir_paths + self._file_paths
142 return self._dir_paths + self._file_paths
142
143
143 @LazyProperty
144 @LazyProperty
144 def id(self):
145 def id(self):
145 if self.last:
146 if self.last:
146 return u'tip'
147 return 'tip'
147 return self.short_id
148 return self.short_id
148
149
149 @LazyProperty
150 @LazyProperty
150 def short_id(self):
151 def short_id(self):
151 return self.raw_id[:12]
152 return self.raw_id[:12]
152
153
153 def _make_commits(self, commit_ids, pre_load=None):
154 def _make_commits(self, commit_ids, pre_load=None):
154 return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
155 return [self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
155 for commit_id in commit_ids]
156 for commit_id in commit_ids]
156
157
157 @LazyProperty
158 @LazyProperty
158 def parents(self):
159 def parents(self):
159 """
160 """
160 Returns list of parent commits.
161 Returns list of parent commits.
161 """
162 """
162 parents = self._remote.ctx_parents(self.raw_id)
163 parents = self._remote.ctx_parents(self.raw_id)
163 return self._make_commits(parents)
164 return self._make_commits(parents)
164
165
165 def _get_phase_text(self, phase_id):
166 def _get_phase_text(self, phase_id):
166 return {
167 return {
167 0: 'public',
168 0: 'public',
168 1: 'draft',
169 1: 'draft',
169 2: 'secret',
170 2: 'secret',
170 }.get(phase_id) or ''
171 }.get(phase_id) or ''
171
172
172 @LazyProperty
173 @LazyProperty
173 def phase(self):
174 def phase(self):
174 phase_id = self._remote.ctx_phase(self.raw_id)
175 phase_id = self._remote.ctx_phase(self.raw_id)
175 phase_text = self._get_phase_text(phase_id)
176 phase_text = self._get_phase_text(phase_id)
176
177
177 return safe_unicode(phase_text)
178 return safe_str(phase_text)
178
179
179 @LazyProperty
180 @LazyProperty
180 def obsolete(self):
181 def obsolete(self):
181 obsolete = self._remote.ctx_obsolete(self.raw_id)
182 obsolete = self._remote.ctx_obsolete(self.raw_id)
182 return obsolete
183 return obsolete
183
184
184 @LazyProperty
185 @LazyProperty
185 def hidden(self):
186 def hidden(self):
186 hidden = self._remote.ctx_hidden(self.raw_id)
187 hidden = self._remote.ctx_hidden(self.raw_id)
187 return hidden
188 return hidden
188
189
189 @LazyProperty
190 @LazyProperty
190 def children(self):
191 def children(self):
191 """
192 """
192 Returns list of child commits.
193 Returns list of child commits.
193 """
194 """
194 children = self._remote.ctx_children(self.raw_id)
195 children = self._remote.ctx_children(self.raw_id)
195 return self._make_commits(children)
196 return self._make_commits(children)
196
197
197 def _fix_path(self, path):
198 """
199 Mercurial keeps filenodes as str so we need to encode from unicode
200 to str.
201 """
202 return safe_str(super(MercurialCommit, self)._fix_path(path))
203
204 def _get_kind(self, path):
198 def _get_kind(self, path):
205 path = self._fix_path(path)
199 path = self._fix_path(path)
206 if path in self._file_paths:
200 if path in self._file_paths:
207 return NodeKind.FILE
201 return NodeKind.FILE
208 elif path in self._dir_paths:
202 elif path in self._dir_paths:
209 return NodeKind.DIR
203 return NodeKind.DIR
210 else:
204 else:
211 raise CommitError(
205 raise CommitError(f"Node does not exist at the given path '{path}'")
212 "Node does not exist at the given path '%s'" % (path, ))
213
206
214 def _get_filectx(self, path):
207 def _assert_is_path(self, path) -> str:
215 path = self._fix_path(path)
208 path = self._fix_path(path)
216 if self._get_kind(path) != NodeKind.FILE:
209 if self._get_kind(path) != NodeKind.FILE:
217 raise CommitError(
210 raise CommitError(f"File does not exist for commit {self.raw_id} at '{path}'")
218 "File does not exist for idx %s at '%s'" % (self.raw_id, path))
211
219 return path
212 return path
220
213
221 def get_file_mode(self, path):
214 def get_file_mode(self, path: bytes):
222 """
215 """
223 Returns stat mode of the file at the given ``path``.
216 Returns stat mode of the file at the given ``path``.
224 """
217 """
225 path = self._get_filectx(path)
218 path = self._assert_is_path(path)
226 if 'x' in self._remote.fctx_flags(self.raw_id, path):
219
220 if path not in self._stat_modes:
221 self._stat_modes[path] = self._remote.fctx_flags(self.raw_id, path)
222
223 if 'x' in self._stat_modes[path]:
227 return base.FILEMODE_EXECUTABLE
224 return base.FILEMODE_EXECUTABLE
228 else:
225 return base.FILEMODE_DEFAULT
229 return base.FILEMODE_DEFAULT
230
226
231 def is_link(self, path):
227 def is_link(self, path):
232 path = self._get_filectx(path)
228 path = self._assert_is_path(path)
233 return 'l' in self._remote.fctx_flags(self.raw_id, path)
229 if path not in self._stat_modes:
230 self._stat_modes[path] = self._remote.fctx_flags(self.raw_id, path)
231
232 return 'l' in self._stat_modes[path]
234
233
235 def is_node_binary(self, path):
234 def is_node_binary(self, path):
236 path = self._get_filectx(path)
235 path = self._assert_is_path(path)
237 return self._remote.is_binary(self.raw_id, path)
236 return self._remote.is_binary(self.raw_id, path)
238
237
238 def node_md5_hash(self, path):
239 path = self._assert_is_path(path)
240 return self._remote.md5_hash(self.raw_id, path)
241
239 def get_file_content(self, path):
242 def get_file_content(self, path):
240 """
243 """
241 Returns content of the file at given ``path``.
244 Returns content of the file at given ``path``.
242 """
245 """
243 path = self._get_filectx(path)
246 path = self._assert_is_path(path)
244 return self._remote.fctx_node_data(self.raw_id, path)
247 return self._remote.fctx_node_data(self.raw_id, path)
245
248
246 def get_file_content_streamed(self, path):
249 def get_file_content_streamed(self, path):
247 path = self._get_filectx(path)
250 path = self._assert_is_path(path)
248 stream_method = getattr(self._remote, 'stream:fctx_node_data')
251 stream_method = getattr(self._remote, 'stream:fctx_node_data')
249 return stream_method(self.raw_id, path)
252 return stream_method(self.raw_id, path)
250
253
251 def get_file_size(self, path):
254 def get_file_size(self, path):
252 """
255 """
253 Returns size of the file at given ``path``.
256 Returns size of the file at given ``path``.
254 """
257 """
255 path = self._get_filectx(path)
258 path = self._assert_is_path(path)
256 return self._remote.fctx_size(self.raw_id, path)
259 return self._remote.fctx_size(self.raw_id, path)
257
260
258 def get_path_history(self, path, limit=None, pre_load=None):
261 def get_path_history(self, path, limit=None, pre_load=None):
259 """
262 """
260 Returns history of file as reversed list of `MercurialCommit` objects
263 Returns history of file as reversed list of `MercurialCommit` objects
261 for which file at given ``path`` has been modified.
264 for which file at given ``path`` has been modified.
262 """
265 """
263 path = self._get_filectx(path)
266 path = self._assert_is_path(path)
264 hist = self._remote.node_history(self.raw_id, path, limit)
267 hist = self._remote.node_history(self.raw_id, path, limit)
265 return [
268 return [
266 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
269 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
267 for commit_id in hist]
270 for commit_id in hist]
268
271
269 def get_file_annotate(self, path, pre_load=None):
272 def get_file_annotate(self, path, pre_load=None):
270 """
273 """
271 Returns a generator of four element tuples with
274 Returns a generator of four element tuples with
272 lineno, commit_id, commit lazy loader and line
275 lineno, commit_id, commit lazy loader and line
273 """
276 """
274 result = self._remote.fctx_annotate(self.raw_id, path)
277 result = self._remote.fctx_annotate(self.raw_id, path)
275
278
276 for ln_no, commit_id, content in result:
279 for ln_no, commit_id, content in result:
277 yield (
280 yield (
278 ln_no, commit_id,
281 ln_no, commit_id,
279 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
282 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
280 content)
283 content)
281
284
282 def get_nodes(self, path):
285 def get_nodes(self, path, pre_load=None):
283 """
286 """
284 Returns combined ``DirNode`` and ``FileNode`` objects list representing
287 Returns combined ``DirNode`` and ``FileNode`` objects list representing
285 state of commit at the given ``path``. If node at the given ``path``
288 state of commit at the given ``path``. If node at the given ``path``
286 is not instance of ``DirNode``, CommitError would be raised.
289 is not instance of ``DirNode``, CommitError would be raised.
287 """
290 """
288
291
289 if self._get_kind(path) != NodeKind.DIR:
292 if self._get_kind(path) != NodeKind.DIR:
290 raise CommitError(
293 raise CommitError(
291 "Directory does not exist for idx %s at '%s'" % (self.raw_id, path))
294 "Directory does not exist for idx %s at '%s'" % (self.raw_id, path))
292 path = self._fix_path(path)
295 path = self._fix_path(path)
293
296
294 filenodes = [
297 filenodes = [
295 FileNode(f, commit=self) for f in self._file_paths
298 FileNode(safe_bytes(f), commit=self, pre_load=pre_load) for f in self._file_paths
296 if os.path.dirname(f) == path]
299 if os.path.dirname(f) == path]
297 # TODO: johbo: Check if this can be done in a more obvious way
300 # TODO: johbo: Check if this can be done in a more obvious way
298 dirs = path == '' and '' or [
301 dirs = path == '' and '' or [
299 d for d in self._dir_paths
302 d for d in self._dir_paths
300 if d and vcspath.dirname(d) == path]
303 if d and vcspath.dirname(d) == path]
301 dirnodes = [
304 dirnodes = [
302 DirNode(d, commit=self) for d in dirs
305 DirNode(safe_bytes(d), commit=self) for d in dirs
303 if os.path.dirname(d) == path]
306 if os.path.dirname(d) == path]
304
307
305 alias = self.repository.alias
308 alias = self.repository.alias
306 for k, vals in self._submodules.items():
309 for k, vals in self._submodules.items():
307 if vcspath.dirname(k) == path:
310 if vcspath.dirname(k) == path:
308 loc = vals[0]
311 loc = vals[0]
309 commit = vals[1]
312 commit = vals[1]
310 dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias))
313 dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias))
311
314
312 nodes = dirnodes + filenodes
315 nodes = dirnodes + filenodes
313 for node in nodes:
316 for node in nodes:
314 if node.path not in self.nodes:
317 if node.path not in self.nodes:
315 self.nodes[node.path] = node
318 self.nodes[node.path] = node
316 nodes.sort()
319 nodes.sort()
317
320
318 return nodes
321 return nodes
319
322
320 def get_node(self, path, pre_load=None):
323 def get_node(self, path, pre_load=None):
321 """
324 """
322 Returns `Node` object from the given `path`. If there is no node at
325 Returns `Node` object from the given `path`. If there is no node at
323 the given `path`, `NodeDoesNotExistError` would be raised.
326 the given `path`, `NodeDoesNotExistError` would be raised.
324 """
327 """
325 path = self._fix_path(path)
328 path = self._fix_path(path)
326
329
327 if path not in self.nodes:
330 if path not in self.nodes:
328 if path in self._file_paths:
331 if path in self._file_paths:
329 node = FileNode(path, commit=self, pre_load=pre_load)
332 node = FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
330 elif path in self._dir_paths:
333 elif path in self._dir_paths:
331 if path == '':
334 if path == '':
332 node = RootNode(commit=self)
335 node = RootNode(commit=self)
333 else:
336 else:
334 node = DirNode(path, commit=self)
337 node = DirNode(safe_bytes(path), commit=self)
335 else:
338 else:
336 raise self.no_node_at_path(path)
339 raise self.no_node_at_path(path)
337
340
338 # cache node
341 # cache node
339 self.nodes[path] = node
342 self.nodes[path] = node
340 return self.nodes[path]
343 return self.nodes[path]
341
344
342 def get_largefile_node(self, path):
345 def get_largefile_node(self, path):
343 pointer_spec = self._remote.is_large_file(self.raw_id, path)
346 pointer_spec = self._remote.is_large_file(self.raw_id, path)
344 if pointer_spec:
347 if pointer_spec:
345 # content of that file regular FileNode is the hash of largefile
348 # content of that file regular FileNode is the hash of largefile
346 file_id = self.get_file_content(path).strip()
349 file_id = self.get_file_content(path).strip()
347
350
348 if self._remote.in_largefiles_store(file_id):
351 if self._remote.in_largefiles_store(file_id):
349 lf_path = self._remote.store_path(file_id)
352 lf_path = self._remote.store_path(file_id)
350 return LargeFileNode(lf_path, commit=self, org_path=path)
353 return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)
351 elif self._remote.in_user_cache(file_id):
354 elif self._remote.in_user_cache(file_id):
352 lf_path = self._remote.store_path(file_id)
355 lf_path = self._remote.store_path(file_id)
353 self._remote.link(file_id, path)
356 self._remote.link(file_id, path)
354 return LargeFileNode(lf_path, commit=self, org_path=path)
357 return LargeFileNode(safe_bytes(lf_path), commit=self, org_path=path)
355
358
356 @LazyProperty
359 @LazyProperty
357 def _submodules(self):
360 def _submodules(self):
358 """
361 """
359 Returns a dictionary with submodule information from substate file
362 Returns a dictionary with submodule information from substate file
360 of hg repository.
363 of hg repository.
361 """
364 """
362 return self._remote.ctx_substate(self.raw_id)
365 return self._remote.ctx_substate(self.raw_id)
363
366
364 @LazyProperty
367 @LazyProperty
365 def affected_files(self):
368 def affected_files(self):
366 """
369 """
367 Gets a fast accessible file changes for given commit
370 Gets a fast accessible file changes for given commit
368 """
371 """
369 return self._remote.ctx_files(self.raw_id)
372 return self._remote.ctx_files(self.raw_id)
370
373
371 @property
374 @property
372 def added(self):
375 def added(self):
373 """
376 """
374 Returns list of added ``FileNode`` objects.
377 Returns list of added ``FileNode`` objects.
375 """
378 """
376 return AddedFileNodesGenerator(self.added_paths, self)
379 return AddedFileNodesGenerator(self.added_paths, self)
377
380
378 @LazyProperty
381 @LazyProperty
379 def added_paths(self):
382 def added_paths(self):
380 return [n for n in self.status[1]]
383 return [n for n in self.status[1]]
381
384
382 @property
385 @property
383 def changed(self):
386 def changed(self):
384 """
387 """
385 Returns list of modified ``FileNode`` objects.
388 Returns list of modified ``FileNode`` objects.
386 """
389 """
387 return ChangedFileNodesGenerator(self.changed_paths, self)
390 return ChangedFileNodesGenerator(self.changed_paths, self)
388
391
389 @LazyProperty
392 @LazyProperty
390 def changed_paths(self):
393 def changed_paths(self):
391 return [n for n in self.status[0]]
394 return [n for n in self.status[0]]
392
395
393 @property
396 @property
394 def removed(self):
397 def removed(self):
395 """
398 """
396 Returns list of removed ``FileNode`` objects.
399 Returns list of removed ``FileNode`` objects.
397 """
400 """
398 return RemovedFileNodesGenerator(self.removed_paths, self)
401 return RemovedFileNodesGenerator(self.removed_paths, self)
399
402
400 @LazyProperty
403 @LazyProperty
401 def removed_paths(self):
404 def removed_paths(self):
402 return [n for n in self.status[2]]
405 return [n for n in self.status[2]]
@@ -1,49 +1,49 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG diff module
22 HG diff module
23 """
23 """
24
24
25 import re
25 import re
26
26
27 from rhodecode.lib.vcs.backends import base
27 from rhodecode.lib.vcs.backends import base
28
28
29
29
30 class MercurialDiff(base.Diff):
30 class MercurialDiff(base.Diff):
31
31
32 _header_re = re.compile(r"""
32 _header_re = re.compile(br"""
33 #^diff[ ]--git
33 #^diff[ ]--git
34 [ ]"?a/(?P<a_path>.+?)"?[ ]"?b/(?P<b_path>.+?)"?\n
34 [ ]"?a/(?P<a_path>.+?)"?[ ]"?b/(?P<b_path>.+?)"?\n
35 (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
35 (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
36 ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
36 ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
37 (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%(?:\n|$))?
37 (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%(?:\n|$))?
38 (?:^rename[ ]from[ ](?P<rename_from>[^\r\n]+)\n
38 (?:^rename[ ]from[ ](?P<rename_from>[^\r\n]+)\n
39 ^rename[ ]to[ ](?P<rename_to>[^\r\n]+)(?:\n|$))?
39 ^rename[ ]to[ ](?P<rename_to>[^\r\n]+)(?:\n|$))?
40 (?:^copy[ ]from[ ](?P<copy_from>[^\r\n]+)\n
40 (?:^copy[ ]from[ ](?P<copy_from>[^\r\n]+)\n
41 ^copy[ ]to[ ](?P<copy_to>[^\r\n]+)(?:\n|$))?
41 ^copy[ ]to[ ](?P<copy_to>[^\r\n]+)(?:\n|$))?
42 (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
42 (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
43 (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
43 (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
44 (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
44 (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
45 \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
45 \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
46 (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
46 (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
47 (?:^---[ ]("?a/(?P<a_file>.+)|/dev/null)(?:\n|$))?
47 (?:^---[ ]("?a/(?P<a_file>.+)|/dev/null)(?:\n|$))?
48 (?:^\+\+\+[ ]("?b/(?P<b_file>.+)|/dev/null)(?:\n|$))?
48 (?:^\+\+\+[ ]("?b/(?P<b_file>.+)|/dev/null)(?:\n|$))?
49 """, re.VERBOSE | re.MULTILINE)
49 """, re.VERBOSE | re.MULTILINE)
@@ -1,96 +1,96 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG inmemory module
22 HG inmemory module
23 """
23 """
24
24
25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
26 from rhodecode.lib.utils import safe_str
26 from rhodecode.lib.str_utils import safe_str
27 from rhodecode.lib.vcs.backends.base import BaseInMemoryCommit
27 from rhodecode.lib.vcs.backends.base import BaseInMemoryCommit
28 from rhodecode.lib.vcs.exceptions import RepositoryError
28 from rhodecode.lib.vcs.exceptions import RepositoryError
29
29
30
30
31 class MercurialInMemoryCommit(BaseInMemoryCommit):
31 class MercurialInMemoryCommit(BaseInMemoryCommit):
32
32
33 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
33 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
34 """
34 """
35 Performs in-memory commit (doesn't check workdir in any way) and
35 Performs in-memory commit (doesn't check workdir in any way) and
36 returns newly created `MercurialCommit`. Updates repository's
36 returns newly created `MercurialCommit`. Updates repository's
37 `commit_ids`.
37 `commit_ids`.
38
38
39 :param message: message of the commit
39 :param message: message of the commit
40 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
40 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
41 :param parents: single parent or sequence of parents from which commit
41 :param parents: single parent or sequence of parents from which commit
42 would be derived
42 would be derived
43 :param date: `datetime.datetime` instance. Defaults to
43 :param date: `datetime.datetime` instance. Defaults to
44 ``datetime.datetime.now()``.
44 ``datetime.datetime.now()``.
45 :param branch: Optional. Branch name as unicode. Will use the backend's
45 :param branch: Optional. Branch name as unicode. Will use the backend's
46 default if not given.
46 default if not given.
47
47
48 :raises `RepositoryError`: if any error occurs while committing
48 :raises `RepositoryError`: if any error occurs while committing
49 """
49 """
50 self.check_integrity(parents)
50 self.check_integrity(parents)
51
51
52 if not isinstance(message, str) or not isinstance(author, str):
52 if not isinstance(message, str) or not isinstance(author, str):
53 # TODO: johbo: Should be a TypeError
53 # TODO: johbo: Should be a TypeError
54 raise RepositoryError(
54 raise RepositoryError(
55 f'Given message and author needs to be '
55 f'Given message and author needs to be '
56 f'an <str> instance got {type(message)} & {type(author)} instead'
56 f'an <str> instance got {type(message)} & {type(author)} instead'
57 )
57 )
58
58
59 if branch is None:
59 if branch is None:
60 branch = self.repository.DEFAULT_BRANCH_NAME
60 branch = self.repository.DEFAULT_BRANCH_NAME
61 kwargs['branch'] = safe_str(branch)
61 kwargs['branch'] = safe_str(branch)
62
62
63 message = safe_str(message)
63 message = safe_str(message)
64 author = safe_str(author)
64 author = safe_str(author)
65
65
66 parent_ids = [p.raw_id if p else None for p in self.parents]
66 parent_ids = [p.raw_id if p else None for p in self.parents]
67
67
68 ENCODING = "UTF-8"
69
70 updated = []
68 updated = []
71 for node in self.added + self.changed:
69 for node in self.added + self.changed:
72 if node.is_binary:
70 content = node.content
73 content = node.content
71 # TODO: left for reference pre py3 migration, probably need to be removed
74 else:
72 # if node.is_binary:
75 content = node.content.encode(ENCODING)
73 # content = node.content
74 # else:
75 # content = node.content.encode(ENCODING)
76 updated.append({
76 updated.append({
77 'path': node.path,
77 'path': node.path,
78 'content': content,
78 'content': content,
79 'mode': node.mode,
79 'mode': node.mode,
80 })
80 })
81
81
82 removed = [node.path for node in self.removed]
82 removed = [node.path for node in self.removed]
83
83
84 date, tz = date_to_timestamp_plus_offset(date)
84 date, tz = date_to_timestamp_plus_offset(date)
85
85
86 commit_id = self.repository._remote.commitctx(
86 commit_id = self.repository._remote.commitctx(
87 message=message, parents=parent_ids,
87 message=message, parents=parent_ids,
88 commit_time=date, commit_timezone=tz, user=author,
88 commit_time=date, commit_timezone=tz, user=author,
89 files=self.get_paths(), extra=kwargs, removed=removed,
89 files=self.get_paths(), extra=kwargs, removed=removed,
90 updated=updated)
90 updated=updated)
91 self.repository.append_commit_id(commit_id)
91 self.repository.append_commit_id(commit_id)
92
92
93 self.repository.branches = self.repository._get_branches()
93 self.repository.branches = self.repository._get_branches()
94 tip = self.repository.get_commit(commit_id)
94 tip = self.repository.get_commit(commit_id)
95 self.reset()
95 self.reset()
96 return tip
96 return tip
@@ -1,1013 +1,1015 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import configparser
27 import configparser
28 import urllib.request, urllib.parse, urllib.error
28 import urllib.request
29 import urllib.parse
30 import urllib.error
29
31
30 from zope.cachedescriptors.property import Lazy as LazyProperty
32 from zope.cachedescriptors.property import Lazy as LazyProperty
31
33
32 from collections import OrderedDict
34 from collections import OrderedDict
33 from rhodecode.lib.datelib import (
35 from rhodecode.lib.datelib import (
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
36 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 from rhodecode.lib.utils import safe_unicode, safe_str
37 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.lib.utils2 import CachedProperty
38 from rhodecode.lib.utils2 import CachedProperty
37 from rhodecode.lib.vcs import connection, exceptions
39 from rhodecode.lib.vcs import connection, exceptions
38 from rhodecode.lib.vcs.backends.base import (
40 from rhodecode.lib.vcs.backends.base import (
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
41 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 MergeFailureReason, Reference, BasePathPermissionChecker)
42 MergeFailureReason, Reference, BasePathPermissionChecker)
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
43 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
44 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
45 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.exceptions import (
46 from rhodecode.lib.vcs.exceptions import (
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
48 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47
49
48 hexlify = binascii.hexlify
50 hexlify = binascii.hexlify
49 nullid = "\0" * 20
51 nullid = "\0" * 20
50
52
51 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
52
54
53
55
54 class MercurialRepository(BaseRepository):
56 class MercurialRepository(BaseRepository):
55 """
57 """
56 Mercurial repository backend
58 Mercurial repository backend
57 """
59 """
58 DEFAULT_BRANCH_NAME = 'default'
60 DEFAULT_BRANCH_NAME = 'default'
59
61
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
63 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
64 """
63 Raises RepositoryError if repository could not be found at the given
65 Raises RepositoryError if repository could not be found at the given
64 ``repo_path``.
66 ``repo_path``.
65
67
66 :param repo_path: local path of the repository
68 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
69 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
70 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
71 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
72 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
73 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
74 making a clone
73 :param bare: not used, compatible with other VCS
75 :param bare: not used, compatible with other VCS
74 """
76 """
75
77
76 self.path = safe_str(os.path.abspath(repo_path))
78 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
79 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
80 # because sometimes we init the repos with config we need to meet
79 # special requirements
81 # special requirements
80 self.config = config if config else self.get_default_config(
82 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
83 default=[('extensions', 'largefiles', '1')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
84 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
85
84 self._init_repo(create, src_url, do_workspace_checkout)
86 self._init_repo(create, src_url, do_workspace_checkout)
85
87
86 # caches
88 # caches
87 self._commit_ids = {}
89 self._commit_ids = {}
88
90
89 @LazyProperty
91 @LazyProperty
90 def _remote(self):
92 def _remote(self):
91 repo_id = self.path
93 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
94 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
95
94 @CachedProperty
96 @CachedProperty
95 def commit_ids(self):
97 def commit_ids(self):
96 """
98 """
97 Returns list of commit ids, in ascending order. Being lazy
99 Returns list of commit ids, in ascending order. Being lazy
98 attribute allows external tools to inject shas from cache.
100 attribute allows external tools to inject shas from cache.
99 """
101 """
100 commit_ids = self._get_all_commit_ids()
102 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
103 self._rebuild_cache(commit_ids)
102 return commit_ids
104 return commit_ids
103
105
104 def _rebuild_cache(self, commit_ids):
106 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = dict((commit_id, index)
107 self._commit_ids = dict((commit_id, index)
106 for index, commit_id in enumerate(commit_ids))
108 for index, commit_id in enumerate(commit_ids))
107
109
108 @CachedProperty
110 @CachedProperty
109 def branches(self):
111 def branches(self):
110 return self._get_branches()
112 return self._get_branches()
111
113
112 @CachedProperty
114 @CachedProperty
113 def branches_closed(self):
115 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
116 return self._get_branches(active=False, closed=True)
115
117
116 @CachedProperty
118 @CachedProperty
117 def branches_all(self):
119 def branches_all(self):
118 all_branches = {}
120 all_branches = {}
119 all_branches.update(self.branches)
121 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
122 all_branches.update(self.branches_closed)
121 return all_branches
123 return all_branches
122
124
123 def _get_branches(self, active=True, closed=False):
125 def _get_branches(self, active=True, closed=False):
124 """
126 """
125 Gets branches for this repository
127 Gets branches for this repository
126 Returns only open (not closed) active branches by default
128 Returns only open (not closed) active branches by default
127
129
128 :param active: return also active branches
130 :param active: return also active branches
129 :param closed: return also closed branches
131 :param closed: return also closed branches
130
132
131 """
133 """
132 if self.is_empty():
134 if self.is_empty():
133 return {}
135 return {}
134
136
135 def get_name(ctx):
137 def get_name(ctx):
136 return ctx[0]
138 return ctx[0]
137
139
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
140 _branches = [(n, h,) for n, h in
139 self._remote.branches(active, closed).items()]
141 self._remote.branches(active, closed).items()]
140
142
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
143 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
144
143 @CachedProperty
145 @CachedProperty
144 def tags(self):
146 def tags(self):
145 """
147 """
146 Gets tags for this repository
148 Gets tags for this repository
147 """
149 """
148 return self._get_tags()
150 return self._get_tags()
149
151
150 def _get_tags(self):
152 def _get_tags(self):
151 if self.is_empty():
153 if self.is_empty():
152 return {}
154 return {}
153
155
154 def get_name(ctx):
156 def get_name(ctx):
155 return ctx[0]
157 return ctx[0]
156
158
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
159 _tags = [(n, h,) for n, h in
158 self._remote.tags().items()]
160 self._remote.tags().items()]
159
161
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
162 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
163
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
164 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
165 """
164 Creates and returns a tag for the given ``commit_id``.
166 Creates and returns a tag for the given ``commit_id``.
165
167
166 :param name: name for new tag
168 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
169 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
170 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
171 :param message: message of the tag's commit
170 :param date: date of tag's commit
172 :param date: date of tag's commit
171
173
172 :raises TagAlreadyExistError: if tag with same name already exists
174 :raises TagAlreadyExistError: if tag with same name already exists
173 """
175 """
174 if name in self.tags:
176 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
177 raise TagAlreadyExistError("Tag %s already exists" % name)
176
178
177 commit = self.get_commit(commit_id=commit_id)
179 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
180 local = kwargs.setdefault('local', False)
179
181
180 if message is None:
182 if message is None:
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
183 message = "Added tag %s for commit %s" % (name, commit.short_id)
182
184
183 date, tz = date_to_timestamp_plus_offset(date)
185 date, tz = date_to_timestamp_plus_offset(date)
184
186
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
187 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
188 self._remote.invalidate_vcs_cache()
187
189
188 # Reinitialize tags
190 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
191 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
192 tag_id = self.tags[name]
191
193
192 return self.get_commit(commit_id=tag_id)
194 return self.get_commit(commit_id=tag_id)
193
195
194 def remove_tag(self, name, user, message=None, date=None):
196 def remove_tag(self, name, user, message=None, date=None):
195 """
197 """
196 Removes tag with the given `name`.
198 Removes tag with the given `name`.
197
199
198 :param name: name of the tag to be removed
200 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
201 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
202 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
203 :param date: date of tag's removal commit
202
204
203 :raises TagDoesNotExistError: if tag with given name does not exist
205 :raises TagDoesNotExistError: if tag with given name does not exist
204 """
206 """
205 if name not in self.tags:
207 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
208 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
209
208 if message is None:
210 if message is None:
209 message = "Removed tag %s" % name
211 message = "Removed tag %s" % name
210 local = False
212 local = False
211
213
212 date, tz = date_to_timestamp_plus_offset(date)
214 date, tz = date_to_timestamp_plus_offset(date)
213
215
214 self._remote.tag(name, nullid, message, local, user, date, tz)
216 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
217 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
218 self._invalidate_prop_cache('tags')
217
219
218 @LazyProperty
220 @LazyProperty
219 def bookmarks(self):
221 def bookmarks(self):
220 """
222 """
221 Gets bookmarks for this repository
223 Gets bookmarks for this repository
222 """
224 """
223 return self._get_bookmarks()
225 return self._get_bookmarks()
224
226
225 def _get_bookmarks(self):
227 def _get_bookmarks(self):
226 if self.is_empty():
228 if self.is_empty():
227 return {}
229 return {}
228
230
229 def get_name(ctx):
231 def get_name(ctx):
230 return ctx[0]
232 return ctx[0]
231
233
232 _bookmarks = [
234 _bookmarks = [
233 (safe_unicode(n), hexlify(h)) for n, h in
235 (n, h) for n, h in
234 self._remote.bookmarks().items()]
236 self._remote.bookmarks().items()]
235
237
236 return OrderedDict(sorted(_bookmarks, key=get_name))
238 return OrderedDict(sorted(_bookmarks, key=get_name))
237
239
238 def _get_all_commit_ids(self):
240 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
241 return self._remote.get_all_commit_ids('visible')
240
242
241 def get_diff(
243 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
244 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
245 context=3, path1=None):
244 """
246 """
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
247 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
248 `commit2` since `commit1`.
247
249
248 :param commit1: Entry point from which diff is shown. Can be
250 :param commit1: Entry point from which diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
251 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 the changes since empty state of the repository until `commit2`
252 the changes since empty state of the repository until `commit2`
251 :param commit2: Until which commit changes should be shown.
253 :param commit2: Until which commit changes should be shown.
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
254 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 changes. Defaults to ``False``.
255 changes. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
256 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
257 shown. Defaults to ``3``.
256 """
258 """
257 self._validate_diff_commits(commit1, commit2)
259 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
260 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
261 raise ValueError("Diff of two different paths not supported.")
260
262
261 if path:
263 if path:
262 file_filter = [self.path, path]
264 file_filter = [self.path, path]
263 else:
265 else:
264 file_filter = None
266 file_filter = None
265
267
266 diff = self._remote.diff(
268 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
269 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
270 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
271 context=context)
270 return MercurialDiff(diff)
272 return MercurialDiff(diff)
271
273
272 def strip(self, commit_id, branch=None):
274 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
275 self._remote.strip(commit_id, update=False, backup="none")
274
276
275 self._remote.invalidate_vcs_cache()
277 self._remote.invalidate_vcs_cache()
276 # clear cache
278 # clear cache
277 self._invalidate_prop_cache('commit_ids')
279 self._invalidate_prop_cache('commit_ids')
278
280
279 return len(self.commit_ids)
281 return len(self.commit_ids)
280
282
281 def verify(self):
283 def verify(self):
282 verify = self._remote.verify()
284 verify = self._remote.verify()
283
285
284 self._remote.invalidate_vcs_cache()
286 self._remote.invalidate_vcs_cache()
285 return verify
287 return verify
286
288
287 def hg_update_cache(self):
289 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
290 update_cache = self._remote.hg_update_cache()
289
291
290 self._remote.invalidate_vcs_cache()
292 self._remote.invalidate_vcs_cache()
291 return update_cache
293 return update_cache
292
294
293 def hg_rebuild_fn_cache(self):
295 def hg_rebuild_fn_cache(self):
294 update_cache = self._remote.hg_rebuild_fn_cache()
296 update_cache = self._remote.hg_rebuild_fn_cache()
295
297
296 self._remote.invalidate_vcs_cache()
298 self._remote.invalidate_vcs_cache()
297 return update_cache
299 return update_cache
298
300
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
301 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
302 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 self, commit_id1, repo2, commit_id2)
303 self, commit_id1, repo2, commit_id2)
302
304
303 if commit_id1 == commit_id2:
305 if commit_id1 == commit_id2:
304 return commit_id1
306 return commit_id1
305
307
306 ancestors = self._remote.revs_from_revspec(
308 ancestors = self._remote.revs_from_revspec(
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
309 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 other_path=repo2.path)
310 other_path=repo2.path)
309
311
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
312 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311
313
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
314 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 return ancestor_id
315 return ancestor_id
314
316
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
317 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 if commit_id1 == commit_id2:
318 if commit_id1 == commit_id2:
317 commits = []
319 commits = []
318 else:
320 else:
319 if merge:
321 if merge:
320 indexes = self._remote.revs_from_revspec(
322 indexes = self._remote.revs_from_revspec(
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
323 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
324 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 else:
325 else:
324 indexes = self._remote.revs_from_revspec(
326 indexes = self._remote.revs_from_revspec(
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
327 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 commit_id1, other_path=repo2.path)
328 commit_id1, other_path=repo2.path)
327
329
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
330 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 for idx in indexes]
331 for idx in indexes]
330
332
331 return commits
333 return commits
332
334
333 @staticmethod
335 @staticmethod
334 def check_url(url, config):
336 def check_url(url, config):
335 """
337 """
336 Function will check given url and try to verify if it's a valid
338 Function will check given url and try to verify if it's a valid
337 link. Sometimes it may happened that mercurial will issue basic
339 link. Sometimes it may happened that mercurial will issue basic
338 auth request that can cause whole API to hang when used from python
340 auth request that can cause whole API to hang when used from python
339 or other external calls.
341 or other external calls.
340
342
341 On failures it'll raise urllib.error.HTTPError, exception is also thrown
343 On failures it'll raise urllib.error.HTTPError, exception is also thrown
342 when the return code is non 200
344 when the return code is non 200
343 """
345 """
344 # check first if it's not a local url
346 # check first if it's not a local url
345 if os.path.isdir(url) or url.startswith('file:'):
347 if os.path.isdir(url) or url.startswith('file:'):
346 return True
348 return True
347
349
348 # Request the _remote to verify the url
350 # Request the _remote to verify the url
349 return connection.Hg.check_url(url, config.serialize())
351 return connection.Hg.check_url(url, config.serialize())
350
352
351 @staticmethod
353 @staticmethod
352 def is_valid_repository(path):
354 def is_valid_repository(path):
353 return os.path.isdir(os.path.join(path, '.hg'))
355 return os.path.isdir(os.path.join(path, '.hg'))
354
356
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
357 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
356 """
358 """
357 Function will check for mercurial repository in given path. If there
359 Function will check for mercurial repository in given path. If there
358 is no repository in that path it will raise an exception unless
360 is no repository in that path it will raise an exception unless
359 `create` parameter is set to True - in that case repository would
361 `create` parameter is set to True - in that case repository would
360 be created.
362 be created.
361
363
362 If `src_url` is given, would try to clone repository from the
364 If `src_url` is given, would try to clone repository from the
363 location at given clone_point. Additionally it'll make update to
365 location at given clone_point. Additionally it'll make update to
364 working copy accordingly to `do_workspace_checkout` flag.
366 working copy accordingly to `do_workspace_checkout` flag.
365 """
367 """
366 if create and os.path.exists(self.path):
368 if create and os.path.exists(self.path):
367 raise RepositoryError(
369 raise RepositoryError(
368 "Cannot create repository at %s, location already exist"
370 f"Cannot create repository at {self.path}, location already exist")
369 % self.path)
370
371
371 if src_url:
372 if src_url:
372 url = str(self._get_url(src_url))
373 url = str(self._get_url(src_url))
373 MercurialRepository.check_url(url, self.config)
374 MercurialRepository.check_url(url, self.config)
374
375
375 self._remote.clone(url, self.path, do_workspace_checkout)
376 self._remote.clone(url, self.path, do_workspace_checkout)
376
377
377 # Don't try to create if we've already cloned repo
378 # Don't try to create if we've already cloned repo
378 create = False
379 create = False
379
380
380 if create:
381 if create:
381 os.makedirs(self.path, mode=0o755)
382 os.makedirs(self.path, mode=0o755)
383
382 self._remote.localrepository(create)
384 self._remote.localrepository(create)
383
385
384 @LazyProperty
386 @LazyProperty
385 def in_memory_commit(self):
387 def in_memory_commit(self):
386 return MercurialInMemoryCommit(self)
388 return MercurialInMemoryCommit(self)
387
389
388 @LazyProperty
390 @LazyProperty
389 def description(self):
391 def description(self):
390 description = self._remote.get_config_value(
392 description = self._remote.get_config_value(
391 'web', 'description', untrusted=True)
393 'web', 'description', untrusted=True)
392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
394 return safe_str(description or self.DEFAULT_DESCRIPTION)
393
395
394 @LazyProperty
396 @LazyProperty
395 def contact(self):
397 def contact(self):
396 contact = (
398 contact = (
397 self._remote.get_config_value("web", "contact") or
399 self._remote.get_config_value("web", "contact") or
398 self._remote.get_config_value("ui", "username"))
400 self._remote.get_config_value("ui", "username"))
399 return safe_unicode(contact or self.DEFAULT_CONTACT)
401 return safe_str(contact or self.DEFAULT_CONTACT)
400
402
401 @LazyProperty
403 @LazyProperty
402 def last_change(self):
404 def last_change(self):
403 """
405 """
404 Returns last change made on this repository as
406 Returns last change made on this repository as
405 `datetime.datetime` object.
407 `datetime.datetime` object.
406 """
408 """
407 try:
409 try:
408 return self.get_commit().date
410 return self.get_commit().date
409 except RepositoryError:
411 except RepositoryError:
410 tzoffset = makedate()[1]
412 tzoffset = makedate()[1]
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
413 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412
414
413 def _get_fs_mtime(self):
415 def _get_fs_mtime(self):
414 # fallback to filesystem
416 # fallback to filesystem
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
417 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 st_path = os.path.join(self.path, '.hg', "store")
418 st_path = os.path.join(self.path, '.hg', "store")
417 if os.path.exists(cl_path):
419 if os.path.exists(cl_path):
418 return os.stat(cl_path).st_mtime
420 return os.stat(cl_path).st_mtime
419 else:
421 else:
420 return os.stat(st_path).st_mtime
422 return os.stat(st_path).st_mtime
421
423
422 def _get_url(self, url):
424 def _get_url(self, url):
423 """
425 """
424 Returns normalized url. If schema is not given, would fall back
426 Returns normalized url. If schema is not given, would fall back
425 to filesystem
427 to filesystem
426 (``file:///``) schema.
428 (``file:///``) schema.
427 """
429 """
428 url = url.encode('utf8')
429 if url != 'default' and '://' not in url:
430 if url != 'default' and '://' not in url:
430 url = "file:" + urllib.request.pathname2url(url)
431 url = "file:" + urllib.request.pathname2url(url)
431 return url
432 return url
432
433
433 def get_hook_location(self):
434 def get_hook_location(self):
434 """
435 """
435 returns absolute path to location where hooks are stored
436 returns absolute path to location where hooks are stored
436 """
437 """
437 return os.path.join(self.path, '.hg', '.hgrc')
438 return os.path.join(self.path, '.hg', '.hgrc')
438
439
439 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
440 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
440 translate_tag=None, maybe_unreachable=False, reference_obj=None):
441 translate_tag=None, maybe_unreachable=False, reference_obj=None):
441 """
442 """
442 Returns ``MercurialCommit`` object representing repository's
443 Returns ``MercurialCommit`` object representing repository's
443 commit at the given `commit_id` or `commit_idx`.
444 commit at the given `commit_id` or `commit_idx`.
444 """
445 """
445 if self.is_empty():
446 if self.is_empty():
446 raise EmptyRepositoryError("There are no commits yet")
447 raise EmptyRepositoryError("There are no commits yet")
447
448
448 if commit_id is not None:
449 if commit_id is not None:
449 self._validate_commit_id(commit_id)
450 self._validate_commit_id(commit_id)
450 try:
451 try:
451 # we have cached idx, use it without contacting the remote
452 # we have cached idx, use it without contacting the remote
452 idx = self._commit_ids[commit_id]
453 idx = self._commit_ids[commit_id]
453 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
454 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
454 except KeyError:
455 except KeyError:
455 pass
456 pass
456
457
457 elif commit_idx is not None:
458 elif commit_idx is not None:
458 self._validate_commit_idx(commit_idx)
459 self._validate_commit_idx(commit_idx)
459 try:
460 try:
460 _commit_id = self.commit_ids[commit_idx]
461 _commit_id = self.commit_ids[commit_idx]
461 if commit_idx < 0:
462 if commit_idx < 0:
462 commit_idx = self.commit_ids.index(_commit_id)
463 commit_idx = self.commit_ids.index(_commit_id)
463
464
464 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
465 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
465 except IndexError:
466 except IndexError:
466 commit_id = commit_idx
467 commit_id = commit_idx
467 else:
468 else:
468 commit_id = "tip"
469 commit_id = "tip"
469
470
470 #TODO: decide if we pass bytes or str into lookup ?
471 # case here is no cached version, do an actual lookup instead
471 # if isinstance(commit_id, unicode):
472 # commit_id = safe_str(commit_id)
473
474 try:
472 try:
475 raw_id, idx = self._remote.lookup(commit_id, both=True)
473 raw_id, idx = self._remote.lookup(commit_id, both=True)
476 except CommitDoesNotExistError:
474 except CommitDoesNotExistError:
477 msg = "Commit {} does not exist for `{}`".format(
475 msg = "Commit {} does not exist for `{}`".format(
478 *map(safe_str, [commit_id, self.name]))
476 *map(safe_str, [commit_id, self.name]))
479 raise CommitDoesNotExistError(msg)
477 raise CommitDoesNotExistError(msg)
480
478
481 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
479 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
482
480
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :param pre_load: attributes to pre-load on each yielded commit
        :param translate_tags: accepted for interface compatibility with other
          backends; not referenced by this implementation
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # kept for the ancestors() filter variant below; never enabled here
        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            # resolve the commit id to its index position in the full history
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        # make the end of the slice inclusive
        if end_pos is not None:
            end_pos += 1

        # build a Mercurial revset expression from the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # a filtered query must go through the vcsserver revset evaluation
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
565
563
566 def pull(self, url, commit_ids=None):
564 def pull(self, url, commit_ids=None):
567 """
565 """
568 Pull changes from external location.
566 Pull changes from external location.
569
567
570 :param commit_ids: Optional. Can be set to a list of commit ids
568 :param commit_ids: Optional. Can be set to a list of commit ids
571 which shall be pulled from the other repository.
569 which shall be pulled from the other repository.
572 """
570 """
573 url = self._get_url(url)
571 url = self._get_url(url)
574 self._remote.pull(url, commit_ids=commit_ids)
572 self._remote.pull(url, commit_ids=commit_ids)
575 self._remote.invalidate_vcs_cache()
573 self._remote.invalidate_vcs_cache()
576
574
577 def fetch(self, url, commit_ids=None):
575 def fetch(self, url, commit_ids=None):
578 """
576 """
579 Backward compatibility with GIT fetch==pull
577 Backward compatibility with GIT fetch==pull
580 """
578 """
581 return self.pull(url, commit_ids=commit_ids)
579 return self.pull(url, commit_ids=commit_ids)
582
580
583 def push(self, url):
581 def push(self, url):
584 url = self._get_url(url)
582 url = self._get_url(url)
585 self._remote.sync_push(url)
583 self._remote.sync_push(url)
586
584
587 def _local_clone(self, clone_path):
585 def _local_clone(self, clone_path):
588 """
586 """
589 Create a local clone of the current repo.
587 Create a local clone of the current repo.
590 """
588 """
591 self._remote.clone(self.path, clone_path, update_after_clone=True,
589 self._remote.clone(self.path, clone_path, update_after_clone=True,
592 hooks=False)
590 hooks=False)
593
591
594 def _update(self, revision, clean=False):
592 def _update(self, revision, clean=False):
595 """
593 """
596 Update the working copy to the specified revision.
594 Update the working copy to the specified revision.
597 """
595 """
598 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
596 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
599 self._remote.update(revision, clean=clean)
597 self._remote.update(revision, clean=clean)
600
598
601 def _identify(self):
599 def _identify(self):
602 """
600 """
603 Return the current state of the working directory.
601 Return the current state of the working directory.
604 """
602 """
605 return self._remote.identify().strip().rstrip('+')
603 return self._remote.identify().strip().rstrip('+')
606
604
607 def _heads(self, branch=None):
605 def _heads(self, branch=None):
608 """
606 """
609 Return the commit ids of the repository heads.
607 Return the commit ids of the repository heads.
610 """
608 """
611 return self._remote.heads(branch=branch).strip().split(' ')
609 return self._remote.heads(branch=branch).strip().split(' ')
612
610
613 def _ancestor(self, revision1, revision2):
611 def _ancestor(self, revision1, revision2):
614 """
612 """
615 Return the common ancestor of the two revisions.
613 Return the common ancestor of the two revisions.
616 """
614 """
617 return self._remote.ancestor(revision1, revision2)
615 return self._remote.ancestor(revision1, revision2)
618
616
619 def _local_push(
617 def _local_push(
620 self, revision, repository_path, push_branches=False,
618 self, revision, repository_path, push_branches=False,
621 enable_hooks=False):
619 enable_hooks=False):
622 """
620 """
623 Push the given revision to the specified repository.
621 Push the given revision to the specified repository.
624
622
625 :param push_branches: allow to create branches in the target repo.
623 :param push_branches: allow to create branches in the target repo.
626 """
624 """
627 self._remote.push(
625 self._remote.push(
628 [revision], repository_path, hooks=enable_hooks,
626 [revision], repository_path, hooks=enable_hooks,
629 push_branches=push_branches)
627 push_branches=push_branches)
630
628
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param use_rebase: when True the source is rebased onto the target
            instead of being merged.
        :param close_commit_id: optional id of a branch-close commit which
            replaces the source commit id as the effective merge source.
        :param dry_run: accepted for interface compatibility; not referenced
            by this implementation.
        :raises UnresolvedFilesInRepo: when the merge/rebase left conflicts.
        :raises RepositoryError: for any other merge/rebase failure.
        """
        source_ref_commit_id = source_ref.commit_id
        target_ref_commit_id = target_ref.commit_id

        # update our workdir to target ref, for proper merge
        self._update(target_ref_commit_id, clean=True)

        ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if close_commit_id:
            # NOTE(marcink): if we get the close commit, this is our new source
            # which will include the close commit itself.
            source_ref_commit_id = close_commit_id

        if ancestor == source_ref_commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref_commit_id, False

        elif ancestor == target_ref_commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref_commit_id, True

        unresolved = None
        if use_rebase:
            try:
                # temporary bookmark so the rebased head can be checked out
                bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref_commit_id, dest=target_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
711
709
712 def _local_close(self, target_ref, user_name, user_email,
710 def _local_close(self, target_ref, user_name, user_email,
713 source_ref, close_message=''):
711 source_ref, close_message=''):
714 """
712 """
715 Close the branch of the given source_revision
713 Close the branch of the given source_revision
716
714
717 Returns the commit id of the close and a boolean indicating if the
715 Returns the commit id of the close and a boolean indicating if the
718 commit needs to be pushed.
716 commit needs to be pushed.
719 """
717 """
720 self._update(source_ref.commit_id)
718 self._update(source_ref.commit_id)
721 message = close_message or "Closing branch: `{}`".format(source_ref.name)
719 message = close_message or "Closing branch: `{}`".format(source_ref.name)
722 try:
720 try:
723 self._remote.commit(
721 self._remote.commit(
724 message=safe_str(message),
722 message=safe_str(message),
725 username=safe_str('%s <%s>' % (user_name, user_email)),
723 username=safe_str('%s <%s>' % (user_name, user_email)),
726 close_branch=True)
724 close_branch=True)
727 self._remote.invalidate_vcs_cache()
725 self._remote.invalidate_vcs_cache()
728 return self._identify(), True
726 return self._identify(), True
729 except RepositoryError:
727 except RepositoryError:
730 # Cleanup any commit leftovers
728 # Cleanup any commit leftovers
731 self._remote.update(clean=True)
729 self._remote.update(clean=True)
732 raise
730 raise
733
731
734 def _is_the_same_branch(self, target_ref, source_ref):
732 def _is_the_same_branch(self, target_ref, source_ref):
735 return (
733 return (
736 self._get_branch_name(target_ref) ==
734 self._get_branch_name(target_ref) ==
737 self._get_branch_name(source_ref))
735 self._get_branch_name(source_ref))
738
736
739 def _get_branch_name(self, ref):
737 def _get_branch_name(self, ref):
740 if ref.type == 'branch':
738 if ref.type == 'branch':
741 return ref.name
739 return ref.name
742 return self._remote.ctx_branch(ref.commit_id)
740 return self._remote.ctx_branch(ref.commit_id)
743
741
744 def _maybe_prepare_merge_workspace(
742 def _maybe_prepare_merge_workspace(
745 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
743 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
746 shadow_repository_path = self._get_shadow_repository_path(
744 shadow_repository_path = self._get_shadow_repository_path(
747 self.path, repo_id, workspace_id)
745 self.path, repo_id, workspace_id)
748 if not os.path.exists(shadow_repository_path):
746 if not os.path.exists(shadow_repository_path):
749 self._local_clone(shadow_repository_path)
747 self._local_clone(shadow_repository_path)
750 log.debug(
748 log.debug(
751 'Prepared shadow repository in %s', shadow_repository_path)
749 'Prepared shadow repository in %s', shadow_repository_path)
752
750
753 return shadow_repository_path
751 return shadow_repository_path
754
752
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` from ``source_repo`` into ``target_ref`` of this
        repository via a shadow repository.

        Both refs are pulled into a shadow clone where the merge (or rebase)
        is attempted; only when it succeeds and ``dry_run`` is off is the
        result pushed back into this repository. Returns a ``MergeResponse``
        describing the outcome and any failure reason/metadata.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # the target commit must be a current head, otherwise the push-back
        # of the merge result would create a new head in the target repo
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                # multiple heads on the target branch: refuse and report a
                # truncated list of the head commit ids
                heads_all = self._heads(target_ref.name)
                max_heads = 10
                if len(heads_all) > max_heads:
                    heads = '\n,'.join(
                        heads_all[:max_heads] +
                        ['and {} more.'.format(len(heads_all)-max_heads)])
                else:
                    heads = '\n,'.join(heads_all)
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:

            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase,
                    close_commit_id=close_commit_id, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                if isinstance(e, UnresolvedFilesInRepo):
                    # surface a truncated conflict list in the response metadata
                    all_conflicts = list(e.args[0])
                    max_conflicts = 20
                    if len(all_conflicts) > max_conflicts:
                        conflicts = all_conflicts[:max_conflicts] \
                            + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
                    else:
                        conflicts = all_conflicts
                    metadata['unresolved_files'] = \
                        '\n* conflict: ' + \
                        ('\n * conflict: '.join(conflicts))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
923
921
924 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
922 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
925 config = self.config.copy()
923 config = self.config.copy()
926 if not enable_hooks:
924 if not enable_hooks:
927 config.clear_section('hooks')
925 config.clear_section('hooks')
928 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
926 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
929
927
930 def _validate_pull_reference(self, reference):
928 def _validate_pull_reference(self, reference):
931 if not (reference.name in self.bookmarks or
929 if not (reference.name in self.bookmarks or
932 reference.name in self.branches or
930 reference.name in self.branches or
933 self.get_commit(reference.commit_id)):
931 self.get_commit(reference.commit_id)):
934 raise CommitDoesNotExistError(
932 raise CommitDoesNotExistError(
935 'Unknown branch, bookmark or commit id')
933 'Unknown branch, bookmark or commit id')
936
934
937 def _local_pull(self, repository_path, reference):
935 def _local_pull(self, repository_path, reference):
938 """
936 """
939 Fetch a branch, bookmark or commit from a local repository.
937 Fetch a branch, bookmark or commit from a local repository.
940 """
938 """
941 repository_path = os.path.abspath(repository_path)
939 repository_path = os.path.abspath(repository_path)
942 if repository_path == self.path:
940 if repository_path == self.path:
943 raise ValueError('Cannot pull from the same repository')
941 raise ValueError('Cannot pull from the same repository')
944
942
945 reference_type_to_option_name = {
943 reference_type_to_option_name = {
946 'book': 'bookmark',
944 'book': 'bookmark',
947 'branch': 'branch',
945 'branch': 'branch',
948 }
946 }
949 option_name = reference_type_to_option_name.get(
947 option_name = reference_type_to_option_name.get(
950 reference.type, 'revision')
948 reference.type, 'revision')
951
949
952 if option_name == 'revision':
950 if option_name == 'revision':
953 ref = reference.commit_id
951 ref = reference.commit_id
954 else:
952 else:
955 ref = reference.name
953 ref = reference.name
956
954
957 options = {option_name: [ref]}
955 options = {option_name: [ref]}
958 self._remote.pull_cmd(repository_path, hooks=False, **options)
956 self._remote.pull_cmd(repository_path, hooks=False, **options)
959 self._remote.invalidate_vcs_cache()
957 self._remote.invalidate_vcs_cache()
960
958
961 def bookmark(self, bookmark, revision=None):
959 def bookmark(self, bookmark, revision=None):
962 if isinstance(bookmark, str):
960 if isinstance(bookmark, str):
963 bookmark = safe_str(bookmark)
961 bookmark = safe_str(bookmark)
964 self._remote.bookmark(bookmark, revision=revision)
962 self._remote.bookmark(bookmark, revision=revision)
965 self._remote.invalidate_vcs_cache()
963 self._remote.invalidate_vcs_cache()
966
964
967 def get_path_permissions(self, username):
965 def get_path_permissions(self, username):
968 hgacl_file = os.path.join(self.path, '.hg/hgacl')
966 hgacl_file = os.path.join(self.path, '.hg/hgacl')
969
967
970 def read_patterns(suffix):
968 def read_patterns(suffix):
971 svalue = None
969 svalue = None
972 for section, option in [
970 for section, option in [
973 ('narrowacl', username + suffix),
971 ('narrowacl', username + suffix),
974 ('narrowacl', 'default' + suffix),
972 ('narrowacl', 'default' + suffix),
975 ('narrowhgacl', username + suffix),
973 ('narrowhgacl', username + suffix),
976 ('narrowhgacl', 'default' + suffix)
974 ('narrowhgacl', 'default' + suffix)
977 ]:
975 ]:
978 try:
976 try:
979 svalue = hgacl.get(section, option)
977 svalue = hgacl.get(section, option)
980 break # stop at the first value we find
978 break # stop at the first value we find
981 except configparser.NoOptionError:
979 except configparser.NoOptionError:
982 pass
980 pass
983 if not svalue:
981 if not svalue:
984 return None
982 return None
985 result = ['/']
983 result = ['/']
986 for pattern in svalue.split():
984 for pattern in svalue.split():
987 result.append(pattern)
985 result.append(pattern)
988 if '*' not in pattern and '?' not in pattern:
986 if '*' not in pattern and '?' not in pattern:
989 result.append(pattern + '/*')
987 result.append(pattern + '/*')
990 return result
988 return result
991
989
992 if os.path.exists(hgacl_file):
990 if os.path.exists(hgacl_file):
993 try:
991 try:
994 hgacl = configparser.RawConfigParser()
992 hgacl = configparser.RawConfigParser()
995 hgacl.read(hgacl_file)
993 hgacl.read(hgacl_file)
996
994
997 includes = read_patterns('.includes')
995 includes = read_patterns('.includes')
998 excludes = read_patterns('.excludes')
996 excludes = read_patterns('.excludes')
999 return BasePathPermissionChecker.create_from_patterns(
997 return BasePathPermissionChecker.create_from_patterns(
1000 includes, excludes)
998 includes, excludes)
1001 except BaseException as e:
999 except BaseException as e:
1002 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1000 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1003 hgacl_file, self.name, e)
1001 hgacl_file, self.name, e)
1004 raise exceptions.RepositoryRequirementError(msg)
1002 raise exceptions.RepositoryRequirementError(msg)
1005 else:
1003 else:
1006 return None
1004 return None
1007
1005
1008
1006
1009 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1007 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1010
1008
1011 def _commit_factory(self, commit_id):
1009 def _commit_factory(self, commit_id):
1012 return self.repo.get_commit(
1010 if isinstance(commit_id, int):
1013 commit_idx=commit_id, pre_load=self.pre_load)
1011 return self.repo.get_commit(
1012 commit_idx=commit_id, pre_load=self.pre_load)
1013 else:
1014 return self.repo.get_commit(
1015 commit_id=commit_id, pre_load=self.pre_load)
@@ -1,253 +1,256 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SVN commit module
22 SVN commit module
23 """
23 """
24
24
25
25
26 import dateutil.parser
26 import dateutil.parser
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from rhodecode.lib.utils import safe_str, safe_unicode
29 from rhodecode.lib.str_utils import safe_bytes, safe_str
30 from rhodecode.lib.vcs import nodes, path as vcspath
30 from rhodecode.lib.vcs import nodes, path as vcspath
31 from rhodecode.lib.vcs.backends import base
31 from rhodecode.lib.vcs.backends import base
32 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
32 from rhodecode.lib.vcs.exceptions import CommitError
33
33
34
34
35 _SVN_PROP_TRUE = '*'
35 _SVN_PROP_TRUE = '*'
36
36
37
37
38 class SubversionCommit(base.BaseCommit):
38 class SubversionCommit(base.BaseCommit):
39 """
39 """
40 Subversion specific implementation of commits
40 Subversion specific implementation of commits
41
41
42 .. attribute:: branch
42 .. attribute:: branch
43
43
44 The Subversion backend does not support to assign branches to
44 The Subversion backend does not support to assign branches to
45 specific commits. This attribute has always the value `None`.
45 specific commits. This attribute has always the value `None`.
46
46
47 """
47 """
48
48
49 def __init__(self, repository, commit_id):
49 def __init__(self, repository, commit_id):
50 self.repository = repository
50 self.repository = repository
51 self.idx = self.repository._get_commit_idx(commit_id)
51 self.idx = self.repository._get_commit_idx(commit_id)
52 self._svn_rev = self.idx + 1
52 self._svn_rev = self.idx + 1
53 self._remote = repository._remote
53 self._remote = repository._remote
54 # TODO: handling of raw_id should be a method on repository itself,
54 # TODO: handling of raw_id should be a method on repository itself,
55 # which knows how to translate commit index and commit id
55 # which knows how to translate commit index and commit id
56 self.raw_id = commit_id
56 self.raw_id = commit_id
57 self.short_id = commit_id
57 self.short_id = commit_id
58 self.id = 'r%s' % (commit_id, )
58 self.id = 'r%s' % (commit_id, )
59
59
60 # TODO: Implement the following placeholder attributes
60 # TODO: Implement the following placeholder attributes
61 self.nodes = {}
61 self.nodes = {}
62 self.tags = []
62 self.tags = []
63
63
64 @property
64 @property
65 def author(self):
65 def author(self):
66 return safe_unicode(self._properties.get('svn:author'))
66 return safe_str(self._properties.get('svn:author'))
67
67
68 @property
68 @property
69 def date(self):
69 def date(self):
70 return _date_from_svn_properties(self._properties)
70 return _date_from_svn_properties(self._properties)
71
71
72 @property
72 @property
73 def message(self):
73 def message(self):
74 return safe_unicode(self._properties.get('svn:log'))
74 return safe_str(self._properties.get('svn:log'))
75
75
76 @LazyProperty
76 @LazyProperty
77 def _properties(self):
77 def _properties(self):
78 return self._remote.revision_properties(self._svn_rev)
78 return self._remote.revision_properties(self._svn_rev)
79
79
80 @LazyProperty
80 @LazyProperty
81 def parents(self):
81 def parents(self):
82 parent_idx = self.idx - 1
82 parent_idx = self.idx - 1
83 if parent_idx >= 0:
83 if parent_idx >= 0:
84 parent = self.repository.get_commit(commit_idx=parent_idx)
84 parent = self.repository.get_commit(commit_idx=parent_idx)
85 return [parent]
85 return [parent]
86 return []
86 return []
87
87
88 @LazyProperty
88 @LazyProperty
89 def children(self):
89 def children(self):
90 child_idx = self.idx + 1
90 child_idx = self.idx + 1
91 if child_idx < len(self.repository.commit_ids):
91 if child_idx < len(self.repository.commit_ids):
92 child = self.repository.get_commit(commit_idx=child_idx)
92 child = self.repository.get_commit(commit_idx=child_idx)
93 return [child]
93 return [child]
94 return []
94 return []
95
95
96 def get_file_mode(self, path):
96 def get_file_mode(self, path: bytes):
97 # Note: Subversion flags files which are executable with a special
97 # Note: Subversion flags files which are executable with a special
98 # property `svn:executable` which is set to the value ``"*"``.
98 # property `svn:executable` which is set to the value ``"*"``.
99 if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE:
99 if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE:
100 return base.FILEMODE_EXECUTABLE
100 return base.FILEMODE_EXECUTABLE
101 else:
101 else:
102 return base.FILEMODE_DEFAULT
102 return base.FILEMODE_DEFAULT
103
103
104 def is_link(self, path):
104 def is_link(self, path):
105 # Note: Subversion has a flag for special files, the content of the
105 # Note: Subversion has a flag for special files, the content of the
106 # file contains the type of that file.
106 # file contains the type of that file.
107 if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE:
107 if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE:
108 return self.get_file_content(path).startswith('link')
108 return self.get_file_content(path).startswith(b'link')
109 return False
109 return False
110
110
111 def is_node_binary(self, path):
111 def is_node_binary(self, path):
112 path = self._fix_path(path)
112 path = self._fix_path(path)
113 return self._remote.is_binary(self._svn_rev, safe_str(path))
113 return self._remote.is_binary(self._svn_rev, safe_str(path))
114
114
115 def node_md5_hash(self, path):
116 path = self._fix_path(path)
117 return self._remote.md5_hash(self._svn_rev, safe_str(path))
118
115 def _get_file_property(self, path, name):
119 def _get_file_property(self, path, name):
116 file_properties = self._remote.node_properties(
120 file_properties = self._remote.node_properties(
117 safe_str(path), self._svn_rev)
121 safe_str(path), self._svn_rev)
118 return file_properties.get(name)
122 return file_properties.get(name)
119
123
120 def get_file_content(self, path):
124 def get_file_content(self, path):
121 path = self._fix_path(path)
125 path = self._fix_path(path)
122 return self._remote.get_file_content(safe_str(path), self._svn_rev)
126 return self._remote.get_file_content(self._svn_rev, safe_str(path))
123
127
124 def get_file_content_streamed(self, path):
128 def get_file_content_streamed(self, path):
125 path = self._fix_path(path)
129 path = self._fix_path(path)
130
126 stream_method = getattr(self._remote, 'stream:get_file_content')
131 stream_method = getattr(self._remote, 'stream:get_file_content')
127 return stream_method(safe_str(path), self._svn_rev)
132 return stream_method(self._svn_rev, safe_str(path))
128
133
129 def get_file_size(self, path):
134 def get_file_size(self, path):
130 path = self._fix_path(path)
135 path = self._fix_path(path)
131 return self._remote.get_file_size(safe_str(path), self._svn_rev)
136 return self._remote.get_file_size(self._svn_rev, safe_str(path))
132
137
133 def get_path_history(self, path, limit=None, pre_load=None):
138 def get_path_history(self, path, limit=None, pre_load=None):
134 path = safe_str(self._fix_path(path))
139 path = safe_str(self._fix_path(path))
135 history = self._remote.node_history(path, self._svn_rev, limit)
140 history = self._remote.node_history(path, self._svn_rev, limit)
136 return [
141 return [
137 self.repository.get_commit(commit_id=str(svn_rev))
142 self.repository.get_commit(commit_id=str(svn_rev))
138 for svn_rev in history]
143 for svn_rev in history]
139
144
140 def get_file_annotate(self, path, pre_load=None):
145 def get_file_annotate(self, path, pre_load=None):
141 result = self._remote.file_annotate(safe_str(path), self._svn_rev)
146 result = self._remote.file_annotate(safe_str(path), self._svn_rev)
142
147
143 for zero_based_line_no, svn_rev, content in result:
148 for zero_based_line_no, svn_rev, content in result:
144 commit_id = str(svn_rev)
149 commit_id = str(svn_rev)
145 line_no = zero_based_line_no + 1
150 line_no = zero_based_line_no + 1
146 yield (
151 yield (
147 line_no,
152 line_no,
148 commit_id,
153 commit_id,
149 lambda: self.repository.get_commit(commit_id=commit_id),
154 lambda: self.repository.get_commit(commit_id=commit_id),
150 content)
155 content)
151
156
152 def get_node(self, path, pre_load=None):
157 def get_node(self, path, pre_load=None):
153 path = self._fix_path(path)
158 path = self._fix_path(path)
154 if path not in self.nodes:
159 if path not in self.nodes:
155
160
156 if path == '':
161 if path == '':
157 node = nodes.RootNode(commit=self)
162 node = nodes.RootNode(commit=self)
158 else:
163 else:
159 node_type = self._remote.get_node_type(
164 node_type = self._remote.get_node_type(self._svn_rev, safe_str(path))
160 safe_str(path), self._svn_rev)
161 if node_type == 'dir':
165 if node_type == 'dir':
162 node = nodes.DirNode(path, commit=self)
166 node = nodes.DirNode(safe_bytes(path), commit=self)
163 elif node_type == 'file':
167 elif node_type == 'file':
164 node = nodes.FileNode(path, commit=self, pre_load=pre_load)
168 node = nodes.FileNode(safe_bytes(path), commit=self, pre_load=pre_load)
165 else:
169 else:
166 raise self.no_node_at_path(path)
170 raise self.no_node_at_path(path)
167
171
168 self.nodes[path] = node
172 self.nodes[path] = node
169 return self.nodes[path]
173 return self.nodes[path]
170
174
171 def get_nodes(self, path):
175 def get_nodes(self, path, pre_load=None):
172 if self._get_kind(path) != nodes.NodeKind.DIR:
176 if self._get_kind(path) != nodes.NodeKind.DIR:
173 raise CommitError(
177 raise CommitError(
174 "Directory does not exist for commit %s at "
178 f"Directory does not exist for commit {self.raw_id} at '{path}'")
175 " '%s'" % (self.raw_id, path))
176 path = safe_str(self._fix_path(path))
179 path = safe_str(self._fix_path(path))
177
180
178 path_nodes = []
181 path_nodes = []
179 for name, kind in self._remote.get_nodes(path, revision=self._svn_rev):
182 for name, kind in self._remote.get_nodes(self._svn_rev, path):
180 node_path = vcspath.join(path, name)
183 node_path = vcspath.join(path, name)
181 if kind == 'dir':
184 if kind == 'dir':
182 node = nodes.DirNode(node_path, commit=self)
185 node = nodes.DirNode(safe_bytes(node_path), commit=self)
183 elif kind == 'file':
186 elif kind == 'file':
184 node = nodes.FileNode(node_path, commit=self)
187 node = nodes.FileNode(safe_bytes(node_path), commit=self, pre_load=pre_load)
185 else:
188 else:
186 raise ValueError("Node kind %s not supported." % (kind, ))
189 raise ValueError(f"Node kind {kind} not supported.")
187 self.nodes[node_path] = node
190 self.nodes[node_path] = node
188 path_nodes.append(node)
191 path_nodes.append(node)
189
192
190 return path_nodes
193 return path_nodes
191
194
192 def _get_kind(self, path):
195 def _get_kind(self, path):
193 path = self._fix_path(path)
196 path = self._fix_path(path)
194 kind = self._remote.get_node_type(path, self._svn_rev)
197 kind = self._remote.get_node_type(self._svn_rev, path)
195 if kind == 'file':
198 if kind == 'file':
196 return nodes.NodeKind.FILE
199 return nodes.NodeKind.FILE
197 elif kind == 'dir':
200 elif kind == 'dir':
198 return nodes.NodeKind.DIR
201 return nodes.NodeKind.DIR
199 else:
202 else:
200 raise CommitError(
203 raise CommitError(
201 "Node does not exist at the given path '%s'" % (path, ))
204 "Node does not exist at the given path '%s'" % (path, ))
202
205
203 @LazyProperty
206 @LazyProperty
204 def _changes_cache(self):
207 def _changes_cache(self):
205 return self._remote.revision_changes(self._svn_rev)
208 return self._remote.revision_changes(self._svn_rev)
206
209
207 @LazyProperty
210 @LazyProperty
208 def affected_files(self):
211 def affected_files(self):
209 changed_files = set()
212 changed_files = set()
210 for files in self._changes_cache.values():
213 for files in self._changes_cache.values():
211 changed_files.update(files)
214 changed_files.update(files)
212 return list(changed_files)
215 return list(changed_files)
213
216
214 @LazyProperty
217 @LazyProperty
215 def id(self):
218 def id(self):
216 return self.raw_id
219 return self.raw_id
217
220
218 @property
221 @property
219 def added(self):
222 def added(self):
220 return nodes.AddedFileNodesGenerator(self.added_paths, self)
223 return nodes.AddedFileNodesGenerator(self.added_paths, self)
221
224
222 @LazyProperty
225 @LazyProperty
223 def added_paths(self):
226 def added_paths(self):
224 return [n for n in self._changes_cache['added']]
227 return [n for n in self._changes_cache['added']]
225
228
226 @property
229 @property
227 def changed(self):
230 def changed(self):
228 return nodes.ChangedFileNodesGenerator(self.changed_paths, self)
231 return nodes.ChangedFileNodesGenerator(self.changed_paths, self)
229
232
230 @LazyProperty
233 @LazyProperty
231 def changed_paths(self):
234 def changed_paths(self):
232 return [n for n in self._changes_cache['changed']]
235 return [n for n in self._changes_cache['changed']]
233
236
234 @property
237 @property
235 def removed(self):
238 def removed(self):
236 return nodes.RemovedFileNodesGenerator(self.removed_paths, self)
239 return nodes.RemovedFileNodesGenerator(self.removed_paths, self)
237
240
238 @LazyProperty
241 @LazyProperty
239 def removed_paths(self):
242 def removed_paths(self):
240 return [n for n in self._changes_cache['removed']]
243 return [n for n in self._changes_cache['removed']]
241
244
242
245
243 def _date_from_svn_properties(properties):
246 def _date_from_svn_properties(properties):
244 """
247 """
245 Parses the date out of given svn properties.
248 Parses the date out of given svn properties.
246
249
247 :return: :class:`datetime.datetime` instance. The object is naive.
250 :return: :class:`datetime.datetime` instance. The object is naive.
248 """
251 """
249
252
250 aware_date = dateutil.parser.parse(properties.get('svn:date'))
253 aware_date = dateutil.parser.parse(properties.get('svn:date'))
251 # final_date = aware_date.astimezone(dateutil.tz.tzlocal())
254 # final_date = aware_date.astimezone(dateutil.tz.tzlocal())
252 final_date = aware_date
255 final_date = aware_date
253 return final_date.replace(tzinfo=None)
256 return final_date.replace(tzinfo=None)
@@ -1,51 +1,51 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SVN diff module
22 SVN diff module
23 """
23 """
24
24
25 import re
25 import re
26
26
27 from rhodecode.lib.vcs.backends import base
27 from rhodecode.lib.vcs.backends import base
28
28
29
29
30 class SubversionDiff(base.Diff):
30 class SubversionDiff(base.Diff):
31
31
32 _meta_re = re.compile(r"""
32 _meta_re = re.compile(br"""
33 (?:^(?P<svn_bin_patch>Cannot[ ]display:[ ]file[ ]marked[ ]as[ ]a[ ]binary[ ]type.)(?:\n|$))?
33 (?:^(?P<svn_bin_patch>Cannot[ ]display:[ ]file[ ]marked[ ]as[ ]a[ ]binary[ ]type.)(?:\n|$))?
34 """, re.VERBOSE | re.MULTILINE)
34 """, re.VERBOSE | re.MULTILINE)
35
35
36 _header_re = re.compile(r"""
36 _header_re = re.compile(br"""
37 #^diff[ ]--git
37 #^diff[ ]--git
38 [ ]"?a/(?P<a_path>.+?)"?[ ]"?b/(?P<b_path>.+?)"?\n
38 [ ]"?a/(?P<a_path>.+?)"?[ ]"?b/(?P<b_path>.+?)"?\n
39 (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%\n
39 (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%\n
40 ^rename[ ]from[ ](?P<rename_from>[^\r\n]+)\n
40 ^rename[ ]from[ ](?P<rename_from>[^\r\n]+)\n
41 ^rename[ ]to[ ](?P<rename_to>[^\r\n]+)(?:\n|$))?
41 ^rename[ ]to[ ](?P<rename_to>[^\r\n]+)(?:\n|$))?
42 (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
42 (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
43 ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
43 ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
44 (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
44 (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
45 (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
45 (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
46 (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
46 (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
47 \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
47 \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
48 (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
48 (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
49 (?:^---[ ]("?a/(?P<a_file>.+)|/dev/null)\t\(revision[ ]\d+\)(?:\n|$))?
49 (?:^---[ ]("?a/(?P<a_file>.+)|/dev/null)\t\(revision[ ]\d+\)(?:\n|$))?
50 (?:^\+\+\+[ ]("?b/(?P<b_file>.+)|/dev/null)\t\(revision[ ]\d+\)(?:\n|$))?
50 (?:^\+\+\+[ ]("?b/(?P<b_file>.+)|/dev/null)\t\(revision[ ]\d+\)(?:\n|$))?
51 """, re.VERBOSE | re.MULTILINE)
51 """, re.VERBOSE | re.MULTILINE)
@@ -1,79 +1,79 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 SVN inmemory module
23 SVN inmemory module
24 """
24 """
25
25
26 from rhodecode.lib.datelib import date_astimestamp
26 from rhodecode.lib.datelib import date_astimestamp
27 from rhodecode.lib.utils import safe_str
27 from rhodecode.lib.str_utils import safe_str, safe_bytes
28 from rhodecode.lib.vcs.backends import base
28 from rhodecode.lib.vcs.backends import base
29
29
30
30
31 class SubversionInMemoryCommit(base.BaseInMemoryCommit):
31 class SubversionInMemoryCommit(base.BaseInMemoryCommit):
32
32
33 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
33 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
34 if branch not in (None, self.repository.DEFAULT_BRANCH_NAME):
34 if branch not in (None, self.repository.DEFAULT_BRANCH_NAME):
35 raise NotImplementedError("Branches are not yet supported")
35 raise NotImplementedError("Branches are not yet supported")
36
36
37 self.check_integrity(parents)
37 self.check_integrity(parents)
38
38
39 message = safe_str(message)
39 message = safe_str(message)
40 author = safe_str(author)
40 author = safe_str(author)
41
41
42 updated = []
42 updated = []
43 for node in self.added:
43 for node in self.added:
44 node_data = {
44 node_data = {
45 'path': node.path,
45 'path': safe_bytes(node.path),
46 'content': safe_str(node.content),
46 'content': node.content,
47 'mode': node.mode,
47 'mode': node.mode,
48 }
48 }
49 if node.is_binary:
49 if node.is_binary:
50 node_data['properties'] = {
50 node_data['properties'] = {
51 'svn:mime-type': 'application/octet-stream'
51 'svn:mime-type': 'application/octet-stream'
52 }
52 }
53 updated.append(node_data)
53 updated.append(node_data)
54 for node in self.changed:
54 for node in self.changed:
55 updated.append({
55 updated.append({
56 'path': node.path,
56 'path': safe_bytes(node.path),
57 'content': safe_str(node.content),
57 'content': node.content,
58 'mode': node.mode,
58 'mode': node.mode,
59 })
59 })
60
60
61 removed = []
61 removed = []
62 for node in self.removed:
62 for node in self.removed:
63 removed.append({
63 removed.append({
64 'path': node.path,
64 'path': safe_bytes(node.path),
65 })
65 })
66
66
67 timestamp = date_astimestamp(date) if date else None
67 timestamp = date_astimestamp(date) if date else None
68 svn_rev = self.repository._remote.commit(
68 svn_rev = self.repository._remote.commit(
69 message=message, author=author, timestamp=timestamp,
69 message=message, author=author, timestamp=timestamp,
70 updated=updated, removed=removed)
70 updated=updated, removed=removed)
71
71
72 # TODO: Find a nicer way. If commit_ids is not yet evaluated, then
72 # TODO: Find a nicer way. If commit_ids is not yet evaluated, then
73 # we should not add the commit_id, if it is already evaluated, it
73 # we should not add the commit_id, if it is already evaluated, it
74 # will not be evaluated again.
74 # will not be evaluated again.
75 commit_id = str(svn_rev)
75 commit_id = str(svn_rev)
76 self.repository.append_commit_id(commit_id)
76 self.repository.append_commit_id(commit_id)
77 tip = self.repository.get_commit()
77 tip = self.repository.get_commit()
78 self.reset()
78 self.reset()
79 return tip
79 return tip
@@ -1,370 +1,369 b''
1
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SVN repository module
22 SVN repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import urllib.request, urllib.parse, urllib.error
27 import urllib.request
28 import urllib.parse
29 import urllib.error
28
30
29 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
30
32
31 from collections import OrderedDict
33 from collections import OrderedDict
32 from rhodecode.lib.datelib import date_astimestamp
34 from rhodecode.lib.datelib import date_astimestamp
33 from rhodecode.lib.utils import safe_str, safe_unicode
35 from rhodecode.lib.str_utils import safe_str
34 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends import base
38 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.backends.svn.commit import (
39 from rhodecode.lib.vcs.backends.svn.commit import (
38 SubversionCommit, _date_from_svn_properties)
40 SubversionCommit, _date_from_svn_properties)
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
41 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
42 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
41 from rhodecode.lib.vcs.conf import settings
43 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
45 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
44 VCSError, NodeDoesNotExistError)
46 VCSError, NodeDoesNotExistError)
45
47
46
48
47 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
48
50
49
51
50 class SubversionRepository(base.BaseRepository):
52 class SubversionRepository(base.BaseRepository):
51 """
53 """
52 Subversion backend implementation
54 Subversion backend implementation
53
55
54 .. important::
56 .. important::
55
57
56 It is very important to distinguish the commit index and the commit id
58 It is very important to distinguish the commit index and the commit id
57 which is assigned by Subversion. The first one is always handled as an
59 which is assigned by Subversion. The first one is always handled as an
58 `int` by this implementation. The commit id assigned by Subversion on
60 `int` by this implementation. The commit id assigned by Subversion on
59 the other side will always be a `str`.
61 the other side will always be a `str`.
60
62
61 There is a specific trap since the first commit will have the index
63 There is a specific trap since the first commit will have the index
62 ``0`` but the svn id will be ``"1"``.
64 ``0`` but the svn id will be ``"1"``.
63
65
64 """
66 """
65
67
66 # Note: Subversion does not really have a default branch name.
68 # Note: Subversion does not really have a default branch name.
67 DEFAULT_BRANCH_NAME = None
69 DEFAULT_BRANCH_NAME = None
68
70
69 contact = base.BaseRepository.DEFAULT_CONTACT
71 contact = base.BaseRepository.DEFAULT_CONTACT
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
72 description = base.BaseRepository.DEFAULT_DESCRIPTION
71
73
72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
74 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
73 bare=False, **kwargs):
75 bare=False, **kwargs):
74 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
75 self.config = config if config else self.get_default_config()
77 self.config = config if config else self.get_default_config()
76 self.with_wire = with_wire or {"cache": False} # default should not use cache
78 self.with_wire = with_wire or {"cache": False} # default should not use cache
77
79
78 self._init_repo(create, src_url)
80 self._init_repo(create, src_url)
79
81
80 # caches
82 # caches
81 self._commit_ids = {}
83 self._commit_ids = {}
82
84
83 @LazyProperty
85 @LazyProperty
84 def _remote(self):
86 def _remote(self):
85 repo_id = self.path
87 repo_id = self.path
86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
88 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
87
89
88 def _init_repo(self, create, src_url):
90 def _init_repo(self, create, src_url):
89 if create and os.path.exists(self.path):
91 if create and os.path.exists(self.path):
90 raise RepositoryError(
92 raise RepositoryError(
91 "Cannot create repository at %s, location already exist"
93 f"Cannot create repository at {self.path}, location already exist"
92 % self.path)
94 )
93
95
94 if create:
96 if create:
95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
97 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
96 if src_url:
98 if src_url:
97 src_url = _sanitize_url(src_url)
99 src_url = _sanitize_url(src_url)
98 self._remote.import_remote_repository(src_url)
100 self._remote.import_remote_repository(src_url)
99 else:
101 else:
100 self._check_path()
102 self._check_path()
101
103
102 @CachedProperty
104 @CachedProperty
103 def commit_ids(self):
105 def commit_ids(self):
104 head = self._remote.lookup(None)
106 head = self._remote.lookup(None)
105 return [str(r) for r in range(1, head + 1)]
107 return [str(r) for r in range(1, head + 1)]
106
108
107 def _rebuild_cache(self, commit_ids):
109 def _rebuild_cache(self, commit_ids):
108 pass
110 pass
109
111
110 def run_svn_command(self, cmd, **opts):
112 def run_svn_command(self, cmd, **opts):
111 """
113 """
112 Runs given ``cmd`` as svn command and returns tuple
114 Runs given ``cmd`` as svn command and returns tuple
113 (stdout, stderr).
115 (stdout, stderr).
114
116
115 :param cmd: full svn command to be executed
117 :param cmd: full svn command to be executed
116 :param opts: env options to pass into Subprocess command
118 :param opts: env options to pass into Subprocess command
117 """
119 """
118 if not isinstance(cmd, list):
120 if not isinstance(cmd, list):
119 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
121 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
120
122
121 skip_stderr_log = opts.pop('skip_stderr_log', False)
123 skip_stderr_log = opts.pop('skip_stderr_log', False)
122 out, err = self._remote.run_svn_command(cmd, **opts)
124 out, err = self._remote.run_svn_command(cmd, **opts)
123 if err and not skip_stderr_log:
125 if err and not skip_stderr_log:
124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
126 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
125 return out, err
127 return out, err
126
128
127 @LazyProperty
129 @LazyProperty
128 def branches(self):
130 def branches(self):
129 return self._tags_or_branches('vcs_svn_branch')
131 return self._tags_or_branches('vcs_svn_branch')
130
132
131 @LazyProperty
133 @LazyProperty
132 def branches_closed(self):
134 def branches_closed(self):
133 return {}
135 return {}
134
136
135 @LazyProperty
137 @LazyProperty
136 def bookmarks(self):
138 def bookmarks(self):
137 return {}
139 return {}
138
140
139 @LazyProperty
141 @LazyProperty
140 def branches_all(self):
142 def branches_all(self):
141 # TODO: johbo: Implement proper branch support
143 # TODO: johbo: Implement proper branch support
142 all_branches = {}
144 all_branches = {}
143 all_branches.update(self.branches)
145 all_branches.update(self.branches)
144 all_branches.update(self.branches_closed)
146 all_branches.update(self.branches_closed)
145 return all_branches
147 return all_branches
146
148
147 @LazyProperty
149 @LazyProperty
148 def tags(self):
150 def tags(self):
149 return self._tags_or_branches('vcs_svn_tag')
151 return self._tags_or_branches('vcs_svn_tag')
150
152
151 def _tags_or_branches(self, config_section):
153 def _tags_or_branches(self, config_section):
152 found_items = {}
154 found_items = {}
153
155
154 if self.is_empty():
156 if self.is_empty():
155 return {}
157 return {}
156
158
157 for pattern in self._patterns_from_section(config_section):
159 for pattern in self._patterns_from_section(config_section):
158 pattern = vcspath.sanitize(pattern)
160 pattern = vcspath.sanitize(pattern)
159 tip = self.get_commit()
161 tip = self.get_commit()
160 try:
162 try:
161 if pattern.endswith('*'):
163 if pattern.endswith('*'):
162 basedir = tip.get_node(vcspath.dirname(pattern))
164 basedir = tip.get_node(vcspath.dirname(pattern))
163 directories = basedir.dirs
165 directories = basedir.dirs
164 else:
166 else:
165 directories = (tip.get_node(pattern), )
167 directories = (tip.get_node(pattern), )
166 except NodeDoesNotExistError:
168 except NodeDoesNotExistError:
167 continue
169 continue
168 found_items.update(
170 found_items.update((safe_str(n.path), self.commit_ids[-1]) for n in directories)
169 (safe_unicode(n.path),
170 self.commit_ids[-1])
171 for n in directories)
172
171
173 def get_name(item):
172 def get_name(item):
174 return item[0]
173 return item[0]
175
174
176 return OrderedDict(sorted(found_items.items(), key=get_name))
175 return OrderedDict(sorted(found_items.items(), key=get_name))
177
176
178 def _patterns_from_section(self, section):
177 def _patterns_from_section(self, section):
179 return (pattern for key, pattern in self.config.items(section))
178 return (pattern for key, pattern in self.config.items(section))
180
179
181 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
180 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
182 if self != repo2:
181 if self != repo2:
183 raise ValueError(
182 raise ValueError(
184 "Subversion does not support getting common ancestor of"
183 "Subversion does not support getting common ancestor of"
185 " different repositories.")
184 " different repositories.")
186
185
187 if int(commit_id1) < int(commit_id2):
186 if int(commit_id1) < int(commit_id2):
188 return commit_id1
187 return commit_id1
189 return commit_id2
188 return commit_id2
190
189
191 def verify(self):
190 def verify(self):
192 verify = self._remote.verify()
191 verify = self._remote.verify()
193
192
194 self._remote.invalidate_vcs_cache()
193 self._remote.invalidate_vcs_cache()
195 return verify
194 return verify
196
195
197 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
196 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
198 # TODO: johbo: Implement better comparison, this is a very naive
197 # TODO: johbo: Implement better comparison, this is a very naive
199 # version which does not allow to compare branches, tags or folders
198 # version which does not allow to compare branches, tags or folders
200 # at all.
199 # at all.
201 if repo2 != self:
200 if repo2 != self:
202 raise ValueError(
201 raise ValueError(
203 "Subversion does not support comparison of of different "
202 "Subversion does not support comparison of of different "
204 "repositories.")
203 "repositories.")
205
204
206 if commit_id1 == commit_id2:
205 if commit_id1 == commit_id2:
207 return []
206 return []
208
207
209 commit_idx1 = self._get_commit_idx(commit_id1)
208 commit_idx1 = self._get_commit_idx(commit_id1)
210 commit_idx2 = self._get_commit_idx(commit_id2)
209 commit_idx2 = self._get_commit_idx(commit_id2)
211
210
212 commits = [
211 commits = [
213 self.get_commit(commit_idx=idx)
212 self.get_commit(commit_idx=idx)
214 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
213 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
215
214
216 return commits
215 return commits
217
216
218 def _get_commit_idx(self, commit_id):
217 def _get_commit_idx(self, commit_id):
219 try:
218 try:
220 svn_rev = int(commit_id)
219 svn_rev = int(commit_id)
221 except:
220 except:
222 # TODO: johbo: this might be only one case, HEAD, check this
221 # TODO: johbo: this might be only one case, HEAD, check this
223 svn_rev = self._remote.lookup(commit_id)
222 svn_rev = self._remote.lookup(commit_id)
224 commit_idx = svn_rev - 1
223 commit_idx = svn_rev - 1
225 if commit_idx >= len(self.commit_ids):
224 if commit_idx >= len(self.commit_ids):
226 raise CommitDoesNotExistError(
225 raise CommitDoesNotExistError(
227 "Commit at index %s does not exist." % (commit_idx, ))
226 "Commit at index %s does not exist." % (commit_idx, ))
228 return commit_idx
227 return commit_idx
229
228
230 @staticmethod
229 @staticmethod
231 def check_url(url, config):
230 def check_url(url, config):
232 """
231 """
233 Check if `url` is a valid source to import a Subversion repository.
232 Check if `url` is a valid source to import a Subversion repository.
234 """
233 """
235 # convert to URL if it's a local directory
234 # convert to URL if it's a local directory
236 if os.path.isdir(url):
235 if os.path.isdir(url):
237 url = 'file://' + urllib.request.pathname2url(url)
236 url = 'file://' + urllib.request.pathname2url(url)
238 return connection.Svn.check_url(url, config.serialize())
237 return connection.Svn.check_url(url, config.serialize())
239
238
240 @staticmethod
239 @staticmethod
241 def is_valid_repository(path):
240 def is_valid_repository(path):
242 try:
241 try:
243 SubversionRepository(path)
242 SubversionRepository(path)
244 return True
243 return True
245 except VCSError:
244 except VCSError:
246 pass
245 pass
247 return False
246 return False
248
247
249 def _check_path(self):
248 def _check_path(self):
250 if not os.path.exists(self.path):
249 if not os.path.exists(self.path):
251 raise VCSError('Path "%s" does not exist!' % (self.path, ))
250 raise VCSError('Path "%s" does not exist!' % (self.path, ))
252 if not self._remote.is_path_valid_repository(self.path):
251 if not self._remote.is_path_valid_repository(self.path):
253 raise VCSError(
252 raise VCSError(
254 'Path "%s" does not contain a Subversion repository' %
253 'Path "%s" does not contain a Subversion repository' %
255 (self.path, ))
254 (self.path, ))
256
255
257 @LazyProperty
256 @LazyProperty
258 def last_change(self):
257 def last_change(self):
259 """
258 """
260 Returns last change made on this repository as
259 Returns last change made on this repository as
261 `datetime.datetime` object.
260 `datetime.datetime` object.
262 """
261 """
263 # Subversion always has a first commit which has id "0" and contains
262 # Subversion always has a first commit which has id "0" and contains
264 # what we are looking for.
263 # what we are looking for.
265 last_id = len(self.commit_ids)
264 last_id = len(self.commit_ids)
266 properties = self._remote.revision_properties(last_id)
265 properties = self._remote.revision_properties(last_id)
267 return _date_from_svn_properties(properties)
266 return _date_from_svn_properties(properties)
268
267
269 @LazyProperty
268 @LazyProperty
270 def in_memory_commit(self):
269 def in_memory_commit(self):
271 return SubversionInMemoryCommit(self)
270 return SubversionInMemoryCommit(self)
272
271
273 def get_hook_location(self):
272 def get_hook_location(self):
274 """
273 """
275 returns absolute path to location where hooks are stored
274 returns absolute path to location where hooks are stored
276 """
275 """
277 return os.path.join(self.path, 'hooks')
276 return os.path.join(self.path, 'hooks')
278
277
279 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
278 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
280 translate_tag=None, maybe_unreachable=False, reference_obj=None):
279 translate_tag=None, maybe_unreachable=False, reference_obj=None):
281 if self.is_empty():
280 if self.is_empty():
282 raise EmptyRepositoryError("There are no commits yet")
281 raise EmptyRepositoryError("There are no commits yet")
283 if commit_id is not None:
282 if commit_id is not None:
284 self._validate_commit_id(commit_id)
283 self._validate_commit_id(commit_id)
285 elif commit_idx is not None:
284 elif commit_idx is not None:
286 self._validate_commit_idx(commit_idx)
285 self._validate_commit_idx(commit_idx)
287 try:
286 try:
288 commit_id = self.commit_ids[commit_idx]
287 commit_id = self.commit_ids[commit_idx]
289 except IndexError:
288 except IndexError:
290 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
289 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
291
290
292 commit_id = self._sanitize_commit_id(commit_id)
291 commit_id = self._sanitize_commit_id(commit_id)
293 commit = SubversionCommit(repository=self, commit_id=commit_id)
292 commit = SubversionCommit(repository=self, commit_id=commit_id)
294 return commit
293 return commit
295
294
296 def get_commits(
295 def get_commits(
297 self, start_id=None, end_id=None, start_date=None, end_date=None,
296 self, start_id=None, end_id=None, start_date=None, end_date=None,
298 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
297 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
299 if self.is_empty():
298 if self.is_empty():
300 raise EmptyRepositoryError("There are no commit_ids yet")
299 raise EmptyRepositoryError("There are no commit_ids yet")
301 self._validate_branch_name(branch_name)
300 self._validate_branch_name(branch_name)
302
301
303 if start_id is not None:
302 if start_id is not None:
304 self._validate_commit_id(start_id)
303 self._validate_commit_id(start_id)
305 if end_id is not None:
304 if end_id is not None:
306 self._validate_commit_id(end_id)
305 self._validate_commit_id(end_id)
307
306
308 start_raw_id = self._sanitize_commit_id(start_id)
307 start_raw_id = self._sanitize_commit_id(start_id)
309 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
308 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
310 end_raw_id = self._sanitize_commit_id(end_id)
309 end_raw_id = self._sanitize_commit_id(end_id)
311 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
310 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
312
311
313 if None not in [start_id, end_id] and start_pos > end_pos:
312 if None not in [start_id, end_id] and start_pos > end_pos:
314 raise RepositoryError(
313 raise RepositoryError(
315 "Start commit '%s' cannot be after end commit '%s'" %
314 "Start commit '%s' cannot be after end commit '%s'" %
316 (start_id, end_id))
315 (start_id, end_id))
317 if end_pos is not None:
316 if end_pos is not None:
318 end_pos += 1
317 end_pos += 1
319
318
320 # Date based filtering
319 # Date based filtering
321 if start_date or end_date:
320 if start_date or end_date:
322 start_raw_id, end_raw_id = self._remote.lookup_interval(
321 start_raw_id, end_raw_id = self._remote.lookup_interval(
323 date_astimestamp(start_date) if start_date else None,
322 date_astimestamp(start_date) if start_date else None,
324 date_astimestamp(end_date) if end_date else None)
323 date_astimestamp(end_date) if end_date else None)
325 start_pos = start_raw_id - 1
324 start_pos = start_raw_id - 1
326 end_pos = end_raw_id
325 end_pos = end_raw_id
327
326
328 commit_ids = self.commit_ids
327 commit_ids = self.commit_ids
329
328
330 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
329 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
331 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
330 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
332 svn_rev = long(self.commit_ids[-1])
331 svn_rev = int(self.commit_ids[-1])
333 commit_ids = self._remote.node_history(
332 commit_ids = self._remote.node_history(
334 path=branch_name, revision=svn_rev, limit=None)
333 path=branch_name, revision=svn_rev, limit=None)
335 commit_ids = [str(i) for i in reversed(commit_ids)]
334 commit_ids = [str(i) for i in reversed(commit_ids)]
336
335
337 if start_pos or end_pos:
336 if start_pos or end_pos:
338 commit_ids = commit_ids[start_pos:end_pos]
337 commit_ids = commit_ids[start_pos:end_pos]
339 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
338 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
340
339
341 def _sanitize_commit_id(self, commit_id):
340 def _sanitize_commit_id(self, commit_id):
342 if commit_id and commit_id.isdigit():
341 if commit_id and commit_id.isdigit():
343 if int(commit_id) <= len(self.commit_ids):
342 if int(commit_id) <= len(self.commit_ids):
344 return commit_id
343 return commit_id
345 else:
344 else:
346 raise CommitDoesNotExistError(
345 raise CommitDoesNotExistError(
347 "Commit %s does not exist." % (commit_id, ))
346 "Commit %s does not exist." % (commit_id, ))
348 if commit_id not in [
347 if commit_id not in [
349 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
348 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
350 raise CommitDoesNotExistError(
349 raise CommitDoesNotExistError(
351 "Commit id %s not understood." % (commit_id, ))
350 "Commit id %s not understood." % (commit_id, ))
352 svn_rev = self._remote.lookup('HEAD')
351 svn_rev = self._remote.lookup('HEAD')
353 return str(svn_rev)
352 return str(svn_rev)
354
353
355 def get_diff(
354 def get_diff(
356 self, commit1, commit2, path=None, ignore_whitespace=False,
355 self, commit1, commit2, path=None, ignore_whitespace=False,
357 context=3, path1=None):
356 context=3, path1=None):
358 self._validate_diff_commits(commit1, commit2)
357 self._validate_diff_commits(commit1, commit2)
359 svn_rev1 = long(commit1.raw_id)
358 svn_rev1 = int(commit1.raw_id)
360 svn_rev2 = long(commit2.raw_id)
359 svn_rev2 = int(commit2.raw_id)
361 diff = self._remote.diff(
360 diff = self._remote.diff(
362 svn_rev1, svn_rev2, path1=path1, path2=path,
361 svn_rev1, svn_rev2, path1=path1, path2=path,
363 ignore_whitespace=ignore_whitespace, context=context)
362 ignore_whitespace=ignore_whitespace, context=context)
364 return SubversionDiff(diff)
363 return SubversionDiff(diff)
365
364
366
365
367 def _sanitize_url(url):
366 def _sanitize_url(url):
368 if '://' not in url:
367 if '://' not in url:
369 url = 'file://' + urllib.request.pathname2url(url)
368 url = 'file://' + urllib.request.pathname2url(url)
370 return url
369 return url
General Comments 0
You need to be logged in to leave comments. Login now