##// END OF EJS Templates
largefiles: enabled download of largefiles for git and mercurial from web interface....
marcink -
r1577:3fd4ff52 default
parent child Browse files
Show More
@@ -1,241 +1,241 b''
1 1 # Nix environment for the community edition
2 2 #
3 3 # This shall be as lean as possible, just producing the Enterprise
4 4 # derivation. For advanced tweaks to pimp up the development environment we use
5 5 # "shell.nix" so that it does not have to clutter this file.
6 6
7 7 args@
8 8 { pythonPackages ? "python27Packages"
9 9 , pythonExternalOverrides ? self: super: {}
10 10 , doCheck ? true
11 11 , ...
12 12 }:
13 13
14 14 let
15 15
16 16 # Use nixpkgs from args or import them. We use this indirect approach
17 17 # through args to be able to use the name `pkgs` for our customized packages.
18 18 # Otherwise we will end up with an infinite recursion.
19 19 nixpkgs = args.pkgs or (import <nixpkgs> { });
20 20
21 21 # johbo: Interim bridge which allows us to build with the upcoming
22 22 # nixos.16.09 branch (unstable at the moment of writing this note) and the
23 23 # current stable nixos-16.03.
24 24 backwardsCompatibleFetchgit = { ... }@args:
25 25 let
26 26 origSources = nixpkgs.fetchgit args;
27 27 in
28 28 nixpkgs.lib.overrideDerivation origSources (oldAttrs: {
29 29 NIX_PREFETCH_GIT_CHECKOUT_HOOK = ''
30 30 find $out -name '.git*' -print0 | xargs -0 rm -rf
31 31 '';
32 32 });
33 33
34 34 # Create a customized version of nixpkgs which should be used throughout the
35 35 # rest of this file.
36 36 pkgs = nixpkgs.overridePackages (self: super: {
37 37 fetchgit = backwardsCompatibleFetchgit;
38 38 });
39 39
40 40 # Evaluates to the last segment of a file system path.
41 41 basename = path: with pkgs.lib; last (splitString "/" path);
42 42
43 43 # source code filter used as argument to builtins.filterSource.
44 44 src-filter = path: type: with pkgs.lib;
45 45 let
46 46 ext = last (splitString "." path);
47 47 in
48 48 !builtins.elem (basename path) [
49 49 ".git" ".hg" "__pycache__" ".eggs"
50 50 "bower_components" "node_modules"
51 51 "build" "data" "result" "tmp"] &&
52 52 !builtins.elem ext ["egg-info" "pyc"] &&
53 53 # TODO: johbo: This check is wrong, since "path" contains an absolute path,
54 54 # it would still be good to restore it since we want to ignore "result-*".
55 55 !hasPrefix "result" path;
56 56
57 57 basePythonPackages = with builtins; if isAttrs pythonPackages
58 58 then pythonPackages
59 59 else getAttr pythonPackages pkgs;
60 60
61 61 buildBowerComponents =
62 62 pkgs.buildBowerComponents or
63 63 (import ./pkgs/backport-16.03-build-bower-components.nix { inherit pkgs; });
64 64
65 65 sources = pkgs.config.rc.sources or {};
66 66 version = builtins.readFile ./rhodecode/VERSION;
67 67 rhodecode-enterprise-ce-src = builtins.filterSource src-filter ./.;
68 68
69 69 nodeEnv = import ./pkgs/node-default.nix {
70 70 inherit pkgs;
71 71 };
72 72 nodeDependencies = nodeEnv.shell.nodeDependencies;
73 73
74 74 bowerComponents = buildBowerComponents {
75 75 name = "enterprise-ce-${version}";
76 76 generated = ./pkgs/bower-packages.nix;
77 77 src = rhodecode-enterprise-ce-src;
78 78 };
79 79
80 80 pythonGeneratedPackages = self: basePythonPackages.override (a: {
81 81 inherit self;
82 82 })
83 83 // (scopedImport {
84 84 self = self;
85 85 super = basePythonPackages;
86 86 inherit pkgs;
87 87 inherit (pkgs) fetchurl fetchgit;
88 88 } ./pkgs/python-packages.nix);
89 89
90 90 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
91 91 inherit
92 92 basePythonPackages
93 93 pkgs;
94 94 };
95 95
96 96 pythonLocalOverrides = self: super: {
97 97 rhodecode-enterprise-ce =
98 98 let
99 99 linkNodeAndBowerPackages = ''
100 100 echo "Export RhodeCode CE path"
101 101 export RHODECODE_CE_PATH=${rhodecode-enterprise-ce-src}
102 102 echo "Link node packages"
103 103 rm -fr node_modules
104 104 mkdir node_modules
105 105 # johbo: Linking individual packages allows us to run "npm install"
106 106 # inside of a shell to try things out. Re-entering the shell will
107 107 # restore a clean environment.
108 108 ln -s ${nodeDependencies}/lib/node_modules/* node_modules/
109 109
110 110 echo "DONE: Link node packages"
111 111
112 112 echo "Link bower packages"
113 113 rm -fr bower_components
114 114 mkdir bower_components
115 115
116 116 ln -s ${bowerComponents}/bower_components/* bower_components/
117 117 echo "DONE: Link bower packages"
118 118 '';
119 119 in super.rhodecode-enterprise-ce.override (attrs: {
120 120
121 121 inherit
122 122 doCheck
123 123 version;
124 124 name = "rhodecode-enterprise-ce-${version}";
125 125 releaseName = "RhodeCodeEnterpriseCE-${version}";
126 126 src = rhodecode-enterprise-ce-src;
127 127 dontStrip = true; # prevent strip, we don't need it.
128 128
129 129 buildInputs =
130 130 attrs.buildInputs ++
131 131 (with self; [
132 132 pkgs.nodePackages.bower
133 133 pkgs.nodePackages.grunt-cli
134 134 pkgs.subversion
135 135 pytest-catchlog
136 136 rhodecode-testdata
137 137 ]);
138 138
139 139 #TODO: either move this into overrides, OR use the new mechanics from
140 140 # pip2nix and requirements.txt file
141 141 propagatedBuildInputs = attrs.propagatedBuildInputs ++ (with self; [
142 142 rhodecode-tools
143 143 ]);
144 144
145 145 # TODO: johbo: Make a nicer way to expose the parts. Maybe
146 146 # pkgs/default.nix?
147 147 passthru = {
148 148 inherit
149 149 bowerComponents
150 150 linkNodeAndBowerPackages
151 151 myPythonPackagesUnfix
152 152 pythonLocalOverrides;
153 153 pythonPackages = self;
154 154 };
155 155
156 156 LC_ALL = "en_US.UTF-8";
157 157 LOCALE_ARCHIVE =
158 158 if pkgs.stdenv ? glibc
159 159 then "${pkgs.glibcLocales}/lib/locale/locale-archive"
160 160 else "";
161 161
162 162 preCheck = ''
163 163 export PATH="$out/bin:$PATH"
164 164 '';
165 165
166 166 postCheck = ''
167 167 rm -rf $out/lib/${self.python.libPrefix}/site-packages/pytest_pylons
168 168 rm -rf $out/lib/${self.python.libPrefix}/site-packages/rhodecode/tests
169 169 '';
170 170
171 171 preBuild = linkNodeAndBowerPackages + ''
172 172 grunt
173 173 rm -fr node_modules
174 174 '';
175 175
176 176 postInstall = ''
177 177 # python based programs need to be wrapped
178 178 ln -s ${self.supervisor}/bin/supervisor* $out/bin/
179 179 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
180 180 ln -s ${self.PasteScript}/bin/paster $out/bin/
181 181 ln -s ${self.channelstream}/bin/channelstream $out/bin/
182 182 ln -s ${self.pyramid}/bin/* $out/bin/ #*/
183 183
184 184 # rhodecode-tools
185 185 # TODO: johbo: re-think this. Do the tools import anything from enterprise?
186 186 ln -s ${self.rhodecode-tools}/bin/rhodecode-* $out/bin/
187 187
188 188 # note that condition should be restricted when adding further tools
189 189 for file in $out/bin/*; do #*/
190 190 wrapProgram $file \
191 191 --prefix PYTHONPATH : $PYTHONPATH \
192 192 --prefix PATH : $PATH \
193 193 --set PYTHONHASHSEED random
194 194 done
195 195
196 196 mkdir $out/etc
197 197 cp configs/production.ini $out/etc
198 198
199 199 echo "Writing meta information for rccontrol to nix-support/rccontrol"
200 200 mkdir -p $out/nix-support/rccontrol
201 201 cp -v rhodecode/VERSION $out/nix-support/rccontrol/version
202 202 echo "DONE: Meta information for rccontrol written"
203 203
204 204 # TODO: johbo: Make part of ac-tests
205 205 if [ ! -f rhodecode/public/js/scripts.js ]; then
206 206 echo "Missing scripts.js"
207 207 exit 1
208 208 fi
209 209 if [ ! -f rhodecode/public/css/style.css ]; then
210 210 echo "Missing style.css"
211 211 exit 1
212 212 fi
213 213 '';
214 214
215 215 });
216 216
217 217 rhodecode-testdata = import "${rhodecode-testdata-src}/default.nix" {
218 218 inherit
219 219 doCheck
220 220 pkgs
221 221 pythonPackages;
222 222 };
223 223
224 224 };
225 225
226 226 rhodecode-testdata-src = sources.rhodecode-testdata or (
227 227 pkgs.fetchhg {
228 228 url = "https://code.rhodecode.com/upstream/rc_testdata";
229 rev = "v0.9.0";
230 sha256 = "0k0ccb7cncd6mmzwckfbr6l7fsymcympwcm948qc3i0f0m6bbg1y";
229 rev = "v0.10.0";
230 sha256 = "0zn9swwvx4vgw4qn8q3ri26vvzgrxn15x6xnjrysi1bwmz01qjl0";
231 231 });
232 232
233 233 # Apply all overrides and fix the final package set
234 234 myPythonPackagesUnfix = with pkgs.lib;
235 235 (extends pythonExternalOverrides
236 236 (extends pythonLocalOverrides
237 237 (extends pythonOverrides
238 238 pythonGeneratedPackages)));
239 239 myPythonPackages = (pkgs.lib.fix myPythonPackagesUnfix);
240 240
241 241 in myPythonPackages.rhodecode-enterprise-ce
@@ -1,1087 +1,1097 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Files controller for RhodeCode Enterprise
23 23 """
24 24
25 25 import itertools
26 26 import logging
27 27 import os
28 28 import shutil
29 29 import tempfile
30 30
31 31 from pylons import request, response, tmpl_context as c, url
32 32 from pylons.i18n.translation import _
33 33 from pylons.controllers.util import redirect
34 34 from webob.exc import HTTPNotFound, HTTPBadRequest
35 35
36 36 from rhodecode.controllers.utils import parse_path_ref
37 37 from rhodecode.lib import diffs, helpers as h, caches
38 38 from rhodecode.lib.compat import OrderedDict
39 39 from rhodecode.lib.codeblocks import (
40 40 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
41 41 from rhodecode.lib.utils import jsonify, action_logger
42 42 from rhodecode.lib.utils2 import (
43 43 convert_line_endings, detect_mode, safe_str, str2bool)
44 44 from rhodecode.lib.auth import (
45 45 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, XHRRequired)
46 46 from rhodecode.lib.base import BaseRepoController, render
47 47 from rhodecode.lib.vcs import path as vcspath
48 48 from rhodecode.lib.vcs.backends.base import EmptyCommit
49 49 from rhodecode.lib.vcs.conf import settings
50 50 from rhodecode.lib.vcs.exceptions import (
51 51 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
52 52 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
53 53 NodeDoesNotExistError, CommitError, NodeError)
54 54 from rhodecode.lib.vcs.nodes import FileNode
55 55
56 56 from rhodecode.model.repo import RepoModel
57 57 from rhodecode.model.scm import ScmModel
58 58 from rhodecode.model.db import Repository
59 59
60 60 from rhodecode.controllers.changeset import (
61 61 _ignorews_url, _context_url, get_line_ctx, get_ignore_ws)
62 62 from rhodecode.lib.exceptions import NonRelativePathError
63 63
64 64 log = logging.getLogger(__name__)
65 65
66 66
67 67 class FilesController(BaseRepoController):
68 68
69 69 def __before__(self):
70 70 super(FilesController, self).__before__()
71 71 c.cut_off_limit = self.cut_off_limit_file
72 72
73 73 def _get_default_encoding(self):
74 74 enc_list = getattr(c, 'default_encodings', [])
75 75 return enc_list[0] if enc_list else 'UTF-8'
76 76
77 77 def __get_commit_or_redirect(self, commit_id, repo_name,
78 78 redirect_after=True):
79 79 """
80 80 This is a safe way to get commit. If an error occurs it redirects to
81 81 tip with proper message
82 82
83 83 :param commit_id: id of commit to fetch
84 84 :param repo_name: repo name to redirect after
85 85 :param redirect_after: toggle redirection
86 86 """
87 87 try:
88 88 return c.rhodecode_repo.get_commit(commit_id)
89 89 except EmptyRepositoryError:
90 90 if not redirect_after:
91 91 return None
92 92 url_ = url('files_add_home',
93 93 repo_name=c.repo_name,
94 94 revision=0, f_path='', anchor='edit')
95 95 if h.HasRepoPermissionAny(
96 96 'repository.write', 'repository.admin')(c.repo_name):
97 97 add_new = h.link_to(
98 98 _('Click here to add a new file.'),
99 99 url_, class_="alert-link")
100 100 else:
101 101 add_new = ""
102 102 h.flash(h.literal(
103 103 _('There are no files yet. %s') % add_new), category='warning')
104 104 redirect(h.url('summary_home', repo_name=repo_name))
105 105 except (CommitDoesNotExistError, LookupError):
106 106 msg = _('No such commit exists for this repository')
107 107 h.flash(msg, category='error')
108 108 raise HTTPNotFound()
109 109 except RepositoryError as e:
110 110 h.flash(safe_str(e), category='error')
111 111 raise HTTPNotFound()
112 112
113 113 def __get_filenode_or_redirect(self, repo_name, commit, path):
114 114 """
115 115 Returns file_node, if error occurs or given path is directory,
116 116 it'll redirect to top level path
117 117
118 118 :param repo_name: repo_name
119 119 :param commit: given commit
120 120 :param path: path to lookup
121 121 """
122 122 try:
123 123 file_node = commit.get_node(path)
124 124 if file_node.is_dir():
125 125 raise RepositoryError('The given path is a directory')
126 126 except CommitDoesNotExistError:
127 127 msg = _('No such commit exists for this repository')
128 128 log.exception(msg)
129 129 h.flash(msg, category='error')
130 130 raise HTTPNotFound()
131 131 except RepositoryError as e:
132 132 h.flash(safe_str(e), category='error')
133 133 raise HTTPNotFound()
134 134
135 135 return file_node
136 136
137 137 def __get_tree_cache_manager(self, repo_name, namespace_type):
138 138 _namespace = caches.get_repo_namespace_key(namespace_type, repo_name)
139 139 return caches.get_cache_manager('repo_cache_long', _namespace)
140 140
141 141 def _get_tree_at_commit(self, repo_name, commit_id, f_path,
142 142 full_load=False, force=False):
143 143 def _cached_tree():
144 144 log.debug('Generating cached file tree for %s, %s, %s',
145 145 repo_name, commit_id, f_path)
146 146 c.full_load = full_load
147 147 return render('files/files_browser_tree.mako')
148 148
149 149 cache_manager = self.__get_tree_cache_manager(
150 150 repo_name, caches.FILE_TREE)
151 151
152 152 cache_key = caches.compute_key_from_params(
153 153 repo_name, commit_id, f_path)
154 154
155 155 if force:
156 156 # we want to force recompute of caches
157 157 cache_manager.remove_value(cache_key)
158 158
159 159 return cache_manager.get(cache_key, createfunc=_cached_tree)
160 160
161 161 def _get_nodelist_at_commit(self, repo_name, commit_id, f_path):
162 162 def _cached_nodes():
163 163 log.debug('Generating cached nodelist for %s, %s, %s',
164 164 repo_name, commit_id, f_path)
165 165 _d, _f = ScmModel().get_nodes(
166 166 repo_name, commit_id, f_path, flat=False)
167 167 return _d + _f
168 168
169 169 cache_manager = self.__get_tree_cache_manager(
170 170 repo_name, caches.FILE_SEARCH_TREE_META)
171 171
172 172 cache_key = caches.compute_key_from_params(
173 173 repo_name, commit_id, f_path)
174 174 return cache_manager.get(cache_key, createfunc=_cached_nodes)
175 175
176 176 @LoginRequired()
177 177 @HasRepoPermissionAnyDecorator(
178 178 'repository.read', 'repository.write', 'repository.admin')
179 179 def index(
180 180 self, repo_name, revision, f_path, annotate=False, rendered=False):
181 181 commit_id = revision
182 182
183 183 # redirect to given commit_id from form if given
184 184 get_commit_id = request.GET.get('at_rev', None)
185 185 if get_commit_id:
186 186 self.__get_commit_or_redirect(get_commit_id, repo_name)
187 187
188 188 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
189 189 c.branch = request.GET.get('branch', None)
190 190 c.f_path = f_path
191 191 c.annotate = annotate
192 192 # default is false, but .rst/.md files later are autorendered, we can
193 193 # overwrite autorendering by setting this GET flag
194 194 c.renderer = rendered or not request.GET.get('no-render', False)
195 195
196 196 # prev link
197 197 try:
198 198 prev_commit = c.commit.prev(c.branch)
199 199 c.prev_commit = prev_commit
200 200 c.url_prev = url('files_home', repo_name=c.repo_name,
201 201 revision=prev_commit.raw_id, f_path=f_path)
202 202 if c.branch:
203 203 c.url_prev += '?branch=%s' % c.branch
204 204 except (CommitDoesNotExistError, VCSError):
205 205 c.url_prev = '#'
206 206 c.prev_commit = EmptyCommit()
207 207
208 208 # next link
209 209 try:
210 210 next_commit = c.commit.next(c.branch)
211 211 c.next_commit = next_commit
212 212 c.url_next = url('files_home', repo_name=c.repo_name,
213 213 revision=next_commit.raw_id, f_path=f_path)
214 214 if c.branch:
215 215 c.url_next += '?branch=%s' % c.branch
216 216 except (CommitDoesNotExistError, VCSError):
217 217 c.url_next = '#'
218 218 c.next_commit = EmptyCommit()
219 219
220 220 # files or dirs
221 221 try:
222 222 c.file = c.commit.get_node(f_path)
223 223 c.file_author = True
224 224 c.file_tree = ''
225 225 if c.file.is_file():
226 c.lf_node = c.file.get_largefile_node()
227
226 228 c.file_source_page = 'true'
227 229 c.file_last_commit = c.file.last_commit
228 230 if c.file.size < self.cut_off_limit_file:
229 231 if c.annotate: # annotation has precedence over renderer
230 232 c.annotated_lines = filenode_as_annotated_lines_tokens(
231 233 c.file
232 234 )
233 235 else:
234 236 c.renderer = (
235 237 c.renderer and h.renderer_from_filename(c.file.path)
236 238 )
237 239 if not c.renderer:
238 240 c.lines = filenode_as_lines_tokens(c.file)
239 241
240 242 c.on_branch_head = self._is_valid_head(
241 243 commit_id, c.rhodecode_repo)
242 244 c.branch_or_raw_id = c.commit.branch or c.commit.raw_id
243 245
244 246 author = c.file_last_commit.author
245 247 c.authors = [(h.email(author),
246 248 h.person(author, 'username_or_name_or_email'))]
247 249 else:
248 250 c.file_source_page = 'false'
249 251 c.authors = []
250 252 c.file_tree = self._get_tree_at_commit(
251 253 repo_name, c.commit.raw_id, f_path)
252 254
253 255 except RepositoryError as e:
254 256 h.flash(safe_str(e), category='error')
255 257 raise HTTPNotFound()
256 258
257 259 if request.environ.get('HTTP_X_PJAX'):
258 260 return render('files/files_pjax.mako')
259 261
260 262 return render('files/files.mako')
261 263
262 264 @LoginRequired()
263 265 @HasRepoPermissionAnyDecorator(
264 266 'repository.read', 'repository.write', 'repository.admin')
265 267 def annotate_previous(self, repo_name, revision, f_path):
266 268
267 269 commit_id = revision
268 270 commit = self.__get_commit_or_redirect(commit_id, repo_name)
269 271 prev_commit_id = commit.raw_id
270 272
271 273 f_path = f_path
272 274 is_file = False
273 275 try:
274 276 _file = commit.get_node(f_path)
275 277 is_file = _file.is_file()
276 278 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
277 279 pass
278 280
279 281 if is_file:
280 282 history = commit.get_file_history(f_path)
281 283 prev_commit_id = history[1].raw_id \
282 284 if len(history) > 1 else prev_commit_id
283 285
284 286 return redirect(h.url(
285 287 'files_annotate_home', repo_name=repo_name,
286 288 revision=prev_commit_id, f_path=f_path))
287 289
288 290 @LoginRequired()
289 291 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
290 292 'repository.admin')
291 293 @jsonify
292 294 def history(self, repo_name, revision, f_path):
293 295 commit = self.__get_commit_or_redirect(revision, repo_name)
294 296 f_path = f_path
295 297 _file = commit.get_node(f_path)
296 298 if _file.is_file():
297 299 file_history, _hist = self._get_node_history(commit, f_path)
298 300
299 301 res = []
300 302 for obj in file_history:
301 303 res.append({
302 304 'text': obj[1],
303 305 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
304 306 })
305 307
306 308 data = {
307 309 'more': False,
308 310 'results': res
309 311 }
310 312 return data
311 313
312 314 @LoginRequired()
313 315 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
314 316 'repository.admin')
315 317 def authors(self, repo_name, revision, f_path):
316 318 commit = self.__get_commit_or_redirect(revision, repo_name)
317 319 file_node = commit.get_node(f_path)
318 320 if file_node.is_file():
319 321 c.file_last_commit = file_node.last_commit
320 322 if request.GET.get('annotate') == '1':
321 323 # use _hist from annotation if annotation mode is on
322 324 commit_ids = set(x[1] for x in file_node.annotate)
323 325 _hist = (
324 326 c.rhodecode_repo.get_commit(commit_id)
325 327 for commit_id in commit_ids)
326 328 else:
327 329 _f_history, _hist = self._get_node_history(commit, f_path)
328 330 c.file_author = False
329 331 c.authors = []
330 332 for author in set(commit.author for commit in _hist):
331 333 c.authors.append((
332 334 h.email(author),
333 335 h.person(author, 'username_or_name_or_email')))
334 336 return render('files/file_authors_box.mako')
335 337
336 338 @LoginRequired()
337 339 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
338 340 'repository.admin')
339 341 def rawfile(self, repo_name, revision, f_path):
340 342 """
341 343 Action for download as raw
342 344 """
343 345 commit = self.__get_commit_or_redirect(revision, repo_name)
344 346 file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
345 347
348 if request.GET.get('lf'):
349 # only if lf get flag is passed, we download this file
350 # as LFS/Largefile
351 lf_node = file_node.get_largefile_node()
352 if lf_node:
353 # overwrite our pointer with the REAL large-file
354 file_node = lf_node
355
346 356 response.content_disposition = 'attachment; filename=%s' % \
347 357 safe_str(f_path.split(Repository.NAME_SEP)[-1])
348 358
349 359 response.content_type = file_node.mimetype
350 360 charset = self._get_default_encoding()
351 361 if charset:
352 362 response.charset = charset
353 363
354 364 return file_node.content
355 365
356 366 @LoginRequired()
357 367 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
358 368 'repository.admin')
359 369 def raw(self, repo_name, revision, f_path):
360 370 """
361 371 Action for show as raw, some mimetypes are "rendered",
362 372 those include images, icons.
363 373 """
364 374 commit = self.__get_commit_or_redirect(revision, repo_name)
365 375 file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
366 376
367 377 raw_mimetype_mapping = {
368 378 # map original mimetype to a mimetype used for "show as raw"
369 379 # you can also provide a content-disposition to override the
370 380 # default "attachment" disposition.
371 381 # orig_type: (new_type, new_dispo)
372 382
373 383 # show images inline:
374 384 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
375 385 # for example render an SVG with javascript inside or even render
376 386 # HTML.
377 387 'image/x-icon': ('image/x-icon', 'inline'),
378 388 'image/png': ('image/png', 'inline'),
379 389 'image/gif': ('image/gif', 'inline'),
380 390 'image/jpeg': ('image/jpeg', 'inline'),
381 391 }
382 392
383 393 mimetype = file_node.mimetype
384 394 try:
385 395 mimetype, dispo = raw_mimetype_mapping[mimetype]
386 396 except KeyError:
387 397 # we don't know anything special about this, handle it safely
388 398 if file_node.is_binary:
389 399 # do same as download raw for binary files
390 400 mimetype, dispo = 'application/octet-stream', 'attachment'
391 401 else:
392 402 # do not just use the original mimetype, but force text/plain,
393 403 # otherwise it would serve text/html and that might be unsafe.
394 404 # Note: underlying vcs library fakes text/plain mimetype if the
395 405 # mimetype can not be determined and it thinks it is not
396 406 # binary. This might lead to erroneous text display in some
397 407 # cases, but helps in other cases, like with text files
398 408 # without extension.
399 409 mimetype, dispo = 'text/plain', 'inline'
400 410
401 411 if dispo == 'attachment':
402 412 dispo = 'attachment; filename=%s' % safe_str(
403 413 f_path.split(os.sep)[-1])
404 414
405 415 response.content_disposition = dispo
406 416 response.content_type = mimetype
407 417 charset = self._get_default_encoding()
408 418 if charset:
409 419 response.charset = charset
410 420 return file_node.content
411 421
412 422 @CSRFRequired()
413 423 @LoginRequired()
414 424 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
415 425 def delete(self, repo_name, revision, f_path):
416 426 commit_id = revision
417 427
418 428 repo = c.rhodecode_db_repo
419 429 if repo.enable_locking and repo.locked[0]:
420 430 h.flash(_('This repository has been locked by %s on %s')
421 431 % (h.person_by_id(repo.locked[0]),
422 432 h.format_date(h.time_to_datetime(repo.locked[1]))),
423 433 'warning')
424 434 return redirect(h.url('files_home',
425 435 repo_name=repo_name, revision='tip'))
426 436
427 437 if not self._is_valid_head(commit_id, repo.scm_instance()):
428 438 h.flash(_('You can only delete files with revision '
429 439 'being a valid branch '), category='warning')
430 440 return redirect(h.url('files_home',
431 441 repo_name=repo_name, revision='tip',
432 442 f_path=f_path))
433 443
434 444 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
435 445 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
436 446
437 447 c.default_message = _(
438 448 'Deleted file %s via RhodeCode Enterprise') % (f_path)
439 449 c.f_path = f_path
440 450 node_path = f_path
441 451 author = c.rhodecode_user.full_contact
442 452 message = request.POST.get('message') or c.default_message
443 453 try:
444 454 nodes = {
445 455 node_path: {
446 456 'content': ''
447 457 }
448 458 }
449 459 self.scm_model.delete_nodes(
450 460 user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo,
451 461 message=message,
452 462 nodes=nodes,
453 463 parent_commit=c.commit,
454 464 author=author,
455 465 )
456 466
457 467 h.flash(_('Successfully deleted file %s') % f_path,
458 468 category='success')
459 469 except Exception:
460 470 msg = _('Error occurred during commit')
461 471 log.exception(msg)
462 472 h.flash(msg, category='error')
463 473 return redirect(url('changeset_home',
464 474 repo_name=c.repo_name, revision='tip'))
465 475
466 476 @LoginRequired()
467 477 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
468 478 def delete_home(self, repo_name, revision, f_path):
469 479 commit_id = revision
470 480
471 481 repo = c.rhodecode_db_repo
472 482 if repo.enable_locking and repo.locked[0]:
473 483 h.flash(_('This repository has been locked by %s on %s')
474 484 % (h.person_by_id(repo.locked[0]),
475 485 h.format_date(h.time_to_datetime(repo.locked[1]))),
476 486 'warning')
477 487 return redirect(h.url('files_home',
478 488 repo_name=repo_name, revision='tip'))
479 489
480 490 if not self._is_valid_head(commit_id, repo.scm_instance()):
481 491 h.flash(_('You can only delete files with revision '
482 492 'being a valid branch '), category='warning')
483 493 return redirect(h.url('files_home',
484 494 repo_name=repo_name, revision='tip',
485 495 f_path=f_path))
486 496
487 497 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
488 498 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
489 499
490 500 c.default_message = _(
491 501 'Deleted file %s via RhodeCode Enterprise') % (f_path)
492 502 c.f_path = f_path
493 503
494 504 return render('files/files_delete.mako')
495 505
496 506 @CSRFRequired()
497 507 @LoginRequired()
498 508 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
499 509 def edit(self, repo_name, revision, f_path):
500 510 commit_id = revision
501 511
502 512 repo = c.rhodecode_db_repo
503 513 if repo.enable_locking and repo.locked[0]:
504 514 h.flash(_('This repository has been locked by %s on %s')
505 515 % (h.person_by_id(repo.locked[0]),
506 516 h.format_date(h.time_to_datetime(repo.locked[1]))),
507 517 'warning')
508 518 return redirect(h.url('files_home',
509 519 repo_name=repo_name, revision='tip'))
510 520
511 521 if not self._is_valid_head(commit_id, repo.scm_instance()):
512 522 h.flash(_('You can only edit files with revision '
513 523 'being a valid branch '), category='warning')
514 524 return redirect(h.url('files_home',
515 525 repo_name=repo_name, revision='tip',
516 526 f_path=f_path))
517 527
518 528 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
519 529 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
520 530
521 531 if c.file.is_binary:
522 532 return redirect(url('files_home', repo_name=c.repo_name,
523 533 revision=c.commit.raw_id, f_path=f_path))
524 534 c.default_message = _(
525 535 'Edited file %s via RhodeCode Enterprise') % (f_path)
526 536 c.f_path = f_path
527 537 old_content = c.file.content
528 538 sl = old_content.splitlines(1)
529 539 first_line = sl[0] if sl else ''
530 540
531 541 # modes: 0 - Unix, 1 - Mac, 2 - DOS
532 542 mode = detect_mode(first_line, 0)
533 543 content = convert_line_endings(request.POST.get('content', ''), mode)
534 544
535 545 message = request.POST.get('message') or c.default_message
536 546 org_f_path = c.file.unicode_path
537 547 filename = request.POST['filename']
538 548 org_filename = c.file.name
539 549
540 550 if content == old_content and filename == org_filename:
541 551 h.flash(_('No changes'), category='warning')
542 552 return redirect(url('changeset_home', repo_name=c.repo_name,
543 553 revision='tip'))
544 554 try:
545 555 mapping = {
546 556 org_f_path: {
547 557 'org_filename': org_f_path,
548 558 'filename': os.path.join(c.file.dir_path, filename),
549 559 'content': content,
550 560 'lexer': '',
551 561 'op': 'mod',
552 562 }
553 563 }
554 564
555 565 ScmModel().update_nodes(
556 566 user=c.rhodecode_user.user_id,
557 567 repo=c.rhodecode_db_repo,
558 568 message=message,
559 569 nodes=mapping,
560 570 parent_commit=c.commit,
561 571 )
562 572
563 573 h.flash(_('Successfully committed to %s') % f_path,
564 574 category='success')
565 575 except Exception:
566 576 msg = _('Error occurred during commit')
567 577 log.exception(msg)
568 578 h.flash(msg, category='error')
569 579 return redirect(url('changeset_home',
570 580 repo_name=c.repo_name, revision='tip'))
571 581
572 582 @LoginRequired()
573 583 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
574 584 def edit_home(self, repo_name, revision, f_path):
575 585 commit_id = revision
576 586
577 587 repo = c.rhodecode_db_repo
578 588 if repo.enable_locking and repo.locked[0]:
579 589 h.flash(_('This repository has been locked by %s on %s')
580 590 % (h.person_by_id(repo.locked[0]),
581 591 h.format_date(h.time_to_datetime(repo.locked[1]))),
582 592 'warning')
583 593 return redirect(h.url('files_home',
584 594 repo_name=repo_name, revision='tip'))
585 595
586 596 if not self._is_valid_head(commit_id, repo.scm_instance()):
587 597 h.flash(_('You can only edit files with revision '
588 598 'being a valid branch '), category='warning')
589 599 return redirect(h.url('files_home',
590 600 repo_name=repo_name, revision='tip',
591 601 f_path=f_path))
592 602
593 603 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
594 604 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
595 605
596 606 if c.file.is_binary:
597 607 return redirect(url('files_home', repo_name=c.repo_name,
598 608 revision=c.commit.raw_id, f_path=f_path))
599 609 c.default_message = _(
600 610 'Edited file %s via RhodeCode Enterprise') % (f_path)
601 611 c.f_path = f_path
602 612
603 613 return render('files/files_edit.mako')
604 614
605 615 def _is_valid_head(self, commit_id, repo):
606 616 # check if commit is a branch identifier- basically we cannot
607 617 # create multiple heads via file editing
608 618 valid_heads = repo.branches.keys() + repo.branches.values()
609 619
610 620 if h.is_svn(repo) and not repo.is_empty():
611 621 # Note: Subversion only has one head, we add it here in case there
612 622 # is no branch matched.
613 623 valid_heads.append(repo.get_commit(commit_idx=-1).raw_id)
614 624
615 625 # check if commit is a branch name or branch hash
616 626 return commit_id in valid_heads
617 627
618 628 @CSRFRequired()
619 629 @LoginRequired()
620 630 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
621 631 def add(self, repo_name, revision, f_path):
622 632 repo = Repository.get_by_repo_name(repo_name)
623 633 if repo.enable_locking and repo.locked[0]:
624 634 h.flash(_('This repository has been locked by %s on %s')
625 635 % (h.person_by_id(repo.locked[0]),
626 636 h.format_date(h.time_to_datetime(repo.locked[1]))),
627 637 'warning')
628 638 return redirect(h.url('files_home',
629 639 repo_name=repo_name, revision='tip'))
630 640
631 641 r_post = request.POST
632 642
633 643 c.commit = self.__get_commit_or_redirect(
634 644 revision, repo_name, redirect_after=False)
635 645 if c.commit is None:
636 646 c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
637 647 c.default_message = (_('Added file via RhodeCode Enterprise'))
638 648 c.f_path = f_path
639 649 unix_mode = 0
640 650 content = convert_line_endings(r_post.get('content', ''), unix_mode)
641 651
642 652 message = r_post.get('message') or c.default_message
643 653 filename = r_post.get('filename')
644 654 location = r_post.get('location', '') # dir location
645 655 file_obj = r_post.get('upload_file', None)
646 656
647 657 if file_obj is not None and hasattr(file_obj, 'filename'):
648 658 filename = file_obj.filename
649 659 content = file_obj.file
650 660
651 661 if hasattr(content, 'file'):
652 662 # non posix systems store real file under file attr
653 663 content = content.file
654 664
655 665 # If there's no commit, redirect to repo summary
656 666 if type(c.commit) is EmptyCommit:
657 667 redirect_url = "summary_home"
658 668 else:
659 669 redirect_url = "changeset_home"
660 670
661 671 if not filename:
662 672 h.flash(_('No filename'), category='warning')
663 673 return redirect(url(redirect_url, repo_name=c.repo_name,
664 674 revision='tip'))
665 675
666 676 # extract the location from filename,
667 677 # allows using foo/bar.txt syntax to create subdirectories
668 678 subdir_loc = filename.rsplit('/', 1)
669 679 if len(subdir_loc) == 2:
670 680 location = os.path.join(location, subdir_loc[0])
671 681
672 682 # strip all crap out of file, just leave the basename
673 683 filename = os.path.basename(filename)
674 684 node_path = os.path.join(location, filename)
675 685 author = c.rhodecode_user.full_contact
676 686
677 687 try:
678 688 nodes = {
679 689 node_path: {
680 690 'content': content
681 691 }
682 692 }
683 693 self.scm_model.create_nodes(
684 694 user=c.rhodecode_user.user_id,
685 695 repo=c.rhodecode_db_repo,
686 696 message=message,
687 697 nodes=nodes,
688 698 parent_commit=c.commit,
689 699 author=author,
690 700 )
691 701
692 702 h.flash(_('Successfully committed to %s') % node_path,
693 703 category='success')
694 704 except NonRelativePathError as e:
695 705 h.flash(_(
696 706 'The location specified must be a relative path and must not '
697 707 'contain .. in the path'), category='warning')
698 708 return redirect(url('changeset_home', repo_name=c.repo_name,
699 709 revision='tip'))
700 710 except (NodeError, NodeAlreadyExistsError) as e:
701 711 h.flash(_(e), category='error')
702 712 except Exception:
703 713 msg = _('Error occurred during commit')
704 714 log.exception(msg)
705 715 h.flash(msg, category='error')
706 716 return redirect(url('changeset_home',
707 717 repo_name=c.repo_name, revision='tip'))
708 718
709 719 @LoginRequired()
710 720 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
711 721 def add_home(self, repo_name, revision, f_path):
712 722
713 723 repo = Repository.get_by_repo_name(repo_name)
714 724 if repo.enable_locking and repo.locked[0]:
715 725 h.flash(_('This repository has been locked by %s on %s')
716 726 % (h.person_by_id(repo.locked[0]),
717 727 h.format_date(h.time_to_datetime(repo.locked[1]))),
718 728 'warning')
719 729 return redirect(h.url('files_home',
720 730 repo_name=repo_name, revision='tip'))
721 731
722 732 c.commit = self.__get_commit_or_redirect(
723 733 revision, repo_name, redirect_after=False)
724 734 if c.commit is None:
725 735 c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
726 736 c.default_message = (_('Added file via RhodeCode Enterprise'))
727 737 c.f_path = f_path
728 738
729 739 return render('files/files_add.mako')
730 740
731 741 @LoginRequired()
732 742 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
733 743 'repository.admin')
734 744 def archivefile(self, repo_name, fname):
735 745 fileformat = None
736 746 commit_id = None
737 747 ext = None
738 748 subrepos = request.GET.get('subrepos') == 'true'
739 749
740 750 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
741 751 archive_spec = fname.split(ext_data[1])
742 752 if len(archive_spec) == 2 and archive_spec[1] == '':
743 753 fileformat = a_type or ext_data[1]
744 754 commit_id = archive_spec[0]
745 755 ext = ext_data[1]
746 756
747 757 dbrepo = RepoModel().get_by_repo_name(repo_name)
748 758 if not dbrepo.enable_downloads:
749 759 return _('Downloads disabled')
750 760
751 761 try:
752 762 commit = c.rhodecode_repo.get_commit(commit_id)
753 763 content_type = settings.ARCHIVE_SPECS[fileformat][0]
754 764 except CommitDoesNotExistError:
755 765 return _('Unknown revision %s') % commit_id
756 766 except EmptyRepositoryError:
757 767 return _('Empty repository')
758 768 except KeyError:
759 769 return _('Unknown archive type')
760 770
761 771 # archive cache
762 772 from rhodecode import CONFIG
763 773
764 774 archive_name = '%s-%s%s%s' % (
765 775 safe_str(repo_name.replace('/', '_')),
766 776 '-sub' if subrepos else '',
767 777 safe_str(commit.short_id), ext)
768 778
769 779 use_cached_archive = False
770 780 archive_cache_enabled = CONFIG.get(
771 781 'archive_cache_dir') and not request.GET.get('no_cache')
772 782
773 783 if archive_cache_enabled:
774 784 # check if we it's ok to write
775 785 if not os.path.isdir(CONFIG['archive_cache_dir']):
776 786 os.makedirs(CONFIG['archive_cache_dir'])
777 787 cached_archive_path = os.path.join(
778 788 CONFIG['archive_cache_dir'], archive_name)
779 789 if os.path.isfile(cached_archive_path):
780 790 log.debug('Found cached archive in %s', cached_archive_path)
781 791 fd, archive = None, cached_archive_path
782 792 use_cached_archive = True
783 793 else:
784 794 log.debug('Archive %s is not yet cached', archive_name)
785 795
786 796 if not use_cached_archive:
787 797 # generate new archive
788 798 fd, archive = tempfile.mkstemp()
789 799 log.debug('Creating new temp archive in %s' % (archive,))
790 800 try:
791 801 commit.archive_repo(archive, kind=fileformat, subrepos=subrepos)
792 802 except ImproperArchiveTypeError:
793 803 return _('Unknown archive type')
794 804 if archive_cache_enabled:
795 805 # if we generated the archive and we have cache enabled
796 806 # let's use this for future
797 807 log.debug('Storing new archive in %s' % (cached_archive_path,))
798 808 shutil.move(archive, cached_archive_path)
799 809 archive = cached_archive_path
800 810
801 811 def get_chunked_archive(archive):
802 812 with open(archive, 'rb') as stream:
803 813 while True:
804 814 data = stream.read(16 * 1024)
805 815 if not data:
806 816 if fd: # fd means we used temporary file
807 817 os.close(fd)
808 818 if not archive_cache_enabled:
809 819 log.debug('Destroying temp archive %s', archive)
810 820 os.remove(archive)
811 821 break
812 822 yield data
813 823
814 824 # store download action
815 825 action_logger(user=c.rhodecode_user,
816 826 action='user_downloaded_archive:%s' % archive_name,
817 827 repo=repo_name, ipaddr=self.ip_addr, commit=True)
818 828 response.content_disposition = str(
819 829 'attachment; filename=%s' % archive_name)
820 830 response.content_type = str(content_type)
821 831
822 832 return get_chunked_archive(archive)
823 833
824 834 @LoginRequired()
825 835 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
826 836 'repository.admin')
827 837 def diff(self, repo_name, f_path):
828 838
829 839 c.action = request.GET.get('diff')
830 840 diff1 = request.GET.get('diff1', '')
831 841 diff2 = request.GET.get('diff2', '')
832 842
833 843 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
834 844
835 845 ignore_whitespace = str2bool(request.GET.get('ignorews'))
836 846 line_context = request.GET.get('context', 3)
837 847
838 848 if not any((diff1, diff2)):
839 849 h.flash(
840 850 'Need query parameter "diff1" or "diff2" to generate a diff.',
841 851 category='error')
842 852 raise HTTPBadRequest()
843 853
844 854 if c.action not in ['download', 'raw']:
845 855 # redirect to new view if we render diff
846 856 return redirect(
847 857 url('compare_url', repo_name=repo_name,
848 858 source_ref_type='rev',
849 859 source_ref=diff1,
850 860 target_repo=c.repo_name,
851 861 target_ref_type='rev',
852 862 target_ref=diff2,
853 863 f_path=f_path))
854 864
855 865 try:
856 866 node1 = self._get_file_node(diff1, path1)
857 867 node2 = self._get_file_node(diff2, f_path)
858 868 except (RepositoryError, NodeError):
859 869 log.exception("Exception while trying to get node from repository")
860 870 return redirect(url(
861 871 'files_home', repo_name=c.repo_name, f_path=f_path))
862 872
863 873 if all(isinstance(node.commit, EmptyCommit)
864 874 for node in (node1, node2)):
865 875 raise HTTPNotFound
866 876
867 877 c.commit_1 = node1.commit
868 878 c.commit_2 = node2.commit
869 879
870 880 if c.action == 'download':
871 881 _diff = diffs.get_gitdiff(node1, node2,
872 882 ignore_whitespace=ignore_whitespace,
873 883 context=line_context)
874 884 diff = diffs.DiffProcessor(_diff, format='gitdiff')
875 885
876 886 diff_name = '%s_vs_%s.diff' % (diff1, diff2)
877 887 response.content_type = 'text/plain'
878 888 response.content_disposition = (
879 889 'attachment; filename=%s' % (diff_name,)
880 890 )
881 891 charset = self._get_default_encoding()
882 892 if charset:
883 893 response.charset = charset
884 894 return diff.as_raw()
885 895
886 896 elif c.action == 'raw':
887 897 _diff = diffs.get_gitdiff(node1, node2,
888 898 ignore_whitespace=ignore_whitespace,
889 899 context=line_context)
890 900 diff = diffs.DiffProcessor(_diff, format='gitdiff')
891 901 response.content_type = 'text/plain'
892 902 charset = self._get_default_encoding()
893 903 if charset:
894 904 response.charset = charset
895 905 return diff.as_raw()
896 906
897 907 else:
898 908 return redirect(
899 909 url('compare_url', repo_name=repo_name,
900 910 source_ref_type='rev',
901 911 source_ref=diff1,
902 912 target_repo=c.repo_name,
903 913 target_ref_type='rev',
904 914 target_ref=diff2,
905 915 f_path=f_path))
906 916
907 917 @LoginRequired()
908 918 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
909 919 'repository.admin')
910 920 def diff_2way(self, repo_name, f_path):
911 921 """
912 922 Kept only to make OLD links work
913 923 """
914 924 diff1 = request.GET.get('diff1', '')
915 925 diff2 = request.GET.get('diff2', '')
916 926
917 927 if not any((diff1, diff2)):
918 928 h.flash(
919 929 'Need query parameter "diff1" or "diff2" to generate a diff.',
920 930 category='error')
921 931 raise HTTPBadRequest()
922 932
923 933 return redirect(
924 934 url('compare_url', repo_name=repo_name,
925 935 source_ref_type='rev',
926 936 source_ref=diff1,
927 937 target_repo=c.repo_name,
928 938 target_ref_type='rev',
929 939 target_ref=diff2,
930 940 f_path=f_path,
931 941 diffmode='sideside'))
932 942
933 943 def _get_file_node(self, commit_id, f_path):
934 944 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
935 945 commit = c.rhodecode_repo.get_commit(commit_id=commit_id)
936 946 try:
937 947 node = commit.get_node(f_path)
938 948 if node.is_dir():
939 949 raise NodeError('%s path is a %s not a file'
940 950 % (node, type(node)))
941 951 except NodeDoesNotExistError:
942 952 commit = EmptyCommit(
943 953 commit_id=commit_id,
944 954 idx=commit.idx,
945 955 repo=commit.repository,
946 956 alias=commit.repository.alias,
947 957 message=commit.message,
948 958 author=commit.author,
949 959 date=commit.date)
950 960 node = FileNode(f_path, '', commit=commit)
951 961 else:
952 962 commit = EmptyCommit(
953 963 repo=c.rhodecode_repo,
954 964 alias=c.rhodecode_repo.alias)
955 965 node = FileNode(f_path, '', commit=commit)
956 966 return node
957 967
958 968 def _get_node_history(self, commit, f_path, commits=None):
959 969 """
960 970 get commit history for given node
961 971
962 972 :param commit: commit to calculate history
963 973 :param f_path: path for node to calculate history for
964 974 :param commits: if passed don't calculate history and take
965 975 commits defined in this list
966 976 """
967 977 # calculate history based on tip
968 978 tip = c.rhodecode_repo.get_commit()
969 979 if commits is None:
970 980 pre_load = ["author", "branch"]
971 981 try:
972 982 commits = tip.get_file_history(f_path, pre_load=pre_load)
973 983 except (NodeDoesNotExistError, CommitError):
974 984 # this node is not present at tip!
975 985 commits = commit.get_file_history(f_path, pre_load=pre_load)
976 986
977 987 history = []
978 988 commits_group = ([], _("Changesets"))
979 989 for commit in commits:
980 990 branch = ' (%s)' % commit.branch if commit.branch else ''
981 991 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
982 992 commits_group[0].append((commit.raw_id, n_desc,))
983 993 history.append(commits_group)
984 994
985 995 symbolic_reference = self._symbolic_reference
986 996
987 997 if c.rhodecode_repo.alias == 'svn':
988 998 adjusted_f_path = self._adjust_file_path_for_svn(
989 999 f_path, c.rhodecode_repo)
990 1000 if adjusted_f_path != f_path:
991 1001 log.debug(
992 1002 'Recognized svn tag or branch in file "%s", using svn '
993 1003 'specific symbolic references', f_path)
994 1004 f_path = adjusted_f_path
995 1005 symbolic_reference = self._symbolic_reference_svn
996 1006
997 1007 branches = self._create_references(
998 1008 c.rhodecode_repo.branches, symbolic_reference, f_path)
999 1009 branches_group = (branches, _("Branches"))
1000 1010
1001 1011 tags = self._create_references(
1002 1012 c.rhodecode_repo.tags, symbolic_reference, f_path)
1003 1013 tags_group = (tags, _("Tags"))
1004 1014
1005 1015 history.append(branches_group)
1006 1016 history.append(tags_group)
1007 1017
1008 1018 return history, commits
1009 1019
1010 1020 def _adjust_file_path_for_svn(self, f_path, repo):
1011 1021 """
1012 1022 Computes the relative path of `f_path`.
1013 1023
1014 1024 This is mainly based on prefix matching of the recognized tags and
1015 1025 branches in the underlying repository.
1016 1026 """
1017 1027 tags_and_branches = itertools.chain(
1018 1028 repo.branches.iterkeys(),
1019 1029 repo.tags.iterkeys())
1020 1030 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
1021 1031
1022 1032 for name in tags_and_branches:
1023 1033 if f_path.startswith(name + '/'):
1024 1034 f_path = vcspath.relpath(f_path, name)
1025 1035 break
1026 1036 return f_path
1027 1037
1028 1038 def _create_references(
1029 1039 self, branches_or_tags, symbolic_reference, f_path):
1030 1040 items = []
1031 1041 for name, commit_id in branches_or_tags.items():
1032 1042 sym_ref = symbolic_reference(commit_id, name, f_path)
1033 1043 items.append((sym_ref, name))
1034 1044 return items
1035 1045
1036 1046 def _symbolic_reference(self, commit_id, name, f_path):
1037 1047 return commit_id
1038 1048
1039 1049 def _symbolic_reference_svn(self, commit_id, name, f_path):
1040 1050 new_f_path = vcspath.join(name, f_path)
1041 1051 return u'%s@%s' % (new_f_path, commit_id)
1042 1052
1043 1053 @LoginRequired()
1044 1054 @XHRRequired()
1045 1055 @HasRepoPermissionAnyDecorator(
1046 1056 'repository.read', 'repository.write', 'repository.admin')
1047 1057 @jsonify
1048 1058 def nodelist(self, repo_name, revision, f_path):
1049 1059 commit = self.__get_commit_or_redirect(revision, repo_name)
1050 1060
1051 1061 metadata = self._get_nodelist_at_commit(
1052 1062 repo_name, commit.raw_id, f_path)
1053 1063 return {'nodes': metadata}
1054 1064
1055 1065 @LoginRequired()
1056 1066 @XHRRequired()
1057 1067 @HasRepoPermissionAnyDecorator(
1058 1068 'repository.read', 'repository.write', 'repository.admin')
1059 1069 def nodetree_full(self, repo_name, commit_id, f_path):
1060 1070 """
1061 1071 Returns rendered html of file tree that contains commit date,
1062 1072 author, revision for the specified combination of
1063 1073 repo, commit_id and file path
1064 1074
1065 1075 :param repo_name: name of the repository
1066 1076 :param commit_id: commit_id of file tree
1067 1077 :param f_path: file path of the requested directory
1068 1078 """
1069 1079
1070 1080 commit = self.__get_commit_or_redirect(commit_id, repo_name)
1071 1081 try:
1072 1082 dir_node = commit.get_node(f_path)
1073 1083 except RepositoryError as e:
1074 1084 return 'error {}'.format(safe_str(e))
1075 1085
1076 1086 if dir_node.is_file():
1077 1087 return ''
1078 1088
1079 1089 c.file = dir_node
1080 1090 c.commit = commit
1081 1091
1082 1092 # using force=True here, make a little trick. We flush the cache and
1083 1093 # compute it using the same key as without full_load, so the fully
1084 1094 # loaded cached tree is now returned instead of partial
1085 1095 return self._get_tree_at_commit(
1086 1096 repo_name, commit.raw_id, dir_node.path, full_load=True,
1087 1097 force=True)
@@ -1,1587 +1,1588 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import itertools
28 28 import logging
29 29 import os
30 30 import time
31 31 import warnings
32 32
33 33 from zope.cachedescriptors.property import Lazy as LazyProperty
34 34
35 35 from rhodecode.lib.utils2 import safe_str, safe_unicode
36 36 from rhodecode.lib.vcs import connection
37 37 from rhodecode.lib.vcs.utils import author_name, author_email
38 38 from rhodecode.lib.vcs.conf import settings
39 39 from rhodecode.lib.vcs.exceptions import (
40 40 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
41 41 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
42 42 NodeDoesNotExistError, NodeNotChangedError, VCSError,
43 43 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
44 44 RepositoryError)
45 45
46 46
log = logging.getLogger(__name__)


# Regular and executable file modes as stored by the VCS backends.
# fix: use 0o-prefixed octal literals (valid on Python 2.6+ AND Python 3)
# instead of the legacy 0-prefixed form, which is a syntax error on py3.
FILEMODE_DEFAULT = 0o100644
FILEMODE_EXECUTABLE = 0o100755

# Lightweight value types shared by all VCS backends.
Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
MergeResponse = collections.namedtuple(
    'MergeResponse',
    ('possible', 'executed', 'merge_ref', 'failure_reason'))
57 57
58 58
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # A involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
111 111
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    # NOTE(review): name carries a historical typo ("TPYE"); kept as-is since
    # callers reference it by this exact spelling.
    WRONG_REF_TPYE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
141 141
142 142 class BaseRepository(object):
143 143 """
144 144 Base Repository for final backends
145 145
146 146 .. attribute:: DEFAULT_BRANCH_NAME
147 147
148 148 name of default branch (i.e. "trunk" for svn, "master" for git etc.
149 149
150 150 .. attribute:: commit_ids
151 151
152 152 list of all available commit ids, in ascending order
153 153
154 154 .. attribute:: path
155 155
156 156 absolute path to the repository
157 157
158 158 .. attribute:: bookmarks
159 159
160 160 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
161 161 there are no bookmarks or the backend implementation does not support
162 162 bookmarks.
163 163
164 164 .. attribute:: tags
165 165
166 166 Mapping from name to :term:`Commit ID` of the tag.
167 167
168 168 """
169 169
170 170 DEFAULT_BRANCH_NAME = None
171 171 DEFAULT_CONTACT = u"Unknown"
172 172 DEFAULT_DESCRIPTION = u"unknown"
173 173 EMPTY_COMMIT_ID = '0' * 40
174 174
175 175 path = None
176 176
177 177 def __init__(self, repo_path, config=None, create=False, **kwargs):
178 178 """
179 179 Initializes repository. Raises RepositoryError if repository could
180 180 not be find at the given ``repo_path`` or directory at ``repo_path``
181 181 exists and ``create`` is set to True.
182 182
183 183 :param repo_path: local path of the repository
184 184 :param config: repository configuration
185 185 :param create=False: if set to True, would try to create repository.
186 186 :param src_url=None: if set, should be proper url from which repository
187 187 would be cloned; requires ``create`` parameter to be set to True -
188 188 raises RepositoryError if src_url is set and create evaluates to
189 189 False
190 190 """
191 191 raise NotImplementedError
192 192
193 193 def __repr__(self):
194 194 return '<%s at %s>' % (self.__class__.__name__, self.path)
195 195
196 196 def __len__(self):
197 197 return self.count()
198 198
199 199 def __eq__(self, other):
200 200 same_instance = isinstance(other, self.__class__)
201 201 return same_instance and other.path == self.path
202 202
203 203 def __ne__(self, other):
204 204 return not self.__eq__(other)
205 205
206 206 @LazyProperty
207 207 def EMPTY_COMMIT(self):
208 208 return EmptyCommit(self.EMPTY_COMMIT_ID)
209 209
210 210 @LazyProperty
211 211 def alias(self):
212 212 for k, v in settings.BACKENDS.items():
213 213 if v.split('.')[-1] == str(self.__class__.__name__):
214 214 return k
215 215
216 216 @LazyProperty
217 217 def name(self):
218 218 return safe_unicode(os.path.basename(self.path))
219 219
220 220 @LazyProperty
221 221 def description(self):
222 222 raise NotImplementedError
223 223
224 224 def refs(self):
225 225 """
226 226 returns a `dict` with branches, bookmarks, tags, and closed_branches
227 227 for this repository
228 228 """
229 229 return dict(
230 230 branches=self.branches,
231 231 branches_closed=self.branches_closed,
232 232 tags=self.tags,
233 233 bookmarks=self.bookmarks
234 234 )
235 235
236 236 @LazyProperty
237 237 def branches(self):
238 238 """
239 239 A `dict` which maps branch names to commit ids.
240 240 """
241 241 raise NotImplementedError
242 242
243 243 @LazyProperty
244 244 def tags(self):
245 245 """
246 246 A `dict` which maps tags names to commit ids.
247 247 """
248 248 raise NotImplementedError
249 249
250 250 @LazyProperty
251 251 def size(self):
252 252 """
253 253 Returns combined size in bytes for all repository files
254 254 """
255 255 tip = self.get_commit()
256 256 return tip.size
257 257
258 258 def size_at_commit(self, commit_id):
259 259 commit = self.get_commit(commit_id)
260 260 return commit.size
261 261
262 262 def is_empty(self):
263 263 return not bool(self.commit_ids)
264 264
265 265 @staticmethod
266 266 def check_url(url, config):
267 267 """
268 268 Function will check given url and try to verify if it's a valid
269 269 link.
270 270 """
271 271 raise NotImplementedError
272 272
273 273 @staticmethod
274 274 def is_valid_repository(path):
275 275 """
276 276 Check if given `path` contains a valid repository of this backend
277 277 """
278 278 raise NotImplementedError
279 279
280 280 # ==========================================================================
281 281 # COMMITS
282 282 # ==========================================================================
283 283
284 284 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
285 285 """
286 286 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
287 287 are both None, most recent commit is returned.
288 288
289 289 :param pre_load: Optional. List of commit attributes to load.
290 290
291 291 :raises ``EmptyRepositoryError``: if there are no commits
292 292 """
293 293 raise NotImplementedError
294 294
295 295 def __iter__(self):
296 296 for commit_id in self.commit_ids:
297 297 yield self.get_commit(commit_id=commit_id)
298 298
299 299 def get_commits(
300 300 self, start_id=None, end_id=None, start_date=None, end_date=None,
301 301 branch_name=None, pre_load=None):
302 302 """
303 303 Returns iterator of `BaseCommit` objects from start to end
304 304 not inclusive. This should behave just like a list, ie. end is not
305 305 inclusive.
306 306
307 307 :param start_id: None or str, must be a valid commit id
308 308 :param end_id: None or str, must be a valid commit id
309 309 :param start_date:
310 310 :param end_date:
311 311 :param branch_name:
312 312 :param pre_load:
313 313 """
314 314 raise NotImplementedError
315 315
316 316 def __getitem__(self, key):
317 317 """
318 318 Allows index based access to the commit objects of this repository.
319 319 """
320 320 pre_load = ["author", "branch", "date", "message", "parents"]
321 321 if isinstance(key, slice):
322 322 return self._get_range(key, pre_load)
323 323 return self.get_commit(commit_idx=key, pre_load=pre_load)
324 324
325 325 def _get_range(self, slice_obj, pre_load):
326 326 for commit_id in self.commit_ids.__getitem__(slice_obj):
327 327 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
328 328
329 329 def count(self):
330 330 return len(self.commit_ids)
331 331
332 332 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
333 333 """
334 334 Creates and returns a tag for the given ``commit_id``.
335 335
336 336 :param name: name for new tag
337 337 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
338 338 :param commit_id: commit id for which new tag would be created
339 339 :param message: message of the tag's commit
340 340 :param date: date of tag's commit
341 341
342 342 :raises TagAlreadyExistError: if tag with same name already exists
343 343 """
344 344 raise NotImplementedError
345 345
346 346 def remove_tag(self, name, user, message=None, date=None):
347 347 """
348 348 Removes tag with the given ``name``.
349 349
350 350 :param name: name of the tag to be removed
351 351 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
352 352 :param message: message of the tag's removal commit
353 353 :param date: date of tag's removal commit
354 354
355 355 :raises TagDoesNotExistError: if tag with given name does not exists
356 356 """
357 357 raise NotImplementedError
358 358
359 359 def get_diff(
360 360 self, commit1, commit2, path=None, ignore_whitespace=False,
361 361 context=3, path1=None):
362 362 """
363 363 Returns (git like) *diff*, as plain text. Shows changes introduced by
364 364 `commit2` since `commit1`.
365 365
366 366 :param commit1: Entry point from which diff is shown. Can be
367 367 ``self.EMPTY_COMMIT`` - in this case, patch showing all
368 368 the changes since empty state of the repository until `commit2`
369 369 :param commit2: Until which commit changes should be shown.
370 370 :param path: Can be set to a path of a file to create a diff of that
371 371 file. If `path1` is also set, this value is only associated to
372 372 `commit2`.
373 373 :param ignore_whitespace: If set to ``True``, would not show whitespace
374 374 changes. Defaults to ``False``.
375 375 :param context: How many lines before/after changed lines should be
376 376 shown. Defaults to ``3``.
377 377 :param path1: Can be set to a path to associate with `commit1`. This
378 378 parameter works only for backends which support diff generation for
379 379 different paths. Other backends will raise a `ValueError` if `path1`
380 380 is set and has a different value than `path`.
381 381 :param file_path: filter this diff by given path pattern
382 382 """
383 383 raise NotImplementedError
384 384
385 385 def strip(self, commit_id, branch=None):
386 386 """
387 387 Strip given commit_id from the repository
388 388 """
389 389 raise NotImplementedError
390 390
391 391 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
392 392 """
393 393 Return a latest common ancestor commit if one exists for this repo
394 394 `commit_id1` vs `commit_id2` from `repo2`.
395 395
396 396 :param commit_id1: Commit it from this repository to use as a
397 397 target for the comparison.
398 398 :param commit_id2: Source commit id to use for comparison.
399 399 :param repo2: Source repository to use for comparison.
400 400 """
401 401 raise NotImplementedError
402 402
403 403 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
404 404 """
405 405 Compare this repository's revision `commit_id1` with `commit_id2`.
406 406
407 407 Returns a tuple(commits, ancestor) that would be merged from
408 408 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
409 409 will be returned as ancestor.
410 410
411 411 :param commit_id1: Commit it from this repository to use as a
412 412 target for the comparison.
413 413 :param commit_id2: Source commit id to use for comparison.
414 414 :param repo2: Source repository to use for comparison.
415 415 :param merge: If set to ``True`` will do a merge compare which also
416 416 returns the common ancestor.
417 417 :param pre_load: Optional. List of commit attributes to load.
418 418 """
419 419 raise NotImplementedError
420 420
421 421 def merge(self, target_ref, source_repo, source_ref, workspace_id,
422 422 user_name='', user_email='', message='', dry_run=False,
423 423 use_rebase=False):
424 424 """
425 425 Merge the revisions specified in `source_ref` from `source_repo`
426 426 onto the `target_ref` of this repository.
427 427
428 428 `source_ref` and `target_ref` are named tupls with the following
429 429 fields `type`, `name` and `commit_id`.
430 430
431 431 Returns a MergeResponse named tuple with the following fields
432 432 'possible', 'executed', 'source_commit', 'target_commit',
433 433 'merge_commit'.
434 434
435 435 :param target_ref: `target_ref` points to the commit on top of which
436 436 the `source_ref` should be merged.
437 437 :param source_repo: The repository that contains the commits to be
438 438 merged.
439 439 :param source_ref: `source_ref` points to the topmost commit from
440 440 the `source_repo` which should be merged.
441 441 :param workspace_id: `workspace_id` unique identifier.
442 442 :param user_name: Merge commit `user_name`.
443 443 :param user_email: Merge commit `user_email`.
444 444 :param message: Merge commit `message`.
445 445 :param dry_run: If `True` the merge will not take place.
446 446 :param use_rebase: If `True` commits from the source will be rebased
447 447 on top of the target instead of being merged.
448 448 """
449 449 if dry_run:
450 450 message = message or 'dry_run_merge_message'
451 451 user_email = user_email or 'dry-run-merge@rhodecode.com'
452 452 user_name = user_name or 'Dry-Run User'
453 453 else:
454 454 if not user_name:
455 455 raise ValueError('user_name cannot be empty')
456 456 if not user_email:
457 457 raise ValueError('user_email cannot be empty')
458 458 if not message:
459 459 raise ValueError('message cannot be empty')
460 460
461 461 shadow_repository_path = self._maybe_prepare_merge_workspace(
462 462 workspace_id, target_ref)
463 463
464 464 try:
465 465 return self._merge_repo(
466 466 shadow_repository_path, target_ref, source_repo,
467 467 source_ref, message, user_name, user_email, dry_run=dry_run,
468 468 use_rebase=use_rebase)
469 469 except RepositoryError:
470 470 log.exception(
471 471 'Unexpected failure when running merge, dry-run=%s',
472 472 dry_run)
473 473 return MergeResponse(
474 474 False, False, None, MergeFailureReason.UNKNOWN)
475 475
476 476 def _merge_repo(self, shadow_repository_path, target_ref,
477 477 source_repo, source_ref, merge_message,
478 478 merger_name, merger_email, dry_run=False, use_rebase=False):
479 479 """Internal implementation of merge."""
480 480 raise NotImplementedError
481 481
482 482 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
483 483 """
484 484 Create the merge workspace.
485 485
486 486 :param workspace_id: `workspace_id` unique identifier.
487 487 """
488 488 raise NotImplementedError
489 489
490 490 def cleanup_merge_workspace(self, workspace_id):
491 491 """
492 492 Remove merge workspace.
493 493
494 494 This function MUST not fail in case there is no workspace associated to
495 495 the given `workspace_id`.
496 496
497 497 :param workspace_id: `workspace_id` unique identifier.
498 498 """
499 499 raise NotImplementedError
500 500
501 501 # ========== #
502 502 # COMMIT API #
503 503 # ========== #
504 504
505 505 @LazyProperty
506 506 def in_memory_commit(self):
507 507 """
508 508 Returns :class:`InMemoryCommit` object for this repository.
509 509 """
510 510 raise NotImplementedError
511 511
512 512 # ======================== #
513 513 # UTILITIES FOR SUBCLASSES #
514 514 # ======================== #
515 515
516 516 def _validate_diff_commits(self, commit1, commit2):
517 517 """
518 518 Validates that the given commits are related to this repository.
519 519
520 520 Intended as a utility for sub classes to have a consistent validation
521 521 of input parameters in methods like :meth:`get_diff`.
522 522 """
523 523 self._validate_commit(commit1)
524 524 self._validate_commit(commit2)
525 525 if (isinstance(commit1, EmptyCommit) and
526 526 isinstance(commit2, EmptyCommit)):
527 527 raise ValueError("Cannot compare two empty commits")
528 528
529 529 def _validate_commit(self, commit):
530 530 if not isinstance(commit, BaseCommit):
531 531 raise TypeError(
532 532 "%s is not of type BaseCommit" % repr(commit))
533 533 if commit.repository != self and not isinstance(commit, EmptyCommit):
534 534 raise ValueError(
535 535 "Commit %s must be a valid commit from this repository %s, "
536 536 "related to this repository instead %s." %
537 537 (commit, self, commit.repository))
538 538
539 539 def _validate_commit_id(self, commit_id):
540 540 if not isinstance(commit_id, basestring):
541 541 raise TypeError("commit_id must be a string value")
542 542
543 543 def _validate_commit_idx(self, commit_idx):
544 544 if not isinstance(commit_idx, (int, long)):
545 545 raise TypeError("commit_idx must be a numeric value")
546 546
547 547 def _validate_branch_name(self, branch_name):
548 548 if branch_name and branch_name not in self.branches_all:
549 549 msg = ("Branch %s not found in %s" % (branch_name, self))
550 550 raise BranchDoesNotExistError(msg)
551 551
552 552 #
553 553 # Supporting deprecated API parts
554 554 # TODO: johbo: consider to move this into a mixin
555 555 #
556 556
557 557 @property
558 558 def EMPTY_CHANGESET(self):
559 559 warnings.warn(
560 560 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
561 561 return self.EMPTY_COMMIT_ID
562 562
563 563 @property
564 564 def revisions(self):
565 565 warnings.warn("Use commits attribute instead", DeprecationWarning)
566 566 return self.commit_ids
567 567
568 568 @revisions.setter
569 569 def revisions(self, value):
570 570 warnings.warn("Use commits attribute instead", DeprecationWarning)
571 571 self.commit_ids = value
572 572
573 573 def get_changeset(self, revision=None, pre_load=None):
574 574 warnings.warn("Use get_commit instead", DeprecationWarning)
575 575 commit_id = None
576 576 commit_idx = None
577 577 if isinstance(revision, basestring):
578 578 commit_id = revision
579 579 else:
580 580 commit_idx = revision
581 581 return self.get_commit(
582 582 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
583 583
584 584 def get_changesets(
585 585 self, start=None, end=None, start_date=None, end_date=None,
586 586 branch_name=None, pre_load=None):
587 587 warnings.warn("Use get_commits instead", DeprecationWarning)
588 588 start_id = self._revision_to_commit(start)
589 589 end_id = self._revision_to_commit(end)
590 590 return self.get_commits(
591 591 start_id=start_id, end_id=end_id, start_date=start_date,
592 592 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
593 593
594 594 def _revision_to_commit(self, revision):
595 595 """
596 596 Translates a revision to a commit_id
597 597
598 598 Helps to support the old changeset based API which allows to use
599 599 commit ids and commit indices interchangeable.
600 600 """
601 601 if revision is None:
602 602 return revision
603 603
604 604 if isinstance(revision, basestring):
605 605 commit_id = revision
606 606 else:
607 607 commit_id = self.commit_ids[revision]
608 608 return commit_id
609 609
610 610 @property
611 611 def in_memory_changeset(self):
612 612 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
613 613 return self.in_memory_commit
614 614
615 615
616 616 class BaseCommit(object):
617 617 """
618 618 Each backend should implement it's commit representation.
619 619
620 620 **Attributes**
621 621
622 622 ``repository``
623 623 repository object within which commit exists
624 624
625 625 ``id``
626 626 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
627 627 just ``tip``.
628 628
629 629 ``raw_id``
630 630 raw commit representation (i.e. full 40 length sha for git
631 631 backend)
632 632
633 633 ``short_id``
634 634 shortened (if apply) version of ``raw_id``; it would be simple
635 635 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
636 636 as ``raw_id`` for subversion
637 637
638 638 ``idx``
639 639 commit index
640 640
641 641 ``files``
642 642 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
643 643
644 644 ``dirs``
645 645 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
646 646
647 647 ``nodes``
648 648 combined list of ``Node`` objects
649 649
650 650 ``author``
651 651 author of the commit, as unicode
652 652
653 653 ``message``
654 654 message of the commit, as unicode
655 655
656 656 ``parents``
657 657 list of parent commits
658 658
659 659 """
660 660
661 661 branch = None
662 662 """
663 663 Depending on the backend this should be set to the branch name of the
664 664 commit. Backends not supporting branches on commits should leave this
665 665 value as ``None``.
666 666 """
667 667
668 668 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
669 669 """
670 670 This template is used to generate a default prefix for repository archives
671 671 if no prefix has been specified.
672 672 """
673 673
674 674 def __str__(self):
675 675 return '<%s at %s:%s>' % (
676 676 self.__class__.__name__, self.idx, self.short_id)
677 677
678 678 def __repr__(self):
679 679 return self.__str__()
680 680
681 681 def __unicode__(self):
682 682 return u'%s:%s' % (self.idx, self.short_id)
683 683
684 684 def __eq__(self, other):
685 685 same_instance = isinstance(other, self.__class__)
686 686 return same_instance and self.raw_id == other.raw_id
687 687
688 688 def __json__(self):
689 689 parents = []
690 690 try:
691 691 for parent in self.parents:
692 692 parents.append({'raw_id': parent.raw_id})
693 693 except NotImplementedError:
694 694 # empty commit doesn't have parents implemented
695 695 pass
696 696
697 697 return {
698 698 'short_id': self.short_id,
699 699 'raw_id': self.raw_id,
700 700 'revision': self.idx,
701 701 'message': self.message,
702 702 'date': self.date,
703 703 'author': self.author,
704 704 'parents': parents,
705 705 'branch': self.branch
706 706 }
707 707
708 708 @LazyProperty
709 709 def last(self):
710 710 """
711 711 ``True`` if this is last commit in repository, ``False``
712 712 otherwise; trying to access this attribute while there is no
713 713 commits would raise `EmptyRepositoryError`
714 714 """
715 715 if self.repository is None:
716 716 raise CommitError("Cannot check if it's most recent commit")
717 717 return self.raw_id == self.repository.commit_ids[-1]
718 718
719 719 @LazyProperty
720 720 def parents(self):
721 721 """
722 722 Returns list of parent commits.
723 723 """
724 724 raise NotImplementedError
725 725
726 726 @property
727 727 def merge(self):
728 728 """
729 729 Returns boolean if commit is a merge.
730 730 """
731 731 return len(self.parents) > 1
732 732
733 733 @LazyProperty
734 734 def children(self):
735 735 """
736 736 Returns list of child commits.
737 737 """
738 738 raise NotImplementedError
739 739
740 740 @LazyProperty
741 741 def id(self):
742 742 """
743 743 Returns string identifying this commit.
744 744 """
745 745 raise NotImplementedError
746 746
747 747 @LazyProperty
748 748 def raw_id(self):
749 749 """
750 750 Returns raw string identifying this commit.
751 751 """
752 752 raise NotImplementedError
753 753
754 754 @LazyProperty
755 755 def short_id(self):
756 756 """
757 757 Returns shortened version of ``raw_id`` attribute, as string,
758 758 identifying this commit, useful for presentation to users.
759 759 """
760 760 raise NotImplementedError
761 761
762 762 @LazyProperty
763 763 def idx(self):
764 764 """
765 765 Returns integer identifying this commit.
766 766 """
767 767 raise NotImplementedError
768 768
769 769 @LazyProperty
770 770 def committer(self):
771 771 """
772 772 Returns committer for this commit
773 773 """
774 774 raise NotImplementedError
775 775
776 776 @LazyProperty
777 777 def committer_name(self):
778 778 """
779 779 Returns committer name for this commit
780 780 """
781 781
782 782 return author_name(self.committer)
783 783
784 784 @LazyProperty
785 785 def committer_email(self):
786 786 """
787 787 Returns committer email address for this commit
788 788 """
789 789
790 790 return author_email(self.committer)
791 791
792 792 @LazyProperty
793 793 def author(self):
794 794 """
795 795 Returns author for this commit
796 796 """
797 797
798 798 raise NotImplementedError
799 799
800 800 @LazyProperty
801 801 def author_name(self):
802 802 """
803 803 Returns author name for this commit
804 804 """
805 805
806 806 return author_name(self.author)
807 807
808 808 @LazyProperty
809 809 def author_email(self):
810 810 """
811 811 Returns author email address for this commit
812 812 """
813 813
814 814 return author_email(self.author)
815 815
816 816 def get_file_mode(self, path):
817 817 """
818 818 Returns stat mode of the file at `path`.
819 819 """
820 820 raise NotImplementedError
821 821
822 822 def is_link(self, path):
823 823 """
824 824 Returns ``True`` if given `path` is a symlink
825 825 """
826 826 raise NotImplementedError
827 827
828 828 def get_file_content(self, path):
829 829 """
830 830 Returns content of the file at the given `path`.
831 831 """
832 832 raise NotImplementedError
833 833
834 834 def get_file_size(self, path):
835 835 """
836 836 Returns size of the file at the given `path`.
837 837 """
838 838 raise NotImplementedError
839 839
840 840 def get_file_commit(self, path, pre_load=None):
841 841 """
842 842 Returns last commit of the file at the given `path`.
843 843
844 844 :param pre_load: Optional. List of commit attributes to load.
845 845 """
846 846 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
847 847 if not commits:
848 848 raise RepositoryError(
849 849 'Failed to fetch history for path {}. '
850 850 'Please check if such path exists in your repository'.format(
851 851 path))
852 852 return commits[0]
853 853
854 854 def get_file_history(self, path, limit=None, pre_load=None):
855 855 """
856 856 Returns history of file as reversed list of :class:`BaseCommit`
857 857 objects for which file at given `path` has been modified.
858 858
859 859 :param limit: Optional. Allows to limit the size of the returned
860 860 history. This is intended as a hint to the underlying backend, so
861 861 that it can apply optimizations depending on the limit.
862 862 :param pre_load: Optional. List of commit attributes to load.
863 863 """
864 864 raise NotImplementedError
865 865
866 866 def get_file_annotate(self, path, pre_load=None):
867 867 """
868 868 Returns a generator of four element tuples with
869 869 lineno, sha, commit lazy loader and line
870 870
871 871 :param pre_load: Optional. List of commit attributes to load.
872 872 """
873 873 raise NotImplementedError
874 874
875 875 def get_nodes(self, path):
876 876 """
877 877 Returns combined ``DirNode`` and ``FileNode`` objects list representing
878 878 state of commit at the given ``path``.
879 879
880 880 :raises ``CommitError``: if node at the given ``path`` is not
881 881 instance of ``DirNode``
882 882 """
883 883 raise NotImplementedError
884 884
885 885 def get_node(self, path):
886 886 """
887 887 Returns ``Node`` object from the given ``path``.
888 888
889 889 :raises ``NodeDoesNotExistError``: if there is no node at the given
890 890 ``path``
891 891 """
892 892 raise NotImplementedError
893 893
894 894 def get_largefile_node(self, path):
895 895 """
896 Returns the path to largefile from Mercurial storage.
896 Returns the path to largefile from Mercurial/Git-lfs storage.
897 or None if it's not a largefile node
897 898 """
898 raise NotImplementedError
899 return None
899 900
    def archive_repo(self, file_path, kind='tgz', subrepos=None,
                     prefix=None, write_metadata=False, mtime=None):
        """
        Creates an archive containing the contents of the repository.

        :param file_path: path to the file which to create the archive.
        :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
        :param subrepos: accepted for interface compatibility; not used by
            this base implementation — TODO confirm backend semantics.
        :param prefix: name of root directory in archive.
            Default is repository name and commit's short_id joined with dash:
            ``"{repo_name}-{short_id}"``.
        :param write_metadata: write a ``.archival.txt`` metadata file into
            the archive.
        :param mtime: custom modification time for archive creation, defaults
            to this commit's date if not given.

        :raise ImproperArchiveTypeError: if `kind` is not supported.
        :raise VCSError: If prefix has a problem.
        """
        # supported archive kinds come from the vcs settings registry
        allowed_kinds = settings.ARCHIVE_SPECS.keys()
        if kind not in allowed_kinds:
            raise ImproperArchiveTypeError(
                'Archive kind (%s) not supported use one of %s' %
                (kind, allowed_kinds))

        # normalize/validate the prefix (may raise VCSError/ValueError)
        prefix = self._validate_archive_prefix(prefix)

        # fall back to the commit date when no explicit mtime was given
        mtime = mtime or time.mktime(self.date.timetuple())

        # collect (archive path, mode, is_link, raw bytes) for every file
        # reachable from the repository root at this commit
        file_info = []
        cur_rev = self.repository.get_commit(commit_id=self.raw_id)
        for _r, _d, files in cur_rev.walk('/'):
            for f in files:
                f_path = os.path.join(prefix, f.path)
                file_info.append(
                    (f_path, f.mode, f.is_link(), f.raw_bytes))

        if write_metadata:
            # simple "key:value" manifest written as .archival.txt
            metadata = [
                ('repo_name', self.repository.name),
                ('rev', self.raw_id),
                ('create_time', mtime),
                ('branch', self.branch),
                ('tags', ','.join(self.tags)),
            ]
            meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
            file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))

        # actual archive writing is delegated to the vcsserver connection
        connection.Hg.archive_repo(file_path, mtime, file_info, kind)
946 947
947 948 def _validate_archive_prefix(self, prefix):
948 949 if prefix is None:
949 950 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
950 951 repo_name=safe_str(self.repository.name),
951 952 short_id=self.short_id)
952 953 elif not isinstance(prefix, str):
953 954 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
954 955 elif prefix.startswith('/'):
955 956 raise VCSError("Prefix cannot start with leading slash")
956 957 elif prefix.strip() == '':
957 958 raise VCSError("Prefix cannot be empty")
958 959 return prefix
959 960
960 961 @LazyProperty
961 962 def root(self):
962 963 """
963 964 Returns ``RootNode`` object for this commit.
964 965 """
965 966 return self.get_node('')
966 967
967 968 def next(self, branch=None):
968 969 """
969 970 Returns next commit from current, if branch is gives it will return
970 971 next commit belonging to this branch
971 972
972 973 :param branch: show commits within the given named branch
973 974 """
974 975 indexes = xrange(self.idx + 1, self.repository.count())
975 976 return self._find_next(indexes, branch)
976 977
977 978 def prev(self, branch=None):
978 979 """
979 980 Returns previous commit from current, if branch is gives it will
980 981 return previous commit belonging to this branch
981 982
982 983 :param branch: show commit within the given named branch
983 984 """
984 985 indexes = xrange(self.idx - 1, -1, -1)
985 986 return self._find_next(indexes, branch)
986 987
987 988 def _find_next(self, indexes, branch=None):
988 989 if branch and self.branch != branch:
989 990 raise VCSError('Branch option used on commit not belonging '
990 991 'to that branch')
991 992
992 993 for next_idx in indexes:
993 994 commit = self.repository.get_commit(commit_idx=next_idx)
994 995 if branch and branch != commit.branch:
995 996 continue
996 997 return commit
997 998 raise CommitDoesNotExistError
998 999
999 1000 def diff(self, ignore_whitespace=True, context=3):
1000 1001 """
1001 1002 Returns a `Diff` object representing the change made by this commit.
1002 1003 """
1003 1004 parent = (
1004 1005 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1005 1006 diff = self.repository.get_diff(
1006 1007 parent, self,
1007 1008 ignore_whitespace=ignore_whitespace,
1008 1009 context=context)
1009 1010 return diff
1010 1011
1011 1012 @LazyProperty
1012 1013 def added(self):
1013 1014 """
1014 1015 Returns list of added ``FileNode`` objects.
1015 1016 """
1016 1017 raise NotImplementedError
1017 1018
1018 1019 @LazyProperty
1019 1020 def changed(self):
1020 1021 """
1021 1022 Returns list of modified ``FileNode`` objects.
1022 1023 """
1023 1024 raise NotImplementedError
1024 1025
1025 1026 @LazyProperty
1026 1027 def removed(self):
1027 1028 """
1028 1029 Returns list of removed ``FileNode`` objects.
1029 1030 """
1030 1031 raise NotImplementedError
1031 1032
1032 1033 @LazyProperty
1033 1034 def size(self):
1034 1035 """
1035 1036 Returns total number of bytes from contents of all filenodes.
1036 1037 """
1037 1038 return sum((node.size for node in self.get_filenodes_generator()))
1038 1039
1039 1040 def walk(self, topurl=''):
1040 1041 """
1041 1042 Similar to os.walk method. Insted of filesystem it walks through
1042 1043 commit starting at given ``topurl``. Returns generator of tuples
1043 1044 (topnode, dirnodes, filenodes).
1044 1045 """
1045 1046 topnode = self.get_node(topurl)
1046 1047 if not topnode.is_dir():
1047 1048 return
1048 1049 yield (topnode, topnode.dirs, topnode.files)
1049 1050 for dirnode in topnode.dirs:
1050 1051 for tup in self.walk(dirnode.path):
1051 1052 yield tup
1052 1053
1053 1054 def get_filenodes_generator(self):
1054 1055 """
1055 1056 Returns generator that yields *all* file nodes.
1056 1057 """
1057 1058 for topnode, dirs, files in self.walk():
1058 1059 for node in files:
1059 1060 yield node
1060 1061
1061 1062 #
1062 1063 # Utilities for sub classes to support consistent behavior
1063 1064 #
1064 1065
1065 1066 def no_node_at_path(self, path):
1066 1067 return NodeDoesNotExistError(
1067 1068 "There is no file nor directory at the given path: "
1068 1069 "'%s' at commit %s" % (path, self.short_id))
1069 1070
1070 1071 def _fix_path(self, path):
1071 1072 """
1072 1073 Paths are stored without trailing slash so we need to get rid off it if
1073 1074 needed.
1074 1075 """
1075 1076 return path.rstrip('/')
1076 1077
1077 1078 #
1078 1079 # Deprecated API based on changesets
1079 1080 #
1080 1081
1081 1082 @property
1082 1083 def revision(self):
1083 1084 warnings.warn("Use idx instead", DeprecationWarning)
1084 1085 return self.idx
1085 1086
1086 1087 @revision.setter
1087 1088 def revision(self, value):
1088 1089 warnings.warn("Use idx instead", DeprecationWarning)
1089 1090 self.idx = value
1090 1091
1091 1092 def get_file_changeset(self, path):
1092 1093 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1093 1094 return self.get_file_commit(path)
1094 1095
1095 1096
1096 1097 class BaseChangesetClass(type):
1097 1098
1098 1099 def __instancecheck__(self, instance):
1099 1100 return isinstance(instance, BaseCommit)
1100 1101
1101 1102
1102 1103 class BaseChangeset(BaseCommit):
1103 1104
1104 1105 __metaclass__ = BaseChangesetClass
1105 1106
1106 1107 def __new__(cls, *args, **kwargs):
1107 1108 warnings.warn(
1108 1109 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1109 1110 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1110 1111
1111 1112
1112 1113 class BaseInMemoryCommit(object):
1113 1114 """
1114 1115 Represents differences between repository's state (most recent head) and
1115 1116 changes made *in place*.
1116 1117
1117 1118 **Attributes**
1118 1119
1119 1120 ``repository``
1120 1121 repository object for this in-memory-commit
1121 1122
1122 1123 ``added``
1123 1124 list of ``FileNode`` objects marked as *added*
1124 1125
1125 1126 ``changed``
1126 1127 list of ``FileNode`` objects marked as *changed*
1127 1128
1128 1129 ``removed``
1129 1130 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1130 1131 *removed*
1131 1132
1132 1133 ``parents``
1133 1134 list of :class:`BaseCommit` instances representing parents of
1134 1135 in-memory commit. Should always be 2-element sequence.
1135 1136
1136 1137 """
1137 1138
1138 1139 def __init__(self, repository):
1139 1140 self.repository = repository
1140 1141 self.added = []
1141 1142 self.changed = []
1142 1143 self.removed = []
1143 1144 self.parents = []
1144 1145
1145 1146 def add(self, *filenodes):
1146 1147 """
1147 1148 Marks given ``FileNode`` objects as *to be committed*.
1148 1149
1149 1150 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1150 1151 latest commit
1151 1152 :raises ``NodeAlreadyAddedError``: if node with same path is already
1152 1153 marked as *added*
1153 1154 """
1154 1155 # Check if not already marked as *added* first
1155 1156 for node in filenodes:
1156 1157 if node.path in (n.path for n in self.added):
1157 1158 raise NodeAlreadyAddedError(
1158 1159 "Such FileNode %s is already marked for addition"
1159 1160 % node.path)
1160 1161 for node in filenodes:
1161 1162 self.added.append(node)
1162 1163
1163 1164 def change(self, *filenodes):
1164 1165 """
1165 1166 Marks given ``FileNode`` objects to be *changed* in next commit.
1166 1167
1167 1168 :raises ``EmptyRepositoryError``: if there are no commits yet
1168 1169 :raises ``NodeAlreadyExistsError``: if node with same path is already
1169 1170 marked to be *changed*
1170 1171 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1171 1172 marked to be *removed*
1172 1173 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1173 1174 commit
1174 1175 :raises ``NodeNotChangedError``: if node hasn't really be changed
1175 1176 """
1176 1177 for node in filenodes:
1177 1178 if node.path in (n.path for n in self.removed):
1178 1179 raise NodeAlreadyRemovedError(
1179 1180 "Node at %s is already marked as removed" % node.path)
1180 1181 try:
1181 1182 self.repository.get_commit()
1182 1183 except EmptyRepositoryError:
1183 1184 raise EmptyRepositoryError(
1184 1185 "Nothing to change - try to *add* new nodes rather than "
1185 1186 "changing them")
1186 1187 for node in filenodes:
1187 1188 if node.path in (n.path for n in self.changed):
1188 1189 raise NodeAlreadyChangedError(
1189 1190 "Node at '%s' is already marked as changed" % node.path)
1190 1191 self.changed.append(node)
1191 1192
1192 1193 def remove(self, *filenodes):
1193 1194 """
1194 1195 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1195 1196 *removed* in next commit.
1196 1197
1197 1198 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1198 1199 be *removed*
1199 1200 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1200 1201 be *changed*
1201 1202 """
1202 1203 for node in filenodes:
1203 1204 if node.path in (n.path for n in self.removed):
1204 1205 raise NodeAlreadyRemovedError(
1205 1206 "Node is already marked to for removal at %s" % node.path)
1206 1207 if node.path in (n.path for n in self.changed):
1207 1208 raise NodeAlreadyChangedError(
1208 1209 "Node is already marked to be changed at %s" % node.path)
1209 1210 # We only mark node as *removed* - real removal is done by
1210 1211 # commit method
1211 1212 self.removed.append(node)
1212 1213
1213 1214 def reset(self):
1214 1215 """
1215 1216 Resets this instance to initial state (cleans ``added``, ``changed``
1216 1217 and ``removed`` lists).
1217 1218 """
1218 1219 self.added = []
1219 1220 self.changed = []
1220 1221 self.removed = []
1221 1222 self.parents = []
1222 1223
1223 1224 def get_ipaths(self):
1224 1225 """
1225 1226 Returns generator of paths from nodes marked as added, changed or
1226 1227 removed.
1227 1228 """
1228 1229 for node in itertools.chain(self.added, self.changed, self.removed):
1229 1230 yield node.path
1230 1231
1231 1232 def get_paths(self):
1232 1233 """
1233 1234 Returns list of paths from nodes marked as added, changed or removed.
1234 1235 """
1235 1236 return list(self.get_ipaths())
1236 1237
1237 1238 def check_integrity(self, parents=None):
1238 1239 """
1239 1240 Checks in-memory commit's integrity. Also, sets parents if not
1240 1241 already set.
1241 1242
1242 1243 :raises CommitError: if any error occurs (i.e.
1243 1244 ``NodeDoesNotExistError``).
1244 1245 """
1245 1246 if not self.parents:
1246 1247 parents = parents or []
1247 1248 if len(parents) == 0:
1248 1249 try:
1249 1250 parents = [self.repository.get_commit(), None]
1250 1251 except EmptyRepositoryError:
1251 1252 parents = [None, None]
1252 1253 elif len(parents) == 1:
1253 1254 parents += [None]
1254 1255 self.parents = parents
1255 1256
1256 1257 # Local parents, only if not None
1257 1258 parents = [p for p in self.parents if p]
1258 1259
1259 1260 # Check nodes marked as added
1260 1261 for p in parents:
1261 1262 for node in self.added:
1262 1263 try:
1263 1264 p.get_node(node.path)
1264 1265 except NodeDoesNotExistError:
1265 1266 pass
1266 1267 else:
1267 1268 raise NodeAlreadyExistsError(
1268 1269 "Node `%s` already exists at %s" % (node.path, p))
1269 1270
1270 1271 # Check nodes marked as changed
1271 1272 missing = set(self.changed)
1272 1273 not_changed = set(self.changed)
1273 1274 if self.changed and not parents:
1274 1275 raise NodeDoesNotExistError(str(self.changed[0].path))
1275 1276 for p in parents:
1276 1277 for node in self.changed:
1277 1278 try:
1278 1279 old = p.get_node(node.path)
1279 1280 missing.remove(node)
1280 1281 # if content actually changed, remove node from not_changed
1281 1282 if old.content != node.content:
1282 1283 not_changed.remove(node)
1283 1284 except NodeDoesNotExistError:
1284 1285 pass
1285 1286 if self.changed and missing:
1286 1287 raise NodeDoesNotExistError(
1287 1288 "Node `%s` marked as modified but missing in parents: %s"
1288 1289 % (node.path, parents))
1289 1290
1290 1291 if self.changed and not_changed:
1291 1292 raise NodeNotChangedError(
1292 1293 "Node `%s` wasn't actually changed (parents: %s)"
1293 1294 % (not_changed.pop().path, parents))
1294 1295
1295 1296 # Check nodes marked as removed
1296 1297 if self.removed and not parents:
1297 1298 raise NodeDoesNotExistError(
1298 1299 "Cannot remove node at %s as there "
1299 1300 "were no parents specified" % self.removed[0].path)
1300 1301 really_removed = set()
1301 1302 for p in parents:
1302 1303 for node in self.removed:
1303 1304 try:
1304 1305 p.get_node(node.path)
1305 1306 really_removed.add(node)
1306 1307 except CommitError:
1307 1308 pass
1308 1309 not_removed = set(self.removed) - really_removed
1309 1310 if not_removed:
1310 1311 # TODO: johbo: This code branch does not seem to be covered
1311 1312 raise NodeDoesNotExistError(
1312 1313 "Cannot remove node at %s from "
1313 1314 "following parents: %s" % (not_removed, parents))
1314 1315
1315 1316 def commit(
1316 1317 self, message, author, parents=None, branch=None, date=None,
1317 1318 **kwargs):
1318 1319 """
1319 1320 Performs in-memory commit (doesn't check workdir in any way) and
1320 1321 returns newly created :class:`BaseCommit`. Updates repository's
1321 1322 attribute `commits`.
1322 1323
1323 1324 .. note::
1324 1325
1325 1326 While overriding this method each backend's should call
1326 1327 ``self.check_integrity(parents)`` in the first place.
1327 1328
1328 1329 :param message: message of the commit
1329 1330 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1330 1331 :param parents: single parent or sequence of parents from which commit
1331 1332 would be derived
1332 1333 :param date: ``datetime.datetime`` instance. Defaults to
1333 1334 ``datetime.datetime.now()``.
1334 1335 :param branch: branch name, as string. If none given, default backend's
1335 1336 branch would be used.
1336 1337
1337 1338 :raises ``CommitError``: if any error occurs while committing
1338 1339 """
1339 1340 raise NotImplementedError
1340 1341
1341 1342
1342 1343 class BaseInMemoryChangesetClass(type):
1343 1344
1344 1345 def __instancecheck__(self, instance):
1345 1346 return isinstance(instance, BaseInMemoryCommit)
1346 1347
1347 1348
1348 1349 class BaseInMemoryChangeset(BaseInMemoryCommit):
1349 1350
1350 1351 __metaclass__ = BaseInMemoryChangesetClass
1351 1352
1352 1353 def __new__(cls, *args, **kwargs):
1353 1354 warnings.warn(
1354 1355 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1355 1356 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1356 1357
1357 1358
1358 1359 class EmptyCommit(BaseCommit):
1359 1360 """
1360 1361 An dummy empty commit. It's possible to pass hash when creating
1361 1362 an EmptyCommit
1362 1363 """
1363 1364
1364 1365 def __init__(
1365 1366 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1366 1367 message='', author='', date=None):
1367 1368 self._empty_commit_id = commit_id
1368 1369 # TODO: johbo: Solve idx parameter, default value does not make
1369 1370 # too much sense
1370 1371 self.idx = idx
1371 1372 self.message = message
1372 1373 self.author = author
1373 1374 self.date = date or datetime.datetime.fromtimestamp(0)
1374 1375 self.repository = repo
1375 1376 self.alias = alias
1376 1377
1377 1378 @LazyProperty
1378 1379 def raw_id(self):
1379 1380 """
1380 1381 Returns raw string identifying this commit, useful for web
1381 1382 representation.
1382 1383 """
1383 1384
1384 1385 return self._empty_commit_id
1385 1386
1386 1387 @LazyProperty
1387 1388 def branch(self):
1388 1389 if self.alias:
1389 1390 from rhodecode.lib.vcs.backends import get_backend
1390 1391 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1391 1392
1392 1393 @LazyProperty
1393 1394 def short_id(self):
1394 1395 return self.raw_id[:12]
1395 1396
1396 1397 @LazyProperty
1397 1398 def id(self):
1398 1399 return self.raw_id
1399 1400
1400 1401 def get_file_commit(self, path):
1401 1402 return self
1402 1403
1403 1404 def get_file_content(self, path):
1404 1405 return u''
1405 1406
1406 1407 def get_file_size(self, path):
1407 1408 return 0
1408 1409
1409 1410
1410 1411 class EmptyChangesetClass(type):
1411 1412
1412 1413 def __instancecheck__(self, instance):
1413 1414 return isinstance(instance, EmptyCommit)
1414 1415
1415 1416
1416 1417 class EmptyChangeset(EmptyCommit):
1417 1418
1418 1419 __metaclass__ = EmptyChangesetClass
1419 1420
1420 1421 def __new__(cls, *args, **kwargs):
1421 1422 warnings.warn(
1422 1423 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1423 1424 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1424 1425
1425 1426 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1426 1427 alias=None, revision=-1, message='', author='', date=None):
1427 1428 if requested_revision is not None:
1428 1429 warnings.warn(
1429 1430 "Parameter requested_revision not supported anymore",
1430 1431 DeprecationWarning)
1431 1432 super(EmptyChangeset, self).__init__(
1432 1433 commit_id=cs, repo=repo, alias=alias, idx=revision,
1433 1434 message=message, author=author, date=date)
1434 1435
1435 1436 @property
1436 1437 def revision(self):
1437 1438 warnings.warn("Use idx instead", DeprecationWarning)
1438 1439 return self.idx
1439 1440
1440 1441 @revision.setter
1441 1442 def revision(self, value):
1442 1443 warnings.warn("Use idx instead", DeprecationWarning)
1443 1444 self.idx = value
1444 1445
1445 1446
1446 1447 class EmptyRepository(BaseRepository):
1447 1448 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1448 1449 pass
1449 1450
1450 1451 def get_diff(self, *args, **kwargs):
1451 1452 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1452 1453 return GitDiff('')
1453 1454
1454 1455
1455 1456 class CollectionGenerator(object):
1456 1457
1457 1458 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1458 1459 self.repo = repo
1459 1460 self.commit_ids = commit_ids
1460 1461 # TODO: (oliver) this isn't currently hooked up
1461 1462 self.collection_size = None
1462 1463 self.pre_load = pre_load
1463 1464
1464 1465 def __len__(self):
1465 1466 if self.collection_size is not None:
1466 1467 return self.collection_size
1467 1468 return self.commit_ids.__len__()
1468 1469
1469 1470 def __iter__(self):
1470 1471 for commit_id in self.commit_ids:
1471 1472 # TODO: johbo: Mercurial passes in commit indices or commit ids
1472 1473 yield self._commit_factory(commit_id)
1473 1474
1474 1475 def _commit_factory(self, commit_id):
1475 1476 """
1476 1477 Allows backends to override the way commits are generated.
1477 1478 """
1478 1479 return self.repo.get_commit(commit_id=commit_id,
1479 1480 pre_load=self.pre_load)
1480 1481
1481 1482 def __getslice__(self, i, j):
1482 1483 """
1483 1484 Returns an iterator of sliced repository
1484 1485 """
1485 1486 commit_ids = self.commit_ids[i:j]
1486 1487 return self.__class__(
1487 1488 self.repo, commit_ids, pre_load=self.pre_load)
1488 1489
1489 1490 def __repr__(self):
1490 1491 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1491 1492
1492 1493
1493 1494 class Config(object):
1494 1495 """
1495 1496 Represents the configuration for a repository.
1496 1497
1497 1498 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1498 1499 standard library. It implements only the needed subset.
1499 1500 """
1500 1501
1501 1502 def __init__(self):
1502 1503 self._values = {}
1503 1504
1504 1505 def copy(self):
1505 1506 clone = Config()
1506 1507 for section, values in self._values.items():
1507 1508 clone._values[section] = values.copy()
1508 1509 return clone
1509 1510
1510 1511 def __repr__(self):
1511 1512 return '<Config(%s sections) at %s>' % (
1512 1513 len(self._values), hex(id(self)))
1513 1514
1514 1515 def items(self, section):
1515 1516 return self._values.get(section, {}).iteritems()
1516 1517
1517 1518 def get(self, section, option):
1518 1519 return self._values.get(section, {}).get(option)
1519 1520
1520 1521 def set(self, section, option, value):
1521 1522 section_values = self._values.setdefault(section, {})
1522 1523 section_values[option] = value
1523 1524
1524 1525 def clear_section(self, section):
1525 1526 self._values[section] = {}
1526 1527
1527 1528 def serialize(self):
1528 1529 """
1529 1530 Creates a list of three tuples (section, key, value) representing
1530 1531 this config object.
1531 1532 """
1532 1533 items = []
1533 1534 for section in self._values:
1534 1535 for option, value in self._values[section].items():
1535 1536 items.append(
1536 1537 (safe_str(section), safe_str(option), safe_str(value)))
1537 1538 return items
1538 1539
1539 1540
1540 1541 class Diff(object):
1541 1542 """
1542 1543 Represents a diff result from a repository backend.
1543 1544
1544 1545 Subclasses have to provide a backend specific value for
1545 1546 :attr:`_header_re` and :attr:`_meta_re`.
1546 1547 """
1547 1548 _meta_re = None
1548 1549 _header_re = None
1549 1550
1550 1551 def __init__(self, raw_diff):
1551 1552 self.raw = raw_diff
1552 1553
1553 1554 def chunks(self):
1554 1555 """
1555 1556 split the diff in chunks of separate --git a/file b/file chunks
1556 1557 to make diffs consistent we must prepend with \n, and make sure
1557 1558 we can detect last chunk as this was also has special rule
1558 1559 """
1559 1560
1560 1561 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1561 1562 header = diff_parts[0]
1562 1563
1563 1564 if self._meta_re:
1564 1565 match = self._meta_re.match(header)
1565 1566
1566 1567 chunks = diff_parts[1:]
1567 1568 total_chunks = len(chunks)
1568 1569
1569 1570 return (
1570 1571 DiffChunk(chunk, self, cur_chunk == total_chunks)
1571 1572 for cur_chunk, chunk in enumerate(chunks, start=1))
1572 1573
1573 1574
1574 1575 class DiffChunk(object):
1575 1576
1576 1577 def __init__(self, chunk, diff, last_chunk):
1577 1578 self._diff = diff
1578 1579
1579 1580 # since we split by \ndiff --git that part is lost from original diff
1580 1581 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1581 1582 if not last_chunk:
1582 1583 chunk += '\n'
1583 1584
1584 1585 match = self._diff._header_re.match(chunk)
1585 1586 self.header = match.groupdict()
1586 1587 self.diff = chunk[match.end():]
1587 1588 self.raw = chunk
@@ -1,527 +1,538 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT commit module
23 23 """
24 24
25 25 import re
26 26 import stat
27 27 from ConfigParser import ConfigParser
28 28 from itertools import chain
29 29 from StringIO import StringIO
30 30
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 from rhodecode.lib.datelib import utcdate_fromtimestamp
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import safe_int
36 36 from rhodecode.lib.vcs.conf import settings
37 37 from rhodecode.lib.vcs.backends import base
38 38 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
39 39 from rhodecode.lib.vcs.nodes import (
40 40 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
41 41 ChangedFileNodesGenerator, AddedFileNodesGenerator,
42 RemovedFileNodesGenerator)
42 RemovedFileNodesGenerator, LargeFileNode)
43 43
44 44
45 45 class GitCommit(base.BaseCommit):
46 46 """
47 47 Represents state of the repository at single commit id.
48 48 """
49 49 _author_property = 'author'
50 50 _committer_property = 'committer'
51 51 _date_property = 'commit_time'
52 52 _date_tz_property = 'commit_timezone'
53 53 _message_property = 'message'
54 54 _parents_property = 'parents'
55 55
56 56 _filter_pre_load = [
57 57 # done through a more complex tree walk on parents
58 58 "affected_files",
59 59 # based on repository cached property
60 60 "branch",
61 61 # done through subprocess not remote call
62 62 "children",
63 63 # done through a more complex tree walk on parents
64 64 "status",
65 65 # mercurial specific property not supported here
66 66 "_file_paths",
67 67 ]
68 68
69 69 def __init__(self, repository, raw_id, idx, pre_load=None):
70 70 self.repository = repository
71 71 self._remote = repository._remote
72 72 # TODO: johbo: Tweak of raw_id should not be necessary
73 73 self.raw_id = safe_str(raw_id)
74 74 self.idx = idx
75 75
76 76 self._set_bulk_properties(pre_load)
77 77
78 78 # caches
79 79 self._stat_modes = {} # stat info for paths
80 80 self._paths = {} # path processed with parse_tree
81 81 self.nodes = {}
82 82 self._submodules = None
83 83
84 84 def _set_bulk_properties(self, pre_load):
85 85 if not pre_load:
86 86 return
87 87 pre_load = [entry for entry in pre_load
88 88 if entry not in self._filter_pre_load]
89 89 if not pre_load:
90 90 return
91 91
92 92 result = self._remote.bulk_request(self.raw_id, pre_load)
93 93 for attr, value in result.items():
94 94 if attr in ["author", "message"]:
95 95 if value:
96 96 value = safe_unicode(value)
97 97 elif attr == "date":
98 98 value = utcdate_fromtimestamp(*value)
99 99 elif attr == "parents":
100 100 value = self._make_commits(value)
101 101 self.__dict__[attr] = value
102 102
103 103 @LazyProperty
104 104 def _commit(self):
105 105 return self._remote[self.raw_id]
106 106
107 107 @LazyProperty
108 108 def _tree_id(self):
109 109 return self._remote[self._commit['tree']]['id']
110 110
111 111 @LazyProperty
112 112 def id(self):
113 113 return self.raw_id
114 114
115 115 @LazyProperty
116 116 def short_id(self):
117 117 return self.raw_id[:12]
118 118
119 119 @LazyProperty
120 120 def message(self):
121 121 return safe_unicode(
122 122 self._remote.commit_attribute(self.id, self._message_property))
123 123
124 124 @LazyProperty
125 125 def committer(self):
126 126 return safe_unicode(
127 127 self._remote.commit_attribute(self.id, self._committer_property))
128 128
129 129 @LazyProperty
130 130 def author(self):
131 131 return safe_unicode(
132 132 self._remote.commit_attribute(self.id, self._author_property))
133 133
134 134 @LazyProperty
135 135 def date(self):
136 136 unix_ts, tz = self._remote.get_object_attrs(
137 137 self.raw_id, self._date_property, self._date_tz_property)
138 138 return utcdate_fromtimestamp(unix_ts, tz)
139 139
140 140 @LazyProperty
141 141 def status(self):
142 142 """
143 143 Returns modified, added, removed, deleted files for current commit
144 144 """
145 145 return self.changed, self.added, self.removed
146 146
147 147 @LazyProperty
148 148 def tags(self):
149 149 tags = [safe_unicode(name) for name,
150 150 commit_id in self.repository.tags.iteritems()
151 151 if commit_id == self.raw_id]
152 152 return tags
153 153
154 154 @LazyProperty
155 155 def branch(self):
156 156 for name, commit_id in self.repository.branches.iteritems():
157 157 if commit_id == self.raw_id:
158 158 return safe_unicode(name)
159 159 return None
160 160
161 161 def _get_id_for_path(self, path):
162 162 path = safe_str(path)
163 163 if path in self._paths:
164 164 return self._paths[path]
165 165
166 166 tree_id = self._tree_id
167 167
168 168 path = path.strip('/')
169 169 if path == '':
170 170 data = [tree_id, "tree"]
171 171 self._paths[''] = data
172 172 return data
173 173
174 174 parts = path.split('/')
175 175 dirs, name = parts[:-1], parts[-1]
176 176 cur_dir = ''
177 177
178 178 # initially extract things from root dir
179 179 tree_items = self._remote.tree_items(tree_id)
180 180 self._process_tree_items(tree_items, cur_dir)
181 181
182 182 for dir in dirs:
183 183 if cur_dir:
184 184 cur_dir = '/'.join((cur_dir, dir))
185 185 else:
186 186 cur_dir = dir
187 187 dir_id = None
188 188 for item, stat_, id_, type_ in tree_items:
189 189 if item == dir:
190 190 dir_id = id_
191 191 break
192 192 if dir_id:
193 193 if type_ != "tree":
194 194 raise CommitError('%s is not a directory' % cur_dir)
195 195 # update tree
196 196 tree_items = self._remote.tree_items(dir_id)
197 197 else:
198 198 raise CommitError('%s have not been found' % cur_dir)
199 199
200 200 # cache all items from the given traversed tree
201 201 self._process_tree_items(tree_items, cur_dir)
202 202
203 203 if path not in self._paths:
204 204 raise self.no_node_at_path(path)
205 205
206 206 return self._paths[path]
207 207
208 208 def _process_tree_items(self, items, cur_dir):
209 209 for item, stat_, id_, type_ in items:
210 210 if cur_dir:
211 211 name = '/'.join((cur_dir, item))
212 212 else:
213 213 name = item
214 214 self._paths[name] = [id_, type_]
215 215 self._stat_modes[name] = stat_
216 216
217 217 def _get_kind(self, path):
218 218 path_id, type_ = self._get_id_for_path(path)
219 219 if type_ == 'blob':
220 220 return NodeKind.FILE
221 221 elif type_ == 'tree':
222 222 return NodeKind.DIR
223 223 elif type == 'link':
224 224 return NodeKind.SUBMODULE
225 225 return None
226 226
227 227 def _get_filectx(self, path):
228 228 path = self._fix_path(path)
229 229 if self._get_kind(path) != NodeKind.FILE:
230 230 raise CommitError(
231 231 "File does not exist for commit %s at '%s'" %
232 232 (self.raw_id, path))
233 233 return path
234 234
235 235 def _get_file_nodes(self):
236 236 return chain(*(t[2] for t in self.walk()))
237 237
238 238 @LazyProperty
239 239 def parents(self):
240 240 """
241 241 Returns list of parent commits.
242 242 """
243 243 parent_ids = self._remote.commit_attribute(
244 244 self.id, self._parents_property)
245 245 return self._make_commits(parent_ids)
246 246
247 247 @LazyProperty
248 248 def children(self):
249 249 """
250 250 Returns list of child commits.
251 251 """
252 252 rev_filter = settings.GIT_REV_FILTER
253 253 output, __ = self.repository.run_git_command(
254 254 ['rev-list', '--children'] + rev_filter)
255 255
256 256 child_ids = []
257 257 pat = re.compile(r'^%s' % self.raw_id)
258 258 for l in output.splitlines():
259 259 if pat.match(l):
260 260 found_ids = l.split(' ')[1:]
261 261 child_ids.extend(found_ids)
262 262 return self._make_commits(child_ids)
263 263
264 264 def _make_commits(self, commit_ids):
265 265 return [self.repository.get_commit(commit_id=commit_id)
266 266 for commit_id in commit_ids]
267 267
268 268 def get_file_mode(self, path):
269 269 """
270 270 Returns stat mode of the file at the given `path`.
271 271 """
272 272 path = safe_str(path)
273 273 # ensure path is traversed
274 274 self._get_id_for_path(path)
275 275 return self._stat_modes[path]
276 276
277 277 def is_link(self, path):
278 278 return stat.S_ISLNK(self.get_file_mode(path))
279 279
280 280 def get_file_content(self, path):
281 281 """
282 282 Returns content of the file at given `path`.
283 283 """
284 284 id_, _ = self._get_id_for_path(path)
285 285 return self._remote.blob_as_pretty_string(id_)
286 286
287 287 def get_file_size(self, path):
288 288 """
289 289 Returns size of the file at given `path`.
290 290 """
291 291 id_, _ = self._get_id_for_path(path)
292 292 return self._remote.blob_raw_length(id_)
293 293
294 294 def get_file_history(self, path, limit=None, pre_load=None):
295 295 """
296 296 Returns history of file as reversed list of `GitCommit` objects for
297 297 which file at given `path` has been modified.
298 298
299 299 TODO: This function now uses an underlying 'git' command which works
300 300 quickly but ideally we should replace with an algorithm.
301 301 """
302 302 self._get_filectx(path)
303 303 f_path = safe_str(path)
304 304
305 305 cmd = ['log']
306 306 if limit:
307 307 cmd.extend(['-n', str(safe_int(limit, 0))])
308 308 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
309 309
310 310 output, __ = self.repository.run_git_command(cmd)
311 311 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
312 312
313 313 return [
314 314 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
315 315 for commit_id in commit_ids]
316 316
317 317 # TODO: unused for now potential replacement for subprocess
318 318 def get_file_history_2(self, path, limit=None, pre_load=None):
319 319 """
320 320 Returns history of file as reversed list of `Commit` objects for
321 321 which file at given `path` has been modified.
322 322 """
323 323 self._get_filectx(path)
324 324 f_path = safe_str(path)
325 325
326 326 commit_ids = self._remote.get_file_history(f_path, self.id, limit)
327 327
328 328 return [
329 329 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
330 330 for commit_id in commit_ids]
331 331
332 332 def get_file_annotate(self, path, pre_load=None):
333 333 """
334 334 Returns a generator of four element tuples with
335 335 lineno, commit_id, commit lazy loader and line
336 336
337 337 TODO: This function now uses os underlying 'git' command which is
338 338 generally not good. Should be replaced with algorithm iterating
339 339 commits.
340 340 """
341 341 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
342 342 # -l ==> outputs long shas (and we need all 40 characters)
343 343 # --root ==> doesn't put '^' character for bounderies
344 344 # -r commit_id ==> blames for the given commit
345 345 output, __ = self.repository.run_git_command(cmd)
346 346
347 347 for i, blame_line in enumerate(output.split('\n')[:-1]):
348 348 line_no = i + 1
349 349 commit_id, line = re.split(r' ', blame_line, 1)
350 350 yield (
351 351 line_no, commit_id,
352 352 lambda: self.repository.get_commit(commit_id=commit_id,
353 353 pre_load=pre_load),
354 354 line)
355 355
356 356 def get_nodes(self, path):
357 357 if self._get_kind(path) != NodeKind.DIR:
358 358 raise CommitError(
359 359 "Directory does not exist for commit %s at "
360 360 " '%s'" % (self.raw_id, path))
361 361 path = self._fix_path(path)
362 362 id_, _ = self._get_id_for_path(path)
363 363 tree_id = self._remote[id_]['id']
364 364 dirnodes = []
365 365 filenodes = []
366 366 alias = self.repository.alias
367 367 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
368 368 if type_ == 'link':
369 369 url = self._get_submodule_url('/'.join((path, name)))
370 370 dirnodes.append(SubModuleNode(
371 371 name, url=url, commit=id_, alias=alias))
372 372 continue
373 373
374 374 if path != '':
375 375 obj_path = '/'.join((path, name))
376 376 else:
377 377 obj_path = name
378 378 if obj_path not in self._stat_modes:
379 379 self._stat_modes[obj_path] = stat_
380 380
381 381 if type_ == 'tree':
382 382 dirnodes.append(DirNode(obj_path, commit=self))
383 383 elif type_ == 'blob':
384 384 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
385 385 else:
386 386 raise CommitError(
387 387 "Requested object should be Tree or Blob, is %s", type_)
388 388
389 389 nodes = dirnodes + filenodes
390 390 for node in nodes:
391 391 if node.path not in self.nodes:
392 392 self.nodes[node.path] = node
393 393 nodes.sort()
394 394 return nodes
395 395
396 396 def get_node(self, path, pre_load=None):
397 397 if isinstance(path, unicode):
398 398 path = path.encode('utf-8')
399 399 path = self._fix_path(path)
400 400 if path not in self.nodes:
401 401 try:
402 402 id_, type_ = self._get_id_for_path(path)
403 403 except CommitError:
404 404 raise NodeDoesNotExistError(
405 405 "Cannot find one of parents' directories for a given "
406 406 "path: %s" % path)
407 407
408 408 if type_ == 'link':
409 409 url = self._get_submodule_url(path)
410 410 node = SubModuleNode(path, url=url, commit=id_,
411 411 alias=self.repository.alias)
412 412 elif type_ == 'tree':
413 413 if path == '':
414 414 node = RootNode(commit=self)
415 415 else:
416 416 node = DirNode(path, commit=self)
417 417 elif type_ == 'blob':
418 418 node = FileNode(path, commit=self, pre_load=pre_load)
419 419 else:
420 420 raise self.no_node_at_path(path)
421 421
422 422 # cache node
423 423 self.nodes[path] = node
424 424 return self.nodes[path]
425 425
426 def get_largefile_node(self, path):
427 id_, _ = self._get_id_for_path(path)
428 pointer_spec = self._remote.is_large_file(id_)
429
430 if pointer_spec:
431 # content of that file regular FileNode is the hash of largefile
432 file_id = pointer_spec.get('oid_hash')
433 if self._remote.in_largefiles_store(file_id):
434 lf_path = self._remote.store_path(file_id)
435 return LargeFileNode(lf_path, commit=self, org_path=path)
436
426 437 @LazyProperty
427 438 def affected_files(self):
428 439 """
429 440 Gets a fast accessible file changes for given commit
430 441 """
431 442 added, modified, deleted = self._changes_cache
432 443 return list(added.union(modified).union(deleted))
433 444
434 445 @LazyProperty
435 446 def _changes_cache(self):
436 447 added = set()
437 448 modified = set()
438 449 deleted = set()
439 450 _r = self._remote
440 451
441 452 parents = self.parents
442 453 if not self.parents:
443 454 parents = [base.EmptyCommit()]
444 455 for parent in parents:
445 456 if isinstance(parent, base.EmptyCommit):
446 457 oid = None
447 458 else:
448 459 oid = parent.raw_id
449 460 changes = _r.tree_changes(oid, self.raw_id)
450 461 for (oldpath, newpath), (_, _), (_, _) in changes:
451 462 if newpath and oldpath:
452 463 modified.add(newpath)
453 464 elif newpath and not oldpath:
454 465 added.add(newpath)
455 466 elif not newpath and oldpath:
456 467 deleted.add(oldpath)
457 468 return added, modified, deleted
458 469
459 470 def _get_paths_for_status(self, status):
460 471 """
461 472 Returns sorted list of paths for given ``status``.
462 473
463 474 :param status: one of: *added*, *modified* or *deleted*
464 475 """
465 476 added, modified, deleted = self._changes_cache
466 477 return sorted({
467 478 'added': list(added),
468 479 'modified': list(modified),
469 480 'deleted': list(deleted)}[status]
470 481 )
471 482
472 483 @LazyProperty
473 484 def added(self):
474 485 """
475 486 Returns list of added ``FileNode`` objects.
476 487 """
477 488 if not self.parents:
478 489 return list(self._get_file_nodes())
479 490 return AddedFileNodesGenerator(
480 491 [n for n in self._get_paths_for_status('added')], self)
481 492
482 493 @LazyProperty
483 494 def changed(self):
484 495 """
485 496 Returns list of modified ``FileNode`` objects.
486 497 """
487 498 if not self.parents:
488 499 return []
489 500 return ChangedFileNodesGenerator(
490 501 [n for n in self._get_paths_for_status('modified')], self)
491 502
492 503 @LazyProperty
493 504 def removed(self):
494 505 """
495 506 Returns list of removed ``FileNode`` objects.
496 507 """
497 508 if not self.parents:
498 509 return []
499 510 return RemovedFileNodesGenerator(
500 511 [n for n in self._get_paths_for_status('deleted')], self)
501 512
502 513 def _get_submodule_url(self, submodule_path):
503 514 git_modules_path = '.gitmodules'
504 515
505 516 if self._submodules is None:
506 517 self._submodules = {}
507 518
508 519 try:
509 520 submodules_node = self.get_node(git_modules_path)
510 521 except NodeDoesNotExistError:
511 522 return None
512 523
513 524 content = submodules_node.content
514 525
515 526 # ConfigParser fails if there are whitespaces
516 527 content = '\n'.join(l.strip() for l in content.split('\n'))
517 528
518 529 parser = ConfigParser()
519 530 parser.readfp(StringIO(content))
520 531
521 532 for section in parser.sections():
522 533 path = parser.get(section, 'path')
523 534 url = parser.get(section, 'url')
524 535 if path and url:
525 536 self._submodules[path.strip('/')] = url
526 537
527 538 return self._submodules.get(submodule_path.strip('/'))
@@ -1,362 +1,362 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG commit module
23 23 """
24 24
25 25 import os
26 26
27 27 from zope.cachedescriptors.property import Lazy as LazyProperty
28 28
29 29 from rhodecode.lib.datelib import utcdate_fromtimestamp
30 30 from rhodecode.lib.utils import safe_str, safe_unicode
31 31 from rhodecode.lib.vcs import path as vcspath
32 32 from rhodecode.lib.vcs.backends import base
33 33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
34 34 from rhodecode.lib.vcs.exceptions import CommitError
35 35 from rhodecode.lib.vcs.nodes import (
36 36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
37 37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
38 38 LargeFileNode, LARGEFILE_PREFIX)
39 39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
40 40
41 41
class MercurialCommit(base.BaseCommit):
    """
    Represents state of the repository at the single commit.
    """

    # properties supported only by the git backend; silently dropped from
    # any ``pre_load`` request
    _filter_pre_load = [
        "_commit",
    ]

    def __init__(self, repository, raw_id, idx, pre_load=None):
        """
        :param repository: owning repository object
        :param raw_id: full commit hash (coerced to str)
        :param idx: numeric revision index within the repository
        :param pre_load: optional list of attribute names to fetch eagerly
        """
        self.repository = repository
        self._remote = repository._remote

        self.raw_id = safe_str(raw_id)
        self.idx = repository._sanitize_commit_idx(idx)

        self._set_bulk_properties(pre_load)

        # node cache, filled lazily by get_node()/get_nodes()
        self.nodes = {}

    def _set_bulk_properties(self, pre_load):
        """Fetch the requested attributes in a single remote round-trip."""
        if not pre_load:
            return
        wanted = [entry for entry in pre_load
                  if entry not in self._filter_pre_load]
        if not wanted:
            return

        bulk = self._remote.bulk_request(self.idx, wanted)
        for attr, value in bulk.items():
            if attr in ["author", "branch", "message"]:
                value = safe_unicode(value)
            elif attr == "affected_files":
                value = map(safe_unicode, value)
            elif attr == "date":
                value = utcdate_fromtimestamp(*value)
            elif attr in ["children", "parents"]:
                value = self._make_commits(value)
            self.__dict__[attr] = value
85 85
86 86 @LazyProperty
87 87 def tags(self):
88 88 tags = [name for name, commit_id in self.repository.tags.iteritems()
89 89 if commit_id == self.raw_id]
90 90 return tags
91 91
92 92 @LazyProperty
93 93 def branch(self):
94 94 return safe_unicode(self._remote.ctx_branch(self.idx))
95 95
96 96 @LazyProperty
97 97 def bookmarks(self):
98 98 bookmarks = [
99 99 name for name, commit_id in self.repository.bookmarks.iteritems()
100 100 if commit_id == self.raw_id]
101 101 return bookmarks
102 102
103 103 @LazyProperty
104 104 def message(self):
105 105 return safe_unicode(self._remote.ctx_description(self.idx))
106 106
107 107 @LazyProperty
108 108 def committer(self):
109 109 return safe_unicode(self.author)
110 110
111 111 @LazyProperty
112 112 def author(self):
113 113 return safe_unicode(self._remote.ctx_user(self.idx))
114 114
115 115 @LazyProperty
116 116 def date(self):
117 117 return utcdate_fromtimestamp(*self._remote.ctx_date(self.idx))
118 118
119 119 @LazyProperty
120 120 def status(self):
121 121 """
122 122 Returns modified, added, removed, deleted files for current commit
123 123 """
124 124 return self._remote.ctx_status(self.idx)
125 125
126 126 @LazyProperty
127 127 def _file_paths(self):
128 128 return self._remote.ctx_list(self.idx)
129 129
130 130 @LazyProperty
131 131 def _dir_paths(self):
132 132 p = list(set(get_dirs_for_path(*self._file_paths)))
133 133 p.insert(0, '')
134 134 return p
135 135
136 136 @LazyProperty
137 137 def _paths(self):
138 138 return self._dir_paths + self._file_paths
139 139
140 140 @LazyProperty
141 141 def id(self):
142 142 if self.last:
143 143 return u'tip'
144 144 return self.short_id
145 145
146 146 @LazyProperty
147 147 def short_id(self):
148 148 return self.raw_id[:12]
149 149
150 150 def _make_commits(self, indexes):
151 151 return [self.repository.get_commit(commit_idx=idx)
152 152 for idx in indexes if idx >= 0]
153 153
154 154 @LazyProperty
155 155 def parents(self):
156 156 """
157 157 Returns list of parent commits.
158 158 """
159 159 parents = self._remote.ctx_parents(self.idx)
160 160 return self._make_commits(parents)
161 161
162 162 @LazyProperty
163 163 def children(self):
164 164 """
165 165 Returns list of child commits.
166 166 """
167 167 children = self._remote.ctx_children(self.idx)
168 168 return self._make_commits(children)
169 169
170 170 def diff(self, ignore_whitespace=True, context=3):
171 171 result = self._remote.ctx_diff(
172 172 self.idx,
173 173 git=True, ignore_whitespace=ignore_whitespace, context=context)
174 174 diff = ''.join(result)
175 175 return MercurialDiff(diff)
176 176
177 177 def _fix_path(self, path):
178 178 """
179 179 Mercurial keeps filenodes as str so we need to encode from unicode
180 180 to str.
181 181 """
182 182 return safe_str(super(MercurialCommit, self)._fix_path(path))
183 183
184 184 def _get_kind(self, path):
185 185 path = self._fix_path(path)
186 186 if path in self._file_paths:
187 187 return NodeKind.FILE
188 188 elif path in self._dir_paths:
189 189 return NodeKind.DIR
190 190 else:
191 191 raise CommitError(
192 192 "Node does not exist at the given path '%s'" % (path, ))
193 193
194 194 def _get_filectx(self, path):
195 195 path = self._fix_path(path)
196 196 if self._get_kind(path) != NodeKind.FILE:
197 197 raise CommitError(
198 198 "File does not exist for idx %s at '%s'" % (self.raw_id, path))
199 199 return path
200 200
201 201 def get_file_mode(self, path):
202 202 """
203 203 Returns stat mode of the file at the given ``path``.
204 204 """
205 205 path = self._get_filectx(path)
206 206 if 'x' in self._remote.fctx_flags(self.idx, path):
207 207 return base.FILEMODE_EXECUTABLE
208 208 else:
209 209 return base.FILEMODE_DEFAULT
210 210
211 211 def is_link(self, path):
212 212 path = self._get_filectx(path)
213 213 return 'l' in self._remote.fctx_flags(self.idx, path)
214 214
215 215 def get_file_content(self, path):
216 216 """
217 217 Returns content of the file at given ``path``.
218 218 """
219 219 path = self._get_filectx(path)
220 220 return self._remote.fctx_data(self.idx, path)
221 221
222 222 def get_file_size(self, path):
223 223 """
224 224 Returns size of the file at given ``path``.
225 225 """
226 226 path = self._get_filectx(path)
227 227 return self._remote.fctx_size(self.idx, path)
228 228
229 229 def get_file_history(self, path, limit=None, pre_load=None):
230 230 """
231 231 Returns history of file as reversed list of `MercurialCommit` objects
232 232 for which file at given ``path`` has been modified.
233 233 """
234 234 path = self._get_filectx(path)
235 235 hist = self._remote.file_history(self.idx, path, limit)
236 236 return [
237 237 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
238 238 for commit_id in hist]
239 239
240 240 def get_file_annotate(self, path, pre_load=None):
241 241 """
242 242 Returns a generator of four element tuples with
243 243 lineno, commit_id, commit lazy loader and line
244 244 """
245 245 result = self._remote.fctx_annotate(self.idx, path)
246 246
247 247 for ln_no, commit_id, content in result:
248 248 yield (
249 249 ln_no, commit_id,
250 250 lambda: self.repository.get_commit(commit_id=commit_id,
251 251 pre_load=pre_load),
252 252 content)
253 253
254 254 def get_nodes(self, path):
255 255 """
256 256 Returns combined ``DirNode`` and ``FileNode`` objects list representing
257 257 state of commit at the given ``path``. If node at the given ``path``
258 258 is not instance of ``DirNode``, CommitError would be raised.
259 259 """
260 260
261 261 if self._get_kind(path) != NodeKind.DIR:
262 262 raise CommitError(
263 263 "Directory does not exist for idx %s at '%s'" %
264 264 (self.idx, path))
265 265 path = self._fix_path(path)
266 266
267 267 filenodes = [
268 268 FileNode(f, commit=self) for f in self._file_paths
269 269 if os.path.dirname(f) == path]
270 270 # TODO: johbo: Check if this can be done in a more obvious way
271 271 dirs = path == '' and '' or [
272 272 d for d in self._dir_paths
273 273 if d and vcspath.dirname(d) == path]
274 274 dirnodes = [
275 275 DirNode(d, commit=self) for d in dirs
276 276 if os.path.dirname(d) == path]
277 277
278 278 alias = self.repository.alias
279 279 for k, vals in self._submodules.iteritems():
280 280 loc = vals[0]
281 281 commit = vals[1]
282 282 dirnodes.append(
283 283 SubModuleNode(k, url=loc, commit=commit, alias=alias))
284 284 nodes = dirnodes + filenodes
285 285 # cache nodes
286 286 for node in nodes:
287 287 self.nodes[node.path] = node
288 288 nodes.sort()
289 289
290 290 return nodes
291 291
292 292 def get_node(self, path, pre_load=None):
293 293 """
294 294 Returns `Node` object from the given `path`. If there is no node at
295 295 the given `path`, `NodeDoesNotExistError` would be raised.
296 296 """
297 297 path = self._fix_path(path)
298 298
299 299 if path not in self.nodes:
300 300 if path in self._file_paths:
301 301 node = FileNode(path, commit=self, pre_load=pre_load)
302 302 elif path in self._dir_paths:
303 303 if path == '':
304 304 node = RootNode(commit=self)
305 305 else:
306 306 node = DirNode(path, commit=self)
307 307 else:
308 308 raise self.no_node_at_path(path)
309 309
310 310 # cache node
311 311 self.nodes[path] = node
312 312 return self.nodes[path]
313 313
314 314 def get_largefile_node(self, path):
315 path = os.path.join(LARGEFILE_PREFIX, path)
316 315
317 316 if self._remote.is_large_file(path):
318 317 # content of that file regular FileNode is the hash of largefile
319 318 file_id = self.get_file_content(path).strip()
320 if self._remote.in_store(file_id):
321 path = self._remote.store_path(file_id)
322 return LargeFileNode(path, commit=self)
319
320 if self._remote.in_largefiles_store(file_id):
321 lf_path = self._remote.store_path(file_id)
322 return LargeFileNode(lf_path, commit=self, org_path=path)
323 323 elif self._remote.in_user_cache(file_id):
324 path = self._remote.store_path(file_id)
324 lf_path = self._remote.store_path(file_id)
325 325 self._remote.link(file_id, path)
326 return LargeFileNode(path, commit=self)
326 return LargeFileNode(lf_path, commit=self, org_path=path)
327 327
328 328 @LazyProperty
329 329 def _submodules(self):
330 330 """
331 331 Returns a dictionary with submodule information from substate file
332 332 of hg repository.
333 333 """
334 334 return self._remote.ctx_substate(self.idx)
335 335
336 336 @LazyProperty
337 337 def affected_files(self):
338 338 """
339 339 Gets a fast accessible file changes for given commit
340 340 """
341 341 return self._remote.ctx_files(self.idx)
342 342
343 343 @property
344 344 def added(self):
345 345 """
346 346 Returns list of added ``FileNode`` objects.
347 347 """
348 348 return AddedFileNodesGenerator([n for n in self.status[1]], self)
349 349
350 350 @property
351 351 def changed(self):
352 352 """
353 353 Returns list of modified ``FileNode`` objects.
354 354 """
355 355 return ChangedFileNodesGenerator([n for n in self.status[0]], self)
356 356
357 357 @property
358 358 def removed(self):
359 359 """
360 360 Returns list of removed ``FileNode`` objects.
361 361 """
362 362 return RemovedFileNodesGenerator([n for n in self.status[2]], self)
@@ -1,779 +1,800 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Module holding everything related to vcs nodes, with vcs2 architecture.
23 23 """
24 24
25
25 import os
26 26 import stat
27 27
28 28 from zope.cachedescriptors.property import Lazy as LazyProperty
29 29
30 30 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
31 31 from rhodecode.lib.utils import safe_unicode, safe_str
32 32 from rhodecode.lib.utils2 import md5
33 33 from rhodecode.lib.vcs import path as vcspath
34 34 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
35 35 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
36 36 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
37 37
38 38 LARGEFILE_PREFIX = '.hglf'
39 39
40 40
class NodeKind:
    """Integer markers distinguishing the flavour of a vcs node."""
    SUBMODULE = -1
    DIR = 1
    FILE = 2
    LARGEFILE = 3
46 46
47 47
class NodeState:
    """Possible states of a node relative to its commit."""
    ADDED = u'added'
    CHANGED = u'changed'
    NOT_CHANGED = u'not changed'
    REMOVED = u'removed'
53 53
54 54
class NodeGeneratorBase(object):
    """
    Base class for removed added and changed filenodes, it's a lazy generator
    class that will create filenodes only on iteration or call

    The len method doesn't need to create filenodes at all
    """

    def __init__(self, current_paths, cs):
        self.cs = cs
        self.current_paths = current_paths

    def __call__(self):
        # realize the whole collection eagerly
        return list(self)

    def __getslice__(self, i, j):
        for node_path in self.current_paths[i:j]:
            yield self.cs.get_node(node_path)

    def __len__(self):
        # cheap: no filenodes are created for a length check
        return len(self.current_paths)

    def __iter__(self):
        for node_path in self.current_paths:
            yield self.cs.get_node(node_path)
80 80
81 81
class AddedFileNodesGenerator(NodeGeneratorBase):
    """
    Lazy generator over the file nodes added in the current commit.
    """


class ChangedFileNodesGenerator(NodeGeneratorBase):
    """
    Lazy generator over the file nodes modified in the current commit.
    """
92 92
93 93
class RemovedFileNodesGenerator(NodeGeneratorBase):
    """
    Lazy generator over the file paths removed in the current commit; yields
    ``RemovedFileNode`` placeholders since the content no longer exists.
    """
    def __iter__(self):
        for removed_path in self.current_paths:
            yield RemovedFileNode(path=removed_path)

    def __getslice__(self, i, j):
        for removed_path in self.current_paths[i:j]:
            yield RemovedFileNode(path=removed_path)
105 105
106 106
class Node(object):
    """
    Simplest class representing file or directory on repository. SCM backends
    should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
    directly.

    Node's ``path`` cannot start with slash as we operate on *relative* paths
    only. Moreover, every single node is identified by the ``path`` attribute,
    so it cannot end with slash, too. Otherwise, path could lead to mistakes.
    """

    commit = None

    def __init__(self, path, kind):
        self._validate_path(path)  # can throw exception if path is invalid
        # paths are stored internally as str with any trailing slash removed
        self.path = safe_str(path.rstrip('/'))
        if path == '' and kind != NodeKind.DIR:
            raise NodeError("Only DirNode and its subclasses may be "
                            "initialized with empty path")
        self.kind = kind

        if self.is_root() and not self.is_dir():
            raise NodeError("Root node cannot be FILE kind")

    def _validate_path(self, path):
        # only relative paths are supported; an absolute path is a caller bug
        if path.startswith('/'):
            raise NodeError(
                "Cannot initialize Node objects with slash at "
                "the beginning as only relative paths are supported. "
                "Got %s" % (path,))

    @LazyProperty
    def parent(self):
        """Parent node, resolved through the commit when one is attached."""
        parent_path = self.get_parent_path()
        if not parent_path:
            return None
        if self.commit:
            return self.commit.get_node(parent_path)
        return DirNode(parent_path)

    @LazyProperty
    def unicode_path(self):
        return safe_unicode(self.path)

    @LazyProperty
    def dir_path(self):
        """
        Returns name of the directory from full path of this vcs node. Empty
        string is returned if there's no directory in the path
        """
        parts = self.path.rstrip('/').rsplit('/', 1)
        if len(parts) == 2:
            return safe_unicode(parts[0])
        return u''

    @LazyProperty
    def name(self):
        """
        Returns name of the node so if its path
        then only last part is returned.
        """
        return safe_unicode(self.path.rstrip('/').split('/')[-1])

    @property
    def kind(self):
        return self._kind

    @kind.setter
    def kind(self, kind):
        # kind may only be assigned once
        if hasattr(self, '_kind'):
            raise NodeError("Cannot change node's kind")
        self._kind = kind
        # Post setter check (path's trailing slash)
        if self.path.endswith('/'):
            raise NodeError("Node's path cannot end with slash")
183 183
184 184 def __cmp__(self, other):
185 185 """
186 186 Comparator using name of the node, needed for quick list sorting.
187 187 """
188 188 kind_cmp = cmp(self.kind, other.kind)
189 189 if kind_cmp:
190 190 return kind_cmp
191 191 return cmp(self.name, other.name)
192 192
193 193 def __eq__(self, other):
194 194 for attr in ['name', 'path', 'kind']:
195 195 if getattr(self, attr) != getattr(other, attr):
196 196 return False
197 197 if self.is_file():
198 198 if self.content != other.content:
199 199 return False
200 200 else:
201 201 # For DirNode's check without entering each dir
202 202 self_nodes_paths = list(sorted(n.path for n in self.nodes))
203 203 other_nodes_paths = list(sorted(n.path for n in self.nodes))
204 204 if self_nodes_paths != other_nodes_paths:
205 205 return False
206 206 return True
207 207
208 208 def __ne__(self, other):
209 209 return not self.__eq__(other)
210 210
211 211 def __repr__(self):
212 212 return '<%s %r>' % (self.__class__.__name__, self.path)
213 213
214 214 def __str__(self):
215 215 return self.__repr__()
216 216
217 217 def __unicode__(self):
218 218 return self.name
219 219
220 220 def get_parent_path(self):
221 221 """
222 222 Returns node's parent path or empty string if node is root.
223 223 """
224 224 if self.is_root():
225 225 return ''
226 226 return vcspath.dirname(self.path.rstrip('/')) + '/'
227 227
228 228 def is_file(self):
229 229 """
230 230 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
231 231 otherwise.
232 232 """
233 233 return self.kind == NodeKind.FILE
234 234
235 235 def is_dir(self):
236 236 """
237 237 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
238 238 otherwise.
239 239 """
240 240 return self.kind == NodeKind.DIR
241 241
242 242 def is_root(self):
243 243 """
244 244 Returns ``True`` if node is a root node and ``False`` otherwise.
245 245 """
246 246 return self.kind == NodeKind.DIR and self.path == ''
247 247
248 248 def is_submodule(self):
249 249 """
250 250 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
251 251 otherwise.
252 252 """
253 253 return self.kind == NodeKind.SUBMODULE
254 254
255 255 def is_largefile(self):
256 256 """
257 257 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
258 258 otherwise
259 259 """
260 260 return self.kind == NodeKind.LARGEFILE
261 261
262 262 def is_link(self):
263 263 if self.commit:
264 264 return self.commit.is_link(self.path)
265 265 return False
266 266
267 267 @LazyProperty
268 268 def added(self):
269 269 return self.state is NodeState.ADDED
270 270
271 271 @LazyProperty
272 272 def changed(self):
273 273 return self.state is NodeState.CHANGED
274 274
275 275 @LazyProperty
276 276 def not_changed(self):
277 277 return self.state is NodeState.NOT_CHANGED
278 278
279 279 @LazyProperty
280 280 def removed(self):
281 281 return self.state is NodeState.REMOVED
282 282
283 283
class FileNode(Node):
    """
    Class representing file nodes.

    :attribute: path: path to the node, relative to repository's root
    :attribute: content: if given arbitrary sets content of the file
    :attribute: commit: if given, first time content is accessed, callback
    :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
    """
    _filter_pre_load = []

    def __init__(self, path, content=None, commit=None, mode=None, pre_load=None):
        """
        Only one of ``content`` and ``commit`` may be given. Passing both
        would raise ``NodeError`` exception.

        :param path: relative path to the node
        :param content: content may be passed to constructor
        :param commit: if given, will use it to lazily fetch content
        :param mode: ST_MODE (i.e. 0100644)
        """
        if content and commit:
            raise NodeError("Cannot use both content and commit")
        super(FileNode, self).__init__(path, kind=NodeKind.FILE)
        self.commit = commit
        self._content = content
        self._mode = mode or FILEMODE_DEFAULT

        self._set_bulk_properties(pre_load)

    def _set_bulk_properties(self, pre_load):
        """Eagerly resolve the requested attributes and cache the results."""
        if not pre_load:
            return
        wanted = [entry for entry in pre_load
                  if entry not in self._filter_pre_load]
        if not wanted:
            return

        for attr_name in wanted:
            value = getattr(self, attr_name)
            if callable(value):
                value = value()
            self.__dict__[attr_name] = value
327 327
328 328 @LazyProperty
329 329 def mode(self):
330 330 """
331 331 Returns lazily mode of the FileNode. If `commit` is not set, would
332 332 use value given at initialization or `FILEMODE_DEFAULT` (default).
333 333 """
334 334 if self.commit:
335 335 mode = self.commit.get_file_mode(self.path)
336 336 else:
337 337 mode = self._mode
338 338 return mode
339 339
340 340 @LazyProperty
341 341 def raw_bytes(self):
342 342 """
343 343 Returns lazily the raw bytes of the FileNode.
344 344 """
345 345 if self.commit:
346 346 if self._content is None:
347 347 self._content = self.commit.get_file_content(self.path)
348 348 content = self._content
349 349 else:
350 350 content = self._content
351 351 return content
352 352
353 353 @LazyProperty
354 354 def md5(self):
355 355 """
356 356 Returns md5 of the file node.
357 357 """
358 358 return md5(self.raw_bytes)
359 359
360 360 @LazyProperty
361 361 def content(self):
362 362 """
363 363 Returns lazily content of the FileNode. If possible, would try to
364 364 decode content from UTF-8.
365 365 """
366 366 content = self.raw_bytes
367 367
368 368 if self.is_binary:
369 369 return content
370 370 return safe_unicode(content)
371 371
372 372 @LazyProperty
373 373 def size(self):
374 374 if self.commit:
375 375 return self.commit.get_file_size(self.path)
376 376 raise NodeError(
377 377 "Cannot retrieve size of the file without related "
378 378 "commit attribute")
379 379
380 380 @LazyProperty
381 381 def message(self):
382 382 if self.commit:
383 383 return self.last_commit.message
384 384 raise NodeError(
385 385 "Cannot retrieve message of the file without related "
386 386 "commit attribute")
387 387
388 388 @LazyProperty
389 389 def last_commit(self):
390 390 if self.commit:
391 391 pre_load = ["author", "date", "message"]
392 392 return self.commit.get_file_commit(self.path, pre_load=pre_load)
393 393 raise NodeError(
394 394 "Cannot retrieve last commit of the file without "
395 395 "related commit attribute")
396 396
397 397 def get_mimetype(self):
398 398 """
399 399 Mimetype is calculated based on the file's content. If ``_mimetype``
400 400 attribute is available, it will be returned (backends which store
401 401 mimetypes or can easily recognize them, should set this private
402 402 attribute to indicate that type should *NOT* be calculated).
403 403 """
404 404
405 405 if hasattr(self, '_mimetype'):
406 406 if (isinstance(self._mimetype, (tuple, list,)) and
407 407 len(self._mimetype) == 2):
408 408 return self._mimetype
409 409 else:
410 410 raise NodeError('given _mimetype attribute must be an 2 '
411 411 'element list or tuple')
412 412
413 413 db = get_mimetypes_db()
414 414 mtype, encoding = db.guess_type(self.name)
415 415
416 416 if mtype is None:
417 417 if self.is_binary:
418 418 mtype = 'application/octet-stream'
419 419 encoding = None
420 420 else:
421 421 mtype = 'text/plain'
422 422 encoding = None
423 423
424 424 # try with pygments
425 425 try:
426 426 from pygments.lexers import get_lexer_for_filename
427 427 mt = get_lexer_for_filename(self.name).mimetypes
428 428 except Exception:
429 429 mt = None
430 430
431 431 if mt:
432 432 mtype = mt[0]
433 433
434 434 return mtype, encoding
435 435
436 436 @LazyProperty
437 437 def mimetype(self):
438 438 """
439 439 Wrapper around full mimetype info. It returns only type of fetched
440 440 mimetype without the encoding part. use get_mimetype function to fetch
441 441 full set of (type,encoding)
442 442 """
443 443 return self.get_mimetype()[0]
444 444
445 445 @LazyProperty
446 446 def mimetype_main(self):
447 447 return self.mimetype.split('/')[0]
448 448
449 449 @classmethod
450 450 def get_lexer(cls, filename, content=None):
451 451 from pygments import lexers
452 452
453 453 extension = filename.split('.')[-1]
454 454 lexer = None
455 455
456 456 try:
457 457 lexer = lexers.guess_lexer_for_filename(
458 458 filename, content, stripnl=False)
459 459 except lexers.ClassNotFound:
460 460 lexer = None
461 461
462 462 # try our EXTENSION_MAP
463 463 if not lexer:
464 464 try:
465 465 lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
466 466 if lexer_class:
467 467 lexer = lexers.get_lexer_by_name(lexer_class[0])
468 468 except lexers.ClassNotFound:
469 469 lexer = None
470 470
471 471 if not lexer:
472 472 lexer = lexers.TextLexer(stripnl=False)
473 473
474 474 return lexer
475 475
476 476 @LazyProperty
477 477 def lexer(self):
478 478 """
479 479 Returns pygment's lexer class. Would try to guess lexer taking file's
480 480 content, name and mimetype.
481 481 """
482 482 return self.get_lexer(self.name, self.content)
483 483
484 484 @LazyProperty
485 485 def lexer_alias(self):
486 486 """
487 487 Returns first alias of the lexer guessed for this file.
488 488 """
489 489 return self.lexer.aliases[0]
490 490
491 491 @LazyProperty
492 492 def history(self):
493 493 """
494 494 Returns a list of commit for this file in which the file was changed
495 495 """
496 496 if self.commit is None:
497 497 raise NodeError('Unable to get commit for this FileNode')
498 498 return self.commit.get_file_history(self.path)
499 499
500 500 @LazyProperty
501 501 def annotate(self):
502 502 """
503 503 Returns a list of three element tuples with lineno, commit and line
504 504 """
505 505 if self.commit is None:
506 506 raise NodeError('Unable to get commit for this FileNode')
507 507 pre_load = ["author", "date", "message"]
508 508 return self.commit.get_file_annotate(self.path, pre_load=pre_load)
509 509
510 510 @LazyProperty
511 511 def state(self):
512 512 if not self.commit:
513 513 raise NodeError(
514 514 "Cannot check state of the node if it's not "
515 515 "linked with commit")
516 516 elif self.path in (node.path for node in self.commit.added):
517 517 return NodeState.ADDED
518 518 elif self.path in (node.path for node in self.commit.changed):
519 519 return NodeState.CHANGED
520 520 else:
521 521 return NodeState.NOT_CHANGED
522 522
523 523 @LazyProperty
524 524 def is_binary(self):
525 525 """
526 526 Returns True if file has binary content.
527 527 """
528 528 _bin = self.raw_bytes and '\0' in self.raw_bytes
529 529 return _bin
530 530
531 531 @LazyProperty
532 532 def extension(self):
533 533 """Returns filenode extension"""
534 534 return self.name.split('.')[-1]
535 535
536 536 @property
537 537 def is_executable(self):
538 538 """
539 539 Returns ``True`` if file has executable flag turned on.
540 540 """
541 541 return bool(self.mode & stat.S_IXUSR)
542 542
543 543 def get_largefile_node(self):
544 544 """
545 545 Try to return a Mercurial FileNode from this node. It does internal
546 546 checks inside largefile store, if that file exist there it will
547 547 create special instance of LargeFileNode which can get content from
548 548 LF store.
549 549 """
550 if self.commit and self.path.startswith(LARGEFILE_PREFIX):
551 largefile_path = self.path.split(LARGEFILE_PREFIX)[-1].lstrip('/')
552 return self.commit.get_largefile_node(largefile_path)
550 if self.commit:
551 return self.commit.get_largefile_node(self.path)
553 552
554 553 def lines(self, count_empty=False):
555 554 all_lines, empty_lines = 0, 0
556 555
557 556 if not self.is_binary:
558 557 content = self.content
559 558 if count_empty:
560 559 all_lines = 0
561 560 empty_lines = 0
562 561 for line in content.splitlines(True):
563 562 if line == '\n':
564 563 empty_lines += 1
565 564 all_lines += 1
566 565
567 566 return all_lines, all_lines - empty_lines
568 567 else:
569 568 # fast method
570 569 empty_lines = all_lines = content.count('\n')
571 570 if all_lines == 0 and content:
572 571 # one-line without a newline
573 572 empty_lines = all_lines = 1
574 573
575 574 return all_lines, empty_lines
576 575
577 576 def __repr__(self):
578 577 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
579 578 getattr(self.commit, 'short_id', ''))
580 579
581 580
class RemovedFileNode(FileNode):
    """
    Dummy FileNode class - trying to access any public attribute except path,
    name, kind or state (or methods/attributes checking those two) would raise
    RemovedFileNodeError.
    """
    # public attributes that remain accessible on a removed file placeholder
    ALLOWED_ATTRIBUTES = [
        'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
        'added', 'changed', 'not_changed', 'removed'
    ]

    def __init__(self, path):
        """
        :param path: relative path to the node
        """
        super(RemovedFileNode, self).__init__(path=path)

    def __getattribute__(self, attr):
        # private names and the whitelist pass through; everything else fails
        if attr.startswith('_') or attr in RemovedFileNode.ALLOWED_ATTRIBUTES:
            return super(RemovedFileNode, self).__getattribute__(attr)
        raise RemovedFileNodeError(
            "Cannot access attribute %s on RemovedFileNode" % attr)

    @LazyProperty
    def state(self):
        return NodeState.REMOVED
608 607
609 608
610 609 class DirNode(Node):
611 610 """
612 611 DirNode stores list of files and directories within this node.
613 612 Nodes may be used standalone but within repository context they
614 613 lazily fetch data within same repositorty's commit.
615 614 """
616 615
617 616 def __init__(self, path, nodes=(), commit=None):
618 617 """
619 618 Only one of ``nodes`` and ``commit`` may be given. Passing both
620 619 would raise ``NodeError`` exception.
621 620
622 621 :param path: relative path to the node
623 622 :param nodes: content may be passed to constructor
624 623 :param commit: if given, will use it to lazily fetch content
625 624 """
626 625 if nodes and commit:
627 626 raise NodeError("Cannot use both nodes and commit")
628 627 super(DirNode, self).__init__(path, NodeKind.DIR)
629 628 self.commit = commit
630 629 self._nodes = nodes
631 630
632 631 @LazyProperty
633 632 def content(self):
634 633 raise NodeError(
635 634 "%s represents a dir and has no `content` attribute" % self)
636 635
637 636 @LazyProperty
638 637 def nodes(self):
639 638 if self.commit:
640 639 nodes = self.commit.get_nodes(self.path)
641 640 else:
642 641 nodes = self._nodes
643 642 self._nodes_dict = dict((node.path, node) for node in nodes)
644 643 return sorted(nodes)
645 644
646 645 @LazyProperty
647 646 def files(self):
648 647 return sorted((node for node in self.nodes if node.is_file()))
649 648
650 649 @LazyProperty
651 650 def dirs(self):
652 651 return sorted((node for node in self.nodes if node.is_dir()))
653 652
654 653 def __iter__(self):
655 654 for node in self.nodes:
656 655 yield node
657 656
658 657 def get_node(self, path):
659 658 """
660 659 Returns node from within this particular ``DirNode``, so it is now
661 660 allowed to fetch, i.e. node located at 'docs/api/index.rst' from node
662 661 'docs'. In order to access deeper nodes one must fetch nodes between
663 662 them first - this would work::
664 663
665 664 docs = root.get_node('docs')
666 665 docs.get_node('api').get_node('index.rst')
667 666
668 667 :param: path - relative to the current node
669 668
670 669 .. note::
671 670 To access lazily (as in example above) node have to be initialized
672 671 with related commit object - without it node is out of
673 672 context and may know nothing about anything else than nearest
674 673 (located at same level) nodes.
675 674 """
676 675 try:
677 676 path = path.rstrip('/')
678 677 if path == '':
679 678 raise NodeError("Cannot retrieve node without path")
680 679 self.nodes # access nodes first in order to set _nodes_dict
681 680 paths = path.split('/')
682 681 if len(paths) == 1:
683 682 if not self.is_root():
684 683 path = '/'.join((self.path, paths[0]))
685 684 else:
686 685 path = paths[0]
687 686 return self._nodes_dict[path]
688 687 elif len(paths) > 1:
689 688 if self.commit is None:
690 689 raise NodeError(
691 690 "Cannot access deeper nodes without commit")
692 691 else:
693 692 path1, path2 = paths[0], '/'.join(paths[1:])
694 693 return self.get_node(path1).get_node(path2)
695 694 else:
696 695 raise KeyError
697 696 except KeyError:
698 697 raise NodeError("Node does not exist at %s" % path)
699 698
700 699 @LazyProperty
701 700 def state(self):
702 701 raise NodeError("Cannot access state of DirNode")
703 702
704 703 @LazyProperty
705 704 def size(self):
706 705 size = 0
707 706 for root, dirs, files in self.commit.walk(self.path):
708 707 for f in files:
709 708 size += f.size
710 709
711 710 return size
712 711
713 712 def __repr__(self):
714 713 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
715 714 getattr(self.commit, 'short_id', ''))
716 715
717 716
718 717 class RootNode(DirNode):
719 718 """
720 719 DirNode being the root node of the repository.
721 720 """
722 721
723 722 def __init__(self, nodes=(), commit=None):
724 723 super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)
725 724
726 725 def __repr__(self):
727 726 return '<%s>' % self.__class__.__name__
728 727
729 728
730 729 class SubModuleNode(Node):
731 730 """
732 731 represents a SubModule of Git or SubRepo of Mercurial
733 732 """
734 733 is_binary = False
735 734 size = 0
736 735
737 736 def __init__(self, name, url=None, commit=None, alias=None):
738 737 self.path = name
739 738 self.kind = NodeKind.SUBMODULE
740 739 self.alias = alias
741 740
742 741 # we have to use EmptyCommit here since this can point to svn/git/hg
743 742 # submodules we cannot get from repository
744 743 self.commit = EmptyCommit(str(commit), alias=alias)
745 744 self.url = url or self._extract_submodule_url()
746 745
747 746 def __repr__(self):
748 747 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
749 748 getattr(self.commit, 'short_id', ''))
750 749
751 750 def _extract_submodule_url(self):
752 751 # TODO: find a way to parse gits submodule file and extract the
753 752 # linking URL
754 753 return self.path
755 754
756 755 @LazyProperty
757 756 def name(self):
758 757 """
759 758 Returns name of the node so if its path
760 759 then only last part is returned.
761 760 """
762 761 org = safe_unicode(self.path.rstrip('/').split('/')[-1])
763 762 return u'%s @ %s' % (org, self.commit.short_id)
764 763
765 764
766 765 class LargeFileNode(FileNode):
767 766
767 def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
768 self.path = path
769 self.org_path = org_path
770 self.kind = NodeKind.LARGEFILE
771 self.alias = alias
772
768 773 def _validate_path(self, path):
769 774 """
770 775 we override check since the LargeFileNode path is system absolute
771 776 """
777 pass
772 778
779 def __repr__(self):
780 return '<%s %r>' % (self.__class__.__name__, self.path)
781
782 @LazyProperty
783 def size(self):
784 return os.stat(self.path).st_size
785
786 @LazyProperty
773 787 def raw_bytes(self):
774 788 if self.commit:
775 789 with open(self.path, 'rb') as f:
776 790 content = f.read()
777 791 else:
778 792 content = self._content
779 return content No newline at end of file
793 return content
794
795 @LazyProperty
796 def name(self):
797 """
798 Overwrites name to be the org lf path
799 """
800 return self.org_path
@@ -1,78 +1,87 b''
1 1 <%namespace name="sourceblock" file="/codeblocks/source.mako"/>
2 2
3 3 <div id="codeblock" class="codeblock">
4 4 <div class="codeblock-header">
5 5 <div class="stats">
6 6 <span> <strong>${c.file}</strong></span>
7 % if c.lf_node:
8 <span title="${_('This file is a pointer to large binary file')}"> | ${_('LargeFile')} ${h.format_byte_size_binary(c.lf_node.size)} </span>
9 % endif
7 10 <span> | ${c.file.lines()[0]} ${ungettext('line', 'lines', c.file.lines()[0])}</span>
8 11 <span> | ${h.format_byte_size_binary(c.file.size)}</span>
9 12 <span> | ${c.file.mimetype} </span>
10 13 <span class="item last"> | ${h.get_lexer_for_filenode(c.file).__class__.__name__}</span>
11 14 </div>
12 15 <div class="buttons">
13 16 <a id="file_history_overview" href="#">
14 17 ${_('History')}
15 18 </a>
16 19 <a id="file_history_overview_full" style="display: none" href="${h.url('changelog_file_home',repo_name=c.repo_name, revision=c.commit.raw_id, f_path=c.f_path)}">
17 20 ${_('Show Full History')}
18 21 </a> |
19 22 %if c.annotate:
20 23 ${h.link_to(_('Source'), h.url('files_home', repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
21 24 %else:
22 25 ${h.link_to(_('Annotation'), h.url('files_annotate_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
23 26 %endif
24 27 | ${h.link_to(_('Raw'), h.url('files_raw_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
25 | <a href="${h.url('files_rawfile_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path)}">
28 |
29 % if c.lf_node:
30 <a href="${h.url('files_rawfile_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path, lf=1)}">
31 ${_('Download largefile')}
32 </a>
33 % else:
34 <a href="${h.url('files_rawfile_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path)}">
26 35 ${_('Download')}
27 36 </a>
37 % endif
28 38
29 39 %if h.HasRepoPermissionAny('repository.write','repository.admin')(c.repo_name):
30 40 |
31 41 %if c.on_branch_head and c.branch_or_raw_id and not c.file.is_binary:
32 42 <a href="${h.url('files_edit_home',repo_name=c.repo_name,revision=c.branch_or_raw_id,f_path=c.f_path, anchor='edit')}">
33 43 ${_('Edit on Branch:%s') % c.branch_or_raw_id}
34 44 </a>
35 45 | <a class="btn-danger btn-link" href="${h.url('files_delete_home',repo_name=c.repo_name,revision=c.branch_or_raw_id,f_path=c.f_path, anchor='edit')}">${_('Delete')}
36 46 </a>
37 47 %elif c.on_branch_head and c.branch_or_raw_id and c.file.is_binary:
38 48 ${h.link_to(_('Edit'), '#', class_="btn btn-link disabled tooltip", title=_('Editing binary files not allowed'))}
39 49 | ${h.link_to(_('Delete'), h.url('files_delete_home',repo_name=c.repo_name,revision=c.branch_or_raw_id,f_path=c.f_path, anchor='edit'),class_="btn-danger btn-link")}
40 50 %else:
41 51 ${h.link_to(_('Edit'), '#', class_="btn btn-link disabled tooltip", title=_('Editing files allowed only when on branch head commit'))}
42 52 | ${h.link_to(_('Delete'), '#', class_="btn btn-danger btn-link disabled tooltip", title=_('Deleting files allowed only when on branch head commit'))}
43 53 %endif
44 54 %endif
45 55 </div>
46 56 </div>
47 57 <div id="file_history_container"></div>
48 58 <div class="code-body">
49 59 %if c.file.is_binary:
50 60 <div>
51 61 ${_('Binary file (%s)') % c.file.mimetype}
52 62 </div>
53 63 %else:
54 64 % if c.file.size < c.cut_off_limit:
55 65 %if c.renderer and not c.annotate:
56 66 ${h.render(c.file.content, renderer=c.renderer, relative_url=h.url('files_raw_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
57 67 %else:
58 68 <table class="cb codehilite">
59 69 %if c.annotate:
60 70 <% color_hasher = h.color_hasher() %>
61 71 %for annotation, lines in c.annotated_lines:
62 72 ${sourceblock.render_annotation_lines(annotation, lines, color_hasher)}
63 73 %endfor
64 74 %else:
65 75 %for line_num, tokens in enumerate(c.lines, 1):
66 76 ${sourceblock.render_line(line_num, tokens)}
67 77 %endfor
68 78 %endif
69 79 </table>
70 </div>
71 80 %endif
72 81 %else:
73 82 ${_('File is too big to display')} ${h.link_to(_('Show as raw'),
74 83 h.url('files_raw_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path))}
75 84 %endif
76 85 %endif
77 86 </div>
78 87 </div> No newline at end of file
@@ -1,1809 +1,1814 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33
34 34 import mock
35 35 import pyramid.testing
36 36 import pytest
37 37 import colander
38 38 import requests
39 39
40 40 import rhodecode
41 41 from rhodecode.lib.utils2 import AttributeDict
42 42 from rhodecode.model.changeset_status import ChangesetStatusModel
43 43 from rhodecode.model.comment import CommentsModel
44 44 from rhodecode.model.db import (
45 45 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
46 46 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
47 47 from rhodecode.model.meta import Session
48 48 from rhodecode.model.pull_request import PullRequestModel
49 49 from rhodecode.model.repo import RepoModel
50 50 from rhodecode.model.repo_group import RepoGroupModel
51 51 from rhodecode.model.user import UserModel
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53 from rhodecode.model.user_group import UserGroupModel
54 54 from rhodecode.model.integration import IntegrationModel
55 55 from rhodecode.integrations import integration_type_registry
56 56 from rhodecode.integrations.types.base import IntegrationTypeBase
57 57 from rhodecode.lib.utils import repo2db_mapper
58 58 from rhodecode.lib.vcs import create_vcsserver_proxy
59 59 from rhodecode.lib.vcs.backends import get_backend
60 60 from rhodecode.lib.vcs.nodes import FileNode
61 61 from rhodecode.tests import (
62 62 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 63 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 64 TEST_USER_REGULAR_PASS)
65 65 from rhodecode.tests.utils import CustomTestApp
66 66 from rhodecode.tests.fixture import Fixture
67 67
68 68
69 69 def _split_comma(value):
70 70 return value.split(',')
71 71
72 72
73 73 def pytest_addoption(parser):
74 74 parser.addoption(
75 75 '--keep-tmp-path', action='store_true',
76 76 help="Keep the test temporary directories")
77 77 parser.addoption(
78 78 '--backends', action='store', type=_split_comma,
79 79 default=['git', 'hg', 'svn'],
80 80 help="Select which backends to test for backend specific tests.")
81 81 parser.addoption(
82 82 '--dbs', action='store', type=_split_comma,
83 83 default=['sqlite'],
84 84 help="Select which database to test for database specific tests. "
85 85 "Possible options are sqlite,postgres,mysql")
86 86 parser.addoption(
87 87 '--appenlight', '--ae', action='store_true',
88 88 help="Track statistics in appenlight.")
89 89 parser.addoption(
90 90 '--appenlight-api-key', '--ae-key',
91 91 help="API key for Appenlight.")
92 92 parser.addoption(
93 93 '--appenlight-url', '--ae-url',
94 94 default="https://ae.rhodecode.com",
95 95 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
96 96 parser.addoption(
97 97 '--sqlite-connection-string', action='store',
98 98 default='', help="Connection string for the dbs tests with SQLite")
99 99 parser.addoption(
100 100 '--postgres-connection-string', action='store',
101 101 default='', help="Connection string for the dbs tests with Postgres")
102 102 parser.addoption(
103 103 '--mysql-connection-string', action='store',
104 104 default='', help="Connection string for the dbs tests with MySQL")
105 105 parser.addoption(
106 106 '--repeat', type=int, default=100,
107 107 help="Number of repetitions in performance tests.")
108 108
109 109
110 110 def pytest_configure(config):
111 111 # Appy the kombu patch early on, needed for test discovery on Python 2.7.11
112 112 from rhodecode.config import patches
113 113 patches.kombu_1_5_1_python_2_7_11()
114 114
115 115
116 116 def pytest_collection_modifyitems(session, config, items):
117 117 # nottest marked, compare nose, used for transition from nose to pytest
118 118 remaining = [
119 119 i for i in items if getattr(i.obj, '__test__', True)]
120 120 items[:] = remaining
121 121
122 122
123 123 def pytest_generate_tests(metafunc):
124 124 # Support test generation based on --backend parameter
125 125 if 'backend_alias' in metafunc.fixturenames:
126 126 backends = get_backends_from_metafunc(metafunc)
127 127 scope = None
128 128 if not backends:
129 129 pytest.skip("Not enabled for any of selected backends")
130 130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 131 elif hasattr(metafunc.function, 'backends'):
132 132 backends = get_backends_from_metafunc(metafunc)
133 133 if not backends:
134 134 pytest.skip("Not enabled for any of selected backends")
135 135
136 136
137 137 def get_backends_from_metafunc(metafunc):
138 138 requested_backends = set(metafunc.config.getoption('--backends'))
139 139 if hasattr(metafunc.function, 'backends'):
140 140 # Supported backends by this test function, created from
141 141 # pytest.mark.backends
142 142 backends = metafunc.function.backends.args
143 143 elif hasattr(metafunc.cls, 'backend_alias'):
144 144 # Support class attribute "backend_alias", this is mainly
145 145 # for legacy reasons for tests not yet using pytest.mark.backends
146 146 backends = [metafunc.cls.backend_alias]
147 147 else:
148 148 backends = metafunc.config.getoption('--backends')
149 149 return requested_backends.intersection(backends)
150 150
151 151
152 152 @pytest.fixture(scope='session', autouse=True)
153 153 def activate_example_rcextensions(request):
154 154 """
155 155 Patch in an example rcextensions module which verifies passed in kwargs.
156 156 """
157 157 from rhodecode.tests.other import example_rcextensions
158 158
159 159 old_extensions = rhodecode.EXTENSIONS
160 160 rhodecode.EXTENSIONS = example_rcextensions
161 161
162 162 @request.addfinalizer
163 163 def cleanup():
164 164 rhodecode.EXTENSIONS = old_extensions
165 165
166 166
167 167 @pytest.fixture
168 168 def capture_rcextensions():
169 169 """
170 170 Returns the recorded calls to entry points in rcextensions.
171 171 """
172 172 calls = rhodecode.EXTENSIONS.calls
173 173 calls.clear()
174 174 # Note: At this moment, it is still the empty dict, but that will
175 175 # be filled during the test run and since it is a reference this
176 176 # is enough to make it work.
177 177 return calls
178 178
179 179
180 180 @pytest.fixture(scope='session')
181 181 def http_environ_session():
182 182 """
183 183 Allow to use "http_environ" in session scope.
184 184 """
185 185 return http_environ(
186 186 http_host_stub=http_host_stub())
187 187
188 188
189 189 @pytest.fixture
190 190 def http_host_stub():
191 191 """
192 192 Value of HTTP_HOST in the test run.
193 193 """
194 194 return 'test.example.com:80'
195 195
196 196
197 197 @pytest.fixture
198 198 def http_environ(http_host_stub):
199 199 """
200 200 HTTP extra environ keys.
201 201
202 202 User by the test application and as well for setting up the pylons
203 203 environment. In the case of the fixture "app" it should be possible
204 204 to override this for a specific test case.
205 205 """
206 206 return {
207 207 'SERVER_NAME': http_host_stub.split(':')[0],
208 208 'SERVER_PORT': http_host_stub.split(':')[1],
209 209 'HTTP_HOST': http_host_stub,
210 210 'HTTP_USER_AGENT': 'rc-test-agent',
211 211 'REQUEST_METHOD': 'GET'
212 212 }
213 213
214 214
215 215 @pytest.fixture(scope='function')
216 216 def app(request, pylonsapp, http_environ):
217 217 app = CustomTestApp(
218 218 pylonsapp,
219 219 extra_environ=http_environ)
220 220 if request.cls:
221 221 request.cls.app = app
222 222 return app
223 223
224 224
225 225 @pytest.fixture(scope='session')
226 226 def app_settings(pylonsapp, pylons_config):
227 227 """
228 228 Settings dictionary used to create the app.
229 229
230 230 Parses the ini file and passes the result through the sanitize and apply
231 231 defaults mechanism in `rhodecode.config.middleware`.
232 232 """
233 233 from paste.deploy.loadwsgi import loadcontext, APP
234 234 from rhodecode.config.middleware import (
235 235 sanitize_settings_and_apply_defaults)
236 236 context = loadcontext(APP, 'config:' + pylons_config)
237 237 settings = sanitize_settings_and_apply_defaults(context.config())
238 238 return settings
239 239
240 240
241 241 @pytest.fixture(scope='session')
242 242 def db(app_settings):
243 243 """
244 244 Initializes the database connection.
245 245
246 246 It uses the same settings which are used to create the ``pylonsapp`` or
247 247 ``app`` fixtures.
248 248 """
249 249 from rhodecode.config.utils import initialize_database
250 250 initialize_database(app_settings)
251 251
252 252
253 253 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
254 254
255 255
256 256 def _autologin_user(app, *args):
257 257 session = login_user_session(app, *args)
258 258 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
259 259 return LoginData(csrf_token, session['rhodecode_user'])
260 260
261 261
262 262 @pytest.fixture
263 263 def autologin_user(app):
264 264 """
265 265 Utility fixture which makes sure that the admin user is logged in
266 266 """
267 267 return _autologin_user(app)
268 268
269 269
270 270 @pytest.fixture
271 271 def autologin_regular_user(app):
272 272 """
273 273 Utility fixture which makes sure that the regular user is logged in
274 274 """
275 275 return _autologin_user(
276 276 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
277 277
278 278
279 279 @pytest.fixture(scope='function')
280 280 def csrf_token(request, autologin_user):
281 281 return autologin_user.csrf_token
282 282
283 283
284 284 @pytest.fixture(scope='function')
285 285 def xhr_header(request):
286 286 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
287 287
288 288
289 289 @pytest.fixture
290 290 def real_crypto_backend(monkeypatch):
291 291 """
292 292 Switch the production crypto backend on for this test.
293 293
294 294 During the test run the crypto backend is replaced with a faster
295 295 implementation based on the MD5 algorithm.
296 296 """
297 297 monkeypatch.setattr(rhodecode, 'is_test', False)
298 298
299 299
300 300 @pytest.fixture(scope='class')
301 301 def index_location(request, pylonsapp):
302 302 index_location = pylonsapp.config['app_conf']['search.location']
303 303 if request.cls:
304 304 request.cls.index_location = index_location
305 305 return index_location
306 306
307 307
308 308 @pytest.fixture(scope='session', autouse=True)
309 309 def tests_tmp_path(request):
310 310 """
311 311 Create temporary directory to be used during the test session.
312 312 """
313 313 if not os.path.exists(TESTS_TMP_PATH):
314 314 os.makedirs(TESTS_TMP_PATH)
315 315
316 316 if not request.config.getoption('--keep-tmp-path'):
317 317 @request.addfinalizer
318 318 def remove_tmp_path():
319 319 shutil.rmtree(TESTS_TMP_PATH)
320 320
321 321 return TESTS_TMP_PATH
322 322
323 323
324 324 @pytest.fixture
325 325 def test_repo_group(request):
326 326 """
327 327 Create a temporary repository group, and destroy it after
328 328 usage automatically
329 329 """
330 330 fixture = Fixture()
331 331 repogroupid = 'test_repo_group_%s' % int(time.time())
332 332 repo_group = fixture.create_repo_group(repogroupid)
333 333
334 334 def _cleanup():
335 335 fixture.destroy_repo_group(repogroupid)
336 336
337 337 request.addfinalizer(_cleanup)
338 338 return repo_group
339 339
340 340
341 341 @pytest.fixture
342 342 def test_user_group(request):
343 343 """
344 344 Create a temporary user group, and destroy it after
345 345 usage automatically
346 346 """
347 347 fixture = Fixture()
348 348 usergroupid = 'test_user_group_%s' % int(time.time())
349 349 user_group = fixture.create_user_group(usergroupid)
350 350
351 351 def _cleanup():
352 352 fixture.destroy_user_group(user_group)
353 353
354 354 request.addfinalizer(_cleanup)
355 355 return user_group
356 356
357 357
358 358 @pytest.fixture(scope='session')
359 359 def test_repo(request):
360 360 container = TestRepoContainer()
361 361 request.addfinalizer(container._cleanup)
362 362 return container
363 363
364 364
365 365 class TestRepoContainer(object):
366 366 """
367 367 Container for test repositories which are used read only.
368 368
369 369 Repositories will be created on demand and re-used during the lifetime
370 370 of this object.
371 371
372 372 Usage to get the svn test repository "minimal"::
373 373
374 374 test_repo = TestContainer()
375 375 repo = test_repo('minimal', 'svn')
376 376
377 377 """
378 378
379 379 dump_extractors = {
380 380 'git': utils.extract_git_repo_from_dump,
381 381 'hg': utils.extract_hg_repo_from_dump,
382 382 'svn': utils.extract_svn_repo_from_dump,
383 383 }
384 384
385 385 def __init__(self):
386 386 self._cleanup_repos = []
387 387 self._fixture = Fixture()
388 388 self._repos = {}
389 389
390 def __call__(self, dump_name, backend_alias):
390 def __call__(self, dump_name, backend_alias, config=None):
391 391 key = (dump_name, backend_alias)
392 392 if key not in self._repos:
393 repo = self._create_repo(dump_name, backend_alias)
393 repo = self._create_repo(dump_name, backend_alias, config)
394 394 self._repos[key] = repo.repo_id
395 395 return Repository.get(self._repos[key])
396 396
397 def _create_repo(self, dump_name, backend_alias):
397 def _create_repo(self, dump_name, backend_alias, config):
398 398 repo_name = '%s-%s' % (backend_alias, dump_name)
399 399 backend_class = get_backend(backend_alias)
400 400 dump_extractor = self.dump_extractors[backend_alias]
401 401 repo_path = dump_extractor(dump_name, repo_name)
402 vcs_repo = backend_class(repo_path)
402
403 vcs_repo = backend_class(repo_path, config=config)
403 404 repo2db_mapper({repo_name: vcs_repo})
405
404 406 repo = RepoModel().get_by_repo_name(repo_name)
405 407 self._cleanup_repos.append(repo_name)
406 408 return repo
407 409
408 410 def _cleanup(self):
409 411 for repo_name in reversed(self._cleanup_repos):
410 412 self._fixture.destroy_repo(repo_name)
411 413
412 414
413 415 @pytest.fixture
414 416 def backend(request, backend_alias, pylonsapp, test_repo):
415 417 """
416 418 Parametrized fixture which represents a single backend implementation.
417 419
418 420 It respects the option `--backends` to focus the test run on specific
419 421 backend implementations.
420 422
421 423 It also supports `pytest.mark.xfail_backends` to mark tests as failing
422 424 for specific backends. This is intended as a utility for incremental
423 425 development of a new backend implementation.
424 426 """
425 427 if backend_alias not in request.config.getoption('--backends'):
426 428 pytest.skip("Backend %s not selected." % (backend_alias, ))
427 429
428 430 utils.check_xfail_backends(request.node, backend_alias)
429 431 utils.check_skip_backends(request.node, backend_alias)
430 432
431 433 repo_name = 'vcs_test_%s' % (backend_alias, )
432 434 backend = Backend(
433 435 alias=backend_alias,
434 436 repo_name=repo_name,
435 437 test_name=request.node.name,
436 438 test_repo_container=test_repo)
437 439 request.addfinalizer(backend.cleanup)
438 440 return backend
439 441
440 442
441 443 @pytest.fixture
442 444 def backend_git(request, pylonsapp, test_repo):
443 445 return backend(request, 'git', pylonsapp, test_repo)
444 446
445 447
446 448 @pytest.fixture
447 449 def backend_hg(request, pylonsapp, test_repo):
448 450 return backend(request, 'hg', pylonsapp, test_repo)
449 451
450 452
451 453 @pytest.fixture
452 454 def backend_svn(request, pylonsapp, test_repo):
453 455 return backend(request, 'svn', pylonsapp, test_repo)
454 456
455 457
456 458 @pytest.fixture
457 459 def backend_random(backend_git):
458 460 """
459 461 Use this to express that your tests need "a backend.
460 462
461 463 A few of our tests need a backend, so that we can run the code. This
462 464 fixture is intended to be used for such cases. It will pick one of the
463 465 backends and run the tests.
464 466
465 467 The fixture `backend` would run the test multiple times for each
466 468 available backend which is a pure waste of time if the test is
467 469 independent of the backend type.
468 470 """
469 471 # TODO: johbo: Change this to pick a random backend
470 472 return backend_git
471 473
472 474
473 475 @pytest.fixture
474 476 def backend_stub(backend_git):
475 477 """
476 478 Use this to express that your tests need a backend stub
477 479
478 480 TODO: mikhail: Implement a real stub logic instead of returning
479 481 a git backend
480 482 """
481 483 return backend_git
482 484
483 485
484 486 @pytest.fixture
485 487 def repo_stub(backend_stub):
486 488 """
487 489 Use this to express that your tests need a repository stub
488 490 """
489 491 return backend_stub.create_repo()
490 492
491 493
492 494 class Backend(object):
493 495 """
494 496 Represents the test configuration for one supported backend
495 497
496 498 Provides easy access to different test repositories based on
497 499 `__getitem__`. Such repositories will only be created once per test
498 500 session.
499 501 """
500 502
501 503 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
502 504 _master_repo = None
503 505 _commit_ids = {}
504 506
505 507 def __init__(self, alias, repo_name, test_name, test_repo_container):
506 508 self.alias = alias
507 509 self.repo_name = repo_name
508 510 self._cleanup_repos = []
509 511 self._test_name = test_name
510 512 self._test_repo_container = test_repo_container
511 513 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
512 514 # Fixture will survive in the end.
513 515 self._fixture = Fixture()
514 516
515 517 def __getitem__(self, key):
516 518 return self._test_repo_container(key, self.alias)
517 519
520 def create_test_repo(self, key, config=None):
521 return self._test_repo_container(key, self.alias, config)
522
518 523 @property
519 524 def repo(self):
520 525 """
521 526 Returns the "current" repository. This is the vcs_test repo or the
522 527 last repo which has been created with `create_repo`.
523 528 """
524 529 from rhodecode.model.db import Repository
525 530 return Repository.get_by_repo_name(self.repo_name)
526 531
527 532 @property
528 533 def default_branch_name(self):
529 534 VcsRepository = get_backend(self.alias)
530 535 return VcsRepository.DEFAULT_BRANCH_NAME
531 536
532 537 @property
533 538 def default_head_id(self):
534 539 """
535 540 Returns the default head id of the underlying backend.
536 541
537 542 This will be the default branch name in case the backend does have a
538 543 default branch. In the other cases it will point to a valid head
539 544 which can serve as the base to create a new commit on top of it.
540 545 """
541 546 vcsrepo = self.repo.scm_instance()
542 547 head_id = (
543 548 vcsrepo.DEFAULT_BRANCH_NAME or
544 549 vcsrepo.commit_ids[-1])
545 550 return head_id
546 551
547 552 @property
548 553 def commit_ids(self):
549 554 """
550 555 Returns the list of commits for the last created repository
551 556 """
552 557 return self._commit_ids
553 558
554 559 def create_master_repo(self, commits):
555 560 """
556 561 Create a repository and remember it as a template.
557 562
558 563 This allows to easily create derived repositories to construct
559 564 more complex scenarios for diff, compare and pull requests.
560 565
561 566 Returns a commit map which maps from commit message to raw_id.
562 567 """
563 568 self._master_repo = self.create_repo(commits=commits)
564 569 return self._commit_ids
565 570
566 571 def create_repo(
567 572 self, commits=None, number_of_commits=0, heads=None,
568 573 name_suffix=u'', **kwargs):
569 574 """
570 575 Create a repository and record it for later cleanup.
571 576
572 577 :param commits: Optional. A sequence of dict instances.
573 578 Will add a commit per entry to the new repository.
574 579 :param number_of_commits: Optional. If set to a number, this number of
575 580 commits will be added to the new repository.
576 581 :param heads: Optional. Can be set to a sequence of of commit
577 582 names which shall be pulled in from the master repository.
578 583
579 584 """
580 585 self.repo_name = self._next_repo_name() + name_suffix
581 586 repo = self._fixture.create_repo(
582 587 self.repo_name, repo_type=self.alias, **kwargs)
583 588 self._cleanup_repos.append(repo.repo_name)
584 589
585 590 commits = commits or [
586 591 {'message': 'Commit %s of %s' % (x, self.repo_name)}
587 592 for x in xrange(number_of_commits)]
588 593 self._add_commits_to_repo(repo.scm_instance(), commits)
589 594 if heads:
590 595 self.pull_heads(repo, heads)
591 596
592 597 return repo
593 598
594 599 def pull_heads(self, repo, heads):
595 600 """
596 601 Make sure that repo contains all commits mentioned in `heads`
597 602 """
598 603 vcsmaster = self._master_repo.scm_instance()
599 604 vcsrepo = repo.scm_instance()
600 605 vcsrepo.config.clear_section('hooks')
601 606 commit_ids = [self._commit_ids[h] for h in heads]
602 607 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
603 608
604 609 def create_fork(self):
605 610 repo_to_fork = self.repo_name
606 611 self.repo_name = self._next_repo_name()
607 612 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
608 613 self._cleanup_repos.append(self.repo_name)
609 614 return repo
610 615
611 616 def new_repo_name(self, suffix=u''):
612 617 self.repo_name = self._next_repo_name() + suffix
613 618 self._cleanup_repos.append(self.repo_name)
614 619 return self.repo_name
615 620
616 621 def _next_repo_name(self):
617 622 return u"%s_%s" % (
618 623 self.invalid_repo_name.sub(u'_', self._test_name),
619 624 len(self._cleanup_repos))
620 625
621 626 def ensure_file(self, filename, content='Test content\n'):
622 627 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
623 628 commits = [
624 629 {'added': [
625 630 FileNode(filename, content=content),
626 631 ]},
627 632 ]
628 633 self._add_commits_to_repo(self.repo.scm_instance(), commits)
629 634
630 635 def enable_downloads(self):
631 636 repo = self.repo
632 637 repo.enable_downloads = True
633 638 Session().add(repo)
634 639 Session().commit()
635 640
636 641 def cleanup(self):
637 642 for repo_name in reversed(self._cleanup_repos):
638 643 self._fixture.destroy_repo(repo_name)
639 644
640 645 def _add_commits_to_repo(self, repo, commits):
641 646 commit_ids = _add_commits_to_repo(repo, commits)
642 647 if not commit_ids:
643 648 return
644 649 self._commit_ids = commit_ids
645 650
646 651 # Creating refs for Git to allow fetching them from remote repository
647 652 if self.alias == 'git':
648 653 refs = {}
649 654 for message in self._commit_ids:
650 655 # TODO: mikhail: do more special chars replacements
651 656 ref_name = 'refs/test-refs/{}'.format(
652 657 message.replace(' ', ''))
653 658 refs[ref_name] = self._commit_ids[message]
654 659 self._create_refs(repo, refs)
655 660
656 661 def _create_refs(self, repo, refs):
657 662 for ref_name in refs:
658 663 repo.set_refs(ref_name, refs[ref_name])
659 664
660 665
@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    Works like the fixture `backend`, but on the plain vcs level: it does
    not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    selected_backends = request.config.getoption('--backends')
    if backend_alias not in selected_backends:
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    vcs_backend = VcsBackend(
        alias=backend_alias,
        repo_path=os.path.join(tests_tmp_path, repo_name),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(vcs_backend.cleanup)
    return vcs_backend
687 692
688 693
@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
    """Non-parametrized variant of `vcsbackend`, fixed to Git."""
    backend = vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)
    return backend
692 697
693 698
@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
    """Non-parametrized variant of `vcsbackend`, fixed to Mercurial."""
    backend = vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)
    return backend
697 702
698 703
@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
    """Non-parametrized variant of `vcsbackend`, fixed to Subversion."""
    backend = vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)
    return backend
702 707
703 708
@pytest.fixture
def vcsbackend_random(vcsbackend_git):
    """
    Use this to express that your tests need "a vcsbackend".

    Running a backend-independent test once per available backend (as the
    fixture `vcsbackend` would do) is a pure waste of time, so a single
    backend is handed out instead.
    """
    # TODO: johbo: Change this to pick a random backend
    return vcsbackend_git
715 720
716 721
@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    The plan is to eventually back this with an in-memory stub to speed
    tests up; for now it hands out the Git backend.
    """
    return vcsbackend_git
725 730
726 731
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.

    Keeps a "current" repository path and tracks every repository created
    through it so that :meth:`cleanup` can remove them again.
    """

    # Pattern of characters which are not allowed in repository names.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        # Expose the pre-created test repositories by name.
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the
        last repo which has been created.
        """
        repo_class = get_backend(self.alias)
        return repo_class(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
        """Create (or clone) a fresh vcs repository and register cleanup."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = _clone_repo.path if _clone_repo else None
        new_repo = repo_class(self._repo_path, create=True, src_url=src_url)
        self._cleanup_repos.append(new_repo)

        if not commits:
            commits = [
                {'message': 'Commit %s of %s' % (idx, repo_name)}
                for idx in xrange(number_of_commits)]
        _add_commits_to_repo(new_repo, commits)
        return new_repo

    def clone_repo(self, repo):
        """Create a new repository as a clone of `repo`."""
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        # Wipe every repository which was created through this instance.
        for created_repo in self._cleanup_repos:
            shutil.rmtree(created_repo.path)

    def new_repo_path(self):
        """Reserve a new repository path and make it the current one."""
        self._repo_path = get_new_dir(self._next_repo_name())
        return self._repo_path

    def _next_repo_name(self):
        # Unique name derived from the test name plus a running counter.
        sanitized = self.invalid_repo_name.sub('_', self._test_name)
        return "%s_%s" % (sanitized, len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        """Commit a single file into `repo` via its in-memory commit API."""
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic')

    def ensure_file(self, filename, content='Test content\n'):
        """Commit `filename` into the current repository."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
803 808
804 809
def _add_commits_to_repo(vcs_repo, commits):
    """
    Add the given `commits` to `vcs_repo` via its in-memory commit API.

    :param vcs_repo: vcs repository instance providing `in_memory_commit`.
    :param commits: sequence of dicts; supported keys are `message`,
        `added`, `changed`, `removed` (sequences of FileNode-like objects),
        `parents` (messages of previously created commits), `author`,
        `date` and `branch`.
    :return: dict mapping commit message -> raw_id of the created commit;
        empty dict if `commits` is empty.
    """
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit

    # The input dict and the created commit object get separate names here;
    # the original code shadowed both as `commit` and kept a dead
    # `commit = None` initialization.
    for idx, commit_data in enumerate(commits):
        message = unicode(commit_data.get('message', 'Commit %s' % idx))

        for node in commit_data.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit_data.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit_data.get('removed', []):
            imc.remove(FileNode(node.path))

        # Parents are referenced by the message of earlier commits.
        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit_data.get('parents', [])]

        # Make sure every commit carries at least one change; some backends
        # would otherwise refuse to create an empty commit.
        operations = ('added', 'changed', 'removed')
        if not any(commit_data.get(o) for o in operations):
            imc.add(FileNode('file_%s' % idx, content=message))

        new_commit = imc.commit(
            message=message,
            author=unicode(commit_data.get('author', 'Automatic')),
            date=commit_data.get('date'),
            branch=commit_data.get('branch'),
            parents=parents)

        commit_ids[new_commit.message] = new_commit.raw_id

    return commit_ids
841 846
842 847
@pytest.fixture
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
852 857
853 858
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL under which the last served repository is reachable.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """
        Serve `vcsrepo` through a locally spawned `svnserve` daemon.

        :raises TypeError: if the repository is not a Subversion repository.
        """
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        # Terminate all spawned servers first, then reap them. Without the
        # `wait` calls the terminated children would linger as zombie
        # processes for the rest of the test run.
        for proc in self._cleanup_servers:
            proc.terminate()
        for proc in self._cleanup_servers:
            proc.wait()
879 884
880 885
@pytest.fixture
def pr_util(backend, request):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """
    util = PRTestUtility(backend)
    # Registering the bound method directly is equivalent to the decorated
    # wrapper function and reads shorter.
    request.addfinalizer(util.cleanup)
    return util
899 904
900 905
class PRTestUtility(object):
    """
    Helper around one pull request: creates the involved repositories and
    the pull request itself, and offers operations such as approving,
    commenting, updating and closing it. State is cached on the instance,
    so `create_pull_request` only creates once per instance.
    """

    # Cached pull request state managed by this instance.
    pull_request = None
    pull_request_id = None
    # Patcher state for the "merge enabled" setting and notification mocks.
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        """
        Create (once) and return the pull request managed by this utility.

        If `commits` is not given, a default three commit setup is used:
        `c1` becomes the target head, `c2` the source head and the single
        reviewed revision.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created pull request must not have versions yet.
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Set an "approved" status vote for every reviewer."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        """Close the managed pull request as its author."""
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        # Build a "branch:<name>:<commit_id>" reference string pointing at
        # the commit carrying the given message.
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        # Default reviewers are the two regular test users.
        model = UserModel()
        return [
            model.get_by_username(TEST_USER_REGULAR_LOGIN),
            model.get_by_username(TEST_USER_REGULAR2_LOGIN),
        ]

    def update_source_repository(self, head=None):
        """Pull `head` (default 'c3') into the source repository."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        """
        Add one commit to the source side and update the pull request.
        Returns the id of the newly included commit.
        """
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """
        Strip the latest source commit and update the pull request.
        Returns the id of the removed commit.
        """
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Add a general comment, optionally linked to version `linked_to`."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """Add an inline comment, optionally linked to version `linked_to`."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Create the pull request and snapshot it into a new version."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        """Set `status` on the pull request for each given reviewer."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """Patch the global "merge enabled" setting to `value`."""
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1084 1089
1085 1090
@pytest.fixture
def user_admin(pylonsapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1093 1098
1094 1099
@pytest.fixture
def user_regular(pylonsapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1102 1107
1103 1108
@pytest.fixture
def user_util(request, pylonsapp):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    util = UserUtility(test_name=request.node.name)
    request.addfinalizer(util.cleanup)
    return util
1112 1117
1113 1118
1114 1119 # TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """
    Test helper which creates users, user groups, repositories, repo
    groups and permission grants, remembering everything it created so
    that :meth:`cleanup` can tear it all down again in a safe order.
    """

    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        # Ids of created entities; consumed by the `_cleanup_*` helpers.
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        # (target_id, actor_id) pairs of granted permissions; consumed by
        # `_cleanup_permissions` to revoke them again.
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        # Brackets from parametrized test node names are replaced since
        # they are not safe inside entity names.
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a repo group with a unique, test-derived name."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg'):
        """Create a repository with a unique, test-derived name."""
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a user with a unique, test-derived name."""
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_user_with_group(self):
        """Create a user together with a user group containing it."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        """Create a user group, optionally adding `members` to it."""
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        # NOTE(review): this disables permission inheritance and records
        # the pair for later revocation, but the grant itself is not
        # performed here -- confirm with callers whether that is intended.
        self._inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        """Grant `user` a permission on `repo_group`, tracked for cleanup."""
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        """Grant `user_group` a permission on `repo_group` (tracked)."""
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        """Grant `user` a permission on `repo`, tracked for cleanup."""
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        """Grant `user_group` a permission on `repo`, tracked for cleanup."""
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        """Grant `user` a permission on `target_user_group` (tracked)."""
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        """Grant `user_group` a permission on `target_user_group` (tracked)."""
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        """Revoke a global permission and re-enable default inheritance."""
        self._inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def _inherit_default_user_permissions(self, user_name, value):
        # Toggle whether the user inherits the default permission set.
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        # Order matters: permissions first, then repos before their groups,
        # user groups before the users contained in them.
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        # Destroy nested groups before their parents (deepest path first).
        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        # Destroy nested groups before their parents (deepest path first).
        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1327 1332
1328 1333
1329 1334 # TODO: Think about moving this into a pytest-pyro package and make it a
1330 1335 # pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Adding the remote traceback if the exception has this information.

    VCSServer attaches this information as the attribute
    `_vcs_server_traceback` to the exception instance.
    """
    # Hookwrapper: let all other implementations build the report first,
    # then post-process the result.
    outcome = yield
    report = outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(report, call.excinfo.value)
1343 1348
1344 1349
1345 1350 def _add_vcsserver_remote_traceback(report, exc):
1346 1351 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1347 1352
1348 1353 if vcsserver_traceback:
1349 1354 section = 'VCSServer remote traceback ' + report.when
1350 1355 report.sections.append((section, vcsserver_traceback))
1351 1356
1352 1357
@pytest.fixture(scope='session')
def testrun():
    """Session-wide identification data of the current test run."""
    run_info = {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }
    return run_info
1360 1365
1361 1366
@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumption of single tests.

    It gathers data based on `psutil` and sends them to Appenlight. The
    option ``--appenlight`` has to be used to enable this fixture and the
    API key for your application has to be provided in
    ``--appenlight-api-key``.
    """
    try:
        # cygwin cannot have yet psutil support.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the pylonsapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        pylonsapp = request.getfuncargvalue("pylonsapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    # Take "before" snapshots of the vcsserver and test process memory.
    server_and_port = pylonsapp.config['vcs.server']
    protocol = pylonsapp.config['vcs.server.protocol']
    server = create_vcsserver_proxy(server_and_port, protocol)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

    test_process = psutil.Process()
    mem = test_process.memory_info()
    client.tag_before('test.rss', mem.rss)
    client.tag_before('test.vms', mem.vms)

    client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        # Runs after the test: take "after" snapshots and push everything.
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

        mem = test_process.memory_info()
        client.tag_after('test.rss', mem.rss)
        client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client
1435 1440
1436 1441
class AppenlightClient(object):
    """
    Minimal client reporting test statistics to an Appenlight server.

    Log entries are gathered via :meth:`collect` and tag pairs via
    :meth:`tag_before`/:meth:`tag_after`; :meth:`send_stats` pushes
    everything to the server in a single request.
    """

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        # Record a tag value taken before the test ran.
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        # Record a tag value taken after the test ran.
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one log entry, filling in the configured default fields."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """
        Send all collected entries to the Appenlight server.

        For every tag recorded both before and after, a derived `.delta`
        tag is reported as well.

        :raises Exception: if the server does not answer with status 200.
        """
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                # Not every tag has an "after" value or supports
                # subtraction; the delta is best effort only.
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        # `!= 200` instead of the original `not ... == 200` anti-pattern.
        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1506 1511
1507 1512
@pytest.fixture
def gist_util(request, pylonsapp):
    """
    Hand out a `GistUtility` whose cleanup is registered as a finalizer,
    so every gist created during the test is destroyed afterwards.
    """
    gist_helper = GistUtility()
    request.addfinalizer(gist_helper.cleanup)
    return gist_helper
1516 1521
1517 1522
class GistUtility(object):
    """Creates gists through the test fixture and tracks them for removal."""

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create a gist, remember its id for cleanup, and return it."""
        created = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(created.gist_id)
        return created

    def cleanup(self):
        """Destroy every gist that was created through this utility."""
        for tracked_id in self.gist_ids:
            self.fixture.destroy_gists(str(tracked_id))
1531 1536
1532 1537
@pytest.fixture
def enabled_backends(request):
    """Return a copy of the backend names enabled via the --backends option."""
    return list(request.config.option.backends)
1537 1542
1538 1543
@pytest.fixture
def settings_util(request):
    """
    Hand out a `SettingsUtility` whose cleanup is registered as a finalizer,
    so every setting row created during the test is removed afterwards.
    """
    settings_helper = SettingsUtility()
    request.addfinalizer(settings_helper.cleanup)
    return settings_helper
1547 1552
1548 1553
class SettingsUtility(object):
    """
    Creates RhodeCode ui and settings rows (both global and per-repository)
    and remembers their ids so that `cleanup` can delete them again.
    """

    def __init__(self):
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Create a per-repository ui row; derives a unique key if missing."""
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Create a global ui row; derives a unique key if missing."""
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Create a per-repository application setting row."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Create a global application setting row."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """
        Delete every tracked row and commit once.

        Rows already removed by the test itself are skipped and the id lists
        are cleared, so calling `cleanup` twice is safe. (The previous
        version passed `None` to `Session().delete` for already-deleted
        rows and re-deleted everything on a second call.)
        """
        self._delete_tracked(RhodeCodeUi, self.rhodecode_ui_ids)
        self._delete_tracked(RhodeCodeSetting, self.rhodecode_setting_ids)
        self._delete_tracked(RepoRhodeCodeUi, self.repo_rhodecode_ui_ids)
        self._delete_tracked(
            RepoRhodeCodeSetting, self.repo_rhodecode_setting_ids)

        Session().commit()

    def _delete_tracked(self, model, ids):
        # Guard against rows deleted outside of this utility: `model.get`
        # returns None for missing ids and deleting None would raise.
        for id_ in ids:
            setting = model.get(id_)
            if setting is not None:
                Session().delete(setting)
        del ids[:]  # reset tracking so repeated cleanup is a no-op
1629 1634
1630 1635
@pytest.fixture
def no_notifications(request):
    """Suppress notification creation for the duration of the test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1637 1642
1638 1643
@pytest.fixture
def silence_action_logger(request):
    """Suppress action logging for the duration of the test."""
    action_logger_patcher = mock.patch(
        'rhodecode.lib.utils.action_logger')
    action_logger_patcher.start()
    request.addfinalizer(action_logger_patcher.stop)
1645 1650
1646 1651
@pytest.fixture(scope='session')
def repeat(request):
    """
    Base repetition count for performance oriented tests.

    Tests exercising slower calls may divide this by 10 or 100; the default
    is chosen so the standard suite stays reasonably fast.
    """
    return request.config.getoption('--repeat')
1656 1661
1657 1662
@pytest.fixture
def rhodecode_fixtures():
    """Plain `Fixture` helper for creating test data."""
    fixture_util = Fixture()
    return fixture_util
1661 1666
1662 1667
@pytest.fixture
def request_stub():
    """
    Dummy pyramid request, pre-configured for the https scheme.
    """
    stub = pyramid.testing.DummyRequest()
    stub.scheme = 'https'
    return stub
1671 1676
1672 1677
@pytest.fixture
def config_stub(request, request_stub):
    """
    pyramid.testing Configurator wired to `request_stub`, torn down after use.
    """
    configurator = pyramid.testing.setUp(request=request_stub)
    request.addfinalizer(pyramid.testing.tearDown)
    return configurator
1685 1690
1686 1691
@pytest.fixture
def StubIntegrationType():
    """
    Provide (and globally register) a minimal integration type that simply
    records every event it receives in `sent_events`.
    """
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        # Class attributes consumed by the integration registry / UI.
        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'
        icon = 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            # Instead of delivering anywhere, just remember the event so
            # tests can assert on it.
            self.sent_events.append(event)

        def settings_schema(self):
            # Schema matching the `stub_integration_settings` fixture below.
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()


    # NOTE(review): registration is process-global and never undone; repeated
    # fixture use re-registers the same key -- confirm this is intended.
    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1720 1725
@pytest.fixture
def stub_integration_settings():
    """Settings dict matching `_StubIntegrationType.settings_schema()`."""
    settings = dict(test_string_field='some data', test_int_field=100)
    return settings
1727 1732
1728 1733
@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Integration bound to a single repository, deleted after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1742 1747
1743 1748
@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Integration bound to a repo group (direct children only)."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1757 1762
1758 1763
@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType,
                                         stub_integration_settings):
    """Integration bound to a repo group including all nested repos."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1772 1777
1773 1778
@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Global integration (applies everywhere), deleted after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1787 1792
1788 1793
@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """
    Integration scoped to root repositories only (child_repos_only=True),
    removed again when the test finishes.
    """
    # NOTE(review): the name duplicates global_integration_stub's
    # 'test global integration' -- looks like a copy-paste; confirm whether
    # a distinct name was intended before renaming it.
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration
1802 1807
1803 1808
@pytest.fixture
def local_dt_to_utc():
    """Factory converting naive local datetimes to naive UTC datetimes."""
    def _convert(dt):
        localized = dt.replace(tzinfo=dateutil.tz.tzlocal())
        return localized.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
    return _convert
@@ -1,1242 +1,1269 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22 import mock
23 23 import os
24 24 import sys
25 import shutil
25 26
26 27 import pytest
27 28
29 from rhodecode.lib.utils import make_db_config
28 30 from rhodecode.lib.vcs.backends.base import Reference
29 31 from rhodecode.lib.vcs.backends.git import (
30 32 GitRepository, GitCommit, discover_git_version)
31 33 from rhodecode.lib.vcs.exceptions import (
32 RepositoryError, VCSError, NodeDoesNotExistError
33 )
34 RepositoryError, VCSError, NodeDoesNotExistError)
34 35 from rhodecode.lib.vcs.nodes import (
35 36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
36 37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
37 38 from rhodecode.tests.vcs.base import BackendTestMixin
38 39
39 40
40 41 pytestmark = pytest.mark.backends("git")
41 42
42 43
def repo_path_generator():
    """
    Yield an endless sequence of distinct clone target paths.
    """
    counter = 0
    while True:
        counter += 1
        yield '%s-%d' % (TEST_GIT_REPO_CLONE, counter)


REPO_PATH_GENERATOR = repo_path_generator()
54 55
55 56
56 57 class TestGitRepository:
57 58
58 59 # pylint: disable=protected-access
59 60
60 61 def __check_for_existing_repo(self):
61 62 if os.path.exists(TEST_GIT_REPO_CLONE):
62 63 self.fail('Cannot test git clone repo as location %s already '
63 64 'exists. You should manually remove it first.'
64 65 % TEST_GIT_REPO_CLONE)
65 66
    @pytest.fixture(autouse=True)
    def prepare(self, request, pylonsapp):
        # Every test in this class operates on the shared bare sample repo.
        self.repo = GitRepository(TEST_GIT_REPO, bare=True)
69 70
70 71 def get_clone_repo(self):
71 72 """
72 73 Return a non bare clone of the base repo.
73 74 """
74 75 clone_path = next(REPO_PATH_GENERATOR)
75 76 repo_clone = GitRepository(
76 77 clone_path, create=True, src_url=self.repo.path, bare=False)
77 78
78 79 return repo_clone
79 80
80 81 def get_empty_repo(self, bare=False):
81 82 """
82 83 Return a non bare empty repo.
83 84 """
84 85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
85 86
    def test_wrong_repo_path(self):
        # Opening a non-existing path must raise, not auto-create.
        wrong_repo_path = '/tmp/errorrepo'
        with pytest.raises(RepositoryError):
            GitRepository(wrong_repo_path)

    def test_repo_clone(self):
        # Full clone: same number of commits and identical hashes.
        self.__check_for_existing_repo()
        repo = GitRepository(TEST_GIT_REPO)
        repo_clone = GitRepository(
            TEST_GIT_REPO_CLONE,
            src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
        assert len(repo.commit_ids) == len(repo_clone.commit_ids)
        # Checking hashes of commits should be enough
        for commit in repo.get_commits():
            raw_id = commit.raw_id
            assert raw_id == repo_clone.get_commit(raw_id).raw_id

    def test_repo_clone_without_create(self):
        # src_url without create=True is an error.
        with pytest.raises(RepositoryError):
            GitRepository(
                TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)

    def test_repo_clone_with_update(self):
        repo = GitRepository(TEST_GIT_REPO)
        clone_path = TEST_GIT_REPO_CLONE + '_with_update'
        repo_clone = GitRepository(
            clone_path,
            create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
        assert len(repo.commit_ids) == len(repo_clone.commit_ids)

        # check if current workdir was updated
        fpath = os.path.join(clone_path, 'MANIFEST.in')
        assert os.path.isfile(fpath)

    def test_repo_clone_without_update(self):
        repo = GitRepository(TEST_GIT_REPO)
        clone_path = TEST_GIT_REPO_CLONE + '_without_update'
        repo_clone = GitRepository(
            clone_path,
            create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
        assert len(repo.commit_ids) == len(repo_clone.commit_ids)
        # check if current workdir was *NOT* updated
        fpath = os.path.join(clone_path, 'MANIFEST.in')
        # Make sure it's not bare repo
        assert not repo_clone.bare
        assert not os.path.isfile(fpath)

    def test_repo_clone_into_bare_repo(self):
        repo = GitRepository(TEST_GIT_REPO)
        clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
        repo_clone = GitRepository(
            clone_path, create=True, src_url=repo.path, bare=True)
        assert repo_clone.bare

    def test_create_repo_is_not_bare_by_default(self):
        repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
        assert not repo.bare

    def test_create_bare_repo(self):
        repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
        assert repo.bare

    def test_update_server_info(self):
        # Smoke test only: must not raise.
        self.repo._update_server_info()

    def test_fetch(self, vcsbackend_git):
        # Note: This is a git specific part of the API, it's only implemented
        # by the git backend.
        source_repo = vcsbackend_git.repo
        target_repo = vcsbackend_git.create_repo()
        target_repo.fetch(source_repo.path)
        # Note: Get a fresh instance, avoids caching trouble
        target_repo = vcsbackend_git.backend(target_repo.path)
        assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
160 161
    def test_commit_ids(self):
        # there are 112 commits (by now)
        # so we can assume they would be available from now on
        subset = set([
            'c1214f7e79e02fc37156ff215cd71275450cffc3',
            '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
            'fa6600f6848800641328adbf7811fd2372c02ab2',
            '102607b09cdd60e2793929c4f90478be29f85a17',
            '49d3fd156b6f7db46313fac355dca1a0b94a0017',
            '2d1028c054665b962fa3d307adfc923ddd528038',
            'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
            'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
            'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
            '8430a588b43b5d6da365400117c89400326e7992',
            'd955cd312c17b02143c04fa1099a352b04368118',
            'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
            'add63e382e4aabc9e1afdc4bdc24506c269b7618',
            'f298fe1189f1b69779a4423f40b48edf92a703fc',
            'bd9b619eb41994cac43d67cf4ccc8399c1125808',
            '6e125e7c890379446e98980d8ed60fba87d0f6d1',
            'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
            '0b05e4ed56c802098dfc813cbe779b2f49e92500',
            '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
            '45223f8f114c64bf4d6f853e3c35a369a6305520',
            'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
            'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
            '27d48942240f5b91dfda77accd2caac94708cc7d',
            '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
            'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
        assert subset.issubset(set(self.repo.commit_ids))

    def test_slicing(self):
        # 4 1 5 10 95
        for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
                                 (10, 20, 10), (5, 100, 95)]:
            commit_ids = list(self.repo[sfrom:sto])
            assert len(commit_ids) == size
            assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
            assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)

    def test_branches(self):
        # TODO: Need more tests here
        # Removed (those are 'remotes' branches for cloned repo)
        # assert 'master' in self.repo.branches
        # assert 'gittree' in self.repo.branches
        # assert 'web-branch' in self.repo.branches
        for __, commit_id in self.repo.branches.items():
            assert isinstance(self.repo.get_commit(commit_id), GitCommit)

    def test_tags(self):
        # TODO: Need more tests here
        assert 'v0.1.1' in self.repo.tags
        assert 'v0.1.2' in self.repo.tags
        for __, commit_id in self.repo.tags.items():
            assert isinstance(self.repo.get_commit(commit_id), GitCommit)
216 217
    def _test_single_commit_cache(self, commit_id):
        # Helper: fetching a commit must populate the instance-level cache.
        commit = self.repo.get_commit(commit_id)
        assert commit_id in self.repo.commits
        assert commit is self.repo.commits[commit_id]

    def test_initial_commit(self):
        # Verify metadata and node tree of the very first commit.
        commit_id = self.repo.commit_ids[0]
        init_commit = self.repo.get_commit(commit_id)
        init_author = init_commit.author

        assert init_commit.message == 'initial import\n'
        assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
        assert init_author == init_commit.committer
        for path in ('vcs/__init__.py',
                     'vcs/backends/BaseRepository.py',
                     'vcs/backends/__init__.py'):
            assert isinstance(init_commit.get_node(path), FileNode)
        for path in ('', 'vcs', 'vcs/backends'):
            assert isinstance(init_commit.get_node(path), DirNode)

        with pytest.raises(NodeDoesNotExistError):
            init_commit.get_node(path='foobar')

        # Trailing slash and no slash must both resolve to the directory.
        node = init_commit.get_node('vcs/')
        assert hasattr(node, 'kind')
        assert node.kind == NodeKind.DIR

        node = init_commit.get_node('vcs')
        assert hasattr(node, 'kind')
        assert node.kind == NodeKind.DIR

        node = init_commit.get_node('vcs/__init__.py')
        assert hasattr(node, 'kind')
        assert node.kind == NodeKind.FILE

    def test_not_existing_commit(self):
        with pytest.raises(RepositoryError):
            self.repo.get_commit('f' * 40)

    def test_commit10(self):

        commit10 = self.repo.get_commit(self.repo.commit_ids[9])
        README = """===
VCS
===

Various Version Control System management abstraction layer for Python.

Introduction
------------

TODO: To be written...

"""
        node = commit10.get_node('README.rst')
        assert node.kind == NodeKind.FILE
        assert node.content == README

    def test_head(self):
        assert self.repo.head == self.repo.get_commit().raw_id
278 279 def test_checkout_with_create(self):
279 280 repo_clone = self.get_clone_repo()
280 281
281 282 new_branch = 'new_branch'
282 283 assert repo_clone._current_branch() == 'master'
283 284 assert set(repo_clone.branches) == set(('master',))
284 285 repo_clone._checkout(new_branch, create=True)
285 286
286 287 # Branches is a lazy property so we need to recrete the Repo object.
287 288 repo_clone = GitRepository(repo_clone.path)
288 289 assert set(repo_clone.branches) == set(('master', new_branch))
289 290 assert repo_clone._current_branch() == new_branch
290 291
291 292 def test_checkout(self):
292 293 repo_clone = self.get_clone_repo()
293 294
294 295 repo_clone._checkout('new_branch', create=True)
295 296 repo_clone._checkout('master')
296 297
297 298 assert repo_clone._current_branch() == 'master'
298 299
299 300 def test_checkout_same_branch(self):
300 301 repo_clone = self.get_clone_repo()
301 302
302 303 repo_clone._checkout('master')
303 304 assert repo_clone._current_branch() == 'master'
304 305
305 306 def test_checkout_branch_already_exists(self):
306 307 repo_clone = self.get_clone_repo()
307 308
308 309 with pytest.raises(RepositoryError):
309 310 repo_clone._checkout('master', create=True)
310 311
311 312 def test_checkout_bare_repo(self):
312 313 with pytest.raises(RepositoryError):
313 314 self.repo._checkout('master')
314 315
315 316 def test_current_branch_bare_repo(self):
316 317 with pytest.raises(RepositoryError):
317 318 self.repo._current_branch()
318 319
319 320 def test_current_branch_empty_repo(self):
320 321 repo = self.get_empty_repo()
321 322 assert repo._current_branch() is None
322 323
    def test_local_clone(self):
        clone_path = next(REPO_PATH_GENERATOR)
        self.repo._local_clone(clone_path, 'master')
        repo_clone = GitRepository(clone_path)

        assert self.repo.commit_ids == repo_clone.commit_ids

    def test_local_clone_with_specific_branch(self):
        source_repo = self.get_clone_repo()

        # Create a new branch in source repo
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout('new_branch', create=True)

        # Cloning just the branch must stop at the branch head (2 commits
        # short of master).
        clone_path = next(REPO_PATH_GENERATOR)
        source_repo._local_clone(clone_path, 'new_branch')
        repo_clone = GitRepository(clone_path)

        assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids

        clone_path = next(REPO_PATH_GENERATOR)
        source_repo._local_clone(clone_path, 'master')
        repo_clone = GitRepository(clone_path)

        assert source_repo.commit_ids == repo_clone.commit_ids

    def test_local_clone_fails_if_target_exists(self):
        with pytest.raises(RepositoryError):
            self.repo._local_clone(self.repo.path, 'master')
353 354
    def test_local_fetch(self):
        target_repo = self.get_empty_repo()
        source_repo = self.get_clone_repo()

        # Create a new branch in source repo
        master_commit = source_repo.commit_ids[-1]
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout('new_branch', create=True)

        # Fetch heads must track whichever branch was fetched last.
        target_repo._local_fetch(source_repo.path, 'new_branch')
        assert target_repo._last_fetch_heads() == [new_branch_commit]

        target_repo._local_fetch(source_repo.path, 'master')
        assert target_repo._last_fetch_heads() == [master_commit]

    def test_local_fetch_from_bare_repo(self):
        target_repo = self.get_empty_repo()
        target_repo._local_fetch(self.repo.path, 'master')

        master_commit = self.repo.commit_ids[-1]
        assert target_repo._last_fetch_heads() == [master_commit]

    def test_local_fetch_from_same_repo(self):
        # Fetching from oneself is rejected early with ValueError.
        with pytest.raises(ValueError):
            self.repo._local_fetch(self.repo.path, 'master')

    def test_local_fetch_branch_does_not_exist(self):
        target_repo = self.get_empty_repo()

        with pytest.raises(RepositoryError):
            target_repo._local_fetch(self.repo.path, 'new_branch')

    def test_local_pull(self):
        target_repo = self.get_empty_repo()
        source_repo = self.get_clone_repo()

        # Create a new branch in source repo
        master_commit = source_repo.commit_ids[-1]
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout('new_branch', create=True)

        # Pull moves the target HEAD to the pulled branch head.
        target_repo._local_pull(source_repo.path, 'new_branch')
        target_repo = GitRepository(target_repo.path)
        assert target_repo.head == new_branch_commit

        target_repo._local_pull(source_repo.path, 'master')
        target_repo = GitRepository(target_repo.path)
        assert target_repo.head == master_commit

    def test_local_pull_in_bare_repo(self):
        with pytest.raises(RepositoryError):
            self.repo._local_pull(self.repo.path, 'master')
408 409
    def test_local_merge(self):
        target_repo = self.get_empty_repo()
        source_repo = self.get_clone_repo()

        # Create a new branch in source repo
        master_commit = source_repo.commit_ids[-1]
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout('new_branch', create=True)

        # This is required as one cannot do a -ff-only merge in an empty repo.
        target_repo._local_pull(source_repo.path, 'new_branch')

        target_repo._local_fetch(source_repo.path, 'master')
        merge_message = 'Merge message\n\nDescription:...'
        user_name = 'Albert Einstein'
        user_email = 'albert@einstein.com'
        target_repo._local_merge(merge_message, user_name, user_email,
                                 target_repo._last_fetch_heads())

        # Merge commit must carry the given message/author and leave no
        # MERGE_HEAD state file behind.
        target_repo = GitRepository(target_repo.path)
        assert target_repo.commit_ids[-2] == master_commit
        last_commit = target_repo.get_commit(target_repo.head)
        assert last_commit.message.strip() == merge_message
        assert last_commit.author == '%s <%s>' % (user_name, user_email)

        assert not os.path.exists(
            os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))

    def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
        target_repo = vcsbackend_git.create_repo(number_of_commits=1)
        vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')

        target_repo._local_fetch(self.repo.path, 'master')
        with pytest.raises(RepositoryError):
            target_repo._local_merge(
                'merge_message', 'user name', 'user@name.com',
                target_repo._last_fetch_heads())

        # Check we are not left in an intermediate merge state
        assert not os.path.exists(
            os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))

    def test_local_merge_into_empty_repo(self):
        target_repo = self.get_empty_repo()

        # This is required as one cannot do a -ff-only merge in an empty repo.
        target_repo._local_fetch(self.repo.path, 'master')
        with pytest.raises(RepositoryError):
            target_repo._local_merge(
                'merge_message', 'user name', 'user@name.com',
                target_repo._last_fetch_heads())

    def test_local_merge_in_bare_repo(self):
        with pytest.raises(RepositoryError):
            self.repo._local_merge(
                'merge_message', 'user name', 'user@name.com', None)
466 467
    def test_local_push_non_bare(self):
        target_repo = self.get_empty_repo()

        pushed_branch = 'pushed_branch'
        self.repo._local_push('master', target_repo.path, pushed_branch)
        # Fix the HEAD of the target repo, or otherwise GitRepository won't
        # report any branches.
        with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
            f.write('ref: refs/heads/%s' % pushed_branch)

        target_repo = GitRepository(target_repo.path)

        assert (target_repo.branches[pushed_branch] ==
                self.repo.branches['master'])

    def test_local_push_bare(self):
        target_repo = self.get_empty_repo(bare=True)

        pushed_branch = 'pushed_branch'
        self.repo._local_push('master', target_repo.path, pushed_branch)
        # Fix the HEAD of the target repo, or otherwise GitRepository won't
        # report any branches.
        with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
            f.write('ref: refs/heads/%s' % pushed_branch)

        target_repo = GitRepository(target_repo.path)

        assert (target_repo.branches[pushed_branch] ==
                self.repo.branches['master'])

    def test_local_push_non_bare_target_branch_is_checked_out(self):
        # Pushing to the branch that is currently checked out in the target
        # must still succeed.
        target_repo = self.get_clone_repo()

        pushed_branch = 'pushed_branch'
        # Create a new branch in source repo
        new_branch_commit = target_repo.commit_ids[-3]
        target_repo._checkout(new_branch_commit)
        target_repo._checkout(pushed_branch, create=True)

        self.repo._local_push('master', target_repo.path, pushed_branch)

        target_repo = GitRepository(target_repo.path)

        assert (target_repo.branches[pushed_branch] ==
                self.repo.branches['master'])

    def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
        target_repo = vcsbackend_git.create_repo(number_of_commits=1)
        with pytest.raises(RepositoryError):
            self.repo._local_push('master', target_repo.path, 'master')

    def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
        # With enable_hooks=True the RC_SKIP_HOOKS env marker must be absent.
        target_repo = self.get_empty_repo(bare=True)

        with mock.patch.object(self.repo, 'run_git_command') as run_mock:
            self.repo._local_push(
                'master', target_repo.path, 'master', enable_hooks=True)
            env = run_mock.call_args[1]['extra_env']
            assert 'RC_SKIP_HOOKS' not in env
526 527
527 528 def _add_failing_hook(self, repo_path, hook_name, bare=False):
528 529 path_components = (
529 530 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
530 531 hook_path = os.path.join(repo_path, *path_components)
531 532 with open(hook_path, 'w') as f:
532 533 script_lines = [
533 534 '#!%s' % sys.executable,
534 535 'import os',
535 536 'import sys',
536 537 'if os.environ.get("RC_SKIP_HOOKS"):',
537 538 ' sys.exit(0)',
538 539 'sys.exit(1)',
539 540 ]
540 541 f.write('\n'.join(script_lines))
541 542 os.chmod(hook_path, 0755)
542 543
543 544 def test_local_push_does_not_execute_hook(self):
544 545 target_repo = self.get_empty_repo()
545 546
546 547 pushed_branch = 'pushed_branch'
547 548 self._add_failing_hook(target_repo.path, 'pre-receive')
548 549 self.repo._local_push('master', target_repo.path, pushed_branch)
549 550 # Fix the HEAD of the target repo, or otherwise GitRepository won't
550 551 # report any branches.
551 552 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
552 553 f.write('ref: refs/heads/%s' % pushed_branch)
553 554
554 555 target_repo = GitRepository(target_repo.path)
555 556
556 557 assert (target_repo.branches[pushed_branch] ==
557 558 self.repo.branches['master'])
558 559
559 560 def test_local_push_executes_hook(self):
560 561 target_repo = self.get_empty_repo(bare=True)
561 562 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
562 563 with pytest.raises(RepositoryError):
563 564 self.repo._local_push(
564 565 'master', target_repo.path, 'master', enable_hooks=True)
565 566
566 567 def test_maybe_prepare_merge_workspace(self):
567 568 workspace = self.repo._maybe_prepare_merge_workspace(
568 569 'pr2', Reference('branch', 'master', 'unused'))
569 570
570 571 assert os.path.isdir(workspace)
571 572 workspace_repo = GitRepository(workspace)
572 573 assert workspace_repo.branches == self.repo.branches
573 574
574 575 # Calling it a second time should also succeed
575 576 workspace = self.repo._maybe_prepare_merge_workspace(
576 577 'pr2', Reference('branch', 'master', 'unused'))
577 578 assert os.path.isdir(workspace)
578 579
579 580 def test_cleanup_merge_workspace(self):
580 581 workspace = self.repo._maybe_prepare_merge_workspace(
581 582 'pr3', Reference('branch', 'master', 'unused'))
582 583 self.repo.cleanup_merge_workspace('pr3')
583 584
584 585 assert not os.path.exists(workspace)
585 586
586 587 def test_cleanup_merge_workspace_invalid_workspace_id(self):
587 588 # No assert: because in case of an inexistent workspace this function
588 589 # should still succeed.
589 590 self.repo.cleanup_merge_workspace('pr4')
590 591
591 592 def test_set_refs(self):
592 593 test_ref = 'refs/test-refs/abcde'
593 594 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
594 595
595 596 self.repo.set_refs(test_ref, test_commit_id)
596 597 stdout, _ = self.repo.run_git_command(['show-ref'])
597 598 assert test_ref in stdout
598 599 assert test_commit_id in stdout
599 600
600 601 def test_remove_ref(self):
601 602 test_ref = 'refs/test-refs/abcde'
602 603 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
603 604 self.repo.set_refs(test_ref, test_commit_id)
604 605 stdout, _ = self.repo.run_git_command(['show-ref'])
605 606 assert test_ref in stdout
606 607 assert test_commit_id in stdout
607 608
608 609 self.repo.remove_ref(test_ref)
609 610 stdout, _ = self.repo.run_git_command(['show-ref'])
610 611 assert test_ref not in stdout
611 612 assert test_commit_id not in stdout
612 613
613 614
614 615 class TestGitCommit(object):
615 616
616 617 @pytest.fixture(autouse=True)
617 618 def prepare(self):
618 619 self.repo = GitRepository(TEST_GIT_REPO)
619 620
620 621 def test_default_commit(self):
621 622 tip = self.repo.get_commit()
622 623 assert tip == self.repo.get_commit(None)
623 624 assert tip == self.repo.get_commit('tip')
624 625
625 626 def test_root_node(self):
626 627 tip = self.repo.get_commit()
627 628 assert tip.root is tip.get_node('')
628 629
629 630 def test_lazy_fetch(self):
630 631 """
631 632 Test if commit's nodes expands and are cached as we walk through
632 633 the commit. This test is somewhat hard to write as order of tests
633 634 is a key here. Written by running command after command in a shell.
634 635 """
635 636 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
636 637 assert commit_id in self.repo.commit_ids
637 638 commit = self.repo.get_commit(commit_id)
638 639 assert len(commit.nodes) == 0
639 640 root = commit.root
640 641 assert len(commit.nodes) == 1
641 642 assert len(root.nodes) == 8
642 643 # accessing root.nodes updates commit.nodes
643 644 assert len(commit.nodes) == 9
644 645
645 646 docs = root.get_node('docs')
646 647 # we haven't yet accessed anything new as docs dir was already cached
647 648 assert len(commit.nodes) == 9
648 649 assert len(docs.nodes) == 8
649 650 # accessing docs.nodes updates commit.nodes
650 651 assert len(commit.nodes) == 17
651 652
652 653 assert docs is commit.get_node('docs')
653 654 assert docs is root.nodes[0]
654 655 assert docs is root.dirs[0]
655 656 assert docs is commit.get_node('docs')
656 657
657 658 def test_nodes_with_commit(self):
658 659 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
659 660 commit = self.repo.get_commit(commit_id)
660 661 root = commit.root
661 662 docs = root.get_node('docs')
662 663 assert docs is commit.get_node('docs')
663 664 api = docs.get_node('api')
664 665 assert api is commit.get_node('docs/api')
665 666 index = api.get_node('index.rst')
666 667 assert index is commit.get_node('docs/api/index.rst')
667 668 assert index is commit.get_node('docs')\
668 669 .get_node('api')\
669 670 .get_node('index.rst')
670 671
671 672 def test_branch_and_tags(self):
672 673 """
673 674 rev0 = self.repo.commit_ids[0]
674 675 commit0 = self.repo.get_commit(rev0)
675 676 assert commit0.branch == 'master'
676 677 assert commit0.tags == []
677 678
678 679 rev10 = self.repo.commit_ids[10]
679 680 commit10 = self.repo.get_commit(rev10)
680 681 assert commit10.branch == 'master'
681 682 assert commit10.tags == []
682 683
683 684 rev44 = self.repo.commit_ids[44]
684 685 commit44 = self.repo.get_commit(rev44)
685 686 assert commit44.branch == 'web-branch'
686 687
687 688 tip = self.repo.get_commit('tip')
688 689 assert 'tip' in tip.tags
689 690 """
690 691 # Those tests would fail - branches are now going
691 692 # to be changed at main API in order to support git backend
692 693 pass
693 694
694 695 def test_file_size(self):
695 696 to_check = (
696 697 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
697 698 'vcs/backends/BaseRepository.py', 502),
698 699 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
699 700 'vcs/backends/hg.py', 854),
700 701 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
701 702 'setup.py', 1068),
702 703
703 704 ('d955cd312c17b02143c04fa1099a352b04368118',
704 705 'vcs/backends/base.py', 2921),
705 706 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
706 707 'vcs/backends/base.py', 3936),
707 708 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
708 709 'vcs/backends/base.py', 6189),
709 710 )
710 711 for commit_id, path, size in to_check:
711 712 node = self.repo.get_commit(commit_id).get_node(path)
712 713 assert node.is_file()
713 714 assert node.size == size
714 715
715 716 def test_file_history_from_commits(self):
716 717 node = self.repo[10].get_node('setup.py')
717 718 commit_ids = [commit.raw_id for commit in node.history]
718 719 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
719 720
720 721 node = self.repo[20].get_node('setup.py')
721 722 node_ids = [commit.raw_id for commit in node.history]
722 723 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
723 724 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
724 725
725 726 # special case we check history from commit that has this particular
726 727 # file changed this means we check if it's included as well
727 728 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
728 729 .get_node('setup.py')
729 730 node_ids = [commit.raw_id for commit in node.history]
730 731 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
731 732 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
732 733
733 734 def test_file_history(self):
734 735 # we can only check if those commits are present in the history
735 736 # as we cannot update this test every time file is changed
736 737 files = {
737 738 'setup.py': [
738 739 '54386793436c938cff89326944d4c2702340037d',
739 740 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
740 741 '998ed409c795fec2012b1c0ca054d99888b22090',
741 742 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
742 743 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
743 744 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
744 745 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
745 746 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
746 747 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
747 748 ],
748 749 'vcs/nodes.py': [
749 750 '33fa3223355104431402a888fa77a4e9956feb3e',
750 751 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
751 752 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
752 753 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
753 754 'c877b68d18e792a66b7f4c529ea02c8f80801542',
754 755 '4313566d2e417cb382948f8d9d7c765330356054',
755 756 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
756 757 '54386793436c938cff89326944d4c2702340037d',
757 758 '54000345d2e78b03a99d561399e8e548de3f3203',
758 759 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
759 760 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
760 761 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
761 762 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
762 763 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
763 764 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
764 765 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
765 766 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
766 767 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
767 768 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
768 769 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
769 770 'f15c21f97864b4f071cddfbf2750ec2e23859414',
770 771 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
771 772 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
772 773 '84dec09632a4458f79f50ddbbd155506c460b4f9',
773 774 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
774 775 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
775 776 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
776 777 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
777 778 '6970b057cffe4aab0a792aa634c89f4bebf01441',
778 779 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
779 780 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
780 781 ],
781 782 'vcs/backends/git.py': [
782 783 '4cf116ad5a457530381135e2f4c453e68a1b0105',
783 784 '9a751d84d8e9408e736329767387f41b36935153',
784 785 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
785 786 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
786 787 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
787 788 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
788 789 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
789 790 '54000345d2e78b03a99d561399e8e548de3f3203',
790 791 ],
791 792 }
792 793 for path, commit_ids in files.items():
793 794 node = self.repo.get_commit(commit_ids[0]).get_node(path)
794 795 node_ids = [commit.raw_id for commit in node.history]
795 796 assert set(commit_ids).issubset(set(node_ids)), (
796 797 "We assumed that %s is subset of commit_ids for which file %s "
797 798 "has been changed, and history of that node returned: %s"
798 799 % (commit_ids, path, node_ids))
799 800
800 801 def test_file_annotate(self):
801 802 files = {
802 803 'vcs/backends/__init__.py': {
803 804 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
804 805 'lines_no': 1,
805 806 'commits': [
806 807 'c1214f7e79e02fc37156ff215cd71275450cffc3',
807 808 ],
808 809 },
809 810 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
810 811 'lines_no': 21,
811 812 'commits': [
812 813 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
813 814 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
814 815 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
815 816 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
816 817 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
817 818 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
818 819 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
819 820 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
820 821 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
821 822 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
822 823 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
823 824 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
824 825 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
825 826 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
826 827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
827 828 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
828 829 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
829 830 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
830 831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
831 832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 834 ],
834 835 },
835 836 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
836 837 'lines_no': 32,
837 838 'commits': [
838 839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 841 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
841 842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 844 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
844 845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 846 '54000345d2e78b03a99d561399e8e548de3f3203',
846 847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 849 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
849 850 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
850 851 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
851 852 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
852 853 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
853 854 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
854 855 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
855 856 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
856 857 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
857 858 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
858 859 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
859 860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 861 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 863 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
863 864 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
864 865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 866 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
866 867 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
867 868 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
868 869 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
869 870 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
870 871 ],
871 872 },
872 873 },
873 874 }
874 875
875 876 for fname, commit_dict in files.items():
876 877 for commit_id, __ in commit_dict.items():
877 878 commit = self.repo.get_commit(commit_id)
878 879
879 880 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
880 881 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
881 882 assert l1_1 == l1_2
882 883 l1 = l1_1
883 884 l2 = files[fname][commit_id]['commits']
884 885 assert l1 == l2, (
885 886 "The lists of commit_ids for %s@commit_id %s"
886 887 "from annotation list should match each other, "
887 888 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
888 889
889 890 def test_files_state(self):
890 891 """
891 892 Tests state of FileNodes.
892 893 """
893 894 node = self.repo\
894 895 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
895 896 .get_node('vcs/utils/diffs.py')
896 897 assert node.state, NodeState.ADDED
897 898 assert node.added
898 899 assert not node.changed
899 900 assert not node.not_changed
900 901 assert not node.removed
901 902
902 903 node = self.repo\
903 904 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
904 905 .get_node('.hgignore')
905 906 assert node.state, NodeState.CHANGED
906 907 assert not node.added
907 908 assert node.changed
908 909 assert not node.not_changed
909 910 assert not node.removed
910 911
911 912 node = self.repo\
912 913 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
913 914 .get_node('setup.py')
914 915 assert node.state, NodeState.NOT_CHANGED
915 916 assert not node.added
916 917 assert not node.changed
917 918 assert node.not_changed
918 919 assert not node.removed
919 920
920 921 # If node has REMOVED state then trying to fetch it would raise
921 922 # CommitError exception
922 923 commit = self.repo.get_commit(
923 924 'fa6600f6848800641328adbf7811fd2372c02ab2')
924 925 path = 'vcs/backends/BaseRepository.py'
925 926 with pytest.raises(NodeDoesNotExistError):
926 927 commit.get_node(path)
927 928 # but it would be one of ``removed`` (commit's attribute)
928 929 assert path in [rf.path for rf in commit.removed]
929 930
930 931 commit = self.repo.get_commit(
931 932 '54386793436c938cff89326944d4c2702340037d')
932 933 changed = [
933 934 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
934 935 'vcs/nodes.py']
935 936 assert set(changed) == set([f.path for f in commit.changed])
936 937
937 938 def test_unicode_branch_refs(self):
938 939 unicode_branches = {
939 940 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
940 941 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
941 942 }
942 943 with mock.patch(
943 944 ("rhodecode.lib.vcs.backends.git.repository"
944 945 ".GitRepository._refs"),
945 946 unicode_branches):
946 947 branches = self.repo.branches
947 948
948 949 assert 'unicode' in branches
949 950 assert u'uniΓ§ΓΆβˆ‚e' in branches
950 951
951 952 def test_unicode_tag_refs(self):
952 953 unicode_tags = {
953 954 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
954 955 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
955 956 }
956 957 with mock.patch(
957 958 ("rhodecode.lib.vcs.backends.git.repository"
958 959 ".GitRepository._refs"),
959 960 unicode_tags):
960 961 tags = self.repo.tags
961 962
962 963 assert 'unicode' in tags
963 964 assert u'uniΓ§ΓΆβˆ‚e' in tags
964 965
965 966 def test_commit_message_is_unicode(self):
966 967 for commit in self.repo:
967 968 assert type(commit.message) == unicode
968 969
969 970 def test_commit_author_is_unicode(self):
970 971 for commit in self.repo:
971 972 assert type(commit.author) == unicode
972 973
973 974 def test_repo_files_content_is_unicode(self):
974 975 commit = self.repo.get_commit()
975 976 for node in commit.get_node('/'):
976 977 if node.is_file():
977 978 assert type(node.content) == unicode
978 979
979 980 def test_wrong_path(self):
980 981 # There is 'setup.py' in the root dir but not there:
981 982 path = 'foo/bar/setup.py'
982 983 tip = self.repo.get_commit()
983 984 with pytest.raises(VCSError):
984 985 tip.get_node(path)
985 986
986 987 @pytest.mark.parametrize("author_email, commit_id", [
987 988 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
988 989 ('lukasz.balcerzak@python-center.pl',
989 990 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
990 991 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
991 992 ])
992 993 def test_author_email(self, author_email, commit_id):
993 994 commit = self.repo.get_commit(commit_id)
994 995 assert author_email == commit.author_email
995 996
996 997 @pytest.mark.parametrize("author, commit_id", [
997 998 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
998 999 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
999 1000 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1000 1001 ])
1001 1002 def test_author_username(self, author, commit_id):
1002 1003 commit = self.repo.get_commit(commit_id)
1003 1004 assert author == commit.author_name
1004 1005
1005 1006
1007 class TestLargeFileRepo(object):
1008
1009 def test_large_file(self, backend_git):
1010 conf = make_db_config()
1011 repo = backend_git.create_test_repo('largefiles', conf)
1012
1013 tip = repo.scm_instance().get_commit()
1014
1015 # extract stored LF node into the origin cache
1016 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1017
1018 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1019 oid_path = os.path.join(lfs_store, oid)
1020 oid_destination = os.path.join(
1021 conf.get('vcs_git_lfs', 'store_location'), oid)
1022 shutil.copy(oid_path, oid_destination)
1023
1024 node = tip.get_node('1MB.zip')
1025
1026 lf_node = node.get_largefile_node()
1027
1028 assert lf_node.is_largefile() is True
1029 assert lf_node.size == 1024000
1030 assert lf_node.name == '1MB.zip'
1031
1032
1006 1033 class TestGitSpecificWithRepo(BackendTestMixin):
1007 1034
1008 1035 @classmethod
1009 1036 def _get_commits(cls):
1010 1037 return [
1011 1038 {
1012 1039 'message': 'Initial',
1013 1040 'author': 'Joe Doe <joe.doe@example.com>',
1014 1041 'date': datetime.datetime(2010, 1, 1, 20),
1015 1042 'added': [
1016 1043 FileNode('foobar/static/js/admin/base.js', content='base'),
1017 1044 FileNode(
1018 1045 'foobar/static/admin', content='admin',
1019 1046 mode=0120000), # this is a link
1020 1047 FileNode('foo', content='foo'),
1021 1048 ],
1022 1049 },
1023 1050 {
1024 1051 'message': 'Second',
1025 1052 'author': 'Joe Doe <joe.doe@example.com>',
1026 1053 'date': datetime.datetime(2010, 1, 1, 22),
1027 1054 'added': [
1028 1055 FileNode('foo2', content='foo2'),
1029 1056 ],
1030 1057 },
1031 1058 ]
1032 1059
1033 1060 def test_paths_slow_traversing(self):
1034 1061 commit = self.repo.get_commit()
1035 1062 assert commit.get_node('foobar').get_node('static').get_node('js')\
1036 1063 .get_node('admin').get_node('base.js').content == 'base'
1037 1064
1038 1065 def test_paths_fast_traversing(self):
1039 1066 commit = self.repo.get_commit()
1040 1067 assert (
1041 1068 commit.get_node('foobar/static/js/admin/base.js').content ==
1042 1069 'base')
1043 1070
1044 1071 def test_get_diff_runs_git_command_with_hashes(self):
1045 1072 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1046 1073 self.repo.get_diff(self.repo[0], self.repo[1])
1047 1074 self.repo.run_git_command.assert_called_once_with(
1048 1075 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1049 1076 '--abbrev=40', self.repo._get_commit_id(0),
1050 1077 self.repo._get_commit_id(1)])
1051 1078
1052 1079 def test_get_diff_runs_git_command_with_str_hashes(self):
1053 1080 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1054 1081 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1055 1082 self.repo.run_git_command.assert_called_once_with(
1056 1083 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1057 1084 '--abbrev=40', self.repo._get_commit_id(1)])
1058 1085
1059 1086 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1060 1087 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1061 1088 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1062 1089 self.repo.run_git_command.assert_called_once_with(
1063 1090 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1064 1091 '--abbrev=40', self.repo._get_commit_id(0),
1065 1092 self.repo._get_commit_id(1), '--', 'foo'])
1066 1093
1067 1094
1068 1095 class TestGitRegression(BackendTestMixin):
1069 1096
1070 1097 @classmethod
1071 1098 def _get_commits(cls):
1072 1099 return [
1073 1100 {
1074 1101 'message': 'Initial',
1075 1102 'author': 'Joe Doe <joe.doe@example.com>',
1076 1103 'date': datetime.datetime(2010, 1, 1, 20),
1077 1104 'added': [
1078 1105 FileNode('bot/__init__.py', content='base'),
1079 1106 FileNode('bot/templates/404.html', content='base'),
1080 1107 FileNode('bot/templates/500.html', content='base'),
1081 1108 ],
1082 1109 },
1083 1110 {
1084 1111 'message': 'Second',
1085 1112 'author': 'Joe Doe <joe.doe@example.com>',
1086 1113 'date': datetime.datetime(2010, 1, 1, 22),
1087 1114 'added': [
1088 1115 FileNode('bot/build/migrations/1.py', content='foo2'),
1089 1116 FileNode('bot/build/migrations/2.py', content='foo2'),
1090 1117 FileNode(
1091 1118 'bot/build/static/templates/f.html', content='foo2'),
1092 1119 FileNode(
1093 1120 'bot/build/static/templates/f1.html', content='foo2'),
1094 1121 FileNode('bot/build/templates/err.html', content='foo2'),
1095 1122 FileNode('bot/build/templates/err2.html', content='foo2'),
1096 1123 ],
1097 1124 },
1098 1125 ]
1099 1126
1100 1127 @pytest.mark.parametrize("path, expected_paths", [
1101 1128 ('bot', [
1102 1129 'bot/build',
1103 1130 'bot/templates',
1104 1131 'bot/__init__.py']),
1105 1132 ('bot/build', [
1106 1133 'bot/build/migrations',
1107 1134 'bot/build/static',
1108 1135 'bot/build/templates']),
1109 1136 ('bot/build/static', [
1110 1137 'bot/build/static/templates']),
1111 1138 ('bot/build/static/templates', [
1112 1139 'bot/build/static/templates/f.html',
1113 1140 'bot/build/static/templates/f1.html']),
1114 1141 ('bot/build/templates', [
1115 1142 'bot/build/templates/err.html',
1116 1143 'bot/build/templates/err2.html']),
1117 1144 ('bot/templates/', [
1118 1145 'bot/templates/404.html',
1119 1146 'bot/templates/500.html']),
1120 1147 ])
1121 1148 def test_similar_paths(self, path, expected_paths):
1122 1149 commit = self.repo.get_commit()
1123 1150 paths = [n.path for n in commit.get_nodes(path)]
1124 1151 assert paths == expected_paths
1125 1152
1126 1153
1127 1154 class TestDiscoverGitVersion:
1128 1155
1129 1156 def test_returns_git_version(self, pylonsapp):
1130 1157 version = discover_git_version()
1131 1158 assert version
1132 1159
1133 1160 def test_returns_empty_string_without_vcsserver(self):
1134 1161 mock_connection = mock.Mock()
1135 1162 mock_connection.discover_git_version = mock.Mock(
1136 1163 side_effect=Exception)
1137 1164 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1138 1165 version = discover_git_version()
1139 1166 assert version == ''
1140 1167
1141 1168
1142 1169 class TestGetSubmoduleUrl(object):
1143 1170 def test_submodules_file_found(self):
1144 1171 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1145 1172 node = mock.Mock()
1146 1173 with mock.patch.object(
1147 1174 commit, 'get_node', return_value=node) as get_node_mock:
1148 1175 node.content = (
1149 1176 '[submodule "subrepo1"]\n'
1150 1177 '\tpath = subrepo1\n'
1151 1178 '\turl = https://code.rhodecode.com/dulwich\n'
1152 1179 )
1153 1180 result = commit._get_submodule_url('subrepo1')
1154 1181 get_node_mock.assert_called_once_with('.gitmodules')
1155 1182 assert result == 'https://code.rhodecode.com/dulwich'
1156 1183
1157 1184 def test_complex_submodule_path(self):
1158 1185 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1159 1186 node = mock.Mock()
1160 1187 with mock.patch.object(
1161 1188 commit, 'get_node', return_value=node) as get_node_mock:
1162 1189 node.content = (
1163 1190 '[submodule "complex/subrepo/path"]\n'
1164 1191 '\tpath = complex/subrepo/path\n'
1165 1192 '\turl = https://code.rhodecode.com/dulwich\n'
1166 1193 )
1167 1194 result = commit._get_submodule_url('complex/subrepo/path')
1168 1195 get_node_mock.assert_called_once_with('.gitmodules')
1169 1196 assert result == 'https://code.rhodecode.com/dulwich'
1170 1197
1171 1198 def test_submodules_file_not_found(self):
1172 1199 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1173 1200 with mock.patch.object(
1174 1201 commit, 'get_node', side_effect=NodeDoesNotExistError):
1175 1202 result = commit._get_submodule_url('complex/subrepo/path')
1176 1203 assert result is None
1177 1204
1178 1205 def test_path_not_found(self):
1179 1206 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1180 1207 node = mock.Mock()
1181 1208 with mock.patch.object(
1182 1209 commit, 'get_node', return_value=node) as get_node_mock:
1183 1210 node.content = (
1184 1211 '[submodule "subrepo1"]\n'
1185 1212 '\tpath = subrepo1\n'
1186 1213 '\turl = https://code.rhodecode.com/dulwich\n'
1187 1214 )
1188 1215 result = commit._get_submodule_url('subrepo2')
1189 1216 get_node_mock.assert_called_once_with('.gitmodules')
1190 1217 assert result is None
1191 1218
1192 1219 def test_returns_cached_values(self):
1193 1220 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1194 1221 node = mock.Mock()
1195 1222 with mock.patch.object(
1196 1223 commit, 'get_node', return_value=node) as get_node_mock:
1197 1224 node.content = (
1198 1225 '[submodule "subrepo1"]\n'
1199 1226 '\tpath = subrepo1\n'
1200 1227 '\turl = https://code.rhodecode.com/dulwich\n'
1201 1228 )
1202 1229 for _ in range(3):
1203 1230 commit._get_submodule_url('subrepo1')
1204 1231 get_node_mock.assert_called_once_with('.gitmodules')
1205 1232
1206 1233 def test_get_node_returns_a_link(self):
1207 1234 repository = mock.Mock()
1208 1235 repository.alias = 'git'
1209 1236 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1210 1237 submodule_url = 'https://code.rhodecode.com/dulwich'
1211 1238 get_id_patch = mock.patch.object(
1212 1239 commit, '_get_id_for_path', return_value=(1, 'link'))
1213 1240 get_submodule_patch = mock.patch.object(
1214 1241 commit, '_get_submodule_url', return_value=submodule_url)
1215 1242
1216 1243 with get_id_patch, get_submodule_patch as submodule_mock:
1217 1244 node = commit.get_node('/abcde')
1218 1245
1219 1246 submodule_mock.assert_called_once_with('/abcde')
1220 1247 assert type(node) == SubModuleNode
1221 1248 assert node.url == submodule_url
1222 1249
1223 1250 def test_get_nodes_returns_links(self):
1224 1251 repository = mock.MagicMock()
1225 1252 repository.alias = 'git'
1226 1253 repository._remote.tree_items.return_value = [
1227 1254 ('subrepo', 'stat', 1, 'link')
1228 1255 ]
1229 1256 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1230 1257 submodule_url = 'https://code.rhodecode.com/dulwich'
1231 1258 get_id_patch = mock.patch.object(
1232 1259 commit, '_get_id_for_path', return_value=(1, 'tree'))
1233 1260 get_submodule_patch = mock.patch.object(
1234 1261 commit, '_get_submodule_url', return_value=submodule_url)
1235 1262
1236 1263 with get_id_patch, get_submodule_patch as submodule_mock:
1237 1264 nodes = commit.get_nodes('/abcde')
1238 1265
1239 1266 submodule_mock.assert_called_once_with('/abcde/subrepo')
1240 1267 assert len(nodes) == 1
1241 1268 assert type(nodes[0]) == SubModuleNode
1242 1269 assert nodes[0].url == submodule_url
@@ -1,1172 +1,1180 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22
23 23 import mock
24 24 import pytest
25 25
26 import rhodecode.lib.vcs.conf.settings
26 from rhodecode.lib.utils import make_db_config
27 27 from rhodecode.lib.vcs import backends
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 Reference, MergeResponse, MergeFailureReason)
30 30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
31 31 from rhodecode.lib.vcs.exceptions import (
32 CommitError, RepositoryError, VCSError, NodeDoesNotExistError,
33 CommitDoesNotExistError)
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
34 33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
35 34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE
36 35
37 36
38 37 pytestmark = pytest.mark.backends("hg")
39 38
40 39
41 40 def repo_path_generator():
42 41 """
43 42 Return a different path to be used for cloning repos.
44 43 """
45 44 i = 0
46 45 while True:
47 46 i += 1
48 47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
49 48
50 49
51 50 REPO_PATH_GENERATOR = repo_path_generator()
52 51
53 52
54 53 @pytest.fixture(scope='class', autouse=True)
55 54 def repo(request, pylonsapp):
56 55 repo = MercurialRepository(TEST_HG_REPO)
57 56 if request.cls:
58 57 request.cls.repo = repo
59 58 return repo
60 59
61 60
62 61 class TestMercurialRepository:
63 62
64 63 # pylint: disable=protected-access
65 64
66 65 def get_clone_repo(self):
67 66 """
68 67 Return a clone of the base repo.
69 68 """
70 69 clone_path = next(REPO_PATH_GENERATOR)
71 70 repo_clone = MercurialRepository(
72 71 clone_path, create=True, src_url=self.repo.path)
73 72
74 73 return repo_clone
75 74
76 75 def get_empty_repo(self):
77 76 """
78 77 Return an empty repo.
79 78 """
80 79 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
81 80
82 81 def test_wrong_repo_path(self):
83 82 wrong_repo_path = '/tmp/errorrepo'
84 83 with pytest.raises(RepositoryError):
85 84 MercurialRepository(wrong_repo_path)
86 85
87 86 def test_unicode_path_repo(self):
88 87 with pytest.raises(VCSError):
89 88 MercurialRepository(u'iShouldFail')
90 89
91 90 def test_unicode_commit_id(self):
92 91 with pytest.raises(CommitDoesNotExistError):
93 92 self.repo.get_commit(u'unicode-commit-id')
94 93 with pytest.raises(CommitDoesNotExistError):
95 94 self.repo.get_commit(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-commit-id')
96 95
97 96 def test_unicode_bookmark(self):
98 97 self.repo.bookmark(u'unicode-bookmark')
99 98 self.repo.bookmark(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-bookmark')
100 99
101 100 def test_unicode_branch(self):
102 101 with pytest.raises(KeyError):
103 102 self.repo.branches[u'unicode-branch']
104 103 with pytest.raises(KeyError):
105 104 self.repo.branches[u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-branch']
106 105
107 106 def test_repo_clone(self):
108 107 if os.path.exists(TEST_HG_REPO_CLONE):
109 108 self.fail(
110 109 'Cannot test mercurial clone repo as location %s already '
111 110 'exists. You should manually remove it first.'
112 111 % TEST_HG_REPO_CLONE)
113 112
114 113 repo = MercurialRepository(TEST_HG_REPO)
115 114 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
116 115 src_url=TEST_HG_REPO)
117 116 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
118 117 # Checking hashes of commits should be enough
119 118 for commit in repo.get_commits():
120 119 raw_id = commit.raw_id
121 120 assert raw_id == repo_clone.get_commit(raw_id).raw_id
122 121
123 122 def test_repo_clone_with_update(self):
124 123 repo = MercurialRepository(TEST_HG_REPO)
125 124 repo_clone = MercurialRepository(
126 125 TEST_HG_REPO_CLONE + '_w_update',
127 126 src_url=TEST_HG_REPO, update_after_clone=True)
128 127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
129 128
130 129 # check if current workdir was updated
131 130 assert os.path.isfile(
132 131 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
133 132
134 133 def test_repo_clone_without_update(self):
135 134 repo = MercurialRepository(TEST_HG_REPO)
136 135 repo_clone = MercurialRepository(
137 136 TEST_HG_REPO_CLONE + '_wo_update',
138 137 src_url=TEST_HG_REPO, update_after_clone=False)
139 138 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
140 139 assert not os.path.isfile(
141 140 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
142 141
143 142 def test_commit_ids(self):
144 143 # there are 21 commits at bitbucket now
145 144 # so we can assume they would be available from now on
146 145 subset = set([
147 146 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
148 147 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
149 148 '6cba7170863a2411822803fa77a0a264f1310b35',
150 149 '56349e29c2af3ac913b28bde9a2c6154436e615b',
151 150 '2dda4e345facb0ccff1a191052dd1606dba6781d',
152 151 '6fff84722075f1607a30f436523403845f84cd9e',
153 152 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
154 153 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
155 154 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
156 155 'be90031137367893f1c406e0a8683010fd115b79',
157 156 'db8e58be770518cbb2b1cdfa69146e47cd481481',
158 157 '84478366594b424af694a6c784cb991a16b87c21',
159 158 '17f8e105dddb9f339600389c6dc7175d395a535c',
160 159 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
161 160 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
162 161 '786facd2c61deb9cf91e9534735124fb8fc11842',
163 162 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
164 163 'aa6a0de05b7612707db567078e130a6cd114a9a7',
165 164 'eada5a770da98ab0dd7325e29d00e0714f228d09'
166 165 ])
167 166 assert subset.issubset(set(self.repo.commit_ids))
168 167
169 168 # check if we have the proper order of commits
170 169 org = [
171 170 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
172 171 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
173 172 '6cba7170863a2411822803fa77a0a264f1310b35',
174 173 '56349e29c2af3ac913b28bde9a2c6154436e615b',
175 174 '2dda4e345facb0ccff1a191052dd1606dba6781d',
176 175 '6fff84722075f1607a30f436523403845f84cd9e',
177 176 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
178 177 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
179 178 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
180 179 'be90031137367893f1c406e0a8683010fd115b79',
181 180 'db8e58be770518cbb2b1cdfa69146e47cd481481',
182 181 '84478366594b424af694a6c784cb991a16b87c21',
183 182 '17f8e105dddb9f339600389c6dc7175d395a535c',
184 183 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
185 184 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
186 185 '786facd2c61deb9cf91e9534735124fb8fc11842',
187 186 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
188 187 'aa6a0de05b7612707db567078e130a6cd114a9a7',
189 188 'eada5a770da98ab0dd7325e29d00e0714f228d09',
190 189 '2c1885c735575ca478bf9e17b0029dca68824458',
191 190 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
192 191 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
193 192 '4fb8326d78e5120da2c7468dcf7098997be385da',
194 193 '62b4a097164940bd66030c4db51687f3ec035eed',
195 194 '536c1a19428381cfea92ac44985304f6a8049569',
196 195 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
197 196 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
198 197 'f8940bcb890a98c4702319fbe36db75ea309b475',
199 198 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
200 199 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
201 200 'ee87846a61c12153b51543bf860e1026c6d3dcba',
202 201 ]
203 202 assert org == self.repo.commit_ids[:31]
204 203
205 204 def test_iter_slice(self):
206 205 sliced = list(self.repo[:10])
207 206 itered = list(self.repo)[:10]
208 207 assert sliced == itered
209 208
210 209 def test_slicing(self):
211 210 # 4 1 5 10 95
212 211 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
213 212 (10, 20, 10), (5, 100, 95)]:
214 213 indexes = list(self.repo[sfrom:sto])
215 214 assert len(indexes) == size
216 215 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
217 216 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
218 217
219 218 def test_branches(self):
220 219 # TODO: Need more tests here
221 220
222 221 # active branches
223 222 assert 'default' in self.repo.branches
224 223 assert 'stable' in self.repo.branches
225 224
226 225 # closed
227 226 assert 'git' in self.repo._get_branches(closed=True)
228 227 assert 'web' in self.repo._get_branches(closed=True)
229 228
230 229 for name, id in self.repo.branches.items():
231 230 assert isinstance(self.repo.get_commit(id), MercurialCommit)
232 231
233 232 def test_tip_in_tags(self):
234 233 # tip is always a tag
235 234 assert 'tip' in self.repo.tags
236 235
237 236 def test_tip_commit_in_tags(self):
238 237 tip = self.repo.get_commit()
239 238 assert self.repo.tags['tip'] == tip.raw_id
240 239
241 240 def test_initial_commit(self):
242 241 init_commit = self.repo.get_commit(commit_idx=0)
243 242 init_author = init_commit.author
244 243
245 244 assert init_commit.message == 'initial import'
246 245 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
247 246 assert init_author == init_commit.committer
248 247 assert sorted(init_commit._file_paths) == sorted([
249 248 'vcs/__init__.py',
250 249 'vcs/backends/BaseRepository.py',
251 250 'vcs/backends/__init__.py',
252 251 ])
253 252 assert sorted(init_commit._dir_paths) == sorted(
254 253 ['', 'vcs', 'vcs/backends'])
255 254
256 255 assert init_commit._dir_paths + init_commit._file_paths == \
257 256 init_commit._paths
258 257
259 258 with pytest.raises(NodeDoesNotExistError):
260 259 init_commit.get_node(path='foobar')
261 260
262 261 node = init_commit.get_node('vcs/')
263 262 assert hasattr(node, 'kind')
264 263 assert node.kind == NodeKind.DIR
265 264
266 265 node = init_commit.get_node('vcs')
267 266 assert hasattr(node, 'kind')
268 267 assert node.kind == NodeKind.DIR
269 268
270 269 node = init_commit.get_node('vcs/__init__.py')
271 270 assert hasattr(node, 'kind')
272 271 assert node.kind == NodeKind.FILE
273 272
274 273 def test_not_existing_commit(self):
275 274 # rawid
276 275 with pytest.raises(RepositoryError):
277 276 self.repo.get_commit('abcd' * 10)
278 277 # shortid
279 278 with pytest.raises(RepositoryError):
280 279 self.repo.get_commit('erro' * 4)
281 280 # numeric
282 281 with pytest.raises(RepositoryError):
283 282 self.repo.get_commit(commit_idx=self.repo.count() + 1)
284 283
285 284 # Small chance we ever get to this one
286 285 idx = pow(2, 30)
287 286 with pytest.raises(RepositoryError):
288 287 self.repo.get_commit(commit_idx=idx)
289 288
290 289 def test_commit10(self):
291 290 commit10 = self.repo.get_commit(commit_idx=10)
292 291 README = """===
293 292 VCS
294 293 ===
295 294
296 295 Various Version Control System management abstraction layer for Python.
297 296
298 297 Introduction
299 298 ------------
300 299
301 300 TODO: To be written...
302 301
303 302 """
304 303 node = commit10.get_node('README.rst')
305 304 assert node.kind == NodeKind.FILE
306 305 assert node.content == README
307 306
308 307 def test_local_clone(self):
309 308 clone_path = next(REPO_PATH_GENERATOR)
310 309 self.repo._local_clone(clone_path)
311 310 repo_clone = MercurialRepository(clone_path)
312 311
313 312 assert self.repo.commit_ids == repo_clone.commit_ids
314 313
315 314 def test_local_clone_fails_if_target_exists(self):
316 315 with pytest.raises(RepositoryError):
317 316 self.repo._local_clone(self.repo.path)
318 317
319 318 def test_update(self):
320 319 repo_clone = self.get_clone_repo()
321 320 branches = repo_clone.branches
322 321
323 322 repo_clone._update('default')
324 323 assert branches['default'] == repo_clone._identify()
325 324 repo_clone._update('stable')
326 325 assert branches['stable'] == repo_clone._identify()
327 326
328 327 def test_local_pull_branch(self):
329 328 target_repo = self.get_empty_repo()
330 329 source_repo = self.get_clone_repo()
331 330
332 331 default = Reference(
333 332 'branch', 'default', source_repo.branches['default'])
334 333 target_repo._local_pull(source_repo.path, default)
335 334 target_repo = MercurialRepository(target_repo.path)
336 335 assert (target_repo.branches['default'] ==
337 336 source_repo.branches['default'])
338 337
339 338 stable = Reference('branch', 'stable', source_repo.branches['stable'])
340 339 target_repo._local_pull(source_repo.path, stable)
341 340 target_repo = MercurialRepository(target_repo.path)
342 341 assert target_repo.branches['stable'] == source_repo.branches['stable']
343 342
344 343 def test_local_pull_bookmark(self):
345 344 target_repo = self.get_empty_repo()
346 345 source_repo = self.get_clone_repo()
347 346
348 347 commits = list(source_repo.get_commits(branch_name='default'))
349 348 foo1_id = commits[-5].raw_id
350 349 foo1 = Reference('book', 'foo1', foo1_id)
351 350 source_repo._update(foo1_id)
352 351 source_repo.bookmark('foo1')
353 352
354 353 foo2_id = commits[-3].raw_id
355 354 foo2 = Reference('book', 'foo2', foo2_id)
356 355 source_repo._update(foo2_id)
357 356 source_repo.bookmark('foo2')
358 357
359 358 target_repo._local_pull(source_repo.path, foo1)
360 359 target_repo = MercurialRepository(target_repo.path)
361 360 assert target_repo.branches['default'] == commits[-5].raw_id
362 361
363 362 target_repo._local_pull(source_repo.path, foo2)
364 363 target_repo = MercurialRepository(target_repo.path)
365 364 assert target_repo.branches['default'] == commits[-3].raw_id
366 365
367 366 def test_local_pull_commit(self):
368 367 target_repo = self.get_empty_repo()
369 368 source_repo = self.get_clone_repo()
370 369
371 370 commits = list(source_repo.get_commits(branch_name='default'))
372 371 commit_id = commits[-5].raw_id
373 372 commit = Reference('rev', commit_id, commit_id)
374 373 target_repo._local_pull(source_repo.path, commit)
375 374 target_repo = MercurialRepository(target_repo.path)
376 375 assert target_repo.branches['default'] == commit_id
377 376
378 377 commit_id = commits[-3].raw_id
379 378 commit = Reference('rev', commit_id, commit_id)
380 379 target_repo._local_pull(source_repo.path, commit)
381 380 target_repo = MercurialRepository(target_repo.path)
382 381 assert target_repo.branches['default'] == commit_id
383 382
384 383 def test_local_pull_from_same_repo(self):
385 384 reference = Reference('branch', 'default', None)
386 385 with pytest.raises(ValueError):
387 386 self.repo._local_pull(self.repo.path, reference)
388 387
389 388 def test_validate_pull_reference_raises_on_missing_reference(
390 389 self, vcsbackend_hg):
391 390 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
392 391 reference = Reference(
393 392 'book', 'invalid_reference', 'a' * 40)
394 393
395 394 with pytest.raises(CommitDoesNotExistError):
396 395 target_repo._validate_pull_reference(reference)
397 396
398 397 def test_heads(self):
399 398 assert set(self.repo._heads()) == set(self.repo.branches.values())
400 399
401 400 def test_ancestor(self):
402 401 commits = [
403 402 c.raw_id for c in self.repo.get_commits(branch_name='default')]
404 403 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
405 404 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
406 405
407 406 def test_local_push(self):
408 407 target_repo = self.get_empty_repo()
409 408
410 409 revisions = list(self.repo.get_commits(branch_name='default'))
411 410 revision = revisions[-5].raw_id
412 411 self.repo._local_push(revision, target_repo.path)
413 412
414 413 target_repo = MercurialRepository(target_repo.path)
415 414
416 415 assert target_repo.branches['default'] == revision
417 416
418 417 def test_hooks_can_be_enabled_for_local_push(self):
419 418 revision = 'deadbeef'
420 419 repo_path = 'test_group/test_repo'
421 420 with mock.patch.object(self.repo, '_remote') as remote_mock:
422 421 self.repo._local_push(revision, repo_path, enable_hooks=True)
423 422 remote_mock.push.assert_called_once_with(
424 423 [revision], repo_path, hooks=True, push_branches=False)
425 424
426 425 def test_local_merge(self, vcsbackend_hg):
427 426 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
428 427 source_repo = vcsbackend_hg.clone_repo(target_repo)
429 428 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
430 429 target_repo = MercurialRepository(target_repo.path)
431 430 target_rev = target_repo.branches['default']
432 431 target_ref = Reference(
433 432 type='branch', name='default', commit_id=target_rev)
434 433 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
435 434 source_repo = MercurialRepository(source_repo.path)
436 435 source_rev = source_repo.branches['default']
437 436 source_ref = Reference(
438 437 type='branch', name='default', commit_id=source_rev)
439 438
440 439 target_repo._local_pull(source_repo.path, source_ref)
441 440
442 441 merge_message = 'Merge message\n\nDescription:...'
443 442 user_name = 'Albert Einstein'
444 443 user_email = 'albert@einstein.com'
445 444 merge_commit_id, needs_push = target_repo._local_merge(
446 445 target_ref, merge_message, user_name, user_email, source_ref)
447 446 assert needs_push
448 447
449 448 target_repo = MercurialRepository(target_repo.path)
450 449 assert target_repo.commit_ids[-3] == target_rev
451 450 assert target_repo.commit_ids[-2] == source_rev
452 451 last_commit = target_repo.get_commit(merge_commit_id)
453 452 assert last_commit.message.strip() == merge_message
454 453 assert last_commit.author == '%s <%s>' % (user_name, user_email)
455 454
456 455 assert not os.path.exists(
457 456 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
458 457
459 458 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
460 459 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
461 460 source_repo = vcsbackend_hg.clone_repo(target_repo)
462 461 target_rev = target_repo.branches['default']
463 462 target_ref = Reference(
464 463 type='branch', name='default', commit_id=target_rev)
465 464 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
466 465 source_repo = MercurialRepository(source_repo.path)
467 466 source_rev = source_repo.branches['default']
468 467 source_ref = Reference(
469 468 type='branch', name='default', commit_id=source_rev)
470 469
471 470 target_repo._local_pull(source_repo.path, source_ref)
472 471
473 472 merge_message = 'Merge message\n\nDescription:...'
474 473 user_name = 'Albert Einstein'
475 474 user_email = 'albert@einstein.com'
476 475 merge_commit_id, needs_push = target_repo._local_merge(
477 476 target_ref, merge_message, user_name, user_email, source_ref)
478 477 assert merge_commit_id == source_rev
479 478 assert needs_push
480 479
481 480 target_repo = MercurialRepository(target_repo.path)
482 481 assert target_repo.commit_ids[-2] == target_rev
483 482 assert target_repo.commit_ids[-1] == source_rev
484 483
485 484 assert not os.path.exists(
486 485 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
487 486
488 487 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
489 488 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
490 489 target_rev = target_repo.branches['default']
491 490 target_ref = Reference(
492 491 type='branch', name='default', commit_id=target_rev)
493 492
494 493 merge_message = 'Merge message\n\nDescription:...'
495 494 user_name = 'Albert Einstein'
496 495 user_email = 'albert@einstein.com'
497 496 merge_commit_id, needs_push = target_repo._local_merge(
498 497 target_ref, merge_message, user_name, user_email, target_ref)
499 498 assert merge_commit_id == target_rev
500 499 assert not needs_push
501 500
502 501 target_repo = MercurialRepository(target_repo.path)
503 502 assert target_repo.commit_ids[-1] == target_rev
504 503
505 504 assert not os.path.exists(
506 505 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
507 506
508 507 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
509 508 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
510 509 source_repo = vcsbackend_hg.clone_repo(target_repo)
511 510 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
512 511 target_repo = MercurialRepository(target_repo.path)
513 512 target_rev = target_repo.branches['default']
514 513 target_ref = Reference(
515 514 type='branch', name='default', commit_id=target_rev)
516 515 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
517 516 source_repo = MercurialRepository(source_repo.path)
518 517 source_rev = source_repo.branches['default']
519 518 source_ref = Reference(
520 519 type='branch', name='default', commit_id=source_rev)
521 520
522 521 target_repo._local_pull(source_repo.path, source_ref)
523 522 with pytest.raises(RepositoryError):
524 523 target_repo._local_merge(
525 524 target_ref, 'merge_message', 'user name', 'user@name.com',
526 525 source_ref)
527 526
528 527 # Check we are not left in an intermediate merge state
529 528 assert not os.path.exists(
530 529 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
531 530
532 531 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
533 532 commits = [
534 533 {'message': 'a'},
535 534 {'message': 'b', 'branch': 'b'},
536 535 ]
537 536 repo = backend_hg.create_repo(commits)
538 537 commit_ids = backend_hg.commit_ids
539 538 target_ref = Reference(
540 539 type='branch', name='default', commit_id=commit_ids['a'])
541 540 source_ref = Reference(
542 541 type='branch', name='b', commit_id=commit_ids['b'])
543 542 merge_message = 'Merge message\n\nDescription:...'
544 543 user_name = 'Albert Einstein'
545 544 user_email = 'albert@einstein.com'
546 545 vcs_repo = repo.scm_instance()
547 546 merge_commit_id, needs_push = vcs_repo._local_merge(
548 547 target_ref, merge_message, user_name, user_email, source_ref)
549 548 assert merge_commit_id != source_ref.commit_id
550 549 assert needs_push is True
551 550 commit = vcs_repo.get_commit(merge_commit_id)
552 551 assert commit.merge is True
553 552 assert commit.message == merge_message
554 553
555 554 def test_maybe_prepare_merge_workspace(self):
556 555 workspace = self.repo._maybe_prepare_merge_workspace('pr2', 'unused')
557 556
558 557 assert os.path.isdir(workspace)
559 558 workspace_repo = MercurialRepository(workspace)
560 559 assert workspace_repo.branches == self.repo.branches
561 560
562 561 # Calling it a second time should also succeed
563 562 workspace = self.repo._maybe_prepare_merge_workspace('pr2', 'unused')
564 563 assert os.path.isdir(workspace)
565 564
566 565 def test_cleanup_merge_workspace(self):
567 566 workspace = self.repo._maybe_prepare_merge_workspace('pr3', 'unused')
568 567 self.repo.cleanup_merge_workspace('pr3')
569 568
570 569 assert not os.path.exists(workspace)
571 570
572 571 def test_cleanup_merge_workspace_invalid_workspace_id(self):
573 572 # No assert: because in case of an inexistent workspace this function
574 573 # should still succeed.
575 574 self.repo.cleanup_merge_workspace('pr4')
576 575
577 576 def test_merge_target_is_bookmark(self, vcsbackend_hg):
578 577 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
579 578 source_repo = vcsbackend_hg.clone_repo(target_repo)
580 579 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
581 580 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
582 581 imc = source_repo.in_memory_commit
583 582 imc.add(FileNode('file_x', content=source_repo.name))
584 583 imc.commit(
585 584 message=u'Automatic commit from repo merge test',
586 585 author=u'Automatic')
587 586 target_commit = target_repo.get_commit()
588 587 source_commit = source_repo.get_commit()
589 588 default_branch = target_repo.DEFAULT_BRANCH_NAME
590 589 bookmark_name = 'bookmark'
591 590 target_repo._update(default_branch)
592 591 target_repo.bookmark(bookmark_name)
593 592 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
594 593 source_ref = Reference('branch', default_branch, source_commit.raw_id)
595 594 workspace = 'test-merge'
596 595
597 596 merge_response = target_repo.merge(
598 597 target_ref, source_repo, source_ref, workspace,
599 598 'test user', 'test@rhodecode.com', 'merge message 1',
600 599 dry_run=False)
601 600 expected_merge_response = MergeResponse(
602 601 True, True, merge_response.merge_ref,
603 602 MergeFailureReason.NONE)
604 603 assert merge_response == expected_merge_response
605 604
606 605 target_repo = backends.get_backend(vcsbackend_hg.alias)(
607 606 target_repo.path)
608 607 target_commits = list(target_repo.get_commits())
609 608 commit_ids = [c.raw_id for c in target_commits[:-1]]
610 609 assert source_ref.commit_id in commit_ids
611 610 assert target_ref.commit_id in commit_ids
612 611
613 612 merge_commit = target_commits[-1]
614 613 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
615 614 assert merge_commit.message.strip() == 'merge message 1'
616 615 assert merge_commit.author == 'test user <test@rhodecode.com>'
617 616
618 617 # Check the bookmark was updated in the target repo
619 618 assert (
620 619 target_repo.bookmarks[bookmark_name] ==
621 620 merge_response.merge_ref.commit_id)
622 621
623 622 def test_merge_source_is_bookmark(self, vcsbackend_hg):
624 623 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
625 624 source_repo = vcsbackend_hg.clone_repo(target_repo)
626 625 imc = source_repo.in_memory_commit
627 626 imc.add(FileNode('file_x', content=source_repo.name))
628 627 imc.commit(
629 628 message=u'Automatic commit from repo merge test',
630 629 author=u'Automatic')
631 630 target_commit = target_repo.get_commit()
632 631 source_commit = source_repo.get_commit()
633 632 default_branch = target_repo.DEFAULT_BRANCH_NAME
634 633 bookmark_name = 'bookmark'
635 634 target_ref = Reference('branch', default_branch, target_commit.raw_id)
636 635 source_repo._update(default_branch)
637 636 source_repo.bookmark(bookmark_name)
638 637 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
639 638 workspace = 'test-merge'
640 639
641 640 merge_response = target_repo.merge(
642 641 target_ref, source_repo, source_ref, workspace,
643 642 'test user', 'test@rhodecode.com', 'merge message 1',
644 643 dry_run=False)
645 644 expected_merge_response = MergeResponse(
646 645 True, True, merge_response.merge_ref,
647 646 MergeFailureReason.NONE)
648 647 assert merge_response == expected_merge_response
649 648
650 649 target_repo = backends.get_backend(vcsbackend_hg.alias)(
651 650 target_repo.path)
652 651 target_commits = list(target_repo.get_commits())
653 652 commit_ids = [c.raw_id for c in target_commits]
654 653 assert source_ref.commit_id == commit_ids[-1]
655 654 assert target_ref.commit_id == commit_ids[-2]
656 655
657 656 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
658 657 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
659 658 source_repo = vcsbackend_hg.clone_repo(target_repo)
660 659 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
661 660 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
662 661
663 662 # add an extra head to the target repo
664 663 imc = target_repo.in_memory_commit
665 664 imc.add(FileNode('file_x', content='foo'))
666 665 commits = list(target_repo.get_commits())
667 666 imc.commit(
668 667 message=u'Automatic commit from repo merge test',
669 668 author=u'Automatic', parents=commits[0:1])
670 669
671 670 target_commit = target_repo.get_commit()
672 671 source_commit = source_repo.get_commit()
673 672 default_branch = target_repo.DEFAULT_BRANCH_NAME
674 673 target_repo._update(default_branch)
675 674
676 675 target_ref = Reference('branch', default_branch, target_commit.raw_id)
677 676 source_ref = Reference('branch', default_branch, source_commit.raw_id)
678 677 workspace = 'test-merge'
679 678
680 679 assert len(target_repo._heads(branch='default')) == 2
681 680 expected_merge_response = MergeResponse(
682 681 False, False, None,
683 682 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
684 683 merge_response = target_repo.merge(
685 684 target_ref, source_repo, source_ref, workspace,
686 685 'test user', 'test@rhodecode.com', 'merge message 1',
687 686 dry_run=False)
688 687 assert merge_response == expected_merge_response
689 688
690 689 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
691 690 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
692 691 source_repo = vcsbackend_hg.clone_repo(target_repo)
693 692 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
694 693 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
695 694 imc = source_repo.in_memory_commit
696 695 imc.add(FileNode('file_x', content=source_repo.name))
697 696 imc.commit(
698 697 message=u'Automatic commit from repo merge test',
699 698 author=u'Automatic')
700 699 target_commit = target_repo.get_commit()
701 700 source_commit = source_repo.get_commit()
702 701
703 702 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
704 703
705 704 default_branch = target_repo.DEFAULT_BRANCH_NAME
706 705 bookmark_name = 'bookmark'
707 706 source_repo._update(default_branch)
708 707 source_repo.bookmark(bookmark_name)
709 708
710 709 target_ref = Reference('branch', default_branch, target_commit.raw_id)
711 710 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
712 711 workspace = 'test-merge'
713 712
714 713 merge_response = target_repo.merge(
715 714 target_ref, source_repo, source_ref, workspace,
716 715 'test user', 'test@rhodecode.com', 'merge message 1',
717 716 dry_run=False, use_rebase=True)
718 717
719 718 expected_merge_response = MergeResponse(
720 719 True, True, merge_response.merge_ref,
721 720 MergeFailureReason.NONE)
722 721 assert merge_response == expected_merge_response
723 722
724 723 target_repo = backends.get_backend(vcsbackend_hg.alias)(
725 724 target_repo.path)
726 725 last_commit = target_repo.get_commit()
727 726 assert last_commit.message == source_commit.message
728 727 assert last_commit.author == source_commit.author
729 728 # This checks that we effectively did a rebase
730 729 assert last_commit.raw_id != source_commit.raw_id
731 730
732 731 # Check the target has only 4 commits: 2 were already in target and
733 732 # only two should have been added
734 733 assert len(target_repo.commit_ids) == 2 + 2
735 734
736 735
737 736 class TestGetShadowInstance(object):
738 737
739 738 @pytest.fixture
740 739 def repo(self, vcsbackend_hg, monkeypatch):
741 740 repo = vcsbackend_hg.repo
742 741 monkeypatch.setattr(repo, 'config', mock.Mock())
743 742 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
744 743 return repo
745 744
746 745 def test_passes_config(self, repo):
747 746 shadow = repo._get_shadow_instance(repo.path)
748 747 assert shadow.config == repo.config.copy()
749 748
750 749 def test_disables_hooks(self, repo):
751 750 shadow = repo._get_shadow_instance(repo.path)
752 751 shadow.config.clear_section.assert_called_once_with('hooks')
753 752
754 753 def test_allows_to_keep_hooks(self, repo):
755 754 shadow = repo._get_shadow_instance(repo.path, enable_hooks=True)
756 755 assert not shadow.config.clear_section.called
757 756
758 757
class TestMercurialCommit(object):
    """Behaviour of commits on the Mercurial test repository.

    NOTE(review): relies on a ``self.repo`` attribute provided by the
    test base class / fixture outside this chunk -- confirm setup.
    """

    def _test_equality(self, commit):
        # A commit re-fetched by its index must compare equal to itself.
        idx = commit.idx
        assert commit == self.repo.get_commit(commit_idx=idx)

    def test_equality(self):
        indexes = [0, 10, 20]
        commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
        for commit in commits:
            self._test_equality(commit)

    def test_default_commit(self):
        # All "no argument" spellings must resolve to the tip commit.
        tip = self.repo.get_commit('tip')
        assert tip == self.repo.get_commit()
        assert tip == self.repo.get_commit(commit_id=None)
        assert tip == self.repo.get_commit(commit_idx=None)
        assert tip == list(self.repo[-1:])[0]

    def test_root_node(self):
        tip = self.repo.get_commit('tip')
        assert tip.root is tip.get_node('')

    def test_lazy_fetch(self):
        """
        Test if commit's nodes expands and are cached as we walk through
        the commit. This test is somewhat hard to write as order of tests
        is a key here. Written by running command after command in a shell.
        """
        commit = self.repo.get_commit(commit_idx=45)
        assert len(commit.nodes) == 0
        root = commit.root
        assert len(commit.nodes) == 1
        assert len(root.nodes) == 8
        # accessing root.nodes updates commit.nodes
        assert len(commit.nodes) == 9

        docs = root.get_node('docs')
        # we haven't yet accessed anything new as docs dir was already cached
        assert len(commit.nodes) == 9
        assert len(docs.nodes) == 8
        # accessing docs.nodes updates commit.nodes
        assert len(commit.nodes) == 17

        assert docs is commit.get_node('docs')
        assert docs is root.nodes[0]
        assert docs is root.dirs[0]
        assert docs is commit.get_node('docs')

    def test_nodes_with_commit(self):
        # Nodes fetched through different paths must be the same objects.
        commit = self.repo.get_commit(commit_idx=45)
        root = commit.root
        docs = root.get_node('docs')
        assert docs is commit.get_node('docs')
        api = docs.get_node('api')
        assert api is commit.get_node('docs/api')
        index = api.get_node('index.rst')
        assert index is commit.get_node('docs/api/index.rst')
        assert index is commit.get_node(
            'docs').get_node('api').get_node('index.rst')

    def test_branch_and_tags(self):
        commit0 = self.repo.get_commit(commit_idx=0)
        assert commit0.branch == 'default'
        assert commit0.tags == []

        commit10 = self.repo.get_commit(commit_idx=10)
        assert commit10.branch == 'default'
        assert commit10.tags == []

        commit44 = self.repo.get_commit(commit_idx=44)
        assert commit44.branch == 'web'

        tip = self.repo.get_commit('tip')
        assert 'tip' in tip.tags

    def test_bookmarks(self):
        commit0 = self.repo.get_commit(commit_idx=0)
        assert commit0.bookmarks == []

    def _test_file_size(self, idx, path, size):
        # Helper: the file node at (commit idx, path) has the given size.
        node = self.repo.get_commit(commit_idx=idx).get_node(path)
        assert node.is_file()
        assert node.size == size

    def test_file_size(self):
        to_check = (
            (10, 'setup.py', 1068),
            (20, 'setup.py', 1106),
            (60, 'setup.py', 1074),

            (10, 'vcs/backends/base.py', 2921),
            (20, 'vcs/backends/base.py', 3936),
            (60, 'vcs/backends/base.py', 6189),
        )
        for idx, path, size in to_check:
            self._test_file_size(idx, path, size)

    def test_file_history_from_commits(self):
        node = self.repo[10].get_node('setup.py')
        commit_ids = [commit.raw_id for commit in node.history]
        assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids

        node = self.repo[20].get_node('setup.py')
        node_ids = [commit.raw_id for commit in node.history]
        assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
                '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids

        # special case we check history from commit that has this particular
        # file changed this means we check if it's included as well
        node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
            .get_node('setup.py')
        node_ids = [commit.raw_id for commit in node.history]
        assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
                '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids

    def test_file_history(self):
        # we can only check if those commits are present in the history
        # as we cannot update this test every time file is changed
        files = {
            'setup.py': [7, 18, 45, 46, 47, 69, 77],
            'vcs/nodes.py': [
                7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
            'vcs/backends/hg.py': [
                4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
                31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
                55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
        }
        for path, indexes in files.items():
            tip = self.repo.get_commit(commit_idx=indexes[-1])
            node = tip.get_node(path)
            node_indexes = [commit.idx for commit in node.history]
            assert set(indexes).issubset(set(node_indexes)), (
                "We assumed that %s is subset of commits for which file %s "
                "has been changed, and history of that node returned: %s"
                % (indexes, path, node_indexes))

    def test_file_annotate(self):
        # Expected per-line commit indexes for a few files at a few commits.
        files = {
            'vcs/backends/__init__.py': {
                89: {
                    'lines_no': 31,
                    'commits': [
                        32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
                        37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
                        32, 32, 32, 32, 37, 32, 37, 37, 32,
                        32, 32
                    ]
                },
                20: {
                    'lines_no': 1,
                    'commits': [4]
                },
                55: {
                    'lines_no': 31,
                    'commits': [
                        32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
                        37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
                        32, 32, 32, 32, 37, 32, 37, 37, 32,
                        32, 32
                    ]
                }
            },
            'vcs/exceptions.py': {
                89: {
                    'lines_no': 18,
                    'commits': [
                        16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
                        16, 16, 17, 16, 16, 18, 18, 18
                    ]
                },
                20: {
                    'lines_no': 18,
                    'commits': [
                        16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
                        16, 16, 17, 16, 16, 18, 18, 18
                    ]
                },
                55: {
                    'lines_no': 18,
                    'commits': [
                        16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
                        17, 16, 16, 18, 18, 18
                    ]
                }
            },
            'MANIFEST.in': {
                89: {
                    'lines_no': 5,
                    'commits': [7, 7, 7, 71, 71]
                },
                20: {
                    'lines_no': 3,
                    'commits': [7, 7, 7]
                },
                55: {
                    'lines_no': 3,
                    'commits': [7, 7, 7]
                }
            }
        }

        for fname, commit_dict in files.items():
            for idx, __ in commit_dict.items():
                commit = self.repo.get_commit(commit_idx=idx)
                l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
                l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
                assert l1_1 == l1_2
                # FIX: dropped the pointless chained assignment
                # ``l1 = l1_2 = [...]`` which silently rebound l1_2.
                l1 = [x[2]().idx for x in commit.get_file_annotate(fname)]
                l2 = files[fname][idx]['commits']
                assert l1 == l2, (
                    "The lists of commit for %s@commit_id%s"
                    "from annotation list should match each other,"
                    "got \n%s \nvs \n%s " % (fname, idx, l1, l2))

    def test_commit_state(self):
        """
        Tests which files have been added/changed/removed at particular commit
        """

        # commit_id 46ad32a4f974:
        # hg st --rev 46ad32a4f974
        # changed: 13
        # added: 20
        # removed: 1
        changed = set([
            '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
            'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
            'vcs/__init__.py', 'vcs/backends/__init__.py',
            'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
            'vcs/utils/__init__.py'])

        added = set([
            'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
            'docs/api/index.rst', 'docs/api/nodes.rst',
            'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
            'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
            'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
            'vcs/web/__init__.py', 'vcs/web/exceptions.py',
            'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
            'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
            'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
            'vcs/web/simplevcs/views.py'])

        removed = set(['docs/api.rst'])

        commit64 = self.repo.get_commit('46ad32a4f974')
        assert set((node.path for node in commit64.added)) == added
        assert set((node.path for node in commit64.changed)) == changed
        assert set((node.path for node in commit64.removed)) == removed

        # commit_id b090f22d27d6:
        # hg st --rev b090f22d27d6
        # changed: 13
        # added: 20
        # removed: 1
        commit88 = self.repo.get_commit('b090f22d27d6')
        assert set((node.path for node in commit88.added)) == set()
        assert set((node.path for node in commit88.changed)) == \
            set(['.hgignore'])
        assert set((node.path for node in commit88.removed)) == set()

        #
        # 85:
        # added: 2 [
        #     'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
        # changed: 4 ['vcs/web/simplevcs/models.py', ...]
        # removed: 1 ['vcs/utils/web.py']
        commit85 = self.repo.get_commit(commit_idx=85)
        assert set((node.path for node in commit85.added)) == set([
            'vcs/utils/diffs.py',
            'vcs/web/simplevcs/views/diffs.py'])
        assert set((node.path for node in commit85.changed)) == set([
            'vcs/web/simplevcs/models.py',
            'vcs/web/simplevcs/utils.py',
            'vcs/web/simplevcs/views/__init__.py',
            'vcs/web/simplevcs/views/repository.py',
            ])
        assert set((node.path for node in commit85.removed)) == \
            set(['vcs/utils/web.py'])

    def test_files_state(self):
        """
        Tests state of FileNodes.
        """
        commit = self.repo.get_commit(commit_idx=85)
        node = commit.get_node('vcs/utils/diffs.py')
        # BUGFIX: the original read ``assert node.state, NodeState.ADDED``
        # which is an always-true assert with a message; compare explicitly.
        assert node.state == NodeState.ADDED
        assert node.added
        assert not node.changed
        assert not node.not_changed
        assert not node.removed

        commit = self.repo.get_commit(commit_idx=88)
        node = commit.get_node('.hgignore')
        assert node.state == NodeState.CHANGED
        assert not node.added
        assert node.changed
        assert not node.not_changed
        assert not node.removed

        commit = self.repo.get_commit(commit_idx=85)
        node = commit.get_node('setup.py')
        assert node.state == NodeState.NOT_CHANGED
        assert not node.added
        assert not node.changed
        assert node.not_changed
        assert not node.removed

        # If node has REMOVED state then trying to fetch it would raise
        # CommitError exception
        commit = self.repo.get_commit(commit_idx=2)
        path = 'vcs/backends/BaseRepository.py'
        with pytest.raises(NodeDoesNotExistError):
            commit.get_node(path)
        # but it would be one of ``removed`` (commit's attribute)
        assert path in [rf.path for rf in commit.removed]

    def test_commit_message_is_unicode(self):
        for cm in self.repo:
            assert type(cm.message) == unicode

    def test_commit_author_is_unicode(self):
        for cm in self.repo:
            assert type(cm.author) == unicode

    def test_repo_files_content_is_unicode(self):
        test_commit = self.repo.get_commit(commit_idx=100)
        for node in test_commit.get_node('/'):
            if node.is_file():
                assert type(node.content) == unicode

    def test_wrong_path(self):
        # There is 'setup.py' in the root dir but not there:
        path = 'foo/bar/setup.py'
        with pytest.raises(VCSError):
            self.repo.get_commit().get_node(path)

    def test_author_email(self):
        assert 'marcin@python-blog.com' == \
            self.repo.get_commit('b986218ba1c9').author_email
        assert 'lukasz.balcerzak@python-center.pl' == \
            self.repo.get_commit('3803844fdbd3').author_email
        assert '' == self.repo.get_commit('84478366594b').author_email

    def test_author_username(self):
        assert 'Marcin Kuzminski' == \
            self.repo.get_commit('b986218ba1c9').author_name
        assert 'Lukasz Balcerzak' == \
            self.repo.get_commit('3803844fdbd3').author_name
        assert 'marcink' == \
            self.repo.get_commit('84478366594b').author_name
class TestLargeFileRepo(object):
    """Largefiles: a pointer node must resolve to the real large file."""

    def test_large_file(self, backend_hg):
        largefiles_repo = backend_hg.create_test_repo(
            'largefiles', make_db_config())

        tip = largefiles_repo.scm_instance().get_commit()
        pointer_node = tip.get_node('.hglf/thisfileislarge')
        resolved = pointer_node.get_largefile_node()

        # The resolved node reports largefile semantics and the real size.
        assert resolved.is_largefile() is True
        assert resolved.size == 1024000
        assert resolved.name == '.hglf/thisfileislarge'
1127
class TestGetBranchName(object):
    """Tests for ``_get_branch_name`` ref-to-branch resolution."""

    def test_returns_ref_name_when_type_is_branch(self):
        # For a 'branch' ref the name is returned directly.
        ref = self._create_ref('branch', 'fake-name')
        result = self.repo._get_branch_name(ref)
        assert result == ref.name

    @pytest.mark.parametrize("type_", ("book", "tag"))
    def test_queries_remote_when_type_is_not_branch(self, type_):
        # Bookmarks and tags must be resolved through the remote.
        ref = self._create_ref(type_, 'wrong-fake-name')
        with mock.patch.object(self.repo, "_remote") as remote_mock:
            remote_mock.ctx_branch.return_value = "fake-name"
            result = self.repo._get_branch_name(ref)
        assert result == "fake-name"
        remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)

    def _create_ref(self, type_, name):
        # Build a minimal stand-in for a Reference object.
        ref = mock.Mock()
        ref.type = type_
        # BUGFIX: honour the ``name`` argument instead of hard-coding
        # 'wrong-fake-name'. All callers in this class keep their behaviour.
        ref.name = name
        ref.commit_id = "deadbeef"
        return ref
1141 1149
1142 1150
class TestIsTheSameBranch(object):
    """Tests for ``_is_the_same_branch`` comparing resolved branch names."""

    def test_returns_true_when_branches_are_equal(self):
        source_ref = mock.Mock(name="source-ref")
        target_ref = mock.Mock(name="target-ref")
        # Both refs resolve to the same branch name.
        branch_name_patcher = mock.patch.object(
            self.repo, "_get_branch_name", return_value="default")
        with branch_name_patcher as branch_name_mock:
            result = self.repo._is_the_same_branch(source_ref, target_ref)

        expected_calls = [mock.call(source_ref), mock.call(target_ref)]
        assert branch_name_mock.call_args_list == expected_calls
        assert result is True

    def test_returns_false_when_branches_are_not_equal(self):
        source_ref = mock.Mock(name="source-ref")
        source_ref.name = "source-branch"
        target_ref = mock.Mock(name="target-ref")
        # BUGFIX: the original re-assigned ``source_ref.name`` here; the
        # *target* ref must carry the differing branch name for this test
        # to actually compare two distinct branches.
        target_ref.name = "target-branch"

        def side_effect(ref):
            return ref.name

        branch_name_patcher = mock.patch.object(
            self.repo, "_get_branch_name", side_effect=side_effect)
        with branch_name_patcher as branch_name_mock:
            result = self.repo._is_the_same_branch(source_ref, target_ref)

        expected_calls = [mock.call(source_ref), mock.call(target_ref)]
        assert branch_name_mock.call_args_list == expected_calls
        assert result is False
General Comments 0
You need to be logged in to leave comments. Login now