@@ -1,230 +1,230 @@
1 | # hg.py - repository classes for mercurial |
|
1 | # hg.py - repository classes for mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2005 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms |
|
5 | # This software may be used and distributed according to the terms | |
6 | # of the GNU General Public License, incorporated herein by reference. |
|
6 | # of the GNU General Public License, incorporated herein by reference. | |
7 |
|
7 | |||
8 | from node import * |
|
8 | from node import * | |
9 | from repo import * |
|
9 | from repo import * | |
10 | from demandload import * |
|
10 | from demandload import * | |
11 | from i18n import gettext as _ |
|
11 | from i18n import gettext as _ | |
12 | demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo") |
|
12 | demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo") | |
13 | demandload(globals(), "errno lock os shutil util merge@_merge verify@_verify") |
|
13 | demandload(globals(), "errno lock os shutil util merge@_merge verify@_verify") | |
14 |
|
14 | |||
15 | def _local(path): |
|
15 | def _local(path): | |
16 | return (os.path.isfile(path and util.drop_scheme('file', path)) and |
|
16 | return (os.path.isfile(path and util.drop_scheme('file', path)) and | |
17 | bundlerepo or localrepo) |
|
17 | bundlerepo or localrepo) | |
18 |
|
18 | |||
19 | schemes = { |
|
19 | schemes = { | |
20 | 'bundle': bundlerepo, |
|
20 | 'bundle': bundlerepo, | |
21 | 'file': _local, |
|
21 | 'file': _local, | |
22 | 'hg': httprepo, |
|
22 | 'hg': httprepo, | |
23 | 'http': httprepo, |
|
23 | 'http': httprepo, | |
24 | 'https': httprepo, |
|
24 | 'https': httprepo, | |
25 | 'old-http': statichttprepo, |
|
25 | 'old-http': statichttprepo, | |
26 | 'ssh': sshrepo, |
|
26 | 'ssh': sshrepo, | |
27 | 'static-http': statichttprepo, |
|
27 | 'static-http': statichttprepo, | |
28 | } |
|
28 | } | |
29 |
|
29 | |||
30 | def _lookup(path): |
|
30 | def _lookup(path): | |
31 | scheme = 'file' |
|
31 | scheme = 'file' | |
32 | if path: |
|
32 | if path: | |
33 | c = path.find(':') |
|
33 | c = path.find(':') | |
34 | if c > 0: |
|
34 | if c > 0: | |
35 | scheme = path[:c] |
|
35 | scheme = path[:c] | |
36 | thing = schemes.get(scheme) or schemes['file'] |
|
36 | thing = schemes.get(scheme) or schemes['file'] | |
37 | try: |
|
37 | try: | |
38 | return thing(path) |
|
38 | return thing(path) | |
39 | except TypeError: |
|
39 | except TypeError: | |
40 | return thing |
|
40 | return thing | |
41 |
|
41 | |||
42 | def islocal(repo): |
|
42 | def islocal(repo): | |
43 | '''return true if repo or path is local''' |
|
43 | '''return true if repo or path is local''' | |
44 | if isinstance(repo, str): |
|
44 | if isinstance(repo, str): | |
45 | try: |
|
45 | try: | |
46 | return _lookup(repo).islocal(repo) |
|
46 | return _lookup(repo).islocal(repo) | |
47 | except AttributeError: |
|
47 | except AttributeError: | |
48 | return False |
|
48 | return False | |
49 | return repo.local() |
|
49 | return repo.local() | |
50 |
|
50 | |||
51 | def repository(ui, path=None, create=False): |
|
51 | def repository(ui, path=None, create=False): | |
52 | """return a repository object for the specified path""" |
|
52 | """return a repository object for the specified path""" | |
53 | return _lookup(path).instance(ui, path, create) |
|
53 | return _lookup(path).instance(ui, path, create) | |
54 |
|
54 | |||
55 | def defaultdest(source): |
|
55 | def defaultdest(source): | |
56 | '''return default destination of clone if none is given''' |
|
56 | '''return default destination of clone if none is given''' | |
57 | return os.path.basename(os.path.normpath(source)) |
|
57 | return os.path.basename(os.path.normpath(source)) | |
58 |
|
58 | |||
59 | def clone(ui, source, dest=None, pull=False, rev=None, update=True, |
|
59 | def clone(ui, source, dest=None, pull=False, rev=None, update=True, | |
60 | stream=False): |
|
60 | stream=False): | |
61 | """Make a copy of an existing repository. |
|
61 | """Make a copy of an existing repository. | |
62 |
|
62 | |||
63 | Create a copy of an existing repository in a new directory. The |
|
63 | Create a copy of an existing repository in a new directory. The | |
64 | source and destination are URLs, as passed to the repository |
|
64 | source and destination are URLs, as passed to the repository | |
65 | function. Returns a pair of repository objects, the source and |
|
65 | function. Returns a pair of repository objects, the source and | |
66 | newly created destination. |
|
66 | newly created destination. | |
67 |
|
67 | |||
68 | The location of the source is added to the new repository's |
|
68 | The location of the source is added to the new repository's | |
69 | .hg/hgrc file, as the default to be used for future pulls and |
|
69 | .hg/hgrc file, as the default to be used for future pulls and | |
70 | pushes. |
|
70 | pushes. | |
71 |
|
71 | |||
72 | If an exception is raised, the partly cloned/updated destination |
|
72 | If an exception is raised, the partly cloned/updated destination | |
73 | repository will be deleted. |
|
73 | repository will be deleted. | |
74 |
|
74 | |||
75 | Arguments: |
|
75 | Arguments: | |
76 |
|
76 | |||
77 | source: repository object or URL |
|
77 | source: repository object or URL | |
78 |
|
78 | |||
79 | dest: URL of destination repository to create (defaults to base |
|
79 | dest: URL of destination repository to create (defaults to base | |
80 | name of source repository) |
|
80 | name of source repository) | |
81 |
|
81 | |||
82 | pull: always pull from source repository, even in local case |
|
82 | pull: always pull from source repository, even in local case | |
83 |
|
83 | |||
84 | stream: stream raw data uncompressed from repository (fast over |
|
84 | stream: stream raw data uncompressed from repository (fast over | |
85 | LAN, slow over WAN) |
|
85 | LAN, slow over WAN) | |
86 |
|
86 | |||
87 | rev: revision to clone up to (implies pull=True) |
|
87 | rev: revision to clone up to (implies pull=True) | |
88 |
|
88 | |||
89 | update: update working directory after clone completes, if |
|
89 | update: update working directory after clone completes, if | |
90 | destination is local repository |
|
90 | destination is local repository | |
91 | """ |
|
91 | """ | |
92 | if isinstance(source, str): |
|
92 | if isinstance(source, str): | |
93 | src_repo = repository(ui, source) |
|
93 | src_repo = repository(ui, source) | |
94 | else: |
|
94 | else: | |
95 | src_repo = source |
|
95 | src_repo = source | |
96 | source = src_repo.url() |
|
96 | source = src_repo.url() | |
97 |
|
97 | |||
98 | if dest is None: |
|
98 | if dest is None: | |
99 | dest = defaultdest(source) |
|
99 | dest = defaultdest(source) | |
100 |
|
100 | |||
101 | def localpath(path): |
|
101 | def localpath(path): | |
102 | if path.startswith('file://'): |
|
102 | if path.startswith('file://'): | |
103 | return path[7:] |
|
103 | return path[7:] | |
104 | if path.startswith('file:'): |
|
104 | if path.startswith('file:'): | |
105 | return path[5:] |
|
105 | return path[5:] | |
106 | return path |
|
106 | return path | |
107 |
|
107 | |||
108 | dest = localpath(dest) |
|
108 | dest = localpath(dest) | |
109 | source = localpath(source) |
|
109 | source = localpath(source) | |
110 |
|
110 | |||
111 | if os.path.exists(dest): |
|
111 | if os.path.exists(dest): | |
112 | raise util.Abort(_("destination '%s' already exists"), dest) |
|
112 | raise util.Abort(_("destination '%s' already exists"), dest) | |
113 |
|
113 | |||
114 | class DirCleanup(object): |
|
114 | class DirCleanup(object): | |
115 | def __init__(self, dir_): |
|
115 | def __init__(self, dir_): | |
116 | self.rmtree = shutil.rmtree |
|
116 | self.rmtree = shutil.rmtree | |
117 | self.dir_ = dir_ |
|
117 | self.dir_ = dir_ | |
118 | def close(self): |
|
118 | def close(self): | |
119 | self.dir_ = None |
|
119 | self.dir_ = None | |
120 | def __del__(self): |
|
120 | def __del__(self): | |
121 | if self.dir_: |
|
121 | if self.dir_: | |
122 | self.rmtree(self.dir_, True) |
|
122 | self.rmtree(self.dir_, True) | |
123 |
|
123 | |||
124 | dest_repo = None |
|
124 | dest_repo = None | |
125 | try: |
|
125 | try: | |
126 | dest_repo = repository(ui, dest) |
|
126 | dest_repo = repository(ui, dest) | |
127 | raise util.Abort(_("destination '%s' already exists." % dest)) |
|
127 | raise util.Abort(_("destination '%s' already exists." % dest)) | |
128 | except RepoError: |
|
128 | except RepoError: | |
129 | dest_repo = repository(ui, dest, create=True) |
|
129 | dest_repo = repository(ui, dest, create=True) | |
130 |
|
130 | |||
131 | dest_path = None |
|
131 | dest_path = None | |
132 | dir_cleanup = None |
|
132 | dir_cleanup = None | |
133 | if dest_repo.local(): |
|
133 | if dest_repo.local(): | |
134 | dest_path = os.path.realpath(dest_repo.root) |
|
134 | dest_path = os.path.realpath(dest_repo.root) | |
135 | dir_cleanup = DirCleanup(dest_path) |
|
135 | dir_cleanup = DirCleanup(dest_path) | |
136 |
|
136 | |||
137 | abspath = source |
|
137 | abspath = source | |
138 | copy = False |
|
138 | copy = False | |
139 | if src_repo.local() and dest_repo.local(): |
|
139 | if src_repo.local() and dest_repo.local(): | |
140 | abspath = os.path.abspath(source) |
|
140 | abspath = os.path.abspath(source) | |
141 | copy = not pull and not rev |
|
141 | copy = not pull and not rev | |
142 |
|
142 | |||
143 | src_lock, dest_lock = None, None |
|
143 | src_lock, dest_lock = None, None | |
144 | if copy: |
|
144 | if copy: | |
145 | try: |
|
145 | try: | |
146 | # we use a lock here because if we race with commit, we |
|
146 | # we use a lock here because if we race with commit, we | |
147 | # can end up with extra data in the cloned revlogs that's |
|
147 | # can end up with extra data in the cloned revlogs that's | |
148 | # not pointed to by changesets, thus causing verify to |
|
148 | # not pointed to by changesets, thus causing verify to | |
149 | # fail |
|
149 | # fail | |
150 | src_lock = src_repo.lock() |
|
150 | src_lock = src_repo.lock() | |
151 | except lock.LockException: |
|
151 | except lock.LockException: | |
152 | copy = False |
|
152 | copy = False | |
153 |
|
153 | |||
154 | if copy: |
|
154 | if copy: | |
155 | # we lock here to avoid premature writing to the target |
|
155 | # we lock here to avoid premature writing to the target | |
156 | dest_lock = lock.lock(os.path.join(dest_path, ".hg", "lock")) |
|
156 | dest_lock = lock.lock(os.path.join(dest_path, ".hg", "lock")) | |
157 |
|
157 | |||
158 | # we need to remove the (empty) data dir in dest so copyfiles |
|
158 | # we need to remove the (empty) data dir in dest so copyfiles | |
159 | # can do its work |
|
159 | # can do its work | |
160 | os.rmdir(os.path.join(dest_path, ".hg", "data")) |
|
160 | os.rmdir(os.path.join(dest_path, ".hg", "data")) | |
161 | files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i" |
|
161 | files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i" | |
162 | for f in files.split(): |
|
162 | for f in files.split(): | |
163 | src = os.path.join(source, ".hg", f) |
|
163 | src = os.path.join(source, ".hg", f) | |
164 | dst = os.path.join(dest_path, ".hg", f) |
|
164 | dst = os.path.join(dest_path, ".hg", f) | |
165 | try: |
|
165 | try: | |
166 | util.copyfiles(src, dst) |
|
166 | util.copyfiles(src, dst) | |
167 | except OSError, inst: |
|
167 | except OSError, inst: | |
168 | if inst.errno != errno.ENOENT: |
|
168 | if inst.errno != errno.ENOENT: | |
169 | raise |
|
169 | raise | |
170 |
|
170 | |||
171 | # we need to re-init the repo after manually copying the data |
|
171 | # we need to re-init the repo after manually copying the data | |
172 | # into it |
|
172 | # into it | |
173 | dest_repo = repository(ui, dest) |
|
173 | dest_repo = repository(ui, dest) | |
174 |
|
174 | |||
175 | else: |
|
175 | else: | |
176 | revs = None |
|
176 | revs = None | |
177 | if rev: |
|
177 | if rev: | |
178 | if not src_repo.local(): |
|
178 | if not src_repo.local(): | |
179 | raise util.Abort(_("clone by revision not supported yet " |
|
179 | raise util.Abort(_("clone by revision not supported yet " | |
180 | "for remote repositories")) |
|
180 | "for remote repositories")) | |
181 | revs = [src_repo.lookup(r) for r in rev] |
|
181 | revs = [src_repo.lookup(r) for r in rev] | |
182 |
|
182 | |||
183 | if dest_repo.local(): |
|
183 | if dest_repo.local(): | |
184 | dest_repo.clone(src_repo, heads=revs, stream=stream) |
|
184 | dest_repo.clone(src_repo, heads=revs, stream=stream) | |
185 | elif src_repo.local(): |
|
185 | elif src_repo.local(): | |
186 | src_repo.push(dest_repo, revs=revs) |
|
186 | src_repo.push(dest_repo, revs=revs) | |
187 | else: |
|
187 | else: | |
188 | raise util.Abort(_("clone from remote to remote not supported")) |
|
188 | raise util.Abort(_("clone from remote to remote not supported")) | |
189 |
|
189 | |||
190 | if src_lock: |
|
190 | if src_lock: | |
191 | src_lock.release() |
|
191 | src_lock.release() | |
192 |
|
192 | |||
193 | if dest_repo.local(): |
|
193 | if dest_repo.local(): | |
194 | fp = dest_repo.opener("hgrc", "w", text=True) |
|
194 | fp = dest_repo.opener("hgrc", "w", text=True) | |
195 | fp.write("[paths]\n") |
|
195 | fp.write("[paths]\n") | |
196 | fp.write("default = %s\n" % abspath) |
|
196 | fp.write("default = %s\n" % abspath) | |
197 | fp.close() |
|
197 | fp.close() | |
198 |
|
198 | |||
199 | if dest_lock: |
|
199 | if dest_lock: | |
200 | dest_lock.release() |
|
200 | dest_lock.release() | |
201 |
|
201 | |||
202 | if update: |
|
202 | if update: | |
203 | _merge.update(dest_repo, dest_repo.changelog.tip()) |
|
203 | _merge.update(dest_repo, dest_repo.changelog.tip()) | |
204 | if dir_cleanup: |
|
204 | if dir_cleanup: | |
205 | dir_cleanup.close() |
|
205 | dir_cleanup.close() | |
206 |
|
206 | |||
207 | return src_repo, dest_repo |
|
207 | return src_repo, dest_repo | |
208 |
|
208 | |||
209 | def update(repo, node): |
|
209 | def update(repo, node): | |
210 | """update the working directory to node, merging linear changes""" |
|
210 | """update the working directory to node, merging linear changes""" | |
211 | return _merge.update(repo, node) |
|
211 | return _merge.update(repo, node) | |
212 |
|
212 | |||
213 | def clean(repo, node, wlock=None, show_stats=True): |
|
213 | def clean(repo, node, wlock=None, show_stats=True): | |
214 | """forcibly switch the working directory to node, clobbering changes""" |
|
214 | """forcibly switch the working directory to node, clobbering changes""" | |
215 | return _merge.update(repo, node, force=True, wlock=wlock, |
|
215 | return _merge.update(repo, node, force=True, wlock=wlock, | |
216 | show_stats=show_stats) |
|
216 | show_stats=show_stats) | |
217 |
|
217 | |||
218 | def merge(repo, node, force=None, remind=True, wlock=None): |
|
218 | def merge(repo, node, force=None, remind=True, wlock=None): | |
219 | """branch merge with node, resolving changes""" |
|
219 | """branch merge with node, resolving changes""" | |
220 | return _merge.update(repo, node, branchmerge=True, force |
|
220 | return _merge.update(repo, node, branchmerge=True, force=force, | |
221 | remind=remind, wlock=wlock) |
|
221 | remind=remind, wlock=wlock) | |
222 |
|
222 | |||
223 | def revert(repo, node, choose, wlock): |
|
223 | def revert(repo, node, choose, wlock): | |
224 | """revert changes to revision in node without updating dirstate""" |
|
224 | """revert changes to revision in node without updating dirstate""" | |
225 | return _merge.update(repo, node, force=True, partial=choose, |
|
225 | return _merge.update(repo, node, force=True, partial=choose, | |
226 | show_stats=False, wlock=wlock) |
|
226 | show_stats=False, wlock=wlock) | |
227 |
|
227 | |||
228 | def verify(repo): |
|
228 | def verify(repo): | |
229 | """verify the consistency of a repository""" |
|
229 | """verify the consistency of a repository""" | |
230 | return _verify.verify(repo) |
|
230 | return _verify.verify(repo) |
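
hg.py, shown above, is the front-end module that maps URL schemes to repository classes and wraps merge.py behind repository(), clone(), update(), clean(), merge(), revert() and verify(). The following is a minimal usage sketch, illustrative only and not part of this changeset; it assumes the "from mercurial import hg, ui" module layout of this era of Mercurial, and the destination path is hypothetical.

    # Sketch only: drive the hg.py entry points shown above.
    from mercurial import hg, ui

    u = ui.ui()

    # _lookup() consults the 'schemes' table, so a local path, an http:// URL
    # or an ssh:// URL all resolve to the right repository class here.
    repo = hg.repository(u, ".")

    # clone() returns the (source, destination) pair described in its
    # docstring; passing rev implies a pull-based clone, and update=True
    # checks out the tip of the new copy.  The dest path is hypothetical.
    src, dest = hg.clone(u, ".", dest="/tmp/hg-copy", update=True)
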
@@ -1,334 +1,337 @@
1 | # merge.py - directory-level update/merge handling for Mercurial |
|
1 | # merge.py - directory-level update/merge handling for Mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2006 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2006 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms |
|
5 | # This software may be used and distributed according to the terms | |
6 | # of the GNU General Public License, incorporated herein by reference. |
|
6 | # of the GNU General Public License, incorporated herein by reference. | |
7 |
|
7 | |||
8 | from node import * |
|
8 | from node import * | |
9 | from i18n import gettext as _ |
|
9 | from i18n import gettext as _ | |
10 | from demandload import * |
|
10 | from demandload import * | |
11 | demandload(globals(), "util os tempfile") |
|
11 | demandload(globals(), "util os tempfile") | |
12 |
|
12 | |||
13 | def merge3(repo, fn, my, other, p1, p2): |
|
13 | def merge3(repo, fn, my, other, p1, p2): | |
14 | """perform a 3-way merge in the working directory""" |
|
14 | """perform a 3-way merge in the working directory""" | |
15 |
|
15 | |||
16 | def temp(prefix, node): |
|
16 | def temp(prefix, node): | |
17 | pre = "%s~%s." % (os.path.basename(fn), prefix) |
|
17 | pre = "%s~%s." % (os.path.basename(fn), prefix) | |
18 | (fd, name) = tempfile.mkstemp(prefix=pre) |
|
18 | (fd, name) = tempfile.mkstemp(prefix=pre) | |
19 | f = os.fdopen(fd, "wb") |
|
19 | f = os.fdopen(fd, "wb") | |
20 | repo.wwrite(fn, fl.read(node), f) |
|
20 | repo.wwrite(fn, fl.read(node), f) | |
21 | f.close() |
|
21 | f.close() | |
22 | return name |
|
22 | return name | |
23 |
|
23 | |||
24 | fl = repo.file(fn) |
|
24 | fl = repo.file(fn) | |
25 | base = fl.ancestor(my, other) |
|
25 | base = fl.ancestor(my, other) | |
26 | a = repo.wjoin(fn) |
|
26 | a = repo.wjoin(fn) | |
27 | b = temp("base", base) |
|
27 | b = temp("base", base) | |
28 | c = temp("other", other) |
|
28 | c = temp("other", other) | |
29 |
|
29 | |||
30 | repo.ui.note(_("resolving %s\n") % fn) |
|
30 | repo.ui.note(_("resolving %s\n") % fn) | |
31 | repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") % |
|
31 | repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") % | |
32 | (fn, short(my), short(other), short(base))) |
|
32 | (fn, short(my), short(other), short(base))) | |
33 |
|
33 | |||
34 | cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge") |
|
34 | cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge") | |
35 | or "hgmerge") |
|
35 | or "hgmerge") | |
36 | r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root, |
|
36 | r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root, | |
37 | environ={'HG_FILE': fn, |
|
37 | environ={'HG_FILE': fn, | |
38 | 'HG_MY_NODE': p1, |
|
38 | 'HG_MY_NODE': p1, | |
39 | 'HG_OTHER_NODE': p2, |
|
39 | 'HG_OTHER_NODE': p2, | |
40 | 'HG_FILE_MY_NODE': hex(my), |
|
40 | 'HG_FILE_MY_NODE': hex(my), | |
41 | 'HG_FILE_OTHER_NODE': hex(other), |
|
41 | 'HG_FILE_OTHER_NODE': hex(other), | |
42 | 'HG_FILE_BASE_NODE': hex(base)}) |
|
42 | 'HG_FILE_BASE_NODE': hex(base)}) | |
43 | if r: |
|
43 | if r: | |
44 | repo.ui.warn(_("merging %s failed!\n") % fn) |
|
44 | repo.ui.warn(_("merging %s failed!\n") % fn) | |
45 |
|
45 | |||
46 | os.unlink(b) |
|
46 | os.unlink(b) | |
47 | os.unlink(c) |
|
47 | os.unlink(c) | |
48 | return r |
|
48 | return r | |
49 |
|
49 | |||
50 | def update(repo, node, branchmerge=False, force=False, partial=None, |
|
50 | def update(repo, node, branchmerge=False, force=False, partial=None, | |
51 |
|
51 | wlock=None, show_stats=True, remind=True): | |
|
52 | ||||
|
53 | overwrite = force and not branchmerge | |||
|
54 | forcemerge = force and branchmerge | |||
52 |
|
55 | |||
53 | if not wlock: |
|
56 | if not wlock: | |
54 | wlock = repo.wlock() |
|
57 | wlock = repo.wlock() | |
55 |
|
58 | |||
56 | ### check phase |
|
59 | ### check phase | |
57 |
|
60 | |||
58 | pl = repo.dirstate.parents() |
|
61 | pl = repo.dirstate.parents() | |
59 |
if not |
|
62 | if not overwrite and pl[1] != nullid: | |
60 | raise util.Abort(_("outstanding uncommitted merges")) |
|
63 | raise util.Abort(_("outstanding uncommitted merges")) | |
61 |
|
64 | |||
62 | p1, p2 = pl[0], node |
|
65 | p1, p2 = pl[0], node | |
63 | pa = repo.changelog.ancestor(p1, p2) |
|
66 | pa = repo.changelog.ancestor(p1, p2) | |
64 |
|
67 | |||
65 | # is there a linear path from p1 to p2? |
|
68 | # is there a linear path from p1 to p2? | |
66 | linear_path = (pa == p1 or pa == p2) |
|
69 | linear_path = (pa == p1 or pa == p2) | |
67 | if branchmerge and linear_path: |
|
70 | if branchmerge and linear_path: | |
68 | raise util.Abort(_("there is nothing to merge, just use " |
|
71 | raise util.Abort(_("there is nothing to merge, just use " | |
69 | "'hg update' or look at 'hg heads'")) |
|
72 | "'hg update' or look at 'hg heads'")) | |
70 |
|
73 | |||
71 | if not |
|
74 | if not overwrite and not linear_path and not branchmerge: | |
72 | raise util.Abort(_(" |
|
75 | raise util.Abort(_("update spans branches, use 'hg merge' " | |
73 | "or 'hg update -C' to lose changes")) |
|
76 | "or 'hg update -C' to lose changes")) | |
74 |
|
77 | |||
75 | modified, added, removed, deleted, unknown = repo.changes() |
|
78 | modified, added, removed, deleted, unknown = repo.changes() | |
76 | if branchmerge and not forcemerge: |
|
79 | if branchmerge and not forcemerge: | |
77 | if modified or added or removed: |
|
80 | if modified or added or removed: | |
78 | raise util.Abort(_("outstanding uncommitted changes")) |
|
81 | raise util.Abort(_("outstanding uncommitted changes")) | |
79 |
|
82 | |||
80 | m1n = repo.changelog.read(p1)[0] |
|
83 | m1n = repo.changelog.read(p1)[0] | |
81 | m2n = repo.changelog.read(p2)[0] |
|
84 | m2n = repo.changelog.read(p2)[0] | |
82 | man = repo.manifest.ancestor(m1n, m2n) |
|
85 | man = repo.manifest.ancestor(m1n, m2n) | |
83 | m1 = repo.manifest.read(m1n) |
|
86 | m1 = repo.manifest.read(m1n) | |
84 | mf1 = repo.manifest.readflags(m1n) |
|
87 | mf1 = repo.manifest.readflags(m1n) | |
85 | m2 = repo.manifest.read(m2n).copy() |
|
88 | m2 = repo.manifest.read(m2n).copy() | |
86 | mf2 = repo.manifest.readflags(m2n) |
|
89 | mf2 = repo.manifest.readflags(m2n) | |
87 | ma = repo.manifest.read(man) |
|
90 | ma = repo.manifest.read(man) | |
88 | mfa = repo.manifest.readflags(man) |
|
91 | mfa = repo.manifest.readflags(man) | |
89 |
|
92 | |||
90 | if not forcemerge and not |
|
93 | if not forcemerge and not overwrite: | |
91 | for f in unknown: |
|
94 | for f in unknown: | |
92 | if f in m2: |
|
95 | if f in m2: | |
93 | t1 = repo.wread(f) |
|
96 | t1 = repo.wread(f) | |
94 | t2 = repo.file(f).read(m2[f]) |
|
97 | t2 = repo.file(f).read(m2[f]) | |
95 | if cmp(t1, t2) != 0: |
|
98 | if cmp(t1, t2) != 0: | |
96 | raise util.Abort(_("'%s' already exists in the working" |
|
99 | raise util.Abort(_("'%s' already exists in the working" | |
97 | " dir and differs from remote") % f) |
|
100 | " dir and differs from remote") % f) | |
98 |
|
101 | |||
99 | # resolve the manifest to determine which files |
|
102 | # resolve the manifest to determine which files | |
100 | # we care about merging |
|
103 | # we care about merging | |
101 | repo.ui.note(_("resolving manifests\n")) |
|
104 | repo.ui.note(_("resolving manifests\n")) | |
102 |
repo.ui.debug(_(" |
|
105 | repo.ui.debug(_(" overwrite %s branchmerge %s partial %s linear %s\n") % | |
103 |
( |
|
106 | (overwrite, branchmerge, partial and True or False, linear_path)) | |
104 | repo.ui.debug(_(" ancestor %s local %s remote %s\n") % |
|
107 | repo.ui.debug(_(" ancestor %s local %s remote %s\n") % | |
105 | (short(man), short(m1n), short(m2n))) |
|
108 | (short(man), short(m1n), short(m2n))) | |
106 |
|
109 | |||
107 | merge = {} |
|
110 | merge = {} | |
108 | get = {} |
|
111 | get = {} | |
109 | remove = [] |
|
112 | remove = [] | |
110 |
|
113 | |||
111 | # construct a working dir manifest |
|
114 | # construct a working dir manifest | |
112 | mw = m1.copy() |
|
115 | mw = m1.copy() | |
113 | mfw = mf1.copy() |
|
116 | mfw = mf1.copy() | |
114 | umap = dict.fromkeys(unknown) |
|
117 | umap = dict.fromkeys(unknown) | |
115 |
|
118 | |||
116 | for f in added + modified + unknown: |
|
119 | for f in added + modified + unknown: | |
117 | mw[f] = "" |
|
120 | mw[f] = "" | |
118 | mfw[f] = util.is_exec(repo.wjoin(f), mfw.get(f, False)) |
|
121 | mfw[f] = util.is_exec(repo.wjoin(f), mfw.get(f, False)) | |
119 |
|
122 | |||
120 | for f in deleted + removed: |
|
123 | for f in deleted + removed: | |
121 | if f in mw: |
|
124 | if f in mw: | |
122 | del mw[f] |
|
125 | del mw[f] | |
123 |
|
126 | |||
124 | # If we're jumping between revisions (as opposed to merging), |
|
127 | # If we're jumping between revisions (as opposed to merging), | |
125 | # and if neither the working directory nor the target rev has |
|
128 | # and if neither the working directory nor the target rev has | |
126 | # the file, then we need to remove it from the dirstate, to |
|
129 | # the file, then we need to remove it from the dirstate, to | |
127 | # prevent the dirstate from listing the file when it is no |
|
130 | # prevent the dirstate from listing the file when it is no | |
128 | # longer in the manifest. |
|
131 | # longer in the manifest. | |
129 | if not partial and linear_path and f not in m2: |
|
132 | if not partial and linear_path and f not in m2: | |
130 | repo.dirstate.forget((f,)) |
|
133 | repo.dirstate.forget((f,)) | |
131 |
|
134 | |||
132 | # Compare manifests |
|
135 | # Compare manifests | |
133 | for f, n in mw.iteritems(): |
|
136 | for f, n in mw.iteritems(): | |
134 | if partial and not partial(f): |
|
137 | if partial and not partial(f): | |
135 | continue |
|
138 | continue | |
136 | if f in m2: |
|
139 | if f in m2: | |
137 | s = 0 |
|
140 | s = 0 | |
138 |
|
141 | |||
139 | # is the wfile new since m1, and match m2? |
|
142 | # is the wfile new since m1, and match m2? | |
140 | if f not in m1: |
|
143 | if f not in m1: | |
141 | t1 = repo.wread(f) |
|
144 | t1 = repo.wread(f) | |
142 | t2 = repo.file(f).read(m2[f]) |
|
145 | t2 = repo.file(f).read(m2[f]) | |
143 | if cmp(t1, t2) == 0: |
|
146 | if cmp(t1, t2) == 0: | |
144 | n = m2[f] |
|
147 | n = m2[f] | |
145 | del t1, t2 |
|
148 | del t1, t2 | |
146 |
|
149 | |||
147 | # are files different? |
|
150 | # are files different? | |
148 | if n != m2[f]: |
|
151 | if n != m2[f]: | |
149 | a = ma.get(f, nullid) |
|
152 | a = ma.get(f, nullid) | |
150 | # are both different from the ancestor? |
|
153 | # are both different from the ancestor? | |
151 | if n != a and m2[f] != a: |
|
154 | if n != a and m2[f] != a: | |
152 | repo.ui.debug(_(" %s versions differ, resolve\n") % f) |
|
155 | repo.ui.debug(_(" %s versions differ, resolve\n") % f) | |
153 | # merge executable bits |
|
156 | # merge executable bits | |
154 | # "if we changed or they changed, change in merge" |
|
157 | # "if we changed or they changed, change in merge" | |
155 | a, b, c = mfa.get(f, 0), mfw[f], mf2[f] |
|
158 | a, b, c = mfa.get(f, 0), mfw[f], mf2[f] | |
156 | mode = ((a^b) | (a^c)) ^ a |
|
159 | mode = ((a^b) | (a^c)) ^ a | |
157 | merge[f] = (m1.get(f, nullid), m2[f], mode) |
|
160 | merge[f] = (m1.get(f, nullid), m2[f], mode) | |
158 | s = 1 |
|
161 | s = 1 | |
159 | # are we clobbering? |
|
162 | # are we clobbering? | |
160 | # is remote's version newer? |
|
163 | # is remote's version newer? | |
161 | # or are we going back in time? |
|
164 | # or are we going back in time? | |
162 | elif |
|
165 | elif overwrite or m2[f] != a or (p2 == pa and mw[f] == m1[f]): | |
163 | repo.ui.debug(_(" remote %s is newer, get\n") % f) |
|
166 | repo.ui.debug(_(" remote %s is newer, get\n") % f) | |
164 | get[f] = m2[f] |
|
167 | get[f] = m2[f] | |
165 | s = 1 |
|
168 | s = 1 | |
166 | elif f in umap or f in added: |
|
169 | elif f in umap or f in added: | |
167 | # this unknown file is the same as the checkout |
|
170 | # this unknown file is the same as the checkout | |
168 | # we need to reset the dirstate if the file was added |
|
171 | # we need to reset the dirstate if the file was added | |
169 | get[f] = m2[f] |
|
172 | get[f] = m2[f] | |
170 |
|
173 | |||
171 | if not s and mfw[f] != mf2[f]: |
|
174 | if not s and mfw[f] != mf2[f]: | |
172 | if |
|
175 | if overwrite: | |
173 | repo.ui.debug(_(" updating permissions for %s\n") % f) |
|
176 | repo.ui.debug(_(" updating permissions for %s\n") % f) | |
174 | util.set_exec(repo.wjoin(f), mf2[f]) |
|
177 | util.set_exec(repo.wjoin(f), mf2[f]) | |
175 | else: |
|
178 | else: | |
176 | a, b, c = mfa.get(f, 0), mfw[f], mf2[f] |
|
179 | a, b, c = mfa.get(f, 0), mfw[f], mf2[f] | |
177 | mode = ((a^b) | (a^c)) ^ a |
|
180 | mode = ((a^b) | (a^c)) ^ a | |
178 | if mode != b: |
|
181 | if mode != b: | |
179 | repo.ui.debug(_(" updating permissions for %s\n") |
|
182 | repo.ui.debug(_(" updating permissions for %s\n") | |
180 | % f) |
|
183 | % f) | |
181 | util.set_exec(repo.wjoin(f), mode) |
|
184 | util.set_exec(repo.wjoin(f), mode) | |
182 | del m2[f] |
|
185 | del m2[f] | |
183 | elif f in ma: |
|
186 | elif f in ma: | |
184 | if n != ma[f]: |
|
187 | if n != ma[f]: | |
185 | r = _("d") |
|
188 | r = _("d") | |
186 | if not |
|
189 | if not overwrite and (linear_path or branchmerge): | |
187 | r = repo.ui.prompt( |
|
190 | r = repo.ui.prompt( | |
188 | (_(" local changed %s which remote deleted\n") % f) + |
|
191 | (_(" local changed %s which remote deleted\n") % f) + | |
189 | _("(k)eep or (d)elete?"), _("[kd]"), _("k")) |
|
192 | _("(k)eep or (d)elete?"), _("[kd]"), _("k")) | |
190 | if r == _("d"): |
|
193 | if r == _("d"): | |
191 | remove.append(f) |
|
194 | remove.append(f) | |
192 | else: |
|
195 | else: | |
193 | repo.ui.debug(_("other deleted %s\n") % f) |
|
196 | repo.ui.debug(_("other deleted %s\n") % f) | |
194 | remove.append(f) # other deleted it |
|
197 | remove.append(f) # other deleted it | |
195 | else: |
|
198 | else: | |
196 | # file is created on branch or in working directory |
|
199 | # file is created on branch or in working directory | |
197 | if |
|
200 | if overwrite and f not in umap: | |
198 | repo.ui.debug(_("remote deleted %s, clobbering\n") % f) |
|
201 | repo.ui.debug(_("remote deleted %s, clobbering\n") % f) | |
199 | remove.append(f) |
|
202 | remove.append(f) | |
200 | elif n == m1.get(f, nullid): # same as parent |
|
203 | elif n == m1.get(f, nullid): # same as parent | |
201 | if p2 == pa: # going backwards? |
|
204 | if p2 == pa: # going backwards? | |
202 | repo.ui.debug(_("remote deleted %s\n") % f) |
|
205 | repo.ui.debug(_("remote deleted %s\n") % f) | |
203 | remove.append(f) |
|
206 | remove.append(f) | |
204 | else: |
|
207 | else: | |
205 | repo.ui.debug(_("local modified %s, keeping\n") % f) |
|
208 | repo.ui.debug(_("local modified %s, keeping\n") % f) | |
206 | else: |
|
209 | else: | |
207 | repo.ui.debug(_("working dir created %s, keeping\n") % f) |
|
210 | repo.ui.debug(_("working dir created %s, keeping\n") % f) | |
208 |
|
211 | |||
209 | for f, n in m2.iteritems(): |
|
212 | for f, n in m2.iteritems(): | |
210 | if partial and not partial(f): |
|
213 | if partial and not partial(f): | |
211 | continue |
|
214 | continue | |
212 | if f[0] == "/": |
|
215 | if f[0] == "/": | |
213 | continue |
|
216 | continue | |
214 | if f in ma and n != ma[f]: |
|
217 | if f in ma and n != ma[f]: | |
215 | r = _("k") |
|
218 | r = _("k") | |
216 | if not |
|
219 | if not overwrite and (linear_path or branchmerge): | |
217 | r = repo.ui.prompt( |
|
220 | r = repo.ui.prompt( | |
218 | (_("remote changed %s which local deleted\n") % f) + |
|
221 | (_("remote changed %s which local deleted\n") % f) + | |
219 | _("(k)eep or (d)elete?"), _("[kd]"), _("k")) |
|
222 | _("(k)eep or (d)elete?"), _("[kd]"), _("k")) | |
220 | if r == _("k"): |
|
223 | if r == _("k"): | |
221 | get[f] = n |
|
224 | get[f] = n | |
222 | elif f not in ma: |
|
225 | elif f not in ma: | |
223 | repo.ui.debug(_("remote created %s\n") % f) |
|
226 | repo.ui.debug(_("remote created %s\n") % f) | |
224 | get[f] = n |
|
227 | get[f] = n | |
225 | else: |
|
228 | else: | |
226 | if |
|
229 | if overwrite or p2 == pa: # going backwards? | |
227 | repo.ui.debug(_("local deleted %s, recreating\n") % f) |
|
230 | repo.ui.debug(_("local deleted %s, recreating\n") % f) | |
228 | get[f] = n |
|
231 | get[f] = n | |
229 | else: |
|
232 | else: | |
230 | repo.ui.debug(_("local deleted %s\n") % f) |
|
233 | repo.ui.debug(_("local deleted %s\n") % f) | |
231 |
|
234 | |||
232 | del mw, m1, m2, ma |
|
235 | del mw, m1, m2, ma | |
233 |
|
236 | |||
234 | if |
|
237 | if overwrite: | |
235 | for f in merge: |
|
238 | for f in merge: | |
236 | get[f] = merge[f][1] |
|
239 | get[f] = merge[f][1] | |
237 | merge = {} |
|
240 | merge = {} | |
238 |
|
241 | |||
239 | if linear_path or |
|
242 | if linear_path or overwrite: | |
240 | # we don't need to do any magic, just jump to the new rev |
|
243 | # we don't need to do any magic, just jump to the new rev | |
241 | p1, p2 = p2, nullid |
|
244 | p1, p2 = p2, nullid | |
242 |
|
245 | |||
243 | xp1 = hex(p1) |
|
246 | xp1 = hex(p1) | |
244 | xp2 = hex(p2) |
|
247 | xp2 = hex(p2) | |
245 | if p2 == nullid: xxp2 = '' |
|
248 | if p2 == nullid: xxp2 = '' | |
246 | else: xxp2 = xp2 |
|
249 | else: xxp2 = xp2 | |
247 |
|
250 | |||
248 | repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2) |
|
251 | repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2) | |
249 |
|
252 | |||
250 | # get the files we don't need to change |
|
253 | # get the files we don't need to change | |
251 | files = get.keys() |
|
254 | files = get.keys() | |
252 | files.sort() |
|
255 | files.sort() | |
253 | for f in files: |
|
256 | for f in files: | |
254 | if f[0] == "/": |
|
257 | if f[0] == "/": | |
255 | continue |
|
258 | continue | |
256 | repo.ui.note(_("getting %s\n") % f) |
|
259 | repo.ui.note(_("getting %s\n") % f) | |
257 | t = repo.file(f).read(get[f]) |
|
260 | t = repo.file(f).read(get[f]) | |
258 | repo.wwrite(f, t) |
|
261 | repo.wwrite(f, t) | |
259 | util.set_exec(repo.wjoin(f), mf2[f]) |
|
262 | util.set_exec(repo.wjoin(f), mf2[f]) | |
260 | if not partial: |
|
263 | if not partial: | |
261 | if branchmerge: |
|
264 | if branchmerge: | |
262 | repo.dirstate.update([f], 'n', st_mtime=-1) |
|
265 | repo.dirstate.update([f], 'n', st_mtime=-1) | |
263 | else: |
|
266 | else: | |
264 | repo.dirstate.update([f], 'n') |
|
267 | repo.dirstate.update([f], 'n') | |
265 |
|
268 | |||
266 | # merge the tricky bits |
|
269 | # merge the tricky bits | |
267 | unresolved = [] |
|
270 | unresolved = [] | |
268 | files = merge.keys() |
|
271 | files = merge.keys() | |
269 | files.sort() |
|
272 | files.sort() | |
270 | for f in files: |
|
273 | for f in files: | |
271 | repo.ui.status(_("merging %s\n") % f) |
|
274 | repo.ui.status(_("merging %s\n") % f) | |
272 | my, other, flag = merge[f] |
|
275 | my, other, flag = merge[f] | |
273 | ret = merge3(repo, f, my, other, xp1, xp2) |
|
276 | ret = merge3(repo, f, my, other, xp1, xp2) | |
274 | if ret: |
|
277 | if ret: | |
275 | unresolved.append(f) |
|
278 | unresolved.append(f) | |
276 | util.set_exec(repo.wjoin(f), flag) |
|
279 | util.set_exec(repo.wjoin(f), flag) | |
277 | if not partial: |
|
280 | if not partial: | |
278 | if branchmerge: |
|
281 | if branchmerge: | |
279 | # We've done a branch merge, mark this file as merged |
|
282 | # We've done a branch merge, mark this file as merged | |
280 | # so that we properly record the merger later |
|
283 | # so that we properly record the merger later | |
281 | repo.dirstate.update([f], 'm') |
|
284 | repo.dirstate.update([f], 'm') | |
282 | else: |
|
285 | else: | |
283 | # We've update-merged a locally modified file, so |
|
286 | # We've update-merged a locally modified file, so | |
284 | # we set the dirstate to emulate a normal checkout |
|
287 | # we set the dirstate to emulate a normal checkout | |
285 | # of that file some time in the past. Thus our |
|
288 | # of that file some time in the past. Thus our | |
286 | # merge will appear as a normal local file |
|
289 | # merge will appear as a normal local file | |
287 | # modification. |
|
290 | # modification. | |
288 | f_len = len(repo.file(f).read(other)) |
|
291 | f_len = len(repo.file(f).read(other)) | |
289 | repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1) |
|
292 | repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1) | |
290 |
|
293 | |||
291 | remove.sort() |
|
294 | remove.sort() | |
292 | for f in remove: |
|
295 | for f in remove: | |
293 | repo.ui.note(_("removing %s\n") % f) |
|
296 | repo.ui.note(_("removing %s\n") % f) | |
294 | util.audit_path(f) |
|
297 | util.audit_path(f) | |
295 | try: |
|
298 | try: | |
296 | util.unlink(repo.wjoin(f)) |
|
299 | util.unlink(repo.wjoin(f)) | |
297 | except OSError, inst: |
|
300 | except OSError, inst: | |
298 | if inst.errno != errno.ENOENT: |
|
301 | if inst.errno != errno.ENOENT: | |
299 | repo.ui.warn(_("update failed to remove %s: %s!\n") % |
|
302 | repo.ui.warn(_("update failed to remove %s: %s!\n") % | |
300 | (f, inst.strerror)) |
|
303 | (f, inst.strerror)) | |
301 | if not partial: |
|
304 | if not partial: | |
302 | if branchmerge: |
|
305 | if branchmerge: | |
303 | repo.dirstate.update(remove, 'r') |
|
306 | repo.dirstate.update(remove, 'r') | |
304 | else: |
|
307 | else: | |
305 | repo.dirstate.forget(remove) |
|
308 | repo.dirstate.forget(remove) | |
306 |
|
309 | |||
307 | if not partial: |
|
310 | if not partial: | |
308 | repo.dirstate.setparents(p1, p2) |
|
311 | repo.dirstate.setparents(p1, p2) | |
309 |
|
312 | |||
310 | if show_stats: |
|
313 | if show_stats: | |
311 | stats = ((len(get), _("updated")), |
|
314 | stats = ((len(get), _("updated")), | |
312 | (len(merge) - len(unresolved), _("merged")), |
|
315 | (len(merge) - len(unresolved), _("merged")), | |
313 | (len(remove), _("removed")), |
|
316 | (len(remove), _("removed")), | |
314 | (len(unresolved), _("unresolved"))) |
|
317 | (len(unresolved), _("unresolved"))) | |
315 | note = ", ".join([_("%d files %s") % s for s in stats]) |
|
318 | note = ", ".join([_("%d files %s") % s for s in stats]) | |
316 | repo.ui.status("%s\n" % note) |
|
319 | repo.ui.status("%s\n" % note) | |
317 | if not partial: |
|
320 | if not partial: | |
318 | if branchmerge: |
|
321 | if branchmerge: | |
319 | if unresolved: |
|
322 | if unresolved: | |
320 | repo.ui.status(_("There are unresolved merges," |
|
323 | repo.ui.status(_("There are unresolved merges," | |
321 | " you can redo the full merge using:\n" |
|
324 | " you can redo the full merge using:\n" | |
322 | " hg update -C %s\n" |
|
325 | " hg update -C %s\n" | |
323 | " hg merge %s\n" |
|
326 | " hg merge %s\n" | |
324 | % (repo.changelog.rev(p1), |
|
327 | % (repo.changelog.rev(p1), | |
325 | repo.changelog.rev(p2)))) |
|
328 | repo.changelog.rev(p2)))) | |
326 | elif remind: |
|
329 | elif remind: | |
327 | repo.ui.status(_("(branch merge, don't forget to commit)\n")) |
|
330 | repo.ui.status(_("(branch merge, don't forget to commit)\n")) | |
328 | elif unresolved: |
|
331 | elif unresolved: | |
329 | repo.ui.status(_("There are unresolved merges with" |
|
332 | repo.ui.status(_("There are unresolved merges with" | |
330 | " locally modified files.\n")) |
|
333 | " locally modified files.\n")) | |
331 |
|
334 | |||
332 | repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved)) |
|
335 | repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved)) | |
333 | return len(unresolved) |
|
336 | return len(unresolved) | |
334 |
|
337 |
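
The merge.py side of the change stops using the force flag directly inside update() and instead derives two booleans from it. Below is a small sketch of that split, illustrative only; the helper name split_force is made up for the example, and the wrapper mapping restates what the hg.py hunk above already shows.

    # Sketch only: how update() now interprets force, per new lines 53-54 above.
    def split_force(force, branchmerge):
        overwrite = force and not branchmerge   # plain update with force: clobber the working dir
        forcemerge = force and branchmerge      # branch merge with force: allow uncommitted changes
        return overwrite, forcemerge

    # Mapping back to the hg.py wrappers in the first hunk:
    #   hg.clean(repo, node)         -> update(force=True)                 -> overwrite
    #   hg.merge(repo, node, force)  -> update(branchmerge=True, force=f)  -> forcemerge when force is set
    #   hg.revert(repo, node, ...)   -> update(force=True, partial=choose) -> overwrite, limited by partial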