hg.py
@@ -1,230 +1,230 @@
|
1 | 1 | # hg.py - repository classes for mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms |
|
6 | 6 | # of the GNU General Public License, incorporated herein by reference. |
|
7 | 7 | |
|
8 | 8 | from node import * |
|
9 | 9 | from repo import * |
|
10 | 10 | from demandload import * |
|
11 | 11 | from i18n import gettext as _ |
|
12 | 12 | demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo") |
|
13 | 13 | demandload(globals(), "errno lock os shutil util merge@_merge verify@_verify") |
|
14 | 14 | |
|
15 | 15 | def _local(path): |
|
16 | 16 | return (os.path.isfile(path and util.drop_scheme('file', path)) and |
|
17 | 17 | bundlerepo or localrepo) |
|
18 | 18 | |
|
19 | 19 | schemes = { |
|
20 | 20 | 'bundle': bundlerepo, |
|
21 | 21 | 'file': _local, |
|
22 | 22 | 'hg': httprepo, |
|
23 | 23 | 'http': httprepo, |
|
24 | 24 | 'https': httprepo, |
|
25 | 25 | 'old-http': statichttprepo, |
|
26 | 26 | 'ssh': sshrepo, |
|
27 | 27 | 'static-http': statichttprepo, |
|
28 | 28 | } |
|
29 | 29 | |
|
30 | 30 | def _lookup(path): |
|
31 | 31 | scheme = 'file' |
|
32 | 32 | if path: |
|
33 | 33 | c = path.find(':') |
|
34 | 34 | if c > 0: |
|
35 | 35 | scheme = path[:c] |
|
36 | 36 | thing = schemes.get(scheme) or schemes['file'] |
|
37 | 37 | try: |
|
38 | 38 | return thing(path) |
|
39 | 39 | except TypeError: |
|
40 | 40 | return thing |
|
41 | 41 | |
|
42 | 42 | def islocal(repo): |
|
43 | 43 | '''return true if repo or path is local''' |
|
44 | 44 | if isinstance(repo, str): |
|
45 | 45 | try: |
|
46 | 46 | return _lookup(repo).islocal(repo) |
|
47 | 47 | except AttributeError: |
|
48 | 48 | return False |
|
49 | 49 | return repo.local() |
|
50 | 50 | |
|
51 | 51 | def repository(ui, path=None, create=False): |
|
52 | 52 | """return a repository object for the specified path""" |
|
53 | 53 | return _lookup(path).instance(ui, path, create) |
|
54 | 54 | |
|
55 | 55 | def defaultdest(source): |
|
56 | 56 | '''return default destination of clone if none is given''' |
|
57 | 57 | return os.path.basename(os.path.normpath(source)) |
|
58 | 58 | |
|
59 | 59 | def clone(ui, source, dest=None, pull=False, rev=None, update=True, |
|
60 | 60 | stream=False): |
|
61 | 61 | """Make a copy of an existing repository. |
|
62 | 62 | |
|
63 | 63 | Create a copy of an existing repository in a new directory. The |
|
64 | 64 | source and destination are URLs, as passed to the repository |
|
65 | 65 | function. Returns a pair of repository objects, the source and |
|
66 | 66 | newly created destination. |
|
67 | 67 | |
|
68 | 68 | The location of the source is added to the new repository's |
|
69 | 69 | .hg/hgrc file, as the default to be used for future pulls and |
|
70 | 70 | pushes. |
|
71 | 71 | |
|
72 | 72 | If an exception is raised, the partly cloned/updated destination |
|
73 | 73 | repository will be deleted. |
|
74 | 74 | |
|
75 | 75 | Arguments: |
|
76 | 76 | |
|
77 | 77 | source: repository object or URL |
|
78 | 78 | |
|
79 | 79 | dest: URL of destination repository to create (defaults to base |
|
80 | 80 | name of source repository) |
|
81 | 81 | |
|
82 | 82 | pull: always pull from source repository, even in local case |
|
83 | 83 | |
|
84 | 84 | stream: stream raw data uncompressed from repository (fast over |
|
85 | 85 | LAN, slow over WAN) |
|
86 | 86 | |
|
87 | 87 | rev: revision to clone up to (implies pull=True) |
|
88 | 88 | |
|
89 | 89 | update: update working directory after clone completes, if |
|
90 | 90 | destination is local repository |
|
91 | 91 | """ |
|
92 | 92 | if isinstance(source, str): |
|
93 | 93 | src_repo = repository(ui, source) |
|
94 | 94 | else: |
|
95 | 95 | src_repo = source |
|
96 | 96 | source = src_repo.url() |
|
97 | 97 | |
|
98 | 98 | if dest is None: |
|
99 | 99 | dest = defaultdest(source) |
|
100 | 100 | |
|
101 | 101 | def localpath(path): |
|
102 | 102 | if path.startswith('file://'): |
|
103 | 103 | return path[7:] |
|
104 | 104 | if path.startswith('file:'): |
|
105 | 105 | return path[5:] |
|
106 | 106 | return path |
|
107 | 107 | |
|
108 | 108 | dest = localpath(dest) |
|
109 | 109 | source = localpath(source) |
|
110 | 110 | |
|
111 | 111 | if os.path.exists(dest): |
|
112 | 112 | raise util.Abort(_("destination '%s' already exists"), dest) |
|
113 | 113 | |
|
114 | 114 | class DirCleanup(object): |
|
115 | 115 | def __init__(self, dir_): |
|
116 | 116 | self.rmtree = shutil.rmtree |
|
117 | 117 | self.dir_ = dir_ |
|
118 | 118 | def close(self): |
|
119 | 119 | self.dir_ = None |
|
120 | 120 | def __del__(self): |
|
121 | 121 | if self.dir_: |
|
122 | 122 | self.rmtree(self.dir_, True) |
|
123 | 123 | |
|
124 | 124 | dest_repo = None |
|
125 | 125 | try: |
|
126 | 126 | dest_repo = repository(ui, dest) |
|
127 | 127 | raise util.Abort(_("destination '%s' already exists." % dest)) |
|
128 | 128 | except RepoError: |
|
129 | 129 | dest_repo = repository(ui, dest, create=True) |
|
130 | 130 | |
|
131 | 131 | dest_path = None |
|
132 | 132 | dir_cleanup = None |
|
133 | 133 | if dest_repo.local(): |
|
134 | 134 | dest_path = os.path.realpath(dest_repo.root) |
|
135 | 135 | dir_cleanup = DirCleanup(dest_path) |
|
136 | 136 | |
|
137 | 137 | abspath = source |
|
138 | 138 | copy = False |
|
139 | 139 | if src_repo.local() and dest_repo.local(): |
|
140 | 140 | abspath = os.path.abspath(source) |
|
141 | 141 | copy = not pull and not rev |
|
142 | 142 | |
|
143 | 143 | src_lock, dest_lock = None, None |
|
144 | 144 | if copy: |
|
145 | 145 | try: |
|
146 | 146 | # we use a lock here because if we race with commit, we |
|
147 | 147 | # can end up with extra data in the cloned revlogs that's |
|
148 | 148 | # not pointed to by changesets, thus causing verify to |
|
149 | 149 | # fail |
|
150 | 150 | src_lock = src_repo.lock() |
|
151 | 151 | except lock.LockException: |
|
152 | 152 | copy = False |
|
153 | 153 | |
|
154 | 154 | if copy: |
|
155 | 155 | # we lock here to avoid premature writing to the target |
|
156 | 156 | dest_lock = lock.lock(os.path.join(dest_path, ".hg", "lock")) |
|
157 | 157 | |
|
158 | 158 | # we need to remove the (empty) data dir in dest so copyfiles |
|
159 | 159 | # can do its work |
|
160 | 160 | os.rmdir(os.path.join(dest_path, ".hg", "data")) |
|
161 | 161 | files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i" |
|
162 | 162 | for f in files.split(): |
|
163 | 163 | src = os.path.join(source, ".hg", f) |
|
164 | 164 | dst = os.path.join(dest_path, ".hg", f) |
|
165 | 165 | try: |
|
166 | 166 | util.copyfiles(src, dst) |
|
167 | 167 | except OSError, inst: |
|
168 | 168 | if inst.errno != errno.ENOENT: |
|
169 | 169 | raise |
|
170 | 170 | |
|
171 | 171 | # we need to re-init the repo after manually copying the data |
|
172 | 172 | # into it |
|
173 | 173 | dest_repo = repository(ui, dest) |
|
174 | 174 | |
|
175 | 175 | else: |
|
176 | 176 | revs = None |
|
177 | 177 | if rev: |
|
178 | 178 | if not src_repo.local(): |
|
179 | 179 | raise util.Abort(_("clone by revision not supported yet " |
|
180 | 180 | "for remote repositories")) |
|
181 | 181 | revs = [src_repo.lookup(r) for r in rev] |
|
182 | 182 | |
|
183 | 183 | if dest_repo.local(): |
|
184 | 184 | dest_repo.clone(src_repo, heads=revs, stream=stream) |
|
185 | 185 | elif src_repo.local(): |
|
186 | 186 | src_repo.push(dest_repo, revs=revs) |
|
187 | 187 | else: |
|
188 | 188 | raise util.Abort(_("clone from remote to remote not supported")) |
|
189 | 189 | |
|
190 | 190 | if src_lock: |
|
191 | 191 | src_lock.release() |
|
192 | 192 | |
|
193 | 193 | if dest_repo.local(): |
|
194 | 194 | fp = dest_repo.opener("hgrc", "w", text=True) |
|
195 | 195 | fp.write("[paths]\n") |
|
196 | 196 | fp.write("default = %s\n" % abspath) |
|
197 | 197 | fp.close() |
|
198 | 198 | |
|
199 | 199 | if dest_lock: |
|
200 | 200 | dest_lock.release() |
|
201 | 201 | |
|
202 | 202 | if update: |
|
203 | 203 | _merge.update(dest_repo, dest_repo.changelog.tip()) |
|
204 | 204 | if dir_cleanup: |
|
205 | 205 | dir_cleanup.close() |
|
206 | 206 | |
|
207 | 207 | return src_repo, dest_repo |
|
208 | 208 | |
|
209 | 209 | def update(repo, node): |
|
210 | 210 | """update the working directory to node, merging linear changes""" |
|
211 | 211 | return _merge.update(repo, node) |
|
212 | 212 | |
|
213 | 213 | def clean(repo, node, wlock=None, show_stats=True): |
|
214 | 214 | """forcibly switch the working directory to node, clobbering changes""" |
|
215 | 215 | return _merge.update(repo, node, force=True, wlock=wlock, |
|
216 | 216 | show_stats=show_stats) |
|
217 | 217 | |
|
218 | 218 | def merge(repo, node, force=None, remind=True, wlock=None): |
|
219 | 219 | """branch merge with node, resolving changes""" |
|
220 | | return _merge.update(repo, node, branchmerge=True, forcemerge=force, |
| 220 | return _merge.update(repo, node, branchmerge=True, force=force, |
|
221 | 221 | remind=remind, wlock=wlock) |
|
222 | 222 | |
|
223 | 223 | def revert(repo, node, choose, wlock): |
|
224 | 224 | """revert changes to revision in node without updating dirstate""" |
|
225 | 225 | return _merge.update(repo, node, force=True, partial=choose, |
|
226 | 226 | show_stats=False, wlock=wlock) |
|
227 | 227 | |
|
228 | 228 | def verify(repo): |
|
229 | 229 | """verify the consistency of a repository""" |
|
230 | 230 | return _verify.verify(repo) |
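
The wrappers above form hg.py's module-level API (repository(), clone(), update(), merge(), verify()). Below is a minimal usage sketch; the URL and destination path are placeholders, and the import layout (mercurial.ui, mercurial.hg) is assumed from this era of the code base rather than taken from the diff itself.

from mercurial import ui as uimod, hg

u = uimod.ui()

# clone() returns (source repo, destination repo), per its docstring above;
# update=False skips the working-directory checkout so we can do it ourselves.
src_repo, dest_repo = hg.clone(u, "http://example.com/repo", dest="repo-copy",
                               pull=False, rev=None, update=False)

# update() brings the working directory to a node, merging linear changes.
hg.update(dest_repo, dest_repo.changelog.tip())
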
merge.py
@@ -1,334 +1,337 @@
|
1 | 1 | # merge.py - directory-level update/merge handling for Mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2006 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms |
|
6 | 6 | # of the GNU General Public License, incorporated herein by reference. |
|
7 | 7 | |
|
8 | 8 | from node import * |
|
9 | 9 | from i18n import gettext as _ |
|
10 | 10 | from demandload import * |
|
11 | 11 | demandload(globals(), "util os tempfile") |
|
12 | 12 | |
|
13 | 13 | def merge3(repo, fn, my, other, p1, p2): |
|
14 | 14 | """perform a 3-way merge in the working directory""" |
|
15 | 15 | |
|
16 | 16 | def temp(prefix, node): |
|
17 | 17 | pre = "%s~%s." % (os.path.basename(fn), prefix) |
|
18 | 18 | (fd, name) = tempfile.mkstemp(prefix=pre) |
|
19 | 19 | f = os.fdopen(fd, "wb") |
|
20 | 20 | repo.wwrite(fn, fl.read(node), f) |
|
21 | 21 | f.close() |
|
22 | 22 | return name |
|
23 | 23 | |
|
24 | 24 | fl = repo.file(fn) |
|
25 | 25 | base = fl.ancestor(my, other) |
|
26 | 26 | a = repo.wjoin(fn) |
|
27 | 27 | b = temp("base", base) |
|
28 | 28 | c = temp("other", other) |
|
29 | 29 | |
|
30 | 30 | repo.ui.note(_("resolving %s\n") % fn) |
|
31 | 31 | repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") % |
|
32 | 32 | (fn, short(my), short(other), short(base))) |
|
33 | 33 | |
|
34 | 34 | cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge") |
|
35 | 35 | or "hgmerge") |
|
36 | 36 | r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root, |
|
37 | 37 | environ={'HG_FILE': fn, |
|
38 | 38 | 'HG_MY_NODE': p1, |
|
39 | 39 | 'HG_OTHER_NODE': p2, |
|
40 | 40 | 'HG_FILE_MY_NODE': hex(my), |
|
41 | 41 | 'HG_FILE_OTHER_NODE': hex(other), |
|
42 | 42 | 'HG_FILE_BASE_NODE': hex(base)}) |
|
43 | 43 | if r: |
|
44 | 44 | repo.ui.warn(_("merging %s failed!\n") % fn) |
|
45 | 45 | |
|
46 | 46 | os.unlink(b) |
|
47 | 47 | os.unlink(c) |
|
48 | 48 | return r |
|
49 | 49 | |
|
50 | 50 | def update(repo, node, branchmerge=False, force=False, partial=None, |
|
51 | | forcemerge=False, wlock=None, show_stats=True, remind=True): |
| 51 | wlock=None, show_stats=True, remind=True): |
| 52 | |
| 53 | overwrite = force and not branchmerge |
| 54 | forcemerge = force and branchmerge |
|
52 | 55 | |
|
53 | 56 | if not wlock: |
|
54 | 57 | wlock = repo.wlock() |
|
55 | 58 | |
|
56 | 59 | ### check phase |
|
57 | 60 | |
|
58 | 61 | pl = repo.dirstate.parents() |
|
59 | | if not force and pl[1] != nullid: |
| 62 | if not overwrite and pl[1] != nullid: |
|
60 | 63 | raise util.Abort(_("outstanding uncommitted merges")) |
|
61 | 64 | |
|
62 | 65 | p1, p2 = pl[0], node |
|
63 | 66 | pa = repo.changelog.ancestor(p1, p2) |
|
64 | 67 | |
|
65 | 68 | # is there a linear path from p1 to p2? |
|
66 | 69 | linear_path = (pa == p1 or pa == p2) |
|
67 | 70 | if branchmerge and linear_path: |
|
68 | 71 | raise util.Abort(_("there is nothing to merge, just use " |
|
69 | 72 | "'hg update' or look at 'hg heads'")) |
|
70 | 73 | |
|
71 | | if not force and not linear_path and not branchmerge: |
72 | | raise util.Abort(_("
| 74 | if not overwrite and not linear_path and not branchmerge: |
| 75 | raise util.Abort(_("update spans branches, use 'hg merge' " |
|
73 | 76 | "or 'hg update -C' to lose changes")) |
|
74 | 77 | |
|
75 | 78 | modified, added, removed, deleted, unknown = repo.changes() |
|
76 | 79 | if branchmerge and not forcemerge: |
|
77 | 80 | if modified or added or removed: |
|
78 | 81 | raise util.Abort(_("outstanding uncommitted changes")) |
|
79 | 82 | |
|
80 | 83 | m1n = repo.changelog.read(p1)[0] |
|
81 | 84 | m2n = repo.changelog.read(p2)[0] |
|
82 | 85 | man = repo.manifest.ancestor(m1n, m2n) |
|
83 | 86 | m1 = repo.manifest.read(m1n) |
|
84 | 87 | mf1 = repo.manifest.readflags(m1n) |
|
85 | 88 | m2 = repo.manifest.read(m2n).copy() |
|
86 | 89 | mf2 = repo.manifest.readflags(m2n) |
|
87 | 90 | ma = repo.manifest.read(man) |
|
88 | 91 | mfa = repo.manifest.readflags(man) |
|
89 | 92 | |
|
90 | | if not forcemerge and not force: |
| 93 | if not forcemerge and not overwrite: |
|
91 | 94 | for f in unknown: |
|
92 | 95 | if f in m2: |
|
93 | 96 | t1 = repo.wread(f) |
|
94 | 97 | t2 = repo.file(f).read(m2[f]) |
|
95 | 98 | if cmp(t1, t2) != 0: |
|
96 | 99 | raise util.Abort(_("'%s' already exists in the working" |
|
97 | 100 | " dir and differs from remote") % f) |
|
98 | 101 | |
|
99 | 102 | # resolve the manifest to determine which files |
|
100 | 103 | # we care about merging |
|
101 | 104 | repo.ui.note(_("resolving manifests\n")) |
|
102 | | repo.ui.debug(_(" force %s branchmerge %s partial %s linear %s\n") % |
103 | | (force, branchmerge, partial and True or False, linear_path)) |
| 105 | repo.ui.debug(_(" overwrite %s branchmerge %s partial %s linear %s\n") % |
| 106 | (overwrite, branchmerge, partial and True or False, linear_path)) |
|
104 | 107 | repo.ui.debug(_(" ancestor %s local %s remote %s\n") % |
|
105 | 108 | (short(man), short(m1n), short(m2n))) |
|
106 | 109 | |
|
107 | 110 | merge = {} |
|
108 | 111 | get = {} |
|
109 | 112 | remove = [] |
|
110 | 113 | |
|
111 | 114 | # construct a working dir manifest |
|
112 | 115 | mw = m1.copy() |
|
113 | 116 | mfw = mf1.copy() |
|
114 | 117 | umap = dict.fromkeys(unknown) |
|
115 | 118 | |
|
116 | 119 | for f in added + modified + unknown: |
|
117 | 120 | mw[f] = "" |
|
118 | 121 | mfw[f] = util.is_exec(repo.wjoin(f), mfw.get(f, False)) |
|
119 | 122 | |
|
120 | 123 | for f in deleted + removed: |
|
121 | 124 | if f in mw: |
|
122 | 125 | del mw[f] |
|
123 | 126 | |
|
124 | 127 | # If we're jumping between revisions (as opposed to merging), |
|
125 | 128 | # and if neither the working directory nor the target rev has |
|
126 | 129 | # the file, then we need to remove it from the dirstate, to |
|
127 | 130 | # prevent the dirstate from listing the file when it is no |
|
128 | 131 | # longer in the manifest. |
|
129 | 132 | if not partial and linear_path and f not in m2: |
|
130 | 133 | repo.dirstate.forget((f,)) |
|
131 | 134 | |
|
132 | 135 | # Compare manifests |
|
133 | 136 | for f, n in mw.iteritems(): |
|
134 | 137 | if partial and not partial(f): |
|
135 | 138 | continue |
|
136 | 139 | if f in m2: |
|
137 | 140 | s = 0 |
|
138 | 141 | |
|
139 | 142 | # is the wfile new since m1, and match m2? |
|
140 | 143 | if f not in m1: |
|
141 | 144 | t1 = repo.wread(f) |
|
142 | 145 | t2 = repo.file(f).read(m2[f]) |
|
143 | 146 | if cmp(t1, t2) == 0: |
|
144 | 147 | n = m2[f] |
|
145 | 148 | del t1, t2 |
|
146 | 149 | |
|
147 | 150 | # are files different? |
|
148 | 151 | if n != m2[f]: |
|
149 | 152 | a = ma.get(f, nullid) |
|
150 | 153 | # are both different from the ancestor? |
|
151 | 154 | if n != a and m2[f] != a: |
|
152 | 155 | repo.ui.debug(_(" %s versions differ, resolve\n") % f) |
|
153 | 156 | # merge executable bits |
|
154 | 157 | # "if we changed or they changed, change in merge" |
|
155 | 158 | a, b, c = mfa.get(f, 0), mfw[f], mf2[f] |
|
156 | 159 | mode = ((a^b) | (a^c)) ^ a |
|
157 | 160 | merge[f] = (m1.get(f, nullid), m2[f], mode) |
|
158 | 161 | s = 1 |
|
159 | 162 | # are we clobbering? |
|
160 | 163 | # is remote's version newer? |
|
161 | 164 | # or are we going back in time? |
|
162 | | elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]): |
| 165 | elif overwrite or m2[f] != a or (p2 == pa and mw[f] == m1[f]): |
|
163 | 166 | repo.ui.debug(_(" remote %s is newer, get\n") % f) |
|
164 | 167 | get[f] = m2[f] |
|
165 | 168 | s = 1 |
|
166 | 169 | elif f in umap or f in added: |
|
167 | 170 | # this unknown file is the same as the checkout |
|
168 | 171 | # we need to reset the dirstate if the file was added |
|
169 | 172 | get[f] = m2[f] |
|
170 | 173 | |
|
171 | 174 | if not s and mfw[f] != mf2[f]: |
|
172 | | if force: |
| 175 | if overwrite: |
|
173 | 176 | repo.ui.debug(_(" updating permissions for %s\n") % f) |
|
174 | 177 | util.set_exec(repo.wjoin(f), mf2[f]) |
|
175 | 178 | else: |
|
176 | 179 | a, b, c = mfa.get(f, 0), mfw[f], mf2[f] |
|
177 | 180 | mode = ((a^b) | (a^c)) ^ a |
|
178 | 181 | if mode != b: |
|
179 | 182 | repo.ui.debug(_(" updating permissions for %s\n") |
|
180 | 183 | % f) |
|
181 | 184 | util.set_exec(repo.wjoin(f), mode) |
|
182 | 185 | del m2[f] |
|
183 | 186 | elif f in ma: |
|
184 | 187 | if n != ma[f]: |
|
185 | 188 | r = _("d") |
|
186 | | if not force and (linear_path or branchmerge): |
| 189 | if not overwrite and (linear_path or branchmerge): |
|
187 | 190 | r = repo.ui.prompt( |
|
188 | 191 | (_(" local changed %s which remote deleted\n") % f) + |
|
189 | 192 | _("(k)eep or (d)elete?"), _("[kd]"), _("k")) |
|
190 | 193 | if r == _("d"): |
|
191 | 194 | remove.append(f) |
|
192 | 195 | else: |
|
193 | 196 | repo.ui.debug(_("other deleted %s\n") % f) |
|
194 | 197 | remove.append(f) # other deleted it |
|
195 | 198 | else: |
|
196 | 199 | # file is created on branch or in working directory |
|
197 | | if force and f not in umap: |
| 200 | if overwrite and f not in umap: |
|
198 | 201 | repo.ui.debug(_("remote deleted %s, clobbering\n") % f) |
|
199 | 202 | remove.append(f) |
|
200 | 203 | elif n == m1.get(f, nullid): # same as parent |
|
201 | 204 | if p2 == pa: # going backwards? |
|
202 | 205 | repo.ui.debug(_("remote deleted %s\n") % f) |
|
203 | 206 | remove.append(f) |
|
204 | 207 | else: |
|
205 | 208 | repo.ui.debug(_("local modified %s, keeping\n") % f) |
|
206 | 209 | else: |
|
207 | 210 | repo.ui.debug(_("working dir created %s, keeping\n") % f) |
|
208 | 211 | |
|
209 | 212 | for f, n in m2.iteritems(): |
|
210 | 213 | if partial and not partial(f): |
|
211 | 214 | continue |
|
212 | 215 | if f[0] == "/": |
|
213 | 216 | continue |
|
214 | 217 | if f in ma and n != ma[f]: |
|
215 | 218 | r = _("k") |
|
216 | | if not force and (linear_path or branchmerge): |
| 219 | if not overwrite and (linear_path or branchmerge): |
|
217 | 220 | r = repo.ui.prompt( |
|
218 | 221 | (_("remote changed %s which local deleted\n") % f) + |
|
219 | 222 | _("(k)eep or (d)elete?"), _("[kd]"), _("k")) |
|
220 | 223 | if r == _("k"): |
|
221 | 224 | get[f] = n |
|
222 | 225 | elif f not in ma: |
|
223 | 226 | repo.ui.debug(_("remote created %s\n") % f) |
|
224 | 227 | get[f] = n |
|
225 | 228 | else: |
|
226 | | if force or p2 == pa: # going backwards? |
| 229 | if overwrite or p2 == pa: # going backwards? |
|
227 | 230 | repo.ui.debug(_("local deleted %s, recreating\n") % f) |
|
228 | 231 | get[f] = n |
|
229 | 232 | else: |
|
230 | 233 | repo.ui.debug(_("local deleted %s\n") % f) |
|
231 | 234 | |
|
232 | 235 | del mw, m1, m2, ma |
|
233 | 236 | |
|
234 | | if force: |
| 237 | if overwrite: |
|
235 | 238 | for f in merge: |
|
236 | 239 | get[f] = merge[f][1] |
|
237 | 240 | merge = {} |
|
238 | 241 | |
|
239 | | if linear_path or force: |
| 242 | if linear_path or overwrite: |
|
240 | 243 | # we don't need to do any magic, just jump to the new rev |
|
241 | 244 | p1, p2 = p2, nullid |
|
242 | 245 | |
|
243 | 246 | xp1 = hex(p1) |
|
244 | 247 | xp2 = hex(p2) |
|
245 | 248 | if p2 == nullid: xxp2 = '' |
|
246 | 249 | else: xxp2 = xp2 |
|
247 | 250 | |
|
248 | 251 | repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2) |
|
249 | 252 | |
|
250 | 253 | # get the files we don't need to change |
|
251 | 254 | files = get.keys() |
|
252 | 255 | files.sort() |
|
253 | 256 | for f in files: |
|
254 | 257 | if f[0] == "/": |
|
255 | 258 | continue |
|
256 | 259 | repo.ui.note(_("getting %s\n") % f) |
|
257 | 260 | t = repo.file(f).read(get[f]) |
|
258 | 261 | repo.wwrite(f, t) |
|
259 | 262 | util.set_exec(repo.wjoin(f), mf2[f]) |
|
260 | 263 | if not partial: |
|
261 | 264 | if branchmerge: |
|
262 | 265 | repo.dirstate.update([f], 'n', st_mtime=-1) |
|
263 | 266 | else: |
|
264 | 267 | repo.dirstate.update([f], 'n') |
|
265 | 268 | |
|
266 | 269 | # merge the tricky bits |
|
267 | 270 | unresolved = [] |
|
268 | 271 | files = merge.keys() |
|
269 | 272 | files.sort() |
|
270 | 273 | for f in files: |
|
271 | 274 | repo.ui.status(_("merging %s\n") % f) |
|
272 | 275 | my, other, flag = merge[f] |
|
273 | 276 | ret = merge3(repo, f, my, other, xp1, xp2) |
|
274 | 277 | if ret: |
|
275 | 278 | unresolved.append(f) |
|
276 | 279 | util.set_exec(repo.wjoin(f), flag) |
|
277 | 280 | if not partial: |
|
278 | 281 | if branchmerge: |
|
279 | 282 | # We've done a branch merge, mark this file as merged |
|
280 | 283 | # so that we properly record the merger later |
|
281 | 284 | repo.dirstate.update([f], 'm') |
|
282 | 285 | else: |
|
283 | 286 | # We've update-merged a locally modified file, so |
|
284 | 287 | # we set the dirstate to emulate a normal checkout |
|
285 | 288 | # of that file some time in the past. Thus our |
|
286 | 289 | # merge will appear as a normal local file |
|
287 | 290 | # modification. |
|
288 | 291 | f_len = len(repo.file(f).read(other)) |
|
289 | 292 | repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1) |
|
290 | 293 | |
|
291 | 294 | remove.sort() |
|
292 | 295 | for f in remove: |
|
293 | 296 | repo.ui.note(_("removing %s\n") % f) |
|
294 | 297 | util.audit_path(f) |
|
295 | 298 | try: |
|
296 | 299 | util.unlink(repo.wjoin(f)) |
|
297 | 300 | except OSError, inst: |
|
298 | 301 | if inst.errno != errno.ENOENT: |
|
299 | 302 | repo.ui.warn(_("update failed to remove %s: %s!\n") % |
|
300 | 303 | (f, inst.strerror)) |
|
301 | 304 | if not partial: |
|
302 | 305 | if branchmerge: |
|
303 | 306 | repo.dirstate.update(remove, 'r') |
|
304 | 307 | else: |
|
305 | 308 | repo.dirstate.forget(remove) |
|
306 | 309 | |
|
307 | 310 | if not partial: |
|
308 | 311 | repo.dirstate.setparents(p1, p2) |
|
309 | 312 | |
|
310 | 313 | if show_stats: |
|
311 | 314 | stats = ((len(get), _("updated")), |
|
312 | 315 | (len(merge) - len(unresolved), _("merged")), |
|
313 | 316 | (len(remove), _("removed")), |
|
314 | 317 | (len(unresolved), _("unresolved"))) |
|
315 | 318 | note = ", ".join([_("%d files %s") % s for s in stats]) |
|
316 | 319 | repo.ui.status("%s\n" % note) |
|
317 | 320 | if not partial: |
|
318 | 321 | if branchmerge: |
|
319 | 322 | if unresolved: |
|
320 | 323 | repo.ui.status(_("There are unresolved merges," |
|
321 | 324 | " you can redo the full merge using:\n" |
|
322 | 325 | " hg update -C %s\n" |
|
323 | 326 | " hg merge %s\n" |
|
324 | 327 | % (repo.changelog.rev(p1), |
|
325 | 328 | repo.changelog.rev(p2)))) |
|
326 | 329 | elif remind: |
|
327 | 330 | repo.ui.status(_("(branch merge, don't forget to commit)\n")) |
|
328 | 331 | elif unresolved: |
|
329 | 332 | repo.ui.status(_("There are unresolved merges with" |
|
330 | 333 | " locally modified files.\n")) |
|
331 | 334 | |
|
332 | 335 | repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved)) |
|
333 | 336 | return len(unresolved) |
|
334 | 337 |
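
The core of this change is that update() now derives its two behavior flags from force and branchmerge instead of taking forcemerge as a parameter. The sketch below illustrates how those flags fall out for the wrappers defined in hg.py above; the derive() helper is hypothetical and exists only to restate the two assignments added at the top of update().

def derive(branchmerge, force):
    # the same two assignments this patch adds to update()
    overwrite = force and not branchmerge
    forcemerge = force and branchmerge
    return overwrite, forcemerge

print(derive(branchmerge=False, force=True))   # clean()/revert(): (True, False)
print(derive(branchmerge=True, force=True))    # merge(..., force=True): (False, True)
print(derive(branchmerge=True, force=False))   # plain merge(): (False, False)
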