branching: merge stable into default
Raphaël Gomès
r51356:f57f5ab0 merge default
@@ -0,0 +1,196 b''
1 ============================================================================
2 Pulling from modern to a non-general delta target (and other related checks)
3 ============================================================================
4
5 There are various issues that can arise when we update the code with modern
6 storage in mind while working on delta processing. So this file is meant for
7 various scenarios that might break in the future or have broken in the past.
8
9 Setup
10 =====
11
12 Create a modern server with an older clone
13
14 $ cat << EOF >> $HGRCPATH
15 > [command-templates]
16 > log = "{desc} {tags}\n"
17 > EOF
18
19 $ hg init server
20
21 $ hg clone --quiet --pull server client --config format.usegeneraldelta=no
22 $ hg debugformat -R client | grep generaldelta
23 generaldelta: no
24
25 Create some complex history
26
27 $ cd server
28 $ hg debugbuilddag -n '.+3:a$.+5:b/a:k$.+7:c/b:l$.+6:d/a:m<k+6/l+1/m'
29 $ hg log -G
30 o r36 tip
31 |\
32 | o r35
33 | |
34 | o r34
35 | |\
36 | | o r33
37 | | |
38 | | o r32
39 | | |
40 | | o r31
41 | | |
42 | | o r30
43 | | |
44 | | o r29
45 | | |
46 | | o r28
47 | | |
48 o | | r27 m
49 |\ \ \
50 | o | | r26 d
51 | | | |
52 | o | | r25
53 | | | |
54 | o | | r24
55 | | | |
56 | o | | r23
57 | | | |
58 | o | | r22
59 | | | |
60 | o | | r21
61 | | | |
62 | o | | r20
63 | / /
64 | o | r19 l
65 | |\ \
66 | | o | r18 c
67 | | | |
68 | | o | r17
69 | | | |
70 | | o | r16
71 | | | |
72 | | o | r15
73 | | | |
74 | | o | r14
75 | | | |
76 | | o | r13
77 | | | |
78 | | o | r12
79 | | | |
80 | | o | r11
81 | | /
82 +---o r10 k
83 | |/
84 | o r9 b
85 | |
86 | o r8
87 | |
88 | o r7
89 | |
90 | o r6
91 | |
92 | o r5
93 | |
94 | o r4
95 |
96 o r3 a
97 |
98 o r2
99 |
100 o r1
101 |
102 o r0
103
104 $ cd ..
105
106
107 Pull it in the client
108 =====================
109
110
111 pull with default value
112 -----------------------
113
114 $ cp -R client client-simple-pull
115 $ hg -R client-simple-pull pull
116 pulling from $TESTTMP/server
117 requesting all changes
118 adding changesets
119 adding manifests
120 adding file changes
121 added 37 changesets with 37 changes to 37 files
122 new changesets 61246295ee1e:b4b117cbbcf3
123 (run 'hg update' to get a working copy)
124 $ hg -R client-simple-pull verify
125 checking changesets
126 checking manifests
127 crosschecking files in changesets and manifests
128 checking files
129 checking dirstate
130 checked 37 changesets with 37 changes to 37 files
131
132
133 pull with "no-reuse" policy
134 ---------------------------
135
136 $ cp -R client client-no-reuse
137 $ hg -R client-no-reuse pull --config paths.default:pulled-delta-reuse-policy=no-reuse
138 pulling from $TESTTMP/server
139 requesting all changes
140 adding changesets
141 adding manifests
142 adding file changes
143 added 37 changesets with 37 changes to 37 files
144 new changesets 61246295ee1e:b4b117cbbcf3
145 (run 'hg update' to get a working copy)
146 $ hg -R client-no-reuse verify
147 checking changesets
148 checking manifests
149 crosschecking files in changesets and manifests
150 checking files
151 checking dirstate
152 checked 37 changesets with 37 changes to 37 files
153
154
155 pull with "try-base" policy
156 ---------------------------
157
158 $ cp -R client client-try-base
159 $ hg -R client-try-base pull --config paths.default:pulled-delta-reuse-policy=try-base
160 pulling from $TESTTMP/server
161 requesting all changes
162 adding changesets
163 adding manifests
164 adding file changes
165 added 37 changesets with 37 changes to 37 files
166 new changesets 61246295ee1e:b4b117cbbcf3
167 (run 'hg update' to get a working copy)
168 $ hg -R client-try-base verify
169 checking changesets
170 checking manifests
171 crosschecking files in changesets and manifests
172 checking files
173 checking dirstate
174 checked 37 changesets with 37 changes to 37 files
175
176
177 pull with "forced" policy
178 -------------------------
179
180 $ cp -R client client-forced
181 $ hg -R client-forced pull --config paths.default:pulled-delta-reuse-policy=forced
182 pulling from $TESTTMP/server
183 requesting all changes
184 adding changesets
185 adding manifests
186 adding file changes
187 added 37 changesets with 37 changes to 37 files
188 new changesets 61246295ee1e:b4b117cbbcf3
189 (run 'hg update' to get a working copy)
190 $ hg -R client-forced verify
191 checking changesets
192 checking manifests
193 crosschecking files in changesets and manifests
194 checking files
195 checking dirstate
196 checked 37 changesets with 37 changes to 37 files
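
The four pulls above can also be replayed outside the test harness. The following is a rough driver sketch, not part of the test suite; it assumes it is run from a directory that already contains the `server` and `client` repositories created above and that `hg` is on PATH.

    import shutil
    import subprocess

    # replay the pulls above: default policy plus the three explicit ones
    POLICIES = [None, "no-reuse", "try-base", "forced"]

    for policy in POLICIES:
        name = "client-%s" % (policy or "simple-pull")
        shutil.copytree("client", name)
        cmd = ["hg", "-R", name, "pull"]
        if policy is not None:
            cmd.append("--config")
            cmd.append("paths.default:pulled-delta-reuse-policy=%s" % policy)
        subprocess.run(cmd, check=True)
        subprocess.run(["hg", "-R", name, "verify"], check=True)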
@@ -243,3 +243,4 b' 05de4896508e8ec387b33eb30d8aab78d1c8e9e4'
243 f14864fffdcab725d9eac6d4f4c07be05a35f59a 0 iQHNBAABCgA3FiEEH2b4zfZU6QXBHaBhoR4BzQ4F2VYFAmQc3KUZHGFscGhhcmVAcmFwaGFlbGdvbWVzLmRldgAKCRChHgHNDgXZVnYZDACh1Bcj8Yu3t8pO22SKWJnz8Ndw9Hvw+ifLaRxFUxKtqUYvy3CIl2qt8k7V13M25qw0061SKgcvNdjtkOhdmtFHNAbqryy0nK9oSZ2GfndmJfMxm9ixF/CcHrx+MmsklEz2woApViHW5PrmgKvZNsStQ5NM457Yx3B4nsT9b8t03NzdNiZRM+RZOkZ+4OdSbiB6hYuTqEFIi2YM+gfVM5Z7H8sEFBkUCtuwUjFGaWThZGGhAcqD5E7p/Lkjv4e4tzyHOzHDgdd+OCAkcbib6/E3Q1MlQ1x7CKpJ190T8R35CzAIMBVoTSI+Ov7OKw1OfGdeCvMVJsKUvqY3zrPawmJB6pG7GoVPEu5pU65H51U3Plq3GhsekUrKWY/BSHV9FOqpKZdnxOAllfWcjLYpbC/fM3l8uuQVcPAs89GvWKnDuE/NWCDYzDAYE++s/H4tP3Chv6yQbPSv/lbccst7OfLLDtXgRHIyEWLo392X3mWzhrkNtfJkBdi39uH9Aoh7pN0=
244 83ea6ce48b4fd09fb79c4e34cc5750c805699a53 0 iQHNBAABCgA3FiEEH2b4zfZU6QXBHaBhoR4BzQ4F2VYFAmQ3860ZHGFscGhhcmVAcmFwaGFlbGdvbWVzLmRldgAKCRChHgHNDgXZVk3gDACIIcQxKfis/r5UNj7SqyFhQxUCo8Njp7zdLFv3CSWFdFiOpQONI7Byt9KjwedUkUK9tqdb03V7W32ZSBTrNLM11uHY9E5Aknjoza4m+aIGbamEVRWIIHXjUZEMKS9QcY8ElbDvvPu/xdZjyTEjNNiuByUpPUcJXVzpKrHm8Wy3GWDliYBuu68mzFIX3JnZKscdK4EjCAfDysSwwfLeBMpd0Rk+SgwjDwyPWAAyU3yDPNmlUn8qTGHjXxU3vsHCXpoJWkfKmQ9n++23WEpM9vC8zx2TIy70+gFUvKG77+Ucv+djQxHRv0L6L5qUSBJukD3R3nml1xu6pUeioBHepRmTUWgPbHa/gQ+J2Pw+rPCK51x0EeT0SJjxUR2mmMLbk8N2efM35lEjF/sNxotTq17Sv9bjwXhue6BURxpQDEyOuSaS0IlF56ndXtE/4FX3H6zgU1+3jw5iBWajr1E04QjPlSOJO7nIKYM9Jq3VpHR7MiFwfT46pJEfw9pNgZX2b8o=
245 f952be90b0514a576dcc8bbe758ce3847faba9bb 0 iQHNBAABCgA3FiEEH2b4zfZU6QXBHaBhoR4BzQ4F2VYFAmQ+ZaoZHGFscGhhcmVAcmFwaGFlbGdvbWVzLmRldgAKCRChHgHNDgXZVuDOC/90SQ3UjXmByAaT5qr4bd3sVGt12lXlaKdyDxY0JMSKyHMUnb4YltHzNFxiUku10aRsRvJt5denTGeaOvAYbbXE7nbZJuyLD9rvfFTCe6EVx7kymCBwSbobKMzD79QHAFU7xu036gs7rmwyc++F4JF4IOrT4bjSYY5/8g0uLAHUexnn49QfQ5OYr325qShDFLjUZ7aH0yxA/gEr2MfXQmbIEc0eJJQXD1EhDkpSJFNIKzwWMOT1AhFk8kTlDqqbPnW7sDxTW+v/gGjAFYLHi8GMLEyrBQdEqytN7Pl9XOPXt/8RaDfIzYfl0OHxh2l1Y1MuH/PHrWO4PBPsr82QI2mxufYKuujpFMPr4PxXXl2g31OKhI8jJj+bHr62kGIOJCxZ8EPPGKXPGyoOuIVa0MeHmXxjb9kkj0SALjlaUvZrSENzRTsQXDNHQa+iDaITKLmItvLsaTEz9DJzGmI20shtJYcx4lqHsTgtMZfOtR5tmUknAFUUBZfUwvwULD4LmNI=
246 fc445f8abcf90b33db7c463816a1b3560681767f 0 iQHNBAABCgA3FiEEH2b4zfZU6QXBHaBhoR4BzQ4F2VYFAmRTok8ZHGFscGhhcmVAcmFwaGFlbGdvbWVzLmRldgAKCRChHgHNDgXZVpZ5DACBv33k//ovzSbyH5/q+Xhk3TqNRY8IDOjoEhvDyu0bJHsvygOGXLUtHpQPth1RA4/c+AVNJrUeFvT02sLqqP2d9oSA9HEAYpOuzwgr1A+1o+Q2GyfD4cElP6KfiEe8oyFVOB0rfBgWNei1C0nnrhChQr5dOPR63uAFhHzkEsgsTFS7ONxZ1DHbe7gRV8OMMf1MatAtRzRexQJCqyNv7WodQdrKtjHqPKtlWl20dbwTHhzeiZbtjiTe0CVXVsOqnA1DQkO/IaiKQrn3zWdGY5ABbqQ1K0ceLcej4NFOeLo9ZrShndU3BuFUa9Dq9bnPYOI9wMqGoDh/GdTZkZEzBy5PTokY3AJHblbub49pi8YTenFcPdtd/v71AaNi3TKa45ZNhYVkPmRETYweHkLs3CIrSyeiBwU4RGuQZVD/GujAQB5yhk0w+LPMzBsHruD4vsgXwIraCzQIIJTjgyxKuAJGdGNUFYyxEpUkgz5G6MFrBKe8HO69y3Pm/qDNZ2maV8k=
@@ -259,3 +259,4 b' 05de4896508e8ec387b33eb30d8aab78d1c8e9e4'
259 f14864fffdcab725d9eac6d4f4c07be05a35f59a 6.4
260 83ea6ce48b4fd09fb79c4e34cc5750c805699a53 6.4.1
261 f952be90b0514a576dcc8bbe758ce3847faba9bb 6.4.2
262 fc445f8abcf90b33db7c463816a1b3560681767f 6.4.3
@@ -31,6 +31,8 b''
 #define PATH_MAX 4096
 #endif
 
+extern char **environ;
+
 struct cmdserveropts {
 char sockname[PATH_MAX];
 char initsockname[PATH_MAX];
@@ -232,18 +234,12 b' static const char *gethgcmd(void)'
 hgcmd = "hg";
 #endif
 }
-/* Set $CHGHG to the path to the seleted hg executable if it wasn't
- * already set. This has the effect of ensuring that a new command
- * server will be spawned if the existing command server is running from
- * an executable at a different path. */
-if (setenv("CHGHG", hgcmd, 1) != 0)
-abortmsgerrno("failed to setenv");
 return hgcmd;
 }
 
-static void execcmdserver(const char *hgcmd, const struct cmdserveropts *opts)
+static void execcmdserver(const struct cmdserveropts *opts)
 {
-
+const char *hgcmd = gethgcmd();
 const char *baseargv[] = {
 hgcmd, "serve", "--no-profile", "--cmdserver",
 "chgunix", "--address", opts->initsockname, "--daemon-postexec",
@@ -380,16 +376,11 b' static hgclient_t *connectcmdserver(stru'
 
 debugmsg("start cmdserver at %s", opts->initsockname);
 
-/* Get the path to the hg executable before we fork because this
- * function might update the environment, and we want this to be
- * reflected in both the parent and child processes. */
-const char *hgcmd = gethgcmd();
-
 pid_t pid = fork();
 if (pid < 0)
 abortmsg("failed to fork cmdserver process");
 if (pid == 0) {
-execcmdserver(hgcmd, opts);
+execcmdserver(opts);
 } else {
 hgc = retryconnectcmdserver(opts, pid);
 }
@@ -523,6 +514,16 b' int main(int argc, const char *argv[])'
 }
 }
 
+/* Set $CHGHG to the path of the hg executable we intend to use. This
+ * is a no-op if $CHGHG was explicitly specified, but otherwise this
+ * ensures that we will spawn a new command server if we connect to an
+ * existing one running from a different executable. This should only
+ * be needed when chg is built with HGPATHREL since otherwise the
+ * hg executable used when CHGHG is absent should be deterministic.
+ */
+if (setenv("CHGHG", gethgcmd(), 1) != 0)
+abortmsgerrno("failed to setenv");
+
 hgclient_t *hgc;
 size_t retry = 0;
 while (1) {
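
As a rough Python analogue of the hunk above (chg itself is C; the helper below is illustrative only, not part of chg), the intent is: resolve the hg executable roughly the way gethgcmd() would, then pin it in $CHGHG so an existing command server started from a different binary is not reused.

    import os

    def pin_chghg(default_hg="hg"):
        # roughly mirrors the CHGHG -> HG -> built-in default lookup, then pins it
        hgcmd = os.environ.get("CHGHG") or os.environ.get("HG") or default_hg
        os.environ["CHGHG"] = hgcmd
        return hgcmd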
@@ -9,6 +9,13 b''
 Provides a command that runs configured tools on the contents of modified files,
 writing back any fixes to the working copy or replacing changesets.
 
+Fixer tools are run in the repository's root directory. This allows them to read
+configuration files from the working copy, or even write to the working copy.
+The working copy is not updated to match the revision being fixed. In fact,
+several revisions may be fixed in parallel. Writes to the working copy are not
+amended into the revision being fixed; fixer tools MUST always read content to
+be fixed from stdin, and write fixed file content back to stdout.
+
 Here is an example configuration that causes :hg:`fix` to apply automatic
 formatting fixes to modified lines in C++ code::
 
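
To make the stdin/stdout contract above concrete, here is a minimal example of a fixer tool (a hypothetical whitespace stripper, not shipped with Mercurial): it reads the content to fix from stdin and writes the fixed content to stdout, never touching the working copy. Such a script would then be registered through the extension's usual fix.<tool>:command and fix.<tool>:pattern options.

    #!/usr/bin/env python3
    import sys

    def main():
        data = sys.stdin.buffer.read()
        # "fix" the content: strip trailing whitespace from every line
        fixed = b"\n".join(line.rstrip() for line in data.split(b"\n"))
        sys.stdout.buffer.write(fixed)

    if __name__ == "__main__":
        main()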
@@ -113,13 +120,6 b' perform other post-fixing work. The supp'
 mapping fixer tool names to lists of metadata values returned from
 executions that modified a file. This aggregates the same metadata
 previously passed to the "postfixfile" hook.
-
-Fixer tools are run in the repository's root directory. This allows them to read
-configuration files from the working copy, or even write to the working copy.
-The working copy is not updated to match the revision being fixed. In fact,
-several revisions may be fixed in parallel. Writes to the working copy are not
-amended into the revision being fixed; fixer tools should always write fixed
-file content back to stdout as documented above.
 """
 
 
@@ -239,7 +239,8 b" usage = _(b'[OPTION]... [FILE]...')"
 def fix(ui, repo, *pats, **opts):
 """rewrite file content in changesets or working directory
 
-Runs any configured tools to fix the content of files. Only affects files
+Runs any configured tools to fix the content of files. (See
+:hg:`help -e fix` for details about configuring tools.) Only affects files
 with changes, unless file arguments are provided. Only affects changed lines
 of files, unless the --whole flag is used. Some tools may always affect the
 whole file regardless of --whole.
@@ -616,6 +616,10 b' coreconfigitem('
 b'bundle2.debug',
 default=False,
 )
+# which kind of delta to put in the bundled changegroup. Possible values:
+# - '': use default behavior
+# - p1: force to always use delta against p1
+# - full: force to always use full content
 coreconfigitem(
 b'devel',
 b'bundle.delta',
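
A minimal sketch of how a caller might read the option documented above; the helper name is made up and this is not the actual changegroup code.

    def forced_bundle_delta(ui):
        # one of b'' (default behavior), b'p1', or b'full', as listed above
        value = ui.config(b'devel', b'bundle.delta')
        if value not in (b'', b'p1', b'full'):
            raise ValueError(b'unsupported devel.bundle-delta: %s' % value)
        return value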
@@ -684,6 +684,15 b' def _candidategroups('
 yield None
 return
 
+if target_rev is None:
+    target_rev = len(revlog)
+
+if not revlog._generaldelta:
+    # before general delta, there is only one possible delta base
+    yield (target_rev - 1,)
+    yield None
+    return
+
 if (
 cachedelta is not None
 and nullrev == cachedelta[0]
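
The gist of the added early-return can be shown with a self-contained toy (not Mercurial's API): without general delta the only legal base is the previous revision, so the candidate search collapses to a single entry.

    def candidate_bases(general_delta, target_rev, p1, p2, nullrev=-1):
        if not general_delta:
            # pre-generaldelta revlogs may only delta against target_rev - 1
            yield target_rev - 1
            return
        # with general delta, try the parents, closest (highest rev) first
        for parent in sorted((p for p in (p1, p2) if p != nullrev), reverse=True):
            yield parent

    print(list(candidate_bases(False, 10, 4, 7)))  # [9]
    print(list(candidate_bases(True, 10, 4, 7)))   # [7, 4]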
@@ -716,9 +725,7 b' def _candidategroups('
 group = []
 for rev in temptative:
 # skip over empty delta (no need to include them in a chain)
-while revlog._generaldelta and not (
-rev == nullrev or rev in tested or deltalength(rev)
-):
+while not (rev == nullrev or rev in tested or deltalength(rev)):
 tested.add(rev)
 rev = deltaparent(rev)
 # no need to try a delta against nullrev, this will be done as a
@@ -901,27 +908,27 b' def _rawgroups(revlog, p1, p2, cachedelt'
 
 The group order aims at providing fast or small candidates first.
 """
-gdelta = revlog._generaldelta
-# gate sparse behind general-delta because of issue6056
-sparse = gdelta and revlog._sparserevlog
+# Why search for delta base if we cannot use a delta base ?
+assert revlog._generaldelta
+# also see issue6056
+sparse = revlog._sparserevlog
 curr = len(revlog)
 prev = curr - 1
 deltachain = lambda rev: revlog._deltachain(rev)[0]
 
-if gdelta:
 # exclude already lazy tested base if any
 parents = [p for p in (p1, p2) if p != nullrev]
 
 if not revlog._deltabothparents and len(parents) == 2:
 parents.sort()
 # To minimize the chance of having to build a fulltext,
 # pick first whichever parent is closest to us (max rev)
 yield (parents[1],)
 # then the other one (min rev) if the first did not fit
 yield (parents[0],)
 elif len(parents) > 0:
 # Test all parents (1 or 2), and keep the best candidate
 yield parents
 
 if sparse and parents:
 if snapshot_cache is None:
@@ -1126,7 +1133,7 b' class deltacomputer:'
 
 return delta
 
-def _builddeltainfo(self, revinfo, base, fh):
+def _builddeltainfo(self, revinfo, base, fh, target_rev=None):
 # can we use the cached delta?
 revlog = self.revlog
 debug_search = self._write_debug is not None and self._debug_search
@@ -1134,6 +1141,13 b' class deltacomputer:'
 if revlog._generaldelta:
 deltabase = base
 else:
+    if target_rev is not None and base != target_rev - 1:
+        msg = (
+            b'general delta cannot use delta for something else '
+            b'than `prev`: %d<-%d'
+        )
+        msg %= (base, target_rev)
+        raise error.ProgrammingError(msg)
 deltabase = chainbase
 snapshotdepth = None
 if revlog._sparserevlog and deltabase == nullrev:
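
The safeguard above amounts to the following standalone check (illustrative, not revlog code): on a non-general-delta revlog, any base other than the immediately preceding revision is rejected as a programming error.

    def check_non_generaldelta_base(base, target_rev):
        if target_rev is not None and base != target_rev - 1:
            raise AssertionError(
                "non-general-delta revlog can only delta against prev: %d<-%d"
                % (base, target_rev)
            )

    check_non_generaldelta_base(8, 9)    # fine: base is the previous revision
    # check_non_generaldelta_base(3, 9)  # would raise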
@@ -1364,7 +1378,12 b' class deltacomputer:'
 
 if debug_search:
 delta_start = util.timer()
-candidatedelta = self._builddeltainfo(revinfo, candidaterev, fh)
+candidatedelta = self._builddeltainfo(
+    revinfo,
+    candidaterev,
+    fh,
+    target_rev=target_rev,
+)
 if debug_search:
 delta_end = util.timer()
 msg = b"DBG-DELTAS-SEARCH: delta-search-time=%f\n"
@@ -414,6 +414,11 b' class transaction(util.transactional):'
 if vfs.exists(file):
 filepath = vfs.join(file)
 backuppath = vfs.join(backupfile)
+# store encoding may result in a different directory here,
+# so we have to ensure the destination directory exists
+final_dir_name = os.path.dirname(backuppath)
+util.makedirs(final_dir_name, mode=vfs.createmode, notindexed=True)
+# then we can copy the backup
 util.copyfile(filepath, backuppath, hardlink=hardlink)
 else:
 backupfile = b''
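
The backup fix boils down to creating the (possibly store-encoded, hence different) target directory before copying. A generic illustration using the standard library rather than Mercurial's vfs/util helpers:

    import os
    import shutil

    def backup_copy(filepath, backuppath):
        # the encoded backup path may point into a directory that does not exist yet
        target_dir = os.path.dirname(backuppath)
        if target_dir:
            os.makedirs(target_dir, exist_ok=True)
        shutil.copyfile(filepath, backuppath)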
@@ -655,9 +655,14 b' def upgrade_dirstate(ui, srcrepo, upgrad'
 pass
 
 assert srcrepo.dirstate._use_dirstate_v2 == (old == b'v2')
+use_v2 = new == b'v2'
+if use_v2:
+    # Write the requirements *before* upgrading
+    scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements)
+
 srcrepo.dirstate._map.preload()
-srcrepo.dirstate._use_dirstate_v2 = new == b'v2'
-srcrepo.dirstate._map._use_dirstate_v2 = srcrepo.dirstate._use_dirstate_v2
+srcrepo.dirstate._use_dirstate_v2 = use_v2
+srcrepo.dirstate._map._use_dirstate_v2 = use_v2
 srcrepo.dirstate._dirty = True
 try:
 srcrepo.vfs.unlink(b'dirstate')
@@ -667,8 +672,9 b' def upgrade_dirstate(ui, srcrepo, upgrad'
 pass
 
 srcrepo.dirstate.write(None)
-
-scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements)
+if not use_v2:
+    # Remove the v2 requirement *after* downgrading
+    scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements)
 
 
 def upgrade_tracked_hint(ui, srcrepo, upgrade_op, add):
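
The ordering enforced by the two hunks above can be summarized by this standalone sketch (hypothetical helper names, not the real upgrade API): the v2 requirement must be visible on disk before any v2 dirstate is written, and is only dropped after a downgrade has completed.

    def migrate_dirstate(repo, new_version, write_requirements, rewrite_dirstate):
        use_v2 = new_version == b'v2'
        if use_v2:
            # advertise dirstate-v2 *before* any v2 data reaches the disk
            write_requirements(repo)
        rewrite_dirstate(repo, use_v2)
        if not use_v2:
            # remove the v2 requirement only *after* the downgrade succeeded
            write_requirements(repo)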
@@ -599,6 +599,10 b' class proxyvfs(abstractvfs):'
 def __init__(self, vfs: "vfs"):
 self.vfs = vfs
 
+@property
+def createmode(self):
+    return self.vfs.createmode
+
 def _auditpath(self, path, mode):
 return self.vfs._auditpath(path, mode)
 
@@ -1,3 +1,16 b''
+= Mercurial 6.4.3 =
+
+* chg: declare environ (issue6812)
+* chg: set CHGHG before connecting to command server
+* delta-find: never do anything fancy when general delta is off
+* delta-find: add a simple safeguard to prevent bad non-general-delta
+* debug-delta: add minimal documentation for `devel.bundle-delta` option
+* fix: highlight the required configuration and behavior of the fixer tools
+* rhg: don't print copy source when --no-status is passed
+* rhg: correctly relativize copy source path
+* repo-upgrade: write new requirement before upgrading the dirstate
+* backup: fix an issue when the backup ends up in a different directory
+
 = Mercurial 6.4.2 =
 
 Exceptional bugfix release due to a corruption bug that happens when using
@@ -552,12 +552,17 b" impl DisplayStatusPaths<'_> {"
 // TODO: get the stdout lock once for the whole loop
 // instead of in each write
 for StatusPath { path, copy_source } in paths {
-let relative;
-let path = if let Some(relativize) = &self.relativize {
-relative = relativize.relativize(&path);
-&*relative
+let relative_path;
+let relative_source;
+let (path, copy_source) = if let Some(relativize) =
+    &self.relativize
+{
+    relative_path = relativize.relativize(&path);
+    relative_source =
+        copy_source.as_ref().map(|s| relativize.relativize(s));
+    (&*relative_path, relative_source.as_deref())
 } else {
-path.as_bytes()
+(path.as_bytes(), copy_source.as_ref().map(|s| s.as_bytes()))
 };
 // TODO: Add a way to use `write_bytes!` instead of `format_bytes!`
 // in order to stream to stdout instead of allocating an
@@ -570,10 +575,10 b" impl DisplayStatusPaths<'_> {"
 &format_bytes!(b"{}{}", path, linebreak),
 label,
 )?;
-if let Some(source) = copy_source {
+if let Some(source) = copy_source.filter(|_| !self.no_status) {
 let label = "status.copied";
 self.ui.write_stdout_labelled(
-&format_bytes!(b" {}{}", source.as_bytes(), linebreak),
+&format_bytes!(b" {}{}", source, linebreak),
 label,
 )?
 }
@@ -553,3 +553,20 b" FIXME: Run 4 should not be >3x Run 1's n"
 $ filteredchg log -r . --no-profile
 $ filteredchg log -r .
 Sample count: * (glob)
+
+chg setting CHGHG itself
+------------------------
+
+If CHGHG is not set, chg will set it before spawning the command server.
+$ hg --kill-chg-daemon
+$ HG=$CHGHG CHGHG= CHGDEBUG= hg debugshell -c \
+> 'ui.write(b"CHGHG=%s\n" % ui.environ.get(b"CHGHG"))' 2>&1 \
+> | egrep 'CHGHG|start'
+chg: debug: * start cmdserver at * (glob)
+CHGHG=/*/install/bin/hg (glob)
+
+Running the same command a second time shouldn't spawn a new command server.
+$ HG=$CHGHG CHGHG= CHGDEBUG= hg debugshell -c \
+> 'ui.write(b"CHGHG=%s\n" % ui.environ.get(b"CHGHG"))' 2>&1 \
+> | egrep 'CHGHG|start'
+CHGHG=/*/install/bin/hg (glob)
@@ -84,10 +84,11 b' Help text for fix.'
 
 rewrite file content in changesets or working directory
 
-Runs any configured tools to fix the content of files. Only affects files
-with changes, unless file arguments are provided. Only affects changed
-lines of files, unless the --whole flag is used. Some tools may always
-affect the whole file regardless of --whole.
+Runs any configured tools to fix the content of files. (See 'hg help -e
+fix' for details about configuring tools.) Only affects files with
+changes, unless file arguments are provided. Only affects changed lines of
+files, unless the --whole flag is used. Some tools may always affect the
+whole file regardless of --whole.
 
 If --working-dir is used, files with uncommitted changes in the working
 copy will be fixed. Note that no backup are made.
@@ -125,6 +126,13 b' Help text for fix.'
 Provides a command that runs configured tools on the contents of modified
 files, writing back any fixes to the working copy or replacing changesets.
 
+Fixer tools are run in the repository's root directory. This allows them to
+read configuration files from the working copy, or even write to the working
+copy. The working copy is not updated to match the revision being fixed. In
+fact, several revisions may be fixed in parallel. Writes to the working copy
+are not amended into the revision being fixed; fixer tools MUST always read
+content to be fixed from stdin, and write fixed file content back to stdout.
+
 Here is an example configuration that causes 'hg fix' to apply automatic
 formatting fixes to modified lines in C++ code:
 
@@ -231,13 +239,6 b' Help text for fix.'
 executions that modified a file. This aggregates the same metadata
 previously passed to the "postfixfile" hook.
 
-Fixer tools are run in the repository's root directory. This allows them to
-read configuration files from the working copy, or even write to the working
-copy. The working copy is not updated to match the revision being fixed. In
-fact, several revisions may be fixed in parallel. Writes to the working copy
-are not amended into the revision being fixed; fixer tools should always write
-fixed file content back to stdout as documented above.
-
 list of commands:
 
 fix rewrite file content in changesets or working directory
@@ -1005,3 +1005,27 b' in the same run.'
 0 -1 set subdir
 
 #endif
+
+
+Test copy source formatting.
+$ cd ..
+$ hg init copy-source-repo
+$ cd copy-source-repo
+$ mkdir -p foo/bar
+$ cd foo/bar
+$ touch file
+$ hg addremove
+adding foo/bar/file
+$ hg commit -m 'add file'
+$ hg mv file copy
+
+Copy source respects relative path setting.
+$ hg st --config ui.statuscopies=true --config commands.status.relative=true
+A copy
+file
+R file
+
+Copy source is not shown when --no-status is passed.
+$ hg st --config ui.statuscopies=true --no-status
+foo/bar/copy
+foo/bar/file
@@ -1,6 +1,9 b''
 Test correctness of revlog inline -> non-inline transition
 ----------------------------------------------------------
 
+We test various file lengths and naming patterns as this has created issues in
+the past.
+
 Helper extension to intercept renames and kill process
 
 $ cat > $TESTTMP/intercept_before_rename.py << EOF
@@ -76,13 +79,30 b' setup a repository for tests'
 
 $ hg init troffset-computation
 $ cd troffset-computation
-$ printf '%20d' '1' > file
+$ files="
+> file
+> Directory_With,Special%Char/Complex_File.babar
+> foo/bar/babar_celeste/foo
+> 1234567890/1234567890/1234567890/1234567890/1234567890/1234567890/1234567890/1234567890/1234567890/1234567890/f
+> "
+$ for f in $files; do
+> mkdir -p `dirname $f`
+> done
+$ for f in $files; do
+> printf '%20d' '1' > $f
+> done
 $ hg commit -Aqma
-$ printf '%1024d' '1' > file
+$ for f in $files; do
+> printf '%1024d' '1' > $f
+> done
 $ hg commit -Aqmb
-$ printf '%20d' '1' > file
+$ for f in $files; do
+> printf '%20d' '1' > $f
+> done
 $ hg commit -Aqmc
-$ dd if=/dev/zero of=file bs=1k count=128 > /dev/null 2>&1
+$ for f in $files; do
+> dd if=/dev/zero of=$f bs=1k count=128 > /dev/null 2>&1
+> done
 $ hg commit -AqmD --traceback
 
 Reference size:
@@ -163,7 +183,7 b' recover is rolling the split back, the f'
 $ f -s .hg/store/data/file*
 .hg/store/data/file.i: size=1174
 $ hg tip
-changeset: 1:cfa8d6e60429
+changeset: 1:cc8dfb126534
 tag: tip
 user: test
 date: Thu Jan 01 00:00:00 1970 +0000
@@ -233,7 +253,7 b' recover is rolling the split back, the f'
 $ f -s .hg/store/data/file*
 .hg/store/data/file.i: size=1174
 $ hg tip
-changeset: 1:cfa8d6e60429
+changeset: 1:cc8dfb126534
 tag: tip
 user: test
 date: Thu Jan 01 00:00:00 1970 +0000
@@ -297,7 +317,7 b' recover is rolling the split back, the f'
 $ f -s .hg/store/data/file*
 .hg/store/data/file.i: size=1174
 $ hg tip
-changeset: 1:cfa8d6e60429
+changeset: 1:cc8dfb126534
 tag: tip
 user: test
 date: Thu Jan 01 00:00:00 1970 +0000
@@ -337,7 +357,7 b' The split was rollback'
 
 
 $ hg tip
-changeset: 1:cfa8d6e60429
+changeset: 1:cc8dfb126534
 tag: tip
 user: test
 date: Thu Jan 01 00:00:00 1970 +0000