Show More
@@ -1,192 +1,194 b'' | |||
|
1 | 1 | # Copyright 2009-2010 Gregory P. Ward |
|
2 | 2 | # Copyright 2009-2010 Intelerad Medical Systems Incorporated |
|
3 | 3 | # Copyright 2010-2011 Fog Creek Software |
|
4 | 4 | # Copyright 2010-2011 Unity Technologies |
|
5 | 5 | # |
|
6 | 6 | # This software may be used and distributed according to the terms of the |
|
7 | 7 | # GNU General Public License version 2 or any later version. |
|
8 | 8 | |
|
9 | 9 | '''base class for store implementations and store-related utility code''' |
|
10 | 10 | |
|
11 | 11 | import binascii |
|
12 | 12 | import re |
|
13 | 13 | |
|
14 | 14 | from mercurial import util, node, hg |
|
15 | 15 | from mercurial.i18n import _ |
|
16 | 16 | |
|
17 | 17 | import lfutil |
|
18 | 18 | |
|
19 | 19 | class StoreError(Exception): |
|
20 | 20 | '''Raised when there is a problem getting files from or putting |
|
21 | 21 | files to a central store.''' |
|
22 | 22 | def __init__(self, filename, hash, url, detail): |
|
23 | 23 | self.filename = filename |
|
24 | 24 | self.hash = hash |
|
25 | 25 | self.url = url |
|
26 | 26 | self.detail = detail |
|
27 | 27 | |
|
28 | 28 | def longmessage(self): |
|
29 | 29 | return (_("error getting id %s from url %s for file %s: %s\n") % |
|
30 | 30 | (self.hash, self.url, self.filename, self.detail)) |
|
31 | 31 | |
|
32 | 32 | def __str__(self): |
|
33 | 33 | return "%s: %s" % (self.url, self.detail) |
|
34 | 34 | |
|
35 | 35 | class basestore(object): |
|
36 | 36 | def __init__(self, ui, repo, url): |
|
37 | 37 | self.ui = ui |
|
38 | 38 | self.repo = repo |
|
39 | 39 | self.url = url |
|
40 | 40 | |
|
41 | 41 | def put(self, source, hash): |
|
42 | 42 | '''Put source file into the store under <filename>/<hash>.''' |
|
43 | 43 | raise NotImplementedError('abstract method') |
|
44 | 44 | |
|
45 | 45 | def exists(self, hashes): |
|
46 | 46 | '''Check to see if the store contains the given hashes.''' |
|
47 | 47 | raise NotImplementedError('abstract method') |
|
48 | 48 | |
|
49 | 49 | def get(self, files): |
|
50 | 50 | '''Get the specified largefiles from the store and write to local |
|
51 | 51 | files under repo.root. files is a list of (filename, hash) |
|
52 | 52 | tuples. Return (success, missing), lists of files successfully |
|
53 | 53 | downloaded and those not found in the store. success is a list |
|
54 | 54 | of (filename, hash) tuples; missing is a list of filenames that |
|
55 | 55 | we could not get. (The detailed error message will already have |
|
56 | 56 | been presented to the user, so missing is just supplied as a |
|
57 | 57 | summary.)''' |
|
58 | 58 | success = [] |
|
59 | 59 | missing = [] |
|
60 | 60 | ui = self.ui |
|
61 | 61 | |
|
62 | util.makedirs(lfutil.storepath(self.repo, '')) | |
|
63 | ||
|
62 | 64 | at = 0 |
|
63 | 65 | for filename, hash in files: |
|
64 | 66 | ui.progress(_('getting largefiles'), at, unit='lfile', |
|
65 | 67 | total=len(files)) |
|
66 | 68 | at += 1 |
|
67 | 69 | ui.note(_('getting %s:%s\n') % (filename, hash)) |
|
68 | 70 | |
|
69 | 71 | storefilename = lfutil.storepath(self.repo, hash) |
|
70 | 72 | tmpfile = util.atomictempfile(storefilename + '.tmp', |
|
71 | 73 | createmode=self.repo.store.createmode) |
|
72 | 74 | |
|
73 | 75 | try: |
|
74 | 76 | hhash = binascii.hexlify(self._getfile(tmpfile, filename, hash)) |
|
75 | 77 | except StoreError, err: |
|
76 | 78 | ui.warn(err.longmessage()) |
|
77 | 79 | hhash = "" |
|
78 | 80 | tmpfile.close() # has probably already been closed! |
|
79 | 81 | |
|
80 | 82 | if hhash != hash: |
|
81 | 83 | if hhash != "": |
|
82 | 84 | ui.warn(_('%s: data corruption (expected %s, got %s)\n') |
|
83 | 85 | % (filename, hash, hhash)) |
|
84 | 86 | util.unlink(storefilename + '.tmp') |
|
85 | 87 | missing.append(filename) |
|
86 | 88 | continue |
|
87 | 89 | |
|
88 | 90 | util.rename(storefilename + '.tmp', storefilename) |
|
89 | 91 | lfutil.linktousercache(self.repo, hash) |
|
90 | 92 | success.append((filename, hhash)) |
|
91 | 93 | |
|
92 | 94 | ui.progress(_('getting largefiles'), None) |
|
93 | 95 | return (success, missing) |
|
94 | 96 | |
|
95 | 97 | def verify(self, revs, contents=False): |
|
96 | 98 | '''Verify the existence (and, optionally, contents) of every big |
|
97 | 99 | file revision referenced by every changeset in revs. |
|
98 | 100 | Return 0 if all is well, non-zero on any errors.''' |
|
99 | 101 | write = self.ui.write |
|
100 | 102 | failed = False |
|
101 | 103 | |
|
102 | 104 | write(_('searching %d changesets for largefiles\n') % len(revs)) |
|
103 | 105 | verified = set() # set of (filename, filenode) tuples |
|
104 | 106 | |
|
105 | 107 | for rev in revs: |
|
106 | 108 | cctx = self.repo[rev] |
|
107 | 109 | cset = "%d:%s" % (cctx.rev(), node.short(cctx.node())) |
|
108 | 110 | |
|
109 | 111 | for standin in cctx: |
|
110 | 112 | if self._verifyfile(cctx, cset, contents, standin, verified): |
|
111 | 113 | failed = True |
|
112 | 114 | |
|
113 | 115 | numrevs = len(verified) |
|
114 | 116 | numlfiles = len(set([fname for (fname, fnode) in verified])) |
|
115 | 117 | if contents: |
|
116 | 118 | write(_('verified contents of %d revisions of %d largefiles\n') |
|
117 | 119 | % (numrevs, numlfiles)) |
|
118 | 120 | else: |
|
119 | 121 | write(_('verified existence of %d revisions of %d largefiles\n') |
|
120 | 122 | % (numrevs, numlfiles)) |
|
121 | 123 | |
|
122 | 124 | return int(failed) |
|
123 | 125 | |
|
124 | 126 | def _getfile(self, tmpfile, filename, hash): |
|
125 | 127 | '''Fetch one revision of one file from the store and write it |
|
126 | 128 | to tmpfile. Compute the hash of the file on-the-fly as it |
|
127 | 129 | downloads and return the binary hash. Close tmpfile. Raise |
|
128 | 130 | StoreError if unable to download the file (e.g. it does not |
|
129 | 131 | exist in the store).''' |
|
130 | 132 | raise NotImplementedError('abstract method') |
|
131 | 133 | |
|
132 | 134 | def _verifyfile(self, cctx, cset, contents, standin, verified): |
|
133 | 135 | '''Perform the actual verification of a file in the store. |
|
134 | 136 | ''' |
|
135 | 137 | raise NotImplementedError('abstract method') |
|
136 | 138 | |
|
137 | 139 | import localstore, wirestore |
|
138 | 140 | |
|
139 | 141 | _storeprovider = { |
|
140 | 142 | 'file': [localstore.localstore], |
|
141 | 143 | 'http': [wirestore.wirestore], |
|
142 | 144 | 'https': [wirestore.wirestore], |
|
143 | 145 | 'ssh': [wirestore.wirestore], |
|
144 | 146 | } |
|
145 | 147 | |
|
146 | 148 | _scheme_re = re.compile(r'^([a-zA-Z0-9+-.]+)://') |
|
147 | 149 | |
|
148 | 150 | # During clone this function is passed the src's ui object |
|
149 | 151 | # but it needs the dest's ui object so it can read out of |
|
150 | 152 | # the config file. Use repo.ui instead. |
|
151 | 153 | def _openstore(repo, remote=None, put=False): |
|
152 | 154 | ui = repo.ui |
|
153 | 155 | |
|
154 | 156 | if not remote: |
|
155 | 157 | lfpullsource = getattr(repo, 'lfpullsource', None) |
|
156 | 158 | if lfpullsource: |
|
157 | 159 | path = ui.expandpath(lfpullsource) |
|
158 | 160 | else: |
|
159 | 161 | path = ui.expandpath('default-push', 'default') |
|
160 | 162 | |
|
161 | 163 | # ui.expandpath() leaves 'default-push' and 'default' alone if |
|
162 | 164 | # they cannot be expanded: fallback to the empty string, |
|
163 | 165 | # meaning the current directory. |
|
164 | 166 | if path == 'default-push' or path == 'default': |
|
165 | 167 | path = '' |
|
166 | 168 | remote = repo |
|
167 | 169 | else: |
|
168 | 170 | path, _branches = hg.parseurl(path) |
|
169 | 171 | remote = hg.peer(repo, {}, path) |
|
170 | 172 | |
|
171 | 173 | # The path could be a scheme so use Mercurial's normal functionality |
|
172 | 174 | # to resolve the scheme to a repository and use its path |
|
173 | 175 | path = util.safehasattr(remote, 'url') and remote.url() or remote.path |
|
174 | 176 | |
|
175 | 177 | match = _scheme_re.match(path) |
|
176 | 178 | if not match: # regular filesystem path |
|
177 | 179 | scheme = 'file' |
|
178 | 180 | else: |
|
179 | 181 | scheme = match.group(1) |
|
180 | 182 | |
|
181 | 183 | try: |
|
182 | 184 | storeproviders = _storeprovider[scheme] |
|
183 | 185 | except KeyError: |
|
184 | 186 | raise util.Abort(_('unsupported URL scheme %r') % scheme) |
|
185 | 187 | |
|
186 | 188 | for classobj in storeproviders: |
|
187 | 189 | try: |
|
188 | 190 | return classobj(ui, repo, remote) |
|
189 | 191 | except lfutil.storeprotonotcapable: |
|
190 | 192 | pass |
|
191 | 193 | |
|
192 | 194 | raise util.Abort(_('%s does not appear to be a largefile store') % path) |
@@ -1,141 +1,140 b'' | |||
|
1 | 1 | Create user cache directory |
|
2 | 2 | |
|
3 | 3 | $ USERCACHE=`pwd`/cache; export USERCACHE |
|
4 | 4 | $ cat <<EOF >> ${HGRCPATH} |
|
5 | 5 | > [extensions] |
|
6 | 6 | > hgext.largefiles= |
|
7 | 7 | > [largefiles] |
|
8 | 8 | > usercache=${USERCACHE} |
|
9 | 9 | > EOF |
|
10 | 10 | $ mkdir -p ${USERCACHE} |
|
11 | 11 | |
|
12 | 12 | Create source repo, and commit adding largefile. |
|
13 | 13 | |
|
14 | 14 | $ hg init src |
|
15 | 15 | $ cd src |
|
16 | 16 | $ echo large > large |
|
17 | 17 | $ hg add --large large |
|
18 | 18 | $ hg commit -m 'add largefile' |
|
19 | 19 | $ hg rm large |
|
20 | 20 | $ hg commit -m 'branchhead without largefile' |
|
21 | 21 | $ hg up -qr 0 |
|
22 | 22 | $ cd .. |
|
23 | 23 | |
|
24 | 24 | Discard all cached largefiles in USERCACHE |
|
25 | 25 | |
|
26 | 26 | $ rm -rf ${USERCACHE} |
|
27 | 27 | |
|
28 | 28 | Create mirror repo, and pull from source without largefile: |
|
29 | 29 | "pull" is used instead of "clone" for suppression of (1) updating to |
|
30 | 30 | tip (= caching largefile from source repo), and (2) recording source |
|
31 | 31 | repo as "default" path in .hg/hgrc. |
|
32 | 32 | |
|
33 | 33 | $ hg init mirror |
|
34 | 34 | $ cd mirror |
|
35 | 35 | $ hg pull ../src |
|
36 | 36 | pulling from ../src |
|
37 | 37 | requesting all changes |
|
38 | 38 | adding changesets |
|
39 | 39 | adding manifests |
|
40 | 40 | adding file changes |
|
41 | 41 | added 2 changesets with 1 changes to 1 files |
|
42 | 42 | (run 'hg update' to get a working copy) |
|
43 | 43 | caching new largefiles |
|
44 | 44 | 0 largefiles cached |
|
45 | 45 | |
|
46 | 46 | Update working directory to "tip", which requires largefile("large"), |
|
47 | 47 | but there is no cache file for it. So, hg must treat it as |
|
48 | 48 | "missing"(!) file. |
|
49 | 49 | |
|
50 | 50 | $ hg update -r0 |
|
51 | 51 | getting changed largefiles |
|
52 | 52 | error getting id 7f7097b041ccf68cc5561e9600da4655d21c6d18 from url file:$TESTTMP/mirror for file large: can't get file locally (glob) |
|
53 | 53 | 0 largefiles updated, 0 removed |
|
54 | 54 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
55 | 55 | $ hg status |
|
56 | 56 | ! large |
|
57 | 57 | |
|
58 | 58 | Update working directory to null: this cleans up .hg/largefiles/dirstate |
|
59 | 59 | |
|
60 | 60 | $ hg update null |
|
61 | 61 | getting changed largefiles |
|
62 | 62 | 0 largefiles updated, 0 removed |
|
63 | 63 | 0 files updated, 0 files merged, 1 files removed, 0 files unresolved |
|
64 | 64 | |
|
65 | 65 | Update working directory to tip, again. |
|
66 | 66 | |
|
67 | 67 | $ hg update -r0 |
|
68 | 68 | getting changed largefiles |
|
69 | 69 | error getting id 7f7097b041ccf68cc5561e9600da4655d21c6d18 from url file:$TESTTMP/mirror for file large: can't get file locally (glob) |
|
70 | 70 | 0 largefiles updated, 0 removed |
|
71 | 71 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
72 | 72 | $ hg status |
|
73 | 73 | ! large |
|
74 | 74 | $ cd .. |
|
75 | 75 | |
|
76 | 76 | Verify that largefiles from pulled branchheads are fetched, also to an empty repo |
|
77 | 77 | |
|
78 | 78 | $ hg init mirror2 |
|
79 | 79 | $ hg -R mirror2 pull src -r0 |
|
80 | 80 | pulling from src |
|
81 | 81 | adding changesets |
|
82 | 82 | adding manifests |
|
83 | 83 | adding file changes |
|
84 | 84 | added 1 changesets with 1 changes to 1 files |
|
85 | 85 | (run 'hg update' to get a working copy) |
|
86 | 86 | caching new largefiles |
|
87 | abort: *: '$TESTTMP/mirror2/.hg/largefiles/.7f7097b041ccf68cc5561e9600da4655d21c6d18.*' (glob) | |
|
88 | [255] | |
|
87 | 1 largefiles cached | |
|
89 | 88 | |
|
90 | 89 | #if unix-permissions |
|
91 | 90 | |
|
92 | 91 | Portable way to print file permissions: |
|
93 | 92 | |
|
94 | 93 | $ cat > ls-l.py <<EOF |
|
95 | 94 | > #!/usr/bin/env python |
|
96 | 95 | > import sys, os |
|
97 | 96 | > path = sys.argv[1] |
|
98 | 97 | > print '%03o' % (os.lstat(path).st_mode & 0777) |
|
99 | 98 | > EOF |
|
100 | 99 | $ chmod +x ls-l.py |
|
101 | 100 | |
|
102 | 101 | Test that files in .hg/largefiles inherit mode from .hg/store, not |
|
103 | 102 | from file in working copy: |
|
104 | 103 | |
|
105 | 104 | $ cd src |
|
106 | 105 | $ chmod 750 .hg/store |
|
107 | 106 | $ chmod 660 large |
|
108 | 107 | $ echo change >> large |
|
109 | 108 | $ hg commit -m change |
|
110 | 109 | created new head |
|
111 | 110 | $ ../ls-l.py .hg/largefiles/e151b474069de4ca6898f67ce2f2a7263adf8fea |
|
112 | 111 | 640 |
|
113 | 112 | |
|
114 | 113 | Test permission of files in .hg/largefiles created by update: |
|
115 | 114 | |
|
116 | 115 | $ cd ../mirror |
|
117 | 116 | $ rm -r "$USERCACHE" .hg/largefiles # avoid links |
|
118 | 117 | $ chmod 750 .hg/store |
|
119 | 118 | $ hg pull ../src --update -q |
|
120 | 119 | $ ../ls-l.py .hg/largefiles/e151b474069de4ca6898f67ce2f2a7263adf8fea |
|
121 | 120 | 640 |
|
122 | 121 | |
|
123 | 122 | Test permission of files created by push: |
|
124 | 123 | |
|
125 | 124 | $ hg serve -R ../src -d -p $HGPORT --pid-file hg.pid \ |
|
126 | 125 | > --config "web.allow_push=*" --config web.push_ssl=no |
|
127 | 126 | $ cat hg.pid >> $DAEMON_PIDS |
|
128 | 127 | |
|
129 | 128 | $ echo change >> large |
|
130 | 129 | $ hg commit -m change |
|
131 | 130 | |
|
132 | 131 | $ rm -r "$USERCACHE" |
|
133 | 132 | |
|
134 | 133 | $ hg push -q http://localhost:$HGPORT/ |
|
135 | 134 | |
|
136 | 135 | $ ../ls-l.py ../src/.hg/largefiles/b734e14a0971e370408ab9bce8d56d8485e368a9 |
|
137 | 136 | 640 |
|
138 | 137 | |
|
139 | 138 | $ cd .. |
|
140 | 139 | |
|
141 | 140 | #endif |
General Comments 0
You need to be logged in to leave comments.
Login now