gzip: indent the custom Gzip code...
marmoute
r44981:a23b859a stable
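
Note on the code being moved: the GzipFileWithTime subclass in this file exists so the timestamp written into the gzip header comes from the archive's mtime (by default the changeset date, see archive() at the bottom of the file) rather than the current time, which keeps repeated archives of the same revision reproducible. The changeset itself only re-indents that subclass under an `if True:` block; behaviour is unchanged. A minimal standalone sketch of the pinned-mtime idea, using only the standard library's mtime parameter rather than Mercurial's subclass (illustration only, not part of the changeset):

# Illustration (assumption: not code from this changeset) of why a fixed
# gzip-header timestamp matters: two compressions of the same payload with
# the same pinned mtime are byte-for-byte identical.
import gzip
import io

buf1, buf2 = io.BytesIO(), io.BytesIO()
for buf in (buf1, buf2):
    # mtime is pinned to a fixed value instead of the current time
    with gzip.GzipFile(fileobj=buf, mode='wb', mtime=1234567890) as gz:
        gz.write(b'same payload')

assert buf1.getvalue() == buf2.getvalue()  # reproducible output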
mercurial/archival.py
@@ -1,386 +1,388 @@
# archival.py - revision archival for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import gzip
import os
import struct
import tarfile
import time
import zipfile
import zlib

from .i18n import _
from .node import nullrev
from .pycompat import open

from . import (
    error,
    formatter,
    match as matchmod,
    pycompat,
    scmutil,
    util,
    vfs as vfsmod,
)

stringio = util.stringio

# from unzip source code:
_UNX_IFREG = 0x8000
_UNX_IFLNK = 0xA000


def tidyprefix(dest, kind, prefix):
    '''choose prefix to use for names in archive. make sure prefix is
    safe for consumers.'''

    if prefix:
        prefix = util.normpath(prefix)
    else:
        if not isinstance(dest, bytes):
            raise ValueError(b'dest must be string if no prefix')
        prefix = os.path.basename(dest)
        lower = prefix.lower()
        for sfx in exts.get(kind, []):
            if lower.endswith(sfx):
                prefix = prefix[: -len(sfx)]
                break
    lpfx = os.path.normpath(util.localpath(prefix))
    prefix = util.pconvert(lpfx)
    if not prefix.endswith(b'/'):
        prefix += b'/'
    # Drop the leading '.' path component if present, so Windows can read the
    # zip files (issue4634)
    if prefix.startswith(b'./'):
        prefix = prefix[2:]
    if prefix.startswith(b'../') or os.path.isabs(lpfx) or b'/../' in prefix:
        raise error.Abort(_(b'archive prefix contains illegal components'))
    return prefix


exts = {
    b'tar': [b'.tar'],
    b'tbz2': [b'.tbz2', b'.tar.bz2'],
    b'tgz': [b'.tgz', b'.tar.gz'],
    b'zip': [b'.zip'],
    b'txz': [b'.txz', b'.tar.xz'],
}


def guesskind(dest):
    for kind, extensions in pycompat.iteritems(exts):
        if any(dest.endswith(ext) for ext in extensions):
            return kind
    return None


def _rootctx(repo):
    # repo[0] may be hidden
    for rev in repo:
        return repo[rev]
    return repo[nullrev]


# {tags} on ctx includes local tags and 'tip', with no current way to limit
# that to global tags. Therefore, use {latesttag} as a substitute when
# the distance is 0, since that will be the list of global tags on ctx.
_defaultmetatemplate = br'''
repo: {root}
node: {ifcontains(rev, revset("wdir()"), "{p1node}{dirty}", "{node}")}
branch: {branch|utf8}
{ifeq(latesttagdistance, 0, join(latesttag % "tag: {tag}", "\n"),
      separate("\n",
               join(latesttag % "latesttag: {tag}", "\n"),
               "latesttagdistance: {latesttagdistance}",
               "changessincelatesttag: {changessincelatesttag}"))}
'''[
    1:
]  # drop leading '\n'


def buildmetadata(ctx):
    '''build content of .hg_archival.txt'''
    repo = ctx.repo()

    opts = {
        b'template': repo.ui.config(
            b'experimental', b'archivemetatemplate', _defaultmetatemplate
        )
    }

    out = util.stringio()

    fm = formatter.formatter(repo.ui, out, b'archive', opts)
    fm.startitem()
    fm.context(ctx=ctx)
    fm.data(root=_rootctx(repo).hex())

    if ctx.rev() is None:
        dirty = b''
        if ctx.dirty(missing=True):
            dirty = b'+'
        fm.data(dirty=dirty)
    fm.end()

    return out.getvalue()


class tarit(object):
    '''write archive to tar file or stream. can write uncompressed,
    or compress with gzip or bzip2.'''

-    class GzipFileWithTime(gzip.GzipFile):
-        def __init__(self, *args, **kw):
-            timestamp = None
-            if 'mtime' in kw:
-                timestamp = kw.pop('mtime')
-            if timestamp is None:
-                self.timestamp = time.time()
-            else:
-                self.timestamp = timestamp
-            gzip.GzipFile.__init__(self, *args, **kw)
-
-        def _write_gzip_header(self):
-            self.fileobj.write(b'\037\213')  # magic header
-            self.fileobj.write(b'\010')  # compression method
-            fname = self.name
-            if fname and fname.endswith(b'.gz'):
-                fname = fname[:-3]
-            flags = 0
-            if fname:
-                flags = gzip.FNAME  # pytype: disable=module-attr
-            self.fileobj.write(pycompat.bytechr(flags))
-            gzip.write32u(  # pytype: disable=module-attr
-                self.fileobj, int(self.timestamp)
-            )
-            self.fileobj.write(b'\002')
-            self.fileobj.write(b'\377')
-            if fname:
-                self.fileobj.write(fname + b'\000')
+    if True:
+
+        class GzipFileWithTime(gzip.GzipFile):
+            def __init__(self, *args, **kw):
+                timestamp = None
+                if 'mtime' in kw:
+                    timestamp = kw.pop('mtime')
+                if timestamp is None:
+                    self.timestamp = time.time()
+                else:
+                    self.timestamp = timestamp
+                gzip.GzipFile.__init__(self, *args, **kw)
+
+            def _write_gzip_header(self):
+                self.fileobj.write(b'\037\213')  # magic header
+                self.fileobj.write(b'\010')  # compression method
+                fname = self.name
+                if fname and fname.endswith(b'.gz'):
+                    fname = fname[:-3]
+                flags = 0
+                if fname:
+                    flags = gzip.FNAME  # pytype: disable=module-attr
+                self.fileobj.write(pycompat.bytechr(flags))
+                gzip.write32u(  # pytype: disable=module-attr
+                    self.fileobj, int(self.timestamp)
+                )
+                self.fileobj.write(b'\002')
+                self.fileobj.write(b'\377')
+                if fname:
+                    self.fileobj.write(fname + b'\000')

    def __init__(self, dest, mtime, kind=b''):
        self.mtime = mtime
        self.fileobj = None

        def taropen(mode, name=b'', fileobj=None):
            if kind == b'gz':
                mode = mode[0:1]
                if not fileobj:
                    fileobj = open(name, mode + b'b')
                gzfileobj = self.GzipFileWithTime(
                    name,
                    pycompat.sysstr(mode + b'b'),
                    zlib.Z_BEST_COMPRESSION,
                    fileobj,
                    mtime=mtime,
                )
                self.fileobj = gzfileobj
                return tarfile.TarFile.taropen(  # pytype: disable=attribute-error
                    name, pycompat.sysstr(mode), gzfileobj
                )
            else:
                return tarfile.open(name, pycompat.sysstr(mode + kind), fileobj)

        if isinstance(dest, bytes):
            self.z = taropen(b'w:', name=dest)
        else:
            self.z = taropen(b'w|', fileobj=dest)

    def addfile(self, name, mode, islink, data):
        name = pycompat.fsdecode(name)
        i = tarfile.TarInfo(name)
        i.mtime = self.mtime
        i.size = len(data)
        if islink:
            i.type = tarfile.SYMTYPE
            i.mode = 0o777
            i.linkname = pycompat.fsdecode(data)
            data = None
            i.size = 0
        else:
            i.mode = mode
            data = stringio(data)
        self.z.addfile(i, data)

    def done(self):
        self.z.close()
        if self.fileobj:
            self.fileobj.close()


class zipit(object):
    '''write archive to zip file or stream. can write uncompressed,
    or compressed with deflate.'''

    def __init__(self, dest, mtime, compress=True):
        if isinstance(dest, bytes):
            dest = pycompat.fsdecode(dest)
        self.z = zipfile.ZipFile(
            dest, 'w', compress and zipfile.ZIP_DEFLATED or zipfile.ZIP_STORED
        )

        # Python's zipfile module emits deprecation warnings if we try
        # to store files with a date before 1980.
        epoch = 315532800  # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
        if mtime < epoch:
            mtime = epoch

        self.mtime = mtime
        self.date_time = time.gmtime(mtime)[:6]

    def addfile(self, name, mode, islink, data):
        i = zipfile.ZipInfo(pycompat.fsdecode(name), self.date_time)
        i.compress_type = self.z.compression  # pytype: disable=attribute-error
        # unzip will not honor unix file modes unless file creator is
        # set to unix (id 3).
        i.create_system = 3
        ftype = _UNX_IFREG
        if islink:
            mode = 0o777
            ftype = _UNX_IFLNK
        i.external_attr = (mode | ftype) << 16
        # add "extended-timestamp" extra block, because zip archives
        # without this will be extracted with unexpected timestamp,
        # if TZ is not configured as GMT
        i.extra += struct.pack(
            b'<hhBl',
            0x5455,  # block type: "extended-timestamp"
            1 + 4,  # size of this block
            1,  # "modification time is present"
            int(self.mtime),
        )  # last modification (UTC)
        self.z.writestr(i, data)

    def done(self):
        self.z.close()


class fileit(object):
    '''write archive as files in directory.'''

    def __init__(self, name, mtime):
        self.basedir = name
        self.opener = vfsmod.vfs(self.basedir)
        self.mtime = mtime

    def addfile(self, name, mode, islink, data):
        if islink:
            self.opener.symlink(data, name)
            return
        f = self.opener(name, b"w", atomictemp=False)
        f.write(data)
        f.close()
        destfile = os.path.join(self.basedir, name)
        os.chmod(destfile, mode)
        if self.mtime is not None:
            os.utime(destfile, (self.mtime, self.mtime))

    def done(self):
        pass


archivers = {
    b'files': fileit,
    b'tar': tarit,
    b'tbz2': lambda name, mtime: tarit(name, mtime, b'bz2'),
    b'tgz': lambda name, mtime: tarit(name, mtime, b'gz'),
    b'txz': lambda name, mtime: tarit(name, mtime, b'xz'),
    b'uzip': lambda name, mtime: zipit(name, mtime, False),
    b'zip': zipit,
}


def archive(
    repo,
    dest,
    node,
    kind,
    decode=True,
    match=None,
    prefix=b'',
    mtime=None,
    subrepos=False,
):
    '''create archive of repo as it was at node.

    dest can be name of directory, name of archive file, or file
    object to write archive to.

    kind is type of archive to create.

    decode tells whether to put files through decode filters from
    hgrc.

    match is a matcher to filter names of files to write to archive.

    prefix is name of path to put before every archive member.

    mtime is the modified time, in seconds, or None to use the changeset time.

    subrepos tells whether to include subrepos.
    '''

    if kind == b'txz' and not pycompat.ispy3:
        raise error.Abort(_(b'xz compression is only available in Python 3'))

    if kind == b'files':
        if prefix:
            raise error.Abort(_(b'cannot give prefix when archiving to files'))
    else:
        prefix = tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    if kind not in archivers:
        raise error.Abort(_(b"unknown archive type '%s'") % kind)

    ctx = repo[node]
    archiver = archivers[kind](dest, mtime or ctx.date()[0])

    if not match:
        match = scmutil.matchall(repo)

    if repo.ui.configbool(b"ui", b"archivemeta"):
        name = b'.hg_archival.txt'
        if match(name):
            write(name, 0o644, False, lambda: buildmetadata(ctx))

    files = [f for f in ctx.manifest().matches(match)]
    total = len(files)
    if total:
        files.sort()
        scmutil.prefetchfiles(
            repo, [ctx.rev()], scmutil.matchfiles(repo, files)
        )
        progress = repo.ui.makeprogress(
            _(b'archiving'), unit=_(b'files'), total=total
        )
        progress.update(0)
        for f in files:
            ff = ctx.flags(f)
            write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, ctx[f].data)
            progress.increment(item=f)
        progress.complete()

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.workingsub(subpath)
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = prefix + subpath + b'/'
            total += sub.archive(archiver, subprefix, submatch, decode)

    if total == 0:
        raise error.Abort(_(b'no files match the archive pattern'))

    archiver.done()
    return total
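
For reference, a minimal sketch of driving the archive() entry point above from a script. The repository path, output name, and prefix are illustrative assumptions; the keyword arguments mirror the signature in this file.

# Hypothetical caller: archive the tip of an existing local repository as a
# gzipped tarball.  The repository path below is an assumption.
from mercurial import archival, hg, ui as uimod

repo = hg.repository(uimod.ui.load(), b'/path/to/repo')
node = repo[b'tip'].node()

total = archival.archive(
    repo,
    b'snapshot.tar.gz',    # dest: file name, directory, or file object
    node,
    b'tgz',                # kind: any key of archival.archivers
    prefix=b'snapshot/',   # prepended to every archive member
    mtime=None,            # None -> use the changeset's own date
)
print('%d files archived' % total)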