"""This script finds all HTML pages in a folder and downloads all images, replacing
the urls with local ones.
"""
import multiprocessing
import optparse
import os
import subprocess
import sys
from os.path import abspath, basename, dirname, join

# Directory layout (NOTE(review): reconstructed from a mangled fragment):
# this file lives under <client>/.../swarm, and the shared tools live in
# <client>/tools.  The tools dir is put on sys.path so `htmlconverter`
# can be imported by the worker code below.
SWARM_PATH = dirname(abspath(__file__))
CLIENT_PATH = dirname(dirname(SWARM_PATH))
CLIENT_TOOLS_PATH = join(CLIENT_PATH, 'tools')
sys.path.append(CLIENT_TOOLS_PATH)

# Path of the stand-alone converter script inside the tools directory.
converter = CLIENT_TOOLS_PATH + '/htmlconverter.py'
def convertImgs(infile):
  """Converts one HTML file for offline use via the htmlconverter module.

  NOTE(review): reconstructed from a mangled fragment — the original call
  likely passed an explicit output path in an elided line; confirm against
  htmlconverter.convertForOffline's signature.  Relies on the module
  globals `htmlconverter` and `options` being set elsewhere in this file.

  Args:
    infile: path of the HTML file to convert.
  """
  try:
    htmlconverter.convertForOffline(
        infile,
        verbose=options.verbose,
        encode_images=options.inline_images)
    print('Converted ' + infile)
  except BaseException as e:
    # Broad catch is intentional: this runs as a process-pool worker, so a
    # failure on one page is reported but must not abort the whole batch.
    print('Caught error: %s' % e)
36 """ Constructs a parser for extracting flags from the command line. """
37 parser = optparse.OptionParser()
40 help=(
"Encode img payloads as data:// URLs rather than local files."),
43 parser.add_option(
"--verbose",
44 help=
"Print verbose output",
53 options, args = parser.parse_args()
54 print(
"args: %s" % args)
55 if len(args) < 1
or 'help' in args[0]:
60 print(
'Searching directory ' + dirname)
63 for root, dirs, fnames
in os.walk(dirname):
65 if fname.endswith(
'.html'):
66 files.append(
join(root, fname))
68 count = 4 * multiprocessing.cpu_count()
69 pool = multiprocessing.Pool(processes=count)
71 pool.map_async(convertImgs, files).
get(3600)
74if __name__ ==
'__main__':
# NOTE(review): removed extraction residue — three unrelated signature lines
# (C++ `myers::Point`/`SkString join` declarations and a stray `def print`)
# fused in from other files; they were never part of this script.