# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
5 """Bootstrap Chrome Telemetry by downloading all its files from SVN servers.
7 Requires a DEPS file to specify which directories on which SVN servers
8 are required to run Telemetry. Format of that DEPS file is a subset of the
9 normal DEPS file format[1]; currently only only the "deps" dictionary is
10 supported and nothing else.
12 Fetches all files in the specified directories using WebDAV (SVN is WebDAV under
15 [1] http://dev.chromium.org/developers/how-tos/depottools#TOC-DEPS-file
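
# Illustrative example of the bootstrap DEPS subset described above; the paths
# and server below are hypothetical, not part of this file. A "deps"
# dictionary maps local destination paths to SVN/WebDAV directory URLs; the
# functions below also honor an optional "deps_includes" dictionary mapping a
# local DEPS file path to the URL it should be fetched from.
#
#   deps = {
#       "src/tools/telemetry":
#           "https://src.chromium.org/chrome/trunk/src/tools/telemetry",
#   }
#
#   deps_includes = {
#       "src/tools/perf/DEPS":
#           "https://src.chromium.org/chrome/trunk/src/tools/perf/DEPS",
#   }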

# Link to file containing the 'davclient' WebDAV client library.
_DAVCLIENT_URL = ('https://src.chromium.org/viewvc/chrome/trunk/src/tools/' +
                  'telemetry/third_party/davclient/davclient.py')

# Dummy module for davclient, filled in by
# _download_and_import_davclient_module().
_davclient = None


def _download_and_import_davclient_module():
  """Dynamically imports the davclient helper library."""
  global _davclient
  davclient_src = urllib.urlopen(_DAVCLIENT_URL).read()
  _davclient = imp.new_module('davclient')
  exec davclient_src in _davclient.__dict__


class DAVClientWrapper(object):
  """Knows how to retrieve subdirectories and files from WebDAV/SVN servers."""

  def __init__(self, root_url):
    """Initializes the wrapper for the SVN/WebDAV server at root_url.

    Args:
      root_url: string URL of the SVN/WebDAV server.
    """
    self.root_url = root_url
    self.client = _davclient.DAVClient(root_url)

  @staticmethod
  def __norm_path_keys(dict_with_path_keys):
    """Returns a dictionary with os.path.normpath called on every key."""
    return dict((os.path.normpath(k), v) for (k, v) in
                dict_with_path_keys.items())

  def GetDirList(self, path):
    """Returns string names of all files and subdirs of path on the server."""
    props = self.__norm_path_keys(self.client.propfind(path, depth=1))
    # propfind with depth=1 also lists the queried path itself; drop it so
    # only the children remain.
    del props[os.path.normpath(path)]
    return [os.path.basename(p) for p in props.keys()]

  def IsFile(self, path):
    """Returns True if the path is a file on the server, False if directory."""
    props = self.__norm_path_keys(self.client.propfind(path, depth=1))
    # Directories report a WebDAV 'resourcetype'; plain files report none.
    return props[os.path.normpath(path)]['resourcetype'] is None

  def Traverse(self, src_path, dst_path):
    """Walks the directory hierarchy at src_path and downloads all files.

    Recursively walks src_path and saves all files and subfolders into
    dst_path.

    Args:
      src_path: string path on the SVN server to save (absolute path on the
          server).
      dst_path: string local path (relative or absolute) to save to.
    """
    if self.IsFile(src_path):
      if not os.path.exists(os.path.dirname(dst_path)):
        logging.info('Creating %s', os.path.dirname(dst_path))
        os.makedirs(os.path.dirname(dst_path))
      logging.info('Saving %s to %s', self.root_url + src_path, dst_path)
      urllib.urlretrieve(self.root_url + src_path, dst_path)
    else:
      for subdir in self.GetDirList(src_path):
        self.Traverse(os.path.join(src_path, subdir),
                      os.path.join(dst_path, subdir))
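
# Usage sketch for DAVClientWrapper (hypothetical server URL and paths, for
# illustration only): mirror one directory tree from an SVN/WebDAV server to a
# local path. The davclient module must be imported first, since __init__
# relies on it.
#
#   _download_and_import_davclient_module()
#   client = DAVClientWrapper('https://server.example')
#   client.Traverse('/svn/trunk/tools/telemetry', '/tmp/telemetry')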


def ListAllDepsPaths(deps_content):
  """Recursively returns a list of all paths indicated in this deps file.

  Note that this discards information about where path dependencies come from,
  so this is only useful in the context of a Chromium source checkout that has
  already fetched all dependencies.

  Args:
    deps_content: String containing deps information to be evaluated, in the
        format given in the header of this file.

  Returns:
    A list of string paths starting under src that are required by the given
    deps file and all of its sub-dependencies. This amounts to the keys of the
    'deps' dictionary.
  """
  chrome_root = os.path.dirname(__file__)
  while os.path.basename(chrome_root) != 'src':
    chrome_root = os.path.abspath(os.path.join(chrome_root, '..'))
  deps = imp.new_module('deps')
  exec deps_content in deps.__dict__

  deps_paths = deps.deps.keys()

  if hasattr(deps, 'deps_includes'):
    for path in deps.deps_includes.keys():
      # Paths in deps_includes are relative to the parent of the src checkout;
      # localize them before reading the included DEPS file.
      path = os.path.join(chrome_root, '..', path)
      deps_paths = deps_paths + ListAllDepsPaths(open(path).read())

  return deps_paths
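
# For illustration (hypothetical content, not part of the original file):
# given deps_content such as
#
#   deps = {
#       "src/tools/telemetry": "https://server.example/svn/trunk/telemetry",
#       "src/third_party/webpagereplay": "https://server.example/svn/wpr",
#   }
#
# ListAllDepsPaths(deps_content) returns the keys of the 'deps' dictionary,
# ['src/tools/telemetry', 'src/third_party/webpagereplay'] (in no particular
# order), plus, recursively, the keys of any DEPS files named in an optional
# 'deps_includes' dictionary.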


def DownloadDepsURL(destination_dir, url):
  """Wrapper around DownloadDeps that takes a string URL to the deps file.

  Args:
    destination_dir: String path to local directory to download files into.
    url: URL of deps file (see DownloadDeps for format).
  """
  logging.warning('Downloading deps from %s...', url)
  DownloadDeps(destination_dir, urllib.urlopen(url).read())


def DownloadDeps(destination_dir, deps_content):
  """Saves all the dependencies listed in deps_content to destination_dir.

  Reads deps_content, assuming the contents are in the simple DEPS-like file
  format specified in the header of this file, then downloads all
  files/directories listed to destination_dir.

  Args:
    destination_dir: String path to directory to download files into.
    deps_content: String containing deps information to be evaluated.
  """
  # TODO(wiltzius): Add a parameter for which revision to pull.
  _download_and_import_davclient_module()

  deps = imp.new_module('deps')
  exec deps_content in deps.__dict__

  for dst_path, src_path in deps.deps.iteritems():
    full_dst_path = os.path.join(destination_dir, dst_path)
    parsed_url = urlparse.urlparse(src_path)
    root_url = parsed_url.scheme + '://' + parsed_url.netloc

    dav_client = DAVClientWrapper(root_url)
    dav_client.Traverse(parsed_url.path, full_dst_path)

  if hasattr(deps, 'deps_includes'):
    for url in deps.deps_includes.values():
      DownloadDepsURL(destination_dir, url)
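

if __name__ == '__main__':
  # Minimal usage sketch, not part of the original module: fetch everything
  # listed in a bootstrap DEPS file at a given URL into a local directory.
  # Both arguments are supplied by the caller; no defaults are assumed here.
  import sys
  logging.basicConfig(level=logging.INFO)
  if len(sys.argv) != 3:
    sys.stderr.write('Usage: bootstrap.py <destination_dir> <deps_file_url>\n')
    sys.exit(1)
  DownloadDepsURL(sys.argv[1], sys.argv[2])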