# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# These are fake fetchers that are used for testing and the preview server.
# They return canned responses for URLs. url_fetcher_fake.py uses the fake
# fetchers if other URL fetching APIs are unavailable.
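
# A minimal usage sketch (illustrative; the real call sites are the tests and
# the preview server): calling ConfigureFakeFetchers() below registers these
# fakes with url_fetcher_fake, after which a fetch of e.g.
# url_constants.OMAHA_PROXY_URL returns the canned
# test_data/branch_utility/first.json instead of hitting the network.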

import os
import re

import url_constants
import url_fetcher_fake
from extensions_paths import SERVER2
from path_util import IsDirectory
from test_util import ReadFile, ChromiumPath

# TODO(kalman): Investigate why logging in this class implies that the server
# isn't properly caching some fetched files; often it fetches the same file
# 10+ times. This may be a test anomaly.

def _ReadTestData(*path, **read_args):
  return ReadFile(SERVER2, 'test_data', *path, **read_args)


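# Base class for the fakes below: provides the local-filesystem helpers
# (listdir, isdir, mtime) that the directory-listing fakes use to build their
# canned responses from the local checkout.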
class _FakeFetcher(object):
  def _ListDir(self, path):
    return os.listdir(path)

  def _IsDir(self, path):
    return os.path.isdir(path)

  def _Stat(self, path):
    return int(os.stat(path).st_mtime)


class _FakeOmahaProxy(_FakeFetcher):
  def fetch(self, url):
    return _ReadTestData('branch_utility', 'first.json')


class _FakeOmahaHistory(_FakeFetcher):
  def fetch(self, url):
    return _ReadTestData('branch_utility', 'second.json')


_SVN_URL_TO_PATH_PATTERN = re.compile(
    r'^.*chrome/.*(trunk|branches/.*)/src/?([^?]*).*?')
def _ExtractPathFromSvnUrl(url):
  return _SVN_URL_TO_PATH_PATTERN.match(url).group(2)
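# For illustration (hypothetical URL, not necessarily one the tests use):
# 'http://src.chromium.org/chrome/trunk/src/chrome/app/theme/OWNERS' yields
# group(2) == 'chrome/app/theme/OWNERS', i.e. the path relative to src/.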


class _FakeSubversionServer(_FakeFetcher):
  def fetch(self, url):
    path = _ExtractPathFromSvnUrl(url)
    if IsDirectory(path):
      html = ['<html>Revision 000000']
      try:
        for f in self._ListDir(ChromiumPath(path)):
          if f.startswith('.'):
            continue
          if self._IsDir(ChromiumPath(path, f)):
            html.append('<a>' + f + '/</a>')
          else:
            html.append('<a>' + f + '</a>')
        html.append('</html>')
        return '\n'.join(html)
      except OSError:
        return None
    try:
      return ReadFile(path)
    except IOError:
      return None


class _FakeViewvcServer(_FakeFetcher):
  def fetch(self, url):
    path = ChromiumPath(_ExtractPathFromSvnUrl(url))
    if self._IsDir(path):
      html = ['<table><tbody><tr>...</tr>']
      # The version of the directory.
      dir_stat = self._Stat(path)
      html.append('<tr>')
      html.append('<td>Directory revision:</td>')
      html.append('<td><a>%s</a><a></a></td>' % dir_stat)
      html.append('</tr>')
      # The version of each file.
      for f in self._ListDir(path):
        if f.startswith('.'):
          continue
        html.append('<tr>')
        html.append('  <td><a>%s%s</a></td>' % (
            f, '/' if self._IsDir(os.path.join(path, f)) else ''))
        html.append('  <td><a><strong>%s</strong></a></td>' %
                    self._Stat(os.path.join(path, f)))
        html.append('<td></td><td></td><td></td>')
        html.append('</tr>')
      html.append('</tbody></table>')
      return '\n'.join(html)
    try:
      return ReadFile(path)
    except IOError:
      return None


class _FakeGithubStat(_FakeFetcher):
  def fetch(self, url):
    return '{ "sha": 0 }'


class _FakeGithubZip(_FakeFetcher):
  def fetch(self, url):
    return _ReadTestData('github_file_system', 'apps_samples.zip', mode='rb')


class _FakeRietveldAPI(_FakeFetcher):
  def __init__(self):
    self._base_pattern = re.compile(r'.*/(api/.*)')

  def fetch(self, url):
    return _ReadTestData(
        'rietveld_patcher', self._base_pattern.match(url).group(1), 'json')


class _FakeRietveldTarball(_FakeFetcher):
  def __init__(self):
    self._base_pattern = re.compile(r'.*/(tarball/\d+/\d+)')

  def fetch(self, url):
    return _ReadTestData(
        'rietveld_patcher', self._base_pattern.match(url).group(1) + '.tar.bz2',
        mode='rb')


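# Note: the keys below are URL patterns (regular expressions); url_fetcher_fake
# matches incoming fetch URLs against them to pick the paired fake fetcher.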
def ConfigureFakeFetchers():
  '''Configure the fake fetcher paths relative to the docs directory.
  '''
  url_fetcher_fake.ConfigureFakeUrlFetch({
    url_constants.OMAHA_HISTORY: _FakeOmahaHistory(),
    url_constants.OMAHA_PROXY_URL: _FakeOmahaProxy(),
    '%s/.*' % url_constants.SVN_URL: _FakeSubversionServer(),
    '%s/.*' % url_constants.VIEWVC_URL: _FakeViewvcServer(),
    '%s/.*/commits/.*' % url_constants.GITHUB_REPOS: _FakeGithubStat(),
    '%s/.*/zipball' % url_constants.GITHUB_REPOS: _FakeGithubZip(),
    '%s/api/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldAPI(),
    '%s/tarball/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldTarball(),
  })