1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 # These are fake fetchers that are used for testing and the preview server.
6 # They return canned responses for URLs. appengine_wrappers.py uses the fake
7 # fetchers if the App Engine imports fail.
import os
import re

import appengine_wrappers
import url_constants
from extensions_paths import SERVER2
from test_util import ReadFile, ChromiumPath
18 # TODO(kalman): Investigate why logging in this class implies that the server
19 # isn't properly caching some fetched files; often it fetches the same file
20 # 10+ times. This may be a test anomaly.
def _ReadTestData(*path, **read_args):
  '''Reads a file under server2/test_data/, forwarding |read_args| to
  ReadFile.
  '''
  full_path = (SERVER2, 'test_data') + path
  return ReadFile(*full_path, **read_args)
27 class _FakeFetcher(object):
28 def _ListDir(self
, path
):
29 return os
.listdir(path
)
31 def _IsDir(self
, path
):
32 return os
.path
.isdir(path
)
34 def _Stat(self
, path
):
35 return int(os
.stat(path
).st_mtime
)
class _FakeOmahaProxy(_FakeFetcher):
  '''Serves a canned response in place of the omahaproxy branch data.'''

  # Fix: the 'def fetch' header was missing, leaving a bare return in the
  # class body. Signature matches the other fetchers' fetch(self, url).
  def fetch(self, url):
    # |url| is ignored; the same canned JSON is always returned.
    return _ReadTestData('branch_utility', 'first.json')
class _FakeOmahaHistory(_FakeFetcher):
  '''Serves a canned response in place of the omahaproxy history data.'''

  # Fix: the 'def fetch' header was missing, leaving a bare return in the
  # class body. Signature matches the other fetchers' fetch(self, url).
  def fetch(self, url):
    # |url| is ignored; the same canned JSON is always returned.
    return _ReadTestData('branch_utility', 'second.json')
48 _SVN_URL_TO_PATH_PATTERN
= re
.compile(
49 r
'^.*chrome/.*(trunk|branches/.*)/src/?([^?]*).*?')
50 def _ExtractPathFromSvnUrl(url
):
51 return _SVN_URL_TO_PATH_PATTERN
.match(url
).group(2)
class _FakeSubversionServer(_FakeFetcher):
  '''Fabricates an SVN-server-style HTML directory listing from the local
  checkout.
  '''

  # Fix: the 'def fetch' header and the 'else:' between the two appends were
  # missing; as written, both <a> lines would have been emitted for every
  # entry. Directories get a trailing '/', plain files do not.
  def fetch(self, url):
    path = ChromiumPath(_ExtractPathFromSvnUrl(url))
    html = ['<html>Revision 000000']
    for name in self._ListDir(path):
      if self._IsDir(os.path.join(path, name)):
        html.append('<a>' + name + '/</a>')
      else:
        html.append('<a>' + name + '</a>')
    html.append('</html>')
    return '\n'.join(html)
    # NOTE(review): this copy looks truncated — the original may guard on
    # self._IsDir(path) and fall back to reading the file directly when
    # |path| is not a directory (as _FakeViewvcServer does). Confirm against
    # upstream before relying on file fetches here.
class _FakeViewvcServer(_FakeFetcher):
  '''Fabricates a viewvc-style HTML table (revision = mtime) for a directory
  in the local checkout, or serves a file's contents directly.
  '''

  # Fix: the 'def fetch' header and the 'if self._IsDir(path):' guard were
  # missing; without the guard the trailing 'return ReadFile(path)' was
  # unreachable.
  def fetch(self, url):
    path = ChromiumPath(_ExtractPathFromSvnUrl(url))
    if self._IsDir(path):
      html = ['<table><tbody><tr>...</tr>']
      # The version of the directory.
      dir_stat = self._Stat(path)
      html.append('<td>Directory revision:</td>')
      html.append('<td><a>%s</a><a></a></td>' % dir_stat)
      # The version of each file.
      for f in self._ListDir(path):
        html.append(' <td><a>%s%s</a></td>' % (
            f, '/' if self._IsDir(os.path.join(path, f)) else ''))
        html.append(' <td><a><strong>%s</strong></a></td>' %
                    self._Stat(os.path.join(path, f)))
        html.append('<td></td><td></td><td></td>')
      html.append('</tbody></table>')
      return '\n'.join(html)
    # Not a directory: serve the file's contents as the response body.
    return ReadFile(path)
class _FakeGithubStat(_FakeFetcher):
  '''Stands in for the GitHub commits endpoint with a fixed JSON blob.'''

  def fetch(self, url):
    # |url| is ignored; a constant sha is returned for every request.
    canned_response = '{ "sha": 0 }'
    return canned_response
class _FakeGithubZip(_FakeFetcher):
  '''Stands in for the GitHub zipball endpoint with a canned archive.'''

  def fetch(self, url):
    # Zip data is binary, hence mode='rb'. |url| is ignored.
    archive = _ReadTestData('github_file_system', 'apps_samples.zip',
                            mode='rb')
    return archive
class _FakeRietveldAPI(_FakeFetcher):
  '''Serves canned Rietveld API JSON responses from test data.'''

  # Fix: the 'def __init__(self):' header was missing, leaving the
  # attribute assignment bare in the class body.
  def __init__(self):
    # Extracts the trailing 'api/...' portion of the request URL, which is
    # used as the test-data path.
    self._base_pattern = re.compile(r'.*/(api/.*)')

  def fetch(self, url):
    return _ReadTestData(
        'rietveld_patcher', self._base_pattern.match(url).group(1), 'json')
class _FakeRietveldTarball(_FakeFetcher):
  '''Serves a canned .tar.bz2 in place of the Rietveld tarball endpoint.'''

  # Fix: the 'def __init__(self):' header was missing, leaving the
  # attribute assignment bare in the class body.
  def __init__(self):
    # Extracts the 'tarball/<issue>/<patchset>' portion of the request URL,
    # which selects the test-data archive.
    self._base_pattern = re.compile(r'.*/(tarball/\d+/\d+)')

  def fetch(self, url):
    # NOTE(review): the final argument was truncated in this copy after the
    # trailing comma; mode='rb' matches the binary read used for the GitHub
    # zip above — confirm against upstream.
    return _ReadTestData(
        'rietveld_patcher', self._base_pattern.match(url).group(1) + '.tar.bz2',
        mode='rb')
136 def ConfigureFakeFetchers():
137 '''Configure the fake fetcher paths relative to the docs directory.
139 appengine_wrappers
.ConfigureFakeUrlFetch({
140 url_constants
.OMAHA_HISTORY
: _FakeOmahaHistory(),
141 url_constants
.OMAHA_PROXY_URL
: _FakeOmahaProxy(),
142 '%s/.*' % url_constants
.SVN_URL
: _FakeSubversionServer(),
143 '%s/.*' % url_constants
.VIEWVC_URL
: _FakeViewvcServer(),
144 '%s/.*/commits/.*' % url_constants
.GITHUB_REPOS
: _FakeGithubStat(),
145 '%s/.*/zipball' % url_constants
.GITHUB_REPOS
: _FakeGithubZip(),
146 '%s/api/.*' % url_constants
.CODEREVIEW_SERVER
: _FakeRietveldAPI(),
147 '%s/tarball/.*' % url_constants
.CODEREVIEW_SERVER
: _FakeRietveldTarball(),