# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# These are fake fetchers that are used for testing and the preview server.
# They return canned responses for URLs. url_fetcher_fake.py uses the fake
# fetchers if other URL fetching APIs are unavailable.

import base64
import json
import os
import re

import url_fetcher_fake
from extensions_paths import SERVER2
from path_util import IsDirectory
from test_util import ReadFile, ChromiumPath
import url_constants


# TODO(kalman): Investigate why logging in this class implies that the server
# isn't properly caching some fetched files; often it fetches the same file
# 10+ times. This may be a test anomaly.


def _ReadTestData(*path, **read_args):
  return ReadFile(SERVER2, 'test_data', *path, **read_args)


class _FakeFetcher(object):
  def _ListDir(self, path):
    return os.listdir(path)

  def _IsDir(self, path):
    return os.path.isdir(path)

  def _Stat(self, path):
    return int(os.stat(path).st_mtime)


class _FakeOmahaProxy(_FakeFetcher):
  def fetch(self, url):
    return _ReadTestData('branch_utility', 'first.json')


class _FakeOmahaHistory(_FakeFetcher):
  def fetch(self, url):
    return _ReadTestData('branch_utility', 'second.json')


_SVN_URL_TO_PATH_PATTERN = re.compile(
    r'^.*chrome/.*(trunk|branches/.*)/src/?([^?]*).*?')
def _ExtractPathFromSvnUrl(url):
  return _SVN_URL_TO_PATH_PATTERN.match(url).group(2)
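
# Illustrative sketch of what the pattern above extracts: group(2) is the
# path below src/ in an old-style src.chromium.org SVN URL. The URL here is
# a hypothetical example, not taken from the test data:
#
#   _ExtractPathFromSvnUrl(
#       'http://src.chromium.org/chrome/trunk/src/chrome/common/extensions')
#   # -> 'chrome/common/extensions'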

class _FakeSubversionServer(_FakeFetcher):
  def fetch(self, url):
    path = _ExtractPathFromSvnUrl(url)
    if IsDirectory(path):
      html = ['<html>Revision 000000']
      try:
        for f in self._ListDir(ChromiumPath(path)):
          if f.startswith('.'):
            continue
          if self._IsDir(ChromiumPath(path, f)):
            html.append('<a>' + f + '/</a>')
          else:
            html.append('<a>' + f + '</a>')
        html.append('</html>')
        return '\n'.join(html)
      except OSError as e:
        return None
    try:
      return ReadFile(path)
    except IOError:
      return None


class _FakeViewvcServer(_FakeFetcher):
  def fetch(self, url):
    path = ChromiumPath(_ExtractPathFromSvnUrl(url))
    if self._IsDir(path):
      html = ['<table><tbody><tr>...</tr>']
      # The version of the directory.
      dir_stat = self._Stat(path)
      html.append('<tr>')
      html.append('<td>Directory revision:</td>')
      html.append('<td><a>%s</a><a></a></td>' % dir_stat)
      html.append('</tr>')
      # The version of each file.
      for f in self._ListDir(path):
        if f.startswith('.'):
          continue
        html.append('<tr>')
        html.append(' <td><a>%s%s</a></td>' % (
            f, '/' if self._IsDir(os.path.join(path, f)) else ''))
        html.append(' <td><a><strong>%s</strong></a></td>' %
            self._Stat(os.path.join(path, f)))
        html.append('<td></td><td></td><td></td>')
        html.append('</tr>')
      html.append('</tbody></table>')
      return '\n'.join(html)
    try:
      return ReadFile(path)
    except IOError:
      return None


class _FakeGithubStat(_FakeFetcher):
  def fetch(self, url):
    return '{ "sha": 0 }'


class _FakeGithubZip(_FakeFetcher):
  def fetch(self, url):
    return _ReadTestData('github_file_system', 'apps_samples.zip', mode='rb')


class _FakeRietveldAPI(_FakeFetcher):
  def __init__(self):
    self._base_pattern = re.compile(r'.*/(api/.*)')

  def fetch(self, url):
    return _ReadTestData(
        'rietveld_patcher', self._base_pattern.match(url).group(1), 'json')


class _FakeRietveldTarball(_FakeFetcher):
  def __init__(self):
    self._base_pattern = re.compile(r'.*/(tarball/\d+/\d+)')

  def fetch(self, url):
    return _ReadTestData(
        'rietveld_patcher', self._base_pattern.match(url).group(1) + '.tar.bz2',
        mode='rb')

def ConfigureFakeFetchers():
  '''Configure the fake fetcher paths relative to the docs directory.
  '''
  url_fetcher_fake.ConfigureFakeUrlFetch({
    url_constants.OMAHA_HISTORY: _FakeOmahaHistory(),
    url_constants.OMAHA_PROXY_URL: _FakeOmahaProxy(),
    '%s/.*' % url_constants.SVN_URL: _FakeSubversionServer(),
    '%s/.*' % url_constants.VIEWVC_URL: _FakeViewvcServer(),
    '%s/.*/commits/.*' % url_constants.GITHUB_REPOS: _FakeGithubStat(),
    '%s/.*/zipball' % url_constants.GITHUB_REPOS: _FakeGithubZip(),
    '%s/api/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldAPI(),
    '%s/tarball/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldTarball(),
  })
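
# Minimal usage sketch, kept as a comment so nothing runs at import time.
# It shows the intended flow under the assumption that tests (or the preview
# server) call ConfigureFakeFetchers() before any URL is fetched through
# url_fetcher_fake; the exact test harness around it is not shown here:
#
#   import fake_fetchers
#   fake_fetchers.ConfigureFakeFetchers()
#   # From this point, fetches routed through url_fetcher_fake return the
#   # canned responses registered above, e.g. a request matching
#   # url_constants.OMAHA_PROXY_URL yields test_data/branch_utility/first.json.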