2 # Copyright 2013 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 # Run build_server so that files needed by tests are copied to the local
7 # third_party directory.
20 from branch_utility
import BranchUtility
21 from chroot_file_system
import ChrootFileSystem
22 from extensions_paths
import (
23 CONTENT_PROVIDERS
, CHROME_EXTENSIONS
, PUBLIC_TEMPLATES
)
24 from fake_fetchers
import ConfigureFakeFetchers
25 from special_paths
import SITE_VERIFICATION_FILE
26 from handler
import Handler
27 from link_error_detector
import LinkErrorDetector
, StringifyBrokenLinks
28 from local_file_system
import LocalFileSystem
29 from local_renderer
import LocalRenderer
30 from path_util
import AssertIsValid
31 from servlet
import Request
32 from third_party
.json_schema_compiler
import json_parse
33 from test_util
import (
34 ChromiumPath
, DisableLogging
, EnableLogging
, ReadFile
, Server2Path
)
# Arguments set up if __main__ specifies them.
# When not None, this is the list of file paths given on the command line;
# testExplicitFiles renders only these, and testUpdateAndPublicFiles skips
# its full-site pass.
_EXPLICIT_TEST_FILES = None
43 def _ToPosixPath(os_path
):
44 return os_path
.replace(os
.sep
, '/')
47 def _FilterHidden(paths
):
48 '''Returns a list of the non-hidden paths from |paths|.
50 # Hidden files start with a '.' but paths like './foo' and '../foo' are not
52 return [path
for path
in paths
if (not path
.startswith('.')) or
53 path
.startswith('./') or
54 path
.startswith('../')]
def _GetPublicFiles():
  '''Gets all public file paths mapped to their contents.

  Returns a dict of server request path -> file content, covering every
  (non-hidden) file under each directory that a 'chromium' content provider
  serves from.
  '''
  def walk(path, prefix=''):
    # Maps every non-hidden file under |path| to its contents, keyed by
    # |prefix| joined with the file's path relative to |path|.
    path = ChromiumPath(path)
    # Fix: this accumulator was never initialized (and never returned), so
    # the function raised NameError on the first file it saw.
    public_files = {}
    for root, dirs, files in os.walk(path, topdown=True):
      relative_root = root[len(path):].lstrip(os.path.sep)
      # Prune hidden directories in place; with topdown=True os.walk then
      # skips descending into them.
      dirs[:] = _FilterHidden(dirs)
      for filename in _FilterHidden(files):
        with open(os.path.join(root, filename), 'r') as f:
          request_path = posixpath.join(prefix, relative_root, filename)
          public_files[request_path] = f.read()
    return public_files

  # Public file locations are defined in content_providers.json, sort of. Epic
  # hack to pull them out; list all the files from the directories that
  # Chromium content providers ask for.
  public_files = {}
  content_providers = json_parse.Parse(ReadFile(CONTENT_PROVIDERS))
  for content_provider in content_providers.itervalues():
    if 'chromium' in content_provider:
      public_files.update(walk(content_provider['chromium']['dir'],
                               prefix=content_provider['serveFrom']))
  return public_files
class IntegrationTest(unittest.TestCase):
  '''End-to-end test: runs the cache update, then renders every public file
  offline and checks statuses, redirects, and localized samples.
  '''

  def setUp(self):
    # Fix: ConfigureFakeFetchers() was a bare class-level statement; it must
    # run before each test so network fetches are served from local fakes.
    ConfigureFakeFetchers()

  @EnableLogging('info')
  def testUpdateAndPublicFiles(self):
    '''Runs update then requests every public file. Update needs to be run
    first because the public file requests are offline.
    '''
    if _EXPLICIT_TEST_FILES is not None:
      # Fix: guard had no body. Only explicit files were requested on the
      # command line; skip the exhaustive pass.
      return

    print('Running update...')
    start_time = time.time()
    update_cache.UpdateCache()
    print('Took %s seconds' % (time.time() - start_time))

    # TODO(kalman): Re-enable this, but it takes about an hour at the moment,
    # presumably because every page now has a lot of links on it from the
    # top nav.

    #print("Checking for broken links...")
    #start_time = time.time()
    #link_error_detector = LinkErrorDetector(
    #    # TODO(kalman): Use of ChrootFileSystem here indicates a hack. Fix.
    #    ChrootFileSystem(LocalFileSystem.Create(), CHROME_EXTENSIONS),
    #    lambda path: Handler(Request.ForTest(path)).Get(),
    #    'templates/public',
    #    ('extensions/index.html', 'apps/about_apps.html'))

    #broken_links = link_error_detector.GetBrokenLinks()
    #if broken_links:
    #  print('Found %d broken links.' % (
    #      len(broken_links)))
    #  print(StringifyBrokenLinks(broken_links))

    #broken_links_set = set(broken_links)

    #known_broken_links_path = os.path.join(
    #    Server2Path('known_broken_links.json'))
    #try:
    #  with open(known_broken_links_path, 'r') as f:
    #    # The JSON file converts tuples and sets into lists, and for this
    #    # set union/difference logic they need to be converted back.
    #    known_broken_links = set(tuple(item) for item in json.load(f))
    #except IOError:
    #  known_broken_links = set()

    #newly_broken_links = broken_links_set - known_broken_links
    #fixed_links = known_broken_links - broken_links_set

    #print('Took %s seconds.' % (time.time() - start_time))

    #print('Searching for orphaned pages...')
    #start_time = time.time()
    #orphaned_pages = link_error_detector.GetOrphanedPages()
    #if orphaned_pages:
    #  # TODO(jshumway): Test should fail when orphaned pages are detected.
    #  print('Found %d orphaned pages:' % len(orphaned_pages))
    #  for page in orphaned_pages:
    #    print(page)
    #print('Took %s seconds.' % (time.time() - start_time))

    public_files = _GetPublicFiles()

    print('Rendering %s public files...' % len(public_files.keys()))
    start_time = time.time()
    try:
      for path, content in public_files.iteritems():
        AssertIsValid(path)
        if path.endswith('redirects.json'):
          # Fix: this branch had no body; redirects.json files are
          # configuration, not renderable pages.
          continue

        # The non-example html and md files are served without their file
        # extensions.
        path_without_ext, ext = posixpath.splitext(path)
        if (ext in ('.html', '.md') and
            '/examples/' not in path and
            path != SITE_VERIFICATION_FILE):
          path = path_without_ext

        def check_result(response):
          self.assertEqual(200, response.status,
              'Got %s when rendering %s' % (response.status, path))
          # This is reaaaaally rough since usually these will be tiny templates
          # that render large files. At least it'll catch zero-length responses.
          self.assertTrue(len(response.content) >= len(content),
              'Rendered content length was %s vs template content length %s '
              'when rendering %s' % (len(response.content), len(content), path))

        # TODO(kalman): Hack to avoid failing redirects like extensions/index
        # to extensions. Better fix would be to parse or whitelist the
        # redirects.json files as part of this test.
        if not path.endswith('/index'):
          check_result(Handler(Request.ForTest(path)).Get())

        if path.startswith(('apps/', 'extensions/')):
          # Make sure that adding the .html will temporarily redirect to
          # the path without the .html for APIs and articles.
          if '/examples/' not in path:
            redirect_response = Handler(Request.ForTest(path + '.html')).Get()
            # Fix: this comparison had no assertEqual call wrapping it.
            # (path, False) == (destination, not permanent).
            self.assertEqual(
                ('/' + path, False), redirect_response.GetRedirect(),
                '%s.html did not (temporarily) redirect to %s (status %s)' %
                    (path, path, redirect_response.status))

          # Make sure including a channel will permanently redirect to the same
          # path without a channel.
          for channel in BranchUtility.GetAllChannelNames():
            redirect_response = Handler(
                Request.ForTest(posixpath.join(channel, path))).Get()
            # Fix: assertEqual call was missing here as well.
            # (path, True) == (destination, permanent).
            self.assertEqual(
                ('/' + path, True),
                redirect_response.GetRedirect(),
                '%s/%s did not (permanently) redirect to %s (status %s)' %
                    (channel, path, path, redirect_response.status))

        # Samples are internationalized, test some locales.
        if path.endswith('/samples'):
          for lang in ('en-US', 'es', 'ar'):
            # Fix: the request was missing its |path| argument.
            check_result(Handler(Request.ForTest(
                path,
                headers={'Accept-Language': '%s;q=0.8' % lang})).Get())
    finally:
      # Always report timing, even when an assertion above fails.
      print('Took %s seconds' % (time.time() - start_time))

    # NOTE(review): the rebase flow below depends on the disabled link
    # checking above, so it stays commented out with it.
    # print('Rebasing broken links with %s newly broken and %s fixed links.' %
    #     (len(newly_broken_links), len(fixed_links)))
    # with open(known_broken_links_path, 'w') as f:
    #   json.dump(broken_links, f,
    #             indent=2, separators=(',', ': '), sort_keys=True)
    # if fixed_links or newly_broken_links:
    #   print('**********************************************\n'
    #         'CHANGE DETECTED IN BROKEN LINKS WITHOUT REBASE\n'
    #         '**********************************************')
    #   print('Found %s broken links, and some have changed. '
    #         'If this is acceptable or expected then run %s with the --rebase '
    #         'option.' % (len(broken_links), os.path.split(__file__)[-1]))
    # print('%s existing broken links' % len(broken_links))
    # print('%s broken links have been fixed:' % len(fixed_links))
    # print(StringifyBrokenLinks(fixed_links))
    # if newly_broken_links:
    #   print('There are %s new broken links:' % len(newly_broken_links))
    #   print(StringifyBrokenLinks(newly_broken_links))
    #   self.fail('See logging for details.')

  # TODO(kalman): Move this test elsewhere, it's not an integration test.
  # Perhaps like "presubmit_tests" or something.
  def testExplicitFiles(self):
    '''Tests just the files in _EXPLICIT_TEST_FILES.
    '''
    if _EXPLICIT_TEST_FILES is None:
      # Fix: guard had no body; nothing to do without explicit files.
      return
    for filename in _EXPLICIT_TEST_FILES:
      print('Rendering %s...' % filename)
      start_time = time.time()
      try:
        response = LocalRenderer.Render(_ToPosixPath(filename))
        self.assertEqual(200, response.status)
        self.assertTrue(response.content != '')
      finally:
        # Always report timing, even on failure.
        print('Took %s seconds' % (time.time() - start_time))

    # TODO(jshumway): Check page for broken links (currently prohibited by the
    # time it takes to render the pages).

  @DisableLogging('warning')
  def testFileNotFound(self):
    # A request for a page that doesn't exist must 404.
    response = LocalRenderer.Render('/extensions/notfound')
    self.assertEqual(404, response.status)

  def testSiteVerificationFile(self):
    # The site verification file must be served as-is (it keeps its
    # extension; see the splitext exemption in testUpdateAndPublicFiles).
    response = LocalRenderer.Render('/' + SITE_VERIFICATION_FILE)
    self.assertEqual(200, response.status)
267 if __name__
== '__main__':
268 parser
= optparse
.OptionParser()
269 parser
.add_option('-a', '--all', action
='store_true', default
=False,
270 help='Render all pages, not just the one specified')
271 parser
.add_option('-r', '--rebase', action
='store_true', default
=False,
272 help='Rewrites the known_broken_links.json file with '
273 'the current set of broken links')
274 parser
.add_option('-v', '--verbose', action
='store_true', default
=False,
275 help='Show verbose output like currently broken links')
276 (opts
, args
) = parser
.parse_args()
278 _EXPLICIT_TEST_FILES
= args
279 _REBASE
= opts
.rebase
280 _VERBOSE
= opts
.verbose
281 # Kill sys.argv because we have our own flags.
282 sys
.argv
= [sys
.argv
[0]]