#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Run build_server so that files needed by tests are copied to the local
# third_party directory.
import build_server
build_server.main()

import json
import optparse
import os
import posixpath
import sys
import time
import unittest

from branch_utility import BranchUtility
from chroot_file_system import ChrootFileSystem
from extensions_paths import EXTENSIONS, PUBLIC_TEMPLATES
from fake_fetchers import ConfigureFakeFetchers
from handler import Handler
from link_error_detector import LinkErrorDetector, StringifyBrokenLinks
from local_file_system import LocalFileSystem
from local_renderer import LocalRenderer
from servlet import Request
from test_util import EnableLogging, DisableLogging, ChromiumPath

# Arguments set up if __main__ specifies them.
_EXPLICIT_TEST_FILES = None
_REBASE = False
_VERBOSE = False


def _ToPosixPath(os_path):
  return os_path.replace(os.sep, '/')
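
# For example, on Windows _ToPosixPath('docs\templates') returns
# 'docs/templates'; on POSIX systems, where os.sep is already '/', the call
# is a no-op.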


def _GetPublicFiles():
  '''Gets all public files mapped to their contents.
  '''
  public_path = ChromiumPath(PUBLIC_TEMPLATES)
  public_files = {}
  for path, dirs, files in os.walk(public_path, topdown=True):
    # Skip .svn metadata directories; mutating dirs in place prunes the walk.
    dirs[:] = [d for d in dirs if d != '.svn']
    relative_posix_path = _ToPosixPath(path[len(public_path):])
    for filename in files:
      with open(os.path.join(path, filename), 'r') as f:
        public_files['/'.join((relative_posix_path, filename))] = f.read()
  return public_files
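
# Note: the keys of the dict returned above are posix-style paths relative to
# the public template root and start with '/', e.g. '/extensions/index.html';
# the assert in testCronAndPublicFiles below relies on that leading slash.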


class IntegrationTest(unittest.TestCase):
  def setUp(self):
    ConfigureFakeFetchers()
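
  # ConfigureFakeFetchers() (in setUp above) replaces the server's network
  # fetchers with local fakes (see fake_fetchers), so the tests below can
  # run offline.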

  @EnableLogging('info')
  def testCronAndPublicFiles(self):
    '''Runs cron then requests every public file. Cron needs to be run first
    because the public file requests are offline.
    '''
    if _EXPLICIT_TEST_FILES is not None:
      return

    print('Running cron...')
    start_time = time.time()
    try:
      response = Handler(Request.ForTest('/_cron')).Get()
      self.assertEqual(200, response.status)
      self.assertEqual('Success', response.content.ToString())
    finally:
      print('Took %s seconds' % (time.time() - start_time))

    # TODO(kalman): Re-enable this, but it takes about an hour at the moment,
    # presumably because every page now has a lot of links on it from the
    # navigation.

    #print("Checking for broken links...")
    #start_time = time.time()
    #link_error_detector = LinkErrorDetector(
    #    # TODO(kalman): Use of ChrootFileSystem here indicates a hack. Fix.
    #    ChrootFileSystem(LocalFileSystem.Create(), EXTENSIONS),
    #    lambda path: Handler(Request.ForTest(path)).Get(),
    #    ('extensions/index.html', 'apps/about_apps.html'))

    #broken_links = link_error_detector.GetBrokenLinks()
    #if broken_links:
    #  print('Found %d broken links.' % (
    #      len(broken_links)))
    #  if _VERBOSE:
    #    print(StringifyBrokenLinks(broken_links))

    #broken_links_set = set(broken_links)

    #known_broken_links_path = os.path.join(
    #    sys.path[0], 'known_broken_links.json')
    #try:
    #  with open(known_broken_links_path, 'r') as f:
    #    # The JSON file converts tuples and sets into lists, and for this
    #    # set union/difference logic they need to be converted back.
    #    known_broken_links = set(tuple(item) for item in json.load(f))
    #except IOError:
    #  known_broken_links = set()

    #newly_broken_links = broken_links_set - known_broken_links
    #fixed_links = known_broken_links - broken_links_set

    #print('Took %s seconds.' % (time.time() - start_time))

    #print('Searching for orphaned pages...')
    #start_time = time.time()
    #orphaned_pages = link_error_detector.GetOrphanedPages()
    #if orphaned_pages:
    #  # TODO(jshumway): Test should fail when orphaned pages are detected.
    #  print('Found %d orphaned pages:' % len(orphaned_pages))
    #  for page in orphaned_pages:
    #    print(page)
    #print('Took %s seconds.' % (time.time() - start_time))

    public_files = _GetPublicFiles()

    print('Rendering %s public files...' % len(public_files.keys()))
    start_time = time.time()
    try:
      for path, content in public_files.iteritems():
        assert path.startswith('/')
        if path.endswith('redirects.json'):
          # Redirect configuration isn't a renderable page; skip it.
          continue

        def check_result(response):
          self.assertEqual(200, response.status,
              'Got %s when rendering %s' % (response.status, path))
          # This is reaaaaally rough since usually these will be tiny templates
          # that render large files. At least it'll catch zero-length responses.
          self.assertTrue(len(response.content) >= len(content),
              'Content was "%s" when rendering %s' % (response.content, path))

        check_result(Handler(Request.ForTest(path)).Get())

        # Make sure that leaving out the .html will temporarily redirect to
        # the path with the .html.
        if path.startswith(('/apps/', '/extensions/')):
          redirect_result = Handler(
              Request.ForTest(posixpath.splitext(path)[0])).Get()
          self.assertEqual((path, False), redirect_result.GetRedirect())
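          # For example: '/apps/about_apps' should redirect to
          # '/apps/about_apps.html', with False marking the redirect as
          # temporary.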

        # Make sure including a channel will permanently redirect to the same
        # path without a channel.
        for channel in BranchUtility.GetAllChannelNames():
          redirect_result = Handler(
              Request.ForTest('%s%s' % (channel, path))).Get()
          self.assertEqual((path, True), redirect_result.GetRedirect())
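        # For example, assuming 'stable' is among the names returned by
        # GetAllChannelNames(), 'stable/extensions/index.html' should
        # permanently redirect (True in the tuple) to '/extensions/index.html'.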

        # Samples are internationalized, test some locales.
        if path.endswith('/samples.html'):
          for lang in ['en-US', 'es', 'ar']:
            check_result(Handler(Request.ForTest(
                path,
                headers={'Accept-Language': '%s;q=0.8' % lang})).Get())
    finally:
      print('Took %s seconds' % (time.time() - start_time))

    #if _REBASE:
    #  print('Rebasing broken links with %s newly broken and %s fixed links.' %
    #        (len(newly_broken_links), len(fixed_links)))
    #  with open(known_broken_links_path, 'w') as f:
    #    json.dump(broken_links, f,
    #              indent=2, separators=(',', ': '), sort_keys=True)
    #else:
    #  if fixed_links or newly_broken_links:
    #    print('**********************************************\n'
    #          'CHANGE DETECTED IN BROKEN LINKS WITHOUT REBASE\n'
    #          '**********************************************')
    #    print('Found %s broken links, and some have changed. '
    #          'If this is acceptable or expected then run %s with the --rebase '
    #          'option.' % (len(broken_links), os.path.split(__file__)[-1]))
    #  elif broken_links:
    #    print('%s existing broken links' % len(broken_links))
    #  if fixed_links:
    #    print('%s broken links have been fixed:' % len(fixed_links))
    #    print(StringifyBrokenLinks(fixed_links))
    #  if newly_broken_links:
    #    print('There are %s new broken links:' % len(newly_broken_links))
    #    print(StringifyBrokenLinks(newly_broken_links))
    #    self.fail('See logging for details.')

  # TODO(kalman): Move this test elsewhere, it's not an integration test.
  # Perhaps name it something like "presubmit_tests".
  def testExplicitFiles(self):
    '''Tests just the files in _EXPLICIT_TEST_FILES.
    '''
    if _EXPLICIT_TEST_FILES is None:
      return

    for filename in _EXPLICIT_TEST_FILES:
      print('Rendering %s...' % filename)
      start_time = time.time()
      try:
        response = LocalRenderer.Render(_ToPosixPath(filename))
        self.assertEqual(200, response.status)
        self.assertTrue(response.content != '')
      finally:
        print('Took %s seconds' % (time.time() - start_time))

    # TODO(jshumway): Check page for broken links (currently prohibited by the
    # time it takes to render the pages).

  @DisableLogging('warning')
  def testFileNotFound(self):
    response = Handler(Request.ForTest('/extensions/notfound.html')).Get()
    self.assertEqual(404, response.status)


if __name__ == '__main__':
  parser = optparse.OptionParser()
  parser.add_option('-a', '--all', action='store_true', default=False,
                    help='Render all pages, not just the one specified')
  parser.add_option('-r', '--rebase', action='store_true', default=False,
                    help='Rewrites the known_broken_links.json file with '
                         'the current set of broken links')
  parser.add_option('-v', '--verbose', action='store_true', default=False,
                    help='Show verbose output like currently broken links')
  (opts, args) = parser.parse_args()
  if not opts.all:
    _EXPLICIT_TEST_FILES = args
  _REBASE = opts.rebase
  _VERBOSE = opts.verbose
  # Kill sys.argv because we have our own flags.
  sys.argv = [sys.argv[0]]
  unittest.main()
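
# Usage, inferred from the flag handling above:
#   integration_test.py -a           run cron and render every public file
#   integration_test.py <path>...    render only the named files via
#                                    LocalRenderer (testExplicitFiles)
#   --rebase and --verbose only affect the link-error checking, which is
#   currently commented out.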