// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "ppapi/tests/testing_instance.h"

#include <algorithm>
#include <cstdlib>
#include <cstring>
#include <iomanip>
#include <map>
#include <sstream>
#include <string>
#include <vector>

#include "ppapi/cpp/core.h"
#include "ppapi/cpp/module.h"
#include "ppapi/cpp/var.h"
#include "ppapi/cpp/view.h"
#include "ppapi/tests/test_case.h"

TestCaseFactory* TestCaseFactory::head_ = NULL;

// Cookie value we use to signal "we're still working." See the comment above
// the class declaration for how this works.
static const char kProgressSignal[] = "...";

TestingInstance::TestingInstance(PP_Instance instance)
#if (defined __native_client__)
    : pp::Instance(instance),
#else
    : pp::InstancePrivate(instance),
#endif
      current_case_(NULL),
      executed_tests_(false),
      number_tests_executed_(0),
      nacl_mode_(false),
      websocket_port_(-1),
      ssl_server_port_(-1),
      remove_plugin_(true) {
  callback_factory_.Initialize(this);
}

TestingInstance::~TestingInstance() {
  if (current_case_)
    delete current_case_;
}
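
// Parses the <embed> arguments (mode, protocol, websocket_host,
// websocket_port, ssl_server_port, testcase) and creates the requested
// test case, accumulating any setup errors in errors_.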
bool TestingInstance::Init(uint32_t argc,
                           const char* argn[],
                           const char* argv[]) {
  for (uint32_t i = 0; i < argc; i++) {
    if (std::strcmp(argn[i], "mode") == 0) {
      if (std::strcmp(argv[i], "nacl") == 0)
        nacl_mode_ = true;
    } else if (std::strcmp(argn[i], "protocol") == 0) {
      protocol_ = argv[i];
    } else if (std::strcmp(argn[i], "websocket_host") == 0) {
      websocket_host_ = argv[i];
    } else if (std::strcmp(argn[i], "websocket_port") == 0) {
      websocket_port_ = atoi(argv[i]);
    } else if (std::strcmp(argn[i], "ssl_server_port") == 0) {
      ssl_server_port_ = atoi(argv[i]);
    }
  }
  // Create the proper test case from the argument.
  for (uint32_t i = 0; i < argc; i++) {
    if (std::strcmp(argn[i], "testcase") == 0) {
      if (argv[i][0] == '\0')
        break;
      current_case_ = CaseForTestName(argv[i]);
      test_filter_ = argv[i];
      if (!current_case_)
        errors_.append(std::string("Unknown test case ") + argv[i]);
      else if (!current_case_->Init())
        errors_.append(" Test case could not initialize.");
      return true;
    }
  }

  // In DidChangeView, we'll dump out a list of all available tests.
  return true;
}
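
// In the trusted (non-NaCl) build, expose the current test case's scriptable
// object to the page.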
#if !(defined __native_client__)
pp::Var TestingInstance::GetInstanceObject() {
  if (current_case_)
    return current_case_->GetTestObject();

  return pp::VarPrivate();
}
#endif

void TestingInstance::HandleMessage(const pp::Var& message_data) {
  if (current_case_)
    current_case_->HandleMessage(message_data);
}
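
// The first view change schedules test execution on the main thread; all view
// changes are also forwarded to the current test case.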
void TestingInstance::DidChangeView(const pp::View& view) {
  if (!executed_tests_) {
    executed_tests_ = true;
    pp::Module::Get()->core()->CallOnMainThread(
        0,
        callback_factory_.NewCallback(&TestingInstance::ExecuteTests));
  }
  if (current_case_)
    current_case_->DidChangeView(view);
}

bool TestingInstance::HandleInputEvent(const pp::InputEvent& event) {
  if (current_case_)
    return current_case_->HandleInputEvent(event);
  return false;
}

void TestingInstance::EvalScript(const std::string& script) {
  SendTestCommand("EvalScript", script);
}

void TestingInstance::SetCookie(const std::string& name,
                                const std::string& value) {
  SendTestCommand("SetCookie", name + "=" + value);
}
void TestingInstance::LogTest(const std::string& test_name,
                              const std::string& error_message,
                              PP_TimeTicks start_time) {
  current_test_name_ = test_name;

  // Compute the time to run the test and save it in a string for logging:
  PP_TimeTicks end_time(pp::Module::Get()->core()->GetTimeTicks());
  std::ostringstream number_stream;
  PP_TimeTicks elapsed_time(end_time - start_time);
  number_stream << std::fixed << std::setprecision(3) << elapsed_time;
  std::string time_string(number_stream.str());

  // Tell the browser we're still working.
  ReportProgress(kProgressSignal);

  number_tests_executed_++;

  std::string html;
  html.append("<div class=\"test_line\"><span class=\"test_name\">");
  html.append(test_name);
  html.append("</span> ");
  if (error_message.empty()) {
    html.append("<span class=\"pass\">PASS</span>");
  } else {
    html.append("<span class=\"fail\">FAIL</span>: <span class=\"err_msg\">");
    html.append(error_message);
    html.append("</span>");

    if (!errors_.empty())
      errors_.append(", ");  // Separator for different error messages.
    errors_.append(test_name + " FAIL: " + error_message);
  }
  html.append(" <span class=\"time\">(");
  html.append(time_string);
  html.append("s)</span>");
  html.append("</div>");
  LogHTML(html);

  std::string test_time;
  test_time.append(test_name);
  test_time.append(" finished in ");
  test_time.append(time_string);
  test_time.append(" seconds.");
  LogTestTime(test_time);

  current_test_name_.clear();
}

void TestingInstance::AppendError(const std::string& message) {
  if (!errors_.empty())
    errors_.append(", ");
  errors_.append(message);
}
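
// Runs the selected test case (or lists the available tests when none was
// chosen) and reports the final result back to the page.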
void TestingInstance::ExecuteTests(int32_t unused) {
  ReportProgress(kProgressSignal);

  // Clear the console.
  SendTestCommand("ClearConsole");

  if (!errors_.empty()) {
    // Catch initialization errors and output the current error string to
    // the console.
    LogError("Plugin initialization failed: " + errors_);
  } else if (!current_case_) {
    LogAvailableTests();
    errors_.append("FAIL: Only listed tests");
  } else {
    current_case_->RunTests(test_filter_);

    if (number_tests_executed_ == 0) {
      errors_.append("No tests executed. The test filter might be too "
                     "restrictive: '" + test_filter_ + "'.");
      LogError(errors_);
    }
    if (current_case_->skipped_tests().size()) {
      // TODO(dmichael): Convert all TestCases to run all tests in one fixture,
      //                 and enable this check. Currently, a lot of our tests
      //                 run 1 test per fixture, which is slow.
      /*
      errors_.append("Some tests were not listed and thus were not run. Make "
                     "sure all tests are passed in the test_case URL (even if "
                     "they are marked DISABLED_). Forgotten tests: ");
      std::set<std::string>::const_iterator iter =
          current_case_->skipped_tests().begin();
      for (; iter != current_case_->skipped_tests().end(); ++iter) {
        errors_.append(*iter);
        errors_.append(" ");
      }
      LogError(errors_);
      */
    }
    if (current_case_->remaining_tests().size()) {
      errors_.append("Some listed tests were not found in the TestCase. Check "
                     "the test names that were passed to make sure they match "
                     "tests in the TestCase. Unknown tests: ");
      std::map<std::string, bool>::const_iterator iter =
          current_case_->remaining_tests().begin();
      for (; iter != current_case_->remaining_tests().end(); ++iter) {
        errors_.append(iter->first);
        errors_.append(" ");
      }
      LogError(errors_);
    }
  }

  if (remove_plugin_)
    SendTestCommand("RemovePluginWhenFinished");
  std::string result(errors_);
  if (result.empty())
    result = "PASS";
  SendTestCommand("DidExecuteTests", result);
  // Note, DidExecuteTests may unload the plugin. We can't really do anything
  // after this point.
}
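
// Returns a new heap-allocated test case for the given test name, or NULL on
// failure (no registered TestCaseFactory matches the name's case prefix).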
TestCase* TestingInstance::CaseForTestName(const std::string& name) {
  std::string case_name = name.substr(0, name.find_first_of('_'));
  TestCaseFactory* iter = TestCaseFactory::head_;
  while (iter != NULL) {
    if (case_name == iter->name_)
      return iter->method_(this);
    iter = iter->next_;
  }
  return NULL;
}
void TestingInstance::SendTestCommand(const std::string& command) {
  std::string msg("TESTING_MESSAGE:");
  msg += command;
  PostMessage(pp::Var(msg));
}

void TestingInstance::SendTestCommand(const std::string& command,
                                      const std::string& params) {
  SendTestCommand(command + ":" + params);
}

void TestingInstance::LogAvailableTests() {
  // Print out a listing of all tests.
  std::vector<std::string> test_cases;
  TestCaseFactory* iter = TestCaseFactory::head_;
  while (iter != NULL) {
    test_cases.push_back(iter->name_);
    iter = iter->next_;
  }
  std::sort(test_cases.begin(), test_cases.end());

  std::string html;
  html.append("Available test cases: <dl>");
  for (size_t i = 0; i < test_cases.size(); ++i) {
    html.append("<dd><a href='?testcase=");
    html.append(test_cases[i]);
    if (nacl_mode_)
      html.append("&mode=nacl");
    html.append("'>");
    html.append(test_cases[i]);
    html.append("</a></dd>");
  }
  html.append("</dl>");
  html.append("<button onclick='RunAll()'>Run All Tests</button>");
  LogHTML(html);
}
void TestingInstance::LogError(const std::string& text) {
  std::string html;
  html.append("<span class=\"fail\">FAIL</span>: <span class=\"err_msg\">");
  html.append(text);
  html.append("</span>");
  LogHTML(html);
}

void TestingInstance::LogHTML(const std::string& html) {
  SendTestCommand("LogHTML", html);
}

void TestingInstance::ReportProgress(const std::string& progress_value) {
  SendTestCommand("ReportProgress", progress_value);
}

void TestingInstance::AddPostCondition(const std::string& script) {
  SendTestCommand("AddPostCondition", script);
}

void TestingInstance::LogTestTime(const std::string& test_time) {
  SendTestCommand("LogTestTime", test_time);
}
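
// Standard PPAPI module boilerplate: the browser calls pp::CreateModule(),
// and each plugin instance gets its own TestingInstance.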
class Module : public pp::Module {
 public:
  Module() : pp::Module() {}
  virtual ~Module() {}

  virtual pp::Instance* CreateInstance(PP_Instance instance) {
    return new TestingInstance(instance);
  }
};

namespace pp {

Module* CreateModule() {
  return new ::Module();
}

}  // namespace pp