1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
#include "ppapi/tests/testing_instance.h"

#include <algorithm>
#include <cstddef>
#include <cstdlib>
#include <cstring>
#include <iomanip>
#include <map>
#include <set>
#include <sstream>
#include <string>
#include <vector>

#include "ppapi/cpp/core.h"
#include "ppapi/cpp/module.h"
#include "ppapi/cpp/var.h"
#include "ppapi/cpp/view.h"
#include "ppapi/tests/test_case.h"
19 TestCaseFactory
* TestCaseFactory::head_
= NULL
;
21 // Cookie value we use to signal "we're still working." See the comment above
22 // the class declaration for how this works.
23 static const char kProgressSignal
[] = "...";
// Returns a new heap-allocated test case for the given test, or NULL on
// failure.
27 TestingInstance::TestingInstance(PP_Instance instance
)
28 #if (defined __native_client__)
29 : pp::Instance(instance
),
31 : pp::InstancePrivate(instance
),
34 executed_tests_(false),
35 number_tests_executed_(0),
39 remove_plugin_(true) {
40 callback_factory_
.Initialize(this);
// NOTE(review): the destructor body and its closing brace are truncated in
// this paste — presumably it released current_case_; confirm upstream.
43 TestingInstance::~TestingInstance() {
// Parses the plugin <embed> arguments (parallel argn/argv arrays) to
// configure the harness and to create the requested test case.
// NOTE(review): this paste is truncated — the second line of the signature
// (presumably the argn/argv parameters), several branch bodies, and many
// closing braces are missing. Comments below hedge accordingly.
48 bool TestingInstance::Init(uint32_t argc
,
// First pass: pick up harness configuration arguments.
51 for (uint32_t i
= 0; i
< argc
; i
++) {
52 if (std::strcmp(argn
[i
], "mode") == 0) {
// presumably sets a nacl-mode flag when the value is "nacl" — the
// assignment line is missing from this paste; TODO confirm.
53 if (std::strcmp(argv
[i
], "nacl") == 0)
55 } else if (std::strcmp(argn
[i
], "protocol") == 0) {
// (branch body missing from this paste)
57 } else if (std::strcmp(argn
[i
], "websocket_host") == 0) {
58 websocket_host_
= argv
[i
];
59 } else if (std::strcmp(argn
[i
], "websocket_port") == 0) {
60 websocket_port_
= atoi(argv
[i
]);
61 } else if (std::strcmp(argn
[i
], "ssl_server_port") == 0) {
62 ssl_server_port_
= atoi(argv
[i
]);
65 // Create the proper test case from the argument.
66 for (uint32_t i
= 0; i
< argc
; i
++) {
67 if (std::strcmp(argn
[i
], "testcase") == 0) {
// An empty testcase value appears to mean "list the available tests"
// (see the DidChangeView comment at the end).
68 if (argv
[i
][0] == '\0')
70 current_case_
= CaseForTestName(argv
[i
]);
71 test_filter_
= argv
[i
];
// Unknown test name or failed test-case init: record the error in errors_
// so ExecuteTests() reports "Plugin initialization failed".
73 errors_
.append(std::string("Unknown test case ") + argv
[i
]);
74 else if (!current_case_
->Init())
75 errors_
.append(" Test case could not initialize.");
80 // In DidChangeView, we'll dump out a list of all available tests.
84 #if !(defined __native_client__)
85 pp::Var
TestingInstance::GetInstanceObject() {
87 return current_case_
->GetTestObject();
89 return pp::VarPrivate();
93 void TestingInstance::HandleMessage(const pp::Var
& message_data
) {
95 current_case_
->HandleMessage(message_data
);
98 void TestingInstance::DidChangeView(const pp::View
& view
) {
99 if (!executed_tests_
) {
100 executed_tests_
= true;
101 pp::Module::Get()->core()->CallOnMainThread(
103 callback_factory_
.NewCallback(&TestingInstance::ExecuteTests
));
106 current_case_
->DidChangeView(view
);
109 bool TestingInstance::HandleInputEvent(const pp::InputEvent
& event
) {
111 return current_case_
->HandleInputEvent(event
);
115 void TestingInstance::EvalScript(const std::string
& script
) {
116 SendTestCommand("EvalScript", script
);
119 void TestingInstance::SetCookie(const std::string
& name
,
120 const std::string
& value
) {
121 SendTestCommand("SetCookie", name
+ "=" + value
);
124 void TestingInstance::LogTest(const std::string
& test_name
,
125 const std::string
& error_message
,
126 PP_TimeTicks start_time
) {
127 // Compute the time to run the test and save it in a string for logging:
128 PP_TimeTicks
end_time(pp::Module::Get()->core()->GetTimeTicks());
129 std::ostringstream number_stream
;
130 PP_TimeTicks
elapsed_time(end_time
- start_time
);
131 number_stream
<< std::fixed
<< std::setprecision(3) << elapsed_time
;
132 std::string
time_string(number_stream
.str());
134 // Tell the browser we're still working.
135 ReportProgress(kProgressSignal
);
137 number_tests_executed_
++;
140 html
.append("<div class=\"test_line\"><span class=\"test_name\">");
141 html
.append(test_name
);
142 html
.append("</span> ");
143 if (error_message
.empty()) {
144 html
.append("<span class=\"pass\">PASS</span>");
146 html
.append("<span class=\"fail\">FAIL</span>: <span class=\"err_msg\">");
147 html
.append(error_message
);
148 html
.append("</span>");
150 if (!errors_
.empty())
151 errors_
.append(", "); // Separator for different error messages.
152 errors_
.append(test_name
+ " FAIL: " + error_message
);
154 html
.append(" <span class=\"time\">(");
155 html
.append(time_string
);
156 html
.append("s)</span>");
158 html
.append("</div>");
162 void TestingInstance::AppendError(const std::string
& message
) {
163 if (!errors_
.empty())
164 errors_
.append(", ");
165 errors_
.append(message
);
// Runs the selected test case (scheduled from DidChangeView) and reports the
// final result to the browser: "PASS" or the accumulated errors_ string.
// NOTE(review): this paste is truncated — several else/closing braces, loop
// separators, and at least one branch line are missing; hedged notes below.
168 void TestingInstance::ExecuteTests(int32_t unused
) {
169 ReportProgress(kProgressSignal
);
171 // Clear the console.
172 SendTestCommand("ClearConsole");
174 if (!errors_
.empty()) {
175 // Catch initialization errors and output the current error string to
177 LogError("Plugin initialization failed: " + errors_
);
178 } else if (!current_case_
) {
// presumably LogAvailableTests() was called here (line missing) before
// recording the "listed only" failure — TODO confirm.
180 errors_
.append("FAIL: Only listed tests");
182 current_case_
->RunTests(test_filter_
);
184 if (number_tests_executed_
== 0) {
185 errors_
.append("No tests executed. The test filter might be too "
186 "restrictive: '" + test_filter_
+ "'.");
189 if (current_case_
->skipped_tests().size()) {
190 // TODO(dmichael): Convert all TestCases to run all tests in one fixture,
191 // and enable this check. Currently, a lot of our tests
192 // run 1 test per fixture, which is slow.
194 errors_.append("Some tests were not listed and thus were not run. Make "
195 "sure all tests are passed in the test_case URL (even if "
196 "they are marked DISABLED_). Forgotten tests: ");
197 std::set<std::string>::const_iterator iter =
198 current_case_->skipped_tests().begin();
199 for (; iter != current_case_->skipped_tests().end(); ++iter) {
200 errors_.append(*iter);
// (separator append and loop-closing braces missing from this paste)
206 if (current_case_
->remaining_tests().size()) {
207 errors_
.append("Some listed tests were not found in the TestCase. Check "
208 "the test names that were passed to make sure they match "
209 "tests in the TestCase. Unknown tests: ")
;
210 std::map
<std::string
, bool>::const_iterator iter
=
211 current_case_
->remaining_tests().begin();
212 for (; iter
!= current_case_
->remaining_tests().end(); ++iter
) {
213 errors_
.append(iter
->first
);
// (separator append and closing braces missing from this paste)
220 // Declare we're done by setting a cookie to either "PASS" or the errors.
221 ReportProgress(errors_
.empty() ? "PASS" : errors_
);
223 SendTestCommand("DidExecuteTests");
224 // Note, DidExecuteTests unloads the plugin. We can't really do anything after
// this point.
228 TestCase
* TestingInstance::CaseForTestName(const std::string
& name
) {
229 std::string case_name
= name
.substr(0, name
.find_first_of('_'));
230 TestCaseFactory
* iter
= TestCaseFactory::head_
;
231 while (iter
!= NULL
) {
232 if (case_name
== iter
->name_
)
233 return iter
->method_(this);
239 void TestingInstance::SendTestCommand(const std::string
& command
) {
240 std::string
msg("TESTING_MESSAGE:");
242 PostMessage(pp::Var(msg
));
245 void TestingInstance::SendTestCommand(const std::string
& command
,
246 const std::string
& params
) {
247 SendTestCommand(command
+ ":" + params
);
// Renders the sorted list of all registered test cases as clickable links,
// plus a "Run All Tests" button.
// NOTE(review): this paste is truncated — the loop never advances iter (the
// advance line is missing), the html string is never declared, the
// "&mode=nacl" append is presumably conditional on nacl mode, and the final
// LogHTML(html) emit is missing; confirm against the original.
251 void TestingInstance::LogAvailableTests() {
252 // Print out a listing of all tests.
253 std::vector
<std::string
> test_cases
;
254 TestCaseFactory
* iter
= TestCaseFactory::head_
;
255 while (iter
!= NULL
) {
256 test_cases
.push_back(iter
->name_
);
// (loop advance and closing brace missing from this paste)
259 std::sort(test_cases
.begin(), test_cases
.end());
// (declaration of html missing from this paste)
262 html
.append("Available test cases: <dl>");
263 for (size_t i
= 0; i
< test_cases
.size(); ++i
) {
264 html
.append("<dd><a href='?testcase=");
265 html
.append(test_cases
[i
]);
// presumably guarded by a nacl-mode check (line missing) — TODO confirm.
267 html
.append("&mode=nacl");
// (the href-closing "'>" append appears to be missing here)
269 html
.append(test_cases
[i
]);
270 html
.append("</a></dd>");
272 html
.append("</dl>");
273 html
.append("<button onclick='RunAll()'>Run All Tests</button>");
278 void TestingInstance::LogError(const std::string
& text
) {
280 html
.append("<span class=\"fail\">FAIL</span>: <span class=\"err_msg\">");
282 html
.append("</span>");
286 void TestingInstance::LogHTML(const std::string
& html
) {
287 SendTestCommand("LogHTML", html
);
290 void TestingInstance::ReportProgress(const std::string
& progress_value
) {
291 // Use streams since nacl doesn't compile base yet (for StringPrintf).
292 std::ostringstream script
;
293 script
<< "window.domAutomationController.setAutomationId(0);" <<
294 "window.domAutomationController.send(\"" << progress_value
<< "\")";
295 EvalScript(script
.str());
298 void TestingInstance::AddPostCondition(const std::string
& script
) {
299 SendTestCommand("AddPostCondition", script
);
302 class Module
: public pp::Module
{
304 Module() : pp::Module() {}
307 virtual pp::Instance
* CreateInstance(PP_Instance instance
) {
308 return new TestingInstance(instance
);
314 Module
* CreateModule() {
315 return new ::Module();