// testsuite/src/com/piclab/wikitest/HTMLTest.java
/*
 * View pages with various skins and make sure they're basically
 * valid HTML structured the way we expect. For now we're just
 * using regexes, which should be fine for the sample pages. They
 * would probably fail on pages about HTML markup and such, though.
 * Eventually, we should be scanning the DOM for these tests.
 */
package com.piclab.wikitest;

import com.meterware.httpunit.*;
import java.util.regex.*;
import java.io.*;
import org.w3c.dom.*;
public class HTMLTest extends WikiTest {
	/* Regex patterns to look for on every page; "good" patterns should
	 * be found, "bad" patterns should be absent.
	 */
	private String[] m_goodpats = {
		"\\A\\s*<!doctype html", "<meta\\s+[^>]*name\\s*=\\s*.robots",
		"<head[^>]*>.*<title[^>]*>.*</title>.*</head>\\s*<body",
		"<link\\s+[^>]*rel\\s*=\\s*.stylesheet", "<h1\\s+[^>]*class\\s*=.pagetitle",
		"<form\\s+[^>]*id\\s*=\\s*.search",
		"<div\\s+[^>]*id\\s*=.content.*<div\\s+[^>]*id\\s*=.article",
	};
	private Pattern[] m_cgoodpats;

	private String[] m_badpats = {
		"<[^>]*onclick\\s*=", "<applet", "<object", "<body.*<script.*</body"
	};
	private Pattern[] m_cbadpats;
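	/*
	 * Roughly the shape of markup the "good" patterns expect (an
	 * illustrative sketch only; the exact output differs per skin):
	 *
	 *   <!DOCTYPE html ...>
	 *   <head>
	 *     <title>...</title>
	 *     <meta name="robots" ...>
	 *     <link rel="stylesheet" ...>
	 *   </head>
	 *   <body>
	 *     <h1 class="pagetitle">...</h1>
	 *     <form id="search" ...>
	 *     <div id="content"> <div id="article"> ... </div> </div>
	 *   </body>
	 */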
	/* TODO: figure out some way to check for unbalanced <ul>, etc. */
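	/*
	 * A minimal sketch of such a check, assuming a plain count of opening
	 * vs. closing tags is good enough for the sample pages (it ignores
	 * self-closing and commented-out tags). Not yet called by any test.
	 */
	private boolean tagsBalanced( String text, String tag ) {
		Pattern open = Pattern.compile( "<" + tag + "[\\s>]",
		  Pattern.CASE_INSENSITIVE );
		Pattern close = Pattern.compile( "</" + tag + "\\s*>",
		  Pattern.CASE_INSENSITIVE );
		int opens = 0, closes = 0;
		Matcher m = open.matcher( text );
		while ( m.find() ) { ++opens; }
		m = close.matcher( text );
		while ( m.find() ) { ++closes; }
		return opens == closes;
	}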
	public String testName() { return "HTML"; }
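	/*
	 * Set-up: start logged out and pre-compile the patterns above so the
	 * per-page checks in matchesAll() stay cheap.
	 */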
	protected int initTest() throws Exception {
		logout();

		/*
		 * Pre-compile the regexes. DOTALL lets ".*" cross line breaks, so
		 * patterns like the <head>...</head><body> check can span the whole
		 * page; CASE_INSENSITIVE covers variants like "<!DOCTYPE html".
		 */
		m_cgoodpats = new Pattern[m_goodpats.length];
		for (int i = 0; i < m_goodpats.length; ++i) {
			m_cgoodpats[i] = Pattern.compile( m_goodpats[i],
			  Pattern.CASE_INSENSITIVE | Pattern.DOTALL );
		}
		m_cbadpats = new Pattern[m_badpats.length];
		for (int i = 0; i < m_badpats.length; ++i) {
			m_cbadpats[i] = Pattern.compile( m_badpats[i],
			  Pattern.CASE_INSENSITIVE | Pattern.DOTALL );
		}
		return 0;
	}
	protected int runTest() throws Exception {
		int c = 0;

		if ( 0 != ( c = part1() ) ) { return fail(c); }
		if ( 0 != ( c = part2() ) ) { return fail(c); }
		return 0;
	}
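	/*
	 * Part 1: log in as Fred and run the fixed page list in part1inner()
	 * under three preference settings (standard, numbered headings,
	 * question-mark links). Failure codes are 101 for the login page and
	 * 110/120/130 plus the part1inner() code for the three passes.
	 */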
	private int part1() throws Exception {
		WebResponse wr = loginAs( "Fred", "Fred" );
		Document doc = wr.getDOM();
		if ( ! matchesAll( wr.getText() ) ) { return 101; }

		WebRequest req = openPrefs();
		req.removeParameter( "wpOpnumberheadings" );
		req.setParameter( "wpOphighlightbroken", "1" );
		wr = getResponse( req );
		WikiSuite.fine( "Standard settings" );

		int c = 0;
		if ( 0 != ( c = part1inner() ) ) { return 110 + c; }

		req = openPrefs();
		req.setParameter( "wpOpnumberheadings", "1" );
		wr = getResponse( req );
		WikiSuite.fine( "Numbered headings" );

		if ( 0 != ( c = part1inner() ) ) { return 120 + c; }
		req = openPrefs();
		/* Unchecking the highlight-broken-links option gives the
		 * question-mark style of broken links. */
		req.removeParameter( "wpOphighlightbroken" );
		wr = getResponse( req );
		WikiSuite.fine( "Question-mark links" );

		if ( 0 != ( c = part1inner() ) ) { return 130 + c; }
		return 0;
	}
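	/*
	 * View a fixed sample of page types (main page, an article, a user
	 * page, a special page, a talk page, and a project page) and check
	 * each one. Returns 1-6 to identify the page that failed, 0 on success.
	 */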
	private int part1inner() throws Exception {
		WebResponse wr = viewPage( "" );
		/*
		 * Will throw exception if not parseable:
		 */
		Document doc = wr.getDOM();
		if ( ! matchesAll( wr.getText() ) ) { return 1; }

		wr = viewPage( "Opera" );
		doc = wr.getDOM();
		if ( ! matchesAll( wr.getText() ) ) { return 2; }

		wr = viewPage( "User:Fred" );
		doc = wr.getDOM();
		if ( ! matchesAll( wr.getText() ) ) { return 3; }

		wr = viewPage( "Special:Recentchanges" );
		doc = wr.getDOM();
		if ( ! matchesAll( wr.getText() ) ) { return 4; }

		wr = viewPage( "Talk:Poker" );
		doc = wr.getDOM();
		if ( ! matchesAll( wr.getText() ) ) { return 5; }

		wr = viewPage( "Wikipedia:Upload_log" );
		doc = wr.getDOM();
		if ( ! matchesAll( wr.getText() ) ) { return 6; }

		return 0;
	}
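	/*
	 * Part 2: log in as Barney and walk every quickbar setting (0-3)
	 * combined with every skin (0-2), fetching one randomly chosen page
	 * per combination. Failure codes are 201 for the login page,
	 * 200 + 10*q for the preference page, and 201 + 10*q + s for the
	 * random page under quickbar q and skin s.
	 */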
	private int part2() throws Exception {
		WebResponse wr = loginAs( "Barney", "Barney" );
		Document doc = wr.getDOM();
		if ( ! matchesAll( wr.getText() ) ) { return 201; }

		WebRequest req = openPrefs();
		req.removeParameter( "wpOpnumberheadings" );
		req.setParameter( "wpOphighlightbroken", "1" );
		wr = getResponse( req );

		for (int q = 0; q < 4; ++q) {
			req = openPrefs();
			req.setParameter( "wpQuickbar", String.valueOf( q ) );
			wr = getResponse( req );

			doc = wr.getDOM();
			if ( ! matchesAll( wr.getText() ) ) { return 200 + 10 * q; }
			WikiSuite.finer( "Set quickbar to " + q );

			for (int s = 0; s < 3; ++s) {
				req = openPrefs();
				req.setParameter( "wpSkin", String.valueOf( s ) );
				wr = getResponse( req );
				WikiSuite.finer( "Set skin to " + s );

				double r = Math.random();
				if ( r < .5 ) {
					wr = viewPage( WikiSuite.preloadedPages[
					  (int)(r * 100.0)] );
				} else if ( r < .6 ) {
					wr = viewPage( "User:Fred" );
				} else if ( r < .7 ) {
					wr = viewPage( "Special:Recentchanges" );
				} else if ( r < .8 ) {
					wr = editPage( "Talk:Sport" );
				} else if ( r < .9 ) {
					wr = editPage( "Wikipedia:Upload_log" );
				} else {
					wr = viewPage( "" );
				}
				doc = wr.getDOM();
				if ( ! matchesAll( wr.getText() ) ) { return 201 + 10 * q + s; }
			}
		}
		return 0;
	}
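	/*
	 * Check one page's HTML against the compiled pattern lists: every
	 * "good" pattern must be present and every "bad" pattern absent.
	 * Logs the offending pattern and returns false on the first miss.
	 */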
	private boolean matchesAll( String text ) {
		if ( m_cgoodpats == null ) {
			WikiSuite.error( "Patterns not compiled." );
			return false;
		}
		for (int i = 0; i < m_goodpats.length; ++i) {
			Matcher m = m_cgoodpats[i].matcher( text );
			if ( ! m.find() ) {
				WikiSuite.error( "Failed to match pattern \"" + m_goodpats[i] + "\"" );
				return false;
			}
		}
		for (int i = 0; i < m_badpats.length; ++i) {
			Matcher m = m_cbadpats[i].matcher( text );
			if ( m.find() ) {
				WikiSuite.error( "Matched pattern \"" + m_badpats[i] + "\"" );
				return false;
			}
		}
		return true;
	}
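	/*
	 * A first sketch of the DOM-based checking mentioned in the header
	 * comment, assuming the org.w3c.dom Document that httpunit already
	 * builds is good enough to query. It only covers a couple of the
	 * regex checks above and is not yet called by any test.
	 */
	private boolean domLooksSane( Document doc ) {
		// Exactly one <title> and at least one <h1> should be present.
		if ( doc.getElementsByTagName( "title" ).getLength() != 1 ) {
			WikiSuite.error( "Expected exactly one <title> element." );
			return false;
		}
		if ( doc.getElementsByTagName( "h1" ).getLength() < 1 ) {
			WikiSuite.error( "Expected at least one <h1> element." );
			return false;
		}
		// No <applet> or <object> elements, mirroring the "bad" patterns.
		if ( doc.getElementsByTagName( "applet" ).getLength() > 0
		  || doc.getElementsByTagName( "object" ).getLength() > 0 ) {
			WikiSuite.error( "Found an <applet> or <object> element." );
			return false;
		}
		return true;
	}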
	/*
	 * Allow this test to be run on its own, outside the full suite.
	 */
	public static void main( String[] params ) {
		(new HTMLTest()).runSingle( params );
	}
}