<?php
/**
* Take page text out of an XML dump file and render basic HTML out to files.
* This is *NOT* suitable for publishing or offline use; it's intended for
 * running comparative tests of parsing behavior using real-world data.
*
 * Templates etc. are pulled from the local wiki database, not from the dump.
*
* Copyright (C) 2006 Brion Vibber <brion@pobox.com>
* http://www.mediawiki.org/
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
* @addtogroup Maintenance
*/
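
/*
 * Typical invocation (the script's filename is not recorded in this header;
 * maintenance/renderDump.php is assumed): the XML dump is read from stdin,
 * and --output-dir names the directory that receives one HTML file per
 * revision.
 *
 *   php maintenance/renderDump.php --output-dir=/tmp/rendered < pages_dump.xml
 */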
$optionsWithArgs = array( 'report' );
require_once( 'commandLine.inc' );
require_once( 'SpecialImport.php' );
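
/**
 * Reads revisions from an XML dump stream and writes each one out as a
 * standalone XHTML file, numbered in the order encountered.
 */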
class DumpRenderer {
	function __construct( $dir ) {
		$this->stderr = fopen( "php://stderr", "wt" );
		$this->outputDirectory = $dir;
		$this->count = 0;
	}
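
	/**
	 * Callback for WikiImporter: parse one revision's wikitext with the
	 * local Parser and write the result to a numbered XHTML file in the
	 * output directory.
	 */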
	function handleRevision( $rev ) {
		$title = $rev->getTitle();
		if ( !$title ) {
			fprintf( $this->stderr, "Got bogus revision with null title!\n" );
			return;
		}
		$display = $title->getPrefixedText();
		$this->count++;
		$sanitized = rawurlencode( $display );
		$filename = sprintf( "%s/wiki-%07d-%s.html",
			$this->outputDirectory,
			$this->count,
			$sanitized );
		fprintf( $this->stderr, "%s\n", $filename );

		// fixme
		$user = new User();
		$parser = new Parser();
		$options = ParserOptions::newFromUser( $user );

		$output = $parser->parse( $rev->getText(), $title, $options );

		file_put_contents( $filename,
			"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" " .
			"\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n" .
			"<html xmlns=\"http://www.w3.org/1999/xhtml\">\n" .
			"<head>\n" .
			"<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\" />\n" .
			"<title>" . htmlspecialchars( $display ) . "</title>\n" .
			"</head>\n" .
			"<body>\n" .
			$output->getText() .
			"</body>\n" .
			"</html>" );
	}
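
	/**
	 * Read an XML dump from stdin and feed each revision to handleRevision().
	 */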
	function run() {
		$this->startTime = wfTime();

		$file = fopen( 'php://stdin', 'rt' );
		$source = new ImportStreamSource( $file );
		$importer = new WikiImporter( $source );

		$importer->setRevisionCallback(
			array( &$this, 'handleRevision' ) );

		return $importer->doImport();
	}
}
if ( isset( $options['output-dir'] ) ) {
	$dir = $options['output-dir'];
} else {
	wfDie( "Must use --output-dir=/some/dir\n" );
}

$render = new DumpRenderer( $dir );
$render->run();
?>