Attachment 'mw112moin.php.txt'
#!/usr/bin/php
<?php
/*
copyright <original author>
TJ Fontaine <tjfontaine@gmail.com>

This script exports pages from MediaWiki to MoinMoin.

Usage:
Choose the options you want and fill in the appropriate
variables. At the very least, edit the database settings.

MM_USER_ID requires at least one user to be registered in
MoinMoin; you can find this ID in wiki/data/user/.

Once everything is set up, run the script, then copy
$output_dir/* to wiki/data/pages/ and
mv wiki/data/pages/edit-log wiki/data/

Your MediaWiki pages and history should now be available;
check wiki/Main_Page.

By default the script exports namespaces 0-3. In my
experience, namespace 0 holds the normal editable pages,
1 their Talk pages, 2 the user pages, and 3 the user Talk
pages. When filling in $IMPORT_PAGES, if 'description' is
set, pages in that namespace are exported under that
description:

(example)
$IMPORT_PAGES['users-talk']['namespace'] = 3;
$IMPORT_PAGES['users-talk']['description'] = "Talk";

will cause all pages in that namespace to be exported to
User/Talk, whereas

$IMPORT_PAGES['users-talk']['namespace'] = 3;
$IMPORT_PAGES['users-talk']['description'] = "TalkAgain";

will cause all pages in that namespace to be exported to
User/TalkAgain.

Features:
* Import Current Pages
* Import By Namespace
* Import Talk Pages (as Page/Talk)
* Import Revision History
* Import Images
* Add "#format $parser" to header
* Or make minimal changes to Wiki syntax

Known Issues:
* Changing the syntax on large sites eats up memory;
  that part of the code needs to be overhauled
* Thumbnails aren't handled at all

TODO:
* Migrate Users
* Map Users in revision history
* Overhaul change_syntax
* Image thumbnails

ChangeLog:
* 2006-01-12 TJ Fontaine <tjfontaine@gmail.com>
  - Removed nasty not_pages array
  - Import based on namespace
  - Import Talk Pages
  - Import images (uses find)
  - Import Revision History
  - Add Proper Revision Timestamp
  - Add Conditional Revision Import

* Version 0.3

* 2007-11-07 David Huggins-Daines <dhuggins@cs.cmu.edu>
  - Updated for Mediawiki 0.11
*/
########################
## MediaWiki Options  ##
########################

$MIGRATE_IMAGES = false; #set to true if you want to migrate images to moinmoin
$MW_IMAGE_PATH = "";     #full path to mediawiki images
$IMPORT_HISTORY = true;  #set to false if you only want the current revision

########################
## MoinMoin Options   ##
########################

$MM_USER_ID = "";       #moinmoin userid to identify the importer by
$ADD_MW_PARSER = true;  #set to true to add "#format $MW_PARSER" to the beginning
                        #of every page; if false the script does a minimal syntax
                        #conversion instead (that old code needs reworking and
                        #eats too much memory, so leave this true)
$MW_PARSER = "media";   #name of the mediawiki parser in plugin/parser

########################
## DB Settings        ##
########################

$MW_TABLE_PREFIX = ""; #prefix the mediawiki tables were installed with
$host = "";            #mediawiki database server
$usr = "";             #mediawiki database username
$passwd = "";          #mediawiki database password
$db = "";              #mediawiki database name

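# Example (hypothetical values) of a filled-in DB section; adjust these to
# match your own MediaWiki installation:
#   $MW_TABLE_PREFIX = "mw_";
#   $host   = "localhost";
#   $usr    = "wikiuser";
#   $passwd = "secret";
#   $db     = "wikidb";
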
########################
## Pages To Import    ##
########################

$IMPORT_PAGES['regular']['namespace'] = 0;
$IMPORT_PAGES['regular']['description'] = "";
$IMPORT_PAGES['regular-talk']['namespace'] = 1;
$IMPORT_PAGES['regular-talk']['description'] = "Talk";
$IMPORT_PAGES['users']['namespace'] = 2;
$IMPORT_PAGES['users']['description'] = "";
$IMPORT_PAGES['users-talk']['namespace'] = 3;
$IMPORT_PAGES['users-talk']['description'] = "Talk";

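# Further namespaces can be added the same way; for instance, on a default
# MediaWiki install namespace 4 is the Project namespace and 5 its Talk
# namespace (the array keys such as 'project' are arbitrary labels):
#   $IMPORT_PAGES['project']['namespace'] = 4;
#   $IMPORT_PAGES['project']['description'] = "";
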
########################
## Output Directory   ##
########################

$output_dir = "mediawiki_pages"; #where the script will output the exported
                                 #pages

/*
DO NOT EDIT BELOW THIS LINE
unless you think you know what you're doing
-----------------------------------------------------
*/

$link = mysql_pconnect($host, $usr, $passwd) or die(mysql_error());
mysql_select_db($db) or die("Could not select database");

$WIKI_LINK_START = "[";
$WIKI_LINK_END = "]";
$EXTERNAL_LINK_START = "[";
$EXTERNAL_LINK_END = "]";
$EXTERNAL_LINK_DIVIDER = " ";

if(file_exists($output_dir)){
    rmdirr($output_dir);
    mkdir($output_dir);
}
else{
    mkdir($output_dir);
}

chdir("./$output_dir") or die;

$EDIT_LOG = array();

foreach($IMPORT_PAGES as $pagetype)
    migrate_current_pages($pagetype['namespace'], $pagetype['description']);

print "sorting Edit Log ...";
asort($EDIT_LOG);
print "Done\n";

$edit_log = fopen("edit-log", "w");
foreach($EDIT_LOG as $entry)
    fputs($edit_log, $entry);
fclose($edit_log);

chdir("..");
###End of Main
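
# What the loop above produces (see create_page() and append_edit_log()
# below): one directory per page inside $output_dir, each containing a
# "current" file with the revision number, a per-page "edit-log", an
# optional "attachments/" directory, and a "revisions/" directory holding
# one file per imported revision. The top-level "edit-log" written here is
# the one moved to wiki/data/ as described in the header.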

function migrate_current_pages($page_namespace, $page_description = "")
{
    $page_table = $GLOBALS['MW_TABLE_PREFIX']."page";
    $text_table = $GLOBALS['MW_TABLE_PREFIX']."text";
    $revision_table = $GLOBALS['MW_TABLE_PREFIX']."revision";

    $curr_sql = "SELECT `$page_table`.page_title as ptitle, " .
        "`$page_table`.page_latest as revision, ".
        "`$page_table`.page_id as id, ".
        "`$text_table`.old_text as text, ".
        "`$page_table`.page_touched as timestamp " .
        "FROM `$page_table`, `$text_table`, `$revision_table` ".
        "WHERE `$revision_table`.rev_page = `$page_table`.page_id ".
        "AND `$revision_table`.rev_id = `$page_table`.page_latest ".
        "AND `$text_table`.old_id = `$revision_table`.rev_text_id ".
        "AND `$text_table`.old_text NOT LIKE \"MediaWiki default\" " .
        "AND page_namespace = '$page_namespace' " .
        ";";

    $query = mysql_query($curr_sql) or die(mysql_error());

    while ($row = mysql_fetch_object($query)) {
        if ($GLOBALS['IMPORT_HISTORY']) {
            $rev_sql = "SELECT `$page_table`.page_title as ptitle, " .
                "`$revision_table`.rev_id as revision, ".
                "`$text_table`.old_text as text, ".
                "`$page_table`.page_touched as timestamp " .
                "FROM `$page_table`, `$text_table`, `$revision_table` ".
                "WHERE `$page_table`.page_id = $row->id ".
                "AND `$revision_table`.rev_page = `$page_table`.page_id ".
                "AND `$text_table`.old_id = `$revision_table`.rev_text_id ".
                "AND `$text_table`.old_text NOT LIKE \"MediaWiki default\" " .
                "AND page_namespace = '$page_namespace' " .
                ";";

            $rev_query = mysql_query($rev_sql) or die(mysql_error());

            while ($rev_row = mysql_fetch_object($rev_query))
            {
                migrate_page_row($rev_row, $page_description);
            }
        }
        else {
            migrate_page_row($row, $page_description);
        }
    }
    mysql_free_result($query);
}

function migrate_page_row($row, $desc)
{
    $timestamp = $row->timestamp;
    $title = clean_title($row->ptitle);
    $text = $row->text;
    $revision = $row->revision;

    if(strlen($desc))
        create_page($title."(2f)".$desc, $text, $timestamp, $revision);
    else
        create_page($title, $text, $timestamp, $revision);
}
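
# "(2f)" is the quoted form of "/" (0x2f) in MoinMoin page directory names,
# so a page exported with a description, e.g. "Main_Page" from a Talk
# namespace, ends up as the subpage Main_Page/Talk on the MoinMoin side.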

function create_page($page_title, $page_text, $page_timestamp, $page_revision)
{
    print 'create page '.$page_title.' revision '.$page_revision."\n";

    @mkdir($page_title);
    chdir($page_title) or die($page_title);

    append_edit_log($page_title, $page_timestamp, $page_revision);

    $file = fopen("current", "w");
    fputs($file, $page_revision);

    fclose($file);

    if($GLOBALS['MIGRATE_IMAGES'])
        migrate_images($page_text);

    @mkdir("revisions");
    chdir("revisions") or die("revisions");

    $file = fopen($page_revision, "w");

    #break up one string into lines
    $file_text = explode("\n", $page_text);

    if($GLOBALS['ADD_MW_PARSER'])
    {
        $mw_parser = $GLOBALS['MW_PARSER'];
        fputs($file, "#format $mw_parser \n");
    }
    else
        $file_text = change_syntax($file_text);

    $b = 0;

    while ($b < count($file_text)) {
        fputs($file, rtrim($file_text[$b]) . "\n");
        $b++;
    }

    unset($file_text);
    fclose($file);
    chdir("..") or die(system('pwd')); #revisions
    chdir("..") or die(system('pwd')); #page name
}

function append_edit_log($page_title, $timestamp, $revision)
{
    $file = fopen('edit-log', 'a+');

    if($revision == 0)
        $action = 'SAVENEW';
    else
        $action = 'SAVE';

    if(strlen($timestamp))
        $tstamp = getStamp($timestamp);
    else
        $tstamp = uts();

    $el_string = "$tstamp\t$revision\t$action\t$page_title\t" .
        "127.0.0.1\tlocalhost\t".$GLOBALS['MM_USER_ID']."\n";

    fputs($file, $el_string);

    $GLOBALS['EDIT_LOG'][$tstamp] = $el_string;

    fclose($file);
}
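
# The tab-separated fields written above are: timestamp (microseconds since
# the epoch, see getStamp()/uts() below), revision, action, page name,
# client address, hostname, and the importing MoinMoin user id ($MM_USER_ID).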

function uts(){
    $Asec = explode(" ", microtime());
    $Amicro = explode(".", $Asec[0]);
    return ($Asec[1].substr($Amicro[1], 0, 6));
}

function getStamp($t)
{
    #$t is a MediaWiki timestamp, assumed to be in YYYYMMDDHHMMSS form
    $year = substr($t, 0, 4);
    $month = substr($t, 4, 2);
    $day = substr($t, 6, 2);
    $hour = substr($t, 8, 2);
    $min = substr($t, 10, 2);
    $sec = substr($t, 12, 2);
    $micro = mktime($hour, $min, $sec, $month, $day, $year);
    #left-justify and zero-pad on the right to 16 digits, i.e. whole seconds
    #expressed as microseconds since the epoch
    return sprintf("%-016s", $micro);
}
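
# Illustrative example: getStamp("20071107123456") converts 2007-11-07
# 12:34:56 (interpreted in the server's local time zone by mktime()) into
# epoch seconds and pads the result to 16 digits, matching the microsecond
# timestamps produced by uts().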

function migrate_images($page_text)
{
    $mw_path = $GLOBALS['MW_IMAGE_PATH'];
    $image_matches = array();
    #non-greedy so that several image links on one line match separately
    $image_pat = "/\[\[Image:(.*?)\]\]/";
    if(preg_match_all($image_pat, $page_text, $image_matches))
    {
        @mkdir("attachments");

        for($z = 0; $z < count($image_matches[1]); $z++)
        {
            $image_file_name = strtok($image_matches[1][$z], '|');
            if(!file_exists('attachments/'.$image_file_name))
            {
                $find_string = "find $mw_path -type f -name \"".
                    "$image_file_name\"";

                $image_file_path = system($find_string, $ret);
                if($ret) die($image_file_path);
                if(strlen($image_file_path))
                {
                    if(!copy($image_file_path, "./attachments/$image_file_name"))
                        die("failed to copy $image_file_name\n");
                    print " added attachment: $image_file_name \n";
                }
            }
        }
    }
}
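
# For example, a page containing "[[Image:Diagram.png|thumb|Overview]]"
# causes Diagram.png to be located under $MW_IMAGE_PATH via find(1) and
# copied into the page's attachments/ directory; the |thumb|... options
# are dropped by strtok() above. ("Diagram.png" is just an illustrative name.)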

function clean_title ($page_title)
{
    $page_title = utf8_encode(str_replace(" ", "_", $page_title));
    $quoted = array();
    $in_parenthesis = false;
    for ($i = 0; $i < strlen($page_title); $i++)
    {
        $curchar = substr($page_title, $i, 1);
        if (ereg('[^a-zA-Z0-9_]', $curchar))
        {
            if (!$in_parenthesis)
            {
                $quoted[] = '(';
                $in_parenthesis = true;
            }
            $quoted[] = str_pad(dechex(ord($curchar)),
                                2, '0', STR_PAD_LEFT);
        }
        else
        {
            if ($in_parenthesis)
            {
                $quoted[] = ')';
                $in_parenthesis = false;
            }
            $quoted[] = $curchar;
        }
    }

    if ($in_parenthesis)
        $quoted[] = ')';

    $page_title = implode('', $quoted);
    unset($quoted);
    return $page_title;
}
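
# Illustrative examples: clean_title("Main Page") returns "Main_Page", and
# clean_title("User:Bob") returns "User(3a)Bob" -- runs of characters outside
# [a-zA-Z0-9_] are replaced by their hex codes inside parentheses, the same
# quoting that makes "(2f)" act as "/" in MoinMoin page directory names.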

function change_syntax ($textString) {
    #$a = 0;

    for($a = 0; $a < count($textString); $a++){
        #print "str(before mod) = $textString[$a] \n";

        #custom plugin
        #if(preg_match("/\<fileshare\>.+\<\/fileshare\>/",$textString[$a])){
        #    $textString[$a] = fileShare($textString[$a]);
        #}

        #bullet lists: lines starting with '*'
        if(substr($textString[$a], 0, 1) == '*'){
            $textString[$a] = bullets($textString[$a]);
        }

        #numbered lists: lines starting with '#'
        if(preg_match("/^#/",$textString[$a])){
            $textString[$a] = numberedList($textString[$a]);
        }

        #headings
        if(preg_match("/^==.+==/",$textString[$a])){
            $textString[$a] = heading($textString[$a]);
        }

        #wiki links
        if(preg_match("/\[\[.+\]\]/",$textString[$a])){
            $textString[$a] = wikiLinks($textString[$a]);
        }

        #mediawiki new line <br/> or <BR>
        #must be done after wiki links
        if (preg_match("/\<br\/{0,1}\>/i", $textString[$a])) {
            $textString[$a] = preg_replace("/\<br\/{0,1}\>/i", "[[BR]]", $textString[$a]);
            #print "result = $textString[$a]\n";
        }
    }

    return $textString;
}



#custom plugin
#function fileShare($string) {
#    $fileshare = substr($string, strpos($string, "\\\\"));
#    $fileshare = preg_replace("/<\/fileshare>/","",$fileshare);
#    $string = "[file:" .$fileshare ."]";
#    return $string;
#}

function heading($string){
    $theHeading = $string;
    $headingLevel = 0;

    #strip the left side '=' chars
    while($headingLevel < strlen($theHeading)){
        if(substr($theHeading, 0, 1) == '='){
            $theHeading = substr($theHeading, 1);
        }
        else{
            #no more ='s in front of the text
            break;
        }
        $headingLevel++;
    }

    #the left side '=' chars are now removed,
    #now strip the right side '=' chars
    $theHeading = substr($theHeading, 0, strpos($theHeading, '='));

    $theSyntax = "";
    #note moinmoin uses one less '=' for heading levels,
    #so mediawiki "===" is the same as moinmoin "=="
    for($i = 1; $i < $headingLevel; $i++){
        $theSyntax .= "=";
    }

    $string = $theSyntax ." $theHeading " .$theSyntax;

    return $string;
}
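
# Illustrative example: heading("== Overview ==") returns "=  Overview  ="
# (one '=' fewer on each side; the extra inner spaces come from the
# concatenation above).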


function bullets ($string) {
    $a = 0;
    while ($a < strlen($string)) {
        $a++;
        if (substr($string, 1, 1) == "*")
            $string = substr($string, 1);
        else
            break;
    }
    while ($a > 0) {
        $string = " " . $string;
        $a--;
    }
    return $string;
}
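
# Illustrative example: bullets("* item") returns " * item" and
# bullets("** item") returns "  * item" -- the nesting depth becomes the
# amount of leading indentation, as MoinMoin lists expect.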

function numberedList ($string) {
    #check the more deeply nested "##" form first; "/^#/" would otherwise
    #match it and the second branch would never run
    if(preg_match("/^##/",$string)){
        $string = preg_replace("/^##/", "  1.", $string);
    }
    elseif(preg_match("/^#/",$string)){
        $string = preg_replace("/^#/", " 1.", $string);
    }
    return $string;
}
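
# Illustrative example: numberedList("# first") returns " 1. first", and a
# nested "## sub" item becomes "  1. sub" (deeper indentation nests the list
# on the MoinMoin side).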


function wikiLinks ($string) {
    global $WIKI_LINK_START;
    global $WIKI_LINK_END;

    while (strpos($string, "[[") !== false && strpos($string, "]]") !== false) {
        #isolate the link
        $link = substr($string, strpos($string, "[[") + 2);
        $link = substr($link, 0, strpos($link, "]]"));

        if (strpos($link, "|") === false){
            #add the new link syntax
            $link = $WIKI_LINK_START ."\"". $link ."\"" .$WIKI_LINK_END;
        }
        else{
            $dividerPosition = strpos($link, "|");

            $wikilink = substr($link, 0, $dividerPosition);
            $label = substr($link, $dividerPosition + 1, strlen($link) - $dividerPosition);

            #remove whitespace from the beginning and end
            $label = trim($label);

            $link = $WIKI_LINK_START .":" .$wikilink .": " .$label .$WIKI_LINK_END;
        }

        $string = substr($string, 0, strpos($string, "[[")) . $link . substr($string, strpos($string, "]]") + 2);
    }

    return $string;
}
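
# Illustrative examples: wikiLinks("see [[Main Page]]") returns
# 'see ["Main Page"]', and wikiLinks("see [[Main Page|the front page]]")
# returns "see [:Main Page: the front page]".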


function externalLinks($string){
    global $EXTERNAL_LINK_START;
    global $EXTERNAL_LINK_END;
    global $EXTERNAL_LINK_DIVIDER;

    #external link syntax is the same except for the label divider

    if(preg_match("/\| /", $string)){
        $string = preg_replace("/\| /", " ", $string);
    }
    elseif(preg_match("/\|/", $string)){
        $string = preg_replace("/\|/", " ", $string);
    }

    return $string;
}

function rmdirr($dir) {
    if($objs = glob($dir."/*")){
        foreach($objs as $obj) {
            is_dir($obj) ? rmdirr($obj) : unlink($obj);
        }
    }
    rmdir($dir);
}

?>