Page Menu · Home · Phabricator
Paste P2056

Trac Wiki Import Script
Active · Public

Authored by michelkaeser on May 29 2017, 8:59 AM.
Subscribers
None
Tokens
"Like" token, awarded by mormegil.
<?php
// Trac wiki -> Phriction import: bootstrap and configuration.
require_once 'libphutil/src/__phutil_library_init__.php';

// Directory containing the raw output of `trac-admin wiki dump`.
$DUMP_DIRECTORY = 'wiki-cron/dump/';
// Directory where the converted Remarkup/Markdown files are written.
$OUTPUT_DIRECTORY = 'wiki-cron/dump-md/';
// Phabricator Conduit endpoint and an API token with write access.
$CONDUIT_URL = '<phab-url>';
$CONDUIT_API_TOKEN = '<admin-token>';
// DO NOT EDIT BELOW THIS LINE
/**
 * Comparator ordering dumped wiki-page filenames by nesting depth,
 * shallowest first, so parent documents are imported before sub-pages.
 *
 * Depth is the number of URL-encoded slashes ("%2F") in the filename.
 *
 * @param string $a dump filename of one wiki page
 * @param string $b dump filename of another wiki page
 * @return int negative, zero, or positive — as usort() expects
 */
function cmp($a, $b) {
// The spaceship operator replaces the original hand-rolled nested
// ternary; it yields the same -1/0/1 sign for integer operands.
return substr_count($a, "%2F") <=> substr_count($b, "%2F");
}
// Collect every dumped wiki page and sort shallow-to-deep (see cmp) so
// parent Phriction documents are created before their sub-pages.
// NOTE(review): glob() can return false on error, which would make usort warn.
$wikiPages = glob($DUMP_DIRECTORY."*");
usort($wikiPages, "cmp");
foreach ($wikiPages as $wikiPage) {
$content = file_get_contents($wikiPage);
// replace spaces
// Dump filenames are URL-encoded; decode "%20" so slugs/titles keep spaces.
$wikiPage = str_replace("%20", " ", $wikiPage);
// get directory/slug information
// "%2F" is an encoded "/" in the dump filename, i.e. a sub-page separator.
$directory = null;
$slug = null;
$page = null;
if (strpos($wikiPage, "%2F") !== false) { // is subpage
$parts = preg_split("/\%2F/", $wikiPage);
$directory = basename($parts[0]);
// Pages nested more than one level deep: rebuild the intermediate
// directory path from all middle segments.
if (count($parts) !== 2) {
for ($i = 1; $i < count($parts) - 1; ++$i) {
$directory .= "/".$parts[$i];
}
}
$page = $parts[count($parts)-1];
$slug = $directory."/".$page;
} else {
$page = basename($wikiPage);
$slug = $page;
}
// MoinMoin to Markdown Transformation
// https://gist.github.com/sgk/1286682
// newlines
$content = str_replace("\r\n", "\n", $content);
$content = str_replace("[[BR]]", "", $content);
// (code) blocks
// NOTE(review): "[^#!comment:]+", "[^\n}}}]" and "[^}}}]" are character
// classes (any single char except those listed), NOT negated strings —
// these patterns do not match multi-character delimiters the way the
// intent suggests; verify against sample dump files before reuse.
$content = preg_replace("/\{\{\{[^#!comment:]+#!comment: ([^\n}}}]*)\}\}\}/", "(NOTE) $1", $content);
$content = preg_replace("/\{\{\{([^}}}]*)\}\}\}/", "```$1```", $content);
// NOTE(review): "[^~~~]" has the same character-class problem as above.
$content = preg_replace("/\~~~(.*)\n([^~~~]*)~~~/", "```lang=$1\n$2```", $content);
// headings
// NOTE(review): heading conversion is deliberately disabled (commented out)
// in this revision of the script.
// $content = preg_replace("/^\s*=====\s+(.*)\s+=====/m", "\n##### $1", $content);
// $content = preg_replace("/^\s*====\s+(.*)\s+====/m", "\n#### $1", $content);
// $content = preg_replace("/^\s*===\s+(.*)\s+===/m", "\n### $1", $content);
// $content = preg_replace("/^\s*==\s+(.*)\s+==/m", "\n## $1", $content);
// $content = preg_replace("/^\s*=\s+(.*)\s+=/m", "\n# $1", $content);
// // alternative headings
// $content = preg_replace("/^\s*=====\s+(.*)$/m", "\n##### $1", $content);
// $content = preg_replace("/^\s*====\s+(.*)$/m", "\n#### $1", $content);
// $content = preg_replace("/^\s*===\s+(.*)$/m", "\n### $1", $content);
// $content = preg_replace("/^\s*==\s+(.*)$/m", "\n## $1", $content);
// $content = preg_replace("/^\s*=\s+(.*)$/m", "\n# $1", $content);
// listings
// NOTE(review): these four patterns are byte-identical ("*" quantifies the
// preceding space) and lack the /m modifier, so at most the very start of
// the whole file can match — nested-list conversion is effectively broken.
// The escaped "\*" and the distinct indent widths were presumably lost
// when the script was pasted (runs of spaces collapse in HTML); restore
// patterns along the lines of "/^    \* /m" before reusing this script.
$content = preg_replace("/^ * /", "---- ", $content);
$content = preg_replace("/^ * /", "--- ", $content);
$content = preg_replace("/^ * /", "-- ", $content);
$content = preg_replace("/^ * /", "- ", $content);
// Trac definition-list items ("term::") become bold Remarkup list items.
$content = preg_replace("/^ (.*)::/m", "- **$1:**", $content);
// NOTE(review): missing /m modifier and unescaped "." — ordered-list
// conversion can only fire at the start of the whole file.
$content = preg_replace("/^ \d+./", "#", $content);
// Inline markup is converted line by line, skipping lines that begin with
// a space (indented/preformatted content in Moin syntax).
$lines = [];
foreach (preg_split ('/$\R?^/m', $content) as $line) {
if (strpos($line, " ") !== 0) {
$line = preg_replace("/\[((ftp|http|https)?:\/\/[^\s\[\]]+)\s([^\[\]]+)\]/", "[[ $1 | $3 ]]", $line); // links
$line = preg_replace("/\[wiki:([^\s\[\]]+)\s?([^\[\]]+)\]/", "[[ $1 | $2 ]]", $line); // alt links
// NOTE(review): in Trac/Moin markup ''' is bold and '' is italic, yet
// these map ''' -> // (Remarkup italic) and '' -> ** (Remarkup bold);
// the mapping (or the trailing comments) looks swapped — confirm intent.
$line = preg_replace("/\'\'\'(.*?)\'\'\'/", "//$1//", $line); // italic
$line = preg_replace("/\'\'(.*?)\'\'/", "**$1**", $line); // bold
// Re-nest adjacent italic/bold markers.
// NOTE(review): "[^\/\/**]" is again a character class, not a string.
$line = preg_replace("/\/\/\*\*([^\/\/**]*)\/\/\*\*/", "//**$1**//", $line);
}
$lines[] = $line;
}
$content = implode("\n", $lines);
// Markdown Output
// Mirror the wiki hierarchy on disk; "@" suppresses the mkdir warning
// when the directory already exists (and hides real failures too).
if ($directory !== null) {
@mkdir($OUTPUT_DIRECTORY.$directory, 0755, true);
}
@file_put_contents($OUTPUT_DIRECTORY.$slug.".md", trim($content));
// Conduit "Upload"
// NOTE(review): a fresh ConduitClient is constructed for every page;
// hoisting it above the loop would avoid the repeated setup.
$client = new ConduitClient($CONDUIT_URL);
$client->setConduitToken($CONDUIT_API_TOKEN);
$api_parameters = [
'slug' => $slug,
'title' => ucfirst($page),
'content' => trim($content),
'description' => ""
];
// Try to create the document; if it already exists, fall back to editing
// it. Only when both calls fail is the page reported (to stdout) as lost.
try {
$client->callMethodSynchronous('phriction.create', $api_parameters);
} catch (Exception $e) {
try {
$client->callMethodSynchronous('phriction.edit', $api_parameters);
} catch (Exception $e) {
echo "FAILED to edit or create Phriction document ".$slug."\n";
}
}
}