Merge branch 'ab/mediawiki-fixes'
Modernization and fixes to MediaWiki remote backend.

* ab/mediawiki-fixes:
  remote-mediawiki: use "sh" to eliminate unquoted commands
  remote-mediawiki: annotate unquoted uses of run_git()
  remote-mediawiki: convert to quoted run_git() invocation
  remote-mediawiki: provide a list form of run_git()
  remote-mediawiki tests: annotate failing tests
  remote-mediawiki: fix duplicate revisions being imported
  remote-mediawiki tests: use CLI installer
  remote-mediawiki tests: use inline PerlIO for readability
  remote-mediawiki tests: replace deprecated Perl construct
  remote-mediawiki tests: use a more idiomatic dispatch table
  remote-mediawiki tests: use "$dir/" instead of "$dir."
  remote-mediawiki tests: change `[]` to `test`
  remote-mediawiki tests: use test_cmp in tests
  remote-mediawiki tests: use a 10 character password
  remote-mediawiki tests: use the login/password variables
  remote-mediawiki doc: don't hardcode Debian PHP versions
  remote-mediawiki doc: link to MediaWiki's current version
  remote-mediawiki doc: correct link to GitHub project
commit 5a25615d5c
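The core of the series is the quoting change summarized above: run_git() used to interpolate its arguments into a single string handed to a shell, and the patches below replace it with list-form helpers (run_git_quoted() and run_git_quoted_nostderr()). A minimal standalone sketch of the idea, not the patch itself; the helper name run_git_list and its example invocation are made up for illustration:

#!/usr/bin/perl
# Sketch only: pass git's arguments as a list so that an argument containing
# spaces or shell metacharacters reaches git as a single argv entry, instead
# of being re-parsed by a shell as in run_git("command args").
use strict;
use warnings;

sub run_git_list {
	my ($args, $encoding) = @_;
	$encoding ||= 'encoding(UTF-8)';
	# The list form of open() execs git directly; no shell is involved.
	open(my $git, "-|:${encoding}", 'git', @{$args})
	    or die "Unable to fork: $!\n";
	my $res = do { local $/ = undef; <$git> };
	close($git);
	return $res;
}

# Each element is one argv entry; an element like "My Page" could not be
# word-split or glob-expanded on its way to git.
print run_git_list(['version']);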
@@ -6,7 +6,7 @@
 # License: GPL v2 or later
 
 # Set of tools for git repo with a mediawiki remote.
-# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
+# Documentation & bugtracker: https://github.com/Git-Mediawiki/Git-Mediawiki
 
 use strict;
 use warnings;
@@ -9,7 +9,7 @@
 # License: GPL v2 or later
 
 # Gateway between Git and MediaWiki.
-# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
+# Documentation & bugtracker: https://github.com/Git-Mediawiki/Git-Mediawiki
 
 use strict;
 use MediaWiki::API;
@@ -56,38 +56,38 @@ my $url = $ARGV[1];
 
 # Accept both space-separated and multiple keys in config file.
 # Spaces should be written as _ anyway because we'll use chomp.
-my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
+my @tracked_pages = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.pages"]));
 chomp(@tracked_pages);
 
 # Just like @tracked_pages, but for MediaWiki categories.
-my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
+my @tracked_categories = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.categories"]));
 chomp(@tracked_categories);
 
 # Just like @tracked_categories, but for MediaWiki namespaces.
-my @tracked_namespaces = split(/[ \n]/, run_git("config --get-all remote.${remotename}.namespaces"));
+my @tracked_namespaces = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.namespaces"]));
 for (@tracked_namespaces) { s/_/ /g; }
 chomp(@tracked_namespaces);
 
 # Import media files on pull
-my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
+my $import_media = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.mediaimport"]);
 chomp($import_media);
 $import_media = ($import_media eq 'true');
 
 # Export media files on push
-my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
+my $export_media = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.mediaexport"]);
 chomp($export_media);
 $export_media = !($export_media eq 'false');
 
-my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
+my $wiki_login = run_git_quoted(["config", "--get", "remote.${remotename}.mwLogin"]);
 # Note: mwPassword is discouraged. Use the credential system instead.
-my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
-my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
+my $wiki_passwd = run_git_quoted(["config", "--get", "remote.${remotename}.mwPassword"]);
+my $wiki_domain = run_git_quoted(["config", "--get", "remote.${remotename}.mwDomain"]);
 chomp($wiki_login);
 chomp($wiki_passwd);
 chomp($wiki_domain);
 
 # Import only last revisions (both for clone and fetch)
-my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
+my $shallow_import = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.shallow"]);
 chomp($shallow_import);
 $shallow_import = ($shallow_import eq 'true');
@@ -97,9 +97,9 @@ $shallow_import = ($shallow_import eq 'true');
 # Possible values:
 # - by_rev: perform one query per new revision on the remote wiki
 # - by_page: query each tracked page for new revision
-my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
+my $fetch_strategy = run_git_quoted(["config", "--get", "remote.${remotename}.fetchStrategy"]);
 if (!$fetch_strategy) {
-	$fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
+	$fetch_strategy = run_git_quoted(["config", "--get", "mediawiki.fetchStrategy"]);
 }
 chomp($fetch_strategy);
 if (!$fetch_strategy) {
@@ -123,9 +123,9 @@ my %basetimestamps;
 # will get the history with information lost). If the import is
 # deterministic, this means everybody gets the same sha1 for each
 # MediaWiki revision.
-my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
+my $dumb_push = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.dumbPush"]);
 if (!$dumb_push) {
-	$dumb_push = run_git('config --get --bool mediawiki.dumbPush');
+	$dumb_push = run_git_quoted(["config", "--get", "--bool", "mediawiki.dumbPush"]);
 }
 chomp($dumb_push);
 $dumb_push = ($dumb_push eq 'true');
@@ -369,12 +369,14 @@ sub get_mw_pages {
 	return %pages;
 }
 
-# usage: $out = run_git("command args");
-#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
-sub run_git {
+# usage: $out = run_git_quoted(["command", "args", ...]);
+#        $out = run_git_quoted(["command", "args", ...], "raw"); # don't interpret output as UTF-8.
+#        $out = run_git_quoted_nostderr(["command", "args", ...]); # discard stderr
+#        $out = run_git_quoted_nostderr(["command", "args", ...], "raw"); # ditto but raw instead of UTF-8 as above
+sub _run_git {
 	my $args = shift;
 	my $encoding = (shift || 'encoding(UTF-8)');
-	open(my $git, "-|:${encoding}", "git ${args}")
+	open(my $git, "-|:${encoding}", @$args)
 	    or die "Unable to fork: $!\n";
 	my $res = do {
 		local $/ = undef;
@@ -385,6 +387,13 @@ sub run_git {
 	return $res;
 }
 
+sub run_git_quoted {
+	_run_git(["git", @{$_[0]}], $_[1]);
+}
+
+sub run_git_quoted_nostderr {
+	_run_git(['sh', '-c', 'git "$@" 2>/dev/null', '--', @{$_[0]}], $_[1]);
+}
+
 sub get_all_mediafiles {
 	my $pages = shift;
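A note on the run_git_quoted_nostderr() hunk above: the 'sh', '-c', 'git "$@" 2>/dev/null', '--' prefix makes the quoted string the one-line shell script, '--' ends sh's own option parsing, and the remaining list elements become the script's positional parameters, so stderr is silenced without the caller ever re-quoting arguments. A small self-contained sketch of that pattern (the rev-parse arguments are just an example):

#!/usr/bin/perl
use strict;
use warnings;

# Same sh -c '"$@"' trick as the hunk above: the list after '--' is handed to
# the one-line script as "$@", so no element needs manual shell quoting.
my @cmd = ('sh', '-c', 'git "$@" 2>/dev/null', '--', 'rev-parse', '--verify', 'no-such-ref');
open(my $fh, '-|', @cmd) or die "Unable to fork: $!\n";
my $out = do { local $/ = undef; <$fh> };
close($fh);
printf "captured stdout: '%s' (stderr was discarded)\n", defined $out ? $out : '';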
@@ -511,8 +520,9 @@ sub download_mw_mediafile {
 }
 
 sub get_last_local_revision {
-	# Get note regarding last mediawiki revision
-	my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
+	# Get note regarding last mediawiki revision.
+	my $note = run_git_quoted_nostderr(["notes", "--ref=${remotename}/mediawiki",
+					    "show", "refs/mediawiki/${remotename}/master"]);
 	my @note_info = split(/ /, $note);
 
 	my $lastrevision_number;
@@ -807,7 +817,10 @@ sub get_more_refs {
 sub mw_import {
 	# multiple import commands can follow each other.
 	my @refs = (shift, get_more_refs('import'));
+	my $processedRefs;
 	foreach my $ref (@refs) {
+		next if $processedRefs->{$ref}; # skip duplicates: "import refs/heads/master" being issued twice; TODO: why?
+		$processedRefs->{$ref} = 1;
 		mw_import_ref($ref);
 	}
 	print {*STDOUT} "done\n";
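The mw_import() hunk above fixes the duplicate-import problem with a plain seen-hash. A tiny standalone sketch of that idiom, with example refs rather than real remote-helper input:

#!/usr/bin/perl
use strict;
use warnings;

# Hash-as-set deduplication, as in the hunk above; the refs are example data.
my @refs = ('refs/heads/master', 'refs/heads/master', 'refs/heads/topic');
my %processed;
foreach my $ref (@refs) {
	next if $processed{$ref}++;   # the second 'master' is skipped here
	print "importing $ref\n";     # stands in for mw_import_ref($ref)
}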
@@ -970,7 +983,7 @@ sub mw_import_revids {
 }
 
 sub error_non_fast_forward {
-	my $advice = run_git('config --bool advice.pushNonFastForward');
+	my $advice = run_git_quoted(["config", "--bool", "advice.pushNonFastForward"]);
 	chomp($advice);
 	if ($advice ne 'false') {
 		# Native git-push would show this after the summary.
@@ -1014,7 +1027,7 @@ sub mw_upload_file {
 		}
 	} else {
 		# Don't let perl try to interpret file content as UTF-8 => use "raw"
-		my $content = run_git("cat-file blob ${new_sha1}", 'raw');
+		my $content = run_git_quoted(["cat-file", "blob", $new_sha1], 'raw');
 		if ($content ne EMPTY) {
 			$mediawiki = connect_maybe($mediawiki, $remotename, $url);
 			$mediawiki->{config}->{upload_url} =
@@ -1084,7 +1097,7 @@ sub mw_push_file {
 		# with this content instead:
 		$file_content = DELETED_CONTENT;
 	} else {
-		$file_content = run_git("cat-file blob ${new_sha1}");
+		$file_content = run_git_quoted(["cat-file", "blob", $new_sha1]);
 	}
 
 	$mediawiki = connect_maybe($mediawiki, $remotename, $url);
@@ -1174,10 +1187,10 @@ sub mw_push_revision {
 	my $mw_revision = $last_remote_revid;
 
 	# Get sha1 of commit pointed by local HEAD
-	my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
+	my $HEAD_sha1 = run_git_quoted_nostderr(["rev-parse", $local]);
 	chomp($HEAD_sha1);
 	# Get sha1 of commit pointed by remotes/$remotename/master
-	my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
+	my $remoteorigin_sha1 = run_git_quoted_nostderr(["rev-parse", "refs/remotes/${remotename}/master"]);
 	chomp($remoteorigin_sha1);
 
 	if ($last_local_revid > 0 &&
@@ -1197,7 +1210,7 @@ sub mw_push_revision {
 	my $parsed_sha1 = $remoteorigin_sha1;
 	# Find a path from last MediaWiki commit to pushed commit
 	print {*STDERR} "Computing path from local to remote ...\n";
-	my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
+	my @local_ancestry = split(/\n/, run_git_quoted(["rev-list", "--boundary", "--parents", $local, "^${parsed_sha1}"]));
 	my %local_ancestry;
 	foreach my $line (@local_ancestry) {
 		if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
@@ -1221,7 +1234,7 @@ sub mw_push_revision {
 		# No remote mediawiki revision. Export the whole
 		# history (linearized with --first-parent)
 		print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
-		my $history = run_git("rev-list --first-parent --children ${local}");
+		my $history = run_git_quoted(["rev-list", "--first-parent", "--children", $local]);
 		my @history = split(/\n/, $history);
 		@history = @history[1..$#history];
 		foreach my $line (reverse @history) {
@@ -1233,12 +1246,12 @@ sub mw_push_revision {
 	foreach my $commit_info_split (@commit_pairs) {
 		my $sha1_child = @{$commit_info_split}[0];
 		my $sha1_commit = @{$commit_info_split}[1];
-		my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
+		my $diff_infos = run_git_quoted(["diff-tree", "-r", "--raw", "-z", $sha1_child, $sha1_commit]);
 		# TODO: we could detect rename, and encode them with a #redirect on the wiki.
 		# TODO: for now, it's just a delete+add
 		my @diff_info_list = split(/\0/, $diff_infos);
 		# Keep the subject line of the commit message as mediawiki comment for the revision
-		my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
+		my $commit_msg = run_git_quoted(["log", "--no-walk", '--format="%s"', $sha1_commit]);
 		chomp($commit_msg);
 		# Push every blob
 		while (@diff_info_list) {
@@ -1263,7 +1276,10 @@ sub mw_push_revision {
 			}
 		}
 		if (!$dumb_push) {
-			run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
+			run_git_quoted(["notes", "--ref=${remotename}/mediawiki",
+					"add", "-f", "-m",
+					"mediawiki_revision: ${mw_revision}",
+					$sha1_commit]);
 		}
 	}
 
@@ -1304,7 +1320,7 @@ sub get_mw_namespace_id {
 		# already cached. Namespaces are stored in form:
 		# "Name_of_namespace:Id_namespace", ex.: "File:6".
 		my @temp = split(/\n/,
-				 run_git("config --get-all remote.${remotename}.namespaceCache"));
+				 run_git_quoted(["config", "--get-all", "remote.${remotename}.namespaceCache"]));
 		chomp(@temp);
 		foreach my $ns (@temp) {
 			my ($n, $id) = split(/:/, $ns);
@@ -1358,7 +1374,7 @@ sub get_mw_namespace_id {
 
 	# Store explicitly requested namespaces on disk
 	if (!exists $cached_mw_namespace_id{$name}) {
-		run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
+		run_git_quoted(["config", "--add", "remote.${remotename}.namespaceCache", "${name}:${store_id}"]);
 		$cached_mw_namespace_id{$name} = 1;
 	}
 	return $id;
@@ -4,4 +4,4 @@ objects from mediawiki just as one would do with a classic git
 repository thanks to remote-helpers.
 
 For more information, visit the wiki at
-https://github.com/moy/Git-Mediawiki/wiki
+https://github.com/Git-Mediawiki/Git-Mediawiki
contrib/mw-to-git/t/.gitignore (vendored, 2 lines changed)
@@ -1,4 +1,4 @@
 WEB/
-wiki/
+mediawiki/
 trash directory.t*/
 test-results/
@@ -14,11 +14,11 @@ install the following packages (Debian/Ubuntu names, may need to be
 adapted for another distribution):
 
 * lighttpd
-* php5
-* php5-cgi
-* php5-cli
-* php5-curl
-* php5-sqlite
+* php
+* php-cgi
+* php-cli
+* php-curl
+* php-sqlite
 
 Principles and Technical Choices
 --------------------------------
contrib/mw-to-git/t/install-wiki/.gitignore (vendored, 1 line changed)
@@ -1 +0,0 @@
-wikidb.sqlite
@ -1,129 +0,0 @@
|
||||
<?php
|
||||
# This file was automatically generated by the MediaWiki 1.19.0
|
||||
# installer. If you make manual changes, please keep track in case you
|
||||
# need to recreate them later.
|
||||
#
|
||||
# See includes/DefaultSettings.php for all configurable settings
|
||||
# and their default values, but don't forget to make changes in _this_
|
||||
# file, not there.
|
||||
#
|
||||
# Further documentation for configuration settings may be found at:
|
||||
# http://www.mediawiki.org/wiki/Manual:Configuration_settings
|
||||
|
||||
# Protect against web entry
|
||||
if ( !defined( 'MEDIAWIKI' ) ) {
|
||||
exit;
|
||||
}
|
||||
|
||||
## Uncomment this to disable output compression
|
||||
# $wgDisableOutputCompression = true;
|
||||
|
||||
$wgSitename = "Git-MediaWiki-Test";
|
||||
$wgMetaNamespace = "Git-MediaWiki-Test";
|
||||
|
||||
## The URL base path to the directory containing the wiki;
|
||||
## defaults for all runtime URL paths are based off of this.
|
||||
## For more information on customizing the URLs please see:
|
||||
## http://www.mediawiki.org/wiki/Manual:Short_URL
|
||||
$wgScriptPath = "@WG_SCRIPT_PATH@";
|
||||
$wgScriptExtension = ".php";
|
||||
|
||||
## The protocol and server name to use in fully-qualified URLs
|
||||
$wgServer = "@WG_SERVER@";
|
||||
|
||||
## The relative URL path to the skins directory
|
||||
$wgStylePath = "$wgScriptPath/skins";
|
||||
|
||||
## The relative URL path to the logo. Make sure you change this from the default,
|
||||
## or else you'll overwrite your logo when you upgrade!
|
||||
$wgLogo = "$wgStylePath/common/images/wiki.png";
|
||||
|
||||
## UPO means: this is also a user preference option
|
||||
|
||||
$wgEnableEmail = true;
|
||||
$wgEnableUserEmail = true; # UPO
|
||||
|
||||
$wgEmergencyContact = "apache@localhost";
|
||||
$wgPasswordSender = "apache@localhost";
|
||||
|
||||
$wgEnotifUserTalk = false; # UPO
|
||||
$wgEnotifWatchlist = false; # UPO
|
||||
$wgEmailAuthentication = true;
|
||||
|
||||
## Database settings
|
||||
$wgDBtype = "sqlite";
|
||||
$wgDBserver = "";
|
||||
$wgDBname = "@WG_SQLITE_DATAFILE@";
|
||||
$wgDBuser = "";
|
||||
$wgDBpassword = "";
|
||||
|
||||
# SQLite-specific settings
|
||||
$wgSQLiteDataDir = "@WG_SQLITE_DATADIR@";
|
||||
|
||||
|
||||
## Shared memory settings
|
||||
$wgMainCacheType = CACHE_NONE;
|
||||
$wgMemCachedServers = array();
|
||||
|
||||
## To enable image uploads, make sure the 'images' directory
|
||||
## is writable, then set this to true:
|
||||
$wgEnableUploads = true;
|
||||
$wgUseImageMagick = true;
|
||||
$wgImageMagickConvertCommand ="@CONVERT@";
|
||||
$wgFileExtensions[] = 'txt';
|
||||
|
||||
# InstantCommons allows wiki to use images from http://commons.wikimedia.org
|
||||
$wgUseInstantCommons = false;
|
||||
|
||||
## If you use ImageMagick (or any other shell command) on a
|
||||
## Linux server, this will need to be set to the name of an
|
||||
## available UTF-8 locale
|
||||
$wgShellLocale = "en_US.utf8";
|
||||
|
||||
## If you want to use image uploads under safe mode,
|
||||
## create the directories images/archive, images/thumb and
|
||||
## images/temp, and make them all writable. Then uncomment
|
||||
## this, if it's not already uncommented:
|
||||
#$wgHashedUploadDirectory = false;
|
||||
|
||||
## Set $wgCacheDirectory to a writable directory on the web server
|
||||
## to make your wiki go slightly faster. The directory should not
|
||||
## be publicly accessible from the web.
|
||||
#$wgCacheDirectory = "$IP/cache";
|
||||
|
||||
# Site language code, should be one of the list in ./languages/Names.php
|
||||
$wgLanguageCode = "en";
|
||||
|
||||
$wgSecretKey = "1c912bfe3519fb70f5dc523ecc698111cd43d81a11c585b3eefb28f29c2699b7";
|
||||
#$wgSecretKey = "@SECRETKEY@";
|
||||
|
||||
|
||||
# Site upgrade key. Must be set to a string (default provided) to turn on the
|
||||
# web installer while LocalSettings.php is in place
|
||||
$wgUpgradeKey = "ddae7dc87cd0a645";
|
||||
|
||||
## Default skin: you can change the default skin. Use the internal symbolic
|
||||
## names, ie 'standard', 'nostalgia', 'cologneblue', 'monobook', 'vector':
|
||||
$wgDefaultSkin = "vector";
|
||||
|
||||
## For attaching licensing metadata to pages, and displaying an
|
||||
## appropriate copyright notice / icon. GNU Free Documentation
|
||||
## License and Creative Commons licenses are supported so far.
|
||||
$wgRightsPage = ""; # Set to the title of a wiki page that describes your license/copyright
|
||||
$wgRightsUrl = "";
|
||||
$wgRightsText = "";
|
||||
$wgRightsIcon = "";
|
||||
|
||||
# Path to the GNU diff3 utility. Used for conflict resolution.
|
||||
$wgDiff3 = "/usr/bin/diff3";
|
||||
|
||||
# Query string length limit for ResourceLoader. You should only set this if
|
||||
# your web server has a query string length limit (then set it to that limit),
|
||||
# or if you have suhosin.get.max_value_length set in php.ini (then set it to
|
||||
# that value)
|
||||
$wgResourceLoaderMaxQueryLength = -1;
|
||||
|
||||
|
||||
|
||||
# End of automatically generated settings.
|
||||
# Add more configuration options below.
|
@ -1,120 +0,0 @@
|
||||
<?php
|
||||
/**
|
||||
* This script generates a SQLite database for a MediaWiki version 1.19.0
|
||||
* You must specify the login of the admin (argument 1) and its
|
||||
* password (argument 2) and the folder where the database file
|
||||
* is located (absolute path in argument 3).
|
||||
* It is used by the script install-wiki.sh in order to make easy the
|
||||
* installation of a MediaWiki.
|
||||
*
|
||||
* In order to generate a SQLite database file, MediaWiki ask the user
|
||||
* to submit some forms in its web browser. This script simulates this
|
||||
* behavior though the functions <get> and <submit>
|
||||
*
|
||||
*/
|
||||
$argc = $_SERVER['argc'];
|
||||
$argv = $_SERVER['argv'];
|
||||
|
||||
$login = $argv[2];
|
||||
$pass = $argv[3];
|
||||
$tmp = $argv[4];
|
||||
$port = $argv[5];
|
||||
|
||||
$url = 'http://localhost:'.$port.'/wiki/mw-config/index.php';
|
||||
$db_dir = urlencode($tmp);
|
||||
$tmp_cookie = tempnam($tmp, "COOKIE_");
|
||||
/*
|
||||
* Fetches a page with cURL.
|
||||
*/
|
||||
function get($page_name = "") {
|
||||
$curl = curl_init();
|
||||
$page_name_add = "";
|
||||
if ($page_name != "") {
|
||||
$page_name_add = '?page='.$page_name;
|
||||
}
|
||||
$url = $GLOBALS['url'].$page_name_add;
|
||||
$tmp_cookie = $GLOBALS['tmp_cookie'];
|
||||
curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
|
||||
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
|
||||
curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
|
||||
curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
|
||||
curl_setopt($curl, CURLOPT_HEADER, true);
|
||||
curl_setopt($curl, CURLOPT_URL, $url);
|
||||
|
||||
$page = curl_exec($curl);
|
||||
if (!$page) {
|
||||
die("Could not get page: $url\n");
|
||||
}
|
||||
curl_close($curl);
|
||||
return $page;
|
||||
}
|
||||
|
||||
/*
|
||||
* Submits a form with cURL.
|
||||
*/
|
||||
function submit($page_name, $option = "") {
|
||||
$curl = curl_init();
|
||||
$datapost = 'submit-continue=Continue+%E2%86%92';
|
||||
if ($option != "") {
|
||||
$datapost = $option.'&'.$datapost;
|
||||
}
|
||||
$url = $GLOBALS['url'].'?page='.$page_name;
|
||||
$tmp_cookie = $GLOBALS['tmp_cookie'];
|
||||
curl_setopt($curl, CURLOPT_URL, $url);
|
||||
curl_setopt($curl, CURLOPT_POST, true);
|
||||
curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
|
||||
curl_setopt($curl, CURLOPT_POSTFIELDS, $datapost);
|
||||
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
|
||||
curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
|
||||
curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
|
||||
|
||||
$page = curl_exec($curl);
|
||||
if (!$page) {
|
||||
die("Could not get page: $url\n");
|
||||
}
|
||||
curl_close($curl);
|
||||
return "$page";
|
||||
}
|
||||
|
||||
/*
|
||||
* Here starts this script: simulates the behavior of the user
|
||||
* submitting forms to generates the database file.
|
||||
* Note this simulation was made for the MediaWiki version 1.19.0,
|
||||
* we can't assume it works with other versions.
|
||||
*
|
||||
*/
|
||||
|
||||
$page = get();
|
||||
if (!preg_match('/input type="hidden" value="([0-9]+)" name="LanguageRequestTime"/',
|
||||
$page, $matches)) {
|
||||
echo "Unexpected content for page downloaded:\n";
|
||||
echo "$page";
|
||||
die;
|
||||
};
|
||||
$timestamp = $matches[1];
|
||||
$language = "LanguageRequestTime=$timestamp&uselang=en&ContLang=en";
|
||||
$page = submit('Language', $language);
|
||||
|
||||
submit('Welcome');
|
||||
|
||||
$db_config = 'DBType=sqlite';
|
||||
$db_config = $db_config.'&sqlite_wgSQLiteDataDir='.$db_dir;
|
||||
$db_config = $db_config.'&sqlite_wgDBname='.$argv[1];
|
||||
submit('DBConnect', $db_config);
|
||||
|
||||
$wiki_config = 'config_wgSitename=TEST';
|
||||
$wiki_config = $wiki_config.'&config__NamespaceType=site-name';
|
||||
$wiki_config = $wiki_config.'&config_wgMetaNamespace=MyWiki';
|
||||
$wiki_config = $wiki_config.'&config__AdminName='.$login;
|
||||
|
||||
$wiki_config = $wiki_config.'&config__AdminPassword='.$pass;
|
||||
$wiki_config = $wiki_config.'&config__AdminPassword2='.$pass;
|
||||
|
||||
$wiki_config = $wiki_config.'&wiki__configEmail=email%40email.org';
|
||||
$wiki_config = $wiki_config.'&config__SkipOptional=skip';
|
||||
submit('Name', $wiki_config);
|
||||
submit('Install');
|
||||
submit('Install');
|
||||
|
||||
unlink($tmp_cookie);
|
||||
?>
|
@@ -28,7 +28,7 @@ test_expect_success 'Git clone creates the expected git log with one file' '
 		git log --format=%s HEAD^..HEAD >log.tmp
 	) &&
 	echo "this must be the same" >msg.tmp &&
-	diff -b mw_dir_1/log.tmp msg.tmp
+	test_cmp msg.tmp mw_dir_1/log.tmp
 '
 
 
@@ -50,8 +50,8 @@ test_expect_success 'Git clone creates the expected git log with multiple files'
 	echo "this must be the same" >>msgDaddy.tmp &&
 	echo "identical too" >msgDj.tmp &&
 	echo "identical" >>msgDj.tmp &&
-	diff -b mw_dir_2/logDaddy.tmp msgDaddy.tmp &&
-	diff -b mw_dir_2/logDj.tmp msgDj.tmp
+	test_cmp msgDaddy.tmp mw_dir_2/logDaddy.tmp &&
+	test_cmp msgDj.tmp mw_dir_2/logDj.tmp
 '
 
 
@@ -135,7 +135,7 @@ test_expect_success 'Git clone works with one specific page cloned ' '
 		cd mw_dir_8 &&
 		echo "this log must stay" >msg.tmp &&
 		git log --format=%s >log.tmp &&
-		diff -b msg.tmp log.tmp
+		test_cmp msg.tmp log.tmp
 	) &&
 	wiki_check_content mw_dir_8/Namnam.mw Namnam
 '
@@ -27,12 +27,12 @@ test_git_reimport () {
 
 # Don't bother with permissions, be administrator by default
 test_expect_success 'setup config' '
-	git config --global remote.origin.mwLogin WikiAdmin &&
-	git config --global remote.origin.mwPassword AdminPass &&
+	git config --global remote.origin.mwLogin "$WIKI_ADMIN" &&
+	git config --global remote.origin.mwPassword "$WIKI_PASSW" &&
 	test_might_fail git config --global --unset remote.origin.mediaImport
 '
 
-test_expect_success 'git push can upload media (File:) files' '
+test_expect_failure 'git push can upload media (File:) files' '
 	wiki_reset &&
 	git clone mediawiki::'"$WIKI_URL"' mw_dir &&
 	(
@@ -48,13 +48,14 @@ test_expect_success 'git push can upload media (File:) files' '
 	)
 '
 
-test_expect_success 'git clone works on previously created wiki with media files' '
+test_expect_failure 'git clone works on previously created wiki with media files' '
 	test_when_finished "rm -rf mw_dir mw_dir_clone" &&
 	git clone -c remote.origin.mediaimport=true \
 		mediawiki::'"$WIKI_URL"' mw_dir_clone &&
 	test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt &&
 	(cd mw_dir_clone && git checkout HEAD^) &&
 	(cd mw_dir && git checkout HEAD^) &&
 	test_path_is_file mw_dir_clone/Foo.txt &&
 	test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt
 '
 
@@ -13,7 +13,8 @@
 
 . ./test.config
 
-WIKI_URL=http://"$SERVER_ADDR:$PORT/$WIKI_DIR_NAME"
+WIKI_BASE_URL=http://$SERVER_ADDR:$PORT
+WIKI_URL=$WIKI_BASE_URL/$WIKI_DIR_NAME
 CURR_DIR=$(pwd)
 TEST_OUTPUT_DIRECTORY=$(pwd)
 TEST_DIRECTORY="$CURR_DIR"/../../../t
@@ -65,7 +66,7 @@ test_check_precond () {
 	GIT_EXEC_PATH=$(cd "$(dirname "$0")" && cd "../.." && pwd)
 	PATH="$GIT_EXEC_PATH"'/bin-wrapper:'"$PATH"
 
-	if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ];
+	if ! test -d "$WIKI_DIR_INST/$WIKI_DIR_NAME"
 	then
 		skip_all='skipping gateway git-mw tests, no mediawiki found'
 		test_done
@ -291,27 +292,59 @@ stop_lighttpd () {
|
||||
test -f "$WEB_TMP/pid" && kill $(cat "$WEB_TMP/pid")
|
||||
}
|
||||
|
||||
# Create the SQLite database of the MediaWiki. If the database file already
|
||||
# exists, it will be deleted.
|
||||
# This script should be runned from the directory where $FILES_FOLDER is
|
||||
# located.
|
||||
create_db () {
|
||||
rm -f "$TMP/$DB_FILE"
|
||||
wiki_delete_db () {
|
||||
rm -rf \
|
||||
"$FILES_FOLDER_DB"/* || error "Couldn't delete $FILES_FOLDER_DB/"
|
||||
}
|
||||
|
||||
echo "Generating the SQLite database file. It can take some time ..."
|
||||
# Run the php script to generate the SQLite database file
|
||||
# with cURL calls.
|
||||
php "$FILES_FOLDER/$DB_INSTALL_SCRIPT" $(basename "$DB_FILE" .sqlite) \
|
||||
"$WIKI_ADMIN" "$WIKI_PASSW" "$TMP" "$PORT"
|
||||
wiki_delete_db_backup () {
|
||||
rm -rf \
|
||||
"$FILES_FOLDER_POST_INSTALL_DB"/* || error "Couldn't delete $FILES_FOLDER_POST_INSTALL_DB/"
|
||||
}
|
||||
|
||||
if [ ! -f "$TMP/$DB_FILE" ] ; then
|
||||
error "Can't create database file $TMP/$DB_FILE. Try to run ./install-wiki.sh delete first."
|
||||
# Install MediaWiki using its install.php script. If the database file
|
||||
# already exists, it will be deleted.
|
||||
install_mediawiki () {
|
||||
|
||||
localsettings="$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php"
|
||||
if test -f "$localsettings"
|
||||
then
|
||||
error "We already installed the wiki, since $localsettings exists" \
|
||||
"perhaps you wanted to run 'delete' first?"
|
||||
fi
|
||||
|
||||
# Copy the generated database file into the directory the
|
||||
# user indicated.
|
||||
cp "$TMP/$DB_FILE" "$FILES_FOLDER" ||
|
||||
error "Unable to copy $TMP/$DB_FILE to $FILES_FOLDER"
|
||||
wiki_delete_db
|
||||
wiki_delete_db_backup
|
||||
mkdir \
|
||||
"$FILES_FOLDER_DB/" \
|
||||
"$FILES_FOLDER_POST_INSTALL_DB/"
|
||||
|
||||
install_script="$WIKI_DIR_INST/$WIKI_DIR_NAME/maintenance/install.php"
|
||||
echo "Installing MediaWiki using $install_script. This may take some time ..."
|
||||
|
||||
php "$WIKI_DIR_INST/$WIKI_DIR_NAME/maintenance/install.php" \
|
||||
--server $WIKI_BASE_URL \
|
||||
--scriptpath /wiki \
|
||||
--lang en \
|
||||
--dbtype sqlite \
|
||||
--dbpath $PWD/$FILES_FOLDER_DB/ \
|
||||
--pass "$WIKI_PASSW" \
|
||||
Git-MediaWiki-Test \
|
||||
"$WIKI_ADMIN" ||
|
||||
error "Couldn't run $install_script, see errors above. Try to run ./install-wiki.sh delete first."
|
||||
cat <<-'EOF' >>$localsettings
|
||||
# Custom settings added by test-gitmw-lib.sh
|
||||
#
|
||||
# Uploading text files is needed for
|
||||
# t9363-mw-to-git-export-import.sh
|
||||
$wgEnableUploads = true;
|
||||
$wgFileExtensions[] = 'txt';
|
||||
EOF
|
||||
|
||||
# Copy the initially generated database file into our backup
|
||||
# folder
|
||||
cp -R "$FILES_FOLDER_DB/"* "$FILES_FOLDER_POST_INSTALL_DB/" ||
|
||||
error "Unable to copy $FILES_FOLDER_DB/* to $FILES_FOLDER_POST_INSTALL_DB/*"
|
||||
}
|
||||
|
||||
# Install a wiki in your web server directory.
|
||||
@ -320,30 +353,33 @@ wiki_install () {
|
||||
start_lighttpd
|
||||
fi
|
||||
|
||||
SERVER_ADDR=$SERVER_ADDR:$PORT
|
||||
# In this part, we change directory to $TMP in order to download,
|
||||
# unpack and copy the files of MediaWiki
|
||||
(
|
||||
mkdir -p "$WIKI_DIR_INST/$WIKI_DIR_NAME"
|
||||
if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ] ; then
|
||||
if ! test -d "$WIKI_DIR_INST/$WIKI_DIR_NAME"
|
||||
then
|
||||
error "Folder $WIKI_DIR_INST/$WIKI_DIR_NAME doesn't exist.
|
||||
Please create it and launch the script again."
|
||||
fi
|
||||
|
||||
# Fetch MediaWiki's archive if not already present in the TMP directory
|
||||
# Fetch MediaWiki's archive if not already present in the
|
||||
# download directory
|
||||
mkdir -p "$FILES_FOLDER_DOWNLOAD"
|
||||
MW_FILENAME="mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
|
||||
cd "$TMP"
|
||||
if [ ! -f $MW_FILENAME ] ; then
|
||||
cd "$FILES_FOLDER_DOWNLOAD"
|
||||
if ! test -f $MW_FILENAME
|
||||
then
|
||||
echo "Downloading $MW_VERSION_MAJOR.$MW_VERSION_MINOR sources ..."
|
||||
wget "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/$MW_FILENAME" ||
|
||||
error "Unable to download "\
|
||||
"http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/"\
|
||||
"$MW_FILENAME. "\
|
||||
"Please fix your connection and launch the script again."
|
||||
echo "$MW_FILENAME downloaded in $(pwd). "\
|
||||
"You can delete it later if you want."
|
||||
echo "$MW_FILENAME downloaded in $(pwd)/;" \
|
||||
"you can delete it later if you want."
|
||||
else
|
||||
echo "Reusing existing $MW_FILENAME downloaded in $(pwd)."
|
||||
echo "Reusing existing $MW_FILENAME downloaded in $(pwd)/"
|
||||
fi
|
||||
archive_abs_path=$(pwd)/$MW_FILENAME
|
||||
cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
|
||||
@ -352,48 +388,12 @@ wiki_install () {
|
||||
error "Unable to extract WikiMedia's files from $archive_abs_path to "\
|
||||
"$WIKI_DIR_INST/$WIKI_DIR_NAME"
|
||||
) || exit 1
|
||||
echo Extracted in "$WIKI_DIR_INST/$WIKI_DIR_NAME"
|
||||
|
||||
create_db
|
||||
|
||||
# Copy the generic LocalSettings.php in the web server's directory
|
||||
# And modify parameters according to the ones set at the top
|
||||
# of this script.
|
||||
# Note that LocalSettings.php is never modified.
|
||||
if [ ! -f "$FILES_FOLDER/LocalSettings.php" ] ; then
|
||||
error "Can't find $FILES_FOLDER/LocalSettings.php " \
|
||||
"in the current folder. "\
|
||||
"Please run the script inside its folder."
|
||||
fi
|
||||
cp "$FILES_FOLDER/LocalSettings.php" \
|
||||
"$FILES_FOLDER/LocalSettings-tmp.php" ||
|
||||
error "Unable to copy $FILES_FOLDER/LocalSettings.php " \
|
||||
"to $FILES_FOLDER/LocalSettings-tmp.php"
|
||||
|
||||
# Parse and set the LocalSettings file of the user according to the
|
||||
# CONFIGURATION VARIABLES section at the beginning of this script
|
||||
file_swap="$FILES_FOLDER/LocalSettings-swap.php"
|
||||
sed "s,@WG_SCRIPT_PATH@,/$WIKI_DIR_NAME," \
|
||||
"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
|
||||
mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
|
||||
sed "s,@WG_SERVER@,http://$SERVER_ADDR," \
|
||||
"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
|
||||
mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
|
||||
sed "s,@WG_SQLITE_DATADIR@,$TMP," \
|
||||
"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
|
||||
mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
|
||||
sed "s,@WG_SQLITE_DATAFILE@,$( basename $DB_FILE .sqlite)," \
|
||||
"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
|
||||
mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
|
||||
|
||||
mv "$FILES_FOLDER/LocalSettings-tmp.php" \
|
||||
"$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php" ||
|
||||
error "Unable to move $FILES_FOLDER/LocalSettings-tmp.php" \
|
||||
"in $WIKI_DIR_INST/$WIKI_DIR_NAME"
|
||||
echo "File $FILES_FOLDER/LocalSettings.php is set in" \
|
||||
" $WIKI_DIR_INST/$WIKI_DIR_NAME"
|
||||
install_mediawiki
|
||||
|
||||
echo "Your wiki has been installed. You can check it at
|
||||
http://$SERVER_ADDR/$WIKI_DIR_NAME"
|
||||
$WIKI_URL"
|
||||
}
|
||||
|
||||
# Reset the database of the wiki and the password of the admin
|
||||
@ -401,12 +401,18 @@ wiki_install () {
|
||||
# Warning: This function must be called only in a subdirectory of t/ directory
|
||||
wiki_reset () {
|
||||
# Copy initial database of the wiki
|
||||
if [ ! -f "../$FILES_FOLDER/$DB_FILE" ] ; then
|
||||
error "Can't find ../$FILES_FOLDER/$DB_FILE in the current folder."
|
||||
if ! test -d "../$FILES_FOLDER_DB"
|
||||
then
|
||||
error "No wiki database at ../$FILES_FOLDER_DB, not installed yet?"
|
||||
fi
|
||||
cp "../$FILES_FOLDER/$DB_FILE" "$TMP" ||
|
||||
error "Can't copy ../$FILES_FOLDER/$DB_FILE in $TMP"
|
||||
echo "File $FILES_FOLDER/$DB_FILE is set in $TMP"
|
||||
if ! test -d "../$FILES_FOLDER_POST_INSTALL_DB"
|
||||
then
|
||||
error "No wiki backup database at ../$FILES_FOLDER_POST_INSTALL_DB, failed installation?"
|
||||
fi
|
||||
wiki_delete_db
|
||||
cp -R "../$FILES_FOLDER_POST_INSTALL_DB/"* "../$FILES_FOLDER_DB/" ||
|
||||
error "Can't copy ../$FILES_FOLDER_POST_INSTALL_DB/* to ../$FILES_FOLDER_DB/*"
|
||||
echo "File $FILES_FOLDER_DB/* has been reset"
|
||||
}
|
||||
|
||||
# Delete the wiki created in the web server's directory and all its content
|
||||
@ -420,13 +426,7 @@ wiki_delete () {
|
||||
rm -rf "$WIKI_DIR_INST/$WIKI_DIR_NAME" ||
|
||||
error "Wiki's directory $WIKI_DIR_INST/" \
|
||||
"$WIKI_DIR_NAME could not be deleted"
|
||||
# Delete the wiki's SQLite database.
|
||||
rm -f "$TMP/$DB_FILE" ||
|
||||
error "Database $TMP/$DB_FILE could not be deleted."
|
||||
fi
|
||||
|
||||
# Delete the wiki's SQLite database
|
||||
rm -f "$TMP/$DB_FILE" || error "Database $TMP/$DB_FILE could not be deleted."
|
||||
rm -f "$FILES_FOLDER/$DB_FILE"
|
||||
rm -rf "$TMP/mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
|
||||
wiki_delete_db
|
||||
wiki_delete_db_backup
|
||||
}
|
||||
|
@@ -24,9 +24,7 @@
 
 use MediaWiki::API;
 use Getopt::Long;
-use encoding 'utf8';
 use DateTime::Format::ISO8601;
-use open ':encoding(utf8)';
 use constant SLASH_REPLACEMENT => "%2F";
 
 #Parsing of the config file
@@ -87,7 +85,7 @@ sub wiki_getpage {
 	# Replace spaces by underscore in the page name
 	$pagename =~ s/ /_/g;
 	$pagename =~ s/\//%2F/g;
-	open(my $file, ">$destdir/$pagename.mw");
+	open(my $file, ">:encoding(UTF-8)", "$destdir/$pagename.mw");
 	print $file "$content";
 	close ($file);
 
@@ -172,7 +170,7 @@ sub wiki_getallpagename {
 			cmlimit => 500 },
 		)
 		|| die $mw->{error}->{code}.": ".$mw->{error}->{details};
-		open(my $file, ">all.txt");
+		open(my $file, ">:encoding(UTF-8)", "all.txt");
 		foreach my $page (@{$mw_pages}) {
 			print $file "$page->{title}\n";
 		}
@@ -185,7 +183,7 @@ sub wiki_getallpagename {
 			aplimit => 500,
 		})
 		|| die $mw->{error}->{code}.": ".$mw->{error}->{details};
-		open(my $file, ">all.txt");
+		open(my $file, ">:encoding(UTF-8)", "all.txt");
 		foreach my $page (@{$mw_pages}) {
 			print $file "$page->{title}\n";
 		}
@@ -214,12 +212,12 @@ my $fct_to_call = shift;
 
 wiki_login($wiki_admin, $wiki_admin_pass);
 
-my %functions_to_call = qw(
-	upload_file    wiki_upload_file
-	get_page       wiki_getpage
-	delete_page    wiki_delete_page
-	edit_page      wiki_editpage
-	getallpagename wiki_getallpagename
+my %functions_to_call = (
+	upload_file    => \&wiki_upload_file,
+	get_page       => \&wiki_getpage,
+	delete_page    => \&wiki_delete_page,
+	edit_page      => \&wiki_editpage,
+	getallpagename => \&wiki_getallpagename,
 );
 die "$0 ERROR: wrong argument" unless exists $functions_to_call{$fct_to_call};
-&{$functions_to_call{$fct_to_call}}(@ARGV);
+$functions_to_call{$fct_to_call}->(map { utf8::decode($_); $_ } @ARGV);
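The dispatch-table hunk above replaces the qw() name-to-name map, called through a symbolic reference, with a name-to-coderef hash invoked as $hash{$name}->(@args). A minimal sketch of the coderef pattern with made-up handlers, not the real test-gitmw.pl functions:

#!/usr/bin/perl
use strict;
use warnings;

# Stand-in handlers for the sketch; the real script maps to wiki_* functions.
sub greet { print "hello @_\n" }
sub shout { print uc("@_"), "\n" }

my %dispatch = (
	greet => \&greet,
	shout => \&shout,
);

my $action = shift @ARGV // 'greet';
die "unknown action: $action\n" unless exists $dispatch{$action};
$dispatch{$action}->(@ARGV);   # coderef call; no string-to-function lookup needed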
@@ -3,15 +3,11 @@ WIKI_DIR_NAME=wiki
 
 # Login and password of the wiki's admin
 WIKI_ADMIN=WikiAdmin
-WIKI_PASSW=AdminPass
+WIKI_PASSW=AdminPass1
 
 # Address of the web server
 SERVER_ADDR=localhost
 
-# SQLite database of the wiki, named DB_FILE, is located in TMP
-TMP=/tmp
-DB_FILE=wikidb.sqlite
-
 # If LIGHTTPD is not set to true, the script will use the default
 # web server running in WIKI_DIR_INST.
 WIKI_DIR_INST=/var/www
@@ -28,10 +24,17 @@ WEB=WEB
 WEB_TMP=$WEB/tmp
 WEB_WWW=$WEB/www
 
+# Where our configuration for the wiki is located
+FILES_FOLDER=mediawiki
+FILES_FOLDER_DOWNLOAD=$FILES_FOLDER/download
+FILES_FOLDER_DB=$FILES_FOLDER/db
+FILES_FOLDER_POST_INSTALL_DB=$FILES_FOLDER/post-install-db
+
 # The variables below are used by the script to install a wiki.
 # You should not modify these unless you are modifying the script itself.
-# tested versions: 1.19.X -> 1.21.1
-MW_VERSION_MAJOR=1.21
-MW_VERSION_MINOR=1
-FILES_FOLDER=install-wiki
-DB_INSTALL_SCRIPT=db_install.php
+# tested versions: 1.19.X -> 1.21.1 -> 1.34.2
+#
+# See https://www.mediawiki.org/wiki/Download for what the latest
+# version is.
+MW_VERSION_MAJOR=1.34
+MW_VERSION_MINOR=2