So that we can easily include reference docs into mediawiki.
GFDL.adoc \
attributes.txt
-ADOC_STDARG= -a icons -a data-uri -a "date=$(shell date)"
-ADOC_MAN1_HTML_ARGS=-a "manvolnum=1" ${ADOC_STDARG} -a "revnumber=${DOCRELEASE}"
-ADOC_MAN5_HTML_ARGS=-a "manvolnum=5" ${ADOC_STDARG} -a "revnumber=${DOCRELEASE}"
-ADOC_MAN8_HTML_ARGS=-a "manvolnum=8" ${ADOC_STDARG} -a "revnumber=${DOCRELEASE}"
+ADOC_STDARG= -a icons -a data-uri -a "date=$(shell date)" -a "revnumber=${DOCRELEASE}"
+ADOC_MAN1_HTML_ARGS=-a "manvolnum=1" ${ADOC_STDARG}
+ADOC_MAN5_HTML_ARGS=-a "manvolnum=5" ${ADOC_STDARG}
+ADOC_MAN8_HTML_ARGS=-a "manvolnum=8" ${ADOC_STDARG}
BROWSER?=xdg-open
asciidoc ${ADOC_STDARG} -a toc -o $@ $*.adoc
test -n "$${NOVIEW}" || $(BROWSER) $@ &
+chapter-%-plain.html: %.adoc ${PVE_COMMON_DOC_SOURCES}
+ asciidoc -s ${ADOC_STDARG} -a toc -o chapter-$*-plain.html $*.adoc
+ test -n "$${NOVIEW}" || $(BROWSER) $@ &
+
%.1.html: %.adoc %.1-synopsis.adoc ${PVE_COMMON_DOC_SOURCES}
asciidoc ${ADOC_MAN1_HTML_ARGS} -o $@ $*.adoc
test -n "$${NOVIEW}" || $(BROWSER) $@ &
$(MAKE) NOVIEW=1 pve-admin-guide.pdf pve-admin-guide.html pve-admin-guide.epub
$(MAKE) NOVIEW=1 $(addsuffix .1.html, ${COMMAND_LIST}) $(addsuffix .8.html, ${SERVICE_LIST}) $(addsuffix .5.html, ${CONFIG_LIST})
$(MAKE) NOVIEW=1 $(addsuffix .html, $(addprefix chapter-, ${CHAPTER_LIST}))
+ $(MAKE) NOVIEW=1 $(addsuffix -plain.html, $(addprefix chapter-, ${CHAPTER_LIST}))
asciidoc -a "date=$(shell date)" -a "revnumber=${DOCRELEASE}" index.adoc
pve-admin-guide.html: ${PVE_ADMIN_GUIDE_SOURCES}
make ${GEN_DEB};
make ${DOC_DEB};
-DOC_DEB_FILES= \
- $(addsuffix .html, $(addprefix chapter-, ${CHAPTER_LIST})) \
- $(addsuffix .1.html, ${COMMAND_LIST}) \
- $(addsuffix .8.html, ${SERVICE_LIST}) \
- $(addsuffix .5.html, ${CONFIG_LIST}) \
- pve-admin-guide.pdf \
- pve-admin-guide.html \
- pve-admin-guide.epub \
+DOC_DEB_FILES= \
+ $(addsuffix .html, $(addprefix chapter-, ${CHAPTER_LIST})) \
+ $(addsuffix -plain.html, $(addprefix chapter-, ${CHAPTER_LIST})) \
+ $(addsuffix .1.html, ${COMMAND_LIST}) \
+ $(addsuffix .8.html, ${SERVICE_LIST}) \
+ $(addsuffix .5.html, ${CONFIG_LIST}) \
+ pve-admin-guide.pdf \
+ pve-admin-guide.html \
+ pve-admin-guide.epub \
index.html
${DOC_DEB}: index.html
make all
clean:
- rm -rf *.html *.pdf *.epub *.tmp *.1 *.5 *.8 *.deb *.changes build api-viewer/apidoc.js chapter-*.html pve-admin-guide.chunked
+	rm -rf *.html *.pdf *.epub *.tmp *.1 *.5 *.8 *.deb *.changes build api-viewer/apidoc.js chapter-*.html chapter-*-plain.html pve-admin-guide.chunked
find . -name '*~' -exec rm {} ';'
/usr/share/
+debian/pvedocs-include.php /etc/mediawiki-extensions/extensions-available/
+debian/pve-docs-mediawiki-import /usr/bin
\ No newline at end of file
--- /dev/null
#!/usr/bin/perl

# Import the standalone ("plain") pve-docs chapter files into the local
# mediawiki instance, using the credentials stored in /root/.pve-docs.

use strict;
use warnings;
use Data::Dumper;

use IO::File;
use File::Basename;
use MediaWiki::API;

my $config_fn = "/root/.pve-docs"; # format 'username:pw'

my $fh = IO::File->new($config_fn) ||
    die "Please configure the mediawiki user/password in '$config_fn'\n";

my $api_url = "http://localhost/api.php";

# Credentials are expected on the first line of the config file.
my $config = <$fh>;
chomp $config;

# Limit split to 2 fields so the password itself may contain ':'.
my ($username, $passwd) = split(':', $config, 2);

die "Invalid config in '$config_fn' (expected 'username:pw')\n"
    if !defined($username) || $username eq '' || !defined($passwd);

my $mw = MediaWiki::API->new();
$mw->{config}->{api_url} = $api_url;

# log in to the wiki
$mw->login({ lgname => $username, lgpassword => $passwd })
    || die $mw->{error}->{code} . ': ' . $mw->{error}->{details};
+
# Create or update a wiki page so that it embeds the given pve-docs
# include file via the custom '{{#pvedocs:...}}' parser function, and
# optionally tags the page with a category.
#
# Parameters:
#   $pagename - title of the wiki page to create/update
#   $include  - file name passed to the #pvedocs parser hook
#   $category - optional category; added as a '[[Category:...]]' tag
#
# Uses the global, already-logged-in $mw MediaWiki::API handle; dies if
# the edit request fails.
sub update_page {
    my ($pagename, $include, $category) = @_;

    print "update mediawiki page: $pagename\n";

    # Fetch the current page; the '*' key holds the wikitext source
    # (undef for pages that do not exist yet, hence the '' fallback).
    my $ref = $mw->get_page( { title => $pagename } );
    my $page = $ref->{'*'} || '';

    # Ensure exactly one '{{#pvedocs:...}}' line: prepend one if the page
    # has none, otherwise rewrite the existing line in place.
    if ($page !~ m/^\{\{#pvedocs:.*\}\}\s*$/m) {
        $page = "{{#pvedocs:$include}}\n$page";
    } else {
        $page =~ s/^\{\{#pvedocs:.*\}\}\s*$/\{\{#pvedocs:$include\}\}\n/m;
    }

    # Append the category tag unless the page already carries it.
    if ($category) {
        my $catstr = "Category:$category";

        if ($page !~ m/^\[\[$catstr\]\]\s*$/m) {
            $page .= "\n[[$catstr]]\n";
        }
    }

    my $timestamp = $ref->{timestamp};
    my $wcmd = {
        action => 'edit',
        title => $pagename,
        basetimestamp => $timestamp, # to avoid edit conflicts
        text => $page,
    };

    $mw->edit($wcmd) ||
        die $mw->{error}->{code} . ': ' . $mw->{error}->{details};
}
+
# Category assigned to all imported reference documentation pages.
my $cat_refdoc = "Reference Documentation";

# Map of generated standalone chapter files (installed under
# /usr/share/pve-docs/) to the wiki page title and category that should
# embed them.
my $docs = {
    'chapter-ha-manager-plain.html' => {
	title => "High Availability",
	category => $cat_refdoc,
    },
    'chapter-sysadmin-plain.html' => {
	title => "Host System Administration",
	category => $cat_refdoc,
    },
    'chapter-pct-plain.html' => {
	title => "Linux Container",
	category => $cat_refdoc,
    },
    'chapter-pmxcfs-plain.html' => {
	title => "Proxmox Cluster File System (pmxcfs)",
	category => $cat_refdoc,
    },
    'chapter-pve-bibliography-plain.html' => {
	title => "Bibliography",
	category => $cat_refdoc,
    },
    'chapter-pvecm-plain.html' => {
	title => "Cluster Manager",
	category => $cat_refdoc,
    },
    'chapter-pve-faq-plain.html' => {
	title => "FAQ",
	category => $cat_refdoc,
    },
    'chapter-pve-firewall-plain.html' => {
	title => "Firewall",
	category => $cat_refdoc,
    },
    'chapter-pvesm-plain.html' => {
	title => "Storage",
	category => $cat_refdoc,
    },
    'chapter-pveum-plain.html' => {
	title => "User Management",
	category => $cat_refdoc,
    },
    'chapter-qm-plain.html' => {
	title => "Qemu/KVM Virtual Machines",
	category => $cat_refdoc,
    },
    'chapter-vzdump-plain.html' => {
	title => "Backup and Restore",
	category => $cat_refdoc,
    }
};

# For manually testing the import of a single page:
#update_page("testpage1", $filename, $d->{category});

# Import every chapter; abort early if a listed file was not installed.
foreach my $filename (keys %$docs) {
    my $path = "/usr/share/pve-docs/$filename";
    die "no such file '$path'" if ! -f $path;
    my $d = $docs->{$filename};
    update_page($d->{title}, $filename, $d->{category});
}
--- /dev/null
<?php

# MediaWiki extension that embeds pve-docs HTML pages via the custom
# '{{#pvedocs:...}}' parser function.
# see http://www.mediawiki.org/wiki/Manual:Parser_functions

$wgExtensionCredits['parserhook'][] = array(
	'name' => "PVE Documentation Pages",
	'description' => "Display PVE Documentation Pages",
	'author' => "Dietmar Maurer",
);

# Define a setup function
$wgHooks['ParserFirstCallInit'][] = 'efPvedocsParserFunction_Setup';

# Add a hook to initialise the magic word
$wgHooks['LanguageGetMagic'][] = 'efPvedocsParserFunction_Magic';
+
# Hook for ParserFirstCallInit: associate the 'pvedocs' magic word with
# our render callback. Returning true lets further init hooks run.
function efPvedocsParserFunction_Setup(&$parser) {
	$renderer = 'efPvedocsParserFunction_Render';
	$parser->setFunctionHook( 'pvedocs', $renderer );
	return true;
}
+
# Hook for LanguageGetMagic: register the magic word for our parser
# function. Element 0 of the array is the case-sensitivity flag
# (0 = case insensitive, 1 = sensitive); all remaining elements are
# accepted synonyms. Must return true so that the magic words of other
# parser function extensions still get loaded.
function efPvedocsParserFunction_Magic(&$magicWords, $langCode) {
	$synonyms = array( 0, 'pvedocs' );
	$magicWords['pvedocs'] = $synonyms;
	return true;
}
+
# Percent-encode every byte that JavaScript's encodeURI() would escape,
# leaving alphanumerics, unreserved marks and URI-reserved characters
# untouched, so the result round-trips through decodeURI() client side.
function encodeURI($uri) {
	$escape = function ($m) { return sprintf('%%%02X', ord($m[0])); };
	return preg_replace_callback("{[^0-9a-z_.!~*'();,/?:@&=+$#-]}i",
		$escape, $uri);
}
+
# Render callback for '{{#pvedocs:filename.html}}': reads the named file
# from /usr/share/pve-docs/ and injects its raw HTML into the page.
#
# $param1 is the file name. It is validated with an ANCHORED pattern
# containing no '/', so only plain file names directly inside
# "/usr/share/pve-docs/" can be read. (The original unanchored check
# also matched names embedded in longer paths, e.g. '../x/foo.html',
# which allowed reading .html files outside the docs directory.)
function efPvedocsParserFunction_Render($parser, $param1 = '', $param2 = '') {

	$parser->disableCache();

	# only allow simple names, so that users can only include
	# files from within "/usr/share/pve-docs/"
	if (!preg_match("/^[a-z0-9.-]+\.html\z/i", $param1)) {
		die("no such manual page");
	}

	$content = file_get_contents("/usr/share/pve-docs/$param1");
	if ($content === false) {
		die("no such manual page");
	}

	# hack to inject html without modifications by the mediawiki parser:
	# percent-encode the document server side and decode it again with
	# client-side javascript.
	$encHtml = encodeURI($content);
	$output = "<div id='pve_embed_data'></div>";
	$output .= "<script>" .
		"var data = decodeURI(\"".$encHtml."\");" .
		"document.getElementById('pve_embed_data').innerHTML = data;" .
		"</script>";

	return array($output, 'noparse' => true, 'isHTML' => true);
}
+
+?>