Diffstat (limited to '.ikiwiki/IkiWiki/Plugin/pandoc.pm')
-rwxr-xr-x  .ikiwiki/IkiWiki/Plugin/pandoc.pm | 185
1 file changed, 182 insertions(+), 3 deletions(-)
diff --git a/.ikiwiki/IkiWiki/Plugin/pandoc.pm b/.ikiwiki/IkiWiki/Plugin/pandoc.pm
index da13763..9f3e0c2 100755
--- a/.ikiwiki/IkiWiki/Plugin/pandoc.pm
+++ b/.ikiwiki/IkiWiki/Plugin/pandoc.pm
@@ -9,6 +9,60 @@ use FileHandle;
use IPC::Open2;
use File::Path qw/make_path/;
use JSON;
+use Data::Dumper;
+
+$Data::Dumper::Indent = 3;
+
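+# Shared file/dir handles and counters for the debug dumps written under $base.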
+my $fh = undef;
+my $dh = undef;
+my $fnm = undef;
+my $f = undef;
+my $next = 0;
+my $base = '/tmp/pandoc';
+
+mkdir($base);
+
+# activate with 'generate_$format' in meta; turn on all with 'generate_all_formats'.
+my %extra_formats = (
+ pdf => { ext=>'pdf', label=>'PDF', format=>'latex', extra=>[], order=>1 },
+ docx => { ext=>'docx', label=>'DOCX', format=>'docx', extra=>[], order=>2 },
+ odt => { ext=>'odt', label=>'ODT', format=>'odt', extra=>[], order=>3 },
+ beamer => { ext=>'beamer.pdf', label=>'Beamer', format=>'beamer', extra=>[], order=>4 },
+ revealjs => { ext=>'revealjs.html', label=>'RevealJS', format=>'revealjs', extra=>['--self-contained'], order=>5 },
+ epub => { ext=>'epub', label=>'EPUB', format=>'epub3', extra=>[], order=>6 },
+ latex => { ext=>'tex', label=>'LaTeX', format=>'latex', extra=>['--standalone'], order=>7 },
+);
+
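+# Helper: scan $base for existing numbered dump files and return the next
+# free filename prefix, e.g. "/tmp/pandoc/007.htmlize.some-page".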
+sub debug_get_filename_prefix {
+
+ my $place = shift;
+ my $page = shift;
+ my $fnm_pfx = undef;
+
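+ # Find the highest dump number already used; $next becomes one past it.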
+ opendir($dh, $base) || die "Can't open $base: $!";
+ foreach $f (grep(/^[0-9]+\./, readdir($dh))) {
+ next unless $f =~ /^([0-9]+)\./;
+ $next = $1 + 1 if $1 >= $next;
+ }
+ closedir($dh);
+ if ($page) {
+ $fnm_pfx = sprintf "%s/%.3d.%s.%s", $base, $next, $place,
+ $page =~ s/\//-/rg;
+ } else {
+ $fnm_pfx = sprintf "%s/%.3d.%s", $base, $next, $place;
+ }
+ return $fnm_pfx;
+}
+
+# DEBUG Print
+print "D: First file read.\n";
+$fnm = debug_get_filename_prefix ("verify");
+open($fh, '>', $fnm) or die "Could not open $fnm: $!";
+print $fh "Hello there, happily writing to a file\n";
+print $fh "This is the file in '/home/gustav/git/fripost/wiki/.ikiwiki/IkiWiki/Plugin'\n";
+close $fh;
+# END
+
# activate with 'generate_$format' in meta; turn on all with 'generate_all_formats'.
my %extra_formats = (
@@ -22,6 +76,16 @@ my %extra_formats = (
);
sub import {
+
+ # DEBUG Print
+ print "D: Run import.\n";
+ my $fnm_pfx = debug_get_filename_prefix ("import","");
+ my $fnm = $fnm_pfx;
+ open($fh, '>', $fnm) or die "Could not open $fnm: $!";
+ print $fh "Import.\n";
+ close $fh;
+ # END
+
my $markdown_ext = $config{pandoc_markdown_ext} || "mdwn";
# May be both a string with a single value, a string containing commas or an arrayref
@@ -70,6 +134,11 @@ sub import {
sub getsetup () {
+
+ # DEBUG
+ print "D: Run getsetup.\n";
+ # END
+
return
plugin => {
safe => 1,
@@ -345,6 +414,7 @@ sub getsetup () {
sub htmlize ($@) {
+
my $format = shift;
my %params = @_;
my $page = $params{page};
@@ -353,6 +423,27 @@ sub htmlize ($@) {
local(*PANDOC_IN, *JSON_IN, *JSON_OUT, *PANDOC_OUT);
my @args = ();
+ # DEBUG Print params
+ print "D: Run htmlize for ", $page, "\n";
+ my $fnm_pfx = debug_get_filename_prefix ("htmlize", $page);
+ my $m = 0;
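+ # $m numbers the successive dump files (params, early-pagestate, ...) for this page.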
+ $fnm = sprintf "%s.%.1d.%s", $fnm_pfx, $m++, "params";
+ open($fh, '>', $fnm) or die "Could not open $fnm: $!";
+ $Data::Dumper::Indent = 2;
+ print $fh Dumper(\%params), "\n";
+ close $fh;
+ # END
+
+ # DEBUG Print halfway pagestate
+ $fnm = sprintf "%s.%.1d.%s", $fnm_pfx, $m++, "early-pagestate";
+ open($fh, '>>', $fnm);
+ my %pagestate_dumper = (
+ $page => $pagestate{$page},
+ );
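+ # Keep only this page's entry so the dump stays small and clearly labelled.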
+ print $fh Data::Dumper->Dump([\%pagestate_dumper], [qw(pagestate)]), "\n";
+ close $fh;
+ # END
+
# The default assumes pandoc is in PATH
my $command = $config{pandoc_command} || "pandoc";
@@ -452,6 +543,20 @@ sub htmlize ($@) {
warn "WARNING: Unexpected format for meta block. Incompatible version of Pandoc?\n";
}
+ # DEBUG Dump halfway content (this is a lot of data)
+ print "D: Pandoc command: ", " ", $command, " ", '-f', " ",
+ $markdown_fmt, " ", '-t', " ", 'json', " ", @args,
+ "\n";
+
+ $fnm = sprintf "%s.%.1d.%s", $fnm_pfx, $m++, "early-content";
+ open($fh, '>>', $fnm);
+ print $fh Data::Dumper->Dump([$content], [qw(content)]), "\n";
+ print $fh Data::Dumper->Dump([$json_content], [qw(json_content)]), "\n";
+ print $fh Data::Dumper->Dump([$decoded_json], [qw(decoded_json)]), "\n";
+ print $fh Data::Dumper->Dump([$meta], [qw(meta)]), "\n";
+ close $fh;
+ # END
+
# Get some selected meta attributes, more specifically:
# (title date bibliography csl subtitle abstract summary description
# version lang locale references author [+ num_authors primary_author]),
@@ -460,11 +565,15 @@ sub htmlize ($@) {
my @format_keys = grep { $_ ne 'pdf' } keys %extra_formats;
my %scalar_meta = map { ($_=>undef) } qw(
title date bibliography csl subtitle abstract summary
- description version lang locale);
+ description version lang locale
+ titlesort tag tags
+ );
+
$scalar_meta{$_.'_template'} = undef for @format_keys;
my %bool_meta = map { ("generate_$_"=>0) } keys %extra_formats;
- my %list_meta = map { ($_=>[]) } qw/author references/;
+ my %list_meta = map { ($_=>[]) } qw/author references tags/;
$list_meta{$_.'_extra_options'} = [] for @format_keys;
+
my $have_bibl = 0;
foreach my $k (keys %scalar_meta) {
next unless $meta->{$k};
@@ -585,16 +694,51 @@ sub htmlize ($@) {
close JSON_IN;
my @html = <PANDOC_IN>;
+
close PANDOC_IN;
waitpid $to_html_pid, 0;
+ print "D: Pandoc command: ", " ", $command, " ", '-f', " ",
+ 'json', " ", '-t', $htmlformat, " ", @html_args, "\n";
+
$content = Encode::decode_utf8(join('', @html));
# Reinstate placeholders for inline plugin:
$content =~ s{::INLINE::PLACEHOLDER::(\d+)::}
{<div class="inline" id="$1"></div>}g;
+ # DEBUG Print meta
+ $fnm = sprintf "%s.%.1d.%s", $fnm_pfx, $m++, "meta";
+ open($fh, '>>', $fnm) or die "Could not open $fnm: $!";
+ print $fh "\n";
+ print $fh Data::Dumper->Dump([\%scalar_meta], [qw(scalar_meta)]), "\n";
+ print $fh Data::Dumper->Dump([\%bool_meta], [qw(bool_meta)]), "\n";
+ print $fh Data::Dumper->Dump([\%list_meta], [qw(list_meta)]), "\n";
+ close $fh;
+ # END
+
+ # DEBUG Print pagestate again
+ $fnm = sprintf "%s.%.1d.%s", $fnm_pfx, $m++, "return-pagestate";
+ open($fh, '>>', $fnm);
+ my %pagestate_dumper = (
+ $page => $pagestate{$page},
+ );
+ print $fh Data::Dumper->Dump([\%pagestate_dumper], [qw(pagestate)]), "\n";
+ close $fh;
+ # END
+
+ # DEBUG Print return content
+ $fnm = sprintf "%s.%.1d.%s", $fnm_pfx, $m++, "return-content";
+ open($fh, '>>', $fnm);
+ print $fh Dumper($content), "\n";
+ close $fh;
+ # END
+
+ # DEBUG
+ print "D: Return htmlize.\n";
+ # END
+
return $content;
}
@@ -602,16 +746,36 @@ sub htmlize ($@) {
sub pagetemplate (@) {
my %params = @_;
my $page = $params{page};
+
+ # DEBUG
+ print "D: Run pagetemplate for $page\n";
+ # END
+
my $template = $params{template};
foreach my $k (keys %{$pagestate{$page}{meta}}) {
next unless $k =~ /^pandoc_/;
$template->param($k => $pagestate{$page}{meta}{$k});
}
+
+ # DEBUG Print pagetemplate data
+ my $fnm_pfx = debug_get_filename_prefix ("pagetemplate", $page);
+ my $m = 0;
+ my $fnm = sprintf "%s.%.1d.%s", $fnm_pfx, $m++, "data";
+ open($fh, '>', $fnm) or die "Could not open $fnm: $!";
+ print $fh Data::Dumper->Dump([\%params], [qw(params)]), "\n";
+ print $fh Data::Dumper->Dump([$template], [qw(template)]), "\n";
+ close $fh;
+ # END
+
+ return $template;
}
sub pageactions {
my %args = @_;
my $page = $args{page};
+
+ print "D: Run pageactions for ", $page, "\n";
+
my @links = ();
return unless $pagestate{$page}{pandoc_extra_formats};
my @exts = sort {
@@ -633,6 +797,11 @@ sub pageactions {
sub export_file {
my ($page, $ext, $json_content, $command, @args) = @_;
+
+ # DEBUG
+ print "D: Run export_file for $page.\n";
+ # END
+
my ($export_path, $export_url) = _export_file_path_and_url($page, $ext);
my $subdir = $1 if $export_path =~ /(.*)\//;
my @extra_args = @{ $extra_formats{$ext}->{extra} };
@@ -684,6 +853,11 @@ sub export_file {
@args, @extra_args) or die "Could not open pipe for $ext: $!";
print EXPORT $json_content;
close EXPORT or die "Could not close pipe for $ext: $!";
+
+ print "D: Pandoc command: ", " ", $command, " ", '-f' =>
+ 'json', " ", '-t' => $to_format, " ", '-o' =>
+ $tmp_export_path, " ", @args, " ", @extra_args;
+
if ($indirect_pdf && $tmp_export_path ne $export_path) {
my @latexmk_args = qw(-quiet -silent);
if (grep { /xelatex/ } @extra_args) {
@@ -723,6 +897,12 @@ sub remove_exported_file {
if ($@) {
warn "WARNING: remove_exported_file; page=$page, ext=$ext: $@\n";
}
+
+ # DEBUG
+ print "D: remove_exported_file: ",
+ "file removed for $page and $ext: $export_path.\n";
+ # END
+
}
}
@@ -741,7 +921,6 @@ sub _export_file_path_and_url {
return ($export_path, $export_url);
}
-
## compile_string and unwrap_c are used to make the meta data structures
## easier to work with for perl.