By popular demand.

This needs some adjustment/fixups after feeding snippets to asciidoc,
since compared to markdown, asciidoc escapes xml markup and doesn't
just let it through.

The other noticeable change is that build times increase a lot - we
need to launch the markup process per snippet, there are a few thousand
of them, and asciidoc (python) has a substantially higher overhead per
invocation than pandoc (haskell).

v2: More fine-tuning:

- Use unix newlines, not the default dos ones. This only results in
  ugliness in the intermediate gpu.xml, but still.

- Resurrect the hack to remove paragraphs for the one-line references.
  Like markdown, asciidoc insists on wrapping everything.

Cc: Danilo Cesar Lemes de Paula <danilo.cesar at collabora.co.uk>
Cc: Thomas Wood <thomas.wood at intel.com>
Cc: Jonathan Corbet <corbet at lwn.net>
Signed-off-by: Daniel Vetter <daniel.vetter at intel.com>
---
 Documentation/DocBook/Makefile |  6 +++---
 scripts/kernel-doc             | 18 ++++++++++++++----
 2 files changed, 17 insertions(+), 7 deletions(-)

diff --git a/Documentation/DocBook/Makefile b/Documentation/DocBook/Makefile
index 246ad38550e5..5335955c0de5 100644
--- a/Documentation/DocBook/Makefile
+++ b/Documentation/DocBook/Makefile
@@ -104,8 +104,8 @@ define rule_docproc
 endef

 %.xml: %.tmpl $(KERNELDOC) $(DOCPROC) $(KERNELDOCXMLREF) FORCE
-       @(which pandoc > /dev/null 2>&1) || \
-       (echo "*** To get propper documentation you need to install pandoc 
***";)
+       @(which asciidoc > /dev/null 2>&1) || \
+       (echo "*** To get propper documentation you need to install asciidoc 
***";)
        $(call if_changed_rule,docproc)

 # Tell kbuild to always build the programs
@@ -116,7 +116,7 @@ notfoundtemplate = echo "*** You have to install 
docbook-utils or xmlto ***"; \
 db2xtemplate = db2TYPE -o $(dir $@) $<
 xmltotemplate = xmlto TYPE $(XMLTOFLAGS) -o $(dir $@) $<

-ifneq ($(shell which pandoc >/dev/null 2>&1 && echo found),found)
+ifneq ($(shell which asciidoc >/dev/null 2>&1 && echo found),found)
        MARKDOWNREADY := "";
 endif

diff --git a/scripts/kernel-doc b/scripts/kernel-doc
index e01e74f15a22..cbfa4c03189e 100755
--- a/scripts/kernel-doc
+++ b/scripts/kernel-doc
@@ -524,7 +524,7 @@ sub dump_doc_section {
 sub markdown_to_docbook {
        my $orig_content = $_[0];

-       my $pid = open3( \*CHLD_IN, \*CHLD_OUT, \*CHLD_ERR, "pandoc  
--columns=80 -f markdown -t docbook" );
+       my $pid = open3( \*CHLD_IN, \*CHLD_OUT, \*CHLD_ERR, "asciidoc  -a 
'newline=\\n' --no-header-footer --backend=docbook45 -" );

        print CHLD_IN "$orig_content";
        close(CHLD_IN);
@@ -540,9 +540,9 @@ sub markdown_to_docbook {
        close(CHLD_ERR);

        if ($output_markdown_nopara) {
-               # pandoc insists in adding Main <para></para>, sometimes we
-               # want to remove them.
-               $content =~ s:\A\s*<para>\s*\n(.*)\n</para>\Z$:$1:egsm;
+               # asciidoc insists in adding Main <simpara></simpara>, sometimes
+               # we want to remove them.
+               $content =~ s:\A\s*<simpara>(.*)</simpara>\Z:$1:egsm;
        }

        return $content;
@@ -605,6 +605,16 @@ sub output_highlight {
 #   print STDERR "contents af:$contents\n";
     if ($use_markdown) {
         $contents = markdown_to_docbook($contents);
+
+       # Compared to markdown asciidoc doesn't let through arbitrary xml
+       # markup. We need to un-escape the kerneldoc markup for functions,
+       # structures, ...
+       $contents =~ s/&lt;quote&gt;(\S*)&lt;\/quote&gt;/<quote>$1<\/quote>/g;
+       $contents =~ 
s/&lt;constant&gt;(\S*)&lt;\/constant&gt;/<constant>$1<\/constant>/g;
+       $contents =~ 
s/&lt;structname&gt;(\S*)&lt;\/structname&gt;/<structname>$1<\/structname>/g;
+       $contents =~ 
s/&lt;parameter&gt;(\S*)&lt;\/parameter&gt;/<parameter>$1<\/parameter>/g;
+       $contents =~ 
s/&lt;function&gt;(\S*)&lt;\/function&gt;/<function>$1<\/function>/g;
+       $contents =~ s/&lt;envar&gt;(\S*)&lt;\/envar&gt;/<envar>$1<\/envar>/g;
     }

 #   strip whitespaces when generating html5
-- 
2.5.1

Reply via email to