Hello. After discussing this with Richi, he approved installing these patches alongside the current Perl scripts. I'm attaching the patches, and I will follow up with a patch that removes the legacy Perl scripts; that patch will go through the normal review process.
Thanks,
Martin
>From 818d9da7892bcdb70df6fb456f7ea9243f155f3f Mon Sep 17 00:00:00 2001
From: marxin <mli...@suse.cz>
Date: Fri, 28 Apr 2017 13:50:24 +0200
Subject: [PATCH 1/3] Port Doxygen support script from Perl to Python; add
 unittests

contrib/ChangeLog:

2017-05-31  David Malcolm  <dmalc...@redhat.com>
	    Martin Liska  <mli...@suse.cz>

	* filter_params.py: New, porting the perl script to python,
	adding a test suite.
	* filter_gcc_for_doxygen_new: New file.
---
 contrib/filter_gcc_for_doxygen_new |  12 ++++
 contrib/filter_params.py           | 144 +++++++++++++++++++++++++++++++++++++
 2 files changed, 156 insertions(+)
 create mode 100755 contrib/filter_gcc_for_doxygen_new
 create mode 100644 contrib/filter_params.py

diff --git a/contrib/filter_gcc_for_doxygen_new b/contrib/filter_gcc_for_doxygen_new
new file mode 100755
index 00000000000..d1109a50c88
--- /dev/null
+++ b/contrib/filter_gcc_for_doxygen_new
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+# This filters GCC source before Doxygen can get confused by it;
+# this script is listed in the doxyfile. The output is not very
+# pretty, but at least we get output that Doxygen can understand.
+#
+# $1 is a source file of some kind. The source we wish doxygen to
+# process is put on stdout.
+
+dir=`dirname $0`
+python $dir/filter_params.py $1
+exit 0
diff --git a/contrib/filter_params.py b/contrib/filter_params.py
new file mode 100644
index 00000000000..f94d201bbf8
--- /dev/null
+++ b/contrib/filter_params.py
@@ -0,0 +1,144 @@
+#!/usr/bin/python
+"""
+Filters out some of the #defines used throughout the GCC sources:
+- GTY(()) marks declarations for gengtype.c
+- PARAMS(()) is used for K&R compatibility. See ansidecl.h.
+
+When passed one or more filenames, acts on those files and prints the
+results to stdout.
+
+When run without a filename, runs a unit-testing suite.
+"""
+import re
+import sys
+import unittest
+
+# Optional whitespace
+OPT_WS = '\s*'
+
+def filter_src(text):
+    """
+    str -> str. We operate on the whole of the source file at once
+    (rather than individual lines) so that we can have multiline
+    regexes.
+    """
+
+    # Convert C comments from GNU coding convention of:
+    #    /* FIRST_LINE
+    #       NEXT_LINE
+    #       FINAL_LINE. */
+    # to:
+    #    /** @verbatim FIRST_LINE
+    #       NEXT_LINE
+    #       FINAL_LINE. @endverbatim */
+    # so that doxygen will parse them.
+    #
+    # Only comments that begin on the left-most column are converted.
+    text = re.sub(r'^/\* ',
+                  r'/** @verbatim ',
+                  text,
+                  flags=re.MULTILINE)
+    text = re.sub(r'\*/',
+                  r' @endverbatim */',
+                  text)
+
+    # Remove GTY markings (potentially multiline ones):
+    text = re.sub('GTY' + OPT_WS + r'\(\(.*?\)\)\s+',
+                  '',
+                  text,
+                  flags=(re.MULTILINE|re.DOTALL))
+
+    # Strip out 'ATTRIBUTE_UNUSED'
+    text = re.sub('\sATTRIBUTE_UNUSED',
+                  '',
+                  text)
+
+    # PARAMS(()) is used for K&R compatibility. See ansidecl.h.
+    text = re.sub('PARAMS' + OPT_WS + r'\(\((.*?)\)\)',
+                  r'(\1)',
+                  text)
+
+    return text
+
+class FilteringTests(unittest.TestCase):
+    '''
+    Unit tests for filter_src.
+    '''
+    def assert_filters_to(self, src_input, expected_result):
+        # assertMultiLineEqual was added to unittest in 2.7/3.1
+        if hasattr(self, 'assertMultiLineEqual'):
+            assertion = self.assertMultiLineEqual
+        else:
+            assertion = self.assertEqual
+        assertion(expected_result, filter_src(src_input))
+
+    def test_comment_example(self):
+        self.assert_filters_to(
+            ('/* FIRST_LINE\n'
+             '   NEXT_LINE\n'
+             '   FINAL_LINE. */\n'),
+            ('/** @verbatim FIRST_LINE\n'
+             '   NEXT_LINE\n'
+             '   FINAL_LINE. @endverbatim */\n'))
+
+    def test_oneliner_comment(self):
+        self.assert_filters_to(
+            '/* Returns the string representing CLASS. */\n',
+            ('/** @verbatim Returns the string representing CLASS. @endverbatim */\n'))
+
+    def test_multiline_comment(self):
+        self.assert_filters_to(
+            ('/* The thread-local storage model associated with a given VAR_DECL\n'
+             "   or SYMBOL_REF. This isn't used much, but both trees and RTL refer\n"
+             "   to it, so it's here. */\n"),
+            ('/** @verbatim The thread-local storage model associated with a given VAR_DECL\n'
+             "   or SYMBOL_REF. This isn't used much, but both trees and RTL refer\n"
+             "   to it, so it's here. @endverbatim */\n"))
+
+    def test_GTY(self):
+        self.assert_filters_to(
+            ('typedef struct GTY(()) alias_pair {\n'
+             '  tree decl;\n'
+             '  tree target;\n'
+             '} alias_pair;\n'),
+            ('typedef struct alias_pair {\n'
+             '  tree decl;\n'
+             '  tree target;\n'
+             '} alias_pair;\n'))
+
+    def test_multiline_GTY(self):
+        # Ensure that a multiline GTY is filtered out.
+        self.assert_filters_to(
+            ('class GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"),\n'
+             '\t chain_next ("%h.next"), chain_prev ("%h.previous")))\n'
+             '  symtab_node_base\n'
+             '{\n'),
+            ('class symtab_node_base\n'
+             '{\n'))
+
+    def test_ATTRIBUTE_UNUSED(self):
+        # Ensure that ATTRIBUTE_UNUSED is filtered out.
+        self.assert_filters_to(
+            ('static void\n'
+             'record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)\n'
+             '{\n'),
+            ('static void\n'
+             'record_set (rtx dest, const_rtx set, void *data)\n'
+             '{\n'))
+
+    def test_PARAMS(self):
+        self.assert_filters_to(
+            'char *strcpy PARAMS ((char *dest, char *source));\n',
+            'char *strcpy (char *dest, char *source);\n')
+
+def act_on_files(argv):
+    for filename in argv[1:]:
+        with open(filename) as f:
+            text = f.read()
+        print(filter_src(text))
+
+if __name__ == '__main__':
+    if len(sys.argv) > 1:
+        act_on_files(sys.argv)
+    else:
+        unittest.main()
-- 
2.12.2
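A quick way to see what the combined filters do, without wiring the new
script into the doxygen configuration, is to call filter_src directly.
This is only an illustrative sketch: the sample declarations below are
made up, and it assumes the snippet is run from contrib/ so that
filter_params is importable.

# Smoke test for filter_src; run from contrib/ so the import resolves.
# The sample input is illustrative only, not taken from the GCC sources.
from filter_params import filter_src

sample = ('/* A sample function comment. */\n'
          'struct GTY(()) sample_pair {\n'
          '  tree decl ATTRIBUTE_UNUSED;\n'
          '};\n'
          'char *sample_copy PARAMS ((char *dest, char *source));\n')

# The comment is wrapped in @verbatim/@endverbatim, the GTY(()) marker and
# ATTRIBUTE_UNUSED are dropped, and PARAMS ((...)) becomes a plain
# parameter list.
print(filter_src(sample))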
>From 52c428a06e120ae58b8b9ad5fecb9a28e4ef5841 Mon Sep 17 00:00:00 2001
From: marxin <mli...@suse.cz>
Date: Fri, 28 Apr 2017 13:49:01 +0200
Subject: [PATCH 2/3] Change comment style to one we normally use.

gcc/ChangeLog:

2017-04-28  Martin Liska  <mli...@suse.cz>

	* tree-vect-loop.c (vect_create_epilog_for_reduction): Change
	comment style to one we normally use.
	(vectorizable_reduction): Likewise.
	(vectorizable_induction): Likewise.
	* tree-vect-stmts.c (vectorizable_mask_load_store): Likewise.
	(vectorizable_call): Likewise.
	(vectorizable_simd_clone_call): Likewise.
	(vectorizable_conversion): Likewise.
	(vectorizable_assignment): Likewise.
	(vectorizable_shift): Likewise.
	(vectorizable_operation): Likewise.
	(vectorizable_store): Likewise.
	(vectorizable_load): Likewise.
	* tree-vectorizer.h: Likewise.
---
 gcc/tree-vect-loop.c  | 12 ++++++------
 gcc/tree-vect-stmts.c | 18 +++++++++---------
 gcc/tree-vectorizer.h |  4 ++--
 3 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/gcc/tree-vect-loop.c b/gcc/tree-vect-loop.c
index 3133a968d69..d3ad0d5652e 100644
--- a/gcc/tree-vect-loop.c
+++ b/gcc/tree-vect-loop.c
@@ -4855,8 +4855,8 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple *stmt,
       tree tmp;
       tree vec_elem_type;
 
-      /*** Case 1: Create:
-           v_out2 = reduc_expr <v_out1> */
+      /* Case 1: Create:
+         v_out2 = reduc_expr <v_out1> */
 
       if (dump_enabled_p ())
         dump_printf_loc (MSG_NOTE, vect_location,
@@ -4931,7 +4931,7 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple *stmt,
       int elt_offset;
       tree zero_vec = build_zero_cst (vectype);
 
-      /*** Case 2: Create:
+      /* Case 2: Create:
          for (offset = nelements/2; offset >= 1; offset/=2)
            {
              Create: va' = vec_shift <va, offset>
@@ -4982,7 +4982,7 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple *stmt,
     }
   else
     {
-      /*** Case 3: Create:
+      /* Case 3: Create:
          s = extract_field <v_out2, 0>
          for (offset = element_size;
               offset < vector_size;
@@ -6080,7 +6080,7 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
       return true;
     }
 
-  /** Transform. **/
+  /* Transform. */
 
   if (dump_enabled_p ())
     dump_printf_loc (MSG_NOTE, vect_location, "transform reduction.\n");
@@ -6508,7 +6508,7 @@ vectorizable_induction (gimple *phi,
       return true;
     }
 
-  /** Transform. **/
+  /* Transform. */
 
   if (dump_enabled_p ())
     dump_printf_loc (MSG_NOTE, vect_location, "transform induction phi.\n");
diff --git a/gcc/tree-vect-stmts.c b/gcc/tree-vect-stmts.c
index 74c9a113082..7490b08b454 100644
--- a/gcc/tree-vect-stmts.c
+++ b/gcc/tree-vect-stmts.c
@@ -2136,7 +2136,7 @@ vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
     }
   gcc_assert (memory_access_type == STMT_VINFO_MEMORY_ACCESS_TYPE (stmt_info));
 
-  /** Transform. **/
+  /* Transform. */
 
   if (memory_access_type == VMAT_GATHER_SCATTER)
     {
@@ -2818,7 +2818,7 @@ vectorizable_call (gimple *gs, gimple_stmt_iterator *gsi, gimple **vec_stmt,
       return true;
     }
 
-  /** Transform. **/
+  /* Transform. */
 
   if (dump_enabled_p ())
     dump_printf_loc (MSG_NOTE, vect_location, "transform call.\n");
@@ -3462,7 +3462,7 @@ vectorizable_simd_clone_call (gimple *stmt, gimple_stmt_iterator *gsi,
       return true;
     }
 
-  /** Transform. **/
+  /* Transform. */
 
   if (dump_enabled_p ())
     dump_printf_loc (MSG_NOTE, vect_location, "transform call.\n");
@@ -4324,7 +4324,7 @@ vectorizable_conversion (gimple *stmt, gimple_stmt_iterator *gsi,
       return true;
     }
 
-  /** Transform. **/
+  /* Transform. */
   if (dump_enabled_p ())
     dump_printf_loc (MSG_NOTE, vect_location,
                      "transform conversion. ncopies = %d.\n", ncopies);
@@ -4722,7 +4722,7 @@ vectorizable_assignment (gimple *stmt, gimple_stmt_iterator *gsi,
      return true;
    }
 
-  /** Transform. **/
+  /* Transform. */
 
   if (dump_enabled_p ())
     dump_printf_loc (MSG_NOTE, vect_location, "transform assignment.\n");
@@ -5093,7 +5093,7 @@ vectorizable_shift (gimple *stmt, gimple_stmt_iterator *gsi,
       return true;
     }
 
-  /** Transform. **/
+  /* Transform. */
 
   if (dump_enabled_p ())
     dump_printf_loc (MSG_NOTE, vect_location,
@@ -5421,7 +5421,7 @@ vectorizable_operation (gimple *stmt, gimple_stmt_iterator *gsi,
       return true;
     }
 
-  /** Transform. **/
+  /* Transform. */
 
   if (dump_enabled_p ())
     dump_printf_loc (MSG_NOTE, vect_location,
@@ -5756,7 +5756,7 @@ vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
     }
   gcc_assert (memory_access_type == STMT_VINFO_MEMORY_ACCESS_TYPE (stmt_info));
 
-  /** Transform. **/
+  /* Transform. */
 
   ensure_base_align (stmt_info, dr);
 
@@ -6743,7 +6743,7 @@ vectorizable_load (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
     dump_printf_loc (MSG_NOTE, vect_location,
                      "transform load. ncopies = %d\n", ncopies);
 
-  /** Transform. **/
+  /* Transform. */
 
   ensure_base_align (stmt_info, dr);
 
diff --git a/gcc/tree-vectorizer.h b/gcc/tree-vectorizer.h
index a7f5c6e6f13..df8da9eb1fd 100644
--- a/gcc/tree-vectorizer.h
+++ b/gcc/tree-vectorizer.h
@@ -544,9 +544,9 @@ typedef struct _stmt_vec_info {
   gimple *vectorized_stmt;
 
 
-  /** The following is relevant only for stmts that contain a non-scalar
+  /* The following is relevant only for stmts that contain a non-scalar
      data-ref (array/pointer/struct access). A GIMPLE stmt is expected to have
-     at most one such data-ref. **/
+     at most one such data-ref. */
 
   /* Information about the data-ref (access function, etc),
      relative to the inner-most containing loop. */
-- 
2.12.2
>From c2b36dc402e6012a9a7a878961853827bf782812 Mon Sep 17 00:00:00 2001
From: marxin <mli...@suse.cz>
Date: Fri, 28 Apr 2017 13:52:57 +0200
Subject: [PATCH 3/3] Doxygen: transform ENUM_BITFIELD and comments starting
 with '/**'.

contrib/ChangeLog:

2017-04-28  Martin Liska  <mli...@suse.cz>

	* filter_params.py: Transform ENUM_BITFIELD and comments
	starting with '/**'
---
 contrib/filter_params.py | 30 ++++++++++++++++++++++++++++++
 1 file changed, 30 insertions(+)

diff --git a/contrib/filter_params.py b/contrib/filter_params.py
index f94d201bbf8..a82a8d5728c 100644
--- a/contrib/filter_params.py
+++ b/contrib/filter_params.py
@@ -34,6 +34,11 @@ def filter_src(text):
     # so that doxygen will parse them.
     #
     # Only comments that begin on the left-most column are converted.
+    #
+    text = re.sub(r'^/\*\* ',
+                  r'/** @verbatim ',
+                  text,
+                  flags=re.MULTILINE)
     text = re.sub(r'^/\* ',
                   r'/** @verbatim ',
                   text,
@@ -58,6 +63,11 @@ def filter_src(text):
                   r'(\1)',
                   text)
 
+    # Replace 'ENUM_BITFIELD(enum_name)' with 'enum enum_name'.
+    text = re.sub('ENUM_BITFIELD\s*\(([^\)]*)\)',
+                  r'enum \1',
+                  text)
+
     return text
 
 class FilteringTests(unittest.TestCase):
@@ -81,6 +91,21 @@ class FilteringTests(unittest.TestCase):
              '   NEXT_LINE\n'
             '   FINAL_LINE. @endverbatim */\n'))
 
+    def test_comment_example_gengtype(self):
+        self.assert_filters_to(
+            ('/** Allocate and initialize an input buffer state.\n'
+             ' * @param file A readable stream.\n'
+             ' * @param size The character buffer size in bytes. When in doubt, use @c YY_BUF_SIZE.\n'
+             ' * \n'
+             ' * @return the allocated buffer state.\n'
+             ' */'),
+            ('/** @verbatim Allocate and initialize an input buffer state.\n'
+             ' * @param file A readable stream.\n'
+             ' * @param size The character buffer size in bytes. When in doubt, use @c YY_BUF_SIZE.\n'
+             ' * \n'
+             ' * @return the allocated buffer state.\n'
+             ' @endverbatim */'))
+
     def test_oneliner_comment(self):
         self.assert_filters_to(
             '/* Returns the string representing CLASS. */\n',
@@ -131,6 +156,11 @@ class FilteringTests(unittest.TestCase):
             'char *strcpy PARAMS ((char *dest, char *source));\n',
             'char *strcpy (char *dest, char *source);\n')
 
+    def test_ENUM_BITFIELD(self):
+        self.assert_filters_to(
+            '  ENUM_BITFIELD (sym_intent) intent:2;\n',
+            '  enum sym_intent intent:2;\n')
+
 def act_on_files(argv):
     for filename in argv[1:]:
         with open(filename) as f:
-- 
2.12.2
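Running the script with no arguments still executes the whole unittest
suite, so the two new transformations are covered by the tests above.
For completeness, here is a small illustrative check of the new rules;
the declaration below is made up and, as with the earlier example, it
assumes filter_params is importable from contrib/.

# Illustrative check of the '/**' comment rule and the ENUM_BITFIELD rule.
from filter_params import filter_src

sample = ('/** A comment that already starts with slash-star-star. */\n'
          '  ENUM_BITFIELD (machine_mode) mode : 8;\n')

# The '/**' comment gets the same @verbatim/@endverbatim wrapping as
# plain '/*' comments, and ENUM_BITFIELD (machine_mode) is rewritten to
# 'enum machine_mode'.
print(filter_src(sample))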