Jakub,

Here is an updated patch. I have not yet addressed some of the points:

> Also note (but, already preexisting issue) is that the
> __asan_report* and __asan_{load,store}* calls are declared in
> sanitizer.def as having 1st argument PTR type, while in the library
> it is uptr (aka PTRMODE).  So, either we should fix up sanitizer.def
> to match that (we were passing already base_addr which is integral,
> not pointer), or if we keep sanitizer.def as is, we should pass
> to the calls base_ssa instead of base_addr.

Can I do this with a separate patch? The current one is already quite big.
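
(If we keep sanitizer.def as is, my understanding is that the fix mostly
boils down to one line in the new use_calls branch of build_check_stmt
below -- a sketch only, reusing the names from this patch:

      /* Pass the pointer-typed base_ssa rather than the uintptr-converted
	 base_addr, to match the PTR argument type in sanitizer.def.  */
      g = gimple_build_call (fun, nargs, base_ssa, sz_arg);

and similarly for the __asan_report* call at the end of the function.)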

> +  if (use_calls)
> +    {
> +      build_check_stmt_sized (location, base, len, iter, is_store);
> +      update_mem_ref_hash_table (base, 1);
> +      update_mem_ref_hash_table (end, 1);
> +      return;
> +    }
> +
>
> See above, here we should not handle use_calls any differently from
> !use_calls.

build_check_stmt is now unified for constant and variable lengths,
but I'd still prefer to treat the use_calls case specially,
because calling a single __asan_loadN is more space-efficient
than emitting two separate calls plus a length check.
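
To illustrate, here is a rough source-level sketch (not the exact
GIMPLE) of what gets emitted for a variable-length access of len bytes
at base under the two options:

      /* use_calls treated specially: one callback covers the whole region.  */
      __asan_loadN (base, len);

      /* use_calls folded into the generic region code: a length check plus
	 callbacks for the first and the last byte.  */
      if (len != 0)
	{
	  __asan_load1 (base);
	  __asan_load1 (base + len - 1);
	}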

-Y
2014-06-06  Yury Gribov  <y.gri...@samsung.com>

	New asan-instrumentation-with-call-threshold parameter.

	gcc/
	* asan.c (check_func): New function.
	(use_calls_p): Likewise.
	(report_error_func): Change interface.
	(build_check_stmt): Allow non-constant lengths; add support
	for new parameter.
	(instrument_mem_region_access): Likewise.
	(asan_instrument): Likewise.
	(instrument_derefs): Move size and alignment handling to
	build_check_stmt.
	* cfgcleanup.c (old_insns_match_p): Add support for new
	functions.
	* doc/invoke.texi: Describe new parameter.
	* params.def: Define new parameter.
	* params.h: Likewise.
	* sanitizer.def: Define new builtins.

	gcc/testsuite/
	* c-c++-common/asan/instrument-with-calls-1.c: New test.
	* c-c++-common/asan/instrument-with-calls-2.c: Likewise.
	* c-c++-common/asan/instrument-with-calls-3.c: Likewise.

diff --git a/gcc/asan.c b/gcc/asan.c
index 3397655..acaf110 100644
--- a/gcc/asan.c
+++ b/gcc/asan.c
@@ -257,6 +257,15 @@ struct asan_mem_ref
 
 static alloc_pool asan_mem_ref_alloc_pool;
 
+static int asan_num_accesses;
+
+static inline bool
+use_calls_p ()
+{
+  return ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD < INT_MAX
+    && asan_num_accesses >= ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD;
+}
+
 /* This creates the alloc pool used to store the instances of
    asan_mem_ref that are stored in the hash table asan_mem_ref_ht.  */
 
@@ -1319,7 +1328,7 @@ asan_protect_global (tree decl)
    IS_STORE is either 1 (for a store) or 0 (for a load).  */
 
 static tree
-report_error_func (bool is_store, HOST_WIDE_INT size_in_bytes, bool slow_p)
+report_error_func (bool is_store, HOST_WIDE_INT size_in_bytes, int *nargs)
 {
   static enum built_in_function report[2][6]
     = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
@@ -1328,13 +1337,37 @@ report_error_func (bool is_store, HOST_WIDE_INT size_in_bytes, bool slow_p)
 	{ BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
 	  BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
 	  BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } };
-  if ((size_in_bytes & (size_in_bytes - 1)) != 0
-      || size_in_bytes > 16
-      || slow_p)
-    return builtin_decl_implicit (report[is_store][5]);
+  if (size_in_bytes == -1)
+    {
+      *nargs = 2;
+      return builtin_decl_implicit (report[is_store][5]);
+    }
+  *nargs = 1;
   return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
 }
 
+/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,N}.
+   IS_STORE is either 1 (for a store) or 0 (for a load).  */
+
+static tree
+check_func (bool is_store, int size_in_bytes, int *nargs)
+{
+  static enum built_in_function check[2][6]
+    = { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
+	  BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
+	  BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
+	{ BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
+	  BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
+	  BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } };
+  if (size_in_bytes == -1)
+    {
+      *nargs = 2;
+      return builtin_decl_implicit (check[is_store][5]);
+    }
+  *nargs = 1;
+  return builtin_decl_implicit (check[is_store][exact_log2 (size_in_bytes)]);
+}
+
 /* Split the current basic block and create a condition statement
    insertion point right before or after the statement pointed to by
    ITER.  Return an iterator to the point at which the caller might
@@ -1508,41 +1541,72 @@ build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
    otherwise, it points to the statement logically following it.  */
 
 static void
-build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
-		  bool before_p, bool is_store, HOST_WIDE_INT size_in_bytes,
-		  bool slow_p = false)
+build_check_stmt (location_t location, tree base, tree len,
+		  gimple_stmt_iterator *iter, bool before_p, bool is_store,
+		  unsigned int align = 0)
 {
   gimple_stmt_iterator gsi;
   basic_block then_bb, else_bb;
   tree t, base_addr, shadow;
   gimple g;
-  tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
-  tree shadow_type = TREE_TYPE (shadow_ptr_type);
   tree uintptr_type
     = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
-  tree base_ssa = base;
+  tree base_ssa;
+  tree sz_arg;
+  bool use_calls = use_calls_p ();
+
+  HOST_WIDE_INT size_in_bytes
+    = len && tree_fits_uhwi_p (len) ? TREE_INT_CST_LOW (len) : -1;
   HOST_WIDE_INT real_size_in_bytes = size_in_bytes;
-  tree sz_arg = NULL_TREE;
 
-  if (size_in_bytes == 1)
-    slow_p = false;
-  else if ((size_in_bytes & (size_in_bytes - 1)) != 0
-	   || size_in_bytes > 16
-	   || slow_p)
+  tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
+  tree shadow_type = TREE_TYPE (shadow_ptr_type);
+
+  if (size_in_bytes > 1)
     {
-      real_size_in_bytes = 1;
-      slow_p = true;
+      if ((size_in_bytes & (size_in_bytes - 1)) != 0
+	  || size_in_bytes > 16)
+	{
+	  size_in_bytes = -1;
+	  real_size_in_bytes = 1;
+	}
+      else
+	{
+	  if (align && align < size_in_bytes * BITS_PER_UNIT)
+	    {
+	      /* On non-strict alignment targets, if
+		 16-byte access is just 8-byte aligned,
+		 this will result in misaligned shadow
+		 memory 2 byte load, but otherwise can
+		 be handled using one read.  */
+	      if (size_in_bytes != 16
+		  || STRICT_ALIGNMENT
+		  || align < 8 * BITS_PER_UNIT)
+		{
+		  size_in_bytes = -1;
+		  real_size_in_bytes = 1;
+		}
+	    }
+	}
     }
 
-  /* Get an iterator on the point where we can add the condition
-     statement for the instrumentation.  */
-  gsi = create_cond_insert_point (iter, before_p,
-				  /*then_more_likely_p=*/false,
-				  /*create_then_fallthru_edge=*/false,
-				  &then_bb,
-				  &else_bb);
+  if (!use_calls)
+    ++asan_num_accesses;
 
-  base = unshare_expr (base);
+  base_ssa = base = unshare_expr (base);
+
+  if (use_calls)
+    gsi = *iter;
+  else
+    {
+      /* Get an iterator on the point where we can add the condition
+	 statement for the instrumentation.  */
+      gsi = create_cond_insert_point (iter, before_p,
+				      /*then_more_likely_p=*/false,
+				      /*create_then_fallthru_edge=*/false,
+				      &then_bb,
+				      &else_bb);
+    }
 
   /* BASE can already be an SSA_NAME; in that case, do not create a
      new SSA_NAME for it.  */
@@ -1552,7 +1616,10 @@ build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
 					make_ssa_name (TREE_TYPE (base), NULL),
 					base, NULL_TREE);
       gimple_set_location (g, location);
-      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
+      if (before_p && use_calls)
+	gsi_insert_before (&gsi, g, GSI_SAME_STMT);
+      else
+	gsi_insert_after (&gsi, g, GSI_NEW_STMT);
       base_ssa = gimple_assign_lhs (g);
     }
 
@@ -1560,9 +1627,39 @@ build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
 				    make_ssa_name (uintptr_type, NULL),
 				    base_ssa, NULL_TREE);
   gimple_set_location (g, location);
-  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
+  if (before_p && use_calls)
+    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
+  else
+    gsi_insert_after (&gsi, g, GSI_NEW_STMT);
   base_addr = gimple_assign_lhs (g);
 
+  g = gimple_build_assign_with_ops (NOP_EXPR,
+				    make_ssa_name (uintptr_type, NULL),
+				    len, NULL_TREE);
+  gimple_set_location (g, location);
+  if (before_p && use_calls)
+    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
+  else
+    gsi_insert_after (&gsi, g, GSI_NEW_STMT);
+  sz_arg = gimple_assign_lhs (g);
+
+  if (use_calls)
+    {
+      int nargs;
+      tree fun = check_func (is_store, size_in_bytes, &nargs);
+      g = gimple_build_call (fun, nargs, base_addr, sz_arg);
+      gimple_set_location (g, location);
+      if (before_p)
+	gsi_insert_before (&gsi, g, GSI_SAME_STMT);
+      else
+	{
+	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
+	  gsi_next (&gsi);
+	  *iter = gsi;
+	}
+      return;
+    }
+
   /* Build
      (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset ().  */
   shadow = build_shadow_mem_access (&gsi, location, base_addr,
@@ -1591,13 +1688,19 @@ build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
       gimple_seq_set_location (seq, location);
       gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
       /* For weird access sizes or misaligned, check first and last byte.  */
-      if (slow_p)
+      if (size_in_bytes == -1)
 	{
+	  g = gimple_build_assign_with_ops (MINUS_EXPR,
+					    make_ssa_name (uintptr_type, NULL),
+					    sz_arg,
+					    build_int_cst (uintptr_type, 1));
+	  gimple_set_location (g, location);
+	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
+	  tree last = gimple_assign_lhs (g);
 	  g = gimple_build_assign_with_ops (PLUS_EXPR,
 					    make_ssa_name (uintptr_type, NULL),
 					    base_addr,
-					    build_int_cst (uintptr_type,
-							   size_in_bytes - 1));
+					    last);
 	  gimple_set_location (g, location);
 	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
 	  tree base_end_addr = gimple_assign_lhs (g);
@@ -1621,7 +1724,6 @@ build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
 	  t = gimple_assign_lhs (gimple_seq_last (seq));
 	  gimple_seq_set_location (seq, location);
 	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
-	  sz_arg = build_int_cst (pointer_sized_int_node, size_in_bytes);
 	}
     }
   else
@@ -1634,8 +1736,9 @@ build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
 
   /* Generate call to the run-time library (e.g. __asan_report_load8).  */
   gsi = gsi_start_bb (then_bb);
-  g = gimple_build_call (report_error_func (is_store, size_in_bytes, slow_p),
-			 sz_arg ? 2 : 1, base_addr, sz_arg);
+  int nargs;
+  tree fun = report_error_func (is_store, size_in_bytes, &nargs);
+  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
   gimple_set_location (g, location);
   gsi_insert_after (&gsi, g, GSI_NEW_STMT);
 
@@ -1656,7 +1759,7 @@ instrument_derefs (gimple_stmt_iterator *iter, tree t,
     return;
 
   tree type, base;
-  HOST_WIDE_INT size_in_bytes;
+  HOST_WIDE_INT isize;
 
   type = TREE_TYPE (t);
   switch (TREE_CODE (t))
@@ -1672,8 +1775,8 @@ instrument_derefs (gimple_stmt_iterator *iter, tree t,
       return;
     }
 
-  size_in_bytes = int_size_in_bytes (type);
-  if (size_in_bytes <= 0)
+  isize = int_size_in_bytes (type);
+  if (isize <= 0)
     return;
 
   HOST_WIDE_INT bitsize, bitpos;
@@ -1682,9 +1785,9 @@ instrument_derefs (gimple_stmt_iterator *iter, tree t,
   int volatilep = 0, unsignedp = 0;
   tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
 				    &mode, &unsignedp, &volatilep, false);
-  if (((size_in_bytes & (size_in_bytes - 1)) == 0
-       && (bitpos % (size_in_bytes * BITS_PER_UNIT)))
-      || bitsize != size_in_bytes * BITS_PER_UNIT)
+  if (((isize & (isize - 1)) == 0
+       && (bitpos % (isize * BITS_PER_UNIT)))
+      || bitsize != isize * BITS_PER_UNIT)
     {
       if (TREE_CODE (t) == COMPONENT_REF
 	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
@@ -1728,35 +1831,13 @@ instrument_derefs (gimple_stmt_iterator *iter, tree t,
     }
 
   base = build_fold_addr_expr (t);
-  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
+  if (!has_mem_ref_been_instrumented (base, isize))
     {
-      bool slow_p = false;
-      if (size_in_bytes > 1)
-	{
-	  if ((size_in_bytes & (size_in_bytes - 1)) != 0
-	      || size_in_bytes > 16)
-	    slow_p = true;
-	  else
-	    {
-	      unsigned int align = get_object_alignment (t);
-	      if (align < size_in_bytes * BITS_PER_UNIT)
-		{
-		  /* On non-strict alignment targets, if
-		     16-byte access is just 8-byte aligned,
-		     this will result in misaligned shadow
-		     memory 2 byte load, but otherwise can
-		     be handled using one read.  */
-		  if (size_in_bytes != 16
-		      || STRICT_ALIGNMENT
-		      || align < 8 * BITS_PER_UNIT)
-		    slow_p = true;
-		}
-	    }
-	}
-      build_check_stmt (location, base, iter, /*before_p=*/true,
-			is_store, size_in_bytes, slow_p);
-      update_mem_ref_hash_table (base, size_in_bytes);
-      update_mem_ref_hash_table (t, size_in_bytes);
+      unsigned int align = get_object_alignment (t);
+      build_check_stmt (location, base, size_in_bytes (type), iter,
+			/*before_p=*/true, is_store, align);
+      update_mem_ref_hash_table (base, isize);
+      update_mem_ref_hash_table (t, isize);
     }
 
 }
@@ -1774,6 +1855,8 @@ instrument_mem_region_access (tree base, tree len,
 			      gimple_stmt_iterator *iter,
 			      location_t location, bool is_store)
 {
+  bool use_calls = use_calls_p ();
+
   if (!POINTER_TYPE_P (TREE_TYPE (base))
       || !INTEGRAL_TYPE_P (TREE_TYPE (len))
       || integer_zerop (len))
@@ -1795,6 +1878,16 @@ instrument_mem_region_access (tree base, tree len,
   if (start_instrumented && end_instrumented)
     return;
 
+  if (use_calls)
+    {
+      /* It is faster to simply call __asan_(load|store)N instead of
+	 calculating length and checking bytes independently.  */
+      build_check_stmt (location, base, len, iter, /*before_p=*/true, is_store);
+      update_mem_ref_hash_table (base, 1);
+      update_mem_ref_hash_table (end, 1);
+      return;
+    }
+
   if (!is_gimple_constant (len))
     {
       /* So, the length of the memory area to asan-protect is
@@ -1827,7 +1920,8 @@ instrument_mem_region_access (tree base, tree len,
       /* Instrument the beginning of the memory region to be accessed,
 	 and arrange for the rest of the intrumentation code to be
 	 inserted in the then block *after* the current gsi.  */
-      build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);
+      build_check_stmt (location, base, build_int_cst (size_type_node, 1), &gsi,
+			/*before_p=*/true, is_store);
 
       if (then_bb)
 	/* We are in the case where the length of the region is not
@@ -1912,7 +2006,8 @@ instrument_mem_region_access (tree base, tree len,
   /* instrument access at _2;  */
   gsi = gsi_for_stmt (region_end);
   build_check_stmt (location, gimple_assign_lhs (region_end),
-		    &gsi, /*before_p=*/false, is_store, 1);
+		    build_int_cst (size_type_node, 1),
+		    &gsi, /*before_p=*/false, is_store);
 
   if (then_bb == NULL)
     update_mem_ref_hash_table (end, 1);
@@ -1967,8 +2062,9 @@ instrument_strlen_call (gimple_stmt_iterator *iter)
   gimple_set_location (str_arg_ssa, loc);
   gimple_stmt_iterator gsi = *iter;
   gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
-  build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
-		    /*before_p=*/false, /*is_store=*/false, 1);
+  build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa),
+		    build_int_cst (size_type_node, 1), &gsi,
+		    /*before_p=*/false, /*is_store=*/false);
 
   /* If we initially had an instruction like:
 
@@ -1987,8 +2083,9 @@ instrument_strlen_call (gimple_stmt_iterator *iter)
   gimple_set_location (stmt, loc);
   gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
 
-  build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
-		    /*before_p=*/false, /*is_store=*/false, 1);
+  build_check_stmt (loc, gimple_assign_lhs (stmt),
+		    build_int_cst (size_type_node, 1), &gsi,
+		    /*before_p=*/false, /*is_store=*/false);
 
   /* Ensure that iter points to the statement logically following the
      one it was initially pointing to.  */
@@ -2568,6 +2665,7 @@ asan_instrument (void)
 {
   if (shadow_ptr_types[0] == NULL_TREE)
     asan_init_shadow_ptr_types ();
+  asan_num_accesses = 0;
   transform_statements ();
   return 0;
 }
diff --git a/gcc/cfgcleanup.c b/gcc/cfgcleanup.c
index 7c24a6d..e857543 100644
--- a/gcc/cfgcleanup.c
+++ b/gcc/cfgcleanup.c
@@ -1175,7 +1175,7 @@ old_insns_match_p (int mode ATTRIBUTE_UNUSED, rtx i1, rtx i2)
 		      && DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
 			 >= BUILT_IN_ASAN_REPORT_LOAD1
 		      && DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
-			 <= BUILT_IN_ASAN_REPORT_STORE16)
+			 <= BUILT_IN_ASAN_STOREN)
 		    return dir_none;
 		}
 	    }
diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi
index 9475594..96d6895 100644
--- a/gcc/doc/invoke.texi
+++ b/gcc/doc/invoke.texi
@@ -10230,6 +10230,12 @@ is enabled by default when using @option{-fsanitize=address} option.
 To disable use-after-return detection use 
 @option{--param asan-use-after-return=0}.
 
+@item asan-instrumentation-with-call-threshold
+Once the number of memory accesses in a function becomes greater than
+or equal to this number, use callbacks instead of generating inline
+code.  For example, to disable inline code completely, use
+@option{--param asan-instrumentation-with-call-threshold=0}.
+
 @end table
 @end table
 
diff --git a/gcc/params.def b/gcc/params.def
index c3a8797..28ef79a 100644
--- a/gcc/params.def
+++ b/gcc/params.def
@@ -1090,6 +1090,12 @@ DEFPARAM (PARAM_ASAN_USE_AFTER_RETURN,
          "Enable asan builtin functions protection",
          1, 0, 1)
 
+DEFPARAM (PARAM_ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD,
+         "asan-instrumentation-with-call-threshold",
+         "Use callbacks instead of inline code once number of accesses "
+         " in function becomes greater or equal than this threshold",
+         10000, 0, INT_MAX)
+
 DEFPARAM (PARAM_UNINIT_CONTROL_DEP_ATTEMPTS,
 	  "uninit-control-dep-attempts",
 	  "Maximum number of nested calls to search for control dependencies "
diff --git a/gcc/params.h b/gcc/params.h
index 0d6daa2..d488e32 100644
--- a/gcc/params.h
+++ b/gcc/params.h
@@ -232,5 +232,7 @@ extern void init_param_values (int *params);
   PARAM_VALUE (PARAM_ASAN_MEMINTRIN)
 #define ASAN_USE_AFTER_RETURN \
   PARAM_VALUE (PARAM_ASAN_USE_AFTER_RETURN)
+#define ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD \
+  PARAM_VALUE (PARAM_ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD)
 
 #endif /* ! GCC_PARAMS_H */
diff --git a/gcc/sanitizer.def b/gcc/sanitizer.def
index 4016fc5..b4af164 100644
--- a/gcc/sanitizer.def
+++ b/gcc/sanitizer.def
@@ -29,7 +29,7 @@ along with GCC; see the file COPYING3.  If not see
 /* Address Sanitizer */
 DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_INIT, "__asan_init_v3",
 		      BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)
-/* Do not reorder the BUILT_IN_ASAN_REPORT* builtins, e.g. cfgcleanup.c
+/* Do not reorder the BUILT_IN_ASAN_{REPORT,CHECK}* builtins, e.g. cfgcleanup.c
    relies on this order.  */
 DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_REPORT_LOAD1, "__asan_report_load1",
 		      BT_FN_VOID_PTR, ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST)
@@ -57,6 +57,30 @@ DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_REPORT_STORE16, "__asan_report_store16",
 DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_REPORT_STORE_N, "__asan_report_store_n",
 		      BT_FN_VOID_PTR_PTRMODE,
 		      ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_LOAD1, "__asan_load1",
+		      BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_LOAD2, "__asan_load2",
+		      BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_LOAD4, "__asan_load4",
+		      BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_LOAD8, "__asan_load8",
+		      BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_LOAD16, "__asan_load16",
+		      BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_LOADN, "__asan_loadN",
+		      BT_FN_VOID_PTR_PTRMODE, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_STORE1, "__asan_store1",
+		      BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_STORE2, "__asan_store2",
+		      BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_STORE4, "__asan_store4",
+		      BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_STORE8, "__asan_store8",
+		      BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_STORE16, "__asan_store16",
+		      BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_STOREN, "__asan_storeN",
+		      BT_FN_VOID_PTR_PTRMODE, ATTR_TMPURE_NOTHROW_LEAF_LIST)
 DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_REGISTER_GLOBALS,
 		      "__asan_register_globals",
 		      BT_FN_VOID_PTR_PTRMODE, ATTR_NOTHROW_LEAF_LIST)
diff --git a/gcc/testsuite/c-c++-common/asan/instrument-with-calls-1.c b/gcc/testsuite/c-c++-common/asan/instrument-with-calls-1.c
new file mode 100644
index 0000000..80f7620
--- /dev/null
+++ b/gcc/testsuite/c-c++-common/asan/instrument-with-calls-1.c
@@ -0,0 +1,10 @@
+/* { dg-do assemble } */
+/* { dg-options "--param asan-instrumentation-with-call-threshold=0 -save-temps" } */
+
+void f(char *a, int *b) {
+  *b = *a;
+}
+
+/* { dg-final { scan-assembler "__asan_load1" } } */
+/* { dg-final { scan-assembler "__asan_store4" } } */
+/* { dg-final { cleanup-saved-temps } } */
diff --git a/gcc/testsuite/c-c++-common/asan/instrument-with-calls-2.c b/gcc/testsuite/c-c++-common/asan/instrument-with-calls-2.c
new file mode 100644
index 0000000..570f796
--- /dev/null
+++ b/gcc/testsuite/c-c++-common/asan/instrument-with-calls-2.c
@@ -0,0 +1,16 @@
+/* { dg-do assemble } */
+/* { dg-options "--param asan-instrumentation-with-call-threshold=1 -save-temps" } */
+
+int x;
+
+void f(int *a, int *b) {
+  *a = 0;
+  asm volatile ("" ::: "memory");
+  x = *b;
+}
+
+/* { dg-final { scan-assembler-not "__asan_store4" } } */
+/* { dg-final { scan-assembler "__asan_report_store4" } } */
+/* { dg-final { scan-assembler "__asan_load4" } } */
+/* { dg-final { scan-assembler-not "__asan_report_load4" } } */
+/* { dg-final { cleanup-saved-temps } } */
diff --git a/gcc/testsuite/c-c++-common/asan/instrument-with-calls-3.c b/gcc/testsuite/c-c++-common/asan/instrument-with-calls-3.c
new file mode 100644
index 0000000..3712c7a
--- /dev/null
+++ b/gcc/testsuite/c-c++-common/asan/instrument-with-calls-3.c
@@ -0,0 +1,15 @@
+/* { dg-do assemble } */
+/* { dg-options "--param asan-instrumentation-with-call-threshold=0 -save-temps" } */
+
+struct A {
+  char x[7];
+};
+
+void f(struct A *x, struct A *y) {
+  *x = *y;
+}
+
+/* { dg-final { scan-assembler "__asan_loadN" } } */
+/* { dg-final { scan-assembler "__asan_storeN" } } */
+/* { dg-final { cleanup-saved-temps } } */
+
