On 10/25/2011 02:18 PM, Richard Henderson wrote:
> On 10/25/2011 10:20 AM, Andrew MacLeod wrote:
>> +   /* Otherwise there is a lockfree match, transform the call from:
>> +        void fn(T* mem, T* desired, T* return, model)
>> +      into
>> +        *((In *)return = fn (T* mem, *((In *)desired, model)  */
> This is, in general, an aliasing violation nearly guaranteed to fail.
> Instead try
>
>    *return = VCE<In>(fn(mem, VCE<In>(*desired), model))
>
> where VCE<T>(E) is a VIEW_CONVERT_EXPR with TREE_TYPE = T and a
> first operand expression of E.
>
> But in particular, the actual stores and loads happen in the original type.

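Right - in tree terms that amounts to wrapping the value and the call result
in VIEW_CONVERT_EXPRs rather than punning the pointed-to object. A rough
sketch of the shape (fn, mem, desired, ret and model just stand in for the
trees the resolver already has in hand):

   /* VCE<In>(*desired): view the T value as the integral type In.  */
   tree val  = build1 (VIEW_CONVERT_EXPR, I_type, desired);
   tree call = build_call_expr (fn, 3, mem, val, model);
   /* VCE<T>(fn (...)): view the result back as T, so the store into
      *return happens in the original type.  */
   tree res  = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (ret), call);
   tree stmt = build2 (MODIFY_EXPR, TREE_TYPE (ret), ret, res);

That's what the patch below does, except the MODIFY_EXPR is built later in
resolve_overloaded_builtin once the _N call has been constructed.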

OK, how's this.  (just the c-common.c file)

It seems to work fine. Passed all the basic tests, and it survives an incremental bootstrap. I'll kick off a full scratch bootstrap/test before feeling all warm and fuzzy tho :-)
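For reference, the user-level effect (sizes and names illustrative):

   T obj, val, ret;
   __atomic_exchange (&obj, &val, &ret, __ATOMIC_SEQ_CST);

for a lock-free sizeof (T) of, say, 4 now resolves roughly as

   ret = VCE<T>(__atomic_exchange_4 ((I4 *) &obj, VCE<I4>(val),
                                     __ATOMIC_SEQ_CST));

where I4 is shorthand for the 4-byte type from builtin_type_for_size, while
any other size gets the size_t prepended and goes to the library as

   __atomic_exchange (sizeof (T), &obj, &val, &ret, __ATOMIC_SEQ_CST);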

Andrew
Index: c-family/c-common.c
===================================================================
*** c-family/c-common.c (revision 180260)
--- c-family/c-common.c (working copy)
*************** sync_resolve_return (tree first_param, t
*** 9113,9118 ****
--- 9113,9482 ----
      return result;
  }
  
+ /* This function verifies the PARAMS to a generic atomic FUNCTION.
+    It returns the size of the objects if all the parameters are the same
+    size; otherwise an error is issued and 0 is returned.  */
+ 
+ static int
+ get_atomic_generic_size (tree function, VEC(tree,gc) *params)
+ {
+   unsigned int n_param;
+   unsigned int n_model;
+   unsigned int x;
+   int size_0;
+   tree type_0;
+ 
+   /* Determine the parameter makeup.  */
+   switch (DECL_FUNCTION_CODE (function))
+     {
+     case BUILT_IN_ATOMIC_EXCHANGE:
+       n_param = 4;
+       n_model = 1;
+       break;
+     case BUILT_IN_ATOMIC_LOAD:
+     case BUILT_IN_ATOMIC_STORE:
+       n_param = 3;
+       n_model = 1;
+       break;
+     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
+       n_param = 6;
+       n_model = 2;
+       break;
+     default:
+       return 0;
+     }
+ 
+   if (VEC_length (tree, params) != n_param)
+     {
+       error ("Incorrect number of arguments to function %qE", function);
+       return 0;
+     }
+ 
+   /* Get the type of the first parameter, and determine its size.  */
+   type_0 = TREE_TYPE (VEC_index (tree, params, 0));
+   if (TREE_CODE (type_0) != POINTER_TYPE)
+     {
+       error ("argument 0 of %qE must be a pointer type", function);
+       return 0;
+     }
+   size_0 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (type_0)), 1);
+ 
+   /* Check that each of the other parameters is a pointer and the same
+      size.  */
+   for (x = 0; x < n_param - n_model; x++)
+     {
+       int size;
+       tree type = TREE_TYPE (VEC_index (tree, params, x));
+       /* __atomic_compare_exchange has a bool in the 4th position, skip it.  */
+       if (n_param == 6 && x == 3)
+         continue;
+       if (!POINTER_TYPE_P (type))
+         {
+           error ("argument %d of %qE must be a pointer type", x, function);
+           return 0;
+         }
+       size = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (type)), 1);
+       if (size != size_0)
+         {
+           error ("size mismatch in argument %d of %qE", x, function);
+           return 0;
+         }
+     }
+ 
+   /* Check memory model parameters for validity.  */
+   for (x = n_param - n_model; x < n_param; x++)
+     {
+       tree p = VEC_index (tree, params, x);
+       if (TREE_CODE (p) == INTEGER_CST)
+         {
+           int i = tree_low_cst (p, 1);
+           if (i < 0 || i >= MEMMODEL_LAST)
+             {
+               error ("invalid memory model argument %d of %qE", x, function);
+               return 0;
+             }
+         }
+       else if (!INTEGRAL_TYPE_P (TREE_TYPE (p)))
+         {
+           error ("non-integer memory model argument %d of %qE", x, function);
+           return 0;
+         }
+     }
+ 
+   return size_0;
+ }
+ 
+ 
+ /* This will take an __atomic_ generic FUNCTION call, and add a size
+    parameter N at the beginning of the parameter list PARAMS representing
+    the size of the objects.  This is to match the library ABI requirement.
+    LOC is the location of the function call.
+    The new function is returned if it needed rebuilding, otherwise NULL_TREE
+    is returned to allow the external call to be constructed.  */
+ 
+ static tree
+ add_atomic_size_parameter (unsigned n, location_t loc, tree function, 
+                          VEC(tree,gc) *params)
+ {
+   tree size_node;
+ 
+   /* Insert a SIZE_T parameter as the first param.  If there isn't
+      enough space, allocate a new vector and recursively re-build with
+      that.  */
+   if (!VEC_space (tree, params, 1))
+     {
+       unsigned int z, len;
+       VEC(tree,gc) *vec;
+       tree f;
+ 
+       len = VEC_length (tree, params);
+       vec = VEC_alloc (tree, gc, len + 1);
+       for (z = 0; z < len; z++)
+       VEC_quick_push (tree, vec, VEC_index (tree, params, z));
+       f = build_function_call_vec (loc, function, vec, NULL);
+       VEC_free (tree, gc, vec);
+       return f;
+     }
+ 
+   /* Add the size parameter and leave as a function call for processing.  */
+   size_node = build_int_cst (size_type_node, n);
+   VEC_quick_insert (tree, params, 0, size_node);
+   return NULL_TREE;
+ }
+ 
+ 
+ /* This will process an __atomic_exchange function call, determine whether it
+    needs to be mapped to the _N variation, or turned into a library call.
+    LOC is the location of the builtin call.
+    FUNCTION is the DECL that has been invoked;
+    PARAMS is the argument list for the call.
+    TRUE is returned if it is translated into the proper format for a call
+    to the external library, and NEW_RETURN is set to the tree for that
+    function call.
+    FALSE is returned if processing for the _N variation is required, and
+    NEW_RETURN is set to the return value the result is copied into.  */
+ static bool
+ resolve_overloaded_atomic_exchange (location_t loc, tree function, 
+                                   VEC(tree,gc) *params, tree *new_return)
+ {     
+   tree p0, p1, p2, p3;
+   tree I_type, I_type_ptr;
+   int n = get_atomic_generic_size (function, params);
+ 
+   /* If not a lock-free size, change to the library generic format.  */
+   if (n != 1 && n != 2 && n != 4 && n != 8 && n != 16)
+     {
+       *new_return = add_atomic_size_parameter (n, loc, function, params);
+       return true;
+     }
+ 
+   /* Otherwise there is a lockfree match, transform the call from:
+        void fn(T* mem, T* desired, T* return, model)
+      into
+        *return = (T) fn ((In *) mem, (In) *desired, model)  */
+ 
+   p0 = VEC_index (tree, params, 0);
+   p1 = VEC_index (tree, params, 1);
+   p2 = VEC_index (tree, params, 2);
+   p3 = VEC_index (tree, params, 3);
+   
+   /* Create pointer to appropriate size.  */
+   I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1);
+   I_type_ptr = build_pointer_type (I_type);
+ 
+   /* Convert object pointer to required type.  */
+   p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0);
+   VEC_replace (tree, params, 0, p0);
+ 
+   /* Convert new value to required type, and dereference it.  */
+   p1 = build_indirect_ref (loc, p1, RO_UNARY_STAR);
+   p1 = build1 (VIEW_CONVERT_EXPR, I_type, p1);
+   VEC_replace (tree, params, 1, p1);
+ 
+   /* Move memory model to the 3rd position, and end param list.  */
+   VEC_replace (tree, params, 2, p3);
+   VEC_truncate (tree, params, 3);
+ 
+   /* Convert return pointer and dereference it for later assignment.  */
+   *new_return = build_indirect_ref (loc, p2, RO_UNARY_STAR);
+ 
+   return false;
+ }
+ 
+ 
+ /* This will process an __atomic_compare_exchange function call, determine
+    whether it needs to be mapped to the _N variation, or turned into a
+    library call.
+    LOC is the location of the builtin call.
+    FUNCTION is the DECL that has been invoked;
+    PARAMS is the argument list for the call.
+    TRUE is returned if it is translated into the proper format for a call
+    to the external library, and NEW_RETURN is set to the tree for that
+    function call.
+    FALSE is returned if processing for the _N variation is required.  */
+ 
+ static bool
+ resolve_overloaded_atomic_compare_exchange (location_t loc, tree function, 
+                                           VEC(tree,gc) *params, 
+                                           tree *new_return)
+ {     
+   tree p0, p1, p2;
+   tree I_type, I_type_ptr;
+   int n = get_atomic_generic_size (function, params);
+ 
+   /* If not a lock-free size, change to the library generic format.  */
+   if (n != 1 && n != 2 && n != 4 && n != 8 && n != 16)
+     {
+       /* The library generic format does not have the weak parameter, so
+          remove it from the param list.  Since a parameter has been removed,
+          we can be sure that there is room for the SIZE_T parameter, meaning
+          there will not be a recursive rebuilding of the parameter list, so
+          there is no danger this will be done twice.  */
+       if (n > 0)
+         {
+           VEC_replace (tree, params, 3, VEC_index (tree, params, 4));
+           VEC_replace (tree, params, 4, VEC_index (tree, params, 5));
+           VEC_truncate (tree, params, 5);
+         }
+       *new_return = add_atomic_size_parameter (n, loc, function, params);
+       return true;
+     }
+ 
+   /* Otherwise, there is a match, so the call needs to be transformed from:
+        bool fn(T* mem, T* expected, T* desired, weak, success, failure)
+      into
+        bool fn ((In *) mem, (In *) expected, (In) *desired, weak,
+                 success, failure)  */
+ 
+   p0 = VEC_index (tree, params, 0);
+   p1 = VEC_index (tree, params, 1);
+   p2 = VEC_index (tree, params, 2);
+   
+   /* Create pointer to appropriate size.  */
+   I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1);
+   I_type_ptr = build_pointer_type (I_type);
+ 
+   /* Convert object pointer to required type.  */
+   p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0);
+   VEC_replace (tree, params, 0, p0);
+ 
+   /* Convert expected pointer to required type.  */
+   p1 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p1);
+   VEC_replace (tree, params, 1, p1);
+ 
+   /* Convert desired value to required type, and dereference it.  */
+   p2 = build_indirect_ref (loc, p2, RO_UNARY_STAR);
+   p2 = build1 (VIEW_CONVERT_EXPR, I_type, p2);
+   VEC_replace (tree, params, 2, p2);
+ 
+   /* The rest of the parameters are fine.  NULL means no special return
+      value processing.  */
+   *new_return = NULL;
+   return false;
+ }
+ 
+ 
+ /* This will process an __atomic_load function call, determine whether it
+    needs to be mapped to the _N variation, or turned into a library call.
+    LOC is the location of the builtin call.
+    FUNCTION is the DECL that has been invoked;
+    PARAMS is the argument list for the call.
+    TRUE is returned if it is translated into the proper format for a call
+    to the external library, and NEW_RETURN is set to the tree for that
+    function call.
+    FALSE is returned if processing for the _N variation is required, and
+    NEW_RETURN is set to the return value the result is copied into.  */
+ 
+ static bool
+ resolve_overloaded_atomic_load (location_t loc, tree function, 
+                               VEC(tree,gc) *params, tree *new_return)
+ {     
+   tree p0, p1, p2;
+   tree I_type, I_type_ptr;
+   int n = get_atomic_generic_size (function, params);
+ 
+   /* If not a lock-free size, change to the library generic format.  */
+   if (n != 1 && n != 2 && n != 4 && n != 8 && n != 16)
+     {
+       *new_return = add_atomic_size_parameter (n, loc, function, params);
+       return true;
+     }
+ 
+   /* Otherwise, there is a match, so the call needs to be transformed from:
+        void fn(T* mem, T* return, model)
+      into
+        *return = (T) fn ((In *) mem, model)  */
+ 
+   p0 = VEC_index (tree, params, 0);
+   p1 = VEC_index (tree, params, 1);
+   p2 = VEC_index (tree, params, 2);
+   
+   /* Create pointer to appropriate size.  */
+   I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1);
+   I_type_ptr = build_pointer_type (I_type);
+ 
+   /* Convert object pointer to required type.  */
+   p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0);
+   VEC_replace (tree, params, 0, p0);
+ 
+   /* Move memory model to the 2nd position, and end param list.  */
+   VEC_replace (tree, params, 1, p2);
+   VEC_truncate (tree, params, 2);
+ 
+   /* Convert return pointer and dereference it for later assignment.  */
+   *new_return = build_indirect_ref (loc, p1, RO_UNARY_STAR);
+ 
+   return false;
+ }
+ 
+ 
+ /* This will process an __atomic_store function call, determine whether it
+    needs to be mapped to the _N variation, or turned into a library call.
+    LOC is the location of the builtin call.
+    FUNCTION is the DECL that has been invoked;
+    PARAMS is the argument list for the call.
+    TRUE is returned if it is translated into the proper format for a call
+    to the external library, and NEW_RETURN is set to the tree for that
+    function call.
+    FALSE is returned if processing for the _N variation is required;
+    NEW_RETURN is set to NULL_TREE since the store returns void.  */
+ 
+ static bool
+ resolve_overloaded_atomic_store (location_t loc, tree function, 
+                                VEC(tree,gc) *params, tree *new_return)
+ {     
+   tree p0, p1;
+   tree I_type, I_type_ptr;
+   int n = get_atomic_generic_size (function, params);
+ 
+   /* If not a lock-free size, change to the library generic format.  */
+   if (n != 1 && n != 2 && n != 4 && n != 8 && n != 16)
+     {
+       *new_return = add_atomic_size_parameter (n, loc, function, params);
+       return true;
+     }
+ 
+   /* Otherwise, there is a match, so the call needs to be transformed from:
+        void fn(T* mem, T* value, model)
+      into
+        fn ((In *) mem, (In) *value, model)  */
+ 
+   p0 = VEC_index (tree, params, 0);
+   p1 = VEC_index (tree, params, 1);
+   
+   /* Create pointer to appropriate size.  */
+   I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1);
+   I_type_ptr = build_pointer_type (I_type);
+ 
+   /* Convert object pointer to required type.  */
+   p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0);
+   VEC_replace (tree, params, 0, p0);
+ 
+   /* Convert new value to required type, and dereference it.  */
+   p1 = build_indirect_ref (loc, p1, RO_UNARY_STAR);
+   p1 = build1 (VIEW_CONVERT_EXPR, I_type, p1);
+   VEC_replace (tree, params, 1, p1);
+   
+   /* The memory model is in the right spot already. Return is void.  */
+   *new_return = NULL_TREE;
+ 
+   return false;
+ }
+ 
+ 
  /* Some builtin functions are placeholders for other expressions.  This
     function should be called immediately after parsing the call expression
     before surrounding code has committed to the type of the expression.
*************** resolve_overloaded_builtin (location_t l
*** 9129,9134 ****
--- 9493,9499 ----
  {
    enum built_in_function orig_code = DECL_FUNCTION_CODE (function);
    bool orig_format = true;
+   tree new_return = NULL_TREE;
  
    switch (DECL_BUILT_IN_CLASS (function))
      {
*************** resolve_overloaded_builtin (location_t l
*** 9146,9151 ****
--- 9511,9568 ----
    /* Handle BUILT_IN_NORMAL here.  */
    switch (orig_code)
      {
+     case BUILT_IN_ATOMIC_EXCHANGE:
+     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
+     case BUILT_IN_ATOMIC_LOAD:
+     case BUILT_IN_ATOMIC_STORE:
+       {
+         /* Handle these 4 together so that they can fall through to the next
+            case if the call is transformed to an _N variant.  */
+         switch (orig_code)
+           {
+           case BUILT_IN_ATOMIC_EXCHANGE:
+             {
+               if (resolve_overloaded_atomic_exchange (loc, function, params,
+                                                       &new_return))
+                 return new_return;
+               /* Change to the _N variant.  */
+               orig_code = BUILT_IN_ATOMIC_EXCHANGE_N;
+               break;
+             }
+ 
+           case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
+             {
+               if (resolve_overloaded_atomic_compare_exchange (loc, function,
+                                                               params,
+                                                               &new_return))
+                 return new_return;
+               /* Change to the _N variant.  */
+               orig_code = BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N;
+               break;
+             }
+           case BUILT_IN_ATOMIC_LOAD:
+             {
+               if (resolve_overloaded_atomic_load (loc, function, params,
+                                                   &new_return))
+                 return new_return;
+               /* Change to the _N variant.  */
+               orig_code = BUILT_IN_ATOMIC_LOAD_N;
+               break;
+             }
+           case BUILT_IN_ATOMIC_STORE:
+             {
+               if (resolve_overloaded_atomic_store (loc, function, params,
+                                                    &new_return))
+                 return new_return;
+               /* Change to the _N variant.  */
+               orig_code = BUILT_IN_ATOMIC_STORE_N;
+               break;
+             }
+           default:
+             gcc_unreachable ();
+           }
+         /* Fallthrough to the normal processing.  */
+       }
      case BUILT_IN_ATOMIC_EXCHANGE_N:
      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
      case BUILT_IN_ATOMIC_LOAD_N:
*************** resolve_overloaded_builtin (location_t l
*** 9204,9209 ****
--- 9621,9638 ----
            && orig_code != BUILT_IN_ATOMIC_STORE_N)
          result = sync_resolve_return (first_param, result, orig_format);
  
+       /* If new_return is set, assign the result of the call to that
+          expression and cast the whole thing to void, since the generic
+          interface returns void.  */
+       if (new_return)
+         {
+           /* Cast function result from I{1,2,4,8,16} to the required type.  */
+           result = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (new_return), result);
+           result = build2 (MODIFY_EXPR, TREE_TYPE (new_return), new_return,
+                            result);
+           TREE_SIDE_EFFECTS (result) = 1;
+           protected_set_expr_location (result, loc);
+           result = convert (void_type_node, result);
+         }
        return result;
        }
  
