+ tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, args[1],
+ zero);
+ se->expr = fold_build3_loc (input_location, COND_EXPR, type, tmp, args[0],
+ rrot);
+}
+
+
+/* LEADZ (i) = (i == 0) ? BIT_SIZE (i)
+ : __builtin_clz(i) - (BIT_SIZE('int') - BIT_SIZE(i))
+
+ The conditional expression is necessary because the result of LEADZ(0)
+ is defined, but the result of __builtin_clz(0) is undefined for most
+ targets.
+
+ For INTEGER kinds smaller than the C 'int' type, we have to subtract the
+ difference in bit size between the argument of LEADZ and the C int. */
+
+static void
+gfc_conv_intrinsic_leadz (gfc_se * se, gfc_expr * expr)
+{
+ tree arg;
+ tree arg_type;
+ tree cond;
+ tree result_type;
+ tree leadz;
+ tree bit_size;
+ tree tmp;
+ tree func;
+ int s, argsize;
+
+ /* Evaluate the single actual argument and find its bit width.  */
+ gfc_conv_intrinsic_function_args (se, expr, &arg, 1);
+ argsize = TYPE_PRECISION (TREE_TYPE (arg));
+
+ /* Which variant of __builtin_clz* should we call?  Pick the smallest
+ unsigned C type at least as wide as the Fortran argument.  */
+ if (argsize <= INT_TYPE_SIZE)
+ {
+ arg_type = unsigned_type_node;
+ func = built_in_decls[BUILT_IN_CLZ];
+ }
+ else if (argsize <= LONG_TYPE_SIZE)
+ {
+ arg_type = long_unsigned_type_node;
+ func = built_in_decls[BUILT_IN_CLZL];
+ }
+ else if (argsize <= LONG_LONG_TYPE_SIZE)
+ {
+ arg_type = long_long_unsigned_type_node;
+ func = built_in_decls[BUILT_IN_CLZLL];
+ }
+ else
+ {
+ /* No builtin is wide enough; only the double-wide case (e.g. a
+ 128-bit integer kind on a 64-bit target) is supported, and it is
+ open-coded below with two CLZLL calls.  */
+ gcc_assert (argsize == 2 * LONG_LONG_TYPE_SIZE);
+ arg_type = gfc_build_uint_type (argsize);
+ func = NULL_TREE;
+ }
+
+ /* Convert the actual argument twice: first, to the unsigned type of the
+ same size; then, to the proper argument type for the built-in
+ function.  But the return type is of the default INTEGER kind.  */
+ arg = fold_convert (gfc_build_uint_type (argsize), arg);
+ arg = fold_convert (arg_type, arg);
+ arg = gfc_evaluate_now (arg, &se->pre);
+ result_type = gfc_get_int_type (gfc_default_integer_kind);
+
+ /* Compute LEADZ for the case i .ne. 0.  */
+ if (func)
+ {
+ /* s is the number of padding bits the builtin's argument type has
+ beyond the Fortran kind; those extra leading zeros must be
+ subtracted from the builtin's result.  */
+ s = TYPE_PRECISION (arg_type) - argsize;
+ tmp = fold_convert (result_type,
+ build_call_expr_loc (input_location, func,
+ 1, arg));
+ leadz = fold_build2_loc (input_location, MINUS_EXPR, result_type,
+ tmp, build_int_cst (result_type, s));
+ }
+ else
+ {
+ /* We end up here if the argument type is larger than 'long long'.
+ We generate this code:
+
+ if ((x & (ULL_MAX << ULLSIZE)) != 0)
+ return clzll ((unsigned long long) (x >> ULLSIZE));
+ else
+ return ULLSIZE + clzll ((unsigned long long) x);
+
+ where ULL_MAX is the largest value that an unsigned long long
+ can hold (0xFFFFFFFFFFFFFFFF for a 64-bit long long type), and
+ ULLSIZE is the bit-size of the long long type (64 in this
+ example).  I.e. if the high half is nonzero, count its leading
+ zeros; otherwise the whole high half contributes ULLSIZE zeros
+ plus those of the low half.  */
+ tree ullsize, ullmax, tmp1, tmp2;
+
+ ullsize = build_int_cst (result_type, LONG_LONG_TYPE_SIZE);
+ /* ULL_MAX built as ~0 to stay correct for any long long width.  */
+ ullmax = fold_build1_loc (input_location, BIT_NOT_EXPR,
+ long_long_unsigned_type_node,
+ build_int_cst (long_long_unsigned_type_node,
+ 0));
+
+ /* cond: is the high half of the argument nonzero?  */
+ cond = fold_build2_loc (input_location, LSHIFT_EXPR, arg_type,
+ fold_convert (arg_type, ullmax), ullsize);
+ cond = fold_build2_loc (input_location, BIT_AND_EXPR, arg_type,
+ arg, cond);
+ cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
+ cond, build_int_cst (arg_type, 0));
+
+ /* tmp1: leading zeros of the high half.  */
+ tmp1 = fold_build2_loc (input_location, RSHIFT_EXPR, arg_type,
+ arg, ullsize);
+ tmp1 = fold_convert (long_long_unsigned_type_node, tmp1);
+ tmp1 = fold_convert (result_type,
+ build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_CLZLL],
+ 1, tmp1));
+
+ /* tmp2: high half is all zero, so ULLSIZE plus the leading zeros
+ of the low half.  */
+ tmp2 = fold_convert (long_long_unsigned_type_node, arg);
+ tmp2 = fold_convert (result_type,
+ build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_CLZLL],
+ 1, tmp2));
+ tmp2 = fold_build2_loc (input_location, PLUS_EXPR, result_type,
+ tmp2, ullsize);
+
+ leadz = fold_build3_loc (input_location, COND_EXPR, result_type,
+ cond, tmp1, tmp2);
+ }
+
+ /* Build BIT_SIZE.  */
+ bit_size = build_int_cst (result_type, argsize);
+
+ /* Guard against i == 0, for which __builtin_clz* is undefined:
+ LEADZ(0) is defined to be BIT_SIZE(i).  */
+ cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
+ arg, build_int_cst (arg_type, 0));
+ se->expr = fold_build3_loc (input_location, COND_EXPR, result_type, cond,
+ bit_size, leadz);
+}
+
+
+/* TRAILZ(i) = (i == 0) ? BIT_SIZE (i) : __builtin_ctz(i)
+
+ The conditional expression is necessary because the result of TRAILZ(0)
+ is defined, but the result of __builtin_ctz(0) is undefined for most
+ targets. */
+
+static void
+gfc_conv_intrinsic_trailz (gfc_se * se, gfc_expr *expr)
+{
+ tree arg;
+ tree arg_type;
+ tree cond;
+ tree result_type;
+ tree trailz;
+ tree bit_size;
+ tree func;
+ int argsize;
+
+ /* Evaluate the single actual argument and find its bit width.  */
+ gfc_conv_intrinsic_function_args (se, expr, &arg, 1);
+ argsize = TYPE_PRECISION (TREE_TYPE (arg));
+
+ /* Which variant of __builtin_ctz* should we call?  Pick the smallest
+ unsigned C type at least as wide as the Fortran argument.  Unlike
+ LEADZ, no width correction is needed: zero-extending the argument
+ does not change the number of trailing zeros.  */
+ if (argsize <= INT_TYPE_SIZE)
+ {
+ arg_type = unsigned_type_node;
+ func = built_in_decls[BUILT_IN_CTZ];
+ }
+ else if (argsize <= LONG_TYPE_SIZE)
+ {
+ arg_type = long_unsigned_type_node;
+ func = built_in_decls[BUILT_IN_CTZL];
+ }
+ else if (argsize <= LONG_LONG_TYPE_SIZE)
+ {
+ arg_type = long_long_unsigned_type_node;
+ func = built_in_decls[BUILT_IN_CTZLL];
+ }
+ else
+ {
+ /* No builtin is wide enough; only the double-wide case (e.g. a
+ 128-bit integer kind on a 64-bit target) is supported, and it is
+ open-coded below with two CTZLL calls.  */
+ gcc_assert (argsize == 2 * LONG_LONG_TYPE_SIZE);
+ arg_type = gfc_build_uint_type (argsize);
+ func = NULL_TREE;
+ }
+
+ /* Convert the actual argument twice: first, to the unsigned type of the
+ same size; then, to the proper argument type for the built-in
+ function.  But the return type is of the default INTEGER kind.  */
+ arg = fold_convert (gfc_build_uint_type (argsize), arg);
+ arg = fold_convert (arg_type, arg);
+ arg = gfc_evaluate_now (arg, &se->pre);
+ result_type = gfc_get_int_type (gfc_default_integer_kind);
+
+ /* Compute TRAILZ for the case i .ne. 0.  */
+ if (func)
+ trailz = fold_convert (result_type, build_call_expr_loc (input_location,
+ func, 1, arg));
+ else
+ {
+ /* We end up here if the argument type is larger than 'long long'.
+ We generate this code:
+
+ if ((x & ULL_MAX) == 0)
+ return ULLSIZE + ctzll ((unsigned long long) (x >> ULLSIZE));
+ else
+ return ctzll ((unsigned long long) x);
+
+ where ULL_MAX is the largest value that an unsigned long long
+ can hold (0xFFFFFFFFFFFFFFFF for a 64-bit long long type), and
+ ULLSIZE is the bit-size of the long long type (64 in this
+ example).  I.e. if the low half is all zero, it contributes
+ ULLSIZE trailing zeros plus those of the high half; otherwise
+ only the low half matters.  */
+ tree ullsize, ullmax, tmp1, tmp2;
+
+ ullsize = build_int_cst (result_type, LONG_LONG_TYPE_SIZE);
+ /* ULL_MAX built as ~0 to stay correct for any long long width.  */
+ ullmax = fold_build1_loc (input_location, BIT_NOT_EXPR,
+ long_long_unsigned_type_node,
+ build_int_cst (long_long_unsigned_type_node, 0));
+
+ /* cond: is the low half of the argument all zero?  */
+ cond = fold_build2_loc (input_location, BIT_AND_EXPR, arg_type, arg,
+ fold_convert (arg_type, ullmax));
+ cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, cond,
+ build_int_cst (arg_type, 0));
+
+ /* tmp1: low half all zero, so ULLSIZE plus the trailing zeros of
+ the high half.  */
+ tmp1 = fold_build2_loc (input_location, RSHIFT_EXPR, arg_type,
+ arg, ullsize);
+ tmp1 = fold_convert (long_long_unsigned_type_node, tmp1);
+ tmp1 = fold_convert (result_type,
+ build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_CTZLL],
+ 1, tmp1));
+ tmp1 = fold_build2_loc (input_location, PLUS_EXPR, result_type,
+ tmp1, ullsize);
+
+ /* tmp2: trailing zeros of the (nonzero) low half.  */
+ tmp2 = fold_convert (long_long_unsigned_type_node, arg);
+ tmp2 = fold_convert (result_type,
+ build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_CTZLL],
+ 1, tmp2));
+
+ trailz = fold_build3_loc (input_location, COND_EXPR, result_type,
+ cond, tmp1, tmp2);
+ }
+
+ /* Build BIT_SIZE.  */
+ bit_size = build_int_cst (result_type, argsize);
+
+ /* Guard against i == 0, for which __builtin_ctz* is undefined:
+ TRAILZ(0) is defined to be BIT_SIZE(i).  */
+ cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
+ arg, build_int_cst (arg_type, 0));
+ se->expr = fold_build3_loc (input_location, COND_EXPR, result_type, cond,
+ bit_size, trailz);
+}
+
+/* Using __builtin_popcount for POPCNT and __builtin_parity for POPPAR;
+ for types larger than "long long", we call the long long built-in for
+ the lower and higher bits and combine the result. */
+
+static void
+gfc_conv_intrinsic_popcnt_poppar (gfc_se * se, gfc_expr *expr, int parity)
+{
+ tree arg;
+ tree arg_type;
+ tree result_type;
+ tree func;
+ int argsize;
+
+ /* Evaluate the single actual argument and find its bit width; the
+ result is always of the default INTEGER kind.  */
+ gfc_conv_intrinsic_function_args (se, expr, &arg, 1);
+ argsize = TYPE_PRECISION (TREE_TYPE (arg));
+ result_type = gfc_get_int_type (gfc_default_integer_kind);
+
+ /* Which variant of the builtin should we call?  'parity' selects
+ POPPAR (__builtin_parity*) over POPCNT (__builtin_popcount*).  */
+ if (argsize <= INT_TYPE_SIZE)
+ {
+ arg_type = unsigned_type_node;
+ func = built_in_decls[parity ? BUILT_IN_PARITY : BUILT_IN_POPCOUNT];
+ }
+ else if (argsize <= LONG_TYPE_SIZE)
+ {
+ arg_type = long_unsigned_type_node;
+ func = built_in_decls[parity ? BUILT_IN_PARITYL : BUILT_IN_POPCOUNTL];
+ }
+ else if (argsize <= LONG_LONG_TYPE_SIZE)
+ {
+ arg_type = long_long_unsigned_type_node;
+ func = built_in_decls[parity ? BUILT_IN_PARITYLL : BUILT_IN_POPCOUNTLL];
+ }
+ else
+ {
+ /* Our argument type is larger than 'long long', which mean none
+ of the POPCOUNT builtins covers it.  We thus call the 'long long'
+ variant multiple times, and add the results.  */
+ tree utype, arg2, call1, call2;
+
+ /* For now, we only cover the case where argsize is twice as large
+ as 'long long'.  */
+ gcc_assert (argsize == 2 * LONG_LONG_TYPE_SIZE);
+
+ func = built_in_decls[parity ? BUILT_IN_PARITYLL : BUILT_IN_POPCOUNTLL];
+
+ /* Convert it to an integer, and store into a variable.  */
+ utype = gfc_build_uint_type (argsize);
+ arg = fold_convert (utype, arg);
+ arg = gfc_evaluate_now (arg, &se->pre);
+
+ /* Call the builtin twice: once on the low half (the conversion to
+ unsigned long long truncates), once on the high half.  */
+ call1 = build_call_expr_loc (input_location, func, 1,
+ fold_convert (long_long_unsigned_type_node,
+ arg));
+
+ arg2 = fold_build2_loc (input_location, RSHIFT_EXPR, utype, arg,
+ build_int_cst (utype, LONG_LONG_TYPE_SIZE));
+ call2 = build_call_expr_loc (input_location, func, 1,
+ fold_convert (long_long_unsigned_type_node,
+ arg2));
+
+ /* Combine the results: parities XOR, bit counts add.  */
+ if (parity)
+ se->expr = fold_build2_loc (input_location, BIT_XOR_EXPR, result_type,
+ call1, call2);
+ else
+ se->expr = fold_build2_loc (input_location, PLUS_EXPR, result_type,
+ call1, call2);
+
+ return;
+ }
+
+ /* Convert the actual argument twice: first, to the unsigned type of the
+ same size; then, to the proper argument type for the built-in
+ function.  */
+ arg = fold_convert (gfc_build_uint_type (argsize), arg);
+ arg = fold_convert (arg_type, arg);
+
+ se->expr = fold_convert (result_type,
+ build_call_expr_loc (input_location, func, 1, arg));
+}
+
+
+/* Process an intrinsic with unspecified argument-types that has an optional
+ argument (which could be of type character), e.g. EOSHIFT. For those, we
+ need to append the string length of the optional argument if it is not
+ present and the type is really character.
+ primary specifies the position (starting at 1) of the non-optional argument
+ specifying the type and optional gives the position of the optional
+ argument in the arglist. */
+
+static void
+conv_generic_with_optional_char_arg (gfc_se* se, gfc_expr* expr,
+ unsigned primary, unsigned optional)
+{
+ gfc_actual_arglist* prim_arg;
+ gfc_actual_arglist* opt_arg;
+ unsigned cur_pos;
+ gfc_actual_arglist* arg;
+ gfc_symbol* sym;
+ VEC(tree,gc) *append_args;
+
+ /* Find the two arguments given as position. */
+ cur_pos = 0;
+ prim_arg = NULL;
+ opt_arg = NULL;
+ for (arg = expr->value.function.actual; arg; arg = arg->next)
+ {
+ ++cur_pos;
+
+ if (cur_pos == primary)
+ prim_arg = arg;
+ if (cur_pos == optional)
+ opt_arg = arg;
+
+ if (cur_pos >= primary && cur_pos >= optional)
+ break;
+ }
+ gcc_assert (prim_arg);
+ gcc_assert (prim_arg->expr);
+ gcc_assert (opt_arg);
+
+ /* If we do have type CHARACTER and the optional argument is really absent,
+ append a dummy 0 as string length. */
+ append_args = NULL;
+ if (prim_arg->expr->ts.type == BT_CHARACTER && !opt_arg->expr)
+ {
+ tree dummy;
+
+ dummy = build_int_cst (gfc_charlen_type_node, 0);
+ append_args = VEC_alloc (tree, gc, 1);
+ VEC_quick_push (tree, append_args, dummy);
+ }
+
+ /* Build the call itself. */
+ sym = gfc_get_symbol_for_expr (expr);
+ gfc_conv_procedure_call (se, sym, expr->value.function.actual, expr,
+ append_args);
+ gfc_free (sym);