@@ -12713,63 +12713,132 @@ GenTree* Compiler::gtFoldExpr(GenTree* tree)
1271312713//
1271412714// Notes:
1271512715// Checks for calls to Type.op_Equality, Type.op_Inequality, and
12716- // Enum.HasFlag, and if the call is to one of these,
12717- // attempts to optimize.
12716+ // Enum.HasFlag, as well as the helpers for casts,
12717+ // and if the call is to one of these, attempts to optimize.
1271812718
1271912719GenTree* Compiler::gtFoldExprCall(GenTreeCall* call)
1272012720{
12721- // Can only fold calls to special intrinsics.
12722- if (!call->IsSpecialIntrinsic())
12723- {
12724- return call;
12725- }
12726-
1272712721 // Defer folding if not optimizing.
1272812722 if (opts.OptimizationDisabled())
1272912723 {
1273012724 return call;
1273112725 }
1273212726
12733- // Fetch id of the intrinsic.
12734- const CorInfoIntrinsics methodID = info.compCompHnd->getIntrinsicID(call->gtCallMethHnd);
12735-
12736- switch (methodID)
12727+ // Is this an intrinsic we can fold?
12728+ if (call->IsSpecialIntrinsic())
1273712729 {
12738- case CORINFO_INTRINSIC_TypeEQ:
12739- case CORINFO_INTRINSIC_TypeNEQ:
12730+ // Fetch id of the intrinsic.
12731+ const CorInfoIntrinsics methodID = info.compCompHnd->getIntrinsicID(call->gtCallMethHnd);
12732+ switch (methodID)
12733+ {
12734+ case CORINFO_INTRINSIC_TypeEQ:
12735+ case CORINFO_INTRINSIC_TypeNEQ:
12736+ {
12737+ noway_assert(call->TypeGet() == TYP_INT);
12738+ GenTree* op1 = call->gtCallArgs->GetNode();
12739+ GenTree* op2 = call->gtCallArgs->GetNext()->GetNode();
12740+
12741+ // If either operand is known to be a RuntimeType, this can be folded
12742+ GenTree* result = gtFoldTypeEqualityCall(methodID, op1, op2);
12743+ if (result != nullptr)
12744+ {
12745+ return result;
12746+ }
12747+ break;
12748+ }
12749+
12750+ default:
12751+ break;
12752+ }
12753+
12754+ // Check for a new-style jit intrinsic.
12755+ const NamedIntrinsic ni = lookupNamedIntrinsic(call->gtCallMethHnd);
12756+
12757+ if (ni == NI_System_Enum_HasFlag)
1274012758 {
12741- noway_assert(call->TypeGet() == TYP_INT);
12742- GenTree* op1 = call->gtCallArgs->GetNode();
12743- GenTree* op2 = call->gtCallArgs->GetNext()->GetNode();
12759+ GenTree* thisOp = call->gtCallThisArg->GetNode();
12760+ GenTree* flagOp = call->gtCallArgs->GetNode();
12761+ GenTree* result = gtOptimizeEnumHasFlag(thisOp, flagOp);
1274412762
12745- // If either operand is known to be a RuntimeType, this can be folded
12746- GenTree* result = gtFoldTypeEqualityCall(methodID, op1, op2);
1274712763 if (result != nullptr)
1274812764 {
1274912765 return result;
1275012766 }
12751- break;
1275212767 }
12768+ }
1275312769
12754- default:
12770+ // There was no intrinsic to fold, try the helper path.
12771+ // At present, we only fold the helpers for casts.
12772+ const CorInfoHelpFunc helper = eeGetHelperNum(call->gtCallMethHnd);
12773+ bool overflowCast = false;
12774+ bool fromUnsigned = false;
12775+ var_types castToType = call->TypeGet();
12776+
12777+ assert(castToType == genActualType(castToType));
12778+
12779+ switch (helper)
12780+ {
12781+ case CORINFO_HELP_ULNG2DBL:
12782+ fromUnsigned = true;
12783+ break;
12784+ case CORINFO_HELP_LNG2DBL:
12785+ case CORINFO_HELP_DBL2INT:
12786+ case CORINFO_HELP_DBL2LNG:
12787+ break;
12788+ case CORINFO_HELP_DBL2UINT:
12789+ castToType = TYP_UINT;
12790+ break;
12791+ case CORINFO_HELP_DBL2ULNG:
12792+ castToType = TYP_ULONG;
1275512793 break;
12794+ case CORINFO_HELP_DBL2INT_OVF:
12795+ case CORINFO_HELP_DBL2LNG_OVF:
12796+ overflowCast = true;
12797+ break;
12798+ case CORINFO_HELP_DBL2UINT_OVF:
12799+ overflowCast = true;
12800+ castToType = TYP_UINT;
12801+ break;
12802+ case CORINFO_HELP_DBL2ULNG_OVF:
12803+ overflowCast = true;
12804+ castToType = TYP_ULONG;
12805+ break;
12806+ default:
12807+ // We cannot fold this call.
12808+ return call;
12809+ }
12810+
12811+ assert(call->fgArgInfo->ArgCount() == 1);
12812+ GenTree* arg = call->gtCallArgs->GetNode();
12813+
12814+ // We have a placeholder, find the real argument.
12815+ if (arg->OperIs(GT_ARGPLACE))
12816+ {
12817+ arg = call->fgArgInfo->GetArgNode(0);
1275612818 }
1275712819
12758- // Check for a new-style jit intrinsic.
12759- const NamedIntrinsic ni = lookupNamedIntrinsic(call->gtCallMethHnd);
12820+ if (!arg->OperIsConst() || gtIsActiveCSE_Candidate(arg))
12821+ {
12822+ // We can only fold casts from constants.
12823+ // Also, we cannot fold if the argument can be CSEd.
12824+ return call;
12825+ }
1276012826
12761- if (ni == NI_System_Enum_HasFlag)
12827+ GenTree* cast = gtNewCastNode(call->TypeGet(), arg, fromUnsigned, castToType);
12828+ if (overflowCast)
1276212829 {
12763- GenTree* thisOp = call->gtCallThisArg->GetNode();
12764- GenTree* flagOp = call->gtCallArgs->GetNode();
12765- GenTree* result = gtOptimizeEnumHasFlag(thisOp, flagOp);
12830+ cast->gtFlags |= GTF_OVERFLOW;
12831+ }
1276612832
12767- if (result != nullptr)
12768- {
12769- return result;
12770- }
12833+ // Try and fold this cast.
12834+ GenTree* constVal = gtFoldExprConst(cast);
12835+ if (constVal->OperIsConst())
12836+ {
12837+ // Success!
12838+ return constVal;
1277112839 }
1277212840
12841+ // We failed, return the original tree.
1277312842 return call;
1277412843}
1277512844
0 commit comments