Skip to content

Commit

Permalink
JIT: optimize type casts
Browse files Browse the repository at this point in the history
Implement the jit interface compareTypesForEquality method
to handle casts from known types to known types, and from
shared types to certain interface types.

Call this method in the jit for castclass and isinst, using
`gtGetClassHandle` to obtain the from type. Optimize successful
casts and unsuccessful isinsts when the from type is known
exactly.

Undo part of the type-equality based optimization/workaround
in the AsyncMethodBuilder code that was introduced in dotnet#14178
in favor of interface checks. There is more that can be done
here before this issue is entirely closed, and I will
look at this subsequently.

This optimization allows the jit to remove boxes that are
used solely to feed type casts, and so closes #12877.
  • Loading branch information
AndyAyersMS committed Oct 13, 2017
1 parent 4cf4202 commit 7532f08
Show file tree
Hide file tree
Showing 4 changed files with 262 additions and 103 deletions.
2 changes: 2 additions & 0 deletions src/jit/compiler.h
Original file line number Diff line number Diff line change
Expand Up @@ -3156,6 +3156,8 @@ class Compiler
CORINFO_RESOLVED_TOKEN* pResolvedToken,
bool isCastClass);

GenTree* impOptimizeCastClassOrIsInst(GenTree* op1, CORINFO_RESOLVED_TOKEN* pResolvedToken, bool isCastClass);

bool VarTypeIsMultiByteAndCanEnreg(var_types type,
CORINFO_CLASS_HANDLE typeClass,
unsigned* typeSize,
Expand Down
229 changes: 173 additions & 56 deletions src/jit/importer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -9762,6 +9762,99 @@ var_types Compiler::impGetByRefResultType(genTreeOps oper, bool fUnsigned, GenTr
return type;
}

//------------------------------------------------------------------------
// impOptimizeCastClassOrIsInst: attempt to resolve a cast when jitting
//
// Arguments:
//   op1            -- value to cast (must be TYP_REF)
//   pResolvedToken -- resolved token for type to cast to
//   isCastClass    -- true if this is a castclass, false if isinst
//
// Return Value:
//   tree representing optimized cast, or null if no optimization possible
//
// Notes:
//   Only runs when optimizing (skipped for minopts/debug codegen).
//   A cast that must succeed folds to op1 itself. An isinst that must
//   fail folds to a null constant (and removes a feeding box, if any).
//   A castclass that must fail is left alone so the runtime helper can
//   raise InvalidCastException.

GenTree* Compiler::impOptimizeCastClassOrIsInst(GenTree* op1, CORINFO_RESOLVED_TOKEN* pResolvedToken, bool isCastClass)
{
    assert(op1->TypeGet() == TYP_REF);

    // Don't optimize for minopts or debug codegen.
    if (opts.compDbgCode || opts.MinOpts())
    {
        return nullptr;
    }

    // See what we know about the type of the object being cast.
    bool                 isExact   = false;
    bool                 isNonNull = false;
    CORINFO_CLASS_HANDLE fromClass = gtGetClassHandle(op1, &isExact, &isNonNull);

    if (fromClass == nullptr)
    {
        JITDUMP("\nCan't optimize since fromClass is unknown\n");
        return nullptr;
    }

    CORINFO_CLASS_HANDLE toClass = pResolvedToken->hClass;
    JITDUMP("\nConsidering optimization of %s from %s%p (%s) to %p (%s)\n", isCastClass ? "castclass" : "isinst",
            isExact ? "exact " : "", fromClass, info.compCompHnd->getClassName(fromClass), toClass,
            info.compCompHnd->getClassName(toClass));

    // Perhaps we know if the cast will succeed or fail.
    TypeCompareState castResult = info.compCompHnd->compareTypesForCast(fromClass, toClass);

    if (castResult == TypeCompareState::Must)
    {
        // Cast will succeed, result is simply op1.
        JITDUMP("Cast will succeed, optimizing to simply return input\n");
        return op1;
    }

    if (castResult == TypeCompareState::MustNot)
    {
        // See if we can sharpen exactness by looking for final classes.
        // A final, non-array, non-variant, non-remoting class has no
        // subtypes that could change the outcome.
        if (!isExact)
        {
            DWORD flags     = info.compCompHnd->getClassAttribs(fromClass);
            DWORD flagsMask = CORINFO_FLG_FINAL | CORINFO_FLG_MARSHAL_BYREF | CORINFO_FLG_CONTEXTFUL |
                              CORINFO_FLG_VARIANCE | CORINFO_FLG_ARRAY;
            isExact = ((flags & flagsMask) == CORINFO_FLG_FINAL);
        }

        // Cast to exact type will fail. Handle case where we have
        // an exact type (that is, no subtype of fromClass could make
        // the cast succeed) and we're not going to throw on failure.
        if (isExact && !isCastClass)
        {
            JITDUMP("Cast will fail, optimizing to return null\n");
            GenTree* result = gtNewIconNode(0, TYP_REF);

            // If the cast was fed by a box, we can remove that too.
            if (op1->IsBoxedValue())
            {
                JITDUMP("Also removing upstream box\n");
                gtTryRemoveBoxUpstreamEffects(op1);
            }
            // NOTE(review): when op1 is not a box (or the box removal
            // above cannot strip all effects), op1's side effects appear
            // to be discarded along with op1 itself — confirm callers
            // spill/append op1 when it has side effects.

            return result;
        }

        if (isExact)
        {
            JITDUMP("Not optimizing failing castclass (yet)\n");
        }
        else
        {
            JITDUMP("Can't optimize since fromClass is inexact\n");
        }
        return nullptr;
    }

    // TypeCompareState::May — outcome unknown at jit time.
    JITDUMP("Result of cast unknown, must generate runtime test\n");
    return nullptr;
}

//------------------------------------------------------------------------
// impCastClassOrIsInstToTree: build and import castclass/isinst
//
Expand Down Expand Up @@ -10203,6 +10296,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
var_types lclTyp, ovflType = TYP_UNKNOWN;
GenTreePtr op1 = DUMMY_INIT(NULL);
GenTreePtr op2 = DUMMY_INIT(NULL);
GenTree* optTree = nullptr;
GenTreeArgList* args = nullptr; // What good do these "DUMMY_INIT"s do?
GenTreePtr newObjThisPtr = DUMMY_INIT(NULL);
bool uns = DUMMY_INIT(false);
Expand Down Expand Up @@ -14262,43 +14356,54 @@ void Compiler::impImportBlockCode(BasicBlock* block)

op1 = impPopStack().val;

#ifdef FEATURE_READYTORUN_COMPILER
if (opts.IsReadyToRun())
optTree = impOptimizeCastClassOrIsInst(op1, &resolvedToken, false);

if (optTree != nullptr)
{
impPushOnStack(optTree, tiRetVal);
}
else
{
GenTreeCall* opLookup =
impReadyToRunHelperToTree(&resolvedToken, CORINFO_HELP_READYTORUN_ISINSTANCEOF, TYP_REF,
gtNewArgList(op1));
usingReadyToRunHelper = (opLookup != nullptr);
op1 = (usingReadyToRunHelper ? opLookup : op1);

if (!usingReadyToRunHelper)
#ifdef FEATURE_READYTORUN_COMPILER
if (opts.IsReadyToRun())
{
// TODO: ReadyToRun: When generic dictionary lookups are necessary, replace the lookup call
// and the isinstanceof_any call with a single call to a dynamic R2R cell that will:
// 1) Load the context
// 2) Perform the generic dictionary lookup and caching, and generate the appropriate stub
// 3) Perform the 'is instance' check on the input object
// Reason: performance (today, we'll always use the slow helper for the R2R generics case)
GenTreeCall* opLookup =
impReadyToRunHelperToTree(&resolvedToken, CORINFO_HELP_READYTORUN_ISINSTANCEOF, TYP_REF,
gtNewArgList(op1));
usingReadyToRunHelper = (opLookup != nullptr);
op1 = (usingReadyToRunHelper ? opLookup : op1);

op2 = impTokenToHandle(&resolvedToken, nullptr, FALSE);
if (op2 == nullptr)
{ // compDonotInline()
return;
if (!usingReadyToRunHelper)
{
// TODO: ReadyToRun: When generic dictionary lookups are necessary, replace the lookup call
// and the isinstanceof_any call with a single call to a dynamic R2R cell that will:
// 1) Load the context
// 2) Perform the generic dictionary lookup and caching, and generate the appropriate
// stub
// 3) Perform the 'is instance' check on the input object
// Reason: performance (today, we'll always use the slow helper for the R2R generics case)

op2 = impTokenToHandle(&resolvedToken, nullptr, FALSE);
if (op2 == nullptr)
{ // compDonotInline()
return;
}
}
}
}

if (!usingReadyToRunHelper)
if (!usingReadyToRunHelper)
#endif
{
op1 = impCastClassOrIsInstToTree(op1, op2, &resolvedToken, false);
}
if (compDonotInline())
{
return;
}
{
op1 = impCastClassOrIsInstToTree(op1, op2, &resolvedToken, false);
}
if (compDonotInline())
{
return;
}

impPushOnStack(op1, tiRetVal);
impPushOnStack(op1, tiRetVal);
}

break;

Expand Down Expand Up @@ -14796,43 +14901,55 @@ void Compiler::impImportBlockCode(BasicBlock* block)
// and op2 to contain code that creates the type handle corresponding to typeRef
CASTCLASS:

#ifdef FEATURE_READYTORUN_COMPILER
if (opts.IsReadyToRun())
optTree = impOptimizeCastClassOrIsInst(op1, &resolvedToken, true);

if (optTree != nullptr)
{
impPushOnStack(optTree, tiRetVal);
}
else
{
GenTreeCall* opLookup = impReadyToRunHelperToTree(&resolvedToken, CORINFO_HELP_READYTORUN_CHKCAST,
TYP_REF, gtNewArgList(op1));
usingReadyToRunHelper = (opLookup != nullptr);
op1 = (usingReadyToRunHelper ? opLookup : op1);

if (!usingReadyToRunHelper)
#ifdef FEATURE_READYTORUN_COMPILER
if (opts.IsReadyToRun())
{
// TODO: ReadyToRun: When generic dictionary lookups are necessary, replace the lookup call
// and the chkcastany call with a single call to a dynamic R2R cell that will:
// 1) Load the context
// 2) Perform the generic dictionary lookup and caching, and generate the appropriate stub
// 3) Check the object on the stack for the type-cast
// Reason: performance (today, we'll always use the slow helper for the R2R generics case)
GenTreeCall* opLookup =
impReadyToRunHelperToTree(&resolvedToken, CORINFO_HELP_READYTORUN_CHKCAST, TYP_REF,
gtNewArgList(op1));
usingReadyToRunHelper = (opLookup != nullptr);
op1 = (usingReadyToRunHelper ? opLookup : op1);

op2 = impTokenToHandle(&resolvedToken, nullptr, FALSE);
if (op2 == nullptr)
{ // compDonotInline()
return;
if (!usingReadyToRunHelper)
{
// TODO: ReadyToRun: When generic dictionary lookups are necessary, replace the lookup call
// and the chkcastany call with a single call to a dynamic R2R cell that will:
// 1) Load the context
// 2) Perform the generic dictionary lookup and caching, and generate the appropriate
// stub
// 3) Check the object on the stack for the type-cast
// Reason: performance (today, we'll always use the slow helper for the R2R generics case)

op2 = impTokenToHandle(&resolvedToken, nullptr, FALSE);
if (op2 == nullptr)
{ // compDonotInline()
return;
}
}
}
}

if (!usingReadyToRunHelper)
if (!usingReadyToRunHelper)
#endif
{
op1 = impCastClassOrIsInstToTree(op1, op2, &resolvedToken, true);
}
if (compDonotInline())
{
return;
}
{
op1 = impCastClassOrIsInstToTree(op1, op2, &resolvedToken, true);
}
if (compDonotInline())
{
return;
}

/* Push the result back on the stack */
impPushOnStack(op1, tiRetVal);
/* Push the result back on the stack */
impPushOnStack(op1, tiRetVal);
}
break;

case CEE_THROW:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -387,33 +387,14 @@ public void AwaitUnsafeOnCompleted<TAwaiter, TStateMachine>(
{
IAsyncStateMachineBox box = GetStateMachineBox(ref stateMachine);

// TODO https://github.com/dotnet/coreclr/issues/12877:
// Once the JIT is able to recognize "awaiter is ITaskAwaiter" and "awaiter is IConfiguredTaskAwaiter",
// use those in order to a) consolidate a lot of this code, and b) handle all Task/Task<T> and not just
// the few types special-cased here. For now, handle common {Configured}TaskAwaiter. Having the types
// explicitly listed here allows the JIT to generate the best code for them; otherwise we'll fall through
// to the later workaround.
if (typeof(TAwaiter) == typeof(TaskAwaiter) ||
typeof(TAwaiter) == typeof(TaskAwaiter<object>) ||
typeof(TAwaiter) == typeof(TaskAwaiter<string>) ||
typeof(TAwaiter) == typeof(TaskAwaiter<byte[]>) ||
typeof(TAwaiter) == typeof(TaskAwaiter<bool>) ||
typeof(TAwaiter) == typeof(TaskAwaiter<byte>) ||
typeof(TAwaiter) == typeof(TaskAwaiter<int>) ||
typeof(TAwaiter) == typeof(TaskAwaiter<long>))
// The null tests here ensure that the jit can optimize away the interface
// tests when TAwaiter is a ref type.
if ((null != (object)default(TAwaiter)) && (awaiter is ITaskAwaiter))
{
ref TaskAwaiter ta = ref Unsafe.As<TAwaiter, TaskAwaiter>(ref awaiter); // relies on TaskAwaiter/TaskAwaiter<T> having the same layout
TaskAwaiter.UnsafeOnCompletedInternal(ta.m_task, box, continueOnCapturedContext: true);
}
else if (
typeof(TAwaiter) == typeof(ConfiguredTaskAwaitable.ConfiguredTaskAwaiter) ||
typeof(TAwaiter) == typeof(ConfiguredTaskAwaitable<object>.ConfiguredTaskAwaiter) ||
typeof(TAwaiter) == typeof(ConfiguredTaskAwaitable<string>.ConfiguredTaskAwaiter) ||
typeof(TAwaiter) == typeof(ConfiguredTaskAwaitable<byte[]>.ConfiguredTaskAwaiter) ||
typeof(TAwaiter) == typeof(ConfiguredTaskAwaitable<bool>.ConfiguredTaskAwaiter) ||
typeof(TAwaiter) == typeof(ConfiguredTaskAwaitable<byte>.ConfiguredTaskAwaiter) ||
typeof(TAwaiter) == typeof(ConfiguredTaskAwaitable<int>.ConfiguredTaskAwaiter) ||
typeof(TAwaiter) == typeof(ConfiguredTaskAwaitable<long>.ConfiguredTaskAwaiter))
else if ((null != (object)default(TAwaiter)) && (awaiter is IConfiguredTaskAwaiter))
{
ref ConfiguredTaskAwaitable.ConfiguredTaskAwaiter ta = ref Unsafe.As<TAwaiter, ConfiguredTaskAwaitable.ConfiguredTaskAwaiter>(ref awaiter);
TaskAwaiter.UnsafeOnCompletedInternal(ta.m_task, box, ta.m_continueOnCapturedContext);
Expand Down Expand Up @@ -450,21 +431,6 @@ public void AwaitUnsafeOnCompleted<TAwaiter, TStateMachine>(
TaskAwaiter.UnsafeOnCompletedInternal(vta.AsTask(), box, vta._continueOnCapturedContext);
}

// To catch all Task/Task<T> awaits, do the currently more expensive interface checks.
// Eventually these and the above Task/Task<T> checks should be replaced by "is" checks,
// once that's recognized and optimized by the JIT. We do these after all of the hardcoded
// checks above so that they don't incur the costs of these checks.
else if (InterfaceIsCheckWorkaround<TAwaiter>.IsITaskAwaiter)
{
ref TaskAwaiter ta = ref Unsafe.As<TAwaiter, TaskAwaiter>(ref awaiter);
TaskAwaiter.UnsafeOnCompletedInternal(ta.m_task, box, continueOnCapturedContext: true);
}
else if (InterfaceIsCheckWorkaround<TAwaiter>.IsIConfiguredTaskAwaiter)
{
ref ConfiguredTaskAwaitable.ConfiguredTaskAwaiter ta = ref Unsafe.As<TAwaiter, ConfiguredTaskAwaitable.ConfiguredTaskAwaiter>(ref awaiter);
TaskAwaiter.UnsafeOnCompletedInternal(ta.m_task, box, ta.m_continueOnCapturedContext);
}

// The awaiter isn't specially known. Fall back to doing a normal await.
else
{
Expand Down Expand Up @@ -922,13 +888,6 @@ internal static Task<TResult> CreateCacheableTask<TResult>(TResult result) =>
new Task<TResult>(false, result, (TaskCreationOptions)InternalTaskOptions.DoNotDispose, default(CancellationToken));
}

/// <summary>Temporary workaround for https://github.com/dotnet/coreclr/issues/12877.</summary>
internal static class InterfaceIsCheckWorkaround<TAwaiter>
{
internal static readonly bool IsITaskAwaiter = typeof(TAwaiter).GetInterface("ITaskAwaiter") != null;
internal static readonly bool IsIConfiguredTaskAwaiter = typeof(TAwaiter).GetInterface("IConfiguredTaskAwaiter") != null;
}

/// <summary>
/// An interface implemented by all <see cref="AsyncStateMachineBox{TStateMachine, TResult}"/> instances, regardless of generics.
/// </summary>
Expand Down
Loading

0 comments on commit 7532f08

Please sign in to comment.