Skip to content

Commit 8f3b2d7

Browse files
ofrobots authored and MylesBorins committed
deps: V8: cherry-pick ac0fe8ec from upstream
Original commit message: [interpreter] Remove TryInstallOptimizedCode Removes the interrupt check and runtime call to TryInstallOptimizedCode from the optimization marker checks (i.e. CompileLazy and InterpreterEntryTrampoline). Instead, we rely on the other interrupt sources (in particular stack checks at function entries and loop headers) to install optimized code for us. This will hopefully not cause regressions, as we have plenty of other interrupt checks, but it may delay optimized code execution for some function by one function call. Bug: v8:6933 Change-Id: Ieadfff7ae2078d2a84085294158ad9a706eb9c64 Reviewed-on: https://chromium-review.googlesource.com/723475 Reviewed-by: Ross McIlroy <[email protected]> Commit-Queue: Leszek Swirski <[email protected]> Cr-Commit-Position: refs/heads/master@{#48667} Ref: https://bugs.chromium.org/p/v8/issues/detail?id=6933 Ref: v8/v8@ac0fe8e PR-URL: #17695 Reviewed-By: Michaël Zasso <[email protected]> Reviewed-By: Ben Noordhuis <[email protected]> Reviewed-By: James M Snell <[email protected]>
1 parent e9ace7e commit 8f3b2d7

11 files changed

+25
-106
lines changed

common.gypi

+1-1
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@
2727

2828
# Reset this number to 0 on major V8 upgrades.
2929
# Increment by one for each non-official patch applied to deps/v8.
30-
'v8_embedder_string': '-node.15',
30+
'v8_embedder_string': '-node.16',
3131

3232
# Enable disassembler for `--print-code` v8 options
3333
'v8_enable_disassembler': 1,

deps/v8/src/builtins/arm/builtins-arm.cc

+3-10
Original file line numberDiff line numberDiff line change
@@ -1073,22 +1073,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
10731073
Runtime::kCompileOptimized_Concurrent);
10741074

10751075
{
1076-
// Otherwise, the marker is InOptimizationQueue.
1076+
// Otherwise, the marker is InOptimizationQueue, so fall through hoping
1077+
// that an interrupt will eventually update the slot with optimized code.
10771078
if (FLAG_debug_code) {
10781079
__ cmp(
10791080
optimized_code_entry,
10801081
Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
10811082
__ Assert(eq, kExpectedOptimizationSentinel);
10821083
}
1083-
// Checking whether the queued function is ready for install is
1084-
// optional, since we come across interrupts and stack checks elsewhere.
1085-
// However, not checking may delay installing ready functions, and
1086-
// always checking would be quite expensive. A good compromise is to
1087-
// first check against stack limit as a cue for an interrupt signal.
1088-
__ LoadRoot(scratch2, Heap::kStackLimitRootIndex);
1089-
__ cmp(sp, Operand(scratch2));
1090-
__ b(hs, &fallthrough);
1091-
GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
1084+
__ jmp(&fallthrough);
10921085
}
10931086
}
10941087

deps/v8/src/builtins/arm64/builtins-arm64.cc

+3-10
Original file line numberDiff line numberDiff line change
@@ -1084,22 +1084,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
10841084
Runtime::kCompileOptimized_Concurrent);
10851085

10861086
{
1087-
// Otherwise, the marker is InOptimizationQueue.
1087+
// Otherwise, the marker is InOptimizationQueue, so fall through hoping
1088+
// that an interrupt will eventually update the slot with optimized code.
10881089
if (FLAG_debug_code) {
10891090
__ Cmp(
10901091
optimized_code_entry,
10911092
Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
10921093
__ Assert(eq, kExpectedOptimizationSentinel);
10931094
}
1094-
1095-
// Checking whether the queued function is ready for install is optional,
1096-
// since we come across interrupts and stack checks elsewhere. However,
1097-
// not checking may delay installing ready functions, and always checking
1098-
// would be quite expensive. A good compromise is to first check against
1099-
// stack limit as a cue for an interrupt signal.
1100-
__ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
1101-
__ B(hs, &fallthrough);
1102-
GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
1095+
__ B(&fallthrough);
11031096
}
11041097
}
11051098

deps/v8/src/builtins/ia32/builtins-ia32.cc

+3-12
Original file line numberDiff line numberDiff line change
@@ -715,24 +715,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
715715
Runtime::kCompileOptimized_Concurrent);
716716

717717
{
718-
// Otherwise, the marker is InOptimizationQueue.
718+
// Otherwise, the marker is InOptimizationQueue, so fall through hoping
719+
// that an interrupt will eventually update the slot with optimized code.
719720
if (FLAG_debug_code) {
720721
__ cmp(
721722
optimized_code_entry,
722723
Immediate(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
723724
__ Assert(equal, kExpectedOptimizationSentinel);
724725
}
725-
726-
// Checking whether the queued function is ready for install is optional,
727-
// since we come across interrupts and stack checks elsewhere. However,
728-
// not checking may delay installing ready functions, and always checking
729-
// would be quite expensive. A good compromise is to first check against
730-
// stack limit as a cue for an interrupt signal.
731-
ExternalReference stack_limit =
732-
ExternalReference::address_of_stack_limit(masm->isolate());
733-
__ cmp(esp, Operand::StaticVariable(stack_limit));
734-
__ j(above_equal, &fallthrough);
735-
GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
726+
__ jmp(&fallthrough);
736727
}
737728
}
738729

deps/v8/src/builtins/mips/builtins-mips.cc

+3-10
Original file line numberDiff line numberDiff line change
@@ -1052,21 +1052,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
10521052
Runtime::kCompileOptimized_Concurrent);
10531053

10541054
{
1055-
// Otherwise, the marker is InOptimizationQueue.
1055+
// Otherwise, the marker is InOptimizationQueue, so fall through hoping
1056+
// that an interrupt will eventually update the slot with optimized code.
10561057
if (FLAG_debug_code) {
10571058
__ Assert(
10581059
eq, kExpectedOptimizationSentinel, optimized_code_entry,
10591060
Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
10601061
}
1061-
1062-
// Checking whether the queued function is ready for install is optional,
1063-
// since we come across interrupts and stack checks elsewhere. However,
1064-
// not checking may delay installing ready functions, and always checking
1065-
// would be quite expensive. A good compromise is to first check against
1066-
// stack limit as a cue for an interrupt signal.
1067-
__ LoadRoot(at, Heap::kStackLimitRootIndex);
1068-
__ Branch(&fallthrough, hs, sp, Operand(at));
1069-
GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
1062+
__ jmp(&fallthrough);
10701063
}
10711064
}
10721065

deps/v8/src/builtins/mips64/builtins-mips64.cc

+3-10
Original file line numberDiff line numberDiff line change
@@ -1054,21 +1054,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
10541054
Runtime::kCompileOptimized_Concurrent);
10551055

10561056
{
1057-
// Otherwise, the marker is InOptimizationQueue.
1057+
// Otherwise, the marker is InOptimizationQueue, so fall through hoping
1058+
// that an interrupt will eventually update the slot with optimized code.
10581059
if (FLAG_debug_code) {
10591060
__ Assert(
10601061
eq, kExpectedOptimizationSentinel, optimized_code_entry,
10611062
Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
10621063
}
1063-
1064-
// Checking whether the queued function is ready for install is optional,
1065-
// since we come across interrupts and stack checks elsewhere. However,
1066-
// not checking may delay installing ready functions, and always checking
1067-
// would be quite expensive. A good compromise is to first check against
1068-
// stack limit as a cue for an interrupt signal.
1069-
__ LoadRoot(t0, Heap::kStackLimitRootIndex);
1070-
__ Branch(&fallthrough, hs, sp, Operand(t0));
1071-
GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
1064+
__ jmp(&fallthrough);
10721065
}
10731066
}
10741067

deps/v8/src/builtins/ppc/builtins-ppc.cc

+3-11
Original file line numberDiff line numberDiff line change
@@ -1081,23 +1081,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
10811081
Runtime::kCompileOptimized_Concurrent);
10821082

10831083
{
1084-
// Otherwise, the marker is InOptimizationQueue.
1084+
// Otherwise, the marker is InOptimizationQueue, so fall through hoping
1085+
// that an interrupt will eventually update the slot with optimized code.
10851086
if (FLAG_debug_code) {
10861087
__ CmpSmiLiteral(
10871088
optimized_code_entry,
10881089
Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
10891090
__ Assert(eq, kExpectedOptimizationSentinel);
10901091
}
1091-
1092-
// Checking whether the queued function is ready for install is optional,
1093-
// since we come across interrupts and stack checks elsewhere. However,
1094-
// not checking may delay installing ready functions, and always checking
1095-
// would be quite expensive. A good compromise is to first check against
1096-
// stack limit as a cue for an interrupt signal.
1097-
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
1098-
__ cmpl(sp, ip);
1099-
__ bge(&fallthrough);
1100-
GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
1092+
__ b(&fallthrough);
11011093
}
11021094
}
11031095

deps/v8/src/builtins/s390/builtins-s390.cc

+3-10
Original file line numberDiff line numberDiff line change
@@ -1081,22 +1081,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
10811081
Runtime::kCompileOptimized_Concurrent);
10821082

10831083
{
1084-
// Otherwise, the marker is InOptimizationQueue.
1084+
// Otherwise, the marker is InOptimizationQueue, so fall through hoping
1085+
// that an interrupt will eventually update the slot with optimized code.
10851086
if (FLAG_debug_code) {
10861087
__ CmpSmiLiteral(
10871088
optimized_code_entry,
10881089
Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
10891090
__ Assert(eq, kExpectedOptimizationSentinel);
10901091
}
1091-
1092-
// Checking whether the queued function is ready for install is optional,
1093-
// since we come across interrupts and stack checks elsewhere. However,
1094-
// not checking may delay installing ready functions, and always checking
1095-
// would be quite expensive. A good compromise is to first check against
1096-
// stack limit as a cue for an interrupt signal.
1097-
__ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex));
1098-
__ bge(&fallthrough, Label::kNear);
1099-
GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
1092+
__ b(&fallthrough, Label::kNear);
11001093
}
11011094
}
11021095

deps/v8/src/builtins/x64/builtins-x64.cc

+3-10
Original file line numberDiff line numberDiff line change
@@ -798,21 +798,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
798798
Runtime::kCompileOptimized_Concurrent);
799799

800800
{
801-
// Otherwise, the marker is InOptimizationQueue.
801+
// Otherwise, the marker is InOptimizationQueue, so fall through hoping
802+
// that an interrupt will eventually update the slot with optimized code.
802803
if (FLAG_debug_code) {
803804
__ SmiCompare(optimized_code_entry,
804805
Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
805806
__ Assert(equal, kExpectedOptimizationSentinel);
806807
}
807-
808-
// Checking whether the queued function is ready for install is optional,
809-
// since we come across interrupts and stack checks elsewhere. However,
810-
// not checking may delay installing ready functions, and always checking
811-
// would be quite expensive. A good compromise is to first check against
812-
// stack limit as a cue for an interrupt signal.
813-
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
814-
__ j(above_equal, &fallthrough);
815-
GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
808+
__ jmp(&fallthrough);
816809
}
817810
}
818811

deps/v8/src/runtime/runtime-compiler.cc

-21
Original file line numberDiff line numberDiff line change
@@ -340,27 +340,6 @@ RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
340340
return NULL;
341341
}
342342

343-
344-
RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
345-
HandleScope scope(isolate);
346-
DCHECK_EQ(1, args.length());
347-
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
348-
349-
// First check if this is a real stack overflow.
350-
StackLimitCheck check(isolate);
351-
if (check.JsHasOverflowed(kStackSpaceRequiredForCompilation * KB)) {
352-
return isolate->StackOverflow();
353-
}
354-
355-
// Only try to install optimized functions if the interrupt was InstallCode.
356-
if (isolate->stack_guard()->CheckAndClearInstallCode()) {
357-
isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
358-
}
359-
360-
return (function->IsOptimized()) ? function->code()
361-
: function->shared()->code();
362-
}
363-
364343
static Object* CompileGlobalEval(Isolate* isolate, Handle<String> source,
365344
Handle<SharedFunctionInfo> outer_info,
366345
LanguageMode language_mode,

deps/v8/src/runtime/runtime.h

-1
Original file line numberDiff line numberDiff line change
@@ -115,7 +115,6 @@ namespace internal {
115115
F(NotifyStubFailure, 0, 1) \
116116
F(NotifyDeoptimized, 1, 1) \
117117
F(CompileForOnStackReplacement, 1, 1) \
118-
F(TryInstallOptimizedCode, 1, 1) \
119118
F(ResolvePossiblyDirectEval, 6, 1) \
120119
F(InstantiateAsmJs, 4, 1)
121120

0 commit comments

Comments (0)