Changeset 262932 in webkit
- Timestamp: Jun 11, 2020, 5:29:30 PM (5 years ago)
- Location: trunk/Source/JavaScriptCore
- Files: 4 edited
trunk/Source/JavaScriptCore/ChangeLog
(r262928 → r262932) New entry added at the top of the file:

2020-06-11  Saam Barati  <sbarati@apple.com>

        Linear Scan uses the wrong Interval for spills for tmps with roles of early def or late use
        https://bugs.webkit.org/show_bug.cgi?id=213055
        <rdar://problem/59874018>

        Reviewed by Yusuke Suzuki.

        There was a bug in linear scan when computing the live range interval for
        spill tmps that had early defs or late uses. When linear scan spills a tmp,
        it creates a new tmp that it loads to and stores from, replaces the old tmp
        with the new tmp, and emits loads/stores around the pertinent instructions.
        The live interval for such tmps is small by nature: it's contained in the
        interval for the instruction itself. However, we used to build this interval
        purely based on the original tmp's arg timing. So, for example, consider a
        program like this:

            RandoInsn: LateUse:Tmp1, Use:Tmp2, [early = N, late = N+1]

        Let's say that Tmp1's last use is RandoInsn, and that it had a def before
        RandoInsn; its live range will therefore be something like [J, N+1], where
        J < N. Now imagine we spilled Tmp1 for some reason and rewrote the program
        to be:

            Move Addr(spill for Tmp1), TmpSpill
            RandoInsn: LateUse:TmpSpill, Use:Tmp2, [early = N, late = N+1]

        We used to incorrectly mark the live range for TmpSpill as just [N+1, N+2).
        The bug is that this neglects that TmpSpill actually has an earlier def, at
        [N, N+1), from the spill load. So the live range for TmpSpill was wrong.
        This could incorrectly lead us to allocate Tmp2 and TmpSpill to the same
        register, since their live ranges may not intersect if Tmp2 dies at
        RandoInsn.

        We also had the symmetric bug for EarlyDefs: we wouldn't account for the
        spill store that happens after an instruction like RandoInsn.

        The fix is to account for the loads/stores of spill tmps when assigning
        them a live range.

        This patch contains standalone tests in testair. It also fixes crashes we
        saw when running B3O1 tests using typed arrays on arm64e, since we had
        patchpoints that use LateUse for signing and auth.

        * b3/B3Procedure.h:
        * b3/air/AirAllocateRegistersAndStackByLinearScan.cpp:
        * b3/air/testair.cpp:
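To make the interval reasoning in the entry above concrete, here is a minimal standalone sketch (not WebKit code: SimpleInterval and the literal positions are simplified assumptions standing in for Air's Interval and instruction indices, using the half-open [begin, end) convention described above):

    #include <cassert>
    #include <cstddef>

    // Minimal stand-in for Air's Interval: a half-open range [begin, end) of
    // interleaved instruction positions, where an instruction at index i owns
    // early = i and late = i + 1.
    struct SimpleInterval {
        size_t begin;
        size_t end;

        bool overlaps(SimpleInterval other) const
        {
            return begin < other.end && other.begin < end;
        }
    };

    int main()
    {
        size_t n = 10; // "N", the early position of RandoInsn in the example above

        // Old (buggy) interval for the spilled LateUse tmp: only the late slot.
        SimpleInterval buggyTmpSpill { n + 1, n + 2 };

        // Fixed interval: the spill load emitted before RandoInsn defines TmpSpill
        // at the early slot, so the range must cover [N, N+2).
        SimpleInterval fixedTmpSpill { n, n + 2 };

        // Tmp2 is a plain Use that dies at RandoInsn: it lives up to the early slot.
        SimpleInterval tmp2 { n - 5, n + 1 };

        // With the buggy range, TmpSpill and Tmp2 look disjoint, so linear scan
        // could hand them the same register even though both are live at RandoInsn.
        assert(!buggyTmpSpill.overlaps(tmp2));

        // With the fixed range they conflict, as they must.
        assert(fixedTmpSpill.overlaps(tmp2));
        return 0;
    }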
trunk/Source/JavaScriptCore/b3/B3Procedure.h
(r260984 → r262932)

     // bits is a bitwise_cast of the constant you want.
-    Value* addConstant(Origin, Type, uint64_t bits);
+    JS_EXPORT_PRIVATE Value* addConstant(Origin, Type, uint64_t bits);
 
     // You're guaranteed that bottom is zero.
trunk/Source/JavaScriptCore/b3/air/AirAllocateRegistersAndStackByLinearScan.cpp
(r261755 → r262932)

     }
 
-    Interval interval(size_t indexOfEarly, Arg::Timing timing)
+    static Interval earlyInterval(size_t indexOfEarly)
+    {
+        return Interval(indexOfEarly);
+    }
+
+    static Interval lateInterval(size_t indexOfEarly)
+    {
+        return Interval(indexOfEarly + 1);
+    }
+
+    static Interval earlyAndLateInterval(size_t indexOfEarly)
+    {
+        return earlyInterval(indexOfEarly) | lateInterval(indexOfEarly);
+    }
+
+    static Interval interval(size_t indexOfEarly, Arg::Timing timing)
     {
         switch (timing) {
         case Arg::OnlyEarly:
-            return Interval(indexOfEarly);
+            return earlyInterval(indexOfEarly);
         case Arg::OnlyLate:
-            return Interval(indexOfEarly + 1);
+            return lateInterval(indexOfEarly);
         case Arg::EarlyAndLate:
-            return Interval(indexOfEarly, indexOfEarly + 2);
+            return earlyAndLateInterval(indexOfEarly);
+        }
+        ASSERT_NOT_REACHED();
+        return Interval();
+    }
+
+    static Interval intervalForSpill(size_t indexOfEarly, Arg::Role role)
+    {
+        Arg::Timing timing = Arg::timing(role);
+        switch (timing) {
+        case Arg::OnlyEarly:
+            if (Arg::isAnyDef(role))
+                return earlyAndLateInterval(indexOfEarly); // We have a spill store after this insn.
+            return earlyInterval(indexOfEarly);
+        case Arg::OnlyLate:
+            if (Arg::isAnyUse(role))
+                return earlyAndLateInterval(indexOfEarly); // We had a spill load before this insn.
+            return lateInterval(indexOfEarly);
+        case Arg::EarlyAndLate:
+            return earlyAndLateInterval(indexOfEarly);
         }
         ASSERT_NOT_REACHED();

…

             return;
         Opcode move = bank == GP ? Move : MoveDouble;
-        tmp = addSpillTmpWithInterval(bank, interval(indexOfEarly, Arg::timing(role)));
+        tmp = addSpillTmpWithInterval(bank, intervalForSpill(indexOfEarly, role));
         if (role == Arg::Scratch)
             return;
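As a rough illustration of what the new role-based mapping computes, here is a standalone sketch (not the real API: the Role enum, Range alias, and spillRange name below are simplified stand-ins for Air's Arg::Role, Interval, and intervalForSpill):

    #include <cassert>
    #include <cstddef>
    #include <utility>

    // Simplified stand-ins: a half-open [begin, end) pair models the interval.
    enum class Role { Use, Def, LateUse, EarlyDef };
    using Range = std::pair<size_t, size_t>;

    // Mirrors the idea of intervalForSpill: a spill load before the instruction
    // adds an early def, and a spill store after it adds a late use, so LateUse
    // and EarlyDef spill tmps must span both slots.
    Range spillRange(size_t indexOfEarly, Role role)
    {
        switch (role) {
        case Role::Use:      // load before the insn; tmp dies at the early slot
            return { indexOfEarly, indexOfEarly + 1 };
        case Role::Def:      // store after the insn; tmp is born at the late slot
            return { indexOfEarly + 1, indexOfEarly + 2 };
        case Role::LateUse:  // load before the insn, so also an early def
        case Role::EarlyDef: // store after the insn, so also a late use
            return { indexOfEarly, indexOfEarly + 2 };
        }
        return { 0, 0 };
    }

    int main()
    {
        size_t n = 10;
        assert(spillRange(n, Role::Use) == Range(n, n + 1));
        assert(spillRange(n, Role::Def) == Range(n + 1, n + 2));
        assert(spillRange(n, Role::LateUse) == Range(n, n + 2));  // was [n+1, n+2) before the fix
        assert(spillRange(n, Role::EarlyDef) == Range(n, n + 2)); // was [n, n+1) before the fix
        return 0;
    }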
trunk/Source/JavaScriptCore/b3/air/testair.cpp
(r261755 → r262932)

         CHECK(runResult == static_cast<unsigned>(42 + (42 * (reg == GPRInfo::returnValueGPR))));
     }
 }
+
+void testLinearScanSpillRangesLateUse()
+{
+    B3::Procedure proc;
+    Code& code = proc.code();
+
+    BasicBlock* root = code.addBlock();
+
+    B3::Air::Special* patchpointSpecial = code.addSpecial(makeUnique<B3::PatchpointSpecial>());
+
+    Vector<Tmp> tmps;
+    for (unsigned i = 0; i < 100; ++i) {
+        Tmp tmp = code.newTmp(GP);
+        tmps.append(tmp);
+        root->append(Move, nullptr, Arg::bigImm(i), tmp);
+    }
+
+    for (unsigned i = 0; i + 1 < tmps.size(); ++i) {
+        Tmp tmp1 = tmps[i];
+        Tmp tmp2 = tmps[i + 1];
+
+        B3::Value* dummyValue = proc.addConstant(B3::Origin(), B3::Int64, 0);
+        B3::Value* dummyValue2 = proc.addConstant(B3::Origin(), B3::Int64, 0);
+
+        B3::PatchpointValue* patchpoint = proc.add<B3::PatchpointValue>(B3::Void, B3::Origin());
+        patchpoint->append(dummyValue, B3::ValueRep::SomeRegister);
+        patchpoint->append(dummyValue2, B3::ValueRep::SomeLateRegister);
+        patchpoint->setGenerator([=] (CCallHelpers& jit, const B3::StackmapGenerationParams& params) {
+            RELEASE_ASSERT(params[0].gpr() != params[1].gpr());
+
+            auto good = jit.branch32(CCallHelpers::Equal, params[0].gpr(), CCallHelpers::TrustedImm32(i));
+            jit.breakpoint();
+            good.link(&jit);
+
+            good = jit.branch32(CCallHelpers::Equal, params[1].gpr(), CCallHelpers::TrustedImm32(i + 1));
+            jit.breakpoint();
+            good.link(&jit);
+
+        });
+
+        Inst inst(Patch, patchpoint, Arg::special(patchpointSpecial));
+        inst.args.append(tmp1);
+        inst.args.append(tmp2);
+
+        root->append(inst);
+    }
+
+    root->append(Move32, nullptr, tmps.last(), Tmp(GPRInfo::returnValueGPR));
+    root->append(Ret32, nullptr, Tmp(GPRInfo::returnValueGPR));
+
+    auto runResult = compileAndRun<uint32_t>(proc);
+    CHECK(runResult == 99);
+}
+
+void testLinearScanSpillRangesEarlyDef()
+{
+    B3::Procedure proc;
+    Code& code = proc.code();
+
+    BasicBlock* root = code.addBlock();
+
+    B3::Air::Special* patchpointSpecial = code.addSpecial(makeUnique<B3::PatchpointSpecial>());
+
+    Vector<Tmp> tmps;
+    for (unsigned i = 0; i < 100; ++i) {
+        Tmp tmp = code.newTmp(GP);
+        tmps.append(tmp);
+        if (!i)
+            root->append(Move, nullptr, Arg::bigImm(i), tmp);
+    }
+
+    for (unsigned i = 0; i + 1 < tmps.size(); ++i) {
+        Tmp tmp1 = tmps[i];
+        Tmp tmp2 = tmps[i + 1];
+
+        B3::Value* dummyValue = proc.addConstant(B3::Origin(), B3::Int64, 0);
+
+        B3::PatchpointValue* patchpoint = proc.add<B3::PatchpointValue>(B3::Int32, B3::Origin());
+        patchpoint->resultConstraints = { B3::ValueRep::SomeEarlyRegister };
+        patchpoint->append(dummyValue, B3::ValueRep::SomeLateRegister);
+        patchpoint->setGenerator([=] (CCallHelpers& jit, const B3::StackmapGenerationParams& params) {
+            RELEASE_ASSERT(params[0].gpr() != params[1].gpr());
+
+            auto good = jit.branch32(CCallHelpers::Equal, params[1].gpr(), CCallHelpers::TrustedImm32(i));
+            jit.breakpoint();
+            good.link(&jit);
+
+            jit.move(CCallHelpers::TrustedImm32(i + 1), params[0].gpr());
+        });
+
+        Inst inst(Patch, patchpoint, Arg::special(patchpointSpecial));
+        inst.args.append(tmp2); // def
+        inst.args.append(tmp1); // use
+
+        root->append(inst);
+    }
+
+    // Make all tmps live till the end
+    for (unsigned i = 0; i < tmps.size(); ++i) {
+        Tmp tmp = tmps[i];
+
+        B3::Value* dummyValue = proc.addConstant(B3::Origin(), B3::Int64, 0);
+
+        B3::PatchpointValue* patchpoint = proc.add<B3::PatchpointValue>(B3::Void, B3::Origin());
+        patchpoint->append(dummyValue, B3::ValueRep::SomeRegister);
+        patchpoint->setGenerator([=] (CCallHelpers& jit, const B3::StackmapGenerationParams& params) {
+            auto good = jit.branch32(CCallHelpers::Equal, params[0].gpr(), CCallHelpers::TrustedImm32(i));
+            jit.breakpoint();
+            good.link(&jit);
+        });
+
+        Inst inst(Patch, patchpoint, Arg::special(patchpointSpecial));
+        inst.args.append(tmp);
+        root->append(inst);
+    }
+
+    root->append(Move32, nullptr, tmps.last(), Tmp(GPRInfo::returnValueGPR));
+    root->append(Ret32, nullptr, Tmp(GPRInfo::returnValueGPR));
+
+    auto runResult = compileAndRun<uint32_t>(proc);
+    CHECK(runResult == 99);
+}

…

     RUN(testElideMoveThenRealloc());
 
+    RUN(testLinearScanSpillRangesLateUse());
+    RUN(testLinearScanSpillRangesEarlyDef());
+
     if (tasks.isEmpty())
         usage();