@@ -1341,6 +1341,70 @@ struct AsyncifyAssertInNonInstrumented : public Pass {
13411341 Module* module ;
13421342};
13431343
1344+ struct AsyncifyUnwindWalker
1345+ : WalkerPass<ExpressionStackWalker<AsyncifyUnwindWalker>> {
1346+ Function* function;
1347+ Module* module ;
1348+
1349+ // Adds a check for Call that is inside a Catch block (we do not handle
1350+ // unwinding there).
1351+ template <typename T> void replaceCallWithCheck (T* call) {
1352+ auto builder = std::make_unique<Builder>(*module );
1353+ auto check = builder->makeIf (
1354+ builder->makeBinary (NeInt32,
1355+ builder->makeGlobalGet (ASYNCIFY_STATE, Type::i32 ),
1356+ builder->makeConst (int32_t (State::Normal))),
1357+ builder->makeUnreachable ());
1358+ if (call->type .isConcrete ()) {
1359+ auto temp = builder->addVar (function, call->type );
1360+ replaceCurrent (builder->makeBlock (
1361+ {
1362+ builder->makeLocalSet (temp, call),
1363+ check,
1364+ builder->makeLocalGet (temp, call->type ),
1365+ },
1366+ call->type ));
1367+ } else {
1368+ replaceCurrent (builder->makeBlock (
1369+ {
1370+ call,
1371+ check,
1372+ },
1373+ call->type ));
1374+ }
1375+ }
1376+
1377+ template <typename T> void visitCallLike (T* curr) {
1378+ assert (!expressionStack.empty ());
1379+ // A return_call (curr->isReturn) can be ignored here: It returns first,
1380+ // leaving the Catch, before calling.
1381+ if (curr->isReturn ) {
1382+ return ;
1383+ }
1384+ // Go up the stack and see if we are in a Catch.
1385+ Index i = expressionStack.size () - 1 ;
1386+ while (i > 0 ) {
1387+ auto * expr = expressionStack[i];
1388+ if (Try* aTry = expr->template dynCast <Try>()) {
1389+ // check if curr is inside body of aTry (which is safe),
1390+ // otherwise do replace a call
1391+ assert (i + 1 < expressionStack.size ());
1392+ if (expressionStack[i + 1 ] != aTry->body ) {
1393+ replaceCallWithCheck (curr);
1394+ }
1395+ break ;
1396+ }
1397+ i--;
1398+ }
1399+ }
1400+
1401+ void visitCall (Call* curr) { visitCallLike (curr); }
1402+
1403+ void visitCallRef (CallRef* curr) { visitCallLike (curr); }
1404+
1405+ void visitCallIndirect (CallIndirect* curr) { visitCallLike (curr); }
1406+ };
1407+
13441408struct AsyncifyAssertUnwindCorrectness : Pass {
13451409 bool isFunctionParallel () override { return true ; }
13461410
@@ -1357,63 +1421,7 @@ struct AsyncifyAssertUnwindCorrectness : Pass {
13571421 }
13581422
13591423 void runOnFunction (Module* module_, Function* function) override {
1360- struct UnwindWalker : WalkerPass<ExpressionStackWalker<UnwindWalker>> {
1361- Function* function;
1362- Module* module ;
1363-
1364- // Adds a check for Call that is inside a Catch block (we do not handle unwinding there).
1365- void replaceCallWithCheck (Call* call) {
1366- auto builder = std::make_unique<Builder>(*module );
1367- auto check = builder->makeIf (
1368- builder->makeBinary (NeInt32,
1369- builder->makeGlobalGet (ASYNCIFY_STATE, Type::i32 ),
1370- builder->makeConst (int32_t (State::Normal))),
1371- builder->makeUnreachable ());
1372- if (call->type .isConcrete ()) {
1373- auto temp = builder->addVar (function, call->type );
1374- replaceCurrent (builder->makeBlock (
1375- {
1376- builder->makeLocalSet (temp, call),
1377- check,
1378- builder->makeLocalGet (temp, call->type ),
1379- },
1380- call->type ));
1381- } else {
1382- replaceCurrent (builder->makeBlock (
1383- {
1384- call,
1385- check,
1386- },
1387- call->type ));
1388- }
1389- }
1390-
1391- void visitCall (Call* curr) {
1392- assert (!expressionStack.empty ());
1393- // A return_call (curr->isReturn) can be ignored here: It returns first,
1394- // leaving the Catch, before calling.
1395- if (curr->isReturn ) {
1396- return ;
1397- }
1398- // Go up the stack and see if we are in a Catch.
1399- Index i = expressionStack.size () - 1 ;
1400- while (i > 0 ) {
1401- auto * expr = expressionStack[i];
1402- if (Try* aTry = expr->template dynCast <Try>()) {
1403- // check if curr is inside body of aTry (which is safe),
1404- // otherwise do replace a call
1405- assert (i + 1 < expressionStack.size ());
1406- if (expressionStack[i + 1 ] != aTry->body ) {
1407- replaceCallWithCheck (curr);
1408- }
1409- break ;
1410- }
1411- i--;
1412- }
1413- };
1414- };
1415-
1416- UnwindWalker walker;
1424+ AsyncifyUnwindWalker walker;
14171425 walker.function = function;
14181426 walker.module = module_;
14191427 walker.walk (function->body );
@@ -1859,40 +1867,40 @@ struct Asyncify : public Pass {
18591867 runner.setValidateGlobally (false );
18601868 runner.run ();
18611869 }
1870+ if (asserts) {
18621871 // Add asserts in non-instrumented code. Note we do not use an
18631872 // instrumented pass runner here as we do want to run on all functions.
18641873 PassRunner runner (module );
1865- if (asserts) {
1866- runner.add (std::make_unique<AsyncifyAssertInNonInstrumented>(
1867- &analyzer, pointerType, asyncifyMemory));
1868- runner.add (
1869- std::make_unique<AsyncifyAssertUnwindCorrectness>(&analyzer, module ));
1870- }
1874+ runner.add (std::make_unique<AsyncifyAssertInNonInstrumented>(
1875+ &analyzer, pointerType, asyncifyMemory));
1876+ runner.add (
1877+ std::make_unique<AsyncifyAssertUnwindCorrectness>(&analyzer, module ));
18711878 runner.setIsNested (true );
18721879 runner.setValidateGlobally (false );
18731880 runner.run ();
1874- // Next, add local saving/restoring logic. We optimize before doing this,
1875- // to undo the extra code generated by flattening, and to arrive at the
1876- // minimal amount of locals (which is important as we must save and
1877- // restore those locals). We also and optimize after as well to simplify
1878- // the code as much as possible.
1879- {
1880- PassUtils::FilteredPassRunner runner (module , instrumentedFuncs);
1881- if (optimize) {
1882- runner.addDefaultFunctionOptimizationPasses ();
1883- }
1884- runner.add (std::make_unique<AsyncifyLocals>(
1885- &analyzer, pointerType, asyncifyMemory));
1886- if (optimize) {
1887- runner.addDefaultFunctionOptimizationPasses ();
1888- }
1889- runner.setIsNested (true );
1890- runner.setValidateGlobally (false );
1891- runner.run ();
1881+ }
1882+ // Next, add local saving/restoring logic. We optimize before doing this,
1883+ // to undo the extra code generated by flattening, and to arrive at the
1884+ // minimal amount of locals (which is important as we must save and
1885+ // restore those locals). We also optimize afterwards to simplify
1886+ // the code as much as possible.
1887+ {
1888+ PassUtils::FilteredPassRunner runner (module , instrumentedFuncs);
1889+ if (optimize) {
1890+ runner.addDefaultFunctionOptimizationPasses ();
18921891 }
1892+ runner.add (std::make_unique<AsyncifyLocals>(
1893+ &analyzer, pointerType, asyncifyMemory));
1894+ if (optimize) {
1895+ runner.addDefaultFunctionOptimizationPasses ();
1896+ }
1897+ runner.setIsNested (true );
1898+ runner.setValidateGlobally (false );
1899+ runner.run ();
1900+ }
18931901 // Finally, add function support (that should not have been seen by
18941902 // the previous passes).
1895- addFunctions (module );
1903+ addFunctions (module );
18961904 }
18971905
18981906private:
0 commit comments