WebKit Bugzilla
Attachment 339038 Details for Bug 185060: [negative result] B3 should have generalized path specialization
Description: close to written
Filename: blah.patch
MIME Type: text/plain
Creator: Filip Pizlo
Created: 2018-04-27 16:40:44 PDT
Size: 35.77 KB
Flags: patch, obsolete
Index: Source/JavaScriptCore/b3/B3BreakCriticalEdges.cpp
===================================================================
--- Source/JavaScriptCore/b3/B3BreakCriticalEdges.cpp (revision 230970)
+++ Source/JavaScriptCore/b3/B3BreakCriticalEdges.cpp (working copy)
@@ -57,8 +57,8 @@ void breakCriticalEdges(Procedure& proc)
         }
     }
 
-    insertionSet.execute();
-    proc.invalidateCFG();
+    if (insertionSet.execute())
+        proc.invalidateCFG();
 }
 
 } } // namespace JSC::B3
Index: Source/JavaScriptCore/b3/B3Procedure.cpp
===================================================================
--- Source/JavaScriptCore/b3/B3Procedure.cpp (revision 230970)
+++ Source/JavaScriptCore/b3/B3Procedure.cpp (working copy)
@@ -93,7 +93,6 @@ Value* Procedure::clone(Value* value)
     return m_values.add(WTFMove(clone));
 }
 
-
 Value* Procedure::addIntConstant(Origin origin, Type type, int64_t value)
 {
     switch (type) {
Index: Source/JavaScriptCore/b3/B3Procedure.h
===================================================================
--- Source/JavaScriptCore/b3/B3Procedure.h (revision 230970)
+++ Source/JavaScriptCore/b3/B3Procedure.h (working copy)
@@ -142,6 +142,8 @@ public:
     unsigned size() const { return m_blocks.size(); }
     BasicBlock* at(unsigned index) const { return m_blocks[index].get(); }
     BasicBlock* operator[](unsigned index) const { return at(index); }
+
+    std::unique_ptr<BasicBlock> takeBlock(unsigned index) { return WTFMove(m_blocks[index]); }
 
     typedef WTF::IndexedContainerIterator<Procedure> iterator;
 
Index: Source/JavaScriptCore/b3/B3SpecializeBranches.cpp
===================================================================
--- Source/JavaScriptCore/b3/B3SpecializeBranches.cpp (nonexistent)
+++ Source/JavaScriptCore/b3/B3SpecializeBranches.cpp (working copy)
@@ -0,0 +1,577 @@
+/*
+ * Copyright (C) 2018 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "B3SpecializeBranches.h"
+
+#include "B3ValueInBlock.h"
+
+#if ENABLE(B3_JIT)
+
+namespace JSC { namespace B3 {
+
+class SpecializeBranches {
+public:
+    SpecializeBranches(Procedure& proc)
+        : m_proc(proc)
+    {
+    }
+
+    bool run()
+    {
+        breakCriticalEdges(m_proc);
+        createHoistingTargets();
+        m_dominators = &m_proc.dominators();
+        m_proc.resetValueOwners();
+        findBranches();
+        findBlocksDuplicatedForPredicates();
+        createTracesByDominantRedundancy();
+        createTracesByHoisting();
+        chooseTraceOrder();
+        simulateDuplicationAndPruneTraces();
+        demoteAppropriateValues();
+        duplicateTraces();
+    }
+
+private:
+    void createHoistingTargets()
+    {
+        // All lower-frequency predecessors of a block should jump to that block via a hoisting
+        // target block. If we don't use these blocks, they'll get collected by reduceStrength.
+
+        BlockInsertionSet insertionSet(m_proc);
+
+        for (BasicBlock* block : m_proc) {
+            unsigned numberOfLowerFrequencyPredecessors = 0;
+            double maxFrequencyOfLowerFrequencyPredecessors = 0;
+            for (BasicBlock* predecessor : block->predecessors()) {
+                if (predecessor->frequency() >= block->frequency())
+                    continue;
+
+                numberOfLowerFrequencyPredecessors++;
+                maxFrequencyOfLowerFrequencyPredecessors =
+                    std::max(maxFrequencyOfLowerFrequencyPredecessors, predecessor->frequency());
+            }
+
+            if (numberOfLowerFrequencyPredecessors < 2)
+                continue;
+
+            BasicBlock* hoistingTarget = insertionSet.insertBefore(block, maxFrequencyOfLowerFrequencyPredecessors);
+            hoistingTarget->appendNew<Value>(m_proc, Jump, block);
+            hoistingTarget->setSuccessors(FrequentedBlock(block));
+            for (BasicBlock* predecessor : block->predecessors()) {
+                if (predecessor->frequency() >= block->frequency())
+                    continue;
+
+                predecessor->replaceSuccessor(block, hoistingTarget);
+                hoistingTarget->addPredecessor(predecessor);
+                block->removePredecessor(predecessor);
+            }
+            block->addPredecessor(hoistingTarget);
+        }
+
+        if (insertionSet.execute())
+            m_proc.invalidateCFG();
+    }
+
+    void findBranches()
+    {
+        for (BasicBlock* block : m_proc) {
+            for (unsigned index = block->size(); index--;) {
+                Value* value = block->at(index);
+                switch (value->opcode()) {
+                case Branch:
+                case Check: {
+                    Value* predicate = value->child(0);
+
+                    m_predicateToBranches.add(predicate, Vector<ValueInBlock>()).iterator->value.append(ValueInBlock(block, index));
+
+                    // Each block can only have one "branch", since:
+                    //
+                    // 1) This redundancy would have already been fixed:
+                    //
+                    //        BB#foo:
+                    //            Check(@p)
+                    //            Check(@p)
+                    //
+                    // 2) This redundancy would have already been fixed:
+                    //
+                    //        BB#foo:
+                    //            Check(@p)
+                    //            Branch(@p)
+                    //
+                    // 3) This redundancy is not valid IR:
+                    //
+                    //        BB#foo:
+                    //            Branch(@p)
+                    //            Check(@p)
+                    //
+                    // 4) This redundancy is not valid IR:
+                    //
+                    //        BB#foo:
+                    //            Branch(@p)
+                    //            Branch(@p)
+                    //
+                    // Of course, the first two assumptions could be violated because of a missed
+                    // optimization opportunity. This phase is prepared for that.
+                    m_blocksWithBranches.add(std::make_pair(block, predicate));
+                    break;
+                }
+
+                default:
+                    break;
+                }
+            }
+        }
+    }
+
+    void findBlocksDuplicatedForPredicates()
+    {
+        for (BasicBlock* block : m_proc) {
+            if (block->last()->opcode() != Branch)
+                continue;
+
+            Value* predicate = block->last()->child(0);
+
+            auto handleSuccessor = [&] (BasicBlock* start, bool predicateValue) {
+                m_dominators->forAllBlocksDominatedBy(
+                    start,
+                    [&] (BasicBlock* duplicatedBlock) {
+                        // It's possible that this will have collisions if a branch dominated by
+                        // another branch wasn't eliminated by foldPathConstants.
+                        m_duplicatedForPredicate.add(std::make_pair(duplicatedBlock, predicate), predicateValue);
+                    });
+            };
+
+            handleSuccessor(block->successorBlock(0), true);
+            handleSuccessor(block->successorBlock(1), false);
+        }
+    }
+
+    void createTracesByDominantRedundancy()
+    {
+        for (auto& entry : m_predicateToBranches) {
+            Value* predicate = entry.key;
+            Vector<ValueInBlock>& branches = entry.value;
+
+            for (ValueInBlock insertionPoint : branches) {
+                if (insertionPoint.value()->opcode() != Branch)
+                    continue;
+
+                for (ValueInBlock branchPoint : branches) {
+                    if (!m_dominators->dominates(insertionPoint.block(), branchPoint.block()))
+                        continue;
+
+                    if (insertionPoint.block() == branchPoint.block()) {
+                        if (insertionPoint.index() > branchPoint.index())
+                            continue;
+
+                        RELEASE_ASSERT(insertionPoint.index() == branchPoint.index());
+                    }
+
+                    m_trace = m_traces.add();
+                    m_trace->predicate = predicate;
+                    m_trace->insertionPoint = insertionPoint;
+                    m_trace->branchPoint = branchPoint;
+                    m_trace->insertedBranchCost = insertionPoint.block()->frequency();
+
+                    if (insertionPoint.value() == branchPoint.value()) {
+                        m_trace->spaceCost = 1;
+                        m_trace->benefit = insertionPoint.block()->frequency();
+                    } else {
+                        m_trace->spaceCost = branchPoint.index();
+                        m_trace->benefit = branchPoint.block()->frequency() + insertionPoint.block()->frequency();
+
+                        RELEASE_ASSERT(insertionPoint.block() != branchPoint.block());
+
+                        BlockWorklist worklist;
+                        worklist.pushAll(branchPoint.block()->predecessors());
+                        closeTrace(worklist);
+                    }
+                }
+            }
+        }
+    }
+
+    void createTracesByHoisting()
+    {
+        Vector<Trace*> tracesToProcess;
+        for (Trace* trace : m_traces)
+            tracesToProcess.append(trace);
+        while (!tracesToProcess.isEmpty()) {
+            Trace* oldTrace = tracesToProcess.takeLast();
+            if (oldTrace->insertionPoint.block()->frequency() <= oldTrace->predicate->owner->frequency())
+                continue;
+
+            BasicBlock* newInsertionPointBlock = oldTrace->insertionPoint.block();
+            while (newInsertionPointBlock->frequency() >= oldTrace->insertionPoint.block()->frequency())
+                newInsertionPointBlock = m_dominators->idom(newInsertionPointBlock);
+
+            if (!m_dominators->dominates(oldTrace->predicate->owner, newInsertionPointBlock))
+                continue;
+
+            if (newInsertionPointBlock == oldTrace->insertionPoint.block())
+                continue;
+
+            // Hoisting targets must have jumps at the end of them because we created hoisting
+            // targets that have jumps.
+            RELEASE_ASSERT(newInsertionPointBlock->last()->opcode() == Jump);
+
+            m_trace = m_traces.add(*oldTrace);
+            m_trace->insertionPoint = ValueInBlock(
+                newInsertionPointBlock,
+                newInsertionPointBlock->size() - 1);
+
+            BlockWorklist worklist;
+            for (BasicBlock* block : oldTrace->blocks)
+                worklist.pretendToHaveSeen(block);
+
+            worklist.pushAll(oldTrace->insertionPoint.block()->predecessors());
+            closeTrace(worklist);
+            changed = true;
+
+            tracesToProcess.append(m_trace);
+        }
+    }
+
+    void closeTrace(BlockWorklist& worklist)
+    {
+        while (BasicBlock* block = worklist.pop()) {
+            if (block == m_trace->insertionPoint.block())
+                continue;
+
+            m_trace->blocks.append(block);
+
+            if (block == m_trace->branchPoint.block()) {
+                // We only want to add the end of this block. Note, if we see this block,
+                // it's because it wasn't already in the blocks set. That means that oldTrace was
+                // a dominant redundancy trace rather than a hoisted trace. It also means
+                // that this block doesn't have any unaccounted-for branches.
+
+                m_trace->spaceCost += block->size() - (m_trace->branchPoint.index() + 1);
+                continue;
+            }
+
+            if (!m_duplicatedForPredicate.contains(std::make_pair(block, m_trace->predicate))) {
+                m_trace->spaceCost += block->size();
+
+                // This `if` is nested because if a duplicated block *did* have a
+                // branch for our predicate then it's probably because we missed an
+                // optimization opportunity in the last reduceStrength pass. That is
+                // most likely to happen because this redundancy was revealed only
+                // after some phase that ran after reduceStrength, like
+                // eliminateCommonSubexpressions. Therefore, a future pass of
+                // reduceStrength is very likely to remove a duplicated branch for
+                // our predicate without needing to specialize. Therefore, we don't
+                // want to count it as a benefit here. Also, we don't duplicate
+                // duplicated blocks, so we wouldn't be creating any benefit anyway.
+                if (m_blocksWithBranches.contains(std::make_pair(block, m_trace->predicate)))
+                    m_trace->benefit += block->frequency();
+            }
+
+            worklist.push(block->predecessors());
+        }
+    }
+
+    void chooseTraceOrder()
+    {
+        for (Trace* trace : m_traces) {
+            // We don't know anything about this heuristic yet.
+            if (trace->benefitOverCost() > 0.2)
+                m_tracesInOrder.append(trace);
+        }
+
+        std::sort(
+            m_tracesInOrder.begin(), m_tracesInOrder.end(),
+            [&] (Trace* left, Trace* right) -> bool {
+                // Sort in descending order of benefit-over-cost.
+                return left->benefitOverCost() > right->benefitOverCost();
+            });
+    }
+
+    void simulateDuplicationAndPruneTraces()
+    {
+        m_tracesInOrder.removeAllMatching(
+            [&] (Trace* trace) -> bool {
+                if (m_affectedBlocks.contains(trace->insertionPoint.block()))
+                    return true;
+                if (m_affectedBlocks.contains(trace->branchPoint.block()))
+                    return true;
+                for (BasicBlock* block : trace->blocks) {
+                    if (m_affectedBlocks.contains(block))
+                        return true;
+                }
+
+                m_affectedBlocks.add(trace->insertionPoint.block());
+                m_affectedBlocks.add(trace->branchPoint.block());
+                for (BasicBlock* block : trace->blocks)
+                    m_affectedBlocks.add(block);
+                return false;
+            });
+    }
+
+    void demoteAppropriateValues()
+    {
+        IndexSet<Value*> valuesToDemote;
+        for (BasicBlock* block : m_proc) {
+            for (Value* value : *block) {
+                if (value->opcode() == Phi && m_affectedBlocks.contains(block))
+                    valuesToDemote.add(value);
+                for (Value* child : value->children()) {
+                    if (child->owner != block && m_affectedBlocks.contains(child->owner))
+                        valuesToDemote.add(value);
+                }
+            }
+        }
+        demoteValues(m_proc, valuesToDemote);
+    }
+
+    void duplicateTraces()
+    {
+        BlockInsertionSet insertionSet(m_proc);
+
+        IndexMap<BasicBlock*, BasicBlock*> blockMap(m_proc.blocks().size());
+        IndexMap<Value*, Value*> valueMap(m_proc.values().size());
+
+        for (Trace* trace : m_tracesInOrder) {
+            m_trace = trace;
+
+            auto addBlockToDuplicate = [&] (BasicBlock* block) {
+                m_blocksToDuplicateList.append(block);
+                bool result = m_blocksToDuplicateSet.add(block);
+                RELEASE_ASSERT(result);
+            };
+
+            addBlockToDuplicate(m_trace->branchPoint.block());
+            for (BasicBlock* block : m_trace->blocks) {
+                if (block != m_trace->branchPoint.block())
+                    addBlockToDuplicate(block);
+            }
+
+            std::sort(
+                m_blocksToDuplicateList.begin(), m_blocksToDuplicateList.end(),
+                [&] (BasicBlock* left, BasicBlock* right) -> bool {
+                    return left->index() < right->index();
+                });
+
+            // FIXME: We're not really duplicating the branchPoint! We want to split it and then
+            // duplicate the first part. That's a little weird to fit into this framework. Maybe we
+            // split it before we create the duplicate set and all that. We'll also want a version of
+            // splitForward() that adds the new block at an index of our choice, so that we add it
+            // at the end or something.
+
+            // Order matters! We try to create a CFG that doesn't look like garbage. That's
+            // particularly important for the performance of linear scan. It doesn't matter for
+            // graph coloring though. It definitely matters for debugging the compiler.
+            BasicBlock* blockBeforeInsertionPoint = m_blocksToDuplicateList.last();
+            for (BasicBlock* block : m_blocksToDuplicateList) {
+                auto iter = m_duplicatedForPredicate.find(std::make_pair(block, m_trace->predicate));
+                if (iter != m_duplicatedForPredicate.end()) {
+                    // We don't duplicate then/else blocks. Note that because of how
+                    // m_duplicatedForPredicate works, this could be both a then and an else block if:
+                    // - This is dominated by (and duplicated for) a branch that is dominated by our
+                    //   insertion point, and a branch dominated by our insertion point.
+                    // - Our insertion point is dominated by (and duplicated for) another branch on
+                    //   our predicate.
+                    // But if this block is in the then and else case of the same predicate, then it
+                    // is unreachable. So, we can do whatever we want with it.
+
+                    // We leave then blocks where they are.
+                    if (iter->value)
+                        continue;
+
+                    // Move the location of the block in the program flow. Gotta keep the CFG sane.
+                    insertionSet.insertAfter(
+                        BlockInsertionSet::BlockInsertion(
+                            blockBeforeInsertionPoint->index(),
+                            m_proc.takeBlock(block->index())));
+                    continue;
+                }
+
+                BasicBlock* newBlock =
+                    insertionSet.insertAfter(blockBeforeInsertionPoint, block->frequency());
+                newBlock->predecessors() = block->predecessors();
+                newBlock->successors() = block->successors();
+                blockMap[block] = newBlock;
+            }
+
+            auto elseBlockFor = [&] (BasicBlock* block) -> BasicBlock* {
+                if (BasicBlock* result = blockMap[block])
+                    return result;
+                return block;
+            };
+
+            for (BasicBlock* block : m_blocksToDuplicateList) {
+                auto foldValueFor = [&] (BasicBlock* block, Value* value, bool predicateValue) {
+                    switch (value->opcode()) {
+                    case Branch:
+                        if (value->child(0) != m_trace->predicate)
+                            return;
+                        value->replaceWithJump(
+                            block, predicateValue ? block->taken() : block->notTaken());
+                        return;
+                    case Check:
+                        if (value->child(0) != m_trace->predicate)
+                            return;
+                        if (!predicateValue)
+                            value->replaceWithNop();
+                        return;
+                    default:
+                        return;
+                    }
+                };
+
+                auto rewireBlockFor = [&] (BasicBlock* block, bool predicateValue) {
+                    if (predicateValue) {
+                        // In the current rewiring we don't have to do anything for then blocks.
+                        return;
+                    }
+
+                    for (BasicBlock*& predecessor : block->predecessors()) {
+                        // We're duplicating code dominated by the insertion point. Therefore,
+                        // the predecessors should either be the insertion point, or it should be
+                        // a block in the set of duplicated blocks.
+                        if (predecessor == m_trace->insertionPoint.block())
+                            continue;
+
+                        RELEASE_ASSERT(m_blocksToDuplicateSet.contains(predecessor));
+                        predecessor = elseBlockFor(predecessor);
+                    }
+
+                    for (BasicBlock*& successor : block->successorBlocks()) {
+                        if (m_blocksToDuplicateSet.contains(successor))
+                            successor = elseBlockFor(successor);
+                        else
+                            successor->addPredecessor(block);
+                    }
+                };
+
+                auto iter = m_duplicatedForPredicate.find(std::make_pair(block, m_trace->predicate));
+                if (iter != m_duplicatedForPredicate.end()) {
+                    rewireBlockFor(block, iter->value);
+                    foldValueFor(block, block->last(), iter->value);
+                    continue;
+                }
+
+                BasicBlock* elseBlock = elseBlockFor(block);
+                rewireBlockFor(block, true);
+                rewireBlockFor(elseBlock, false);
+                for (Value* value : *block) {
+                    Value* clone = m_proc.clone(value);
+                    for (Value*& child : clone->children()) {
+                        if (Value* replacement = valueMap[child])
+                            child = replacement;
+                    }
+                    if (value->type() != Void)
+                        valueMap[value] = clone;
+                    elseBlock->append(clone);
+
+                    foldValueFor(block, value, true);
+                    foldValueFor(elseBlock, clone, false);
+                }
+            }
+
+            Value* insertionPointJump = m_trace->insertionPoint.block()->last();
+            switch (insertionPointJump->opcode()) {
+            case Branch:
+                RELEASE_ASSERT(insertionPointJump->child(0) == m_trace->predicate);
+                // There shouldn't be anything left to do if we're in this situation.
+                break;
+
+            case Jump:
+                m_trace->insertionPoint.block()->replaceLastWithNew<Value>(
+                    m_proc, Branch, insertionPointJump->origin(), m_trace->predicate);
+                m_trace->insertionPoint.block()->appendSuccessor(
+                    elseBlockFor(m_trace->insertionPoint.block()->successorBlock(0)));
+                break;
+
+            default:
+                RELEASE_ASSERT_NOT_REACHED();
+            }
+
+            while (!m_blocksToDuplicateList.isEmpty())
+                m_blocksToDuplicateSet.remove(m_blocksToDuplicateList.takeLast());
+        }
+
+        if (insertionSet.execute())
+            m_proc.invalidateCFG();
+    }
+
+    Procedure& m_proc;
+    Dominators* m_dominators { nullptr };
+
+    HashMap<Value*, Vector<ValueInBlock>> m_predicateToBranches;
+
+    HashSet<std::pair<BasicBlock*, Value*>> m_blocksWithBranches;
+
+    // We say that a block is duplicated for a branch if it's dominated by the successor of one of
+    // those branches. The bool value is the predicate's value when we are in that block.
+    HashMap<std::pair<BasicBlock*, Value*>, bool> m_duplicatedForPredicate;
+
+    Trace* m_trace;
+
+    struct Trace {
+        double benefitOverCost() const
+        {
+            // Super valuable:
+            // 1) Pair of branches with very little code to duplicate between them.
+            // 2) Branch inside of small loop.
+            //
+            // Not valuable:
+            // 3) Duplicating a large nested loop to hoist an inner branch.
+            //
+            // In case of (1), we expect spaceCost to be > 1 but the benefit to be 1.
+            //
+            // In case of (2), we expect spaceCost to be 5-20 but the benefit to be 9.
+            //
+            // In case of (3), we expect spaceCost to be >100 but the benefit to be >100.
+            return (benefit - insertedBranchCost) / spaceCost;
+        }
+
+        Value* predicate;
+        ValueInBlock insertionPoint; // FIXME: This should just be a BasicBlock*, since it always points to the branch.
+        ValueInBlock branchPoint;
+        double benefit { 0 };
+        double insertedBranchCost { 0 };
+        double spaceCost { 0 };
+        Vector<BasicBlock*> blocks; // The list of duplicated blocks not including the branchPoint.
+    };
+
+    Bag<Trace> m_traces;
+    Vector<Trace*> m_tracesInOrder;
+
+    IndexSet<BasicBlock*> m_affectedBlocks;
+
+    Vector<BasicBlock*> m_blocksToDuplicateList;
+    IndexSet<BasicBlock*> m_blocksToDuplicateSet;
+};
+
+bool specializeBranches(Procedure& proc)
+{
+    PhaseScope phaseScope(proc, "specializeBranches");
+    SpecializeBranches specializeBranches(proc);
+    return specializeBranches.run();
+}
+
+} } // namespace JSC::B3
+
+#endif // ENABLE(B3_JIT)
+
Index: Source/JavaScriptCore/b3/B3SpecializeBranches.h
===================================================================
--- Source/JavaScriptCore/b3/B3SpecializeBranches.h (nonexistent)
+++ Source/JavaScriptCore/b3/B3SpecializeBranches.h (working copy)
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2018 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if ENABLE(B3_JIT)
+
+namespace JSC { namespace B3 {
+
+class Procedure;
+
+// Duplicates code to remove branches. Each piece of code gets duplicated at most once, so there
+// is no exponential blow-up.
+
+bool specializeBranches(Procedure&);
+
+} } // namespace JSC::B3
+
+#endif // ENABLE(B3_JIT)
Index: Source/JavaScriptCore/b3/B3ValueInBlock.cpp
===================================================================
--- Source/JavaScriptCore/b3/B3ValueInBlock.cpp (nonexistent)
+++ Source/JavaScriptCore/b3/B3ValueInBlock.cpp (working copy)
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2018 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "B3ValueInBlock.h"
+
+#if ENABLE(B3_JIT)
+
+namespace JSC { namespace B3 {
+
+void ValueInBlock::dump(PrintStream& out) const
+{
+    if (*this)
+        out.print(pointerDump(m_block), ":", m_index, "->", value());
+    else
+        out.print("null");
+}
+
+} } // namespace JSC::B3
+
+#endif // ENABLE(B3_JIT)
+
Index: Source/JavaScriptCore/b3/B3ValueInBlock.h
===================================================================
--- Source/JavaScriptCore/b3/B3ValueInBlock.h (nonexistent)
+++ Source/JavaScriptCore/b3/B3ValueInBlock.h (working copy)
@@ -0,0 +1,110 @@
+/*
+ * Copyright (C) 2018 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if ENABLE(B3_JIT)
+
+#include "B3BasicBlock.h"
+#include "B3Value.h"
+
+namespace JSC { namespace B3 {
+
+class ValueInBlock {
+public:
+    ValueInBlock() { }
+
+    ValueInBlock(BasicBlock* block, unsigned index)
+        : m_block(block)
+        , m_index(index)
+    {
+    }
+
+    BasicBlock* block() const { return m_block; }
+    unsigned index() const { return m_index; }
+    Value* value() const { return m_block->at(m_index); }
+
+    bool operator==(const ValueInBlock& other) const
+    {
+        return m_block == other.m_block
+            && m_index == other.m_index;
+    }
+
+    bool operator!=(const ValueInBlock& other) const
+    {
+        return !(*this == other);
+    }
+
+    explicit operator bool() const
+    {
+        return *this != ValueInBlock();
+    }
+
+    void dump(PrintStream& out) const;
+
+    unsigned hash() const
+    {
+        return WTF::PtrHash<BasicBlock*>::hash(m_block) + m_index;
+    }
+
+    ValueInBlock(WTF::HashTableDeletedValueType)
+    {
+        m_index--;
+    }
+
+    bool isHashTableDeletedValue() const
+    {
+        return *this == ValueInBlock(WTF::HashTableDeletedValue);
+    }
+
+private:
+    BasicBlock* m_block { nullptr };
+    unsigned m_index { UINT_MAX };
+};
+
+struct ValueInBlockHash {
+    static unsigned hash(const ValueInBlock& key) { return key.hash(); }
+    static bool equal(const ValueInBlock& a, const ValueInBlock& b) { return a == b; }
+    static constexpr bool safeToCompareToEmptyOrDeleted = true;
+};
+
+} } // namespace JSC::B3
+
+namespace WTF {
+
+template<typename> struct DefaultHash;
+template<> struct DefaultHash<JSC::B3::ValueInBlock> {
+    typedef JSC::B3::ValueInBlockHash Hash;
+};
+
+template<typename> struct HashTraits;
+template<> struct HashTraits<JSC::B3::ValueInBlock> : public SimpleClassHashTraits<JSC::B3::ValueInBlock> {
+    static constexpr bool emptyValueIsZero = false;
+};
+
+} // namespace WTF
+
+#endif // ENABLE(B3_JIT)
+
Index: Source/WTF/wtf/BitVector.h
===================================================================
--- Source/WTF/wtf/BitVector.h (revision 230970)
+++ Source/WTF/wtf/BitVector.h (working copy)
@@ -110,6 +110,8 @@ public:
     // Like ensureSize(), but supports reducing the size of the bitvector.
     WTF_EXPORT_PRIVATE void resize(size_t numBits);
 
+    // Clears the set without freeing memory. If you want to clear the set and free memory, then
+    // you want *this = BitVector().
     WTF_EXPORT_PRIVATE void clearAll();
 
     bool quickGet(size_t bit) const
Index: Source/WTF/wtf/GraphNodeWorklist.h
===================================================================
--- Source/WTF/wtf/GraphNodeWorklist.h (revision 230970)
+++ Source/WTF/wtf/GraphNodeWorklist.h (working copy)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2015-2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -64,8 +64,10 @@ public:
 
     bool saw(Node node) { return m_seen.contains(node); }
 
+    bool pretendToHaveSeen(Node node) { return m_seen.add(node); }
+
     const Set& seen() const { return m_seen; }
-
+
 private:
     Set m_seen;
    Vector<Node, 16> m_stack;
Index: Source/WTF/wtf/IndexSet.h
===================================================================
--- Source/WTF/wtf/IndexSet.h (revision 230970)
+++ Source/WTF/wtf/IndexSet.h (working copy)
@@ -61,6 +61,12 @@ public:
     {
         return m_set.clear(IndexKeyType<T>::index(value));
     }
+
+    // Removes everything from the set without freeing memory.
+    void removeAll()
+    {
+        m_set.clearAll();
+    }
 
     bool contains(const T& value) const
     {
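The pruning heuristic in the patch comes down to the arithmetic in Trace::benefitOverCost(): a trace is worth specializing when the frequency recovered by removing the dominated branch, minus the frequency cost of the branch inserted at the insertion point, is large relative to the number of values that would be duplicated, and chooseTraceOrder() discards anything scoring 0.2 or below. The following standalone C++ sketch is not part of the patch; it just plugs assumed frequencies and sizes (illustrative numbers, not measurements) into that formula for the three cases the Trace comment describes.

// Standalone sketch (not from the patch): mirrors the Trace::benefitOverCost() formula
// and the 0.2 cutoff used in chooseTraceOrder(). All frequencies and sizes below are
// made-up, illustrative assumptions.
#include <cstdio>

struct TraceCost {
    double benefit { 0 };            // frequency-weighted branches we would remove
    double insertedBranchCost { 0 }; // frequency of the block that receives the new branch
    double spaceCost { 0 };          // number of values that would get duplicated

    double benefitOverCost() const { return (benefit - insertedBranchCost) / spaceCost; }
};

int main()
{
    const TraceCost cases[] = {
        { 1.0, 0.0, 2.0 },     // (1) pair of branches with little code between them
        { 9.0, 1.0, 15.0 },    // (2) branch inside a small, hot loop
        { 120.0, 1.0, 800.0 }, // (3) hoisting an inner branch out of a large nested loop
    };

    for (const TraceCost& trace : cases) {
        double score = trace.benefitOverCost();
        std::printf("benefit=%6.1f spaceCost=%6.1f score=%.3f -> %s\n",
            trace.benefit, trace.spaceCost, score, score > 0.2 ? "specialize" : "skip");
    }
    return 0;
}

With these assumed inputs, the 0.2 cutoff keeps the branch-pair and hot-loop cases and rejects the large nested loop, which is the separation the Trace comment is aiming for.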