in stetho/src/main/java/com/facebook/stetho/inspector/elements/Document.java [354:493]
  private void applyDocumentUpdate(final ShadowDocument.Update docUpdate) {
    // TODO: it'd be nice if we could delegate our calls into mPeerManager.sendNotificationToPeers()
    //       to a background thread so as to offload the UI from JSON serialization stuff

    // Applying the ShadowDocument.Update is done in five stages:

    // Stage 1: any elements that have been disconnected from the tree, and any elements in those
    // sub-trees which have not been reconnected to the tree, should be garbage collected. For now
    // we gather a list of garbage element IDs which we use in stage 2 to test a changed element
    // to see if it's also garbage. Then during stage 3 we use this list to unhook all of the
    // garbage elements.

    // This is used to collect the garbage element IDs in stage 1. It is sorted before stage 2 so
    // that it can use a binary search as a quick "contains()" method.
    // Note that this could be accomplished in a simpler way by employing a HashSet<Object> and
    // storing the element Objects. However, HashSet wraps HashMap and we would have a lot more
    // allocations (Map.Entry, iterator during stage 3) and thus GC pressure.
    // Using SparseArray wouldn't be good because it ensures sorted ordering as you go, but we
    // don't need that during stage 1. Using ArrayList with int boxing is fine because the
    // Integers are already boxed inside of mObjectIdMapper and we make sure to reuse that
    // allocation.
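    // (For n garbage elements and m changed elements, that is one O(n log n) sort here plus an
    // O(log n) binary search per changed element in stage 2.)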
    final ArrayList<Integer> garbageElementIds = new ArrayList<>();

    docUpdate.getGarbageElements(new Accumulator<Object>() {
      @Override
      public void store(Object element) {
        Integer nodeId = Util.throwIfNull(mObjectIdMapper.getIdForObject(element));
        ElementInfo newElementInfo = docUpdate.getElementInfo(element);

        // Only raise onChildNodeRemoved for the root of a disconnected tree. The remainder of the
        // sub-tree is included automatically, so we don't need to send events for those.
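        // For example (hypothetical hierarchy): if an update detaches the subtree A -> B -> C,
        // only A has parentElement == null in the update; B and C are still parented within the
        // garbage subtree, so a single onChildNodeRemoved for A drops all three on the listener
        // side.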
        if (newElementInfo.parentElement == null) {
          ElementInfo oldElementInfo = mShadowDocument.getElementInfo(element);
          int parentNodeId = mObjectIdMapper.getIdForObject(oldElementInfo.parentElement);
          mUpdateListeners.onChildNodeRemoved(parentNodeId, nodeId);
        }

        garbageElementIds.add(nodeId);
      }
    });

    Collections.sort(garbageElementIds);

    // Stage 2: remove all elements that have been reparented. Otherwise we get into trouble if we
    // transmit an event to insert an element under its new parent before we've transmitted the
    // event that removes it from its old parent. The removal event is ignored because the parent
    // doesn't match the listener's expectations, so we get ghost elements that are stuck and
    // can't be exorcised.
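    // For example (hypothetical): if X moves from parent P1 to parent P2 within one update, the
    // listener must see "remove X from P1" before "insert X under P2"; inserting first would
    // leave a stale copy of X stranded under P1.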
    docUpdate.getChangedElements(new Accumulator<Object>() {
      @Override
      public void store(Object element) {
        Integer nodeId = Util.throwIfNull(mObjectIdMapper.getIdForObject(element));

        // Skip garbage elements
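        // (Collections.binarySearch returns a non-negative index only when nodeId is present in
        // the sorted garbage list built during stage 1)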
        if (Collections.binarySearch(garbageElementIds, nodeId) >= 0) {
          return;
        }

        // Skip new elements
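        // (an element absent from the old ShadowDocument was just inserted: there is no old
        // parent to remove it from, and stage 4 will transmit it when it syncs its parent's
        // children)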
        final ElementInfo oldElementInfo = mShadowDocument.getElementInfo(element);
        if (oldElementInfo == null) {
          return;
        }

        final ElementInfo newElementInfo = docUpdate.getElementInfo(element);
        if (newElementInfo.parentElement != oldElementInfo.parentElement) {
          int parentNodeId = mObjectIdMapper.getIdForObject(oldElementInfo.parentElement);
          mUpdateListeners.onChildNodeRemoved(parentNodeId, nodeId);
        }
      }
    });

    // Stage 3: unhook garbage elements
    for (int i = 0, N = garbageElementIds.size(); i < N; ++i) {
      mObjectIdMapper.removeObjectById(garbageElementIds.get(i));
    }
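    // After this loop, mObjectIdMapper.containsObject() returns false for every garbage element;
    // stage 4 relies on exactly that test to skip them.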

    // Stage 4: transmit all other changes to our listener. This includes inserting reparented
    // elements that we removed in stage 2.
    docUpdate.getChangedElements(new Accumulator<Object>() {
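      // Tracks elements already transmitted in full via onChildNodeInserted so that store()
      // below can skip them instead of emitting duplicate updates.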
      private final HashSet<Object> listenerInsertedElements = new HashSet<>();

      private Accumulator<Object> insertedElements = new Accumulator<Object>() {
        @Override
        public void store(Object element) {
          if (docUpdate.isElementChanged(element)) {
            // We only need to track changed elements because unchanged elements will never be
            // encountered by the code below, in store(), which uses this Set to skip elements
            // that don't need to be processed.
            listenerInsertedElements.add(element);
          }
        }
      };

      @Override
      public void store(Object element) {
        if (!mObjectIdMapper.containsObject(element)) {
          // The element was garbage and has already been removed. At this stage that's okay and
          // we just skip it and continue forward with the algorithm.
          return;
        }

        if (listenerInsertedElements.contains(element)) {
          // This element was already transmitted in its entirety by an onChildNodeInserted
          // event. Trying to send any further updates about it is both unnecessary and incorrect
          // (we'd end up with duplicated elements and really bad performance).
          return;
        }

        final ElementInfo oldElementInfo = mShadowDocument.getElementInfo(element);
        final ElementInfo newElementInfo = docUpdate.getElementInfo(element);
        final List<Object> oldChildren = (oldElementInfo != null)
            ? oldElementInfo.children
            : Collections.emptyList();
        final List<Object> newChildren = newElementInfo.children;

        // This list is representative of our listener's view of the Document (ultimately, this
        // means Chrome DevTools). We need to sync it up with newChildren.
        ChildEventingList listenerChildren = acquireChildEventingList(element, docUpdate);
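        // (Judging by the acquire/release pair, ChildEventingList instances are recycled to
        // avoid per-element allocations; the list reports onChildNodeInserted and
        // onChildNodeRemoved to the listener as it is mutated.)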
        for (int i = 0, N = oldChildren.size(); i < N; ++i) {
          final Object childElement = oldChildren.get(i);
          if (mObjectIdMapper.containsObject(childElement)) {
            final ElementInfo newChildElementInfo = docUpdate.getElementInfo(childElement);
            if (newChildElementInfo != null &&
                newChildElementInfo.parentElement != element) {
              // This element was reparented, so we already told our listener to remove it.
            } else {
              listenerChildren.add(childElement);
            }
          }
        }
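
        // updateListenerChildren diffs listenerChildren (the listener's presumed view, seeded
        // above) against newChildren, feeding every element it inserts into insertedElements so
        // that later iterations can skip re-transmitting those sub-trees.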
        updateListenerChildren(listenerChildren, newChildren, insertedElements);
        releaseChildEventingList(listenerChildren);
      }
    });

    // Stage 5: Finally, commit the update to the ShadowDocument.
    docUpdate.commit();
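    // (Once committed, mShadowDocument matches the tree the listener has just been synced to,
    // so the next Update is diffed against this new baseline.)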
  }