Skip to content

Commit 81d2554

Browse files
committed
Hit test virtual detectors in button
1 parent c9a577c commit 81d2554

File tree

3 files changed

+67
-57
lines changed

3 files changed

+67
-57
lines changed

packages/react-native-gesture-handler/apple/RNGestureHandler.h

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -106,6 +106,7 @@
106106
inState:(RNGestureHandlerState)state
107107
fromManualStateChange:(BOOL)fromManualStateChange;
108108
- (BOOL)containsPointInView;
109+
- (BOOL)wantsToHandleEventsAtPoint:(CGPoint)point;
109110
- (RNGestureHandlerState)state;
110111
- (nullable RNGestureHandlerEventExtraData *)eventExtraData:(nonnull id)recognizer;
111112

packages/react-native-gesture-handler/apple/RNGestureHandler.mm

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -744,6 +744,32 @@ - (BOOL)containsPointInView
744744
return CGRectContainsPoint(hitFrame, location);
745745
}
746746

747+
- (BOOL)wantsToHandleEventsAtPoint:(CGPoint)point
748+
{
749+
RNGHUIView *viewToHitTest = _recognizer.view;
750+
751+
if ([self usesNativeOrVirtualDetector] && [_recognizer.view.subviews count] > 0) {
752+
viewToHitTest = _recognizer.view.subviews[0];
753+
}
754+
755+
if (_actionType == RNGestureHandlerActionTypeVirtualDetector && _virtualViewTag != nil) {
756+
// In this case, logic detector is attached to the DetectorView, which has a single subview representing
757+
// the actual target view in the RN hierarchy
758+
if ([viewToHitTest respondsToSelector:@selector(touchEventEmitterAtPoint:)]) {
759+
// If the view has touchEventEmitterAtPoint: method, it can be used to determine the viewtag
760+
// of the view under the touch point
761+
facebook::react::SharedTouchEventEmitter eventEmitter =
762+
[(id<RCTTouchableComponentViewProtocol>)viewToHitTest touchEventEmitterAtPoint:point];
763+
auto viewUnderTouch = eventEmitter->getEventTarget()->getTag();
764+
765+
return viewUnderTouch == [_virtualViewTag intValue];
766+
}
767+
}
768+
769+
CGRect hitFrame = RNGHHitSlopInsetRect(viewToHitTest.bounds, _hitSlop);
770+
return CGRectContainsPoint(hitFrame, point);
771+
}
772+
747773
- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer
748774
{
749775
if ([_handlersToWaitFor count]) {

packages/react-native-gesture-handler/apple/RNGestureHandlerButton.mm

Lines changed: 40 additions & 57 deletions
Original file line numberDiff line numberDiff line change
@@ -166,30 +166,6 @@ - (void)layout
166166
[self applyUnderlayCornerRadii];
167167
}
168168

169-
- (BOOL)shouldHandleTouch:(RNGHUIView *)view
170-
{
171-
if ([view isKindOfClass:[RNGestureHandlerButton class]]) {
172-
RNGestureHandlerButton *button = (RNGestureHandlerButton *)view;
173-
return button.userEnabled;
174-
}
175-
176-
// Certain subviews such as RCTViewComponentView have been observed to have disabled
177-
// accessibility gesture recognizers such as _UIAccessibilityHUDGateGestureRecognizer,
178-
// ostensibly set by iOS. Such gesture recognizers cause this function to return YES
179-
// even when the passed view is static text and does not respond to touches. This in
180-
// turn prevents the button from receiving touches, breaking functionality. To handle
181-
// such case, we can count only the enabled gesture recognizers when determining
182-
// whether a view should receive touches.
183-
NSPredicate *isEnabledPredicate = [NSPredicate predicateWithFormat:@"isEnabled == YES"];
184-
NSArray *enabledGestureRecognizers = [view.gestureRecognizers filteredArrayUsingPredicate:isEnabledPredicate];
185-
186-
#if !TARGET_OS_OSX
187-
return [view isKindOfClass:[UIControl class]] || [enabledGestureRecognizers count] > 0;
188-
#else
189-
return [view isKindOfClass:[NSControl class]] || [enabledGestureRecognizers count] > 0;
190-
#endif
191-
}
192-
193169
- (void)animateUnderlayToOpacity:(float)toOpacity duration:(NSTimeInterval)durationMs
194170
{
195171
_underlayLayer.opacity =
@@ -290,37 +266,6 @@ - (void)animateTarget:(RNGHUIView *)target
290266
#endif
291267
}
292268

293-
#if !TARGET_OS_OSX
294-
295-
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
296-
{
297-
[super touchesBegan:touches withEvent:event];
298-
UITouch *touch = [touches anyObject];
299-
if (touch.view != self) {
300-
[self sendActionsForControlEvents:UIControlEventTouchDown];
301-
}
302-
}
303-
304-
- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
305-
{
306-
[super touchesEnded:touches withEvent:event];
307-
UITouch *touch = [touches anyObject];
308-
if (touch.view != self) {
309-
[self sendActionsForControlEvents:UIControlEventTouchUpInside];
310-
}
311-
}
312-
313-
- (void)touchesCancelled:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
314-
{
315-
[super touchesCancelled:touches withEvent:event];
316-
UITouch *touch = [touches anyObject];
317-
if (touch.view != self) {
318-
[self sendActionsForControlEvents:UIControlEventTouchCancel];
319-
}
320-
}
321-
322-
#endif
323-
324269
- (void)handleAnimatePressIn
325270
{
326271
if (_pendingPressOutBlock) {
@@ -698,6 +643,44 @@ - (void)endTrackingWithTouch:(UITouch *)touch withEvent:(UIEvent *)event
698643
_isTouchInsideBounds = NO;
699644
}
700645

646+
- (BOOL)shouldHandleTouch:(RNGHUIView *)view atPoint:(CGPoint)point
{
  // Decides whether `view` (a descendant found by hit testing) should receive
  // the touch at `point` (in this button's coordinate space).
  if ([view isKindOfClass:[RNGestureHandlerButton class]]) {
    RNGestureHandlerButton *button = (RNGestureHandlerButton *)view;
    return button.userEnabled;
  }

  // Certain subviews such as RCTViewComponentView have been observed to have disabled
  // accessibility gesture recognizers such as _UIAccessibilityHUDGateGestureRecognizer,
  // ostensibly set by iOS. Such gesture recognizers cause this function to return YES
  // even when the passed view is static text and does not respond to touches. This in
  // turn prevents the button from receiving touches, breaking functionality. To handle
  // such case, we can count only the enabled gesture recognizers when determining
  // whether a view should receive touches.
  NSPredicate *isEnabledPredicate = [NSPredicate predicateWithFormat:@"isEnabled == YES"];
  NSArray *enabledGestureRecognizers = [view.gestureRecognizers filteredArrayUsingPredicate:isEnabledPredicate];

#if !TARGET_OS_OSX
  // Ask each enabled recognizer's gesture handler whether it actually wants
  // events at this point — important for virtual detectors, whose detector view
  // may cover a larger area than the virtual view it represents. A recognizer
  // without an associated RNGestureHandler is assumed to want the event.
  // The loop is iOS-only: it uses UIGestureRecognizer, and the macOS branch
  // below does not consume its result.
  BOOL gestureRecognizerWantsEvent = NO;
  for (UIGestureRecognizer *recognizer in enabledGestureRecognizers) {
    RNGestureHandler *handler = [RNGestureHandler findGestureHandlerByRecognizer:recognizer];
    if (handler != nil) {
      CGPoint pointInView = [self convertPoint:point toView:view];
      gestureRecognizerWantsEvent = [handler wantsToHandleEventsAtPoint:pointInView];
    } else {
      gestureRecognizerWantsEvent = YES;
    }
    if (gestureRecognizerWantsEvent) {
      break;
    }
  }

  return [view isKindOfClass:[UIControl class]] || gestureRecognizerWantsEvent;
#else
  // NOTE(review): on macOS the per-handler point check above is not applied —
  // any enabled recognizer suffices. Confirm this platform asymmetry is
  // intentional (virtual detectors may be iOS-only).
  return [view isKindOfClass:[NSControl class]] || [enabledGestureRecognizers count] > 0;
#endif
}
683+
701684
- (RNGHUIView *)hitTest:(CGPoint)point withEvent:(UIEvent *)event
702685
{
703686
RNGestureHandlerPointerEvents pointerEvents = _pointerEvents;
@@ -711,7 +694,7 @@ - (RNGHUIView *)hitTest:(CGPoint)point withEvent:(UIEvent *)event
711694
if (!subview.isHidden && subview.alpha > 0) {
712695
CGPoint convertedPoint = [subview convertPoint:point fromView:self];
713696
UIView *hitView = [subview hitTest:convertedPoint withEvent:event];
714-
if (hitView != nil && [self shouldHandleTouch:hitView]) {
697+
if (hitView != nil && [self shouldHandleTouch:hitView atPoint:point]) {
715698
return hitView;
716699
}
717700
}
@@ -724,7 +707,7 @@ - (RNGHUIView *)hitTest:(CGPoint)point withEvent:(UIEvent *)event
724707
}
725708

726709
RNGHUIView *inner = [super hitTest:point withEvent:event];
727-
while (inner && ![self shouldHandleTouch:inner]) {
710+
while (inner && ![self shouldHandleTouch:inner atPoint:point]) {
728711
inner = inner.superview;
729712
}
730713
return inner;

0 commit comments

Comments
 (0)