Managing User Gestures in Jetpack Compose: Implementing Tap, Long Tap, and Drag to Select

Danilo Arcadipane
4 min read · Aug 20, 2024

--

Introduction

Effectively handling user gestures is crucial for delivering an interactive and intuitive experience in modern applications. In contexts like image galleries, where users need to select, move, or access additional options with ease, it’s essential to clearly recognize and differentiate various gestures such as tap, long tap, and drag. In this article, we’ll explore an approach using Jetpack Compose to manage these three gestures: tap, long tap, and drag to select. These gestures have been implemented and differentiated to offer precise and intuitive control, with particular attention to the drag to select functionality, which allows drawing a selection box over a grid of images.

Objective

The primary goal is to implement three types of interactions on an image grid:

  1. Tap: Selecting or deselecting a single image.
  2. Long Tap: Triggering a contextual action, such as opening a menu.
  3. Drag to Select: Multi-selecting images by dragging a finger to draw a selection box.

Technical Solution

1. Handling Tap and Long Tap

  • The tap and long tap gestures are managed using the pointerInput modifier in Jetpack Compose, utilizing the detectTapGestures function. These gestures are handled separately from the dragSelection to allow precise control over individual images. The tap gesture allows selecting or deselecting an image, while the long tap can trigger additional actions like opening a contextual menu.
// Per-item gesture handling for a single grid photo:
// tap toggles selection, long-press triggers the contextual action.
val photoModifier = Modifier.pointerInput(Unit) {
// NOTE(review): pointerInput is keyed on Unit, so these lambdas keep the
// `photo`/`lockedImage`/`index` values captured at first composition —
// verify these cannot change for a live grid item, or key on them instead.
detectTapGestures(
onTap = {
// The locked image cannot be (de)selected by tapping.
if (lockedImage?.id == photo.id) return@detectTapGestures
// Haptic tick confirming the selection change.
haptic.performHapticFeedback(HapticFeedbackType.LongPress)
onSelectionChange(photo)
},
onLongPress = {
// Long-press is ignored for the locked image; otherwise notify the caller
// (e.g. to open a contextual menu).
if (lockedImage?.id != photo.id) {
onImageLongClick(index, photo)
}
}
)
}

2. Handling Drag to Select

  • The drag to select gesture is implemented through a custom Modifier called dragSelection. This modifier uses detectDragGestures to detect the user's finger drag across the grid, drawing a rectangular selection box. All images within the box are automatically selected. During the drag, the grid can also scroll automatically if the user drags towards the edge of the screen.
/**
 * Adds drag-to-select behavior to a grid backed by [state].
 *
 * While the user drags, a rectangular selection box is tracked from the drag
 * origin to the current pointer position; every visible item whose bounds
 * intersect the box is reported once via [onSelectionChange]. Dragging near
 * the top/bottom edge of the viewport auto-scrolls the grid.
 *
 * @param imageModelList the items currently shown in the grid (latest value is
 *        always read via [rememberUpdatedState], so the gesture survives list updates).
 * @param state the grid's [LazyGridState]; used for item bounds and auto-scroll.
 * @param imageModelsSelected currently selected items. Not read here; kept for
 *        interface compatibility with existing callers.
 * @param onSelectionChange invoked once per item that enters the selection box.
 * @param lockedImage an item that must never be toggled by the drag, if any.
 */
@Composable
fun Modifier.dragSelection(
    imageModelList: List<ImageModel>,
    state: LazyGridState,
    imageModelsSelected: List<ImageModel>,
    onSelectionChange: (ImageModel) -> Unit,
    lockedImage: ImageModel? = null
): Modifier {
    var isDragging by remember { mutableStateOf(false) }
    var dragStart by remember { mutableStateOf(Offset.Zero) }
    var dragEnd by remember { mutableStateOf(Offset.Zero) }
    // Items already reported during the current drag, to avoid toggling twice.
    var processedImages by remember { mutableStateOf(setOf<ImageModel>()) }
    val haptic = LocalHapticFeedback.current
    // Latest list, safe to read from long-lived lambdas/effects below.
    val updatedImageModelListState = rememberUpdatedState(newValue = imageModelList)

    // Auto-scroll loop: while a drag is active, nudge the grid when the pointer
    // is within 100px of the top or bottom edge of the viewport.
    LaunchedEffect(isDragging) {
        if (isDragging) {
            while (isDragging) {
                val dragEndY = dragEnd.y
                val layoutInfo = state.layoutInfo
                val viewportHeight = layoutInfo.viewportEndOffset
                val firstVisibleItemIndex = layoutInfo.visibleItemsInfo.firstOrNull()?.index ?: 0
                val lastVisibleItemIndex = layoutInfo.visibleItemsInfo.lastOrNull()?.index ?: 0
                // FIX: read the up-to-date list instead of the parameter captured
                // when this effect was launched — the list can change mid-drag.
                val itemCount = updatedImageModelListState.value.size

                if (dragEndY < 100 && firstVisibleItemIndex > 0) {
                    state.scrollBy(-20f)
                }

                if (dragEndY > viewportHeight - 100 && lastVisibleItemIndex < itemCount - 1) {
                    state.scrollBy(20f)
                }

                delay(20)
            }
        }
    }

    return this.pointerInput(Unit) {
        detectDragGestures(
            onDragStart = { startOffset ->
                dragStart = startOffset
                dragEnd = startOffset
                isDragging = true
                processedImages = setOf()
                Log.d("DragSelection", "Drag started at: $startOffset, ${updatedImageModelListState.value.size}")
            },
            onDrag = { change, dragAmount ->
                change.consume()
                dragEnd += dragAmount
                // Normalized selection rectangle (drag can go in any direction).
                val minX = minOf(dragStart.x, dragEnd.x)
                val maxX = maxOf(dragStart.x, dragEnd.x)
                val minY = minOf(dragStart.y, dragEnd.y)
                val maxY = maxOf(dragStart.y, dragEnd.y)
                Log.d("DragSelection", "Dragging to: $dragEnd")

                // NOTE(review): drag coordinates are not compensated when the grid
                // auto-scrolls, so the box is anchored in viewport space, not
                // content space — confirm this matches the intended UX.
                val visibleItems = state.layoutInfo.visibleItemsInfo
                val itemsInBox = updatedImageModelListState.value
                    .filter { imageModel ->
                        // FIX: one lookup per item (was two identical find() calls),
                        // and items not currently laid out are never selectable
                        // (previously they defaulted to zero bounds and could match
                        // a box extending into negative coordinates).
                        val info = visibleItems.find { it.key == imageModel.id }
                            ?: return@filter false
                        val left = info.offset.x.toFloat()
                        val top = info.offset.y.toFloat()
                        val right = left + info.size.width
                        val bottom = top + info.size.height
                        // Standard axis-aligned rectangle intersection test.
                        left < maxX && right > minX && top < maxY && bottom > minY
                    }

                Log.d("DragSelection", "Items in selection box: ${itemsInBox.size}")

                itemsInBox.forEach { imageModel ->
                    if (imageModel !in processedImages) {
                        processedImages = processedImages + imageModel
                        if (lockedImage?.id != imageModel.id) {
                            Log.d("DragSelection", "Image selected: ${imageModel.id}")
                            haptic.performHapticFeedback(HapticFeedbackType.LongPress)
                            onSelectionChange(imageModel)
                        }
                    }
                }
            },
            onDragEnd = {
                isDragging = false
                processedImages = setOf()
                Log.d("DragSelection", "Drag ended")
            }
        )
    }
}

Integrating the Solution

This logic is integrated within the PhotoGridMultiSelect composable, where the image grid is managed alongside the dragSelection modifier and the tap and long tap gestures.

/**
 * Image grid supporting tap (toggle selection), long-press (contextual action),
 * and drag-to-select (via [dragSelection]) gestures.
 *
 * @param modifier applied to the gesture container wrapping the grid.
 * @param imageModelList items to display, 3 per row.
 * @param selectedList items currently selected; used to render selection state.
 * @param onImageLongClick called with (index, item) on long-press of a non-locked item.
 * @param onSelectionChange called with the item whose selection should toggle.
 * @param lockedImage item that cannot be selected/deselected, if any.
 * @param showLoaderItem when true, appends a full-width loading spinner row.
 */
@Composable
fun PhotoGridMultiSelect(
    modifier: Modifier = Modifier,
    imageModelList: List<ImageModel>,
    selectedList: List<ImageModel>,
    onImageLongClick: (Int, ImageModel) -> Unit,
    onSelectionChange: (ImageModel) -> Unit,
    lockedImage: ImageModel? = null,
    showLoaderItem: Boolean = false
) {
    val state = rememberLazyGridState()
    val haptic = LocalHapticFeedback.current

    val localModifier = modifier
        .fillMaxSize()
        .dragSelection(
            imageModelList = imageModelList,
            state = state,
            imageModelsSelected = selectedList,
            // Pass the callback directly — the wrapping lambda added nothing.
            onSelectionChange = onSelectionChange,
            lockedImage = lockedImage
        )

    Column {
        Box(modifier = localModifier) {
            LazyVerticalGrid(
                state = state,
                columns = GridCells.Fixed(3),
                verticalArrangement = Arrangement.spacedBy(2.dp),
                horizontalArrangement = Arrangement.spacedBy(2.dp),
                modifier = Modifier
            ) {
                itemsIndexed(
                    imageModelList,
                    key = { _: Int, item: ImageModel -> item.id }) { index, photo ->
                    val selected = selectedList.any { it.id == photo.id }
                    // FIX: key pointerInput on the values its gesture lambdas capture.
                    // With pointerInput(Unit), a keyed item that later recomposes with
                    // updated `photo` or `lockedImage` kept running the handlers
                    // captured at first composition, acting on stale data.
                    val photoModifier = Modifier.pointerInput(photo, lockedImage) {
                        detectTapGestures(
                            onTap = {
                                // The locked image cannot be toggled by tapping.
                                if (lockedImage?.id == photo.id) return@detectTapGestures
                                haptic.performHapticFeedback(HapticFeedbackType.LongPress)
                                onSelectionChange(photo)
                            },
                            onLongPress = {
                                // Long-press is ignored for the locked image.
                                if (lockedImage?.id != photo.id) {
                                    onImageLongClick(index, photo)
                                }
                            }
                        )
                    }
                    SelectableImageItem(
                        modifier = photoModifier,
                        imageModel = photo,
                        contentDescription = "Photo Grid Item index $index",
                        isImageSelected = selected && lockedImage?.id != photo.id,
                        isImageLocked = lockedImage?.id == photo.id
                    )
                }
                // Optional trailing loader spanning the whole 3-column row.
                if (showLoaderItem) {
                    item(span = { GridItemSpan(3) }) {
                        Box(
                            modifier = Modifier
                                .fillMaxWidth()
                                .padding(16.dp),
                            contentAlignment = Alignment.Center
                        ) {
                            CircularProgressIndicator(color = ThemeDS.colors.tertiary)
                        }
                    }
                }
            }
        }
    }
}

Conclusion

By leveraging Jetpack Compose and its APIs for gesture handling, it’s possible to implement an advanced and intuitive user interface for image selection. By separating the logic for tap, long tap, and drag to select, this solution ensures precise and customizable control of user interactions, significantly enhancing the user experience.

The complete example can be found HERE.
Cheers :-)

--

--