fix(scanner): yield to scheduler before runSync + min skeleton + gallery placeholder

- useDetection.analyze() now awaits a requestAnimationFrame before
  calling runInference. Without it React commits isAnalyzing=true and
  immediately hits the synchronous TFLite runSync that blocks the JS
  thread for 500-1500ms — the analyzing skeleton overlay appears AFTER
  the inference, defeating its purpose.
- Same hook enforces a minimum 600ms total before resolving so a
  cached/fast inference doesn't show a skeleton flicker that reads as
  a glitch.
- ScannerScreen.handleCapture is split: the capture itself stays inline,
  while processImage(uri) is now its own async function. This gives cleaner
  control flow when the capture succeeds and analysis is delegated.
- The previously dead "image gallery" icon next to the shutter is now
  a real TouchableOpacity that fires a "coming soon" toast (we'll wire
  it to expo-image-picker once we add the lib + native rebuild).

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Yanis 2026-05-01 14:03:12 +02:00
parent 425f3035ef
commit a3cd906a6d
2 changed files with 82 additions and 43 deletions

View file

@ -11,8 +11,21 @@ export function useDetection() {
setIsAnalyzing(true);
setError(null);
// Yield au scheduler pour que React commit le render `isAnalyzing=true`
// (le skeleton overlay) AVANT que runSync() ne bloque le JS thread ~500-1500ms.
await new Promise<void>((resolve) =>
requestAnimationFrame(() => resolve()),
);
const startedAt = Date.now();
try {
const detection = await runInference(imageUri);
// UX : maintenir le skeleton visible au moins 600ms pour éviter un flash
// perçu comme un bug ("rien ne se passe") quand l'inférence est très rapide.
const elapsed = Date.now() - startedAt;
if (elapsed < 600) {
await new Promise((r) => setTimeout(r, 600 - elapsed));
}
setLastDetection(detection);
return detection;
} catch (err) {

View file

@ -93,49 +93,11 @@ export default function ScannerScreen() {
};
}, []);
async function handleCapture() {
if (isAnalyzing) return;
if (!cameraRef.current) {
Alert.alert(t('common.error'), 'Camera not initialized');
return;
}
if (!isCameraReady) {
Alert.alert(t('common.error'), 'Camera is not ready yet — please wait.');
return;
}
await hapticLight();
shutterScale.value = withSequence(
withTiming(0.88, { duration: 100 }),
withTiming(1, { duration: 150 })
);
async function processImage(imageUri: string) {
const interval = setInterval(() => {
setLiveConfidence((prev) => Math.min(prev + Math.floor(Math.random() * 12), 85));
}, 150);
let imageUri: string | undefined;
try {
const photo = await cameraRef.current.takePictureAsync({
quality: 0.85,
skipProcessing: true,
exif: false,
});
imageUri = photo?.uri;
if (__DEV__) {
console.log('[Scanner] Captured photo:', imageUri);
}
} catch (err) {
clearInterval(interval);
setLiveConfidence(0);
const message = err instanceof Error ? err.message : String(err);
console.warn('[Scanner] takePictureAsync failed:', message);
Alert.alert(t('common.error'), `Capture failed: ${message}`);
return;
}
const [detection, coords] = await Promise.all([
analyze(imageUri),
requestAndGetLocation(),
@ -172,6 +134,61 @@ export default function ScannerScreen() {
setTimeout(() => setLiveConfidence(0), 500);
}
/**
 * Shutter-button handler: guards against re-entry and camera readiness,
 * gives haptic + scale feedback, captures a photo, then delegates the
 * analysis pipeline to processImage().
 *
 * Failures from takePictureAsync are surfaced to the user via Alert; a
 * capture that resolves without a URI is now also reported instead of
 * returning silently (previously the UI just appeared to do nothing).
 */
async function handleCapture() {
  if (isAnalyzing) return; // ignore taps while a previous analysis is running
  if (!cameraRef.current) {
    Alert.alert(t('common.error'), 'Camera not initialized');
    return;
  }
  if (!isCameraReady) {
    Alert.alert(t('common.error'), 'Camera is not ready yet — please wait.');
    return;
  }
  await hapticLight();
  // Quick press-in / spring-back animation on the shutter button.
  shutterScale.value = withSequence(
    withTiming(0.88, { duration: 100 }),
    withTiming(1, { duration: 150 })
  );
  let imageUri: string | undefined;
  try {
    const photo = await cameraRef.current.takePictureAsync({
      quality: 0.85,
      skipProcessing: true, // skip device-level post-processing for speed
      exif: false,
    });
    imageUri = photo?.uri;
    if (__DEV__) {
      console.log('[Scanner] Captured photo:', imageUri);
    }
  } catch (err) {
    setLiveConfidence(0);
    const message = err instanceof Error ? err.message : String(err);
    console.warn('[Scanner] takePictureAsync failed:', message);
    Alert.alert(t('common.error'), `Capture failed: ${message}`);
    return;
  }
  if (!imageUri) {
    // Fix: was a silent return — report the empty capture so the user
    // isn't left wondering why nothing happened.
    setLiveConfidence(0);
    console.warn('[Scanner] takePictureAsync returned no URI');
    Alert.alert(t('common.error'), 'Capture failed: no image was produced.');
    return;
  }
  await processImage(imageUri);
}
/**
 * Placeholder handler for the gallery button.
 *
 * TODO: re-enable via expo-image-picker once the native Android build has
 * been regenerated (expo prebuild + run:android) to include the library.
 */
async function handlePickFromGallery() {
  await hapticLight();
  const title = t('scanner.galleryComingSoonTitle');
  const description = t('scanner.galleryComingSoonDescription');
  toast.info(title, { description });
}
if (!permission) {
return (
<View className="flex-1 items-center justify-center bg-[#FAFAFA]">
@ -252,12 +269,21 @@ export default function ScannerScreen() {
)}
<View className="absolute bottom-0 left-0 right-0 flex-row items-center justify-between px-8 pb-12 pt-5">
<View
<TouchableOpacity
className="h-11 w-11 items-center justify-center rounded-lg"
style={{ backgroundColor: 'rgba(255,255,255,0.15)', borderWidth: 1, borderColor: 'rgba(255,255,255,0.3)' }}
style={{
backgroundColor: 'rgba(255,255,255,0.15)',
borderWidth: 1,
borderColor: 'rgba(255,255,255,0.3)',
opacity: isAnalyzing ? 0.4 : 1,
}}
onPress={handlePickFromGallery}
disabled={isAnalyzing}
accessibilityLabel={t('scanner.pickFromGallery')}
activeOpacity={0.7}
>
<Ionicons name="image-outline" size={20} color="rgba(255,255,255,0.5)" />
</View>
<Ionicons name="image-outline" size={20} color={colors.surface} />
</TouchableOpacity>
<Animated.View
className="h-[72px] w-[72px] items-center justify-center rounded-full border-[3px] border-white"