feat(scanner,ml): real TFLite inference + preload + flip camera + analyzing skeleton

ML
- Reinstall react-native-fast-tflite + react-native-nitro-modules and
  register the fast-tflite Expo plugin in app.json
- Wire model.ts to the real native module: dynamic require + lazy
  loadTensorflowModel (cached), softmax/argmax on output, build Detection
  with the project 0-100 confidence convention. Falls back to mockDetection
  on any load/inference failure so the app never breaks.
- Align preprocessing input size to 256x256 to match the Python
  MobileNetV2 export.

Scanner UX
- Preload the TFLite model on Scanner mount to avoid the ~1-2s decode hit
  on first capture
- Add a flip-front/back camera control with a toast warning that the rear
  camera gives better results
- Show a full-screen analyzing skeleton overlay while inference runs
- Memoize ConfidenceMeter color into a single computed value

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Yanis 2026-05-01 11:31:17 +02:00
parent f247748adc
commit 086de7c05c
9 changed files with 240 additions and 39 deletions

View file

@ -42,6 +42,7 @@
},
"plugins": [
"./plugins/withCmakeFix",
"react-native-fast-tflite",
"expo-localization",
[
"expo-camera",

View file

@ -45,7 +45,9 @@
"react-i18next": "^17.0.1",
"react-lucid": "^0.0.1",
"react-native": "0.81.5",
"react-native-fast-tflite": "^3.0.1",
"react-native-gesture-handler": "~2.28.0",
"react-native-nitro-modules": "^0.35.6",
"react-native-reanimated": "~4.1.1",
"react-native-safe-area-context": "~5.6.0",
"react-native-screens": "~4.16.0",

View file

@ -116,9 +116,15 @@ importers:
react-native:
specifier: 0.81.5
version: 0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0)
react-native-fast-tflite:
specifier: ^3.0.1
version: 3.0.1(react-native-nitro-modules@0.35.6(react-native@0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0))(react@19.1.0))(react-native@0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0))(react@19.1.0)
react-native-gesture-handler:
specifier: ~2.28.0
version: 2.28.0(react-native@0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0))(react@19.1.0)
react-native-nitro-modules:
specifier: ^0.35.6
version: 0.35.6(react-native@0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0))(react@19.1.0)
react-native-reanimated:
specifier: ~4.1.1
version: 4.1.7(react-native-worklets@0.5.1(@babel/core@7.29.0)(react-native@0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0))(react@19.1.0))(react-native@0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0))(react@19.1.0)
@ -2956,6 +2962,14 @@ packages:
react-native-svg:
optional: true
react-native-fast-tflite@3.0.1:
resolution: {integrity: sha512-88wNR/4iR8X0zuQtrpb1jRbF+X+hUqrD8cER4DhNJnbhA+3PuGz8SoP3n8WEhjYWDkGqTme2Ezk+mbeLiiE+6w==}
engines: {node: '>= 18'}
peerDependencies:
react: '*'
react-native: '*'
react-native-nitro-modules: '*'
react-native-gesture-handler@2.28.0:
resolution: {integrity: sha512-0msfJ1vRxXKVgTgvL+1ZOoYw3/0z1R+Ked0+udoJhyplC2jbVKIJ8Z1bzWdpQRCV3QcQ87Op0zJVE5DhKK2A0A==}
peerDependencies:
@ -2968,6 +2982,12 @@ packages:
react: '*'
react-native: '*'
react-native-nitro-modules@0.35.6:
resolution: {integrity: sha512-3Cb7s+O5tpZ6RdIiPOB/wi3IMfBxD6tl6VDF8gJ5zvM/BEGTWxwMMLjzmWmsYPKekdbYBznF6qp2d2SxixPy8g==}
peerDependencies:
react: '*'
react-native: '*'
react-native-reanimated@4.1.7:
resolution: {integrity: sha512-Q4H6xA3Tn7QL0/E/KjI86I1KK4tcf+ErRE04LH34Etka2oVQhW6oXQ+Q8ZcDCVxiWp5vgbBH6XcH8BOo4w/Rhg==}
peerDependencies:
@ -4843,7 +4863,9 @@ snapshots:
metro-runtime: 0.83.5
transitivePeerDependencies:
- '@babel/core'
- bufferutil
- supports-color
- utf-8-validate
optional: true
'@react-native/normalize-colors@0.74.89': {}
@ -6987,6 +7009,12 @@ snapshots:
transitivePeerDependencies:
- supports-color
react-native-fast-tflite@3.0.1(react-native-nitro-modules@0.35.6(react-native@0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0))(react@19.1.0))(react-native@0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0))(react@19.1.0):
dependencies:
react: 19.1.0
react-native: 0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0)
react-native-nitro-modules: 0.35.6(react-native@0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0))(react@19.1.0)
react-native-gesture-handler@2.28.0(react-native@0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0))(react@19.1.0):
dependencies:
'@egjs/hammerjs': 2.0.17
@ -7000,6 +7028,11 @@ snapshots:
react: 19.1.0
react-native: 0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0)
react-native-nitro-modules@0.35.6(react-native@0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0))(react@19.1.0):
dependencies:
react: 19.1.0
react-native: 0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0)
react-native-reanimated@4.1.7(react-native-worklets@0.5.1(@babel/core@7.29.0)(react-native@0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0))(react@19.1.0))(react-native@0.81.5(@babel/core@7.29.0)(@react-native/metro-config@0.84.1(@babel/core@7.29.0))(@types/react@19.1.17)(react@19.1.0))(react@19.1.0):
dependencies:
react: 19.1.0

View file

@ -23,6 +23,7 @@ function getConfidenceColor(confidence: number): string {
export function ConfidenceMeter({ confidence }: ConfidenceMeterProps) {
const { t } = useTranslation();
const animatedWidth = useSharedValue(0);
const barColor = getConfidenceColor(confidence);
useEffect(() => {
animatedWidth.value = withTiming(confidence / 100, { duration: 500 });
@ -30,16 +31,14 @@ export function ConfidenceMeter({ confidence }: ConfidenceMeterProps) {
const barStyle = useAnimatedStyle(() => ({
width: `${animatedWidth.value * 100}%`,
backgroundColor: getConfidenceColor(confidence),
backgroundColor: barColor,
}));
return (
<View style={styles.container}>
<View style={styles.labelRow}>
<Text style={styles.label}>{t('scanner.confidence')}</Text>
<Text style={[styles.value, { color: getConfidenceColor(confidence) }]}>
{confidence}%
</Text>
<Text style={[styles.value, { color: barColor }]}>{confidence}%</Text>
</View>
<View style={styles.track}>
<Animated.View style={[styles.bar, barStyle]} />

View file

@ -434,7 +434,12 @@
"permissionRequired": "Camera permission required",
"permissionMessage": "VinEye needs camera access to detect grapevines.",
"grantPermission": "Grant camera access",
"identify": "Identify the plant"
"identify": "Identify the plant",
"flipCamera": "Flip camera",
"frontWarningTitle": "Front camera enabled",
"frontWarningDescription": "For best results, use the rear camera.",
"analyzingTitle": "Analyzing",
"analyzingSubtitle": "Identifying the plant…"
},
"result": {
"vineDetected": "Vine detected!",
@ -535,7 +540,7 @@
"resetConfirmTitle": "Restart?",
"resetConfirmMessage": "Your current account will be deleted. You will be redirected to the login screen to create a new account or continue as a guest.",
"resetConfirmCancel": "Cancel",
"resetConfirmOk": "Yes, log me out"
"resetConfirmOk": "Logout"
}
},
"auth": {

View file

@ -434,7 +434,12 @@
"permissionRequired": "Permission caméra requise",
"permissionMessage": "VinEye nécessite l'accès à votre caméra pour détecter les vignes.",
"grantPermission": "Autoriser la caméra",
"identify": "Identifier la plante"
"identify": "Identifier la plante",
"flipCamera": "Inverser la caméra",
"frontWarningTitle": "Caméra avant activée",
"frontWarningDescription": "Pour de meilleurs résultats, utilisez la caméra arrière.",
"analyzingTitle": "Analyse en cours",
"analyzingSubtitle": "Identification de la plante…"
},
"result": {
"vineDetected": "Vigne détectée !",
@ -535,7 +540,7 @@
"resetConfirmTitle": "Recommencer ?",
"resetConfirmMessage": "Votre compte actuel sera supprimé. Vous serez redirigé vers l'écran de connexion pour créer un nouveau compte ou continuer en invité.",
"resetConfirmCancel": "Annuler",
"resetConfirmOk": "Oui, me déconnecter"
"resetConfirmOk": "Logout"
}
},
"auth": {

View file

@ -12,11 +12,14 @@ import Animated, {
withTiming,
} from 'react-native-reanimated';
import { useTranslation } from 'react-i18next';
import { toast } from 'sonner-native';
import { CameraOverlay } from '@/components/scanner/CameraOverlay';
import { Text } from '@/components/ui/text';
import { Button } from '@/components/ui/Button';
import Skeleton from '@/components/ui/Skeleton';
import { useDetection } from '@/hooks/useDetection';
import { loadModel } from '@/services/tflite/model';
import { useGameProgress } from '@/hooks/useGameProgress';
import { useHistory } from '@/hooks/useHistory';
import { useScanLocation } from '@/hooks/useScanLocation';
@ -41,8 +44,21 @@ export default function ScannerScreen() {
const { requestAndGetLocation } = useScanLocation();
const [liveConfidence, setLiveConfidence] = useState(0);
const [isCameraReady, setIsCameraReady] = useState(false);
const [facing, setFacing] = useState<'back' | 'front'>('back');
const cameraRef = useRef<CameraView>(null);
// Toggles between the back and front camera. No-op while an analysis is
// running (the control is also disabled in the UI). Switching to the front
// camera raises an informational toast because the rear camera is expected
// to give better detection results.
async function handleToggleFacing() {
if (isAnalyzing) return;
await hapticLight();
const next = facing === 'back' ? 'front' : 'back';
setFacing(next);
if (next === 'front') {
toast.info(t('scanner.frontWarningTitle'), {
description: t('scanner.frontWarningDescription'),
});
}
}
const shutterScale = useSharedValue(1);
const shutterStyle = useAnimatedStyle(() => ({
transform: [{ scale: shutterScale.value }],
@ -54,6 +70,29 @@ export default function ScannerScreen() {
}
}, [permission, requestPermission]);
// Préchargement du modèle TFLite dès le mount du Scanner — évite de bloquer
// la 1ère capture par le download/decode du modèle (~1-2s).
useEffect(() => {
let cancelled = false;
loadModel()
.then((ok) => {
if (cancelled) return;
if (!ok) {
console.warn('[Scanner] Model preload failed — fallback mock will be used');
} else if (__DEV__) {
console.log('[Scanner] Model preloaded');
}
})
.catch((err) => {
if (!cancelled) {
console.warn('[Scanner] Model preload error:', err);
}
});
return () => {
cancelled = true;
};
}, []);
async function handleCapture() {
if (isAnalyzing) return;
@ -164,7 +203,7 @@ export default function ScannerScreen() {
ref={cameraRef}
className="flex-1"
style={{ flex: 1 }}
facing="back"
facing={facing}
onCameraReady={() => setIsCameraReady(true)}
onMountError={(e) => {
console.warn('[Scanner] Camera mount error:', e);
@ -191,6 +230,27 @@ export default function ScannerScreen() {
<CameraOverlay isScanning={isAnalyzing} confidence={liveConfidence} />
{isAnalyzing && (
<View
className="absolute inset-0 z-20 items-center justify-center px-8"
style={{ backgroundColor: 'rgba(20,20,20,0.96)' }}
>
<View className="w-full max-w-[320px] items-center gap-4">
<Skeleton width={120} height={120} borderRadius={24} />
<Skeleton width="80%" height={24} borderRadius={8} />
<Skeleton width="60%" height={16} borderRadius={6} />
</View>
<View className="mt-8 items-center gap-1.5">
<Text className="text-white text-lg font-bold">
{t('scanner.analyzingTitle')}
</Text>
<Text className="text-white/70 text-sm">
{t('scanner.analyzingSubtitle')}
</Text>
</View>
</View>
)}
<View className="absolute bottom-0 left-0 right-0 flex-row items-center justify-between px-8 pb-12 pt-5">
<View
className="h-11 w-11 items-center justify-center rounded-lg"
@ -224,7 +284,14 @@ export default function ScannerScreen() {
<TouchableOpacity
className="h-11 w-11 items-center justify-center rounded-full"
style={{ backgroundColor: 'rgba(0,0,0,0.3)' }}
style={{
backgroundColor: 'rgba(0,0,0,0.3)',
opacity: isAnalyzing ? 0.4 : 1,
}}
onPress={handleToggleFacing}
disabled={isAnalyzing}
accessibilityLabel={t('scanner.flipCamera')}
activeOpacity={0.7}
>
<Ionicons name="camera-reverse-outline" size={24} color={colors.surface} />
</TouchableOpacity>

View file

@ -1,7 +1,9 @@
import { manipulateAsync, SaveFormat } from 'expo-image-manipulator';
import * as jpeg from 'jpeg-js';
export const MODEL_INPUT_SIZE = 224;
// Le modèle Python a été entraîné en 256×256 (MobileNetV2).
// Toute modification doit rester synchronisée avec l'export TFLite.
export const MODEL_INPUT_SIZE = 256;
export async function preprocessImage(uri: string): Promise<Float32Array> {
const resized = await manipulateAsync(

View file

@ -1,47 +1,134 @@
/**
* MOCK TFLite Service
* TFLite Service VinEye
*
* Ce service retourne actuellement des résultats simulés (random pondéré).
* Les libs `react-native-fast-tflite` et `react-native-nitro-modules` ont été
* désinstallées temporairement car :
* - Le modèle ML n'est pas encore exporté en .tflite final (précision insuffisante)
* - Les builds Android C++ (CMake/Ninja + Nitro headers) étaient instables sur Windows
* Inférence réelle via react-native-fast-tflite.
* Modèle : assets/models/grapevine_v1.tflite (MobileNetV2, 4 classes, 256×256)
*
* L'interface publique reste identique :
* - `loadModel(): Promise<boolean>` retourne false (pas de modèle chargé)
* - `runInference(imageUri?: string): Promise<Detection>` renvoie un mock pondéré
* Limitations connues :
* - Le modèle a une accuracy de validation faible (~25 %, overfitting connu)
* - Pas de classe "not_vine" : tout sera classifié dans une des 4 classes
* - Voir docs/audit_report.md pour le diagnostic complet
*
* RÉINTÉGRATION DU VRAI MODÈLE (quand le .tflite sera prêt) :
* 1. pnpm add react-native-fast-tflite react-native-nitro-modules
* 2. Vérifier que `src/assets/models/grapevine_v1.tflite` est présent
* 3. Remplacer `runInference` ci-dessous par l'implémentation native :
* const tflite = require('react-native-fast-tflite');
* const asset = require('@/assets/models/grapevine_v1.tflite');
* const model = await tflite.loadTensorflowModel(asset);
* const input = await preprocessImage(imageUri); // depuis services/ml/preprocessing
* const outputs = model.runSync([input]);
* // ... softmax/argmax → buildDetection
* 4. pnpm dlx expo prebuild --clean
* 5. pnpm dlx expo run:android (ou EAS Build pour éviter les soucis CMake Windows)
*
* Documentation : https://github.com/mrousavy/react-native-fast-tflite
* En cas d'échec de chargement, fallback sur mockDetection() pour ne pas casser l'UX.
*/
import type { Detection, DiseaseClass, ClassProbability } from '@/types/detection';
import type {
Detection,
DiseaseClass,
ClassProbability,
} from '@/types/detection';
import {
ML_CLASSES,
CLASS_TO_SLUG,
CONFIDENCE_THRESHOLD_VINE,
CONFIDENCE_THRESHOLD_UNCERTAIN,
} from '@/services/ml/classes';
import { argmax } from '@/services/ml/preprocessing';
import {
preprocessImage,
argmax,
softmax,
} from '@/services/ml/preprocessing';
type FastTfliteModel = {
runSync: (
inputs: (Float32Array | Int32Array | Uint8Array)[],
) => (Float32Array | Int32Array | Uint8Array)[];
};
let cachedModel: FastTfliteModel | null = null;
let modelLoadFailed = false;
/**
 * Lazily loads the TFLite model and caches the instance after the first
 * successful load. A failed attempt latches `modelLoadFailed` so later
 * calls return null immediately instead of retrying a load that is known
 * to fail; callers then fall back to the mock detection path.
 */
async function getModel(): Promise<FastTfliteModel | null> {
if (cachedModel) return cachedModel;
// A previous load already failed — don't retry on every inference.
if (modelLoadFailed) return null;
try {
console.log('[TFLite] Loading model...');
const start = Date.now();
// Dynamic require so the app doesn't crash when the lib isn't installed.
// RELATIVE path (not '@/') because runtime require can't resolve TS aliases.
const tflite = require('react-native-fast-tflite');
const asset = require('../../assets/models/grapevine_v1.tflite');
const loaded: FastTfliteModel = await tflite.loadTensorflowModel(asset);
cachedModel = loaded;
console.log(`[TFLite] Model loaded in ${Date.now() - start}ms`);
return loaded;
} catch (err) {
console.error('[TFLite] Failed to load model:', err);
modelLoadFailed = true;
return null;
}
}
export async function loadModel(): Promise<boolean> {
return false;
const m = await getModel();
return m !== null;
}
export async function runInference(imageUri?: string): Promise<Detection> {
return mockDetection(Date.now(), imageUri);
const timestamp = Date.now();
// Pas d'image fournie → mock (utile pour le dev sans capture)
if (!imageUri) {
return mockDetection(timestamp);
}
const model = await getModel();
if (!model) {
console.warn('[TFLite] Model unavailable, falling back to mock');
return mockDetection(timestamp, imageUri);
}
try {
const t0 = Date.now();
const input = await preprocessImage(imageUri);
const t1 = Date.now();
console.log(`[TFLite] Preprocess: ${t1 - t0}ms`);
const outputs = model.runSync([input]);
const t2 = Date.now();
console.log(
`[TFLite] Inference: ${t2 - t1}ms (total: ${t2 - t0}ms)`,
);
if (t2 - t0 > 500) {
console.warn(`[TFLite] Slow inference: ${t2 - t0}ms`);
}
const raw = outputs[0];
const rawArr =
raw instanceof Float32Array
? Array.from(raw)
: Array.from(raw as ArrayLike<number>);
const probs = isProbabilityVector(rawArr) ? rawArr : softmax(rawArr);
const idx = argmax(probs);
const topClass = ML_CLASSES[idx];
const topProb = probs[idx];
const allProbabilities: ClassProbability[] = ML_CLASSES.map((cls, i) => ({
class: cls,
probability: probs[i],
}));
return buildDetection({
timestamp,
imageUri,
topClass,
topProb,
allProbabilities,
});
} catch (err) {
console.error('[TFLite] Inference failed:', err);
return mockDetection(timestamp, imageUri);
}
}
/**
 * Heuristic: does `values` already look like a softmax output?
 * True when every entry lies in [0, 1] and the entries sum to 1 within a
 * 0.05 tolerance; an empty vector never qualifies. Used to decide whether
 * the model output needs an explicit softmax pass.
 */
function isProbabilityVector(values: number[]): boolean {
  if (values.length === 0) return false;
  let total = 0;
  for (const v of values) {
    if (v < 0 || v > 1) return false;
    total += v;
  }
  return Math.abs(total - 1) <= 0.05;
}
function buildDetection(args: {
@ -52,7 +139,7 @@ function buildDetection(args: {
allProbabilities: ClassProbability[];
}): Detection {
const { timestamp, imageUri, topClass, topProb, allProbabilities } = args;
const confidence = Math.round(topProb * 100);
const confidence = Math.round(topProb * 100); // convention projet : 0-100
const result =
topProb >= CONFIDENCE_THRESHOLD_VINE