@@ -5,6 +5,8 @@ sidebar_label: Overview
5
5
---
6
6
7
7
import useBaseUrl from '@docusaurus/useBaseUrl';
8
+ import Tabs from '@theme/Tabs';
9
+ import TabItem from '@theme/TabItem';
8
10
9
11
## Overview
10
12
@@ -32,7 +34,7 @@ To achieve **maximum performance**, the `scanQRCodes` function is written in a n
32
34
33
35
Similar to a TurboModule, the Frame Processor Plugin Registry API automatically manages type conversion from JS <-> native. They are converted into the most efficient data-structures, as seen here:
34
36
35
- | JS Type | Objective-C Type | Java Type |
37
+ | JS Type | Objective-C/Swift Type | Java/Kotlin Type |
36
38
|----------------------|-------------------------------|----------------------------|
37
39
| `number` | `NSNumber*` (double) | `Double` |
38
40
| `boolean` | `NSNumber*` (boolean) | `Boolean` |
@@ -45,11 +47,12 @@ Similar to a TurboModule, the Frame Processor Plugin Registry API automatically
45
47
46
48
### Return values
47
49
48
- Return values will automatically be converted to JS values, assuming they are representable in the ["Types" table](#types). So the following Objective-C frame processor:
50
+ Return values will automatically be converted to JS values, assuming they are representable in the ["Types" table](#types). So the following Java Frame Processor Plugin:
49
51
50
- ```objc
51
- static inline id detectObject(Frame* frame, NSArray args) {
52
- return @"cat";
52
+ ```java
53
+ @Override
54
+ public Object callback(ImageProxy image, Object[] params) {
55
+ return "cat";
53
56
}
54
57
```
55
58
@@ -63,15 +66,13 @@ export function detectObject(frame: Frame): string {
63
66
}
64
67
```
65
68
66
- You can also manipulate the buffer and return it (or a copy of it) by using the [`Frame` class](https://github.com/mrousavy/react-native-vision-camera/blob/main/ios/Frame%20Processor/Frame.h):
69
+ You can also manipulate the buffer and return it (or a copy of it) by returning a [`Frame`][2]/[`ImageProxy`][3] instance:
67
70
68
- ```objc
69
- #import <VisionCamera/Frame.h>
70
-
71
- static inline id resize(Frame* frame, NSArray args) {
72
- CMSampleBufferRef resizedBuffer = // ...
73
-
74
- return [[Frame alloc] initWithBuffer:resizedBuffer orientation:frame.orientation];
71
+ ```java
72
+ @Override
73
+ public Object callback(ImageProxy image, Object[] params) {
74
+ ImageProxy resizedImage = new ImageProxy(/* ... */);
75
+ return resizedImage;
75
76
}
76
77
```
77
78
@@ -80,8 +81,10 @@ Which returns a [`Frame`](https://github.com/mrousavy/react-native-vision-camera
80
81
```js
81
82
const frameProcessor = useFrameProcessor((frame) => {
82
83
'worklet';
83
- // by downscaling the frame, the `detectObjects` function runs faster.
84
+ // creates a new `Frame` that's 720x480
84
85
const resizedFrame = resize(frame, 720, 480)
86
+
87
+ // by downscaling the frame, the `detectObjects` function runs faster.
85
88
const objects = detectObjects(resizedFrame)
86
89
_log(objects)
87
90
}, [])
@@ -107,6 +110,34 @@ const frameProcessor = useFrameProcessor((frame) => {
107
110
}, [])
108
111
```
109
112
113
+ ### Exceptions
114
+
115
+ To let the user know that something went wrong you can use Exceptions:
116
+
117
+ ```java
118
+ @Override
119
+ public Object callback(ImageProxy image, Object[] params) {
120
+ if (params[0] instanceof String) {
121
+ // ...
122
+ } else {
123
+ throw new Exception("First argument has to be a string!");
124
+ }
125
+ }
126
+ ```
127
+
128
+ Which will throw a JS-error:
129
+
130
+ ```ts
131
+ const frameProcessor = useFrameProcessor((frame) => {
132
+ 'worklet'
133
+ try {
134
+ const codes = scanCodes(frame, true)
135
+ } catch (e) {
136
+ _log(`Error: ${e.message}`)
137
+ }
138
+ }, [])
139
+ ```
140
+
110
141
## What's possible?
111
142
112
143
You can run any native code you want in a Frame Processor Plugin. Just like in the native iOS and Android Camera APIs, you will receive a frame (`CMSampleBuffer` on iOS, `ImageProxy` on Android) which you can use however you want. In other words; **everything is possible**.
@@ -119,19 +150,18 @@ If your Frame Processor takes longer than a single frame interval to execute, or
119
150
120
151
For example, a realtime video chat application might use WebRTC to send the frames to the server. I/O operations (networking) are asynchronous, and we don't _need_ to wait for the upload to succeed before pushing the next frame, so we copy the frame and perform the upload on another Thread.
121
152
122
- ```objc
123
- static dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0ul);
124
-
125
- static inline id sendFrameToWebRTC(Frame* frame, NSArray args) {
126
- CMSampleBufferRef bufferCopy;
127
- CMSampleBufferCreateCopy(kCFAllocatorDefault, frame.buffer, &bufferCopy);
153
+ ```java
154
+ @Override
155
+ public Object callback(ImageProxy image, Object[] params) {
156
+ String serverURL = (String)params[0];
157
+ ImageProxy imageCopy = new ImageProxy(/* ... */);
128
158
129
- dispatch_async(queue, ^{
130
- NSString* serverURL = (NSString *)args[0];
131
- [WebRTC uploadFrame:bufferCopy toServer:serverURL];
159
+ uploaderQueue.runAsync(() -> {
160
+ WebRTC.uploadImage(imageCopy, serverURL);
161
+ imageCopy.close();
132
162
});
133
163
134
- return nil;
164
+ return null;
135
165
}
136
166
```
137
167
0 commit comments