Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion doc/classes/CameraFeed.xml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
<description>
A camera feed gives you access to a single physical camera attached to your device. When enabled, Godot will start capturing frames from the camera which can then be used. See also [CameraServer].
[b]Note:[/b] Many cameras will return YCbCr images which are split into two textures and need to be combined in a shader. Godot does this automatically for you if you set the environment to show the camera image in the background.
[b]Note:[/b] This class is currently only implemented on Linux, Android, macOS, and iOS. On other platforms no [CameraFeed]s will be available. To get a [CameraFeed] on iOS, the camera plugin from [url=https://github.com/godotengine/godot-ios-plugins]godot-ios-plugins[/url] is required.
[b]Note:[/b] This class is currently only implemented on Linux, Android, macOS, and iOS. On other platforms no [CameraFeed]s will be available. To get a [CameraFeed] on iOS, enable [member EditorExportPlatformIOS.modules/camera].
</description>
<tutorials>
</tutorials>
Expand Down
11 changes: 11 additions & 0 deletions drivers/apple_embedded/os_apple_embedded.mm
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
#include "drivers/sdl/joypad_sdl.h"
#endif
#include "main/main.h"
#include "servers/camera/camera_server.h"

#import <AVFoundation/AVFAudio.h>
#import <AudioToolbox/AudioServices.h>
Expand Down Expand Up @@ -805,6 +806,11 @@ Rect2 fit_keep_aspect_covered(const Vector2 &p_container, const Vector2 &p_rect)
void OS_AppleEmbedded::on_enter_background() {
// Do not check for is_focused, because on_focus_out will always be fired first by applicationWillResignActive.

CameraServer *camera_server = CameraServer::get_singleton();
if (camera_server) {
camera_server->handle_application_pause();
}

if (OS::get_singleton()->get_main_loop()) {
OS::get_singleton()->get_main_loop()->notification(MainLoop::NOTIFICATION_APPLICATION_PAUSED);
}
Expand All @@ -819,6 +825,11 @@ Rect2 fit_keep_aspect_covered(const Vector2 &p_container, const Vector2 &p_rect)
if (OS::get_singleton()->get_main_loop()) {
OS::get_singleton()->get_main_loop()->notification(MainLoop::NOTIFICATION_APPLICATION_RESUMED);
}

CameraServer *camera_server = CameraServer::get_singleton();
if (camera_server) {
camera_server->handle_application_resume();
}
}
}

Expand Down
3 changes: 3 additions & 0 deletions modules/camera/camera_apple.h
Original file line number Diff line number Diff line change
Expand Up @@ -43,4 +43,7 @@ class CameraApple : public CameraServer {

void update_feeds();
void set_monitoring_feeds(bool p_monitoring_feeds) override;
void handle_display_rotation_change(int p_orientation) override;
void handle_application_pause() override;
void handle_application_resume() override;
};
162 changes: 137 additions & 25 deletions modules/camera/camera_apple.mm
Original file line number Diff line number Diff line change
Expand Up @@ -95,11 +95,27 @@ - (id)initForFeed:(Ref<CameraFeed>)p_feed andDevice:(AVCaptureDevice *)p_device
[self commitConfiguration];
return nil;
}
if (![self canAddInput:input]) {
print_line("Couldn't add input to capture session");
input = nullptr;
[self commitConfiguration];
return nil;
}
[self addInput:input];

output = [AVCaptureVideoDataOutput new];
if (!output) {
print_line("Couldn't get output device for camera");
[self removeInput:input];
input = nullptr;
[self commitConfiguration];
return nil;
}
if (![self canAddOutput:output]) {
print_line("Couldn't add output to capture session");
[self removeInput:input];
input = nullptr;
output = nullptr;
[self commitConfiguration];
return nil;
}
Expand Down Expand Up @@ -193,6 +209,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
// do Y
size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
size_t row_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);

if ((width[0] != new_width) || (height[0] != new_height)) {
width[0] = new_width;
Expand All @@ -201,7 +218,15 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
}

uint8_t *w = img_data[0].ptrw();
memcpy(w, dataY, new_width * new_height);
if (new_width == row_stride) {
memcpy(w, dataY, new_width * new_height);
} else {
for (size_t i = 0; i < new_height; i++) {
memcpy(w, dataY, new_width);
w += new_width;
dataY += row_stride;
}
}

img[0].instantiate();
img[0]->set_data(new_width, new_height, 0, Image::FORMAT_R8, img_data[0]);
Expand All @@ -211,6 +236,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
// do CbCr
size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
size_t row_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);

if ((width[1] != new_width) || (height[1] != new_height)) {
width[1] = new_width;
Expand All @@ -219,7 +245,15 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
}

uint8_t *w = img_data[1].ptrw();
memcpy(w, dataCbCr, 2 * new_width * new_height);
if (new_width * 2 == row_stride) {
memcpy(w, dataCbCr, 2 * new_width * new_height);
} else {
for (size_t i = 0; i < new_height; i++) {
memcpy(w, dataCbCr, new_width * 2);
w += new_width * 2;
dataCbCr += row_stride;
}
}

///TODO OpenGL doesn't support FORMAT_RG8, need to do some form of conversion
img[1].instantiate();
Expand All @@ -229,28 +263,6 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
// set our texture...
feed->set_ycbcr_images(img[0], img[1]);

#ifdef IOS_ENABLED
UIInterfaceOrientation orientation = [UIApplication sharedApplication].delegate.window.windowScene.interfaceOrientation;

Transform2D display_transform;
switch (orientation) {
case UIInterfaceOrientationPortrait: {
display_transform = Transform2D(0.0, -1.0, -1.0, 0.0, 1.0, 1.0);
} break;
case UIInterfaceOrientationLandscapeRight: {
display_transform = Transform2D(1.0, 0.0, 0.0, -1.0, 0.0, 1.0);
} break;
case UIInterfaceOrientationLandscapeLeft: {
display_transform = Transform2D(-1.0, 0.0, 0.0, 1.0, 1.0, 0.0);
} break;
default: {
display_transform = Transform2D(0.0, 1.0, 1.0, 0.0, 0.0, 0.0);
} break;
}

feed->set_transform(display_transform);
#endif // IOS_ENABLED

// and unlock
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
Expand All @@ -266,6 +278,8 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
private:
AVCaptureDevice *device;
MyCaptureSession *capture_session;
bool device_locked;
bool was_active_before_pause = false;

public:
AVCaptureDevice *get_device() const;
Expand All @@ -275,6 +289,10 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM

void set_device(AVCaptureDevice *p_device);

void handle_rotation_change(int p_orientation);
void handle_pause();
void handle_resume();

bool activate_feed() override;
void deactivate_feed() override;
};
Expand All @@ -286,7 +304,8 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
CameraFeedApple::CameraFeedApple() {
device = nullptr;
capture_session = nullptr;
transform = Transform2D(1.0, 0.0, 0.0, 1.0, 0.0, 0.0); /* should re-orientate this based on device orientation */
device_locked = false;
transform = Transform2D(1.0, 0.0, 0.0, 1.0, 0.0, 0.0);
}

CameraFeedApple::~CameraFeedApple() {
Expand All @@ -309,6 +328,56 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
};
}

// Recomputes this feed's 2D transform so camera frames appear upright for the
// current UI orientation.
// p_orientation carries a raw UIInterfaceOrientation value:
//   1 = Portrait, 2 = PortraitUpsideDown, 3 = LandscapeLeft, 4 = LandscapeRight.
// Any other value is treated as portrait.
void CameraFeedApple::handle_rotation_change(int p_orientation) {
	int display_rotation;
	if (p_orientation == 2) {
		display_rotation = 180;
	} else if (p_orientation == 3) {
		display_rotation = 270;
	} else if (p_orientation == 4) {
		display_rotation = 90;
	} else {
		// Portrait (1) and unknown orientations: no display rotation.
		display_rotation = 0;
	}

	// The iOS camera sensor is mounted at 90 degrees relative to portrait.
	const int sensor_orientation = 90;
	// Front-facing feeds are mirrored, so the display rotation is applied with
	// the opposite sign for back-facing cameras.
	const int sign = (position == CameraFeed::FEED_FRONT) ? 1 : -1;
	const int image_rotation = (sensor_orientation - display_rotation * sign + 360) % 360;

	transform = Transform2D().rotated(Math::deg_to_rad((float)image_rotation));
}

// Called when the application is sent to the background: records whether this
// feed was streaming (a capture session existed) and, if so, shuts it down so
// it can be restored by handle_resume().
void CameraFeedApple::handle_pause() {
	was_active_before_pause = (capture_session != nullptr);
	if (was_active_before_pause) {
		deactivate_feed();
	}
}

// Called when the application returns to the foreground: restarts the feed
// only if handle_pause() had stopped an active session.
void CameraFeedApple::handle_resume() {
	if (!was_active_before_pause) {
		return;
	}
	activate_feed();
	was_active_before_pause = false;
}

bool CameraFeedApple::activate_feed() {
if (capture_session) {
// Already recording.
Expand Down Expand Up @@ -350,6 +419,10 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
[capture_session cleanup];
capture_session = nullptr;
};
if (device_locked) {
[device unlockForConfiguration];
device_locked = false;
}
}

//////////////////////////////////////////////////////////////////////////
Expand Down Expand Up @@ -463,6 +536,18 @@ - (void)dealloc {
add_feed(newfeed);
};
};

#ifdef IOS_ENABLED
// Update rotation for all feeds.
UIInterfaceOrientation orientation = UIInterfaceOrientationUnknown;
UIWindow *window = [UIApplication sharedApplication].delegate.window;
UIWindowScene *windowScene = window.windowScene;
if (windowScene) {
orientation = windowScene.interfaceOrientation;
}
handle_display_rotation_change((int)orientation);
#endif // IOS_ENABLED

emit_signal(SNAME(CameraServer::feeds_updated_signal_name));
}

Expand All @@ -484,6 +569,33 @@ - (void)dealloc {
}
}

// Propagates a display orientation change to every valid registered feed.
void CameraApple::handle_display_rotation_change(int p_orientation) {
	const int feed_count = feeds.size();
	for (int idx = 0; idx < feed_count; idx++) {
		Ref<CameraFeedApple> apple_feed = (Ref<CameraFeedApple>)feeds[idx];
		if (apple_feed.is_null()) {
			continue;
		}
		apple_feed->handle_rotation_change(p_orientation);
	}
}

// Notifies every valid feed that the application is entering the background
// so each can suspend its capture session.
void CameraApple::handle_application_pause() {
	const int feed_count = feeds.size();
	for (int idx = 0; idx < feed_count; idx++) {
		Ref<CameraFeedApple> apple_feed = (Ref<CameraFeedApple>)feeds[idx];
		if (apple_feed.is_null()) {
			continue;
		}
		apple_feed->handle_pause();
	}
}

// Notifies every valid feed that the application returned to the foreground
// so previously active capture sessions can be restarted.
void CameraApple::handle_application_resume() {
	const int feed_count = feeds.size();
	for (int idx = 0; idx < feed_count; idx++) {
		Ref<CameraFeedApple> apple_feed = (Ref<CameraFeedApple>)feeds[idx];
		if (apple_feed.is_null()) {
			continue;
		}
		apple_feed->handle_resume();
	}
}

#ifdef APPLE_EMBEDDED_ENABLED

void register_camera_external_module() {
Expand Down
Loading