/*---------------------------------------------------------------------------*
  Project:  Horizon
  File:     Simple.cpp

  Copyright (C)2009-2012 Nintendo Co., Ltd.  All rights reserved.

  These coded instructions, statements, and computer programs contain
  proprietary information of Nintendo of America Inc. and/or Nintendo
  Company Ltd., and are protected by Federal copyright law.  They may not
  be disclosed to third parties or copied or duplicated in any form, in
  whole or in part, without the prior written consent of Nintendo.

  $Rev: 48673 $
 *---------------------------------------------------------------------------*/

#include
#include
#include
#include "demo.h"
#include "applet.h"

// ======================================================================================
// Function Declarations
namespace
{

// ----------------------------------------
// Sample initialization, finalization and input processes
void Initialize(void);
void Finalize(void);
void ProcessUserInput(nn::hid::PadReader* pPadReader);

// ----------------------------------------
// Functions for applet control
void InitializeApplet(void);
void FinalizeApplet(void);

// ----------------------------------------
// Functions for camera thread control
void InitializeCameraThread(void);
void FinalizeCameraThread(void);
void PrepareTransitionCallback(void);
void AfterTransitionCallback(void);

// ----------------------------------------
// Camera/Y2R control functions
void SetupCamera(void);
nn::Result InitializeCameraSetting(void);
void InitializeY2r(void);
void InitializeResource(void);
void FinalizeResource(void);
void CameraThreadFunc(uptr param);
void CameraRecvFunc(void);
void CameraVsyncFunc(void);
void StartCameraCapture(void);
void StopCameraCapture(void);
bool Y2rConversion(void);
void PlaySound(nn::camera::ShutterSoundType soundType);

// ----------------------------------------
// Rendering Functions
void InitializeGx(void);
void FinalizeGx(void);
void DrawFrame(void);
void DrawDisplay0(void);
void DrawDisplay1(void);
void UpdateCameraTexture(void);
void DeleteCameraTexture(void);

}

// ======================================================================================
// Variables for controlling Camera/Y2R
namespace
{

// Dynamic allocation heap
nn::fnd::ExpHeap s_AppHeap;

// ----------------------------------------
// Image size
// ----------------------------------------
// Image to be captured by the camera
const nn::camera::CTR::Size CAPTURE_SIZE = nn::camera::SIZE_DS_LCDx4;
const s32 CAPTURE_WIDTH  = 512;     // Width of DS x 2
const s32 CAPTURE_HEIGHT = 384;     // Height of DS x 2

// If higher resolution has priority over a wider angle of view, set the highest resolution, VGA (640x480).
//
// const nn::camera::CTR::Size CAPTURE_SIZE = nn::camera::SIZE_VGA;
// const s32 CAPTURE_WIDTH  = 640;  // VGA width
// const s32 CAPTURE_HEIGHT = 480;  // VGA height

// An image after trimming
// Must be a multiple of 8 and no greater than 1024.
// To simplify specifying the size of the Y2R conversion result, the horizontal width here matches the texture width.
//
const s32 TRIMMING_WIDTH  = 512;
const s32 TRIMMING_HEIGHT = 240;    // CTR upper screen height

// Image to apply as texture
// For the texture size, both the width and height must be a power of 2.
const s32 TEXTURE_WIDTH  = 512;
const s32 TEXTURE_HEIGHT = 256;

// ----------------------------------------
// Camera/Y2R settings
// ----------------------------------------
// Port and camera that are used
const nn::camera::Port         CAMERA_PORT   = nn::camera::PORT_CAM1;      // Port 1
const nn::camera::CameraSelect SELECT_CAMERA = nn::camera::SELECT_OUT1;    // External camera (R)

// When using the external camera (L)
// const nn::camera::Port         CAMERA_PORT   = nn::camera::PORT_CAM2;   // Port 2
// const nn::camera::CameraSelect SELECT_CAMERA = nn::camera::SELECT_OUT2; // External camera (L)

// When using the internal camera
// const nn::camera::Port         CAMERA_PORT   = nn::camera::PORT_CAM1;   // Port 1
// const nn::camera::CameraSelect SELECT_CAMERA = nn::camera::SELECT_IN1;  // Internal camera

// Alignment of the buffer for transfer after Y2R conversion
// The buffer that receives the data converted to RGB by Y2R must be at a physically contiguous address
// and aligned to at least 4 bytes.
//
// However, when the alignment is less than 64 bytes, the transfer speed may drop.
const s32 ALIGNMENT_SIZE = 64;

// Y2R input format
// The image data input from the camera is in YUV4:2:2 format, which can be processed in batch mode.
//
const nn::y2r::InputFormat Y2R_INPUT_FORMAT = nn::y2r::INPUT_YUV422_BATCH;

// Y2R output format
// This application is set to RGB 24-bit.
// If you want the conversion to complete more quickly, you need to reduce the data size, so another
// option is to set RGB 16-bit (nn::y2r::OUTPUT_RGB_16_565).
//
// If you do this, the texture type must be changed to GL_UNSIGNED_SHORT_5_6_5.
const nn::y2r::OutputFormat Y2R_OUTPUT_FORMAT = nn::y2r::OUTPUT_RGB_24;

// Output buffer ordering
// Since we want to apply the converted image as-is as a texture, set the 8x8 block format, which is
// the sort order that DMP_NATIVE supports.
//
const nn::y2r::BlockAlignment Y2R_BLOCK_ALIGNMENT = nn::y2r::BLOCK_8_BY_8;

// Index of definitions for frame rates that can be set in this sample.
enum FrameRateIndex
{
    FRAME_RATE_INDEX_5,
    FRAME_RATE_INDEX_10,
    FRAME_RATE_INDEX_15,
    FRAME_RATE_INDEX_20,
    FRAME_RATE_INDEX_30,

    FRAME_RATE_INDEX_NUM
};

// Structure related to the frame rate
struct FrameRateInfo
{
    s32                   frameRate;        // Frame rate
    nn::camera::FrameRate settingValue;     // Enumerated value of the frame rate used for settings
    NN_PADDING3;
};

FrameRateInfo s_FrameRateInfo[FRAME_RATE_INDEX_NUM] =
{
    {  5, nn::camera::FRAME_RATE_5  },  //  5 fps
    { 10, nn::camera::FRAME_RATE_10 },  // 10 fps
    { 15, nn::camera::FRAME_RATE_15 },  // 15 fps
    { 20, nn::camera::FRAME_RATE_20 },  // 20 fps
    { 30, nn::camera::FRAME_RATE_30 }   // 30 fps
};

// Index of the frame rate to be set
// Use 15 fps as the default frame rate
int s_FrameRateIndex = FRAME_RATE_INDEX_15;

// -------------------------------------
// Camera/Y2R transfer
// -------------------------------------
// Related to the buffer for transferring YUV data
s32 s_YuvTransferUnit;      // Size of a single transfer
s32 s_YuvBufferSize;        // Total size to transfer (size of one frame)

nn::y2r::StandardCoefficient s_Coefficient;     // The Y2R conversion coefficient applied to the data output by the camera.
const s32 YUV_BUFFER_NUM = 3;                   // Number of buffers for transferring (triple buffering)
u8* s_pYuvBuffer[YUV_BUFFER_NUM] = { NULL };    // Buffers for transferring data

// Buffer indexes
s32 s_YuvBufferIndexCapturing;          // Buffer into which the image captured by the camera is being read
s32 s_YuvBufferIndexLatestCaptured;     // Buffer for which the read from the camera has completed
s32 s_YuvBufferIndexTransmitting;       // Buffer being transferred to the Y2R circuit

nn::os::CriticalSection s_CsYuvSwap;    // For exclusive control of the YUV buffers

// Related to the buffer for transferring RGB data
s32 s_RgbBufferSize;        // Total size to transfer (size of one frame)
u8* s_pRgbBuffer = NULL;    // Buffer for transferring data

// Counter for stabilizing auto exposure
// For the first four frames immediately after the cameras are activated, the obtained images may be extremely dark.
// The number of frames until auto exposure stabilizes, including these four frames, is around 14 frames
// indoors and around 30 frames outdoors.
//
s32 s_FrameCountForStabilize = 0;
const s32 FRAME_NUM_FOR_STABILIZE = 30;

// Counter for frame count display
s32 s_FrameCountForDisplay = 1;

// The thread that processes camera events
nn::os::Thread s_CameraThread;

// Priority of the main thread
s32 s_MainThreadPriority;

// -------------------------------------
// Events
// -------------------------------------
// Notification of completed data transfer from the camera to the buffer
nn::os::Event s_CameraRecvEvent;

// Notification of a buffer error when sending data from the camera to the buffer;
// also signaled by the main thread to wake the camera thread.
nn::os::Event s_CameraBufferErrorEvent;

// Notification of a camera V-blank interrupt during camera capture
nn::os::Event s_CameraVsyncEvent;

// Notification of completion of conversion and transfer by Y2R
nn::os::Event s_Y2rEndEvent;

// Event to wait for the camera thread to enter the wait state.
nn::os::LightEvent s_CameraThreadSleepAckEvent;

// Event to cancel the camera thread wait state.
nn::os::LightEvent s_CameraThreadAwakeEvent;

// -------------------------------------
// Flags
// -------------------------------------
// Flag indicating whether camera initialization has completed.
bool s_IsCameraInitialized = false;

// Flag to put the camera thread into the wait state.
bool s_IsCameraThreadSleep = false;

// Flag used to stop the camera thread
bool s_IsCameraThreadEnd = false;

// Flag indicating that the frame rate is being changed
bool s_SwitchFrameRateFlag = false;

// Flag indicating whether there is an image waiting for Y2R conversion.
bool s_ExistYuvImage = false;

// -------------------------------------
// Rendering variables
// -------------------------------------
// 2D graphics demo framework
demo::RenderSystemDrawing s_RenderSystem;

// Memory size used to allocate graphics in FCRAM
const s32 MEMORY_SIZE_FCRAM_GX = 0x400000;

// Address of the memory allocated for graphics
void* s_AddrForGxHeap = NULL;

// ID of the texture generated from the camera image.
GLuint s_TextureId = 0;

} //Namespace

// ======================================================================================
// Main
// ======================================================================================
extern "C" void nnMain(void)
{
    // Initialization
    Initialize();

    NN_LOG("---------- Camera demo start -----------\n");
    NN_LOG("Up : Up frame rate\n");
    NN_LOG("Down : Down frame rate\n");
    NN_LOG("R : Play shutter sound (photo)\n");
    NN_LOG("A : Play shutter sound (movie start)\n");
    NN_LOG("B : Play shutter sound (movie end)\n");
    NN_LOG("----------------------------------------\n");

    //-----------------------------------------------------
    // Main loop
    nn::hid::PadReader padReader;

    while (1)
    {
        if (s_ExistYuvImage)
        {
            s_ExistYuvImage = false;

            // Perform Y2R conversion only when a transfer-completion event
            // from the camera has been signaled.
            if (Y2rConversion())
            {
                // Update the texture if conversion was successful.
                UpdateCameraTexture();
            }
            else
            {
                // Invalidate the texture if conversion failed.
                DeleteCameraTexture();
            }
        }

        // Update the LCD rendering
        DrawFrame();

        // ----------------------------------------
        // Applet-related processing
        TransitionHandler::Process();
        if (TransitionHandler::IsExitRequired())
        {
            break;  // Exit the main loop if the application ends
        }

        // If camera initialization has completed
        if (s_IsCameraInitialized)
        {
            // Check input from the user
            ProcessUserInput(&padReader);
        }
    } //while

    // Finalization
    Finalize();

    NN_LOG("----------- Camera demo end ------------\n");

    nn::applet::CloseApplication();
} //nnMain()

// ====================================================================================
// Applet control functions
namespace
{

//------------------------------------------------------------
// Function for applet process initialization
//------------------------------------------------------------
void InitializeApplet(void)
{
    TransitionHandler::Initialize();
    TransitionHandler::EnableSleep();
} //InitializeApplet()

//------------------------------------------------------------
// Function for applet process finalization
//------------------------------------------------------------
void FinalizeApplet(void)
{
    TransitionHandler::DisableSleep();
    TransitionHandler::Finalize();
} //FinalizeApplet()

} //Namespace

// ====================================================================================
// Sample initialization, finalization and input processes
namespace
{

//------------------------------------------------------------
// Initialization
//------------------------------------------------------------
void Initialize(void)
{
    InitializeApplet();     // Applet processing

    if (TransitionHandler::IsExitRequired())
    {
        // End the application when the power button is pressed during startup
        FinalizeApplet();
        nn::applet::CloseApplication();
    }

    // Heap memory
    s_AppHeap.Initialize(
        nn::os::GetDeviceMemoryAddress(),   // Start address
        nn::os::GetDeviceMemorySize());     // Memory size

    InitializeGx();         // Rendering

    // Force close when HID initialization fails
    NN_PANIC_IF_FAILED(nn::hid::Initialize());  // HID library

    // Start the thread for camera processing
    InitializeCameraThread();
}

//------------------------------------------------------------
// Finalization
//------------------------------------------------------------
void Finalize(void)
{
    // Finalize the thread for camera processing.
    FinalizeCameraThread();

    nn::hid::Finalize();

    FinalizeGx();

    FinalizeApplet();

    s_AppHeap.Finalize();
}

//------------------------------------------------------------
// User input processing functions
//------------------------------------------------------------
void ProcessUserInput(nn::hid::PadReader *pPadReader)
{
    nn::hid::PadStatus padStatus;
    pPadReader->ReadLatest(&padStatus);     // Get gamepad values

    if (padStatus.trigger & nn::hid::BUTTON_UP)         // +Control Pad up
    {
        // Ignored if a frame rate change is already in progress
        if (!s_SwitchFrameRateFlag)
        {
            // Increase the frame rate
            if (s_FrameRateIndex < FRAME_RATE_INDEX_NUM - 1)
            {
                s_FrameRateIndex++;
                NN_LOG("Change frame rate to %2d...", s_FrameRateInfo[s_FrameRateIndex].frameRate);
                s_SwitchFrameRateFlag = true;
            }
        }
    }
    else if (padStatus.trigger & nn::hid::BUTTON_DOWN)  // +Control Pad down
    {
        // Ignored if a frame rate change is already in progress
        if (!s_SwitchFrameRateFlag)
        {
            // Decrease the frame rate
            if (s_FrameRateIndex > 0)
            {
                s_FrameRateIndex--;
                NN_LOG("Change frame rate to %2d...", s_FrameRateInfo[s_FrameRateIndex].frameRate);
                s_SwitchFrameRateFlag = true;
            }
        }
    }
    else if (padStatus.trigger & nn::hid::BUTTON_R)     // R Button
    {
        // Play the still image (photo) shutter sound / camera light goes out momentarily
        PlaySound(nn::camera::SHUTTER_SOUND_TYPE_NORMAL);
    }
    else if (padStatus.trigger & nn::hid::BUTTON_A)     // A Button
    {
        // Play the movie (video) start sound / camera light starts blinking
        PlaySound(nn::camera::SHUTTER_SOUND_TYPE_MOVIE);
    }
    else if (padStatus.trigger & nn::hid::BUTTON_B)     // B Button
    {
        // Play the movie (video) end sound / camera light stops blinking
        PlaySound(nn::camera::SHUTTER_SOUND_TYPE_MOVIE_END);
    }
    else
    {
        // No input from the pad, so there is nothing to do
    }
} // ProcessUserInput

} //Namespace

// ======================================================================================
// Functions for camera thread control
// ======================================================================================
namespace
{

//------------------------------------------------------------
// Sleep/Preparation before transition. (Called from the main thread.)
//------------------------------------------------------------
void PrepareTransitionCallback(void)
{
    // If the camera thread has not been finalized, have it transition to the wait state.
    if (!s_IsCameraThreadEnd)
    {
        // Notify the camera thread to transition to the wait state (to CameraThreadFunc).
        s_IsCameraThreadSleep = true;

        // Signal so that the camera thread does not block in WaitAny (to CameraThreadFunc)
        s_CameraBufferErrorEvent.Signal();

        // Wait for the camera thread to transition to the wait state (from CameraThreadFunc).
        s_CameraThreadSleepAckEvent.Wait();
    }
}

//------------------------------------------------------------
// Sleep/Recovery after transition. (Called from the main thread.)
//------------------------------------------------------------
void AfterTransitionCallback(void)
{
    // If the camera thread has not been finalized, cancel the wait state.
    // If recovering in order to finalize the application, the camera thread is finalized without being woken here.
    if (!s_IsCameraThreadEnd && !TransitionHandler::IsExitRequired())
    {
        // Signal to start the camera thread (to CameraThreadFunc)
        s_CameraThreadAwakeEvent.Signal();
    }
}

//-----------------------------------------------------
// Start the camera thread.
//-----------------------------------------------------
void InitializeCameraThread(void)
{
    s_CameraThreadSleepAckEvent.Initialize(false);
    s_CameraThreadAwakeEvent.Initialize(false);
    s_CameraBufferErrorEvent.Initialize(false);     // Also used when recovering from the wait state.

    TransitionHandler::SetPrepareSleepCallback(PrepareTransitionCallback);
    TransitionHandler::SetAfterSleepCallback(AfterTransitionCallback);
    TransitionHandler::SetPrepareHomeButtonCallback(PrepareTransitionCallback);
    TransitionHandler::SetAfterHomeButtonCallback(AfterTransitionCallback);

    // Remember the priority of the main thread so that the priority can be changed later.
    s_MainThreadPriority = nn::os::Thread::GetCurrentPriority();

    // Create a thread for camera processing.
    // While initializing, its priority is set lower than the main thread.
    // After initialization has completed, the priority is changed to be higher than the main thread.
    s_CameraThread.StartUsingAutoStack(
        CameraThreadFunc,
        NULL,
        4096,
        s_MainThreadPriority + 3);
}

//-----------------------------------------------------
// Finalization of the camera thread
//-----------------------------------------------------
void FinalizeCameraThread(void)
{
    // Destroy the thread for camera processing
    s_IsCameraThreadEnd = true;             // Set the end flag
    s_CameraThreadAwakeEvent.Signal();      // Signal so that the thread does not stay blocked in the sleep state
    s_CameraBufferErrorEvent.Signal();      // Signal so that the camera thread does not stay blocked in WaitAny
    s_CameraThread.Join();                  // Wait for the thread to end
    s_CameraThread.Finalize();              // Discard the thread

    TransitionHandler::SetPrepareSleepCallback(NULL);
    TransitionHandler::SetAfterSleepCallback(NULL);
    TransitionHandler::SetPrepareHomeButtonCallback(NULL);
    TransitionHandler::SetAfterHomeButtonCallback(NULL);

    s_CameraThreadSleepAckEvent.Finalize();
    s_CameraThreadAwakeEvent.Finalize();
}

} //Namespace

// ======================================================================================
// Camera/Y2R control functions
// ======================================================================================
namespace
{

//-----------------------------------------------------
// Camera initialization
//-----------------------------------------------------
void SetupCamera()
{
    // Until camera capture starts,
    // the following functions must execute in order.
    //   (1) nn::camera::Initialize()   Initializes the camera library
    //   (2) nn::camera::Activate()     Starts the camera to be used
    //   (3) nn::camera::SetReceiving() Starts transfer
    //   (4) nn::camera::StartCapture() Starts capture
    nn::Result result;

    // Camera library initialization
    while (true)
    {
        // Exit if the end flag is set.
        if (s_IsCameraThreadEnd)
        {
            return;
        }

        result = nn::camera::Initialize();
        if (result.IsSuccess())
        {
            break;
        }
        else if (result == nn::camera::ResultFatalError())
        {
            // The camera restart process failed
            NN_PANIC("Camera has broken.\n");
        }
        else if (result == nn::camera::ResultUsingOtherProcess())
        {
            // The camera is being used by another process
            NN_PANIC("Camera is being used by another process.\n");
        }
        else if (result == nn::camera::ResultAlreadyInitialized())
        {
            // Initialization was already done, so nothing in particular is performed
            NN_LOG("Camera is already initialized.\n");
            break;
        }
        else if (result == nn::camera::ResultIsSleeping())
        {
            // If there is a request to enter the wait state, block until it is released.
            if (s_IsCameraThreadSleep)
            {
                s_IsCameraThreadSleep = false;

                // Notify that the camera thread has entered the wait state (to Sleep, Home)
                s_CameraThreadSleepAckEvent.Signal();

                // Wait until the thread recovery signal arrives (from Sleep, Home)
                s_CameraThreadAwakeEvent.Wait();
            }
            else
            {
                // Even if there is no request, block for a period of time to create an interval before retrying.
                nn::os::Thread::Sleep(nn::fnd::TimeSpan::FromMilliSeconds(10));
            }

            // Retry
            NN_LOG("ShellClose: Retry camera initialization\n");
        }
    }

    // Initialize camera settings
    while (true)
    {
        // Exit if the end flag is set.
        if (s_IsCameraThreadEnd)
        {
            nn::camera::Finalize();
            return;
        }

        result = InitializeCameraSetting();
        if (result.IsSuccess())
        {
            break;
        }
        else if (result == nn::camera::ResultFatalError())
        {
            NN_PANIC("Camera has broken.\n");
        }
        else if (result == nn::camera::ResultIsSleeping())
        {
            // If there is a request to enter the wait state, block until it is released.
            if (s_IsCameraThreadSleep)
            {
                s_IsCameraThreadSleep = false;

                // Notify that the camera thread has entered the wait state (to Sleep, Home)
                s_CameraThreadSleepAckEvent.Signal();

                // Wait until the thread recovery signal arrives (from Sleep, Home)
                s_CameraThreadAwakeEvent.Wait();
            }
            else
            {
                // Even if there is no request, block for a period of time to create an interval before retrying.
                nn::os::Thread::Sleep(nn::fnd::TimeSpan::FromMilliSeconds(10));
            }

            // Retry
            NN_LOG("ShellClose: Retry camera setting\n");
        }
    }

    InitializeY2r();        // Y2R library-related initialization
    InitializeResource();   // Resource settings

    // Initialization has completed, so the priority is changed to be higher than the main thread.
    nn::os::Thread::ChangeCurrentPriority(s_MainThreadPriority - 3);

    s_IsCameraInitialized = true;
}

//-----------------------------------------------------
// Camera default settings
//-----------------------------------------------------
nn::Result InitializeCameraSetting(void)
{
    // Set the image size for the camera to capture
    NN_UTIL_RETURN_IF_FAILED(
        nn::camera::SetSize(
            SELECT_CAMERA,              // Camera that is the target of the settings
            CAPTURE_SIZE,               // Resolution of the camera to be set
            nn::camera::CONTEXT_A));    // Context where the settings are reflected

    // Set trimming
    // If trimming is being performed, these are the various trimming-related calls.
    // They must be made before the call to StartCapture().
    //
    nn::camera::SetTrimming(CAMERA_PORT, true);     // Enable trimming

    // Trim the specified size from the center of the captured image
    nn::camera::SetTrimmingParamsCenter(
        CAMERA_PORT,        // Port targeted for the settings
        TRIMMING_WIDTH,     // Width of the image to be trimmed
        TRIMMING_HEIGHT,    // Height of the image to be trimmed
        CAPTURE_WIDTH,      // Width of the camera resolution
        CAPTURE_HEIGHT);    // Height of the camera resolution

    // Set the number of bytes to transfer
    // A number of bytes is set in this demo, but if you want to specify a number of lines instead,
    // you can get the maximum number of lines to send using nn::camera::GetMaxLines() and then set
    // the value with nn::camera::SetTransferLines().
    //
    // However, there are conditions on the number of bytes that can be sent, so read the details in
    // section "6.4.3 Capture Settings" in the Programming Manual: System.
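    //
    // For reference, a sketch of the line-based alternative mentioned above. It is not used by this
    // sample, and the argument order shown for nn::camera::GetMaxLines()/SetTransferLines() is an
    // assumption; check the CAMERA library reference before using it.
    //
    //     s16 transferLines = nn::camera::GetMaxLines(TRIMMING_WIDTH, TRIMMING_HEIGHT);
    //     nn::camera::SetTransferLines(CAMERA_PORT, transferLines, TRIMMING_WIDTH, TRIMMING_HEIGHT);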
    //
    //
    s_YuvTransferUnit = nn::camera::GetMaxBytes(
        TRIMMING_WIDTH,     // Width of the image to transfer (width after trimming when trimmed)
        TRIMMING_HEIGHT);   // Height of the image to transfer (height after trimming when trimmed)

    nn::camera::SetTransferBytes(
        CAMERA_PORT,        // Targeted port
        s_YuvTransferUnit,  // Number of bytes to transfer
        TRIMMING_WIDTH,     // Width of the image to transfer (width after trimming when trimmed)
        TRIMMING_HEIGHT);   // Height of the image to transfer (height after trimming when trimmed)

    // Get the Y2R conversion coefficient suited to the data output by the camera.
    // This value is used when initializing Y2R.
    // It is unnecessary if you are not using Y2R.
    NN_UTIL_RETURN_IF_FAILED(nn::camera::GetSuitableY2rStandardCoefficient(&s_Coefficient));

    // The three settings below (noise elimination, auto exposure, and auto white balance) are all
    // 'true' by default for all cameras, but they are configured explicitly here.
    //
    // Noise filter
    NN_UTIL_RETURN_IF_FAILED(nn::camera::SetNoiseFilter(SELECT_CAMERA, true));          // Enabled

    // Auto exposure feature
    NN_UTIL_RETURN_IF_FAILED(nn::camera::SetAutoExposure(SELECT_CAMERA, true));         // Enabled

    // Auto white balance feature
    NN_UTIL_RETURN_IF_FAILED(nn::camera::SetAutoWhiteBalance(SELECT_CAMERA, true));     // Enabled

    // Start the camera
    // After the camera module has been started by this function, you can begin image capture by
    // calling the StartCapture function.
    //
    NN_UTIL_RETURN_IF_FAILED(nn::camera::Activate(SELECT_CAMERA));

    return nn::ResultSuccess();
} //InitializeCameraSetting()

//------------------------------------------------------------
// Initial settings for the Y2R library
//------------------------------------------------------------
void InitializeY2r(void)
{
    // Initialization of the Y2R library
    if (!nn::y2r::Initialize())
    {
        NN_PANIC("Y2R is being used by another process.\n");
    }

    // Stop the library explicitly because it cannot be configured during a conversion.
    nn::y2r::StopConversion();
    while (nn::y2r::IsBusyConversion())
    {
        // Wait until the conversion ends
        nn::os::Thread::Sleep(nn::fnd::TimeSpan::FromMicroSeconds(100));
    }

    // Event initialization
    s_Y2rEndEvent.Initialize(false);

    // Input format setting
    nn::y2r::SetInputFormat(Y2R_INPUT_FORMAT);

    // Output format setting
    nn::y2r::SetOutputFormat(Y2R_OUTPUT_FORMAT);

    // Set the angle of rotation for the output data
    nn::y2r::SetRotation(nn::y2r::ROTATION_NONE);   // No rotation

    // Set the alpha value for the output data
    // Alpha is not used with RGB 24-bit output, but initialize it explicitly.
    nn::y2r::SetAlpha(0xFF);

    // Set the order of the output buffer data
    nn::y2r::SetBlockAlignment(Y2R_BLOCK_ALIGNMENT);

    // Set whether to receive a notification when the Y2R conversion/transfer is complete.
    nn::y2r::SetTransferEndInterrupt(true);
    nn::y2r::GetTransferEndEvent(&s_Y2rEndEvent);

    // Set the width of one line of input data
    nn::y2r::SetInputLineWidth(TRIMMING_WIDTH);

    // Set the number of vertical lines of input data
    nn::y2r::SetInputLines(TRIMMING_HEIGHT);

    // Set the standard conversion coefficients
    // The camera module installed in the CTR system may change in the future.
    // For this reason, instead of specifying a specific conversion coefficient, we recommend using
    // the GetSuitableY2rStandardCoefficient function to get the conversion coefficient that matches
    // the camera, and using that to set the value.
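    //
    // For illustration only: the discouraged fixed-coefficient form would look roughly like the
    // line below (the enumerator name is an assumption, not taken from this sample).
    //
    //     nn::y2r::SetStandardCoefficient(nn::y2r::COEFFICIENT_ITU_R_BT_601);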
    //
    //
    nn::y2r::SetStandardCoefficient(s_Coefficient);
} //InitializeY2r()

//------------------------------------------------------------
// Allocate the resources needed for camera and Y2R processing
//------------------------------------------------------------
void InitializeResource(void)
{
    // Initialize the critical section
    s_CsYuvSwap.Initialize();

    // Initialize the events to be acquired from the camera library
    s_CameraRecvEvent.Initialize(false);
    s_CameraVsyncEvent.Initialize(false);

    // Event for buffer error notification
    // When camera data transfer fails (when the FIFO overflows),
    // a buffer error notification is issued.
    nn::camera::GetBufferErrorInterruptEvent(&s_CameraBufferErrorEvent, CAMERA_PORT);

    // Event for camera V-blank notification
    // During V-blanks, frame counters are updated and camera settings are changed.
    //
    nn::camera::GetVsyncInterruptEvent(&s_CameraVsyncEvent, CAMERA_PORT);

    // Allocate buffers for transferring camera image data (YUV data).
    s_YuvBufferSize = nn::camera::GetFrameBytes(TRIMMING_WIDTH, TRIMMING_HEIGHT);
    for (s32 i = 0; i < YUV_BUFFER_NUM; i++)
    {
        NN_ASSERT(!s_pYuvBuffer[i]);
        s_pYuvBuffer[i] = static_cast<u8*>(s_AppHeap.Allocate(s_YuvBufferSize, ALIGNMENT_SIZE));
        std::memset(s_pYuvBuffer[i], 0, s_YuvBufferSize);
    }
    s_YuvBufferIndexCapturing      = 0;
    s_YuvBufferIndexLatestCaptured = YUV_BUFFER_NUM - 1;
    s_YuvBufferIndexTransmitting   = YUV_BUFFER_NUM - 1;

    // Allocate a buffer for transferring the Y2R conversion result of the camera image (RGB data).
    s_RgbBufferSize = nn::y2r::GetOutputImageSize(TEXTURE_WIDTH, TEXTURE_HEIGHT, Y2R_OUTPUT_FORMAT);
    NN_ASSERT(!s_pRgbBuffer);
    s_pRgbBuffer = static_cast<u8*>(s_AppHeap.Allocate(s_RgbBufferSize, ALIGNMENT_SIZE));
    std::memset(s_pRgbBuffer, 0, s_RgbBufferSize);
} //InitializeResource()

//------------------------------------------------------------
// Free the resources needed for camera and Y2R processing
//------------------------------------------------------------
void FinalizeResource(void)
{
    // Free heap memory.
    for (s32 i = 0; i < YUV_BUFFER_NUM; i++)
    {
        NN_ASSERT(s_pYuvBuffer[i]);
        s_AppHeap.Free(s_pYuvBuffer[i]);
        s_pYuvBuffer[i] = NULL;
    }

    NN_ASSERT(s_pRgbBuffer);
    s_AppHeap.Free(s_pRgbBuffer);
    s_pRgbBuffer = NULL;

    // Destroy the events obtained from the camera library.
    s_CameraRecvEvent.Finalize();
    s_CameraBufferErrorEvent.Finalize();
    s_CameraVsyncEvent.Finalize();

    // Destroy the critical section
    s_CsYuvSwap.Finalize();
} //FinalizeResource()

//------------------------------------------------------------
// The thread that processes camera events
//------------------------------------------------------------
void CameraThreadFunc(uptr param NN_IS_UNUSED_VAR)
{
    // Camera initialization
    SetupCamera();

    // Exit if a request to finalize the thread arrived during camera initialization.
    if (s_IsCameraThreadEnd)
    {
        return;
    }

    // Start the camera and configure capture for the first time
    StartCameraCapture();

    // ----------------------------------------
    // Processing by event
    enum
    {
        EVENT_RECV,     // Transfer from the camera is complete
        EVENT_ERROR,    // Restart due to a buffer error or malfunction
        EVENT_VSYNC,    // V-blank interrupt

        EVENT_MAX
    };
    nn::os::WaitObject* pEvent[EVENT_MAX];

    // Register each event
    pEvent[EVENT_RECV]  = &s_CameraRecvEvent;
    pEvent[EVENT_ERROR] = &s_CameraBufferErrorEvent;
    pEvent[EVENT_VSYNC] = &s_CameraVsyncEvent;

    while (1)
    {
        // Wait for one of the events registered above
        // The call to SetReceiving() clears the transfer-completion event, so only one of the
        // events that reconfigure the transfer settings is handled at a time.
        //
        //
        if (s_IsCameraThreadEnd)
        {
            NN_LOG("before WaitAny\n");
        }
        s32 eventType = nn::os::WaitObject::WaitAny(pEvent, EVENT_MAX);
        if (s_IsCameraThreadEnd)
        {
            NN_LOG("after WaitAny\n");
        }

        // ----------------------------------------
        // End the thread
        if (s_IsCameraThreadEnd)
        {
            StopCameraCapture();
            break;
        }

        // ----------------------------------------
        // Thread waits
        if (s_IsCameraThreadSleep)
        {
            s_IsCameraThreadSleep = false;

            // Stop camera capture
            StopCameraCapture();

            // Notify that the camera thread has entered the sleep wait state (to Sleep, Home)
            s_CameraThreadSleepAckEvent.Signal();

            // Wait until the thread sleep recovery signal arrives (from Sleep, Home)
            NN_LOG("-Sleep camera thread.\n");
            s_CameraThreadAwakeEvent.Wait();
            NN_LOG("-Awake camera thread.\n");

            // ----------------------------------------
            // Processing after sleep recovery

            // Cleared once because a Vsync event may have been signaled
            s_CameraVsyncEvent.ClearSignal();

            // Reset the counter for waiting for auto exposure stabilization
            s_FrameCountForStabilize = 0;

            // Use the buffer error mechanism to resume capture.
            s_CameraBufferErrorEvent.Signal();

            continue;
        }

        switch (eventType)
        {
        case EVENT_RECV:    // Transfer from the camera is complete
            {
                // Set up the transfer for the next frame
                CameraRecvFunc();

                // Request Y2R conversion
                s_ExistYuvImage = true;
            }
            break;

        case EVENT_ERROR:   // Restart due to a buffer error or malfunction. Restart after sleep/transition.
            {
                // Capture stops when there is an error, so reconfigure the transfer settings and resume capture.
                //
                StartCameraCapture();
            }
            break;

        case EVENT_VSYNC:   // V-blank interrupt
            {
                // Regardless of the frame rate, change the camera settings when nothing is being
                // transferred and a V-blank interrupt is generated.
                //
                CameraVsyncFunc();
            }
            break;

        default:
            {
                NN_LOG("Illegal event\n");
            }
            break;
        } //Switch
    } //while

    {
        // Finalization of CAMERA/Y2R
        // If finalization is not performed with the following procedure, sound noise may be
        // generated in the HOME Menu.
        //
        nn::y2r::StopConversion();                      // (1) Stop Y2R conversion
        nn::camera::StopCapture(CAMERA_PORT);           // (2) Stop capture
        nn::camera::Activate(nn::camera::SELECT_NONE);  // (3) Set all cameras to the standby state
        nn::camera::Finalize();                         // (4) End camera
        nn::y2r::Finalize();                            // End Y2R
    }

    {
        // Free the resources used for CAMERA/Y2R
        FinalizeResource();
    }
} //CameraThreadFunc()

//------------------------------------------------------------
// Main data transfer processing
//------------------------------------------------------------
void CameraRecvFunc(void)
{
    {
        // Switch the write buffer
        // Block so that this does not overlap with switching of the read buffer (Y2rConversion).
        //
        nn::os::CriticalSection::ScopedLock sl(s_CsYuvSwap);

        s_YuvBufferIndexLatestCaptured = s_YuvBufferIndexCapturing;
        do
        {
            // Switch the ring buffer index
            if ((++s_YuvBufferIndexCapturing) >= YUV_BUFFER_NUM)
            {
                s_YuvBufferIndexCapturing = 0;
            }
        } while (s_YuvBufferIndexCapturing == s_YuvBufferIndexTransmitting);
    }

    // Set up the transfer for the next frame
    nn::camera::SetReceiving(
        &s_CameraRecvEvent,                         // Event signaled when the transfer completes
        s_pYuvBuffer[s_YuvBufferIndexCapturing],    // Transfer destination of the captured image data
        CAMERA_PORT,                                // Target port for capture
        s_YuvBufferSize,                            // Size of one frame (used to determine transfer completion)
        s_YuvTransferUnit);                         // Size of a single transfer
} //CameraRecvFunc()

void CameraVsyncFunc(void)
{
    if (s_FrameCountForStabilize < FRAME_NUM_FOR_STABILIZE)
    {
        // Counter for waiting for auto exposure stabilization
        s_FrameCountForStabilize++;
    }
    s_FrameCountForDisplay++;   // Increase the frame count for display

    // Change the frame rate
    if (s_SwitchFrameRateFlag)
    {
        // Stop camera capture
        // Capture operations must be stopped before camera settings are changed.
        StopCameraCapture();

        // Change the frame rate
        nn::Result result = nn::camera::SetFrameRate(
            SELECT_CAMERA,
            s_FrameRateInfo[s_FrameRateIndex].settingValue);

        if (result.IsSuccess())
        {
            // Clear the frame rate change flag
            s_SwitchFrameRateFlag = false;
            NN_LOG("Success!\n");
        }
        else if (result == nn::camera::ResultFatalError())
        {
            NN_DBG_PRINT_RESULT(result);
            NN_PANIC("Error:Camera has broken.\n");
        }
        else
        {
            // For any other error, do nothing in particular and try again when the next V-blank event occurs.
            //
            NN_LOG("Retry to change frame rate.\n");
        }

        // Start camera capture
        StartCameraCapture();
    } // if (s_SwitchFrameRateFlag)
}

//------------------------------------------------------------
// Start camera capture
//------------------------------------------------------------
void StartCameraCapture(void)
{
    // Clear the buffer and error flag
    nn::camera::ClearBuffer(CAMERA_PORT);

    // Data transfer processing
    CameraRecvFunc();

    // Start capture
    nn::camera::StartCapture(CAMERA_PORT);
} // StartCameraCapture()

//------------------------------------------------------------
// Stop camera capture
//------------------------------------------------------------
void StopCameraCapture(void)
{
    // Stop capture
    nn::camera::StopCapture(CAMERA_PORT);

    // Calculate the time (in milliseconds) for one frame from the configured frame rate
    s32 timeout = 1000 / s_FrameRateInfo[s_FrameRateIndex].frameRate;
    s32 cnt = 0;
    while (nn::camera::IsBusy(CAMERA_PORT))
    {
        // Depending on the timing when the system is closed, IsBusy() can remain 'true'
        // indefinitely. If that happened, this loop would never exit and the transition to
        // sleep could not take place, so a timeout is used to avoid an infinite loop.
        // Normally, IsBusy() is 'true' for at most one frame, so the timeout is set a little
        // generously, at one frame.
        //
        nn::os::Thread::Sleep(nn::fnd::TimeSpan::FromMilliSeconds(1));
        if (++cnt > timeout)
        {
            NN_LOG("Busy timeout\n");
            break;
        }
    }

    // Clears the buffer, error flag
    nn::camera::ClearBuffer(CAMERA_PORT);
} // StopCameraCapture()

//------------------------------------------------------------
// Transfer and convert to Y2R
//------------------------------------------------------------
bool Y2rConversion(void)
{
    // *** BEGIN WARNING ***
    // Due to a hardware bug, when the camera and Y2R are being used at the same time, there is a
    // possibility that the recovery from a camera buffer error could cause Y2R transfers to hang,
    // depending on the timing of that recovery.
    //
    // In this case, the conversion completion event obtained by nn::y2r::GetTransferEndEvent might
    // never be signaled.
    //
    // For details on when this problem occurs and how to deal with it, see the Function Reference
    // Manual for the Y2R library.
    //
    // In this sample, a timeout is inserted in the wait for the above event, and when a timeout
    // occurs, a retry is performed.
    //
    // *** END WARNING ***

    {
        // Switch the camera read buffer.
        // Block so that this does not overlap with the process of switching the write buffer (CameraRecvFunc).
        nn::os::CriticalSection::ScopedLock sl(s_CsYuvSwap);

        // Set the buffer that has completed acquisition from the camera as the conversion target
        s_YuvBufferIndexTransmitting = s_YuvBufferIndexLatestCaptured;
    }

    // Offset so that the trimmed image is applied in the center of the texture
    s32 rgbBufferOffset;
    s32 centeringImageHeight = (TEXTURE_HEIGHT - TRIMMING_HEIGHT) / 2;
    rgbBufferOffset = nn::y2r::GetOutputImageSize(TEXTURE_WIDTH, centeringImageHeight, Y2R_OUTPUT_FORMAT);

    s32 cnt = 0;
    while (true)
    {
        // Transfer settings for the Y2R input image (YUV)
        // Writes YUV data to Y2R. Transfers are measured in lines of data.
        nn::y2r::SetSendingYuv(
            s_pYuvBuffer[s_YuvBufferIndexTransmitting],     // Transfer data
            s_YuvBufferSize,                                // Total transfer size
            nn::camera::GetLineBytes(TRIMMING_WIDTH));      // Transfer size for one time (1 line)

        // Transfer setting for the output image (RGB) from Y2R
        // Load RGB data from Y2R. For the per-time transfer size, in order to boost performance we
        // recommend specifying a size of 8 lines' worth.
        //
        s16 yuvTransferUnit = TRIMMING_WIDTH * 8 * GetOutputFormatBytes(Y2R_OUTPUT_FORMAT);
        nn::y2r::SetReceiving(
            s_pRgbBuffer + rgbBufferOffset,     // Load destination for the converted RGB data
            s_RgbBufferSize,                    // Total transfer size
            yuvTransferUnit);                   // The size of a single transfer.

        // Start Y2R conversion
        nn::y2r::StartConversion();

        // Standby for notification of the Y2R conversion completion event
        // Waits for Y2R conversion completion because data transfer is aborted if the transfer for
        // the next conversion is performed before conversion of the first image has completed.
        //
        // To deal with a bug that causes Y2R to hang, a timeout is inserted that is longer than the
        // time for the conversion to complete.
        //
        // See the y2r function reference for the approximate time it takes for conversion.
        if (s_Y2rEndEvent.Wait(nn::fnd::TimeSpan::FromMilliSeconds(10)))
        {
            // Conversion succeeded
            return true;
        }
        else
        {
            // Conversion failed
            NN_LOG("Y2R may have hung up.\n");
            nn::y2r::StopConversion();      // Force conversion to end

            // Although it is very rare for problems to occur over and over, a process is inserted
            // to explicitly exit the loop.
            //
            if (++cnt >= 2)
            {
                // Destroy the conversion at this time
                return false;
            }

            // Timed out, so retry.
        }
    } //while(true)
} //Y2rConversion()

//------------------------------------------------------------
// Control of camera operation sound playback and the camera light
//------------------------------------------------------------
void PlaySound(nn::camera::ShutterSoundType soundType)
{
    nn::Result result = nn::camera::PlayShutterSound(soundType);

    if (result.IsSuccess())
    {
        switch (soundType)
        {
        case nn::camera::SHUTTER_SOUND_TYPE_NORMAL:
            {
                NN_LOG("Play shutter sound.\n");
            }
            break;
        case nn::camera::SHUTTER_SOUND_TYPE_MOVIE:
            {
                NN_LOG("Play movie start sound.\n");
            }
            break;
        case nn::camera::SHUTTER_SOUND_TYPE_MOVIE_END:
            {
                NN_LOG("Play movie end sound.\n");
            }
            break;
        default:
            break;
        }
    }
    else if (result == nn::camera::CTR::ResultFatalError())
    {
        NN_PANIC("Camera has broken.\n");
    }
    else if (result == nn::camera::CTR::ResultIsSleeping())
    {
        // Because this is called from a thread that returns ACCEPT to a sleep request, this error
        // does not actually get returned here.
        //
    }
    else
    {
        // Unexpected errors
        NN_DBG_PRINT_RESULT(result);
        NN_PANIC("Unknown error.\n");
    }
} // PlaySound

} //Namespace

// ====================================================================================
// Rendering Functions
namespace
{

//------------------------------------------------------------
// Rendering initialization function
//------------------------------------------------------------
void InitializeGx(void)
{
    NN_ASSERT(!s_AddrForGxHeap);
    s_AddrForGxHeap = s_AppHeap.Allocate(MEMORY_SIZE_FCRAM_GX);

    s_RenderSystem.Initialize(reinterpret_cast<uptr>(s_AddrForGxHeap), MEMORY_SIZE_FCRAM_GX);
} // InitializeGx()

//------------------------------------------------------------
// Rendering end function
//------------------------------------------------------------
void FinalizeGx(void)
{
    s_RenderSystem.Finalize();

    NN_ASSERT(s_AddrForGxHeap);
    s_AppHeap.Free(s_AddrForGxHeap);
    s_AddrForGxHeap = NULL;
} // FinalizeGx()

//------------------------------------------------------------
// Function to render the upper and lower screens
//------------------------------------------------------------
void DrawFrame(void)
{
    // Update the upper screen
    DrawDisplay0();

    // Update the lower screen
    DrawDisplay1();

    s_RenderSystem.WaitVsync(NN_GX_DISPLAY_BOTH);
} // DrawFrame

//------------------------------------------------------------
// Update the render content for the upper screen
//------------------------------------------------------------
void DrawDisplay0(void)
{
    // Make the upper screen the render target
    s_RenderSystem.SetRenderTarget(NN_GX_DISPLAY0);

    // Make the background black
    s_RenderSystem.SetClearColor(NN_GX_DISPLAY0, 0.0f, 0.0f, 0.0f, 0.0f);

    // Clear the previous rendering
    s_RenderSystem.Clear();

    // Render if there is a camera image texture.
    if (s_TextureId != 0)
    {
        // ----------------------------------------
        // Render the camera image texture
        f32 windowCoordinateX = (nn::gx::DISPLAY0_HEIGHT - TEXTURE_WIDTH)  / 2.0f;
        f32 windowCoordinateY = (nn::gx::DISPLAY0_WIDTH  - TEXTURE_HEIGHT) / 2.0f;

        s_RenderSystem.FillTexturedRectangle(
            s_TextureId,                            // Texture ID
            windowCoordinateX, windowCoordinateY,   // Coordinates (x,y) of the upper-left vertex of the rectangle in the window coordinate system
            TEXTURE_WIDTH, TEXTURE_HEIGHT,          // Lengths (x,y) of the rectangle sides in the window coordinate system
            TEXTURE_WIDTH, TEXTURE_HEIGHT,          // Image size (x,y)
            TEXTURE_WIDTH, TEXTURE_HEIGHT);         // Texture size (x,y)
    }

    const char* pMessage = NULL;
    u32 numOfChar = 0;

    if (!s_IsCameraInitialized)
    {
        // Display a message until camera initialization has completed.
        const char message[] = "Initializing camera.";
        pMessage  = message;
        numOfChar = sizeof(message) / sizeof(message[0]);
    }
    else if (s_FrameCountForStabilize < FRAME_NUM_FOR_STABILIZE)
    {
        // Display a message until the camera auto exposure stabilizes
        const char message[] = "Stabilizing auto exposure.";
        pMessage  = message;
        numOfChar = sizeof(message) / sizeof(message[0]);
    }

    // Display if there is a message to display.
    if (pMessage)
    {
        f32 fontSize = 8.0f;
        f32 textX = (nn::gx::DISPLAY0_HEIGHT - (fontSize * numOfChar)) / 2.0f;
        f32 textY = nn::gx::DISPLAY0_WIDTH / 2.0f;

        s_RenderSystem.SetFontSize(fontSize);

        s_RenderSystem.SetColor(1.0f, 0.0f, 0.0f);  // Set the character border color to red
        for (s32 i = -1; i <= 1; i++)
        {
            for (s32 j = -1; j <= 1; j++)
            {
                if (!((i == 0) && (j == 0)))
                {
                    s_RenderSystem.DrawText(textX + i, textY + j, pMessage);
                }
            }
        }

        s_RenderSystem.SetColor(1.0f, 1.0f, 1.0f);  // Set the font color to white
        s_RenderSystem.DrawText(textX, textY, pMessage);
    }

    s_RenderSystem.SwapBuffers();
} // DrawDisplay0

//------------------------------------------------------------
// Update the render content for the lower screen
//------------------------------------------------------------
void DrawDisplay1(void)
{
    // Make the lower screen the render target
    s_RenderSystem.SetRenderTarget(NN_GX_DISPLAY1);

    // Make the background black
    s_RenderSystem.SetClearColor(NN_GX_DISPLAY1, 0.0f, 0.0f, 0.0f, 0.0f);

    // Clear the previous rendering
    s_RenderSystem.Clear();

    // Set the font color to white
    s_RenderSystem.SetColor(1.0f, 1.0f, 1.0f);

    // ----------------------------------------
    // Display all information
    f32 fontSize = 8.0f;
    f32 margin = fontSize / 2;
    f32 textY = margin;
    f32 textX = margin;

    s_RenderSystem.SetFontSize(fontSize);

    // Count of the number of frames
    s_RenderSystem.DrawText(textX, textY, "Counts: %d", s_FrameCountForDisplay);

    // Render the configured frame rate
    textX = nn::gx::DISPLAY1_HEIGHT - margin - (fontSize * 6);  // 6 characters' worth
    s_RenderSystem.DrawText(textX, textY, "%2d fps", s_FrameRateInfo[s_FrameRateIndex].frameRate);

    // How to use the demo
    textX = margin;
    textY = nn::gx::DISPLAY1_WIDTH - (fontSize + margin);
    s_RenderSystem.DrawText(textX, textY, "B : Play shutter sound (Movie end)");
    textY -= fontSize;
    s_RenderSystem.DrawText(textX, textY, "A : Play shutter sound (Movie start)");
    textY -= fontSize;
    s_RenderSystem.DrawText(textX, textY, "R : Play shutter sound (Photo)");
    textY -= 2 * fontSize;
    s_RenderSystem.DrawText(textX, textY, "Down : Down frame rate");
    textY -= fontSize;
    s_RenderSystem.DrawText(textX, textY, "Up : Up frame rate");

    s_RenderSystem.SwapBuffers();
} // DrawDisplay1()

void UpdateCameraTexture(void)
{
    // ----------------------------------------
    // Create a texture from the camera image

    // Destroy the old texture.
    DeleteCameraTexture();

    // Texture target settings
    GLenum textureTarget = GL_TEXTURE_2D | NN_GX_MEM_FCRAM | GL_NO_COPY_FCRAM_DMP;

    // Texture format/type settings
    // There are restrictions on the texture format and type combinations.
    // Also, the internal format of the texture must be the same as the texture format.
    // For details, see the section on textures in the Programming Manual: Basic Graphics.
    //
    GLenum textureFormat         = GL_RGB_NATIVE_DMP;   // Format
    GLenum textureInternalFormat = textureFormat;       // Internal format

    // If the Y2R output format is OUTPUT_RGB_16_565, then the texture type must be GL_UNSIGNED_SHORT_5_6_5.
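    //
    // For example, if Y2R_OUTPUT_FORMAT were changed to nn::y2r::OUTPUT_RGB_16_565, the matching
    // type would be (a sketch only, not used by this sample):
    //
    //     GLenum textureType = GL_UNSIGNED_SHORT_5_6_5;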
    //
    GLenum textureType = GL_UNSIGNED_BYTE;              // Type

    // Create the texture
    s_RenderSystem.GenerateTexture(
        textureTarget,          // Texture target
        textureInternalFormat,  // Internal format of the texture
        TEXTURE_WIDTH,          // Texture width
        TEXTURE_HEIGHT,         // Texture height
        textureFormat,          // Texture format
        textureType,            // Texture type
        s_pRgbBuffer,           // The pointer to the texture data
        s_TextureId);           // Texture object
}

void DeleteCameraTexture(void)
{
    if (s_TextureId != 0)
    {
        if (s_RenderSystem.DeleteTexture(s_TextureId))
        {
            s_TextureId = 0;
        }
        else
        {
            NN_PANIC(" Failed to delete texture. (id = %d)\n", s_TextureId);
        }
    }
}

} // Namespace