1 /*---------------------------------------------------------------------------*
2   Project:  Horizon
3   File:     main.cpp
4 
5   Copyright (C)2009-2012 Nintendo Co., Ltd.  All rights reserved.
6 
7   These coded instructions, statements, and computer programs contain
8   proprietary information of Nintendo of America Inc. and/or Nintendo
9   Company Ltd., and are protected by Federal copyright law. They may
10   not be disclosed to third parties or copied or duplicated in any form,
11   in whole or in part, without the prior written consent of Nintendo.
12 
13   $Rev: 53198 $
14  *---------------------------------------------------------------------------*/
15 
16 #include <nn.h>
17 #include <nn/camera.h>
18 #include <nn/hid.h>
19 #include <nn/applet.h>
20 #include <string.h>
21 
22 #include "demo.h"
23 #include "demo/Render/demo_RenderSystemExt.h"
24 #include "applet.h"
25 
26 #define  debug_print    NN_LOG
27 //#define  debug_print(...)    ((void)0)
28 
29 //=============================================================================
namespace
{
    // Forward declarations for the file-local helpers below.
    // The anonymous namespace gives every function internal linkage.

    // ----------------------------------------
    // Sample initialization and finalization
    void Initialize(void);
    void Finalize(void);

    // ----------------------------------------
    // Functions for applet control
    void InitializeApplet(void);
    void FinalizeApplet(void);

    // ----------------------------------------
    // Functions for camera thread control
    void InitializeCameraThread(void);
    void FinalizeCameraThread(void);
    void PrepareTransitionCallback(void);
    void AfterTransitionCallback(void);

    // ----------------------------------------
    // Camera/Y2R control functions
    void SetupCamera(void);
    bool UpdateCamera(void);
    nn::Result InitializeCameraSetting(void);
    void InitializeY2r(void);
    void InitializeResource(void);
    void FinalizeResource(void);
    void CameraThreadFunc(uptr param);          // Camera thread entry point
    void CameraRecvFunc(nn::camera::Port port);
    void CameraVsyncFunc(nn::camera::Port port);
    void StopCameraCapture(void);
    void RestartCameraCapture(void);
    bool Y2rConversion(s32 index);              // Converts one captured YUV frame to RGB

    // ----------------------------------------
    // Rendering Functions
    int  InitializeGx(void);
    void FinalizeGx(void);
    void LoadObjects(void);
    void DeleteObjects(void);
    void ReadyObjects(void);
    void DrawCameraImage(s32 index);
    void DrawDisplay0Stereo(void);
    void DrawDisplay0(void);
    void DrawDisplay1(void);
    void DrawDisplay0Ext(void);
    void DrawFrame(void);
    void SetTextureCombiner(void);
    void SetCalibration(void);
}
80 
81 //=============================================================================
82 /* Please see man pages for details
83  */
namespace
{
    // File-scope state for the camera/Y2R pipeline, shared between the main
    // thread and the camera thread.
    // NOTE(review): several boolean flags below are written and read from both
    // threads without an explicit lock — presumably single-word accesses are
    // relied upon to be atomic on this platform; confirm against SDK threading
    // documentation.

    // Dynamic allocation heap
    nn::fnd::ExpHeap s_AppHeap;

    // Index for cameras used in the sample
    enum CameraIndex
    {
        CAMERA_RIGHT,           // Right camera
        CAMERA_LEFT,            // Left camera
        CAMERA_NUM              // Number of cameras
    };

    // Buffer in which to transfer camera images
    const s32  YUV_BUFFER_NUM = 3;                          // Number of YUV buffers (triple buffering) per camera
    u8*        s_paaYuvBuffer[CAMERA_NUM][YUV_BUFFER_NUM];  // Buffers to which YUV is transferred from camera (number of cameras * triple buffer)
    s32        s_YuvBufferSize;                             // YUV buffer size

    // Triple-buffer bookkeeping: which slot is being written, which holds the
    // newest complete frame, and which is being read by Y2R.
    s32        s_aYuvCapturing[CAMERA_NUM];                 // Indicates the buffer for the camera images currently being transferred
    s32        s_aYuvLatestCaptured[CAMERA_NUM];            // Indicates the buffer for which a recent transfer completed
    s32        s_aYuvReading[CAMERA_NUM];                   // Indicates a buffer that is used to read data during Y2R conversion
    nn::os::CriticalSection  s_aCsYuvSwap[CAMERA_NUM];      // Blocks used to swap buffers

    s32        s_YuvTransferUnit;                           // Amount of camera image data transferred in a single frame
    nn::y2r::StandardCoefficient s_Coefficient;             // The Y2R conversion coefficient suitable to the data output by the camera.

    // The buffer into which RGB images are transferred after Y2R conversion
    u8*        s_paRgbBuffer[CAMERA_NUM] = { NULL, NULL };  // Space for both the left and right images
    s32        s_RgbBufferSize;                             // RGB buffer size
    // Determinant of whether a valid RGB image exists
    bool       s_ExistRgbImage[CAMERA_NUM] = { false, false };
    // Determinant of whether RGB image has been updated
    bool       s_IsUpdateRgbImage[CAMERA_NUM] = { false, false };


    // The size of an image before it is trimmed
    s32        s_OriginalWidth;

    // The size of an image after it is trimmed
    s32        s_TrimmingWidth;
    s32        s_TrimmingHeight;

    // The size of the image to be pasted as a texture
    s32        s_TextureWidth;
    s32        s_TextureHeight;

    // V-Blank timing
    nn::fnd::TimeSpan  saa_LatestVsyncTiming[CAMERA_NUM][2];
    s64        s_VsyncTimingDifference = 0;

    // V-Blank interval (used to calculate the frame rate)
    const u8   VSYNC_INTERVAL_LOG_NUM = 4;
    s64        saa_VsyncInterval[CAMERA_NUM][VSYNC_INTERVAL_LOG_NUM] = { { 0, 0, 0, 0 }, { 0, 0, 0, 0 } };
    s32        s_aVsyncIntervalPos[CAMERA_NUM] = { 0, 0 };
    s64        s_aFps[CAMERA_NUM] = { 0, 0 };

    // Number of camera frames
    s32 s_CameraFrameCount = 0;
    const s32 FRAME_NUM_FOR_STABILIZE = 30;

    // The thread that processes camera events
    nn::os::Thread   s_CameraThread;

    // Priority of the main thread
    s32              s_MainThreadPriority;

    // Notification from the camera library that a data transfer is complete
    nn::os::Event    s_aCameraRecvEvent[CAMERA_NUM];

    // Notification from the camera library that there was a buffer error
    nn::os::Event    s_aCameraBufferErrorEvent[CAMERA_NUM];

    // Notification from the camera library that there was a V-Blank
    nn::os::Event    s_aCameraVsyncEvent[CAMERA_NUM];

    // Notification from the Y2R library that conversion is complete
    nn::os::Event    s_Y2rEndEvent;

    // Queue used to collect Y2R conversion requests for camera images
    // The length of the queue depends on the balance between the cameras' frame rate and the amount of time taken to process Y2R conversion requests.
    nn::os::BlockingQueue s_Y2rRequestQueue;
    const s32             Y2R_REQUEST_QUEUE_LENGTH = 8;
    uptr s_aY2rRequestQueueMessage[Y2R_REQUEST_QUEUE_LENGTH];

    // Format of a Y2R conversion request
    struct Y2rRequest_st
    {
        s32           index;      // Right or left
        NN_PADDING4;
        nn::os::Tick  time;       // Time at which an image is obtained
    };  //struct Y2rRequest_st

    // Circular buffer for holding Y2R conversion requests
    const s32      Y2R_REQUEST_BUFFER_MAX = 3;
    Y2rRequest_st  saa_Y2rRequestBuffer[CAMERA_NUM][Y2R_REQUEST_BUFFER_MAX];
    s32            s_aY2rRequestBufferCounter[CAMERA_NUM];

    // Flag used to stop the camera thread
    bool s_IsCameraThreadEnd = false;

    // Flag used to stop processing in the camera thread during Sleep Mode
    bool s_IsCameraThreadSleep = false;

    // Event used to wait for the camera thread to sleep
    nn::os::LightEvent  s_CameraThreadSleepAckEvent;

    // Event used to send a notification when the camera thread wakes up
    nn::os::LightEvent  s_CameraThreadAwakeEvent;

    // Indicates whether to enable camera capture running
    bool s_IsCameraCaptureEnable = true;
    // Indicates whether camera capture is running
    bool s_IsCameraCaptureStarted = false;

    // Indicates whether to activate cameras
    bool s_IsCameraActiveTarget = true;
    // Indicates whether cameras are actually active
    bool s_IsCameraActive = false;

    // Indicates whether camera Vsync synchronization is necessary
    bool s_IsNeedSynchronizeVsyncTiming = false;

    // Indicates that the cameras have finished being configured
    bool s_IsFinishCameraSetting = false;

    // Calibration data for the stereo cameras
    nn::camera::StereoCameraCalibrationData s_CalData;

    // Left/right correction matrix
    nn::math::MTX34 s_aCalMatrix[CAMERA_NUM];

    // Image magnification ratio obtained by the correction process
    f32 s_CalScale = 0.0f;

} //Namespace
219 
220 //=============================================================================
221 /* Please see man pages for details
222  */
namespace
{
    // File-scope rendering state: the demo framework instance, the GX heap
    // placement, and the OpenGL object IDs created by InitializeGx/LoadObjects.

    // 3D-enabled demo framework
    demo::RenderSystemExt s_RenderSystem;

    // Memory size used to allocate graphics in FCRAM
    const s32 MEMORY_SIZE_FCRAM_GX = 0x800000;
    uptr s_AddrForGxHeap = 0;

    // buffer id
    GLuint s_ArrayBufferID = 0;
    GLuint s_ElementArrayBufferID = 0;

    // program id
    GLuint s_ProgramID = 0;

    // shader id
    GLuint s_ShaderID = 0;

    // Texture (one per camera: right and left)
    GLuint s_Texture[CAMERA_NUM] = { 0, 0 };

    // Frame count
    s32    s_FrameCount = 0;

} //Namespace
249 
250 //==============================================================================
251 /* Please see man pages for details
252  */
nnMain(void)253 extern "C" void nnMain(void)
254 {
255     nn::Result result;
256 
257     /* Please see man pages for details */
258     NN_LOG("Camera demo start\n");
259 
260     // Performs initialization.
261     //   In this demo, the camera thread is started and then everything is done in the camera thread, including starting/ending the camera, starting/stopping capture, and changing the settings.
262     //
263     //
264     Initialize();
265 
266     NN_LOG("Camera Demo: start\n");
267     NN_LOG("Y button    : Stop/Restart capturing\n");
268     NN_LOG("X button    : Deactivate/Activate camera\n");
269     NN_LOG("Start button: Finalize camera library\n");
270 
271     nn::hid::PadReader  padReader;
272     nn::hid::PadStatus  padStatus;
273 
274     // Flag for whether Y2R conversion of the camera image is completed.
275     bool a_IsComplete[CAMERA_NUM] = { false, false };
276     // The time to get the YUV image for which Y2R conversion is completed.
277     nn::os::Tick a_LatestTime[CAMERA_NUM];
278     bool isLoop = true;
279     // The variable for counting the number of images that were destroyed because out-of-sync or for other reasons.
280     s32  throwCount = 0;
281 
282     while (isLoop)
283     {
284         // ----------------------------------------
285         // Applet related process
286         //   The system forces the Y2R library to shut down when it transitions to Sleep Mode.
287         //   On the other hand, conversion is not resumed when the system recovers from Sleep Mode.
288         //   We therefore recommend that you transition to Sleep Mode after you have finished Y2R conversion for a single image.
289         TransitionHandler::Process();
290 
291         if (TransitionHandler::IsExitRequired())
292         {
293             break; // Exit main loop if the application ends
294         }
295 
296         // Get gamepad values
297         padReader.ReadLatest(&padStatus);
298 
299         /* Please see man pages for details */
300 
301         // Flag for skipping the wait for Vsync when rendering not performed.
302         bool skipWaitVsync = true;
303 
304         // There is no reason to perform Y2R conversion when no camera images are obtained, so wait for a Y2R conversion request from the camera thread.
305         //
306         uptr msg;
307         if (s_IsFinishCameraSetting && s_Y2rRequestQueue.TryDequeue(&msg))
308         {
309             // Run Y2R conversion.
310             Y2rRequest_st* p_Req = reinterpret_cast<Y2rRequest_st*>(msg);
311 
312             // Check the times at which images were obtained. If there is a significant difference between them,
313             // throw out one of the frames under the assumption that the times do not indicate the same frame.
314             s32 another = p_Req->index ^ 1;
315             if (a_IsComplete[another])
316             {
317                 s64 diff = (p_Req->time - a_LatestTime[another]).ToTimeSpan().GetMilliSeconds();
318 
319                 if ((diff > 3) || (diff < -3))
320                 {
321                     NN_LOG("Throw out another frame (diff=%lld msec)\n", diff);
322 
323                     a_IsComplete[another] = false;
324 
325                     throwCount++;
326                 }
327                 else
328                 {
329                     throwCount = 0;
330                 }
331 
332                 // Retry when the VSync has not run properly.
333                 // This is normally impossible, but it has been included just in case.
334                 if (throwCount >= 10)
335                 {
336                     // Request the camera thread to synchronize camera-capture timing.
337                     s_IsNeedSynchronizeVsyncTiming = true;
338 
339                     // Ask the camera thread to resume data capture and transfer.
340                     // We use a buffer error event because this is processed just like a recovery from a buffer error.
341                     s_aCameraBufferErrorEvent[CAMERA_RIGHT].Signal();
342                     NN_LOG("Retry SynchronizeVsyncTiming\n");
343                 }
344             }
345 
346             // Run Y2R conversion
347             if (Y2rConversion(p_Req->index))
348             {
349                 // Y2R conversion was successful, so update the flag and the time.
350                 a_LatestTime[p_Req->index] = p_Req->time;
351                 a_IsComplete[p_Req->index] = true;
352 
353                 // Texture needs to be updated.
354                 s_IsUpdateRgbImage[p_Req->index] = true;
355                 // The content of the RGB buffer is valid.
356                 s_ExistRgbImage[p_Req->index] = true;
357             }
358             else
359             {
360                 // Invalidate the texture because Y2R conversion failed and the content of the buffer might be corrupted.
361                 s_ExistRgbImage[p_Req->index] = false;
362             }
363         }
364         else
365         {
366             // If queue not processed, wait for Vsync.
367             skipWaitVsync = false;
368         }
369 
370         // The following block handles processing when the cameras have stopped taking images.
371         {
372             // There are three ways to stop taking camera images.
373             // (1) Stop the capture operations themselves
374             // (2) Put the camera devices to sleep
375             // (3) Shut down the camera library
376 
377             // (1) Stop the capture operations themselves
378             // Stop capture operations while the cameras are still active.
379             // Even though it takes little time to resume because the camera does not need to be reactivated, this method consumes more power than method (2).
380             //
381             // This is optimal when you stop capturing for a brief period of time.
382             if ((padStatus.trigger & nn::hid::BUTTON_Y)
383               && s_IsCameraActiveTarget)
384             {
385                 // Reconfigure the transfer settings and ask the camera thread to resume/stop capture.
386                 // In this sample demo, activity is resumed/stopped by the camera thread.
387                 // Use a buffer error event because this is just like a recovery from a buffer error.
388                 //
389                 s_IsCameraCaptureEnable = !s_IsCameraCaptureEnable;
390                 s_aCameraBufferErrorEvent[CAMERA_RIGHT].Signal();
391 
392                 // Invalidate image before switching
393                 a_IsComplete[CAMERA_LEFT] = false;
394                 a_IsComplete[CAMERA_RIGHT] = false;
395             }
396             // (2) Put the camera devices to sleep
397             // Put the camera devices to sleep and cancel image output.
398             // Note that this is not the same concept of "sleep" handled by nn::applet.
399             // Although this requires the cameras to be re-activated, it consumes less power than method (1).
400             // This is optimal when you stop capturing for a long period of time.
401             else if ((padStatus.trigger & nn::hid::BUTTON_X)
402                    && s_IsCameraCaptureEnable)
403             {
404                 // Request the camera thread to switch the indication of whether to activate cameras.
405                 s_IsCameraActiveTarget = !s_IsCameraActiveTarget;
406 
407                 // Reconfigure transfers and resume capture operations.
408                 // In this sample demo, processing is resumed by the camera thread.
409                 // Use a buffer error event because this is just like a recovery from a buffer error.
410                 //
411                 s_aCameraBufferErrorEvent[CAMERA_RIGHT].Signal();
412 
413                 // Invalidate image before switching
414                 a_IsComplete[CAMERA_RIGHT] = false;
415                 a_IsComplete[CAMERA_LEFT] = false;
416             }
417             // (3) Shut down the camera library
418             // We shut down the camera library when we are not going to use it anymore.
419             // If we want to use it again, we must call camera::Initialize and then re-configure every setting.
420             // Only a single application can use the camera library/Y2R library at any given time.
421             // So you must finalize the camera library in order for any other application or applet to use the camera/Y2R libraries.
422             //
423             else if ((padStatus.trigger & nn::hid::BUTTON_START)
424                    && s_IsCameraCaptureEnable
425                    && s_IsCameraActiveTarget)
426             {
427                 // Exit the loop and shut down the camera library.
428                 isLoop = false;
429             }
430         }
431 
432         // Render the left and right camera images.
433         // We do not render them until we have both.
434         if (a_IsComplete[CAMERA_RIGHT] && a_IsComplete[CAMERA_LEFT])
435         {
436             DrawFrame();
437 
438             a_IsComplete[CAMERA_RIGHT] = false;
439             a_IsComplete[CAMERA_LEFT] = false;
440 
441             // If rendered, wait for Vsync.
442             skipWaitVsync = false;
443         }
444         // Render even when images are not being captured from the cameras.
445         else if (!s_IsCameraCaptureStarted || !s_IsCameraActive)
446         {
447             DrawFrame();
448 
449             // If rendered, wait for Vsync.
450             skipWaitVsync = false;
451         }
452         if (!skipWaitVsync)
453         {
454             s_RenderSystem.WaitVsync(NN_GX_DISPLAY_BOTH);
455         }
456     } //while()
457 
458     Finalize();
459 
460     NN_LOG("Stereo Camera Demo: End\n");
461 
462     nn::applet::CloseApplication();
463 } //nnMain()
464 
465 //=============================================================================
466 /* Please see man pages for details
467  */
namespace
{
    //------------------------------------------------------------
    // Initialization
    //   Order matters: fs -> applet -> heap -> ROMFS -> GX -> HID,
    //   and finally the camera thread (which performs all camera setup).
    //------------------------------------------------------------
    void Initialize(void)
    {
        nn::fs::Initialize();

        InitializeApplet(); // Applet process

        if (TransitionHandler::IsExitRequired())
        {
            // Ends application when the POWER Button is pressed during startup
            // NOTE(review): execution falls through after CloseApplication() —
            // presumably it does not return; confirm against the applet API.
            FinalizeApplet();
            nn::applet::CloseApplication();
        }

        // Heap memory
        s_AppHeap.Initialize(
            nn::os::GetDeviceMemoryAddress(),   // Start address
            nn::os::GetDeviceMemorySize());    // Memory size

        // ROMFS must be mounted before it can be used.
        const size_t ROMFS_BUFFER_SIZE = 1024 * 64;
        static char buffer[ROMFS_BUFFER_SIZE];
        NN_PANIC_IF_FAILED(nn::fs::MountRom(16, 16, buffer, ROMFS_BUFFER_SIZE));

        if (InitializeGx() < 0)
        {
            NN_PANIC("failed gx init\n");
        }

        // We want to use the gamepad, so initialize the HID library.
        NN_PANIC_IF_FAILED(nn::hid::Initialize());

        // Start thread for camera processing
        InitializeCameraThread();
    }

    //------------------------------------------------------------
    // Finalization
    //   Tears down in reverse order of Initialize().
    //------------------------------------------------------------
    void Finalize(void)
    {
        // Finalize thread for camera processing.
        FinalizeCameraThread();

        nn::hid::Finalize();

        FinalizeGx();
        FinalizeApplet();

        s_AppHeap.Finalize();
    }
} //Namespace
524 
525 //=============================================================================
526 /* Please see man pages for details
527  */
528 namespace
529 {
SetupCamera()530     void SetupCamera()
531     {
532         nn::Result result;
533 
534         // Camera library initialization
535         while (true)
536         {
537             // If the end flag is set, stop initialization and instead finalize.
538             if (s_IsCameraThreadEnd)
539             {
540                 return;
541             }
542 
543             // Camera library initialization
544             result = nn::camera::Initialize();
545             if (result.IsSuccess())
546             {
547                 break;
548             }
549             else if (result == nn::camera::ResultFatalError())
550             {
551                 // Camera restart process failed
552                 NN_PANIC("Camera has broken.\n");
553             }
554             else if (result == nn::camera::ResultUsingOtherProcess())
555             {
556                 // Camera is being used by another process
557                 NN_PANIC("Camera is using by other process.\n");
558             }
559             else if (result == nn::camera::ResultAlreadyInitialized())
560             {
561                 // Because initialization was already done, no particular process is performed
562                 NN_LOG("Camera is already initialized.\n");
563                 break;
564             }
565             else if (result == nn::camera::ResultIsSleeping())
566             {
567                 // Fail because system is closed
568                 // If there is a request from the main thread to enter the wait state, block until end.
569                 if (s_IsCameraThreadSleep)
570                 {
571                     s_IsCameraThreadSleep = false;
572                     // Notify that the camera thread has entered the wait state (to Sleep, Home)
573                     s_CameraThreadSleepAckEvent.Signal();
574 
575                     // Wait until the thread recovery signal arrives (from Sleep, Home)
576                     s_CameraThreadAwakeEvent.Wait();
577                 }
578                 else
579                 {
580                     // Even if there is no request, block for a period of time in order to create an interval to retry.
581                     nn::os::Thread::Sleep(nn::fnd::TimeSpan::FromMilliSeconds(10));
582                 }
583                 // Retry
584                 NN_LOG("ShellClose: Retry camera initialization\n");
585             }
586         }
587 
588         // Initialize camera settings
589         while (true)
590         {
591             // If the end flag is set, stop initialization and instead finalize.
592             if (s_IsCameraThreadEnd)
593             {
594                 nn::camera::Finalize();
595                 return;
596             }
597             result = InitializeCameraSetting();
598             if (result.IsSuccess())
599             {
600                 break;
601             }
602             else if (result == nn::camera::ResultFatalError())
603             {
604                 NN_PANIC("Camera has broken.\n");
605             }
606             else if (result == nn::camera::ResultIsSleeping())
607             {
608                 // Fail because system is closed
609                 // If there is a request from the main thread to enter the wait state, block until end.
610                 if (s_IsCameraThreadSleep)
611                 {
612                     s_IsCameraThreadSleep = false;
613                     // Notify that the camera thread has entered the wait state (to Sleep, Home)
614                     s_CameraThreadSleepAckEvent.Signal();
615 
616                     // Wait until the thread recovery signal arrives (from Sleep, Home)
617                     s_CameraThreadAwakeEvent.Wait();
618                 }
619                 else
620                 {
621                     // Even if there is no request, block for a period of time in order to create an interval to retry.
622                     nn::os::Thread::Sleep(nn::fnd::TimeSpan::FromMilliSeconds(10));
623                 }
624                 // Retry
625                 NN_LOG("ShellClose: Retry camera setting\n");
626             }
627         }
628 
629         // Render settings for correcting offsets in the camera positions
630         SetCalibration();
631 
632         InitializeY2r();        // Y2R library-related initialization
633         InitializeResource();   // Resource initialization
634 
635         // Initialization has completed, so priority changes to become higher than the main thread.
636         nn::os::Thread::ChangeCurrentPriority(s_MainThreadPriority - 3);
637 
638         s_IsFinishCameraSetting = true;
639     }
640 
641     //==============================================================================
642     /* Please see man pages for details
643      */
    //------------------------------------------------------------
    // Applies the camera configuration: trimming, transfer size, error and
    // V-Blank events, image size, noise filter, auto-exposure / auto white
    // balance windows, and the matching Y2R coefficient.
    // Returns the first failing nn::Result, or success.
    //------------------------------------------------------------
    nn::Result InitializeCameraSetting()
    {
        /* Please see man pages for details */

        /* Please see man pages for details */

        // PORT_CAM1 is the port that corresponds to the inner camera or the outer camera on the right.
        // PORT_CAM2 is the port that corresponds to the outer camera on the left.
        // Use PORT_BOTH to specify both at the same time.

        // To paste an image as a texture, we must trim the image's dimensions to powers of two.
        // VGA (640x480) is not a power of 2, so you may think to trim to 512x256, but here we want to trim the image as little as possible, so we trim to 512x384 and place the trimmed image in the center of a 512x512 texture.
        //
        //
        s_TrimmingWidth  = 512;
        s_TrimmingHeight = 384;
        nn::camera::SetTrimming(nn::camera::PORT_BOTH, true);
        nn::camera::SetTrimmingParamsCenter(
                nn::camera::PORT_BOTH,
                static_cast<s16>(s_TrimmingWidth),
                static_cast<s16>(s_TrimmingHeight),
                640,
                480);
        s_TextureWidth  = 512;
        s_TextureHeight = 512;

        // Specify the transfer size for image data.
        // With the CTR, the image data input from the camera is accumulated in FIFO, and the image is transferred once data of the specified transfer size has been accumulated.
        //
        // You can specify a size of up to 10240 bytes, but an error is generated when FIFO overflows, so we recommend setting half that which is 5120 bytes.
        //
        // The GetMaxBytes function returns the maximum transfer size that matches the image size without exceeding 5120 bytes.
        s_YuvTransferUnit = nn::camera::GetMaxBytes(s_TrimmingWidth, s_TrimmingHeight);
        nn::camera::SetTransferBytes(nn::camera::PORT_BOTH, s_YuvTransferUnit, s_TrimmingWidth, s_TrimmingHeight);

        // Get events used for buffer error notifications.
        // A buffer error is reported when the transfer of camera data fails (when FIFO overflows), so you need to take appropriate steps to resume transferring and capturing data.
        //
        nn::camera::GetBufferErrorInterruptEvent(&s_aCameraBufferErrorEvent[CAMERA_RIGHT], nn::camera::PORT_CAM1);
        nn::camera::GetBufferErrorInterruptEvent(&s_aCameraBufferErrorEvent[CAMERA_LEFT], nn::camera::PORT_CAM2);

        // Get events used for camera V-Blank notifications.
        // Frame rates are calculated and camera settings are changed during V-Blanks.
        nn::camera::GetVsyncInterruptEvent(&s_aCameraVsyncEvent[CAMERA_RIGHT], nn::camera::PORT_CAM1);
        nn::camera::GetVsyncInterruptEvent(&s_aCameraVsyncEvent[CAMERA_LEFT], nn::camera::PORT_CAM2);

        // Set the image size. Here, we set the maximum size that can be output by the camera to the VGA resolution (640x480).
        NN_UTIL_RETURN_IF_FAILED(nn::camera::SetSize(nn::camera::SELECT_OUT1_OUT2, nn::camera::SIZE_VGA, nn::camera::CONTEXT_A));
        s_OriginalWidth  = 640;

        // As the norm, the filter to reduce noise in dark locations operates automatically in the camera module, so dark images are out of focus.
        //
        // The stereo cameras blur only one of the images for some subjects, making them hard to look at.
        // Therefore, it is recommended to turn off noise-reduction filters.
        NN_UTIL_RETURN_IF_FAILED(nn::camera::SetNoiseFilter(nn::camera::SELECT_OUT1_OUT2, false));

        // Auto-exposure operates independently for the left and right stereo cameras (it is not linked between the two cameras).
        // The degree of auto-exposure is calculated based on the brightness of the image. However, the scenery captured by the left and right cameras will have differences, and while the brightness of the overall image is used to determine how auto-exposure will work, there may be significant differences in the brightness of the left and right images.
        //
        // To reduce these differences in brightness, the region captured in both cameras (i.e., the right side of the region captured by the left camera, and the left side of the region captured by the right camera) is used as the region to determine how auto-exposure will work.
        //
        NN_UTIL_RETURN_IF_FAILED(nn::camera::SetAutoExposureWindow(nn::camera::SELECT_OUT1, 0, 0, 480, 480));
        NN_UTIL_RETURN_IF_FAILED(nn::camera::SetAutoExposureWindow(nn::camera::SELECT_OUT2, 160, 0, 480, 480));

        // Set the region used to determine how auto white balance works to be just like the region used to determine how auto-exposure works.
        // This reduces differences in color caused by differences in how auto white balance works.
        NN_UTIL_RETURN_IF_FAILED(nn::camera::SetAutoWhiteBalanceWindow(nn::camera::SELECT_OUT1, 0, 0, 480, 480));
        NN_UTIL_RETURN_IF_FAILED(nn::camera::SetAutoWhiteBalanceWindow(nn::camera::SELECT_OUT2, 160, 0, 480, 480));

        // Get the Y2R conversion coefficient suited to the data output by the camera.
        //   This value is used when initializing Y2R.
        //   It is unnecessary if you are not using Y2R.
        NN_UTIL_RETURN_IF_FAILED(nn::camera::GetSuitableY2rStandardCoefficient(&s_Coefficient));

        return nn::ResultSuccess();
    } //InitializeCameraSetting()
720 
721     // Update the state of the camera.
UpdateCamera(void)722     bool UpdateCamera(void)
723     {
724         nn::Result result;
725 
726         /* Please see man pages for details */
727         if (s_IsCameraActiveTarget && !s_IsCameraActive)
728         {
729             // Activate cameras that were in standby mode.
730             // After the cameras have been activated, call the StartCapture function to start capturing images.
731             result = nn::camera::Activate(nn::camera::SELECT_OUT1_OUT2);
732             if (result.IsSuccess())
733             {
734                 s_IsCameraActive = true;
735                 // The VSync signal may have shifted when the cameras are re-activated.
736                 // We must therefore re-synchronize it.
737                 s_IsNeedSynchronizeVsyncTiming = true;
738                 // Because the cameras have been re-activated, we need to wait for auto-exposure to stabilize.
739                 s_CameraFrameCount = 0;
740             }
741             else if (result == nn::camera::ResultIsSleeping())
742             {
743                 // If the system is closed, a failure may occur here.
744                 // Retry on recovery from sleep.
745                 NN_LOG("ShellClose: Camera activation is failed.\n");
746                 return false;
747             }
748             else if (result == nn::camera::ResultFatalError())
749             {
750                 NN_PANIC("Camera has broken.\n");
751             }
752         }
753 
754         /* Please see man pages for details */
755         if (!s_IsCameraActiveTarget && s_IsCameraActive)
756         {
757             // Transition cameras from active state to standby.
758 
759             // Stop capture before transitioning cameras to standby.
760             StopCameraCapture();
761 
762             // Transition cameras in standby.
763             result = nn::camera::Activate(nn::camera::SELECT_NONE);
764             if (result.IsSuccess())
765             {
766                 s_IsCameraActive = false;
767             }
768             else if (result == nn::camera::ResultIsSleeping())
769             {
770                 // If the system is closed, a failure may occur here.
771                 // Retry on recovery from sleep.
772                 NN_LOG("ShellClose: Camera deactivation is failed.\n");
773                 return false;
774             }
775             else if (result == nn::camera::ResultFatalError())
776             {
777                 NN_PANIC("Camera has broken.\n");
778             }
779         }
780 
781         // If cameras are not active, settings cannot be configured, so here we update and finalize.
782         if (!s_IsCameraActive)
783         {
784             return true;
785         }
786 
787         // Synchronize the timing at which the cameras take their subjects.
788         // In the CTR, the cameras operate independently, so even if they are set to the same frame rate they will take images with different timing.
789         //
790         // If this difference in timing is large, there will be a discrepancy in motion between the left and right camera subjects.
791         // Therefore, use the SynchronizeVsyncTiming function to allow the subject to be taken as close to the same timing as possible.
792         // By using this function, you can reduce timing discrepancies to approximately 100 microseconds (in the Release build).
793         // This function must be called while the cameras are active.
794         if (s_IsNeedSynchronizeVsyncTiming)
795         {
796             StopCameraCapture();
797             result = nn::camera::SynchronizeVsyncTiming(nn::camera::SELECT_OUT1, nn::camera::SELECT_OUT2);
798             if (result.IsSuccess())
799             {
800                 s_IsNeedSynchronizeVsyncTiming = false;
801             }
802             else if (result == nn::camera::ResultIsSleeping())
803             {
804                 // If the system is closed, a failure may occur here.
805                 // Retry on recovery from sleep.
806                 NN_LOG("ShellClose: Camera Synchronization is failed\n");
807                 return false;
808             }
809             else if (result == nn::camera::ResultFatalError())
810             {
811                 NN_PANIC("Camera has broken.\n");
812             }
813         }
814 
815         if (s_IsCameraCaptureEnable)
816         {
817             // Capture stops when there is an error, so reconfigure the transfer settings and resume capture operations.
818             //
819             RestartCameraCapture();
820         }
821         else
822         {
823             StopCameraCapture();
824         }
825 
826         return true;
827     } //UpdateCamera()
828 
829     //==============================================================================
830     /* Please see man pages for details
831      */
    // Initialize and configure the Y2R (YUV-to-RGB) conversion library.
    //   Input : camera image, s_TrimmingWidth x s_TrimmingHeight, YUV4:2:2 batch format.
    //   Output: 24-bit RGB, 8x8 block order (texture layout), no rotation.
    //   Stores the conversion-complete event in s_Y2rEndEvent and applies the
    //   conversion coefficient (s_Coefficient) obtained earlier from the camera.
    //   Panics if another process already owns Y2R.
    void InitializeY2r(void)
    {
        // Initialization of the Y2R library
        if (!nn::y2r::Initialize())
        {
            NN_PANIC("Y2R is using by other process.\n");
        }

        // Force the library to stop because it cannot be configured during a conversion.
        // Poll until the in-flight conversion has actually ended.
        nn::y2r::StopConversion();
        while (nn::y2r::IsBusyConversion())
        {
            nn::os::Thread::Sleep(nn::fnd::TimeSpan::FromMicroSeconds(100));
        }

        // Set the input format.
        // The image data that is input from the camera is in YUV4:2:2 format, which is a format that can be processed in batch mode.
        //
        nn::y2r::SetInputFormat(nn::y2r::INPUT_YUV422_BATCH);

        // Set the output format.
        // We set it to 24-bit RGB here.
        // If you want to complete the conversion more quickly, you need to reduce the data size, so another option is set RGB 16bit.
        //
        nn::y2r::SetOutputFormat(nn::y2r::OUTPUT_RGB_24);
        //nn::y2r::SetOutputFormat( nn::y2r::OUTPUT_RGB_16_565 );

        // Set the output data rotation.
        // No rotation is necessary.
        nn::y2r::SetRotation(nn::y2r::ROTATION_NONE);

        // Set the output data ordering (sequence).
        // Since we want to apply the converted image as-is as a texture, set to 8x8 block format, which is the sort order that supports DMP_NATIVE.
        //
        // If an image is not going to be rendered, the image processing is sometimes easier if you select line format.
        //
        nn::y2r::SetBlockAlignment(nn::y2r::BLOCK_8_BY_8);
        //nn::y2r::SetBlockAlignment( nn::y2r::BLOCK_LINE );

        // Configure whether we will receive a notification when the Y2R conversion is complete.
        // The event is waited on (with a timeout) in Y2rConversion().
        nn::y2r::SetTransferEndInterrupt(true);
        nn::y2r::GetTransferEndEvent(&s_Y2rEndEvent);

        // Set the size of the input image for Y2R conversion.
        nn::y2r::SetInputLineWidth(s_TrimmingWidth);
        nn::y2r::SetInputLines(s_TrimmingHeight);

        // Sets the conversion coefficients.
        // The camera module installed on the CTR system may change in the future.
        // For this reason, instead of specifying specific conversion coefficients, we recommend using the GetSuitableY2rStandardCoefficient function to get the conversion coefficient that matches the camera, and using this to set the value.
        //
        //
        // (In this demo, set the value already obtained when camera was initialized.)
        nn::y2r::SetStandardCoefficient(s_Coefficient);

        // Set the alpha value for a 16-bit or 32-bit RGB output format.
        // (This demo does not use it so it does not need to be set.)
        nn::y2r::SetAlpha(0xFF);
    } //InitializeY2r()
891 
892     //==============================================================================
893     /* Please see man pages for details
894      */
InitializeResource(void)895     void InitializeResource(void)
896     {
897         // Allocate a buffer for transferring camera image data.
898         s_YuvBufferSize = nn::camera::GetFrameBytes(s_TrimmingWidth, s_TrimmingHeight);
899         for (s32 i = 0; i < CAMERA_NUM; i++)
900         {
901             for (s32 j = 0; j < YUV_BUFFER_NUM; j++)
902             {
903                 s_paaYuvBuffer[i][j] = static_cast<u8*>(s_AppHeap.Allocate(s_YuvBufferSize, 64));
904                 memset(s_paaYuvBuffer[i][j], 0, s_YuvBufferSize);
905             }
906 
907             s_aYuvCapturing[i]      = 0;
908             s_aYuvLatestCaptured[i] = YUV_BUFFER_NUM - 1;
909             s_aYuvReading[i]        = YUV_BUFFER_NUM - 1;
910             s_aCsYuvSwap[i].Initialize();
911         }
912 
913         // In this sample demo, the main thread handles Y2R conversion.
914         // Initialize the queue used to wait for conversion requests.
915         s_Y2rRequestQueue.Initialize(s_aY2rRequestQueueMessage, Y2R_REQUEST_QUEUE_LENGTH);
916 
917         // Allocate a buffer for storing the results of Y2R conversion on both the left and right camera images.
918         s_RgbBufferSize = nn::y2r::GetOutputImageSize(s_TextureWidth, s_TextureHeight, nn::y2r::OUTPUT_RGB_24);
919         for (s32 i = 0; i < CAMERA_NUM; i++)
920         {
921             NN_ASSERT(!s_paRgbBuffer[i]);
922             s_paRgbBuffer[i] = static_cast<u8*>(s_AppHeap.Allocate(s_RgbBufferSize, 64));
923             memset(s_paRgbBuffer[i], 0, s_RgbBufferSize);
924         }
925     } //InitializeResource()
926 
927     //==============================================================================
928     /* Please see man pages for details
929      */
FinalizeResource(void)930     void FinalizeResource(void)
931     {
932         for (int i = 0; i < CAMERA_NUM; i++)
933         {
934             // Free the allocated buffers.
935             for (s32 j = 0; j < YUV_BUFFER_NUM; j++)
936             {
937                 if (s_paaYuvBuffer[i][j])
938                 {
939                     s_AppHeap.Free(s_paaYuvBuffer[i][j]);
940                     s_paaYuvBuffer[i][j] = NULL;
941                 }
942             }
943             if (s_paRgbBuffer[i])
944             {
945                 s_AppHeap.Free(s_paRgbBuffer[i]);
946                 s_paRgbBuffer[i] = NULL;
947             }
948 
949             // Also destroy events obtained from the camera library.
950             s_aCameraRecvEvent[i].Finalize();
951             s_aCameraBufferErrorEvent[i].Finalize();
952             s_aCameraVsyncEvent[i].Finalize();
953 
954             s_aCsYuvSwap[i].Finalize();
955         }
956         s_Y2rRequestQueue.Finalize();
957     } //FinalizeResource()
958 
959     //==============================================================================
960     /* Please see man pages for details
961      */
    // Camera thread entry point.
    //   Initializes the cameras, then loops waiting on the six camera events
    //   (transfer-complete, buffer-error, and V-Sync for each of the two
    //   cameras) and dispatches to CameraRecvFunc / UpdateCamera /
    //   CameraVsyncFunc. Services the sleep/wake handshake with the main
    //   thread (s_IsCameraThreadSleep / s_CameraThreadSleepAckEvent /
    //   s_CameraThreadAwakeEvent) and the end request (s_IsCameraThreadEnd).
    //   On exit, finalizes CAMERA/Y2R in the required order and frees all
    //   related resources.
    void CameraThreadFunc(uptr param NN_IS_UNUSED_VAR)
    {
        // Camera initialization
        SetupCamera();

        // Exit if a request to finalize the thread comes during camera initialization.
        if (s_IsCameraThreadEnd)
        {
            return;
        }

        // Indices into the array passed to WaitAny below.
        enum
        {
            EVENT_RECV_R,
            EVENT_RECV_L,
            EVENT_ERROR_R,
            EVENT_ERROR_L,
            EVENT_VSYNC_R,
            EVENT_VSYNC_L,

            EVENT_MAX
        };
        nn::os::WaitObject* pa_WaitObject[EVENT_MAX];

        // If the cameras were activated before the thread was started, the Vsync event may have been signaled already, so clear the signal.
        //
        s_aCameraVsyncEvent[CAMERA_RIGHT].ClearSignal();
        s_aCameraVsyncEvent[CAMERA_LEFT].ClearSignal();

        // We use a buffer error event when first starting because this is processed just like a recovery from a buffer error.
        s_aCameraBufferErrorEvent[CAMERA_RIGHT].Signal();

        while (1)
        {
            // The SetReceiving function replaces the event notifying the completion of transfer, so only one of the events for reconfiguring the transfer settings is executed here. In this way, you never end up waiting on a nonexistent event.
            //
            // (The wait-object array is rebuilt on every iteration for that reason.)
            //
            //

            pa_WaitObject[EVENT_RECV_R ] = &s_aCameraRecvEvent[CAMERA_RIGHT];
            pa_WaitObject[EVENT_RECV_L ] = &s_aCameraRecvEvent[CAMERA_LEFT];
            pa_WaitObject[EVENT_ERROR_R] = &s_aCameraBufferErrorEvent[CAMERA_RIGHT];
            pa_WaitObject[EVENT_ERROR_L] = &s_aCameraBufferErrorEvent[CAMERA_LEFT];
            pa_WaitObject[EVENT_VSYNC_R] = &s_aCameraVsyncEvent[CAMERA_RIGHT];
            pa_WaitObject[EVENT_VSYNC_L] = &s_aCameraVsyncEvent[CAMERA_LEFT];

            s32 num = nn::os::WaitObject::WaitAny(pa_WaitObject, EVENT_MAX);

            // Sleep / HOME transition request from the main thread.
            if (s_IsCameraThreadSleep)
            {
                s_IsCameraThreadSleep = false;

                // Stop camera capture
                StopCameraCapture();

                // Clear the Y2R conversion requests before going to sleep.
                uptr msg;
                while (s_Y2rRequestQueue.TryDequeue(&msg))
                {
                }

                // Notify that the camera thread has entered the wait state (to Sleep, Home)
                s_CameraThreadSleepAckEvent.Signal();
                // Wait until the thread recovery signal arrives (from Sleep, Home)
                s_CameraThreadAwakeEvent.Wait();

                // Clear any V-Blank notifications that were signaled while waiting.
                s_aCameraVsyncEvent[CAMERA_RIGHT].ClearSignal();
                s_aCameraVsyncEvent[CAMERA_LEFT].ClearSignal();

                s_VsyncTimingDifference = 0;

                // Camera Vsync synchronization is required after returning from sleep.
                s_IsNeedSynchronizeVsyncTiming = true;

                // Use the buffer error mechanism to resume capture.
                s_aCameraBufferErrorEvent[CAMERA_RIGHT].Signal();
            }

            // End the thread
            if (s_IsCameraThreadEnd)
            {
                StopCameraCapture();
                break;
            }

            switch(num)
            {
            case EVENT_RECV_R:
            case EVENT_RECV_L:
                {
                    // Re-configure transfers
                    CameraRecvFunc((num == EVENT_RECV_R)
                                    ? nn::camera::PORT_CAM1
                                    : nn::camera::PORT_CAM2);
                }
                break;

            case EVENT_ERROR_R:
            case EVENT_ERROR_L:
                {
                    // Clear both error events before the (shared) recovery path runs.
                    s_aCameraBufferErrorEvent[CAMERA_RIGHT].ClearSignal();
                    s_aCameraBufferErrorEvent[CAMERA_LEFT].ClearSignal();

                    if (!UpdateCamera())
                    {
                        // Failed on system closure.
                        // Signal a buffer error event so you can do UpdateCamera again after the system has been opened.
                        s_aCameraBufferErrorEvent[CAMERA_RIGHT].Signal();

                        if (!s_IsCameraThreadSleep)
                        {
                            // Even if there is no request to enter the wait state, sleep for a period of time in order to create an interval to retry.
                            nn::os::Thread::Sleep(nn::fnd::TimeSpan::FromMilliSeconds(10));
                        }
                    }
                }
                break;

            case EVENT_VSYNC_R:
            case EVENT_VSYNC_L:
                {
                    CameraVsyncFunc((num == EVENT_VSYNC_R)
                                     ? nn::camera::PORT_CAM1
                                     : nn::camera::PORT_CAM2);
                }
                break;

            default:
                {
                    debug_print("Illegal event\n");
                }
                break;
            }; //Switch
        } //while

        {
            // Finalization of CAMERA/Y2R
            //   If you do not perform finalization with the following procedure,
            //   there is a possibility that sound noise will be generated in the HOME Menu.
            nn::y2r::StopConversion();                      // (1) Stop Y2R conversion
            nn::camera::StopCapture(nn::camera::PORT_BOTH); // (2) Stop capture
            nn::camera::Activate(nn::camera::SELECT_NONE);  // (3) Set all cameras to standby state
            nn::camera::Finalize();                         // (4) End camera
            nn::y2r::Finalize();                            //     End Y2R
        }
        {
            // Free the resources used for CAMERA/Y2R
            FinalizeResource();
        }
    } //CameraThreadFunc()
1114 
1115     //==============================================================================
1116     /* Please see man pages for details
1117      */
    // Handle completion of a one-frame image transfer from a camera.
    //   Runs on the camera thread when s_aCameraRecvEvent[index] is signaled.
    //   Once auto-exposure has stabilized, publishes the completed frame by
    //   rotating the triple-buffer indices and enqueues a Y2R conversion
    //   request for the main thread; in all cases re-arms the transfer for
    //   the next frame.
    void CameraRecvFunc(nn::camera::Port port)
    {
        // PORT_CAM1 -> 0, PORT_CAM2 -> 1
        // (presumably matching CAMERA_RIGHT/CAMERA_LEFT as used in CameraVsyncFunc).
        s32 index = (port == nn::camera::PORT_CAM1) ? 0 : 1;

        // For the first four frames after the cameras are activated, the images that we get may be extremely dark.
        // The number of frames until auto-exposure stabilizes, together with these four frames, is around 14 frames indoors and around 30 frames outdoors.
        //
        // Here we wait for 30 frames before starting Y2R conversion.
        if (s_CameraFrameCount >= FRAME_NUM_FOR_STABILIZE /* == 30 */)
        {
            // Switch the write buffer.
            {
                // Block so that this does not overlap with the process of switching the read buffer.
                nn::os::CriticalSection::ScopedLock sl(s_aCsYuvSwap[index]);

                // The buffer we just filled becomes the latest captured frame;
                // advance the write index, skipping the buffer the reader holds.
                s_aYuvLatestCaptured[index] = s_aYuvCapturing[index];
                do
                {
                    if (++s_aYuvCapturing[index] >= YUV_BUFFER_NUM)
                    {
                        s_aYuvCapturing[index] = 0;
                    }
                } while (s_aYuvCapturing[index] == s_aYuvReading[index]);
            }

            // Make a Y2R conversion request.
            {
                // Requests live in a circular buffer; the queue carries pointers into it.
                Y2rRequest_st* p_Req = &(saa_Y2rRequestBuffer[index][s_aY2rRequestBufferCounter[index]]);

                p_Req->index = index;
                p_Req->time  = nn::os::Tick::GetSystemCurrent();

                // Put the request in the queue
                if (!s_Y2rRequestQueue.TryEnqueue(reinterpret_cast<uptr>(p_Req)))
                {
                    debug_print("Request port=%d: Queue is full.\n", index);
                }

                // Advance the circular buffer to the beginning for the next time
                if (++s_aY2rRequestBufferCounter[index] >= Y2R_REQUEST_BUFFER_MAX)
                {
                    s_aY2rRequestBufferCounter[index] = 0;
                }
            }
        }

        // Configure the next frame's transfer.
        nn::camera::SetReceiving(
            & s_aCameraRecvEvent[index],
            s_paaYuvBuffer[index][s_aYuvCapturing[index]],
            port,
            s_YuvBufferSize,
            s_YuvTransferUnit);

    } //CameraRecvFunc()
1173 
1174     //==============================================================================
1175     /* Please see man pages for details
1176      */
    // Handle a camera V-Blank (V-Sync) notification.
    //   Records the two most recent V-Blank times for the camera, counts
    //   frames until auto-exposure is considered stable, computes the
    //   left/right V-Blank timing difference, and derives an averaged frame
    //   rate from the recent V-Blank intervals.
    void CameraVsyncFunc(nn::camera::Port port)
    {
        s32 index = (port == nn::camera::PORT_CAM1) ? CAMERA_RIGHT : CAMERA_LEFT;

        // Get the timing at which the V-Blank occurred (the two most recent samples).
        GetLatestVsyncTiming(saa_LatestVsyncTiming[index], port, 2);

        // To determine whether auto-exposure has stabilized, count up to 30 camera frames.
        // NOTE(review): index 1 appears to mean CAMERA_LEFT here — confirm against the enum.
        if ((index == 1) && (s_CameraFrameCount < FRAME_NUM_FOR_STABILIZE /* == 30 */))
        {
            s_CameraFrameCount++;
        }

        // Find the difference in V-Blank timing between the left and right cameras.
        if (index == 1)
        {
            s_VsyncTimingDifference
                = saa_LatestVsyncTiming[CAMERA_LEFT][0].GetMicroSeconds()
                - saa_LatestVsyncTiming[CAMERA_RIGHT][0].GetMicroSeconds();
        }

        // Calculate the V-Blank interval in order to calculate the frame rate.
        // The interval log is a circular buffer of the last VSYNC_INTERVAL_LOG_NUM intervals.
        saa_VsyncInterval[index][s_aVsyncIntervalPos[index]]
            = saa_LatestVsyncTiming[index][0].GetMicroSeconds()
            - saa_LatestVsyncTiming[index][1].GetMicroSeconds();
        if (++s_aVsyncIntervalPos[index] >= VSYNC_INTERVAL_LOG_NUM)
        {
            s_aVsyncIntervalPos[index] = 0;
        }

        // Calculate the frame rate. Here we take the average of four frames.
        // Intervals that are still zero (log not yet full) terminate the sum.
        s64 sum = 0;
        s32 i = 0;
        for (i = 0; i < VSYNC_INTERVAL_LOG_NUM; i++)
        {
            if (saa_VsyncInterval[index][i] <= 0)
            {
                break;
            }
            sum += saa_VsyncInterval[index][i];
        }
        if (sum != 0)
        {
            // NOTE(review): 'sum' is in microseconds, so this value appears to
            // be frames-per-second scaled by 1000 (milli-fps fixed point) —
            // confirm against the code that displays s_aFps.
            s_aFps[index] = static_cast<s64>(i) * 1000000000LL / sum;
        }

    } //CameraVsyncFunc()
1225 
1226     //==============================================================================
1227     /* Please see man pages for details
1228      */
StopCameraCapture(void)1229     void StopCameraCapture(void)
1230     {
1231         // Do nothing if StartCapture is not called.
1232         if (!s_IsCameraCaptureStarted)
1233         {
1234             return;
1235         }
1236 
1237         // Stop capture operations.
1238         nn::camera::StopCapture(nn::camera::PORT_BOTH);
1239 
1240         // Capture actually stops at the end of the frame, which is not necessarily immediately after the call to StopCapture, so wait for capture to end.
1241         //
1242         // A locking mechanism is necessary because IsBusy would wait indefinitely if StartCapture were to be run from another thread.
1243         //
1244         s32 cnt = 0;
1245         while (nn::camera::IsBusy(nn::camera::PORT_CAM1)
1246             || nn::camera::IsBusy(nn::camera::PORT_CAM2))
1247         {
1248             nn::os::Thread::Sleep(nn::fnd::TimeSpan::FromMilliSeconds(1));
1249 
1250             // Depending on when the cameras are stopped, if the system was closed while IsBusy was polling it may continue to return 'true' until the system is opened again. To avoid an infinite loop, do a timeout.
1251             //
1252             //
1253             //
1254             // Normally, IsBusy is 'true' for at most 1 frame, so set the timeout time to 1 frame.
1255             //
1256             if (++cnt > 66)
1257             {
1258                 NN_LOG("Busy timeout\n");
1259                 break;
1260             }
1261         }
1262         nn::camera::ClearBuffer(nn::camera::PORT_BOTH);
1263 
1264         s_IsCameraCaptureStarted = false;
1265     } //StopCameraCapture()
1266 
1267     //==============================================================================
1268     /* Please see man pages for details
1269      */
    // Re-arm the image transfers for both cameras and (re)start capture.
    //   Capture stops automatically on errors, so the transfer settings must
    //   be configured again before StartCapture. The sequence is:
    //   stop -> re-arm both ports -> start.
    void RestartCameraCapture(void)
    {
        // Stop capturing images from both ports.
        // Even though one of the ports has already been stopped, capture is stopped from both ports for simplicity.
        //
        StopCameraCapture();

        // Re-configure transfers.
        // SetReceiving registers the destination buffer (the current write
        // buffer of each camera) and the event signaled on frame completion.
        nn::camera::SetReceiving(
            & s_aCameraRecvEvent[CAMERA_RIGHT],
            s_paaYuvBuffer[CAMERA_RIGHT][s_aYuvCapturing[CAMERA_RIGHT]],
            nn::camera::PORT_CAM1,
            s_YuvBufferSize,
            s_YuvTransferUnit);

        nn::camera::SetReceiving(
            & s_aCameraRecvEvent[CAMERA_LEFT],
            s_paaYuvBuffer[CAMERA_LEFT][s_aYuvCapturing[CAMERA_LEFT]],
            nn::camera::PORT_CAM2,
            s_YuvBufferSize,
            s_YuvTransferUnit);

        // A locking mechanism is unnecessary because StartCapture and StopCapture are called only from the camera thread.
        nn::camera::StartCapture(nn::camera::PORT_BOTH);

        s_IsCameraCaptureStarted = true;
    } //RestartCameraCapture()
1297 
1298     //==============================================================================
1299     /* Please see man pages for details
1300 
1301 
1302      */
    // Convert the most recently captured YUV frame of camera 'index' to RGB.
    //   Switches the read buffer to the latest captured frame (under the swap
    //   lock), then runs a Y2R conversion into s_paRgbBuffer[index], offset so
    //   the image sits vertically centered in the texture buffer.
    //   Returns true on success; returns false when the conversion was
    //   abandoned after repeated timeouts (see the hardware-bug warning
    //   below).
    bool Y2rConversion(s32 index)
    {
        // Switch the camera read buffer.
        {
            nn::os::CriticalSection::ScopedLock sl(s_aCsYuvSwap[index]);

            s_aYuvReading[index] = s_aYuvLatestCaptured[index];
        }

        // *** BEGIN WARNING ***
        //
        // Due to a hardware bug in Y2R, when the camera and Y2R are being used at the same time, there is a possibility that the recovery from a camera buffer error could cause Y2R transfers to hang, depending on the timing of that recovery.
        //
        //
        // In this case, the conversion completion event obtained by nn::y2r::GetTransferEndEvent might never be signaled.
        //
        // For details on when this problem occurs and how to deal with it, see the Function Reference Manual for the Y2R library.
        //
        // In this sample, a timeout is inserted in the wait for the above event, and when a timeout occurs, a retry is performed.
        //
        //
        // *** END WARNING ***

        // We add an offset because we want to place a 512x384 image in the middle of a 512x512 buffer.
        s32 offset = nn::y2r::GetOutputImageSize(
                            s_TextureWidth,
                            (s_TextureHeight - s_TrimmingHeight) / 2,
                            nn::y2r::OUTPUT_RGB_24);

        s32  cnt     = 0;  // Timeout/retry counter; give up after 2 failures.
        while (true)
        {
            // Configure a transfer for Y2R output images.
            nn::y2r::CTR::SetReceiving(
                s_paRgbBuffer[index] + offset,
                nn::y2r::GetOutputImageSize(s_TrimmingWidth, s_TrimmingHeight, nn::y2r::OUTPUT_RGB_24),
                nn::y2r::GetOutputBlockSize(s_TrimmingWidth, nn::y2r::OUTPUT_RGB_24));

            // Configure a transfer for Y2R input images.
            // Transfers are measured in lines of data.
            nn::y2r::CTR::SetSendingYuv(
                s_paaYuvBuffer[index][s_aYuvReading[index]],
                nn::camera::GetFrameBytes(s_TrimmingWidth, s_TrimmingHeight),
                nn::camera::GetLineBytes(s_TrimmingWidth));

            // Start Y2R conversion.
            nn::y2r::CTR::StartConversion();

            // Waits for Y2R conversion completion because data transfer is aborted if the transfer for the next conversion is performed before conversion of the first image has completed.
            //
            // We insert a timeout to deal with the bug that causes Y2R to hang.
            // Because the conversion process takes roughly 10 ms for a 512x384 image transferred in 24-bit RGB format, a timeout is set that is twice as long (20 ms).
            //
            // The size of the camera image and the format of the output image affect how long it takes to convert the image.
            if (s_Y2rEndEvent.Wait(nn::fnd::TimeSpan::FromMilliSeconds(20)))
            {
                // Conversion succeeded
                return true;
            }
            else
            {
                // Conversion failed (the completion event never arrived).

                NN_LOG("Y2R may have hung up.\n");

                // Force conversion to stop
                nn::y2r::StopConversion();

                // Although it is very rare for problems to occur over and over, the probability of this happening is not zero, so to play it safe a process is inserted to explicitly exit the loop.
                //
                // We give up on converting this image here and move on to converting the next image.
                if (++cnt >= 2)
                {
                    // Give up on conversion
                    return false;
                }
                // Retry
            }
        } //while(true)

    } //Y2rConversion()
1384 
1385 } //Namespace
1386 
1387 //=============================================================================
1388 /* Please see man pages for details
1389  */
1390 namespace
1391 {
1392     //------------------------------------------------------------
1393     // Preparation before sleep/transition (Called from main thread.)
1394     //------------------------------------------------------------
PrepareTransitionCallback(void)1395     void PrepareTransitionCallback(void)
1396     {
1397         // If camera thread is not finalized, transition to wait state.
1398         if (!s_IsCameraThreadEnd)
1399         {
1400             // Notify camera thread to transition to wait state (to CameraThreadFunc).
1401             s_IsCameraThreadSleep = true;
1402 
1403             // Signal so that the camera thread does not block with WaitAny (to CameraThreadFunc)
1404             s_aCameraBufferErrorEvent[CAMERA_RIGHT].Signal();
1405 
1406             // Wait for camera thread to transition to wait state (from CameraThreadFunc).
1407             s_CameraThreadSleepAckEvent.Wait();
1408         }
1409     }
1410 
1411     //------------------------------------------------------------
1412     // Recover processing after sleep/transition. (Called from main thread.)
1413     //------------------------------------------------------------
AfterTransitionCallback(void)1414     void AfterTransitionCallback(void)
1415     {
1416         // If camera thread is not finalized, cancel the wait state.
1417         // If recovering in order to finalize the application, here you finalize the camera thread without canceling.
1418         if (!s_IsCameraThreadEnd && !TransitionHandler::IsExitRequired())
1419         {
1420             // Signal to start the camera thread (to CameraThreadFunc)
1421             s_CameraThreadAwakeEvent.Signal();
1422         }
1423     }
1424 
1425     //-----------------------------------------------------
1426     // Start camera thread.
1427     //-----------------------------------------------------
    void InitializeCameraThread(void)
    {
        // Create the synchronization events shared with the camera thread
        // (all auto-reset style, initially non-signaled).
        s_CameraThreadSleepAckEvent.Initialize(false);
        s_CameraThreadAwakeEvent.Initialize(false);
        s_aCameraRecvEvent[CAMERA_RIGHT].Initialize(false);
        s_aCameraRecvEvent[CAMERA_LEFT].Initialize(false);
        s_aCameraBufferErrorEvent[CAMERA_RIGHT].Initialize(false); // This will be used when recovering from the wait state, so initialize it first.
        // Park/resume the camera thread around sleep and HOME-button transitions
        // (PrepareTransitionCallback waits for the thread; AfterTransitionCallback wakes it).
        TransitionHandler::SetPrepareSleepCallback(PrepareTransitionCallback);
        TransitionHandler::SetAfterSleepCallback(AfterTransitionCallback);
        TransitionHandler::SetPrepareHomeButtonCallback(PrepareTransitionCallback);
        TransitionHandler::SetAfterHomeButtonCallback(AfterTransitionCallback);

        // Remember the priority of the main thread so you can change the priority later.
        s_MainThreadPriority = nn::os::Thread::GetCurrentPriority();

        // Create a thread for camera processing.
        //   While initializing, the priority is set lower than the main thread.
        //   After initialization has completed, the priority changes to be higher than the main thread.
        s_CameraThread.StartUsingAutoStack(
            CameraThreadFunc,
            NULL,
            8192,
            s_MainThreadPriority + 3);
    }
1452 
1453     //-----------------------------------------------------
1454     // Finalization of camera thread
1455     //-----------------------------------------------------
    void FinalizeCameraThread(void)
    {
        // Shut down the camera thread, then unhook the transition callbacks and
        // release the synchronization events. The order matters: the end flag is
        // set before signaling so the thread wakes up and observes it.
        // Destroy the thread for camera processing
        s_IsCameraThreadEnd = true;                 // Set the end flag
        s_CameraThreadAwakeEvent.Signal();          // Signal so that it does not stop with the sleep state
        s_aCameraBufferErrorEvent[CAMERA_RIGHT].Signal();    // Signal so that the camera thread does not stop with WaitAny
        s_CameraThread.Join();                      // Wait for thread to end
        s_CameraThread.Finalize();                  // Discard thread

        // The thread is gone; clear the transition hooks registered in
        // InitializeCameraThread and finalize the events.
        TransitionHandler::SetPrepareSleepCallback(NULL);
        TransitionHandler::SetAfterSleepCallback(NULL);
        TransitionHandler::SetPrepareHomeButtonCallback(NULL);
        TransitionHandler::SetAfterHomeButtonCallback(NULL);
        s_CameraThreadSleepAckEvent.Finalize();
        s_CameraThreadAwakeEvent.Finalize();
    }
1472 } //Namespace
1473 
1474 //=============================================================================
1475 /* Please see man pages for details
1476  */
1477 namespace
1478 {
1479     //=============================================================================
    void InitializeApplet(void)
    {
        // Set up applet transition handling and allow the system to sleep.
        TransitionHandler::Initialize();
        TransitionHandler::EnableSleep();
    } //InitializeApplet()
1485 
1486     //=============================================================================
    void FinalizeApplet(void)
    {
        // Reverse of InitializeApplet: forbid sleep, then tear down the handler.
        TransitionHandler::DisableSleep();
        TransitionHandler::Finalize();
    } //FinalizeApplet()
1492 } //Namespace
1493 
1494 //=============================================================================
1495 /* Please see man pages for details
1496  */
1497 namespace
1498 {
1499     //=============================================================================
InitializeGx(void)1500     int InitializeGx(void)
1501     {
1502         nn::fs::Initialize();
1503 
1504         NN_ASSERT(!s_AddrForGxHeap);
1505         s_AddrForGxHeap = reinterpret_cast<uptr>(s_AppHeap.Allocate(MEMORY_SIZE_FCRAM_GX));
1506         s_RenderSystem.Initialize(s_AddrForGxHeap, MEMORY_SIZE_FCRAM_GX);
1507 
1508         s_ProgramID = glCreateProgram();
1509         s_ShaderID = glCreateShader(GL_VERTEX_SHADER);
1510 
1511         nn::fs::FileReader file(L"rom:/shader.shbin");
1512         size_t fileSize = file.GetSize();
1513         void*  buf      = s_AppHeap.Allocate(fileSize);
1514         s32    read     = file.Read(buf, fileSize);
1515 
1516         glShaderBinary(1, &s_ShaderID, GL_PLATFORM_BINARY_DMP, buf, read);
1517 
1518         file.Finalize();
1519         s_AppHeap.Free(buf);
1520 
1521         glAttachShader(s_ProgramID, s_ShaderID);
1522         glAttachShader(s_ProgramID, GL_DMP_FRAGMENT_SHADER_DMP);
1523 
1524         glBindAttribLocation(s_ProgramID, 0, "aPosition");
1525         glBindAttribLocation(s_ProgramID, 1, "aTexCoord");
1526 
1527         glLinkProgram(s_ProgramID);
1528         glValidateProgram(s_ProgramID);
1529         glUseProgram(s_ProgramID);
1530 
1531         glGenTextures(CAMERA_NUM, s_Texture);
1532 
1533         glBindTexture(GL_TEXTURE_2D, s_Texture[CAMERA_RIGHT]);
1534         glBindTexture(GL_TEXTURE_2D, s_Texture[CAMERA_LEFT]);
1535         glBindTexture(GL_TEXTURE_2D, 0);
1536 
1537         s_RenderSystem.SetClearColor(NN_GX_DISPLAY0, 0.36f, 0.42f, 0.5f, 1.0f);
1538         s_RenderSystem.SetClearColor(NN_GX_DISPLAY1, 0.0f, 0.0f, 0.0f, 1.0f);
1539         s_RenderSystem.SetClearColor(NN_GX_DISPLAY0_EXT, 0.36f, 0.42f, 0.5f, 1.0f);
1540 
1541         glClearDepthf(1.f);
1542 
1543         glEnable(GL_CULL_FACE);
1544         glFrontFace(GL_CCW);
1545         glCullFace(GL_BACK);
1546 
1547         LoadObjects();
1548 
1549         s_RenderSystem.SetLcdMode(NN_GX_DISPLAYMODE_STEREO);
1550 
1551         return 0;
1552     }
1553 
1554     //==============================================================================
    void FinalizeGx(void)
    {
        // Tear down in roughly the reverse order of InitializeGx:
        // geometry, textures, shader program, render system, then the GX heap.
        DeleteObjects();

        glDeleteTextures(CAMERA_NUM, s_Texture);
        glDetachShader(s_ProgramID, GL_DMP_FRAGMENT_SHADER_DMP);
        glDetachShader(s_ProgramID, s_ShaderID);
        glDeleteShader(s_ShaderID);
        glDeleteProgram(s_ProgramID);

        s_RenderSystem.Finalize();

        // Return the GX working memory to the application heap (if allocated)
        // and reset the address so InitializeGx's assertion holds on re-init.
        if (s_AddrForGxHeap)
        {
            s_AppHeap.Free(reinterpret_cast<void*>(s_AddrForGxHeap));
            s_AddrForGxHeap = 0;
        }
    }
1573 
1574     //=============================================================================
LoadObjects(void)1575     void LoadObjects(void)
1576     {
1577         // Objects used to paste a 512x512 camera image as a texture
1578         // The size is set to a value that can be divided by 512 in order to make it easier to calculate the camera position for displaying images pixel-by-pixel, and to calculate the amount to move to correct for position offsets in the stereo camera.
1579         //
1580         //
1581         GLfloat coords[] = {
1582             -1.024f, 1.024f, 0.f, 1.f,
1583             -1.024f,-1.024f, 0.f, 1.f,
1584              1.024f, 1.024f, 0.f, 1.f,
1585              1.024f,-1.024f, 0.f, 1.f
1586         };
1587         GLfloat texcoords[] = {
1588             0.f, 1.f, 0.f,
1589             0.f, 0.f, 0.f,
1590             1.f, 1.f, 0.f,
1591             1.f, 0.f, 0.f,
1592         };
1593 
1594         GLushort idxs[] = { 0, 1, 2, 3 };
1595 
1596         glGenBuffers(1, &s_ArrayBufferID);
1597         glBindBuffer(GL_ARRAY_BUFFER, s_ArrayBufferID);
1598         glBufferData(GL_ARRAY_BUFFER, sizeof(coords) + sizeof(texcoords), 0, GL_STATIC_DRAW);
1599         glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(coords), coords);
1600         glBufferSubData(GL_ARRAY_BUFFER, sizeof(coords), sizeof(texcoords), texcoords);
1601 
1602         glGenBuffers(1, &s_ElementArrayBufferID);
1603         glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, s_ElementArrayBufferID);
1604         glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(idxs), idxs, GL_STATIC_DRAW);
1605 
1606         glEnableVertexAttribArray(0);
1607         glEnableVertexAttribArray(1);
1608 
1609         glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, 0) ;
1610         glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, reinterpret_cast<GLvoid*>(sizeof(coords)));
1611     }
1612 
1613     //==============================================================================
    void DeleteObjects(void)
    {
        // Release the index and vertex buffers created by LoadObjects.
        glDeleteBuffers(1, &s_ElementArrayBufferID);
        glDeleteBuffers(1, &s_ArrayBufferID);
    }
1619 
1620     //=============================================================================
    void ReadyObjects(void)
    {
        // Rebind the shader program, the quad's vertex/index buffers, and both
        // vertex attributes before drawing. Mirrors the setup in LoadObjects;
        // the texcoord offset of 16 floats equals the size of the position
        // array uploaded there.
        glUseProgram(s_ProgramID);
        glBindBuffer(GL_ARRAY_BUFFER, s_ArrayBufferID);
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, s_ElementArrayBufferID);

        glEnableVertexAttribArray(0);
        glEnableVertexAttribArray(1);

        glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, 0);
        glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, reinterpret_cast<GLvoid*>(16 * sizeof(GLfloat)));
    }
1633 
1634     //=============================================================================
SetTextureCombiner(void)1635     void SetTextureCombiner(void)
1636     {
1637         glUniform4f(glGetUniformLocation(s_ProgramID, "dmp_TexEnv[2].constRgba"), 0.0f, 0.0f, 0.0f, 1.0f);
1638         glUniform3i(glGetUniformLocation(s_ProgramID, "dmp_TexEnv[2].srcRgb"), GL_TEXTURE0, GL_PREVIOUS, GL_PREVIOUS);
1639         glUniform3i(glGetUniformLocation(s_ProgramID, "dmp_TexEnv[2].srcAlpha"), GL_CONSTANT, GL_PREVIOUS, GL_PREVIOUS);
1640         glUniform3i(glGetUniformLocation(s_ProgramID, "dmp_TexEnv[2].operandRgb"), GL_SRC_COLOR, GL_SRC_COLOR, GL_SRC_COLOR);
1641         glUniform3i(glGetUniformLocation(s_ProgramID, "dmp_TexEnv[2].operandAlpha"),GL_SRC_ALPHA , GL_SRC_ALPHA, GL_SRC_ALPHA);
1642         glUniform1i(glGetUniformLocation(s_ProgramID, "dmp_TexEnv[2].combineRgb"), GL_REPLACE);
1643         glUniform1i(glGetUniformLocation(s_ProgramID, "dmp_TexEnv[2].combineAlpha"), GL_REPLACE);
1644         glUniform1f(glGetUniformLocation(s_ProgramID, "dmp_TexEnv[2].scaleRgb"), 1.0f);
1645         glUniform1f(glGetUniformLocation(s_ProgramID, "dmp_TexEnv[2].scaleAlpha"), 1.0f);
1646     }
1647 
1648     //=============================================================================
DrawFrame(void)1649     void DrawFrame(void)
1650     {
1651         glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
1652 
1653         // Render the upper screen (stereoscopic display)
1654         DrawDisplay0Stereo();
1655 
1656         // Render the lower screen
1657         DrawDisplay1();
1658 
1659         s_FrameCount++;
1660     } //DrawFrame
1661 
1662     //=============================================================================
    void DrawDisplay0Stereo(void)
    {
        // Render both eye images for the upper screen, then present them together.
        // Render images for the left eye
        DrawDisplay0();

        // Render images for the right eye
        DrawDisplay0Ext();

        s_RenderSystem.SwapBuffers();
    } //DrawDisplay0Stereo
1673 
1674     //=============================================================================
    void DrawDisplay0(void)
    {
        // Left-eye pass of the upper screen: clear, draw the left camera
        // image (if the cameras are ready), and transfer to the display.
        s_RenderSystem.SetRenderTarget(NN_GX_DISPLAY0);
        s_RenderSystem.Clear();
        glViewport(0, 0, nn::gx::DISPLAY0_WIDTH, nn::gx::DISPLAY0_HEIGHT);

        // Skip the rendering of camera images until after the cameras are initialized.
        if (s_IsFinishCameraSetting)
        {
            // Render left camera images (SELECT_OUT2)
            DrawCameraImage(1);
        }

        s_RenderSystem.Transfer();
    } //DrawDisplay0
1690 
1691     //=============================================================================
    void DrawDisplay0Ext(void)
    {
        // Right-eye pass of the upper screen: same as DrawDisplay0 but renders
        // the right camera image to the extension (right-eye) framebuffer.
        s_RenderSystem.SetRenderTarget(NN_GX_DISPLAY0_EXT);
        s_RenderSystem.Clear();
        glViewport(0, 0, nn::gx::DISPLAY0_WIDTH, nn::gx::DISPLAY0_HEIGHT);

        // Skip the rendering of camera images until after the cameras are initialized.
        if (s_IsFinishCameraSetting)
        {
            // Render right camera images (SELECT_OUT1)
            DrawCameraImage(0);
        }

        s_RenderSystem.Transfer();
    } //DrawDisplay0Ext
1707 
1708     //=============================================================================
DrawDisplay1(void)1709     void DrawDisplay1(void)
1710     {
1711         s_RenderSystem.SetRenderTarget(NN_GX_DISPLAY1);
1712 
1713         s_RenderSystem.Clear();
1714         s_RenderSystem.SetColor(1.0f, 1.0f, 1.0f);
1715         s_RenderSystem.SetFontSize(8.0f);
1716 
1717         s_RenderSystem.DrawText(8.0f,  8.0f, "Frame: %d", s_FrameCount);
1718 
1719         s_RenderSystem.DrawText(8.0f, 24.0f, "Left  : %2d.%03d fps",
1720                                     static_cast<s32>(s_aFps[CAMERA_LEFT] / 1000),
1721                                     static_cast<s32>(s_aFps[CAMERA_LEFT] % 1000));
1722 
1723         s_RenderSystem.DrawText(8.0f, 32.0f, "Right : %2d.%03d fps",
1724                                     static_cast<s32>(s_aFps[CAMERA_RIGHT] / 1000),
1725                                     static_cast<s32>(s_aFps[CAMERA_RIGHT] % 1000));
1726 
1727         s_RenderSystem.DrawText(8.0f, 48.0f, "Vsync timing diff: %lld usec", s_VsyncTimingDifference);
1728 
1729         s_RenderSystem.DrawText(8.0f, 64.0f, "Camera capture: %s",
1730                                     (s_IsCameraCaptureEnable && s_IsCameraActiveTarget) ? "Enable" : "Disable");
1731 
1732         s_RenderSystem.DrawText(8.0f, 72.0f, "Camera device:  %s",
1733                                     (s_IsCameraActiveTarget) ? "Active" : "Deactive");
1734 
1735         if (s_CameraFrameCount < FRAME_NUM_FOR_STABILIZE)
1736         {
1737             s_RenderSystem.DrawText(8.0, 88.0f, "Waiting for auto exposure stabilization");
1738         }
1739 
1740         s_RenderSystem.DrawText(8.0f, 216.0f, "Y button     : Stop/Restart capture");
1741         s_RenderSystem.DrawText(8.0f, 224.0f, "X button     : Deactive/Active camera");
1742         s_RenderSystem.DrawText(8.0f, 232.0f, "Start button : Finalize");
1743 
1744         s_RenderSystem.Transfer();
1745         s_RenderSystem.SwapBuffers();
1746     } //DrawDisplay1()
1747 
1748     //==============================================================================
    void SetCalibration(void)
    {
        // Computes the per-eye calibration matrices (s_aCalMatrix) and scale
        // (s_CalScale) that DrawCameraImage applies to correct the stereo
        // cameras' manufacturing position offsets and set the parallax.
        //
        // Calculates the units of translational motion for correction.
        // Currently, object size is  2.048 * 2.048, and a 512x384 region trimmed from the VGA image is applied as the texture to here. So the amount you need to move the image to move it by only 1 pixel is 2.048 / 512 = 0.004.
        //
        //
        // With this setting, even if the camera image size is changed from VGA image to 512x384, the texture size does not change when the VGA image has been trimmed.
        //
        // However, the image is reduced internally by the camera, making the subject smaller compared to the VGA image, so the amount of translational motion to correct should be less by that amount
        //
        // At this time, the unit of translational motion for correction is 0.004*512/640 = 0.0032.
        f32 step = 0.004f * static_cast<f32>(s_OriginalWidth) / 640.0f;

        // Get calibration data.
        nn::camera::GetStereoCameraCalibrationData(&s_CalData);

        // Calculate the parallax.
        // The horizontal parallax determines the apparent depth of the subject (how much the subject appears to jump out of or sink into the screen).
        // To adjust the apparent depth of a camera subject, move the left and/or right camera images horizontally.
        // (Because the physical camera positions are fixed, this operation does not change the solidity of the subject.)
        // Here we find the parallax at a distance of 1 m so that a subject at that distance appears to sit directly on the surface of the LCD.
        f32 parallax = nn::camera::GetParallax(s_CalData, 1.0f);

        // Get a calibration matrix.
        // The size of the (stereoscopic) region over which the left and right images overlap differs from system to system.
        // This function first takes for the region over which the left and right images overlap and then finds a rectangular region whose sides have a ratio of 400:240.
        // If the rectangular region is smaller or larger than  400x240, the correction matrix is scaled so the cropped rectangular region can be displayed as 400x240.
        //
        // The correction matrix is scaled, so even though we were thinking of setting the screen display to "pixel-by-pixel" above, in fact the image cannot be displayed as pixel-by-pixel.
        //
        // If the 9th argument is set to a value larger than 400 (for example, to 480) then the correction matrix will be scaled such that the obtained rectangular region is larger than the width of the screen (400), so the edges of the image will exceed the size of the screen.
        //
        //This allows us to adjust the parallax even more.
        // (This could be used to allow the user to adjust the parallax by hand, for example.)
        // By specifying 0.0f as the sixth argument, you get a matrix that projects subjects an infinite distance away onto the surface of the screen. By specifying the return value from the GetParallaxOnChart function, you get a matrix that projects subjects 250 mm away onto the surface of the screen.
        //
        nn::camera::GetStereoCameraCalibrationMatrixEx(
                        &s_aCalMatrix[CAMERA_RIGHT],
                        &s_aCalMatrix[CAMERA_LEFT],
                        &s_CalScale,
                        s_CalData,
                        step,
                        parallax,
                        s_TrimmingWidth,
                        s_TrimmingHeight,
                        400,
                        240);
    }
1797 
1798     //=============================================================================
1799     /* Please see man pages for details
1800 
1801 
1802      */
    void DrawCameraImage(s32 index)
    {
        // Draws one camera image as a textured quad on the current render target.
        // index: 0 draws the right camera image, 1 the left (see the callers
        // DrawDisplay0Ext and DrawDisplay0).
        if (!s_ExistRgbImage[index])
        {
            // Do no render if there is no RGB image.
            return;
        }

        glBindTexture(GL_TEXTURE_2D, s_Texture[index]);

        // Update texture immediately after Y2R conversion
        if (s_IsUpdateRgbImage[index])
        {
            // NOTE(review): NN_GX_MEM_FCRAM | GL_NO_COPY_FCRAM_DMP presumably
            // makes GL reference s_paRgbBuffer in FCRAM directly instead of
            // copying it -- confirm against the DMP GL extension docs.
            glTexImage2D(
                GL_TEXTURE_2D | NN_GX_MEM_FCRAM | GL_NO_COPY_FCRAM_DMP,
                0,
                GL_RGB_NATIVE_DMP,
                s_TextureWidth,
                s_TextureHeight,
                0,
                GL_RGB_NATIVE_DMP,
                GL_UNSIGNED_BYTE,       // GL_UNSIGNED_SHORT_5_5_5_1 when the Y2R output format is 16-bit RGB
                s_paRgbBuffer[index]);
            s_IsUpdateRgbImage[index] = false;
        }

        // Bind the quad geometry and vertex attributes set up in LoadObjects.
        ReadyObjects();

        glUniform1i(glGetUniformLocation(s_ProgramID, "dmp_Texture[0].samplerType"), GL_TEXTURE_2D);

        // Sampling state: bilinear filtering, repeat wrapping.
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, s_Texture[index]);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);

        SetTextureCombiner();

        nn::math::Matrix44 proj, m;

        // Determine the frustum.
        // Since height and width are reversed,
        // DISPLAY0_HEIGHT : 400
        // DISPLAY0_WIDTH  : 240
        // Note that the values are the above.

        nn::math::MTX44Frustum(
            &proj,
            -0.02f,
            0.02f,
            -0.02f * static_cast<f32>(nn::gx::DISPLAY0_HEIGHT) / static_cast<f32>(nn::gx::DISPLAY0_WIDTH),
            0.02f  * static_cast<f32>(nn::gx::DISPLAY0_HEIGHT) / static_cast<f32>(nn::gx::DISPLAY0_WIDTH),
            0.2f,
            10.f);
        // Transposed before upload with GL_FALSE (GL does not transpose);
        // NOTE(review): this implies nn::math stores matrices row-major -- confirm.
        nn::math::MTX44Transpose(&m, &proj);
        glUniformMatrix4fv(glGetUniformLocation(s_ProgramID, "uProjection"), 1, GL_FALSE, static_cast<f32*>(m));

        // Here we consider where to place the camera (viewpoint) so that the image is displayed pixel-by-pixel.
        // Here we consider setting the object size to 2.048 x 2.048, and taking a 512x512 pixel image and pasting it as a texture to the object.
        //
        // Because the screen size is 400x240 here, the pixel displayed at the upper-left corner has the coordinates (-0.8, 0.48).
        // Because the upper-left corner of the near clipping plane has the coordinates (-0.02*400/240, -0.02) and the near clipping plane is separated from the camera by 0.2 along the z-axis, the distance from the camera to the object along the z-axis is 4.8, as determined by the ratio -0.48/0.02 = z/0.2.
        //
        //

        nn::math::Matrix34 eye;
        nn::math::Vector3 camPos(0.f, 0.f, 4.8f);
        nn::math::Vector3 camUp( 0.f, 1.f,  0.f);
        nn::math::Vector3 target(0.f, 0.f,  0.f);
        nn::math::MTX34LookAt(&eye, &camPos, &camUp, &target);

        nn::math::MTX34  tmp(eye);
        {
            nn::math::MTX34 move;
            nn::math::MTX34Identity(&move);

            // Calibrate the stereo cameras (correct the position offsets).
            // Because there are errors in stereo camera placement during manufacturing, the position of the camera subject will be offset between the left and right cameras.
            // (There are individual differences in the size of this offset.)
            // As a result, always correct this offset when rendering.
            nn::math::MTX34Mult(&move, &s_aCalMatrix[index], &move);

            // Rotate 90 degrees to the right because the coordinate system is defined by rotating the system, held horizontally, by 90 degrees to the left.
            nn::math::MTX34 rot90;
            nn::math::MTX34RotXYZDeg(&rot90, 0.0f, 0.0f, -90.0f);
            nn::math::MTX34Mult(&move, &rot90, &move);

            // Apply the combined calibration + rotation to the view matrix.
            nn::math::MTX34Mult(&tmp, &tmp, &move);
        }
        nn::math::MTX44 mv(tmp);

        nn::math::MTX44Transpose(&mv, &mv);
        glUniformMatrix4fv(glGetUniformLocation(s_ProgramID, "uModelView"), 1, GL_FALSE, static_cast<f32*>(mv));

        glDisable(GL_BLEND);

        // Disable the depth test
        glDisable(GL_DEPTH_TEST);

        // Draw the quad (4 vertices, triangle strip) from the bound index buffer.
        glDrawElements(GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, 0);
    } //DrawCameraImage()
1905 
1906 } //Namespace
1907