#include <vector>
#include <iostream>
#include <fstream>
#include <opencv2/core.hpp>
#include <opencv2/core/utility.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/phase_unwrapping.hpp>
#include <opencv2/structured_light.hpp>
using namespace cv;
using namespace std;
static const char* keys =
{
"{@width | | Projector width}"
"{@height | | Projector height}"
"{@periods | | Number of periods}"
"{@setMarkers | | Patterns with or without markers}"
"{@horizontal | | Patterns are horizontal}"
"{@methodId | | Method to be used}"
"{@outputPatternPath | | Path to save patterns}"
"{@outputWrappedPhasePath | | Path to save wrapped phase map}"
"{@outputUnwrappedPhasePath | | Path to save unwrapped phase map}"
"{@outputCapturePath | | Path to save the captures}"
"{@reliabilitiesPath | | Path to save reliabilities}"
};
static void help()
{
cout << "\nThis example generates sinusoidal patterns" << endl;
cout << "To call: ./example_structured_light_createsinuspattern <width> <height>"
" <number_of_period> <set_marker>(bool) <horizontal_patterns>(bool) <method_id>"
" <output_captures_path> <output_pattern_path>(optional) <output_wrapped_phase_path> (optional)"
" <output_unwrapped_phase_path>" << endl;
}
int main(int argc, char **argv)
{
if( argc < 2 )
{
help();
return -1;
}
// Retrieve the parameters passed on the command line
CommandLineParser parser(argc, argv, keys);
structured_light::SinusoidalPattern::Params params;
phase_unwrapping::HistogramPhaseUnwrapping::Params paramsUnwrapping;
params.width = parser.get<int>(0);
params.height = parser.get<int>(1);
params.nbrOfPeriods = parser.get<int>(2);
params.setMarkers = parser.get<bool>(3);
params.horizontal = parser.get<bool>(4);
params.methodId = parser.get<int>(5);
String outputPatternPath = parser.get<String>("@outputPatternPath");
String outputWrappedPhasePath = parser.get<String>("@outputWrappedPhasePath");
String outputUnwrappedPhasePath = parser.get<String>("@outputUnwrappedPhasePath");
String outputCapturePath = parser.get<String>("@outputCapturePath");
String reliabilitiesPath = parser.get<String>("@reliabilitiesPath");
// Distance (in pixels) between two consecutive markers, used when setMarkers is true
params.nbrOfPixelsBetweenMarkers = 70;
Ptr<structured_light::SinusoidalPattern> sinus =
structured_light::SinusoidalPattern::create(makePtr<structured_light::SinusoidalPattern::Params>(params));
Ptr<phase_unwrapping::HistogramPhaseUnwrapping> phaseUnwrapping;
vector<Mat> patterns;
Mat shadowMask;
Mat unwrappedPhaseMap, unwrappedPhaseMap8;
Mat wrappedPhaseMap, wrappedPhaseMap8;
// Generate the three sinusoidal patterns
sinus->generate(patterns);
// Open the camera (a Prosilica GigE camera through the PvAPI backend) and ask for 8-bit grayscale frames
VideoCapture cap(CAP_PVAPI);
if( !cap.isOpened() )
{
cout << "Camera could not be opened" << endl;
return -1;
}
cap.set(CAP_PROP_PVAPI_PIXELFORMAT, CAP_PVAPI_PIXELFORMAT_MONO8);
// Project the patterns in a fullscreen window
namedWindow("pattern", WINDOW_NORMAL);
setWindowProperty("pattern", WND_PROP_FULLSCREEN, WINDOW_FULLSCREEN);
imshow("pattern", patterns[0]);
cout << "Press any key when ready" << endl;
waitKey();
int nbrOfImages = 30;
int count = 0;
vector<Mat> img(nbrOfImages);
Size camSize(-1, -1);
// Project each pattern in turn and grab the corresponding camera frame (30 ms between captures)
while( count < nbrOfImages )
{
for( int i = 0; i < (int)patterns.size(); ++i )
{
imshow("pattern", patterns[i]);
waitKey(30);
cap >> img[count];
count += 1;
}
}
cout << "press enter when ready" << endl;
bool loop = true;
while ( loop )
{
// Wait for the Enter key (ASCII code 10) before computing the phase maps
char c = (char) waitKey(0);
if( c == 10 )
{
loop = false;
}
}
switch( params.methodId )
{
case structured_light::FTP:
for( int i = 0; i < nbrOfImages; ++i )
{
vector<Mat> captures;
if( i == nbrOfImages - 2 )
{
captures.push_back(img[i]);
captures.push_back(img[i-1]);
captures.push_back(img[i+1]);
}
else if( i == nbrOfImages - 1 )
{
captures.push_back(img[i]);
captures.push_back(img[i-1]);
captures.push_back(img[i-2]);
}
else
{
captures.push_back(img[i]);
captures.push_back(img[i+1]);
captures.push_back(img[i+2]);
}
sinus->computePhaseMap(captures, wrappedPhaseMap, shadowMask);
if( camSize.height == -1 )
{
camSize.height = img[i].rows;
camSize.width = img[i].cols;
paramsUnwrapping.height = camSize.height;
paramsUnwrapping.width = camSize.width;
phaseUnwrapping =
phase_unwrapping::HistogramPhaseUnwrapping::create(paramsUnwrapping);
}
sinus->unwrapPhaseMap(wrappedPhaseMap, unwrappedPhaseMap, camSize, shadowMask);
phaseUnwrapping->unwrapPhaseMap(wrappedPhaseMap, unwrappedPhaseMap, shadowMask);
Mat reliabilities, reliabilities8;
phaseUnwrapping->getInverseReliabilityMap(reliabilities);
reliabilities.convertTo(reliabilities8, CV_8U, 255, 128);
ostringstream tt;
tt << i;
imwrite(reliabilitiesPath + tt.str() + ".png", reliabilities8);
unwrappedPhaseMap.convertTo(unwrappedPhaseMap8, CV_8U, 1, 128);
wrappedPhaseMap.convertTo(wrappedPhaseMap8, CV_8U, 255, 128);
if( !outputUnwrappedPhasePath.empty() )
{
ostringstream name;
name << i;
imwrite(outputUnwrappedPhasePath + "_FTP_" + name.str() + ".png", unwrappedPhaseMap8);
}
if( !outputWrappedPhasePath.empty() )
{
ostringstream name;
name << i;
imwrite(outputWrappedPhasePath + "_FTP_" + name.str() + ".png", wrappedPhaseMap8);
}
}
break;
case structured_light::PSP:
case structured_light::FAPS:
for( int i = 0; i < nbrOfImages - 2; ++i )
{
vector<Mat> captures;
captures.push_back(img[i]);
captures.push_back(img[i+1]);
captures.push_back(img[i+2]);
sinus->computePhaseMap(captures, wrappedPhaseMap, shadowMask);
if( camSize.height == -1 )
{
camSize.height = img[i].rows;
camSize.width = img[i].cols;
paramsUnwrapping.height = camSize.height;
paramsUnwrapping.width = camSize.width;
phaseUnwrapping =
phase_unwrapping::HistogramPhaseUnwrapping::create(paramsUnwrapping);
}
sinus->unwrapPhaseMap(wrappedPhaseMap, unwrappedPhaseMap, camSize, shadowMask);
unwrappedPhaseMap.convertTo(unwrappedPhaseMap8, CV_8U, 1, 128);
wrappedPhaseMap.convertTo(wrappedPhaseMap8, CV_8U, 255, 128);
phaseUnwrapping->unwrapPhaseMap(wrappedPhaseMap, unwrappedPhaseMap, shadowMask);
Mat reliabilities, reliabilities8;
phaseUnwrapping->getInverseReliabilityMap(reliabilities);
reliabilities.convertTo(reliabilities8, CV_8U, 255, 128);
ostringstream tt;
tt << i;
imwrite(reliabilitiesPath + tt.str() + ".png", reliabilities8);
if( !outputUnwrappedPhasePath.empty() )
{
ostringstream name;
name << i;
if( params.methodId == structured_light::PSP )
imwrite(outputUnwrappedPhasePath + "_PSP_" + name.str() + ".png", unwrappedPhaseMap8);
else
imwrite(outputUnwrappedPhasePath + "_FAPS_" + name.str() + ".png", unwrappedPhaseMap8);
}
if( !outputWrappedPhasePath.empty() )
{
ostringstream name;
name << i;
if( params.methodId == structured_light::PSP )
imwrite(outputWrappedPhasePath + "_PSP_" + name.str() + ".png", wrappedPhaseMap8);
else
imwrite(outputWrappedPhasePath + "_FAPS_" + name.str() + ".png", wrappedPhaseMap8);
}
if( !outputCapturePath.empty() )
{
ostringstream name;
name << i;
if( params.methodId == structured_light::PSP )
imwrite(outputCapturePath + "_PSP_" + name.str() + ".png", img[i]);
else
imwrite(outputCapturePath + "_FAPS_" + name.str() + ".png", img[i]);
// The last two captures are saved together with the last phase map, which uses img[i+1] and img[i+2]
if( i == nbrOfImages - 3 )
{
if( params.methodId == structured_light::PSP )
{
ostringstream nameBis;
nameBis << i+1;
ostringstream nameTer;
nameTer << i+2;
imwrite(outputCapturePath + "_PSP_" + nameBis.str() + ".png", img[i+1]);
imwrite(outputCapturePath + "_PSP_" + nameTer.str() + ".png", img[i+2]);
}
else
{
ostringstream nameBis;
nameBis << i+1;
ostringstream nameTer;
nameTer << i+2;
imwrite(outputCapturePath + "_FAPS_" + nameBis.str() + ".png", img[i+1]);
imwrite(outputCapturePath + "_FAPS_" + nameTer.str() + ".png", img[i+2]);
}
}
}
}
break;
default:
cout << "error" << endl;
}
cout << "done" << endl;
if( !outputPatternPath.empty() )
{
for( int i = 0; i < 3; ++ i )
{
ostringstream name;
name << i + 1;
imwrite(outputPatternPath + name.str() + ".png", patterns[i]);
}
}
// Keep the pattern window open until the Esc key (ASCII code 27) is pressed
loop = true;
while( loop )
{
char key = (char) waitKey(0);
if( key == 27 )
{
loop = false;
}
}
return 0;
}
First, the sinusoidal patterns must be generated. The SinusoidalPattern class parameters have to be set by the user: the projector width and height, the number of periods in the patterns, whether markers are embedded in the patterns, the pattern orientation (horizontal or vertical) and the phase demodulation method (FTP, PSP or FAPS).
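The relevant lines from the listing above fill a SinusoidalPattern::Params structure from the command line and create the pattern generator with it:

structured_light::SinusoidalPattern::Params params;
params.width = parser.get<int>(0);
params.height = parser.get<int>(1);
params.nbrOfPeriods = parser.get<int>(2);
params.setMarkers = parser.get<bool>(3);
params.horizontal = parser.get<bool>(4);
params.methodId = parser.get<int>(5);
params.nbrOfPixelsBetweenMarkers = 70;
Ptr<structured_light::SinusoidalPattern> sinus =
structured_light::SinusoidalPattern::create(makePtr<structured_light::SinusoidalPattern::Params>(params));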
The user can also choose to save the generated patterns, the computed phase maps and the captures by passing the corresponding output paths on the command line.
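For instance, the patterns are only written to disk when an output path was provided; this is the corresponding excerpt from the listing above:

if( !outputPatternPath.empty() )
{
for( int i = 0; i < 3; ++i )
{
ostringstream name;
name << i + 1;
imwrite(outputPatternPath + name.str() + ".png", patterns[i]);
}
}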
The number of patterns is always equal to three, no matter which method is used to compute the phase maps. The three patterns are projected in a loop, which is fine since the projected sequence is cyclical.
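Generating them is a single call on the pattern object created above; the patterns vector then holds the three images to project:

vector<Mat> patterns;
sinus->generate(patterns); // patterns.size() == 3 for FTP, PSP and FAPS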
Once the patterns have been generated, the camera is opened and the patterns are projected in a fullscreen window. In this tutorial, a Prosilica camera is used to capture grayscale images. When the first pattern is displayed by the projector, the user can press any key to start the projection sequence.
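In the listing above, this corresponds to opening the camera through the PvAPI backend, switching the projection window to fullscreen and waiting for a key press:

VideoCapture cap(CAP_PVAPI);
cap.set(CAP_PROP_PVAPI_PIXELFORMAT, CAP_PVAPI_PIXELFORMAT_MONO8);
namedWindow("pattern", WINDOW_NORMAL);
setWindowProperty("pattern", WND_PROP_FULLSCREEN, WINDOW_FULLSCREEN);
imshow("pattern", patterns[0]);
cout << "Press any key when ready" << endl;
waitKey();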
In this tutorial, 30 images are captured, so each of the three patterns is projected ten times. The while loop takes care of the projection and capture process, and the captured images are stored in a vector of Mat. There is a 30 ms delay between two successive captures. When the projection is done, the user has to press Enter to start computing the phase maps.
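The corresponding loop from the listing above alternates the projected pattern and the camera grab:

while( count < nbrOfImages )
{
for( int i = 0; i < (int)patterns.size(); ++i )
{
imshow("pattern", patterns[i]);
waitKey(30); // leave the pattern on screen for 30 ms before grabbing
cap >> img[count];
count += 1;
}
}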
The phase maps are ready to be computed according to the selected method. For FTP, a phase map is computed for each projected pattern, but the shadow mask has to be computed from three successive captures, as explained in [faps]. Therefore, three images are pushed into a vector called captures, and care is taken to keep filling it with three valid images when the last captures are reached. The unwrapping algorithm needs to know the size of the captured images, so we make sure to pass it to the unwrapPhaseMap method. The phase maps are converted to 8-bit images in order to save them as PNG files.
switch( params.methodId )
{
case structured_light::FTP:
for( int i = 0; i < nbrOfImages; ++i )
{
vector<Mat> captures;
if( i == nbrOfImages - 2 )
{
captures.push_back(img[i]);
captures.push_back(img[i-1]);
captures.push_back(img[i+1]);
}
else if( i == nbrOfImages - 1 )
{
captures.push_back(img[i]);
captures.push_back(img[i-1]);
captures.push_back(img[i-2]);
}
else
{
captures.push_back(img[i]);
captures.push_back(img[i+1]);
captures.push_back(img[i+2]);
}
sinus->computePhaseMap(captures, wrappedPhaseMap, shadowMask);
if( camSize.height == -1 )
{
camSize.height = img[i].rows;
camSize.width = img[i].cols;
}
sinus->unwrapPhaseMap(wrappedPhaseMap, unwrappedPhaseMap, camSize, shadowMask);
unwrappedPhaseMap.convertTo(unwrappedPhaseMap8, CV_8U, 1, 128);
wrappedPhaseMap.convertTo(wrappedPhaseMap8, CV_8U, 255, 128);
if( !outputUnwrappedPhasePath.empty() )
{
ostringstream name;
name << i;
imwrite(outputUnwrappedPhasePath + "_FTP_" + name.str() + ".png", unwrappedPhaseMap8);
}
if( !outputWrappedPhasePath.empty() )
{
ostringstream name;
name << i;
imwrite(outputWrappedPhasePath + "_FTP_" + name.str() + ".png", wrappedPhaseMap8);
}
}
break;
For PSP and FAPS, three projected images are used to compute a single phase map. These three images are stored in captures, a vector that works as a FIFO. Here again, the phase maps are converted to 8-bit images in order to save them as PNG files.
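The corresponding excerpt, condensed from the full listing above, slides a window of three successive captures over the sequence:

case structured_light::PSP:
case structured_light::FAPS:
for( int i = 0; i < nbrOfImages - 2; ++i )
{
vector<Mat> captures;
captures.push_back(img[i]);
captures.push_back(img[i+1]);
captures.push_back(img[i+2]);
sinus->computePhaseMap(captures, wrappedPhaseMap, shadowMask);
sinus->unwrapPhaseMap(wrappedPhaseMap, unwrappedPhaseMap, camSize, shadowMask);
unwrappedPhaseMap.convertTo(unwrappedPhaseMap8, CV_8U, 1, 128);
wrappedPhaseMap.convertTo(wrappedPhaseMap8, CV_8U, 255, 128);
// ... the maps are then saved with a _PSP_ or _FAPS_ suffix, as in the full listing above
}
break;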