zynq_7010/MultiFrameCapture.cpp

#include "MultiFrameCapture.h"
#include <thread>
#include <opencv2/opencv.hpp>
#include "DevUtil.h"
#include "Gpio.h"
#include "FpgaComm.h"
#include "gvideoisp1.h"
#include "CameraParam.h"
#include "correct_ultis.h"
#include "filetools.h"
#include "USBProtocol.h"
#include "utilsfunc.h"
#include "CImageMerge.h"
#include "Jpegcompress.h"
#include "CSizedetect.h"
#include "ThreadPool.h"
#include "HCamDevice.h"
const int vsp_A = 45;
const int vsp_B = 45;
// using namespace cv;
MultiFrameCapture::MultiFrameCapture(ScannerGlue glue,
std::shared_ptr<FpgaComm> fpga,
CISVendor vendor) : reset_pin(new GpioOut(Fpga_Reset)),
fpgaLoad(new Gpio(Fpga_Load)),
fpga_conf_initn(new Gpio(Fpga_InitN)),
snaped_index(0),
frame_count(1),
b_snap_run(true),
b_stop_snap(false),
b_imgproc(true),
m_glue(glue),
iImageremain(0),
bScanning(false),
// Defaults: 300 dpi, grayscale
resolution_(DPI_300),
cis_width_(WIDTH),
cis_height_(HEIGHT),
pixels_width_(WIDTH),
pixels_height_(HEIGHT),
paper_size_((unsigned int)PaperSize::G400_AUTO),
color_mode_(GRAY),
is_correct_(0),
is_double_paper_(0)
#ifdef TEST_SIMCAP
,
m_test_pimg(nullptr), m_test_pimg_len(0)
#endif
{
m_capFpageregs = fpga;
video.reset(new HCamDevice);
reset_fpga();
m_snap_thread.reset(new std::thread(&MultiFrameCapture::snaprun, this));
m_imgproc_thread.reset(new std::thread(&MultiFrameCapture::procimage, this));
}
MultiFrameCapture::~MultiFrameCapture()
{
if (m_capFpageregs.get())
m_capFpageregs.reset();
if (video.get())
video.reset();
m_frameinfos.Clear();
m_frameinfos.ShutDown();
if (m_imgproc_thread.get())
{
if (m_imgproc_thread->joinable())
{
b_imgproc = false;
m_imgproc_thread->join();
}
}
#ifdef TEST_SIMCAP
if (m_test_pimg)
free(m_test_pimg);
#endif
}
void MultiFrameCapture::SetParent(void *scanner)
{
}
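// open(): opens the camera at the configured CIS geometry, then drains any
// frames still buffered from a previous session so the next scan starts clean.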
void MultiFrameCapture::open()
2023-07-17 03:29:37 +00:00
{
int ret = video->open_device(cis_width_, cis_height_);
if (ret < -1)
return;

// Drain any frames still queued in the driver (short 10 ms timeout per read).
int i = 1;
char *buf = NULL;
while (i >= 0)
{
i = video->HtCamReadCaptureFrame((void **)&buf, 10);
}
}
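// snap(): arms a new capture. Starts CIS video capturing, bumps the capture
// index and wakes the snaprun() worker waiting on m_cv_snap.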
void MultiFrameCapture::snap()
{
#ifndef TEST_SIMCAP
std::lock_guard<std::mutex> m_lock(m_mtx_snap);
b_stop_snap = b_end_snap = false;
video->HtCamStartVideoCapturing();
snaped_index++;
m_cv_snap.notify_all();
#endif
}
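// stopsnap(): asks the CIS to stop sampling. Only effective in auto-size mode;
// the snap worker then drains the frames the CIS has already captured.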
void MultiFrameCapture::stopsnap(bool autosize)
{
printf("stop stop stop\r\n");
if (autosize)
{
video->HtCamStopSampling();
b_stop_snap = true;
}

//b_stop_snap =false;
//video->HtCamStopVideoCapturing();
}
void MultiFrameCapture::close()
{
printf("close close close\r\n");
if (video.get())
video->close_device();
}
int MultiFrameCapture::read(int addr)
{
return m_capFpageregs->read(addr);
}
void *MultiFrameCapture::readFrameTest(int timeout)
{
return nullptr;
}
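// compute_frame(): returns the scan length in lines for the given paper size
// at the given dpi (the cy component of the paper's pixel size).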
uint32_t MultiFrameCapture::compute_frame(int paper_size,int dpi)
{
SIZE size = GetPaperSize((PaperSize)paper_size,dpi);
return size.cy;
}
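// GetPaperSize(): converts the paper's physical size stored in paper_map_
// (assumed to be millimetres, given the /25.4 conversion) to pixels at the
// requested dpi, e.g. a 210 mm dimension at 300 dpi maps to 210 * 300 / 25.4 ≈ 2480
// pixels. Falls back to a fixed default for unknown sizes.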
SIZE MultiFrameCapture::GetPaperSize(PaperSize paper, int dpi)
{
if (paper_map_.find(paper) != paper_map_.end())
{
SIZE resize{2338,3307};
resize.cx = paper_map_[paper].cx * dpi / 25.4;
resize.cy = paper_map_[paper].cy * dpi / 25.4;
return resize;
}
return SIZE{2338, 3307};
}
void MultiFrameCapture::UpdateScanParam(HG_ScanConfiguration config)
{
m_config = config;
if (!video.get())
{
return;
}
resolution_ = config.params.dpi == 3 ? DPI_600 : DPI_300; // resolution_: 0 = 600 dpi, 1 = 300 dpi; the PC sends config.params.dpi as 2 or 3 (2 = 300 dpi, 3 = 600 dpi)
color_mode_ = config.params.isColor == 1 ? COLOR : GRAY;
is_correct_ = config.params.isCorrect;
paper_size_ = config.params.pageSize;
is_double_paper_ = config.params.doubleFeeded;
cis_width_ = resolution_ == 0 ? WIDTH * 2 : WIDTH; // CIS width: at a given DPI, colour and grayscale use the same width
//width_ = paper_size_ == PaperSize::G400_MAXSIZE || paper_size_ ==PaperSize::G400_MAXAUTO &&
cis_height_ = HEIGHT;
pixels_width_ = color_mode_ == 1 ? cis_width_ * 3 : cis_width_;
pixels_height_ = color_mode_ == 1 ? cis_height_ / 3 : cis_height_;
video->HtCamSetClolr(color_mode_);
video->HtCamSetDpi(resolution_);
printf(" -----------------------snap dpi = %d resolution = %d------------------\r\n", config.params.dpi, config.params.isColor);
FPGAConfigParam fpgaparam = GetFpgaparam(config.params.dpi, config.params.isColor);
video->HtCamSetSpTime(fpgaparam.Sp); // 2344 for gray; latest value is 2650 as of 2023-8-10
// fpgaparam.Sp=0;
// video->HtCamGetSpTime(fpgaparam.Sp);
printf(" -----------------------HtCamSetSpTime%d------------------ \r\n", fpgaparam.Sp);
video->HtCamSetStSp(fpgaparam.MaxBright);
printf(" -----------------------HtCamSetStSp%d------------------\r\n", fpgaparam.MaxBright);
{
int val = config.params.dpi == 3 ? 600 : (config.params.dpi == 2 ? 300 : 200);
uint32_t cnt = compute_frame(paper_size_, val) / HEIGHT + 1; // request one extra frame
video->HtCamSetFrameCnt(cnt);
printf(" -----------------------frame count set to: %d------------------\r\n", cnt);
}
printf("resolution_:%d\r\n", resolution_);
printf("color_mode_:%d\r\n", color_mode_);
printf("paper_size_:%d\r\n", paper_size_);
printf("cis_width_:%d\r\n", cis_width_);
printf("cis_height_:%d\r\n", cis_height_);
printf("pixels_width_:%d\r\n", pixels_width_);
printf("pixels_height_:%d\r\n", pixels_height_);
#ifdef TEST_SIMCAP
if (m_test_pimg)
free(m_test_pimg);

FILE *f_img;
if (m_config.params.isColor == 1) // color
f_img = fopen("/home/root/color.bmp", "rb");
else
f_img = fopen("/home/root/gray.bmp", "rb");

if (f_img == nullptr)
{
printf("!!!!!!!!!! error while reading image \r\n");
return;
}

fseek(f_img, 0, SEEK_END);
m_test_pimg_len = ftell(f_img);
m_test_pimg = (unsigned char *)malloc(m_test_pimg_len);
fseek(f_img, 0, SEEK_SET);
fread(m_test_pimg, 1, m_test_pimg_len, f_img);
printf("!!!!!!!!!! info: reading image len = %d m_test_pimg = %p \r\n", m_test_pimg_len, m_test_pimg);
fclose(f_img);
#endif
}
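// createCorrect(): joins any previous correction thread, stops the countdown
// and runs correctcolor() for the requested mode on a new thread.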
void MultiFrameCapture::createCorrect(int correctmode)
{
if (m_correctThread.joinable())
m_correctThread.join();
stop_countdown();
m_correctThread = std::thread(&MultiFrameCapture::correctcolor, this, correctmode);
}
void MultiFrameCapture::setFPGATriggerMode(bool autotrigger, int delay)
{
}
void MultiFrameCapture::setFanMode(int mode)
{
}
void MultiFrameCapture::fpgaReload()
{
fpgaLoad->setValue(Gpio::Low);
std::this_thread::sleep_for(std::chrono::milliseconds(15));
fpgaLoad->setValue(Gpio::High);
std::this_thread::sleep_for(std::chrono::milliseconds(15));
m_capFpageregs->resetADC();
}
bool MultiFrameCapture::capturerImage()
{
// if (m_config.params.pageSize == (int)PaperSize::G400_AUTO ||
// m_config.params.pageSize == (int)PaperSize::G400_MAXAUTO ||
// m_config.params.pageSize == (int)PaperSize::G400_MAXSIZE)
// {
// }
// else
// {
// int color = this->color();
// int channels = color ? 3 : 1;
// int width = this->width() * channels * 6;
// int dpi = m_config.params.dpi;
// for (int i = 0; i < frame_count; i++)
// {
// V4L2_DATAINFO_Ex frame_info;
// StopWatch sw_read;
// auto data = video->read_frame(150);
// printf("!!!!!!!!!!! read frame[%d] = %.2f \n", i, sw_read.elapsed_ms());
// frame_info.lost_frame = data ? false : true;
// frame_info.last_frame = (i == (frame_count - 1));
// frame_info.frame_index = i;
// if (data)
// {
// printf("+++++++ success: read frame[%d] \n", i);
// cv::Mat mat(FRAME_HEIGHT, width, CV_8UC1, data);
// StopWatch sw_clone;
// frame_info.mat = mat.clone();
// printf("!!!!!!!!!! Clone time = %0.3f \n",sw_clone.elapsed_ms());
// frame_info.pixtype = color;
// frame_info.dpi = dpi;
// frame_info.width = frame_info.height = 0; // 从mat信息中获取宽高信息
// }
// else
// {
// printf("------- error: lost frame[%d] \n", i);
// }
// m_frameinfos.Put(frame_info);
// }
// }
return true;
}
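// waitsnapdone(): blocks until snaprun() signals m_cv_snapdone, then queues an
// end-of-scan marker frame (snap_end = true) carrying the given error state.
// In TEST_SIMCAP builds it simply hands the canned test image to the glue layer.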
void MultiFrameCapture::waitsnapdone(int state)
{
#ifdef TEST_SIMCAP
HG_JpegCompressInfo info;
info.DataLength = m_test_pimg_len;
info.pJpegData = (unsigned char *)m_test_pimg;
m_glue.m_imageready(info);
#else
printf("!!!!!!!!!!!!!!!! m_cv_snapdone wait \n");
V4L2_DATAINFO_Ex info;
info.snaped_index = snaped_index;
info.snap_end = true;
info.error_code = state;
if (b_end_snap)
{
m_frameinfos.Put(info);
return;
}
std::unique_lock<std::mutex> lock(m_mtx_snapdone);
m_cv_snapdone.wait(lock);
b_end_snap = true;
m_frameinfos.Put(info);
//printf("!!!!!!!!!!!!!!!! m_cv_snapdone wait done \n");
#endif
}
bool MultiFrameCapture::IsImageQueueEmpty()
{
//printf(" m_frameinfos.Size = %d iImageremain = %d bScanning = %d\n", m_frameinfos.Size(), iImageremain.operator int(), bScanning);
return (m_frameinfos.Size() == 0 && iImageremain == 0) && !bScanning;
}
void MultiFrameCapture::resetimageremain()
{
iImageremain = 0;
}
std::atomic_int &MultiFrameCapture::getimageremain()
{
return iImageremain;
}
void MultiFrameCapture::clearimages()
{
m_frameinfos.Clear();
}
void MultiFrameCapture::setScanFlag(bool brun)
{
bScanning = brun;
}
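// reset_fpga(): pulses the FPGA reset pin low/high with 50 ms settle times and
// resets the ADC afterwards; reload_fpga() below re-triggers FPGA configuration
// via the load/init pins.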
void MultiFrameCapture::reset_fpga()
{
reset_pin->setValue(Gpio::Low);
std::this_thread::sleep_for(std::chrono::milliseconds(50));
reset_pin->setValue(Gpio::High);
std::this_thread::sleep_for(std::chrono::milliseconds(50));
m_capFpageregs->resetADC();
}
void MultiFrameCapture::reload_fpga()
{
fpgaLoad->setValue(Gpio::Low);
std::this_thread::sleep_for(std::chrono::milliseconds(5));
fpga_conf_initn->setValue(Gpio::Low);
std::this_thread::sleep_for(std::chrono::milliseconds(200));
fpgaLoad->setValue(Gpio::High);
std::this_thread::sleep_for(std::chrono::milliseconds(15));
fpga_conf_initn->setValue(Gpio::High);
std::this_thread::sleep_for(std::chrono::milliseconds(15));
std::this_thread::sleep_for(std::chrono::seconds(5));
printf("reload done \n");
}
void MultiFrameCapture::set_gain(int ix, int val)
{
for (int i = 0; i < 6; i++)
{
if (ix)
m_capFpageregs->setAGain(i, val);
else
m_capFpageregs->setBGain(i, val);
}
}
void MultiFrameCapture::set_offset(int ix, int val)
{
for (int i = 0; i < 6; i++)
{
if (ix)
m_capFpageregs->setAOffset(i, val);
else
m_capFpageregs->setBOffset(i, val);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
}
void MultiFrameCapture::set_expo(int ix, int val)
{
switch (ix)
{
case 0:
m_capFpageregs->setAExposureR(val);
break;
case 1:
m_capFpageregs->setAExposureG(val);
break;
case 2:
m_capFpageregs->setAExposureB(val);
break;
case 3:
m_capFpageregs->setBExposureR(val);
break;
case 4:
m_capFpageregs->setBExposureG(val);
break;
case 5:
m_capFpageregs->setBExposureB(val);
break;
default:
break;
}
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
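// configFPGAParam(): loads the stored FPGA parameters for the given dpi/mode and
// writes exposure, offset and gain for all six channels (front and back side),
// with short delays between register accesses so each write can settle.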
void MultiFrameCapture::configFPGAParam(int mode, int dpi)
{
FPGAConfigParam fpgaparam = GetFpgaparam(dpi, mode);
// m_capFpageregs->resetADC();
// std::this_thread::sleep_for(std::chrono::milliseconds(50));
int value_a, value_b;
for (int i = 0; i < 6; i++)
{
if (i < 3)
{
set_expo(i, fpgaparam.ExposureF[i]);
LOG("fpgaparam.ExposureF[%d] = %d \n", i, fpgaparam.ExposureF[i]);
}
else
{
set_expo(i, fpgaparam.ExposureB[i % 3]);
LOG("fpgaparam.ExposureB[%d] = %d\n", i % 3, fpgaparam.ExposureB[i % 3]);
}
int A_value, B_value;
std::this_thread::sleep_for(std::chrono::milliseconds(3));
m_capFpageregs->setAOffset(i, fpgaparam.OffsetF[i]);
LOG("fpgaparam.setAOffset[%d] = %d \n", i, fpgaparam.OffsetF[i]);
std::this_thread::sleep_for(std::chrono::milliseconds(3));
m_capFpageregs->getAOffset(i, A_value, B_value);
LOG("fpgaparam.getAOffset[%d] = A_value = %d B_value = %d \n", i, A_value, B_value);
std::this_thread::sleep_for(std::chrono::milliseconds(3));
m_capFpageregs->setBOffset(i, fpgaparam.OffsetB[i]);
LOG("fpgaparam.setBOffset[%d] = %d \n", i, fpgaparam.OffsetB[i]);
std::this_thread::sleep_for(std::chrono::milliseconds(3));
m_capFpageregs->getBOffset(i, A_value, B_value);
LOG("fpgaparam.getBOffset[%d] = A_value = %d B_value = %d \n", i, A_value, B_value);
std::this_thread::sleep_for(std::chrono::milliseconds(3));
m_capFpageregs->setAGain(i, fpgaparam.GainF[i]);
LOG("fpgaparam.GainF[%d] = %d\n", i, fpgaparam.GainF[i]);
std::this_thread::sleep_for(std::chrono::milliseconds(3));
m_capFpageregs->setBGain(i, fpgaparam.GainB[i]);
LOG("fpgaparam.GainB[%d] = %d\n", i, fpgaparam.GainB[i]);
std::this_thread::sleep_for(std::chrono::milliseconds(3));
// m_capFpageregs->getAGain(i,A_value,B_value);
// LOG("fpgaparam.getAGain[%d] = A_value = %d B_value = %d \n", i, A_value,B_value);
// std::this_thread::sleep_for(std::chrono::milliseconds(3));
// m_capFpageregs->getBGain(i,A_value,B_value);
// LOG("fpgaparam.getBGain[%d] = A_value = %d B_value = %d \n", i, A_value,B_value);
// std::this_thread::sleep_for(std::chrono::milliseconds(3));
}
}
int MultiFrameCapture::width()
{
int dpi = m_capFpageregs->getDpi();
int channel = 1;
#ifdef G400
int width = dpi == 0x02 ? 1296 * channel : (dpi == 0x03 ? (2592 * channel) : (864 * channel));
#else
int width = dpi == 0x02 ? 864 * channel : (dpi == 0x03 ? (1728 * channel) : (864 * channel)); // we only support dunnancis now
#endif
return width;
}
int MultiFrameCapture::color()
{
// return m_capFpageregs->getColorMode();
// The FPGA query above is disabled; return the cached mode set in UpdateScanParam
// so callers get a defined value instead of falling off the end of the function.
return color_mode_;
}
// int MultiFrameCapture::imageProcessCurrentFrame()
// {
// }
#include "bmp.h"
static int cnt = 0;
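// snaprun(): capture worker thread. Waits on m_cv_snap, reads the number of
// frames programmed into the CIS and publishes each frame to m_frameinfos via
// the snap_func lambda below. Read timeouts are retried with a growing timeout,
// capped at five consecutive failures; when b_stop_snap is set it drains the
// frames the CIS reports as already captured and then stops video capturing.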
void MultiFrameCapture::snaprun()
{
// snap_func: frame_info carries the metadata template for the frame to publish,
// channels is the image bit depth, num is the number of frames expected,
// time_out is the per-frame read timeout in milliseconds.
auto snap_func = [this](V4L2_DATAINFO_Ex frame_info, int channels, int num, int time_out)
{
unsigned char *data = NULL;
int ret = video->HtCamReadCaptureFrame((void **)&data, time_out);
if (ret == -1 || ret == -2)
{
printf("----------------frame read timed out or failed------------\r\n");
return ret;
}
uint32_t sendLine = video->HtCamReadFpgaRegs(0x000e);
printf("--------------fpga send line ------------:%d\r\n", sendLine);
if (data)
{
frame_info.first_frame = (ret == 0);
frame_info.last_frame = (ret + 1 == num);
printf("got frame width:%d height:%d is_first:%d is_last:%d\r\n", frame_info.width, frame_info.height, frame_info.first_frame, frame_info.last_frame);
cv::Mat mat = cv::Mat(frame_info.height, frame_info.width, CV_8UC1, data, cv::Mat::AUTO_STEP);
//cv::imwrite("/home/root/test.png", mat);
// printf("--------------frame_index------------:%d\r\n",frame_index);
// if (frame_index == 4)
// {
// savebitmap(data,15552,512,"1.bmp");
// }
frame_info.mat = mat.clone();
m_frameinfos.Put(frame_info);
}
return ret;
};
while (b_snap_run)
{
std::unique_lock<std::mutex> lock(m_mtx_snap);
m_cv_snap.wait(lock);

V4L2_DATAINFO_Ex frame_info;
int channels = 1;
frame_info.pixtype = color_mode_;
frame_info.dpi = resolution_;
frame_info.width = pixels_width_;
frame_info.height = pixels_height_;
frame_info.error_code = 0;
frame_info.snaped_index = snaped_index;
frame_info.first_frame = false;
frame_info.last_frame = false;

int color_mode = video->HtCamGetColorMode();
int count = 1;
uint32_t frame_num = 0;
uint32_t frame_cnt = 0;
video->HtCamGetFrameCnt(frame_cnt);
frame_info.frame_index = frame_cnt;
int func_sig = 0;
int time_out = 800; // same read timeout for colour and grayscale
int time_out_cnt = 0;
printf("--------------------- frame_info.width ------------------ :%d\r\n", frame_info.width);
printf("--------------------- frame_info.height ------------------ :%d\r\n", frame_info.height);
printf("--------------------- frame_info.pixtype ------------------ :%d\r\n", frame_info.pixtype);
printf("--------------------- frame_info.dpi ------------------ :%d\r\n", frame_info.dpi);
for (uint32_t i = 1; i <= frame_cnt; i++)
{
printf("***********configured frame count: %d, capturing frame [%d]************\r\n", frame_cnt, i);
frame_info.last_frame = (i == frame_cnt);
//frame_info.frame_index = i;
func_sig = snap_func(frame_info, channels, frame_cnt, time_out);
if (func_sig == -1) // The current frame timed out, so retry it; to avoid hanging here forever, give up after five consecutive timeouts.
{
i--;
time_out += 200;
time_out_cnt++;
if (time_out_cnt >= 5)
{
break;
}
continue;
}

if (b_stop_snap)
{
video->HtCamGetFrameNum(frame_num);
while (frame_num - 1 > func_sig)
{
func_sig = snap_func(frame_info, channels, frame_num, time_out); // same timeout/retry handling as above
printf("-----------captured frame [%d]; CIS reports [%d] frames captured in total -------\r\n", func_sig + 1, frame_num);
if (func_sig == -1)
{
time_out += 200;
time_out_cnt++;
if (time_out_cnt >= 5)
{
break;
}
continue;
}
}
break;
}
}
video->HtCamStopVideoCapturing();
//iImageremain++;
printf("---------- stopped capturing images ----------\r\n");
m_cv_snapdone.notify_all();
b_end_snap = true;
}
}
void MultiFrameCapture::updatesnapstatus(int papertype)
{
b_stop_snap = false;
snaped_index = 0;
}
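// procimage(): image-processing worker thread. Takes frames from m_frameinfos,
// JPEG-compresses them (quality 90), copies the frame metadata into the
// compression info and hands the result to the glue layer via m_imageready.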
void MultiFrameCapture::procimage()
{
static int idx = 0;
ThreadPool prc_pool(4);
std::queue<std::future<cv::Mat>> prc_fu;
unsigned int frames_height;
unsigned int frames_width = 0;
int cnt_ = 0;
while (b_imgproc)
{
V4L2_DATAINFO_Ex frame = m_frameinfos.Take();
static int inx = 0;
if (!frame.mat.empty())
{
JpegCompress cmp(90);
HG_JpegCompressInfo info = cmp.GetCompressedImg(frame.mat);
info.first_frame = frame.first_frame;
info.last_frame = frame.last_frame;
info.index_frame = frame.frame_index;
info.data_type = 0;
//info.DataLength = frame.width *frame.height;
info.width = frame.width;
info.height = frame.height;
// cv::Mat mat = frame.mat.clone();
printf("procimage width:%d height:%d is_first:%d is_last:%d DataLength:%d\r\n", frame.width, frame.height, info.first_frame, info.last_frame, info.DataLength);
cv::imwrite("/home/root/opencv" + to_string(cnt_++) + ".bmp", frame.mat);
m_glue.m_imageready(info);
//iImageremain--;
}
continue;
}
}
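// saveLutImg(): one iteration of the dark/bright flat-field calibration for the
// given dpi/mode. The original implementation below is commented out; the stub
// currently returns false, i.e. it never reports the calibration as converged.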
bool MultiFrameCapture::saveLutImg(int dpi, int mode, bool black)
{
// int config_dpi = dpi == 1 ? 2 : dpi;
// const int offset_indexs[] = {0, 1, 2, 5, 4, 3, 3, 4, 5, 2, 1, 0};
// int channels = mode == IMAGE_COLOR ? 3 : 1;
// int height = 60;
// int width = config_dpi == 0x02 ? 864 : (config_dpi == 0x03 ? 1728 : 864);
// int orgimgwidth = width * 2 * 3 * channels;
// int dstwidth = width * 2 * 3;
// bool isNeedSave = true;
// string log;
// void *data = video->read_frame(1000);
// if (data == NULL)
// {
// isNeedSave = false;
// log = "WARNNING WARNNING WARNNING FAILDED TO READ IMAGE DATA !!!!!!!!!!!!!!!!!!!\r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// return isNeedSave;
// }
// cv::Mat src(height, orgimgwidth, CV_8UC1, data);
// CImageMerge t_marge;
// cv::Mat mrgmat = t_marge.MergeImage(src, dstwidth, height, mode);
// printf("mrgmat width = %d height = %d \n", mrgmat.cols, mrgmat.rows);
// static int inx = 0;
// //imwrite(to_string(++inx)+".jpg",mrgmat);
// // return 0;
// FPGAConfigParam param = GetFpgaparam(dpi, mode);
// if (black) // 暗场
// {
// volatile double offValues[12]{0};
// int blockcount = 12;
// int bandwidth = mrgmat.cols / blockcount;
// for (int n = 0; n < blockcount; n++)
// {
// cv::Mat img = mrgmat(cv::Rect(bandwidth * n, 10, bandwidth, mrgmat.rows - 10)).clone();
// cv::Scalar mean = cv::mean(img);
// // printf("band[%d] mean = %0.2f bandwidth = %d \n", n, mean.val[0],bandwidth);
// offValues[n] = mean.val[0];
// }
// for (int s = 0; s < 2; s++)
// {
// unsigned int offsets[6]; // = (int *)(s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]);
// memcpy(offsets, (s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]), sizeof(param.OffsetF));
// for (int j = 0; j < 6; j++)
// {
// int k = s * 6 + j;
// // double diff = BLACK_DIFF(offValues[k]);
// double diff = 8 - offValues[k];
// double step = radio * diff;
// // int preStep = offsetStep[k];
// // if (step * preStep < 0)
// // {
// // //step = 0 - preStep / 2;
// // step /= 2;
// // }
// // else
// // {
// // radio = 1;
// // }
// if (step < 1 && step > 0.5)
// step = 1;
// if (step < -0.5 && step > -1)
// step = -1;
// // FMT_STEP(step);
// bool isMinStep = abs(step) == 1 && step == offsetStep[k];
// bool isOutBounds = offsets[j] >= 255 && step > 0;
// isOutBounds |= offsets[j] <= 0 && step < 0;
// log += " 暗场校正 :" + std::to_string(k) + ";diff:" + std::to_string(diff) + ";light:" + std::to_string(offValues[k]) + ";offset:" + std::to_string(offsets[j]) + ";step:" + std::to_string(step) + "\r\n";
// if (isOutBounds)
// log += " 第" + std::to_string(k) + "条带暗场校正异常,暗场值无法降低 \r\n";
// else if (abs(step) > 1 || isMinStep)
// {
// offsetStep[k] = (int)(step);
// printf(" k = %d offsets[%d] = %d step = %f mean = %f\n", k, offset_indexs[k], offsets[offset_indexs[k]], step, offValues[k]);
// offsets[offset_indexs[k]] += step;
// log += "offsetStep" + std::to_string(k) + " = " + std::to_string(offsetStep[k]) + ", offset_indexs" + std::to_string(k) + " =" + std::to_string(offset_indexs[k]) + "\r\n";
// if (offsets[offset_indexs[k]] < 1)
// offsets[offset_indexs[k]] = 1;
// if (offsets[offset_indexs[k]] > 255)
// offsets[offset_indexs[k]] = 255;
// isNeedSave = false;
// }
// log += (s == 0 ? "彩色正面" : "彩色背面");
// log += "偏移值:" + std::to_string(offsets[0]) + "," + std::to_string(offsets[1]) + "," + std::to_string(offsets[2]) + "," + std::to_string(offsets[3]) + "," + std::to_string(offsets[4]) + "," + std::to_string(offsets[5]) + "\r\n";
// // log += (s == 0 ? "彩色正面暗场校正完成 \r\n" : "彩色背面暗场校正完成 \r\n");
// // ftt.append_log(log);
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// log = "";
// }
// memcpy((s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]), offsets, sizeof(param.OffsetF));
// // memset(&param.OffsetF[0],0,sizeof(param.OffsetF));
// // memset(&param.OffsetB[0],0,sizeof(param.OffsetF));
// // param.OffsetB[5] =255;
// // param.OffsetF[4] =255;
// }
// if (isNeedSave)
// {
// printf("Save LUT image path :%s \n", param.Flat_BwPath.c_str());
// log = "暗场校正完成 \r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// log = "";
// imwrite(param.Flat_BwPath, mrgmat);
// }
// }
// else // 明场
// {
// if (mode == IMAGE_COLOR)
// {
// volatile double values[2][3];
// cv::Scalar a = mean(mrgmat(Rect(0, 0, mrgmat.cols / 2, mrgmat.rows)));
// cv::Scalar b = mean(mrgmat(Rect(mrgmat.cols / 2, 0, mrgmat.cols / 2, mrgmat.rows)));
// for (char j = 0; j < 3; j++)
// {
// values[0][j] = a.val[j];
// values[1][j] = b.val[j];
// printf("values[0][%d] = %.2f values[1][%d] = %.2f\n", j,values[0][j], j,values[1][j]);
// }
// log = "开始彩色明场校正 \r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// for (int s = 0; s < 2; s++)
// {
// volatile int exposures[3]; // = (int *)(s == 0 ? param.ExposureF : param.ExposureB);
// memcpy((void *)exposures, (s == 0 ? &param.ExposureF[0] : &param.ExposureB[0]), sizeof(param.ExposureB));
// for (int x = 0; x < 3; x++)
// {
// int k = (3 * s + x);
// //int diff = LIGHT_DIFF(*((double *)values + k));
// int diff = param.MaxBright - *((double *)values + k);
// log += " 明场:" + std::to_string(k) + ";diff" + std::to_string(diff) + "\r\n";
// double step = diff * radio;
// int preStep = *((int *)expStep + k);
// if (step * preStep < 0)
// {
// step = 0 - preStep / 2;
// }
// if (step < 1 && step > 0)
// step = 1;
// if (step < 0 && step > -1)
// step = -1;
// bool isMinStep = abs(step) <= 2 && step == *((int *)expStep + k);
// bool isOutBounds = exposures[x] >= (param.Sp - 5) && step > 0;
// isOutBounds |= exposures[x] <= 0 && step < 0;
// if (isOutBounds)
// log += " 第" + to_string(x) + "个明场校正异常 \r\n";
// else if (abs(diff) >= 1 || isMinStep)
// {
// *((int *)expStep + k) = (int)(step);
// exposures[x] += step;
// if (exposures[x] > (param.Sp - 5))
// {
// exposures[x] = (param.Sp - 5);
// }
// if (exposures[x] < 0)
// exposures[x] = 0;
// isNeedSave = false;
// }
// log += " 曝光值:" + to_string(exposures[x]) + "\r\n";
// log += " 调整步长:" + to_string(*((int *)expStep + k)) + "\r\n";
// }
// memcpy((s == 0 ? &param.ExposureF[0] : &param.ExposureB[0]), (void *)exposures, sizeof(param.ExposureB));
// }
// // ftt.append_log(log);
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// if (isNeedSave)
// {
// log = "彩色明场校正完成\r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// log = "";
// imwrite(param.Flat_WhitePath, mrgmat);
// }
// }
// else
// {
// double values[2];
// values[0] = cv::mean(mrgmat(cv::Rect(0, 0, mrgmat.cols / 2, mrgmat.rows))).val[0];
// values[1] = cv::mean(mrgmat(cv::Rect(mrgmat.cols / 2, 0, mrgmat.cols / 2, mrgmat.rows))).val[0];
// printf("values[0] = %.2f values[1] = %.2f\n", values[0], values[1]);
// log = "-----开始灰色明场校正-----\r\n";
// log += " 灰色扫描灰度明场均值:" + to_string(values[0]) + "," + to_string(values[1]) + "\r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// for (int s = 0; s < 2; s++)
// {
// int *exposures = (int *)(s == 0 ? param.ExposureF : param.ExposureB);
// //int diff = LIGHT_DIFF(values[s]);
// int diff = param.MaxBright - values[s];
// double step = diff * radio;
// log += " 明场:" + to_string(s) + ";diff" + to_string(diff) + "\r\n";
// int preStep = expStep[s][0];
// if (step * preStep < 0)
// {
// step = 0 - preStep / 2;
// }
// else
// {
// radio = 1;
// }
// if (step < 1 && step > 0)
// step = 1;
// if (step < 0 && step > -1)
// step = -1;
// int exp = *(exposures + 1);
// std::string ss1(string_format("exp[%d] = %d step = %.3f \r\n", s, exp, step));
// log += ss1;
// bool isMinStep = abs(step) <= 2 && step == expStep[s][0];
// bool isOutBounds = exp >= (param.Sp - 5) && step > 0;
// isOutBounds |= exp <= 0 && step < 0;
// if (isOutBounds)
// log += " 第" + to_string(s) + "个明场校正异常 \r\n";
// else if (abs(diff) > 1 || isMinStep)
// {
// exp += step;
// if (exp < 0)
// exp = 0;
// if (exp > (param.Sp - 5))
// exp = (param.Sp - 5);
// float coffe[3] = {1, 1, 1}; // 0.2, 1,0.51
// for (int k = 0; k < 3; k++)
// {
// *(exposures + k) = (int)(exp * coffe[k]);
// expStep[s][k] = (int)(step);
// std::string exps(string_format("expStep[%d][%d] = %.3f\r\n", s, k, step));
// log += exps;
// std::string ss(string_format("exposures[%d] = %0.3f \r\n", k, exposures[k]));
// log += ss;
// }
// isNeedSave = false;
// }
// }
// // ftt.append_log(log);
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// if (isNeedSave)
// {
// printf("Save LUT image path :%s \n", param.Flat_WhitePath.c_str());
// log = "灰度明场校正完成\r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// log = "";
// imwrite(param.Flat_WhitePath, mrgmat);
// }
// }
// }
// SaveFpgaparam(param);
// printf("exit Save_lut \n");
// return isNeedSave;
return false;
}
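// formatStep(): resets the exposure and offset step tables to their starting
// values before a new calibration run.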
void MultiFrameCapture::formatStep()
{
for (int i = 0; i < 2; i++)
{
for (int j = 0; j < 3; j++)
{
expStep[i][j] = 600;
}
}
for (int i = 0; i < 12; i++)
offsetStep[i] = 64;
}
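// correctcolor(): runs flat-field correction for the combination selected by
// correctmode (0 = all, 1..6 = a single dpi/colour pairing), reporting progress
// to the host through m_deviceevent.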
void MultiFrameCapture::correctcolor(int correctmode)
{
StopWatch sw_correct;
std::string loginfo = "Start Correctcolor 200DPI COLOR \r\n";
if ((correctmode == 0) || (correctmode == 2))
{
loginfo = "Start Correctcolor 200DPI COLOR \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
creatcorrectconfig(0x01, IMAGE_COLOR);
loginfo = "-----------200DPI COLOR Correct Done----------- \r\n\r\n ";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if ((correctmode == 0) || (correctmode == 1))
{
loginfo = "Start Correctcolor 200DPI GRAY \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
creatcorrectconfig(0x01, IMAGE_GRAY);
loginfo = "-----------200DPI Gray Correct Done----------- \r\n\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if ((correctmode == 0) || (correctmode == 4))
{
loginfo = " Start Correctcolor 300DPI COLOR \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
creatcorrectconfig(0x02, IMAGE_COLOR);
loginfo = "-----------300DPI COLOR Correct Done----------- \r\n\r\n ";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if ((correctmode == 0) || (correctmode == 3))
{
loginfo = "Start Correctcolor 300DPI GRAY \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
creatcorrectconfig(0x02, IMAGE_GRAY);
loginfo = "-----------300DPI Gray Correct Done----------- \r\n\r\n ";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if ((correctmode == 0) || (correctmode == 6))
{
loginfo = "Start Correctcolor 600DPI COLOR \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
creatcorrectconfig(0x03, IMAGE_COLOR);
loginfo = "-----------600DPI COLOR Correct Done----------- \r\n\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if ((correctmode == 0) || (correctmode == 5))
{
loginfo = " Start Correctcolor 600DPI GRAY \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
creatcorrectconfig(0x03, IMAGE_GRAY);
loginfo = "-----------600DPI Gray Correct Done----------- \r\n\r\n ";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if ((correctmode < 0) || (correctmode > 6))
{
loginfo = "Unsupported correction mode...\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLAT_FINISHED, "******Correct Done times = " + to_string(sw_correct.elapsed_s()) + " ****** \r\n");
}
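// openDevice(): prepares the FPGA for a calibration capture at the given dpi and
// colour mode - resets the FPGA, writes the capture parameters, then reads and
// discards the first frame.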
void MultiFrameCapture::openDevice(int dpi, int mode)
{
reset_fpga();
bool dunnancis = true;
int channelwidth = dpi == 0x02 ? 864 : (dpi == 0x03 ? 1728 : 864); // 1296 2592 864
int channels = mode == 0x01 ? 3 : 1;
int width = channelwidth * channels;
int frame_height = mode == 0x01 ? 60 * 3 : 60;
int startsample = 202; // 205
auto fpgaparam = GetFpgaparam(dpi, mode);
int t_real_dpi = dpi == 1 ? 2 : (dpi == 2 ? 2 : 3);
ModeFpga fpgamod = {
.colorMode = mode,
.dpi = t_real_dpi,
.led = 1,
.sample = startsample, // 256+39
.adcA = 0,
.adcB = 0,
.selftest = 0,
.sp = fpgaparam.Sp}; // 600DPI 0x1450 300DPI 0xe10
configFPGAParam(mode, dpi);
StopWatch swwv4l2open;
printf("opened video with width = %d height = %d time elapsed = %.2f \n", width, 60 * 2, swwv4l2open.elapsed_ms());
m_capFpageregs->setFrameNum(1);
m_capFpageregs->setFrameHeight(frame_height);
for (int i = 0; i < 1; i++)
{
char *buf = NULL;
video->HtCamReadCaptureFrame((void **)&buf, 200);
std::this_thread::sleep_for(std::chrono::milliseconds(100));
printf("abort first frame \n");
}
//video->close_video();
}
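// creatcorrectconfig(): iterates the dark-field and then the bright-field
// calibration for the given dpi/mode, re-writing the FPGA parameters and
// capturing a strip each pass until saveLutImg() reports convergence, then
// generates the LUT data.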
void MultiFrameCapture::creatcorrectconfig(int dpi, int mode)
{
openDevice(dpi, mode);
bool isDone = false;
formatStep();
int i = 0;
radio = 1;
while (!isDone) // dark field first
{
string log = "============================== iteration " + to_string(i) + " ===============================\r\n";
// ftt.append_log(log);
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
configFPGAParam(mode, dpi);
// ftt.append_log(log);
std::this_thread::sleep_for(std::chrono::milliseconds(5));
m_capFpageregs->enableLed(false);
std::this_thread::sleep_for(std::chrono::milliseconds(5));
m_capFpageregs->capture();
std::this_thread::sleep_for(std::chrono::milliseconds(5));
isDone = saveLutImg(dpi, mode, true); // 0 color_black 1 color_white 2 gray_black 3 gray_white
i++;
}
isDone = false;
formatStep();
while (!isDone) // then bright field
{
configFPGAParam(mode, dpi);
string log = "============================== iteration " + to_string(i) + " ===============================\r\n";
// ftt.append_log(log);
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
std::this_thread::sleep_for(std::chrono::milliseconds(5));
m_capFpageregs->enableLed(true);
std::this_thread::sleep_for(std::chrono::milliseconds(5));
m_capFpageregs->capture();
std::this_thread::sleep_for(std::chrono::milliseconds(5));
isDone = saveLutImg(dpi, mode, false);
i++;
}
printf("creatcorrectconfig %s \n", (mode == IMAGE_COLOR ? " Color" : " Gray"));
creatLUTData(dpi, mode);
//video->close_video();
}
// void MultiFrameCapture::myFloodFill(cv::Mat& image, bool isTwoSide)
// {
// int w = image.cols;
// int h = image.rows;
// cv::Vec3b lt, rt, lb, rb;
// const double threshold = 10;
// if (!isTwoSide)
// {
// lt = image.channels() == 3 ? image.at<cv::Vec3b>(0, 0) : image.at<uchar>(0, 0);
// if (lt[0] < threshold && lt[1] < threshold && lt[2] < threshold)
// cv::floodFill(image, cv::Point(0, 0), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// rt = image.channels() == 3 ? image.at<cv::Vec3b>(0, w - 1) : image.at<uchar>(0, w - 1);
// if (rt[0] < threshold)
// cv::floodFill(image, cv::Point(w - 1, 0), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// lb = image.channels() == 3 ? image.at<cv::Vec3b>(h - 1, 0) : image.at<uchar>(h - 1, 0);
// if (lb[0] < threshold)
// cv::floodFill(image, cv::Point(0, h - 1), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// rb = image.channels() == 3 ? image.at<cv::Vec3b>(h - 1, w - 1) : image.at<uchar>(h - 1, w - 1);
// if (rb[0] < threshold)
// cv::floodFill(image, cv::Point(w - 1, h - 1), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// }
// else
// {
// int w_ = w / 2;
// lt = image.channels() == 3 ? image.at<cv::Vec3b>(0, 0) : image.at<uchar>(0, 0);
// if (lt[0] < threshold && lt[1] < threshold && lt[2] < threshold)
// cv::floodFill(image, cv::Point(0, 0), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// rt = image.channels() == 3 ? image.at<cv::Vec3b>(0, w_ - 1) : image.at<uchar>(0, w_ - 1);
// if (rt[0] < threshold && rt[1] < threshold && rt[2] < threshold)
// cv::floodFill(image, cv::Point(w_ - 1, 0), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// lb = image.channels() == 3 ? image.at<cv::Vec3b>(h - 1, 0) : image.at<uchar>(h - 1, 0);
// if (lb[0] < threshold && lb[1] < threshold && lb[2] < threshold)
// cv::floodFill(image, cv::Point(0, h - 1), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// rb = image.channels() == 3 ? image.at<cv::Vec3b>(h - 1, w_ - 1) : image.at<uchar>(h - 1, w_ - 1);
// if (rb[0] < threshold && rb[1] < threshold && rb[2] < threshold)
// cv::floodFill(image, cv::Point(w_ - 1, h - 1), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// lt = image.channels() == 3 ? image.at<cv::Vec3b>(0, w_) : image.at<uchar>(0, w_);
// if (lt[0] < threshold && lt[1] < threshold && lt[2] < threshold)
// cv::floodFill(image, cv::Point(w_, 0), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// rt = image.channels() == 3 ? image.at<cv::Vec3b>(0, w - 1) : image.at<uchar>(0, w - 1);
// if (rt[0] < threshold && rt[1] < threshold && rt[2] < threshold)
// cv::floodFill(image, cv::Point(w - 1, 0), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// lb = image.channels() == 3 ? image.at<cv::Vec3b>(h - 1, w_) : image.at<uchar>(h - 1, w_);
// if (lb[0] < threshold && lb[1] < threshold && lb[2] < threshold)
// cv::floodFill(image, cv::Point(w_, h - 1), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// rb = image.channels() == 3 ? image.at<cv::Vec3b>(h - 1, w - 1) : image.at<uchar>(h - 1, w - 1);
// if (rb[0] < threshold && rb[1] < threshold && rb[2] < threshold)
// cv::floodFill(image, cv::Point(w - 1, h - 1), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// }
// }