// zynq_7010/MultiFrameCapture.cpp

#include "MultiFrameCapture.h"
#include <thread>
#include <opencv2/opencv.hpp>
#include "DevUtil.h"
#include "Gpio.h"
#include "FpgaComm.h"
#include "gvideoisp1.h"
#include "CameraParam.h"
#include "correct_ultis.h"
#include "filetools.h"
#include "USBProtocol.h"
#include "utilsfunc.h"
#include "CImageMerge.h"
#include "Jpegcompress.h"
#include "CSizedetect.h"
#include "ThreadPool.h"
#include "HCamDevice.h"
const int vsp_A = 45;
const int vsp_B = 45;
// using namespace cv;
MultiFrameCapture::MultiFrameCapture(ScannerGlue glue,std::shared_ptr<FpgaComm> fpga, CISVendor vendor)
:reset_pin(new GpioOut(Fpga_Reset)),
fpgaLoad(new Gpio(Fpga_Load)),
fpga_conf_initn(new Gpio(Fpga_InitN)),
snaped_index(0),
frame_count(1),
b_snap_run(true),
b_stop_snap(false),
b_imgproc(true),
m_glue(glue),
iImageremain(0),
bScanning(false),
//////////////// defaults: 300 dpi and grayscale
resolution_(DPI_300),
cis_width_(WIDTH),
cis_height_(HEIGHT),
pixels_width_(WIDTH),
pixels_height_(HEIGHT),
paper_size_((unsigned int)PaperSize::G400_AUTO),
color_mode_(GRAY),
is_correct_(0),
is_double_paper_(0)
#ifdef TEST_SIMCAP
,
m_test_pimg(nullptr), m_test_pimg_len(0)
#endif
{
m_capFpageregs = fpga;
video.reset(new HCamDevice);
reset_fpga();
m_snap_thread.reset(new std::thread(&MultiFrameCapture::snaprun, this));
m_imgproc_thread.reset(new std::thread(&MultiFrameCapture::procimage, this));
}
MultiFrameCapture::~MultiFrameCapture()
{
// Stop the worker threads before releasing the devices they use; destroying a still-joinable
// std::thread would call std::terminate().
b_snap_run = false;
b_imgproc = false;
m_frameinfos.Clear();
m_frameinfos.ShutDown();
m_cv_snap.notify_all();
if (m_snap_thread.get() && m_snap_thread->joinable())
m_snap_thread->join();
if (m_imgproc_thread.get() && m_imgproc_thread->joinable())
m_imgproc_thread->join();
if (m_capFpageregs.get())
m_capFpageregs.reset();
if (video.get())
video.reset();
#ifdef TEST_SIMCAP
if (m_test_pimg)
free(m_test_pimg);
#endif
}
void MultiFrameCapture::SetParent(void *scanner)
{
}
void MultiFrameCapture::open()
{
int ret = video->open_device(cis_width_,cis_height_);
if(ret < -1)
return;
int i = 1 ;
char *buf = NULL;
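// Assumption: HtCamReadCaptureFrame() returns a negative value once no frame arrives within
// the timeout, so this loop just drains any stale frames left over from a previous session.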
while (i >= 0)
{
i = video->HtCamReadCaptureFrame((void **)&buf, 10);
}
printf("open_device\r\n");
}
void MultiFrameCapture::snap()
{
#ifndef TEST_SIMCAP
std::lock_guard<std::mutex> m_lock(m_mtx_snap);
b_stop_snap = b_end_snap = false;
video->HtCamStartVideoCapturing();
snaped_index++;
m_cv_snap.notify_all();
#endif
}
void MultiFrameCapture::stopsnap(bool autosize)
{
printf("stop stop stop\r\n");
if (autosize)
{
video->HtCamStopSampling();
b_stop_snap = true;
}
}
void MultiFrameCapture::close()
{
printf("close close close\r\n");
if (video.get())
video->close_device();
}
int MultiFrameCapture::read(int addr)
{
return 0; // not implemented for this backend; return a defined value instead of falling off the end
}
void *MultiFrameCapture::readFrameTest(int timeout)
{
return nullptr;
}
uint32_t MultiFrameCapture::compute_frame(int paper_size,int dpi)
{
SIZE size = GetPaperSize((PaperSize)paper_size,dpi);
int val = size.cy / cis_height_ + 1; // +1: integer division rounds down, and we must over-count rather than under-count
int cnt = color_mode_ ? val * 3 : val; // val * 3: in color mode a setting of e.g. 513 frames corresponds to an actual capture height of 513 / 3
return cnt; // one extra frame is fetched to cover the rounding case
}
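// Illustrative arithmetic (the numbers here are assumed for this sketch, not read from the code):
// with cis_height_ = 512 lines per frame and an A4 page of 3507 lines at 300 dpi,
// val = 3507 / 512 + 1 = 7 frames in gray, or 7 * 3 = 21 sub-frames in color mode.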
SIZE MultiFrameCapture::GetPaperSize(PaperSize paper, int dpi)
{
if (paper_map_.find(paper) != paper_map_.end())
{
SIZE resize{2338,3307};
resize.cx = paper_map_[paper].cx * dpi / 25.4;
resize.cy = paper_map_[paper].cy * dpi / 25.4;
printf("resize.cx:%d resize.cy:%d\r\n",resize.cy / cis_height_,resize.cx);
return resize;
}
return SIZE{2338, 3307};
}
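// The conversion above is pixels = millimetres * dpi / 25.4. For example, assuming paper_map_
// holds A4 as 210 x 297 mm, at 300 dpi this gives cx ~= 210 * 300 / 25.4 ~= 2480 and
// cy ~= 297 * 300 / 25.4 ~= 3507.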
void MultiFrameCapture::UpdateScanParam(HG_ScanConfiguration config)
{
m_config = config;
if (!video.get())
{
return ;
}
int config_dpi = config.params.dpi;
int config_color = config.params.isColor;
resolution_ = config.params.dpi == 3 ? DPI_600 : DPI_300; // 0: 600 dpi, 1: 300 dpi; from the PC, config.params.dpi = 1 means 200, 2 means 300, 3 means 600
color_mode_ = config.params.isColor == 1 ? COLOR : GRAY;
is_correct_ = config.params.isCorrect;
paper_size_ = config.params.pageSize;
is_double_paper_ = config.params.doubleFeeded;
cis_width_ = resolution_ == 0 ? WIDTH * 2 : WIDTH; // width: for a given DPI, color and gray use the same width
//width_ = paper_size_ == PaperSize::G400_MAXSIZE || paper_size_ ==PaperSize::G400_MAXAUTO &&
cis_height_ = HEIGHT;
pixels_width_ = color_mode_ == 1 ? cis_width_ * 3 : cis_width_;
pixels_height_ = color_mode_ == 1 ? cis_height_ / 3 : cis_height_;
video->HtCamSetClolr(color_mode_);
video->HtCamSetDpi(resolution_);
FPGAConfigParam fpgaparam = GetFpgaparam(config_dpi, config_color);
video->HtCamSetSpTime(fpgaparam.Sp, 49); // 2344 for gray // 2023-8-10: latest value is 2650
if (color_mode_)
{
video->HtCamSetSpTime2(fpgaparam.HRatio);
}
video->HtCamSetStSp(fpgaparam.MaxBright);
configFPGAParam(config_color, config_dpi);
printf(" -----------------------resolution = %d color_mode_ = %d config_dpi:%d------------------\r\n",resolution_, color_mode_,config_dpi);
{
int val = config.params.dpi == 3 ? 600 :(config.params.dpi == 2?300:200);
printf("val = %d\r\n",val);
uint32_t cnt = compute_frame(paper_size_ , val); // configure one extra frame
video->HtCamSetFrameCnt(cnt);
printf(" ----------------------- configured frame count: %d ------------------\r\n",cnt);
}
//if(!lut.empty())
//lut.release();
//calcLUT(config.params.dpi,config.params.isColor,true,lut);
printf("lut channels = %d lut width = %d \n",lut.channels(),lut.cols);
printf("resolution_:%d\r\n", resolution_);
printf("color_mode_:%d\r\n", color_mode_);
printf("paper_size_:%d\r\n", paper_size_);
printf("cis_width_:%d\r\n", cis_width_);
printf("cis_height_:%d\r\n", cis_height_);
printf("pixels_width_:%d\r\n", pixels_width_);
printf("pixels_height_:%d\r\n", pixels_height_);
#ifdef TEST_SIMCAP
if (m_test_pimg)
free(m_test_pimg);
FILE *f_img;
if (m_config.params.isColor == 1) // color
f_img = fopen("/home/root/color.bmp", "rb");
else
f_img = fopen("/home/root/gray.bmp", "rb");
if (f_img == nullptr)
{
printf("!!!!!!!!!! error while reading image \r\n");
return; // without this guard the fseek/fread below would dereference a null FILE*
}
fseek(f_img, 0, SEEK_END);
m_test_pimg_len = ftell(f_img);
m_test_pimg = (unsigned char*)malloc(m_test_pimg_len);
fseek(f_img, 0, SEEK_SET);
fread(m_test_pimg,1,m_test_pimg_len,f_img);
printf("!!!!!!!!!! info: reading image len = %d m_test_pimg = %p \r\n", m_test_pimg_len, m_test_pimg);
fclose(f_img);
#endif
}
void MultiFrameCapture::createCorrect(int correctmode)
{
if (m_correctThread.joinable())
m_correctThread.join();
stop_countdown();
m_correctThread = std::thread(&MultiFrameCapture::correctcolor, this, correctmode);
}
void MultiFrameCapture::setFPGATriggerMode(bool autotrigger, int delay)
{
}
void MultiFrameCapture::setFanMode(int mode)
{
}
void MultiFrameCapture::fpgaReload()
{
fpgaLoad->setValue(Gpio::Low);
std::this_thread::sleep_for(std::chrono::milliseconds(15));
fpgaLoad->setValue(Gpio::High);
std::this_thread::sleep_for(std::chrono::milliseconds(15));
m_capFpageregs->resetADC();
}
bool MultiFrameCapture::capturerImage()
{
return true;
}
void MultiFrameCapture::waitsnapdone(int state)
{
#ifdef TEST_SIMCAP
HG_JpegCompressInfo info;
info.DataLength = m_test_pimg_len;
info.pJpegData = (unsigned char *)m_test_pimg;
m_glue.m_imageready(info);
#else
printf("!!!!!!!!!!!!!!!! m_cv_snapdone wait \n");
V4L2_DATAINFO_Ex info;
info.snaped_index = snaped_index;
info.snap_end = true;
info.error_code = state;
if (b_end_snap)
{
m_frameinfos.Put(info);
return;
}
std::unique_lock<std::mutex> lock(m_mtx_snapdone);
m_cv_snapdone.wait(lock);
b_end_snap = true;
m_frameinfos.Put(info);
//printf("!!!!!!!!!!!!!!!! m_cv_snapdone wait done \n");
#endif
}
bool MultiFrameCapture::IsImageQueueEmpty()
{
//printf(" m_frameinfos.Size = %d iImageremain = %d bScanning = %d\n", m_frameinfos.Size(), iImageremain.operator int(), bScanning);
return (m_frameinfos.Size() == 0 && iImageremain == 0) && !bScanning;
}
void MultiFrameCapture::resetimageremain()
{
iImageremain = 0;
}
std::atomic_int &MultiFrameCapture::getimageremain()
{
return iImageremain;
}
void MultiFrameCapture::clearimages()
{
m_frameinfos.Clear();
}
void MultiFrameCapture::setScanFlag(bool brun)
{
bScanning = brun;
}
void MultiFrameCapture::reset_fpga()
{
}
void MultiFrameCapture::reload_fpga()
{
}
void MultiFrameCapture::configFPGAParam(int mode, int dpi)
{
FPGAConfigParam fpgaparam = GetFpgaparam(dpi, mode);
video->HtCamChangeExposureValueF(fpgaparam.ExposureF);
video->HtCamChangeExposureValueB(fpgaparam.ExposureB);
for (int i = 0; i < 6; i++)
{
video->HtCamWriteADCReg_ALL(true,true,i,fpgaparam.GainF[i]);
video->HtCamWriteADCReg_ALL(false,true,i,fpgaparam.OffsetF[i]);
video->HtCamWriteADCReg_ALL(true,false,i,fpgaparam.GainB[i]);
video->HtCamWriteADCReg_ALL(false,false,i,fpgaparam.OffsetB[i]);
}
// for (size_t i = 0; i < 20; i++)
// {
// //video->HtCamReadADCReg_ALL(i);
// }
};
int MultiFrameCapture::width()
{
int dpi = m_capFpageregs->getDpi();
int channel = 1;
#ifdef G400
int width = dpi == 0x02 ? 1296 * channel : (dpi == 0x03 ? (2592 * channel) : (864 * channel));
#else
int width = dpi == 0x02 ? 864 * channel : (dpi == 0x03 ? (1728 * channel) : (864 * channel)); // we only support dunnancis now
#endif
return width;
}
int MultiFrameCapture::color()
{
// return m_capFpageregs->getColorMode();
return color_mode_; // fall back to the cached color mode so the function returns a defined value
}
#include "bmp.h"
void MultiFrameCapture::snaprun()
{
// frame_info: metadata of the frame to send; channels: image bit depth; num: number of frames to fetch; time_out: read timeout
auto snap_func = [this](V4L2_DATAINFO_Ex frame_info, int channels,int time_out,int i)
{
StopWatch sw;
sw.reset();
unsigned char *data = NULL;
int ret = video->HtCamReadCaptureFrame((void **)&data, time_out);
if (ret == -1 || ret == -2)
{
printf("----------------获取图像超时或者失败------------\r\n");
return ret;
}
uint32_t sendLine = video->HtCamReadFpgaRegs(0x000e); // register 0x000e holds the actual number of lines sent
//printf("--------------fpga send line ------------:%d\r\n",sendLine);
if (data)
{
//cv::Mat mat = cv::Mat(frame_info.height, frame_info.width, CV_8UC1, data, cv::Mat::AUTO_STEP);
//cv::imwrite("/home/root/test.png", mat);
// printf("--------------frame_index------------:%d\r\n",frame_index);
if (i == 5)
{
//cv::imwrite("/home/root/test.bmp", mat);
//cv::imwrite("/home/root/test.bmp", mat);
//savebitmap(data,15552,512,"1.bmp");
}
//frame_info.mat = mat.clone();
//JpegCompress cmp(90);
//HG_JpegCompressInfo info = cmp.GetCompressedImg(mat);
HG_JpegCompressInfo info ;
info.pJpegData = (unsigned char *)malloc(frame_info.height * frame_info.width);
memcpy(info.pJpegData , data , frame_info.height * frame_info.width);
info.DataLength = frame_info.width * frame_info.height;
info.first_frame = frame_info.first_frame;
info.last_frame = frame_info.last_frame;
info.index_frame = frame_info.frame_index;
info.data_type = 0;
info.width = frame_info.width;
info.height = frame_info.height;
printf("获取数据 width:%d height:%d is_first:%d is_last:%d DataLength:%d\r\n",frame_info.width,frame_info.height,info.first_frame,info.last_frame,info.DataLength);
//cv::imwrite("/home/root/opencv"+to_string(cnt_++)+".bmp",frame.mat);
m_glue.m_imageready(info);
//m_frameinfos.Put(frame_info);
printf("采集图像耗时:%f\r\n",sw.elapsed_ms());
}
return ret;
};
//static int ti = 0;
while (b_snap_run)
{
std::unique_lock<std::mutex> lock(m_mtx_snap);
m_cv_snap.wait(lock);
V4L2_DATAINFO_Ex frame_info;
frame_info.pixtype = color_mode_;
frame_info.dpi = resolution_;
frame_info.width = pixels_width_;
frame_info.height = pixels_height_;
frame_info.error_code = 0;
frame_info.snaped_index = snaped_index;
frame_info.first_frame = false;
frame_info.last_frame = false;
int channels = color_mode_ == 1 ? 3 : 1;
int color_mode = video->HtCamGetColorMode();
int func_sig = 0;
int time_out = color_mode == 1 ? 800 : 400;
int time_out_cnt = 0;
uint32_t frame_num = 0;
uint32_t frame_cnt = 0;
video->HtCamGetFrameCnt(frame_cnt);
frame_info.frame_index = frame_cnt + 1;
//video->HtCamSetVsnpTime(6);
//printf("设置 vsnp%d\r\n",ti);
//ti++;
// printf("--------------------- frame_info.width ------------------ :%d\r\n",frame_info.width );
// printf("--------------------- frame_info.height ------------------ :%d\r\n",frame_info.height );
// printf("--------------------- frame_info.pixtype ------------------ :%d\r\n",frame_info.pixtype );
// printf("--------------------- frame_info.dpi ------------------ :%d\r\n",frame_info.dpi );
for (size_t i = 1; i <= frame_cnt; i++)
{
printf("***********设置的帧数:%d 正在采集第[%d]帧************\r\n",frame_info.frame_index,i);
frame_info.first_frame = i == 1 ? true : false;
frame_info.last_frame = i == frame_cnt ? true : false;
func_sig = snap_func(frame_info, channels,time_out,i);
if (func_sig == -1 ) // the current frame timed out, so retry it; to avoid spinning forever, time_out_cnt caps the retries and we bail out with an error state
{
i--;
time_out +=200;
time_out_cnt ++;
if (time_out_cnt >=5)
{
break;
}
continue;
}
if (b_stop_snap)
{
video->HtCamGetFrameNum(frame_num);
int val = frame_num - i; // frames still waiting to be collected
int ind = i + 1; // counter only used to report how many frames have been collected so far
while (val)
{
frame_info.last_frame = val == 1 ? true : false;
frame_info.frame_index = frame_num;
func_sig = snap_func(frame_info, channels,time_out ,ind); // same handling as above
printf("----------- currently at frame [%d], CIS captured [%d] frames in total -------\r\n",ind,frame_num);
if (func_sig == -1 )
{
time_out +=200;
time_out_cnt ++;
if (time_out_cnt >=5)
{
break;
}
continue;
}
val--;
ind++;
this_thread::sleep_for(std::chrono::milliseconds(65));
}
break;
}
//////////////////////////// VERY IMPORTANT /////////////////////////
//////////////////////////// capture takes about 18 - 20 ms per frame /////////////////////////
//////////////////////////// image transfer takes about 88 - 89 ms /////////////////////////
//////////////////////////// capturing too fast makes the queue pile up /////////////////////////
//////////////////////////// so this delay is the difference between capture speed and transfer speed /////////////////////////
//////////////////////////// DPI and color mode change the data size, so the delay differs; the current 60 ms is for 200 dpi /////////////////////////
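// Rough sanity check of that delay (illustrative, using the timings quoted above): the per-frame
// gap is roughly 88 ms - 20 ms ~= 68 ms, so a sleep around 60 ms keeps the producer from
// outrunning the consumer at 200 dpi; the 10x factor used for DPI_600 below is an empirically
// chosen value, not derived from these numbers.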
int mill = resolution_ == DPI_600 ? 60 * 10 : 60;
printf("mill:%d\r\n",mill);
this_thread::sleep_for(std::chrono::milliseconds(mill));
}
video->HtCamStopVideoCapturing();
printf("---------- 退出图像采集流程 ----------\r\n");
m_cv_snapdone.notify_all();
b_end_snap = true;
}
}
void MultiFrameCapture::updatesnapstatus(int papertype)
{
b_stop_snap = false;
snaped_index = 0;
}
void MultiFrameCapture::procimage()
{
static int idx = 0;
ThreadPool prc_pool(4);
std::queue<std::future<cv::Mat>> prc_fu;
unsigned int frames_height;
unsigned int frames_width = 0;
int cnt_ =0;
StopWatch sw;
while (b_imgproc)
{
sw.reset();
V4L2_DATAINFO_Ex frame = m_frameinfos.Take();
static int inx = 0;
if (!frame.mat.empty())
{
JpegCompress cmp(100);
if(!lut.empty())
{
// correctColor(frame.mat,lut);
printf("correctColor done !!!!!!!!!!!!!!!!!!!!\n");
}
HG_JpegCompressInfo info = cmp.GetCompressedImg(frame.mat);
// info.pJpegData = (unsigned char *)malloc(frame.width * frame.height);
//memcpy(info.pJpegData,frame.mat.data,frame.width * frame.height);
//info.mat = frame.mat;
// frame.mat.copyTo(info.mat);
//info.mat = frame.mat.clone();
//info.DataLength = frame.width * frame.height;
info.first_frame = frame.first_frame;
info.last_frame = frame.last_frame;
info.index_frame = frame.frame_index;
info.data_type = 0;
info.width = frame.width;
info.height = frame.height;
printf("压缩图像耗时:%f 压缩图像队列 %d\r\n",sw.elapsed_ms(),m_frameinfos.Size());
printf("获取数据 width:%d height:%d is_first:%d is_last:%d DataLength:%d\r\n",frame.width,frame.height,info.first_frame,info.last_frame,info.DataLength);
//cv::imwrite("/home/root/opencv"+to_string(cnt_++)+".bmp",frame.mat);
m_glue.m_imageready(info);
}
continue;
}
}
static int temp_val = 0;
bool MultiFrameCapture::saveLutImg(int dpi, int mode, bool black)
{
printf("校正DPI[%d] 校正颜色:%s\n",dpi==1?200:(dpi==2?300:600),mode == IMAGE_COLOR?"彩色":"灰色");
int config_dpi = dpi == 1 ? 2 : dpi;
const int offset_indexs[] = {3, 4, 5, 2, 1, 0 ,0, 1, 2, 5, 4, 3};
int channels = mode == IMAGE_COLOR ? 3 : 1;
int height = 60;
int width = config_dpi == 0x02 ? 864 : (config_dpi == 0x03 ? 1728 : 864);
int orgimgwidth = width * 2 * 3 * channels;
int dstwidth = width * 2 * 3;
bool isNeedSave = true;
string log;
unsigned char *data = NULL;
int ret = video->HtCamReadCaptureFrame((void **)&data, 1000);
if (data == NULL)
{
isNeedSave = false;
log = "WARNNING WARNNING WARNNING FAILDED TO READ IMAGE DATA !!!!!!!!!!!!!!!!!!!\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
return isNeedSave;
}
cv::Mat src(height, orgimgwidth, CV_8UC1, data);
CImageMerge t_marge;
cv::Mat mrgmat = t_marge.MergeImage(src, dstwidth, height, mode,true);
printf("mrgmat width = %d height = %d temp_val=%d\n", mrgmat.cols, mrgmat.rows,temp_val);
int inx = 0;
if (temp_val == 0 && !black)
{
//inx+=1;
//temp_val++;
//imwrite(to_string(inx) + ".bmp", mrgmat);
}
//return 0;
FPGAConfigParam param = GetFpgaparam(dpi, mode);
if (black) // dark field
{
volatile double offValues[12]{0};
double offValues_min[12]{0};
int blockcount = 12;
int bandwidth = mrgmat.cols / blockcount;
for (int n = 0; n < blockcount; n++)
{
cv::Mat img = mrgmat(cv::Rect(bandwidth * n, 10, bandwidth, mrgmat.rows - 10)).clone();
cv::Scalar mean = cv::mean(img);
offValues[n] = mean.val[0];
if (mode)
{
auto tmp = *std::min_element(img.begin<cv::Vec3b>(),
img.end<cv::Vec3b>(), [](cv::Vec3b a, cv::Vec3b b) -> bool
{ return (a[0] + a[1] + a[2]) < (b[0] + b[1] + b[2]); });
offValues_min[n] = (tmp[0] + tmp[1] + tmp[2]) / 3.0;
}
else
{
offValues_min[n] = *std::min_element(img.begin<std::uint8_t>(),
img.end<std::uint8_t>(), [](std::uint8_t a, std::uint8_t b) -> bool
{ return a < b; });
}
printf("band[%d] mean = %0.2f bandwidth = %d offValues_min [%d] = %.2f \n", n, mean.val[0], bandwidth, n, offValues_min[n]);
}
// return 0;
for (int s = 0; s < 2; s++)
{
unsigned int offsets[6]; // = (int *)(s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]);
memcpy(offsets, (s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]), sizeof(param.OffsetF));
for (int j = 0; j < 6; j++)
{
int k = s * 6 + j;
// double diff = BLACK_DIFF(offValues[k]);
// double diff = 8 - offValues[k];
double diff = 3 - offValues_min[k];
if (offValues[k] > 15)
{
diff = 15 - offValues[k];
}
double step = radio * diff;
// int preStep = offsetStep[k];
// if (step * preStep < 0)
// {
// //step = 0 - preStep / 2;
// step /= 2;
// }
// else
// {
// radio = 1;
// }
if (step < 1 && step > 0.5)
step = 1;
if (step < -0.5 && step > -1)
step = -1;
// FMT_STEP(step);
bool isMinStep = abs(step) == 1 && step == offsetStep[k];
bool isOutBounds = offsets[j] >= 255 && step > 0;
printf("\r\n");
isOutBounds |= offsets[j] <= 0 && step < 0;
log += " 暗场校正 :" + std::to_string(k) + ";diff:" + std::to_string(diff) + ";light:" + std::to_string(offValues[k]) + ";offset:" + std::to_string(offsets[j]) + ";step:" + std::to_string(step) + "\r\n";
if (isOutBounds)
log += "" + std::to_string(k) + "条带暗场校正异常,暗场值无法降低 \r\n";
else if (abs(step) > 1 || isMinStep)
{
offsetStep[k] = (int)(step);
printf(" k = %d offsets[%d] = %d step = %f mean = %f\n", k, offset_indexs[k], offsets[offset_indexs[k]], step, offValues[k]);
offsets[offset_indexs[k]] += step;
log += "offsetStep" + std::to_string(k) + " = " + std::to_string(offsetStep[k]) + ", offset_indexs" + std::to_string(k) + " =" + std::to_string(offset_indexs[k]) + "\r\n";
if (offsets[offset_indexs[k]] < 1)
offsets[offset_indexs[k]] = 1;
if (offsets[offset_indexs[k]] > 255)
offsets[offset_indexs[k]] = 255;
isNeedSave = false;
}
log += (s == 0 ? "彩色正面" : "彩色背面");
log += "偏移值:" + std::to_string(offsets[0]) + "," + std::to_string(offsets[1]) + "," + std::to_string(offsets[2]) + "," + std::to_string(offsets[3]) + "," + std::to_string(offsets[4]) + "," + std::to_string(offsets[5]) + "\r\n";
// log += (s == 0 ? "彩色正面暗场校正完成 \r\n" : "彩色背面暗场校正完成 \r\n");
// ftt.append_log(log);
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
log = "";
}
memcpy((s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]), offsets, sizeof(param.OffsetF));
// memset(&param.OffsetF[0],0,sizeof(param.OffsetF));
// memset(&param.OffsetB[0],0,sizeof(param.OffsetF));
// param.OffsetB[5] =255;
// param.OffsetF[4] =255;
}
if (isNeedSave)
{
printf("Save LUT image path :%s \n", param.Flat_BwPath.c_str());
log = "暗场校正完成 \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
log = "";
mrgmat = mrgmat(cv::Rect(0, 10, mrgmat.cols, mrgmat.rows -10));
auto svmat = extractRepresentRow2(mrgmat); // column-wise mean of the original image, to avoid noise from any single pixel column
imwrite(param.Flat_BwPath, svmat);
}
}
else // bright field
{
if (mode == IMAGE_COLOR)
{
volatile double values[2][3];
cv::Scalar a = cv::mean(mrgmat(cv::Rect(0, 10, mrgmat.cols / 2, mrgmat.rows-10)));
cv::Scalar b = cv::mean(mrgmat(cv::Rect(mrgmat.cols / 2, 10, mrgmat.cols / 2, mrgmat.rows-10)));
static int indxxx=0;
if(indxxx <= 10)
{
cv::imwrite(std::to_string(++indxxx)+".bmp",mrgmat);
}
for (char j = 0; j < 3; j++)
{
values[0][j] = a.val[2-j];
values[1][j] = b.val[2-j];
printf("values[0][%d] = %.2f a.val[%d] = %.2f values[1][%d] = %.2f b.val[%d] = %.2f\n", 2-j, values[0][j],j,a.val[j], 2-j, values[1][j],j,b.val[j]);
}
log = "开始彩色明场校正 \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
for (int s = 0; s < 2; s++)
{
volatile int exposures[3]; // = (int *)(s == 0 ? param.ExposureF : param.ExposureB);
memcpy((void *)exposures, (s == 0 ? &param.ExposureF[0] : &param.ExposureB[0]), sizeof(param.ExposureB));
for (int x = 0; x < 3; x++)
{
int k = (3 * s + x);
// int diff = LIGHT_DIFF(*((double *)values + k));
int diff = 170 - *((double *)values + k); //param.MaxBright - *((double *)values + k);
log += " bright field: " + std::to_string(k) + "; diff " + std::to_string(diff) + "\r\n";
double step = diff * radio;
int preStep = *((int *)expStep + k);
if (step * preStep < 0)
{
step = 0 - preStep / 2;
}
if (step < 1 && step > 0)
step = 1;
if (step < 0 && step > -1)
step = -1;
bool isMinStep = abs(step) <= 2 && step == *((int *)expStep + k);
bool isOutBounds = exposures[x] >= (param.Sp - 5) && step > 0;
isOutBounds |= exposures[x] <= 0 && step < 0;
if (isOutBounds)
log += "" + to_string(x) + "个明场校正异常 \r\n";
else if (abs(diff) >= 1 || isMinStep)
{
*((int *)expStep + k) = (int)(step);
exposures[x] += step;
if (exposures[x] > (param.Sp - 5))
{
exposures[x] = (param.Sp - 5);
}
if (exposures[x] < 0)
exposures[x] = 0;
isNeedSave = false;
}
log += " 曝光值:" + to_string(exposures[x]) + "\r\n";
log += " 调整步长:" + to_string(*((int *)expStep + k)) + "\r\n";
}
memcpy((s == 0 ? &param.ExposureF[0] : &param.ExposureB[0]), (void *)exposures, sizeof(param.ExposureB));
}
// ftt.append_log(log);
printf("\n%s",log.c_str());
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
if (isNeedSave)
{
log = "彩色明场校正完成\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
log = "";
mrgmat = mrgmat(cv::Rect(0, 10, mrgmat.cols, mrgmat.rows -10));
auto svmat = extractRepresentRow2(mrgmat); // column-wise mean of the original image, to avoid noise from any single pixel column
imwrite(param.Flat_WhitePath, svmat);
}
}
else
{
double values[2];
values[0] = cv::mean(mrgmat(cv::Rect(0, 0, mrgmat.cols / 2, mrgmat.rows))).val[0];
values[1] = cv::mean(mrgmat(cv::Rect(mrgmat.cols / 2, 0, mrgmat.cols / 2, mrgmat.rows))).val[0];
printf("values[0] = %.2f values[1] = %.2f\n", values[0], values[1]);
log = "-----开始灰色明场校正-----\r\n";
log += " 灰色扫描灰度明场均值:" + to_string(values[0]) + "," + to_string(values[1]) + "\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
for (int s = 0; s < 2; s++)
{
int *exposures = (int *)(s == 0 ? param.ExposureF : param.ExposureB);
// int diff = LIGHT_DIFF(values[s]);
int diff = 170 - values[s]; //param.MaxBright - values[s];
double step = diff * radio;
log += " bright field: " + to_string(s) + "; diff " + to_string(diff) + "\r\n";
int preStep = expStep[s][0];
if (step * preStep < 0)
{
step = 0 - preStep / 2;
}
else
{
radio = 1;
}
if (step < 1 && step > 0)
step = 1;
if (step < 0 && step > -1)
step = -1;
int exp = *(exposures + 1);
std::string ss1(string_format("exp[%d] = %d step = %.3f \r\n", s, exp, step));
log += ss1;
bool isMinStep = abs(step) <= 2 && step == expStep[s][0];
bool isOutBounds = exp >= (param.Sp - 5) && step > 0;
isOutBounds |= exp <= 0 && step < 0;
if (isOutBounds)
log += "" + to_string(s) + "个明场校正异常 \r\n";
else if (abs(diff) > 1 || isMinStep)
{
exp += step;
if (exp < 0)
exp = 0;
if (exp > (param.Sp - 5))
exp = (param.Sp - 5);
float coffe[3] = {1, 1, 1}; // 0.2, 1,0.51
for (int k = 0; k < 3; k++)
{
*(exposures + k) = (int)(exp * coffe[k]);
expStep[s][k] = (int)(step);
std::string exps(string_format("expStep[%d][%d] = %.3f\r\n", s, k, step));
log += exps;
std::string ss(string_format("exposures[%d] = %0.3f \r\n", k, exposures[k]));
log += ss;
}
isNeedSave = false;
}
}
// ftt.append_log(log);
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
if (isNeedSave)
{
printf("Save LUT image path :%s \n", param.Flat_WhitePath.c_str());
log = "灰度明场校正完成\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
log = "";
mrgmat = mrgmat(cv::Rect(0, 10, mrgmat.cols, mrgmat.rows -10));
auto svmat = extractRepresentRow2(mrgmat); // column-wise mean of the original image, to avoid noise from any single pixel column
imwrite(param.Flat_WhitePath, svmat);
}
}
}
SaveFpgaparam(param);
printf("exit Save_lut \n");
return isNeedSave;
}
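// A minimal sketch (not called anywhere, written only to summarize the adjustment rule above,
// with assumed parameter names): each register is nudged by a proportional step
// step = radio * (target - measured), snapped to at least +/-1 so the loop keeps converging,
// and the result is clamped to the register's legal range.
static int sketch_adjust_register(int current, double measured, double target, double radio,
int min_val, int max_val)
{
double step = radio * (target - measured); // proportional correction towards the target level
if (step > 0.0 && step < 1.0)
step = 1.0; // do not stall on a fractional positive step
if (step < 0.0 && step > -1.0)
step = -1.0; // do not stall on a fractional negative step
int next = current + (int)step;
if (next < min_val)
next = min_val; // keep the value inside the register's valid range
if (next > max_val)
next = max_val;
return next;
}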
void MultiFrameCapture::formatStep()
{
for (int i = 0; i < 2; i++)
{
for (int j = 0; j < 3; j++)
{
expStep[i][j] = 600;
}
}
for (int i = 0; i < 12; i++)
offsetStep[i] = 64;
}
void MultiFrameCapture::correctcolor(int correctmode)
{
StopWatch sw_correct;
std::string loginfo = "Start Correctcolor 300DPI Gray \r\n";
//printf("----------- %s \n",loginfo.c_str());
//creatcorrectconfig(0x02, IMAGE_GRAY);
//printf("----------- done \n",loginfo.c_str());
loginfo = "Start Correctcolor 300DPI COLOR \r\n";
printf("----------- %s \n",loginfo.c_str());
creatcorrectconfig(0x02, IMAGE_COLOR);
printf("----------- done \n",loginfo.c_str());
// if ((correctmode == 0) || (correctmode == 2))
// {
// loginfo = "Start Correctcolor 200DPI COLOR \r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
// creatcorrectconfig(0x01, IMAGE_COLOR);
// loginfo = "-----------200DPI COLOR Correct Done----------- \r\n\r\n ";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
// }
// if ((correctmode == 0) || (correctmode == 1))
// {
// loginfo = "Start Correctcolor 200DPI GRAY \r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
// creatcorrectconfig(0x01, IMAGE_GRAY);
// loginfo = "-----------200DPI Gray Correct Done----------- \r\n\r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
// }
// if ((correctmode == 0) || (correctmode == 4))
// {
// loginfo = " Start Correctcolor 300DPI COLOR \r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
// creatcorrectconfig(0x02, IMAGE_COLOR);
// loginfo = "-----------300DPI COLOR Correct Done----------- \r\n\r\n ";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
// }
// if ((correctmode == 0) || (correctmode == 3))
// {
// loginfo = "Start Correctcolor 300DPI GRAY \r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
// creatcorrectconfig(0x02, IMAGE_GRAY);
// loginfo = "-----------300DPI Gray Correct Done----------- \r\n\r\n ";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
// }
// if ((correctmode == 0) || (correctmode == 6))
// {
// loginfo = "Start Correctcolor 600DPI COLOR \r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
// creatcorrectconfig(0x03, IMAGE_COLOR);
// loginfo = "-----------600DPI COLOR Correct Done----------- \r\n\r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
// }
// if ((correctmode == 0) || (correctmode == 5))
// {
// loginfo = " Start Correctcolor 600DPI GRAY \r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
// creatcorrectconfig(0x03, IMAGE_GRAY);
// loginfo = "-----------600DPI Gray Correct Done----------- \r\n\r\n ";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
// }
if ((correctmode < 0) || (correctmode > 6))
{
loginfo = "不支持的校正模式...\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLAT_FINISHED, "******Correct Done times = " + to_string(sw_correct.elapsed_s()) + " ****** \r\n");
}
void MultiFrameCapture::openDevice(int dpi, int mode)
{
printf("openDevice dpi:%d mode:%d \r\n",dpi,mode);
reset_fpga();
bool dunnancis = true;
int channelwidth = dpi == 0x02 ? 864 : (dpi == 0x03 ? 1728 : 864); // 1296 2592 864
int channels = mode == 0x01 ? 3 : 1;
int width = channelwidth * channels;
int frame_height = mode == 0x01 ? 60 * 3 : 60;
int startsample = 202; // 205
int t_real_dpi = dpi == 1 ? 2 : (dpi == 2 ? 2 : 3);
resolution_ = dpi == 3 ? DPI_600 : DPI_300; // 0: 600 dpi, 1: 300 dpi; config.params.dpi = 2 or 3 from the PC, where 2 means 300 and 3 means 600
color_mode_ = mode == 1 ? COLOR : GRAY;
cis_width_ = resolution_ == 0 ? WIDTH * 2 : WIDTH; // width: for a given DPI, color and gray use the same width
cis_height_ = mode == 0x01 ? 60 * 3 : 60;
pixels_width_ = color_mode_ == 1 ? cis_width_ * 3 : cis_width_;
pixels_height_ = color_mode_ == 1 ? cis_height_ / 3 : cis_height_;
int config_dpi = resolution_ == DPI_600 ? 3 : 2;
int config_color = color_mode_ ==COLOR ? 1:0;
StopWatch swwv4l2open;
video->HtCamSetClolr(color_mode_);
video->HtCamSetDpi(resolution_);
FPGAConfigParam fpgaparam = GetFpgaparam(config_dpi, config_color);
video->HtCamSetSpTime(fpgaparam.Sp,fpgaparam.MaxExp); // 2344 for gray // 2023-8-10: latest value is 2650
if (color_mode_)
{
video->HtCamSetSpTime2(fpgaparam.HRatio);
}
video->HtCamSetStSp(fpgaparam.MaxBright);
configFPGAParam(config_color, config_dpi);
printf(" -----------------------resolution = %d config_color = %d config_dpi:%d------------------\r\n",resolution_, config_color,config_dpi);
{
int val = 1;
video->HtCamSetFrameCnt(val);
printf(" -----------------------设置帧数:%d------------------\r\n",val);
}
printf("颜色模式:%s\r\n",color_mode_== COLOR ? "彩色":"灰色");
printf("分辨率:%d\r\n",resolution_ == DPI_600?600:300);
printf("采集宽:%d 高:%d\r\n",cis_width_,cis_height_);
printf("像素宽:%d 高: %d\r\n",pixels_width_,pixels_height_);
printf("resolution_:%d\r\n", resolution_);
printf("color_mode_:%d\r\n", color_mode_);
printf("paper_size_:%d\r\n", paper_size_);
printf("paper_size_:%d\r\n", paper_size_);
printf("cis_width_:%d\r\n", cis_width_);
printf("cis_height_:%d\r\n", cis_height_);
printf("pixels_width_:%d\r\n", pixels_width_);
printf("pixels_height_:%d\r\n", pixels_height_);
int ret = video->open_device(cis_width_,cis_height_);
if(ret < -1)
return;
int i = 1 ;
char *buf = NULL;
while (i >= 0)
{
i = video->HtCamReadCaptureFrame((void **)&buf, 500);
}
//video->close_video();
printf("opened video with width = %d height = %d time eplased = %.2f pbuffer = %p \n", width, 60 * 2, swwv4l2open.elapsed_ms(),buf);
}
void MultiFrameCapture::creatcorrectconfig(int dpi, int mode)
{
openDevice(dpi, mode);
bool isDone = false;
formatStep();
int i = 0;
radio = 1;
while (!isDone) // dark field first
{
//break ;
string log = "==============================第" + to_string(i) + "次===============================\r\n";
// ftt.append_log(log);
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
int config_dpi = resolution_ == DPI_600 ? 3 : 2;
int config_color = color_mode_ ==COLOR ? 1:0;
configFPGAParam(config_color, config_dpi);
// ftt.append_log(log);
printf("log :%s\r\n",log.c_str());
std::this_thread::sleep_for(std::chrono::milliseconds(5));
unsigned int F[3]={0,0 ,0};
video->HtCamChangeExposureValueF(F);
video->HtCamChangeExposureValueB(F);
std::this_thread::sleep_for(std::chrono::milliseconds(5));
video->HtCamStartVideoCapturing();
std::this_thread::sleep_for(std::chrono::milliseconds(5));
isDone = saveLutImg(dpi, mode, true); // 0 color_black 1 color_white 2 gray_black 3 gray_white
video->HtCamStopVideoCapturing();
//video->close_device();
this_thread::sleep_for(std::chrono::milliseconds(200));
i++;
//return ;
}
isDone = false;
formatStep();
while (!isDone) // then bright field
{
string log = "==============================第" + to_string(i) + "次===============================\r\n";
// ftt.append_log(log);
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
configFPGAParam(mode, dpi);
// ftt.append_log(log);
printf("log :%s\r\n", log.c_str());
std::this_thread::sleep_for(std::chrono::milliseconds(5));
//unsigned int F[3] = {1, 1, 1};
//video->HtCamChangeExposureValueF(F);
//video->HtCamChangeExposureValueB(F);
//std::this_thread::sleep_for(std::chrono::milliseconds(5));
video->HtCamStartVideoCapturing();
std::this_thread::sleep_for(std::chrono::milliseconds(5));
isDone = saveLutImg(dpi, mode, false); // 0 color_black 1 color_white 2 gray_black 3 gray_white
video->HtCamStopVideoCapturing();
// video->close_device();
this_thread::sleep_for(std::chrono::seconds(2));
i++;
}
printf("creatcorrectconfig %s \n", (mode == IMAGE_COLOR ? " Color" : " Gray"));
//creatLUTData(dpi, mode);
video->close_device();
}
// void MultiFrameCapture::myFloodFill(cv::Mat& image, bool isTwoSide)
// {
// int w = image.cols;
// int h = image.rows;
// cv::Vec3b lt, rt, lb, rb;
// const double threshold = 10;
// if (!isTwoSide)
// {
// lt = image.channels() == 3 ? image.at<cv::Vec3b>(0, 0) : image.at<uchar>(0, 0);
// if (lt[0] < threshold && lt[1] < threshold && lt[2] < threshold)
// cv::floodFill(image, cv::Point(0, 0), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// rt = image.channels() == 3 ? image.at<cv::Vec3b>(0, w - 1) : image.at<uchar>(0, w - 1);
// if (rt[0] < threshold)
// cv::floodFill(image, cv::Point(w - 1, 0), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// lb = image.channels() == 3 ? image.at<cv::Vec3b>(h - 1, 0) : image.at<uchar>(h - 1, 0);
// if (lb[0] < threshold)
// cv::floodFill(image, cv::Point(0, h - 1), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// rb = image.channels() == 3 ? image.at<cv::Vec3b>(h - 1, w - 1) : image.at<uchar>(h - 1, w - 1);
// if (rb[0] < threshold)
// cv::floodFill(image, cv::Point(w - 1, h - 1), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// }
// else
// {
// int w_ = w / 2;
// lt = image.channels() == 3 ? image.at<cv::Vec3b>(0, 0) : image.at<uchar>(0, 0);
// if (lt[0] < threshold && lt[1] < threshold && lt[2] < threshold)
// cv::floodFill(image, cv::Point(0, 0), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// rt = image.channels() == 3 ? image.at<cv::Vec3b>(0, w_ - 1) : image.at<uchar>(0, w_ - 1);
// if (rt[0] < threshold && rt[1] < threshold && rt[2] < threshold)
// cv::floodFill(image, cv::Point(w_ - 1, 0), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// lb = image.channels() == 3 ? image.at<cv::Vec3b>(h - 1, 0) : image.at<uchar>(h - 1, 0);
// if (lb[0] < threshold && lb[1] < threshold && lb[2] < threshold)
// cv::floodFill(image, cv::Point(0, h - 1), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// rb = image.channels() == 3 ? image.at<cv::Vec3b>(h - 1, w_ - 1) : image.at<uchar>(h - 1, w_ - 1);
// if (rb[0] < threshold && rb[1] < threshold && rb[2] < threshold)
// cv::floodFill(image, cv::Point(w_ - 1, h - 1), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// lt = image.channels() == 3 ? image.at<cv::Vec3b>(0, w_) : image.at<uchar>(0, w_);
// if (lt[0] < threshold && lt[1] < threshold && lt[2] < threshold)
// cv::floodFill(image, cv::Point(w_, 0), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// rt = image.channels() == 3 ? image.at<cv::Vec3b>(0, w - 1) : image.at<uchar>(0, w - 1);
// if (rt[0] < threshold && rt[1] < threshold && rt[2] < threshold)
// cv::floodFill(image, cv::Point(w - 1, 0), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// lb = image.channels() == 3 ? image.at<cv::Vec3b>(h - 1, w_) : image.at<uchar>(h - 1, w_);
// if (lb[0] < threshold && lb[1] < threshold && lb[2] < threshold)
// cv::floodFill(image, cv::Point(w_, h - 1), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// rb = image.channels() == 3 ? image.at<cv::Vec3b>(h - 1, w - 1) : image.at<uchar>(h - 1, w - 1);
// if (rb[0] < threshold && rb[1] < threshold && rb[2] < threshold)
// cv::floodFill(image, cv::Point(w - 1, h - 1), cv::Scalar::all(255), 0, cv::Scalar::all(5), cv::Scalar::all(40), cv::FLOODFILL_FIXED_RANGE);
// }
// }