// zynq_7010/zynq_7010_code/MultiFrameCapture.cpp

#include "MultiFrameCapture.h"
#include <thread>
#include <opencv2/opencv.hpp>
#include "DevUtil.h"
#include "Gpio.h"
#include "CameraParam.h"
#include "correct_ultis.h"
#include "filetools.h"
#include "USBProtocol.h"
#include "utilsfunc.h"
#include "CImageMerge.h"
#include "Jpegcompress.h"
#include "ThreadPool.h"
#include "HCamDevice.h"
const int vsp_A = 45;
const int vsp_B = 45;
// using namespace cv;
MultiFrameCapture::MultiFrameCapture(ScannerGlue glue) :
snaped_index(0),
frame_count(1),
b_snap_run(true),
b_stop_snap(false),
b_imgproc(true),
m_glue(glue),
iImageremain(0),
bScanning(false),
//////////////// defaults: 300 dpi, grayscale
resolution_(DPI_300),
v4l2_width_(WIDTH),
v4l2_height_(HEIGHT),
pixels_width_(WIDTH),
fpga_height_(HEIGHT),
paper_size_((unsigned int)PaperSize::G400_AUTO),
color_mode_(GRAY),
is_correct_(0),
is_double_paper_(0)
{
video.reset(new HCamDevice);
m_snap_thread.reset(new std::thread(&MultiFrameCapture::snaprun, this));
m_imgproc_thread.reset(new std::thread(&MultiFrameCapture::procimage, this));
}
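// Threading model: the constructor starts two worker threads.
// - m_snap_thread runs snaprun() and pulls frames from the camera whenever snap() signals m_cv_snap;
// - m_imgproc_thread runs procimage() and drains m_frameinfos, compressing any queued cv::Mat frames
//   and handing them to m_glue.m_imageready().
// Illustrative wiring of the glue callbacks (sketch only; the real wiring is done by the caller and
// assumes the ScannerGlue members are assignable callables):
//   ScannerGlue glue;
//   glue.m_imageready  = [](HG_JpegCompressInfo img) { /* hand the image to the transport layer */ };
//   glue.m_deviceevent = [](int ev, std::string msg) { /* forward status text to the host */ };
//   MultiFrameCapture capture(glue);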
MultiFrameCapture::~MultiFrameCapture()
{
b_snap_run = false;
b_imgproc = false;
m_cv_snap.notify_all();
m_frameinfos.Clear();
m_frameinfos.ShutDown();
if (m_snap_thread.get() && m_snap_thread->joinable())
m_snap_thread->join();
if (m_imgproc_thread.get() && m_imgproc_thread->joinable())
m_imgproc_thread->join();
if (video.get())
video.reset();
#ifdef TEST_SIMCAP
if (m_test_pimg)
free(m_test_pimg);
#endif
}
void MultiFrameCapture::SetParent(void *scanner)
{
}
void MultiFrameCapture::open()
{
int ret = video->open_device(v4l2_width_,v4l2_height_);
if (ret < 0)
return;
// drain any frames left over in the driver queue
int i = 1 ;
char *buf = NULL;
while (i >= 0)
{
i = video->HtCamReadCaptureFrame((void **)&buf, 10);
}
printf("open_device\r\n");
}
void MultiFrameCapture::snap()
{
#ifndef TEST_SIMCAP
std::lock_guard<std::mutex> m_lock(m_mtx_snap);
b_stop_snap = b_end_snap = false;
if (m_config.params.dpi == 3 )
this_thread::sleep_for(std::chrono::milliseconds(140));
else if(m_config.params.dpi == 2 && m_config.params.pageSize != (int)PaperSize::G400_AUTO)
this_thread::sleep_for(std::chrono::milliseconds(45));
else
this_thread::sleep_for(std::chrono::milliseconds(10));
video->HtCamStartVideoCapturing();
snaped_index++;
m_cv_snap.notify_all();
#endif
}
void MultiFrameCapture::stopsnap(bool autosize)
{
printf("stop stop stop\r\n");
if (autosize)
{
video->HtCamStopSampling();
b_stop_snap = true;
}
}
void MultiFrameCapture::close()
{
printf("close close close\r\n");
if (video.get())
video->close_device();
}
int MultiFrameCapture::read(int addr)
{
return 0;
}
void *MultiFrameCapture::readFrameTest(int timeout)
{
return nullptr;
}
void MultiFrameCapture::SetLowPower(bool islow)
{
video->HtCamSetLowPower(islow);
}
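// compute_frame(): number of CIS frames needed to cover one sheet.
// GetPaperSize() converts the paper length to pixel rows (cy * dpi / 25.4, paper_map_ apparently
// storing sizes in millimetres) and compute_frame() divides that by the rows one V4L2 frame delivers
// (pixels_height_), rounding up. Illustrative example for gray at 300 dpi (height = 999 rows per
// frame, see UpdateScanParam): an A4 length of 297 mm gives 297 * 300 / 25.4 = 3508 rows, so
// ceil(3508 / 999) = 4 frames; UpdateScanParam then applies its empirical -1/+1 adjustments for
// A4 at 300 dpi and for 600 dpi.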
uint32_t MultiFrameCapture::compute_frame(int paper_size,int dpi)
{
SIZE size = GetPaperSize((PaperSize)paper_size,dpi);
int val = ceil((float)size.cy /(float)pixels_height_);
return val;
}
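// compute_height(): derives the V4L2 capture geometry from the requested resolution and colour mode.
//   v4l2_width_    - sensor line width scaled for DPI (x2 at 600 dpi, /1.5 below 300 dpi);
//   pixels_width_  - output row width: a colour row carries three channels, so it is three times wider;
//   pixels_height_ - output rows per V4L2 frame: colour packs three channel lines per output line,
//                    so the V4L2 frame height is divided by three.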
void MultiFrameCapture::compute_height(int width,int height)
{
////// NOTE: this configuration must stay consistent with the sensor setup
v4l2_width_ = resolution_ == DPI_600 ? width * 2 :(resolution_ == DPI_300 ? width :(float)width / 1.5) ; // width: for a given DPI, colour and gray use the same V4L2 width
//width_ = paper_size_ == PaperSize::G400_MAXSIZE || paper_size_ ==PaperSize::G400_MAXAUTO &&
v4l2_height_ = height;
pixels_width_ = color_mode_ == COLOR ? v4l2_width_ * 3 : v4l2_width_;
pixels_height_ = color_mode_ == COLOR ? v4l2_height_ / 3 : v4l2_height_;
}
SIZE MultiFrameCapture::GetPaperSize(PaperSize paper, int dpi)
{
if (paper_map_.find(paper) != paper_map_.end())
{
SIZE resize{2480,3507};
resize.cx = paper_map_[paper].cx * dpi / 25.4;
resize.cy = paper_map_[paper].cy * dpi / 25.4;
printf("resize.cx:%d resize.cy:%d\r\n",resize.cx,resize.cy);
return resize;
}
return SIZE{2338, 3307};
}
void MultiFrameCapture::UpdateScanParam(HG_ScanConfiguration config)
{
m_config = config;
printf("config.params.pageSize:%d\r\n",config.params.pageSize);
if(config.params.slow_moire && config.params.dpi != 3)
{
config.params.dpi = 2;
}
if (!video.get())
{
return ;
}
int config_dpi = config.params.dpi;
int config_color = config.params.isColor;
resolution_ = config.params.dpi; // local enum: 0 = 600 dpi, 1 = 300 dpi; host values: 1 = 200 dpi, 2 = 300 dpi, 3 = 600 dpi
color_mode_ = config.params.isColor;
is_correct_ = config.params.isCorrect;
paper_size_ = config.params.pageSize;
is_double_paper_ = config.params.doubleFeeded;
//width_ = paper_size_ == PaperSize::G400_MAXSIZE || paper_size_ ==PaperSize::G400_MAXAUTO &&
//int height = resolution_ == DPI_600 ? 342 : 900;
int height = config.params.dpi == 3 ? 900 :(config.params.dpi == 2?999:300);
compute_height(WIDTH , height);
video->HtCamSetClolr(color_mode_);
video->HtCamSetDpi(resolution_);
FPGAConfigParam fpgaparam = GetFpgaparam(config_dpi, config_color);
video->HtCamSetSpTime(fpgaparam.Sp,fpgaparam.MaxExp); // 3-channel: 75, 6-channel: 49
if (color_mode_)
{
video->HtCamSetSpTime2(fpgaparam.Sp * 3);
}
video->HtCamSetStSp(fpgaparam.MaxBright); // 172 came from image-quality testing (6-channel); 3-channel uses 260
configFPGAParam(config_color, config_dpi);
{
int val = config.params.dpi == 3 ? 600 :(config.params.dpi == 2?300:200);
// anti-moire (slow) mode
if(config.params.slow_moire && config.params.dpi != 3)
{
int sp = fpgaparam.Sp + 232; // 258; moire mode scans at 400 dpi with a height of 4677
val = 400;
video->HtCamSetSpTime(sp,fpgaparam.MaxExp); // 2344 for gray; updated to 2650 on 2023-08-10
if (color_mode_)
{
video->HtCamSetSpTime2(sp * 3);
}
printf("moire height = %d\r\n",val);
printf("gray sp = %d\r\n",sp);
printf("color sp = %d\r\n",sp * 3);
}
uint32_t cnt = compute_frame(paper_size_ , val) ;
if (m_config.params.dpi == 2 && m_config.params.pageSize == (int)PaperSize::G400_A4)
{
cnt -=1 ;
}
if (m_config.params.dpi == 3)
{
cnt +=1 ;
}
video->HtCamSetFrameCnt(cnt);
printf(" -----------------------设置帧数:%d------------------\r\n",cnt);
}
printf("颜色模式:%s\r\n",color_mode_== COLOR ? "彩色":"灰色");
printf("分辨率:%d\r\n",resolution_);
printf("V4L2宽: %d 高:%d\r\n",v4l2_width_,v4l2_height_);
printf("像素宽:%d 高: %d\r\n",pixels_width_,pixels_height_);
printf("fpga_height_:%d\r\n", fpga_height_);
printf("color_mode_:%d\r\n", color_mode_);
printf("paper_size_:%d\r\n", paper_size_);
}
void MultiFrameCapture::createCorrect(int correctmode)
{
if (m_correctThread.joinable())
m_correctThread.join();
stop_countdown();
m_correctThread = std::thread(&MultiFrameCapture::correctcolor, this, correctmode);
}
void MultiFrameCapture::setFPGATriggerMode(bool autotrigger, int delay)
{
}
void MultiFrameCapture::setFanMode(int mode)
{
}
void MultiFrameCapture::fpgaReload()
{
}
bool MultiFrameCapture::capturerImage()
{
return true;
}
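// waitsnapdone(): called once the paper run ends. If the capture already finished (b_end_snap) it only
// enqueues a terminal record; otherwise it blocks until snaprun() signals m_cv_snapdone, then pushes a
// V4L2_DATAINFO_Ex with snap_end = true and the given error state into m_frameinfos.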
void MultiFrameCapture::waitsnapdone(int state)
{
StopWatch sw;
sw.reset();
printf("!!!!!!!!!!!!!!!! m_cv_snapdone wait \n");
V4L2_DATAINFO_Ex info;
info.snaped_index = snaped_index;
info.snap_end = true;
info.error_code = state;
if (b_end_snap)
{
m_frameinfos.Put(info);
return;
}
sw.reset();
std::unique_lock<std::mutex> lock(m_mtx_snapdone);
m_cv_snapdone.wait(lock);
b_end_snap = true;
m_frameinfos.Put(info);
printf("!!!!!!!!!!!!!!!! m_cv_snapdone wait done :%f \n",sw.elapsed_ms());
}
bool MultiFrameCapture::IsImageQueueEmpty()
{
//printf(" m_frameinfos.Size = %d iImageremain = %d bScanning = %d\n", m_frameinfos.Size(), iImageremain.operator int(), bScanning);
return (m_frameinfos.Size() == 0 && iImageremain == 0) && !bScanning;
}
void MultiFrameCapture::resetimageremain()
{
iImageremain = 0;
}
std::atomic_int &MultiFrameCapture::getimageremain()
{
return iImageremain;
}
void MultiFrameCapture::clearimages()
{
m_frameinfos.Clear();
}
void MultiFrameCapture::setScanFlag(bool brun)
{
bScanning = brun;
}
void MultiFrameCapture::configFPGAParam(int mode, int dpi)
{
FPGAConfigParam fpgaparam = GetFpgaparam(dpi, mode);
video->HtCamChangeExposureValueF(fpgaparam.ExposureF);
video->HtCamChangeExposureValueB(fpgaparam.ExposureB);
for (int i = 0; i < 6; i++)
{
video->HtCamWriteADCReg_ALL(true,true,i,fpgaparam.GainF[i]);
video->HtCamWriteADCReg_ALL(false,true,i,fpgaparam.OffsetF[i]);
video->HtCamWriteADCReg_ALL(true,false,i,fpgaparam.GainB[i]);
video->HtCamWriteADCReg_ALL(false,false,i,fpgaparam.OffsetB[i]);
}
// for (size_t i = 0; i < 20; i++)
// {
// //video->HtCamReadADCReg_ALL(i);
// }
};
#include "bmp.h"
void MultiFrameCapture::snaprun()
{
// snap_func: frame_info - data info to be sent; channels - image bit depth; time_out - image-read timeout (ms); i - index of the frame being fetched
auto snap_func = [this](V4L2_DATAINFO_Ex frame_info, int channels,int &time_out,int i)
{
StopWatch sw;
sw.reset();
unsigned char *data = NULL;
//printf("设置超时时间:%d\r\n",time_out);
int ret = video->HtCamReadCaptureFrame((void **)&data, time_out);
int t = resolution_ == DPI_600 ? sw.elapsed_ms() + 600 : sw.elapsed_ms() + 20;
time_out = t;
uint32_t sendLine = video->HtCamReadFpgaRegs(0x000e);////0x000e 取出来的实际行数
printf("--------------fpga send line ------------:%d HtCamReadCaptureFrame =%0.2f \r\n",sendLine,sw.elapsed_ms());
if (data)
{
//cv::Mat mat = cv::Mat(frame_info.height, frame_info.width, CV_8UC1, data, cv::Mat::AUTO_STEP);
// printf("获取数据 width:%d height:%d \r\n",frame_info.width,frame_info.height);
//cv::imwrite("/home/root/test+_" + to_string(i)+".bmp", mat);
// printf("--------------frame_index------------:%d\r\n",frame_index);
if (i == 2)
{
// cv::imwrite("/home/root/test.bmp", mat);
//cv::imwrite("/home/root/test.bmp", mat);
//savebitmap(data,15552,512,"1.bmp");
}
//frame_info.mat = mat.clone();
//JpegCompress cmp(90);
//HG_JpegCompressInfo info = cmp.GetCompressedImg(mat);
HG_JpegCompressInfo info ;
info.pJpegData = data;
info.dpi = false ;
info.DataLength = frame_info.width * frame_info.height;
info.first_frame = frame_info.first_frame;
info.last_frame = frame_info.last_frame;
info.index_frame = frame_info.frame_index;
info.data_type = 0;
info.width = frame_info.width;
info.height = frame_info.height;
printf("获取数据 width:%d height:%d is_first:%d is_last:%d DataLength:%d\r\n",frame_info.width,frame_info.height,info.first_frame,info.last_frame,info.DataLength);
m_glue.m_imageready(info);
//m_frameinfos.Put(frame_info);
printf("采集图像耗时:%f\r\n",sw.elapsed_ms());
}
return ret;
};
static int ti = 100;
while (b_snap_run)
{
std::unique_lock<std::mutex> lock(m_mtx_snap);
m_cv_snap.wait(lock);
V4L2_DATAINFO_Ex frame_info;
frame_info.pixtype = color_mode_;
frame_info.dpi = resolution_;
frame_info.width = pixels_width_;
frame_info.height = pixels_height_;
frame_info.error_code = 0;
frame_info.snaped_index = snaped_index;
frame_info.first_frame = false;
frame_info.last_frame = false;
int channels = color_mode_ == 1 ? 3 : 1;
int color_mode = video->HtCamGetColorMode();
int func_sig = 0;
int time_out = resolution_ == DPI_600 ? 2000 : 600; // this timeout is tuned to the data volume of each frame
int time_out_cnt = 0;
if (color_mode_)
{
time_out += 400;
}
uint32_t frame_num = 0;
uint32_t frame_cnt = 0;
video->HtCamGetFrameCnt(frame_cnt);
printf("获取设置的帧数:%d\r\n",frame_cnt);
frame_info.frame_index = frame_cnt + 1;
// video->HtCamSetVsnpTime(ti);
// printf("设置 vsnp%d\r\n",ti);
//ti++;
// video->HtCamSetStSp(ti);
// printf("设置 StSp%d\r\n",ti);
// ti++;
int mill = 0; //= resolution_ == DPI_600 ? 30 : 25;
//mill = color_mode == 1?mill*3:mill;
printf("mill:%d\r\n",mill);
StopWatch sw;
sw.reset();
for (size_t i = 1; i <= frame_info.frame_index ; i++)
{
printf("***********设置的帧数:%d 正在采集第[%d]帧************\r\n",frame_info.frame_index,i);
frame_info.first_frame = i == 1 ? true : false;
frame_info.last_frame = i == frame_info.frame_index ? true : false;
func_sig = snap_func(frame_info, channels,time_out,i);
if (b_stop_snap)
{
video->HtCamGetFrameNum(frame_num);
int ind = i; // frames already captured
int val = frame_num - ind; // frames still pending in the CIS
while (val > 0)
{
frame_info.last_frame = val == 1 ? true : false;
frame_info.frame_index = frame_num;
ind++;
printf("-----------capturing frame [%d]; CIS captured [%d] frames in total -------\r\n",ind,frame_num);
func_sig = snap_func(frame_info, channels,time_out ,ind); // same handling as above
val--;
if(resolution_ == DPI_600)
{
int d = 200;
this_thread::sleep_for(std::chrono::milliseconds(d));
}
}
break;
}
if(m_config.params.dpi == 3)
{
int d = 200;
this_thread::sleep_for(std::chrono::milliseconds(d));
}
}
uint32_t in=0;
video->HtCamStopVideoCapturing();
video->HtCamGetFrameNum(in);
printf("----------整张采集结束 总共采集帧数:%d 耗时:%f ----------\r\n",in,sw.elapsed_ms());
m_cv_snapdone.notify_all();
b_end_snap = true;
}
}
void MultiFrameCapture::updatesnapstatus(int papertype)
{
b_stop_snap = false;
snaped_index = 0;
}
void MultiFrameCapture::procimage()
{
static int idx = 0;
ThreadPool prc_pool(4);
std::queue<std::future<cv::Mat>> prc_fu;
unsigned int frames_height;
unsigned int frames_width = 0;
int cnt_ =0;
while (b_imgproc)
{
V4L2_DATAINFO_Ex frame = m_frameinfos.Take();
static int inx = 0;
if (!frame.mat.empty())
{
JpegCompress cmp(100);
HG_JpegCompressInfo info = cmp.GetCompressedImg(frame.mat);
// info.pJpegData = (unsigned char *)malloc(frame.width * frame.height);
//memcpy(info.pJpegData,frame.mat.data,frame.width * frame.height);
//info.mat = frame.mat;
// frame.mat.copyTo(info.mat);
//info.mat = frame.mat.clone();
//info.DataLength = frame.width * frame.height;
info.first_frame = frame.first_frame;
info.last_frame = frame.last_frame;
info.index_frame = frame.frame_index;
info.data_type = 0;
info.width = frame.width;
info.height = frame.height;
//cv::imwrite("/home/root/opencv"+to_string(cnt_++)+".bmp",frame.mat);
m_glue.m_imageready(info);
}
continue;
}
}
static int temp_val = 0;
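// saveLutImg(): one iteration of the flat-field calibration loop. A raw calibration frame is read,
// the two CIS halves are merged by CImageMerge, and then:
//  - dark field (black == true): the per-band mean/minimum brightness is compared against a target
//    black level (about 3 on the minimum, capped at 15 on the mean) and the ADC offsets
//    (OffsetF/OffsetB) are stepped by radio * diff;
//  - bright field: the mean of each side is compared against a target of 200 and the exposure values
//    (ExposureF/ExposureB) are stepped likewise.
// When no register needs a further step, the representative row of the frame is written to
// Flat_BwPath / Flat_WhitePath, the FPGA parameters are saved, and the function returns true, which
// ends the iteration loop in creatcorrectconfig().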
bool MultiFrameCapture::saveLutImg(int dpi, int mode, bool black)
{
printf("校正DPI[%d] 校正颜色:%s\n",dpi==1?200:(dpi==2?300:600),mode == IMAGE_COLOR?"彩色":"灰色");
int config_dpi = dpi ;
const int offset_indexs[] = {3, 4, 5, 2, 1, 0 ,0, 1, 2, 5, 4, 3};
int channels = mode == IMAGE_COLOR ? 3 : 1;
int height = 60;
int width = config_dpi == 0x02 ? 864 : (config_dpi == 0x03 ? 1728 : 576);
int orgimgwidth = width * 2 * 3 * channels;
printf("orgimgwidth:%d\r\n",orgimgwidth);
int dstwidth = width * 2 * 3;
bool isNeedSave = true;
string log;
unsigned char *data = NULL;
int ret = video->HtCamReadCaptureFrame((void **)&data, 1000);
if (data == NULL)
{
isNeedSave = false;
log = "WARNNING WARNNING WARNNING FAILDED TO READ IMAGE DATA !!!!!!!!!!!!!!!!!!!\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
return isNeedSave;
}
cv::Mat src(height, orgimgwidth, CV_8UC1, data);
CImageMerge t_marge;
cv::Mat mrgmat = t_marge.MergeImage(src, dstwidth, height, mode,true);
printf("mrgmat width = %d height = %d temp_val=%d\n", mrgmat.cols, mrgmat.rows,temp_val);
int inx = 0;
if (temp_val == 0 && !black)
{
//inx+=1;
//temp_val++;
//imwrite(to_string(inx) + ".bmp", mrgmat);
}
//return 0;
FPGAConfigParam param = GetFpgaparam(dpi, mode);
if (black) // dark field
{
volatile double offValues[12]{0};
double offValues_min[12]{0};
int blockcount = 12;
int bandwidth = mrgmat.cols / blockcount;
for (int n = 0; n < blockcount; n++)
{
cv::Mat img = mrgmat(cv::Rect(bandwidth * n, 10, bandwidth, mrgmat.rows - 10)).clone();
cv::Scalar mean = cv::mean(img);
offValues[n] = mean.val[0];
if (mode)
{
auto tmp = *std::min_element(img.begin<cv::Vec3b>(),
img.end<cv::Vec3b>(), [](cv::Vec3b a, cv::Vec3b b) -> bool
{ return (a[0] + a[1] + a[2]) < (b[0] + b[1] + b[2]); });
offValues_min[n] = (tmp[0] + tmp[1] + tmp[2]) / 3.0;
}
else
{
offValues_min[n] = *std::min_element(img.begin<std::uint8_t>(),
img.end<std::uint8_t>(), [](std::uint8_t a, std::uint8_t b) -> bool
{ return a < b; });
}
printf("band[%d] mean = %0.2f bandwidth = %d offValues_min [%d] = %.2f \n", n, mean.val[0], bandwidth, n, offValues_min[n]);
}
// return 0;
for (int s = 0; s < 2; s++)
{
unsigned int offsets[6]; // = (int *)(s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]);
memcpy(offsets, (s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]), sizeof(param.OffsetF));
for (int j = 0; j < 6; j++)
{
int k = s * 6 + j;
// double diff = BLACK_DIFF(offValues[k]);
// double diff = 8 - offValues[k];
double diff = 3 - offValues_min[k];
if (offValues[k] > 15)
{
diff = 15 - offValues[k];
}
double step = radio * diff;
// int preStep = offsetStep[k];
// if (step * preStep < 0)
// {
// //step = 0 - preStep / 2;
// step /= 2;
// }
// else
// {
// radio = 1;
// }
if (step < 1 && step > 0.5)
step = 1;
if (step < -0.5 && step > -1)
step = -1;
// FMT_STEP(step);
bool isMinStep = abs(step) == 1 && step == offsetStep[k];
bool isOutBounds = offsets[j] >= 255 && step > 0;
printf("\r\n");
isOutBounds |= offsets[j] <= 0 && step < 0;
log += " 暗场校正 :" + std::to_string(k) + ";diff:" + std::to_string(diff) + ";light:" + std::to_string(offValues[k]) + ";offset:" + std::to_string(offsets[j]) + ";step:" + std::to_string(step) + "\r\n";
if (isOutBounds)
log += "" + std::to_string(k) + "条带暗场校正异常,暗场值无法降低 \r\n";
else if (abs(step) > 1 || isMinStep)
{
offsetStep[k] = (int)(step);
printf(" k = %d offsets[%d] = %d step = %f mean = %f\n", k, offset_indexs[k], offsets[offset_indexs[k]], step, offValues[k]);
offsets[offset_indexs[k]] += step;
log += "offsetStep" + std::to_string(k) + " = " + std::to_string(offsetStep[k]) + ", offset_indexs" + std::to_string(k) + " =" + std::to_string(offset_indexs[k]) + "\r\n";
if (offsets[offset_indexs[k]] < 1)
offsets[offset_indexs[k]] = 1;
if (offsets[offset_indexs[k]] > 255)
offsets[offset_indexs[k]] = 255;
isNeedSave = false;
}
log += (s == 0 ? "彩色正面" : "彩色背面");
log += "偏移值:" + std::to_string(offsets[0]) + "," + std::to_string(offsets[1]) + "," + std::to_string(offsets[2]) + "," + std::to_string(offsets[3]) + "," + std::to_string(offsets[4]) + "," + std::to_string(offsets[5]) + "\r\n";
// log += (s == 0 ? "彩色正面暗场校正完成 \r\n" : "彩色背面暗场校正完成 \r\n");
// ftt.append_log(log);
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
log = "";
}
memcpy((s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]), offsets, sizeof(param.OffsetF));
// memset(&param.OffsetF[0],0,sizeof(param.OffsetF));
// memset(&param.OffsetB[0],0,sizeof(param.OffsetF));
// param.OffsetB[5] =255;
// param.OffsetF[4] =255;
}
if (isNeedSave)
{
printf("Save LUT image path :%s \n", param.Flat_BwPath.c_str());
log = "暗场校正完成 \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
log = "";
mrgmat = mrgmat(cv::Rect(0, 10, mrgmat.cols, mrgmat.rows -10));
auto svmat = extractRepresentRow2(mrgmat); // column-wise mean of the source image, to suppress noise from any single pixel column
imwrite(param.Flat_BwPath, svmat);
}
}
else // bright field
{
if (mode == IMAGE_COLOR)
{
volatile double values[2][3];
cv::Scalar a = cv::mean(mrgmat(cv::Rect(0, 10, mrgmat.cols / 2, mrgmat.rows-10)));
cv::Scalar b = cv::mean(mrgmat(cv::Rect(mrgmat.cols / 2, 10, mrgmat.cols / 2, mrgmat.rows-10)));
static int indxxx=0;
// if(indxxx <= 10)
// {
// cv::imwrite(std::to_string(++indxxx)+".bmp",mrgmat);
// }
for (char j = 0; j < 3; j++)
{
values[0][j] = a.val[2-j];
values[1][j] = b.val[2-j];
printf("values[0][%d] = %.2f a.val[%d] = %.2f values[1][%d] = %.2f b.val[%d] = %.2f\n", 2-j, values[0][j],j,a.val[j], 2-j, values[1][j],j,b.val[j]);
}
log = "开始彩色明场校正 \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
for (int s = 0; s < 2; s++)
{
volatile int exposures[3]; // = (int *)(s == 0 ? param.ExposureF : param.ExposureB);
memcpy((void *)exposures, (s == 0 ? &param.ExposureF[0] : &param.ExposureB[0]), sizeof(param.ExposureB));
for (int x = 0; x < 3; x++)
{
int k = (3 * s + x);
// int diff = LIGHT_DIFF(*((double *)values + k));
// int diff;//= 240 - *((double *)values + k);;//param.MaxBright - *((double *)values + k);
// if(x==0)
// diff = 190 - *((double *)values + k);
// else
// diff = 240 - *((double *)values + k);
int diff = 200 - *((double *)values + k);
log += " 明场:" + std::to_string(k) + ";diff" + std::to_string(diff) + "\r\n";
double step = diff * radio;
int preStep = *((int *)expStep + k);
if (step * preStep < 0)
{
step = 0 - preStep / 2;
}
if (step < 1 && step > 0)
step = 1;
if (step < 0 && step > -1)
step = -1;
bool isMinStep = abs(step) <= 2 && step == *((int *)expStep + k);
bool isOutBounds = exposures[x] >= (param.Sp - 5) && step > 0;
isOutBounds |= exposures[x] <= 0 && step < 0;
if (isOutBounds)
log += "" + to_string(x) + "个明场校正异常 \r\n";
else if (abs(diff) >= 1 || isMinStep)
{
*((int *)expStep + k) = (int)(step);
exposures[x] += step;
if (exposures[x] > (param.Sp - 5))
{
exposures[x] = (param.Sp - 5);
}
if (exposures[x] < 0)
exposures[x] = 0;
isNeedSave = false;
}
log += " 曝光值:" + to_string(exposures[x]) + "\r\n";
log += " 调整步长:" + to_string(*((int *)expStep + k)) + "\r\n";
}
memcpy((s == 0 ? &param.ExposureF[0] : &param.ExposureB[0]), (void *)exposures, sizeof(param.ExposureB));
}
// ftt.append_log(log);
printf("\n%s",log.c_str());
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
if (isNeedSave)
{
log = "彩色明场校正完成\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
log = "";
mrgmat = mrgmat(cv::Rect(0, 10, mrgmat.cols, mrgmat.rows -10));
auto svmat = extractRepresentRow2(mrgmat); // column-wise mean of the source image, to suppress noise from any single pixel column
imwrite(param.Flat_WhitePath, svmat);
}
}
else
{
double values[2];
values[0] = cv::mean(mrgmat(cv::Rect(0, 0, mrgmat.cols / 2, mrgmat.rows))).val[0];
values[1] = cv::mean(mrgmat(cv::Rect(mrgmat.cols / 2, 0, mrgmat.cols / 2, mrgmat.rows))).val[0];
printf("values[0] = %.2f values[1] = %.2f\n", values[0], values[1]);
log = "-----开始灰色明场校正-----\r\n";
log += " 灰色扫描灰度明场均值:" + to_string(values[0]) + "," + to_string(values[1]) + "\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
for (int s = 0; s < 2; s++)
{
int *exposures = (int *)(s == 0 ? param.ExposureF : param.ExposureB);
// int diff = LIGHT_DIFF(values[s]);
int diff = 200 - values[s];
double step = diff * radio;
log += " 明场:" + to_string(s) + ";diff" + to_string(diff) + "\r\n";
int preStep = expStep[s][0];
if (step * preStep < 0)
{
step = 0 - preStep / 2;
}
else
{
radio = 1;
}
if (step < 1 && step > 0)
step = 1;
if (step < 0 && step > -1)
step = -1;
int exp = *(exposures + 1);
std::string ss1(string_format("exp[%d] = %d step = %.3f \r\n", s, exp, step));
log += ss1;
bool isMinStep = abs(step) <= 2 && step == expStep[s][0];
bool isOutBounds = exp >= (param.Sp - 5) && step > 0;
isOutBounds |= exp <= 0 && step < 0;
if (isOutBounds)
log += "" + to_string(s) + "个明场校正异常 \r\n";
else if (abs(diff) > 1 || isMinStep)
{
exp += step;
if (exp < 0)
exp = 0;
if (exp > (param.Sp - 5))
exp = (param.Sp - 5);
float coffe[3] = {1, 1, 1}; // 0.2, 1,0.51
for (int k = 0; k < 3; k++)
{
*(exposures + k) = (int)(exp * coffe[k]);
expStep[s][k] = (int)(step);
std::string exps(string_format("expStep[%d][%d] = %.3f\r\n", s, k, step));
log += exps;
std::string ss(string_format("exposures[%d] = %0.3f \r\n", k, exposures[k]));
log += ss;
}
isNeedSave = false;
}
}
// ftt.append_log(log);
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
if (isNeedSave)
{
printf("Save LUT image path :%s \n", param.Flat_WhitePath.c_str());
log = "灰度明场校正完成\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
log = "";
mrgmat = mrgmat(cv::Rect(0, 10, mrgmat.cols, mrgmat.rows -10));
auto svmat = extractRepresentRow2(mrgmat); // column-wise mean of the source image, to suppress noise from any single pixel column
imwrite(param.Flat_WhitePath, svmat);
}
}
}
SaveFpgaparam(param);
printf("exit Save_lut \n");
return isNeedSave;
}
void MultiFrameCapture::formatStep()
{
for (int i = 0; i < 2; i++)
{
for (int j = 0; j < 3; j++)
{
expStep[i][j] = 600;
}
}
for (int i = 0; i < 12; i++)
offsetStep[i] = 64;
}
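// correctcolor(): runs flat-field calibration for the combinations selected by correctmode:
//   0 = all of the following
//   1 = 200 dpi gray    2 = 200 dpi color
//   3 = 300 dpi gray    4 = 300 dpi color
//   5 = 600 dpi gray    6 = 600 dpi color
// Progress is reported to the host through m_glue.m_deviceevent() with AUTO_FLATTING events and a
// final AUTO_FLAT_FINISHED when all selected combinations are done.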
void MultiFrameCapture::correctcolor(int correctmode)
{
StopWatch sw_correct;
std::string loginfo = "Start Correctcolor 300DPI Gray \r\n";
if ((correctmode == 0) || (correctmode == 2))
{
loginfo = "Start Correctcolor 200DPI COLOR \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
creatcorrectconfig(0x01, IMAGE_COLOR);
loginfo = "-----------200DPI COLOR Correct Done----------- \r\n\r\n ";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if ((correctmode == 0) || (correctmode == 1))
{
loginfo = "Start Correctcolor 200DPI GRAY \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
creatcorrectconfig(0x01, IMAGE_GRAY);
loginfo = "-----------200DPI Gray Correct Done----------- \r\n\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if ((correctmode == 0) || (correctmode == 4))
{
loginfo = " Start Correctcolor 300DPI COLOR \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
creatcorrectconfig(0x02, IMAGE_COLOR);
loginfo = "-----------300DPI COLOR Correct Done----------- \r\n\r\n ";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if ((correctmode == 0) || (correctmode == 3))
{
loginfo = "Start Correctcolor 300DPI GRAY \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
creatcorrectconfig(0x02, IMAGE_GRAY);
loginfo = "-----------300DPI Gray Correct Done----------- \r\n\r\n ";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if ((correctmode == 0) || (correctmode == 6))
{
loginfo = "Start Correctcolor 600DPI COLOR \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
creatcorrectconfig(0x03, IMAGE_COLOR);
loginfo = "-----------600DPI COLOR Correct Done----------- \r\n\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if ((correctmode == 0) || (correctmode == 5))
{
loginfo = " Start Correctcolor 600DPI GRAY \r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
creatcorrectconfig(0x03, IMAGE_GRAY);
loginfo = "-----------600DPI Gray Correct Done----------- \r\n\r\n ";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if ((correctmode < 0) || (correctmode > 6))
{
loginfo = "不支持的校正模式...\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
}
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLAT_FINISHED, "******Correct Done times = " + to_string(sw_correct.elapsed_s()) + " ****** \r\n");
}
void MultiFrameCapture::openDevice(int dpi, int mode)
{
bool dunnancis = true;
int channelwidth = dpi == 0x02 ? 864 : (dpi == 0x03 ? 1728 : 576); // 1296 2592 864
int channels = mode == 0x01 ? 3 : 1;
int width = channelwidth * channels;
int frame_height = mode == 0x01 ? 60 * 3 : 60;
int startsample = 202; // 205
int t_real_dpi = dpi == 1 ? 2 : (dpi == 2 ? 2 : 3);
resolution_ = dpi; // dpi: 1 = 200, 2 = 300, 3 = 600
color_mode_ = mode == 1 ? COLOR : GRAY;
compute_height(WIDTH , HEIGHT);
int config_dpi = resolution_;
int config_color = color_mode_ ==COLOR ? 1:0;
StopWatch swwv4l2open;
video->HtCamSetClolr(color_mode_);
video->HtCamSetDpi(resolution_);
FPGAConfigParam fpgaparam = GetFpgaparam(config_dpi, config_color);
video->HtCamSetSpTime(fpgaparam.Sp,fpgaparam.MaxExp);
if (color_mode_)
{
video->HtCamSetSpTime2(fpgaparam.Sp * 3);
}
video->HtCamSetStSp(fpgaparam.MaxBright);
configFPGAParam(config_color, config_dpi);
printf(" -----------------------resolution = %d config_color = %d config_dpi:%d------------------\r\n",resolution_, config_color,config_dpi);
{
int val = 1;
video->HtCamSetFrameCnt(val);
printf(" -----------------------设置帧数:%d------------------\r\n",val);
}
printf("颜色模式:%s\r\n",color_mode_== COLOR ? "彩色":"灰色");
printf("分辨率:%d\r\n",resolution_);
printf("V4L2宽%d 高:%d\r\n",v4l2_width_,v4l2_height_);
printf("像素宽:%d 高: %d\r\n",pixels_width_,pixels_height_);
printf("color_mode_:%d\r\n", color_mode_);
printf("paper_size_:%d\r\n", paper_size_);
printf("paper_size_:%d\r\n", paper_size_);
int ret = video->open_device(v4l2_width_,v4l2_height_);
if (ret < 0)
return;
int i = 1 ;
char *buf = NULL;
while (i >= 0)
{
i = video->HtCamReadCaptureFrame((void **)&buf, 500);
}
printf("opened video with width = %d height = %d time eplased = %.2f pbuffer = %p \n", width, 60 * 2, swwv4l2open.elapsed_ms(),buf);
}
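// creatcorrectconfig(): calibration driver for one dpi/colour combination. The device is opened in a
// short 60-line capture mode, then two passes run:
//   1. dark field  - both exposure banks are forced to 0 and saveLutImg(..., true) is iterated until
//      the ADC offsets converge;
//   2. bright field - the exposures from the stored FPGA parameters are restored via configFPGAParam()
//      and saveLutImg(..., false) is iterated until the exposures converge.
// Every iteration starts and stops a short video capture; the device is closed at the end.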
void MultiFrameCapture::creatcorrectconfig(int dpi, int mode)
{
openDevice(dpi, mode);
bool isDone = false;
formatStep();
int i = 0;
radio = 1;
while (!isDone) // dark field first
{
//break ;
string log = "==============================第" + to_string(i) + "次===============================\r\n";
// ftt.append_log(log);
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
int config_dpi = resolution_;
int config_color = color_mode_ ==COLOR ? 1:0;
configFPGAParam(mode, dpi);
// ftt.append_log(log);
printf("log :%s\r\n",log.c_str());
std::this_thread::sleep_for(std::chrono::milliseconds(5));
unsigned int F[3]={0,0 ,0};
video->HtCamChangeExposureValueF(F);
video->HtCamChangeExposureValueB(F);
std::this_thread::sleep_for(std::chrono::milliseconds(5));
video->HtCamStartVideoCapturing();
std::this_thread::sleep_for(std::chrono::milliseconds(5));
isDone = saveLutImg(dpi, mode, true); // 0 color_black 1 color_white 2 gray_black 3 gray_white
video->HtCamStopVideoCapturing();
this_thread::sleep_for(std::chrono::milliseconds(200));
i++;
}
isDone = false;
formatStep();
while (!isDone) // then bright field
{
string log = "==============================第" + to_string(i) + "次===============================\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
configFPGAParam(mode, dpi);
printf("log :%s\r\n", log.c_str());
std::this_thread::sleep_for(std::chrono::milliseconds(5));
video->HtCamStartVideoCapturing();
std::this_thread::sleep_for(std::chrono::milliseconds(5));
isDone = saveLutImg(dpi, mode, false); // 0 color_black 1 color_white 2 gray_black 3 gray_white
video->HtCamStopVideoCapturing();
this_thread::sleep_for(std::chrono::milliseconds(200));
i++;
}
printf("creatcorrectconfig %s \n", (mode == IMAGE_COLOR ? " Color" : " Gray"));
//creatLUTData(dpi, mode);
video->close_device();
}