#include "MultiFrameCapture.h"
// NOTE(review): the next two #include directives lost their <...> header names
// during a formatting/extraction pass. <thread> and <mutex> are the most likely
// originals (std::thread / std::lock_guard / std::condition_variable are used
// below) — confirm against the original source.
#include <thread>
#include <mutex>
#include "DevUtil.h"
#include "Gpio.h"
#include "CameraParam.h"
#include "correct_ultis.h"
#include "utilsfunc.h"
#include "CImageMerge.h"
#include "Jpegcompress.h"
#include "ThreadPool.h"
#include "HCamDevice.h"
#include "StopWatch.h"

const int vsp_A = 45;
const int vsp_B = 45;

#define ARRAYLEN(table) (sizeof(table) / sizeof(table[0]))

// using namespace cv;

/// Constructor: defaults to 300 dpi, gray, auto paper size, and spawns the
/// capture (snaprun) and image-processing (procimage) worker threads.
MultiFrameCapture::MultiFrameCapture(ScannerGlue glue)
    : b_snap_run(true), b_stop_snap(false),
      m_glue(glue),
      bScanning(false),
      // defaults: 300 dpi and gray mode
      resolution_(DPI_300),
      v4l2_width_(WIDTH),
      v4l2_height_(HEIGHT),
      pixels_width_(WIDTH),
      paper_size_((unsigned int)PaperSize::G400_AUTO),
      color_mode_(GRAY),
      is_correct_(0),
      is_double_paper_(0),
      is_remove_morr_(0)
{
    video.reset(new HCamDevice);
    video->HtCamSetdivder(true);
    m_snap_thread.reset(new std::thread(&MultiFrameCapture::snaprun, this));
    m_imgproc_thread.reset(new std::thread(&MultiFrameCapture::procimage, this));
}

/// Destructor: releases the camera device and joins the image-processing thread.
/// NOTE(review): m_snap_thread is never joined and b_snap_run is never cleared,
/// so the snaprun loop (blocked on m_cv_snap) is leaked at destruction. A safe
/// fix needs a coordinated change in snaprun's wait condition — flagged only.
MultiFrameCapture::~MultiFrameCapture()
{
    if (video.get())
        video.reset();
    if (m_imgproc_thread.get())
    {
        if (m_imgproc_thread->joinable())
        {
            m_imgproc_thread->join();
        }
    }
}

/// Re-applies the clock-divider setting; the scanner parent pointer is unused.
void MultiFrameCapture::SetParent(void *scanner)
{
    video->HtCamSetdivder(true);
}

/// Opens the V4L2 device at the currently configured geometry and drains any
/// stale buffered frames (reads with a 10 ms timeout until the driver returns
/// a negative result).
/// NOTE(review): `ret < -1` only bails out for results <= -2; if open_device
/// signals failure with -1 this check never triggers — confirm the driver's
/// error convention (possibly intended `ret < 0`).
void MultiFrameCapture::open()
{
    int ret = video->open_device(v4l2_width_, v4l2_height_);
    if (ret < -1)
        return;
    int i = 1;
    char *buf = NULL;
    while (i >= 0)
    {
        i = video->HtCamReadCaptureFrame((void **)&buf, 10);
    }
}

/// Arms a capture pass: clears the stop/end flags, waits a mode-dependent
/// settle delay (empirically tuned per dpi/moire mode), starts the video
/// stream, and wakes the snaprun worker.
void MultiFrameCapture::snap()
{
    std::lock_guard m_lock(m_mtx_snap);
    b_stop_snap = b_end_snap = false;
    if (is_remove_morr_)
        this_thread::sleep_for(std::chrono::milliseconds(70));
    else
    {
        if (resolution_ == 3)
            this_thread::sleep_for(std::chrono::milliseconds(140));
        else if (resolution_ == 2 && paper_size_ != (int)PaperSize::G400_AUTO && color_mode_ == 1)
            this_thread::sleep_for(std::chrono::milliseconds(45));
        else
            this_thread::sleep_for(std::chrono::milliseconds(10));
    }
    video->HtCamStartVideoCapturing();
    m_cv_snap.notify_all();
}

void
MultiFrameCapture::stopsnap(bool autosize) { if (autosize) { if (is_remove_morr_) this_thread::sleep_for(std::chrono::milliseconds(60)); // else this_thread::sleep_for(std::chrono::milliseconds(30)); // video->HtCamStopSampling(); b_stop_snap = true; } } void MultiFrameCapture::close() { if (video.get()) video->close_device(); } int MultiFrameCapture::read(int addr) { } void *MultiFrameCapture::readFrameTest(int timeout) { return nullptr; } void MultiFrameCapture::SetLowPower(bool islow) { video->HtCamSetLowPower(islow); } #include uint32_t MultiFrameCapture::compute_frame(int paper_size,int dpi) { SIZE size = GetPaperSize((PaperSize)paper_size,dpi); int val = ceil(((float)size.cy) /(float)pixels_height_); printf("幅面高度:%d 采集高度:%d 采集帧数:%d\r\n",size.cy,pixels_height_,val); //多加50是因为 你不清楚你需要采集多少数据,所以设置大点,设置太小 会导致 你实际采集的数据比你设置的大,这就会导致 ps层开辟的空间不够,然后导致采集的时候超时 //固定幅面的加减是根据你需要采集多少才能采集全图 ,+操作避免采集帧数不够 ,-操作避免采集帧数过多消耗资源 if (paper_size_ == (int)PaperSize::G400_AUTO || paper_size_ == (int)PaperSize::G400_MAXAUTO || paper_size_ == (int)PaperSize::G400_MAXSIZE ) { return val += 50; } if (is_remove_morr_) { return val += 2; } if (resolution_ == 1 && paper_size_== (int)PaperSize::G400_A4) { return val += 1; } } void MultiFrameCapture::compute_height(int width,int height) { //////这块配置一定要注意 v4l2_width_ = resolution_ == DPI_600 ? width * 2 :(resolution_ == DPI_300 ? width :(float)width / 1.5) ; //宽 :DPI不变下 彩色灰度是一样的 //width_ = paper_size_ == PaperSize::G400_MAXSIZE || paper_size_ ==PaperSize::G400_MAXAUTO && v4l2_height_ = height; pixels_width_ = color_mode_ == COLOR ? v4l2_width_ * 3 : v4l2_width_; pixels_height_ = color_mode_ == COLOR ? 
v4l2_height_ / 3 : v4l2_height_;
}

/// Converts a logical paper size to pixel dimensions at the given dpi
/// (paper_map_ stores millimetres; 25.4 mm per inch). Falls back to a
/// fixed default when the size is not in the map.
SIZE MultiFrameCapture::GetPaperSize(PaperSize paper, int dpi)
{
    if (paper_map_.find(paper) != paper_map_.end())
    {
        SIZE resize{2480,3507};
        resize.cx = paper_map_[paper].cx * dpi / 25.4;
        resize.cy = paper_map_[paper].cy * dpi / 25.4;
        return resize;
    }
    return SIZE{2338, 3307};
}

/// Applies a full scan configuration: caches it, derives geometry, programs
/// the camera/FPGA (color, dpi, SP timing, exposure/gain/offset registers)
/// and finally sets the frame count for the pass.
void MultiFrameCapture::UpdateScanParam(HG_ScanConfiguration config)
{
    m_config = config;
    // moire-removal scans at 400 dpi timing; force the 300 dpi table unless 600 was asked
    if(config.params.slow_moire && config.params.dpi != 3)
    {
        config.params.dpi = 2;
    }
    if (!video.get())
    {
        return ;
    }
    resolution_ = config.params.dpi; // PC side dpi code: 1 = 200, 2 = 300, 3 = 600
    color_mode_ = config.params.isColor;
    is_correct_ = config.params.isCorrect;
    paper_size_ = config.params.pageSize;
    is_double_paper_ = config.params.doubleFeeded;
    is_remove_morr_ = config.params.slow_moire;
    //width_ = paper_size_ == PaperSize::G400_MAXSIZE || paper_size_ ==PaperSize::G400_MAXAUTO &&
    video->HtCamSetPeriod(config.params.dc);
    // per-dpi V4L2 frame height; 300 dpi gray uses a smaller 216-line frame
    int height = config.params.dpi == 3 ? 873 :(config.params.dpi == 2?549:297);
    if (config.params.dpi == 2 && !color_mode_)
    {
        height = 216;
    }
    compute_height(WIDTH , height);
    video->HtCamSetClolr(color_mode_);
    video->HtCamSetDpi(resolution_);
    FPGAConfigParam fpgaparam = GetFpgaparam(resolution_, color_mode_);
    video->HtCamSetSpTime(fpgaparam.Sp,fpgaparam.MaxExp); // 3-channel 75, 6-channel 49
    if (color_mode_)
    {
        video->HtCamSetSpTime2(fpgaparam.Sp * 3);
    }
    // 172 was found by image-quality testing (172 for 6-channel, 260 for 3-channel)
    video->HtCamSetStSp(fpgaparam.MaxBright);
    configFPGAParam(color_mode_, resolution_);
    {
        int dpi = resolution_ == 3 ? 600 :(resolution_ == 2?300:200);
        // moire-removal mode overrides SP timing (moire scans are 400 dpi, 4677-line height)
        if(config.params.slow_moire && resolution_ != 3)
        {
            int sp = fpgaparam.Sp + 241; // 258
            if (color_mode_ == GRAY)
                sp = 6042;
            else
                sp = fpgaparam.Sp + 241;
            dpi = 400;
            video->HtCamSetSpTime(sp,fpgaparam.MaxExp); // gray 2344; latest 2650 (2023-8-10)
            if (color_mode_)
            {
                video->HtCamSetSpTime2(sp * 3);
                printf("摩尔纹彩色sp = %d\r\n",sp * 3);
            }
            printf("摩尔纹DPI = %d\r\n",dpi);
            printf("摩尔纹灰度sp = %d\r\n",sp);
        }
        // long-document (max size) mode uses 600 dpi paper-feed speed;
        // these SP values were tuned at that feed speed
        if (paper_size_ == (unsigned int)PaperSize::G400_MAXSIZE && !config.params.slow_moire)
        {
            int sp = 0;
            if (color_mode_ == GRAY)
                sp = resolution_ == 1 ? 22950 : 15450;
            else
                sp = resolution_ == 1 ? 7800 : 5210;
            video->HtCamSetSpTime(sp,fpgaparam.MaxExp); // gray 2344; latest 2650 (2023-8-10)
            printf("长幅面 灰度sp = %d\r\n",sp);
            if (color_mode_)
            {
                video->HtCamSetSpTime2(sp * 3);
                printf("长幅面 彩色sp = %d\r\n",sp * 3);
            }
        }
        // Frame-count setting: the number of frames directly affects USB
        // transfer efficiency — keep it as small as safely possible.
        uint32_t cnt = compute_frame(paper_size_ , dpi) ;
        video->HtCamSetFrameCnt(cnt);
        printf(" -----------------------设置帧数:%d------------------\r\n",cnt);
    }
    printf("颜色模式:%s\r\n",color_mode_== COLOR ? "彩色":"灰色");
    printf("分辨率:%d\r\n",resolution_);
    printf("V4L2宽: %d 高:%d\r\n",v4l2_width_,v4l2_height_);
    printf("像素宽:%d 高: %d\r\n",pixels_width_,pixels_height_);
    printf("paper_size_:%d\r\n", paper_size_);
}

/// Launches the flat-field calibration worker thread for the given mode,
/// joining any previous run first and stopping the countdown timer.
void MultiFrameCapture::createCorrect(int correctmode)
{
    if (m_correctThread.joinable())
        m_correctThread.join();
    stop_countdown();
    m_correctThread = std::thread(&MultiFrameCapture::correctcolor, this, correctmode);
}

// The following are interface stubs with no hardware equivalent on this device.
void MultiFrameCapture::setFPGATriggerMode(bool autotrigger, int delay)
{
}
void MultiFrameCapture::setFanMode(int mode)
{
}
void MultiFrameCapture::fpgaReload()
{
}
bool MultiFrameCapture::capturerImage()
{
    return true;
}

/// Blocks until the capture loop signals completion, then delivers one empty
/// "last frame" packet carrying the given error/state code to the consumer.
/// If the pass already ended (b_end_snap), the packet is sent immediately.
void MultiFrameCapture::waitsnapdone(int state)
{
    StopWatch sw;
    sw.reset();
    // Synthetic terminal packet: zero-sized image, last_frame = true,
    // error_code = state.
    HG_JpegCompressInfo ip;
    ip.error_code = state;
    ip.height = 0;
    ip.width = 0;
    ip.index_frame = 0;
    ip.DataLength = sizeof("0");
    ip.first_frame = false;
    ip.last_frame = true;
    ip.pJpegData = (unsigned char*)"0";
    if (b_end_snap)
    {
        printf("b_end_snap b_end_snap b_end_snap b_end_snap\r\n");
        m_glue.m_imageready(ip);
        return;
    }
    sw.reset();
    std::unique_lock lock(m_mtx_snapdone);
    m_cv_snapdone.wait(lock);
    m_glue.m_imageready(ip); // send one empty packet
    b_end_snap = true;
    printf("走纸结束,等待采集完成耗时:%f \n",sw.elapsed_ms());
}

/// "Queue empty" is approximated by the scanning flag here.
bool MultiFrameCapture::IsImageQueueEmpty()
{
    return !bScanning;
}
void MultiFrameCapture::resetimageremain()
{
}
std::atomic_int &MultiFrameCapture::getimageremain()
{
    return iImageremain;
}
void MultiFrameCapture::clearimages()
{
}
void MultiFrameCapture::setScanFlag(bool brun)
{
    bScanning = brun;
}

/// Writes the per-mode FPGA parameters: front/back exposure values, then the
/// six per-channel gain and offset ADC registers for each side.
void MultiFrameCapture::configFPGAParam(int mode, int dpi)
{
    FPGAConfigParam fpgaparam = GetFpgaparam(dpi, mode);
    video->HtCamChangeExposureValueF(fpgaparam.ExposureF);
    video->HtCamChangeExposureValueB(fpgaparam.ExposureB);
    for (int i = 0; i < 6; i++)
    {
        video->HtCamWriteADCReg_ALL(true,true,i,fpgaparam.GainF[i]);
        video->HtCamWriteADCReg_ALL(false,true,i,fpgaparam.OffsetF[i]);
        video->HtCamWriteADCReg_ALL(true,false,i,fpgaparam.GainB[i]);
        video->HtCamWriteADCReg_ALL(false,false,i,fpgaparam.OffsetB[i]);
    }
    // for (size_t i = 0; i < 20; i++)
    // {
    //     //video->HtCamReadADCReg_ALL(i);
    // }
};
#include "bmp.h"
/// Capture worker thread body. Waits on m_cv_snap, then reads the configured
/// number of frames from the camera, forwarding each to m_glue.m_imageready.
/// When b_stop_snap is raised mid-pass it drains the frames the CIS already
/// produced, then stops capturing and signals m_cv_snapdone.
void MultiFrameCapture::snaprun()
{
    // snap_func: info_ex carries the frame metadata to send, channels is the
    // image bit depth, time_out the read timeout (ms), i the 1-based frame index.
    auto snap_func = [this](V4L2_DATAINFO_Ex info_ex, int channels,int &time_out,int i)
    {
        StopWatch sw;
        sw.reset();
        unsigned char *data = NULL;
        int ret = video->HtCamReadCaptureFrame((void **)&data, time_out);
        uint32_t sendLine = video->HtCamReadFpgaRegs(0x000e); // 0x000e: actual line count sent by the FPGA
        printf("--------------fpga send line ------------:%d\r\n",sendLine);
        if (data)
        {
            // On the second frame, estimate the total pass duration and pass it
            // to image_cb_, then wake anyone blocked in wait_image_notify().
            if (i == 2)
            {
                uint32_t cnt = 25;
                if (paper_size_ != (unsigned int)PaperSize::G400_MAXSIZE && !is_remove_morr_)
                {
                    int val = resolution_ == 3 ? 600 :(resolution_ == 2?300:200);
                    cnt = compute_frame((unsigned int)PaperSize::G400_AUTO , val) ;
                    cnt *=(sw.elapsed_ms() +3);
                }
                else
                    cnt = (info_ex.frame_index + 3) * sw.elapsed_ms();
                if (is_remove_morr_)
                {
                    cnt = 3000;
                }
                image_cb_(cnt);
                img_wait_.notify_all();
                // cv::imwrite("/home/root/test.bmp", mat);
                //cv::imwrite("/home/root/test.bmp", mat);
                //savebitmap(data,10368,100,"/home/root/1.bmp");
                printf("第二帧图像采集耗时:%f\r\n",sw.elapsed_ms());
            }
            HG_JpegCompressInfo info ;
            info.pJpegData = data;
            info.dpi = resolution_ ;
            info.DataLength = info_ex.width * info_ex.height;
            info.first_frame = info_ex.first_frame;
            info.last_frame = info_ex.last_frame;
            // +1 leaves room for the trailing dummy frame that carries the
            // error code alongside the data segment.
            info.index_frame = info_ex.frame_index + 1;
            info.width = info_ex.width;
            info.height = info_ex.height;
            //printf("获取数据 width:%d height:%d is_first:%d is_last:%d DataLength:%d\r\n",info_ex.width,info_ex.height,info.first_frame,info.last_frame,info.DataLength);
            //printf("index_frame:%d\r\n",info.index_frame);
            m_glue.m_imageready(info);
        }
        return ret;
    };
    static int ti = 100;
    while (b_snap_run)
    {
        // Sleep until snap() arms a pass.
        std::unique_lock lock(m_mtx_snap);
        m_cv_snap.wait(lock);
        V4L2_DATAINFO_Ex frame_info_;
        frame_info_.pixtype = color_mode_;
        frame_info_.dpi = resolution_;
        frame_info_.width = pixels_width_;
        frame_info_.height = pixels_height_;
        frame_info_.error_code = 0;
        frame_info_.first_frame = false;
        frame_info_.last_frame = false;
        int channels = color_mode_ == 1 ? 3 : 1;
        int color_mode = video->HtCamGetColorMode();
        int func_sig = 0;
        // Read timeout tuned to the per-frame data volume.
        int time_out = resolution_ == DPI_600 ? 2000 : 500;
        if (is_remove_morr_)
        {
            time_out = 2000;
        }
        uint32_t frame_num = 0;
        uint32_t frame_cnt = 0;
        video->HtCamGetFrameCnt(frame_cnt);
        printf("获取设置的帧数:%d\r\n",frame_cnt);
        // Do not change this: a reported value of 8 means frames 0-8,
        // i.e. 9 frames are actually captured.
        frame_info_.frame_index = frame_cnt;
        // video->HtCamSetVsnpTime(ti);
        // printf("设置 vsnp:%d\r\n",ti);
        //ti++;
        // video->HtCamSetStSp(ti);
        // printf("设置 StSp%d\r\n",ti);
        // ti++;
        StopWatch sw;
        sw.reset();
        for (size_t i = 1; i <= frame_info_.frame_index ; i++)
        {
            printf("***********设置的帧数:%d 正在采集第[%d]帧************\r\n",frame_info_.frame_index,i);
            frame_info_.first_frame = i == 1 ? true : false;
            func_sig = snap_func(frame_info_, channels,time_out,i);
            if (b_stop_snap)
            {
                // Paper feed stopped: drain whatever the CIS already captured.
                video->HtCamGetFrameNum(frame_num);
                int ind = i;                 // frames captured so far
                int val = frame_num - ind;   // frames remaining to drain
                printf("val :%d frame_num:%d i:%d\r\n",val,frame_num,i);
                while (val > 0)
                {
                    frame_info_.frame_index = frame_num;
                    ind++;
                    printf("-----------当前采集到第:[%d]帧 CIS总共采集[%d]帧 -------\r\n",ind,frame_num);
                    func_sig = snap_func(frame_info_, channels,time_out ,ind); // same as above
                    val--;
                }
                break;
            }
        }
        uint32_t in=0;
        video->HtCamStopVideoCapturing();
        //video->HtCamGetFrameNum(in);
        printf("----------整张采集结束 总共采集帧数:%d 耗时:%f ----------\r\n",frame_num,sw.elapsed_ms());
        m_cv_snapdone.notify_all();
        b_end_snap = true;
    }
}
/// Resets the stop flag before a new sheet is fed.
void MultiFrameCapture::updatesnapstatus(int papertype)
{
    b_stop_snap = false;
}
/// Image-processing thread body; currently a no-op placeholder.
void MultiFrameCapture::procimage()
{
}
#define CIS_6CH
/// One iteration of flat-field calibration: grabs a frame, merges it, and
/// either adjusts ADC offsets (black/dark field) or exposures (bright field).
/// Returns true when the parameters converged and the LUT image was saved.
bool MultiFrameCapture::saveLutImg(int dpi, int mode, bool black)
{
    printf("校正DPI[%d] 校正颜色:%s\n",dpi==1?200:(dpi==2?300:600),mode == IMAGE_COLOR?"彩色":"灰色");
    int config_dpi = dpi ;
    // Maps logical band index -> physical ADC offset register index per side.
#ifdef CIS_6CH
    const int offset_indexs[] = {3, 4, 5, 2, 1, 0 ,0, 1, 2, 5, 4, 3};
#else const int offset_indexs[] = {2, 1, 0, 0, 1 ,2}; #endif int channels = mode == IMAGE_COLOR ? 3 : 1; int height = 60; int width = config_dpi == 0x02 ? 864 : (config_dpi == 0x03 ? 1728 : 576); int orgimgwidth = width * 2 * 3 * channels; printf("orgimgwidth:%d\r\n",orgimgwidth); int dstwidth = width * 2 * 3; bool isNeedSave = true; string log; unsigned char *data = NULL; int ret = video->HtCamReadCaptureFrame((void **)&data, 1000); if (data == NULL) { isNeedSave = false; log = "WARNNING WARNNING WARNNING FAILDED TO READ IMAGE DATA !!!!!!!!!!!!!!!!!!!\r\n"; if (m_glue.m_deviceevent) m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log); return isNeedSave; } cv::Mat src(height, orgimgwidth, CV_8UC1, data); CImageMerge t_marge; cv::Mat mrgmat = t_marge.MergeImage(src, dstwidth, height, mode,true); printf("mrgmat width = %d height = %d \n", mrgmat.cols, mrgmat.rows); FPGAConfigParam param = GetFpgaparam(dpi, mode); if (black) // 暗场 { #ifdef CIS_6CH volatile double offValues[12]{0}; double offValues_min[12]{0}; int blockcount = 12; #else volatile double offValues[6]{0}; double offValues_min[6]{0}; int blockcount = 6; #endif int bandwidth = mrgmat.cols / blockcount; for (int n = 0; n < blockcount; n++) { cv::Mat img = mrgmat(cv::Rect(bandwidth * n, 10, bandwidth, mrgmat.rows - 10)).clone(); cv::Scalar mean = cv::mean(img); offValues[n] = mean.val[0]; if (mode) { auto tmp = *std::min_element(img.begin(), img.end(), [](cv::Vec3b a, cv::Vec3b b) -> bool { return (a[0] + a[1] + a[2]) < (b[0] + b[1] + b[2]); }); offValues_min[n] = (tmp[0] + tmp[1] + tmp[2]) / 3.0; } else { offValues_min[n] = *std::min_element(img.begin(), img.end(), [](std::uint8_t a, std::uint8_t b) -> bool { return a < b; }); } printf("band[%d] mean = %0.2f bandwidth = %d offValues_min [%d] = %.2f \n", n, mean.val[0], bandwidth, n, offValues_min[n]); } // return 0; for (int s = 0; s < 2; s++) { unsigned int offsets[6]; // = (int *)(s == 0 ? 
¶m.OffsetF[0] : ¶m.OffsetB[0]); memcpy(offsets, (s == 0 ? ¶m.OffsetF[0] : ¶m.OffsetB[0]), sizeof(param.OffsetF)); int total_chs = ARRAYLEN(offset_indexs); for (int j = 0; j < total_chs/2; j++) { int k = s * 6 + j; // double diff = BLACK_DIFF(offValues[k]); // double diff = 8 - offValues[k]; double diff = 3 - offValues_min[k]; if (offValues[k] > 15) { diff = 15 - offValues[k]; } double step = radio * diff; // int preStep = offsetStep[k]; // if (step * preStep < 0) // { // //step = 0 - preStep / 2; // step /= 2; // } // else // { // radio = 1; // } if (step < 1 && step > 0.5) step = 1; if (step < -0.5 && step > -1) step = -1; // FMT_STEP(step); bool isMinStep = abs(step) == 1 && step == offsetStep[k]; bool isOutBounds = offsets[j] >= 255 && step > 0; printf("\r\n"); isOutBounds |= offsets[j] <= 0 && step < 0; log += " 暗场校正 :" + std::to_string(k) + ";diff:" + std::to_string(diff) + ";light:" + std::to_string(offValues[k]) + ";offset:" + std::to_string(offsets[j]) + ";step:" + std::to_string(step) + "\r\n"; if (isOutBounds) log += " 第" + std::to_string(k) + "条带暗场校正异常,暗场值无法降低 \r\n"; else if (abs(step) > 1 || isMinStep) { offsetStep[k] = (int)(step); printf(" k = %d offsets[%d] = %d step = %f mean = %f\n", k, offset_indexs[k], offsets[offset_indexs[k]], step, offValues[k]); offsets[offset_indexs[k]] += step; log += "offsetStep" + std::to_string(k) + " = " + std::to_string(offsetStep[k]) + ", offset_indexs" + std::to_string(k) + " =" + std::to_string(offset_indexs[k]) + "\r\n"; if (offsets[offset_indexs[k]] < 1) offsets[offset_indexs[k]] = 1; if (offsets[offset_indexs[k]] > 255) offsets[offset_indexs[k]] = 255; isNeedSave = false; } log += (s == 0 ? "彩色正面" : "彩色背面"); log += "偏移值:" + std::to_string(offsets[0]) + "," + std::to_string(offsets[1]) + "," + std::to_string(offsets[2]) + "," + std::to_string(offsets[3]) + "," + std::to_string(offsets[4]) + "," + std::to_string(offsets[5]) + "\r\n"; // log += (s == 0 ? 
"彩色正面暗场校正完成 \r\n" : "彩色背面暗场校正完成 \r\n"); // ftt.append_log(log); if (m_glue.m_deviceevent) m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log); log = ""; } memcpy((s == 0 ? ¶m.OffsetF[0] : ¶m.OffsetB[0]), offsets, sizeof(param.OffsetF)); // memset(¶m.OffsetF[0],0,sizeof(param.OffsetF)); // memset(¶m.OffsetB[0],0,sizeof(param.OffsetF)); // param.OffsetB[5] =255; // param.OffsetF[4] =255; } if (isNeedSave) { printf("Save LUT image path :%s \n", param.Flat_BwPath.c_str()); log = "暗场校正完成 \r\n"; if (m_glue.m_deviceevent) m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log); log = ""; mrgmat = mrgmat(cv::Rect(0, 10, mrgmat.cols, mrgmat.rows -10)); auto svmat = extractRepresentRow2(mrgmat);//原图列像素均值,避免单一像圆列成像噪声影响 imwrite(param.Flat_BwPath, svmat); } } else // 明场 { if (mode == IMAGE_COLOR) { volatile double values[2][3]; cv::Scalar a = cv::mean(mrgmat(cv::Rect(0, 10, mrgmat.cols / 2, mrgmat.rows-10))); cv::Scalar b = cv::mean(mrgmat(cv::Rect(mrgmat.cols / 2, 10, mrgmat.cols / 2, mrgmat.rows-10))); static int indxxx=0; // if(indxxx <= 10) // { // cv::imwrite(std::to_string(++indxxx)+".bmp",mrgmat); // } for (char j = 0; j < 3; j++) { values[0][j] = a.val[2-j]; values[1][j] = b.val[2-j]; printf("values[0][%d] = %.2f a.val[%d] = %.2f values[1][%d] = %.2f b.val[%d] = %.2f\n", 2-j, values[0][j],j,a.val[j], 2-j, values[1][j],j,b.val[j]); } log = "开始彩色明场校正 \r\n"; if (m_glue.m_deviceevent) m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log); for (int s = 0; s < 2; s++) { volatile int exposures[3]; // = (int *)(s == 0 ? param.ExposureF : param.ExposureB); memcpy((void *)exposures, (s == 0 ? 
¶m.ExposureF[0] : ¶m.ExposureB[0]), sizeof(param.ExposureB)); for (int x = 0; x < 3; x++) { int k = (3 * s + x); // int diff = LIGHT_DIFF(*((double *)values + k)); // int diff;//= 240 - *((double *)values + k);;//param.MaxBright - *((double *)values + k); // if(x==0) // diff = 190 - *((double *)values + k); // else // diff = 240 - *((double *)values + k); int diff; // if (x==0) // { // diff = 170 - *((double *)values + k); //R曝光值拉低 // } // else diff = 200 - *((double *)values + k); //200灰度等级 log += " 明场:" + std::to_string(k) + ";diff:" + std::to_string(diff) + "\r\n"; double step = diff * radio; int preStep = *((int *)expStep + k); if (step * preStep < 0) { step = 0 - preStep / 2; } if (step < 1 && step > 0) step = 1; if (step < 0 && step > -1) step = -1; bool isMinStep = abs(step) <= 2 && step == *((int *)expStep + k); bool isOutBounds = exposures[x] >= (param.Sp - 5) && step > 0; isOutBounds |= exposures[x] <= 0 && step < 0; if (isOutBounds) log += " 第" + to_string(x) + "个明场校正异常 \r\n"; else if (abs(diff) >= 1 || isMinStep) { *((int *)expStep + k) = (int)(step); exposures[x] += step; if (exposures[x] > (param.Sp - 5)) { exposures[x] = (param.Sp - 5); } if (exposures[x] < 0) exposures[x] = 0; isNeedSave = false; } log += " 曝光值:" + to_string(exposures[x]) + "\r\n"; log += " 调整步长:" + to_string(*((int *)expStep + k)) + "\r\n"; } memcpy((s == 0 ? 
¶m.ExposureF[0] : ¶m.ExposureB[0]), (void *)exposures, sizeof(param.ExposureB)); } // ftt.append_log(log); printf("\n%s",log.c_str()); if (m_glue.m_deviceevent) m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log); if (isNeedSave) { log = "彩色明场校正完成\r\n"; if (m_glue.m_deviceevent) m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log); log = ""; mrgmat = mrgmat(cv::Rect(0, 10, mrgmat.cols, mrgmat.rows -10)); auto svmat = extractRepresentRow2(mrgmat);//原图列像素均值,避免单一像圆列成像噪声影响 imwrite(param.Flat_WhitePath, svmat); } } else { double values[2]; values[0] = cv::mean(mrgmat(cv::Rect(0, 0, mrgmat.cols / 2, mrgmat.rows))).val[0]; values[1] = cv::mean(mrgmat(cv::Rect(mrgmat.cols / 2, 0, mrgmat.cols / 2, mrgmat.rows))).val[0]; printf("values[0] = %.2f values[1] = %.2f\n", values[0], values[1]); log = "-----开始灰色明场校正-----\r\n"; log += " 灰色扫描灰度明场均值:" + to_string(values[0]) + "," + to_string(values[1]) + "\r\n"; if (m_glue.m_deviceevent) m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log); for (int s = 0; s < 2; s++) { int *exposures = (int *)(s == 0 ? 
param.ExposureF : param.ExposureB); // int diff = LIGHT_DIFF(values[s]); int diff = 200 - values[s]; double step = diff * radio; log += " 明场:" + to_string(s) + ";diff:" + to_string(diff) + "\r\n"; int preStep = expStep[s][0]; if (step * preStep < 0) { step = 0 - preStep / 2; } else { radio = 1; } if (step < 1 && step > 0) step = 1; if (step < 0 && step > -1) step = -1; int exp = *(exposures + 1); std::string ss1(string_format("exp[%d] = %d step = %.3f \r\n", s, exp, step)); log += ss1; bool isMinStep = abs(step) <= 2 && step == expStep[s][0]; bool isOutBounds = exp >= (param.Sp - 5) && step > 0; isOutBounds |= exp <= 0 && step < 0; if (isOutBounds) log += " 第" + to_string(s) + "个明场校正异常 \r\n"; else if (abs(diff) > 1 || isMinStep) { exp += step; if (exp < 0) exp = 0; if (exp > (param.Sp - 5)) exp = (param.Sp - 5); float coffe[3] = {1, 1, 1}; // 0.2, 1,0.51 for (int k = 0; k < 3; k++) { *(exposures + k) = (int)(exp * coffe[k]); expStep[s][k] = (int)(step); std::string exps(string_format("expStep[%d][%d] = %.3f\r\n", s, k, step)); log += exps; std::string ss(string_format("exposures[%d] = %0.3f \r\n", k, exposures[k])); log += ss; } isNeedSave = false; } } // ftt.append_log(log); if (m_glue.m_deviceevent) m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log); if (isNeedSave) { printf("Save LUT image path :%s \n", param.Flat_WhitePath.c_str()); log = "灰度明场校正完成\r\n"; if (m_glue.m_deviceevent) m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log); log = ""; mrgmat = mrgmat(cv::Rect(0, 10, mrgmat.cols, mrgmat.rows -10)); auto svmat = extractRepresentRow2(mrgmat);//原图列像素均值,避免单一像圆列成像噪声影响 imwrite(param.Flat_WhitePath, svmat); } } } SaveFpgaparam(param); printf("exit Save_lut \n"); return isNeedSave; } void MultiFrameCapture::formatStep() { for (int i = 0; i < 2; i++) { for (int j = 0; j < 3; j++) { expStep[i][j] = 600; } } for (int i = 0; i < 12; i++) offsetStep[i] = 64; } void MultiFrameCapture::correctcolor(int correctmode) { StopWatch sw_correct; 
std::string loginfo = "Start Correctcolor 300DPI Gray \r\n";
    // correctmode selects which dpi/color combinations to calibrate:
    // 0 = all; 1/2 = 200dpi gray/color; 3/4 = 300dpi gray/color; 5/6 = 600dpi gray/color.
    if ((correctmode == 0) || (correctmode == 2))
    {
        loginfo = "Start Correctcolor 200DPI COLOR \r\n";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
        creatcorrectconfig(0x01, IMAGE_COLOR);
        loginfo = "-----------200DPI COLOR Correct Done----------- \r\n\r\n ";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
    }
    if ((correctmode == 0) || (correctmode == 1))
    {
        loginfo = "Start Correctcolor 200DPI GRAY \r\n";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
        creatcorrectconfig(0x01, IMAGE_GRAY);
        loginfo = "-----------200DPI Gray Correct Done----------- \r\n\r\n";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
    }
    if ((correctmode == 0) || (correctmode == 4))
    {
        loginfo = " Start Correctcolor 300DPI COLOR \r\n";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
        creatcorrectconfig(0x02, IMAGE_COLOR);
        loginfo = "-----------300DPI COLOR Correct Done----------- \r\n\r\n ";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
    }
    if ((correctmode == 0) || (correctmode == 3))
    {
        loginfo = "Start Correctcolor 300DPI GRAY \r\n";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
        creatcorrectconfig(0x02, IMAGE_GRAY);
        loginfo = "-----------300DPI Gray Correct Done----------- \r\n\r\n ";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
    }
    if ((correctmode == 0) || (correctmode == 6))
    {
        loginfo = "Start Correctcolor 600DPI COLOR \r\n";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
        creatcorrectconfig(0x03, IMAGE_COLOR);
        loginfo = "-----------600DPI COLOR Correct Done----------- \r\n\r\n";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
    }
    if ((correctmode == 0) || (correctmode == 5))
    {
        loginfo = " Start Correctcolor 600DPI GRAY \r\n";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
        creatcorrectconfig(0x03, IMAGE_GRAY);
        loginfo = "-----------600DPI Gray Correct Done----------- \r\n\r\n ";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
    }
    if ((correctmode < 0) || (correctmode > 6))
    {
        loginfo = "不支持的校正模式...\r\n";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, loginfo);
    }
    if (m_glue.m_deviceevent)
        m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLAT_FINISHED, "******Correct Done times = " + to_string(sw_correct.elapsed_s()) + " ****** \r\n");
}

/// Opens the device in single-frame calibration mode: programs geometry,
/// SP timing and FPGA parameters for the given dpi/color combo, sets the
/// frame count to 1, opens V4L2 and drains stale frames.
void MultiFrameCapture::openDevice(int dpi, int mode)
{
    bool dunnancis = true;
    int channelwidth = dpi == 0x02 ? 864 : (dpi == 0x03 ? 1728 : 576); // 1296 2592 864
    int channels = mode == 0x01 ? 3 : 1;
    int width = channelwidth * channels;
    int frame_height = mode == 0x01 ? 60 * 3 : 60;
    int startsample = 202; // 205
    int t_real_dpi = dpi == 1 ? 2 : (dpi == 2 ? 2 : 3);
    resolution_ = dpi; // dpi code: 1 = 200, 2 = 300, 3 = 600
    color_mode_ = mode == 1 ? COLOR : GRAY;
    compute_height(WIDTH , HEIGHT);
    int config_dpi = resolution_;
    int config_color = color_mode_ ==COLOR ? 1:0;
    StopWatch swwv4l2open;
    video->HtCamSetClolr(color_mode_);
    video->HtCamSetDpi(resolution_);
    FPGAConfigParam fpgaparam = GetFpgaparam(config_dpi, config_color);
    video->HtCamSetSpTime(fpgaparam.Sp,fpgaparam.MaxExp);
    if (color_mode_)
    {
        video->HtCamSetSpTime2(fpgaparam.Sp * 3);
    }
    video->HtCamSetStSp(fpgaparam.MaxBright);
    configFPGAParam(config_color, config_dpi);
    printf(" -----------------------resolution = %d config_color = %d config_dpi:%d------------------\r\n",resolution_, config_color,config_dpi);
    {
        // calibration captures exactly one frame per iteration
        int val = 1;
        video->HtCamSetFrameCnt(val);
        printf(" -----------------------设置帧数:%d------------------\r\n",val);
    }
    printf("颜色模式:%s\r\n",color_mode_== COLOR ? "彩色":"灰色");
    printf("分辨率:%d\r\n",resolution_);
    printf("V4L2宽:%d 高:%d\r\n",v4l2_width_,v4l2_height_);
    printf("像素宽:%d 高: %d\r\n",pixels_width_,pixels_height_);
    printf("color_mode_:%d\r\n", color_mode_);
    printf("paper_size_:%d\r\n", paper_size_);
    printf("paper_size_:%d\r\n", paper_size_);
    int ret = video->open_device(v4l2_width_,v4l2_height_);
    if(ret < -1)
        return;
    // drain any stale buffered frames (500 ms timeout per read)
    int i = 1 ;
    char *buf = NULL;
    while (i >= 0)
    {
        i = video->HtCamReadCaptureFrame((void **)&buf, 500);
    }
    printf("opened video with width = %d height = %d time eplased = %.2f pbuffer = %p \n", width, 60 * 2, swwv4l2open.elapsed_ms(),buf);
}

/// Iterates saveLutImg for one dpi/color combo until convergence:
/// dark field first (exposures forced to zero), then bright field.
void MultiFrameCapture::creatcorrectconfig(int dpi, int mode)
{
    openDevice(dpi, mode);
    bool isDone = false;
    formatStep();
    int i = 0;
    radio = 1;
    while (!isDone) // dark field first
    {
        //break ;
        string log = "==============================第" + to_string(i) + "次===============================\r\n";
        // ftt.append_log(log);
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
        int config_dpi = resolution_;
        int config_color = color_mode_ ==COLOR ? 1:0;
        configFPGAParam(mode, dpi);
        // ftt.append_log(log);
        printf("log :%s\r\n",log.c_str());
        std::this_thread::sleep_for(std::chrono::milliseconds(5));
        // zero both sides' exposure so the frame is a pure dark-field sample
        unsigned int F[3]={0,0 ,0};
        video->HtCamChangeExposureValueF(F);
        video->HtCamChangeExposureValueB(F);
        std::this_thread::sleep_for(std::chrono::milliseconds(5));
        video->HtCamStartVideoCapturing();
        std::this_thread::sleep_for(std::chrono::milliseconds(5));
        isDone = saveLutImg(dpi, mode, true); // 0 color_black 1 color_white 2 gray_balck 3 gray_white
        video->HtCamStopVideoCapturing();
        this_thread::sleep_for(std::chrono::milliseconds(200));
        i++;
    }
    isDone = false;
    formatStep();
    while (!isDone) // then bright field
    {
        string log = "==============================第" + to_string(i) + "次===============================\r\n";
        if (m_glue.m_deviceevent)
            m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
        configFPGAParam(mode, dpi);
        printf("log :%s\r\n", log.c_str());
        std::this_thread::sleep_for(std::chrono::milliseconds(5));
        video->HtCamStartVideoCapturing();
        std::this_thread::sleep_for(std::chrono::milliseconds(5));
        isDone = saveLutImg(dpi, mode, false); // 0 color_black 1 color_white 2 gray_balck 3 gray_white
        video->HtCamStopVideoCapturing();
        this_thread::sleep_for(std::chrono::milliseconds(200));
        i++;
    }
    printf("creatcorrectconfig %s \n", (mode == IMAGE_COLOR ? " Color" : " Gray"));
    //creatLUTData(dpi, mode);
    video->close_device();
}

/// Registers the callback that receives the estimated pass-duration value
/// computed on the second captured frame.
void MultiFrameCapture::set_image_callback(image_callback cb)
{
    image_cb_ = cb;
}

/// Blocks until snaprun's second-frame handler signals img_wait_.
void MultiFrameCapture::wait_image_notify()
{
    std::unique_lock lock(img_lock_);
    img_wait_.wait(lock);
}