调整校正和出图流程 (Adjust the calibration and image-output workflow)

modehua 2023-08-14 18:50:06 -07:00
parent 9164b459a6
commit 4aef383168
18 changed files with 797 additions and 1151 deletions

View File

@ -11,8 +11,8 @@ CImageMerge::~CImageMerge()
 cv::Mat CImageMerge::MergeImage(cv::Mat &srcMat, int dstwidth, int dstheight, int mode)
 {
     cv::Mat dst(srcMat.rows, srcMat.cols / (mode == 0 ? 1 : 3), CV_8UC(mode == 0 ? 1 : 3));
-    return dst;
-    auto graymerge = [](cv::Mat& src, cv::Mat dst)->cv::Mat{
+    auto graymerge = [](cv::Mat& src, cv::Mat dst)->cv::Mat
+    {
         int width_block = src.cols / 12;
         int heigh_block = src.rows;

View File

@ -9,35 +9,35 @@ using namespace std;
 #define JSONPATH "/mnt/conf-disk/huago/cameraparam.json"
-#define TEXTLUT200COLORPATH "/usr/local/huago/Textlut200clr.bmp"
+#define TEXTLUT200COLORPATH "/mnt/conf-disk/huago/Textlut200clr.bmp"
-#define LUT200COLORPATH "/usr/local/huago/lut200clr.bmp"
+#define LUT200COLORPATH "/mnt/conf-disk/huago/lut200clr.bmp"
-#define LUT200_COLOR_BLACKPATH "/usr/local/huago/lut200clrbw.bmp"
+#define LUT200_COLOR_BLACKPATH "/mnt/conf-disk/huago/lut200clrbw.bmp"
-#define LUT200_COLOR_WHITEPATH "/usr/local/huago/lut200clrwhite.bmp"
+#define LUT200_COLOR_WHITEPATH "/mnt/conf-disk/huago/lut200clrwhite.bmp"
-#define TEXTLUT200GRAYPATH "/usr/local/huago/Textlut200gray.bmp"
+#define TEXTLUT200GRAYPATH "/mnt/conf-disk/huago/Textlut200gray.bmp"
-#define LUT200GRAYPATH "/usr/local/huago/lut200gray.bmp"
+#define LUT200GRAYPATH "/mnt/conf-disk/huago/lut200gray.bmp"
-#define LUT200_GRAY_BLACKPATH "/usr/local/huago/lut200graybw.bmp"
+#define LUT200_GRAY_BLACKPATH "/mnt/conf-disk/huago/lut200graybw.bmp"
-#define LUT200_GRAY_WHITEPATH "/usr/local/huago/lut200graywhite.bmp"
+#define LUT200_GRAY_WHITEPATH "/mnt/conf-disk/huago/lut200graywhite.bmp"
-#define TEXTLUT300COLORPATH "/usr/local/huago/Textlut300clr.bmp"
+#define TEXTLUT300COLORPATH "/mnt/conf-disk/huago/Textlut300clr.bmp"
-#define LUT300COLORPATH "/usr/local/huago/lut300clr.bmp"
+#define LUT300COLORPATH "/mnt/conf-disk/huago/lut300clr.bmp"
-#define LUT300_COLOR_BLACKPATH "/usr/local/huago/lut300clrbw.bmp"
+#define LUT300_COLOR_BLACKPATH "/mnt/conf-disk/huago/lut300clrbw.bmp"
-#define LUT300_COLOR_WHITEPATH "/usr/local/huago/lut300clrwhite.bmp"
+#define LUT300_COLOR_WHITEPATH "/mnt/conf-disk/huago/lut300clrwhite.bmp"
-#define TEXTLUT300GRAYPATH "/usr/local/huago/Textlut300gray.bmp"
+#define TEXTLUT300GRAYPATH "/mnt/conf-disk/huago/Textlut300gray.bmp"
-#define LUT300GRAYPATH "/usr/local/huago/lut300gray.bmp"
+#define LUT300GRAYPATH "/mnt/conf-disk/huago/lut300gray.bmp"
-#define LUT300_GRAY_BLACKPATH "/usr/local/huago/lut300graybw.bmp"
+#define LUT300_GRAY_BLACKPATH "/mnt/conf-disk/huago/lut300graybw.bmp"
-#define LUT300_GRAY_WHITEPATH "/usr/local/huago/lut300graywhite.bmp"
+#define LUT300_GRAY_WHITEPATH "/mnt/conf-disk/huago/lut300graywhite.bmp"
-#define LUT600COLORPATH "/usr/local/huago/lut600clr.bmp"
+#define LUT600COLORPATH "/mnt/conf-disk/huago/lut600clr.bmp"
-#define TEXTLUT600COLORPATH "/usr/local/huago/Textlut600clr.bmp"
+#define TEXTLUT600COLORPATH "/mnt/conf-disk/huago/Textlut600clr.bmp"
-#define LUT600_COLOR_BLACKPATH "/usr/local/huago/lut600clrbw.bmp"
+#define LUT600_COLOR_BLACKPATH "/mnt/conf-disk/huago/lut600clrbw.bmp"
-#define LUT600_COLOR_WHITEPATH "/usr/local/huago/lut600clrwhite.bmp"
+#define LUT600_COLOR_WHITEPATH "/mnt/conf-disk/huago/lut600clrwhite.bmp"
-#define LUT600GRAYPATH "/usr/local/huago/lut600gray.bmp"
+#define LUT600GRAYPATH "/mnt/conf-disk/huago/lut600gray.bmp"
-#define TEXTLUT600GRAYPATH "/usr/local/huago/Textlut600gray.bmp"
+#define TEXTLUT600GRAYPATH "/mnt/conf-disk/huago/Textlut600gray.bmp"
-#define LUT600_GRAY_BLACKPATH "/usr/local/huago/lut600graybw.bmp"
+#define LUT600_GRAY_BLACKPATH "/mnt/conf-disk/huago/lut600graybw.bmp"
-#define LUT600_GRAY_WHITEPATH "/usr/local/huago/lut600graywhite.bmp"
+#define LUT600_GRAY_WHITEPATH "/mnt/conf-disk/huago/lut600graywhite.bmp"
 CorrectParam::CorrectParam()
 {

View File

@ -56,12 +56,28 @@ typedef struct ADC_82V38_Timing
 #endif
 #ifdef ADC_82V48
+typedef union CIS_ADC_NEW
+{
+    struct adcTiming
+    {
+        uint32_t regData : 8;
+        uint32_t : 2;
+        uint32_t regAddr : 5;
+        uint32_t rwbit : 1;
+        uint32_t regData1 : 8;
+        uint32_t : 2;
+        uint32_t regAddr1 : 5;
+        uint32_t rwbit1 : 1;
+    } bit;
+    uint32_t value;
+};
 struct adcTiming
 {
-    uint32_t regData : 8;
+    uint32_t regData : 8;   // data byte
     uint32_t : 2;
-    uint32_t regAddr : 5;
-    uint32_t rwbit : 1;
+    uint32_t regAddr : 5;   // register address
+    uint32_t rwbit : 1;     // read/write: 1 = read, 0 = write
     uint32_t regData1 : 8;
     uint32_t : 2;
     uint32_t regAddr1 : 5;
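Note (illustration, not part of the commit): a minimal, self-contained sketch of how the CIS_ADC_NEW bit-fields above pack an ADC command into the 32-bit word handed to the FPGA. The exact bit placement depends on the compiler's bit-field ABI; the printed value assumes the usual little-endian, LSB-first layout.

#include <cstdint>
#include <cstdio>

// local copy of the union, for illustration only
typedef union
{
    struct
    {
        uint32_t regData  : 8;
        uint32_t          : 2;
        uint32_t regAddr  : 5;
        uint32_t rwbit    : 1;   // 0 = write, 1 = read
        uint32_t regData1 : 8;
        uint32_t          : 2;
        uint32_t regAddr1 : 5;
        uint32_t rwbit1   : 1;
    } bit;
    uint32_t value;
} AdcCmdDemo;

int main()
{
    AdcCmdDemo cmd{};            // zero-initialize the whole word
    cmd.bit.rwbit    = 0;        // write command
    cmd.bit.regAddr  = 0x03;
    cmd.bit.regData  = 0x90;
    cmd.bit.rwbit1   = 0;        // the same command is mirrored into the upper half-word
    cmd.bit.regAddr1 = 0x03;
    cmd.bit.regData1 = 0x90;
    printf("command word = 0x%08x\n", cmd.value);  // 0x0c900c90 with the assumed layout
    return 0;
}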
@ -84,7 +100,7 @@ HCamDevice::HCamDevice()
     videofd = 0;
     v4lWidth = 5184;  //@300dpi 5184 @600dpi 5184*2
-    v4lHeight = 512;  //* 3; color_h/gray_h = 3, e.g. a target output height of 128 needs 128 gray lines but 128*3 color lines from the FPGA
+    v4lHeight = 513;  //* 3; color_h/gray_h = 3, e.g. a target output height of 128 needs 128 gray lines but 128*3 color lines from the FPGA
     v4lBufferCount = 10;
     v4l2buftype = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
     nplanes = 1; // only use one plane
@ -110,7 +126,9 @@ HCamDevice::HCamDevice()
     HtCamGetFrameCnt(val1);
     //HtCamSwitchSampleModes(0);  // test data
-    // HtCamChangeExposureValue(500);  // exposure
+    unsigned int i[3] = {100, 200, 300};
+    HtCamChangeExposureValueF(i);  // exposure
+    HtCamChangeExposureValueB(i);
     // start sample
     camera_dbg("ST SP : %d , VSNP : %d \r\n", ST_SP, VSNP);
@ -853,44 +871,95 @@ void HCamDevice::HtCamSwitchSampleModes(uint8_t mode)
     pCamCtrlReg[4] |= (0x00020000);
 }
-void HCamDevice::HtCamChangeExposureValueF(uint32_t value)
+void HCamDevice::HtCamChangeExposureValueF(uint32_t* value)
 {
     if (virBaseAddr == NULL)
     {
         return ;
     }
     uint32_t *pCamCtrlReg = virBaseAddr;
-    // pCamCtrlReg[3] |= (uint32_t)(0x00006000);
-    pCamCtrlReg[5] = 0x00000000;
-    pCamCtrlReg[6] = 0x00000000;
-    pCamCtrlReg[5] |= (uint32_t)(value);        // RED
-    pCamCtrlReg[5] |= (uint32_t)(value << 16);
-    pCamCtrlReg[6] |= (uint32_t)(value);        // GREEN
-    pCamCtrlReg[6] |= ((uint32_t)value << 16);  // BLUE
+    pCamCtrlReg[5] = value[0];
+    CamZ_Reg_2Short rgb;
+    rgb.value = HtCamReadFpgaRegs(0x06);
+    rgb.NShort[0] = value[1];
+    rgb.NShort[1] = value[2];
+    HtCamWriteFpgaRegs(0x06, rgb.value);
+    printf("曝光F R:%d G:%d B:%d \r\n", value[0], value[1], value[2]);
 }
-void HCamDevice::HtCamChangeExposureValueB(uint32_t value)
+void HCamDevice::HtCamChangeExposureValueB(uint32_t* value)
 {
     if (virBaseAddr == NULL)
     {
         return ;
     }
     uint32_t *pCamCtrlReg = virBaseAddr;
-    // pCamCtrlReg[3] |= (uint32_t)(0x00006000);
-    pCamCtrlReg[11] = 0x00000000;
-    pCamCtrlReg[12] = 0x00000000;
-    pCamCtrlReg[11] |= (uint32_t)(value);        // RED
-    pCamCtrlReg[11] |= (uint32_t)(value << 16);
-    pCamCtrlReg[12] |= (uint32_t)(value);        // GREEN
-    pCamCtrlReg[12] |= ((uint32_t)value << 16);  // BLUE
+    pCamCtrlReg[11] = value[0];
+    CamZ_Reg_2Short rgb;
+    rgb.value = HtCamReadFpgaRegs(0x0C);
+    rgb.NShort[0] = value[1];
+    rgb.NShort[1] = value[2];
+    HtCamWriteFpgaRegs(0x0C, rgb.value);
+    printf("曝光B R:%d G:%d B:%d \r\n", value[0], value[1], value[2]);
 }
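Note (illustration, not from the commit): the new exposure setters take a three-element R/G/B array; red goes into its own control register, while green and blue are packed as the two 16-bit halves of FPGA register 0x06 (front) or 0x0C (back). The sketch below assumes CamZ_Reg_2Short is simply a union of one uint32_t and two uint16_t, which is how it is used above.

#include <cstdint>
#include <cstdio>

union Reg2ShortDemo { uint32_t value; uint16_t NShort[2]; };  // stand-in for CamZ_Reg_2Short

int main()
{
    uint32_t exposure[3] = {100, 200, 300};   // R, G, B - same shape as the array passed in HCamDevice()
    uint32_t red_reg = exposure[0];           // written directly to pCamCtrlReg[5] / [11]
    Reg2ShortDemo gb{};
    gb.NShort[0] = (uint16_t)exposure[1];     // green -> low half-word of reg 0x06 / 0x0C
    gb.NShort[1] = (uint16_t)exposure[2];     // blue  -> high half-word of reg 0x06 / 0x0C
    printf("red reg = %u, packed G/B word = 0x%08x\n", red_reg, gb.value);
    return 0;
}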
void HCamDevice::HtCamWriteADCReg_ALL(bool is_gain,bool is_adc1,uint8_t addr, uint8_t data)
{
if (virBaseAddr == NULL)
return ;
    if (is_gain)
    {
        addr++;
        addr *= 2;      // gain registers
    }
    else
        addr += 0x0e;   // offset registers
    printf("%s 地址 :%d 值%d \r\n", is_gain ? "增益" : "偏移", addr, data);
    uint32_t *pCamCtrlReg = virBaseAddr;
    uint32_t AdcRegFrame = 0x0000;
    uint32_t EnableAdc1Write = 0x2000;  // adc1 write enable
    uint32_t EnableAdc2Write = 0x4000;  // adc2 write enable
adcTiming *pAdcRegFrame = (adcTiming *)&AdcRegFrame;
// if (addr > 20)
// return;
pADCReg[addr] = data;
pAdcRegFrame->rwbit = 0;
pAdcRegFrame->regAddr = addr;
pAdcRegFrame->regData = data;
pAdcRegFrame->rwbit1 = 0;
pAdcRegFrame->regAddr1 = addr;
pAdcRegFrame->regData1 = data;
if (is_adc1) //adc 1
{
pCamCtrlReg[7] = (uint32_t)AdcRegFrame;
pCamCtrlReg[4] &= ~(EnableAdc1Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[4] |= (EnableAdc1Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
else // adc2
{
pCamCtrlReg[0] = (uint32_t)AdcRegFrame;
pCamCtrlReg[4] &= ~(EnableAdc2Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[4] |= (EnableAdc2Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
}
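Note (derived from HtCamWriteADCReg_ALL above, for clarity): the function maps a logical channel index onto a physical ADC register before writing, so callers can simply loop channels 0..5 for both gain and offset.

#include <cstdint>
#include <cstdio>

// same mapping as HtCamWriteADCReg_ALL: gain i -> (i + 1) * 2, offset i -> i + 0x0e
static uint8_t adc_reg_for(bool is_gain, uint8_t channel)
{
    return is_gain ? (uint8_t)((channel + 1) * 2) : (uint8_t)(channel + 0x0e);
}

int main()
{
    for (uint8_t i = 0; i < 6; i++)
        printf("channel %u: gain reg 0x%02x, offset reg 0x%02x\n",
               i, adc_reg_for(true, i), adc_reg_for(false, i));
    return 0;
}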
 void HCamDevice::HtCamWriteADCReg(uint8_t addr, uint8_t data)
 {
     if (virBaseAddr == NULL)
@ -929,6 +998,41 @@ void HCamDevice::HtCamWriteADCReg(uint8_t addr, uint8_t data)
     std::this_thread::sleep_for(std::chrono::milliseconds(1));
 }
// void HCamDevice::HtCamReadADCReg_ALL(booluint8_t addr, uint8_t *data)
// {
// if (virBaseAddr == NULL)
// {
// return ;
// }
// uint32_t *pCamCtrlReg = virBaseAddr;
// uint32_t AdcRegFrame = 0x0000;
// uint32_t EnableAdcWrite = (1 << 14);
// uint32_t tempData;
// adcTiming *pAdcRegFrame = (adcTiming *)&AdcRegFrame;
// // if (addr > 0x14)
// // return;
// pAdcRegFrame->rwbit = 1;
// pAdcRegFrame->regAddr = addr;
// pAdcRegFrame->regData = 0;
// pAdcRegFrame->rwbit1 = 1;
// pAdcRegFrame->regAddr1 = addr;
// pAdcRegFrame->regData1 = 0;
// pCamCtrlReg[7] = (uint32_t)AdcRegFrame;
// pCamCtrlReg[4] &= ~(EnableAdcWrite);
// std::this_thread::sleep_for(std::chrono::milliseconds(5));
// pCamCtrlReg[4] |= (EnableAdcWrite);
// std::this_thread::sleep_for(std::chrono::milliseconds(10));
// tempData = pCamCtrlReg[2];
// uint8_t value = (tempData >> 4) & (0xFF);
// (*data) = value;
// }
 void HCamDevice::HtCamReadADCReg(uint8_t addr, uint8_t *data)
 {
     if (virBaseAddr == NULL)
@ -940,26 +1044,27 @@ void HCamDevice::HtCamReadADCReg(uint8_t addr, uint8_t *data)
     uint32_t AdcRegFrame = 0x0000;
     uint32_t EnableAdcWrite = (1 << 14);
     uint32_t tempData;
-    adcTiming *pAdcRegFrame = (adcTiming *)&AdcRegFrame;
+    CIS_ADC_NEW pAdcRegFrame;
     // if (addr > 0x14)
     //     return;
-    pAdcRegFrame->rwbit = 1;
-    pAdcRegFrame->regAddr = addr;
-    pAdcRegFrame->regData = 0;
-    pAdcRegFrame->rwbit1 = 1;
-    pAdcRegFrame->regAddr1 = addr;
-    pAdcRegFrame->regData1 = 0;
-    pCamCtrlReg[7] = (uint32_t)AdcRegFrame;
+    pAdcRegFrame.bit.rwbit = 1;
+    pAdcRegFrame.bit.regAddr = addr;
+    pAdcRegFrame.bit.regData = 0;
+    pAdcRegFrame.bit.rwbit1 = 1;
+    pAdcRegFrame.bit.regAddr1 = addr;
+    pAdcRegFrame.bit.regData1 = 0;
+    pCamCtrlReg[7] = pAdcRegFrame.value;
     pCamCtrlReg[4] &= ~(EnableAdcWrite);
     std::this_thread::sleep_for(std::chrono::milliseconds(5));
     pCamCtrlReg[4] |= (EnableAdcWrite);
     std::this_thread::sleep_for(std::chrono::milliseconds(10));
-    tempData = pCamCtrlReg[2];
-    uint8_t value = (tempData >> 4) & (0xFF);
-    (*data) = value;
+    pAdcRegFrame.value = HtCamReadFpgaRegs(0x07);
+    printf("pAdcRegFrame->value:%0x\r\n", pAdcRegFrame.value);
+    printf("pAdcRegFrame->regData :%d\r\n", pAdcRegFrame.bit.regData);
 }
 uint8_t HCamDevice::getADCReg(int addr)
@ -1004,12 +1109,12 @@ uint32_t HCamDevice::HtCamReadFpgaRegs(uint8_t reg_addr)
     return pCamCtrlReg[reg_addr] ;
 }
-void HCamDevice::HtCamSetSpTime(uint32_t reg_value)
+void HCamDevice::HtCamSetSpTime(uint32_t reg_value, uint32_t val)
 {
     CamZ_Reg_2Short sp_time;
     sp_time.value = HtCamReadFpgaRegs(0x03);
     sp_time.NShort[1] = reg_value;
-    sp_time.NShort[0] = reg_value - 100;
+    sp_time.NShort[0] = val;
     HtCamWriteFpgaRegs(0x03 , sp_time.value);
 }
 void HCamDevice::HtCamGetSpTime(uint32_t &reg_value)
@ -1025,7 +1130,7 @@ void HCamDevice::HtCamSetStSp(int start_smaple)
     //sp_time.NShort[1] = 0x00C8;  // color
     CamZ_Reg_2Short st_sp;
     st_sp.value = HtCamReadFpgaRegs(0x0d);
-    st_sp.NShort[0] = 0x0200;
+    //st_sp.NShort[0] = 0x0200;
     // if (color)
     //     st_sp.NShort[1] = 0x00C8;  // color
     // else
@ -1033,7 +1138,7 @@ void HCamDevice::HtCamSetStSp(int start_smaple)
     st_sp.NShort[1] = start_smaple;  // gray, 2023-8-3
-    //HtCamWriteFpgaRegs(0x0d , st_sp.value);
+    HtCamWriteFpgaRegs(0x0d , st_sp.value);
 }
 void HCamDevice::HtCamGetStSp(uint32_t &start_smaple)
@ -1224,6 +1329,8 @@ void HCamDevice::HtCamWriteAllADC()
     for (int index = 0; index < adcRegSize; index++)
     {
         HtCamWriteADCReg(index, pADCReg[index]);
+        //HtCamWriteADCRegA(index, pADCReg[index]);
+        //HtCamWriteADCRegB(index, pADCReg[index]);
     }
 }
@ -1265,14 +1372,19 @@ void HCamDevice::HtCamInitADCReg()
     /* gain */
     pADCReg[2] = 0x90;
     pADCReg[3] = 0x00;
     pADCReg[4] = 0x90;
     pADCReg[5] = 0x00;
     pADCReg[6] = 0x90;
     pADCReg[7] = 0x00;
     pADCReg[8] = 0x90;
     pADCReg[9] = 0x00;
     pADCReg[0xa] = 0x90;
     pADCReg[0xb] = 0x00;
     pADCReg[0xc] = 0x90;
     pADCReg[0xd] = 0x00;
     /* offset */

View File

@ -56,21 +56,39 @@ public:
     void HtCamSetFrameCnt(uint32_t val);           // set the number of frames to capture
     uint32_t HtCamReadFpgaRegs(uint8_t reg_addr);  // read an FPGA register
-    void HtCamSetSpTime(uint32_t reg_value);               // 0x03, internal trigger interval; affects image stretching
+    void HtCamSetSpTime(uint32_t reg_value, uint32_t val); // 0x03, internal trigger interval; affects image stretching
     void HtCamGetSpTime(uint32_t &reg_value);      // get the current sp time
     void HtCamSetStSp(int start_sample);           // 0x0d, set st_sp per color mode; affects image offset
     void HtCamGetStSp(uint32_t &start_sample);
     void HtCamSetVsnpTime(int color);
-    void HtCamChangeExposureValueF(unsigned int value); // set exposure values
-    void HtCamChangeExposureValueB(unsigned int value); // set exposure values
+    void HtCamChangeExposureValueF(uint32_t* value); // set exposure values (front)
+    void HtCamChangeExposureValueB(uint32_t* value); // set exposure values (back)
+    //void HtCamWriteADCReg(uint8_t addr, uint8_t data);  // write an ADC register
+    //void HtCamReadADCReg(uint8_t addr, uint8_t *data);  // read an ADC register
+    //////////////// ADC registers 0x00 - 0x14 ///////////////////
+    /////////////// 0x00 - 0x0d: for now only the high byte needs to be written; the low byte can be skipped
+    // 1. The ADC parameter configuration registers are split into two: 0x00 and 0x07
+    // 2. Each ADC configuration word is 32 bits wide; the first 8 bits carry the ADC value
+    // is_gain == true   set gain
+    // is_gain == false  set offset
+    // is_adc1 == true   configure adc1
+    // is_adc1 == false  configure adc2
+    // addr: ADC register address
+    // data: ADC register data
+    void HtCamWriteADCReg_ALL(bool is_gain, bool is_adc1, uint8_t addr, uint8_t data);
+    void stopFPGAScan();  // currently unused, 2023-8-2
+    void startFPGAScan(); // currently unused, 2023-8-2
 private:
     int init_fpga();  // initialize the register base address
     int uninit_fpga();
-    void stopFPGAScan();  // currently unused, 2023-8-2
-    void startFPGAScan(); // currently unused, 2023-8-2
     int HtCamWaitVideoCapture(int msTimeout);  // wait for an image signal
     void HtCamWriteFpgaRegs(uint8_t reg_addr, uint32_t reg_value);  // write a register

View File

@ -21,9 +21,8 @@ const int vsp_A = 45;
 const int vsp_B = 45;
 // using namespace cv;
-MultiFrameCapture::MultiFrameCapture(ScannerGlue glue,
-                                     std::shared_ptr<FpgaComm> fpga,
-                                     CISVendor vendor) : reset_pin(new GpioOut(Fpga_Reset)),
+MultiFrameCapture::MultiFrameCapture(ScannerGlue glue, std::shared_ptr<FpgaComm> fpga, CISVendor vendor)
+    : reset_pin(new GpioOut(Fpga_Reset)),
       fpgaLoad(new Gpio(Fpga_Load)),
       fpga_conf_initn(new Gpio(Fpga_InitN)),
       snaped_index(0),
@ -122,9 +121,6 @@ void MultiFrameCapture::stopsnap(bool autosize)
         video->HtCamStopSampling();
         b_stop_snap = true;
     }
-    //b_stop_snap =false;
-    //video->HtCamStopVideoCapturing();
 }
 void MultiFrameCapture::close()
@ -136,7 +132,7 @@ void MultiFrameCapture::close()
 int MultiFrameCapture::read(int addr)
 {
-    return m_capFpageregs->read(addr);
 }
 void *MultiFrameCapture::readFrameTest(int timeout)
@ -185,13 +181,13 @@ void MultiFrameCapture::UpdateScanParam(HG_ScanConfiguration config)
     video->HtCamSetDpi(resolution_);
     printf(" -----------------------snap dpi = %d resolution = %d------------------\r\n", config.params.dpi, config.params.isColor);
     FPGAConfigParam fpgaparam = GetFpgaparam(config.params.dpi, config.params.isColor);
-    video->HtCamSetSpTime(fpgaparam.Sp);                   // 2344 for gray // latest value 2650, 2023-8-10
+    video->HtCamSetSpTime(fpgaparam.Sp, fpgaparam.MaxExp); // 2344 for gray // latest value 2650, 2023-8-10
     // fpgaparam.Sp=0;
     // video->HtCamGetSpTime(fpgaparam.Sp);
-    printf(" -----------------------HtCamSetSpTime%d------------------ \r\n", fpgaparam.Sp);
+    printf(" -----------------------fpgaparam.Sp[1]%d fpgaparam.MaxExp[0]:%d------------------ \r\n", fpgaparam.Sp, fpgaparam.MaxExp);
     video->HtCamSetStSp(fpgaparam.MaxBright);
     printf(" -----------------------HtCamSetStSp%d------------------\r\n", fpgaparam.MaxBright);
+    configFPGAParam(0, 1);
     {
         int val = config.params.dpi == 3 ? 600 : (config.params.dpi == 2 ? 300 : 200);
@ -259,44 +255,6 @@ void MultiFrameCapture::fpgaReload()
bool MultiFrameCapture::capturerImage() bool MultiFrameCapture::capturerImage()
{ {
// if (m_config.params.pageSize == (int)PaperSize::G400_AUTO ||
// m_config.params.pageSize == (int)PaperSize::G400_MAXAUTO ||
// m_config.params.pageSize == (int)PaperSize::G400_MAXSIZE)
// {
// }
// else
// {
// int color = this->color();
// int channels = color ? 3 : 1;
// int width = this->width() * channels * 6;
// int dpi = m_config.params.dpi;
// for (int i = 0; i < frame_count; i++)
// {
// V4L2_DATAINFO_Ex frame_info;
// StopWatch sw_read;
// auto data = video->read_frame(150);
// printf("!!!!!!!!!!! read frame[%d] = %.2f \n", i, sw_read.elapsed_ms());
// frame_info.lost_frame = data ? false : true;
// frame_info.last_frame = (i == (frame_count - 1));
// frame_info.frame_index = i;
// if (data)
// {
// printf("+++++++ success: read frame[%d] \n", i);
// cv::Mat mat(FRAME_HEIGHT, width, CV_8UC1, data);
// StopWatch sw_clone;
// frame_info.mat = mat.clone();
// printf("!!!!!!!!!! Clone time = %0.3f \n",sw_clone.elapsed_ms());
// frame_info.pixtype = color;
// frame_info.dpi = dpi;
// frame_info.width = frame_info.height = 0; // 从mat信息中获取宽高信息
// }
// else
// {
// printf("------- error: lost frame[%d] \n", i);
// }
// m_frameinfos.Put(frame_info);
// }
// }
return true; return true;
} }
@ -376,108 +334,23 @@ void MultiFrameCapture::reload_fpga()
printf("reload done \n"); printf("reload done \n");
} }
void MultiFrameCapture::set_gain(int ix, int val)
{
for (int i = 0; i < 6; i++)
{
if (ix)
m_capFpageregs->setAGain(i, val);
else
m_capFpageregs->setBGain(i, val);
}
}
void MultiFrameCapture::set_offset(int ix, int val)
{
for (int i = 0; i < 6; i++)
{
if (ix)
m_capFpageregs->setAOffset(i, val);
else
m_capFpageregs->setBOffset(i, val);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
}
void MultiFrameCapture::set_expo(int ix, int val)
{
switch (ix)
{
case 0:
m_capFpageregs->setAExposureR(val);
break;
case 1:
m_capFpageregs->setAExposureG(val);
break;
case 2:
m_capFpageregs->setAExposureB(val);
break;
case 3:
m_capFpageregs->setBExposureR(val);
break;
case 4:
m_capFpageregs->setBExposureG(val);
break;
case 5:
m_capFpageregs->setBExposureB(val);
break;
default:
break;
}
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
 void MultiFrameCapture::configFPGAParam(int mode, int dpi)
 {
     FPGAConfigParam fpgaparam = GetFpgaparam(dpi, mode);
-    // m_capFpageregs->resetADC();
-    // std::this_thread::sleep_for(std::chrono::milliseconds(50));
-    int value_a, value_b;
+    video->HtCamChangeExposureValueF(fpgaparam.ExposureF);
+    video->HtCamChangeExposureValueB(fpgaparam.ExposureB);
     for (int i = 0; i < 6; i++)
     {
-        if (i < 3)
-        {
-            set_expo(i, fpgaparam.ExposureF[i]);
-            LOG("fpgaparam.ExposureF[%d] = %d \n", i, fpgaparam.ExposureF[i]);
-        }
-        else
-        {
-            set_expo(i, fpgaparam.ExposureB[i % 3]);
-            LOG("fpgaparam.ExposureB[%d] = %d\n", i % 3, fpgaparam.ExposureB[i % 3]);
-        }
-        int A_value, B_value;
-        std::this_thread::sleep_for(std::chrono::milliseconds(3));
-        m_capFpageregs->setAOffset(i, fpgaparam.OffsetF[i]);
-        LOG("fpgaparam.setAOffset[%d] = %d \n", i, fpgaparam.OffsetF[i]);
-        std::this_thread::sleep_for(std::chrono::milliseconds(3));
-        m_capFpageregs->getAOffset(i, A_value, B_value);
-        LOG("fpgaparam.getAOffset[%d] = A_value = %d B_value = %d \n", i, A_value, B_value);
-        std::this_thread::sleep_for(std::chrono::milliseconds(3));
-        m_capFpageregs->setBOffset(i, fpgaparam.OffsetB[i]);
-        LOG("fpgaparam.setBOffset[%d] = %d \n", i, fpgaparam.OffsetB[i]);
-        std::this_thread::sleep_for(std::chrono::milliseconds(3));
-        m_capFpageregs->getBOffset(i, A_value, B_value);
-        LOG("fpgaparam.getBOffset[%d] = A_value = %d B_value = %d \n", i, A_value, B_value);
-        std::this_thread::sleep_for(std::chrono::milliseconds(3));
-        m_capFpageregs->setAGain(i, fpgaparam.GainF[i]);
-        LOG("fpgaparam.GainF[%d] = %d\n", i, fpgaparam.GainF[i]);
-        std::this_thread::sleep_for(std::chrono::milliseconds(3));
-        m_capFpageregs->setBGain(i, fpgaparam.GainB[i]);
-        LOG("fpgaparam.GainB[%d] = %d\n", i, fpgaparam.GainB[i]);
-        std::this_thread::sleep_for(std::chrono::milliseconds(3));
-        // m_capFpageregs->getAGain(i,A_value,B_value);
-        // LOG("fpgaparam.getAGain[%d] = A_value = %d B_value = %d \n", i, A_value,B_value);
-        // std::this_thread::sleep_for(std::chrono::milliseconds(3));
-        // m_capFpageregs->getBGain(i,A_value,B_value);
-        // LOG("fpgaparam.getBGain[%d] = A_value = %d B_value = %d \n", i, A_value,B_value);
-        // std::this_thread::sleep_for(std::chrono::milliseconds(3));
+        video->HtCamWriteADCReg_ALL(true,  true,  i, fpgaparam.GainF[i]);
+        video->HtCamWriteADCReg_ALL(false, true,  i, fpgaparam.OffsetF[i]);
+        video->HtCamWriteADCReg_ALL(true,  false, i, fpgaparam.GainB[i]);
+        video->HtCamWriteADCReg_ALL(false, false, i, fpgaparam.OffsetB[i]);
     }
 };
int MultiFrameCapture::width() int MultiFrameCapture::width()
{ {
@ -496,10 +369,6 @@ int MultiFrameCapture::color()
// return m_capFpageregs->getColorMode(); // return m_capFpageregs->getColorMode();
} }
// int MultiFrameCapture::imageProcessCurrentFrame()
// {
// }
#include "bmp.h" #include "bmp.h"
static int cnt = 0; static int cnt = 0;
@ -516,7 +385,7 @@ void MultiFrameCapture::snaprun()
return ret; return ret;
} }
uint32_t sendLine = video->HtCamReadFpgaRegs(0x000e); uint32_t sendLine = video->HtCamReadFpgaRegs(0x000e);////0x000e 取出来的实际行数
printf("--------------fpga send line ------------:%d\r\n",sendLine); printf("--------------fpga send line ------------:%d\r\n",sendLine);
if (data) if (data)
{ {
@ -525,7 +394,7 @@ void MultiFrameCapture::snaprun()
else else
frame_info.first_frame = false; frame_info.first_frame = false;
frame_info.last_frame = num == ret+1 ? true:false; frame_info.last_frame = num == ret + 1 ? true:false;
printf("获取数据 width:%d height:%d is_first:%d is_last:%d\r\n",frame_info.width,frame_info.height,frame_info.first_frame,frame_info.last_frame); printf("获取数据 width:%d height:%d is_first:%d is_last:%d\r\n",frame_info.width,frame_info.height,frame_info.first_frame,frame_info.last_frame);
cv::Mat mat = cv::Mat(frame_info.height, frame_info.width, CV_8UC1, data, cv::Mat::AUTO_STEP); cv::Mat mat = cv::Mat(frame_info.height, frame_info.width, CV_8UC1, data, cv::Mat::AUTO_STEP);
@ -537,7 +406,6 @@ void MultiFrameCapture::snaprun()
// } // }
frame_info.mat = mat.clone(); frame_info.mat = mat.clone();
m_frameinfos.Put(frame_info); m_frameinfos.Put(frame_info);
} }
return ret; return ret;
}; };
@ -548,29 +416,27 @@ void MultiFrameCapture::snaprun()
m_cv_snap.wait(lock); m_cv_snap.wait(lock);
V4L2_DATAINFO_Ex frame_info; V4L2_DATAINFO_Ex frame_info;
int channels = 1;
frame_info.pixtype = color_mode_; frame_info.pixtype = color_mode_;
frame_info.dpi = resolution_; frame_info.dpi = resolution_;
frame_info.width = pixels_width_; frame_info.width = pixels_width_;
frame_info.height = pixels_height_; frame_info.height = pixels_height_;
frame_info.error_code = 0; frame_info.error_code = 0;
frame_info.snaped_index = snaped_index; frame_info.snaped_index = snaped_index;
frame_info.first_frame = false; frame_info.first_frame = false;
frame_info.last_frame = false; frame_info.last_frame = false;
int channels = color_mode_ == 1 ? 3 : 1;
int color_mode = video->HtCamGetColorMode(); int color_mode = video->HtCamGetColorMode();
int count = 1; int func_sig = 0;
int time_out = color_mode == 1 ? 1000 : 800;
int time_out_cnt = 0;
uint32_t frame_num = 0; uint32_t frame_num = 0;
uint32_t frame_cnt = 0; uint32_t frame_cnt = 0;
video->HtCamGetFrameCnt(frame_cnt); video->HtCamGetFrameCnt(frame_cnt);
frame_info.frame_index = frame_cnt; frame_info.frame_index = frame_cnt;
int func_sig = 0;
int time_out = color_mode_ == 1 ? 800 : 800;
int time_out_cnt = 0;
printf("--------------------- frame_info.width ------------------ :%d\r\n",frame_info.width ); printf("--------------------- frame_info.width ------------------ :%d\r\n",frame_info.width );
printf("--------------------- frame_info.height ------------------ :%d\r\n",frame_info.height ); printf("--------------------- frame_info.height ------------------ :%d\r\n",frame_info.height );
@ -581,9 +447,7 @@ void MultiFrameCapture::snaprun()
for (size_t i = 1; i <= frame_cnt; i++) for (size_t i = 1; i <= frame_cnt; i++)
{ {
printf("***********设置的帧数:%d 正在采集第[%d]帧************\r\n",frame_cnt,i); printf("***********设置的帧数:%d 正在采集第[%d]帧************\r\n",frame_cnt,i);
frame_info.last_frame = frame_cnt == i ? true : false; frame_info.last_frame = frame_cnt == i ? true : false;
//frame_info.frame_index = i;
func_sig = snap_func(frame_info, channels,frame_cnt,time_out); func_sig = snap_func(frame_info, channels,frame_cnt,time_out);
if (func_sig == -1 ) //当前帧取图超时,在取一次!!! 一直超时 不就卡死了??? 这个地方还是需要加个时间限制几秒内一帧未取出就退了,返回异常状态吧? if (func_sig == -1 ) //当前帧取图超时,在取一次!!! 一直超时 不就卡死了??? 这个地方还是需要加个时间限制几秒内一帧未取出就退了,返回异常状态吧?
@ -621,9 +485,7 @@ void MultiFrameCapture::snaprun()
} }
} }
video->HtCamStopVideoCapturing(); video->HtCamStopVideoCapturing();
//iImageremain++;
printf("----------停止采集图像 ----------\r\n"); printf("----------停止采集图像 ----------\r\n");
m_cv_snapdone.notify_all(); m_cv_snapdone.notify_all();
@ -661,18 +523,14 @@ void MultiFrameCapture::procimage()
info.last_frame = frame.last_frame; info.last_frame = frame.last_frame;
info.index_frame = frame.frame_index; info.index_frame = frame.frame_index;
info.data_type = 0; info.data_type = 0;
//info.DataLength = frame.width *frame.height;
info.width = frame.width; info.width = frame.width;
info.height = frame.height; info.height = frame.height;
// cv::Mat mat = frame.mat.clone();
printf("获取数据2222 width:%d height:%d is_first:%d is_last:%d DataLength:%d\r\n",frame.width,frame.height,info.first_frame,info.last_frame,info.DataLength);
printf("获取数据 width:%d height:%d is_first:%d is_last:%d DataLength:%d\r\n",frame.width,frame.height,info.first_frame,info.last_frame,info.DataLength);
cv::imwrite("/home/root/opencv"+to_string(cnt_++)+".bmp",frame.mat); //cv::imwrite("/home/root/opencv"+to_string(cnt_++)+".bmp",frame.mat);
m_glue.m_imageready(info); m_glue.m_imageready(info);
//iImageremain--;
} }
continue; continue;
} }
@ -680,268 +538,295 @@ void MultiFrameCapture::procimage()
bool MultiFrameCapture::saveLutImg(int dpi, int mode, bool black) bool MultiFrameCapture::saveLutImg(int dpi, int mode, bool black)
{ {
// int config_dpi = dpi == 1 ? 2 : dpi; int config_dpi = dpi == 1 ? 2 : dpi;
// const int offset_indexs[] = {0, 1, 2, 5, 4, 3, 3, 4, 5, 2, 1, 0}; const int offset_indexs[] = {3, 4, 5, 2, 1, 0,0, 1, 2, 5, 4, 3};
// int channels = mode == IMAGE_COLOR ? 3 : 1; int channels = mode == IMAGE_COLOR ? 3 : 1;
// int height = 60; int height = 60;
// int width = config_dpi == 0x02 ? 864 : (config_dpi == 0x03 ? 1728 : 864); int width = config_dpi == 0x02 ? 864 : (config_dpi == 0x03 ? 1728 : 864);
// int orgimgwidth = width * 2 * 3 * channels; int orgimgwidth = width * 2 * 3 * channels;
// int dstwidth = width * 2 * 3;
// bool isNeedSave = true;
// string log;
// void *data = video->read_frame(1000);
// if (data == NULL)
// {
// isNeedSave = false;
// log = "WARNNING WARNNING WARNNING FAILDED TO READ IMAGE DATA !!!!!!!!!!!!!!!!!!!\r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// return isNeedSave;
// }
// cv::Mat src(height, orgimgwidth, CV_8UC1, data); int dstwidth = width * 2 * 3;
// CImageMerge t_marge;
// cv::Mat mrgmat = t_marge.MergeImage(src, dstwidth, height, mode); bool isNeedSave = true;
// printf("mrgmat width = %d height = %d \n", mrgmat.cols, mrgmat.rows); string log;
// static int inx = 0;
// //imwrite(to_string(++inx)+".jpg",mrgmat);
// // return 0;
// FPGAConfigParam param = GetFpgaparam(dpi, mode);
// if (black) // 暗场
// {
// volatile double offValues[12]{0};
// int blockcount = 12;
// int bandwidth = mrgmat.cols / blockcount;
// for (int n = 0; n < blockcount; n++)
// {
// cv::Mat img = mrgmat(cv::Rect(bandwidth * n, 10, bandwidth, mrgmat.rows - 10)).clone();
// cv::Scalar mean = cv::mean(img);
// // printf("band[%d] mean = %0.2f bandwidth = %d \n", n, mean.val[0],bandwidth);
// offValues[n] = mean.val[0];
// }
// for (int s = 0; s < 2; s++) unsigned char *data = NULL;
// { int ret = video->HtCamReadCaptureFrame((void **)&data, 1000);
// unsigned int offsets[6]; // = (int *)(s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]);
// memcpy(offsets, (s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]), sizeof(param.OffsetF));
// for (int j = 0; j < 6; j++)
// {
// int k = s * 6 + j;
// // double diff = BLACK_DIFF(offValues[k]);
// double diff = 8 - offValues[k];
// double step = radio * diff;
// // int preStep = offsetStep[k];
// // if (step * preStep < 0)
// // {
// // //step = 0 - preStep / 2;
// // step /= 2;
// // }
// // else
// // {
// // radio = 1;
// // }
// if (step < 1 && step > 0.5) if (data == NULL)
// step = 1; {
// if (step < -0.5 && step > -1) isNeedSave = false;
// step = -1; log = "WARNNING WARNNING WARNNING FAILDED TO READ IMAGE DATA !!!!!!!!!!!!!!!!!!!\r\n";
// // FMT_STEP(step); if (m_glue.m_deviceevent)
// bool isMinStep = abs(step) == 1 && step == offsetStep[k]; m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// bool isOutBounds = offsets[j] >= 255 && step > 0; return isNeedSave;
// isOutBounds |= offsets[j] <= 0 && step < 0; }
// log += " 暗场校正 :" + std::to_string(k) + ";diff:" + std::to_string(diff) + ";light:" + std::to_string(offValues[k]) + ";offset:" + std::to_string(offsets[j]) + ";step:" + std::to_string(step) + "\r\n";
// if (isOutBounds)
// log += " 第" + std::to_string(k) + "条带暗场校正异常,暗场值无法降低 \r\n";
// else if (abs(step) > 1 || isMinStep)
// {
// offsetStep[k] = (int)(step);
// printf(" k = %d offsets[%d] = %d step = %f mean = %f\n", k, offset_indexs[k], offsets[offset_indexs[k]], step, offValues[k]);
// offsets[offset_indexs[k]] += step;
// log += "offsetStep" + std::to_string(k) + " = " + std::to_string(offsetStep[k]) + ", offset_indexs" + std::to_string(k) + " =" + std::to_string(offset_indexs[k]) + "\r\n";
// if (offsets[offset_indexs[k]] < 1) cv::Mat src(height, orgimgwidth, CV_8UC1, data);
// offsets[offset_indexs[k]] = 1; CImageMerge t_marge;
// if (offsets[offset_indexs[k]] > 255) cv::Mat mrgmat = src;//t_marge.MergeImage(src, dstwidth, height, mode);
// offsets[offset_indexs[k]] = 255;
// isNeedSave = false;
// }
// log += (s == 0 ? "彩色正面" : "彩色背面");
// log += "偏移值:" + std::to_string(offsets[0]) + "," + std::to_string(offsets[1]) + "," + std::to_string(offsets[2]) + "," + std::to_string(offsets[3]) + "," + std::to_string(offsets[4]) + "," + std::to_string(offsets[5]) + "\r\n";
// // log += (s == 0 ? "彩色正面暗场校正完成 \r\n" : "彩色背面暗场校正完成 \r\n");
// // ftt.append_log(log);
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// log = "";
// }
// memcpy((s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]), offsets, sizeof(param.OffsetF));
// // memset(&param.OffsetF[0],0,sizeof(param.OffsetF));
// // memset(&param.OffsetB[0],0,sizeof(param.OffsetF));
// // param.OffsetB[5] =255;
// // param.OffsetF[4] =255;
// }
// if (isNeedSave) printf("mrgmat width = %d height = %d \n", mrgmat.cols, mrgmat.rows);
// { static int inx = 0;
// printf("Save LUT image path :%s \n", param.Flat_BwPath.c_str()); imwrite(to_string(++inx)+".bmp",mrgmat);
// log = "暗场校正完成 \r\n"; return 1;
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// log = "";
// imwrite(param.Flat_BwPath, mrgmat);
// }
// }
// else // 明场
// {
// if (mode == IMAGE_COLOR)
// {
// volatile double values[2][3];
// cv::Scalar a = mean(mrgmat(Rect(0, 0, mrgmat.cols / 2, mrgmat.rows)));
// cv::Scalar b = mean(mrgmat(Rect(mrgmat.cols / 2, 0, mrgmat.cols / 2, mrgmat.rows)));
// for (char j = 0; j < 3; j++)
// {
// values[0][j] = a.val[j];
// values[1][j] = b.val[j];
// printf("values[0][%d] = %.2f values[1][%d] = %.2f\n", j,values[0][j], j,values[1][j]);
// }
// log = "开始彩色明场校正 \r\n"; FPGAConfigParam param = GetFpgaparam(dpi, mode);
// if (m_glue.m_deviceevent) if (black) // 暗场
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log); {
// for (int s = 0; s < 2; s++) volatile double offValues[12]{0};
// { double offValues_min[12]{0};
// volatile int exposures[3]; // = (int *)(s == 0 ? param.ExposureF : param.ExposureB); int blockcount = 12;
// memcpy((void *)exposures, (s == 0 ? &param.ExposureF[0] : &param.ExposureB[0]), sizeof(param.ExposureB)); int bandwidth = mrgmat.cols / blockcount;
// for (int x = 0; x < 3; x++) for (int n = 0; n < blockcount; n++)
// { {
// int k = (3 * s + x); cv::Mat img = mrgmat(cv::Rect(bandwidth * n, 10, bandwidth, mrgmat.rows - 10)).clone();
// //int diff = LIGHT_DIFF(*((double *)values + k)); cv::Scalar mean = cv::mean(img);
// int diff = param.MaxBright - *((double *)values + k); offValues[n] = mean.val[0];
// log += " 明场:" + std::to_string(k) + ";diff" + std::to_string(diff) + "\r\n"; if(mode)
{
auto tmp = *std::min_element(img.begin<cv::Vec3b>(),
img.end<cv::Vec3b>(),[](cv::Vec3b a,cv::Vec3b b)->bool{return (a[0]+a[1]+a[2]) < (b[0]+b[1]+b[2]) ;});
offValues_min[n] = (tmp[0]+tmp[1]+tmp[2])/3.0;
}
else
{
offValues_min[n] = *std::min_element(img.begin<std::uint8_t>(),
img.end<std::uint8_t>(),[](std::uint8_t a,std::uint8_t b)->bool{return a < b;});
}
printf("band[%d] mean = %0.2f bandwidth = %d offValues_min [%d] = %.2f \n", n, mean.val[0],bandwidth,n,offValues_min[n] );
}
//return 0;
// double step = diff * radio; for (int s = 0; s < 2; s++)
// int preStep = *((int *)expStep + k); {
// if (step * preStep < 0) unsigned int offsets[6]; // = (int *)(s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]);
// { memcpy(offsets, (s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]), sizeof(param.OffsetF));
// step = 0 - preStep / 2; for (int j = 0; j < 6; j++)
// } {
// if (step < 1 && step > 0) int k = s * 6 + j;
// step = 1; // double diff = BLACK_DIFF(offValues[k]);
// if (step < 0 && step > -1) //double diff = 8 - offValues[k];
// step = -1; double diff = 3-offValues_min[k];
if(offValues[k] > 25)
{
diff = 25 -offValues[k];
}
double step = radio * diff;
// int preStep = offsetStep[k];
// if (step * preStep < 0)
// {
// //step = 0 - preStep / 2;
// step /= 2;
// }
// else
// {
// radio = 1;
// }
// bool isMinStep = abs(step) <= 2 && step == *((int *)expStep + k); if (step < 1 && step > 0.5)
// bool isOutBounds = exposures[x] >= (param.Sp - 5) && step > 0; step = 1;
// isOutBounds |= exposures[x] <= 0 && step < 0; if (step < -0.5 && step > -1)
// if (isOutBounds) step = -1;
// log += " 第" + to_string(x) + "个明场校正异常 \r\n"; // FMT_STEP(step);
// else if (abs(diff) >= 1 || isMinStep) bool isMinStep = abs(step) == 1 && step == offsetStep[k];
// { bool isOutBounds = offsets[j] >= 255 && step > 0;
// *((int *)expStep + k) = (int)(step);
// exposures[x] += step;
// if (exposures[x] > (param.Sp - 5))
// {
// exposures[x] = (param.Sp - 5);
// }
// if (exposures[x] < 0)
// exposures[x] = 0;
// isNeedSave = false;
// }
// log += " 曝光值:" + to_string(exposures[x]) + "\r\n";
// log += " 调整步长:" + to_string(*((int *)expStep + k)) + "\r\n";
// }
// memcpy((s == 0 ? &param.ExposureF[0] : &param.ExposureB[0]), (void *)exposures, sizeof(param.ExposureB));
// }
// // ftt.append_log(log);
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// if (isNeedSave) printf("\r\n");
// { isOutBounds |= offsets[j] <= 0 && step < 0;
// log = "彩色明场校正完成\r\n"; log += " 暗场校正 :" + std::to_string(k) + ";diff:" + std::to_string(diff) + ";light:" + std::to_string(offValues[k]) + ";offset:" + std::to_string(offsets[j]) + ";step:" + std::to_string(step) + "\r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log); if (isOutBounds)
// log = ""; log += "" + std::to_string(k) + "条带暗场校正异常,暗场值无法降低 \r\n";
// imwrite(param.Flat_WhitePath, mrgmat); else if (abs(step) > 1 || isMinStep)
// } {
// } offsetStep[k] = (int)(step);
// else printf(" k = %d offsets[%d] = %d step = %f mean = %f\n", k, offset_indexs[k], offsets[offset_indexs[k]], step, offValues[k]);
// { offsets[offset_indexs[k]] += step;
// double values[2]; log += "offsetStep" + std::to_string(k) + " = " + std::to_string(offsetStep[k]) + ", offset_indexs" + std::to_string(k) + " =" + std::to_string(offset_indexs[k]) + "\r\n";
// values[0] = cv::mean(mrgmat(cv::Rect(0, 0, mrgmat.cols / 2, mrgmat.rows))).val[0];
// values[1] = cv::mean(mrgmat(cv::Rect(mrgmat.cols / 2, 0, mrgmat.cols / 2, mrgmat.rows))).val[0];
// printf("values[0] = %.2f values[1] = %.2f\n", values[0], values[1]);
// log = "-----开始灰色明场校正-----\r\n";
// log += " 灰色扫描灰度明场均值:" + to_string(values[0]) + "," + to_string(values[1]) + "\r\n";
// if (m_glue.m_deviceevent)
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// for (int s = 0; s < 2; s++)
// {
// int *exposures = (int *)(s == 0 ? param.ExposureF : param.ExposureB);
// //int diff = LIGHT_DIFF(values[s]);
// int diff = param.MaxBright - values[s];
// double step = diff * radio;
// log += " 明场:" + to_string(s) + ";diff" + to_string(diff) + "\r\n";
// int preStep = expStep[s][0];
// if (step * preStep < 0)
// {
// step = 0 - preStep / 2;
// }
// else
// {
// radio = 1;
// }
// if (step < 1 && step > 0)
// step = 1;
// if (step < 0 && step > -1)
// step = -1;
// int exp = *(exposures + 1); if (offsets[offset_indexs[k]] < 1)
// std::string ss1(string_format("exp[%d] = %d step = %.3f \r\n", s, exp, step)); offsets[offset_indexs[k]] = 1;
// log += ss1; if (offsets[offset_indexs[k]] > 255)
// bool isMinStep = abs(step) <= 2 && step == expStep[s][0]; offsets[offset_indexs[k]] = 255;
// bool isOutBounds = exp >= (param.Sp - 5) && step > 0; isNeedSave = false;
// isOutBounds |= exp <= 0 && step < 0; }
// if (isOutBounds) log += (s == 0 ? "彩色正面" : "彩色背面");
// log += " 第" + to_string(s) + "个明场校正异常 \r\n"; log += "偏移值:" + std::to_string(offsets[0]) + "," + std::to_string(offsets[1]) + "," + std::to_string(offsets[2]) + "," + std::to_string(offsets[3]) + "," + std::to_string(offsets[4]) + "," + std::to_string(offsets[5]) + "\r\n";
// else if (abs(diff) > 1 || isMinStep) // log += (s == 0 ? "彩色正面暗场校正完成 \r\n" : "彩色背面暗场校正完成 \r\n");
// { // ftt.append_log(log);
// exp += step; if (m_glue.m_deviceevent)
// if (exp < 0) m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// exp = 0; log = "";
// if (exp > (param.Sp - 5)) }
// exp = (param.Sp - 5); memcpy((s == 0 ? &param.OffsetF[0] : &param.OffsetB[0]), offsets, sizeof(param.OffsetF));
// memset(&param.OffsetF[0],0,sizeof(param.OffsetF));
// memset(&param.OffsetB[0],0,sizeof(param.OffsetF));
// param.OffsetB[5] =255;
// param.OffsetF[4] =255;
}
// float coffe[3] = {1, 1, 1}; // 0.2, 1,0.51 if (isNeedSave)
// for (int k = 0; k < 3; k++) {
// { printf("Save LUT image path :%s \n", param.Flat_BwPath.c_str());
// *(exposures + k) = (int)(exp * coffe[k]); log = "暗场校正完成 \r\n";
// expStep[s][k] = (int)(step); if (m_glue.m_deviceevent)
// std::string exps(string_format("expStep[%d][%d] = %.3f\r\n", s, k, step)); m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// log += exps; log = "";
// std::string ss(string_format("exposures[%d] = %0.3f \r\n", k, exposures[k])); imwrite(param.Flat_BwPath, mrgmat);
// log += ss; }
// } }
// isNeedSave = false; else // 明场
// } {
// } if (mode == IMAGE_COLOR)
// // ftt.append_log(log); {
// if (m_glue.m_deviceevent) volatile double values[2][3];
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log); cv::Scalar a = cv::mean(mrgmat(cv::Rect(0, 0, mrgmat.cols / 2, mrgmat.rows)));
// if (isNeedSave) cv::Scalar b = cv::mean(mrgmat(cv::Rect(mrgmat.cols / 2, 0, mrgmat.cols / 2, mrgmat.rows)));
// { for (char j = 0; j < 3; j++)
// printf("Save LUT image path :%s \n", param.Flat_WhitePath.c_str()); {
// log = "灰度明场校正完成\r\n"; values[0][j] = a.val[j];
// if (m_glue.m_deviceevent) values[1][j] = b.val[j];
// m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log); printf("values[0][%d] = %.2f values[1][%d] = %.2f\n", j,values[0][j], j,values[1][j]);
// log = ""; }
// imwrite(param.Flat_WhitePath, mrgmat);
// } log = "开始彩色明场校正 \r\n";
// } if (m_glue.m_deviceevent)
// } m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
// SaveFpgaparam(param); for (int s = 0; s < 2; s++)
// printf("exit Save_lut \n"); {
// return isNeedSave; volatile int exposures[3]; // = (int *)(s == 0 ? param.ExposureF : param.ExposureB);
return 0; memcpy((void *)exposures, (s == 0 ? &param.ExposureF[0] : &param.ExposureB[0]), sizeof(param.ExposureB));
for (int x = 0; x < 3; x++)
{
int k = (3 * s + x);
//int diff = LIGHT_DIFF(*((double *)values + k));
int diff = param.MaxBright - *((double *)values + k);
log += " 明场:" + std::to_string(k) + ";diff" + std::to_string(diff) + "\r\n";
double step = diff * radio;
int preStep = *((int *)expStep + k);
if (step * preStep < 0)
{
step = 0 - preStep / 2;
}
if (step < 1 && step > 0)
step = 1;
if (step < 0 && step > -1)
step = -1;
bool isMinStep = abs(step) <= 2 && step == *((int *)expStep + k);
bool isOutBounds = exposures[x] >= (param.Sp - 5) && step > 0;
isOutBounds |= exposures[x] <= 0 && step < 0;
if (isOutBounds)
log += "" + to_string(x) + "个明场校正异常 \r\n";
else if (abs(diff) >= 1 || isMinStep)
{
*((int *)expStep + k) = (int)(step);
exposures[x] += step;
if (exposures[x] > (param.Sp - 5))
{
exposures[x] = (param.Sp - 5);
}
if (exposures[x] < 0)
exposures[x] = 0;
isNeedSave = false;
}
log += " 曝光值:" + to_string(exposures[x]) + "\r\n";
log += " 调整步长:" + to_string(*((int *)expStep + k)) + "\r\n";
}
memcpy((s == 0 ? &param.ExposureF[0] : &param.ExposureB[0]), (void *)exposures, sizeof(param.ExposureB));
}
// ftt.append_log(log);
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
if (isNeedSave)
{
log = "彩色明场校正完成\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
log = "";
imwrite(param.Flat_WhitePath, mrgmat);
}
}
else
{
double values[2];
values[0] = cv::mean(mrgmat(cv::Rect(0, 0, mrgmat.cols / 2, mrgmat.rows))).val[0];
values[1] = cv::mean(mrgmat(cv::Rect(mrgmat.cols / 2, 0, mrgmat.cols / 2, mrgmat.rows))).val[0];
printf("values[0] = %.2f values[1] = %.2f\n", values[0], values[1]);
log = "-----开始灰色明场校正-----\r\n";
log += " 灰色扫描灰度明场均值:" + to_string(values[0]) + "," + to_string(values[1]) + "\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
for (int s = 0; s < 2; s++)
{
int *exposures = (int *)(s == 0 ? param.ExposureF : param.ExposureB);
//int diff = LIGHT_DIFF(values[s]);
int diff = param.MaxBright - values[s];
double step = diff * radio;
log += " 明场:" + to_string(s) + ";diff" + to_string(diff) + "\r\n";
int preStep = expStep[s][0];
if (step * preStep < 0)
{
step = 0 - preStep / 2;
}
else
{
radio = 1;
}
if (step < 1 && step > 0)
step = 1;
if (step < 0 && step > -1)
step = -1;
int exp = *(exposures + 1);
std::string ss1(string_format("exp[%d] = %d step = %.3f \r\n", s, exp, step));
log += ss1;
bool isMinStep = abs(step) <= 2 && step == expStep[s][0];
bool isOutBounds = exp >= (param.Sp - 5) && step > 0;
isOutBounds |= exp <= 0 && step < 0;
if (isOutBounds)
log += "" + to_string(s) + "个明场校正异常 \r\n";
else if (abs(diff) > 1 || isMinStep)
{
exp += step;
if (exp < 0)
exp = 0;
if (exp > (param.Sp - 5))
exp = (param.Sp - 5);
float coffe[3] = {1, 1, 1}; // 0.2, 1,0.51
for (int k = 0; k < 3; k++)
{
*(exposures + k) = (int)(exp * coffe[k]);
expStep[s][k] = (int)(step);
std::string exps(string_format("expStep[%d][%d] = %.3f\r\n", s, k, step));
log += exps;
std::string ss(string_format("exposures[%d] = %0.3f \r\n", k, exposures[k]));
log += ss;
}
isNeedSave = false;
}
}
// ftt.append_log(log);
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
if (isNeedSave)
{
printf("Save LUT image path :%s \n", param.Flat_WhitePath.c_str());
log = "灰度明场校正完成\r\n";
if (m_glue.m_deviceevent)
m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
log = "";
imwrite(param.Flat_WhitePath, mrgmat);
}
}
}
SaveFpgaparam(param);
printf("exit Save_lut \n");
return isNeedSave;
} }
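Note (illustration only): the reworked dark-field branch of saveLutImg now drives each band's minimum toward 3 instead of driving its mean toward 8, unless the band mean is still above 25, and small steps are rounded to one unit. A compact sketch of that rule, assuming radio is the proportional coefficient already used in this file:

#include <cstdio>

int main()
{
    double radio = 1.0;        // proportional coefficient (assumed)
    double band_mean = 30.0;   // measured mean of one band
    double band_min  = 5.0;    // measured minimum of the same band

    double diff = 3.0 - band_min;        // target: band minimum around 3
    if (band_mean > 25.0)
        diff = 25.0 - band_mean;         // but first pull an overly bright mean back below 25
    double step = radio * diff;
    if (step < 1 && step > 0.5)   step = 1;    // round small positive steps up to one unit
    if (step < -0.5 && step > -1) step = -1;   // and small negative steps down to minus one unit
    printf("diff = %.2f step = %.2f\n", diff, step);
    return 0;
}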
 void MultiFrameCapture::formatStep()
@ -1033,6 +918,7 @@ void MultiFrameCapture::correctcolor(int correctmode)
 void MultiFrameCapture::openDevice(int dpi, int mode)
 {
+    printf("openDevice dpi:%d mode:%d \r\n", dpi, mode);
     reset_fpga();
     bool dunnancis = true;
     int channelwidth = dpi == 0x02 ? 864 : (dpi == 0x03 ? 1728 : 864); // 1296 2592 864
@ -1042,32 +928,48 @@ void MultiFrameCapture::openDevice(int dpi, int mode)
     int startsample = 202; // 205
     auto fpgaparam = GetFpgaparam(dpi, mode);
     int t_real_dpi = dpi == 1 ? 2 : (dpi == 2 ? 2 : 3);
-    ModeFpga fpgamod = {
-        .colorMode = mode,
-        .dpi = t_real_dpi,
-        .led = 1,
-        .sample = startsample, // 256+39
-        .adcA = 0,
-        .adcB = 0,
-        .selftest = 0,
-        .sp = fpgaparam.Sp}; // 600DPI 0x1450 300DPI 0xe10
+    resolution_ = dpi == 3 ? DPI_600 : DPI_300;  // 0: 600dpi, 1: 300dpi; config.params.dpi from the PC is 2 (300dpi) or 3 (600dpi)
+    color_mode_ = mode == 1 ? COLOR : GRAY;
+    cis_width_ = resolution_ == 0 ? WIDTH * 2 : WIDTH;  // width: for a given DPI, color and gray are the same
+    //width_ = paper_size_ == PaperSize::G400_MAXSIZE || paper_size_ == PaperSize::G400_MAXAUTO &&
+    cis_height_ = mode == 0x01 ? 60 * 3 : 60;
+    pixels_width_ = color_mode_ == 1 ? cis_width_ * 3 : cis_width_;
+    pixels_height_ = color_mode_ == 1 ? cis_height_ / 3 : cis_height_;
+    printf("颜色模式:%s\r\n", color_mode_ == COLOR ? "彩色" : "灰色");
+    printf("分辨率:%d\r\n", resolution_ == DPI_600 ? 600 : 300);
+    printf("采集宽:%d 高:%d\r\n", cis_width_, cis_height_);
+    printf("像素宽:%d 高: %d\r\n", pixels_width_, pixels_height_);
     configFPGAParam(mode, dpi);
     StopWatch swwv4l2open;
-    printf("opened video with width = %d height = %d time eplased = %.2f \n", width, 60 * 2, swwv4l2open.elapsed_ms());
-    m_capFpageregs->setFrameNum(1);
-    m_capFpageregs->setFrameHeight(frame_height);
-    for (int i = 0; i < 1; i++)
+    video->HtCamSetClolr(color_mode_);
+    video->HtCamSetDpi(resolution_);
+    video->HtCamSetFrameCnt(1);
+    uint32_t val;
+    video->HtCamGetFrameCnt(val);
+    printf("设置帧数:%d\r\n", val);
+    video->HtCamSetSpTime(fpgaparam.Sp, fpgaparam.Sp - 100);
+    printf(" -----------------------fpgaparam.Sp[1]%d fpgaparam.MaxExp[0]:%d------------------ \r\n", fpgaparam.Sp, fpgaparam.MaxExp);
+    int ret = video->open_device(cis_width_, cis_height_);
+    if (ret < -1)
+        return;
+    int i = 1;
+    char *buf = NULL;
+    while (i >= 0)
     {
-        char *buf = NULL;
-        video->HtCamReadCaptureFrame((void **)&buf, 200);
-        std::this_thread::sleep_for(std::chrono::milliseconds(100));
-        printf("abort first frame \n");
+        i = video->HtCamReadCaptureFrame((void **)&buf, 500);
     }
     //video->close_video();
+    printf("opened video with width = %d height = %d time eplased = %.2f \n", width, 60 * 2, swwv4l2open.elapsed_ms());
 }
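Note (illustration, not from the commit): the capture geometry set up in openDevice follows directly from the new members; WIDTH is 5184 per MultiFrameCapture.h, color mode captures three raw lines per output line, and 600 dpi doubles the sensor width.

#include <cstdio>

int main()
{
    const int WIDTH_5184 = 5184;                  // same value as the WIDTH macro
    for (int dpi600 = 0; dpi600 <= 1; dpi600++)
        for (int color = 0; color <= 1; color++)
        {
            int cis_width  = dpi600 ? WIDTH_5184 * 2 : WIDTH_5184;
            int cis_height = color ? 60 * 3 : 60;                  // color: 3 raw lines per output line
            int pix_width  = color ? cis_width * 3 : cis_width;    // color: 3 channels interleaved in width
            int pix_height = color ? cis_height / 3 : cis_height;
            printf("%s %s: capture %dx%d -> pixels %dx%d\n",
                   dpi600 ? "600dpi" : "300dpi", color ? "color" : "gray",
                   cis_width, cis_height, pix_width, pix_height);
        }
    return 0;
}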
 void MultiFrameCapture::creatcorrectconfig(int dpi, int mode)
@ -1085,13 +987,25 @@ void MultiFrameCapture::creatcorrectconfig(int dpi, int mode)
         m_glue.m_deviceevent((int)HG_ScannerStatus::AUTO_FLATTING, log);
         configFPGAParam(mode, dpi);
         // ftt.append_log(log);
+        printf("log :%s\r\n", log.c_str());
         std::this_thread::sleep_for(std::chrono::milliseconds(5));
-        m_capFpageregs->enableLed(false);
+        unsigned int F[3] = {1, 1, 1};
+        video->HtCamChangeExposureValueF(F);
+        video->HtCamChangeExposureValueB(F);
         std::this_thread::sleep_for(std::chrono::milliseconds(5));
-        m_capFpageregs->capture();
+        video->HtCamStartVideoCapturing();
         std::this_thread::sleep_for(std::chrono::milliseconds(5));
         isDone = saveLutImg(dpi, mode, true); // 0 color_black 1 color_white 2 gray_balck 3 gray_white
-        i++;
+        video->HtCamStopVideoCapturing();
+        //video->close_device();
+        this_thread::sleep_for(std::chrono::milliseconds(200));
+        i++;
+        //return ;
     }
     isDone = false;
     formatStep();
@ -1111,8 +1025,8 @@ void MultiFrameCapture::creatcorrectconfig(int dpi, int mode)
         i++;
     }
     printf("creatcorrectconfig %s \n", (mode == IMAGE_COLOR ? " Color" : " Gray"));
-    creatLUTData(dpi, mode);
-    //video->close_video();
+    //creatLUTData(dpi, mode);
+    video->close_device();
 }
// void MultiFrameCapture::myFloodFill(cv::Mat& image, bool isTwoSide) // void MultiFrameCapture::myFloodFill(cv::Mat& image, bool isTwoSide)

View File

@ -12,7 +12,7 @@ class Gpio;
 class GpioOut;
 #define WIDTH 5184
-#define HEIGHT 512
+#define HEIGHT 513 // must be a multiple of 3
 #define DPI_600 0
 #define DPI_300 1
 #define COLOR 1
@ -69,9 +69,7 @@ private:
 private:
     void reset_fpga();
     void reload_fpga();
-    void set_gain(int ix, int val);
-    void set_offset(int ix, int val);
-    void set_expo(int ix, int val);
     void configFPGAParam(int mode, int dpi);
     int color();
     int width();

View File

@ -1010,14 +1010,6 @@ CorrectParam.h
mutex mutex
- -
/home/modehua/sdk/zynq_7010/CuoZhiMotor.h
Motor.h
/home/modehua/sdk/zynq_7010/Motor.h
thread
-
iostream
-
/home/modehua/sdk/zynq_7010/DevUtil.h /home/modehua/sdk/zynq_7010/DevUtil.h
string string
- -
@ -1042,68 +1034,10 @@ Gpio.h
scanservices_utils.h scanservices_utils.h
/home/modehua/sdk/zynq_7010/scanservices_utils.h /home/modehua/sdk/zynq_7010/scanservices_utils.h
/home/modehua/sdk/zynq_7010/FsmState.cpp
FsmState.h
/home/modehua/sdk/zynq_7010/FsmState.h
Scanner.h
/home/modehua/sdk/zynq_7010/Scanner.h
/home/modehua/sdk/zynq_7010/FsmState.h
memory
-
map
-
typeinfo
-
string
-
scanservices_utils.h
/home/modehua/sdk/zynq_7010/scanservices_utils.h
utilsfunc.h
/home/modehua/sdk/zynq_7010/utilsfunc.h
/home/modehua/sdk/zynq_7010/Gpio.h /home/modehua/sdk/zynq_7010/Gpio.h
string string
- -
/home/modehua/sdk/zynq_7010/HCamDevice.cpp
HCamDevice.h
/home/modehua/sdk/zynq_7010/HCamDevice.h
stdint.h
-
thread
-
chrono
-
string
-
poll.h
-
sys/epoll.h
-
unistd.h
-
fcntl.h
-
sys/mman.h
-
sys/ioctl.h
-
string.h
-
CameraParams.h
/home/modehua/sdk/zynq_7010/CameraParams.h
linux/v4l2-subdev.h
-
iostream
-
errno.h
-
iostream
-
fstream
-
/home/modehua/sdk/zynq_7010/HCamDevice.h /home/modehua/sdk/zynq_7010/HCamDevice.h
string string
- -
@ -1120,12 +1054,6 @@ thread
map map
- -
/home/modehua/sdk/zynq_7010/HGUsb.h
memory
-
UsbEndpoint.h
/home/modehua/sdk/zynq_7010/UsbEndpoint.h
/home/modehua/sdk/zynq_7010/ICapturer.h /home/modehua/sdk/zynq_7010/ICapturer.h
atomic atomic
- -
@ -1144,14 +1072,6 @@ HCamDevice.h
vector vector
- -
/home/modehua/sdk/zynq_7010/IScanner.h
scanservices_utils.h
/home/modehua/sdk/zynq_7010/scanservices_utils.h
string
-
BlockingQueue.h
/home/modehua/sdk/zynq_7010/BlockingQueue.h
/home/modehua/sdk/zynq_7010/Jpegcompress.h /home/modehua/sdk/zynq_7010/Jpegcompress.h
turbojpeg.h turbojpeg.h
- -
@ -1174,72 +1094,6 @@ map
mutex mutex
- -
/home/modehua/sdk/zynq_7010/Keyboard.h
stdio.h
-
stdlib.h
-
unistd.h
-
fcntl.h
-
sys/ioctl.h
-
linux/input.h
-
sys/epoll.h
-
string.h
-
thread
-
time.h
-
condition_variable
-
functional
-
scanservices_utils.h
/home/modehua/sdk/zynq_7010/scanservices_utils.h
/home/modehua/sdk/zynq_7010/Led.h
DevUtil.h
/home/modehua/sdk/zynq_7010/DevUtil.h
/home/modehua/sdk/zynq_7010/MemoryInfo.h
/home/modehua/sdk/zynq_7010/Motor.h
Gpio.h
/home/modehua/sdk/zynq_7010/Gpio.h
Pwm.h
/home/modehua/sdk/zynq_7010/Pwm.h
vector
-
thread
-
iostream
-
scanservices_utils.h
/home/modehua/sdk/zynq_7010/scanservices_utils.h
utilsfunc.h
/home/modehua/sdk/zynq_7010/utilsfunc.h
exception
-
Motordef.h
/home/modehua/sdk/zynq_7010/Motordef.h
MotorConfig.h
/home/modehua/sdk/zynq_7010/MotorConfig.h
/home/modehua/sdk/zynq_7010/MotorConfig.h
vector
-
json.hpp
/home/modehua/sdk/zynq_7010/json.hpp
Motordef.h
/home/modehua/sdk/zynq_7010/Motordef.h
/home/modehua/sdk/zynq_7010/Motordef.h
/home/modehua/sdk/zynq_7010/MultiFrameCapture.cpp /home/modehua/sdk/zynq_7010/MultiFrameCapture.cpp
MultiFrameCapture.h MultiFrameCapture.h
/home/modehua/sdk/zynq_7010/MultiFrameCapture.h /home/modehua/sdk/zynq_7010/MultiFrameCapture.h
@ -1290,90 +1144,6 @@ CorrectParam.h
IPreproc.h IPreproc.h
/home/modehua/sdk/zynq_7010/IPreproc.h /home/modehua/sdk/zynq_7010/IPreproc.h
/home/modehua/sdk/zynq_7010/PanelLeds.h
Led.h
/home/modehua/sdk/zynq_7010/Led.h
iostream
-
scanservices_utils.h
/home/modehua/sdk/zynq_7010/scanservices_utils.h
/home/modehua/sdk/zynq_7010/Pwm.h
string
-
/home/modehua/sdk/zynq_7010/Scanner.cpp
Scanner.h
/home/modehua/sdk/zynq_7010/Scanner.h
iostream
-
filetools.h
/home/modehua/sdk/zynq_7010/filetools.h
fpgacontrol.h
/home/modehua/sdk/zynq_7010/fpgacontrol.h
MemoryInfo.h
/home/modehua/sdk/zynq_7010/MemoryInfo.h
SysInforTool.h
/home/modehua/sdk/zynq_7010/SysInforTool.h
USBProtocol.h
/home/modehua/sdk/zynq_7010/USBProtocol.h
/home/modehua/sdk/zynq_7010/Scanner.h
memory
-
functional
-
CuoZhiMotor.h
/home/modehua/sdk/zynq_7010/CuoZhiMotor.h
ZouZhiMotor.h
/home/modehua/sdk/zynq_7010/ZouZhiMotor.h
Sensor.h
/home/modehua/sdk/zynq_7010/Sensor.h
PanelLeds.h
/home/modehua/sdk/zynq_7010/PanelLeds.h
BlockingQueue.h
/home/modehua/sdk/zynq_7010/BlockingQueue.h
FsmState.h
/home/modehua/sdk/zynq_7010/FsmState.h
MultiFrameCapture.h
/home/modehua/sdk/zynq_7010/MultiFrameCapture.h
scanservices_utils.h
/home/modehua/sdk/zynq_7010/scanservices_utils.h
Keyboard.h
/home/modehua/sdk/zynq_7010/Keyboard.h
utilsfunc.h
/home/modehua/sdk/zynq_7010/utilsfunc.h
filetools.h
/home/modehua/sdk/zynq_7010/filetools.h
SysInforTool.h
/home/modehua/sdk/zynq_7010/SysInforTool.h
MotorConfig.h
/home/modehua/sdk/zynq_7010/MotorConfig.h
correct_ultis.h
/home/modehua/sdk/zynq_7010/correct_ultis.h
FpgaComm.h
/home/modehua/sdk/zynq_7010/FpgaComm.h
/home/modehua/sdk/zynq_7010/Sensor.h
Gpio.h
/home/modehua/sdk/zynq_7010/Gpio.h
thread
-
condition_variable
-
BlockingQueue.h
/home/modehua/sdk/zynq_7010/BlockingQueue.h
FsmState.h
/home/modehua/sdk/zynq_7010/FsmState.h
Pwm.h
/home/modehua/sdk/zynq_7010/Pwm.h
/home/modehua/sdk/zynq_7010/SysInforTool.h
scannersysinfo.h
/home/modehua/sdk/zynq_7010/scannersysinfo.h
json.hpp
/home/modehua/sdk/zynq_7010/json.hpp
/home/modehua/sdk/zynq_7010/ThreadPool.h /home/modehua/sdk/zynq_7010/ThreadPool.h
vector vector
- -
@ -1396,60 +1166,6 @@ stdexcept
/home/modehua/sdk/zynq_7010/USBProtocol.h /home/modehua/sdk/zynq_7010/USBProtocol.h
/home/modehua/sdk/zynq_7010/UsbEndpoint.h
unistd.h
-
sys/types.h
-
sys/stat.h
-
fcntl.h
-
map
-
errno.h
-
scanservices_utils.h
/home/modehua/sdk/zynq_7010/scanservices_utils.h
/home/modehua/sdk/zynq_7010/UsbScanner.cpp
UsbScanner.h
/home/modehua/sdk/zynq_7010/UsbScanner.h
turbojpeg.h
-
JsonConfig.h
/home/modehua/sdk/zynq_7010/JsonConfig.h
USBProtocol.h
/home/modehua/sdk/zynq_7010/USBProtocol.h
filetools.h
/home/modehua/sdk/zynq_7010/filetools.h
logs_out.h
/home/modehua/sdk/zynq_7010/logs_out.h
/home/modehua/sdk/zynq_7010/UsbScanner.h
IScanner.h
/home/modehua/sdk/zynq_7010/IScanner.h
Scanner.h
/home/modehua/sdk/zynq_7010/Scanner.h
memory
-
FsmState.h
/home/modehua/sdk/zynq_7010/FsmState.h
usbdevice.h
/home/modehua/sdk/zynq_7010/usbdevice.h
/home/modehua/sdk/zynq_7010/UsbmsgHandler.cpp
UsbmsgHandler.h
/home/modehua/sdk/zynq_7010/UsbmsgHandler.h
Scanner.h
/home/modehua/sdk/zynq_7010/Scanner.h
/home/modehua/sdk/zynq_7010/UsbmsgHandler.h
/home/modehua/sdk/zynq_7010/ZouZhiMotor.h
Motor.h
/home/modehua/sdk/zynq_7010/Motor.h
/home/modehua/sdk/zynq_7010/autoevent.hpp /home/modehua/sdk/zynq_7010/autoevent.hpp
mutex mutex
- -
@ -1460,10 +1176,6 @@ condition_variable
stdint.h stdint.h
- -
/home/modehua/sdk/zynq_7010/buildconf.h
/home/modehua/sdk/zynq_7010/camtp.h
/home/modehua/sdk/zynq_7010/correct_ultis.h /home/modehua/sdk/zynq_7010/correct_ultis.h
sstream sstream
- -
@ -1498,10 +1210,6 @@ iostream
ctime ctime
- -
/home/modehua/sdk/zynq_7010/fpgacontrol.h
string
-
/home/modehua/sdk/zynq_7010/gvideo.h /home/modehua/sdk/zynq_7010/gvideo.h
string string
- -
@ -1848,74 +1556,6 @@ limits
type_traits type_traits
- -
/home/modehua/sdk/zynq_7010/logs_out.h
syslog.h
-
stdio.h
-
/home/modehua/sdk/zynq_7010/main.cpp
cstdio
-
iostream
-
fstream
-
DevUtil.h
/home/modehua/sdk/zynq_7010/DevUtil.h
Motor.h
/home/modehua/sdk/zynq_7010/Motor.h
thread
-
string
-
gvideo.h
/home/modehua/sdk/zynq_7010/gvideo.h
FpgaComm.h
/home/modehua/sdk/zynq_7010/FpgaComm.h
FsmState.h
/home/modehua/sdk/zynq_7010/FsmState.h
Scanner.h
/home/modehua/sdk/zynq_7010/Scanner.h
sstream
-
stdio.h
-
stdlib.h
-
signal.h
-
sys/file.h
-
HGUsb.h
/home/modehua/sdk/zynq_7010/HGUsb.h
memory.h
-
UsbScanner.h
/home/modehua/sdk/zynq_7010/UsbScanner.h
scanservices_utils.h
/home/modehua/sdk/zynq_7010/scanservices_utils.h
turbojpeg.h
-
JsonConfig.h
/home/modehua/sdk/zynq_7010/JsonConfig.h
syslog.h
-
utilsfunc.h
/home/modehua/sdk/zynq_7010/utilsfunc.h
correct_ultis.h
/home/modehua/sdk/zynq_7010/correct_ultis.h
errno.h
-
USBProtocol.h
/home/modehua/sdk/zynq_7010/USBProtocol.h
MotorConfig.h
/home/modehua/sdk/zynq_7010/MotorConfig.h
iomanip
-
usbdevice.h
/home/modehua/sdk/zynq_7010/usbdevice.h
/home/modehua/sdk/zynq_7010/scannersysinfo.h /home/modehua/sdk/zynq_7010/scannersysinfo.h
sstream sstream
- -
@ -1954,40 +1594,6 @@ unistd.h
sys/ioctl.h sys/ioctl.h
- -
/home/modehua/sdk/zynq_7010/usb_gadget.h
linux/usb/ch9.h
-
linux/usb/gadgetfs.h
-
linux/usb/functionfs.h
-
usbstring.h
/home/modehua/sdk/zynq_7010/usbstring.h
/home/modehua/sdk/zynq_7010/usbdevice.h
stdint.h
-
thread
-
functional
-
memory
-
camtp.h
/home/modehua/sdk/zynq_7010/camtp.h
buildconf.h
/home/modehua/sdk/zynq_7010/buildconf.h
usb_gadget.h
/home/modehua/sdk/zynq_7010/usb_gadget.h
libaio.h
-
queue
-
mutex
-
/home/modehua/sdk/zynq_7010/usbstring.h
/home/modehua/sdk/zynq_7010/utilsfunc.h /home/modehua/sdk/zynq_7010/utilsfunc.h
scanservices_utils.h scanservices_utils.h
/home/modehua/sdk/zynq_7010/scanservices_utils.h /home/modehua/sdk/zynq_7010/scanservices_utils.h
Binary file not shown

View File

@ -21,17 +21,17 @@ void setOffset(int *config, int step)
} }
} }
// cv::Mat extractRepresentRow2(const cv::Mat &src) cv::Mat extractRepresentRow2(const cv::Mat &src)
// { {
// cv::Mat BWbalenceSrc(1, src.cols * src.channels(), CV_8UC1); cv::Mat BWbalenceSrc(1, src.cols * src.channels(), CV_8UC1);
// cv::Mat temp_imageBW(src.rows, src.cols * src.channels(), CV_8UC1, src.data); cv::Mat temp_imageBW(src.rows, src.cols * src.channels(), CV_8UC1, src.data);
// for (size_t i = 0; i < BWbalenceSrc.cols; i++) for (size_t i = 0; i < BWbalenceSrc.cols; i++)
// BWbalenceSrc.at<u_char>(0, i) = cv::mean(temp_imageBW(cv::Rect(i, 0, 1, temp_imageBW.rows)))[0]; BWbalenceSrc.at<u_char>(0, i) = cv::mean(temp_imageBW(cv::Rect(i, 0, 1, temp_imageBW.rows)))[0];
// return BWbalenceSrc; return BWbalenceSrc;
// } }
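The restored extractRepresentRow2 collapses a calibration frame into one representative row by averaging every column of the interleaved image. A minimal standalone sketch of the same column-mean idea (OpenCV assumed; this is not the exact function above, just an illustration):

#include <opencv2/opencv.hpp>

// Average each column of an interleaved frame into a single row of samples.
static cv::Mat columnMeanRow(const cv::Mat &src)
{
    // Reinterpret the interleaved data as one channel so every color sample
    // gets its own column, then average each column down to one value.
    cv::Mat flat(src.rows, src.cols * src.channels(), CV_8UC1, src.data);
    cv::Mat row(1, flat.cols, CV_8UC1);
    for (int i = 0; i < flat.cols; i++)
        row.at<uchar>(0, i) = static_cast<uchar>(cv::mean(flat.col(i))[0]);
    return row;
}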
cv::Mat loadLUT(const std::string &file) cv::Mat loadLUT(const std::string &file)
{ {
@ -122,63 +122,61 @@ void correctColor(cv::Mat &src, int dpi, int mode, bool lutgraghic)
cv::LUT(image_temp(cv::Rect(i, 0, 1, image_temp.rows)), lutMat(cv::Rect(0, i, 256, 1)), image_temp(cv::Rect(i, 0, 1, image_temp.rows))); cv::LUT(image_temp(cv::Rect(i, 0, 1, image_temp.rows)), lutMat(cv::Rect(0, i, 256, 1)), image_temp(cv::Rect(i, 0, 1, image_temp.rows)));
} }
// void creatLUTData(int dpi, int mode) void creatLUTData(int dpi, int mode)
// { {
// printf("eneter creatLUTData \n"); printf("eneter creatLUTData \n");
// FPGAConfigParam param = GetFpgaparam(dpi, mode); FPGAConfigParam param = GetFpgaparam(dpi, mode);
// auto colormode = mode == 1 ? IMREAD_COLOR : IMREAD_GRAYSCALE; auto colormode = mode == 1 ? IMREAD_COLOR : IMREAD_GRAYSCALE;
// std::string blackPath = param.Flat_BwPath; std::string blackPath = param.Flat_BwPath;
// std::string whitePath = param.Flat_WhitePath; std::string whitePath = param.Flat_WhitePath;
// cv::Mat lut; cv::Mat lut;
// cv::Mat twMat = cv::imread(whitePath, colormode); cv::Mat twMat = cv::imread(whitePath, colormode);
// cv::Mat tbMat = cv::imread(blackPath, colormode); cv::Mat tbMat = cv::imread(blackPath, colormode);
// cv::Mat wMat, bMat; cv::Mat wMat, bMat;
// if (mode == 1) if (mode == 1)
// { {
// wMat = cv::Mat(twMat.rows, twMat.cols * 3, CV_8UC1, twMat.data); wMat = cv::Mat(twMat.rows, twMat.cols * 3, CV_8UC1, twMat.data);
// bMat = cv::Mat(twMat.rows, twMat.cols * 3, CV_8UC1, tbMat.data); bMat = cv::Mat(twMat.rows, twMat.cols * 3, CV_8UC1, tbMat.data);
// } }
// else else
// { {
// wMat = twMat; wMat = twMat;
// bMat = tbMat; bMat = tbMat;
// } }
// #ifdef USE_NEWFLAT #ifdef USE_NEWFLAT
// //lut = calcLUT(extractRepresentRow2(bMat), extractRepresentRow2(wMat), false); //lut = calcLUT(extractRepresentRow2(bMat), extractRepresentRow2(wMat), false);
// //cv::imwrite(param.LutPath, lut); //cv::imwrite(param.LutPath, lut);
// lut = calcLUT(extractRepresentRow2(bMat), extractRepresentRow2(wMat), true); lut = calcLUT(extractRepresentRow2(bMat), extractRepresentRow2(wMat), true);
// cv::imwrite(param.TextLutPath, lut); cv::imwrite(param.TextLutPath, lut);
// #else #else
// lut = create_lut(extractRepresentRow2(bMat), extractRepresentRow2(wMat), dpi, mode); lut = create_lut(extractRepresentRow2(bMat), extractRepresentRow2(wMat), dpi, mode);
// // Mat dst(bMat.cols * bMat.channels(), 256, CV_8UC1); // Mat dst(bMat.cols * bMat.channels(), 256, CV_8UC1);
// // memcpy(dst.data, lut.data, bMat.cols * bMat.channels() * 256); // memcpy(dst.data, lut.data, bMat.cols * bMat.channels() * 256);
// cv::imwrite(param.LutPath, lut); cv::imwrite(param.LutPath, lut);
// #endif #endif
// printf("exit creatLUTData \n"); printf("exit creatLUTData \n");
// } }
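With creatLUTData back in the build, the calibration flow is: load the black/white flat-field images recorded in FPGAConfigParam, reduce them to representative rows, build the LUT, and write it to the configured path. A hypothetical driver, assuming correct_ultis.h declares these functions and that FPGAConfigParam::LutPath is a std::string (both assumptions, not confirmed by this diff):

#include "correct_ultis.h"   // assumed header name, taken from this commit's file list
#include <cstdio>

int main()
{
    const int dpi = 200;   // illustrative calibration profile
    const int mode = 1;    // 1 = color (IMREAD_COLOR above), otherwise grayscale
    creatLUTData(dpi, mode);                          // rebuild the LUT from the flat-field images
    FPGAConfigParam param = GetFpgaparam(dpi, mode);  // read back the stored calibration paths
    std::printf("LUT written to %s\n", param.LutPath.c_str());
    return 0;
}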
FPGAConfigParam GetFpgaparam(int dpi, int mode) FPGAConfigParam GetFpgaparam(int dpi, int mode)
{ {
//FPGAConfigParam param = CorrectParam().GetFpgaparam(dpi, mode);
return correctparam.GetFpgaparam(dpi, mode); return correctparam.GetFpgaparam(dpi, mode);
} }
void SaveFpgaparam(FPGAConfigParam &param) void SaveFpgaparam(FPGAConfigParam &param)
{ {
correctparam.SaveCorrectParam(param); correctparam.SaveCorrectParam(param);
//CorrectParam().SaveCorrectParam(param);
} }
// cv::Mat colMean(const cv::Mat &image) cv::Mat colMean(const cv::Mat &image)
// { {
// cv::Mat meanMat(1, image.step, CV_8UC1); cv::Mat meanMat(1, image.step, CV_8UC1);
// cv::Mat tempMat(image.rows, image.step, CV_8UC1, image.data); cv::Mat tempMat(image.rows, image.step, CV_8UC1, image.data);
// for (int i = 0; i < tempMat.step; i++) for (int i = 0; i < tempMat.step; i++)
// meanMat.data[i] = cv::mean(tempMat(cv::Rect(i, 0, 1, tempMat.rows)))[0]; meanMat.data[i] = cv::mean(tempMat(cv::Rect(i, 0, 1, tempMat.rows)))[0];
// return meanMat; return meanMat;
// } }
float gamma(float value, float ex) float gamma(float value, float ex)
{ {
@ -189,214 +187,214 @@ float gamma(float value, float ex)
#define BLACK_OFFSET 0 #define BLACK_OFFSET 0
void fittingLUT(const std::vector<u_char> &points, u_char min_value, u_char max_value, u_char *data) void fittingLUT(const std::vector<u_char> &points, u_char min_value, u_char max_value, u_char *data)
{ {
// float step = max_value - min_value + 1; float step = max_value - min_value + 1;
// memset(data, min_value, 127); memset(data, min_value, 127);
// memset(data + 127, max_value, 129); memset(data + 127, max_value, 129);
// int b = points[0]; int b = points[0];
// int w = points[1]; int w = points[1];
// int tb = min_value; int tb = min_value;
// int tw = max_value; int tw = max_value;
// step = cv::max((float)(tw - tb + 1) / (float)(w - b + 1), 0.0f); step = cv::max((float)(tw - tb + 1) / (float)(w - b + 1), 0.0f);
// float temp; float temp;
// for (int j = 0, length = (255 - b + 1); j < length; j++) for (int j = 0, length = (255 - b + 1); j < length; j++)
// { {
// temp = gamma(tb + step * j, GAMMA_EX) - BLACK_OFFSET; temp = gamma(tb + step * j, GAMMA_EX) - BLACK_OFFSET;
// data[j + b] = cv::min(255, cv::max(0, static_cast<int>(temp))); data[j + b] = cv::min(255, cv::max(0, static_cast<int>(temp)));
// } }
} }
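For concreteness: with sample levels b = 40, w = 200 and global extremes min_value = 10, max_value = 245, the stretch factor is step = (245 - 10 + 1) / (200 - 40 + 1) ≈ 1.47, so input 120 maps to roughly 10 + 1.47 * (120 - 40) ≈ 127 before clamping (this takes gamma() as the identity, i.e. assumes GAMMA_EX = 1.0 purely for readability; its actual value is defined above this hunk, and BLACK_OFFSET is 0 per the line above).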
// std::vector<double> caculate(const std::vector<double> &points_x, const std::vector<double> &points_y) std::vector<double> caculate(const std::vector<double> &points_x, const std::vector<double> &points_y)
// { {
// int MaxElement = points_x.size() - 1; int MaxElement = points_x.size() - 1;
// // compute the constant term f // compute the constant term f
// double f = points_y[0]; double f = points_y[0];
// // solve for the remaining coefficients // solve for the remaining coefficients
// int n, m; int n, m;
// // double a[MaxElement][MaxElement+1]; // double a[MaxElement][MaxElement+1];
// std::vector<std::vector<double>> a; std::vector<std::vector<double>> a;
// // a.resize(MaxElement); // a.resize(MaxElement);
// for (int i = 0; i < MaxElement; i++) for (int i = 0; i < MaxElement; i++)
// { {
// std::vector<double> b; std::vector<double> b;
// b.resize(MaxElement + 1); b.resize(MaxElement + 1);
// a.push_back(b); a.push_back(b);
// } }
// for (int i = 0; i < MaxElement; i++) for (int i = 0; i < MaxElement; i++)
// { {
// for (int j = 0; j < MaxElement; j++) for (int j = 0; j < MaxElement; j++)
// a[i][j] = cv::pow(points_x[i + 1], MaxElement - j); a[i][j] = cv::pow(points_x[i + 1], MaxElement - j);
// a[i][MaxElement] = points_y[i + 1] - f; a[i][MaxElement] = points_y[i + 1] - f;
// } }
// int i, j; int i, j;
// n = MaxElement; n = MaxElement;
// for (j = 0; j < n; j++) for (j = 0; j < n; j++)
// { {
// double max = 0; double max = 0;
// double imax = 0; double imax = 0;
// for (i = j; i < n; i++) for (i = j; i < n; i++)
// { {
// if (imax < cv::abs(a[i][j])) if (imax < cv::abs(a[i][j]))
// { {
// imax = cv::abs(a[i][j]); imax = cv::abs(a[i][j]);
// max = a[i][j]; // keep the largest element in this column as the pivot max = a[i][j]; // keep the largest element in this column as the pivot
// m = i; m = i;
// } }
// } }
// if (cv::abs(a[j][j]) != max) if (cv::abs(a[j][j]) != max)
// { {
// double b = 0; double b = 0;
// for (int k = j; k < n + 1; k++) for (int k = j; k < n + 1; k++)
// { {
// b = a[j][k]; b = a[j][k];
// a[j][k] = a[m][k]; a[j][k] = a[m][k];
// a[m][k] = b; a[m][k] = b;
// } }
// } }
// for (int r = j; r < n + 1; r++) for (int r = j; r < n + 1; r++)
// { {
// a[j][r] = a[j][r] / max; // divide the row by the pivot so its leading element becomes 1 a[j][r] = a[j][r] / max; // divide the row by the pivot so its leading element becomes 1
// } }
// for (i = j + 1; i < n; i++) for (i = j + 1; i < n; i++)
// { {
// double c = a[i][j]; double c = a[i][j];
// if (c == 0.0) if (c == 0.0)
// continue; continue;
// for (int s = j; s < n + 1; s++) for (int s = j; s < n + 1; s++)
// { {
// a[i][s] = a[i][s] - a[j][s] * c; // subtract the scaled pivot row so the leading element of the rows below becomes 0 a[i][s] = a[i][s] - a[j][s] * c; // subtract the scaled pivot row so the leading element of the rows below becomes 0
// } }
// } }
// } }
// for (i = n - 2; i >= 0; i--) for (i = n - 2; i >= 0; i--)
// { {
// for (j = i + 1; j < n; j++) for (j = i + 1; j < n; j++)
// { {
// a[i][n] = a[i][n] - a[j][n] * a[i][j]; a[i][n] = a[i][n] - a[j][n] * a[i][j];
// } }
// } }
// std::vector<double> result; std::vector<double> result;
// for (int k = 0; k < n; k++) for (int k = 0; k < n; k++)
// result.push_back(a[k][n]); result.push_back(a[k][n]);
// result.push_back(f); result.push_back(f);
// return result; return result;
// } }
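The restored caculate() is a hand-rolled Gaussian elimination with partial pivoting: it fits a polynomial through the control points with the constant term pinned to y[0], which for the four points used in createLUT below means a cubic a*x^3 + b*x^2 + c*x + f. The same coefficients can be obtained with cv::solve; a sketch under that assumption (OpenCV only, not code from this repo):

#include <opencv2/opencv.hpp>
#include <vector>

// Fit a*x^3 + b*x^2 + c*x + f through four points, with f fixed to y[0],
// mirroring the linear system that caculate() eliminates by hand.
static std::vector<double> fitCubic(const std::vector<double> &x,
                                    const std::vector<double> &y)
{
    const double f = y[0];
    cv::Mat A(3, 3, CV_64FC1), B(3, 1, CV_64FC1);
    for (int i = 0; i < 3; i++)
    {
        A.at<double>(i, 0) = x[i + 1] * x[i + 1] * x[i + 1];
        A.at<double>(i, 1) = x[i + 1] * x[i + 1];
        A.at<double>(i, 2) = x[i + 1];
        B.at<double>(i, 0) = y[i + 1] - f;
    }
    cv::Mat coef;
    cv::solve(A, B, coef, cv::DECOMP_LU);  // solve the 3x3 Vandermonde-style system
    return { coef.at<double>(0), coef.at<double>(1), coef.at<double>(2), f };
}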
// cv::Mat createLUT(const std::vector<cv::Mat> &mats, bool isTextCorrect) cv::Mat createLUT(const std::vector<cv::Mat> &mats, bool isTextCorrect)
// { {
// int rows = mats[0].cols; int rows = mats[0].cols;
// cv::Mat lut(rows, 256, CV_8UC1); cv::Mat lut(rows, 256, CV_8UC1);
// double max_val, min_val; double max_val, min_val;
// cv::minMaxIdx(mats[0], &min_val, nullptr); cv::minMaxIdx(mats[0], &min_val, nullptr);
// cv::minMaxIdx(mats[1], nullptr, &max_val); cv::minMaxIdx(mats[1], nullptr, &max_val);
// for (size_t i = 0; i < rows; i++) for (size_t i = 0; i < rows; i++)
// { {
// std::vector<u_char> grayPoints; std::vector<u_char> grayPoints;
// for (size_t j = 0; j < mats.size(); j++) for (size_t j = 0; j < mats.size(); j++)
// grayPoints.push_back(mats[j].data[i]); grayPoints.push_back(mats[j].data[i]);
// fittingLUT(grayPoints, static_cast<u_char>(min_val), static_cast<u_char>(max_val), lut.data + i * 256); fittingLUT(grayPoints, static_cast<u_char>(min_val), static_cast<u_char>(max_val), lut.data + i * 256);
// } }
// if (isTextCorrect) if (isTextCorrect)
// { {
// std::vector<double> points_x = {0, 25, 205, 255}, points_y = {0, 0, 230, 255}; std::vector<double> points_x = {0, 25, 205, 255}, points_y = {0, 0, 230, 255};
// std::vector<double> coefficient = caculate(points_x, points_y); std::vector<double> coefficient = caculate(points_x, points_y);
// u_char buffer[256]; u_char buffer[256];
// for (int i = 0; i < 256; i++) for (int i = 0; i < 256; i++)
// { {
// int temp = coefficient[0] * i * i * i + coefficient[1] * i * i + coefficient[2] * i + coefficient[3]; int temp = coefficient[0] * i * i * i + coefficient[1] * i * i + coefficient[2] * i + coefficient[3];
// buffer[i] = static_cast<u_char>(cv::min(255, cv::max(0, temp))); buffer[i] = static_cast<u_char>(cv::min(255, cv::max(0, temp)));
// } }
// cv::Mat lut_lut(256, 1, CV_8UC1, buffer); cv::Mat lut_lut(256, 1, CV_8UC1, buffer);
// cv::LUT(lut, lut_lut, lut); cv::LUT(lut, lut_lut, lut);
// } }
// return lut; return lut;
// } }
#ifdef G400 #ifdef G400
#define CHANNEL 408 #define CHANNEL 408
#else #else
#define CHANNEL 432 #define CHANNEL 432
#endif #endif
// cv::Mat calcLUT(const cv::Mat &black, const cv::Mat &white, bool isTextCorrection) cv::Mat calcLUT(const cv::Mat &black, const cv::Mat &white, bool isTextCorrection)
// { {
// std::vector<cv::Mat> w; std::vector<cv::Mat> w;
// w.push_back(colMean(black)); w.push_back(colMean(black));
// w.push_back(colMean(white)); w.push_back(colMean(white));
// cv::Mat lut = createLUT(w, isTextCorrection); cv::Mat lut = createLUT(w, isTextCorrection);
// for (size_t i = 0, block = lut.rows / CHANNEL; i < block; i++) for (size_t i = 0, block = lut.rows / CHANNEL; i < block; i++)
// { {
// cv::Mat lutROI = lut(cv::Rect(0, i * CHANNEL, 256, CHANNEL)); cv::Mat lutROI = lut(cv::Rect(0, i * CHANNEL, 256, CHANNEL));
// cv::Mat tran; cv::Mat tran;
// cv::transpose(lutROI, tran); cv::transpose(lutROI, tran);
// memcpy(lutROI.data, tran.data, tran.total()); memcpy(lutROI.data, tran.data, tran.total());
// } }
// return lut; return lut;
// } }
// cv::Mat create_lut(const cv::Mat &black, const cv::Mat &white, int dpi, bool colormode) cv::Mat create_lut(const cv::Mat &black, const cv::Mat &white, int dpi, bool colormode)
// { {
// #ifndef USE_NEWFLAT #ifndef USE_NEWFLAT
// if (black.empty() || white.empty() || black.channels() != 1 || white.channels() != 1 || black.step != white.step) if (black.empty() || white.empty() || black.channels() != 1 || white.channels() != 1 || black.step != white.step)
// return cv::Mat(); return cv::Mat();
// int channel = 1; int channel = 1;
// if (black.step == 4896 || black.step == 7344) if (black.step == 4896 || black.step == 7344)
// channel = 408; channel = 408;
// else if (black.step == 14688 || black.step == 22032 || black.step == 44064) else if (black.step == 14688 || black.step == 22032 || black.step == 44064)
// channel = 432; // 486 channel = 432; // 486
// if (channel == 1) if (channel == 1)
// return cv::Mat(); return cv::Mat();
// const int rows = black.cols / channel; const int rows = black.cols / channel;
// const int cols = 256; const int cols = 256;
// auto cc = CV_8UC(channel); auto cc = CV_8UC(channel);
// Mat lut(rows, cols, CV_8UC(channel)); Mat lut(rows, cols, CV_8UC(channel));
// for (size_t i = 0; i < rows; i++) for (size_t i = 0; i < rows; i++)
// { {
// Mat lut_row = lut(cv::Rect(0, i, cols, 1)); Mat lut_row = lut(cv::Rect(0, i, cols, 1));
// unsigned char *ptr_buffer = lut_row.data; unsigned char *ptr_buffer = lut_row.data;
// unsigned char *ptr_black = black.data + i * channel; unsigned char *ptr_black = black.data + i * channel;
// unsigned char *ptr_white = white.data + i * channel; unsigned char *ptr_white = white.data + i * channel;
// for (size_t j = 0; j < cols; j++) for (size_t j = 0; j < cols; j++)
// for (size_t k = 0; k < channel; k++) for (size_t k = 0; k < channel; k++)
// { {
// if (ptr_black[k] >= ptr_white[k]) if (ptr_black[k] >= ptr_white[k])
// { {
// ptr_buffer[j * channel + k] = 0; ptr_buffer[j * channel + k] = 0;
// continue; continue;
// } }
// if (j <= ptr_black[k]) if (j <= ptr_black[k])
// ptr_buffer[j * channel + k] = 0; ptr_buffer[j * channel + k] = 0;
// else if (j >= ptr_white[k]) else if (j >= ptr_white[k])
// ptr_buffer[j * channel + k] = 255; ptr_buffer[j * channel + k] = 255;
// else else
// { {
// float val = 255.0f * (j - ptr_black[k]) / (ptr_white[k] - ptr_black[k]) * 1.275; // float val = 255.0f * (j - ptr_black[k]) / (ptr_white[k] - ptr_black[k]) * 1.275; //
// ptr_buffer[j * channel + k] = (unsigned char)cv::max(0.0f, cv::min(val, 255.0f)); ptr_buffer[j * channel + k] = (unsigned char)cv::max(0.0f, cv::min(val, 255.0f));
// } }
// } }
// } }
// cv::Mat saveMat(black.step, 256, CV_8UC1, lut.data); cv::Mat saveMat(black.step, 256, CV_8UC1, lut.data);
// return saveMat.clone(); return saveMat.clone();
// #else #else
// return calcLUT(black, white, false); return calcLUT(black, white, false);
// #endif #endif
// } }
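In its non-USE_NEWFLAT branch, the restored create_lut builds, per sensor channel, a linear ramp between the measured black and white levels with a fixed 1.275 gain. The per-entry rule, isolated into a small helper for clarity (a sketch, not code from this repo):

#include <algorithm>

// One LUT entry for a single (black, white) pair, matching the branch logic above.
static unsigned char rampEntry(unsigned char j, unsigned char black, unsigned char white)
{
    if (black >= white || j <= black) return 0;   // degenerate pair, or at/below the black level
    if (j >= white) return 255;                   // at or above the white level
    float val = 255.0f * (j - black) / (white - black) * 1.275f;
    return static_cast<unsigned char>(std::max(0.0f, std::min(val, 255.0f)));
}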
// cv::Mat GetMergeMat(int dstwidth, int dstheight, int type, cv::Mat &mat) cv::Mat GetMergeMat(int dstwidth, int dstheight, int type, cv::Mat &mat)
// { {
// return CImageMerge().MergeImage(type == CV_8UC3, mat, dstwidth, dstheight); return CImageMerge().MergeImage(type == CV_8UC3, mat, dstwidth, dstheight);
// } }

View File

@ -14,9 +14,9 @@ void initStep();
void setOffset(int *config, int step); void setOffset(int *config, int step);
//cv::Mat calcLUT(const cv::Mat& black, const cv::Mat& white, bool isTextCorrection); cv::Mat calcLUT(const cv::Mat& black, const cv::Mat& white, bool isTextCorrection);
//cv::Mat extractRepresentRow2(const cv::Mat& src); cv::Mat extractRepresentRow2(const cv::Mat& src);
void initLut(const std::string lutpath,bool iscolor); void initLut(const std::string lutpath,bool iscolor);
@ -28,6 +28,6 @@ FPGAConfigParam GetFpgaparam(int dpi,int mode);
void SaveFpgaparam(FPGAConfigParam& param); void SaveFpgaparam(FPGAConfigParam& param);
//cv::Mat create_lut(const cv::Mat& black, const cv::Mat& white,int dpi, bool colormode); cv::Mat create_lut(const cv::Mat& black, const cv::Mat& white,int dpi, bool colormode);
//cv::Mat GetMergeMat(int dstwidth ,int dstheight,int type,cv::Mat& mat); cv::Mat GetMergeMat(int dstwidth ,int dstheight,int type,cv::Mat& mat);