// zynq_7010/zynq_7010_code/HCamDevice.cpp


#include "HCamDevice.h"
#include <stdint.h>
#include <thread>
#include <chrono>
#include <string>
#include <poll.h>
#include <sys/epoll.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <string.h>
#include "CameraParams.h"
#include <linux/v4l2-subdev.h>
#include <iostream>
#include <errno.h>
#define ADC_82V48
#define HT_CAM_REG_CR_STARTSAMPLE_MASK 0x00000001
#define HT_CAM_REG_CR_STOPSAMPLE_MASK 0x00000002
#define HT_CAM_REG_CR_CHANGE_MASK 0x00020000
#define camera_print(...) (printf("L%d(%s):", __LINE__, __FILE__), \
printf(__VA_ARGS__))
#ifdef DEBUG_PRINT
#define camera_dbg(...) (printf("L%d(%s):", __LINE__, __FILE__), \
printf(__VA_ARGS__))
#else
#define camera_dbg(...) ((void)0)
#endif
#define camera_err(...) (printf("L%d(%s):", __LINE__, __FILE__), \
printf(__VA_ARGS__))
#define CLEAR(x) memset(&(x), 0, sizeof(x))
#pragma pack(1)
#ifdef ADC_82V38
#pragma pack(1)
typedef struct ADC_82V38_Timing
{
uint32_t regData : 9;
uint32_t : 3;
uint32_t regAddr : 3;
uint32_t rwbit : 1;
uint32_t regData1 : 9;
uint32_t : 3;
uint32_t regAddr1 : 3;
uint32_t rwbit1 : 1;
} adcTiming;
#endif
#ifdef ADC_82V48
union CIS_ADC_NEW
{
struct adcTiming
{
uint32_t regData : 8;   // data field
uint32_t : 2;
uint32_t regAddr : 5;   // register address
uint32_t rwbit : 1;     // read/write: 1 = read, 0 = write
uint32_t regData1 : 8;
uint32_t : 2;
uint32_t regAddr1 : 5;
uint32_t rwbit1 : 1;
} bit;
uint32_t value;
};
struct adcTiming
{
uint32_t regData : 8;   // data field
uint32_t : 2;
uint32_t regAddr : 5;   // register address
uint32_t rwbit : 1;     // read/write: 1 = read, 0 = write
uint32_t regData1 : 8;
uint32_t : 2;
uint32_t regAddr1 : 5;
uint32_t rwbit1 : 1;
};
#endif
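// Illustrative sketch (commented out; not used anywhere in this file): packing one
// ADC access frame with the CIS_ADC_NEW union above. The register address used here
// is hypothetical; field meanings follow the struct adcTiming comments, and the real
// register map should be taken from the ADC datasheet.
// CIS_ADC_NEW frame;
// frame.value = 0;
// frame.bit.rwbit = 1;      // 1 = read, 0 = write
// frame.bit.regAddr = 0x02; // ADC register address (example value)
// frame.bit.regData = 0x00; // ignored for reads
// frame.bit.rwbit1 = 1;     // mirrored second half of the 32-bit frame
// frame.bit.regAddr1 = 0x02;
// frame.bit.regData1 = 0x00;
// // frame.value would then be written to FPGA register 0 or 7, as HtCamReadADCReg_ALL does.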
#define V4L2_EVENT_HTCAMMOD_CLASS (V4L2_EVENT_PRIVATE_START | 0x1000)
#define V4L2_EVENT_HTCAMMOD_OVERFREP (V4L2_EVENT_HTCAMMOD_CLASS | 0X1)
HCamDevice::HCamDevice()
{
virBaseAddr = NULL;
memfd = 0;
subDeviceName = "/dev/v4l-subdev0";
videoDevName = "/dev/video0";
subDevicefd = -1;
videofd = -1;
v4lWidth = 5184;//@300dpi 5184, @600dpi 5184*2
v4lHeight = 513 ;//* 3;//color_h/gray_h = 3; e.g. for a target restored image height of 128, the FPGA captures 128 lines in gray mode and 128*3 in color mode
v4lBufferCount = 32;
v4l2buftype = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
nplanes = 1; // only use one plane
drivertype = V4L2_CAP_VIDEO_CAPTURE_MPLANE;
//drivertype == V4L2_BUF_TYPE_VIDEO_CAPTURE;
v4l2memtype = V4L2_MEMORY_MMAP;
captureBufers = NULL;
init_fpga();
uint8_t val = 0;
HtCamSetdivder(true);
HtCamSetPeriod(20);
HtCamWriteFpgaRegs(17, 0x04ce99ff); // scan sensor threshold
HtCamSetDpi(1); // default 300 dpi
HtCamSetClolr(0);// default gray mode
HtCamSetFrameCnt(10);
uint32_t val1 = 0;
HtCamGetFrameCnt(val1);
//HtCamSwitchSampleModes(0); // test data
unsigned int i[3] = {100, 200, 300};
HtCamChangeExposureValueF(i); // exposure
HtCamChangeExposureValueB(i);
// start sample
camera_dbg("ST SP : %d , VSNP : %d \r\n" , ST_SP , VSNP);
//HtCamChangeMonoStartSample(600);//200 * 3 + CamProperty::ST_SP_VSNP
// if( ST_SP != 0 || ST_SP_VSNP != 0)
// HtCamChangeMonoStartSample( ST_SP * 3 + ST_SP_VSNP);
if( VSNP != 0)
{
unsigned int value = HtCamReadFpgaRegs(16);
value=0;
value = value & 0xffffff00;
HtCamWriteFpgaRegs(16, VSNP | value);
}
HtCamWriteFpgaRegs(16, 0x0A0A); // phase; 0x0B0B, 2023-9-4: 0A0B, 2023-9-11: 0a0a
//HtCamChangeTriggerInAndEXt(1);
HtCamInitADCReg();
HtCamWriteAllADC();
//devmem 0x40010040 32 0x00000B00 // gray mode, VSNP value 0x10
//this->event_thread.reset(new std::thread(&HCamDevice::HtCamEventWorkThread, this));
//this->event_thread.reset(new std::thread(&HCamDevice::auto_duty_cycle,this));
for (int i = 0; i < 20; i++)
{
//HtCamReadADCReg(i, &val);
HtCamReadADCReg_ALL(i);
//camera_print("ADDR: 0x%x, Value: 0x%x\n", i, val);
}
}
HCamDevice::~HCamDevice()
{
uninit_fpga();
close_device();
//HtCamExitVideoCapturing();
if (this->event_thread && this->event_thread->joinable())
this->event_thread->join();
}
int HCamDevice::init_fpga()
{
int fd = open("/dev/mem", O_RDWR | O_SYNC);
void *map_base = NULL;
camera_print("init_fpga open /dev/mem is:[%s]\n",fd < 0 ? "fail":"succeed");
if (fd < 0)
{
return fd;
}
memfd = fd;
map_base = mmap(NULL, AddrMapSize, PROT_READ | PROT_WRITE, MAP_SHARED, memfd, phyBaseAddr);
camera_print("init_fpga mmap base addr is:[%s]\n",map_base != MAP_FAILED ? "succeed":"fail");
if (map_base == MAP_FAILED)
{
close(fd);
return -1;
}
virBaseAddr = (uint32_t *)map_base;
return fd;
}
int HCamDevice::uninit_fpga()
{
if (virBaseAddr != NULL)
{
munmap(virBaseAddr, AddrMapSize);
virBaseAddr = NULL;
}
int ret = close(memfd);
camera_print("uninit_fpga close /dev/mem is :%s \n",ret < 0 ? "fail" : "succeed");
return ret;
}
int HCamDevice::open_device(int width , int height)
{
close_device();
v4lWidth = width;//@300dpi 5184, @600dpi 5184*2
v4lHeight = height ;//* 3;//color_h/gray_h = 3; e.g. for a target restored image height of 128, the FPGA captures 128 lines in gray mode and 128*3 in color mode
int fd = open(videoDevName.c_str(), O_RDWR, 0);
camera_print("open_device video_fd :%s is:[%s] \n",videoDevName.c_str(),fd < 0 ? "fail" : "succeed");
if (fd == -1)
{
return fd;
}
videofd = fd;
fd = open(subDeviceName.c_str(), O_RDWR, 0);
camera_print("open_device sub_device_fd :%s is:[%s] \n",subDeviceName.c_str(),fd < 0 ? "fail" : "succeed");
if (fd == -1)
{
camera_print("t Cam Cannot open subdevice file\n");
return -1;
}
subDevicefd = fd;
//set_width_hegith(width,height);
camera_print("open_device set width_hegith width:[%d] height:[%d] \r\n",width,height);
init_sample();
init_capture();
init_video_buffer();
return 0;
}
int HCamDevice::close_device()
{
if (videofd == -1 || subDevicefd == -1)
{
return 0;
}
HtCamStopVideoCapturing();
uint8_t i;
int ret = 0;
for (i = 0; i < v4lBufferCount; ++i)
{
ret = munmap(captureBufers[i].start, captureBufers[i].length);
if (-1 == ret )
camera_print("munmap [%d]\n",i);
}
ret = close(videofd);
videofd = -1;
camera_print("close video_fd :%s is:[%s] \n",videoDevName.c_str(),ret < 0 ? "fail" : "succeed");
ret = close(subDevicefd);
subDevicefd = -1;
camera_print("close sub_device_fd :%s is:[%s] \n",subDeviceName.c_str(),ret < 0 ? "fail" : "succeed");
return ret;
}
void HCamDevice::set_width_hegith(int width,int height)
{
v4lWidth = width;
v4lHeight = height;
}
void HCamDevice::HtCamSetdivder(bool is_enble)
{
if (virBaseAddr == NULL)
{
return ;
}
uint32_t *pCamCtrlReg = virBaseAddr;
CamZ_Reg_2Short rgb;
rgb.value = HtCamReadFpgaRegs(0x08);
// SCAN_PWM scan;
// scan.value = rgb.NShort[0];
// scan.bit.scan_pwm_enble = is_enble;//1 = off, 0 = on
// //// All of the following are defaults; the FPGA-side values are not accurate, so rewrite them
// scan.bit.scan_pwm_start = 1;
// scan.bit.scan_pwm_timer_runs = 1;
// scan.bit.scan_pwm_enble_o = 1;
// scan.bit.scan_pwm_input_clear = 0;
// scan.bit.scan_pwm_reset =0;
printf("rgb.NShort[0]:%d\r\n",rgb.NShort[0]);
uint32_t r = 0x0001001e;
if (!is_enble)
r=0x0001001F;
rgb.NShort[0] = r;
rgb.NShort[1] = 0x0001;
HtCamWriteFpgaRegs(0x08,rgb.value);
}
void HCamDevice::HtCamSetPeriod(double reg_value)
{
reg_value /= 100;
printf("reg_value:%f\r\n",reg_value);
double pr = reg_value == 0 ? 0x183C/2 :reg_value * 0x183C / 1;
if (virBaseAddr == NULL)
{
return ;
}
uint32_t *pCamCtrlReg = virBaseAddr;
CamZ_Reg_2Short rgb;
rgb.value = HtCamReadFpgaRegs(0x09);
rgb.NShort[0] = 0x183C; // period parameter, default 0x183C, must not be modified
rgb.NShort[1] = pr; // duty cycle register = (percentage / 100) * 0x183C
printf("PWM duty cycle %d\r\n",rgb.NShort[1]);
HtCamWriteFpgaRegs(0x09,rgb.value);
}
// void HCamDevice::HtCamGetPeriod(double &reg_value)
// {
// int reg = HtCamReadFpgaRegs(0x11);
// }
// void HCamDevice::HtCamSetLowLevelTimer(double reg_value)
// {
// reg_value /= 100;
// CamZ_Reg_11 reg;
// reg.value = HtCamReadFpgaRegs(0x11);
// int crossover_period = reg.div_reg * 10;//unit: ns
// int cycle = 1 / 1.6119 * 100000000;//unit: s, converted to ns
// reg.low_reg = cycle * (1 - reg_value) / crossover_period;
// }
// void HCamDevice::auto_duty_cycle()
// {
// std::unique_lock<std::mutex> lock(auto_duty_wait);
// while (true)
// {
// wait_auto_duty_.wait(lock);
// }
// }
int HCamDevice::HtCamEventWorkThread(void)
{
struct pollfd pfd;
pfd.fd = subDevicefd;
pfd.events = POLLPRI;
struct v4l2_event ev;
while (true)
{
if (pfd.fd > 0)
{
// camera_print("POLLING ON SUB-DEV \n");
if (poll(&pfd, 1, -1) > 0)
{
while (!ioctl(subDevicefd, VIDIOC_DQEVENT, &ev))
{
// if (this->getCameraEvents)
// {
// this->getCameraEvents(ev.type);
// }
// else
// {
// switch (ev.type)
// {
// case V4L2_EVENT_HTCAMMOD_OVERFREP:
// camera_print("Get Sub-dev event:0x%08x \n", ev.type);
// break;
// default:
// break;
// }
// }
}
}
}
}
return 0;
}
void HCamDevice::HtCamStartVideoCapturing()
{
uint8_t n_buffers;
struct v4l2_buffer buf;
for (n_buffers = 0; n_buffers < v4lBufferCount; n_buffers++)
{
memset(&buf, 0, sizeof(buf));
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
buf.length = nplanes;
buf.m.planes = (struct v4l2_plane *)calloc(buf.length, sizeof(struct v4l2_plane));
}
if (ioctl(videofd, VIDIOC_QBUF, &buf) == -1)
{
camera_err(" VIDIOC_QBUF error\n");
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
free(captureBufers);
close(videofd);
return;
}
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
}
enum v4l2_buf_type type;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(videofd, VIDIOC_STREAMON, &type) == -1)
{
camera_err(" VIDIOC_STREAMON error! %s\n", strerror(errno));
return;
}
else
{
//camera_print(" stream on succeed\n");
}
startFPGAScan();
return;
}
void HCamDevice::HtCamStopVideoCapturing()
{
enum v4l2_buf_type type;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(videofd, VIDIOC_STREAMOFF, &type) == -1)
camera_err(" VIDIOC_STREAMOFF error! %s\n", strerror(errno));
stopFPGAScan();
}
void HCamDevice::HtCamExitVideoCapturing()
{
// HtCamStopVideoCapturing();
// uint8_t i;
// for (i = 0; i < v4lBufferCount; ++i)
// {
// if (-1 == munmap(captureBufers[i].start, captureBufers[i].length))
// printf("munmap \n");
// }
// if(close(videofd)<0)
// {
// camera_err("close video fd error \n");
// }
// if(close(memfd)<0)
// {
// camera_err("close mem fd error \n");
// }
// if(close(subDevicefd)<0)
// {
// camera_err("close sub Device fd error \n");
// }
}
int HCamDevice::HtCamWaitVideoCapture(int msTimeout)
{
struct pollfd pfd;
pfd.fd = videofd;
pfd.events = POLLIN | POLLRDNORM;
if (poll(&pfd, 1, msTimeout) > 0)
return pfd.revents;
return 0;
}
int HCamDevice::HtCamReadCaptureFrame(void **pbuf, int timeout)
{
if (!HtCamWaitVideoCapture(timeout))
{
camera_err("read frame time out\n");
if ((lastSucceedBufferIndex + 1) >= v4lBufferCount)
*pbuf = captureBufers[0].start;
else
*pbuf = captureBufers[lastSucceedBufferIndex + 1].start;
return -1;
}
struct v4l2_buffer buf;
CLEAR(buf);
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
buf.length = nplanes;
buf.m.planes = (struct v4l2_plane *)calloc(nplanes, sizeof(struct v4l2_plane));
if (!buf.m.planes)
{
return -3;
}
}
int ret = 0;
if (( ret = ioctl(videofd, VIDIOC_DQBUF, &buf)) == 0) // if this dequeue fails, nothing below needs to run; the frame is already dropped anyway
{
camera_dbg("*****DQBUF[%d] FINISH*****\n", buf.index);
}
else
{
camera_print("****VIDIOC_DQBUF FAIL :%d*****\n",ret);
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
return -2;
}
if (ioctl(videofd, VIDIOC_QBUF, &buf) == 0) // same handling for the re-queue
{
camera_dbg("************QBUF[%d] FINISH**************\n", buf.index);
}
else
{
camera_print("****VIDIOC_QBUF FAIL*****\n");
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
return -2;
}
//camera_print("buf.bytesused is %d \r\n", buf.bytesused);
lastSucceedBufferIndex = buf.index;
*pbuf = captureBufers[buf.index].start;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
free(buf.m.planes);
}
return buf.index;
}
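// Usage sketch (hypothetical caller, not part of this class): a minimal capture
// loop built from the methods above, assuming they are callable from outside.
// Geometry and timeout values are illustrative only.
// HCamDevice cam;
// if (cam.open_device(5184, 513) == 0)
// {
//     cam.HtCamStartVideoCapturing();
//     void *frame = NULL;
//     for (int n = 0; n < 10; n++)
//     {
//         int idx = cam.HtCamReadCaptureFrame(&frame, 1000); // 1000 ms timeout
//         if (idx >= 0)
//         {
//             // frame points at v4lWidth * v4lHeight bytes of V4L2_PIX_FMT_GREY data
//         }
//     }
//     cam.HtCamStopVideoCapturing();
//     cam.close_device();
// }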
int HCamDevice::HtCamReadNextFrame(void **pbuf)
{
if ((lastSucceedBufferIndex + 1) >= v4lBufferCount)
*pbuf = captureBufers[0].start;
else
*pbuf = captureBufers[lastSucceedBufferIndex + 1].start;
return (lastSucceedBufferIndex++) ;
}
int HCamDevice::V4LBufferQuery()
{
int validCount = 0;
struct v4l2_buffer buf;
memset(&buf, 0, sizeof(buf));
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
buf.length = nplanes;
buf.m.planes = (struct v4l2_plane *)calloc(nplanes, sizeof(struct v4l2_plane));
}
for (int i = 0; i < v4lBufferCount; i++)
{ // query every requested buffer
buf.index = i;
if (ioctl(videofd, VIDIOC_QUERYBUF, &buf) == -1)
{
camera_dbg("Failed to query buffer");
}
if(buf.bytesused)
validCount ++ ;
}
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
return validCount ;
}
void HCamDevice::stopFPGAScan()
{
uint32_t REG2 = HtCamReadFpgaRegs(0x02);
REG2 &= ~0x04;
HtCamWriteFpgaRegs(0x02, REG2);
HtCamStopSampling();
}
void HCamDevice::startFPGAScan()
{
uint32_t REG2 = HtCamReadFpgaRegs(0x02);
REG2 |= 0x04;
HtCamWriteFpgaRegs(0x02, REG2);
HtCamStartSampling();
}
// This function is prohibited
int HCamDevice::HtCamStartSampling()
{
//ioctl(subDevicefd,0x40,NULL);
uint32_t *CamReg;
if (virBaseAddr == NULL)
return -1;
CamReg = (uint32_t *)virBaseAddr;
CamReg[10] &= ~(HT_CAM_REG_CR_STARTSAMPLE_MASK);
CamReg[10] |= (HT_CAM_REG_CR_STARTSAMPLE_MASK);
return 0;
}
// This function is prohibited
int HCamDevice::HtCamStopSampling()
{
uint32_t *CamReg;
if (virBaseAddr == NULL)
return -1;
CamReg = (uint32_t *)virBaseAddr;
CamReg[10] |= (HT_CAM_REG_CR_STOPSAMPLE_MASK);
CamReg[10] &= ~(HT_CAM_REG_CR_STOPSAMPLE_MASK);
//ioctl(subDevicefd,0x40,NULL);
return 0;
}
int HCamDevice::init_sample()
{
struct v4l2_subdev_format subdev_fmt;
int ret;
memset(&subdev_fmt, 0, sizeof(subdev_fmt));// zero the whole struct
subdev_fmt.pad = 0;
subdev_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
subdev_fmt.format.width = v4lWidth;
subdev_fmt.format.height = v4lHeight;
ret = ioctl(subDevicefd, VIDIOC_SUBDEV_S_FMT, &subdev_fmt);
// ret = ioctl(subDevicefd, 0x40010034, 0x00CE0080);/////////////////////////////////////////////////////
if (ret < 0)
printf("VIDIOC_SUBDEV_S_FMT failed.\n");
struct v4l2_event_subscription sub;
memset(&sub, 0, sizeof(sub));
sub.type = V4L2_EVENT_HTCAMMOD_OVERFREP;
ret = ioctl(subDevicefd, VIDIOC_SUBSCRIBE_EVENT, &sub);
if (ret < 0)
printf("VIDIOC_SUBSCRIBE_EVENT failed.\n");
return ret;
}
int HCamDevice::init_capture()
{
struct v4l2_format fmt;
struct v4l2_control ctrl;
CLEAR(fmt);
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.width = v4lWidth;
fmt.fmt.pix_mp.height = v4lHeight;
fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_GREY;
fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;
fmt.fmt.pix_mp.num_planes = nplanes;
}
else
{
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = v4lWidth;
fmt.fmt.pix.height = v4lHeight;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_GREY;
fmt.fmt.pix.field = V4L2_FIELD_NONE;
}
int ioctlRet = ioctl(videofd, VIDIOC_S_FMT, &fmt) ;
if ( ioctlRet < 0)
{
camera_err("error %s\n" , strerror(errno));
camera_err(" setting the data format failed!width %d height %d planes %d \n" , fmt.fmt.pix_mp.width,fmt.fmt.pix_mp.height , fmt.fmt.pix_mp.num_planes );
camera_print(" fmt.type = %d\n", fmt.type);
camera_print(" fmt.fmt.pix.width = %d\n", fmt.fmt.pix_mp.width);
camera_print(" fmt.fmt.pix.height = %d\n", fmt.fmt.pix_mp.height);
camera_print(" fmt.fmt.pix.field = %d\n", fmt.fmt.pix_mp.field);
close_device();
return -1;
}
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
if (v4lWidth != fmt.fmt.pix_mp.width || v4lHeight != fmt.fmt.pix_mp.height)
camera_err(" does not support %u * %u\n", v4lWidth, v4lHeight);
v4lWidth = fmt.fmt.pix_mp.width;
v4lHeight = fmt.fmt.pix_mp.height;
camera_print(" VIDIOC_S_FMT succeed\n");
camera_print(" fmt.type = %d\n", fmt.type);
camera_print(" fmt.fmt.pix.width = %d\n", fmt.fmt.pix_mp.width);
camera_print(" fmt.fmt.pix.height = %d\n", fmt.fmt.pix_mp.height);
camera_print(" fmt.fmt.pix.field = %d\n", fmt.fmt.pix_mp.field);
if (ioctl(videofd, VIDIOC_G_FMT, &fmt) < 0)
camera_err(" get the data format failed!\n");
nplanes = fmt.fmt.pix_mp.num_planes;
}
else
{
if (v4lWidth != fmt.fmt.pix.width || v4lHeight != fmt.fmt.pix.height)
camera_err(" does not support %u * %u\n", v4lWidth, v4lHeight);
v4lWidth = fmt.fmt.pix.width;
v4lHeight = fmt.fmt.pix.height;
camera_print(" VIDIOC_S_FMT succeed\n");
camera_print(" fmt.type = %d\n", fmt.type);
camera_print(" fmt.fmt.pix.width = %d\n", fmt.fmt.pix.width);
camera_print(" fmt.fmt.pix.height = %d\n", fmt.fmt.pix.height);
camera_print(" fmt.fmt.pix.field = %d\n", fmt.fmt.pix.field);
}
CLEAR(ctrl);
ctrl.id = (V4L2_CID_USER_BASE + 0xc121); // V4L2_CID_XILINX_LOW_LATENCY
ctrl.value = (1 << 2); // XVIP_LOW_LATENCY_DISABLE
if (ioctl(videofd, VIDIOC_S_CTRL, &ctrl) < 0)
{
camera_err("Fail to set control:%s.\n", strerror(errno));
}
return 0;
}
int HCamDevice::init_video_buffer()
{
struct v4l2_requestbuffers req;
struct v4l2_buffer buf;
CLEAR(req);
req.count = v4lBufferCount;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = v4l2memtype;
if (ioctl(videofd, VIDIOC_REQBUFS, &req) < 0)
{
camera_err(" VIDIOC_REQBUFS failed\n");
close(videofd);
return -1;
}
v4lBufferCount = req.count;
camera_print(" reqbuf number is %d\n", v4lBufferCount);
if (captureBufers != NULL)
free(captureBufers);
captureBufers = (captureBuffer *)calloc(v4lBufferCount, sizeof(struct captureBuffer));
uint8_t n_buffers;
for (n_buffers = 0; n_buffers < req.count; ++n_buffers)
{
CLEAR(buf);
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = v4l2memtype;
buf.index = n_buffers;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
buf.length = nplanes;
buf.m.planes = (struct v4l2_plane *)calloc(buf.length, sizeof(struct v4l2_plane));
}
if (ioctl(videofd, VIDIOC_QUERYBUF, &buf) == -1)
{
camera_err(" VIDIOC_QUERYBUF error\n");
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
free(captureBufers);
close(videofd);
return -1;
}
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
captureBufers[n_buffers].length = buf.m.planes[0].length;
captureBufers[n_buffers].start = mmap(NULL,
buf.m.planes[0].length,
PROT_READ /*|PROT_WRITE*/,
MAP_SHARED, videofd,
buf.m.planes[0].m.mem_offset);
camera_dbg(" map buffer index: %d, mem: %p, len: %x, offset: %x\n",
n_buffers, captureBufers[n_buffers].start, buf.m.planes[0].length,
buf.m.planes[0].m.mem_offset);
free(buf.m.planes);
}
else
{
captureBufers[n_buffers].length = buf.length;
captureBufers[n_buffers].start = mmap(NULL,
buf.length,
PROT_READ /*| PROT_WRITE*/,
MAP_SHARED, videofd,
buf.m.offset);
camera_dbg(" map buffer index: %d, mem: %p, len: %x, offset: %x\n",
n_buffers, captureBufers[n_buffers].start, buf.length, buf.m.offset);
}
}
lastSucceedBufferIndex = (n_buffers - 1);
return 1;
}
void HCamDevice::HtCamSwitchSampleModes(uint8_t mode)
{
if (virBaseAddr == NULL)
{
return ;
}
uint32_t *pCamCtrlReg = virBaseAddr;
if (!mode)
pCamCtrlReg[4] &= ~(0x00020000);
else
pCamCtrlReg[4] |= (0x00020000);
}
void HCamDevice::HtCamChangeExposureValueB(uint32_t* value)
{
if (virBaseAddr == NULL)
{
return ;
}
uint32_t *pCamCtrlReg = virBaseAddr;
pCamCtrlReg[5] = value[0]<<16;//G channel
std::this_thread::sleep_for(std::chrono::milliseconds(5));
CamZ_Reg_2Short rgb;
rgb.value = HtCamReadFpgaRegs(0x06);
rgb.NShort[0] = value[1]; // R channel
rgb.NShort[1] = value[2]; // B channel
HtCamWriteFpgaRegs(0x06,rgb.value);
// pCamCtrlReg[5] = 0x00000000;
// pCamCtrlReg[6] = 0x00000000;
// pCamCtrlReg[5] |= (uint32_t)(value[1]); // RED
// std::this_thread::sleep_for(std::chrono::milliseconds(5));
// pCamCtrlReg[5] |= (uint32_t)(value[1] << 16);
// std::this_thread::sleep_for(std::chrono::milliseconds(5));
// pCamCtrlReg[6] |= (uint32_t)(value[0]); // GREEN
// std::this_thread::sleep_for(std::chrono::milliseconds(5));
// pCamCtrlReg[6] |= ((uint32_t)value[2] << 16); // BLUE
std::this_thread::sleep_for(std::chrono::milliseconds(5));
printf("曝光B R:%d G:%d B:%d \r\n",value[0],value[1],value[2]);
}
void HCamDevice::HtCamChangeExposureValueF(uint32_t* value)
{
if (virBaseAddr == NULL)
{
return ;
}
uint32_t *pCamCtrlReg = virBaseAddr;
pCamCtrlReg[11] = value[0] << 16;//G channel
std::this_thread::sleep_for(std::chrono::milliseconds(5));
CamZ_Reg_2Short rgb;
rgb.value = HtCamReadFpgaRegs(0x0C);
rgb.NShort[0] = value[1];// R channel
rgb.NShort[1] = value[2]; // B channel
HtCamWriteFpgaRegs(0x0C,rgb.value);
// pCamCtrlReg[11] = 0x00000000;
// pCamCtrlReg[12] = 0x00000000;
// pCamCtrlReg[11] |= (uint32_t)(value[1]); // RED
// std::this_thread::sleep_for(std::chrono::milliseconds(5));
// pCamCtrlReg[11] |= (uint32_t)(value[1] << 16);
// std::this_thread::sleep_for(std::chrono::milliseconds(5));
// pCamCtrlReg[12] |= (uint32_t)(value[0]); // GREEN
// std::this_thread::sleep_for(std::chrono::milliseconds(5));
// pCamCtrlReg[12] |= ((uint32_t)value[2] << 16); // BLUE
std::this_thread::sleep_for(std::chrono::milliseconds(5));
printf("曝光F R:%d G:%d B:%d \r\n",value[0],value[1],value[2]);
}
void HCamDevice::HtCamWriteADCReg_ALL(bool is_gain,bool is_adc1,uint8_t addr, uint8_t data)
{
if (virBaseAddr == NULL)
return ;
if (is_gain)
{
addr++;
addr *= 2; // gain registers
}
else
addr += 0x0e; // offset registers
//printf("%s addr:%d value:%d \r\n",is_gain?"gain":"offset",addr,data);
uint32_t *pCamCtrlReg = virBaseAddr;
uint32_t AdcRegFrame = 0x0000;
uint32_t EnableAdc1Write = 0x2000; // ADC1 write enable
uint32_t EnableAdc2Write = 0x4000; // ADC2 write enable
adcTiming *pAdcRegFrame = (adcTiming *)&AdcRegFrame;
// if (addr > 20)
// return;
pADCReg[addr] = data;
pAdcRegFrame->rwbit = 0;
pAdcRegFrame->regAddr = addr;
pAdcRegFrame->regData = data;
pAdcRegFrame->rwbit1 = 0;
pAdcRegFrame->regAddr1 = addr;
pAdcRegFrame->regData1 = data;
if (is_adc1) //adc 1
{
pCamCtrlReg[7] = (uint32_t)AdcRegFrame;
pCamCtrlReg[4] &= ~(EnableAdc2Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[4] |= (EnableAdc2Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
else // adc2
{
pCamCtrlReg[0] = (uint32_t)AdcRegFrame;
pCamCtrlReg[4] &= ~(EnableAdc1Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[4] |= (EnableAdc1Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
}
void HCamDevice::HtCamWriteADCReg(uint8_t addr, uint8_t data)
{
uint32_t *pCamCtrlReg = virBaseAddr;
uint32_t AdcRegFrame = 0x0000;
uint32_t EnableAdc1Write = 0x2000; // ADC1 write enable
uint32_t EnableAdc2Write = 0x4000; // ADC2 write enable
adcTiming *pAdcRegFrame = (adcTiming *)&AdcRegFrame;
// if (addr > 20)
// return;
pADCReg[addr] = data;
pAdcRegFrame->rwbit = 0;
pAdcRegFrame->regAddr = addr;
pAdcRegFrame->regData = data;
pAdcRegFrame->rwbit1 = 0;
pAdcRegFrame->regAddr1 = addr;
pAdcRegFrame->regData1 = data;
pCamCtrlReg[7] = (uint32_t)AdcRegFrame;
pCamCtrlReg[4] &= ~(EnableAdc2Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[4] |= (EnableAdc2Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[0] = (uint32_t)AdcRegFrame;
pCamCtrlReg[4] &= ~(EnableAdc1Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[4] |= (EnableAdc1Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
void HCamDevice::HtCamSetLowPower(bool val)
{
if (val)
{
printf("重置\r\n");
ioctl(subDevicefd,200,NULL);
}
else
{
printf("val :%d\r\n",val);
CamZ_Reg_4_New low_power ;
low_power.value = HtCamReadFpgaRegs(0x04);
low_power.soft_rst_n = val;
HtCamWriteFpgaRegs(0x04 , low_power.value);
low_power.value = HtCamReadFpgaRegs(0x04);
printf("low_power:%d\r\n",low_power.soft_rst_n);
}
}
void HCamDevice::HtCamReadADCReg_ALL(uint8_t addr)
{
if (virBaseAddr == NULL)
return ;
//printf("read addr :%0x\r\n",addr);
uint32_t *pCamCtrlReg = virBaseAddr;
uint32_t EnableAdcWrite1 = (1 << 14);
uint32_t EnableAdcWrite2 = (1 << 13);
CIS_ADC_NEW pAdcRegFrame ;
// if (addr > 20)
// return;
pAdcRegFrame.bit.rwbit = 1;
pAdcRegFrame.bit.regAddr = addr;
pAdcRegFrame.bit.regData = 0;
pAdcRegFrame.bit.rwbit1 = 1;
pAdcRegFrame.bit.regAddr1 = addr;
pAdcRegFrame.bit.regData1 = 0;
pCamCtrlReg[0] = pAdcRegFrame.value;
pCamCtrlReg[4] &= ~(EnableAdcWrite2);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[4] |= (EnableAdcWrite2);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pAdcRegFrame.value = HtCamReadFpgaRegs(0x00);
// printf("pAdcRegFrame->value 1111:%0x\r\n",pAdcRegFrame.value);
// printf("pAdcRegFrame->11111 :%d\r\n",pAdcRegFrame.bit.regData);
pCamCtrlReg[7] = pAdcRegFrame.value;
pCamCtrlReg[4] &= ~(EnableAdcWrite1);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[4] |= (EnableAdcWrite1);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pAdcRegFrame.value = HtCamReadFpgaRegs(0x07);
// printf("pAdcRegFrame->value 2222:%0x\r\n",pAdcRegFrame.value);
// printf("pAdcRegFrame->regData2222 :%d\r\n",pAdcRegFrame.bit.regData);
CamZ_Reg_2Short rgb;
rgb.value = HtCamReadFpgaRegs(0x01);
CIS_ADC_NEW c ,b;
c.value = rgb.NShort[0];
b.value = rgb.NShort[1];
printf("adc1 data:[%d] adc2 data:[%d]\r\n",b.bit.regData , c.bit.regData);
}
void HCamDevice::HtCamReadADCReg(uint8_t addr, uint8_t *data)
{
if (virBaseAddr == NULL)
{
return ;
}
uint32_t *pCamCtrlReg = virBaseAddr;
uint32_t AdcRegFrame = 0x0000;
uint32_t EnableAdcWrite = (1 << 16);
uint32_t tempData;
CIS_ADC_NEW pAdcRegFrame ;
// if (addr > 0x14)
// return;
pAdcRegFrame.bit.rwbit = 1;
pAdcRegFrame.bit.regAddr = addr;
pAdcRegFrame.bit.regData = 0;
pAdcRegFrame.bit.rwbit1 = 1;
pAdcRegFrame.bit.regAddr1 = addr;
pAdcRegFrame.bit.regData1 = 0;
pCamCtrlReg[7] = pAdcRegFrame.value;
pCamCtrlReg[4] &= ~(EnableAdcWrite);
std::this_thread::sleep_for(std::chrono::milliseconds(5));
pCamCtrlReg[4] |= (EnableAdcWrite);
std::this_thread::sleep_for(std::chrono::milliseconds(10));
pAdcRegFrame.value = HtCamReadFpgaRegs(0x07);
printf("pAdcRegFrame->value:%0x addr:%d \r\n",pAdcRegFrame.value,addr);
printf("pAdcRegFrame->regData :%d\r\n",pAdcRegFrame.bit.regData);
}
uint8_t HCamDevice::getADCReg(int addr)
{
// if(addr > adcRegSize)
// return 0 ;
// return pADCReg[addr];
return 0; // body disabled above; return a defined value instead of falling off the end
}
void HCamDevice::setADCReg(int addr , uint8_t value)
{
// if(addr > adcRegSize)
// return ;
// pADCReg[addr] = value ;
}
// void HCamDevice::saveADCReg()
// {
// CamADCConfig adcConfig( _ADCPATH);
// int size = adcConfig.getSize();
// size = adcRegSize < size ? adcRegSize : size ;
// std::vector<int> adcValue(size) ;
// for(int index =0 ; index < size ; index++)
// {
// adcValue[index] = pADCReg[index];
// }
// if(!adcConfig.saveValue(adcValue))
// {
// std::cout << "Save adc Value Error!" << std::endl ;
// }
// }
uint32_t HCamDevice::HtCamReadFpgaRegs(uint8_t reg_addr)
{
uint32_t *pCamCtrlReg = virBaseAddr;
unsigned int u = pCamCtrlReg[reg_addr] ;
// FILE *fp;
// fp = fopen("/home/root/logF", "a+");
// fprintf(fp , "read reg %d , value : %d\r\n" ,reg_addr , u );
// fclose(fp);
// outFile << "PC read reg " << (int)reg_addr <<" value : " << u << std::endl;
return pCamCtrlReg[reg_addr] ;
}
void HCamDevice::HtCamSetSpTime(uint32_t reg_value,uint32_t val)
{
CamZ_Reg_2Short sp_time;
sp_time.value = HtCamReadFpgaRegs(0x03);
sp_time.NShort[1] = reg_value;
sp_time.NShort[0] = val;
HtCamWriteFpgaRegs(0x03 , sp_time.value);
printf("-----------------------灰度内触发:[%d] 读写间隔:[%d]------------------ \r\n",reg_value,val);
}
void HCamDevice::HtCamGetSpTime(uint32_t &reg_value)
{
CamZ_Reg_2Short sp_time;
sp_time.value = HtCamReadFpgaRegs(0x03);
reg_value = sp_time.NShort[1];
}
void HCamDevice::HtCamSetSpTime2(uint32_t reg_value)
{
HtCamWriteFpgaRegs(0x13 , reg_value);
printf("-----------------------彩色内触发:[%d]\r\n",reg_value);
}
void HCamDevice::HtCamSetStSp(int start_sample)
{
//sp_time.NShort[1] = 0x00D0;//gray
//sp_time.NShort[1] = 0x00C8;//color
CamZ_Reg_2Short st_sp;
st_sp.value = HtCamReadFpgaRegs(0x0d);
//st_sp.NShort[0] = 0x0200;
// if (color)
// st_sp.NShort[1] = 0x00C8; //color
// else
// st_sp.NShort[1] = 0x00d0; //gray, 2023 8/10 0x00C8
st_sp.NShort[1] = start_sample; //gray, 2023-8-3
HtCamWriteFpgaRegs(0x0d , st_sp.value);
printf(" -----------------------st_sp %d------------------\r\n",start_sample);
}
void HCamDevice::HtCamGetStSp(uint32_t &start_sample)
{
CamZ_Reg_2Short st_sp;
st_sp.value = HtCamReadFpgaRegs(0x0d);
start_sample = st_sp.NShort[1];
}
void HCamDevice::HtCamSetVsnpTime(int color)
{
CamZ_Reg_2char st_sp;
st_sp.value = HtCamReadFpgaRegs(0x10);
st_sp.NChar[0] = color;
st_sp.NChar[1] = color;
HtCamWriteFpgaRegs(0x10 , st_sp.value);
}
void HCamDevice::HtCamSetFrameCnt(uint32_t val)
{
uint32_t *pCamCtrlReg = virBaseAddr;
uint32_t ss = val;
HtCamWriteFpgaRegs(0x14,ss);
}
void HCamDevice::HtCamGetFrameCnt(uint32_t &val)
{
val = HtCamReadFpgaRegs(0x14);
//camera_print("HtCamGetFrameCnt:%d\r\n",val);
}
void HCamDevice::HtCamGetFrameNum(uint32_t &val)
{
val = HtCamReadFpgaRegs(0x15);
camera_print(" HtCamGetFrameNum :%d\n",val);
}
void HCamDevice:: HtCamWriteFpgaRegs(uint8_t reg_addr, uint32_t reg_value)
{
uint32_t *pCamCtrlReg = virBaseAddr;
pCamCtrlReg[reg_addr] = reg_value;
}
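// Sketch (hypothetical helper, not declared in the header): most register updates in
// this file are read-modify-write cycles on top of HtCamReadFpgaRegs/HtCamWriteFpgaRegs.
// void HCamDevice::HtCamUpdateFpgaBits(uint8_t reg_addr, uint32_t mask, uint32_t bits)
// {
//     uint32_t v = HtCamReadFpgaRegs(reg_addr); // read the current register value
//     v = (v & ~mask) | (bits & mask);          // clear the masked field, set the new bits
//     HtCamWriteFpgaRegs(reg_addr, v);          // write the result back
// }
// For example, startFPGAScan() above is equivalent to HtCamUpdateFpgaBits(0x02, 0x04, 0x04).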
void HCamDevice::HtCamChangeMonoStartSample(int start_sample)
{
uint32_t *pCamCtrlReg = virBaseAddr;
uint32_t ss = start_sample;
pCamCtrlReg[13] &= ~(0xFFFF0000); // clear
pCamCtrlReg[13] |= (uint32_t)(ss << 16);
}
void HCamDevice::ChangeScanState(bool state)
{
CamZ_Reg_4 reg4;
reg4.value = HtCamReadFpgaRegs(4);
reg4.gpio1 = state ;
HtCamWriteFpgaRegs(4, reg4.value);
}
void HCamDevice::HtCamChangeTriggerInAndEXt(int in_out)
{
uint32_t *pCamCtrlReg = virBaseAddr;
if (in_out)
{
pCamCtrlReg[10] |= (0x00000040);
}
else
{
pCamCtrlReg[10] &= ~(0x00000040);
}
}
void HCamDevice::HtCamSetDpi(int dpi)
{
CamZ_Reg_A_New rega;
rega.value = HtCamReadFpgaRegs(0x0A);
if (dpi == 1 )
{
rega.dpi_200 = 1;
rega.dpi = 1;
}
else if (dpi == 2)
{
rega.dpi_200 = 0;
rega.dpi = 1;
}
else
{
rega.dpi_200 = 0;
rega.dpi = 0;
}
HtCamWriteFpgaRegs(0x0A,rega.value);
}
void HCamDevice::HtCamSetClolr(int color)
{
CamZ_Reg_4_New reg_4;
reg_4.value=HtCamReadFpgaRegs(0x04);
reg_4.color_mode = color;
reg_4.en_frameset = 1; // frame counting enabled by default
reg_4.en_pattern = 0;
reg_4.en_line = 1;
HtCamWriteFpgaRegs(0x04,reg_4.value);
// HtCamSetStSp(color);
// HtCamSetVsnpTime(color);
}
void HCamDevice::HtCamSetTriggerMode(int val)
{
uint32_t *pCamCtrlReg = virBaseAddr;
if (val)
pCamCtrlReg[10] |= 0x00000040;
else
pCamCtrlReg[10] &= ~(0x00000040);
}
int HCamDevice::HtCamGetTriggerMode()
{
uint32_t *pCamCtrlReg = virBaseAddr;
return ((pCamCtrlReg[10] & (0x00000040)) >> 6);
}
int HCamDevice::HtCamGetColorMode()
{
uint32_t *pCamCtrlReg = virBaseAddr;
return ((pCamCtrlReg[4] & (0x00000004)) >> 2);
}
void HCamDevice::init_ps_regs()
{
}
#include <fstream>
void HCamDevice::savePsReg()
{
}
void HCamDevice::HtCamWriteAllADC()
{
for (int index = 0; index < 20 ; index++)
{
HtCamWriteADCReg(index, pADCReg[index]);
}
}
void HCamDevice::HtCamInitADCReg()
{
pADCReg[0] = 0x07;
pADCReg[1] = 0x50;//0x50;
/* gain */
pADCReg[2] = 0x90;
pADCReg[3] = 0x00;
pADCReg[4] = 0x90;
pADCReg[5] = 0x00;
pADCReg[6] = 0x90;
pADCReg[7] = 0x00;
pADCReg[8] = 0x90;
pADCReg[9] = 0x00;
pADCReg[0xa] = 0x90;
pADCReg[0xb] = 0x00;
pADCReg[0xc] = 0x90;
pADCReg[0xd] = 0x00;
/* offset */
pADCReg[0xe] = 0x58;
pADCReg[0xf] = 0x5b;
pADCReg[0x10] = 0x55;
pADCReg[0x11] = 0x55;
pADCReg[0x12] = 0x50;
pADCReg[0x13] = 0x55;//0x55;
}
void HCamDevice::HtCamResizeBuffer(int width, int height, int number)
{
v4lWidth = width;
v4lHeight = height;
v4lBufferCount = number;
close_device();
init_sample();
init_capture();
init_video_buffer();
camera_dbg("v4lWidth = %d, v4lHeight = %d, v4lBufferCount = %d\n", v4lWidth, v4lHeight, v4lBufferCount);
}
void HCamDevice::HtCamOverClockClear()
{
uint32_t *pCamCtrlReg = virBaseAddr;
pCamCtrlReg[10] |= (0x00001000);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[10] &= ~(0x00001000);
}