// zynq_7010/HCamDevice.cpp

#include "HCamDevice.h"
#include <stdint.h>
#include <thread>
#include <chrono>
#include <string>
#include <poll.h>
#include <sys/epoll.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <string.h>
// #include "cameraConfig.h"
#include "CameraParams.h"
#include <linux/v4l2-subdev.h>
//#include "camconfig.h"
#include <iostream>
#include <errno.h>
#define ADC_82V48
#define HT_CAM_REG_CR_STARTSAMPLE_MASK 0x00000001
#define HT_CAM_REG_CR_STOPSAMPLE_MASK 0x00000002
#define HT_CAM_REG_CR_CHANGE_MASK 0x00020000
#define camera_print(...) (printf("L%d(%s):", __LINE__, __FILE__), \
printf(__VA_ARGS__))
#ifdef DEBUG_PRINT
#define camera_dbg(...) (printf("L%d(%s):", __LINE__, __FILE__), \
printf(__VA_ARGS__))
#else
#define camera_dbg(...) ((void)0)
#endif
#define camera_err(...) (printf("L%d(%s):", __LINE__, __FILE__), \
printf(__VA_ARGS__))
#define CLEAR(x) memset(&(x), 0, sizeof(x))
#pragma pack(1)
#ifdef ADC_82V38
#pragma pack(1)
typedef struct ADC_82V38_Timing
{
uint32_t regData : 9;
uint32_t : 3;
uint32_t regAddr : 3;
uint32_t rwbit : 1;
uint32_t regData1 : 9;
uint32_t : 3;
uint32_t regAddr1 : 3;
uint32_t rwbit1 : 1;
} adcTiming;
#endif
#ifdef ADC_82V48
struct adcTiming
{
uint32_t regData : 8;
uint32_t : 2;
uint32_t regAddr : 5;
uint32_t rwbit : 1;
uint32_t regData1 : 8;
uint32_t : 2;
uint32_t regAddr1 : 5;
uint32_t rwbit1 : 1;
};
#endif
#define V4L2_EVENT_HTCAMMOD_CLASS (V4L2_EVENT_PRIVATE_START | 0x1000)
#define V4L2_EVENT_HTCAMMOD_OVERFREP (V4L2_EVENT_HTCAMMOD_CLASS | 0X1)
// errno comes from <errno.h>; no separate extern declaration is needed
HCamDevice::HCamDevice()
{
virBaseAddr = NULL;
memfd = 0;
subDeviceName = "/dev/v4l-subdev0";
videoDevName = "/dev/video0";
subDevicefd = 0;
videofd = 0;
v4lWidth = 5184;//@300dpi 5184 @600dpi 5184*2
v4lHeight = 512 ;//* 3; // color_h/gray_h = 3: e.g. if the target reconstructed image height is 128, the FPGA captures 128 lines in grayscale and 128*3 in color
v4lBufferCount = 10;
v4l2buftype = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
nplanes = 1; // only use one plane
drivertype = V4L2_CAP_VIDEO_CAPTURE_MPLANE;
v4l2memtype = V4L2_MEMORY_MMAP;
captureBufers = NULL;
init_fpga();
uint8_t val = 0;
for (int i = 0; i < 20; i++)
{
HtCamReadADCReg(i, &val);
camera_print("ADDR: 0x%x, Value: 0x%x\n", i, val);
}
HtCamSetDpi(1); // default 300 dpi
HtCamSetClolr(0); // default grayscale mode
HtCamSetFrameCnt(10);
uint32_t val1 = 0;
HtCamGetFrameCnt(val1);
//HtCamSwitchSampleModes(0); // test pattern data
// HtCamChangeExposureValue(500); // exposure
// start sample
camera_dbg("ST SP : %d , VSNP : %d \r\n" , ST_SP , VSNP);
//HtCamChangeMonoStartSample(600);//200 * 3 + CamProperty::ST_SP_VSNP
// if( ST_SP != 0 || ST_SP_VSNP != 0)
// HtCamChangeMonoStartSample( ST_SP * 3 + ST_SP_VSNP);
if( VSNP != 0)
{
// NOTE: the value read back is discarded (it is reset to 0 below), so only the VSNP byte ends up in register 16
unsigned int value = HtCamReadFpgaRegs(16);
value=0;
value = value & 0xffffff00;
HtCamWriteFpgaRegs(16, VSNP | value);
}
//HtCamChangeTriggerInAndEXt(1);
HtCamInitADCReg();
HtCamWriteAllADC();
//devmem 0x40010040 32 0x00000B00 // grayscale mode, VSNP value 0x10
//this->event_thread.reset(new std::thread(&HCamDevice::HtCamEventWorkThread, this));
}
HCamDevice::~HCamDevice()
{
uninit_fpga();
close_device();
//HtCamExitVideoCapturing();
if (this->event_thread && this->event_thread->joinable())
this->event_thread->join();
}
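/*
 * init_fpga(): maps the FPGA control-register window into user space by
 * opening /dev/mem and mmap()ing AddrMapSize bytes at phyBaseAddr.
 * virBaseAddr is then used throughout this file as a uint32_t register
 * array (one 32-bit control register per index).
 */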
int HCamDevice::init_fpga()
{
int fd = open("/dev/mem", O_RDWR | O_SYNC);
void *map_base = NULL;
camera_print("init_fapa open /dev/mem is:[%s]\n",fd < 0 ? "fail":"suceed");
if (fd < 0)
{
return fd;
}
memfd = fd;
map_base = mmap(NULL, AddrMapSize, PROT_READ | PROT_WRITE, MAP_SHARED, memfd, phyBaseAddr);
camera_print("init_fapa init mmap base addr is:[%s]\n",map_base != NULL ? "suceed":"fail");
if (map_base == NULL)
{
close(fd);
return -1;
}
virBaseAddr = (uint32_t *)map_base;
return fd;
}
int HCamDevice::uninit_fpga()
{
int ret = close(memfd);
camera_print("uninit_fpga clsoe /dev/mem is :%s \n",ret < 0 ? "fail" : "succeed");
return ret;
}
int HCamDevice::open_device(int width , int height)
{
v4lWidth = width;//@300dpi 5184 @600dpi 5184*2
v4lHeight = height ;//* 3; // color_h/gray_h = 3: see the note in the constructor
int fd = open(videoDevName.c_str(), O_RDWR, 0);
camera_print("open_device video fd %s: [%s]\n", videoDevName.c_str(), fd < 0 ? "fail" : "succeed");
if (fd == -1)
{
return fd;
}
videofd = fd;
fd = open(subDeviceName.c_str(), O_RDWR, 0);
camera_print("open_device sub-device fd %s: [%s]\n", subDeviceName.c_str(), fd < 0 ? "fail" : "succeed");
if (fd == -1)
{
camera_print("Ht Cam cannot open subdevice file\n");
return -1;
}
subDevicefd = fd;
//set_width_hegith(width,height);
camera_print("open_device set width:[%d] height:[%d]\r\n", width, height);
init_sample();
init_capture();
init_video_buffer();
return 0;
}
int HCamDevice::close_device()
{
HtCamStopVideoCapturing();
uint8_t i;
int ret = 0;
for (i = 0; i < v4lBufferCount; ++i)
{
ret = munmap(captureBufers[i].start, captureBufers[i].length);
if (-1 == ret)
camera_print("munmap failed [%d]\n", i);
}
ret = close(videofd);
camera_print("close video_fd :%s is:[%s] \n",videoDevName.c_str(),ret < 0 ? "fail" : "succeed");
ret = close(subDevicefd);
camera_print("close sub_device_fd :%s is:[%s] \n",subDeviceName.c_str(),ret < 0 ? "fail" : "succeed");
return ret;
}
void HCamDevice::set_width_hegith(int width, int height)
{
v4lWidth = width;
v4lHeight = height;
}
int HCamDevice::HtCamEventWorkThread(void)
{
struct pollfd pfd;
pfd.fd = subDevicefd;
pfd.events = POLLPRI;
struct v4l2_event ev;
while (true)
{
if (pfd.fd > 0)
{
// camera_print("POLLING ON SUB-DEV \n");
if (poll(&pfd, 1, -1) > 0)
{
while (!ioctl(subDevicefd, VIDIOC_DQEVENT, &ev))
{
// if (this->getCameraEvents)
// {
// this->getCameraEvents(ev.type);
// }
// else
// {
// switch (ev.type)
// {
// case V4L2_EVENT_HTCAMMOD_OVERFREP:
// camera_print("Get Sub-dev event:0x%08x \n", ev.type);
// break;
// default:
// break;
// }
// }
}
}
}
}
return 0;
}
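/*
 * HtCamStartVideoCapturing(): standard V4L2 start sequence - queue every
 * allocated buffer with VIDIOC_QBUF, start streaming with VIDIOC_STREAMON,
 * then enable the FPGA scan logic via startFPGAScan(). cam_run_status in
 * the PS register block records whether streaming actually started.
 */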
void HCamDevice::HtCamStartVideoCapturing()
{
uint8_t n_buffers;
struct v4l2_buffer buf;
for (n_buffers = 0; n_buffers < v4lBufferCount; n_buffers++)
{
memset(&buf, 0, sizeof(buf));
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
buf.length = nplanes;
buf.m.planes = (struct v4l2_plane *)calloc(buf.length, sizeof(struct v4l2_plane));
}
if (ioctl(videofd, VIDIOC_QBUF, &buf) == -1)
{
((CAM_INFO_REG *)&pPsReg[CAM_INFO])->cam_run_status = 0;
camera_err(" VIDIOC_QBUF error\n");
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
free(captureBufers);
close(videofd);
return;
}
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
}
enum v4l2_buf_type type;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(videofd, VIDIOC_STREAMON, &type) == -1)
{
((CAM_INFO_REG *)&pPsReg[CAM_INFO])->cam_run_status = 0;
camera_err(" VIDIOC_STREAMON error! %s\n", strerror(errno));
return;
}
else
{
((CAM_INFO_REG *)&pPsReg[CAM_INFO])->cam_run_status = 1;
camera_print(" stream on succeed\n");
}
startFPGAScan();
return;
}
void HCamDevice::HtCamStopVideoCapturing()
{
enum v4l2_buf_type type;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(videofd, VIDIOC_STREAMOFF, &type) == -1)
camera_err(" VIDIOC_STREAMOFF error! %s\n", strerror(errno));
stopFPGAScan();
}
void HCamDevice::HtCamExitVideoCapturing()
{
// HtCamStopVideoCapturing();
// uint8_t i;
// for (i = 0; i < v4lBufferCount; ++i)
// {
// if (-1 == munmap(captureBufers[i].start, captureBufers[i].length))
// printf("munmap \n");
// }
// if(close(videofd)<0)
// {
// camera_err("close video fd error \n");
// }
// if(close(memfd)<0)
// {
// camera_err("close mem fd error \n");
// }
// if(close(subDevicefd)<0)
// {
// camera_err("close sub Device fd error \n");
// }
}
int HCamDevice::HtCamWaitVideoCapture(int msTimeout)
{
struct pollfd pfd;
pfd.fd = videofd;
pfd.events = POLLIN | POLLRDNORM;
if (poll(&pfd, 1, msTimeout) > 0)
return pfd.revents;
return 0;
}
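/*
 * HtCamReadCaptureFrame(): waits (poll) for a filled buffer, dequeues it
 * with VIDIOC_DQBUF, immediately re-queues it with VIDIOC_QBUF, and hands
 * back a pointer to the mmap()ed buffer via *pbuf. Returns the buffer
 * index on success, or a negative value on timeout/ioctl failure. Because
 * the buffer is re-queued before the caller consumes it, the caller is
 * expected to copy or process the data before the driver refills it.
 */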
int HCamDevice::HtCamReadCaptureFrame(void **pbuf, int timeout)
{
if (!HtCamWaitVideoCapture(timeout))
{
camera_err("read frame time out\n");
return -1;
}
struct v4l2_buffer buf;
CLEAR(buf);
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
buf.length = nplanes;
buf.m.planes = (struct v4l2_plane *)calloc(nplanes, sizeof(struct v4l2_plane));
if (!buf.m.planes)
{
return -3;
}
}
int ret = 0;
if (( ret = ioctl(videofd, VIDIOC_DQBUF, &buf)) == 0) // if dequeueing fails here the frame is already lost, so nothing below needs to run
{
camera_print("*****DQBUF[%d] FINISH*****\n", buf.index);
}
else
{
camera_print("****VIDIOC_DQBUF FAIL :%d*****\n",ret);
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
return -2;
}
if (ioctl(videofd, VIDIOC_QBUF, &buf) == 0) // re-queue the buffer the same way
{
camera_print("************QBUF[%d] FINISH**************\n", buf.index);
}
else
{
camera_print("****VIDIOC_QBUF FAIL*****\n");
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
return -2;
}
//camera_print("buf.bytesused is %d \r\n", buf.bytesused);
lastSucceedBufferIndex = buf.index;
*pbuf = captureBufers[buf.index].start;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
free(buf.m.planes);
}
return buf.index;
}
int HCamDevice::HtCamReadNextFrame(void **pbuf)
{
if ((lastSucceedBufferIndex + 1) >= v4lBufferCount)
*pbuf = captureBufers[0].start;
else
*pbuf = captureBufers[lastSucceedBufferIndex + 1].start;
return (lastSucceedBufferIndex++) ;
}
int HCamDevice::V4LBufferQuery()
{
int validCount = 0;
struct v4l2_buffer buf;
memset(&buf, 0, sizeof(buf));
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
buf.length = nplanes;
buf.m.planes = (struct v4l2_plane *)calloc(nplanes, sizeof(struct v4l2_plane));
}
for (int i = 0; i < v4lBufferCount; i++)
{
buf.index = i;
if (ioctl(videofd, VIDIOC_QUERYBUF, &buf) == -1)
{
camera_dbg("Failed to query buffer");
}
if(buf.bytesused)
validCount ++ ;
}
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
return validCount ;
}
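/*
 * stopFPGAScan()/startFPGAScan(): bit 2 of FPGA register 0x02 appears to
 * gate the scan logic; the stop/start sampling strobes in register 10 are
 * pulsed afterwards (see HtCamStartSampling/HtCamStopSampling below). The
 * exact hardware meaning of these bits is an assumption inferred from the
 * register accesses in this file.
 */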
void HCamDevice::stopFPGAScan()
{
uint32_t REG2 = HtCamReadFpgaRegs(0x02);
REG2 &= ~0x04;
HtCamWriteFpgaRegs(0x02, REG2);
HtCamStopSampling();
}
void HCamDevice::startFPGAScan()
{
uint32_t REG2 = HtCamReadFpgaRegs(0x02);
REG2 |= 0x04;
HtCamWriteFpgaRegs(0x02, REG2);
HtCamStartSampling();
}
void HCamDevice::HtCamChangeAdjustSpTime(uint32_t sp_time_gap, uint32_t sp_time_rw)
{
uint32_t *pCamCtrlReg = virBaseAddr;
sp_time_gap = (sp_time_gap << 16);
pCamCtrlReg[3] |= (uint32_t)(sp_time_rw);
pCamCtrlReg[3] |= (uint32_t)(sp_time_gap);
}
void HCamDevice::HtCamChangeAdjustPhase(uint16_t times, uint8_t dir)
{
uint32_t *pCamCtrlReg = virBaseAddr;
int i;
if (dir)
pCamCtrlReg[10] |= (1 << 9);
else
pCamCtrlReg[10] &= ~(1 << 9);
for (i = 0; i < times; i++)
{
pCamCtrlReg[10] &= ~(1 << 8);
pCamCtrlReg[10] |= (1 << 8);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[10] &= ~(1 << 10);
pCamCtrlReg[10] |= (1 << 10);
}
}
// This function is prohibited
int HCamDevice::HtCamStartSampling()
{
uint32_t *CamReg;
if (virBaseAddr == NULL)
return -1;
CamReg = (uint32_t *)virBaseAddr;
CamReg[10] &= ~(HT_CAM_REG_CR_STARTSAMPLE_MASK);
CamReg[10] |= (HT_CAM_REG_CR_STARTSAMPLE_MASK);
return 0;
}
// This function is prohibited
int HCamDevice::HtCamStopSampling()
{
printf("调用了 HtCamStopSampling\r\n");
uint32_t *CamReg;
if (virBaseAddr == NULL)
return -1;
CamReg = (uint32_t *)virBaseAddr;
CamReg[10] |= (HT_CAM_REG_CR_STOPSAMPLE_MASK);
CamReg[10] &= ~(HT_CAM_REG_CR_STOPSAMPLE_MASK);
return 0;
}
int HCamDevice::init_fd()
{
// int fd;
// void *map_base;
// if ((fd = open("/dev/mem", O_RDWR | O_SYNC)) < 0)
// {
// camera_dbg("Cannot open /dev/mem \n");
// return -1;
// }
// memfd = fd;
// map_base = mmap(NULL, AddrMapSize, PROT_READ | PROT_WRITE, MAP_SHARED, memfd, phyBaseAddr);
// if (map_base == NULL)
// {
// camera_dbg("Cannot mmap addr \n");
// close(fd);
// return -1;
// }
// virBaseAddr = (uint32_t *)map_base;
// if ((fd = open(videoDevName.c_str(), O_RDWR, 0)) == -1)
// {
// camera_dbg("Ht Cam Cannot open video file\n");
// return -1;
// }
// videofd = fd;
// if ((fd = open(subDeviceName.c_str(), O_RDWR, 0)) == -1)
// {
// camera_dbg("t Cam Cannot open subdevice file\n");
// return -1;
// }
// subDevicefd = fd;
// return 1;
return 1;
}
int HCamDevice::init_sample()
{
struct v4l2_subdev_format subdev_fmt;
int ret;
memset(&subdev_fmt, 0, sizeof(subdev_fmt)); // zero the whole structure
subdev_fmt.pad = 0;
subdev_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
subdev_fmt.format.width = v4lWidth;
subdev_fmt.format.height = v4lHeight;
ret = ioctl(subDevicefd, VIDIOC_SUBDEV_S_FMT, &subdev_fmt);
// ret = ioctl(subDevicefd, 0x40010034, 0x00CE0080);/////////////////////////////////////////////////////
if (ret < 0)
printf("VIDIOC_SUBDEV_S_FMT failed.\n");
struct v4l2_event_subscription sub;
memset(&sub, 0, sizeof(sub));
sub.type = V4L2_EVENT_HTCAMMOD_OVERFREP;
ret = ioctl(subDevicefd, VIDIOC_SUBSCRIBE_EVENT, &sub);
if (ret < 0)
printf("VIDIOC_SUBSCRIBE_EVENT failed.\n");
return ret;
}
int HCamDevice::init_capture()
{
struct v4l2_format fmt;
struct v4l2_control ctrl;
CLEAR(fmt);
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.width = v4lWidth;
fmt.fmt.pix_mp.height = v4lHeight;
fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_GREY;
fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;
fmt.fmt.pix_mp.num_planes = nplanes;
}
else
{
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = v4lWidth;
fmt.fmt.pix.height = v4lHeight;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_GREY;
fmt.fmt.pix.field = V4L2_FIELD_NONE;
}
int ioctlRet = ioctl(videofd, VIDIOC_S_FMT, &fmt) ;
if ( ioctlRet < 0)
{
camera_err("error %s\n" , strerror(errno));
camera_err(" setting the data format failed!width %d height %d planes %d \n" , fmt.fmt.pix_mp.width,fmt.fmt.pix_mp.height , fmt.fmt.pix_mp.num_planes );
camera_print(" fmt.type = %d\n", fmt.type);
camera_print(" fmt.fmt.pix.width = %d\n", fmt.fmt.pix_mp.width);
camera_print(" fmt.fmt.pix.height = %d\n", fmt.fmt.pix_mp.height);
camera_print(" fmt.fmt.pix.field = %d\n", fmt.fmt.pix_mp.field);
// close(videofd);
return -1;
}
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
if (v4lWidth != fmt.fmt.pix_mp.width || v4lHeight != fmt.fmt.pix_mp.height)
camera_err(" does not support %u * %u\n", v4lWidth, v4lHeight);
v4lWidth = fmt.fmt.pix_mp.width;
v4lHeight = fmt.fmt.pix_mp.height;
camera_print(" VIDIOC_S_FMT succeed\n");
camera_print(" fmt.type = %d\n", fmt.type);
camera_print(" fmt.fmt.pix.width = %d\n", fmt.fmt.pix_mp.width);
camera_print(" fmt.fmt.pix.height = %d\n", fmt.fmt.pix_mp.height);
camera_print(" fmt.fmt.pix.field = %d\n", fmt.fmt.pix_mp.field);
if (ioctl(videofd, VIDIOC_G_FMT, &fmt) < 0)
camera_err(" get the data format failed!\n");
nplanes = fmt.fmt.pix_mp.num_planes;
}
else
{
if (v4lWidth != fmt.fmt.pix.width || v4lHeight != fmt.fmt.pix.height)
camera_err(" does not support %u * %u\n", v4lWidth, v4lHeight);
v4lWidth = fmt.fmt.pix.width;
v4lHeight = fmt.fmt.pix.height;
camera_print(" VIDIOC_S_FMT succeed\n");
camera_print(" fmt.type = %d\n", fmt.type);
camera_print(" fmt.fmt.pix.width = %d\n", fmt.fmt.pix.width);
camera_print(" fmt.fmt.pix.height = %d\n", fmt.fmt.pix.height);
camera_print(" fmt.fmt.pix.field = %d\n", fmt.fmt.pix.field);
}
CLEAR(ctrl);
ctrl.id = (V4L2_CID_USER_BASE + 0xc121); // V4L2_CID_XILINX_LOW_LATENCY
ctrl.value = (1 << 2); // XVIP_LOW_LATENCY_DISABLE
if (ioctl(videofd, VIDIOC_S_CTRL, &ctrl) < 0)
{
camera_err("Fail to set control:%s.\n", strerror(errno));
}
return 0;
}
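/*
 * init_video_buffer(): requests v4lBufferCount MMAP buffers with
 * VIDIOC_REQBUFS, queries each with VIDIOC_QUERYBUF and mmap()s it into
 * captureBufers[]. For the multi-planar driver type only plane 0 is
 * mapped (nplanes is 1 in this configuration).
 */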
int HCamDevice::init_video_buffer()
{
struct v4l2_requestbuffers req;
struct v4l2_buffer buf;
CLEAR(req);
req.count = v4lBufferCount;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = v4l2memtype;
printf("I GET THE drivertype %d\n",drivertype);
if (ioctl(videofd, VIDIOC_REQBUFS, &req) < 0)
{
camera_err(" VIDIOC_REQBUFS failed\n");
close(videofd);
return -1;
}
v4lBufferCount = req.count;
camera_dbg(" reqbuf number is %d\n", v4lBufferCount);
if (captureBufers != NULL)
free(captureBufers);
captureBufers = (captureBuffer *)calloc(v4lBufferCount, sizeof(struct captureBuffer));
uint8_t n_buffers;
for (n_buffers = 0; n_buffers < req.count; ++n_buffers)
{
CLEAR(buf);
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = v4l2memtype;
buf.index = n_buffers;
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
buf.length = nplanes;
buf.m.planes = (struct v4l2_plane *)calloc(buf.length, sizeof(struct v4l2_plane));
}
if (ioctl(videofd, VIDIOC_QUERYBUF, &buf) == -1)
{
camera_err(" VIDIOC_QUERYBUF error\n");
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
free(captureBufers);
close(videofd);
return -1;
}
if (drivertype == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
{
captureBufers[n_buffers].length = buf.m.planes[0].length;
captureBufers[n_buffers].start = mmap(NULL,
buf.m.planes[0].length,
PROT_READ /*|PROT_WRITE*/,
MAP_SHARED, videofd,
buf.m.planes[0].m.mem_offset);
camera_dbg(" map buffer index: %d, mem: %p, len: %x, offset: %x\n",
n_buffers, captureBufers[n_buffers].start, buf.m.planes[0].length,
buf.m.planes[0].m.mem_offset);
free(buf.m.planes);
}
else
{
captureBufers[n_buffers].length = buf.length;
captureBufers[n_buffers].start = mmap(NULL,
buf.length,
PROT_READ /*| PROT_WRITE*/,
MAP_SHARED, videofd,
buf.m.offset);
camera_dbg(" map buffer index: %d, mem: %p, len: %x, offset: %x\n",
n_buffers, captureBufers[n_buffers].start, buf.length, buf.m.offset);
}
}
lastSucceedBufferIndex = (n_buffers - 1);
return 1;
}
int HCamDevice::init_dev()
{
// v4lWidth=5184;
// v4lHeight=2;
init_fd(); // map register space and open the device files
init_sample(); // reset / set the subdevice format
init_capture();
init_video_buffer();
// TestPattern 1
HtCamSwitchSampleModes(0);
// HtCamChangeExposureValue(500); // exposure
// start sample
camera_dbg("ST SP : %d , VSNP : %d \r\n" , ST_SP , VSNP);
HtCamChangeMonoStartSample(200 * 3 + ST_SP_VSNP);
// if( ST_SP != 0 || ST_SP_VSNP != 0)
// HtCamChangeMonoStartSample( ST_SP * 3 + ST_SP_VSNP);
if( VSNP != 0)
{
unsigned int value = HtCamReadFpgaRegs(16);
value=0;
value = value & 0xffffff00;
HtCamWriteFpgaRegs(16, VSNP | value);
}
// CamZ_Reg_4 reg_4;
// reg_4.value=HtCamReadFpgaRegs(0x04);
// reg_4.en_pattern = 1;
// HtCamWriteFpgaRegs(0x04,reg_4.value);
// CamZ_Reg_A reg_a;
// reg_a.value=HtCamReadFpgaRegs(0x0A);
// reg_a.DPI = 0;
// HtCamWriteFpgaRegs(0x0A,reg_a.value);
HtCamSetDpi(0);
// ex_trigger = 0, int_trigger = 1
HtCamChangeTriggerInAndEXt(1);
HtCamInitADCReg();
HtCamWriteAllADC();
return 0;
}
void HCamDevice::HtCamSwitchSampleModes(uint8_t mode)
{
if (virBaseAddr == NULL)
{
return ;
}
uint32_t *pCamCtrlReg = virBaseAddr;
if (!mode)
pCamCtrlReg[4] &= ~(0x00020000);
else
pCamCtrlReg[4] |= (0x00020000);
}
void HCamDevice::HtCamChangeExposureValue(uint32_t value)
{
if (virBaseAddr == NULL)
{
return ;
}
uint32_t *pCamCtrlReg = virBaseAddr;
// pCamCtrlReg[3] |= (uint32_t)(0x00006000);
pCamCtrlReg[5] = 0x00000000;
pCamCtrlReg[6] = 0x00000000;
pCamCtrlReg[5] |= (uint32_t)(value); // RED
pCamCtrlReg[5] |= (uint32_t)(value << 16);
pCamCtrlReg[6] |= (uint32_t)(value); // GREEN
pCamCtrlReg[6] |= ((uint32_t)value << 16); // BLUE
}
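/*
 * HtCamWriteADCReg(): packs the address/data pair twice into an adcTiming
 * bit-field frame (both halves of the 32-bit word carry the same
 * transaction), writes the frame into FPGA registers 7 and 0, and pulses
 * the ADC write-enable bits (0x2000 / 0x4000) in register 4 to shift the
 * frame out to the two ADCs. The short sleeps presumably give the FPGA
 * time to clock out the serial transfer; the exact timing requirement is
 * an assumption.
 */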
void HCamDevice::HtCamWriteADCReg(uint8_t addr, uint8_t data)
{
if (virBaseAddr == NULL)
{
return ;
}
uint32_t *pCamCtrlReg = virBaseAddr;
uint32_t AdcRegFrame = 0x0000;
uint32_t EnableAdc1Write = 0x4000;
uint32_t EnableAdc2Write = 0x2000;
adcTiming *pAdcRegFrame = (adcTiming *)&AdcRegFrame;
// if (addr > 20)
// return;
pADCReg[addr] = data;
pAdcRegFrame->rwbit = 0;
pAdcRegFrame->regAddr = addr;
pAdcRegFrame->regData = data;
pAdcRegFrame->rwbit1 = 0;
pAdcRegFrame->regAddr1 = addr;
pAdcRegFrame->regData1 = data;
pCamCtrlReg[7] = (uint32_t)AdcRegFrame;
pCamCtrlReg[4] &= ~(EnableAdc2Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[4] |= (EnableAdc2Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[0] = (uint32_t)AdcRegFrame;
pCamCtrlReg[4] &= ~(EnableAdc1Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[4] |= (EnableAdc1Write);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
void HCamDevice::HtCamReadADCReg(uint8_t addr, uint8_t *data)
{
if (virBaseAddr == NULL)
{
return ;
}
uint32_t *pCamCtrlReg = virBaseAddr;
uint32_t AdcRegFrame = 0x0000;
uint32_t EnableAdcWrite = (1 << 14);
uint32_t tempData;
adcTiming *pAdcRegFrame = (adcTiming *)&AdcRegFrame;
// if (addr > 0x14)
// return;
pAdcRegFrame->rwbit = 1;
pAdcRegFrame->regAddr = addr;
pAdcRegFrame->regData = 0;
pAdcRegFrame->rwbit1 = 1;
pAdcRegFrame->regAddr1 = addr;
pAdcRegFrame->regData1 = 0;
pCamCtrlReg[7] = (uint32_t)AdcRegFrame;
pCamCtrlReg[4] &= ~(EnableAdcWrite);
std::this_thread::sleep_for(std::chrono::milliseconds(5));
pCamCtrlReg[4] |= (EnableAdcWrite);
std::this_thread::sleep_for(std::chrono::milliseconds(10));
tempData = pCamCtrlReg[2];
uint8_t value = (tempData >> 4) & (0xFF);
(*data) = value;
}
uint8_t HCamDevice::getADCReg(int addr)
{
if(addr > adcRegSize)
return 0 ;
return pADCReg[addr];
}
void HCamDevice::setADCReg(int addr , uint8_t value)
{
if(addr > adcRegSize)
return ;
pADCReg[addr] = value ;
}
// void HCamDevice::saveADCReg()
// {
// CamADCConfig adcConfig( _ADCPATH);
// int size = adcConfig.getSize();
// size = adcRegSize < size ? adcRegSize : size ;
// std::vector<int> adcValue(size) ;
// for(int index =0 ; index < size ; index++)
// {
// adcValue[index] = pADCReg[index];
// }
// if(!adcConfig.saveValue(adcValue))
// {
// std::cout << "Save adc Value Error!" << std::endl ;
// }
// }
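/*
 * FPGA register access: reg_addr is an index into the mmap()ed 32-bit
 * register array at virBaseAddr, so the byte offset from phyBaseAddr is
 * reg_addr * 4.
 */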
uint32_t HCamDevice::HtCamReadFpgaRegs(uint8_t reg_addr)
{
uint32_t *pCamCtrlReg = virBaseAddr;
unsigned int u = pCamCtrlReg[reg_addr] ;
// FILE *fp;
// fp = fopen("/home/root/logF", "a+");
// fprintf(fp , "read reg %d , value : %d\r\n" ,reg_addr , u );
// fclose(fp);
// outFile << "PC read reg " << (int)reg_addr <<" value : " << u << std::endl;
return pCamCtrlReg[reg_addr] ;
}
void HCamDevice::HtCamSetSpTime(uint32_t reg_value)
{
// Incomplete: register 0x03 is read but never modified or written back.
CamZ_Reg_2Short sp_time;
sp_time.value = HtCamReadFpgaRegs(0x03);
(void)sp_time;
(void)reg_value;
}
void HCamDevice::HtCamGetSpTime(uint32_t &reg_value)
{
}
void HCamDevice::HtCamSetStSp(int color)
{
//sp_time.NShort[1] = 0x00D0; // grayscale
//sp_time.NShort[1] = 0x00C8; // color
CamZ_Reg_2Short st_sp;
st_sp.value = HtCamReadFpgaRegs(0x0d);
st_sp.NShort[0] = 0x0200;
if (color)
st_sp.NShort[1] = 0x00C8; // color
else
st_sp.NShort[1] = 0x00DC; // grayscale, 2023-8-3
HtCamWriteFpgaRegs(0x0d , st_sp.value);
}
void HCamDevice::HtCamGetStSp(uint32_t &reg_value)
{
reg_value = HtCamReadFpgaRegs(0x0d);
}
void HCamDevice::HtCamSetVsnpTime(int color)
{
// NOTE: both branches currently write the same VSNP value (0xD0D0)
HtCamWriteFpgaRegs(0x10,color?0xD0D0:0xD0D0);
//uint32_t *pCamCtrlReg = virBaseAddr;
//uint32_t ss = start_sample;
//pCamCtrlReg[20] &= ~(0xFFFF0000); // clear
//pCamCtrlReg[20] |= (uint32_t)(ss << 16);
}
void HCamDevice::HtCamSetFrameCnt(uint32_t val)
{
uint32_t *pCamCtrlReg = virBaseAddr;
uint32_t ss = val;
HtCamWriteFpgaRegs(0x14,ss);
}
void HCamDevice::HtCamGetFrameCnt(uint32_t &val)
{
val = HtCamReadFpgaRegs(0x14);
camera_print("HtCamGetFrameCnt:%d\r\n",val);
}
void HCamDevice::HtCamGetFrameNum(uint32_t &val)
{
val = HtCamReadFpgaRegs(0x15);
camera_print(" HtCamGetFrameNum :%d\n",val);
}
void HCamDevice:: HtCamWriteFpgaRegs(uint8_t reg_addr, uint32_t reg_value)
{
uint32_t *pCamCtrlReg = virBaseAddr;
pCamCtrlReg[reg_addr] = reg_value;
}
void HCamDevice::HtCamChangeMonoStartSample(int start_sample)
{
uint32_t *pCamCtrlReg = virBaseAddr;
uint32_t ss = start_sample;
pCamCtrlReg[13] &= ~(0xFFFF0000); // clear
pCamCtrlReg[13] |= (uint32_t)(ss << 16);
}
void HCamDevice::HtCamChangePhase(int phase)
{
// phase &= 0xff ;
// VSNP = phase;
// unsigned int oValue = HtCamReadFpgaRegs(16);
// oValue = oValue & 0xffffff00;
// oValue = oValue + phase ;
// HtCamWriteFpgaRegs(16, oValue);
}
void HCamDevice::ChangeScanState(bool state)
{
CamZ_Reg_4 reg4;
reg4.value = HtCamReadFpgaRegs(4);
reg4.gpio1 = state ;
HtCamWriteFpgaRegs(4, reg4.value);
}
void HCamDevice::HtCamChangeTriggerInAndEXt(int in_out)
{
uint32_t *pCamCtrlReg = virBaseAddr;
if (in_out)
{
pCamCtrlReg[10] |= (0x00000040);
}
else
{
pCamCtrlReg[10] &= ~(0x00000040);
}
}
void HCamDevice::HtCamSetDpi(int dpi)
{
uint32_t *pCamCtrlReg = virBaseAddr;
if (dpi)
{
pCamCtrlReg[10] |= (0x00000800);
}
else
{
pCamCtrlReg[10] &= ~(0x00000800);
}
}
void HCamDevice::HtCamSetClolr(int color)
{
CamZ_Reg_4_New reg_4;
reg_4.value=HtCamReadFpgaRegs(0x04);
reg_4.color_mode = color;
reg_4.en_frameset = 1; // frame counting enabled by default
HtCamWriteFpgaRegs(0x04,reg_4.value);
HtCamSetStSp(color);
HtCamSetVsnpTime(color);
}
void HCamDevice::HtCamSetTriggerMode(int val)
{
uint32_t *pCamCtrlReg = virBaseAddr;
if (val)
pCamCtrlReg[10] |= 0x00000040;
else
pCamCtrlReg[10] &= ~(0x00000040);
}
int HCamDevice::HtCamGetTriggerMode()
{
uint32_t *pCamCtrlReg = virBaseAddr;
return ((pCamCtrlReg[10] & (0x00000040)) >> 6);
}
int HCamDevice::HtCamGetColorMode()
{
uint32_t *pCamCtrlReg = virBaseAddr;
return ((pCamCtrlReg[4] & (0x00000004)) >> 2);
}
void HCamDevice::init_ps_regs()
{
// pPsReg[CAM_DPI] = psConfig.getReg(CAM_DPI);
// pPsReg[RLS_CHECK_CODE] = psConfig.getReg(RLS_CHECK_CODE);
//pPsReg[HEARTBAT] = psConfig.getReg(HEARTBAT);
}
uint32_t HCamDevice::HtCamReadPsRegs(uint8_t reg_addr)
{
return pPsReg[reg_addr];
}
#include <iostream>
#include <fstream>
void HCamDevice::HtCamWritePsRegs(uint8_t reg_addr, uint32_t reg_value)
{
printf("reg addr %d\r\n" ,reg_addr );
if (reg_addr >= PSReg::REG_NUM)
return;
pPsReg[reg_addr] = reg_value;
switch (reg_addr)
{
case BUF_WIDTH:
case BUF_HEIGHT:
case BUF_NUM:
HtCamResizeBuffer(pPsReg[BUF_WIDTH], pPsReg[BUF_HEIGHT], pPsReg[BUF_NUM]);
break;
case CAM_DPI:
HtCamPsDpiChange(reg_value);
break;
case CAM_STATUS:
{
HtCamImageProcessChange();
}
break;
default:
break;
}
}
void HCamDevice::savePsReg()
{
// CamPSConfig psConfig(CONFIG_PATH);
// psConfig.setReg(PS_VERSION , pPsReg[PS_VERSION] );
// psConfig.setReg(CAM_STATUS , pPsReg[CAM_STATUS] );
// psConfig.setReg(MOTOR_SPEED , pPsReg[MOTOR_SPEED]);
// psConfig.setReg(CAM_DPI , pPsReg[CAM_DPI] );
// psConfig.setReg(HEARTBAT , pPsReg[HEARTBAT] );
// psConfig.setReg(RLS_CHECK_CODE, pPsReg[RLS_CHECK_CODE]);
// psConfig.save();
}
void HCamDevice::HtCamWriteAllADC()
{
for (int index = 0; index < adcRegSize; index++)
{
HtCamWriteADCReg(index, pADCReg[index]);
}
}
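/*
 * HtCamInitADCReg(): loads the default ADC register table. For the 82V48
 * part the layout used below is: regs 0x00-0x01 configuration, 0x02-0x0d
 * per-channel gain, 0x0e-0x13 per-channel offset, with different defaults
 * per camera type (_CAM_TYPE). The gain/offset split follows the comments
 * in the PYTHONZ_1200 branch below.
 */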
void HCamDevice::HtCamInitADCReg()
{
#ifdef ADC_82V38
pADCReg[0] = 0x23;
pADCReg[1] = 0xF3;
pADCReg[2] = 40;
pADCReg[3] = 40;
pADCReg[4] = 40;
pADCReg[5] = 255;
pADCReg[6] = 255;
pADCReg[7] = 255;
return ;
#endif
#ifdef ADC_82V48
// int size = adcConfig.getSize();
// std::cout << " read ADC size : " << size << std::endl ;
// if (size > 0)
// {
// for (int index = 0; index < size; index++)
// {
// pADCReg[index] = adcConfig.getReg(index);
// }
// return;
//}
pADCReg[0] = 0x07;
pADCReg[1] = 0x50;
/* 1200 cis */
if (_CAM_TYPE == "PYTHONZ_1200")
{
/* gain */
pADCReg[2] = 0x90;
pADCReg[3] = 0x00;
pADCReg[4] = 0x90;
pADCReg[5] = 0x00;
pADCReg[6] = 0x90;
pADCReg[7] = 0x00;
pADCReg[8] = 0x90;
pADCReg[9] = 0x00;
pADCReg[0xa] = 0x90;
pADCReg[0xb] = 0x00;
pADCReg[0xc] = 0x90;
pADCReg[0xd] = 0x00;
/* offset */
pADCReg[0xe] = 0x58;
pADCReg[0xf] = 0x5b;
pADCReg[0x10] = 0x55;
pADCReg[0x11] = 0x55;
pADCReg[0x12] = 0x50;
pADCReg[0x13] = 0x55;
}
else
{
/* offset */
if (_CAM_TYPE == "PYTHONZ_WSS")
{
pADCReg[2] = 0x55;
pADCReg[3] = 0x00;
pADCReg[4] = 0x4a;
pADCReg[5] = 0x00;
pADCReg[6] = 0x50;
pADCReg[7] = 0x00;
pADCReg[8] = 0x4e;
pADCReg[9] = 0x00;
pADCReg[0xa] = 0x40;
pADCReg[0xb] = 0x00;
pADCReg[0xc] = 0x50;
pADCReg[0xd] = 0x00;
pADCReg[0xe] = 0x65;
pADCReg[0xf] = 0x65;
pADCReg[0x10] = 0x65;
pADCReg[0x11] = 0x65;
pADCReg[0x12] = 0x65;
pADCReg[0x13] = 0x65;
}
else
{
pADCReg[2] = 0xA0;
pADCReg[3] = 0x00;
pADCReg[4] = 0xA0;
pADCReg[5] = 0x00;
pADCReg[6] = 0xA0;
pADCReg[7] = 0x00;
pADCReg[8] = 0xA0;
pADCReg[9] = 0x00;
pADCReg[0xa] = 0xA0;
pADCReg[0xb] = 0x00;
pADCReg[0xc] = 0xA0;
pADCReg[0xd] = 0x00;
pADCReg[0xe] = 0x20;
pADCReg[0xf] = 0x20;
pADCReg[0x10] = 0x20;
pADCReg[0x11] = 0x20;
pADCReg[0x12] = 0x20;
pADCReg[0x13] = 0x20;
}
}
#endif
}
void HCamDevice::HtCamResizeBuffer(int width, int height, int number)
{
v4lWidth = width;
v4lHeight = height;
v4lBufferCount = number;
HtCamExitVideoCapturing();
init_fd();
init_sample();
init_capture();
init_video_buffer();
camera_dbg("v4lWidth = %d, v4lHeight = %d, v4lBufferCount = %d\n", v4lWidth, v4lHeight, v4lBufferCount);
}
void HCamDevice::HtCamImageProcessChange()
{
CAM_STATUS_REG* status = (CAM_STATUS_REG*)&pPsReg[CAM_STATUS];
if(status->doImageProcess)
{
HtCamPsDpiChange(pPsReg[CAM_DPI]);
}else
{
pPsReg[IMG_WIDTH] = pPsReg[BUF_WIDTH];
}
}
// DPI switch - currently only adapted for 1200/1800 dpi
bool HCamDevice::HtCamPsDpiChange(int dpi)
{
std::cout << "DPI Change for " << dpi <<std::endl;
for (auto i = _DPI_WIDTH.begin(); i != _DPI_WIDTH.end(); i++)
{
if (dpi == i->first)
{
_N_DPI = dpi;
_IMG_BUF_WIDTH = i->second;
pPsReg[BUF_WIDTH] = _DPI_V4LWIDTH[dpi];
if(dpi != 300 && dpi != 600) // zoom whenever the DPI is not a native 300/600 setting
{
((CAM_STATUS_REG*)&pPsReg[CAM_STATUS])->doZoom = 1 ;
}
if (dpi > 300)
{
HtCamSetDpi(0);
pPsReg[BUF_HEIGHT] = _V4LHEIGHT/2;
}
else
{
HtCamSetDpi(1);
pPsReg[BUF_HEIGHT] = _V4LHEIGHT;
}
if(v4lWidth != pPsReg[BUF_WIDTH] || v4lHeight != pPsReg[BUF_HEIGHT] || v4lBufferCount != pPsReg[BUF_NUM])
{
HtCamResizeBuffer(pPsReg[BUF_WIDTH], pPsReg[BUF_HEIGHT] , pPsReg[BUF_NUM]);
camera_print(" fmt.fmt.pix.width = %d\n", v4lWidth);
camera_print(" fmt.fmt.pix.height = %d\n", v4lHeight);
camera_print(" fmt.fmt.pix.field = %d\n", pPsReg[BUF_NUM]);
}
return true;
}
}
return false;
}
void HCamDevice::HtCamOverClockClear()
{
uint32_t *pCamCtrlReg = virBaseAddr;
pCamCtrlReg[10] |= (0x00001000);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
pCamCtrlReg[10] &= ~(0x00001000);
}
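/*
 * Typical usage sketch (illustrative only; the call order is taken from
 * this file's own code paths, while the surrounding loop and the
 * 'running' flag are assumptions):
 *
 *   HCamDevice cam;                     // maps FPGA regs, programs ADC defaults
 *   cam.open_device(5184, 512);         // sets subdev/video format, allocates buffers
 *   cam.HtCamStartVideoCapturing();     // queue buffers, STREAMON, start FPGA scan
 *   void *frame = nullptr;
 *   while (running) {
 *       int idx = cam.HtCamReadCaptureFrame(&frame, 1000); // 1 s timeout
 *       if (idx >= 0) {
 *           // consume roughly v4lWidth * v4lHeight bytes of GREY data at 'frame'
 *       }
 *   }
 *   // the destructor stops streaming, unmaps the buffers and closes the fds
 */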