banboshi_V1/halftoneproject-master/Code/Device/ScannerDev.cs

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Timers;
using System.Windows.Forms;
using MvCamCtrl.NET;
using Newtonsoft.Json.Linq;
using OpenCvSharp.Dnn;
using static ControllerDllCSharp.ClassLibControllerDll;
namespace ProductionControl.Device
{
public class ScannerDev : IDisposable
{
private uint m_nRowStep = 0;
[DllImport("user32.dll")]
[return: MarshalAs(UnmanagedType.Bool)]
private static extern bool IsWindow(IntPtr hWnd);
public enum ScannerType
{
[Description("板卡相机")]
GENTL = 0,
[Description("网口相机")]
CC = 1,
}
private ScannerType scannerType;
public string bmpSavePath { get; private set; }
public Action<WarningEnum, string> WarningEvent;
/// <summary>
/// Scan callback: (frame index counting down from num to 1, path of the saved .bmp file)
/// </summary>
public Action<int, string> ScanEventPath;
//public Action<int, byte[]> ScanEvent;
public Action<int, Bitmap> ScanEvent;
/// <summary>
/// Exposure time
/// </summary>
public float ExposureTime { get; private set; }
/// <summary>
/// Gain
/// </summary>
public float Gain { get; private set; }
/// <summary>
/// Frame rate
/// </summary>
public float ResultingFrameRate { get; private set; }
/// <summary>
/// Image size (width/height in pixels)
/// </summary>
public Size size { get; private set; }
/// <summary>
/// Whether continuous (free-run) acquisition mode is active
/// </summary>
public bool isContinuousMode { get; private set; }
/// <summary>
/// Whether the device was opened successfully
/// </summary>
public bool IsInit { get; private set; } = false;
//public string ErrInfo { get; private set; }
//private System.Timers.Timer timer = new System.Timers.Timer();
#region
private static MyCamera.cbOutputExdelegate ImageCallback;
private static MyCamera.MV_CHUNK_DATA_CONTENT stChunkInfo;// chunk data structure info
private MyCamera.MV_GENTL_DEV_INFO_LIST m_stGentDeviceList = new MyCamera.MV_GENTL_DEV_INFO_LIST();
private MyCamera.MV_GENTL_IF_INFO_LIST m_stIFInfoList = new MyCamera.MV_GENTL_IF_INFO_LIST();
private MyCamera.MV_CC_DEVICE_INFO_LIST m_stCCDeviceList = new MyCamera.MV_CC_DEVICE_INFO_LIST();
private MyCamera device = new MyCamera();
private bool m_bGrabbing = false;//grabbing-in-progress flag
private Thread m_hReceiveThread = null;
//handle of the preview control used to display images
private IntPtr previewHwnd = IntPtr.Zero;
//number of frames left to capture
private int scanNum = 0;
#endregion
public ScannerDev(ScannerType type)
{
this.scannerType = type;
}
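// Typical usage (a minimal sketch; previewControl and the save path are placeholders,
// everything else is this class's own API):
//   var scanner = new ScannerDev(ScannerDev.ScannerType.CC);
//   scanner.WarningEvent = (level, msg) => Console.WriteLine($"[{level}] {msg}");
//   scanner.ScanEventPath = (idx, path) => Console.WriteLine($"frame {idx}: {path}");
//   if (scanner.open() && scanner.start(previewControl.Handle, @"D:\scan"))
//   {
//       scanner.scan(1);   // software-trigger one frame; ScanEventPath fires once it is saved
//       // ...
//       scanner.stop();
//       scanner.close();
//   }
/// <summary>
/// Enumerate and open the camera (GenTL board camera or GigE/USB camera depending on scannerType),
/// set continuous acquisition with software trigger, and read back the current parameters.
/// </summary>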
public bool open()
{
if (IsInit) return true;
System.GC.Collect();
int nRet;
try
{
if (this.scannerType == ScannerType.GENTL)
{
if (!File.Exists(Config.Scanner_GENTL_CTI))
{
WarningEvent?.Invoke(WarningEnum.High, $"GenTL CTI file not found: {Config.Scanner_GENTL_CTI}");
return false;
}
nRet = MyCamera.MV_CC_EnumInterfacesByGenTL_NET(ref m_stIFInfoList, Config.Scanner_GENTL_CTI);
if (0 != nRet || m_stIFInfoList.nInterfaceNum < 1)
{
WarningEvent?.Invoke(WarningEnum.High, $"Enumerate interfaces fail! ({nRet})");
return false;
}
//
MyCamera.MV_GENTL_IF_INFO stIFInfo = (MyCamera.MV_GENTL_IF_INFO)Marshal.PtrToStructure(m_stIFInfoList.pIFInfo[0], typeof(MyCamera.MV_GENTL_IF_INFO));
nRet = MyCamera.MV_CC_EnumDevicesByGenTL_NET(ref stIFInfo, ref m_stGentDeviceList);
if (0 != nRet || m_stGentDeviceList.nDeviceNum < 1)
{
WarningEvent?.Invoke(WarningEnum.High, $"Enumerate devices fail! ({nRet})");
return false;
}
// Get selected device information
MyCamera.MV_GENTL_DEV_INFO device = (MyCamera.MV_GENTL_DEV_INFO)Marshal.PtrToStructure(m_stGentDeviceList.pDeviceInfo[0], typeof(MyCamera.MV_GENTL_DEV_INFO));
// Open device
if (null == this.device)
{
this.device = new MyCamera();
if (null == this.device)
{
WarningEvent?.Invoke(WarningEnum.High, $"Open device fail!");
return false;
}
}
nRet = this.device.MV_CC_CreateDeviceByGenTL_NET(ref device);
if (MyCamera.MV_OK != nRet)
{
WarningEvent?.Invoke(WarningEnum.High, $"Open device fail! ({nRet})");
return false;
}
nRet = this.device.MV_CC_OpenDevice_NET();
if (MyCamera.MV_OK != nRet)
{
this.device.MV_CC_DestroyDevice_NET();
WarningEvent?.Invoke(WarningEnum.High, $"Device open fail! ({nRet})");
return false;
}
}
else //CC
{
nRet = MyCamera.MV_CC_EnumDevices_NET(MyCamera.MV_GIGE_DEVICE | MyCamera.MV_USB_DEVICE, ref m_stCCDeviceList);
if (0 != nRet || m_stCCDeviceList.nDeviceNum < 1)
{
WarningEvent?.Invoke(WarningEnum.High, $"Enumerate devices fail! ({nRet})");
return false;
}
// Get selected device information
MyCamera.MV_CC_DEVICE_INFO device = (MyCamera.MV_CC_DEVICE_INFO)Marshal.PtrToStructure(m_stCCDeviceList.pDeviceInfo[0], typeof(MyCamera.MV_CC_DEVICE_INFO));
// Open device
if (null == this.device)
{
this.device = new MyCamera();
if (null == this.device)
{
WarningEvent?.Invoke(WarningEnum.High, $"Open device fail!");
return false;
}
}
nRet = this.device.MV_CC_CreateDevice_NET(ref device);
if (MyCamera.MV_OK != nRet)
{
WarningEvent?.Invoke(WarningEnum.High, $"Open device fail! ({nRet})");
return false;
}
nRet = this.device.MV_CC_OpenDevice_NET();
if (MyCamera.MV_OK != nRet)
{
this.device.MV_CC_DestroyDevice_NET();
WarningEvent?.Invoke(WarningEnum.High, $"Open device fail! ({nRet})");
return false;
}
// Detect the optimal network packet size (GigE cameras only)
if (device.nTLayerType == MyCamera.MV_GIGE_DEVICE)
{
int nPacketSize = this.device.MV_CC_GetOptimalPacketSize_NET();
if (nPacketSize > 0)
{
nRet = this.device.MV_CC_SetIntValue_NET("GevSCPSPacketSize", (uint)nPacketSize);
//if (nRet != MyCamera.MV_OK)
//{
// ShowErrorMsg("Set Packet Size failed!", nRet);
//}
}
else
{
//ShowErrorMsg("Get Packet Size failed!", nPacketSize);
}
}
}
// Register image callback (not used here: this class polls frames from a worker thread instead of using the callback)
//ImageCallback = new MyCamera.cbOutputExdelegate(ImageCallbackFunc);
//nRet = device.MV_CC_RegisterImageCallBackEx_NET(ImageCallback, IntPtr.Zero);
//if (MyCamera.MV_OK != nRet)
//{
// WarningEvent?.Invoke(WarningEnum.High, $"Register image callback failed! ({nRet})");
// return false;
//}
init2();
// Set continuous acquisition mode
device.MV_CC_SetEnumValue_NET("AcquisitionMode", (uint)MyCamera.MV_CAM_ACQUISITION_MODE.MV_ACQ_MODE_CONTINUOUS);
setMode(false); //software trigger
//
getParam();
IsInit = true;
//timer.Elapsed += Timer_Elapsed;
//timer.Interval = 100;
//timer.Enabled = true;
return true;
}
catch (Exception ex)
{
WarningEvent?.Invoke(WarningEnum.High, ex.Message);
return false;
}
}
public void close()
{
if (!IsInit) return;
try
{
IsInit = false;
// Clear the grabbing flag
if (m_bGrabbing == true)
{
m_bGrabbing = false;
m_hReceiveThread.Join();
}
// Close device
device.MV_CC_CloseDevice_NET();
device.MV_CC_DestroyDevice_NET();
}
catch { }
}
/// <summary>
/// Start grabbing: launches the receive thread and starts acquisition on the camera.
/// </summary>
/// <param name="preview_Hwnd">Handle of the control used to display the live image</param>
/// <param name="bmp_save_path">Directory where captured .bmp files are saved</param>
/// <returns>true if grabbing was started</returns>
public bool start(IntPtr preview_Hwnd,string bmp_save_path)
{
if (!IsInit) return false;
if (m_bGrabbing) return true;
this.previewHwnd= preview_Hwnd;
this.bmpSavePath = bmp_save_path;
// Set the grabbing flag
m_bGrabbing = true;
m_hReceiveThread = new Thread(ReceiveThreadProcess);
m_hReceiveThread.Start();
// Start grabbing
int nRet = device.MV_CC_StartGrabbing_NET();
if (MyCamera.MV_OK != nRet)
{
m_bGrabbing = false;
m_hReceiveThread.Join();
WarningEvent?.Invoke(WarningEnum.High, $"Start Grabbing Fail! ({nRet})");
return false;
}
return true;
}
public bool stop()
{
if (!IsInit) return false;
if (!m_bGrabbing) return true;
try
{
// Clear the grabbing flag
m_bGrabbing = false;
m_hReceiveThread.Join();
// Stop grabbing
int nRet = device.MV_CC_StopGrabbing_NET();
if (nRet != MyCamera.MV_OK)
{
WarningEvent?.Invoke(WarningEnum.High, $"Stop Grabbing Fail! ({nRet})");
return false;
}
}
catch
{
return false;
}
return true;
}
#region private
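/// <summary>
/// Read Width/Height from the camera and cache the frame size and per-frame pixel count.
/// </summary>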
private bool init2()
{
// Get payload size
MyCamera.MVCC_INTVALUE stParam = new MyCamera.MVCC_INTVALUE();
//int nRet = device.MV_CC_GetIntValue_NET("PayloadSize", ref stParam);//strKey [IN]: property key, e.g. "Width"; pstValue [IN][OUT]: camera property structure returned to the caller
//if (MyCamera.MV_OK != nRet)
//{
// System.Windows.Forms.MessageBox.Show("Get PayloadSize Fail");
// return false;
//}
//g_nPayloadSize = stParam.nCurValue;
// Get height
int nRet = device.MV_CC_GetIntValue_NET("Height", ref stParam);
if (MyCamera.MV_OK != nRet)
{
System.Windows.Forms.MessageBox.Show("Get Height Fail");
return false;
}
uint nHeight = stParam.nCurValue;
// Get width
nRet = device.MV_CC_GetIntValue_NET("Width", ref stParam);
if (MyCamera.MV_OK != nRet)
{
System.Windows.Forms.MessageBox.Show("Get Width Fail");
return false;
}
uint nWidth = stParam.nCurValue;
this.size = new Size((int)nWidth, (int)nHeight);
// Get nRowStep (total pixels per frame)
m_nRowStep = nWidth * nHeight;
return true;
}
/// <summary>
/// Set the acquisition mode: false = software/hardware triggered, true = continuous (free-run).
/// </summary>
/// <param name="isContinuous">true for continuous mode, false for triggered mode</param>
/// <param name="triggerSource">Trigger source used when isContinuous is false (software trigger by default)</param>
public void setMode(bool isContinuous, MyCamera.MV_CAM_TRIGGER_SOURCE triggerSource= MyCamera.MV_CAM_TRIGGER_SOURCE.MV_TRIGGER_SOURCE_SOFTWARE)
{
if (isContinuous)
{
device.MV_CC_SetEnumValue_NET("TriggerMode", (uint)MyCamera.MV_CAM_TRIGGER_MODE.MV_TRIGGER_MODE_OFF);
}
else
{
device.MV_CC_SetEnumValue_NET("TriggerMode", (uint)MyCamera.MV_CAM_TRIGGER_MODE.MV_TRIGGER_MODE_ON);
// Trigger source: 0 - Line0;
// 1 - Line1;
// 2 - Line2;
// 3 - Line3;
// 4 - Counter;
// 7 - Software;
//if (cbSoftTrigger.Checked)
device.MV_CC_SetEnumValue_NET("TriggerSource", (uint)triggerSource);
//else
// m_MyCamera.MV_CC_SetEnumValue_NET("TriggerSource", (uint)MyCamera.MV_CAM_TRIGGER_SOURCE.MV_TRIGGER_SOURCE_LINE0);
}
isContinuousMode = isContinuous;
}
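/// <summary>
/// Update the handle of the preview control used by the receive thread to display live frames.
/// </summary>
/// <param name="preview_Hwnd">Handle of the preview control</param>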
public void setPreviewWin(IntPtr preview_Hwnd)
{
this.previewHwnd = preview_Hwnd;
}
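/// <summary>
/// Receive-loop thread body: pulls frames from the SDK, renders them to the preview window and,
/// while scanNum is greater than zero, saves or relays each frame through ScanEventPath/ScanEvent.
/// </summary>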
public void ReceiveThreadProcess()
{
MyCamera.MV_FRAME_OUT stFrameInfo = new MyCamera.MV_FRAME_OUT();
MyCamera.MV_DISPLAY_FRAME_INFO stDisplayInfo = new MyCamera.MV_DISPLAY_FRAME_INFO();
int nRet = MyCamera.MV_OK;
while (m_bGrabbing)
{
bool isOpenWin = IsWindow(this.previewHwnd);
if ((!isContinuousMode && this.scanNum < 1) ||
(isContinuousMode && (this.previewHwnd == IntPtr.Zero || !isOpenWin)))
{
Thread.Sleep(50);
continue;
}
//
nRet = device.MV_CC_GetImageBuffer_NET(ref stFrameInfo, 1000);
if (nRet == MyCamera.MV_OK)
{
if (RemoveCustomPixelFormats(stFrameInfo.stFrameInfo.enPixelType)
|| stFrameInfo.stFrameInfo.nFrameLen == 0)
{
device.MV_CC_FreeImageBuffer_NET(ref stFrameInfo);
continue;
}
//render to the preview control
if (this.previewHwnd != IntPtr.Zero && isOpenWin)
{
stDisplayInfo.hWnd = this.previewHwnd;// pictureBox1.Handle;
stDisplayInfo.pData = stFrameInfo.pBufAddr;
stDisplayInfo.nDataLen = stFrameInfo.stFrameInfo.nFrameLen;
stDisplayInfo.nWidth = stFrameInfo.stFrameInfo.nWidth;
stDisplayInfo.nHeight = stFrameInfo.stFrameInfo.nHeight;
stDisplayInfo.enPixelType = stFrameInfo.stFrameInfo.enPixelType;
device.MV_CC_DisplayOneFrame_NET(ref stDisplayInfo);
}
//save / relay the frame
if (this.scanNum>0)//save tag
{
MyCamera.MV_SAVE_IMG_TO_FILE_PARAM stSaveFileParam = new MyCamera.MV_SAVE_IMG_TO_FILE_PARAM();
//lock (BufForDriverLock)
{
if (scannerType == ScannerType.CC)
{
stSaveFileParam.enImageType = MyCamera.MV_SAVE_IAMGE_TYPE.MV_Image_Bmp;
stSaveFileParam.enPixelType = stFrameInfo.stFrameInfo.enPixelType;
stSaveFileParam.pData = stFrameInfo.pBufAddr;//m_BufForDriver;
stSaveFileParam.nDataLen = stFrameInfo.stFrameInfo.nFrameLen;
stSaveFileParam.nHeight = stFrameInfo.stFrameInfo.nHeight;
stSaveFileParam.nWidth = stFrameInfo.stFrameInfo.nWidth;
stSaveFileParam.iMethodValue = 2;
//Save path
//stSaveFileParam.pImagePath = this.bmpSavePath + "\\" + scannerType.ToString() + "_w" + stSaveFileParam.nWidth.ToString() + "_h" + stSaveFileParam.nHeight.ToString() + "_fn" + stFrameInfo.stFrameInfo.nFrameNum.ToString() + ".bmp";
stSaveFileParam.pImagePath = this.bmpSavePath + "\\" + DateTime.Now.Ticks + ".bmp";
//m_MyCamera.MV_CC_RegisterImageCallBackEx_NET(new MyCamera.cbOutputExdelegate(ImageCallBack), IntPtr.Zero);
nRet = device.MV_CC_SaveImageToFile_NET(ref stSaveFileParam);
if (MyCamera.MV_OK == nRet)
{
if (ScanEventPath != null)
ScanEventPath?.Invoke(this.scanNum--, stSaveFileParam.pImagePath);
else
ScanEvent?.Invoke(this.scanNum--, read2Bmp(stSaveFileParam.pImagePath));
}
}
else
{
//=== raw byte[]
//byte[] rawFileBuff = toRawImage(stFrameInfo);
//if (rawFileBuff != null)
// ScanEvent?.Invoke(this.scanNum--, rawFileBuff);
//=== bmp
Bitmap bmp = toBitmap(stFrameInfo.pBufAddr, stFrameInfo.stFrameInfo);
//bmp = bytes2bmp(bmp2bytes(bmp));
//string bmpPath = "d:\\" + DateTime.Now.Ticks + ".bmp";
//bmp.Save(bmpPath, ImageFormat.Bmp);
////bmp = (Bitmap)Bitmap.FromFile(bmpPath);
//bmp = read2Bmp(bmpPath);
if (bmp != null)
ScanEvent?.Invoke(this.scanNum--, bmp);
}
}
}
//free
device.MV_CC_FreeImageBuffer_NET(ref stFrameInfo);
}
else
{
Thread.Sleep(5);
}
}
}
public void AfterGigeUsbImageAcquired_Station1(MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, IntPtr pTemp )
{
Bitmap Station1Image1 = new System.Drawing.Bitmap(pFrameInfo.nWidth, pFrameInfo.nHeight, pFrameInfo.nWidth * 1, System.Drawing.Imaging.PixelFormat.Format8bppIndexed, pTemp);
ColorPalette cp = Station1Image1.Palette;
for (int i = 0; i < 256; i++)
{
cp.Entries[i] = Color.FromArgb(i, i, i);
}
Station1Image1.Palette = cp;
}
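/// <summary>
/// Convert a frame buffer to a raw byte array (RGB8 for color formats, Mono8 for mono formats);
/// returns null for unsupported pixel formats or on conversion failure.
/// </summary>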
private byte[] toRawImage(MyCamera.MV_FRAME_OUT stFrameOut)
{
MyCamera.MvGvspPixelType enType = MyCamera.MvGvspPixelType.PixelType_Gvsp_Undefined;
uint nChannelNum = 0;
if (IsColorPixelFormat(stFrameOut.stFrameInfo.enPixelType))
{
enType = MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed;
nChannelNum = 3;
}
else if (IsMonoPixelFormat(stFrameOut.stFrameInfo.enPixelType))
{
enType = MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8;
nChannelNum = 1;
}
else
return null;
//
if (enType != MyCamera.MvGvspPixelType.PixelType_Gvsp_Undefined)
{
IntPtr pBufForConvert = Marshal.AllocHGlobal((int)(stFrameOut.stFrameInfo.nWidth * stFrameOut.stFrameInfo.nHeight * nChannelNum));
MyCamera.MV_PIXEL_CONVERT_PARAM stConvertPixelParam = new MyCamera.MV_PIXEL_CONVERT_PARAM();
stConvertPixelParam.nWidth = stFrameOut.stFrameInfo.nWidth;
stConvertPixelParam.nHeight = stFrameOut.stFrameInfo.nHeight;
stConvertPixelParam.pSrcData = stFrameOut.pBufAddr;
stConvertPixelParam.nSrcDataLen = stFrameOut.stFrameInfo.nFrameLen;
stConvertPixelParam.enSrcPixelType = stFrameOut.stFrameInfo.enPixelType;
stConvertPixelParam.enDstPixelType = enType;
stConvertPixelParam.pDstBuffer = pBufForConvert;
stConvertPixelParam.nDstBufferSize = (uint)(stFrameOut.stFrameInfo.nWidth * stFrameOut.stFrameInfo.nHeight * nChannelNum);
int nRet = device.MV_CC_ConvertPixelType_NET(ref stConvertPixelParam);
if (MyCamera.MV_OK != nRet)
{
Marshal.FreeHGlobal(pBufForConvert);
return null;
}
// Copy the converted pixel data into a managed byte array
byte[] rawImage = new byte[stConvertPixelParam.nDstLen];
Marshal.Copy(stConvertPixelParam.pDstBuffer, rawImage, 0, (int)stConvertPixelParam.nDstLen);
if (pBufForConvert != IntPtr.Zero)
Marshal.FreeHGlobal(pBufForConvert);
//
return rawImage;
}
return null;
}
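/// <summary>
/// Convert a frame buffer to a System.Drawing.Bitmap: 24-bit RGB for color formats,
/// 8-bit indexed grayscale for mono formats; returns null for unsupported formats.
/// </summary>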
private Bitmap toBitmap(IntPtr pData, MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo)
{
int nRet = MyCamera.MV_OK;
//MyCamera device = m_pMyCamera;
//MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo = new MyCamera.MV_FRAME_OUT_INFO_EX();
//IntPtr pData = System.Runtime.InteropServices.Marshal.AllocHGlobal((int)g_nPayloadSize);
if (pData == IntPtr.Zero)
return null;
IntPtr pTemp = IntPtr.Zero;
IntPtr pImageBuffer = IntPtr.Zero;
if (IsColorPixelFormat(pFrameInfo.enPixelType)) // color image handling
{
if (pFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed)
{
pTemp = pData;
}
else
{
// Convert other color formats to RGB8
nRet = getBmpPtr(device, pData, pFrameInfo.nHeight, pFrameInfo.nWidth, pFrameInfo.enPixelType, pImageBuffer, MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed);
if (MyCamera.MV_OK != nRet)
return null;
pTemp = pImageBuffer;
}
// Repack from packed RGB into planar layout
Byte[] byteArrImageData = new Byte[m_nRowStep * 3];
unsafe
{
byte* pBufForSaveImage = (byte*)pTemp;
UInt32 nSupWidth = (pFrameInfo.nWidth + (UInt32)3) & 0xfffffffc;
for (int nRow = 0; nRow < pFrameInfo.nHeight; nRow++)
{
for (int col = 0; col < pFrameInfo.nWidth; col++)
{
byteArrImageData[nRow * nSupWidth + col] = pBufForSaveImage[nRow * pFrameInfo.nWidth * 3 + (3 * col)];
byteArrImageData[pFrameInfo.nWidth * pFrameInfo.nHeight + nRow * nSupWidth + col] = pBufForSaveImage[nRow * pFrameInfo.nWidth * 3 + (3 * col + 1)];
byteArrImageData[pFrameInfo.nWidth * pFrameInfo.nHeight * 2 + nRow * nSupWidth + col] = pBufForSaveImage[nRow * pFrameInfo.nWidth * 3 + (3 * col + 2)];
}
}
pTemp = System.Runtime.InteropServices.Marshal.UnsafeAddrOfPinnedArrayElement(byteArrImageData, 0);
}
//
Bitmap Station1Image1 = new System.Drawing.Bitmap(pFrameInfo.nWidth, pFrameInfo.nHeight, pFrameInfo.nWidth * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, pTemp);
//ColorPalette cp = Station1Image1.Palette;
//for (int i = 0; i < 256; i++)
//{
// cp.Entries[i] = Color.FromArgb(i, i, i);
//}
//Station1Image1.Palette = cp;
return Station1Image1;
}
else if (IsMonoPixelFormat(pFrameInfo.enPixelType)) // mono image handling
{
if (pFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
{
pTemp = pData;
}
else
{
// Convert other mono formats to Mono8; the destination buffer must be allocated by the caller
// (the Bitmap created below references this unmanaged memory, so it is not freed here)
pImageBuffer = Marshal.AllocHGlobal((int)(pFrameInfo.nWidth * pFrameInfo.nHeight));
nRet = ConvertToMono8(device, pData, pImageBuffer, pFrameInfo.nHeight, pFrameInfo.nWidth, pFrameInfo.enPixelType);
if (MyCamera.MV_OK != nRet)
{
Marshal.FreeHGlobal(pImageBuffer);
return null;
}
pTemp = pImageBuffer;
}
//
Bitmap Station1Image1 = new System.Drawing.Bitmap(pFrameInfo.nWidth, pFrameInfo.nHeight, pFrameInfo.nWidth * 1, System.Drawing.Imaging.PixelFormat.Format8bppIndexed, pTemp);
ColorPalette cp = Station1Image1.Palette;
for (int i = 0; i < 256; i++)
{
cp.Entries[i] = Color.FromArgb(i, i, i);
}
Station1Image1.Palette = cp;
return Station1Image1;
}
else
return null;
}
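/// <summary>
/// Convert pSrc (nPixelType) into pDst (type) via MV_CC_ConvertPixelType_NET;
/// both buffers must be allocated by the caller.
/// </summary>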
public Int32 getBmpPtr(MyCamera device, IntPtr pSrc, ushort nHeight, ushort nWidth, MyCamera.MvGvspPixelType nPixelType, IntPtr pDst, MyCamera.MvGvspPixelType type)
{
if (IntPtr.Zero == pSrc || IntPtr.Zero == pDst)
{
return MyCamera.MV_E_PARAMETER;
}
int nRet = MyCamera.MV_OK;
//MyCamera device = obj as MyCamera;
MyCamera.MV_PIXEL_CONVERT_PARAM stPixelConvertParam = new MyCamera.MV_PIXEL_CONVERT_PARAM();
stPixelConvertParam.pSrcData = pSrc;//source data
if (IntPtr.Zero == stPixelConvertParam.pSrcData)
{
return -1;
}
stPixelConvertParam.nWidth = nWidth;//image width
stPixelConvertParam.nHeight = nHeight;//image height
stPixelConvertParam.enSrcPixelType = nPixelType;//source pixel format
stPixelConvertParam.nSrcDataLen = (uint)(nWidth * nHeight * ((((uint)nPixelType) >> 16) & 0x00ff) >> 3);
stPixelConvertParam.nDstBufferSize = (uint)(nWidth * nHeight * ((((uint)type) >> 16) & 0x00ff) >> 3);//destination size derived from the target pixel-type bit depth
stPixelConvertParam.pDstBuffer = pDst;//converted (destination) data
stPixelConvertParam.enDstPixelType = type;
nRet = device.MV_CC_ConvertPixelType_NET(ref stPixelConvertParam);//pixel format conversion
if (MyCamera.MV_OK != nRet)
{
return -1;
}
return MyCamera.MV_OK;
}
/// <summary>
/// Convert other mono formats to Mono8
/// </summary>
/// <param name="device">Camera instance used for the conversion</param>
/// <param name="pInData">Input image data</param>
/// <param name="pOutData">Output (Mono8) image data; buffer allocated by the caller</param>
/// <param name="nHeight">Image height</param>
/// <param name="nWidth">Image width</param>
/// <param name="nPixelType">Source pixel format</param>
/// <returns>MV_OK on success, otherwise an error code</returns>
public Int32 ConvertToMono8(MyCamera device, IntPtr pInData, IntPtr pOutData, ushort nHeight, ushort nWidth, MyCamera.MvGvspPixelType nPixelType)
{
if (IntPtr.Zero == pInData || IntPtr.Zero == pOutData)
{
return MyCamera.MV_E_PARAMETER;
}
int nRet = MyCamera.MV_OK;
//MyCamera device = obj as MyCamera;
MyCamera.MV_PIXEL_CONVERT_PARAM stPixelConvertParam = new MyCamera.MV_PIXEL_CONVERT_PARAM();
stPixelConvertParam.pSrcData = pInData;//source data
if (IntPtr.Zero == stPixelConvertParam.pSrcData)
{
return -1;
}
stPixelConvertParam.nWidth = nWidth;//image width
stPixelConvertParam.nHeight = nHeight;//image height
stPixelConvertParam.enSrcPixelType = nPixelType;//source pixel format
stPixelConvertParam.nSrcDataLen = (uint)(nWidth * nHeight * ((((uint)nPixelType) >> 16) & 0x00ff) >> 3);
stPixelConvertParam.pDstBuffer = pOutData;//converted (destination) data
stPixelConvertParam.enDstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8;
stPixelConvertParam.nDstBufferSize = (uint)(nWidth * nHeight);//Mono8 destination: 1 byte per pixel
nRet = device.MV_CC_ConvertPixelType_NET(ref stPixelConvertParam);//pixel format conversion
if (MyCamera.MV_OK != nRet)
{
return -1;
}
return nRet;
}
/// <summary>
/// Convert other color formats to RGB8
/// </summary>
/// <param name="device">Camera instance used for the conversion</param>
/// <param name="pSrc">Input image data</param>
/// <param name="nHeight">Image height</param>
/// <param name="nWidth">Image width</param>
/// <param name="nPixelType">Source pixel format</param>
/// <param name="pDst">Output (RGB8) image data; buffer allocated by the caller</param>
/// <returns>MV_OK on success, otherwise an error code</returns>
public Int32 ConvertToRGB(MyCamera device, IntPtr pSrc, ushort nHeight, ushort nWidth, MyCamera.MvGvspPixelType nPixelType, IntPtr pDst)
{
if (IntPtr.Zero == pSrc || IntPtr.Zero == pDst)
{
return MyCamera.MV_E_PARAMETER;
}
int nRet = MyCamera.MV_OK;
//MyCamera device = obj as MyCamera;
MyCamera.MV_PIXEL_CONVERT_PARAM stPixelConvertParam = new MyCamera.MV_PIXEL_CONVERT_PARAM();
stPixelConvertParam.pSrcData = pSrc;//source data
if (IntPtr.Zero == stPixelConvertParam.pSrcData)
{
return -1;
}
stPixelConvertParam.nWidth = nWidth;//image width
stPixelConvertParam.nHeight = nHeight;//image height
stPixelConvertParam.enSrcPixelType = nPixelType;//source pixel format
stPixelConvertParam.nSrcDataLen = (uint)(nWidth * nHeight * ((((uint)nPixelType) >> 16) & 0x00ff) >> 3);
stPixelConvertParam.nDstBufferSize = (uint)(nWidth * nHeight * ((((uint)MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed) >> 16) & 0x00ff) >> 3);
stPixelConvertParam.pDstBuffer = pDst;//converted (destination) data
stPixelConvertParam.enDstPixelType = MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed;
stPixelConvertParam.nDstBufferSize = (uint)nWidth * nHeight * 3;
nRet = device.MV_CC_ConvertPixelType_NET(ref stPixelConvertParam);//pixel format conversion
if (MyCamera.MV_OK != nRet)
{
return -1;
}
return MyCamera.MV_OK;
}
private static void ImageCallbackFunc(IntPtr pData, ref MyCamera.MV_FRAME_OUT_INFO_EX pFrameInfo, IntPtr pUser)
{
//Print parse the timestamp information in the frame
Console.WriteLine("ImageCallBack: ExposureTime[" + Convert.ToString(pFrameInfo.fExposureTime)
+ "], SecondCount[" + Convert.ToString(pFrameInfo.nSecondCount)
+ "], CycleCount[" + Convert.ToString(pFrameInfo.nCycleCount)
+ "], CycleOffset[" + Convert.ToString(pFrameInfo.nCycleOffset)
+ "], FrameNum[" + Convert.ToString(pFrameInfo.nFrameNum) + "]");
int nStrSize = Marshal.SizeOf(stChunkInfo);
long nUnparsedChunkContent = pFrameInfo.UnparsedChunkList.pUnparsedChunkContent.ToInt64();//use 64-bit arithmetic so the pointer is not truncated on x64
for (int i = 0; i < pFrameInfo.nUnparsedChunkNum; i++)
{
stChunkInfo = (MyCamera.MV_CHUNK_DATA_CONTENT)Marshal.PtrToStructure((IntPtr)(nUnparsedChunkContent + i * nStrSize), typeof(MyCamera.MV_CHUNK_DATA_CONTENT));
Console.WriteLine("ChunkInfo:" + "ChunkID[0x{0:x8}],ChunkLen[" + Convert.ToString(stChunkInfo.nChunkLen) + "]", stChunkInfo.nChunkID);
}
Console.WriteLine("************************************");
}
static bool IsMonoPixelFormat(MyCamera.MvGvspPixelType enType)
{
switch (enType)
{
case MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono10:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono10_Packed:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono12:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono12_Packed:
return true;
default:
return false;
}
}
static bool IsColorPixelFormat(MyCamera.MvGvspPixelType enType)
{
switch (enType)
{
case MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BGR8_Packed:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_YUV422_Packed:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_YUV422_YUYV_Packed:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR8:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG8:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB8:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG8:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB10:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB10_Packed:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG10:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG10_Packed:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG10:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG10_Packed:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR10:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR10_Packed:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB12:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGB12_Packed:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG12:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerBG12_Packed:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG12:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerRG12_Packed:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR12:
case MyCamera.MvGvspPixelType.PixelType_Gvsp_BayerGR12_Packed:
return true;
default:
return false;
}
}
// Remove custom pixel formats
private bool RemoveCustomPixelFormats(MyCamera.MvGvspPixelType enPixelFormat)
{
long nResult = ((long)enPixelFormat) & (unchecked((long)0x80000000));
if (0x80000000 == nResult)
{
return true;
}
else
{
return false;
}
}
#endregion
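/// <summary>
/// Read ExposureTime, Gain and ResultingFrameRate back from the camera into the corresponding properties.
/// </summary>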
public void getParam()
{
if (!IsInit) return;
MyCamera.MVCC_FLOATVALUE stParam = new MyCamera.MVCC_FLOATVALUE();
int nRet = device.MV_CC_GetFloatValue_NET("ExposureTime", ref stParam);
if (MyCamera.MV_OK == nRet)
{
ExposureTime = stParam.fCurValue;//.ToString("F1");
}
nRet = device.MV_CC_GetFloatValue_NET("Gain", ref stParam);
if (MyCamera.MV_OK == nRet)
{
Gain = stParam.fCurValue;//.ToString("F1");
}
nRet = device.MV_CC_GetFloatValue_NET("ResultingFrameRate", ref stParam);
if (MyCamera.MV_OK == nRet)
{
ResultingFrameRate = stParam.fCurValue;//.ToString("F1");
}
}
/// <summary>
/// Set camera parameters; only values that differ from the cached ones are written, then getParam() refreshes the cache.
/// </summary>
/// <param name="exposureTime">Exposure time</param>
/// <param name="gain">Gain</param>
/// <param name="resultingFrameRate">Acquisition frame rate</param>
public void setParam(float exposureTime, float gain=0, float resultingFrameRate=0)
{
if (!IsInit) return;
bool change = false;
int nRet;
if (exposureTime != ExposureTime)
{
device.MV_CC_SetEnumValue_NET("ExposureAuto", 0);
nRet = device.MV_CC_SetFloatValue_NET("ExposureTime", exposureTime);
if (nRet != MyCamera.MV_OK)
{
WarningEvent?.Invoke(WarningEnum.Normal, $"Scanner Set Exposure Time Fail! ({nRet})");
}
change = true;
}
if (gain != Gain)
{
device.MV_CC_SetEnumValue_NET("GainAuto", 0);
nRet = device.MV_CC_SetFloatValue_NET("Gain", gain);
if (nRet != MyCamera.MV_OK)
{
WarningEvent?.Invoke(WarningEnum.Normal, $"Scanner Set Gain Time Fail! ({nRet})");
}
change = true;
}
if (resultingFrameRate != ResultingFrameRate)
{
nRet = device.MV_CC_SetFloatValue_NET("AcquisitionFrameRate", resultingFrameRate);
if (nRet != MyCamera.MV_OK)
{
WarningEvent?.Invoke(WarningEnum.Normal, $"Scanner Set AcquisitionFrameRate Time Fail! ({nRet})");
}
change = true;
}
//
if(change)
getParam();
}
/// <summary>
/// Software-triggered capture.
/// </summary>
/// <param name="num">Number of frames to capture</param>
/// <returns>true if the trigger command was issued (or continuous mode is active)</returns>
public bool scan(int num=1)
{
if (!IsInit) return false;
if (!isContinuousMode)
{
// Trigger command
int nRet = device.MV_CC_SetCommandValue_NET("TriggerSoftware");
if (MyCamera.MV_OK != nRet)
{
WarningEvent?.Invoke(WarningEnum.Normal, $"Trigger Software Fail! ({nRet})");
return false;
}
}
this.scanNum= num;
return true;
}
public void Dispose()
{
stop();
}
private byte[] bmp2bytes(Bitmap bmp)
{
MemoryStream ms = new MemoryStream();
bmp.Save(ms, System.Drawing.Imaging.ImageFormat.Bmp);
byte[] bytes = ms.GetBuffer(); //ms.ToArray() also works; GetBuffer returns the underlying buffer (possibly longer than the written data), ToArray returns an exact-length copy
ms.Close();
bmp.Dispose();
return bytes;
}
private Bitmap bytes2bmp(byte[] bytes)
{
MemoryStream ms1 = new MemoryStream(bytes);
Bitmap bm = (Bitmap)Image.FromStream(ms1);
ms1.Close();
return bm;
}
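/// <summary>
/// Load a bitmap from disk without keeping the file locked (the file is read fully into memory first).
/// </summary>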
public Bitmap read2Bmp(string path)
{
MemoryStream ms = new System.IO.MemoryStream(File.ReadAllBytes(path));
Bitmap bmp = new Bitmap(ms);
ms.Close();
ms.Dispose();
//FileStream fs = new FileStream(path, FileMode.Open);
//byte[] bmpdata = new byte[fs.Length];
//fs.Read(bmpdata, 0, bmpdata.Length);
//Bitmap bmp = new Bitmap(fs);
//fs.Close();
return bmp;
}
}
}