package com.netsdk.demo.customize;

import java.io.File;
import java.util.Arrays;
import java.util.Scanner;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.netsdk.demo.util.CaseMenu;
import com.netsdk.lib.structure.NET_LEFTDETECTION_RULE_INFO;
import com.netsdk.lib.structure.NET_PARKINGDETECTION_RULE_INFO;
import com.netsdk.lib.structure.NET_RIOTERDETECTION_RULE_INFO;
import com.netsdk.lib.structure.NET_WANDERDETECTION_RULE_INFO;
import com.sun.jna.Memory;
import com.sun.jna.Pointer;
import com.sun.jna.ptr.IntByReference;
import com.netsdk.lib.NetSDKLib;
import com.netsdk.lib.NetSDKLib.LLong;
import com.netsdk.lib.ToolKits;
import com.netsdk.lib.enumeration.EM_EVENT_IVS;
import com.netsdk.lib.NetSDKLib.*;

import static com.netsdk.lib.NetSDKLib.EM_ANALYSE_EVENT_TYPE.EM_ANALYSE_EVENT_IVS_WANDERDETECTION;

public class SmartDetection {

    public static final NetSDKLib netSdk = NetSDKLib.NETSDK_INSTANCE;

    // Login handle
    private LLong loginHandle = new LLong(0);
    private LLong lAttachHandle = new LLong(0);
    private LLong AttachHandle = new LLong(0);
    // Task ID returned by CLIENT_AddAnalyseTask
    private int nTaskID = 0;
    // Extended device information
    private NET_DEVICEINFO_Ex deviceInfo = new NET_DEVICEINFO_Ex();

    public void InitTest() {
        // Initialize the SDK library
        netSdk.CLIENT_Init(DisConnectCallBack.getInstance(), null);

        // Set the callback invoked after a successful auto-reconnect
        netSdk.CLIENT_SetAutoReconnect(new HaveReConnectCallBack(), null);

        // Open the SDK log (optional)
        NetSDKLib.LOG_SET_PRINT_INFO setLog = new NetSDKLib.LOG_SET_PRINT_INFO();
        String logPath = new File(".").getAbsoluteFile().getParent() + File.separator + "sdk_log"
                + File.separator + "sdk.log";
        setLog.bSetFilePath = 1;
        System.arraycopy(logPath.getBytes(), 0, setLog.szLogFilePath, 0, logPath.getBytes().length);
        setLog.bSetPrintStrategy = 1;
        setLog.nPrintStrategy = 0;
        if (!netSdk.CLIENT_LogOpen(setLog)) {
            System.err.println("Open SDK Log Failed!!!");
        }

        Login();
    }

    public void Login() {
        // Log in to the device over TCP
        int nSpecCap = NetSDKLib.EM_LOGIN_SPAC_CAP_TYPE.EM_LOGIN_SPEC_CAP_TCP;
        IntByReference nError = new IntByReference(0);
        loginHandle = netSdk.CLIENT_LoginEx2(m_strIp, m_nPort, m_strUser, m_strPassword, nSpecCap, null,
                deviceInfo, nError);
        if (loginHandle.longValue() != 0) {
            System.out.printf("Login Device[%s] Success!\n", m_strIp);
        } else {
            System.err.printf("Login Device[%s] Fail.Error[%s]\n", m_strIp, ToolKits.getErrorCode());
            LoginOut();
        }
    }

    public void LoginOut() {
        System.out.println("End Test");
        if (loginHandle.longValue() != 0) {
            netSdk.CLIENT_Logout(loginHandle);
        }
        System.out.println("See You...");
        netSdk.CLIENT_Cleanup();
        System.exit(0);
    }
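    // NOTE: the connection parameters and the disconnect/reconnect callback classes referenced
    // above are not part of this listing. The declarations below are a minimal sketch, assuming
    // placeholder values and the standard NetSDKLib.fDisConnect / fHaveReConnect callback
    // interfaces of the NetSDK Java binding; adjust IP, port, and credentials to your device.
    private String m_strIp = "192.168.1.108";   // assumed device IP, replace as needed
    private int m_nPort = 37777;                // assumed device port
    private String m_strUser = "admin";         // assumed user name
    private String m_strPassword = "admin";     // assumed password

    // Hypothetical disconnect callback singleton (sketch only).
    private static class DisConnectCallBack implements NetSDKLib.fDisConnect {
        private static final DisConnectCallBack instance = new DisConnectCallBack();

        public static DisConnectCallBack getInstance() {
            return instance;
        }

        @Override
        public void invoke(LLong lLoginID, String pchDVRIP, int nDVRPort, Pointer dwUser) {
            System.out.printf("Device[%s] Port[%d] Disconnect!\n", pchDVRIP, nDVRPort);
        }
    }

    // Hypothetical reconnect-success callback (sketch only).
    private static class HaveReConnectCallBack implements NetSDKLib.fHaveReConnect {
        @Override
        public void invoke(LLong lLoginID, String pchDVRIP, int nDVRPort, Pointer dwUser) {
            System.out.printf("Device[%s] Port[%d] Reconnect!\n", pchDVRIP, nDVRPort);
        }
    }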
    /**
     * Add a smart kitchen clothes detection task
     */
    @SuppressWarnings("resource")
    public void AddAnalyseTaskKITCHEN() {
        Scanner scan = new Scanner(System.in);
        System.out.println("Enter the emDataSourceType value:");
        int emDataSourceType = scan.nextInt();
        if (emDataSourceType == 1) {
            // Input parameter: remote real-time stream description
            NET_REMOTE_REALTIME_STREAM_INFO msg = new NET_REMOTE_REALTIME_STREAM_INFO();
            msg.stuRuleInfo.nRuleCount = 1;
            msg.emStartRule = 0;
            // Stream protocol type: RTSP (see EM_STREAM_PROTOCOL_TYPE)
            msg.emStreamProtocolType = NetSDKLib.EM_STREAM_PROTOCOL_TYPE.EM_STREAM_PROTOCOL_RTSP;
            byte[] path = "rtsp://admin:admin@10.33.9.184:554/cam/realmonitor?channel=1&subtype=0".getBytes();
            System.arraycopy(path, 0, msg.szPath, 0, path.length);
            msg.nChannelID = 0;
            msg.nStreamType = 0;

            // Object type list (values as used by the original demo)
            int[] emType = new int[2];
            emType[0] = 1;
            emType[1] = 11;
            msg.stuRuleInfo.stuRuleInfos[0].emClassType = NetSDKLib.EM_SCENE_CLASS_TYPE.EM_SCENE_CLASS_OPERATEMONITOR;
            // Smart kitchen clothes detection event
            msg.stuRuleInfo.stuRuleInfos[0].dwRuleType = NetSDKLib.EVENT_IVS_SMART_KITCHEN_CLOTHES_DETECTION;
            msg.stuRuleInfo.stuRuleInfos[0].nObjectTypeNum = 2;
            msg.stuRuleInfo.stuRuleInfos[0].emObjectTypes = emType;

            // Rule-specific configuration
            NET_SMART_KITCHEN_CLOTHES_DETECTION_RULE_INFO SRuleInfo = new NET_SMART_KITCHEN_CLOTHES_DETECTION_RULE_INFO();
            SRuleInfo.bChefClothesEnable = 1;
            SRuleInfo.bMaskEnable = 1;
            SRuleInfo.bChefHatEnable = 1;
            SRuleInfo.nChefClothesColorNum = 1;
            int[] Colors = new int[8];
            Colors[0] = 1;
            SRuleInfo.emChefClothesColors = Colors;
            SRuleInfo.nReportInterval = 30;

            // Copy the rule struct into native memory referenced by pReserved
            Pointer pReserv1 = new Memory(SRuleInfo.size());
            msg.stuRuleInfo.stuRuleInfos[0].pReserved = pReserv1;
            ToolKits.SetStructDataToPointer(SRuleInfo, msg.stuRuleInfo.stuRuleInfos[0].pReserved, 0);

            Pointer pInParam = new Memory(msg.size());
            ToolKits.SetStructDataToPointer(msg, pInParam, 0);

            // Output parameter
            NET_OUT_ADD_ANALYSE_TASK pOutParam = new NET_OUT_ADD_ANALYSE_TASK();
            if (netSdk.CLIENT_AddAnalyseTask(loginHandle, emDataSourceType, pInParam, pOutParam, 5000)) {
                nTaskID = pOutParam.nTaskID;
                System.out.println("AddAnalyseTask Succeed! Task ID: " + nTaskID
                        + ", virtual channel: " + pOutParam.nVirtualChannel);
            } else {
                System.err.printf("AddAnalyseTask Failed!Last Error[0x%x]\n", netSdk.CLIENT_GetLastError());
            }
        }
    }

    /**
     * Add an abandoned-object (left object) detection task
     */
    public void AddAnalyseTaskLEFTDETECTION() {
        Scanner scanner = new Scanner(System.in);
        System.out.println("Enter the analysis data source type, 1: remote real-time stream; 2: actively pushed picture files");
        int emDataSourceType = scanner.nextInt();
        // TODO: the actively-pushed picture source is not implemented in this demo
        if (emDataSourceType != 1) {
            System.out.println("Actively pushed pictures are not supported yet");
            return;
        }

        // Input parameter structure
        NET_REMOTE_REALTIME_STREAM_INFO msg = new NET_REMOTE_REALTIME_STREAM_INFO();
        // Number of analysis rules
        msg.stuRuleInfo.nRuleCount = 1;
        // Task start rule
        msg.emStartRule = 0;
        /**
         * Stream protocol type, EM_STREAM_PROTOCOL_TYPE:
         * EM_STREAM_PROTOCOL_UNKNOWN=0 (unknown), EM_STREAM_PROTOCOL_PRIVATE_V2=1 (private v2),
         * EM_STREAM_PROTOCOL_PRIVATE_V3=2 (private v3), EM_STREAM_PROTOCOL_RTSP=3 (RTSP),
         * EM_STREAM_PROTOCOL_ONVIF=4 (ONVIF), EM_STREAM_PROTOCOL_GB28181=5 (GB28181)
         */
        msg.emStreamProtocolType = NetSDKLib.EM_STREAM_PROTOCOL_TYPE.EM_STREAM_PROTOCOL_RTSP;
        // Stream URL
        byte[] path = "rtsp://admin:admin@10.33.12.138:554/cam/realmonitor?channel=1&subtype=0".getBytes();
        System.arraycopy(path, 0, msg.szPath, 0, path.length);
        // Channel number
        msg.nChannelID = 6;

        // Configure the analysis rule
        // Scene class
        msg.stuRuleInfo.stuRuleInfos[0].emClassType = NetSDKLib.EM_SCENE_CLASS_TYPE.EM_SCENE_CLASS_NORMAL;
        // Rule type: abandoned object detection
        msg.stuRuleInfo.stuRuleInfos[0].dwRuleType = EM_EVENT_IVS.EVENT_IVS_LEFTDETECTION.getId();
        // Number of object types to detect
        msg.stuRuleInfo.stuRuleInfos[0].nObjectTypeNum = 2;
        // Object type list
        int[] emType = new int[2];
        emType[0] = NetSDKLib.EM_ANALYSE_OBJECT_TYPE.EM_ANALYSE_OBJECT_TYPE_ENTITY;
        emType[1] = NetSDKLib.EM_ANALYSE_OBJECT_TYPE.EM_ANALYSE_OBJECT_TYPE_CONTAINER;
        msg.stuRuleInfo.stuRuleInfos[0].emObjectTypes = emType;

        // Rule-specific configuration
        // 1. Create the rule structure
        NET_LEFTDETECTION_RULE_INFO sRuleInfo = new NET_LEFTDETECTION_RULE_INFO();
        // 2. Fill in the structure
        sRuleInfo.nDetectRegionPoint = 4;
        for (int i = 0; i < 4; i++) {
            sRuleInfo.stuDetectRegion[i] = new POINTCOORDINATE();
        }
        // Detection region (full frame, 0~8191 coordinate space)
        sRuleInfo.stuDetectRegion[0].nX = 0;    sRuleInfo.stuDetectRegion[0].nY = 0;
        sRuleInfo.stuDetectRegion[1].nX = 0;    sRuleInfo.stuDetectRegion[1].nY = 8191;
        sRuleInfo.stuDetectRegion[2].nX = 8191; sRuleInfo.stuDetectRegion[2].nY = 8191;
        sRuleInfo.stuDetectRegion[3].nX = 8191; sRuleInfo.stuDetectRegion[3].nY = 0;
        // Alarm trigger position
        sRuleInfo.nTriggerPosition = 1;
        byte[] position = new byte[1];
        position[0] = 0;
        sRuleInfo.bTriggerPosition = position;
        // Minimum duration
        sRuleInfo.nMinDuration = 10;
        // Track duration
        sRuleInfo.nTrackDuration = 20;
        // Whether the size filter is enabled
        sRuleInfo.bSizeFileter = false;

        // 3. Allocate native memory and attach it via pReserved
        Pointer pReserv = new Memory(sRuleInfo.size());
        msg.stuRuleInfo.stuRuleInfos[0].pReserved = pReserv;
        // Use ToolKits.SetStructDataToPointer to copy the struct into the native memory
        ToolKits.SetStructDataToPointer(sRuleInfo, msg.stuRuleInfo.stuRuleInfos[0].pReserved, 0);

        // Allocate memory for pInParam
        Pointer pInParam = new Memory(msg.size());
        ToolKits.SetStructDataToPointer(msg, pInParam, 0);

        // Output parameter
        NET_OUT_ADD_ANALYSE_TASK pOutParam = new NET_OUT_ADD_ANALYSE_TASK();
        if (netSdk.CLIENT_AddAnalyseTask(loginHandle, emDataSourceType, pInParam, pOutParam, 5000)) {
            nTaskID = pOutParam.nTaskID;
            System.out.println("AddAnalyseTask Succeed! Task ID: " + nTaskID
                    + ", virtual channel: " + pOutParam.nVirtualChannel);
        } else {
            System.err.printf("AddAnalyseTask Failed!Last Error[0x%x]\n", netSdk.CLIENT_GetLastError());
        }
    }
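    // The task methods above copy the RTSP URL into the fixed-size szPath buffer with
    // System.arraycopy; a URL longer than the buffer would throw ArrayIndexOutOfBoundsException.
    // The helper below is a minimal sketch (not part of the original demo) of a bounds-checked
    // copy, e.g. copyStringToBuffer(rtspUrl, msg.szPath).
    private static void copyStringToBuffer(String value, byte[] buffer) {
        byte[] bytes = value.getBytes();
        // Copy at most buffer.length bytes so an oversized URL is truncated instead of crashing
        int len = Math.min(bytes.length, buffer.length);
        System.arraycopy(bytes, 0, buffer, 0, len);
    }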
    /**
     * Add a loitering (wander) detection task
     */
    public void AddAnalyseTaskWANDERDETECTION() {
        Scanner scanner = new Scanner(System.in);
        System.out.println("Enter the analysis data source type, 1: remote real-time stream; 2: actively pushed picture files");
        int emDataSourceType = scanner.nextInt();
        // TODO: the actively-pushed picture source is not implemented in this demo
        if (emDataSourceType != 1) {
            System.out.println("Actively pushed pictures are not supported yet");
            return;
        }

        // Input parameter structure
        NET_REMOTE_REALTIME_STREAM_INFO msg = new NET_REMOTE_REALTIME_STREAM_INFO();
        // Number of analysis rules
        msg.stuRuleInfo.nRuleCount = 1;
        // Task start rule
        msg.emStartRule = 0;
        // Stream protocol type: RTSP (see EM_STREAM_PROTOCOL_TYPE for other values)
        msg.emStreamProtocolType = NetSDKLib.EM_STREAM_PROTOCOL_TYPE.EM_STREAM_PROTOCOL_RTSP;
        // Stream URL
        byte[] path = "rtsp://admin:admin@10.33.12.138:554/cam/realmonitor?channel=1&subtype=0".getBytes();
        System.arraycopy(path, 0, msg.szPath, 0, path.length);
        // Channel number
        msg.nChannelID = 7;

        // Configure the analysis rule
        msg.stuRuleInfo.stuRuleInfos[0].emClassType = NetSDKLib.EM_SCENE_CLASS_TYPE.EM_SCENE_CLASS_NORMAL;
        // Rule type: loitering detection
        msg.stuRuleInfo.stuRuleInfos[0].dwRuleType = EM_EVENT_IVS.EVENT_IVS_WANDERDETECTION.getId();
        // Object type list (one entry: human)
        msg.stuRuleInfo.stuRuleInfos[0].nObjectTypeNum = 1;
        int[] emType = new int[1];
        emType[0] = NetSDKLib.EM_ANALYSE_OBJECT_TYPE.EM_ANALYSE_OBJECT_TYPE_HUMAN;
        msg.stuRuleInfo.stuRuleInfos[0].emObjectTypes = emType;

        // Rule-specific configuration
        NET_WANDERDETECTION_RULE_INFO sRuleInfo = new NET_WANDERDETECTION_RULE_INFO();
        sRuleInfo.nDetectRegionPoint = 4;
        for (int i = 0; i < 4; i++) {
            sRuleInfo.stuDetectRegion[i] = new POINTCOORDINATE();
        }
        // Detection region (full frame, 0~8191 coordinate space)
        sRuleInfo.stuDetectRegion[0].nX = 0;    sRuleInfo.stuDetectRegion[0].nY = 0;
        sRuleInfo.stuDetectRegion[1].nX = 0;    sRuleInfo.stuDetectRegion[1].nY = 8191;
        sRuleInfo.stuDetectRegion[2].nX = 8191; sRuleInfo.stuDetectRegion[2].nY = 8191;
        sRuleInfo.stuDetectRegion[3].nX = 8191; sRuleInfo.stuDetectRegion[3].nY = 0;
        // Alarm trigger position
        sRuleInfo.nTriggerPosition = 1;
        byte[] position = new byte[1];
        position[0] = 0;
        sRuleInfo.bTriggerPosition = position;
        // Number of loitering/lingering people that triggers the alarm
        sRuleInfo.nTriggerTargetsNumber = 5;
        // Minimum duration
        sRuleInfo.nMinDuration = 10;
        // Report interval in seconds, 0 means no repeated alarm, default 30, range 0-600
        sRuleInfo.nReportInterval = 30;
        // Track duration
        sRuleInfo.nTrackDuration = 20;
        // Whether the rule-specific size filter is enabled
        sRuleInfo.bSizeFileter = false;
        // Rule-specific size filter
        // sRuleInfo.stuSizeFileter = new NET_CFG_SIZEFILTER_INFO();

        // Copy the rule struct into native memory referenced by pReserved
        Pointer pReserv = new Memory(sRuleInfo.size());
        msg.stuRuleInfo.stuRuleInfos[0].pReserved = pReserv;
        ToolKits.SetStructDataToPointer(sRuleInfo, msg.stuRuleInfo.stuRuleInfos[0].pReserved, 0);

        // Allocate memory for pInParam
        Pointer pInParam = new Memory(msg.size());
        ToolKits.SetStructDataToPointer(msg, pInParam, 0);

        // Output parameter
        NET_OUT_ADD_ANALYSE_TASK pOutParam = new NET_OUT_ADD_ANALYSE_TASK();
        if (netSdk.CLIENT_AddAnalyseTask(loginHandle, emDataSourceType, pInParam, pOutParam, 5000)) {
            nTaskID = pOutParam.nTaskID;
            System.out.println("AddAnalyseTask Succeed! Task ID: " + nTaskID
                    + ", virtual channel: " + pOutParam.nVirtualChannel);
        } else {
            System.err.printf("AddAnalyseTask Failed!Last Error[0x%x]\n", netSdk.CLIENT_GetLastError());
        }
    }
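    // Every rule above describes the same full-frame detection region with four corner points in
    // the 0~8191 coordinate space. The helper below is a sketch (not in the original demo) that
    // builds such a region; a rule setup could call fillFullFrameRegion(sRuleInfo.stuDetectRegion)
    // and set nDetectRegionPoint = 4 instead of repeating the corner assignments.
    private static void fillFullFrameRegion(POINTCOORDINATE[] region) {
        int max = 8191; // normalized coordinate range used by the rules above
        int[][] corners = { { 0, 0 }, { 0, max }, { max, max }, { max, 0 } };
        for (int i = 0; i < 4; i++) {
            region[i] = new POINTCOORDINATE();
            region[i].nX = corners[i][0];
            region[i].nY = corners[i][1];
        }
    }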
    /**
     * Add an illegal-parking detection task
     */
    public void AddAnalyseTaskPARKINGDETECTION() {
        Scanner scanner = new Scanner(System.in);
        System.out.println("Enter the analysis data source type, 1: remote real-time stream; 2: actively pushed picture files");
        int emDataSourceType = scanner.nextInt();
        // TODO: the actively-pushed picture source is not implemented in this demo
        if (emDataSourceType != 1) {
            System.out.println("Actively pushed pictures are not supported yet");
            return;
        }

        // Input parameter structure
        NET_REMOTE_REALTIME_STREAM_INFO msg = new NET_REMOTE_REALTIME_STREAM_INFO();
        // Number of analysis rules
        msg.stuRuleInfo.nRuleCount = 1;
        // Task start rule
        msg.emStartRule = 0;
        // Stream protocol type: RTSP (see EM_STREAM_PROTOCOL_TYPE for other values)
        msg.emStreamProtocolType = NetSDKLib.EM_STREAM_PROTOCOL_TYPE.EM_STREAM_PROTOCOL_RTSP;
        // Stream URL
        byte[] path = "rtsp://admin:admin@10.33.12.138:554/cam/realmonitor?channel=1&subtype=0".getBytes();
        System.arraycopy(path, 0, msg.szPath, 0, path.length);
        // Channel number
        msg.nChannelID = 5;

        // Configure the analysis rule
        msg.stuRuleInfo.stuRuleInfos[0].emClassType = NetSDKLib.EM_SCENE_CLASS_TYPE.EM_SCENE_CLASS_NORMAL;
        // Rule type: illegal parking detection
        msg.stuRuleInfo.stuRuleInfos[0].dwRuleType = EM_EVENT_IVS.EVENT_IVS_PARKINGDETECTION.getId();
        // Object type list (one entry: vehicle)
        msg.stuRuleInfo.stuRuleInfos[0].nObjectTypeNum = 1;
        int[] emType = new int[1];
        emType[0] = NetSDKLib.EM_ANALYSE_OBJECT_TYPE.EM_ANALYSE_OBJECT_TYPE_VEHICLE;
        msg.stuRuleInfo.stuRuleInfos[0].emObjectTypes = emType;

        // Rule-specific configuration
        NET_PARKINGDETECTION_RULE_INFO sRuleInfo = new NET_PARKINGDETECTION_RULE_INFO();
        sRuleInfo.nDetectRegionPoint = 4;
        for (int i = 0; i < 4; i++) {
            sRuleInfo.stuDetectRegion[i] = new POINTCOORDINATE();
        }
        // Detection region (full frame, 0~8191 coordinate space)
        sRuleInfo.stuDetectRegion[0].nX = 0;    sRuleInfo.stuDetectRegion[0].nY = 0;
        sRuleInfo.stuDetectRegion[1].nX = 0;    sRuleInfo.stuDetectRegion[1].nY = 8191;
        sRuleInfo.stuDetectRegion[2].nX = 8191; sRuleInfo.stuDetectRegion[2].nY = 8191;
        sRuleInfo.stuDetectRegion[3].nX = 8191; sRuleInfo.stuDetectRegion[3].nY = 0;
        // Alarm trigger position
        sRuleInfo.nTriggerPosition = 1;
        byte[] position = new byte[1];
        position[0] = 0;
        sRuleInfo.bTriggerPosition = position;
        // Minimum duration
        sRuleInfo.nMinDuration = 10;
        // Track duration
        sRuleInfo.nTrackDuration = 20;
        // Whether the size filter is enabled
        sRuleInfo.bSizeFileter = false;

        // Copy the rule struct into native memory referenced by pReserved
        Pointer pReserv = new Memory(sRuleInfo.size());
        msg.stuRuleInfo.stuRuleInfos[0].pReserved = pReserv;
        ToolKits.SetStructDataToPointer(sRuleInfo, msg.stuRuleInfo.stuRuleInfos[0].pReserved, 0);

        // Allocate memory for pInParam
        Pointer pInParam = new Memory(msg.size());
        ToolKits.SetStructDataToPointer(msg, pInParam, 0);

        // Output parameter
        NET_OUT_ADD_ANALYSE_TASK pOutParam = new NET_OUT_ADD_ANALYSE_TASK();
        if (netSdk.CLIENT_AddAnalyseTask(loginHandle, emDataSourceType, pInParam, pOutParam, 5000)) {
            nTaskID = pOutParam.nTaskID;
            System.out.println("AddAnalyseTask Succeed! Task ID: " + nTaskID
                    + ", virtual channel: " + pOutParam.nVirtualChannel);
        } else {
            System.err.printf("AddAnalyseTask Failed!Last Error[0x%x]\n", netSdk.CLIENT_GetLastError());
        }
    }
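    // The five task methods differ only in channel number, rule type, and rule structure; the
    // common NET_REMOTE_REALTIME_STREAM_INFO fields are filled the same way each time. The method
    // below is a hypothetical sketch (not in the original demo) that factors out that shared setup.
    private NET_REMOTE_REALTIME_STREAM_INFO buildRealtimeStreamInfo(String rtspUrl, int channelId) {
        NET_REMOTE_REALTIME_STREAM_INFO msg = new NET_REMOTE_REALTIME_STREAM_INFO();
        msg.stuRuleInfo.nRuleCount = 1;   // one analysis rule, as in the methods above
        msg.emStartRule = 0;              // default start rule
        msg.emStreamProtocolType = NetSDKLib.EM_STREAM_PROTOCOL_TYPE.EM_STREAM_PROTOCOL_RTSP;
        byte[] path = rtspUrl.getBytes();
        System.arraycopy(path, 0, msg.szPath, 0, Math.min(path.length, msg.szPath.length));
        msg.nChannelID = channelId;
        return msg;
    }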
    /**
     * Add a crowd-gathering (rioter) detection task
     */
    public void AddAnalyseTaskRIOTERDETECTION() {
        Scanner scanner = new Scanner(System.in);
        System.out.println("Enter the analysis data source type, 1: remote real-time stream; 2: actively pushed picture files");
        int emDataSourceType = scanner.nextInt();
        // TODO: the actively-pushed picture source is not implemented in this demo
        if (emDataSourceType != 1) {
            System.out.println("Actively pushed pictures are not supported yet");
            return;
        }

        // Input parameter structure
        NET_REMOTE_REALTIME_STREAM_INFO msg = new NET_REMOTE_REALTIME_STREAM_INFO();
        // Number of analysis rules
        msg.stuRuleInfo.nRuleCount = 1;
        // Task start rule
        msg.emStartRule = 0;
        // Stream protocol type: RTSP (see EM_STREAM_PROTOCOL_TYPE for other values)
        msg.emStreamProtocolType = NetSDKLib.EM_STREAM_PROTOCOL_TYPE.EM_STREAM_PROTOCOL_RTSP;
        // Stream URL
        byte[] path = "rtsp://admin:admin@10.33.12.138:554/cam/realmonitor?channel=1&subtype=0".getBytes();
        System.arraycopy(path, 0, msg.szPath, 0, path.length);
        // Channel number
        msg.nChannelID = 8;

        // Configure the analysis rule
        msg.stuRuleInfo.stuRuleInfos[0].emClassType = NetSDKLib.EM_SCENE_CLASS_TYPE.EM_SCENE_CLASS_NORMAL;
        // Rule type: crowd gathering detection
        msg.stuRuleInfo.stuRuleInfos[0].dwRuleType = EM_EVENT_IVS.EVENT_IVS_RIOTERDETECTION.getId();
        // Object type list (one entry: human)
        msg.stuRuleInfo.stuRuleInfos[0].nObjectTypeNum = 1;
        int[] emType = new int[1];
        emType[0] = NetSDKLib.EM_ANALYSE_OBJECT_TYPE.EM_ANALYSE_OBJECT_TYPE_HUMAN;
        msg.stuRuleInfo.stuRuleInfos[0].emObjectTypes = emType;

        // Rule-specific configuration
        NET_RIOTERDETECTION_RULE_INFO sRuleInfo = new NET_RIOTERDETECTION_RULE_INFO();
        sRuleInfo.nDetectRegionPoint = 4;
        for (int i = 0; i < 4; i++) {
            sRuleInfo.stuDetectRegion[i] = new POINTCOORDINATE();
        }
        // Detection region (full frame, 0~8191 coordinate space)
        sRuleInfo.stuDetectRegion[0].nX = 0;    sRuleInfo.stuDetectRegion[0].nY = 0;
        sRuleInfo.stuDetectRegion[1].nX = 0;    sRuleInfo.stuDetectRegion[1].nY = 8191;
        sRuleInfo.stuDetectRegion[2].nX = 8191; sRuleInfo.stuDetectRegion[2].nY = 8191;
        sRuleInfo.stuDetectRegion[3].nX = 8191; sRuleInfo.stuDetectRegion[3].nY = 0;
        // Number of detection modes
        sRuleInfo.nModeNum = 1;
        byte[] mode = new byte[1];
        // 0: detect by minimum gathering area; 1: detect by gathered-people threshold
        mode[0] = 1;
        sRuleInfo.nModeList = mode;
        // Minimum gathering area rectangle (top-left and bottom-right points)
        /*
         * POINTCOORDINATE[] coordinate = new POINTCOORDINATE[2];
         * for (int i = 0; i < 2; i++) {
         *     coordinate[i] = new POINTCOORDINATE();
         * }
         * coordinate[0].nX = 0;   coordinate[0].nY = 400;
         * coordinate[1].nX = 400; coordinate[1].nY = 0;
         * sRuleInfo.stuMinDetectRect = coordinate;
         */
        // Gathered-people threshold, 0 means no alarm
        sRuleInfo.nRioterThreshold = 5;
        // Minimum duration
        sRuleInfo.nMinDuration = 10;
        // Report interval
        sRuleInfo.nReportInterval = 10;
        // Sensitivity
        sRuleInfo.nSensitivity = 5;
        // Track duration
        sRuleInfo.nTrackDuration = 20;

        // Copy the rule struct into native memory referenced by pReserved
        Pointer pReserv = new Memory(sRuleInfo.size());
        msg.stuRuleInfo.stuRuleInfos[0].pReserved = pReserv;
        ToolKits.SetStructDataToPointer(sRuleInfo, msg.stuRuleInfo.stuRuleInfos[0].pReserved, 0);

        // Allocate memory for pInParam
        Pointer pInParam = new Memory(msg.size());
        ToolKits.SetStructDataToPointer(msg, pInParam, 0);

        // Output parameter
        NET_OUT_ADD_ANALYSE_TASK pOutParam = new NET_OUT_ADD_ANALYSE_TASK();
        if (netSdk.CLIENT_AddAnalyseTask(loginHandle, emDataSourceType, pInParam, pOutParam, 5000)) {
            nTaskID = pOutParam.nTaskID;
            System.out.println("AddAnalyseTask Succeed! Task ID: " + nTaskID
                    + ", virtual channel: " + pOutParam.nVirtualChannel);
        } else {
            System.err.printf("AddAnalyseTask Failed!Last Error[0x%x]\n", netSdk.CLIENT_GetLastError());
        }
    }
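    // RunTest() is invoked from main() but its body is not present in this listing (the demo's
    // CaseMenu-driven menu was presumably lost). The version below is a minimal sketch, assuming
    // a plain console menu that simply dispatches to the task methods defined above.
    public void RunTest() {
        Scanner scanner = new Scanner(System.in);
        while (true) {
            System.out.println("0: exit");
            System.out.println("1: AddAnalyseTaskKITCHEN");
            System.out.println("2: AddAnalyseTaskLEFTDETECTION");
            System.out.println("3: AddAnalyseTaskWANDERDETECTION");
            System.out.println("4: AddAnalyseTaskPARKINGDETECTION");
            System.out.println("5: AddAnalyseTaskRIOTERDETECTION");
            int choice = scanner.nextInt();
            switch (choice) {
                case 0: return;
                case 1: AddAnalyseTaskKITCHEN(); break;
                case 2: AddAnalyseTaskLEFTDETECTION(); break;
                case 3: AddAnalyseTaskWANDERDETECTION(); break;
                case 4: AddAnalyseTaskPARKINGDETECTION(); break;
                case 5: AddAnalyseTaskRIOTERDETECTION(); break;
                default: System.out.println("Unknown option"); break;
            }
        }
    }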
    /*
     * Add a banner (banner-pulling) detection task -- kept disabled in the original demo.
     *
     * @SuppressWarnings("resource")
     * public void AddAnalyseTaskBANNER() {
     *     Scanner scan = new Scanner(System.in);
     *     System.out.println("Enter the emDataSourceType value:");
     *     int emDataSourceType = scan.nextInt();
     *     if (emDataSourceType == 1) {
     *         // Input parameter
     *         NET_REMOTE_REALTIME_STREAM_INFO msg = new NET_REMOTE_REALTIME_STREAM_INFO();
     *         msg.stuRuleInfo.nRuleCount = 1;
     *         msg.emStartRule = 0;
     *         msg.emStreamProtocolType = 3;
     *         byte[] path = "rtsp://admin:admin@10.33.9.184:555/cam/realmonitor?channel=1&subtype=0".getBytes();
     *         System.arraycopy(path, 0, msg.szPath, 0, path.length);
     *         msg.nChannelID = 0;
     *         msg.nStreamType = 0;
     *         int[] emType = new int[16];
     *         emType[0] = 0;
     *         msg.stuRuleInfo.stuRuleInfos[0].dwRuleType = NetSDKLib.EVENT_IVS_BANNER_DETECTION; // banner-pulling event
     *         msg.stuRuleInfo.stuRuleInfos[0].emClassType = NetSDKLib.EM_SCENE_CLASS_TYPE.EM_SCENE_CLASS_CROWD_ABNORMAL;
     *         msg.stuRuleInfo.stuRuleInfos[0].nObjectTypeNum = 1;
     *         msg.stuRuleInfo.stuRuleInfos[0].emObjectTypes = emType;
     *         NET_BANNER_DETECTION_RULE_INFO BRuleInfo = new NET_BANNER_DETECTION_RULE_INFO();
     *         BRuleInfo.nDetectRegionPoint = 4;
     *         POINTCOORDINATE[] point = new POINTCOORDINATE[4];
     *         for (int j = 0; j ...  (the rest of this disabled method is missing from the source)
     */

    // Helper that expands a byte into its 8 bit flags (signature assumed; the original
    // declaration is missing from this listing, only the loop body survived).
    public static byte[] getBooleanArray(byte b) {
        byte[] array = new byte[8];
        for (int i = 7; i >= 0; i--) {
            array[i] = (byte) (b & 1);
            b = (byte) (b >> 1);
        }
        return array;
    }

    public static void main(String[] args) {
        SmartDetection XM = new SmartDetection();
        XM.InitTest();
        XM.RunTest();
        XM.LoginOut();
    }
}