Java Code Examples: org.opencv.features2d.DescriptorExtractor

Example 1
private void initializeOpenCVDependencies() throws IOException {
    mOpenCvCameraView.enableView();
    detector = FeatureDetector.create(FeatureDetector.ORB);
    descriptor = DescriptorExtractor.create(DescriptorExtractor.ORB);
    matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    img1 = new Mat();
    AssetManager assetManager = getAssets();
    InputStream istr = assetManager.open("a.jpeg");
    Bitmap bitmap = BitmapFactory.decodeStream(istr);
    Utils.bitmapToMat(bitmap, img1);
    Imgproc.cvtColor(img1, img1, Imgproc.COLOR_RGB2GRAY);
    img1.convertTo(img1, CvType.CV_8U); // convert to 8-bit to match the type of the camera frames
    descriptors1 = new Mat();
    keypoints1 = new MatOfKeyPoint();
    detector.detect(img1, keypoints1);
    descriptor.compute(img1, keypoints1, descriptors1);

}
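A hedged sketch of how these fields are typically used afterwards, assuming an OpenCV-for-Android onCameraFrame callback and the field names from the example (the matching logic is illustrative, not from the original):

public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat frame = inputFrame.gray();
    // detect and describe features in the live camera frame
    MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
    Mat descriptors2 = new Mat();
    detector.detect(frame, keypoints2);
    descriptor.compute(frame, keypoints2, descriptors2);
    // match the reference descriptors computed above against the live frame
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptors1, descriptors2, matches);
    // ... filter the matches and act on the result as needed
    return frame;
}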
 
Example 2
public KMeansMatcher()
{
	model = null;
	featureDetector = FeatureDetector.create(FeatureDetector.PYRAMID_ORB);
	descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.BRIEF);
	matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2);
}
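Note that BRIEF produces binary descriptors, for which Hamming distance is the conventional metric, while BRUTEFORCE_SL2 computes squared Euclidean distance. A hedged alternative wiring (our suggestion, not from the original project):

	matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING); // Hamming distance suits binary BRIEF descriptors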
 
Example 3
public void init() {
	featureDetector = FeatureDetector.create(detectorType);
	descriptorExtractor = DescriptorExtractor.create(extractorType);
	
	if (this.writeDebugImage) {
		this.writeDestImageKeyPoints = true;
		this.writeCutMatchedImageFromSrc = true;
		this.writeMatchingImage = true;
		this.writeDrawMatchedLineImage = true;
	}
}
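A minimal usage sketch, assuming we are inside the same class and that it exposes the fields referenced above (the concrete type constants are illustrative):

detectorType = FeatureDetector.ORB;
extractorType = DescriptorExtractor.ORB;
writeDebugImage = true; // enables all four debug outputs inside init()
init();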
 
Example 4
public static Mat FeatureSiftLannbased(Mat src, Mat dst){
	FeatureDetector fd = FeatureDetector.create(FeatureDetector.SIFT);
	DescriptorExtractor de = DescriptorExtractor.create(DescriptorExtractor.SIFT);
	DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
	
	// detect, describe and draw keypoints for the first image
	MatOfKeyPoint mkp = new MatOfKeyPoint();
	fd.detect(src, mkp);
	Mat desc = new Mat();
	de.compute(src, mkp, desc);
	Features2d.drawKeypoints(src, mkp, src);
	
	// ... and for the second image
	MatOfKeyPoint mkp2 = new MatOfKeyPoint();
	fd.detect(dst, mkp2);
	Mat desc2 = new Mat();
	de.compute(dst, mkp2, desc2);
	Features2d.drawKeypoints(dst, mkp2, dst);
	
	// match the two descriptor sets
	MatOfDMatch matches = new MatOfDMatch();
	matcher.match(desc, desc2, matches);
	
	// keep only matches whose query and train indices lie close together
	// (a crude heuristic; note it is not based on descriptor distance)
	List<DMatch> allMatches = matches.toList();
	List<DMatch> goodMatches = new ArrayList<DMatch>();
	for (DMatch dmatch : allMatches) {
		if (Math.abs(dmatch.queryIdx - dmatch.trainIdx) < 10) {
			goodMatches.add(dmatch);
		}
	}
	matches.fromList(goodMatches);
	
	// draw the result
	Mat outImage = new Mat();
	Features2d.drawMatches(src, mkp, dst, mkp2, matches, outImage);
	return outImage;
}
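A hedged usage sketch (paths are illustrative; on OpenCV 2.4 use Highgui.imread/imwrite instead of Imgcodecs):

System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
Mat src = Imgcodecs.imread("query.jpg");
Mat dst = Imgcodecs.imread("scene.jpg");
Mat result = FeatureSiftLannbased(src, dst);
Imgcodecs.imwrite("matches.jpg", result);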
 
Example 5
/**
 * Calculates descriptors for the provided image, using the detectorType
 * and descriptorType provided at construction
 * @param buffer the encoded image bytes
 * @return the computed descriptor matrix
 * @throws IOException
 */
private Mat calculateDescriptors(byte[] buffer) throws IOException{
	MatOfByte mob = new MatOfByte(buffer);
	Mat image = Highgui.imdecode(mob, Highgui.CV_LOAD_IMAGE_ANYCOLOR);
	
	FeatureDetector siftDetector = FeatureDetector.create(detectorType);
	MatOfKeyPoint mokp = new MatOfKeyPoint();
	siftDetector.detect(image, mokp);
	
	Mat descriptors = new Mat();
	DescriptorExtractor extractor = DescriptorExtractor.create(descriptorType);
	extractor.compute(image, mokp, descriptors);
	return descriptors;
}
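A hedged usage sketch (the path is illustrative; assumes java.nio.file.Files and Paths):

byte[] buffer = Files.readAllBytes(Paths.get("image.jpg"));
Mat descriptors = calculateDescriptors(buffer);
System.out.println("descriptors: " + descriptors.rows() + " x " + descriptors.cols());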
 
Example 6
/**
 * Instantiate an object detector based on the FAST, BRIEF, and BRUTEFORCE_HAMMING algorithms
 */
public ObjectDetection() {
    detector = FeatureDetector.create(FeatureDetectorType.FAST.val());
    extractor = DescriptorExtractor.create(DescriptorExtractorType.BRIEF.val());
    matcher = DescriptorMatcher.create(DescriptorMatcherType.BRUTEFORCE_HAMMING.val());
}
 
Example 7
@Test
	public void imgMatching2() throws Exception {
		System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

		Mat src_base = Imgcodecs.imread("g:/test/find-src.jpg");
		Mat src_test = Imgcodecs.imread("g:/test/find-dest2.jpg");
		
		Mat gray_base = new Mat();
		Mat gray_test = new Mat();
		// convert to grayscale (note: imread loads BGR, so COLOR_BGR2GRAY is the accurate flag)
		Imgproc.cvtColor(src_base, gray_base, Imgproc.COLOR_BGR2GRAY);
		Imgproc.cvtColor(src_test, gray_test, Imgproc.COLOR_BGR2GRAY);
		// initialize the ORB detector and descriptor extractor
		FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.ORB); // note: the prebuilt OpenCV (Windows) Java bindings do not offer the SIFT/SURF detectors here, which cost the original author a lot of debugging time
		DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
		// declare the keypoint and descriptor matrices
		MatOfKeyPoint keyPoint1 = new MatOfKeyPoint(), keyPoint2 = new MatOfKeyPoint();
		Mat descriptorMat1 = new Mat(), descriptorMat2 = new Mat();
		// detect the ORB keypoints
		featureDetector.detect(gray_base, keyPoint1);
		featureDetector.detect(gray_test, keyPoint2);
		

        Mat output = new Mat();
        Features2d.drawKeypoints(gray_base, keyPoint1, output);
        Imgcodecs.imwrite("g:/test/out.jpg", output);
        
		// compute the ORB descriptor matrices
		descriptorExtractor.compute(gray_base, keyPoint1, descriptorMat1);
		descriptorExtractor.compute(gray_test, keyPoint2, descriptorMat2);
		float result = 0;
		// match the feature points
		System.out.println("base keypoints: " + keyPoint1.size());
		System.out.println("test keypoints: " + keyPoint2.size());
		if (!keyPoint1.empty() && !keyPoint2.empty()) {
			// FlannBasedMatcher matcher = new FlannBasedMatcher();
			DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_L1);
			MatOfDMatch matches = new MatOfDMatch();
			matcher.match(descriptorMat1, descriptorMat2, matches);
			// keep only sufficiently close matches
			double minDist = 100;
			DMatch[] dMatchs = matches.toArray();
			int num = 0;
			for (int i = 0; i < dMatchs.length; i++) {
				if (dMatchs[i].distance <= 2 * minDist) {
					result += dMatchs[i].distance * dMatchs[i].distance;
					num++;
				}
			}
			// compute the match score (mean squared distance; lower is better)
			if (num > 0) result /= num;
		}
		System.out.println(result);
	}
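A common, stronger filter than the fixed distance threshold above is Lowe's ratio test via knnMatch; a hedged sketch (not part of the original test):

List<MatOfDMatch> knnMatches = new ArrayList<MatOfDMatch>();
matcher.knnMatch(descriptorMat1, descriptorMat2, knnMatches, 2);
List<DMatch> good = new ArrayList<DMatch>();
for (MatOfDMatch mm : knnMatches) {
	DMatch[] pair = mm.toArray();
	if (pair.length >= 2 && pair[0].distance < 0.75f * pair[1].distance) {
		good.add(pair[0]); // keep matches whose best distance clearly beats the runner-up
	}
}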
 
Example 8
public static void main(String[] args){
	// first some global (topology configuration)
	StormCVConfig conf = new StormCVConfig();

	/**
	 * Sets the OpenCV library to be used which depends on the system the topology is being executed on
	 */
	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
	
	conf.setNumWorkers(8); // number of workers in the topology
	conf.setMaxSpoutPending(32); // maximum un-acked/un-failed frames per spout (spout blocks if this number is reached)
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // whether Storm should time out messages
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)
	
	String userDir = System.getProperty("user.dir").replaceAll("\\\\", "/");
	// create a list with files to be processed, in this case just one. Multiple files will be spread over the available spouts
	List<String> files = new ArrayList<String>();
	files.add( "file://"+ userDir + "/resources/data/" );

	int frameSkip = 13; 
	
	// now create the topology itself (spout -> scale -> {face detection, sift} -> drawer -> streamer)
	TopologyBuilder builder = new TopologyBuilder();
	 // just one spout reading video files, emitting 1 out of every frameSkip frames
	builder.setSpout("spout", new CVParticleSpout( new FileFrameFetcher(files).frameSkip(frameSkip) ), 1 );
	
	// add bolt that scales frames down to 25% of the original size 
	builder.setBolt("scale", new SingleInputBolt( new ScaleImageOp(0.25f)), 1)
		.shuffleGrouping("spout");
	
	// one bolt with a HaarCascade classifier detecting faces. This operation outputs a Frame including the Features with detected faces.
	// the xml file must be present on the classpath!
	builder.setBolt("face", new SingleInputBolt( new HaarCascadeOp("face", "lbpcascade_frontalface.xml").outputFrame(true)), 1)
		.shuffleGrouping("scale");
	
	// add a bolt that performs SIFT keypoint extraction
	builder.setBolt("sift", new SingleInputBolt( new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT).outputFrame(false)), 2)
		.shuffleGrouping("scale");
	
	// Batch bolt that waits for input from both the face and sift detection bolts and combines them in a single frame object
	builder.setBolt("combiner", new BatchInputBolt(new SequenceNrBatcher(2), new FeatureCombinerOp()), 1)
		.fieldsGrouping("sift", new Fields(FrameSerializer.STREAMID))
		.fieldsGrouping("face", new Fields(FrameSerializer.STREAMID));
	
	// simple bolt that draws Features (i.e. locations of features) into the frame
	builder.setBolt("drawer", new SingleInputBolt(new DrawFeaturesOp()), 1)
		.shuffleGrouping("combiner");
	
	// add bolt that creates a webservice on port 8558 enabling users to view the result
	builder.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, frameSkip).maxSize(6), // note: this batcher is required; it buffers the frames and maintains their order
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
		, 1)
		.shuffleGrouping("drawer");

	// NOTE: if the topology is started (locally) go to http://localhost:8558/streaming/tiles and click the image to see the stream!
	
	try {
		
		// run in local mode
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "multifeature", conf, builder.createTopology() );
		Utils.sleep(120*1000); // run two minutes and then kill the topology
		cluster.shutdown();
		System.exit(1);
		
		// run on a storm cluster
		// StormSubmitter.submitTopology("some_topology_name", conf, builder.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 9
public static void main(String[] args){
	// first some global (topology configuration)
	StormCVConfig conf = new StormCVConfig();

	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
			
	conf.setNumWorkers(5); // number of workers in the topology
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // whether Storm should time out messages
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)
	conf.put(Config.NIMBUS_TASK_LAUNCH_SECS, 30);
	
	String userDir = System.getProperty("user.dir").replaceAll("\\\\", "/");
			
	List<String> prototypes = new ArrayList<String>();
	prototypes.add( "file://"+ userDir +"/resources/data" );
	
	// create a linear DRPC builder called 'match'
	LinearDRPCTopologyBuilder builder = new LinearDRPCTopologyBuilder("match");

	//add a FeatureMatchRequestOp that receives drpc requests
	builder.addBolt(new RequestBolt(new FeatureMatchRequestOp()), 1); 
	
	 // add a bolt that performs sift extraction (as used in other examples!)
	builder.addBolt(new SingleInputBolt(
		new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT).outputFrame(false)
		), 1).shuffleGrouping();

	// add a bolt that matches incoming queries against the prototypes it loaded during prepare.
	// The prototypes are divided over the available tasks, which means each query has to be sent to all tasks (hence the allGrouping)
	// the matcher only reports a match if at least 1 strong match has been found (can be set to 0)
	builder.addBolt(new SingleInputBolt(new PartialMatcher(prototypes, 0, 0.5f)), 2).allGrouping(); 
	
	// add a bolt that aggregates all the results it gets from the two matchers 
	builder.addBolt(new BatchBolt(new FeatureMatchResultOp(true)), 1).fieldsGrouping(new Fields(CVParticleSerializer.REQUESTID));
	
	// create local drpc server and cluster. Deploy the drpc topology on the cluster
	LocalDRPC drpc = new LocalDRPC();
	LocalCluster cluster = new LocalCluster();
	cluster.submitTopology("drpc-demo", conf, builder.createLocalTopology(drpc));
	
	// use all face images as queries (same images as loaded by the matcher!)
	File queryDir = new File(userDir +"/resources/data/");
	for(String img : queryDir.list()){
		if(!img.endsWith(".jpg")) continue; // to avoid reading non-image files
		// execute the drpc with the image as argument. Note that the execute blocks
		String matchesJson = drpc.execute("match", "file://"+userDir +"/resources/data/"+img);
		System.out.println(img+" : " + matchesJson);
	}
		
	cluster.shutdown();
	drpc.shutdown();
}
 
Example 10
public static void main(String[] args){
	
	// first some global (topology configuration)
	StormCVConfig conf = new StormCVConfig();
	
	/**
	 * Sets the OpenCV library to be used which depends on the system the topology is being executed on
	 */
	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
	
	conf.setNumWorkers(8); // number of workers in the topology
	conf.setMaxSpoutPending(6); // maximum un-acked/un-failed frames per spout (spout blocks if this number is reached)
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // whether Storm should time out messages
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)
	
	String userDir = System.getProperty("user.dir").replaceAll("\\\\", "/");
	// create a list with files to be processed, in this case just one. Multiple files will be spread over the available spouts
	List<String> files = new ArrayList<String>();
	files.add( "file://"+ userDir+"/resources/data/"); // will process all video files in this directory (i.e. two files)

	// specify the list with SingleInputOperations to be executed sequentially by the 'fat' bolt
	@SuppressWarnings("rawtypes")
	List<ISingleInputOperation> operations = new ArrayList<ISingleInputOperation>();
	operations.add(new HaarCascadeOp("face", "lbpcascade_frontalface.xml") );
	operations.add(new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT));
	
	int frameSkip = 13; 
	
	// now create the topology itself (spout -> scale -> fat[face detection & sift] -> drawer -> streamer)
	TopologyBuilder builder = new TopologyBuilder();
	 // just one spout reading video files, emitting 1 out of every frameSkip frames
	builder.setSpout("spout", new CVParticleSpout( new FileFrameFetcher(files).frameSkip(frameSkip) ), 1 );
	
	// add bolt that scales frames down to 25% of the original size 
	builder.setBolt("scale", new SingleInputBolt( new ScaleImageOp(0.25f)), 1)
		.shuffleGrouping("spout");
	
	// three 'fat' bolts containing a SequentialFrameOp will emit a Frame object containing the detected features
	builder.setBolt("fat_features", new SingleInputBolt( new SequentialFrameOp(operations).outputFrame(true).retainImage(true)), 3)
		.shuffleGrouping("scale");
	
	// simple bolt that draws Features (i.e. locations of features) into the frame
	builder.setBolt("drawer", new SingleInputBolt(new DrawFeaturesOp()), 1)
		.shuffleGrouping("fat_features");
	
	// add bolt that creates a webservice on port 8558 enabling users to view the result
	builder.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, frameSkip).maxSize(6), // note: this batcher is required; it buffers the frames and maintains their order
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
		, 1)
		.shuffleGrouping("drawer");
	
	try {
		
		// run in local mode
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "fatfeature", conf, builder.createTopology() );
		Utils.sleep(120*1000); // run for two minutes and then kill the topology
		cluster.shutdown();
		System.exit(1);
		
		// run on a storm cluster
		// StormSubmitter.submitTopology("some_topology_name", conf, builder.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 11
public static void main(String[] args){
	// first some global (topology configuration)
	StormCVConfig conf = new StormCVConfig();
	
	/**
	 * Sets the OpenCV library to be used which depends on the system the topology is being executed on
	 */
	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
	
	conf.setNumWorkers(3); // number of workers in the topology
	conf.setMaxSpoutPending(32); // maximum un-acked/un-failed frames per spout (spout blocks if this number is reached)
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // whether Storm should time out messages
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)
	
	List<String> urls = new ArrayList<String>();
	urls.add( "rtsp://streaming3.webcam.nl:1935/n224/n224.stream" );
	urls.add("rtsp://streaming3.webcam.nl:1935/n233/n233.stream");

	@SuppressWarnings("rawtypes")
	List<ISingleInputOperation> operations = new ArrayList<ISingleInputOperation>();
	operations.add(new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT).outputFrame(true));
	operations.add(new DrawFeaturesOp() );
	 
	int frameSkip = 13; 
	
	TopologyBuilder builder = new TopologyBuilder();
	builder.setSpout("spout", new CVParticleSpout( 
			new FetchAndOperateFetcher( // use a meta fetcher to combine a Fetcher and Operation
					new StreamFrameFetcher(urls).frameSkip(frameSkip), // use a normal fetcher to get video frames from streams
					new SequentialFrameOp(operations).outputFrame(true).retainImage(true) // use a sequential operation to execute a number of operations on frames
				)
			) , 2 );
	
	// add bolt that creates a webservice on port 8558 enabling users to view the result
	builder.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, frameSkip).maxSize(6), // note: this batcher is required; it buffers the frames and maintains their order
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
		, 1)
		.shuffleGrouping("spout");
	
	try {
		
		// run in local mode
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "sequential_spout", conf, builder.createTopology() );
		Utils.sleep(120*1000); // run two minutes and then kill the topology
		cluster.shutdown();
		System.exit(1);
		
		// run on a storm cluster
		// StormSubmitter.submitTopology("some_topology_name", conf, builder.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 12
public static void main(String[] args){
	
	// first some global (topology) configuration
	StormCVConfig conf = new StormCVConfig();
	
	/**
	 * Sets the OpenCV library to be used which depends on the system the topology is being executed on
	 */
	//conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
	
	conf.setNumWorkers(4); // number of workers in the topology
	conf.setMaxSpoutPending(20); // maximum un-acked/un-failed frames per spout (spout blocks if this number is reached)
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // whether Storm should time out messages
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)
	
	List<String> urls = new ArrayList<String>();
	urls.add( "rtsp://streaming3.webcam.nl:1935/n224/n224.stream" );
	urls.add("rtsp://streaming3.webcam.nl:1935/n233/n233.stream");
	urls.add("rtsp://streaming3.webcam.nl:1935/n302/n302.stream"); 
	urls.add("rtsp://streaming3.webcam.nl:1935/n346/n346.stream");
	urls.add("rtsp://streaming3.webcam.nl:1935/n319/n319.stream"); 
	urls.add("rtsp://streaming3.webcam.nl:1935/n794b/n794b.stream"); 

	int frameSkip = 13;
	
	// specify the list with SingleInputOperations to be executed sequentially by the 'fat' bolt
	@SuppressWarnings("rawtypes")
	List<ISingleInputOperation> operations = new ArrayList<ISingleInputOperation>();
	operations.add(new ScaleImageOp(0.5f) );
	operations.add(new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT));
	operations.add(new FeatureExtractionOp("surf", FeatureDetector.SURF, DescriptorExtractor.SURF));
	operations.add(new DrawFeaturesOp());
	
	// now create the topology itself (spout -> background subtraction --> streamer)
	TopologyBuilder builder = new TopologyBuilder();
			
	// number of tasks must match the number of urls!
	builder.setSpout("spout", new CVParticleSpout( new StreamFrameFetcher(urls).frameSkip(frameSkip) ), 1 ).setNumTasks(6);
	
	// two 'fat' bolts containing a SequentialFrameOp will emit a Frame object containing the detected features
	builder.setBolt("features", new SingleInputBolt( new SequentialFrameOp(operations).outputFrame(true).retainImage(true)), 2)
		.shuffleGrouping("spout");
	
	// add bolt that creates a webservice on port 8558 enabling users to view the result
	builder.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, frameSkip).maxSize(6), // note: this batcher is required; it buffers the frames and maintains their order
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
		, 1)
		.shuffleGrouping("features");
	
	try {
		
		// run in local mode
		/*
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "deployment_Test", conf, builder.createTopology() );
		Utils.sleep(120*1000); // run for two minutes and then kill the topology
		cluster.shutdown();
		System.exit(1);
		*/
		// run on a storm cluster
		StormSubmitter.submitTopology("Your_topology_name", conf, builder.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 13
@Override
public List<CVParticle> execute(CVParticle particle) throws Exception {
	List<CVParticle> result = new ArrayList<CVParticle>();
	if(!(particle instanceof Frame)) return result;
	
	Frame frame = (Frame)particle;
	if(frame.getImageType().equals(Frame.NO_IMAGE)) return result;
	try{
		MatOfByte mob = new MatOfByte(frame.getImageBytes());
		Mat image = Highgui.imdecode(mob, Highgui.CV_LOAD_IMAGE_ANYCOLOR);
		
		FeatureDetector siftDetector = FeatureDetector.create(detectorType);
		MatOfKeyPoint mokp = new MatOfKeyPoint();
		siftDetector.detect(image, mokp);
		List<KeyPoint> keypoints = mokp.toList();
		
		Mat descriptors = new Mat();
		DescriptorExtractor extractor = DescriptorExtractor.create(descriptorType);
		extractor.compute(image, mokp, descriptors);
		List<Descriptor> descrList = new ArrayList<Descriptor>();
		float[] tmp = new float[1];
		for(int r=0; r<descriptors.rows(); r++){
			float[] values = new float[descriptors.cols()];
			for(int c=0; c<descriptors.cols(); c++){
				descriptors.get(r, c, tmp);
				values[c] = tmp[0];
			}
			descrList.add(new Descriptor(frame.getStreamId(), frame.getSequenceNr(), new Rectangle((int)keypoints.get(r).pt.x, (int)keypoints.get(r).pt.y, 0, 0), 0, values));
		}
		
		Feature feature = new Feature(frame.getStreamId(), frame.getSequenceNr(), featureName, 0, descrList, null);
		if(outputFrame){
			frame.getFeatures().add(feature);
			result.add(frame);
		}else{
			result.add(feature);
		}		
	}catch(Exception e){
		// catching the exception at this point prevents a fail from being sent!
		logger.warn("Unable to extract features for frame!", e);
	}
	return result;
}
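One caveat worth noting (our observation, not from the original): Mat.get(row, col, float[]) only accepts CV_32F data, which fits float descriptors such as SIFT/SURF; binary descriptors (ORB, BRIEF) come back as CV_8U and the read would throw. A hedged guard that could precede the read loop:

if (descriptors.type() != CvType.CV_32F) {
	descriptors.convertTo(descriptors, CvType.CV_32F); // make binary descriptors readable via float[]
}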
 
Example 14
public static DescriptorExtractor create(int extractorType)
{
    DescriptorExtractor retVal = DescriptorExtractor.__fromPtr__(create_0(extractorType));
    return retVal;
}
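A usage sketch for this factory (the ORB constant and the input Mats are illustrative):

DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
Mat descriptors = new Mat();
extractor.compute(image, keypoints, descriptors); // image (Mat) and keypoints (MatOfKeyPoint) assumed computed earlier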
 
Example 15
/**
 * Instantiate an object detector based on custom algorithms
 *
 * @param detector  Keypoint detection algorithm
 * @param extractor Keypoint descriptor extractor
 * @param matcher   Descriptor matcher
 */
public ObjectDetection(FeatureDetectorType detector, DescriptorExtractorType extractor, DescriptorMatcherType matcher) {
    this.detector = FeatureDetector.create(detector.val());
    this.extractor = DescriptorExtractor.create(extractor.val());
    this.matcher = DescriptorMatcher.create(matcher.val());
}
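A usage sketch, reusing the enum constants seen in Example 6 (anything beyond those is illustrative):

ObjectDetection detection = new ObjectDetection(
		FeatureDetectorType.FAST,
		DescriptorExtractorType.BRIEF,
		DescriptorMatcherType.BRUTEFORCE_HAMMING);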
 
Example 16
protected DescriptorExtractor(long addr) { nativeObj = addr; } 
Example 17
public static DescriptorExtractor __fromPtr__(long addr) { return new DescriptorExtractor(addr); } 
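These one-liners implement the usual OpenCV Java binding pattern: the protected constructor stores the address of the wrapped native (C++) object, and __fromPtr__ turns a raw native pointer back into a Java wrapper, exactly as the create() factory in Example 14 does. A minimal sketch (create_0 is the generated private native method):

long addr = create_0(DescriptorExtractor.ORB); // native call returning a pointer to the C++ object
DescriptorExtractor extractor = DescriptorExtractor.__fromPtr__(addr);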