From: JPercivall
To: dev@nifi.apache.org
Reply-To: dev@nifi.apache.org
Mailing-List: contact dev-help@nifi.apache.org; run by ezmlm
Subject: [GitHub] nifi pull request #556: NIFI-615 - Create a processor to extract WAV file ch...
Message-Id: <20160621222403.3E42FE020A@git1-us-west.apache.org>
Date: Tue, 21 Jun 2016 22:24:03 +0000 (UTC)

Github user JPercivall commented on a diff in the pull request:

https://github.com/apache/nifi/pull/556#discussion_r67963557

--- Diff: nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java ---
@@ -0,0 +1,311 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.media;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.regex.Pattern;
+
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.components.ValidationContext;
+import org.apache.nifi.components.ValidationResult;
+import org.apache.nifi.components.Validator;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.logging.ProcessorLog;
+import org.apache.nifi.processor.AbstractProcessor;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.io.InputStreamCallback;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.util.ObjectHolder;
+
+import org.apache.tika.exception.TikaException;
+import org.apache.tika.io.TikaInputStream;
+import org.apache.tika.metadata.Metadata;
+import org.apache.tika.parser.AutoDetectParser;
+import org.apache.tika.sax.BodyContentHandler;
+import org.xml.sax.SAXException;
+
+@InputRequirement(Requirement.INPUT_REQUIRED)
+@Tags({"media", "file", "format", "metadata", "audio", "video", "image", "document", "pdf"})
+@CapabilityDescription("Extract the content metadata from flowfiles containing audio, video, image, and other file "
+        + "types. This processor relies on the Apache Tika project for file format detection and parsing. It "
+        + "extracts a long list of metadata types for media files including audio, video, and print media "
+        + "formats. "
+        + "For more details and the list of supported file types, visit the library's website "
+        + "at http://tika.apache.org/.")
+@WritesAttributes({@WritesAttribute(attribute = "<Metadata Key Prefix><attribute name>", description = "The extracted content metadata "
+        + "will be inserted with the attribute name \"<Metadata Key Prefix><attribute name>\", or \"<attribute name>\" if "
+        + "\"Metadata Key Prefix\" is not provided.")})
+@SupportsBatching
+public class ExtractMediaMetadata extends AbstractProcessor {
+
+    static final PropertyDescriptor MAX_NUMBER_OF_ATTRIBUTES = new PropertyDescriptor.Builder()
+            .name("Max Number of Attributes")
+            .description("Specify the max number of attributes to add to the flowfile. There is no guarantee in what order"
+                    + " the tags will be processed. By default it will process all of them.")
+            .required(false)
+            .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
+            .build();
+
+    private static final PropertyDescriptor MAX_ATTRIBUTE_LENGTH = new PropertyDescriptor.Builder()
+            .name("Max Attribute Length")
+            .description("Specifies the maximum length of a single attribute value. When a metadata item has multiple"
+                    + " values, they will be merged until this length is reached and then \", ...\" will be added as"
+                    + " an indicator that additional values were dropped. If a single value is longer than this, it"
+                    + " will be truncated and \"(truncated)\" appended to indicate that truncation occurred.")
+            .required(true)
+            .defaultValue("10240")
--- End diff --

That is a huge default max length; is there a reason for setting it so high?

---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---
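For context on the Tika classes imported in the diff above (TikaInputStream,
Metadata, AutoDetectParser, BodyContentHandler), below is a minimal sketch of
the general extraction pattern those classes support. It is not the PR's
implementation; the class name, method name, and write-limit choice are
illustrative assumptions only.

    import java.io.InputStream;

    import org.apache.tika.io.TikaInputStream;
    import org.apache.tika.metadata.Metadata;
    import org.apache.tika.parser.AutoDetectParser;
    import org.apache.tika.parser.ParseContext;
    import org.apache.tika.sax.BodyContentHandler;

    // Hypothetical helper, not part of the PR: shows the basic Tika call pattern.
    public class TikaMetadataSketch {

        public static Metadata extract(final InputStream rawStream) throws Exception {
            final Metadata metadata = new Metadata();
            // -1 disables BodyContentHandler's default write limit; the processor
            // itself may configure this differently (assumption).
            final BodyContentHandler handler = new BodyContentHandler(-1);
            final AutoDetectParser parser = new AutoDetectParser();
            try (TikaInputStream tikaStream = TikaInputStream.get(rawStream)) {
                // AutoDetectParser sniffs the format (WAV, MP3, PDF, ...) and the
                // matching parser fills the Metadata object with key/value pairs.
                parser.parse(tikaStream, handler, metadata, new ParseContext());
            }
            return metadata;
        }
    }

Each entry returned by metadata.names() would then become a candidate flowfile
attribute, which is where properties such as Max Number of Attributes and Max
Attribute Length come into play.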