/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.oozie.action.hadoop;

import org.apache.oozie.DagELFunctions;
import org.apache.oozie.util.ELEvaluationException;
import org.apache.oozie.util.XLog;
import org.apache.oozie.workflow.WorkflowInstance;
import org.json.simple.JSONValue;

import java.util.Map;

/**
 * EL functions that expose Hadoop action counters to workflow expressions.
 */
public class HadoopELFunctions {

    // Suffix of the transient workflow variable used to cache the parsed counters of an action node.
    private static final String HADOOP_COUNTERS = "oozie.el.action.hadoop.counters";

    // Counter group name used by Hadoop versions prior to 0.23 (mapred API).
    public static final String RECORDS = "org.apache.hadoop.mapred.Task$Counter";
    // Counter names within the records group.
    public static final String MAP_IN = "MAP_INPUT_RECORDS";
    public static final String MAP_OUT = "MAP_OUTPUT_RECORDS";
    public static final String REDUCE_IN = "REDUCE_INPUT_RECORDS";
    public static final String REDUCE_OUT = "REDUCE_OUTPUT_RECORDS";
    public static final String GROUPS = "REDUCE_INPUT_GROUPS";

    // Counter group name used by Hadoop 0.23 and later (mapreduce API), which replaced Task$Counter.
    private static final String RECORDS_023 = "org.apache.hadoop.mapreduce.TaskCounter";

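    /**
     * Returns the Hadoop counters recorded for the given action node, keyed by counter group name and
     * counter name. The counters are parsed from the action's JSON counters string once and then cached
     * as a transient workflow variable. Illustrative EL usage (the node name 'mr-node' is an example):
     * {@code ${hadoop:counters('mr-node')[RECORDS][MAP_OUT]}}.
     *
     * @param nodeName name of the workflow action node.
     * @return the action's counters as a nested map.
     * @throws ELEvaluationException if the counters cannot be evaluated.
     */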
    @SuppressWarnings("unchecked")
    public static Map<String, Map<String, Long>> hadoop_counters(String nodeName) throws ELEvaluationException {
        WorkflowInstance instance = DagELFunctions.getWorkflow().getWorkflowInstance();
        Object obj = instance.getTransientVar(nodeName + WorkflowInstance.NODE_VAR_SEPARATOR + HADOOP_COUNTERS);
        Map<String, Map<String, Long>> counters = (Map<String, Map<String, Long>>) obj;
        if (counters == null) {
            counters = getCounters(nodeName);
            // Hadoop 0.23 deprecated and then removed the 'org.apache.hadoop.mapred.Task$Counter' group.
            // If the old group name is missing, re-map the counters from the new group name under the old
            // one so that expressions written against the old name keep working.
            if (counters.get(RECORDS) == null) {
                counters.put(RECORDS, counters.get(RECORDS_023));
            }
            instance.setTransientVar(nodeName + WorkflowInstance.NODE_VAR_SEPARATOR + HADOOP_COUNTERS, counters);
        }
        return counters;
    }

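    /**
     * Parses the counters JSON recorded by the action executor (see
     * {@link MapReduceActionExecutor#HADOOP_COUNTERS}) for the given action node.
     *
     * @param nodeName name of the workflow action node.
     * @return the parsed counters as a nested map.
     * @throws IllegalArgumentException if no counters are available for the action.
     */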
    @SuppressWarnings("unchecked")
    private static Map<String, Map<String, Long>> getCounters(String nodeName) throws ELEvaluationException {
        String jsonCounters = DagELFunctions.getActionVar(nodeName, MapReduceActionExecutor.HADOOP_COUNTERS);
        if (jsonCounters == null) {
            throw new IllegalArgumentException(XLog.format("Hadoop counters not available for action [{0}]", nodeName));
        }
        return (Map<String, Map<String, Long>>) JSONValue.parse(jsonCounters);
    }

}