Not able to see LOG4J messages

Hi,

I have written Java code for a custom LOAD function in Pig. I want to log some info, so I have used log4j, but I am not able to see the messages in the logs. Could you please help me with how to log messages in CloudxLab?

I have placed my log4j properties file in the src folder, exported my code as a JAR file, and ran it.
I am getting a message like “intialise log4j propeties”.

package learnhadoop;


import java.io.IOException;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.io.Reference.Range;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.log4j.Logger;
import org.apache.pig.LoadFunc;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;



/**
 * Custom Pig {@link LoadFunc} that reads lines via {@link TextInputFormat}
 * and emits a fixed two-field tuple per input record.
 *
 * <p>NOTE(review): the record's key/value from the reader are currently
 * ignored — every tuple contains the constants "one" and "two".
 */
public class CustomLoad extends LoadFunc {

	static final Logger logger = Logger.getLogger(CustomLoad.class.getName());

	private final TupleFactory tupleFactory = TupleFactory.getInstance();
	private RecordReader reader;

	/** Points the underlying InputFormat at the load location given in the Pig script. */
	@Override
	public void setLocation(String location, Job job) throws IOException {
		FileInputFormat.setInputPaths(job, location);
	}

	/** Reads plain text lines; one record per line. */
	@SuppressWarnings("rawtypes")
	@Override
	public InputFormat getInputFormat() {
		return new TextInputFormat();
	}

	/** Pig hands us the reader for the current split; keep it for getNext(). */
	@Override
	public void prepareToRead(RecordReader reader, PigSplit split) {
		this.reader = reader;
	}

	/**
	 * Returns the next tuple, or {@code null} when the split is exhausted.
	 *
	 * @throws IOException wrapped as {@link ExecException} on interruption
	 */
	@Override
	public Tuple getNext() throws IOException {
		try {
			if (!reader.nextKeyValue()) {
				return null;
			}
			logger.warn("custom load -creating tuple");
			Tuple tuple = tupleFactory.newTuple(2);
			// BUG FIX: newTuple(2) has valid field indices 0 and 1.
			// The original set(1, ...) / set(2, ...) threw an
			// out-of-range exception on index 2, failing every task
			// (which is why no custom log output ever appeared).
			tuple.set(0, "one");
			tuple.set(1, "two");
			return tuple;
		} catch (InterruptedException e) {
			// Restore the interrupt flag before rethrowing so callers
			// further up can still observe the interruption.
			Thread.currentThread().interrupt();
			throw new ExecException(e);
		}
	}
}

Hi Charan,

You will have to initialize the logger in the following way:

 // Set up a simple configuration that logs on the console.
 BasicConfigurator.configure();

Also, do not forget to:

import org.apache.log4j.BasicConfigurator;

Hi Sandeep,

I am still not able to see messages in the logs after setting the simple configuration. Could you please help me with this? Could you please provide sample code, and do I need to set any log4j properties in the src folder of my Eclipse project?

PIG script: steps I followed

    grunt> Register  hdfs://ip-172-31-53-48.ec2.internal:8020/user/charanrajlv3971/learnhadoop.jar
    grunt> data = LOAD 'cust.txt' USING learnhadoop.CustomLoad() as (data:chararray);
    grunt> dump data;

job id :
application_1495368194937_2780

Logs:
I clicked on the job ID and then clicked Logs on the left-hand side of the page. Apart from the default job logs, I am not able to see my own messages.

Source code: exported as a JAR file from Eclipse and uploaded to HDFS using Hue
package learnhadoop;

import java.io.IOException;

import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.Logger;
import org.apache.pig.LoadFunc;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;



/**
 * Custom Pig {@link LoadFunc} that reads text lines via {@link TextInputFormat}
 * and emits a fixed two-field tuple per input record, with log4j logging
 * bootstrapped in the constructor.
 *
 * <p>NOTE(review): {@code BasicConfigurator.configure()} only configures log4j
 * in the JVM where this constructor runs; task-side (backend) log output still
 * lands in the YARN container logs, not the Pig client console.
 */
public class CustomLoad extends LoadFunc {

	static final Logger logger = Logger.getLogger(CustomLoad.class.getName());

	private final TupleFactory tupleFactory = TupleFactory.getInstance();
	private RecordReader reader;

	/** Sets up a minimal console appender so logger output is not silently dropped. */
	public CustomLoad(){
		BasicConfigurator.configure();
		logger.warn("custom load constructor");
	}

	/** Points the underlying InputFormat at the load location given in the Pig script. */
	@Override
	public void setLocation(String location, Job job) throws IOException {
		FileInputFormat.setInputPaths(job, location);
	}

	/** Reads plain text lines; one record per line. */
	@SuppressWarnings("rawtypes")
	@Override
	public InputFormat getInputFormat() {
		return new TextInputFormat();
	}

	/** Pig hands us the reader for the current split; keep it for getNext(). */
	@Override
	public void prepareToRead(RecordReader reader, PigSplit split) {
		this.reader = reader;
	}

	/**
	 * Returns the next tuple, or {@code null} when the split is exhausted.
	 *
	 * @throws IOException wrapped as {@link ExecException} on interruption
	 */
	@Override
	public Tuple getNext() throws IOException {
		logger.error("get next tuple - method");
		try {
			if (!reader.nextKeyValue()) {
				return null;
			}
			logger.warn("custom load -creating tuple");
			Tuple tuple = tupleFactory.newTuple(2);
			// BUG FIX: newTuple(2) has valid field indices 0 and 1.
			// The original set(1, ...) / set(2, ...) threw an
			// out-of-range exception on index 2, failing every task
			// (which is why only the default job logs were visible).
			tuple.set(0, "one");
			tuple.set(1, "two");
			logger.debug("checking logs is working or not");
			return tuple;
		} catch (InterruptedException e) {
			// Restore the interrupt flag before rethrowing so callers
			// further up can still observe the interruption.
			Thread.currentThread().interrupt();
			throw new ExecException(e);
		}
	}
}