Hi Team -
My word count program works in IntelliJ. However, when I run the same program using spark-submit I get the error below.
java.lang.ClassNotFoundException: com.test.Countt
at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.spark.util.Utils$.classForName(Utils.scala:230)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:712)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
My code:
package com.test
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
object countt {
  /**
   * Word-count entry point, launched via spark-submit or directly from the IDE.
   *
   * NOTE: spark-submit's `--class` argument must match this object's fully
   * qualified name exactly, including case: `com.test.countt` (not `Countt`) —
   * a mismatch produces `java.lang.ClassNotFoundException`.
   *
   * @param args optional; args(0) is the input file path. Falls back to a
   *             local test file when no argument is given, so running from
   *             IntelliJ without program arguments still works.
   */
  def main(args: Array[String]): Unit = {
    // Create the Spark context. setMaster here is kept so the job runs
    // inside IntelliJ; be aware it overrides `--master` on spark-submit —
    // remove it if you want the submit command's master to win.
    val sc = new SparkContext(new SparkConf().setAppName("Spark Count").setMaster("local[2]"))

    // Use the path passed on the command line if present; otherwise the
    // hard-coded development file. (Straight quotes — the original paste
    // contained curly quotes, which do not compile.)
    val inputPath =
      if (args.nonEmpty) args(0) else "C:/Users/KRM1NH3/Desktop/test.txt"

    // Read the text file and split each line into words.
    // The `_` placeholders were stripped by the forum formatter in the
    // original paste; they are required for these lambdas to compile.
    val tokenized = sc.textFile(inputPath).flatMap(_.split(" "))

    // Count the occurrences of each word.
    val wordCounts = tokenized.map((_, 1)).reduceByKey(_ + _)

    // get threshold (disabled in the original; kept for reference)
    //val threshold = args(1).toInt
    // filter out words with fewer than threshold occurrences
    //val filtered = wordCounts.filter(_._2 >= threshold)
    // count characters
    //val charCounts = filtered.flatMap(_._1.toCharArray).map((_, 1)).reduceByKey(_ + _)

    println(wordCounts.collect().mkString(", "))

    // Release Spark resources cleanly.
    sc.stop()
  }
}
Spark Submit command:
spark-submit2 --class "com.test.countt" --master local[4] WordCount1.jar "test.txt"
(Note: the class name is case-sensitive and must match the object's name exactly — your object is declared `object countt`, so `--class "com.test.Countt"` causes the `ClassNotFoundException`. Also make sure WordCount1.jar was rebuilt after the last code change and actually contains com/test/countt.class.)