Print RDD elements under Spark Scala

Hi,
I am trying to run a jar through spark-submit on the CloudxLab terminal for the code given below, but it's throwing the following error:

Exception in thread "main" java.lang.NoSuchMethodError: scala.Predef$.intArrayOps([I)[I

The same code runs fine on my personal system. Please check.
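For reference, I am submitting with roughly the following command (the jar path is what sbt package produces for this project; the exact file name on my machine may differ):

spark-submit --class com.sparkscala.ops.SparkScalaOps target/scala-2.12/sparkscalaops_2.12-0.1.jar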

IntelliJ code:

package com.sparkscala.ops

import org.apache.spark.{SparkConf, SparkContext}

object SparkScalaOps {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("SparkScalaOps")
    val sc = new SparkContext(conf)

    val data: List[Int] = List(1, 2, 4, 3, 5, 6, 7)
    val x = sc.parallelize(data)       // RDD[Int]
    val x_map = x.map(f => (f, 1))     // RDD[(Int, Int)]: pair each element with a count of 1

    println("value of x and x_map are as follows:")
    x.collect().foreach(println)
    x_map.collect().foreach(println)

    sc.stop()
  }

}
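Side note on printing: collect() pulls the entire RDD back to the driver, which is fine for this tiny list; for anything large, my understanding is that printing only a sample is safer, e.g.:

x.take(10).foreach(println)   // bring back and print only the first 10 elements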

////////////////////////////

build.sbt:

name := "SparkScalaOps"

version := "0.1"

scalaVersion := "2.12.4"

// https://mvnrepository.com/artifact/org.apache.spark/spark-core
libraryDependencies += "org.apache.spark" %% "spark-core" % "2.4.5"

// https://mvnrepository.com/artifact/org.apache.spark/spark-sql
libraryDependencies += "org.apache.spark" %% "spark-sql" % "2.4.5"
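Could the problem be a Scala version mismatch between my jar (built against 2.12.4) and the Spark build installed on CloudxLab? To check which Scala version the cluster's Spark uses, I believe this can be run inside spark-shell on the terminal:

util.Properties.versionString   // prints e.g. "version 2.11.12"; a 2.11 runtime with a 2.12-built jar would explain the NoSuchMethodError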