Connecting to the Hive metastore on HDFS using Hive.jl or Spark.jl

Tried sess = SparkSession(master="yarn-cluster", enable_hive_support=true)

Errors:

19/04/02 23:28:42 INFO spark.SparkContext: Running Spark version 2.3.1.xxx.xxx
19/04/02 23:28:42 WARN spark.SparkConf: spark.master yarn-cluster is deprecated in Spark 2.0+, please instead use "yarn" with specified deploy mode.
19/04/02 23:28:42 INFO spark.SparkContext: Submitted application: Julia App on Spark
19/04/02 23:28:42 ERROR spark.SparkContext: Error initializing SparkContext.
org.apache.spark.SparkException: Detected yarn cluster mode, but isn't running on a cluster. Deployment to YARN is not supported directly by SparkContext. Please use spark-submit.
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:378)
	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2493)
	at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:933)
	at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:924)
	at scala.Option.getOrElse(Option.scala:121)
	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:924)
19/04/02 23:28:42 ERROR util.Utils: Uncaught exception in thread main
java.lang.NullPointerException
	at org.apache.spark.SparkContext.org$apache$spark$SparkContext$$postApplicationEnd(SparkContext.scala:2389)
	at org.apache.spark.SparkContext$$anonfun$stop$1.apply$mcV$sp(SparkContext.scala:1904)
	at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1360)
	at org.apache.spark.SparkContext.stop(SparkContext.scala:1903)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:579)
	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2493)
	at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:933)
	at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:924)
	at scala.Option.getOrElse(Option.scala:121)
	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:924)
19/04/02 23:28:42 INFO spark.SparkContext: Successfully stopped SparkContext
Exception in thread "main" org.apache.spark.SparkException: Detected yarn cluster mode, but isn't running on a cluster. Deployment to YARN is not supported directly by SparkContext. Please use spark-submit.
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:378)
	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2493)
	at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:933)
	at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:924)
	at scala.Option.getOrElse(Option.scala:121)
	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:924)

Printed output:

JavaCall.JavaCallError("Error calling Java: org.apache.spark.SparkException: Detected yarn cluster mode, but isn't running on a cluster. Deployment to YARN is not supported directly by SparkContext. Please use spark-submit.")

Stacktrace:
 [1] geterror(::Bool) at /home_dir/xxxxx/.julia/packages/JavaCall/toamy/src/core.jl:294
 [2] geterror at /home_dir/xxxxx/.julia/packages/JavaCall/toamy/src/core.jl:274 [inlined]
 [3] _jcall(::JavaCall.JavaObject{Symbol("org.apache.spark.sql.SparkSession$Builder")}, ::Ptr{Nothing}, ::Ptr{Nothing}, ::Type, ::Tuple{}) at /home_dir/xxxxx/.julia/packages/JavaCall/toamy/src/core.jl:247
 [4] jcall(::JavaCall.JavaObject{Symbol("org.apache.spark.sql.SparkSession$Builder")}, ::String, ::Type, ::Tuple{}) at /home_dir/xxxxx/.julia/packages/JavaCall/toamy/src/core.jl:153
 [5] #SparkSession#8(::String, ::String, ::Dict{String,String}, ::Bool, ::Type) at /home_dir/xxxxx/.julia/packages/Spark/zK34P/src/sql.jl:25
 [6] (::getfield(Core, Symbol("#kw#Type")))(::NamedTuple{(:master, :enable_hive_support),Tuple{String,Bool}}, ::Type{SparkSession}) at ./none:0
 [7] top-level scope at In[3]:1