Search code examples
pythonjdbcjvmjpypejaydebeapi

Python JDBC Connection throwing JVM Issue


I am trying to connect to Impala database using Python jaydebeapi. I am facing the JVM issue when I am calling the connection class two times. Please find below my connection class and Sql_Query Class.

Connection_Class:

import jaydebeapi
import jpype
import datetime
import ConfigParser
from fileinput import close

config = ConfigParser.RawConfigParser ( )
config.read ( 'ConfigFile.properties' )


def Impala_Connection(sql_query):
    conn_impala = None


    try:
        jars_location = config.get ( 'Jars_info' , 'Jars_Location' )
        args = "-Djava.class.path=%s" % jars_location
        jvm_path = jpype.getDefaultJVMPath ( )
        jpype.startJVM ( jvm_path , args )

    except IOError as err:

        print('An error occurred trying to read the file:{}".format(e)')
        sql_query.close ( )
    else:

        try:

            print "Start executing: " + sql_query + " at " + str (
            datetime.datetime.now ( ).strftime ( "%Y-%m-%d %H:%M" ) ) + "\n"
            url = config.get ( 'Jars_info' , 'Jdbc_Url' )
            Jdbc_Driver_Class = config.get ( 'Jars_info' , 'Jdbc_Driver_Class' )
            username = config.get ( 'Jars_info' , 'username' )
            password = config.get ( 'Jars_info' , 'password' )
            jdbc_jar_location = config.get ( 'Jars_info' , 'Jdbc_Jar_Location' )
            conn_impala = jaydebeapi.connect ( Jdbc_Driver_Class , url , {username , password} , jdbc_jar_location )
            curs = conn_impala.cursor ( )
            sql_execution = curs.execute ( sql_query )
            data = curs.fetchall ( sql_execution )
            curs.close()
            return (data)

        except Exception, err:
            print("Something went wrong with Impala Connection: {}".format(err))
        finally:
            close(conn_impala)
            jpype.shutdownJVM()

Sql_Query_Class:

from pyspark import SparkConf, SparkContext
from com.my.common_funcitons.Impala_Query_Executor import Impala_Connection
import sys

conf = SparkConf().setAppName("pyspark")
sc = SparkContext(conf=conf)


tbl_name = sys.argv[1]
refid = sys.argv[2]
metadata_Query="SELECT * from Metadata_Table TABLE_NAME='%s' and TEMP.unique_id=%s" %(tbl_name,refid)
metadata_info=Impala_Connection(metadata_Query)

if len(metadata_info) == 0:
    new_tbl_name = tbl_name+"_%"
    metadata_Query="SELECT * from Metadata_Table TABLE_NAME='%s' and TEMP.unique_id=%s"" (new_tbl_name,refid)
    metadata_info=Impala_Connection(metadata_Query)
    for row in metadata_info:
        metadata_no_of_columns=row[0]
        metadata_table_id=row[1]

    else:

    for row in metadata_info:
        metadata_no_of_columns=row[0]
        metadata_table_id=row[1]

I have two type of table name like table and table_000 so if i am getting null data i need to add table_% and require same.when i am calling the Impala connection from the same class i am getting JVM issue please find the error stack below:

File "/usr/lib64/python2.7/site-packages/jpype/_core.py", line 50, in startJVM
    _jpype.startup(jvm, tuple(args), True)
RuntimeError: Unable to start JVM at native/common/jp_env.cpp:78

I am tried adding JVMshutdown in final class but still facing the issue.Please suggest me a solution.


Solution

  • Using jpype.isJVMStarted() function i can check whether jvm is started or not i resolved my issue.