Search code examples
apache-spark, spring-boot, cassandra, netbeans-platform

Spring Spark Cassandra - Whitelabel Error Page


I am trying to use Spark and Cassandra through Spring in NetBeans, and I get an error:

type=Internal Server Error, status=500 Failed to open native connection to Cassandra at {127.0.0.1}:9042.

Spark and Cassandra were functioning just fine before I tried to integrate Spring. There is already data in my Cassandra database, which I read through Spark and process. Basically, I want to print the results (a matrix) on a /welcome page through a RestController.

Here is my really simple File Structure: image

Here is my pom.xml:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.mycompany</groupId>
<artifactId>my-app</artifactId>
<version>1.0-SNAPSHOT</version>
<packaging>jar</packaging>
<build>
    <plugins>

        <!-- NOTE(review): maven-compiler-plugin 2.3.2 is very old; it predates the
             maven.compiler.source/target 1.8 properties set below. Presumably a 3.x
             version was intended - verify the build actually compiles at Java 8. -->
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-compiler-plugin</artifactId>
            <version>2.3.2</version>
            <configuration>
                <debug>true</debug>
            </configuration>
        </plugin>

        <!-- Repackages the jar as an executable Spring Boot fat jar. -->
        <plugin>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-maven-plugin</artifactId>
            <version>2.0.0.RELEASE</version>
            <executions>
                <execution>
                    <goals>
                        <goal>repackage</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>

    </plugins>
</build>
<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
</properties>

<dependencies>
    <!--Spring dependencies-->
    <!-- NOTE(review): jackson-databind 2.9.0 is pinned explicitly even though
         spring-boot-starter-web 2.0.0.RELEASE manages its own Jackson version;
         mixing the two can cause version conflicts - confirm this pin is needed. -->
    <dependency>
        <groupId>com.fasterxml.jackson.core</groupId>
        <artifactId>jackson-databind</artifactId>
        <version>2.9.0</version>
    </dependency>
    <dependency>
        <groupId>com.google.code.gson</groupId>
        <artifactId>gson</artifactId>
        <version>2.8.2</version>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
        <version>2.0.0.RELEASE</version>
    </dependency>
    <!-- NOTE(review): spring-core 5.0.4.RELEASE is already pulled in transitively by
         the Boot starter above; the explicit pin is redundant at best. -->
    <dependency>
        <groupId>org.springframework</groupId>
        <artifactId>spring-core</artifactId>
        <version>5.0.4.RELEASE</version>
    </dependency>

    <!--Spark dependencies-->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.11</artifactId>
        <version>2.2.1</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-sql_2.11</artifactId>
        <version>2.2.1</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-mllib_2.11</artifactId>
        <version>2.2.1</version>
    </dependency>

    <!--Cassandra dependencies--> 

    <!--Spark cassandra connector dependencies-->
    <!-- Bridges Spark SQL to Cassandra; provides the
         "org.apache.spark.sql.cassandra" data source used in the code below. -->
    <dependency>
        <groupId>com.datastax.spark</groupId>
        <artifactId>spark-cassandra-connector_2.11</artifactId>
        <version>2.0.7</version>
    </dependency>
</dependencies>

The spark context and session initialization:

/**
 * Spring configuration that exposes the Spark entry points as singleton beans:
 * a {@link SparkSession} pointed at a local Cassandra node, and a
 * {@link JavaSparkContext} derived from it.
 */
@Configuration
public class Sparkstart {

    /**
     * Builds (or reuses) the application-wide SparkSession, running locally on
     * all cores and configured to reach Cassandra at 127.0.0.1.
     */
    @Bean
    public SparkSession sparksession() {
        return SparkSession.builder()
                .master("local[*]")
                .appName("preprocessing")
                .config("spark.cassandra.connection.host", "127.0.0.1")
                .getOrCreate();
    }

    /**
     * Wraps the session's underlying SparkContext in the Java-friendly API.
     * The inter-bean call is proxied by Spring, so both beans share one session.
     */
    @Bean
    public JavaSparkContext sc() {
        return new JavaSparkContext(sparksession().sparkContext());
    }
}

The class where I read the data from the Cassandra database:

/**
 * Reads peak data from the Cassandra table mdb.filepeaks via the
 * spark-cassandra-connector, aggregates it, and hands the result to
 * {@link Pearsonclass} for correlation processing.
 */
@Component
public class Aftersparkstart {
    // Shared Spark session (configured in Sparkstart).
    @Autowired
    private SparkSession sp;

    // Java-friendly Spark context (configured in Sparkstart); not used in start().
    @Autowired
    private JavaSparkContext sc;

    // Downstream processor that computes the final matrix.
    @Autowired
    private Pearsonclass prs;

// Entry point: loads, filters, and aggregates Cassandra rows, then delegates
// to Pearsonclass. NOTE(review): `some data` is a placeholder, and `dirlist`
// is not declared anywhere in this snippet - this will not compile as posted.
public Matrix start(){

List<String> desclist = new ArrayList<>();
    desclist.add(some data);
    desclist.add(some data);
// Read mdb.filepeaks through the Cassandra data source, keeping only the three
// needed columns and only rows whose description is in desclist.
Dataset<Row> peakset = sp.read().format("org.apache.spark.sql.cassandra")
            .options(new HashMap<String, String>() {
                {
                    put("keyspace", "mdb");
                    put("table", "filepeaks");
                }
            })
            .load().select(col("directoryname"), col("description"), col("intensity")).filter(col("description").isin(desclist.toArray()));

// Average intensity per (description, directoryname), ordered for stable output.
Dataset<Row> finalpeaks = peakset.groupBy(peakset.col("description"), peakset.col("directoryname")).avg("intensity").orderBy(asc("directoryname"), asc("description"));

Matrix r=prs.pearsonmethod(finalpeaks,dirlist,desclist);
return r;
}
}

And the class where the processing by spark takes place:

/**
 * Computes the result matrix from the aggregated peak data.
 * The body was elided by the question's author ("...stuff...") - only the
 * signature is meaningful here.
 */
@Component
public class Pearsonclass{

// Takes the aggregated dataset plus directory/description name lists and
// returns the computed matrix. Implementation omitted in the original post.
public Matrix pearsonmethod(Dataset<Row> peaks, List<String> dirlist, List<String> desclist) {
    "...stuff..."
    return r2;
}
}

And finally the RestController:

/**
 * REST endpoint that exposes the Spark/Cassandra pipeline result.
 * GET-ing /welcome triggers the full computation and returns the matrix,
 * serialized by Spring's message converters.
 */
@RestController
public class Firstcontroller {

    // Pipeline entry point, injected by Spring.
    @Autowired
    private Aftersparkstart str;

    /** Runs the pipeline and returns the resulting matrix. */
    @RequestMapping("/welcome")
    public Matrix welcome() {
        return str.start();
    }
}

I am pretty sure I am missing something in the dependencies but I don't know what!


Solution

  • Got it! I just upgraded my Cassandra version from 3.11.0 to 3.11.2. The problem was a JDK incompatibility with Cassandra: I have JDK 1.8.0_162-8u162, with which the previous Cassandra version didn't get along.