I have this Terraform config for creating a single MongoDB replica and a Service, but I can't connect to Mongo with the CLI using the cluster domain name.
locals {
  labels = {
    "app" = "mongo"
  }
  volume_config_name = "mongo-config"
}
module "mongo" {
source = "terraform-iaac/stateful-set/kubernetes"
version = "1.4.2"
# insert the 3 required variables here
image = "mongo:4.4"
name = "mongodb"
namespace = kubernetes_namespace.cmprimg.metadata[0].name
custom_labels = local.labels
volume_host_path = [
{
volume_name = "data"
path_on_node = "/data/db"
},
]
volume_mount = [
{
mount_path = "/data/db"
volume_name = "data"
},
{
mount_path = "/etc/mongod.conf.orig"
volume_name = "mongodb-conf"
sub_path = "configfile" // Key from configmap
}
]
volume_config_map = [{
mode = "0777"
volume_name = "mongodb-conf"
name = "mongodb-confmap"
}]
# volume_claim = [
# {
# name = "mongo"
# namespace = kubernetes_namespace.cmprimg.metadata[0].name
# access_modes = ["ReadWriteOnce"]
# requests_storage = "4Gi"
# persistent_volume_name = "mongo"
# storage_class_name = "linode-block-storage-retain"
# }
# ]
env = {
"MONGO_INITDB_ROOT_USERNAME" = var.username,
"MONGO_INITDB_ROOT_PASSWORD" = var.password,
}
command = [
"mongod",
"--bind_ip",
"0.0.0.0",
]
internal_port = [
{
name = "mongo"
internal_port = 27017
}
]
resources = {
request_cpu = "100m"
request_memory = "800Mi"
limit_cpu = "120m"
limit_memory = "900Mi"
}
replicas = 1
}
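The module above references var.username and var.password, whose declarations are not shown; a minimal sketch of what they could look like (assumed names matching the references, with the password marked sensitive so it stays out of plan output):

variable "username" {
  type        = string
  description = "Root username passed to MONGO_INITDB_ROOT_USERNAME"
}

variable "password" {
  type        = string
  description = "Root password passed to MONGO_INITDB_ROOT_PASSWORD"
  sensitive   = true
}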
module "mongo_service" {
source = "terraform-iaac/service/kubernetes"
version = "1.0.4"
# insert the 3 required variables here
app_name = module.mongo.name
app_namespace = kubernetes_namespace.cmprimg.metadata[0].name
port_mapping = [
{
name = "mongo"
internal_port = 27107
external_port = 27017
}
]
custom_labels = local.labels
}
resource "kubernetes_persistent_volume_claim" "example" {
metadata {
name = "mongo"
namespace = kubernetes_namespace.cmprimg.metadata[0].name
labels = local.labels
}
spec {
access_modes = ["ReadWriteOnce"]
resources {
requests = {
storage = "20Gi"
}
}
storage_class_name = "linode-block-storage-retain"
}
}
resource "kubernetes_config_map" "mongodb_conf" {
metadata {
name = "mongodb-confmap"
namespace = kubernetes_namespace.cmprimg.metadata[0].name
labels = local.labels
}
data = {
"configfile" = yamlencode({
storage : {
dbPath : "/data/db",
},
net : {
port : 27017,
bindIp : "0.0.0.0",
}
})
}
}
I can exec into the mongodb pod and connect with the mongo CLI over localhost, but when I connect from the same pod using the service domain name mongodb.default.svc.cluster.local:27017, I get connection refused. The logs show that mongod binds to 0.0.0.0, yet I can't connect through the external port. Did I misconfigure the service, or did I do something else wrong?
Take a closer look at this section:
port_mapping = [
  {
    name          = "mongo"
    internal_port = 27107
    external_port = 27017
  }
]
The internal_port is 27107, not 27017: the digits are transposed. In the service module, internal_port is the container-side port the Service forwards traffic to (the target port), while external_port is the port the Service itself exposes. Traffic arriving at the service DNS name on 27017 is therefore forwarded to port 27107 inside the pod, where nothing is listening, so the connection is refused. That also explains why localhost:27017 works from inside the pod while the cluster domain name does not. For comparison, a generic container port mapping keeps both sides pointed at the same port variable:
portMappings = [
  {
    containerPort = var.container_port
    hostPort      = var.container_port
    protocol      = "tcp"
  }
]
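Fixing the transposed digits in the mongo_service module should be enough. A minimal sketch of the corrected block (everything else in the module call stays the same):

port_mapping = [
  {
    name          = "mongo"
    internal_port = 27017 # container port mongod listens on (target port)
    external_port = 27017 # port exposed by the Service on the cluster DNS name
  }
]

Assuming the module maps internal_port to the Service's targetPort and external_port to its port, the rendered Service is roughly equivalent to the plain resource below (the resource name and selector here are illustrative, not what the module actually generates):

resource "kubernetes_service" "mongo_sketch" {
  metadata {
    name      = "mongodb"
    namespace = kubernetes_namespace.cmprimg.metadata[0].name
    labels    = local.labels
  }
  spec {
    selector = local.labels
    port {
      name        = "mongo"
      port        = 27017 # what clients reach via the cluster DNS name
      target_port = 27017 # must match the port mongod binds inside the container
    }
  }
}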