CDRIVER-4275 and CDRIVER-4275 #1158

Merged
merged 10 commits on Dec 9, 2022
8 changes: 7 additions & 1 deletion .evergreen/config.yml
@@ -407,7 +407,8 @@ functions:
export ORCHESTRATION_FILE=${ORCHESTRATION_FILE}
export OCSP=${OCSP}
export REQUIRE_API_VERSION=${REQUIRE_API_VERSION}
sh .evergreen/integration-tests.sh
export LOAD_BALANCER=${LOAD_BALANCER}
bash .evergreen/integration-tests.sh
- command: expansions.update
params:
file: mongoc/mo-expansion.yml
@@ -16644,6 +16645,7 @@ tasks:
- func: bootstrap mongo-orchestration
vars:
AUTH: noauth
LOAD_BALANCER: 'on'
SSL: ssl
TOPOLOGY: sharded_cluster
VERSION: latest
@@ -24022,6 +24024,7 @@ tasks:
- func: bootstrap mongo-orchestration
vars:
AUTH: auth
LOAD_BALANCER: 'on'
SSL: ssl
TOPOLOGY: sharded_cluster
VERSION: '5.0'
@@ -24049,6 +24052,7 @@ tasks:
- func: bootstrap mongo-orchestration
vars:
AUTH: auth
LOAD_BALANCER: 'on'
SSL: ssl
TOPOLOGY: sharded_cluster
VERSION: latest
@@ -24076,6 +24080,7 @@ tasks:
- func: bootstrap mongo-orchestration
vars:
AUTH: noauth
LOAD_BALANCER: 'on'
SSL: nossl
TOPOLOGY: sharded_cluster
VERSION: '5.0'
@@ -24103,6 +24108,7 @@ tasks:
- func: bootstrap mongo-orchestration
vars:
AUTH: noauth
LOAD_BALANCER: 'on'
SSL: nossl
TOPOLOGY: sharded_cluster
VERSION: latest
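
For reference, the bootstrap function above only exports these expansions and hands off to the orchestration script, so the new LOAD_BALANCER knob can also be exercised locally. A minimal sketch, assuming a mongo-c-driver checkout with mongo-orchestration's prerequisites installed (the values mirror the noauth/ssl/sharded_cluster task above):

```bash
# Local sketch of the "bootstrap mongo-orchestration" step with LOAD_BALANCER enabled.
# The exports mirror the Evergreen expansions; run from the repository root.
export TOPOLOGY=sharded_cluster
export AUTH=noauth
export SSL=ssl
export VERSION=latest
export LOAD_BALANCER=on   # new in this change; any value other than "on" keeps the old behavior
bash .evergreen/integration-tests.sh
```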
9 changes: 8 additions & 1 deletion .evergreen/integration-tests.sh
@@ -1,4 +1,4 @@
#! /bin/sh
#! /bin/bash
# Start up mongo-orchestration (a server to spawn mongodb clusters) and set up a cluster.
#
# Specify the following environment variables:
@@ -14,12 +14,15 @@
# OCSP: off, on
# REQUIRE_API_VERSION: set to a non-empty string to set the requireApiVersion parameter
# This is currently only supported for standalone servers
# LOAD_BALANCER: off, on
#
# This script may be run locally.
#

set -o errexit # Exit the script with error if any of the commands fail

: "${LOAD_BALANCER:=off}"

DIR=$(dirname $0)
# Functions to fetch MongoDB binaries
. $DIR/download-mongodb.sh
@@ -61,6 +64,10 @@ if [ -z "$ORCHESTRATION_FILE" ]; then
if [ "$SSL" != "nossl" ]; then
ORCHESTRATION_FILE="${ORCHESTRATION_FILE}-ssl"
fi

if [ "$LOAD_BALANCER" = "on" ]; then
ORCHESTRATION_FILE="${ORCHESTRATION_FILE}-load-balancer"
fi
fi

# Set up mongo orchestration home.
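
When ORCHESTRATION_FILE is not set explicitly, the new suffix composes with the existing AUTH/SSL handling, which is what selects the *-load-balancer configs added below. A standalone sketch of that naming, with the auth/basic base taken from the existing script's convention rather than from this hunk:

```bash
# Sketch of how the orchestration config name is derived from the environment.
#   AUTH=auth   SSL=nossl LOAD_BALANCER=on -> auth-load-balancer
#   AUTH=noauth SSL=nossl LOAD_BALANCER=on -> basic-load-balancer
# matching the new files under orchestration_configs/sharded_clusters/.
if [ "$AUTH" = "auth" ]; then
  ORCHESTRATION_FILE="auth"
else
  ORCHESTRATION_FILE="basic"
fi
if [ "$SSL" != "nossl" ]; then
  ORCHESTRATION_FILE="${ORCHESTRATION_FILE}-ssl"
fi
if [ "$LOAD_BALANCER" = "on" ]; then
  ORCHESTRATION_FILE="${ORCHESTRATION_FILE}-load-balancer"
fi
echo "$ORCHESTRATION_FILE"
```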
3 changes: 2 additions & 1 deletion build/evergreen_config_lib/functions.py
@@ -280,7 +280,8 @@
export ORCHESTRATION_FILE=${ORCHESTRATION_FILE}
export OCSP=${OCSP}
export REQUIRE_API_VERSION=${REQUIRE_API_VERSION}
sh .evergreen/integration-tests.sh
export LOAD_BALANCER=${LOAD_BALANCER}
bash .evergreen/integration-tests.sh
''', test=False),
OD([
("command", "expansions.update"),
15 changes: 11 additions & 4 deletions build/evergreen_config_lib/tasks.py
@@ -669,9 +669,15 @@ def to_dict(self):
commands.append(
func('fetch build', BUILD_NAME=self.depends_on['name']))

orchestration = bootstrap(TOPOLOGY='sharded_cluster' if self.loadbalanced else 'replica_set',
AUTH='auth' if self.auth else 'noauth',
SSL='ssl')
if self.loadbalanced:
orchestration = bootstrap(TOPOLOGY='sharded_cluster',
AUTH='auth' if self.auth else 'noauth',
SSL='ssl',
LOAD_BALANCER='on')
else:
orchestration = bootstrap(TOPOLOGY='replica_set',
AUTH='auth' if self.auth else 'noauth',
SSL='ssl')

if self.auth:
orchestration['vars']['AUTHSOURCE'] = 'thisDB'
@@ -1150,7 +1156,8 @@ def to_dict(self):
orchestration = bootstrap(TOPOLOGY='sharded_cluster',
AUTH='auth' if self.test_auth else 'noauth',
SSL='ssl' if self.test_ssl else 'nossl',
VERSION=self.version)
VERSION=self.version,
LOAD_BALANCER='on')
commands.append(orchestration)
commands.append(func("clone drivers-evergreen-tools"))
commands.append(func("start load balancer",
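
For load-balanced tasks the generated commands then call the existing "start load balancer" function. Roughly, and only as an assumption about drivers-evergreen-tools (the helper script name, its "start" argument, and the URI below are not part of this PR), that step corresponds to:

```bash
# Hypothetical local equivalent of the "start load balancer" step.
# Assumes drivers-evergreen-tools is cloned at $DRIVERS_TOOLS and provides a
# run-load-balancer.sh helper that fronts the two mongos routers.
export DRIVERS_TOOLS=/path/to/drivers-evergreen-tools        # hypothetical path
export MONGODB_URI="mongodb://localhost:27017,localhost:27018"
bash "$DRIVERS_TOOLS/.evergreen/run-load-balancer.sh" start
```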
57 changes: 57 additions & 0 deletions orchestration_configs/sharded_clusters/auth-load-balancer.json
@@ -0,0 +1,57 @@
{
"id": "shard_cluster_1",
"login": "bob",
"password": "pwd123",
"auth_key": "secret",
"shards": [
{
"id": "sh01",
"shardParams": {
"members": [
{
"procParams": {
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"shardsvr": true,
"port": 27217
}
},
{
"procParams": {
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"shardsvr": true,
"port": 27218
}
},
{
"procParams": {
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"shardsvr": true,
"port": 27219
}
}
]
}
}
],
"routers": [
{
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"port": 27017,
"setParameter": {
"loadBalancerPort": 27050
}
},
{
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"port": 27018,
"setParameter": {
"loadBalancerPort": 27051
}
}
]
}
@@ -0,0 +1,63 @@
{
"id": "shard_cluster_1",
"login": "bob",
"password": "pwd123",
"auth_key": "secret",
"shards": [
{
"id": "sh01",
"shardParams": {
"members": [
{
"procParams": {
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"shardsvr": true,
"port": 27217
}
},
{
"procParams": {
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"shardsvr": true,
"port": 27218
}
},
{
"procParams": {
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"shardsvr": true,
"port": 27219
}
}
]
}
}
],
"routers": [
{
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"port": 27017,
"setParameter": {
"loadBalancerPort": 27050
}
},
{
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"port": 27018,
"setParameter": {
"loadBalancerPort": 27051
}
}
],
"sslParams": {
"sslOnNormalPorts": true,
"sslPEMKeyFile": "/tmp/orchestration-home/server.pem",
"sslCAFile": "/tmp/orchestration-home/ca.pem",
"sslWeakCertificateValidation" : true
}
}
54 changes: 54 additions & 0 deletions orchestration_configs/sharded_clusters/basic-load-balancer.json
@@ -0,0 +1,54 @@
{
"id": "shard_cluster_1",
"shards": [
{
"id": "sh01",
"shardParams": {
"members": [
{
"procParams": {
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"shardsvr": true,
"port": 27217
}
},
{
"procParams": {
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"shardsvr": true,
"port": 27218
}
},
{
"procParams": {
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"shardsvr": true,
"port": 27219
}
}
]
}
}
],
"routers": [
{
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"port": 27017,
"setParameter": {
"loadBalancerPort": 27050
}
},
{
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"port": 27018,
"setParameter": {
"loadBalancerPort": 27051
}
}
]
}
@@ -0,0 +1,60 @@
{
"id": "shard_cluster_1",
"shards": [
{
"id": "sh01",
"shardParams": {
"members": [
{
"procParams": {
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"shardsvr": true,
"port": 27217
}
},
{
"procParams": {
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"shardsvr": true,
"port": 27218
}
},
{
"procParams": {
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"shardsvr": true,
"port": 27219
}
}
]
}
}
],
"routers": [
{
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"port": 27017,
"setParameter": {
"loadBalancerPort": 27050
}
},
{
"ipv6": true,
"bind_ip": "127.0.0.1,::1",
"port": 27018,
"setParameter": {
"loadBalancerPort": 27051
}
}
],
"sslParams": {
"sslOnNormalPorts": true,
"sslPEMKeyFile": "/tmp/orchestration-home/server.pem",
"sslCAFile": "/tmp/orchestration-home/ca.pem",
"sslWeakCertificateValidation" : true
}
}
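
Each of these configs starts two mongos routers on 27017/27018 and has them open dedicated load-balancer ports 27050/27051 via the loadBalancerPort parameter. A quick way to sanity-check a locally bootstrapped cluster, assuming mongosh is installed (the port numbers are the ones from the configs above):

```bash
# Ping both mongos routers started by the load-balancer configs.
# 27017/27018 are the regular router ports; 27050/27051 are the extra ports
# opened by the loadBalancerPort setParameter for connections that arrive
# through a load balancer. The auth variants define credentials in the config
# above (login: bob, password: pwd123).
for port in 27017 27018; do
  mongosh --quiet --port "$port" --eval 'db.adminCommand({ ping: 1 })'
done
```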
@@ -241,10 +241,4 @@ bool
mongoc_server_description_has_service_id (
const mongoc_server_description_t *description);

/* mongoc_global_mock_service_id is only used for testing. The test runner sets
* this to true when testing against a load balanced deployment to mock the
* presence of a serviceId field in the "hello" response. The purpose of this is
* further described in the Load Balancer test README. */
extern bool mongoc_global_mock_service_id;

#endif
12 changes: 0 additions & 12 deletions src/libmongoc/src/mongoc/mongoc-server-description.c
@@ -33,8 +33,6 @@ static bson_oid_t kObjectIdZero = {{0}};

const bson_oid_t kZeroServiceId = {{0}};

bool mongoc_global_mock_service_id = false;

static bool
_match_tag_set (const mongoc_server_description_t *sd,
bson_iter_t *tag_set_iter);
@@ -746,16 +744,6 @@ mongoc_server_description_handle_hello (mongoc_server_description_t *sd,
}
}


if (mongoc_global_mock_service_id) {
bson_iter_t pid_iter;

if (bson_iter_init_find (&pid_iter, &sd->topology_version, "processId") &&
BSON_ITER_HOLDS_OID (&pid_iter)) {
bson_oid_copy (bson_iter_oid (&pid_iter), &sd->service_id);
}
}

if (is_shard) {
sd->type = MONGOC_SERVER_MONGOS;
} else if (sd->set_name) {