Skip to content

Commit e1582a7

Browse files
(DOCSP-25810): Output Cleanup (#41)
1 parent 4c5f61a commit e1582a7

33 files changed

+358
-352
lines changed
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
{
2+
"name": "mongo-cdc-sink",
3+
"config": {
4+
"connector.class": "com.mongodb.kafka.connect.MongoSinkConnector",
5+
"topics": "CDCTutorial.Source",
6+
"change.data.capture.handler": "com.mongodb.kafka.connect.sink.cdc.mongodb.ChangeStreamHandler",
7+
"connection.uri": "mongodb://mongo1",
8+
"database": "CDCTutorial",
9+
"collection": "Destination"
10+
}
11+
}
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
{
2+
"name": "mongo-cdc-source",
3+
"config": {
4+
"connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
5+
"connection.uri": "mongodb://mongo1",
6+
"database": "CDCTutorial",
7+
"collection": "Source"
8+
}
9+
}
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
{
2+
"schema": { "type": "string", "optional": false },
3+
"payload": {
4+
"_id": { "_data": "8261...." },
5+
...
6+
"operationType": "delete",
7+
"clusterTime": { "$timestamp": { "t": 1631108282, "i": 1 } },
8+
"ns": { "db": "CDCTutorial", "coll": "Source" },
9+
"documentKey": { "_id": { "$oid": "6138..." } }
10+
}
11+
}
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
{
2+
"schema": { "type": "string", "optional": false },
3+
"payload": {
4+
"_id": { "_data": "8260..." },
5+
"operationType": "insert",
6+
"clusterTime": { "$timestamp": { "t": 1611..., "i": 2 } },
7+
"wallTime": { "$date": "..." },
8+
"fullDocument": {
9+
"_id": { "$oid": "600b38ad..." },
10+
"proclaim": "Hello World!"
11+
},
12+
"ns": { "db": "CDCTutorial", "coll": "Source" },
13+
"documentKey": { "_id": { "$oid": "600b38a..." } }
14+
}
15+
}
Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
Kafka topics:
2+
...
3+
The status of the connectors:
4+
5+
sink | mongo-cdc-sink | RUNNING | RUNNING | com.mongodb.kafka.connect.MongoSinkConnector
6+
source | mongo-cdc-source | RUNNING | RUNNING | com.mongodb.kafka.connect.MongoSourceConnector
7+
8+
Currently configured connectors
9+
10+
[
11+
"mongo-cdc-sink",
12+
"mongo-cdc-source"
13+
]
14+
...
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
Kafka topics:
2+
...
3+
The status of the connectors:
4+
5+
source | mongo-cdc-source | RUNNING | RUNNING | com.mongodb.kafka.connect.MongoSourceConnector
6+
7+
Currently configured connectors
8+
9+
[
10+
"mongo-cdc-source"
11+
]
12+
...
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
import pymongo
from bson.json_util import dumps

# Connect to the local replica set and select the tutorial database.
mongo_client = pymongo.MongoClient('mongodb://mongo1')
tutorial_db = mongo_client.get_database(name='Tutorial1')

# Open a change stream on the "orders" collection and print every event
# it delivers as indented extended JSON.
with tutorial_db.orders.watch() as change_stream:
    print('\nA change stream is open on the Tutorial1.orders namespace. Currently watching ...\n\n')
    for event in change_stream:
        print(dumps(event, indent=2))
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
{
2+
"_id": {
3+
"_data": "826264..."
4+
},
5+
"operationType": "insert",
6+
"clusterTime": {
7+
"$timestamp": {
8+
"t": 1650754657,
9+
"i": 1
10+
}
11+
},
12+
"wallTime": {
13+
"$date": "2022-10-13T17:06:23.409Z"
14+
},
15+
"fullDocument": {
16+
"_id": {
17+
"$oid": "<_id value of document>"
18+
},
19+
"test": 1
20+
},
21+
"ns": {
22+
"db": "Tutorial1",
23+
"coll": "orders"
24+
},
25+
"documentKey": {
26+
"_id": {
27+
"$oid": "<_id value of document>"
28+
}
29+
}
30+
}
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
import pymongo
from bson.json_util import dumps

# Connect to the local replica set and select the tutorial database.
mongo_client = pymongo.MongoClient('mongodb://mongo1')
tutorial_db = mongo_client.get_database(name='Tutorial1')

# Aggregation pipeline: only surface events whose full document is a
# "temp" reading with a value of at least 100.
match_stage = {
    "$match": {
        "$and": [
            {"fullDocument.type": "temp"},
            {"fullDocument.value": {"$gte": 100}},
        ]
    }
}

# Open a filtered change stream on the "sensors" collection and print
# every matching event as indented extended JSON.
with tutorial_db.sensors.watch(pipeline=[match_stage]) as change_stream:
    print('\nChange Stream is opened on the Tutorial1.sensors namespace. Currently watching for values > 100...\n\n')
    for event in change_stream:
        print(dumps(event, indent=2))

source/includes/tutorials/docker-success.rst

Lines changed: 0 additions & 9 deletions
This file was deleted.
Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
...
2+
Creating zookeeper ... done
3+
Creating broker ... done
4+
Creating schema-registry ... done
5+
Creating connect ... done
6+
Creating rest-proxy ... done
7+
Creating mongo1 ... done
8+
Creating mongo1-setup ... done
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
Kafka topics:
2+
3+
"topic": "docker-connect-status",
4+
"topic": "docker-connect-offsets",
5+
"topic": "docker-connect-configs",
6+
"topic": "__consumer_offsets",
7+
8+
The status of the connectors:
9+
10+
11+
Currently configured connectors
12+
13+
[]
14+
15+
16+
Version of MongoDB Connector for Apache Kafka installed:
17+
18+
{"class":"com.mongodb.kafka.connect.MongoSinkConnector","type":"sink","version":"1.8.0"}
19+
{"class":"com.mongodb.kafka.connect.MongoSourceConnector","type":"source","version":"1.8.0"}
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
from kafka import KafkaProducer
from json import dumps

# NOTE: the original snippet also had a redundant `import json`; only
# `dumps` is used, so the duplicate import is dropped.

# Serialize each record value as UTF-8-encoded JSON before it is sent
# to the broker.
p = KafkaProducer(
    bootstrap_servers=['broker:29092'],
    value_serializer=lambda x: dumps(x).encode('utf-8'),
)

# Sample document published to the sink connector's topic.
data = {'name': 'roscoe'}

p.send('Tutorial2.pets', value=data)

# Block until the buffered record is actually delivered.
p.flush()
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
Kafka topics:
2+
...
3+
The status of the connectors:
4+
5+
sink | mongo-tutorial-sink | RUNNING | RUNNING | com.mongodb.kafka.connect.MongoSinkConnector
6+
7+
Currently configured connectors
8+
9+
[
10+
"mongo-tutorial-sink"
11+
]
12+
...
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
{
2+
"name": "mongo-tutorial-sink",
3+
"config": {
4+
"connector.class": "com.mongodb.kafka.connect.MongoSinkConnector",
5+
"topics": "Tutorial2.pets",
6+
"connection.uri": "mongodb://mongo1",
7+
"key.converter": "org.apache.kafka.connect.storage.StringConverter",
8+
"value.converter": "org.apache.kafka.connect.json.JsonConverter",
9+
"value.converter.schemas.enable": false,
10+
"database": "Tutorial2",
11+
"collection": "pets"
12+
}
13+
}
Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
{
2+
"_id": {
3+
"_data": "8262655A..."
4+
},
5+
"operationType": "insert",
6+
"clusterTime": {
7+
"$timestamp": {
8+
"t": 1650809557,
9+
"i": 2
10+
}
11+
},
12+
"wallTime": {
13+
"$date": "2022-10-13T17:06:23.409Z"
14+
},
15+
"fullDocument": {
16+
"_id": {
17+
"$oid": "62655a..."
18+
},
19+
"order_id": 1,
20+
"item": "coffee"
21+
},
22+
"ns": {
23+
"db": "Tutorial1",
24+
"coll": "orders"
25+
},
26+
"documentKey": {
27+
"_id": {
28+
"$oid": "62655a..."
29+
}
30+
}
31+
}
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
{ "_id": { "$oid": "<your _id value>" }, "order_id": 2, "item": "oatmeal" }
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
Partition: 0 Offset: 0
2+
3+
Key (198 bytes):
4+
5+
{"schema":{"type":"string","optional":false},"payload":"{\"_id\": {\"_data\": \"8263496B53000000022B022C0100296E5A1004516A6011E8F74ADEA2D28F5138C12D4146645F6964006463496B5280D076018B3305360004\"}}"}
6+
7+
Value (572 bytes):
8+
9+
{"schema":{"type":"string","optional":false},"payload":"{\"_id\": {\"_data\": \"8263496B53000000022B022C0100296E5A1004516A6011E8F74ADEA2D28F5138C12D4146645F6964006463496B5280D076018B3305360004\"}, \"operationType\": \"insert\", \"clusterTime\": {\"$timestamp\": {\"t\": 1665755987, \"i\": 2}}, \"wallTime\": {\"$date\": 1665755987015}, \"fullDocument\": {\"_id\": {\"$oid\": \"63496b5280d076018b330536\"}, \"order_id\": 1, \"item\": \"coffee\"}, \"ns\": {\"db\": \"Tutorial1\", \"coll\": \"orders\"}, \"documentKey\": {\"_id\": {\"$oid\": \"63496b5280d076018b330536\"}}}"}
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
Kafka topics:
2+
...
3+
4+
The status of the connectors:
5+
6+
source | mongo-simple-source | RUNNING | RUNNING | com.mongodb.kafka.connect.MongoSourceConnector
7+
8+
Currently configured connectors
9+
10+
[
11+
"mongo-simple-source"
12+
]
13+
...
Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
{
2+
"name": "mongo-simple-source",
3+
"config": {
4+
"connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
5+
"connection.uri": "mongodb://mongo1",
6+
"publish.full.document.only": true,
7+
"database": "Tutorial1",
8+
"collection": "orders"
9+
}
10+
}
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
{
2+
"name": "mongo-simple-source",
3+
"config": {
4+
"connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
5+
"connection.uri": "mongodb://mongo1",
6+
"database": "Tutorial1",
7+
"collection": "orders"
8+
}
9+
}
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
{
2+
tx_time: ISODate("2022-05-25T21:16:35.983Z"),
3+
_id: ObjectId("628e9..."),
4+
symbol: 'FAV',
5+
price: 18.43,
6+
company_name: 'FUZZY ATTACK VENTURES'
7+
}
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
{"schema":{ ... }, "payload": "{ "_id": { "$oid": "628e9..."}, "company_symbol": "MSP", "Company_name": "MASSIVE SUBMARINE PARTNERS", "price": 309.98, "tx_time": { "$date": 16535... }"}
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
Kafka topics:
2+
...
3+
4+
The status of the connectors:
5+
6+
source | mongo-source-marketdata | RUNNING | RUNNING | com.mongodb.kafka.connect.MongoSourceConnector
7+
8+
Currently configured connectors
9+
10+
[
11+
"mongo-source-marketdata"
12+
]
13+
...
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
{
2+
"name": "mongo-sink-marketdata",
3+
"config": {
4+
"connector.class": "com.mongodb.kafka.connect.MongoSinkConnector",
5+
"topics": "marketdata.Stocks.PriceData",
6+
"connection.uri": "mongodb://mongo1",
7+
"database": "Stocks",
8+
"collection": "StockDataMigrate",
9+
"key.converter": "org.apache.kafka.connect.storage.StringConverter",
10+
"value.converter": "org.apache.kafka.connect.json.JsonConverter",
11+
"timeseries.timefield": "tx_time",
12+
"timeseries.timefield.auto.convert": "true",
13+
"timeseries.timefield.auto.convert.date.format": "yyyy-MM-dd'T'HH:mm:ss'Z'"
14+
}
15+
}
Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
{
2+
"name": "mongo-source-marketdata",
3+
"config": {
4+
"connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
5+
"key.converter": "org.apache.kafka.connect.storage.StringConverter",
6+
"value.converter": "org.apache.kafka.connect.json.JsonConverter",
7+
"publish.full.document.only": "true",
8+
"connection.uri": "mongodb://mongo1",
9+
"topic.prefix": "marketdata",
10+
"database": "Stocks",
11+
"collection": "PriceData",
12+
"copy.existing": "true"
13+
}
14+
}

source/quick-start.txt

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,8 @@ When you start the sandbox, Docker downloads any images it needs to run.
7979
After Docker downloads and builds the images, you should see the following
8080
output in your development environment:
8181

82-
.. include:: /includes/tutorials/docker-success.rst
82+
.. literalinclude:: /includes/tutorials/docker-success.txt
83+
:language: text
8384

8485
.. include:: /includes/tutorials/port-mapping-note.rst
8586

0 commit comments

Comments
 (0)