Skip to content

DOCSP-33345: Java comments pt. 6 #475

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits into from
Oct 27, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -31,12 +31,14 @@ public final class GridFSOperations {

/**
 * Creates a GridFS bucket with a custom name on the "mydb" database.
 * The "start"/"end" comments are literalinclude markers for the docs build — do not remove.
 *
 * @param mongoClient client connected to the deployment that stores the bucket
 * @throws Exception if the bucket cannot be created
 */
private static void createCustomBucket(MongoClient mongoClient) throws Exception {
    MongoDatabase database = mongoClient.getDatabase("mydb");
    // Creates a custom GridFS bucket named "myCustomBucket"
    // start createCustomGridFSBucket
    GridFSBucket gridFSBucket = GridFSBuckets.create(database, "myCustomBucket");
    // end createCustomGridFSBucket
}

private static void uploadOptions() {
// Defines options that specify configuration information for files uploaded to the bucket
// start uploadOptions
GridFSUploadOptions options = new GridFSUploadOptions()
.chunkSizeBytes(1048576) // 1MB chunk size
Expand All @@ -48,37 +50,48 @@ private static void uploadFromInputStream(GridFSBucket gridFSBucket) throws Exce
// start uploadFromInputStream
String filePath = "/path/to/project.zip";
try (InputStream streamToUploadFrom = new FileInputStream(filePath) ) {
// Defines options that specify configuration information for files uploaded to the bucket
GridFSUploadOptions options = new GridFSUploadOptions()
.chunkSizeBytes(1048576)
.metadata(new Document("type", "zip archive"));

// Uploads a file from an input stream to the GridFS bucket
ObjectId fileId = gridFSBucket.uploadFromStream("myProject.zip", streamToUploadFrom, options);

// Prints the "_id" value of the uploaded file
System.out.println("The file id of the uploaded file is: " + fileId.toHexString());
}
// end uploadFromInputStream
}

/**
 * Uploads a local file to the GridFS bucket by writing its bytes through an
 * upload stream, then prints the generated file "_id" value.
 * The "start"/"end" comments are literalinclude markers for the docs build — do not remove.
 *
 * @param gridFSBucket bucket that receives the uploaded file
 * @throws Exception if reading the local file fails
 */
private static void uploadFromOutputStream(GridFSBucket gridFSBucket) throws Exception {
    // Reads the file data from the specified path into memory
    // start uploadFromOutputStream
    Path filePath = Paths.get("/path/to/project.zip");
    byte[] data = Files.readAllBytes(filePath);

    // Defines options that specify configuration information for files uploaded to the bucket
    GridFSUploadOptions options = new GridFSUploadOptions()
    .chunkSizeBytes(1048576)
    .metadata(new Document("type", "zip archive"));

    try (GridFSUploadStream uploadStream = gridFSBucket.openUploadStream("myProject.zip", options)) {
        // Writes file data to the GridFS upload stream
        uploadStream.write(data);
        uploadStream.flush();

        // Prints the "_id" value of the uploaded file
        System.out.println("The file id of the uploaded file is: " + uploadStream.getObjectId().toHexString());

    // Prints a message if any exceptions occur during the upload process
    } catch (Exception e) {
        System.err.println("The file upload failed: " + e);
    }
    // end uploadFromOutputStream
}

private static void findAllFiles(GridFSBucket gridFSBucket) throws Exception {
// Prints the details of each file in the GridFS bucket
// start findAllFiles
gridFSBucket.find().forEach(new Consumer<GridFSFile>() {
@Override
Expand All @@ -89,9 +102,12 @@ public void accept(final GridFSFile gridFSFile) {
// end findAllFiles
}
private static void findMatchingFiles(GridFSBucket gridFSBucket) throws Exception {
// Creates a filter and sort document to match documents and sort them by ascending "filename" values
// start findMatchingFiles
Bson query = Filters.eq("metadata.type", "zip archive");
Bson sort = Sorts.ascending("filename");

// Retrieves 5 documents in the bucket that match the filter and prints metadata
gridFSBucket.find(query)
.sort(sort)
.limit(5)
Expand All @@ -105,9 +121,11 @@ public void accept(final GridFSFile gridFSFile) {
}

private static void downloadToOutputStream(GridFSBucket gridFSBucket) throws Exception {
// Defines options to select the first version of a bucket file
// start downloadToStream
GridFSDownloadOptions downloadOptions = new GridFSDownloadOptions().revision(0);

// Downloads a file to an output stream
try (FileOutputStream streamToDownloadTo = new FileOutputStream("/tmp/myProject.zip")) {
gridFSBucket.downloadToStream("myProject.zip", streamToDownloadTo, downloadOptions);
streamToDownloadTo.flush();
Expand All @@ -118,10 +136,14 @@ private static void downloadToOutputStream(GridFSBucket gridFSBucket) throws Exc
private static void downloadToMemory(GridFSBucket gridFSBucket) throws Exception {
// start downloadToMemory
ObjectId fileId = new ObjectId("60345d38ebfcf47030e81cc9");

// Opens an input stream to read a file containing a specified "_id" value and downloads the file
try (GridFSDownloadStream downloadStream = gridFSBucket.openDownloadStream(fileId)) {
int fileLength = (int) downloadStream.getGridFSFile().getLength();
byte[] bytesToWriteTo = new byte[fileLength];
downloadStream.read(bytesToWriteTo);

// Prints the downloaded file's contents as a string
System.out.println(new String(bytesToWriteTo, StandardCharsets.UTF_8));
}
// end downloadToMemory
Expand All @@ -131,18 +153,23 @@ private static void downloadToMemory(GridFSBucket gridFSBucket) throws Exception
/**
 * Renames the bucket file identified by a hard-coded "_id" value.
 * The "start"/"end" comments are literalinclude markers for the docs build — do not remove.
 *
 * @param gridFSBucket bucket containing the file to rename
 * @throws Exception declared for symmetry with the other snippet methods
 */
private static void renameFile(GridFSBucket gridFSBucket) throws Exception {
    // start renameFile
    ObjectId fileId = new ObjectId("60345d38ebfcf47030e81cc9");

    // Renames the file that has a specified "_id" value to "mongodbTutorial.zip"
    gridFSBucket.rename(fileId, "mongodbTutorial.zip");
    // end renameFile
}

/**
 * Deletes the bucket file identified by a hard-coded "_id" value.
 * The "start"/"end" comments are literalinclude markers for the docs build — do not remove.
 *
 * @param gridFSBucket bucket containing the file to delete
 * @throws Exception declared for symmetry with the other snippet methods
 */
private static void deleteFile(GridFSBucket gridFSBucket) throws Exception {
    // start deleteFile
    ObjectId fileId = new ObjectId("60345d38ebfcf47030e81cc9");

    // Deletes the file that has a specified "_id" value from the GridFS bucket
    gridFSBucket.delete(fileId);
    // end deleteFile
}

private static void dropBucket(MongoClient mongoClient) throws Exception {
// Deletes a database's default GridFS bucket
// start dropBucket
MongoDatabase database = mongoClient.getDatabase("mydb");
GridFSBucket gridFSBucket = GridFSBuckets.create(database);
Expand All @@ -154,6 +181,7 @@ public static void main(final String[] args) throws Exception {
String uri = "mongodb://localhost:27017";

try (MongoClient mongoClient = MongoClients.create(uri)) {
// Creates a GridFS bucket on a database
// start createGridFSBucket
MongoDatabase database = mongoClient.getDatabase("mydb");
GridFSBucket gridFSBucket = GridFSBuckets.create(database);
Expand Down
43 changes: 36 additions & 7 deletions source/includes/fundamentals/code-snippets/IndexPage.java
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ private IndexPage() {

public static void main(String[] args) {
IndexPage page = new IndexPage();

page.singleIndex();
page.compoundIndex();
page.wildCardIndex();
Expand All @@ -48,109 +49,136 @@ public static void main(String[] args) {

/**
 * Demonstrates creating a single-field index on "title" and running a query
 * that the index can cover (filter, sort, and projection all use "title").
 * The "begin"/"end" comments are literalinclude markers for the docs build — do not remove.
 */
private void singleIndex() {
    System.out.println("single index");

    // Creates an index on the "title" field in ascending order
    // begin single index
    String resultCreateIndex = collection.createIndex(Indexes.ascending("title"));
    System.out.println(String.format("Index created: %s", resultCreateIndex));
    // end single index

    // Retrieves matching documents directly from the "title" index, applying a sort and projection
    // begin covered single query
    Bson filter = eq("title", "Batman");
    Bson sort = Sorts.ascending("title");
    Bson projection = fields(include("title"), excludeId());
    FindIterable<Document> cursor = collection.find(filter).sort(sort).projection(projection);
    // end covered single query

    // Prints the results of the find operation
    cursor.forEach(doc -> System.out.println(doc));

}

/**
 * Demonstrates creating a compound index on "type" and "rated" and running a
 * query that the index can cover (filter, sort, and projection use only those fields).
 * The "begin"/"end" comments are literalinclude markers for the docs build — do not remove.
 */
private void compoundIndex() {
    System.out.println("compound index");

    // Creates a compound index on the "type" and "rated" fields in ascending order
    // begin compound index
    String resultCreateIndex = collection.createIndex(Indexes.ascending("type", "rated"));
    System.out.println(String.format("Index created: %s", resultCreateIndex));
    // end compound index

    // Retrieves matching documents directly from the compound index, applying a sort and projection
    // begin covered compound query
    Bson filter = and(eq("type", "movie"), eq("rated", "G"));
    Bson sort = Sorts.ascending("type", "rated");
    Bson projection = fields(include("type", "rated"), excludeId());
    FindIterable<Document> cursor = collection.find(filter).sort(sort).projection(projection);
    // end covered compound query

    // Prints the results of the find operation
    cursor.forEach(doc -> System.out.println(doc));
}

/**
 * Demonstrates creating a compound multikey index ("genres" holds arrays) and
 * running a query that reads directly from that index.
 * The "begin"/"end" comments are literalinclude markers for the docs build — do not remove.
 */
private void multiKeyIndex() {
    System.out.println("multikey index");
    // Creates a compound multikey index on the "rated", "genres", and "title" fields in ascending order
    // begin multikey index
    String resultCreateIndex = collection.createIndex(Indexes.ascending("rated", "genres", "title"));
    System.out.println(String.format("Index created: %s", resultCreateIndex));
    // end multikey index

    // Retrieves matching documents directly from the multikey index, applying a sort and projection
    // begin covered multikey query
    Bson filter = and(eq("genres", "Animation"), eq("rated", "G"));
    Bson sort = Sorts.ascending("title");
    Bson projection = fields(include("title", "rated"), excludeId());
    FindIterable<Document> cursor = collection.find(filter).sort(sort).projection(projection);
    // end covered multikey query

    // Prints the results of the find operation
    cursor.forEach(doc -> System.out.println(doc));
}

/**
 * Demonstrates creating a text index on "plot" and running a $text search
 * against it. Creating the index is wrapped in a try/catch because the server
 * rejects a text index whose options conflict with an existing one.
 * The "begin"/"end" comments are literalinclude markers for the docs build — do not remove.
 */
private void textIndex() {
    System.out.println("text index");
    // Creates a text index on the "plot" field
    // begin text index
    // create a text index of the "plot" field in the "movies" collection
    // if a text index already exists with a different configuration, this will
    // error
    try {
        String resultCreateIndex = collection.createIndex(Indexes.text("plot"));
        System.out.println(String.format("Index created: %s", resultCreateIndex));

    // Prints a message if a text index already exists with a different configuration
    } catch (MongoCommandException e) {
        if (e.getErrorCodeName().equals("IndexOptionsConflict"))
        System.out.println("there's an existing text index with different options");
    }
    // end text index

    // Retrieves matching documents directly from the text index, applying a projection
    // begin text query
    Bson filter = text("java coffee shop");
    Bson projection = fields(include("fullplot"), excludeId());
    FindIterable<Document> cursor = collection.find(filter).projection(projection);
    // end text query

    // Prints the results of the find operation
    cursor.forEach(doc -> System.out.println(doc));
}

/**
 * Demonstrates creating a 2dsphere index on "location.geo" in the "theaters"
 * collection and running a proximity query that uses it.
 *
 * Fix: the catch block carried a stale, duplicated println ("existing text
 * index") left alongside its corrected replacement; because the if statement
 * was braceless, only the stale message was guarded and the corrected message
 * printed unconditionally for every MongoCommandException. The stale line is
 * removed and the if body is braced.
 * The "begin"/"end" comments are literalinclude markers for the docs build — do not remove.
 */
private void geoSpatialIndex() {
    System.out.println("geospatial index");
    collection = database.getCollection("theaters");

    // Creates a geospatial index on the "location.geo" field
    // begin geospatial index
    // if an existing geo index exists, this will error
    try {
        String resultCreateIndex = collection.createIndex(Indexes.geo2dsphere("location.geo"));
        System.out.println(String.format("Index created: %s", resultCreateIndex));

    // Prints a message if a geospatial index already exists with a different configuration
    } catch (MongoCommandException e) {
        if (e.getErrorCodeName().equals("IndexOptionsConflict")) {
            System.out.println("there's an existing geospatial index with different options");
        }
    }
    // end geospatial index

    // begin geospatial query
    // Stores the coordinates of the NY MongoDB headquarters
    Point refPoint = new Point(new Position(-73.98456, 40.7612));

    // Retrieves documents that represent locations up to 1000 meters from the specified point directly from the geospatial index
    Bson filter = near("location.geo", refPoint, 1000.0, 0.0);
    FindIterable<Document> cursor = collection.find(filter);
    // end geospatial query

    // Prints the results of the find operation
    cursor.forEach(doc -> System.out.println(doc));
}

private void uniqueIndex() {
System.out.println("unique index");
collection = database.getCollection("theaters");

// Creates a unique index on the "theaterID" field in descending order
// begin unique index
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Delete this comment

Suggested change
// begin unique index
// begin unique index

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this comment should be kept since it is used for the literalinclude directive here.

In the Code Comment Style Guide, it's identified as a "Marker" comment type.

// this will fail if any duplicate values exist on the field you are indexing
try {
IndexOptions indexOptions = new IndexOptions().unique(true);
String resultCreateIndex = collection.createIndex(Indexes.descending("theaterId"), indexOptions);
System.out.println(String.format("Index created: %s", resultCreateIndex));

// Prints a message if the "theaterID" field contains duplicate values
} catch (DuplicateKeyException e) {
System.out.printf("duplicate field values encountered, couldn't create index: \t%s\n", e);
}
Expand All @@ -160,6 +188,7 @@ private void wildcardIndex() {
System.out.println("wildcard index");
collection = database.getCollection("theaters");

// Creates a wildcard index on all values of the "location" field in ascending order
// begin wildcard index
String resultCreateIndex = collection.createIndex(Indexes.ascending("location.$**"));
System.out.println(String.format("Index created: %s", resultCreateIndex));
Expand Down
7 changes: 7 additions & 0 deletions source/includes/fundamentals/code-snippets/Insert.java
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ public static void main(String[] args) {

private void insertOneExample() {
collection.drop();
// Inserts a sample document and prints the document's ID
// begin insertOneExample
Document doc1 = new Document("color", "red").append("qty", 5);

Expand All @@ -58,6 +59,7 @@ private void insertOneExample() {

private void insertManyExample() {
collection.drop();
// Inserts sample documents into a collection
// begin insertManyExample
List<Document> documents = new ArrayList<>();

Expand All @@ -69,6 +71,7 @@ private void insertManyExample() {

InsertManyResult result = collection.insertMany(documents);

// Retrieves and prints the ID values of each inserted document
List<ObjectId> insertedIds = new ArrayList<>();
result.getInsertedIds().values()
.forEach(doc -> insertedIds.add(doc.asObjectId().getValue()));
Expand All @@ -95,11 +98,15 @@ private void insertManyErrorExample() {

// begin insertManyErrorExample
List<Integer> insertedIds = new ArrayList<>();

// Inserts sample documents and prints their "_id" values
try {
InsertManyResult result = collection.insertMany(documents);
result.getInsertedIds().values()
.forEach(doc -> insertedIds.add(doc.asInt32().getValue()));
System.out.println("Inserted documents with the following ids: " + insertedIds);

// Prints a message if any exceptions occur during the operation and the "_id" values of inserted documents
} catch(MongoBulkWriteException exception) {
exception.getWriteResult().getInserts()
.forEach(doc -> insertedIds.add(doc.getId().asInt32().getValue()));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ public static void main(String [] args){
}

private void updateOneAttemptExample(){
// Updates the "qty" value of the document that matches the filter
// begin updateOneAttemptExample
Bson filter = Filters.eq("color", "orange");
Bson update = Updates.inc("qty", 10);
Expand All @@ -52,15 +53,20 @@ private void updateOneAttemptExample(){

/**
 * Demonstrates an upsert: increments "qty" on the first document matching
 * "color: orange", inserting a new document when no match exists, and prints
 * the UpdateResult returned by the driver.
 * The "begin"/"end" comments are literalinclude markers for the docs build — do not remove.
 */
private void updateOneExample(){
    // begin updateOneExample
    // Creates a filter and update document to increment the matching document's "qty" value
    Bson filter = Filters.eq("color", "orange");
    Bson update = Updates.inc("qty", 10);

    // Updates the matching document or inserts a document if none match the query filter
    UpdateOptions options = new UpdateOptions().upsert(true);
    System.out.println(collection.updateOne(filter, update, options));
    // end updateOneExample
}

private void preview(boolean drop){
Bson filter = Filters.empty();

// Prints the JSON representation of each document in the collection
collection.find(filter).forEach(doc -> System.out.println(doc.toJson()));
if (drop){
collection.drop();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,15 +12,23 @@ public class JMXMonitoring {
public static void main(String[] args) throws InterruptedException {
// start jmx-example
JMXConnectionPoolListener connectionPoolListener = new JMXConnectionPoolListener();

MongoClientSettings settings =
MongoClientSettings.builder()
.applyConnectionString(URI)
.applyToConnectionPoolSettings(builder -> builder.addConnectionPoolListener(connectionPoolListener))
.build();

// Creates a MongoClient instance that enables connection pool event monitoring with the JMX tool
MongoClient mongoClient = MongoClients.create(settings);

try {
System.out.println("Navigate to JConsole to see your connection pools...");

// Pauses the code execution so you can navigate to JConsole and inspect your connection pools
Thread.sleep(Long.MAX_VALUE);

// Prints exception details if any exceptions occur during the code execution
} catch (Exception e) {
e.printStackTrace();
}
Expand Down