From 4d1846930ba430da4872c7d41b3b8e4bf8283d62 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Mon, 4 Mar 2024 15:01:37 +0100 Subject: [PATCH 01/22] chore: Dependency Update and changes for mongo-scala-driver 5.0.0 --- build.sbt | 4 +- .../mongocamp/driver/mongodb/GridFSDAO.scala | 40 ++++++------ .../mongodb/database/DatabaseProvider.scala | 20 +++--- .../driver/mongodb/gridfs/Base.scala | 41 +++++------- .../driver/mongodb/gridfs/Crud.scala | 12 ++-- .../driver/mongodb/operation/Base.scala | 62 +++++++------------ .../gridfs/GridfsDatabaseFunctions.scala | 11 ++-- .../driver/mongodb/operation/IndexSpec.scala | 2 +- 8 files changed, 78 insertions(+), 114 deletions(-) diff --git a/build.sbt b/build.sbt index c5cd8009..bc34a543 100644 --- a/build.sbt +++ b/build.sbt @@ -63,7 +63,7 @@ resolvers += "Sonatype OSS Snapshots".at("https://oss.sonatype.org/content/repos libraryDependencies += "org.specs2" %% "specs2-core" % "4.20.5" % Test -libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.5.0" % Test +libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.5.3" % Test libraryDependencies += "joda-time" % "joda-time" % "2.12.7" % Test @@ -75,7 +75,7 @@ libraryDependencies ++= Seq( "io.circe" %% "circe-parser" ).map(_ % circeVersion % Test) -libraryDependencies += "org.mongodb.scala" %% "mongo-scala-driver" % "4.11.1" +libraryDependencies += "org.mongodb.scala" %% "mongo-scala-driver" % "5.0.0" libraryDependencies += "org.xerial.snappy" % "snappy-java" % "1.1.10.5" % Provided diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/GridFSDAO.scala b/src/main/scala/dev/mongocamp/driver/mongodb/GridFSDAO.scala index 481eaa10..ac702c2c 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/GridFSDAO.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/GridFSDAO.scala @@ -1,21 +1,22 @@ package dev.mongocamp.driver.mongodb -import dev.mongocamp.driver.mongodb.database.{ ChangeObserver, CollectionStatus, DatabaseProvider } +import 
dev.mongocamp.driver.mongodb.database.{ChangeObserver, CollectionStatus, DatabaseProvider} import dev.mongocamp.driver.mongodb.gridfs.Metadata import org.bson.types.ObjectId import org.mongodb.scala.bson.conversions.Bson -import org.mongodb.scala.gridfs.{ GridFSBucket, GridFSFile } +import org.mongodb.scala.gridfs.{GridFSBucket, GridFSFile} import org.mongodb.scala.model.CountOptions -import org.mongodb.scala.{ Document, Observable, ReadConcern, ReadPreference, SingleObservable, WriteConcern } +import org.mongodb.scala.{Document, Observable, ReadConcern, ReadPreference, SingleObservable, WriteConcern} abstract class GridFSDAO(provider: DatabaseProvider, bucketName: String) extends Metadata(provider, bucketName) { - var bucket: GridFSBucket = provider.bucket(bucketName) + protected var bucket: GridFSBucket = provider.bucket(bucketName) - val databaseName: String = provider.guessDatabaseName(bucketName) + protected val databaseName: String = provider.guessDatabaseName(bucketName) - def addChangeObserver(observer: ChangeObserver[Document]): ChangeObserver[Document] = + def addChangeObserver(observer: ChangeObserver[Document]): ChangeObserver[Document] = { Files.addChangeObserver(observer: ChangeObserver[Document]) + } def fileCollectionStatus: Observable[CollectionStatus] = Files.collectionStatus @@ -23,32 +24,33 @@ abstract class GridFSDAO(provider: DatabaseProvider, bucketName: String) extends protected def gridfsBucket: GridFSBucket = bucket - def count(filter: Bson = Document(), options: CountOptions = CountOptions()): Observable[Long] = - Files.count(filter, options) + def count(filter: Bson = Document(), options: CountOptions = CountOptions()): Observable[Long] = Files.count(filter, options) - def createMetadataIndex(key: String, sortAscending: Boolean = true): SingleObservable[String] = + def createMetadataIndex(key: String, sortAscending: Boolean = true): SingleObservable[String] = { Files.createIndexForField(createMetadataKey(key), sortAscending) + } - def 
dropIndexForName(name: String): SingleObservable[Void] = - Files.dropIndexForName(name) + def dropIndexForName(name: String): SingleObservable[Unit] = Files.dropIndexForName(name) - def renameFile(id: ObjectId, newFilename: String): Observable[Void] = - gridfsBucket.rename(id, newFilename) + def renameFile(id: ObjectId, newFilename: String): Observable[Unit] = gridfsBucket.rename(id, newFilename) - def renameFile(file: GridFSFile, newFilename: String): Observable[Void] = - gridfsBucket.rename(file.getId, newFilename) + def renameFile(file: GridFSFile, newFilename: String): Observable[Unit] = gridfsBucket.rename(file.getId, newFilename) - def withReadConcern(readConcern: ReadConcern): Unit = + def withReadConcern(readConcern: ReadConcern): Unit = { bucket = GridFSBucket(provider.database(), bucketName).withReadConcern(readConcern) + } - def withWriteConcern(writeConcern: WriteConcern): Unit = + def withWriteConcern(writeConcern: WriteConcern): Unit = { bucket = GridFSBucket(provider.database(), bucketName).withWriteConcern(writeConcern) + } - def withChunkSizeBytes(chunkSizeBytes: Int): Unit = + def withChunkSizeBytes(chunkSizeBytes: Int): Unit = { bucket = GridFSBucket(provider.database(), bucketName).withChunkSizeBytes(chunkSizeBytes) + } - def withReadPreference(readPreference: ReadPreference): Unit = + def withReadPreference(readPreference: ReadPreference): Unit = { bucket = GridFSBucket(provider.database(), bucketName).withReadPreference(readPreference) + } override def toString: String = "%s:%s@%s, %s".format(databaseName, bucketName, provider.config, super.toString) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala index cb9a506b..72dc5796 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala @@ -2,7 +2,7 @@ package 
dev.mongocamp.driver.mongodb.database import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.bson.codecs.CustomCodecProvider -import org.bson.codecs.configuration.CodecRegistries.{ fromProviders, fromRegistries } +import org.bson.codecs.configuration.CodecRegistries.{fromProviders, fromRegistries} import org.bson.codecs.configuration.CodecRegistry import org.mongodb.scala.MongoClient.DEFAULT_CODEC_REGISTRY import org.mongodb.scala._ @@ -12,8 +12,8 @@ import scala.collection.mutable import scala.reflect.ClassTag class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) extends Serializable { - private val cachedDatabaseMap = new mutable.HashMap[String, MongoDatabase]() - private val cachedMongoDAOMap = new mutable.HashMap[String, MongoDAO[Document]]() + private val cachedDatabaseMap = new mutable.HashMap[String, MongoDatabase]() + private val cachedMongoDAOMap = new mutable.HashMap[String, MongoDAO[Document]]() private var cachedClient: Option[MongoClient] = None val DefaultDatabaseName: String = config.database @@ -40,7 +40,7 @@ class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) ext def databaseNames: List[String] = databaseInfos.map(info => info.name) - def dropDatabase(databaseName: String = DefaultDatabaseName): SingleObservable[Void] = database(databaseName).drop() + def dropDatabase(databaseName: String = DefaultDatabaseName): SingleObservable[Unit] = database(databaseName).drop() def compactDatabase(databaseName: String = DefaultDatabaseName, maxWaitPerCollection: Int = DefaultMaxWait): List[CompactResult] = { collectionNames(databaseName).flatMap(collectionName => dao(collectionName).compact.result(maxWaitPerCollection)) @@ -83,15 +83,13 @@ class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) ext database(databaseName).runCommand(document) } - def collectionStatus( - collectionName: String, - databaseName: String = DefaultDatabaseName - ): Observable[CollectionStatus] = + 
def collectionStatus(collectionName: String, databaseName: String = DefaultDatabaseName): Observable[CollectionStatus] = { runCommand(Map("collStats" -> collectionName), databaseName).map(document => CollectionStatus(document)) + } def collection[A](collectionName: String)(implicit ct: ClassTag[A]): MongoCollection[A] = if (collectionName.contains(DatabaseProvider.CollectionSeparator)) { - val newDatabaseName: String = guessDatabaseName(collectionName) + val newDatabaseName: String = guessDatabaseName(collectionName) val newCollectionName: String = guessName(collectionName) database(newDatabaseName).getCollection[A](newCollectionName) } @@ -120,7 +118,7 @@ class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) ext def bucket(bucketName: String): GridFSBucket = { if (bucketName.contains(DatabaseProvider.CollectionSeparator)) { val newDatabaseName = guessDatabaseName(bucketName) - val newBucketName = guessName(bucketName) + val newBucketName = guessName(bucketName) GridFSBucket(database(newDatabaseName), newBucketName) } else { @@ -144,7 +142,7 @@ class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) ext } object DatabaseProvider { - val ObjectIdKey = "_id" + val ObjectIdKey = "_id" val CollectionSeparator = ":" private val CustomRegistry = fromProviders(CustomCodecProvider()) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Base.scala b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Base.scala index 3f3466dd..fe42c81b 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Base.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Base.scala @@ -9,8 +9,8 @@ import dev.mongocamp.driver.mongodb.Converter import dev.mongocamp.driver.mongodb.database.DatabaseProvider import com.typesafe.scalalogging.LazyLogging import org.mongodb.scala.bson.ObjectId -import org.mongodb.scala.gridfs.{ GridFSBucket, GridFSDownloadObservable } -import org.mongodb.scala.{ Document, Observable, ReadConcern, 
ReadPreference, WriteConcern } +import org.mongodb.scala.gridfs.{GridFSBucket, GridFSDownloadObservable} +import org.mongodb.scala.{Document, Observable, ReadConcern, ReadPreference, WriteConcern} abstract class Base extends LazyLogging { @@ -18,12 +18,13 @@ abstract class Base extends LazyLogging { def createMetadataKey(key: String): String = { var metadataKey = key - if (!metadataKey.startsWith("metadata")) + if (!metadataKey.startsWith("metadata")) { metadataKey = "%s.%s".format("metadata", key) + } metadataKey } - def drop(): Observable[Void] = gridfsBucket.drop() + def drop(): Observable[Unit] = gridfsBucket.drop() def bucketName: String = gridfsBucket.bucketName @@ -35,49 +36,35 @@ abstract class Base extends LazyLogging { def readConcern: ReadConcern = gridfsBucket.readConcern - def upload( - fileName: String, - source: Observable[ByteBuffer], - metadata: AnyRef = Document(), - chunkSizeBytes: Int = 1024 * 256 - ): Observable[ObjectId] = { + def upload(fileName: String, source: Observable[ByteBuffer], metadata: AnyRef = Document(), chunkSizeBytes: Int = 1024 * 256): Observable[ObjectId] = { val metadataDocument = { metadata match { case document: Document => document - case _ => Converter.toDocument(metadata) + case _ => Converter.toDocument(metadata) } } - val options: GridFSUploadOptions = new GridFSUploadOptions() - .chunkSizeBytes(chunkSizeBytes) - .metadata(metadataDocument) + val options: GridFSUploadOptions = new GridFSUploadOptions().chunkSizeBytes(chunkSizeBytes).metadata(metadataDocument) gridfsBucket.uploadFromObservable(fileName, source, options) } - def uploadFile( - fileName: String, - file: File, - metadata: AnyRef = Document(), - chunkSizeBytes: Int = 1204 * 256 - ): Observable[ObjectId] = + def uploadFile(fileName: String, file: File, metadata: AnyRef = Document(), chunkSizeBytes: Int = 1204 * 256): Observable[ObjectId] = { upload(fileName, GridFSStreamObservable(file.newInputStream, chunkSizeBytes), metadata, chunkSizeBytes) + } - def 
download(oid: ObjectId): GridFSDownloadObservable = - gridfsBucket.downloadToObservable(oid) + def download(oid: ObjectId): GridFSDownloadObservable = gridfsBucket.downloadToObservable(oid) - def download(id: ObjectId, file: File): GridFSStreamObserver = - download(id, file.newOutputStream) + def download(id: ObjectId, file: File): GridFSStreamObserver = download(id, file.newOutputStream) def downloadFileResult(id: ObjectId, file: File): Long = streamObserverResult(download(id, file)) def download(oid: ObjectId, outputStream: OutputStream): GridFSStreamObserver = { val observable: GridFSDownloadObservable = gridfsBucket.downloadToObservable(oid) - val observer = GridFSStreamObserver(outputStream) + val observer = GridFSStreamObserver(outputStream) observable.subscribe(observer) observer } - def downloadStreamResult(id: ObjectId, outputStream: OutputStream): Long = - streamObserverResult(download(id, outputStream)) + def downloadStreamResult(id: ObjectId, outputStream: OutputStream): Long = streamObserverResult(download(id, outputStream)) protected def streamObserverResult(observer: GridFSStreamObserver): Long = { while (!observer.completed.get) {} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Crud.scala b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Crud.scala index b8dd903e..7deb2b2f 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Crud.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Crud.scala @@ -3,18 +3,14 @@ package dev.mongocamp.driver.mongodb.gridfs import java.io.InputStream import org.mongodb.scala.bson.ObjectId -import org.mongodb.scala.{ Document, Observable } +import org.mongodb.scala.{Document, Observable} abstract class Crud extends Search { - def deleteOne(id: ObjectId): Observable[Void] = gridfsBucket.delete(id) + def deleteOne(id: ObjectId): Observable[Unit] = gridfsBucket.delete(id) - def insertOne( - fileName: String, - stream: InputStream, - metadata: AnyRef = Document(), - chunkSizeBytes: 
Int = 1204 * 256 - ): Observable[ObjectId] = + def insertOne(fileName: String, stream: InputStream, metadata: AnyRef = Document(), chunkSizeBytes: Int = 1204 * 256): Observable[ObjectId] = { upload(fileName, GridFSStreamObservable(stream, chunkSizeBytes), metadata, chunkSizeBytes) + } } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/operation/Base.scala b/src/main/scala/dev/mongocamp/driver/mongodb/operation/Base.scala index 862254a5..9609d90e 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/operation/Base.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/operation/Base.scala @@ -4,13 +4,12 @@ import com.typesafe.scalalogging.LazyLogging import dev.mongocamp.driver.mongodb.database.MongoIndex import org.mongodb.scala.bson.conversions.Bson import org.mongodb.scala.model.Sorts._ -import org.mongodb.scala.model.{ CountOptions, DropIndexOptions, IndexOptions, Indexes } -import org.mongodb.scala.{ Document, ListIndexesObservable, MongoCollection, Observable, SingleObservable } +import org.mongodb.scala.model.{CountOptions, DropIndexOptions, IndexOptions, Indexes} +import org.mongodb.scala.{Document, ListIndexesObservable, MongoCollection, Observable, SingleObservable} import scala.concurrent.duration.Duration -import scala.reflect.ClassTag -abstract class Base[A]()(implicit ct: ClassTag[A]) extends LazyLogging { +abstract class Base[A] extends LazyLogging { protected def coll: MongoCollection[A] @@ -18,13 +17,9 @@ abstract class Base[A]()(implicit ct: ClassTag[A]) extends LazyLogging { coll.countDocuments(filter, options) } - def drop(): Observable[Void] = coll.drop() + def drop(): Observable[Unit] = coll.drop() - def createIndexForField( - fieldName: String, - sortAscending: Boolean = true, - options: IndexOptions = IndexOptions() - ): SingleObservable[String] = { + def createIndexForField(fieldName: String, sortAscending: Boolean = true, options: IndexOptions = IndexOptions()): SingleObservable[String] = { if (sortAscending) { 
createIndex(ascending(fieldName), options) } @@ -33,46 +28,35 @@ abstract class Base[A]()(implicit ct: ClassTag[A]) extends LazyLogging { } } - def createIndexForFieldWithName( - fieldName: String, - sortAscending: Boolean = true, - name: String - ): SingleObservable[String] = + def createIndexForFieldWithName(fieldName: String, sortAscending: Boolean = true, name: String): SingleObservable[String] = { createIndexForField(fieldName, sortAscending, MongoIndex.indexOptionsWithName(Some(name))) + } - def createUniqueIndexForField( - fieldName: String, - sortAscending: Boolean = true, - name: Option[String] = None - ): SingleObservable[String] = + def createUniqueIndexForField(fieldName: String, sortAscending: Boolean = true, name: Option[String] = None): SingleObservable[String] = { createIndexForField(fieldName, sortAscending, MongoIndex.indexOptionsWithName(name).unique(true)) + } - def createHashedIndexForField(fieldName: String, options: IndexOptions = IndexOptions()): SingleObservable[String] = + def createHashedIndexForField(fieldName: String, options: IndexOptions = IndexOptions()): SingleObservable[String] = { createIndex(Indexes.hashed(fieldName), options) + } - def createTextIndexForField(fieldName: String, options: IndexOptions = IndexOptions()): SingleObservable[String] = + def createTextIndexForField(fieldName: String, options: IndexOptions = IndexOptions()): SingleObservable[String] = { createIndex(Indexes.text(fieldName), options) + } - def createExpiringIndexForField( - fieldName: String, - duration: Duration, - sortAscending: Boolean = true, - name: Option[String] = None - ): SingleObservable[String] = - createIndexForField( - fieldName, - sortAscending, - MongoIndex.indexOptionsWithName(name).expireAfter(duration._1, duration._2) - ) - - def createIndex(key: Bson, options: IndexOptions = IndexOptions()): SingleObservable[String] = - coll.createIndex(key, options) - - def dropIndexForName(name: String, options: DropIndexOptions = new 
DropIndexOptions()): SingleObservable[Void] = + def createExpiringIndexForField(fieldName: String, duration: Duration, sortAscending: Boolean = true, name: Option[String] = None): SingleObservable[String] = { + createIndexForField(fieldName, sortAscending, MongoIndex.indexOptionsWithName(name).expireAfter(duration._1, duration._2)) + } + + def createIndex(key: Bson, options: IndexOptions = IndexOptions()): SingleObservable[String] = coll.createIndex(key, options) + + def dropIndexForName(name: String, options: DropIndexOptions = new DropIndexOptions()): SingleObservable[Unit] = { coll.dropIndex(name, options) + } - def dropIndex(keys: Bson, options: DropIndexOptions = new DropIndexOptions()): SingleObservable[Void] = + def dropIndex(keys: Bson, options: DropIndexOptions = new DropIndexOptions()): SingleObservable[Unit] = { coll.dropIndex(keys, options) + } def listIndexes: ListIndexesObservable[Map[String, Any]] = coll.listIndexes[Map[String, Any]]() diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridfsDatabaseFunctions.scala b/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridfsDatabaseFunctions.scala index d70d1971..9e4e4832 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridfsDatabaseFunctions.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridfsDatabaseFunctions.scala @@ -10,16 +10,13 @@ import org.mongodb.scala.result.UpdateResult trait GridfsDatabaseFunctions extends MongoImplicits { - def createIndexOnImages(key: String): String = - ImageFilesDAO.createMetadataIndex(key) + def createIndexOnImages(key: String): String = ImageFilesDAO.createMetadataIndex(key) - def dropIndexOnImages(key: String): Void = - ImageFilesDAO.dropIndexForName(key) + def dropIndexOnImages(key: String): Unit = ImageFilesDAO.dropIndexForName(key) - def deleteImage(id: ObjectId): Void = - ImageFilesDAO.deleteOne(id) + def deleteImage(id: ObjectId): Unit = ImageFilesDAO.deleteOne(id) - def dropImages: Void = ImageFilesDAO.drop() + def 
dropImages: Unit = ImageFilesDAO.drop() def imagesCount: Long = ImageFilesDAO.count() diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSpec.scala index 4b2a99f2..5def73cb 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSpec.scala @@ -23,7 +23,7 @@ class IndexSpec extends PersonSpecification { val index: MongoIndex = PersonDAO.indexForName("name_1").get index.expire must beFalse - val dropIndexResult: Void = PersonDAO.dropIndexForName(createIndexResult).result() + val dropIndexResult: Unit = PersonDAO.dropIndexForName(createIndexResult).result() PersonDAO.indexList must haveSize(1) } From 094f50d4afb2cbf9774c18c4a0fb998000c62221 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Mon, 4 Mar 2024 15:07:00 +0100 Subject: [PATCH 02/22] chore: plugins updated 5 dependency updates for project * dev.quadstingray:sbt-json : 0.6.5 -> 0.7.1 * org.scoverage:sbt-scoverage : 2.0.9 -> 2.0.11 * org.xerial.sbt:sbt-sonatype : 3.9.21 -> 3.10.0 * "com.github.gseitz" % "sbt-release" => "com.github.sbt" % "sbt-release" and changes to keep handling like before --- build_release.sbt | 2 ++ project/build.properties | 2 +- project/plugins.sbt | 8 ++++---- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/build_release.sbt b/build_release.sbt index 47649cc1..6ba48a77 100644 --- a/build_release.sbt +++ b/build_release.sbt @@ -6,6 +6,8 @@ import sbtrelease.ReleasePlugin.runtimeVersion import scala.sys.process.* +releaseVersionBump := sbtrelease.Version.Bump.NextStable + val gitAddAllTask = ReleaseStep(action = st => { "git add .".! 
st diff --git a/project/build.properties b/project/build.properties index d4151995..8cf07b7c 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.9.7 \ No newline at end of file +sbt.version=1.9.8 \ No newline at end of file diff --git a/project/plugins.sbt b/project/plugins.sbt index 858d37d1..88ae3188 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -2,7 +2,7 @@ addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.2") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.9") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.11") addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") @@ -13,13 +13,13 @@ addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.4") addSbtPlugin("com.github.fedragon" % "sbt-todolist" % "0.7") // Release -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.10.0") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.1.1") -addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") +addSbtPlugin("com.github.sbt" % "sbt-release" % "1.4.0") -addSbtPlugin("dev.quadstingray" %% "sbt-json" % "0.6.5") +addSbtPlugin("dev.quadstingray" %% "sbt-json" % "0.7.1") addDependencyTreePlugin From 3df7127c034063e152dd5fa5c388b87699ab8c24 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Mon, 4 Mar 2024 15:14:21 +0100 Subject: [PATCH 03/22] style: reformat code with scalafmt --- README.md | 5 +++-- .../dev/mongocamp/driver/mongodb/GridFSDAO.scala | 6 +++--- .../driver/mongodb/database/DatabaseProvider.scala | 12 ++++++------ .../dev/mongocamp/driver/mongodb/gridfs/Base.scala | 8 ++++---- .../dev/mongocamp/driver/mongodb/gridfs/Crud.scala | 2 +- .../mongocamp/driver/mongodb/operation/Base.scala | 11 ++++++++--- .../driver/mongodb/gridfs/GridFSDatabaseSpec.scala | 9 +++++---- .../mongodb/gridfs/GridfsDatabaseFunctions.scala | 2 +- 8 files changed, 31 insertions(+), 24 
deletions(-) diff --git a/README.md b/README.md index 6967a96c..5b30b610 100644 --- a/README.md +++ b/README.md @@ -158,8 +158,9 @@ class RestaurantDemoSpec extends Specification with RestaurantDemoDatabaseFuncti ## Run Tests ```shell - docker run --publish 27017:27017 mongocamp/mongodb:latest; - sbt test; +docker run -d --publish 27017:27017 --name mongodb mongocamp/mongodb:latest; +sbt test; +docker rm -f mongodb; ``` ## Supporters diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/GridFSDAO.scala b/src/main/scala/dev/mongocamp/driver/mongodb/GridFSDAO.scala index ac702c2c..82b4bb2f 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/GridFSDAO.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/GridFSDAO.scala @@ -1,12 +1,12 @@ package dev.mongocamp.driver.mongodb -import dev.mongocamp.driver.mongodb.database.{ChangeObserver, CollectionStatus, DatabaseProvider} +import dev.mongocamp.driver.mongodb.database.{ ChangeObserver, CollectionStatus, DatabaseProvider } import dev.mongocamp.driver.mongodb.gridfs.Metadata import org.bson.types.ObjectId import org.mongodb.scala.bson.conversions.Bson -import org.mongodb.scala.gridfs.{GridFSBucket, GridFSFile} +import org.mongodb.scala.gridfs.{ GridFSBucket, GridFSFile } import org.mongodb.scala.model.CountOptions -import org.mongodb.scala.{Document, Observable, ReadConcern, ReadPreference, SingleObservable, WriteConcern} +import org.mongodb.scala.{ Document, Observable, ReadConcern, ReadPreference, SingleObservable, WriteConcern } abstract class GridFSDAO(provider: DatabaseProvider, bucketName: String) extends Metadata(provider, bucketName) { diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala index 72dc5796..2ba1451b 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala @@ -2,7 +2,7 
@@ package dev.mongocamp.driver.mongodb.database import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.bson.codecs.CustomCodecProvider -import org.bson.codecs.configuration.CodecRegistries.{fromProviders, fromRegistries} +import org.bson.codecs.configuration.CodecRegistries.{ fromProviders, fromRegistries } import org.bson.codecs.configuration.CodecRegistry import org.mongodb.scala.MongoClient.DEFAULT_CODEC_REGISTRY import org.mongodb.scala._ @@ -12,8 +12,8 @@ import scala.collection.mutable import scala.reflect.ClassTag class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) extends Serializable { - private val cachedDatabaseMap = new mutable.HashMap[String, MongoDatabase]() - private val cachedMongoDAOMap = new mutable.HashMap[String, MongoDAO[Document]]() + private val cachedDatabaseMap = new mutable.HashMap[String, MongoDatabase]() + private val cachedMongoDAOMap = new mutable.HashMap[String, MongoDAO[Document]]() private var cachedClient: Option[MongoClient] = None val DefaultDatabaseName: String = config.database @@ -89,7 +89,7 @@ class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) ext def collection[A](collectionName: String)(implicit ct: ClassTag[A]): MongoCollection[A] = if (collectionName.contains(DatabaseProvider.CollectionSeparator)) { - val newDatabaseName: String = guessDatabaseName(collectionName) + val newDatabaseName: String = guessDatabaseName(collectionName) val newCollectionName: String = guessName(collectionName) database(newDatabaseName).getCollection[A](newCollectionName) } @@ -118,7 +118,7 @@ class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) ext def bucket(bucketName: String): GridFSBucket = { if (bucketName.contains(DatabaseProvider.CollectionSeparator)) { val newDatabaseName = guessDatabaseName(bucketName) - val newBucketName = guessName(bucketName) + val newBucketName = guessName(bucketName) GridFSBucket(database(newDatabaseName), newBucketName) } 
else { @@ -142,7 +142,7 @@ class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) ext } object DatabaseProvider { - val ObjectIdKey = "_id" + val ObjectIdKey = "_id" val CollectionSeparator = ":" private val CustomRegistry = fromProviders(CustomCodecProvider()) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Base.scala b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Base.scala index fe42c81b..cf361035 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Base.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Base.scala @@ -9,8 +9,8 @@ import dev.mongocamp.driver.mongodb.Converter import dev.mongocamp.driver.mongodb.database.DatabaseProvider import com.typesafe.scalalogging.LazyLogging import org.mongodb.scala.bson.ObjectId -import org.mongodb.scala.gridfs.{GridFSBucket, GridFSDownloadObservable} -import org.mongodb.scala.{Document, Observable, ReadConcern, ReadPreference, WriteConcern} +import org.mongodb.scala.gridfs.{ GridFSBucket, GridFSDownloadObservable } +import org.mongodb.scala.{ Document, Observable, ReadConcern, ReadPreference, WriteConcern } abstract class Base extends LazyLogging { @@ -40,7 +40,7 @@ abstract class Base extends LazyLogging { val metadataDocument = { metadata match { case document: Document => document - case _ => Converter.toDocument(metadata) + case _ => Converter.toDocument(metadata) } } val options: GridFSUploadOptions = new GridFSUploadOptions().chunkSizeBytes(chunkSizeBytes).metadata(metadataDocument) @@ -59,7 +59,7 @@ abstract class Base extends LazyLogging { def download(oid: ObjectId, outputStream: OutputStream): GridFSStreamObserver = { val observable: GridFSDownloadObservable = gridfsBucket.downloadToObservable(oid) - val observer = GridFSStreamObserver(outputStream) + val observer = GridFSStreamObserver(outputStream) observable.subscribe(observer) observer } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Crud.scala 
b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Crud.scala index 7deb2b2f..550c3d14 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Crud.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Crud.scala @@ -3,7 +3,7 @@ package dev.mongocamp.driver.mongodb.gridfs import java.io.InputStream import org.mongodb.scala.bson.ObjectId -import org.mongodb.scala.{Document, Observable} +import org.mongodb.scala.{ Document, Observable } abstract class Crud extends Search { diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/operation/Base.scala b/src/main/scala/dev/mongocamp/driver/mongodb/operation/Base.scala index 9609d90e..1e4f7555 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/operation/Base.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/operation/Base.scala @@ -4,8 +4,8 @@ import com.typesafe.scalalogging.LazyLogging import dev.mongocamp.driver.mongodb.database.MongoIndex import org.mongodb.scala.bson.conversions.Bson import org.mongodb.scala.model.Sorts._ -import org.mongodb.scala.model.{CountOptions, DropIndexOptions, IndexOptions, Indexes} -import org.mongodb.scala.{Document, ListIndexesObservable, MongoCollection, Observable, SingleObservable} +import org.mongodb.scala.model.{ CountOptions, DropIndexOptions, IndexOptions, Indexes } +import org.mongodb.scala.{ Document, ListIndexesObservable, MongoCollection, Observable, SingleObservable } import scala.concurrent.duration.Duration @@ -44,7 +44,12 @@ abstract class Base[A] extends LazyLogging { createIndex(Indexes.text(fieldName), options) } - def createExpiringIndexForField(fieldName: String, duration: Duration, sortAscending: Boolean = true, name: Option[String] = None): SingleObservable[String] = { + def createExpiringIndexForField( + fieldName: String, + duration: Duration, + sortAscending: Boolean = true, + name: Option[String] = None + ): SingleObservable[String] = { createIndexForField(fieldName, sortAscending, 
MongoIndex.indexOptionsWithName(name).expireAfter(duration._1, duration._2)) } diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSpec.scala index 01854024..89439f48 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSpec.scala @@ -25,8 +25,8 @@ class GridFSDatabaseSpec extends Specification with GridfsDatabaseFunctions with "insert file and in" in { val fileName = "scala-logo.png" - val filePath = ImageDAOSourcePath + fileName - val uploadBytes = File(filePath).bytes.toList + val filePath = ImageDAOSourcePath + fileName + val uploadBytes = File(filePath).bytes.toList val oid: ObjectId = insertImage(filePath, ImageMetadata("template1", group = "templates")) val file = findImage(oid) @@ -69,7 +69,7 @@ class GridFSDatabaseSpec extends Specification with GridfsDatabaseFunctions with } "find stats in file in" in { - val fileStats = ImageFilesDAO.fileCollectionStatus.result() + val fileStats = ImageFilesDAO.fileCollectionStatus.result() val chunkStats = ImageFilesDAO.chunkCollectionStats.result() fileStats.count must be greaterThan 0 @@ -85,7 +85,8 @@ class GridFSDatabaseSpec extends Specification with GridfsDatabaseFunctions with imagesCount must be equalTo 1 val file = File(ImageDAOTargetPath) - if (!file.exists) + if (!file.exists) { file.createDirectory() + } } } diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridfsDatabaseFunctions.scala b/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridfsDatabaseFunctions.scala index 9e4e4832..fbde0e06 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridfsDatabaseFunctions.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridfsDatabaseFunctions.scala @@ -16,7 +16,7 @@ trait GridfsDatabaseFunctions extends MongoImplicits { def deleteImage(id: ObjectId): Unit = 
ImageFilesDAO.deleteOne(id) - def dropImages: Unit = ImageFilesDAO.drop() + def dropImages(): Unit = ImageFilesDAO.drop() def imagesCount: Long = ImageFilesDAO.count() From 9e73ce18a4c4fe5b4354da51962365db303f63cf Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Mon, 4 Mar 2024 16:20:19 +0100 Subject: [PATCH 04/22] feat: support lucene query converter support search with long value on string field --- .../mongodb/lucene/LuceneQueryConverter.scala | 166 ++++++++++++------ .../lucene/MongoCampLuceneAnalyzer.scala | 3 +- src/test/resources/json/people.json | 2 +- .../driver/mongodb/dao/PersonDAOSpec.scala | 4 +- .../mongodb/gridfs/GridFSDatabaseSpec.scala | 8 +- .../mongodb/lucene/LuceneSearchSpec.scala | 10 ++ .../driver/mongodb/operation/IndexSpec.scala | 49 +++--- 7 files changed, 150 insertions(+), 92 deletions(-) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/lucene/LuceneQueryConverter.scala b/src/main/scala/dev/mongocamp/driver/mongodb/lucene/LuceneQueryConverter.scala index 38b81380..2d4227ac 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/lucene/LuceneQueryConverter.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/lucene/LuceneQueryConverter.scala @@ -16,8 +16,8 @@ import scala.jdk.CollectionConverters._ object LuceneQueryConverter extends LazyLogging { - def toDocument(query: Query): Bson = { - getMongoDbSearchMap(query, false) + def toDocument(query: Query, searchWithValueAndString: Boolean = false): Bson = { + getMongoDbSearchMap(query, negated = false, searchWithValueAndString) } def parse(queryString: String, defaultField: String): Query = { @@ -30,59 +30,59 @@ object LuceneQueryConverter extends LazyLogging { query } - private def getMongoDbSearchMap(query: Query, negated: Boolean): Map[String, Any] = { + private def getMongoDbSearchMap(query: Query, negated: Boolean, searchWithValueAndString: Boolean): Map[String, Any] = { val searchMapResponse = mutable.Map[String, Any]() query match { - case booleanQuery: BooleanQuery => - 
appendBooleanQueryToSearchMap(searchMapResponse, booleanQuery) - case termRangeQuery: TermRangeQuery => - appendTermRangeQueryToSearchMap(negated, searchMapResponse, termRangeQuery) - case termQuery: TermQuery => - appendTermQueryToSearchMap(negated, searchMapResponse, termQuery) - case query: PrefixQuery => - appendPrefixQueryToSearchMap(negated, searchMapResponse, query) - case query: WildcardQuery => - appendWildCardQueryToSearchMap(negated, searchMapResponse, query) - case query: PhraseQuery => - appendPhraseQueryToSearchMap(negated, searchMapResponse, query) + case booleanQuery: BooleanQuery => appendBooleanQueryToSearchMap(searchMapResponse, booleanQuery, searchWithValueAndString) + case termRangeQuery: TermRangeQuery => appendTermRangeQueryToSearchMap(negated, searchMapResponse, termRangeQuery, searchWithValueAndString) + case termQuery: TermQuery => appendTermQueryToSearchMap(negated, searchMapResponse, termQuery, searchWithValueAndString) + case query: PrefixQuery => appendPrefixQueryToSearchMap(negated, searchMapResponse, query) + case query: WildcardQuery => appendWildCardQueryToSearchMap(negated, searchMapResponse, query) + case query: PhraseQuery => appendPhraseQueryToSearchMap(negated, searchMapResponse, query) case a: Any => - logger.error(s"Unexpected QueryType <${a.getClass.getSimpleName}>") + val simpleNameOption = Option(a.getClass.getSimpleName).filterNot(s => s.trim.equalsIgnoreCase("")) + if (simpleNameOption.isDefined) { + logger.error(s"Unexpected QueryType <${a.getClass.getSimpleName}>") + } } searchMapResponse.toMap - } - private def appendBooleanQueryToSearchMap(searchMapResponse: mutable.Map[String, Any], booleanQuery: BooleanQuery): Unit = { + + private def appendBooleanQueryToSearchMap( + searchMapResponse: mutable.Map[String, Any], + booleanQuery: BooleanQuery, + searchWithValueAndString: Boolean + ): Unit = { val subQueries = booleanQuery.clauses().asScala val listOfAnd = ArrayBuffer[Map[String, Any]]() val listOfOr = 
ArrayBuffer[Map[String, Any]]() var nextTypeAnd = true - subQueries - .foreach(c => { - val queryMap = getMongoDbSearchMap(c.getQuery, c.isProhibited) - var thisTypeAnd = true + subQueries.foreach(c => { + val queryMap = getMongoDbSearchMap(c.getQuery, c.isProhibited, searchWithValueAndString) + var thisTypeAnd = true - if (c.getOccur == Occur.MUST) { - thisTypeAnd = true - } - else if (c.getOccur == Occur.SHOULD) { - thisTypeAnd = false - } - else if (c.getOccur == Occur.MUST_NOT) { - // searchMapResponse ++= queryMap - } - else { - logger.error(s"Unexpected Occur <${c.getOccur.name()}>") - throw new NotSupportedException(s"${c.getOccur.name()} currently not supported") - } + if (c.getOccur == Occur.MUST) { + thisTypeAnd = true + } + else if (c.getOccur == Occur.SHOULD) { + thisTypeAnd = false + } + else if (c.getOccur == Occur.MUST_NOT) { + // searchMapResponse ++= queryMap + } + else { + logger.error(s"Unexpected Occur <${c.getOccur.name()}>") + throw new NotSupportedException(s"${c.getOccur.name()} currently not supported") + } - if (nextTypeAnd && thisTypeAnd) { - listOfAnd += queryMap - } - else { - listOfOr += queryMap - } - nextTypeAnd = thisTypeAnd - }) + if (nextTypeAnd && thisTypeAnd) { + listOfAnd += queryMap + } + else { + listOfOr += queryMap + } + nextTypeAnd = thisTypeAnd + }) if (listOfAnd.nonEmpty) { searchMapResponse.put("$and", listOfAnd.toList) @@ -91,25 +91,77 @@ object LuceneQueryConverter extends LazyLogging { searchMapResponse.put("$or", listOfOr.toList) } } - private def appendTermRangeQueryToSearchMap(negated: Boolean, searchMapResponse: mutable.Map[String, Any], termRangeQuery: TermRangeQuery): Unit = { - val lowerBound = checkAndConvertValue(new String(termRangeQuery.getLowerTerm.bytes)) - val upperBound = checkAndConvertValue(new String(termRangeQuery.getUpperTerm.bytes)) - val inRangeSearch = Map("$lte" -> upperBound, "$gte" -> lowerBound) + + private def appendTermRangeQueryToSearchMap( + negated: Boolean, + searchMapResponse: 
mutable.Map[String, Any], + termRangeQuery: TermRangeQuery, + searchWithValueAndString: Boolean + ): Unit = { + val lowerBoundString = new String(termRangeQuery.getLowerTerm.bytes) + val lowerBound = checkAndConvertValue(lowerBoundString) + val upperBoundString = new String(termRangeQuery.getUpperTerm.bytes) + val upperBound = checkAndConvertValue(upperBoundString) + + val searchWithStringValue = searchWithValueAndString && (lowerBoundString != lowerBound || upperBoundString != upperBound) + + val inRangeSearch = Map("$lte" -> upperBound, "$gte" -> lowerBound) + val inRangeStringSearch = Map("$lte" -> upperBoundString, "$gte" -> lowerBoundString) if (negated) { - searchMapResponse.put(termRangeQuery.getField, Map("$not" -> inRangeSearch)) + if (searchWithStringValue) { + searchMapResponse.put( + "$and", + List(Map(termRangeQuery.getField -> Map("$not" -> inRangeSearch)), Map(termRangeQuery.getField -> Map("$not" -> inRangeStringSearch))) + ) + } + else { + searchMapResponse.put(termRangeQuery.getField, Map("$not" -> inRangeSearch)) + } } else { - searchMapResponse.put(termRangeQuery.getField, inRangeSearch) + if (searchWithStringValue) { + searchMapResponse.put( + "$or", + List(Map(termRangeQuery.getField -> inRangeSearch), Map(termRangeQuery.getField -> inRangeStringSearch)) + ) + } + else { + searchMapResponse.put(termRangeQuery.getField, inRangeSearch) + } } } - private def appendTermQueryToSearchMap(negated: Boolean, searchMapResponse: mutable.Map[String, Any], termQuery: TermQuery): Unit = { + + private def appendTermQueryToSearchMap( + negated: Boolean, + searchMapResponse: mutable.Map[String, Any], + termQuery: TermQuery, + searchWithValueAndString: Boolean + ): Unit = { + val convertedValue = checkAndConvertValue(termQuery.getTerm.text()) if (negated) { - searchMapResponse.put(termQuery.getTerm.field(), Map("$ne" -> checkAndConvertValue(termQuery.getTerm.text()))) + if (!searchWithValueAndString || convertedValue == termQuery.getTerm.text()) { + 
searchMapResponse.put(termQuery.getTerm.field(), Map("$ne" -> convertedValue)) + } + else { + searchMapResponse.put( + "$and", + List(Map(termQuery.getTerm.field() -> Map("$ne" -> convertedValue)), Map(termQuery.getTerm.field() -> Map("$ne" -> termQuery.getTerm.text()))) + ) + } } else { - searchMapResponse.put(termQuery.getTerm.field(), Map("$eq" -> checkAndConvertValue(termQuery.getTerm.text()))) + if (!searchWithValueAndString || convertedValue == termQuery.getTerm.text()) { + searchMapResponse.put(termQuery.getTerm.field(), Map("$eq" -> convertedValue)) + } + else { + searchMapResponse.put( + "$or", + List(Map(termQuery.getTerm.field() -> Map("$eq" -> convertedValue)), Map(termQuery.getTerm.field() -> Map("$eq" -> termQuery.getTerm.text()))) + ) + } } } + private def appendPrefixQueryToSearchMap(negated: Boolean, searchMapResponse: mutable.Map[String, Any], query: PrefixQuery): Unit = { val searchValue = s"${checkAndConvertValue(query.getPrefix.text())}(.*?)" val listOfSearches: List[Bson] = List(Map(query.getField -> generateRegexQuery(s"$searchValue", "i"))) @@ -120,6 +172,7 @@ object LuceneQueryConverter extends LazyLogging { searchMapResponse ++= Map("$and" -> listOfSearches) } } + private def appendWildCardQueryToSearchMap(negated: Boolean, searchMapResponse: mutable.Map[String, Any], query: WildcardQuery): Unit = { val searchValue = checkAndConvertValue(query.getTerm.text().replace("*", "(.*?)")) if (negated) { @@ -129,10 +182,9 @@ object LuceneQueryConverter extends LazyLogging { searchMapResponse.put(query.getField, generateRegexQuery(s"$searchValue", "i")) } } + private def appendPhraseQueryToSearchMap(negated: Boolean, searchMapResponse: mutable.Map[String, Any], query: PhraseQuery): Unit = { - val listOfSearches = query.getTerms - .map(term => Map(term.field() -> generateRegexQuery(s"(.*?)${checkAndConvertValue(term.text())}(.*?)", "i"))) - .toList + val listOfSearches = query.getTerms.map(term => Map(term.field() -> 
generateRegexQuery(s"(.*?)${checkAndConvertValue(term.text())}(.*?)", "i"))).toList if (negated) { searchMapResponse.put("$nor", listOfSearches) } @@ -140,9 +192,11 @@ object LuceneQueryConverter extends LazyLogging { searchMapResponse ++= Map("$and" -> listOfSearches) } } + private def generateRegexQuery(pattern: String, options: String): Map[String, String] = { Map("$regex" -> pattern, "$options" -> options) } + private def checkAndConvertValue(s: String): Any = { def checkOrReturn[A <: Any](f: () => A): Option[A] = { @@ -156,7 +210,7 @@ object LuceneQueryConverter extends LazyLogging { } } catch { - case e: Exception => None + case _: Exception => None } } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/lucene/MongoCampLuceneAnalyzer.scala b/src/main/scala/dev/mongocamp/driver/mongodb/lucene/MongoCampLuceneAnalyzer.scala index 15cfd6a4..ca2a1c86 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/lucene/MongoCampLuceneAnalyzer.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/lucene/MongoCampLuceneAnalyzer.scala @@ -10,12 +10,11 @@ class MongoCampLuceneAnalyzer(stopWords: CharArraySet = CharArraySet.EMPTY_SET, override protected def createComponents(fieldName: String): Analyzer.TokenStreamComponents = { val src = new StandardTokenizer src.setMaxTokenLength(maxTokenLength) - val tok: TokenStream = new StopFilter(src, stopwords) + val tok: TokenStream = new StopFilter(src, stopWords) new Analyzer.TokenStreamComponents( (r: Reader) => { src.setMaxTokenLength(maxTokenLength) src.setReader(r) - }, tok ) diff --git a/src/test/resources/json/people.json b/src/test/resources/json/people.json index 5959829a..5ca35848 100644 --- a/src/test/resources/json/people.json +++ b/src/test/resources/json/people.json @@ -1,4 +1,4 @@ -{ "_id" : { "$oid" : "5e9ef66185c0145fa5d3c447" }, "id" : { "$numberLong" : "0" }, "guid" : "a17be99a-8913-4bb6-8f14-16d4fa1b3559", "isActive" : true, "balance" : 3349.0, "picture" : "http://placehold.it/32x32", "age" : 25, "name" : 
"Cheryl Hoffman", "gender" : "female", "email" : "cherylhoffman@melbacor.com", "phone" : "+1 (942) 477-2284", "address" : "308 Just Court, Rose, Maine, 4477", "about" : "Adipisicing aliquip deserunt mollit veniam. Quis mollit cupidatat laboris dolor incididunt esse voluptate amet aute. Sit labore magna minim ex aliquip do labore ullamco labore labore.\r\n", "registered" : { "$date" : "2014-02-08T02:10:36.000+0000" }, "tags" : [ "esse", "sunt", "exercitation", "nostrud", "aliqua", "voluptate", "ullamco" ], "friends" : [ { "id" : { "$numberLong" : "0" }, "name" : "Castaneda Mccullough" }, { "id" : { "$numberLong" : "1" }, "name" : "Black Whitaker" }, { "id" : { "$numberLong" : "2" }, "name" : "Wendy Strong" }, { "id" : { "$numberLong" : "3" }, "name" : "Dixie Reilly" }, { "id" : { "$numberLong" : "4" }, "name" : "Deleon Turner" } ], "greeting" : "Hello, Cheryl Hoffman! You have 9 unread messages.", "favoriteFruit" : "apple" } +{ "_id" : { "$oid" : "5e9ef66185c0145fa5d3c447" }, "id" : { "$numberLong" : "0" }, "guid" : "a17be99a-8913-4bb6-8f14-16d4fa1b3559", "isActive" : true, "balance" : 3349.0, "picture" : "http://placehold.it/32x32", "age" : 25, "name" : "Cheryl Hoffman", "gender" : "female", "email" : "cherylhoffman@melbacor.com", "phone" : "+1 (942) 477-2284", "address" : "308 Just Court, Rose, Maine, 4477", "about" : "Adipisicing aliquip deserunt mollit veniam. Quis mollit cupidatat laboris dolor incididunt esse voluptate amet aute. 
Sit labore magna minim ex aliquip do labore ullamco labore labore.\r\n", "registered" : { "$date" : "2014-02-08T02:10:36.000+0000" }, "tags" : [ "esse", "sunt", "exercitation", "nostrud", "aliqua", "voluptate", "ullamco" ], "friends" : [ { "id" : { "$numberLong" : "0" }, "name" : "Castaneda Mccullough" }, { "id" : { "$numberLong" : "1" }, "name" : "Black Whitaker" }, { "id" : { "$numberLong" : "2" }, "name" : "Wendy Strong" }, { "id" : { "$numberLong" : "3" }, "name" : "Dixie Reilly" }, { "id" : { "$numberLong" : "4" }, "name" : "Deleon Turner" } ], "greeting" : "Hello, Cheryl Hoffman! You have 9 unread messages.", "favoriteFruit" : "apple", "stringNumber": "123" } { "_id" : { "$oid" : "5e9ef66185c0145fa5d3c448" }, "id" : { "$numberLong" : "1" }, "guid" : "19ebe4fe-f860-4cbc-ac0a-664a418e2173", "isActive" : true, "balance" : 2316.0, "picture" : "http://placehold.it/32x32", "age" : 25, "name" : "Bowen Leon", "gender" : "male", "email" : "bowenleon@inrt.com", "phone" : "+1 (904) 457-2017", "address" : "138 Miami Court, Urbana, Kansas, 1034", "about" : "Commodo in mollit laboris incididunt excepteur nulla cillum sunt do occaecat Lorem. Excepteur esse id magna pariatur irure anim officia exercitation veniam anim dolor. Sunt irure est dolore nisi nulla nulla. Nostrud aliquip exercitation ut adipisicing esse ullamco incididunt mollit laborum duis exercitation. Ipsum commodo excepteur nulla sit irure laboris magna ipsum Lorem.\r\n", "registered" : { "$date" : "2014-01-26T16:08:40.000+0000" }, "tags" : [ "ipsum", "qui", "proident", "sunt", "cillum", "veniam", "laboris" ], "friends" : [ { "id" : { "$numberLong" : "0" }, "name" : "Reyes Velasquez" }, { "id" : { "$numberLong" : "1" }, "name" : "Rosalie Hooper" }, { "id" : { "$numberLong" : "2" }, "name" : "Alyssa David" } ], "greeting" : "Hello, Bowen Leon! 
You have 9 unread messages.", "favoriteFruit" : "apple" } { "_id" : { "$oid" : "5e9ef66185c0145fa5d3c449" }, "id" : { "$numberLong" : "2" }, "guid" : "6ee53e07-2e61-48cd-9bc9-b3505a0438f3", "isActive" : false, "balance" : 1527.0, "picture" : "http://placehold.it/32x32", "age" : 40, "name" : "Cecilia Lynn", "gender" : "female", "email" : "cecilialynn@medicroix.com", "phone" : "+1 (875) 525-3138", "address" : "124 Herzl Street, Greenwich, Arkansas, 5309", "about" : "Esse adipisicing ipsum esse consectetur eu ad sunt sit culpa enim velit elit velit deserunt. Aliqua nulla et laboris nulla aute excepteur Lorem. Ut aliquip non excepteur exercitation consectetur anim est ex irure dolore ut. Consequat enim enim dolor excepteur mollit consectetur. Magna sunt reprehenderit est quis.\r\n", "registered" : { "$date" : "2014-02-21T23:13:05.000+0000" }, "tags" : [ "eiusmod", "minim", "magna", "est", "laborum", "nisi", "qui" ], "friends" : [ { "id" : { "$numberLong" : "0" }, "name" : "Erika Harmon" }, { "id" : { "$numberLong" : "1" }, "name" : "Horn Larsen" }, { "id" : { "$numberLong" : "2" }, "name" : "Gertrude Fuller" }, { "id" : { "$numberLong" : "3" }, "name" : "Spencer Hutchinson" }, { "id" : { "$numberLong" : "4" }, "name" : "Beryl Buckley" } ], "greeting" : "Hello, Cecilia Lynn! You have 7 unread messages.", "favoriteFruit" : "strawberry" } { "_id" : { "$oid" : "5e9ef66185c0145fa5d3c44a" }, "id" : { "$numberLong" : "3" }, "guid" : "a01c8bb6-95ac-4235-b6b3-475734f0dd92", "isActive" : false, "balance" : 2682.0, "picture" : "http://placehold.it/32x32", "age" : 24, "name" : "Sylvia Ortega", "gender" : "female", "email" : "sylviaortega@viagrand.com", "phone" : "+1 (983) 470-3157", "address" : "617 Vernon Avenue, Advance, Connecticut, 7787", "about" : "Tempor aliquip dolor excepteur proident ex magna commodo laboris. Ullamco ex esse excepteur nostrud. Duis ex anim pariatur dolore ut irure. Consequat non Lorem laborum esse anim magna consequat voluptate dolor elit. 
Mollit sint consequat ipsum minim id anim aute reprehenderit eu velit voluptate commodo.\r\n", "registered" : { "$date" : "2014-01-13T07:33:15.000+0000" }, "tags" : [ "ut", "culpa", "reprehenderit", "ad", "amet", "officia", "nostrud" ], "friends" : [ { "id" : { "$numberLong" : "0" }, "name" : "Ferrell Rhodes" }, { "id" : { "$numberLong" : "1" }, "name" : "Ana Guy" }, { "id" : { "$numberLong" : "2" }, "name" : "Rosanne Griffin" }, { "id" : { "$numberLong" : "3" }, "name" : "Morrow Adams" }, { "id" : { "$numberLong" : "4" }, "name" : "Keri White" }, { "id" : { "$numberLong" : "5" }, "name" : "Tracey Sykes" } ], "greeting" : "Hello, Sylvia Ortega! You have 9 unread messages.", "favoriteFruit" : "apple" } diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSpec.scala index 396bf7a7..3d5cccd7 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSpec.scala @@ -19,8 +19,8 @@ class PersonDAOSpec extends PersonSpecification with MongoImplicits { } "support columnNames" in { - val columnNames = PersonDAO.columnNames(100) - columnNames.size mustEqual 18 + val columnNames = PersonDAO.columnNames(200) + columnNames.size mustEqual 19 } "support results" in { diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSpec.scala index 89439f48..ec47d97f 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSpec.scala @@ -2,14 +2,12 @@ package dev.mongocamp.driver.mongodb.gridfs import better.files.File import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.test.TestDatabase._ import dev.mongocamp.driver.mongodb.model.ImageMetadata +import dev.mongocamp.driver.mongodb.test.TestDatabase._ 
import org.bson.types.ObjectId import org.specs2.mutable.Specification import org.specs2.specification.BeforeAll -import scala.io.Source - class GridFSDatabaseSpec extends Specification with GridfsDatabaseFunctions with BeforeAll { "GridFSDatabase" should { @@ -58,7 +56,7 @@ class GridFSDatabaseSpec extends Specification with GridfsDatabaseFunctions with files.head.getMetadata.get("name").toString must be equalTo "logo2" // update complete metadata for one file - updateMetadata(files.head, ImageMetadata("logo22", group = "logos")) + updateMetadata(files.head, ImageMetadata("logo22")) // update metadata entry for all files updateMetadataElements(Map(), Map("group" -> "logos3", "newKey" -> "newEntryValue")) @@ -80,7 +78,7 @@ class GridFSDatabaseSpec extends Specification with GridfsDatabaseFunctions with } override def beforeAll(): Unit = { - dropImages + dropImages() insertImage(ImageDAOSourcePath + "scala-logo.jpg", ImageMetadata("logo2", indexSet = Set(5, 6, 7))) imagesCount must be equalTo 1 diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSpec.scala index de0db498..fa3f4090 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSpec.scala @@ -9,6 +9,16 @@ class LuceneSearchSpec extends PersonSpecification { "LuceneSearch" should { + "search with with number in string" in { + val luceneQuery = LuceneQueryConverter.parse("stringNumber: 123", "id") + val search2 = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() + search2 must haveSize(0) + val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery, searchWithValueAndString = true), sortByBalance).resultList() + search must haveSize(1) + search.head.age mustEqual 25 + search.head.name mustEqual "Cheryl Hoffman" + } + "search with extended query" in { val 
luceneQuery = LuceneQueryConverter.parse("(favoriteFruit:\"apple\" AND age:\"25\") OR name:*Cecile* AND -active:false AND 123", "id") // #region lucene-parser-with-explicit diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSpec.scala index 5def73cb..60d0bd5d 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSpec.scala @@ -15,17 +15,17 @@ class IndexSpec extends PersonSpecification { "create / drop indexes for key" in { - var createIndexResult: String = PersonDAO.createIndexForField("name").result() + val createIndexResult: String = PersonDAO.createIndexForField("name").result() createIndexResult mustEqual "name_1" - PersonDAO.indexList must haveSize(2) + PersonDAO.indexList() must haveSize(2) val index: MongoIndex = PersonDAO.indexForName("name_1").get index.expire must beFalse - val dropIndexResult: Unit = PersonDAO.dropIndexForName(createIndexResult).result() + PersonDAO.dropIndexForName(createIndexResult).result() - PersonDAO.indexList must haveSize(1) + PersonDAO.indexList() must haveSize(1) } "evaluate has index" in { @@ -37,77 +37,74 @@ class IndexSpec extends PersonSpecification { "create descending index for key" in { - var createIndexResult: String = - PersonDAO.createIndexForFieldWithName("name", sortAscending = false, "myIndex").result() + val createIndexResult: String = PersonDAO.createIndexForFieldWithName("name", sortAscending = false, "myIndex").result() createIndexResult mustEqual "myIndex" - PersonDAO.indexList must haveSize(2) - val index: MongoIndex = PersonDAO.indexForName("myIndex").get + PersonDAO.indexList() must haveSize(2) + PersonDAO.indexForName("myIndex").get PersonDAO.dropIndexForName(createIndexResult).result() - PersonDAO.indexList must haveSize(1) + PersonDAO.indexList() must haveSize(1) } "create unique index for key" in { - var 
createIndexResult: String = - PersonDAO.createUniqueIndexForField("id", sortAscending = false, Some("myUniqueIndex")).result() + val createIndexResult: String = PersonDAO.createUniqueIndexForField("id", sortAscending = false, Some("myUniqueIndex")).result() createIndexResult mustEqual "myUniqueIndex" - PersonDAO.indexList must haveSize(2) - val index: MongoIndex = PersonDAO.indexForName("myUniqueIndex").get + PersonDAO.indexList() must haveSize(2) + PersonDAO.indexForName("myUniqueIndex").get PersonDAO.dropIndexForName(createIndexResult).result() - PersonDAO.indexList must haveSize(1) + PersonDAO.indexList() must haveSize(1) } "create text index for key" in { - var createIndexResult: String = PersonDAO.createTextIndexForField("email").result() + val createIndexResult: String = PersonDAO.createTextIndexForField("email").result() createIndexResult mustEqual "email_text" - PersonDAO.indexList must haveSize(2) - val index: MongoIndex = PersonDAO.indexForName("email_text").get + PersonDAO.indexList() must haveSize(2) + PersonDAO.indexForName("email_text").get PersonDAO.dropIndexForName(createIndexResult).result() - PersonDAO.indexList must haveSize(1) + PersonDAO.indexList() must haveSize(1) } "create hashed index for key" in { - var createIndexResult: String = PersonDAO.createHashedIndexForField("email").result() + val createIndexResult: String = PersonDAO.createHashedIndexForField("email").result() createIndexResult mustEqual "email_hashed" - PersonDAO.indexList must haveSize(2) - val index: MongoIndex = PersonDAO.indexForName("email_hashed").get + PersonDAO.indexList() must haveSize(2) + PersonDAO.indexForName("email_hashed").get PersonDAO.dropIndexForName(createIndexResult).result() - PersonDAO.indexList must haveSize(1) + PersonDAO.indexList() must haveSize(1) } "create expiring index for key" in { - var createIndexResult: String = - PersonDAO.createExpiringIndexForField("email", Duration(1, TimeUnit.SECONDS)).result() + val createIndexResult: String = 
PersonDAO.createExpiringIndexForField("email", Duration(1, TimeUnit.SECONDS)).result() createIndexResult mustEqual "email_1" - PersonDAO.indexList must haveSize(2) + PersonDAO.indexList() must haveSize(2) val index: MongoIndex = PersonDAO.indexForName("email_1").get index.expire must beTrue PersonDAO.dropIndexForName(createIndexResult).result() - PersonDAO.indexList must haveSize(1) + PersonDAO.indexList() must haveSize(1) } "return an index list" in { From c6fe36b5c41e356d9540b9e85d30ca61f50dc9aa Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Wed, 6 Mar 2024 08:01:47 +0100 Subject: [PATCH 05/22] feat(jdbc): start implementing jdbc driver for mongodb --- build.sbt | 2 + scalastyle-config.xml | 117 ------ .../META-INF/services/java.sql.Driver | 1 + .../driver/mongodb/database/MongoConfig.scala | 111 +++--- .../mongodb/jdbc/MongoDatabaseMetaData.scala | 358 ++++++++++++++++++ .../mongodb/jdbc/MongoJdbcConnection.scala | 174 +++++++++ .../driver/mongodb/jdbc/MongoJdbcDriver.scala | 60 +++ .../mongodb/jdbc/MongoPreparedStatement.scala | 306 +++++++++++++++ .../jdbc/SQLAlreadyClosedException.scala | 5 + 9 files changed, 970 insertions(+), 164 deletions(-) delete mode 100644 scalastyle-config.xml create mode 100644 src/main/resources/META-INF/services/java.sql.Driver create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcConnection.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoPreparedStatement.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/jdbc/SQLAlreadyClosedException.scala diff --git a/build.sbt b/build.sbt index bc34a543..3701e4a7 100644 --- a/build.sbt +++ b/build.sbt @@ -97,6 +97,8 @@ libraryDependencies += "com.typesafe.scala-logging" %% "scala-logging" % "3.9.5" libraryDependencies += 
"org.scala-lang.modules" %% "scala-collection-compat" % "2.11.0" +libraryDependencies += ("com.vdurmont" % "semver4j" % "3.1.0") + buildInfoPackage := "dev.mongocamp.driver.mongodb" buildInfoOptions += BuildInfoOption.BuildTime diff --git a/scalastyle-config.xml b/scalastyle-config.xml deleted file mode 100644 index 11b327cb..00000000 --- a/scalastyle-config.xml +++ /dev/null @@ -1,117 +0,0 @@ - - Scalastyle standard configuration - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/src/main/resources/META-INF/services/java.sql.Driver b/src/main/resources/META-INF/services/java.sql.Driver new file mode 100644 index 00000000..091eb819 --- /dev/null +++ b/src/main/resources/META-INF/services/java.sql.Driver @@ -0,0 +1 @@ +dev.mongocamp.driver.mongodb.jdbc.MongoJdbcDriver \ No newline at end of file diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala index f628cb0f..c0c90657 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala @@ -3,29 +3,29 @@ package dev.mongocamp.driver.mongodb.database import java.util.concurrent.TimeUnit import com.mongodb.MongoCompressor import com.mongodb.MongoCredential.createCredential -import com.mongodb.event.{ CommandListener, ConnectionPoolListener } +import com.mongodb.event.{CommandListener, ConnectionPoolListener} import dev.mongocamp.driver.mongodb.database.MongoConfig._ -import com.typesafe.config.{ Config, ConfigFactory } +import com.typesafe.config.{Config, ConfigFactory} import org.mongodb.scala.connection._ -import org.mongodb.scala.{ MongoClientSettings, MongoCredential, ServerAddress } +import org.mongodb.scala.{MongoClientSettings, 
MongoCredential, ServerAddress} import scala.jdk.CollectionConverters._ import scala.collection.mutable.ArrayBuffer case class MongoConfig( - database: String, - host: String = DefaultHost, - port: Int = DefaultPort, - applicationName: String = DefaultApplicationName, - userName: Option[String] = None, - password: Option[String] = None, - authDatabase: String = DefaultAuthenticationDatabaseName, - poolOptions: MongoPoolOptions = MongoPoolOptions(), - compressors: List[String] = List(), - connectionPoolListener: List[ConnectionPoolListener] = List(), - commandListener: List[CommandListener] = List(), - customClientSettings: Option[MongoClientSettings] = None -) { + database: String, + host: String = DefaultHost, + port: Int = DefaultPort, + applicationName: String = DefaultApplicationName, + userName: Option[String] = None, + password: Option[String] = None, + authDatabase: String = DefaultAuthenticationDatabaseName, + poolOptions: MongoPoolOptions = MongoPoolOptions(), + compressors: List[String] = List(), + connectionPoolListener: List[ConnectionPoolListener] = List(), + commandListener: List[CommandListener] = List(), + customClientSettings: Option[MongoClientSettings] = None + ) { val clientSettings: MongoClientSettings = { if (customClientSettings.isDefined) { @@ -81,71 +81,88 @@ case class MongoConfig( trait ConfigHelper { val conf: Config = ConfigFactory.load() - def stringConfig(configPath: String, key: String, default: String = ""): Option[String] = + def stringConfig(configPath: String, key: String, default: String = ""): Option[String] = { if (conf.hasPath("%s.%s".format(configPath, key))) { val str = conf.getString("%s.%s".format(configPath, key)) - if (str.nonEmpty) + if (str.nonEmpty) { Some(str) - else + } + else { None + } } - else if (default.nonEmpty) + else if (default.nonEmpty) { Some(default) - else + } + else { None + } + } - def intConfig(configPath: String, key: String, default: Int = 0): Int = - if (conf.hasPath("%s.%s".format(configPath, 
key))) + def intConfig(configPath: String, key: String, default: Int = 0): Int = { + if (conf.hasPath("%s.%s".format(configPath, key))) { conf.getInt("%s.%s".format(configPath, key)) - else + } + else { default + } + } - def booleanConfig(configPath: String, key: String, default: Boolean = false): Boolean = - if (conf.hasPath("%s.%s".format(configPath, key))) + def booleanConfig(configPath: String, key: String, default: Boolean = false): Boolean = { + if (conf.hasPath("%s.%s".format(configPath, key))) { conf.getBoolean("%s.%s".format(configPath, key)) - else + } + else { default + } + } } object MongoConfig extends ConfigHelper { - val DefaultHost = "127.0.0.1" - val DefaultPort = 27017 + val DefaultHost = "127.0.0.1" + val DefaultPort = 27017 val DefaultAuthenticationDatabaseName = "admin" - val DefaultApplicationName = "mongocampdb-app" + val DefaultApplicationName = "mongocampdb-app" - val DefaultPoolMaxConnectionIdleTime = 60 - val DefaultPoolMaxSize = 50 - val DefaultPoolMinSize = 0 - val DefaultPoolMaxWaitQueueSize = 500 + val DefaultPoolMaxConnectionIdleTime = 60 + val DefaultPoolMaxSize = 50 + val DefaultPoolMinSize = 0 + val DefaultPoolMaxWaitQueueSize = 500 val DefaultPoolMaintenanceInitialDelay = 0 val ComressionSnappy = "snappy" - val ComressionZlib = "zlib" - val ComressionZstd = "zstd" + val ComressionZlib = "zlib" + val ComressionZstd = "zstd" val DefaultConfigPathPrefix = "mongodb" def fromPath(configPath: String = DefaultConfigPathPrefix): MongoConfig = { - def poolOptionsConfig(key: String, default: Int): Int = - if (conf.hasPath("%s.pool.%s".format(configPath, key))) + def poolOptionsConfig(key: String, default: Int): Int = { + if (conf.hasPath("%s.pool.%s".format(configPath, key))) { conf.getInt("%s.pool.%s".format(configPath, key)) - else + } + else { default + } + } val port: Int = intConfig(configPath, "port", DefaultPort) - val compressors: List[String] = - if (conf.hasPath("%s.compressors".format(configPath))) + val compressors: 
List[String] = { + if (conf.hasPath("%s.compressors".format(configPath))) { conf.getStringList("%s.compressors".format(configPath)).asScala.toList - else + } + else { List() + } + } - val host = stringConfig(configPath, "host", DefaultHost).get - val database = stringConfig(configPath, "database").get - val userName = stringConfig(configPath, "userName") - val password = stringConfig(configPath, "password") - val authDatabase = stringConfig(configPath, "authDatabase", DefaultAuthenticationDatabaseName).get + val host = stringConfig(configPath, "host", DefaultHost).get + val database = stringConfig(configPath, "database").get + val userName = stringConfig(configPath, "userName") + val password = stringConfig(configPath, "password") + val authDatabase = stringConfig(configPath, "authDatabase", DefaultAuthenticationDatabaseName).get val applicationName = stringConfig(configPath, "applicationName", DefaultApplicationName).get val poolOptions = MongoPoolOptions( diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala new file mode 100644 index 00000000..a1eed1b0 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala @@ -0,0 +1,358 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import java.sql.{Connection, DatabaseMetaData, ResultSet, RowIdLifetime} + +class MongoDatabaseMetaData extends DatabaseMetaData{ + + override def allProceduresAreCallable(): Boolean = ??? + + override def allTablesAreSelectable(): Boolean = ??? + + override def getURL: String = ??? + + override def getUserName: String = ??? + + override def isReadOnly: Boolean = ??? + + override def nullsAreSortedHigh(): Boolean = ??? + + override def nullsAreSortedLow(): Boolean = ??? + + override def nullsAreSortedAtStart(): Boolean = ??? + + override def nullsAreSortedAtEnd(): Boolean = ??? + + override def getDatabaseProductName: String = ??? 
+ + override def getDatabaseProductVersion: String = ??? + + override def getDriverName: String = ??? + + override def getDriverVersion: String = ??? + + override def getDriverMajorVersion: Int = ??? + + override def getDriverMinorVersion: Int = ??? + + override def usesLocalFiles(): Boolean = ??? + + override def usesLocalFilePerTable(): Boolean = ??? + + override def supportsMixedCaseIdentifiers(): Boolean = ??? + + override def storesUpperCaseIdentifiers(): Boolean = ??? + + override def storesLowerCaseIdentifiers(): Boolean = ??? + + override def storesMixedCaseIdentifiers(): Boolean = ??? + + override def supportsMixedCaseQuotedIdentifiers(): Boolean = ??? + + override def storesUpperCaseQuotedIdentifiers(): Boolean = ??? + + override def storesLowerCaseQuotedIdentifiers(): Boolean = ??? + + override def storesMixedCaseQuotedIdentifiers(): Boolean = ??? + + override def getIdentifierQuoteString: String = ??? + + override def getSQLKeywords: String = ??? + + override def getNumericFunctions: String = ??? + + override def getStringFunctions: String = ??? + + override def getSystemFunctions: String = ??? + + override def getTimeDateFunctions: String = ??? + + override def getSearchStringEscape: String = ??? + + override def getExtraNameCharacters: String = ??? + + override def supportsAlterTableWithAddColumn(): Boolean = ??? + + override def supportsAlterTableWithDropColumn(): Boolean = ??? + + override def supportsColumnAliasing(): Boolean = ??? + + override def nullPlusNonNullIsNull(): Boolean = ??? + + override def supportsConvert(): Boolean = ??? + + override def supportsConvert(fromType: Int, toType: Int): Boolean = ??? + + override def supportsTableCorrelationNames(): Boolean = ??? + + override def supportsDifferentTableCorrelationNames(): Boolean = ??? + + override def supportsExpressionsInOrderBy(): Boolean = ??? + + override def supportsOrderByUnrelated(): Boolean = ??? + + override def supportsGroupBy(): Boolean = ??? 
+ + override def supportsGroupByUnrelated(): Boolean = ??? + + override def supportsGroupByBeyondSelect(): Boolean = ??? + + override def supportsLikeEscapeClause(): Boolean = ??? + + override def supportsMultipleResultSets(): Boolean = ??? + + override def supportsMultipleTransactions(): Boolean = ??? + + override def supportsNonNullableColumns(): Boolean = ??? + + override def supportsMinimumSQLGrammar(): Boolean = ??? + + override def supportsCoreSQLGrammar(): Boolean = ??? + + override def supportsExtendedSQLGrammar(): Boolean = ??? + + override def supportsANSI92EntryLevelSQL(): Boolean = ??? + + override def supportsANSI92IntermediateSQL(): Boolean = ??? + + override def supportsANSI92FullSQL(): Boolean = ??? + + override def supportsIntegrityEnhancementFacility(): Boolean = ??? + + override def supportsOuterJoins(): Boolean = ??? + + override def supportsFullOuterJoins(): Boolean = ??? + + override def supportsLimitedOuterJoins(): Boolean = ??? + + override def getSchemaTerm: String = ??? + + override def getProcedureTerm: String = ??? + + override def getCatalogTerm: String = ??? + + override def isCatalogAtStart: Boolean = ??? + + override def getCatalogSeparator: String = ??? + + override def supportsSchemasInDataManipulation(): Boolean = ??? + + override def supportsSchemasInProcedureCalls(): Boolean = ??? + + override def supportsSchemasInTableDefinitions(): Boolean = ??? + + override def supportsSchemasInIndexDefinitions(): Boolean = ??? + + override def supportsSchemasInPrivilegeDefinitions(): Boolean = ??? + + override def supportsCatalogsInDataManipulation(): Boolean = ??? + + override def supportsCatalogsInProcedureCalls(): Boolean = ??? + + override def supportsCatalogsInTableDefinitions(): Boolean = ??? + + override def supportsCatalogsInIndexDefinitions(): Boolean = ??? + + override def supportsCatalogsInPrivilegeDefinitions(): Boolean = ??? + + override def supportsPositionedDelete(): Boolean = ??? 
+ + override def supportsPositionedUpdate(): Boolean = ??? + + override def supportsSelectForUpdate(): Boolean = ??? + + override def supportsStoredProcedures(): Boolean = ??? + + override def supportsSubqueriesInComparisons(): Boolean = ??? + + override def supportsSubqueriesInExists(): Boolean = ??? + + override def supportsSubqueriesInIns(): Boolean = ??? + + override def supportsSubqueriesInQuantifieds(): Boolean = ??? + + override def supportsCorrelatedSubqueries(): Boolean = ??? + + override def supportsUnion(): Boolean = ??? + + override def supportsUnionAll(): Boolean = ??? + + override def supportsOpenCursorsAcrossCommit(): Boolean = ??? + + override def supportsOpenCursorsAcrossRollback(): Boolean = ??? + + override def supportsOpenStatementsAcrossCommit(): Boolean = ??? + + override def supportsOpenStatementsAcrossRollback(): Boolean = ??? + + override def getMaxBinaryLiteralLength: Int = ??? + + override def getMaxCharLiteralLength: Int = ??? + + override def getMaxColumnNameLength: Int = ??? + + override def getMaxColumnsInGroupBy: Int = ??? + + override def getMaxColumnsInIndex: Int = ??? + + override def getMaxColumnsInOrderBy: Int = ??? + + override def getMaxColumnsInSelect: Int = ??? + + override def getMaxColumnsInTable: Int = ??? + + override def getMaxConnections: Int = ??? + + override def getMaxCursorNameLength: Int = ??? + + override def getMaxIndexLength: Int = ??? + + override def getMaxSchemaNameLength: Int = ??? + + override def getMaxProcedureNameLength: Int = ??? + + override def getMaxCatalogNameLength: Int = ??? + + override def getMaxRowSize: Int = ??? + + override def doesMaxRowSizeIncludeBlobs(): Boolean = ??? + + override def getMaxStatementLength: Int = ??? + + override def getMaxStatements: Int = ??? + + override def getMaxTableNameLength: Int = ??? + + override def getMaxTablesInSelect: Int = ??? + + override def getMaxUserNameLength: Int = ??? + + override def getDefaultTransactionIsolation: Int = ??? 
+ + override def supportsTransactions(): Boolean = ??? + + override def supportsTransactionIsolationLevel(level: Int): Boolean = ??? + + override def supportsDataDefinitionAndDataManipulationTransactions(): Boolean = ??? + + override def supportsDataManipulationTransactionsOnly(): Boolean = ??? + + override def dataDefinitionCausesTransactionCommit(): Boolean = ??? + + override def dataDefinitionIgnoredInTransactions(): Boolean = ??? + + override def getProcedures(catalog: String, schemaPattern: String, procedureNamePattern: String): ResultSet = ??? + + override def getProcedureColumns(catalog: String, schemaPattern: String, procedureNamePattern: String, columnNamePattern: String): ResultSet = ??? + + override def getTables(catalog: String, schemaPattern: String, tableNamePattern: String, types: Array[String]): ResultSet = ??? + + override def getSchemas: ResultSet = ??? + + override def getCatalogs: ResultSet = ??? + + override def getTableTypes: ResultSet = ??? + + override def getColumns(catalog: String, schemaPattern: String, tableNamePattern: String, columnNamePattern: String): ResultSet = ??? + + override def getColumnPrivileges(catalog: String, schema: String, table: String, columnNamePattern: String): ResultSet = ??? + + override def getTablePrivileges(catalog: String, schemaPattern: String, tableNamePattern: String): ResultSet = ??? + + override def getBestRowIdentifier(catalog: String, schema: String, table: String, scope: Int, nullable: Boolean): ResultSet = ??? + + override def getVersionColumns(catalog: String, schema: String, table: String): ResultSet = ??? + + override def getPrimaryKeys(catalog: String, schema: String, table: String): ResultSet = ??? + + override def getImportedKeys(catalog: String, schema: String, table: String): ResultSet = ??? + + override def getExportedKeys(catalog: String, schema: String, table: String): ResultSet = ??? 
+ + override def getCrossReference(parentCatalog: String, parentSchema: String, parentTable: String, foreignCatalog: String, foreignSchema: String, foreignTable: String): ResultSet = ??? + + override def getTypeInfo: ResultSet = ??? + + override def getIndexInfo(catalog: String, schema: String, table: String, unique: Boolean, approximate: Boolean): ResultSet = ??? + + override def supportsResultSetType(`type`: Int): Boolean = ??? + + override def supportsResultSetConcurrency(`type`: Int, concurrency: Int): Boolean = ??? + + override def ownUpdatesAreVisible(`type`: Int): Boolean = ??? + + override def ownDeletesAreVisible(`type`: Int): Boolean = ??? + + override def ownInsertsAreVisible(`type`: Int): Boolean = ??? + + override def othersUpdatesAreVisible(`type`: Int): Boolean = ??? + + override def othersDeletesAreVisible(`type`: Int): Boolean = ??? + + override def othersInsertsAreVisible(`type`: Int): Boolean = ??? + + override def updatesAreDetected(`type`: Int): Boolean = ??? + + override def deletesAreDetected(`type`: Int): Boolean = ??? + + override def insertsAreDetected(`type`: Int): Boolean = ??? + + override def supportsBatchUpdates(): Boolean = ??? + + override def getUDTs(catalog: String, schemaPattern: String, typeNamePattern: String, types: Array[Int]): ResultSet = ??? + + override def getConnection: Connection = ??? + + override def supportsSavepoints(): Boolean = ??? + + override def supportsNamedParameters(): Boolean = ??? + + override def supportsMultipleOpenResults(): Boolean = ??? + + override def supportsGetGeneratedKeys(): Boolean = ??? + + override def getSuperTypes(catalog: String, schemaPattern: String, typeNamePattern: String): ResultSet = ??? + + override def getSuperTables(catalog: String, schemaPattern: String, tableNamePattern: String): ResultSet = ??? + + override def getAttributes(catalog: String, schemaPattern: String, typeNamePattern: String, attributeNamePattern: String): ResultSet = ??? 
+ + override def supportsResultSetHoldability(holdability: Int): Boolean = ??? + + override def getResultSetHoldability: Int = ??? + + override def getDatabaseMajorVersion: Int = ??? + + override def getDatabaseMinorVersion: Int = ??? + + override def getJDBCMajorVersion: Int = ??? + + override def getJDBCMinorVersion: Int = ??? + + override def getSQLStateType: Int = ??? + + override def locatorsUpdateCopy(): Boolean = ??? + + override def supportsStatementPooling(): Boolean = ??? + + override def getRowIdLifetime: RowIdLifetime = ??? + + override def getSchemas(catalog: String, schemaPattern: String): ResultSet = ??? + + override def supportsStoredFunctionsUsingCallSyntax(): Boolean = ??? + + override def autoCommitFailureClosesAllResultSets(): Boolean = ??? + + override def getClientInfoProperties: ResultSet = ??? + + override def getFunctions(catalog: String, schemaPattern: String, functionNamePattern: String): ResultSet = ??? + + override def getFunctionColumns(catalog: String, schemaPattern: String, functionNamePattern: String, columnNamePattern: String): ResultSet = ??? + + override def getPseudoColumns(catalog: String, schemaPattern: String, tableNamePattern: String, columnNamePattern: String): ResultSet = ??? + + override def generatedKeyAlwaysReturned(): Boolean = ??? + + override def unwrap[T](iface: Class[T]): T = ??? + + override def isWrapperFor(iface: Class[_]): Boolean = ??? 
+} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcConnection.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcConnection.scala new file mode 100644 index 00000000..04931edb --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcConnection.scala @@ -0,0 +1,174 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import org.mongodb.scala.MongoClient + +import java.{sql, util} +import java.sql.{Blob, CallableStatement, Clob, Connection, DatabaseMetaData, NClob, PreparedStatement, SQLWarning, SQLXML, Savepoint, Statement, Struct} +import java.util.Properties +import java.util.concurrent.Executor + +class MongoJdbcConnection(client: MongoClient) extends Connection { + private var _isClosed = false + private var _isReadOnly = false + + override def createStatement(): Statement = new MongoPreparedStatement(this, null) + + override def prepareStatement(sql: String): PreparedStatement = { + new MongoPreparedStatement(this, sql) + } + + override def prepareCall(sql: String): CallableStatement = { + checkClosed() + null + } + + override def nativeSQL(sql: String): String = ??? + + override def setAutoCommit(autoCommit: Boolean): Unit = { + checkClosed() + } + + override def getAutoCommit: Boolean = { + checkClosed() + true + } + + + override def commit(): Unit = { + checkClosed() + } + + + override def rollback(): Unit = { + checkClosed() + } + + + override def close(): Unit = { + _isClosed = true + client.close() + } + + override def isClosed: Boolean = _isClosed + + override def getMetaData: DatabaseMetaData = ??? 
+ + override def setReadOnly(readOnly: Boolean): Unit = { + checkClosed() + _isReadOnly = readOnly + } + + override def isReadOnly: Boolean = _isReadOnly + + override def setCatalog(catalog: String): Unit = { + checkClosed() + } + + override def getCatalog: String = null + + override def setTransactionIsolation(level: Int): Unit = { + checkClosed() + // Since the only valid value for MongoDB is Connection.TRANSACTION_NONE, and the javadoc for this method + // indicates that this is not a valid value for level here, throw unsupported operation exception. + throw new UnsupportedOperationException("MongoDB provides no support for transactions.") + } + + + override def getTransactionIsolation: Int = { + checkClosed() + Connection.TRANSACTION_NONE + } + + override def getWarnings: SQLWarning = { + checkClosed() + null + } + + override def clearWarnings(): Unit = checkClosed() + + override def createStatement(resultSetType: Int, resultSetConcurrency: Int): Statement = ??? + + override def prepareStatement(sql: String, resultSetType: Int, resultSetConcurrency: Int): PreparedStatement = ??? + + override def prepareCall(sql: String, resultSetType: Int, resultSetConcurrency: Int): CallableStatement = ??? + + override def getTypeMap: util.Map[String, Class[_]] = ??? + + override def setTypeMap(map: util.Map[String, Class[_]]): Unit = ??? + + override def setHoldability(holdability: Int): Unit = ??? + + override def getHoldability: Int = ??? + + override def setSavepoint(): Savepoint = ??? + + override def setSavepoint(name: String): Savepoint = ??? + + override def rollback(savepoint: Savepoint): Unit = ??? + + override def releaseSavepoint(savepoint: Savepoint): Unit = ??? + + override def createStatement(resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): Statement = ??? + + override def prepareStatement(sql: String, resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): PreparedStatement = ??? 
+ + override def prepareCall(sql: String, resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): CallableStatement = ??? + + override def prepareStatement(sql: String, autoGeneratedKeys: Int): PreparedStatement = ??? + + override def prepareStatement(sql: String, columnIndexes: Array[Int]): PreparedStatement = ??? + + override def prepareStatement(sql: String, columnNames: Array[String]): PreparedStatement = ??? + + override def createClob(): Clob = ??? + + override def createBlob(): Blob = ??? + + override def createNClob(): NClob = ??? + + override def createSQLXML(): SQLXML = ??? + + override def isValid(timeout: Int): Boolean = ??? + + override def setClientInfo(name: String, value: String): Unit = ??? + + override def setClientInfo(properties: Properties): Unit = ??? + + override def getClientInfo(name: String): String = ??? + + override def getClientInfo: Properties = ??? + + override def createArrayOf(typeName: String, elements: Array[AnyRef]): sql.Array = ??? + + override def createStruct(typeName: String, attributes: Array[AnyRef]): Struct = ??? + + override def setSchema(schema: String): Unit = ??? + + override def getSchema: String = ??? + + override def abort(executor: Executor): Unit = ??? + + override def setNetworkTimeout(executor: Executor, milliseconds: Int): Unit = ??? + + override def getNetworkTimeout: Int = ??? 
+ + @throws[SQLAlreadyClosedException] + override def unwrap[T](iface: Class[T]): T = { + checkClosed() + null.asInstanceOf[T] + } + + @throws[SQLAlreadyClosedException] + override def isWrapperFor(iface: Class[_]): Boolean = { + checkClosed() + false + } + + + @throws[SQLAlreadyClosedException] + private def checkClosed(): Unit = { + if (isClosed) { + throw new SQLAlreadyClosedException(this.getClass.getSimpleName) + } + } +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala new file mode 100644 index 00000000..4150c8d4 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala @@ -0,0 +1,60 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import com.mongodb.MongoCredential.createCredential +import com.vdurmont.semver4j.Semver +import dev.mongocamp.driver.mongodb.BuildInfo +import org.mongodb.scala.MongoClient.DEFAULT_CODEC_REGISTRY +import org.mongodb.scala.{ConnectionString, MongoClient, MongoClientSettings, MongoCredential} + +import java.sql.{Connection, DriverPropertyInfo} +import java.util.Properties +import java.util.logging.Logger + +class MongoJdbcDriver extends java.sql.Driver { + + private lazy val semVer = new Semver(BuildInfo.version) + + + /** + * Connect to the database using a URL like : + * jdbc:mongodb://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]] + * The URL excepting the jdbc: prefix is passed as it is to the MongoDb native Java driver. 
+ */ + override def connect(url: String, info: Properties): Connection = { + if (url == null || !acceptsURL(url)) { + return null + } + + val connectionUrl = url.replaceFirst("^jdbc:", "") + val username = info.getProperty("user") + val password = info.getProperty("password") + + val builder = MongoClientSettings + .builder() + .applyConnectionString(new ConnectionString(connectionUrl)) + .codecRegistry(DEFAULT_CODEC_REGISTRY) + + if (!username.equalsIgnoreCase("") && !password.equalsIgnoreCase("")) { + val credential: MongoCredential = createCredential(username, "admin", password.toCharArray) + builder.credential(credential).build() + } + + val client: MongoClient = MongoClient(builder.build()) + new MongoJdbcConnection(client) + } + + override def acceptsURL(url: String): Boolean = { + val internalUrl = url.replaceFirst("^jdbc:", "") + internalUrl.startsWith("mongodb://") || internalUrl.startsWith("mongodb+srv://") + } + + override def getPropertyInfo(url: String, info: Properties): Array[DriverPropertyInfo] = ??? 
+ + override def getMajorVersion: Int = semVer.getMajor + + override def getMinorVersion: Int = semVer.getMinor + + override def jdbcCompliant(): Boolean = true + + override def getParentLogger: Logger = null +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoPreparedStatement.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoPreparedStatement.scala new file mode 100644 index 00000000..b0f182f5 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoPreparedStatement.scala @@ -0,0 +1,306 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import java.io.{InputStream, Reader} +import java.net.URL +import java.sql +import java.sql.{Blob, Clob, Connection, Date, NClob, ParameterMetaData, PreparedStatement, Ref, ResultSet, ResultSetMetaData, RowId, SQLException, SQLFeatureNotSupportedException, SQLWarning, SQLXML, Time, Timestamp} +import java.util.Calendar + +class MongoPreparedStatement(connection: MongoJdbcConnection, private var query: String) extends PreparedStatement { + private var lastResultSet: ResultSet = null + private var _isClosed = false + private var maxRows = -1 + private var fetchSize = -1 + + override def executeQuery(sql: String): ResultSet = { + checkClosed() + query = sql + if (lastResultSet != null && !lastResultSet.isClosed) { + lastResultSet.close(); + } + if (query == null) { + throw new SQLException("Null statement."); + } + // todo: execute and generate result set + // lastResultSet = connection.getScriptEngine().execute(query, fetchSize); + lastResultSet + } + + override def executeUpdate(sql: String): Int = ??? 
+ + override def executeQuery(): ResultSet = { + execute(query) + lastResultSet + } + + override def execute(sql: String): Boolean = { + executeQuery(sql) + lastResultSet != null + } + + override def executeUpdate(): Int = executeUpdate(query) + + override def setNull(parameterIndex: Int, sqlType: Int): Unit = {} + + override def setBoolean(parameterIndex: Int, x: Boolean): Unit = {} + + override def setByte(parameterIndex: Int, x: Byte): Unit = {} + + override def setShort(parameterIndex: Int, x: Short): Unit = {} + + override def setInt(parameterIndex: Int, x: Int): Unit = {} + + override def setLong(parameterIndex: Int, x: Long): Unit = {} + + override def setFloat(parameterIndex: Int, x: Float): Unit = {} + + override def setDouble(parameterIndex: Int, x: Double): Unit = {} + + override def setBigDecimal(parameterIndex: Int, x: java.math.BigDecimal): Unit = {} + + override def setString(parameterIndex: Int, x: String): Unit = {} + + override def setBytes(parameterIndex: Int, x: Array[Byte]): Unit = {} + + override def setDate(parameterIndex: Int, x: Date): Unit = {} + + override def setTime(parameterIndex: Int, x: Time): Unit = {} + + override def setTimestamp(parameterIndex: Int, x: Timestamp): Unit = {} + + override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Int): Unit = {} + + override def setUnicodeStream(parameterIndex: Int, x: InputStream, length: Int): Unit = {} + + override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Int): Unit = {} + + override def clearParameters(): Unit = {} + + override def setObject(parameterIndex: Int, x: Any, targetSqlType: Int): Unit = {} + + override def setObject(parameterIndex: Int, x: Any): Unit = {} + + override def execute(): Boolean = { + query != null && execute(query) + } + + override def addBatch(): Unit = {} + + override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Int): Unit = {} + + override def setRef(parameterIndex: Int, x: Ref): Unit = {} + + 
override def setBlob(parameterIndex: Int, x: Blob): Unit = {} + + override def setClob(parameterIndex: Int, x: Clob): Unit = {} + + override def setArray(parameterIndex: Int, x: sql.Array): Unit = {} + + override def getMetaData: ResultSetMetaData = { + null + } + + override def setDate(parameterIndex: Int, x: Date, cal: Calendar): Unit = {} + + override def setTime(parameterIndex: Int, x: Time, cal: Calendar): Unit = {} + + override def setTimestamp(parameterIndex: Int, x: Timestamp, cal: Calendar): Unit = {} + + override def setNull(parameterIndex: Int, sqlType: Int, typeName: String): Unit = {} + + override def setURL(parameterIndex: Int, x: URL): Unit = {} + + override def getParameterMetaData: ParameterMetaData = null + + override def setRowId(parameterIndex: Int, x: RowId): Unit = {} + + override def setNString(parameterIndex: Int, value: String): Unit = {} + + override def setNCharacterStream(parameterIndex: Int, value: Reader, length: Long): Unit = {} + + override def setNClob(parameterIndex: Int, value: NClob): Unit = {} + + override def setClob(parameterIndex: Int, reader: Reader, length: Long): Unit = {} + + override def setBlob(parameterIndex: Int, inputStream: InputStream, length: Long): Unit = {} + + override def setNClob(parameterIndex: Int, reader: Reader, length: Long): Unit = {} + + override def setSQLXML(parameterIndex: Int, xmlObject: SQLXML): Unit = {} + + override def setObject(parameterIndex: Int, x: Any, targetSqlType: Int, scaleOrLength: Int): Unit = {} + + override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Long): Unit = {} + + override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Long): Unit = {} + + override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Long): Unit = {} + + override def setAsciiStream(parameterIndex: Int, x: InputStream): Unit = {} + + override def setBinaryStream(parameterIndex: Int, x: InputStream): Unit = {} + + override def 
setCharacterStream(parameterIndex: Int, reader: Reader): Unit = {} + + override def setNCharacterStream(parameterIndex: Int, value: Reader): Unit = {} + + override def setClob(parameterIndex: Int, reader: Reader): Unit = {} + + override def setBlob(parameterIndex: Int, inputStream: InputStream): Unit = {} + + override def setNClob(parameterIndex: Int, reader: Reader): Unit = {} + + override def close(): Unit = { + _isClosed = true + if (lastResultSet == null || lastResultSet.isClosed) { + return + } + lastResultSet.close() + } + + override def getMaxFieldSize: Int = { + 0 + } + + override def setMaxFieldSize(max: Int): Unit = {} + + override def getMaxRows: Int = maxRows + + override def setMaxRows(max: Int): Unit = maxRows = max + + override def setEscapeProcessing(enable: Boolean): Unit = {} + + override def getQueryTimeout: Int = { + checkClosed() + throw new SQLFeatureNotSupportedException("MongoDB provides no support for query timeouts.") + } + + override def setQueryTimeout(seconds: Int): Unit = { + checkClosed() + throw new SQLFeatureNotSupportedException("MongoDB provides no support for query timeouts.") + } + + override def cancel(): Unit = { + checkClosed() + throw new SQLFeatureNotSupportedException("MongoDB provides no support for query timeouts.") + } + + override def getWarnings: SQLWarning = { + checkClosed() + null + } + + override def clearWarnings(): Unit = { + checkClosed() + } + + override def setCursorName(name: String): Unit = { + checkClosed() + } + + override def getResultSet: ResultSet = { + checkClosed() + lastResultSet; + } + + override def getUpdateCount: Int = { + checkClosed() + -1 + } + + override def getMoreResults: Boolean = false + + override def setFetchDirection(direction: Int): Unit = {} + + override def getFetchDirection: Int = ResultSet.FETCH_FORWARD + + override def setFetchSize(rows: Int): Unit = { + if (rows <= 1) { + throw new SQLException("Fetch size must be > 1. 
Actual: " + rows) + } + fetchSize = rows + } + + override def getFetchSize: Int = fetchSize + + override def getResultSetConcurrency: Int = throw new SQLFeatureNotSupportedException(); + + override def getResultSetType: Int = ResultSet.TYPE_FORWARD_ONLY + + override def addBatch(sql: String): Unit = {} + + + override def clearBatch(): Unit = {} + + override def executeBatch(): Array[Int] = { + checkClosed() + null + } + + override def getConnection: Connection = { + checkClosed() + connection + } + + override def getMoreResults(current: Int): Boolean = { + checkClosed() + false + } + + override def getGeneratedKeys: ResultSet = { + checkClosed() + null + } + + override def executeUpdate(sql: String, autoGeneratedKeys: Int): Int = { + checkClosed() + 0 + } + + override def executeUpdate(sql: String, columnIndexes: Array[Int]): Int = { + checkClosed() + 0 + } + + override def executeUpdate(sql: String, columnNames: Array[String]): Int = { + checkClosed() + 0 + } + + override def execute(sql: String, autoGeneratedKeys: Int): Boolean = { + checkClosed() + false + } + + override def execute(sql: String, columnIndexes: Array[Int]): Boolean = { + checkClosed() + false + } + + override def execute(sql: String, columnNames: Array[String]): Boolean = { + checkClosed() + false + } + + override def getResultSetHoldability: Int = 0 + + override def isClosed: Boolean = _isClosed + + override def setPoolable(poolable: Boolean): Unit = {} + + override def isPoolable: Boolean = false + + override def closeOnCompletion(): Unit = {} + + override def isCloseOnCompletion: Boolean = false + + override def unwrap[T](iface: Class[T]): T = null.asInstanceOf[T] + + override def isWrapperFor(iface: Class[_]): Boolean = false + + private def checkClosed(): Unit = { + if (isClosed) { + throw new SQLAlreadyClosedException(this.getClass.getSimpleName) + } + } +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/SQLAlreadyClosedException.scala 
b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/SQLAlreadyClosedException.scala new file mode 100644 index 00000000..4657c02e --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/SQLAlreadyClosedException.scala @@ -0,0 +1,5 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import java.sql.SQLException + +class SQLAlreadyClosedException(name: String) extends SQLException(name + " has already been closed.") \ No newline at end of file From 48bf9b4ad120a6064fb29958d5e83410940202f5 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Mon, 11 Mar 2024 22:03:34 +0100 Subject: [PATCH 06/22] docs: update vitepress --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d8ac5d52..a4d6f689 100644 --- a/package.json +++ b/package.json @@ -23,6 +23,6 @@ "@iconify-json/fluent-emoji" : "^1.1.18", "@unocss/preset-icons" : "^0.58.5", "unocss" : "^0.58.5", - "vitepress" : "1.0.0-rc.44" + "vitepress" : "1.0.0-rc.45" } } \ No newline at end of file From 36a43071d15a0b5ad9dc4a26c94fe06be215265f Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Wed, 17 Apr 2024 07:32:55 +0200 Subject: [PATCH 07/22] feat: sql converter with execution --- build.sbt | 14 +- .../mongodb/sql/MongoSqlQueryHolder.scala | 529 ++++++++++++++++++ .../driver/mongodb/sql/SQLCommandType.scala | 11 + .../driver/mongodb/model/model.scala | 2 +- .../driver/mongodb/sql/DeleteSqlSpec.scala | 64 +++ .../driver/mongodb/sql/InsertSqlSpec.scala | 58 ++ .../driver/mongodb/sql/OtherSqlSpec.scala | 100 ++++ .../driver/mongodb/sql/SelectSqlSpec.scala | 143 +++++ .../driver/mongodb/sql/UpdateSqlSpec.scala | 73 +++ .../driver/mongodb/test/TestDatabase.scala | 4 +- 10 files changed, 993 insertions(+), 5 deletions(-) create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/sql/SQLCommandType.scala create mode 100644 
src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSpec.scala create mode 100644 src/test/scala/dev/mongocamp/driver/mongodb/sql/InsertSqlSpec.scala create mode 100644 src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala create mode 100644 src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala create mode 100644 src/test/scala/dev/mongocamp/driver/mongodb/sql/UpdateSqlSpec.scala diff --git a/build.sbt b/build.sbt index 3701e4a7..e55683d0 100644 --- a/build.sbt +++ b/build.sbt @@ -65,7 +65,7 @@ libraryDependencies += "org.specs2" %% "specs2-core" % "4.20.5" % Test libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.5.3" % Test -libraryDependencies += "joda-time" % "joda-time" % "2.12.7" % Test +libraryDependencies += "joda-time" % "joda-time" % "2.12.7" val circeVersion = "0.14.6" @@ -97,7 +97,17 @@ libraryDependencies += "com.typesafe.scala-logging" %% "scala-logging" % "3.9.5" libraryDependencies += "org.scala-lang.modules" %% "scala-collection-compat" % "2.11.0" -libraryDependencies += ("com.vdurmont" % "semver4j" % "3.1.0") +libraryDependencies += "com.vdurmont" % "semver4j" % "3.1.0" + +// Mongo SQL Converter +val MongoDbVersion = "4.3.1" +libraryDependencies += "com.github.jsqlparser" % "jsqlparser" % "4.3" +//libraryDependencies += "com.google.code.gson" % "gson" % "2.8.9" +//libraryDependencies += "com.google.guava" % "guava" % "32.0.0-jre" +//libraryDependencies += "commons-io" % "commons-io" % "2.11.0" +//libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.12.0" +//libraryDependencies += "org.mongodb" % "bson" % MongoDbVersion +//libraryDependencies += "org.mongodb" % "mongodb-driver-sync" % MongoDbVersion buildInfoPackage := "dev.mongocamp.driver.mongodb" diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala new file mode 100644 index 00000000..db3849b5 --- /dev/null +++ 
b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala @@ -0,0 +1,529 @@ +package dev.mongocamp.driver.mongodb.sql + +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.database.DatabaseProvider +import SQLCommandType.SQLCommandType +import com.mongodb.client.model.DropIndexOptions +import net.sf.jsqlparser.statement.Statement +import net.sf.jsqlparser.expression.operators.conditional.{AndExpression, OrExpression} +import net.sf.jsqlparser.expression.operators.relational._ +import net.sf.jsqlparser.expression.{Expression, Parenthesis} +import net.sf.jsqlparser.parser.{CCJSqlParser, StreamProvider} +import net.sf.jsqlparser.schema.{Column, Table} +import net.sf.jsqlparser.statement.create.index.CreateIndex +import net.sf.jsqlparser.statement.delete.Delete +import net.sf.jsqlparser.statement.drop.Drop +import net.sf.jsqlparser.statement.insert.Insert +import net.sf.jsqlparser.statement.select.{AllColumns, FromItem, PlainSelect, Select, SelectExpressionItem, SelectItem, SubSelect} +import net.sf.jsqlparser.statement.truncate.Truncate +import net.sf.jsqlparser.statement.update.Update +import org.bson.conversions.Bson +import org.mongodb.scala.model.IndexOptions +import org.mongodb.scala.model.Sorts.ascending +import org.mongodb.scala.{Document, Observable} + +import java.util.concurrent.TimeUnit +import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ + +class MongoSqlQueryHolder { + private val aggregatePipeline: ArrayBuffer[Document] = ArrayBuffer() + private var sqlTable: Table = _ + private var alias: Option[String] = None + private var sqlCommandType: SQLCommandType = _ + private var updateOrDeleteFilter: Option[Map[String, Any]] = None + private var setElement: Option[Bson] = None + private val documentsToInsert: ArrayBuffer[Document] = ArrayBuffer.empty + private var indexOptions: Option[IndexOptions] = None + + def this(statement: 
net.sf.jsqlparser.statement.Statement) = { + this() + + if (classOf[Select].isAssignableFrom(statement.getClass)) { + val select = statement.asInstanceOf[Select] + convertSelectStatement(select) + } + else if (classOf[Insert].isAssignableFrom(statement.getClass)) { + val insert = statement.asInstanceOf[Insert] + convertInsertStatement(insert) + } + else if (classOf[Update].isAssignableFrom(statement.getClass)) { + val update = statement.asInstanceOf[Update] + convertUpdateStatement(update) + } + else if (classOf[Delete].isAssignableFrom(statement.getClass)) { + val delete = statement.asInstanceOf[Delete] + convertDeleteStatement(delete) + } + else if (classOf[CreateIndex].isAssignableFrom(statement.getClass)) { + val createIndex = statement.asInstanceOf[CreateIndex] + convertCreateIndexStatement(createIndex) + } + else if (classOf[Drop].isAssignableFrom(statement.getClass)) { + val drop = statement.asInstanceOf[Drop] + drop.getType.toUpperCase match { + case "TABLE" => + sqlCommandType = SQLCommandType.DropTable + sqlTable = drop.getName + case "INDEX" => + sqlCommandType = SQLCommandType.DropIndex + sqlTable = drop.getName + if (!getCollection.contains(".")) { + throw new IllegalArgumentException("not supported drop index without collection specified in the name") + } + case "DATABASE" => + sqlCommandType = SQLCommandType.DropDatabase + sqlTable = drop.getName + case _ => + throw new IllegalArgumentException("not supported drop command type") + } + } + else if (classOf[Truncate].isAssignableFrom(statement.getClass)) { + val truncate = statement.asInstanceOf[Truncate] + sqlCommandType = SQLCommandType.Delete + sqlTable = truncate.getTable + } + else { + throw new IllegalArgumentException("not supported sql command type") + } + "" + } + + def getCollection: String = sqlTable.getFullyQualifiedName + + def run(provider: DatabaseProvider, allowDiskUsage: Boolean = true): Observable[Document] = { + sqlCommandType match { + case SQLCommandType.Insert => + provider + 
.dao(getCollection) + .insertMany(documentsToInsert.toList) + .map(e => { + val map = e.getInsertedIds.asScala.map(d => d._1.toString -> d._2).toMap + val document = org.mongodb.scala.Document("wasAcknowledged" -> e.wasAcknowledged(), "insertedIds" -> Document(map)) + document + }) + + case SQLCommandType.Select => + provider.dao(getCollection).findAggregated(aggregatePipeline.toList, allowDiskUsage) + + case SQLCommandType.Update => + val updateSet = setElement.getOrElse(throw new IllegalArgumentException("update set element must be defined")) + provider + .dao(getCollection) + .updateMany(getUpdateOrDeleteFilter, Map("$set" -> updateSet)) + .map(e => + org.mongodb.scala.Document( + "modifiedCount" -> e.getModifiedCount, + "matchedCount" -> e.getMatchedCount, + "wasAcknowledged" -> e.wasAcknowledged() + ) + ) + + case SQLCommandType.Delete => + provider + .dao(getCollection) + .deleteMany(getUpdateOrDeleteFilter) + .map(e => org.mongodb.scala.Document("deletedCount" -> e.getDeletedCount, "wasAcknowledged" -> e.wasAcknowledged())) + + case SQLCommandType.CreateIndex => + provider.dao(getCollection).createIndex(setElement.get, indexOptions.get).map(e => org.mongodb.scala.Document("indexName" -> e)) + + case SQLCommandType.DropIndex => + val collectionName = sqlTable.getSchemaName + val indexName = sqlTable.getName + provider.dao(collectionName).dropIndexForName(indexName, new DropIndexOptions().maxTime(1, TimeUnit.MINUTES)).map(_ => org.mongodb.scala.Document("indexName" -> indexName)) + + case SQLCommandType.DropTable => + provider.dao(getCollection).drop().map(_ => org.mongodb.scala.Document("wasAcknowledged" -> true)) + + case _ => + throw new IllegalArgumentException("not supported sql command type") + } + } + + private def getUpdateOrDeleteFilter: Bson = { + updateOrDeleteFilter.getOrElse(Map.empty).toMap + } + + private def convertValue(expression: Expression): Any = { + expression match { + case e: net.sf.jsqlparser.expression.LongValue => e.getValue + case 
e: net.sf.jsqlparser.expression.DoubleValue => e.getValue + case e: net.sf.jsqlparser.expression.StringValue => e.getValue + case e: net.sf.jsqlparser.expression.DateValue => e.getValue + case e: net.sf.jsqlparser.expression.TimeValue => e.getValue + case e: net.sf.jsqlparser.expression.TimestampValue => e.getValue + case e: net.sf.jsqlparser.expression.NullValue => null + case _ => + throw new IllegalArgumentException("not supported value type") + } + } + + private def parseWhere(ex: Expression, queryMap: mutable.Map[String, Any]): Unit = { + ex match { + case e: EqualsTo => + queryMap.put(e.getLeftExpression.toString, Map("$eq" -> convertValue(e.getRightExpression))) + case e: NotEqualsTo => + queryMap.put(e.getLeftExpression.toString, Map("$ne" -> convertValue(e.getRightExpression))) + case e: GreaterThan => + queryMap.put(e.getLeftExpression.toString, Map("$gt" -> convertValue(e.getRightExpression))) + case e: GreaterThanEquals => + queryMap.put(e.getLeftExpression.toString, Map("$gte" -> convertValue(e.getRightExpression))) + case e: MinorThan => + queryMap.put(e.getLeftExpression.toString, Map("$lt" -> convertValue(e.getRightExpression))) + case e: Between => + val fieldName = e.getLeftExpression.toString + if (e.isNot) { + queryMap.put( + "$or", + List( + Map(fieldName -> Map("$lte" -> convertValue(e.getBetweenExpressionStart))), + Map(fieldName -> Map("$gte" -> convertValue(e.getBetweenExpressionEnd))) + ) + ) + } + else { + queryMap.put( + "$and", + List( + Map(fieldName -> Map("$gte" -> convertValue(e.getBetweenExpressionStart))), + Map(fieldName -> Map("$lte" -> convertValue(e.getBetweenExpressionEnd))) + ) + ) + } + case e: MinorThanEquals => + queryMap.put(e.getLeftExpression.toString, Map("$lte" -> convertValue(e.getRightExpression))) + case e: OrExpression => + val left = mutable.Map[String, Any]() + val right = mutable.Map[String, Any]() + parseWhere(e.getLeftExpression, left) + parseWhere(e.getRightExpression, right) + queryMap.put("$or", 
List(left, right)) + case e: AndExpression => + val left = mutable.Map[String, Any]() + val right = mutable.Map[String, Any]() + parseWhere(e.getLeftExpression, left) + parseWhere(e.getRightExpression, right) + queryMap.put("$and", List(left, right)) + case e: Parenthesis => + parseWhere(e.getExpression, queryMap) + case e: InExpression => + val value = e.getRightItemsList match { + case l: ExpressionList => l.getExpressions.asScala.map(convertValue) + case i: ItemsList => throw new IllegalArgumentException(s"${i.getClass.getSimpleName} not supported") + } + val functionName = if (e.isNot) "$nin" else "$in" + queryMap.put(e.getLeftExpression.toString, Map(functionName -> value)) + case e: IsNullExpression => + if (e.isNot) { + queryMap.put(e.getLeftExpression.toString, Map("$ne" -> null)) + } + else { + queryMap.put(e.getLeftExpression.toString, Map("$eq" -> null)) + } + case _ => + throw new IllegalArgumentException("not supported where expression") + } + } + + private def convertSelectStatement(select: Select): Unit = { + select.getSelectBody match { + case plainSelect: PlainSelect => + val selectItems = Option(plainSelect.getSelectItems).map(_.asScala).getOrElse(List.empty) + val aliasList = ArrayBuffer[String]() + sqlCommandType = SQLCommandType.Select + Option(plainSelect.getGroupBy).foreach(gbEl => { + val groupBy = gbEl.getGroupByExpressionList.getExpressions.asScala.map(_.toString).toList + val groupId = mutable.Map[String, Any]() + val group = mutable.Map[String, Any]() + groupBy.foreach(g => groupId += g -> ("$" + g)) + selectItems.foreach { + case e: SelectExpressionItem => + val expressionName = e.getExpression.toString + if (expressionName.contains("count")) { + group += expressionName -> Map("$sum" -> 1) + } + else { + if (!groupBy.contains(expressionName)) { + val espr = expressionName.split('(').map(_.trim.replace(")", "")).map(s => ("$" + s)) + group += expressionName -> Map(espr.head -> espr.last) + } + } + case e: SelectItem => + e.toString + } + 
val groupMap = Map("_id" -> groupId) ++ group.toMap ++ groupId.keys.map(s => s -> Map("$first" -> ("$" + s))).toMap + aggregatePipeline += Map("$group" -> groupMap) + }) + def convertFromItemToTable(fromItem: FromItem): Table = { + val tableName = Option(fromItem.getAlias).map(a => fromItem.toString.replace(a.toString, "")).getOrElse(fromItem).toString + new Table(tableName) + } + sqlTable = convertFromItemToTable(plainSelect.getFromItem) + alias = Option(plainSelect.getFromItem.getAlias).map(alias => { + val aliasName = alias.getName + aliasList += aliasName + aggregatePipeline += Map( + "$project" -> Map( + "_id" -> 0, + aliasName -> "$$ROOT" + ) + ) + aliasName + }) + Option(plainSelect.getJoins) + .map(_.asScala) + .getOrElse(List.empty) + .foreach(join => { + var lookupMap = Map[String, Any]() + if (join.getOnExpressions != null && !join.getOnExpressions.isEmpty) + join.getRightItem.getAlias match { + case null => + lookupMap += "from" -> join.getRightItem.toString + lookupMap += "as" -> join.getRightItem.toString + case _ => + lookupMap += "from" -> join.getRightItem.toString.replace(join.getRightItem.getAlias.toString, "") + lookupMap += "as" -> join.getRightItem.getAlias.getName + } + join.getOnExpressions.asScala.foreach(e => { + val equalsTo = e.asInstanceOf[EqualsTo] + val joinCollectionPrefix = s"${lookupMap("as")}." + aliasList += lookupMap("as").toString + val primaryCollectionPrefix = s"${alias.getOrElse(sqlTable)}." 
+ val expressionList = List(equalsTo.getLeftExpression.toString, equalsTo.getRightExpression.toString) + .filter(e => e.contains(joinCollectionPrefix) || e.contains(primaryCollectionPrefix)) + if (expressionList.size == 2) { + expressionList.foreach { exp => + if (exp.contains(primaryCollectionPrefix)) { + lookupMap += "localField" -> exp + } + if (exp.contains(joinCollectionPrefix)) { + lookupMap += "foreignField" -> exp.replace(joinCollectionPrefix, "") + } + } + } + else { + throw new IllegalArgumentException("join on expression must contain collection and lookup collection") + } + }) + aggregatePipeline += Map("$lookup" -> lookupMap) + aggregatePipeline += Map("$unwind" -> Map("path" -> s"$$${lookupMap("as")}", "preserveNullAndEmptyArrays" -> false)) + }) + Option(plainSelect.getWhere).foreach { where => + val filterQuery = mutable.Map[String, Any]() + parseWhere(where, filterQuery) + aggregatePipeline += Map( + "$match" -> filterQuery + ) + } + Option(plainSelect.getOrderByElements).foreach { orderBy => + aggregatePipeline += Map( + "$sort" -> orderBy.asScala.map(e => e.getExpression.toString -> (if (e.isAsc) 1 else -1)).toMap + ) + } + Option(plainSelect.getHaving()).foreach { having => + val filterQuery = mutable.Map[String, Any]() + parseWhere(having, filterQuery) + aggregatePipeline += Map( + "$match" -> filterQuery + ) + } + val hasAllColumns = selectItems.exists(_.isInstanceOf[AllColumns]) + if (selectItems.nonEmpty && !hasAllColumns) { + val addFields = selectItems.filter { + case e: SelectExpressionItem => + e.getAlias match { + case null => false + case _ => + true + } + case _ => false + } + val fields: Map[String, Any] = addFields + .map(_.asInstanceOf[SelectExpressionItem]) + .map(e => e.getAlias.getName -> ("$" + e.getExpression.toString)) + .toMap + if (fields.nonEmpty) { + aggregatePipeline += Map("$addFields" -> fields) + } + aggregatePipeline += Map( + "$project" -> selectItems + .filterNot(s => s.toString.equalsIgnoreCase("*")) + .map { + 
case e: SelectExpressionItem => + e.getAlias match { + case null => + e.getExpression.toString -> 1 + case _ => + e.getAlias.getName -> 1 + } + case _ => throw new IllegalArgumentException("not supported sql command type") + } + .toMap + ) + } + if (aliasList.nonEmpty) { + aliasList += "$$ROOT" + aggregatePipeline += Map( + "$replaceWith" -> Map("$mergeObjects" -> aliasList.map(string => if (string.startsWith("$")) string else "$" + string).toList) + ) + } + Option(plainSelect.getDistinct).foreach { distinct => + val groupMap: mutable.Map[String, Any] = mutable.Map() + selectItems.foreach { + case e: SelectExpressionItem => + val expressionName = e.getExpression.toString + if (expressionName.contains("count")) { + groupMap += expressionName -> Map("$sum" -> 1) + } + else { + val espr = expressionName.split('(').map(_.trim.replace(")", "")).map(s => ("$" + s)) + if (espr.head.equalsIgnoreCase(espr.last)) { + groupMap += expressionName -> Map("$first" -> espr.last) + } + else { + groupMap += expressionName -> Map(espr.head -> espr.last) + } + } + case e: SelectItem => + e.toString + } + groupMap.put("_id", groupMap.keys.map(s => s -> ("$" + s)).toMap) + aggregatePipeline += Map("$group" -> groupMap.toMap) + if (plainSelect.getOrderByElements != null) { + aggregatePipeline += Map( + "$sort" -> plainSelect.getOrderByElements.asScala.map(e => e.getExpression.toString -> (if (e.isAsc) 1 else -1)).toMap + ) + } + } + Option(plainSelect.getOffset).foreach { offset => + aggregatePipeline += Map("$skip" -> convertValue(offset.getOffset)) + } + Option(plainSelect.getLimit).foreach { limit => + aggregatePipeline += Map("$limit" -> convertValue(limit.getRowCount)) + } + case _ => throw new IllegalArgumentException("not supported sql command type") + } + } + + private def convertInsertStatement(insert: Insert): Unit = { + insert.getItemsList match { + case i: ExpressionList => + val expressionList = i.getExpressions.asScala.toList + val document = mutable.Map[String, Any]() + 
var index = 0 + if (insert.getColumns == null) { + throw new IllegalArgumentException("column names must be specified") + } + insert.getColumns.asScala + .map(_.getColumnName) + .foreach(colName => { + document += colName -> convertValue(expressionList(index)) + index += 1 + }) + documentsToInsert += document.toMap + case i: MultiExpressionList => + i.getExpressionLists.asScala.foreach { el => + val expressionList = el.getExpressions.asScala.toList + val document = mutable.Map[String, Any]() + var index = 0 + insert.getColumns.asScala + .map(_.getColumnName) + .foreach(colName => { + document += colName -> convertValue(expressionList(index)) + index += 1 + }) + documentsToInsert += document.toMap + } + case i: ItemsList => + throw new IllegalArgumentException(s"not supported items list of type ${i.getClass.getSimpleName}") + } + sqlCommandType = SQLCommandType.Insert + sqlTable = insert.getTable + } + + private def convertUpdateStatement(update: Update): Unit = { + val filter = Option(update.getWhere) + .map { where => + val filterQuery = mutable.Map[String, Any]() + parseWhere(where, filterQuery) + filterQuery.toMap + } + .getOrElse(Map.empty) + updateOrDeleteFilter = Some(filter) + + val updateSetElement = mutable.Map[String, Any]() + Option(update.getUpdateSets) + .map(_.asScala) + .getOrElse(List.empty) + .foreach(set => { + val expressionList = set.getExpressions.asScala.toList + var index = 0 + if (set.getColumns == null) { + throw new IllegalArgumentException("column names must be specified") + } + set.getColumns.asScala + .map(_.getColumnName) + .foreach(colName => { + updateSetElement += colName -> convertValue(expressionList(index)) + index += 1 + }) + + }) + if (updateSetElement.nonEmpty) { + this.setElement = Some(updateSetElement.toMap) + } + sqlCommandType = SQLCommandType.Update + sqlTable = update.getTable + } + + private def convertDeleteStatement(delete: Delete): Unit = { + val filter = Option(delete.getWhere) + .map { where => + val filterQuery = 
mutable.Map[String, Any]() + parseWhere(where, filterQuery) + filterQuery.toMap + } + .getOrElse(Map.empty) + updateOrDeleteFilter = Some(filter) + sqlCommandType = SQLCommandType.Delete + sqlTable = delete.getTable + } + + private def convertCreateIndexStatement(createIndex: CreateIndex): Unit = { + sqlTable = createIndex.getTable + sqlCommandType = SQLCommandType.CreateIndex + val mongoIndexOptions = IndexOptions() + val indexToCreate = Option(createIndex.getIndex).getOrElse(throw new IllegalArgumentException("index must be defined")) + mongoIndexOptions.name(indexToCreate.getName) + indexToCreate.getType match { + case "UNIQUE" => + mongoIndexOptions.unique(true) + case _ => + "" + } + indexOptions = Some(mongoIndexOptions) + setElement = Some(ascending(indexToCreate.getColumns.asScala.map(_.getColumnName).toSeq: _*)) + } +} + +object MongoSqlQueryHolder { + + def stringToStatement(sql: String, charset: String = "UTF-8") = { + val stream: java.io.InputStream = new java.io.ByteArrayInputStream(sql.getBytes(charset)) + val jSqlParser = new CCJSqlParser(new StreamProvider(stream, charset)) + val statements = jSqlParser.Statements().getStatements.asScala + if (statements.size != 1) { + throw new IllegalArgumentException("only one statement is supported") + } + statements.head + } + + def apply(statement: net.sf.jsqlparser.statement.Statement): MongoSqlQueryHolder = new MongoSqlQueryHolder(statement) + + def apply(sql: String, charset: String = "UTF-8"): MongoSqlQueryHolder = { + new MongoSqlQueryHolder(stringToStatement(sql, charset)) + } + +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sql/SQLCommandType.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sql/SQLCommandType.scala new file mode 100644 index 00000000..09e0b6fe --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sql/SQLCommandType.scala @@ -0,0 +1,11 @@ +package dev.mongocamp.driver.mongodb.sql + +import net.sf.jsqlparser.statement.create.index.CreateIndex + +object 
SQLCommandType extends Enumeration { + + type SQLCommandType = Value + + val Delete, Select, Update, Insert, CreateIndex, DropTable, DropIndex, DropDatabase = Value + +} \ No newline at end of file diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/model/model.scala b/src/test/scala/dev/mongocamp/driver/mongodb/model/model.scala index a133264b..f68189c1 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/model/model.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/model/model.scala @@ -57,7 +57,7 @@ case class Student(_id: Long, name: String, scores: List[Score], image: Option[O case class Score(score: Double, `type`: String) -case class Grade(_id: ObjectId, student_id: Long, class_id: Long, scores: List[Score]) +case class Grade(_id: ObjectId, studentId: Long, classId: Long, scores: List[Score]) // #endregion model_student case class CodecTest( diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSpec.scala new file mode 100644 index 00000000..d8015078 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSpec.scala @@ -0,0 +1,64 @@ +package dev.mongocamp.driver.mongodb.sql + +import better.files.{File, Resource} +import dev.mongocamp.driver.mongodb.{GenericObservable, MongoDAO} +import dev.mongocamp.driver.mongodb.dao.PersonSpecification +import dev.mongocamp.driver.mongodb.model.{Grade, Score} +import dev.mongocamp.driver.mongodb.test.TestDatabase +import dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO +import org.bson.types.ObjectId +import org.specs2.mutable.Specification +import org.specs2.specification.{BeforeAll, BeforeEach} + +class DeleteSqlSpec extends Specification with BeforeEach { + sequential + + object GradeDAO extends MongoDAO[Grade](TestDatabase.provider, "universityGrades") + + override def before(): Unit = { + this.GradeDAO.drop().result() + this.GradeDAO + .insertMany( + List( + Grade(new ObjectId(), 1, 2, 
List(Score(1.20, "test"), Score(120, "test1"))), + Grade(new ObjectId(), 2, 4, List(Score(10, "test2"), Score(20, "test3"))), + Grade(new ObjectId(), 3, 7, List(Score(10, "test4"), Score(20, "test5"))) + ) + ) + .result() + } + + "MongoSqlQueryHolder" should { + + "delete with where" in { + val queryConverter = MongoSqlQueryHolder("DELETE FROM universityGrades WHERE studentId = 1;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getBoolean("wasAcknowledged") mustEqual true + selectResponse.head.getLong("deletedCount") mustEqual 1 + val documents = GradeDAO.count().result() + documents mustEqual 2 + } + + "delete all" in { + val queryConverter = MongoSqlQueryHolder("DELETE FROM universityGrades;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getBoolean("wasAcknowledged") mustEqual true + selectResponse.head.getLong("deletedCount") mustEqual 3 + val documents = GradeDAO.count().result() + documents mustEqual 0 + } + + "delete all with or" in { + val queryConverter = MongoSqlQueryHolder("DELETE FROM universityGrades WHERE classId = 4 or classId = 7;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getBoolean("wasAcknowledged") mustEqual true + selectResponse.head.getLong("deletedCount") mustEqual 2 + val documents = GradeDAO.count().result() + documents mustEqual 1 + } + + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/InsertSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/InsertSqlSpec.scala new file mode 100644 index 00000000..d674a8db --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/InsertSqlSpec.scala @@ -0,0 +1,58 @@ +package dev.mongocamp.driver.mongodb.sql + +import dev.mongocamp.driver.mongodb.GenericObservable +import 
dev.mongocamp.driver.mongodb.dao.PersonSpecification +import dev.mongocamp.driver.mongodb.test.TestDatabase +import org.mongodb.scala.Document + +class InsertSqlSpec extends PersonSpecification { + + "MongoSqlQueryHolder" should { + + "insert" in { + val dao = TestDatabase.provider.dao("table_name") + dao.drop().resultList() + val queryConverter = MongoSqlQueryHolder("INSERT INTO table_name (column1, column2, column3) VALUES ('value1', 123, '2022-01-01T00:00:00.000Z');") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getBoolean("wasAcknowledged") mustEqual true + selectResponse.head.get("insertedIds") mustNotEqual null + val documents = dao.find().resultList() + documents.size mustEqual 1 + documents.head.getString("column1") mustEqual "value1" + documents.head.getLong("column2") mustEqual 123 + } + + "insert 2 rows" in { + val dao = TestDatabase.provider.dao("table_name") + dao.drop().resultList() + val queryConverter = MongoSqlQueryHolder( + "INSERT INTO table_name (column1, column2, column3) VALUES ('value1', 123, '2022-01-01T00:00:00.000Z'), ('value2', 456, '2022-02-01T00:00:00.000Z');" + ) + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getBoolean("wasAcknowledged") mustEqual true + selectResponse.head.get("insertedIds") mustNotEqual null + val documents = dao.find().resultList() + documents.size mustEqual 2 + documents.head.getString("column1") mustEqual "value1" + documents.head.getLong("column2") mustEqual 123 + } + + "insert not named" in { + var errorCaught = false + try { + val dao = TestDatabase.provider.dao("table_name") + dao.drop().resultList() + MongoSqlQueryHolder("INSERT INTO table_name VALUES ('value1', 123, '2022-01-01T00:00:00.000Z');") + } + catch { + case e: Exception => + e.getMessage mustEqual "column names must be specified" + errorCaught = true + } + errorCaught 
mustEqual true + } + + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala new file mode 100644 index 00000000..1ba85e1e --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala @@ -0,0 +1,100 @@ +package dev.mongocamp.driver.mongodb.sql + +import com.mongodb.client.model.IndexOptions +import dev.mongocamp.driver.mongodb.model.{Grade, Score} +import dev.mongocamp.driver.mongodb.test.TestDatabase +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.dao.PersonSpecification +import org.bson.types.ObjectId +import org.mongodb.scala.model.Sorts.ascending +import org.specs2.mutable.Specification +import org.specs2.specification.BeforeEach + +import scala.concurrent.duration.DurationInt + +class OtherSqlSpec extends PersonSpecification with BeforeEach{ + sequential + + object GradeDAO extends MongoDAO[Grade](TestDatabase.provider, "universityGrades") + + override protected def before: Any = { + this.GradeDAO.drop().result() + this.GradeDAO + .insertMany( + List( + Grade(new ObjectId(), 1, 2, List(Score(1.20, "test"), Score(120, "test1"))), + Grade(new ObjectId(), 2, 4, List(Score(10, "test2"), Score(20, "test3"))), + Grade(new ObjectId(), 3, 7, List(Score(10, "test4"), Score(20, "test5"))) + ) + ) + .result() + this.GradeDAO.createIndex(ascending("studentId"), new IndexOptions().name("student_idx")).result() + } + + override def beforeAll(): Unit = { + super.beforeAll() + } + + "MongoSqlQueryHolder" should { + + "drop collection" in { + val queryConverter = MongoSqlQueryHolder("Drop table universityGrades;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getBoolean("wasAcknowledged") mustEqual true + val grade = GradeDAO.count(Map("studentId" -> 1)).result() + grade mustEqual 0 + val collections = 
TestDatabase.provider.collectionNames() + collections must not contain "universityGrades" + } + + "truncate collection" in { + val queryConverter = MongoSqlQueryHolder("TRUNCATE TABLE universityGrades;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getBoolean("wasAcknowledged") mustEqual true + val grade = GradeDAO.count(Map("studentId" -> 1)).result() + grade mustEqual 0 + val collections = TestDatabase.provider.collectionNames() + collections.contains("universityGrades") must beTrue + } + + "create index " in { + val queryConverter = MongoSqlQueryHolder("CREATE INDEX idx_name ON people (name);") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getStringValue("indexName") mustEqual "idx_name" + val indices = TestDatabase.provider.collection("people").listIndexes().resultList() + indices.find(_.getString("name") == "idx_name") must beSome + } + + "create unique index " in { + val queryConverter = MongoSqlQueryHolder("CREATE unique INDEX uidx_name ON people (email);") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getStringValue("indexName") mustEqual "uidx_name" + val indices = TestDatabase.provider.collection("people").listIndexes().resultList() + indices.find(_.getString("name") == "uidx_name") must beSome + } + + "create index with multi" in { + val queryConverter = MongoSqlQueryHolder("CREATE INDEX idx_multiname ON people (name, gender);") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getStringValue("indexName") mustEqual "idx_multiname" + val indices = TestDatabase.provider.collection("people").listIndexes().resultList() + indices.find(_.getString("name") == "idx_multiname") must beSome + } + + "drop index " in { + 
val queryConverter = MongoSqlQueryHolder("DROP INDEX universityGrades.student_idx;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getStringValue("indexName") mustEqual "student_idx" + val indices = TestDatabase.provider.collection("universityGrades").listIndexes().resultList() + indices.find(_.getString("name") == "student_idx") must beNone + } + + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala new file mode 100644 index 00000000..1d0b3375 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala @@ -0,0 +1,143 @@ +package dev.mongocamp.driver.mongodb.sql + +import dev.mongocamp.driver.mongodb.GenericObservable +import dev.mongocamp.driver.mongodb.dao.PersonSpecification +import dev.mongocamp.driver.mongodb.test.TestDatabase + +class SelectSqlSpec extends PersonSpecification { + + "MongoSqlQueryHolder Select" should { + "simple sql" in { + val queryConverter = MongoSqlQueryHolder("select id, guid, name, age, balance from people where age < 30 order by id asc") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 99 + selectResponse.head.getInteger("age") mustEqual 25 + selectResponse.head.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" + } + + "sql with in query" in { + val queryConverter = MongoSqlQueryHolder("select id, guid, name, age, balance from people where age in (30, 18, 25, 22) order by id asc") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 41 + selectResponse.head.getInteger("age") mustEqual 25 + selectResponse.head.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" + } + + "sql with not in query" in { + val queryConverter = MongoSqlQueryHolder("select id, guid, name, age, 
balance from people where age not in (30, 18, 25, 22) order by id asc") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 159 + selectResponse.head.getInteger("age") mustEqual 40 + selectResponse.head.getString("guid") mustEqual "6ee53e07-2e61-48cd-9bc9-b3505a0438f3" + } + + "and sql" in { + val queryConverter = MongoSqlQueryHolder("select id, guid, name, age, balance from people where age < 30 and (age < 30 or age > 30) order by id asc") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 99 + selectResponse.head.getInteger("age") mustEqual 25 + selectResponse.head.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" + } + + "simple select all sql" in { + val queryConverter = MongoSqlQueryHolder("select * from people where age < 30 order by id asc") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 99 + selectResponse.head.getInteger("age") mustEqual 25 + selectResponse.head.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" + } + + "simple select between" in { + val queryConverter = MongoSqlQueryHolder("select age, guid as internal from people where balance BETWEEN 1500 AND 2000") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 41 + selectResponse.head.getInteger("age") mustEqual 40 + selectResponse.head.getString("internal") mustEqual "6ee53e07-2e61-48cd-9bc9-b3505a0438f3" + } + + "simple select not between" in { + val queryConverter = MongoSqlQueryHolder("select age, guid as internal from people where balance not BETWEEN 1500 AND 2000") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 159 + selectResponse.head.getInteger("age") mustEqual 25 + selectResponse.head.getString("internal") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" + } 
+ + "search with with number in string" in { + val queryConverter = MongoSqlQueryHolder( + "select p1.id, p1.guid, p1.name, p2.age, p2.balance from people as p1 join people as p2 on p1.id = p2.id where p2.age < 30 order by p2.id asc" + ) + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 99 + val document = selectResponse.head + document.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" + document.getInteger("age") mustEqual 25 + } + + "is not null" in { + val queryConverter = MongoSqlQueryHolder("select * from people where age is not null") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 200 + val document = selectResponse.head + document.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" + document.getInteger("age") mustEqual 25 + } + + "is null" in { + val queryConverter = MongoSqlQueryHolder("select * from people where blubber is null") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 200 + val document = selectResponse.head + document.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" + document.getInteger("age") mustEqual 25 + } + + "group by with count" in { + val queryConverter = MongoSqlQueryHolder("select age, count(*) as tmp, sum(age) from people group by age order by age;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 21 + val document = selectResponse.head + document.getInteger("age") mustEqual 20 + document.getInteger("tmp") mustEqual 4 + } + + "having filter" in { + val queryConverter = MongoSqlQueryHolder("select age, count(*) as tmp, sum(age) from people group by age having count(*) > 10 order by age;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 9 + val document = selectResponse.head + 
document.getInteger("age") mustEqual 21 + document.getInteger("tmp") mustEqual 11 + } + + "with limit 5" in { + val queryConverter = MongoSqlQueryHolder("select age, count(*) as tmp, sum(age) from people group by age having count(*) > 10 order by age limit 5;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 5 + val document = selectResponse.head + document.getInteger("age") mustEqual 21 + document.getInteger("tmp") mustEqual 11 + } + + "with limit 5 and offset 10" in { + val queryConverter = MongoSqlQueryHolder("select age, count(*) as tmp, sum(age) from people group by age having count(*) > 10 order by age limit 5 offset 5;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 4 + val document = selectResponse.head + document.getInteger("age") mustEqual 27 + document.getInteger("tmp") mustEqual 12 + document.getInteger("sum(age)") mustEqual 324 + } + + "destinct" in { + val queryConverter = MongoSqlQueryHolder("select distinct favoriteFruit, count(*) from people order by count(*) desc;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 3 + val document = selectResponse.head + document.getString("favoriteFruit") mustEqual "strawberry" + document.getInteger("count(*)") mustEqual 71 + selectResponse.map(_.getString("favoriteFruit")) mustEqual List("strawberry", "apple", "banana") + } + + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/UpdateSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/UpdateSqlSpec.scala new file mode 100644 index 00000000..7f0257ea --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/UpdateSqlSpec.scala @@ -0,0 +1,73 @@ +package dev.mongocamp.driver.mongodb.sql + +import dev.mongocamp.driver.mongodb.model.{Grade, Score} +import dev.mongocamp.driver.mongodb.test.TestDatabase +import 
dev.mongocamp.driver.mongodb.{GenericObservable, MongoDAO} +import org.bson.types.ObjectId +import org.specs2.mutable.Specification +import org.specs2.specification.BeforeEach +import dev.mongocamp.driver.mongodb._ +class UpdateSqlSpec extends Specification with BeforeEach { + sequential + + object GradeDAO extends MongoDAO[Grade](TestDatabase.provider, "universityGrades") + + override def before(): Unit = { + this.GradeDAO.drop().result() + this.GradeDAO + .insertMany( + List( + Grade(new ObjectId(), 1, 2, List(Score(1.20, "test"), Score(120, "test1"))), + Grade(new ObjectId(), 2, 4, List(Score(10, "test2"), Score(20, "test3"))), + Grade(new ObjectId(), 3, 7, List(Score(10, "test4"), Score(20, "test5"))) + ) + ) + .result() + } + + "MongoSqlQueryHolder" should { + + "update single document" in { + val queryConverter = MongoSqlQueryHolder("UPDATE universityGrades SET column1 = 'hello', classId = 47 WHERE studentId = 1;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getBoolean("wasAcknowledged") mustEqual true + selectResponse.head.getLong("modifiedCount") mustEqual 1 + selectResponse.head.getLong("matchedCount") mustEqual 1 + val grade = GradeDAO.find(Map("studentId" -> 1)).result() + grade.classId mustEqual 47 + val documents = TestDatabase.provider.dao("universityGrades").find(Map("studentId" -> 1)).result() + documents.getLong("classId") mustEqual 47 + documents.getStringValue("column1") mustEqual "hello" + } + + "update all" in { + val queryConverter = MongoSqlQueryHolder("UPDATE universityGrades SET column1 = 'hello', classId = 47;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getBoolean("wasAcknowledged") mustEqual true + selectResponse.head.getLong("modifiedCount") mustEqual 3 + selectResponse.head.getLong("matchedCount") mustEqual 3 + val grade = GradeDAO.find(Map("studentId" -> 
1)).result() + grade.classId mustEqual 47 + val documents = TestDatabase.provider.dao("universityGrades").find(Map("studentId" -> 1)).result() + documents.getLong("classId") mustEqual 47 + documents.getStringValue("column1") mustEqual "hello" + } + + "update multiple with or" in { + val queryConverter = MongoSqlQueryHolder("UPDATE universityGrades SET column1 = 'hello', classId = 47 WHERE classId = 4 or classId = 7;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + selectResponse.head.getBoolean("wasAcknowledged") mustEqual true + selectResponse.head.getLong("modifiedCount") mustEqual 2 + selectResponse.head.getLong("matchedCount") mustEqual 2 + val grade = GradeDAO.find(Map("studentId" -> 2)).result() + grade.classId mustEqual 47 + val documents = TestDatabase.provider.dao("universityGrades").find(Map("studentId" -> 2)).result() + documents.getLong("classId") mustEqual 47 + documents.getStringValue("column1") mustEqual "hello" + } + + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/test/TestDatabase.scala b/src/test/scala/dev/mongocamp/driver/mongodb/test/TestDatabase.scala index 154e38e7..ad0088e6 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/test/TestDatabase.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/test/TestDatabase.scala @@ -17,9 +17,9 @@ object TestDatabase extends LazyLogging { File(ImageDAOTargetPath).createIfNotExists() - private val registry = fromProviders(classOf[Person], classOf[Friend], classOf[CodecTest], classOf[Book]) + private val registry = fromProviders(classOf[Person], classOf[Friend], classOf[CodecTest], classOf[Book], classOf[Grade], classOf[Score]) - val provider = DatabaseProvider.fromPath(configPath = "unit.test.mongo", registry = fromRegistries(registry)) + val provider: DatabaseProvider = DatabaseProvider.fromPath(configPath = "unit.test.mongo", registry = fromRegistries(registry)) def consumeDatabaseChanges(changeStreamDocument: 
ChangeStreamDocument[Document]): Unit = { if (changeStreamDocument.getOperationType != OperationType.INSERT) { From 5f5c1ab1ca04359cdac37dbc1c6f5a879c4347be Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Wed, 17 Apr 2024 08:24:54 +0200 Subject: [PATCH 08/22] chore: 7 dependency updates for mongodb-driver * ch.qos.logback:logback-classic:test : 1.5.3 -> 1.5.5 * com.github.jsqlparser:jsqlparser : 4.3 -> 4.9 * com.github.luben:zstd-jni:provided : 1.5.5-11 -> 1.5.6-2 * de.bwaldvogel:mongo-java-server-h2-backend:provided : 1.44.0 -> 1.45.0 * de.bwaldvogel:mongo-java-server:provided : 1.44.0 -> 1.45.0 * org.mongodb.scala:mongo-scala-driver : 5.0.0 -> 5.0.1 * org.scala-lang.modules:scala-collection-compat : 2.11.0 -> 2.12.0 --- build.sbt | 20 +-- .../mongodb/sql/MongoSqlQueryHolder.scala | 152 ++++++++---------- 2 files changed, 76 insertions(+), 96 deletions(-) diff --git a/build.sbt b/build.sbt index e55683d0..e85423ed 100644 --- a/build.sbt +++ b/build.sbt @@ -63,7 +63,7 @@ resolvers += "Sonatype OSS Snapshots".at("https://oss.sonatype.org/content/repos libraryDependencies += "org.specs2" %% "specs2-core" % "4.20.5" % Test -libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.5.3" % Test +libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.5.5" % Test libraryDependencies += "joda-time" % "joda-time" % "2.12.7" @@ -75,15 +75,15 @@ libraryDependencies ++= Seq( "io.circe" %% "circe-parser" ).map(_ % circeVersion % Test) -libraryDependencies += "org.mongodb.scala" %% "mongo-scala-driver" % "5.0.0" +libraryDependencies += "org.mongodb.scala" %% "mongo-scala-driver" % "5.0.1" libraryDependencies += "org.xerial.snappy" % "snappy-java" % "1.1.10.5" % Provided -libraryDependencies += "com.github.luben" % "zstd-jni" % "1.5.5-11" % Provided +libraryDependencies += "com.github.luben" % "zstd-jni" % "1.5.6-2" % Provided libraryDependencies += "org.apache.lucene" % "lucene-queryparser" % "9.10.0" -val MongoJavaServerVersion = "1.44.0" +val 
MongoJavaServerVersion = "1.45.0" libraryDependencies += "de.bwaldvogel" % "mongo-java-server" % MongoJavaServerVersion % Provided @@ -95,19 +95,11 @@ libraryDependencies += "com.typesafe" % "config" % "1.4.3" libraryDependencies += "com.typesafe.scala-logging" %% "scala-logging" % "3.9.5" -libraryDependencies += "org.scala-lang.modules" %% "scala-collection-compat" % "2.11.0" +libraryDependencies += "org.scala-lang.modules" %% "scala-collection-compat" % "2.12.0" libraryDependencies += "com.vdurmont" % "semver4j" % "3.1.0" -// Mongo SQL Converter -val MongoDbVersion = "4.3.1" -libraryDependencies += "com.github.jsqlparser" % "jsqlparser" % "4.3" -//libraryDependencies += "com.google.code.gson" % "gson" % "2.8.9" -//libraryDependencies += "com.google.guava" % "guava" % "32.0.0-jre" -//libraryDependencies += "commons-io" % "commons-io" % "2.11.0" -//libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.12.0" -//libraryDependencies += "org.mongodb" % "bson" % MongoDbVersion -//libraryDependencies += "org.mongodb" % "mongodb-driver-sync" % MongoDbVersion +libraryDependencies += "com.github.jsqlparser" % "jsqlparser" % "4.9" buildInfoPackage := "dev.mongocamp.driver.mongodb" diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala index db3849b5..86baf52f 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala @@ -5,22 +5,22 @@ import dev.mongocamp.driver.mongodb.database.DatabaseProvider import SQLCommandType.SQLCommandType import com.mongodb.client.model.DropIndexOptions import net.sf.jsqlparser.statement.Statement -import net.sf.jsqlparser.expression.operators.conditional.{AndExpression, OrExpression} +import net.sf.jsqlparser.expression.operators.conditional.{ AndExpression, OrExpression } import 
net.sf.jsqlparser.expression.operators.relational._ -import net.sf.jsqlparser.expression.{Expression, Parenthesis} -import net.sf.jsqlparser.parser.{CCJSqlParser, StreamProvider} -import net.sf.jsqlparser.schema.{Column, Table} +import net.sf.jsqlparser.expression.{ Expression, Parenthesis } +import net.sf.jsqlparser.parser.{ CCJSqlParser, StreamProvider } +import net.sf.jsqlparser.schema.{ Column, Table } import net.sf.jsqlparser.statement.create.index.CreateIndex import net.sf.jsqlparser.statement.delete.Delete import net.sf.jsqlparser.statement.drop.Drop import net.sf.jsqlparser.statement.insert.Insert -import net.sf.jsqlparser.statement.select.{AllColumns, FromItem, PlainSelect, Select, SelectExpressionItem, SelectItem, SubSelect} +import net.sf.jsqlparser.statement.select.{ AllColumns, FromItem, PlainSelect, Select, SelectItem } import net.sf.jsqlparser.statement.truncate.Truncate import net.sf.jsqlparser.statement.update.Update import org.bson.conversions.Bson import org.mongodb.scala.model.IndexOptions import org.mongodb.scala.model.Sorts.ascending -import org.mongodb.scala.{Document, Observable} +import org.mongodb.scala.{ Document, Observable } import java.util.concurrent.TimeUnit import scala.collection.mutable @@ -29,7 +29,7 @@ import scala.jdk.CollectionConverters._ class MongoSqlQueryHolder { private val aggregatePipeline: ArrayBuffer[Document] = ArrayBuffer() - private var sqlTable: Table = _ + private var sqlTable: Table = _ private var alias: Option[String] = None private var sqlCommandType: SQLCommandType = _ private var updateOrDeleteFilter: Option[Map[String, Any]] = None @@ -61,7 +61,7 @@ class MongoSqlQueryHolder { convertCreateIndexStatement(createIndex) } else if (classOf[Drop].isAssignableFrom(statement.getClass)) { - val drop = statement.asInstanceOf[Drop] + val drop = statement.asInstanceOf[Drop] drop.getType.toUpperCase match { case "TABLE" => sqlCommandType = SQLCommandType.DropTable @@ -131,8 +131,11 @@ class MongoSqlQueryHolder { case 
SQLCommandType.DropIndex => val collectionName = sqlTable.getSchemaName - val indexName = sqlTable.getName - provider.dao(collectionName).dropIndexForName(indexName, new DropIndexOptions().maxTime(1, TimeUnit.MINUTES)).map(_ => org.mongodb.scala.Document("indexName" -> indexName)) + val indexName = sqlTable.getName + provider + .dao(collectionName) + .dropIndexForName(indexName, new DropIndexOptions().maxTime(1, TimeUnit.MINUTES)) + .map(_ => org.mongodb.scala.Document("indexName" -> indexName)) case SQLCommandType.DropTable => provider.dao(getCollection).drop().map(_ => org.mongodb.scala.Document("wasAcknowledged" -> true)) @@ -209,9 +212,9 @@ class MongoSqlQueryHolder { case e: Parenthesis => parseWhere(e.getExpression, queryMap) case e: InExpression => - val value = e.getRightItemsList match { - case l: ExpressionList => l.getExpressions.asScala.map(convertValue) - case i: ItemsList => throw new IllegalArgumentException(s"${i.getClass.getSimpleName} not supported") + val value = e.getRightExpression match { + case l: ParenthesedExpressionList[Expression] => l.asScala.map(convertValue) + case i: Any => throw new IllegalArgumentException(s"${i.getClass.getSimpleName} not supported") } val functionName = if (e.isNot) "$nin" else "$in" queryMap.put(e.getLeftExpression.toString, Map(functionName -> value)) @@ -238,20 +241,17 @@ class MongoSqlQueryHolder { val groupId = mutable.Map[String, Any]() val group = mutable.Map[String, Any]() groupBy.foreach(g => groupId += g -> ("$" + g)) - selectItems.foreach { - case e: SelectExpressionItem => - val expressionName = e.getExpression.toString - if (expressionName.contains("count")) { - group += expressionName -> Map("$sum" -> 1) - } - else { - if (!groupBy.contains(expressionName)) { - val espr = expressionName.split('(').map(_.trim.replace(")", "")).map(s => ("$" + s)) - group += expressionName -> Map(espr.head -> espr.last) - } + selectItems.foreach { case e: SelectItem[Expression] => + val expressionName = 
e.getExpression.toString + if (expressionName.contains("count")) { + group += expressionName -> Map("$sum" -> 1) + } + else { + if (!groupBy.contains(expressionName)) { + val espr = expressionName.split('(').map(_.trim.replace(")", "")).map(s => ("$" + s)) + group += expressionName -> Map(espr.head -> espr.last) } - case e: SelectItem => - e.toString + } } val groupMap = Map("_id" -> groupId) ++ group.toMap ++ groupId.keys.map(s => s -> Map("$first" -> ("$" + s))).toMap aggregatePipeline += Map("$group" -> groupMap) @@ -329,10 +329,10 @@ class MongoSqlQueryHolder { "$match" -> filterQuery ) } - val hasAllColumns = selectItems.exists(_.isInstanceOf[AllColumns]) + val hasAllColumns = selectItems.exists(i => i.toString.equalsIgnoreCase("*")) if (selectItems.nonEmpty && !hasAllColumns) { val addFields = selectItems.filter { - case e: SelectExpressionItem => + case e: SelectItem[Expression] => e.getAlias match { case null => false case _ => @@ -341,9 +341,10 @@ class MongoSqlQueryHolder { case _ => false } val fields: Map[String, Any] = addFields - .map(_.asInstanceOf[SelectExpressionItem]) + .map(_.asInstanceOf[SelectItem[Expression]]) .map(e => e.getAlias.getName -> ("$" + e.getExpression.toString)) .toMap + if (fields.nonEmpty) { aggregatePipeline += Map("$addFields" -> fields) } @@ -351,7 +352,7 @@ class MongoSqlQueryHolder { "$project" -> selectItems .filterNot(s => s.toString.equalsIgnoreCase("*")) .map { - case e: SelectExpressionItem => + case e: SelectItem[Expression] => e.getAlias match { case null => e.getExpression.toString -> 1 @@ -362,6 +363,7 @@ class MongoSqlQueryHolder { } .toMap ) + } if (aliasList.nonEmpty) { aliasList += "$$ROOT" @@ -371,23 +373,20 @@ class MongoSqlQueryHolder { } Option(plainSelect.getDistinct).foreach { distinct => val groupMap: mutable.Map[String, Any] = mutable.Map() - selectItems.foreach { - case e: SelectExpressionItem => - val expressionName = e.getExpression.toString - if (expressionName.contains("count")) { - groupMap += 
expressionName -> Map("$sum" -> 1) + selectItems.foreach { case e: SelectItem[Expression] => + val expressionName = e.getExpression.toString + if (expressionName.contains("count")) { + groupMap += expressionName -> Map("$sum" -> 1) + } + else { + val espr = expressionName.split('(').map(_.trim.replace(")", "")).map(s => ("$" + s)) + if (espr.head.equalsIgnoreCase(espr.last)) { + groupMap += expressionName -> Map("$first" -> espr.last) } else { - val espr = expressionName.split('(').map(_.trim.replace(")", "")).map(s => ("$" + s)) - if (espr.head.equalsIgnoreCase(espr.last)) { - groupMap += expressionName -> Map("$first" -> espr.last) - } - else { - groupMap += expressionName -> Map(espr.head -> espr.last) - } + groupMap += expressionName -> Map(espr.head -> espr.last) } - case e: SelectItem => - e.toString + } } groupMap.put("_id", groupMap.keys.map(s => s -> ("$" + s)).toMap) aggregatePipeline += Map("$group" -> groupMap.toMap) @@ -408,37 +407,32 @@ class MongoSqlQueryHolder { } private def convertInsertStatement(insert: Insert): Unit = { - insert.getItemsList match { - case i: ExpressionList => - val expressionList = i.getExpressions.asScala.toList - val document = mutable.Map[String, Any]() - var index = 0 - if (insert.getColumns == null) { - throw new IllegalArgumentException("column names must be specified") - } - insert.getColumns.asScala - .map(_.getColumnName) - .foreach(colName => { - document += colName -> convertValue(expressionList(index)) - index += 1 - }) + val columns: List[String] = Option(insert.getColumns).map(_.asScala).getOrElse(List.empty).map(_.getColumnName).toList + if (columns.isEmpty) { + throw new IllegalArgumentException("column names must be specified") + } + var singleDocumentCreated = false + val baseExpressionList: ExpressionList[_] = insert.getSelect.getValues.getExpressions + baseExpressionList.asScala.foreach { + case e: ParenthesedExpressionList[Expression] => + val document = mutable.Map[String, Any]() + columns.foreach(colName 
=> document += colName -> convertValue(e.get(columns.indexOf(colName)))) documentsToInsert += document.toMap - case i: MultiExpressionList => - i.getExpressionLists.asScala.foreach { el => - val expressionList = el.getExpressions.asScala.toList - val document = mutable.Map[String, Any]() - var index = 0 - insert.getColumns.asScala - .map(_.getColumnName) - .foreach(colName => { - document += colName -> convertValue(expressionList(index)) - index += 1 - }) - documentsToInsert += document.toMap + case _ => + try { + if (!singleDocumentCreated) { + val document = mutable.Map[String, Any]() + columns.foreach(colName => document += colName -> convertValue(baseExpressionList.get(columns.indexOf(colName)).asInstanceOf[Expression])) + documentsToInsert += document.toMap + } + singleDocumentCreated = true + } + catch { + case _: Throwable => + throw new IllegalArgumentException("not supported expression list") } - case i: ItemsList => - throw new IllegalArgumentException(s"not supported items list of type ${i.getClass.getSimpleName}") } + sqlCommandType = SQLCommandType.Insert sqlTable = insert.getTable } @@ -458,18 +452,12 @@ class MongoSqlQueryHolder { .map(_.asScala) .getOrElse(List.empty) .foreach(set => { - val expressionList = set.getExpressions.asScala.toList - var index = 0 - if (set.getColumns == null) { + val columns: List[String] = Option(set.getColumns).map(_.asScala).getOrElse(List.empty).map(_.getColumnName).toList + if (columns.isEmpty) { throw new IllegalArgumentException("column names must be specified") } - set.getColumns.asScala - .map(_.getColumnName) - .foreach(colName => { - updateSetElement += colName -> convertValue(expressionList(index)) - index += 1 - }) - + columns + .foreach(colName => updateSetElement += colName -> convertValue(set.getValue(columns.indexOf(colName)))) }) if (updateSetElement.nonEmpty) { this.setElement = Some(updateSetElement.toMap) From 0c8861a677416d972240d6ac30c6635ab73d1333 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: 
Fri, 26 Apr 2024 23:13:19 +0200 Subject: [PATCH 09/22] refactor: added serverAddressList as connection option --- .../mongodb/database/ConfigHelper.scala | 43 +++++++++ .../driver/mongodb/database/MongoConfig.scala | 91 ++++++------------- .../mongodb/pagination/MongoPagination.scala | 1 + .../mongodb/server/H2BackendConfig.scala | 3 + .../driver/mongodb/server/ServerBackend.scala | 6 ++ .../driver/mongodb/server/ServerConfig.scala | 18 ++-- .../mongodb/sync/MongoSyncException.scala | 3 + .../mongodb/sync/MongoSyncOperation.scala | 56 +++--------- .../driver/mongodb/sync/MongoSyncResult.scala | 14 +++ .../driver/mongodb/sync/SyncDirection.scala | 6 ++ .../driver/mongodb/sync/SyncStrategy.scala | 6 ++ 11 files changed, 128 insertions(+), 119 deletions(-) create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/database/ConfigHelper.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/server/H2BackendConfig.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/server/ServerBackend.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncException.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncResult.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/sync/SyncDirection.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/sync/SyncStrategy.scala diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/ConfigHelper.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/ConfigHelper.scala new file mode 100644 index 00000000..3551705b --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/ConfigHelper.scala @@ -0,0 +1,43 @@ +package dev.mongocamp.driver.mongodb.database + +import com.typesafe.config.{ Config, ConfigFactory } + +trait ConfigHelper { + val conf: Config = ConfigFactory.load() + + def stringConfig(configPath: String, key: String, default: String = ""): Option[String] = { + if 
(conf.hasPath("%s.%s".format(configPath, key))) { + val str = conf.getString("%s.%s".format(configPath, key)) + if (str.nonEmpty) { + Some(str) + } + else { + None + } + } + else if (default.nonEmpty) { + Some(default) + } + else { + None + } + } + + def intConfig(configPath: String, key: String, default: Int = 0): Int = { + if (conf.hasPath("%s.%s".format(configPath, key))) { + conf.getInt("%s.%s".format(configPath, key)) + } + else { + default + } + } + + def booleanConfig(configPath: String, key: String, default: Boolean = false): Boolean = { + if (conf.hasPath("%s.%s".format(configPath, key))) { + conf.getBoolean("%s.%s".format(configPath, key)) + } + else { + default + } + } +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala index c0c90657..7f2d3368 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala @@ -4,8 +4,8 @@ import java.util.concurrent.TimeUnit import com.mongodb.MongoCompressor import com.mongodb.MongoCredential.createCredential import com.mongodb.event.{CommandListener, ConnectionPoolListener} -import dev.mongocamp.driver.mongodb.database.MongoConfig._ import com.typesafe.config.{Config, ConfigFactory} +import dev.mongocamp.driver.mongodb.database.MongoConfig.{CompressionSnappy, CompressionZlib, CompressionZstd, DefaultApplicationName, DefaultAuthenticationDatabaseName, DefaultHost, DefaultPort} import org.mongodb.scala.connection._ import org.mongodb.scala.{MongoClientSettings, MongoCredential, ServerAddress} @@ -21,10 +21,11 @@ case class MongoConfig( password: Option[String] = None, authDatabase: String = DefaultAuthenticationDatabaseName, poolOptions: MongoPoolOptions = MongoPoolOptions(), - compressors: List[String] = List(), - connectionPoolListener: List[ConnectionPoolListener] = List(), - commandListener: List[CommandListener] = 
List(), - customClientSettings: Option[MongoClientSettings] = None + compressors: List[String] = List.empty, + connectionPoolListener: List[ConnectionPoolListener] = List.empty, + commandListener: List[CommandListener] = List.empty, + customClientSettings: Option[MongoClientSettings] = None, + serverAddressList: List[ServerAddress] = List.empty ) { val clientSettings: MongoClientSettings = { @@ -32,7 +33,8 @@ case class MongoConfig( customClientSettings.get } else { - val clusterSettings: ClusterSettings = ClusterSettings.builder().hosts(List(new ServerAddress(host, port)).asJava).build() + val internalServerAddressList = if (serverAddressList.nonEmpty) serverAddressList else List(new ServerAddress(host, port)) + val clusterSettings: ClusterSettings = ClusterSettings.builder().hosts(internalServerAddressList.asJava).build() val connectionPoolSettingsBuilder = ConnectionPoolSettings .builder() @@ -47,13 +49,13 @@ case class MongoConfig( val compressorList = new ArrayBuffer[MongoCompressor]() compressors.foreach(compression => { - if (ComressionSnappy.equalsIgnoreCase(compression)) { + if (CompressionSnappy.equalsIgnoreCase(compression)) { compressorList.+=(MongoCompressor.createSnappyCompressor()) } - else if (ComressionZlib.equalsIgnoreCase(compression)) { + else if (CompressionZlib.equalsIgnoreCase(compression)) { compressorList.+=(MongoCompressor.createZlibCompressor()) } - else if (ComressionZstd.equalsIgnoreCase(compression)) { + else if (CompressionZstd.equalsIgnoreCase(compression)) { compressorList.+=(MongoCompressor.createZstdCompressor()) } }) @@ -78,61 +80,21 @@ case class MongoConfig( } } -trait ConfigHelper { - val conf: Config = ConfigFactory.load() - - def stringConfig(configPath: String, key: String, default: String = ""): Option[String] = { - if (conf.hasPath("%s.%s".format(configPath, key))) { - val str = conf.getString("%s.%s".format(configPath, key)) - if (str.nonEmpty) { - Some(str) - } - else { - None - } - } - else if (default.nonEmpty) { - 
Some(default) - } - else { - None - } - } - - def intConfig(configPath: String, key: String, default: Int = 0): Int = { - if (conf.hasPath("%s.%s".format(configPath, key))) { - conf.getInt("%s.%s".format(configPath, key)) - } - else { - default - } - } - - def booleanConfig(configPath: String, key: String, default: Boolean = false): Boolean = { - if (conf.hasPath("%s.%s".format(configPath, key))) { - conf.getBoolean("%s.%s".format(configPath, key)) - } - else { - default - } - } -} - object MongoConfig extends ConfigHelper { - val DefaultHost = "127.0.0.1" - val DefaultPort = 27017 + val DefaultHost = "127.0.0.1" + val DefaultPort = 27017 val DefaultAuthenticationDatabaseName = "admin" - val DefaultApplicationName = "mongocampdb-app" + val DefaultApplicationName = "mongocampdb-app" - val DefaultPoolMaxConnectionIdleTime = 60 - val DefaultPoolMaxSize = 50 - val DefaultPoolMinSize = 0 - val DefaultPoolMaxWaitQueueSize = 500 + val DefaultPoolMaxConnectionIdleTime = 60 + val DefaultPoolMaxSize = 50 + val DefaultPoolMinSize = 0 + val DefaultPoolMaxWaitQueueSize = 500 val DefaultPoolMaintenanceInitialDelay = 0 - val ComressionSnappy = "snappy" - val ComressionZlib = "zlib" - val ComressionZstd = "zstd" + val CompressionSnappy = "snappy" + val CompressionZlib = "zlib" + val CompressionZstd = "zstd" val DefaultConfigPathPrefix = "mongodb" @@ -158,11 +120,11 @@ object MongoConfig extends ConfigHelper { } } - val host = stringConfig(configPath, "host", DefaultHost).get - val database = stringConfig(configPath, "database").get - val userName = stringConfig(configPath, "userName") - val password = stringConfig(configPath, "password") - val authDatabase = stringConfig(configPath, "authDatabase", DefaultAuthenticationDatabaseName).get + val host = stringConfig(configPath, "host", DefaultHost).get + val database = stringConfig(configPath, "database").get + val userName = stringConfig(configPath, "userName") + val password = stringConfig(configPath, "password") + val authDatabase 
= stringConfig(configPath, "authDatabase", DefaultAuthenticationDatabaseName).get val applicationName = stringConfig(configPath, "applicationName", DefaultApplicationName).get val poolOptions = MongoPoolOptions( @@ -175,4 +137,5 @@ object MongoConfig extends ConfigHelper { MongoConfig(database, host, port, applicationName, userName, password, authDatabase, poolOptions, compressors) } + } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPagination.scala b/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPagination.scala index dbeee833..569a3c8b 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPagination.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPagination.scala @@ -1,4 +1,5 @@ package dev.mongocamp.driver.mongodb.pagination + import dev.mongocamp.driver.mongodb.database.ConfigHelper trait MongoPagination[A <: Any] extends ConfigHelper { diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/server/H2BackendConfig.scala b/src/main/scala/dev/mongocamp/driver/mongodb/server/H2BackendConfig.scala new file mode 100644 index 00000000..a9c5a448 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/server/H2BackendConfig.scala @@ -0,0 +1,3 @@ +package dev.mongocamp.driver.mongodb.server + +case class H2BackendConfig(inMemory: Boolean = true, path: Option[String] = None) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/server/ServerBackend.scala b/src/main/scala/dev/mongocamp/driver/mongodb/server/ServerBackend.scala new file mode 100644 index 00000000..38652380 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/server/ServerBackend.scala @@ -0,0 +1,6 @@ +package dev.mongocamp.driver.mongodb.server + +object ServerBackend extends Enumeration { + type ServerBackend = Value + val Memory, H2 = Value +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/server/ServerConfig.scala b/src/main/scala/dev/mongocamp/driver/mongodb/server/ServerConfig.scala 
index 9181afd9..387a7e3c 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/server/ServerConfig.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/server/ServerConfig.scala @@ -11,13 +11,6 @@ case class ServerConfig( h2BackendConfig: Option[H2BackendConfig] = None ) -case class H2BackendConfig(inMemory: Boolean = true, path: Option[String] = None) - -object ServerBackend extends Enumeration { - type ServerBackend = Value - val Memory, H2 = Value -} - object ServerConfig extends ConfigHelper { val DefaultServerConfigPathPrefix = "local.mongodb.server" @@ -26,10 +19,12 @@ object ServerConfig extends ConfigHelper { val DefaultPort = 28018 def serverBackendFromString(backendName: String): ServerBackend.Value = - if (ServerBackend.H2.toString.toLowerCase.equals(backendName.toLowerCase)) + if (ServerBackend.H2.toString.toLowerCase.equals(backendName.toLowerCase)) { ServerBackend.H2 - else + } + else { ServerBackend.Memory + } def fromPath(configPath: String = DefaultServerConfigPathPrefix): ServerConfig = { @@ -47,11 +42,12 @@ object ServerConfig extends ConfigHelper { val path = stringConfig(configPath, "h2.path") Some(H2BackendConfig(inMemory, path)) } - else + else { None + } } ServerConfig(name, host, port, serverBackend, h2BackendConfig) - } + } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncException.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncException.scala new file mode 100644 index 00000000..937bc7ab --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncException.scala @@ -0,0 +1,3 @@ +package dev.mongocamp.driver.mongodb.sync + +case class MongoSyncException(message: String) extends Exception(message) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncOperation.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncOperation.scala index 0f5e34b8..0873cdaf 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncOperation.scala +++ 
b/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncOperation.scala @@ -2,7 +2,7 @@ package dev.mongocamp.driver.mongodb.sync import com.typesafe.scalalogging.LazyLogging import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.database.{ ConfigHelper, DatabaseProvider } +import dev.mongocamp.driver.mongodb.database.{ConfigHelper, DatabaseProvider} import dev.mongocamp.driver.mongodb.sync.SyncDirection.SyncDirection import dev.mongocamp.driver.mongodb.sync.SyncStrategy.SyncStrategy import org.mongodb.scala.Document @@ -12,18 +12,6 @@ import org.mongodb.scala.model.Updates._ import java.util.Date -object SyncStrategy extends Enumeration { - type SyncStrategy = Value - val SyncAll = Value -} - -object SyncDirection extends Enumeration { - type SyncDirection = Value - val SourceToTarget, TargetToSource, TwoWay = Value -} - -case class MongoSyncException(message: String) extends Exception(message) - case class MongoSyncOperation( collectionName: String, syncDirection: SyncDirection = SyncDirection.SourceToTarget, @@ -35,10 +23,8 @@ case class MongoSyncOperation( def excecute(source: DatabaseProvider, target: DatabaseProvider): List[MongoSyncResult] = try { - val sourceInfos: Seq[Document] = - source.dao(collectionName).find().projection(includes).results(MongoSyncOperation.MaxWait) - val targetInfos: Seq[Document] = - target.dao(collectionName).find().projection(includes).results(MongoSyncOperation.MaxWait) + val sourceInfos: Seq[Document] = source.dao(collectionName).find().projection(includes).results(MongoSyncOperation.MaxWait) + val targetInfos: Seq[Document] = target.dao(collectionName).find().projection(includes).results(MongoSyncOperation.MaxWait) if (SyncDirection.SourceToTarget == syncDirection) { val diff = sourceInfos.diff(targetInfos) @@ -48,13 +34,15 @@ case class MongoSyncOperation( val diff = targetInfos.diff(sourceInfos) List(syncInternal(target, source, sourceInfos.size, diff)) } - else if (SyncDirection.TwoWay == 
syncDirection) + else if (SyncDirection.TwoWay == syncDirection) { List( syncInternal(source, target, targetInfos.size, sourceInfos.diff(targetInfos)), syncInternal(target, source, sourceInfos.size, targetInfos.diff(sourceInfos)) ) - else + } + else { List(MongoSyncResult(collectionName)) + } } catch { case e: Exception => @@ -87,7 +75,7 @@ case class MongoSyncOperation( MongoSyncResult( collectionName, syncDate, - true, + acknowleged = true, filteredDocumentsToSync.size, countBefore, countAfter, @@ -100,29 +88,9 @@ object MongoSyncOperation extends ConfigHelper { val MaxWaitDefault = 600 val MaxWait: Int = intConfig(configPath = "dev.mongocamp.mongodb.sync", key = "maxWait", default = MaxWaitDefault) - val SyncColumnLastSync: String = - stringConfig(configPath = "dev.mongocamp.mongodb.sync", key = "syncColumnLastSync", default = "_lastSync").get - val SyncColumnLastUpdate: String = - stringConfig(configPath = "dev.mongocamp.mongodb.sync", key = "syncColumnLastUpdate", default = "_lastUpdate").get + val SyncColumnLastSync: String = stringConfig(configPath = "dev.mongocamp.mongodb.sync", key = "syncColumnLastSync", default = "_lastSync").get + val SyncColumnLastUpdate: String = stringConfig(configPath = "dev.mongocamp.mongodb.sync", key = "syncColumnLastUpdate", default = "_lastUpdate").get - val WriteSyncLogOnMaster = booleanConfig(configPath = "dev.mongocamp.mongodb.sync", key = "writeSyncLogOnMaster") - val SyncLogTableName: String = - stringConfig( - configPath = "dev.mongocamp.mongodb.sync", - key = "syncLogTableName", - default = "mongodb-sync-log" - ).get + val WriteSyncLogOnMaster = booleanConfig(configPath = "dev.mongocamp.mongodb.sync", key = "writeSyncLogOnMaster") + val SyncLogTableName: String = stringConfig(configPath = "dev.mongocamp.mongodb.sync", key = "syncLogTableName", default = "mongodb-sync-log").get } - -//case class MongoSyncInfo(id: Any = new ObjectId(), syncDate: Date = new Date(), updateDate: Date = new Date()) - -case class 
MongoSyncResult( - collectionName: String, - syncDate: Date = new Date(), - acknowleged: Boolean = false, - synced: Int = -1, - countBefore: Int = -1, - countAfter: Int = -1, - syncTime: Long = -1, - exception: Option[Exception] = None -) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncResult.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncResult.scala new file mode 100644 index 00000000..cb6ec253 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncResult.scala @@ -0,0 +1,14 @@ +package dev.mongocamp.driver.mongodb.sync + +import java.util.Date + +case class MongoSyncResult( + collectionName: String, + syncDate: Date = new Date(), + acknowleged: Boolean = false, + synced: Int = -1, + countBefore: Int = -1, + countAfter: Int = -1, + syncTime: Long = -1, + exception: Option[Exception] = None +) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sync/SyncDirection.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sync/SyncDirection.scala new file mode 100644 index 00000000..391891b5 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sync/SyncDirection.scala @@ -0,0 +1,6 @@ +package dev.mongocamp.driver.mongodb.sync + +object SyncDirection extends Enumeration { + type SyncDirection = Value + val SourceToTarget, TargetToSource, TwoWay = Value +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sync/SyncStrategy.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sync/SyncStrategy.scala new file mode 100644 index 00000000..b44f5aba --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sync/SyncStrategy.scala @@ -0,0 +1,6 @@ +package dev.mongocamp.driver.mongodb.sync + +object SyncStrategy extends Enumeration { + type SyncStrategy = Value + val SyncAll = Value +} From 0408c7e2cd9d581775697f7bc30cf1856c2d938a Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Fri, 26 Apr 2024 23:15:51 +0200 Subject: [PATCH 10/22] feat: implemented jdbc driver methods --- 
.../driver/mongodb/jdbc/MongoJdbcDriver.scala | 49 +++++++++---------- 1 file changed, 24 insertions(+), 25 deletions(-) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala index 4150c8d4..5b48b83d 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala @@ -1,46 +1,45 @@ package dev.mongocamp.driver.mongodb.jdbc -import com.mongodb.MongoCredential.createCredential import com.vdurmont.semver4j.Semver import dev.mongocamp.driver.mongodb.BuildInfo -import org.mongodb.scala.MongoClient.DEFAULT_CODEC_REGISTRY -import org.mongodb.scala.{ConnectionString, MongoClient, MongoClientSettings, MongoCredential} +import dev.mongocamp.driver.mongodb.database.{ DatabaseProvider, MongoConfig } +import org.mongodb.scala.{ ConnectionString, ServerAddress } -import java.sql.{Connection, DriverPropertyInfo} +import java.sql.{ Connection, DriverPropertyInfo } import java.util.Properties import java.util.logging.Logger +import scala.jdk.CollectionConverters.CollectionHasAsScala class MongoJdbcDriver extends java.sql.Driver { private lazy val semVer = new Semver(BuildInfo.version) - - /** - * Connect to the database using a URL like : - * jdbc:mongodb://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]] - * The URL excepting the jdbc: prefix is passed as it is to the MongoDb native Java driver. - */ + /** Connect to the database using a URL like : jdbc:mongodb://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]] The + * URL excepting the jdbc: prefix is passed as it is to the MongoDb native Java driver. 
+ */ override def connect(url: String, info: Properties): Connection = { if (url == null || !acceptsURL(url)) { return null } val connectionUrl = url.replaceFirst("^jdbc:", "") - val username = info.getProperty("user") - val password = info.getProperty("password") - - val builder = MongoClientSettings - .builder() - .applyConnectionString(new ConnectionString(connectionUrl)) - .codecRegistry(DEFAULT_CODEC_REGISTRY) - - if (!username.equalsIgnoreCase("") && !password.equalsIgnoreCase("")) { - val credential: MongoCredential = createCredential(username, "admin", password.toCharArray) - builder.credential(credential).build() - } - - val client: MongoClient = MongoClient(builder.build()) - new MongoJdbcConnection(client) + val username = Option(info.getProperty("user")).filter(_.trim.nonEmpty) + val password = Option(info.getProperty("password")).filter(_.trim.nonEmpty) + + val string = new ConnectionString(connectionUrl) + val provider = DatabaseProvider( + MongoConfig( + string.getDatabase, + MongoConfig.DefaultHost, + MongoConfig.DefaultPort, + string.getApplicationName, + username, + password, + string.getDatabase, + serverAddressList = string.getHosts.asScala.toList.map(h => new ServerAddress(h)) + ) + ) + new MongoJdbcConnection(provider) } override def acceptsURL(url: String): Boolean = { From 0a7a7de71071c4f6eaeda16fd6528b0fdf578df0 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Fri, 3 May 2024 22:37:53 +0200 Subject: [PATCH 11/22] feat: added show databases and show tables implementation --- .../mongodb/sql/MongoSqlQueryHolder.scala | 32 +++++++++++++++---- .../driver/mongodb/sql/SQLCommandType.scala | 2 +- .../driver/mongodb/sql/OtherSqlSpec.scala | 30 +++++++++++++++++ 3 files changed, 56 insertions(+), 8 deletions(-) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala index 86baf52f..1cf7be78 100644 --- 
a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala @@ -4,23 +4,24 @@ import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.database.DatabaseProvider import SQLCommandType.SQLCommandType import com.mongodb.client.model.DropIndexOptions -import net.sf.jsqlparser.statement.Statement -import net.sf.jsqlparser.expression.operators.conditional.{ AndExpression, OrExpression } +import net.sf.jsqlparser.statement.{ShowStatement, Statement, UnsupportedStatement} +import net.sf.jsqlparser.expression.operators.conditional.{AndExpression, OrExpression} import net.sf.jsqlparser.expression.operators.relational._ -import net.sf.jsqlparser.expression.{ Expression, Parenthesis } -import net.sf.jsqlparser.parser.{ CCJSqlParser, StreamProvider } -import net.sf.jsqlparser.schema.{ Column, Table } +import net.sf.jsqlparser.expression.{Expression, Parenthesis} +import net.sf.jsqlparser.parser.{CCJSqlParser, StreamProvider} +import net.sf.jsqlparser.schema.{Column, Table} import net.sf.jsqlparser.statement.create.index.CreateIndex import net.sf.jsqlparser.statement.delete.Delete import net.sf.jsqlparser.statement.drop.Drop import net.sf.jsqlparser.statement.insert.Insert -import net.sf.jsqlparser.statement.select.{ AllColumns, FromItem, PlainSelect, Select, SelectItem } +import net.sf.jsqlparser.statement.select.{AllColumns, FromItem, PlainSelect, Select, SelectItem} +import net.sf.jsqlparser.statement.show.ShowTablesStatement import net.sf.jsqlparser.statement.truncate.Truncate import net.sf.jsqlparser.statement.update.Update import org.bson.conversions.Bson import org.mongodb.scala.model.IndexOptions import org.mongodb.scala.model.Sorts.ascending -import org.mongodb.scala.{ Document, Observable } +import org.mongodb.scala.{Document, Observable} import java.util.concurrent.TimeUnit import scala.collection.mutable @@ -84,6 +85,19 @@ class MongoSqlQueryHolder { 
sqlCommandType = SQLCommandType.Delete sqlTable = truncate.getTable } + else if (classOf[ShowTablesStatement].isAssignableFrom(statement.getClass)) { + sqlCommandType = SQLCommandType.ShowTables + } + else if (classOf[UnsupportedStatement].isAssignableFrom(statement.getClass)) { + val unsupportedStatement = statement.asInstanceOf[UnsupportedStatement] + val isShowDatabases = unsupportedStatement.toString.toLowerCase.contains("show databases") + val isShowSchemas = unsupportedStatement.toString.toLowerCase.contains("show schemas") + if (isShowDatabases | isShowSchemas) { + sqlCommandType = SQLCommandType.ShowDatabases + } else { + throw new IllegalArgumentException("not supported sql command type") + } + } else { throw new IllegalArgumentException("not supported sql command type") } @@ -137,6 +151,10 @@ class MongoSqlQueryHolder { .dropIndexForName(indexName, new DropIndexOptions().maxTime(1, TimeUnit.MINUTES)) .map(_ => org.mongodb.scala.Document("indexName" -> indexName)) + case SQLCommandType.ShowTables => + provider.collections() + case SQLCommandType.ShowDatabases => + provider.databases case SQLCommandType.DropTable => provider.dao(getCollection).drop().map(_ => org.mongodb.scala.Document("wasAcknowledged" -> true)) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sql/SQLCommandType.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sql/SQLCommandType.scala index 09e0b6fe..ba60512b 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/sql/SQLCommandType.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sql/SQLCommandType.scala @@ -6,6 +6,6 @@ object SQLCommandType extends Enumeration { type SQLCommandType = Value - val Delete, Select, Update, Insert, CreateIndex, DropTable, DropIndex, DropDatabase = Value + val Delete, Select, Update, Insert, CreateIndex, DropTable, DropIndex, DropDatabase, ShowDatabases, ShowTables = Value } \ No newline at end of file diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala 
b/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala index 1ba85e1e..23f3caaf 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala @@ -95,6 +95,36 @@ class OtherSqlSpec extends PersonSpecification with BeforeEach{ indices.find(_.getString("name") == "student_idx") must beNone } + "show tables" in { + val queryConverter = MongoSqlQueryHolder("show tables;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size must be greaterThanOrEqualTo(1) + selectResponse.head.getStringValue("name") mustEqual "mongo-sync-log" + } + + "show databases" in { + val queryConverter = MongoSqlQueryHolder("SHOW DATABASES;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size must be greaterThanOrEqualTo(1) + } + + "show schemas" in { + val queryConverter = MongoSqlQueryHolder("SHOW SCHEMAS;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size must be greaterThanOrEqualTo(1) + } + + "show databases" in { + val queryConverter = MongoSqlQueryHolder("SHOW databases;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size must be greaterThanOrEqualTo(1) + } + + "show schemas" in { + val queryConverter = MongoSqlQueryHolder("show SCHEMAS;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size must be greaterThanOrEqualTo(1) + } } } From cf5aa462ab64554bf96aaf3380451841436c1971 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Sat, 4 May 2024 16:13:11 +0200 Subject: [PATCH 12/22] feat: execute sql with database selection --- .../mongodb/sql/MongoSqlQueryHolder.scala | 26 +++++++++++-------- .../driver/mongodb/sql/SelectSqlSpec.scala | 8 ++++++ 2 files changed, 23 insertions(+), 11 deletions(-) diff --git 
a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala index 1cf7be78..06c8d892 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala @@ -4,24 +4,25 @@ import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.database.DatabaseProvider import SQLCommandType.SQLCommandType import com.mongodb.client.model.DropIndexOptions -import net.sf.jsqlparser.statement.{ShowStatement, Statement, UnsupportedStatement} -import net.sf.jsqlparser.expression.operators.conditional.{AndExpression, OrExpression} +import dev.mongocamp.driver.mongodb.database.DatabaseProvider.CollectionSeparator +import net.sf.jsqlparser.statement.{ ShowStatement, Statement, UnsupportedStatement } +import net.sf.jsqlparser.expression.operators.conditional.{ AndExpression, OrExpression } import net.sf.jsqlparser.expression.operators.relational._ -import net.sf.jsqlparser.expression.{Expression, Parenthesis} -import net.sf.jsqlparser.parser.{CCJSqlParser, StreamProvider} -import net.sf.jsqlparser.schema.{Column, Table} +import net.sf.jsqlparser.expression.{ Expression, Parenthesis } +import net.sf.jsqlparser.parser.{ CCJSqlParser, StreamProvider } +import net.sf.jsqlparser.schema.{ Column, Table } import net.sf.jsqlparser.statement.create.index.CreateIndex import net.sf.jsqlparser.statement.delete.Delete import net.sf.jsqlparser.statement.drop.Drop import net.sf.jsqlparser.statement.insert.Insert -import net.sf.jsqlparser.statement.select.{AllColumns, FromItem, PlainSelect, Select, SelectItem} +import net.sf.jsqlparser.statement.select.{ AllColumns, FromItem, PlainSelect, Select, SelectItem } import net.sf.jsqlparser.statement.show.ShowTablesStatement import net.sf.jsqlparser.statement.truncate.Truncate import net.sf.jsqlparser.statement.update.Update import org.bson.conversions.Bson 
import org.mongodb.scala.model.IndexOptions import org.mongodb.scala.model.Sorts.ascending -import org.mongodb.scala.{Document, Observable} +import org.mongodb.scala.{ Document, Observable } import java.util.concurrent.TimeUnit import scala.collection.mutable @@ -90,11 +91,12 @@ class MongoSqlQueryHolder { } else if (classOf[UnsupportedStatement].isAssignableFrom(statement.getClass)) { val unsupportedStatement = statement.asInstanceOf[UnsupportedStatement] - val isShowDatabases = unsupportedStatement.toString.toLowerCase.contains("show databases") - val isShowSchemas = unsupportedStatement.toString.toLowerCase.contains("show schemas") + val isShowDatabases = unsupportedStatement.toString.toLowerCase.contains("show databases") + val isShowSchemas = unsupportedStatement.toString.toLowerCase.contains("show schemas") if (isShowDatabases | isShowSchemas) { sqlCommandType = SQLCommandType.ShowDatabases - } else { + } + else { throw new IllegalArgumentException("not supported sql command type") } } @@ -104,7 +106,9 @@ class MongoSqlQueryHolder { "" } - def getCollection: String = sqlTable.getFullyQualifiedName + def getCollection: String = { + sqlTable.getFullyQualifiedName.replace(".", CollectionSeparator).replace("'", "").replace("`", "") + } def run(provider: DatabaseProvider, allowDiskUsage: Boolean = true): Observable[Document] = { sqlCommandType match { diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala index 1d0b3375..4f750e66 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala @@ -15,6 +15,14 @@ class SelectSqlSpec extends PersonSpecification { selectResponse.head.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" } + "simple sql with schema" in { + val queryConverter = MongoSqlQueryHolder("select * from `mongocamp-unit-test`.`friend`") + val 
selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1327 + selectResponse.head.getString("name") mustEqual "Castaneda Mccullough" + selectResponse.head.getLong("id") mustEqual 33 + } + "sql with in query" in { val queryConverter = MongoSqlQueryHolder("select id, guid, name, age, balance from people where age in (30, 18, 25, 22) order by id asc") val selectResponse = queryConverter.run(TestDatabase.provider).resultList() From 1df9e976dc027a9a11532c750264098b133d1ba5 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Mon, 10 Jun 2024 22:23:28 +0200 Subject: [PATCH 13/22] style: reformat code with scalafmt --- .../mongodb/operation/CrudObserver.scala | 6 ---- .../mongodb/operation/SimpleObserver.scala | 12 ++++++++ .../relation/OneToManyRelationship.scala | 15 ++++++++++ .../relation/OneToOneRelationship.scala | 15 ++++++++++ .../mongodb/relation/Relationship.scala | 22 -------------- .../mongodb/sql/MongoSqlQueryHolder.scala | 29 ++++++++++++------- 6 files changed, 60 insertions(+), 39 deletions(-) create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/operation/SimpleObserver.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToManyRelationship.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToOneRelationship.scala diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/operation/CrudObserver.scala b/src/main/scala/dev/mongocamp/driver/mongodb/operation/CrudObserver.scala index 21def035..873a4f27 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/operation/CrudObserver.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/operation/CrudObserver.scala @@ -30,10 +30,4 @@ trait CrudObserver[A] extends Crud[A] { } -class SimpleObserver[T] extends Observer[T] with LazyLogging { - override def onError(e: Throwable): Unit = logger.error(e.getMessage, e) - override def onComplete(): Unit = {} - - override def onNext(result: T): Unit = {} -} 
diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/operation/SimpleObserver.scala b/src/main/scala/dev/mongocamp/driver/mongodb/operation/SimpleObserver.scala new file mode 100644 index 00000000..cdb747d6 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/operation/SimpleObserver.scala @@ -0,0 +1,12 @@ +package dev.mongocamp.driver.mongodb.operation + +import com.typesafe.scalalogging.LazyLogging +import org.mongodb.scala.Observer + +class SimpleObserver[T] extends Observer[T] with LazyLogging { + override def onError(e: Throwable): Unit = logger.error(e.getMessage, e) + + override def onComplete(): Unit = {} + + override def onNext(result: T): Unit = {} +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToManyRelationship.scala b/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToManyRelationship.scala new file mode 100644 index 00000000..10aaad15 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToManyRelationship.scala @@ -0,0 +1,15 @@ +package dev.mongocamp.driver.mongodb.relation + +import dev.mongocamp.driver.mongodb.MongoDAO +import dev.mongocamp.driver.mongodb.relation.RelationCache.{ addCachedValue, getCachedValue, hasCachedValue } + +case class OneToManyRelationship[A](dao: MongoDAO[A], daoKey: String, useCache: Boolean = true) extends Relationship { + + def relatedRecords(value: Any): List[A] = { + val key = "%s_%s".format(id, value) + if (!useCache || !hasCachedValue(key)) + addCachedValue(key, dao.find(daoKey, value).resultList()) + getCachedValue[List[A]](key) + } + +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToOneRelationship.scala b/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToOneRelationship.scala new file mode 100644 index 00000000..1b98b03a --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToOneRelationship.scala @@ -0,0 +1,15 @@ +package dev.mongocamp.driver.mongodb.relation + +import 
dev.mongocamp.driver.mongodb.MongoDAO +import dev.mongocamp.driver.mongodb.relation.RelationCache.{ addCachedValue, getCachedValue, hasCachedValue } + +case class OneToOneRelationship[A](dao: MongoDAO[A], daoKey: String, useCache: Boolean = true) extends Relationship { + + def relatedRecord(value: Any): Option[A] = { + val key = "%s_%s".format(id, value) + if (!useCache || !hasCachedValue(key)) + addCachedValue(key, dao.find(daoKey, value).resultOption()) + getCachedValue[Option[A]](key) + } + +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/relation/Relationship.scala b/src/main/scala/dev/mongocamp/driver/mongodb/relation/Relationship.scala index e51b14ea..33243c94 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/relation/Relationship.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/relation/Relationship.scala @@ -11,25 +11,3 @@ abstract class Relationship { removeCachedValue(key) } } - -case class OneToOneRelationship[A](dao: MongoDAO[A], daoKey: String, useCache: Boolean = true) extends Relationship { - - def relatedRecord(value: Any): Option[A] = { - val key = "%s_%s".format(id, value) - if (!useCache || !hasCachedValue(key)) - addCachedValue(key, dao.find(daoKey, value).resultOption()) - getCachedValue[Option[A]](key) - } - -} - -case class OneToManyRelationship[A](dao: MongoDAO[A], daoKey: String, useCache: Boolean = true) extends Relationship { - - def relatedRecords(value: Any): List[A] = { - val key = "%s_%s".format(id, value) - if (!useCache || !hasCachedValue(key)) - addCachedValue(key, dao.find(daoKey, value).resultList()) - getCachedValue[List[A]](key) - } - -} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala index 06c8d892..d993461e 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala @@ -1,21 +1,21 @@ 
package dev.mongocamp.driver.mongodb.sql +import com.mongodb.client.model.DropIndexOptions import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.database.DatabaseProvider -import SQLCommandType.SQLCommandType -import com.mongodb.client.model.DropIndexOptions import dev.mongocamp.driver.mongodb.database.DatabaseProvider.CollectionSeparator -import net.sf.jsqlparser.statement.{ ShowStatement, Statement, UnsupportedStatement } +import dev.mongocamp.driver.mongodb.sql.SQLCommandType.SQLCommandType import net.sf.jsqlparser.expression.operators.conditional.{ AndExpression, OrExpression } import net.sf.jsqlparser.expression.operators.relational._ import net.sf.jsqlparser.expression.{ Expression, Parenthesis } import net.sf.jsqlparser.parser.{ CCJSqlParser, StreamProvider } -import net.sf.jsqlparser.schema.{ Column, Table } +import net.sf.jsqlparser.schema.Table +import net.sf.jsqlparser.statement.UnsupportedStatement import net.sf.jsqlparser.statement.create.index.CreateIndex import net.sf.jsqlparser.statement.delete.Delete import net.sf.jsqlparser.statement.drop.Drop import net.sf.jsqlparser.statement.insert.Insert -import net.sf.jsqlparser.statement.select.{ AllColumns, FromItem, PlainSelect, Select, SelectItem } +import net.sf.jsqlparser.statement.select.{ FromItem, PlainSelect, Select, SelectItem } import net.sf.jsqlparser.statement.show.ShowTablesStatement import net.sf.jsqlparser.statement.truncate.Truncate import net.sf.jsqlparser.statement.update.Update @@ -24,6 +24,7 @@ import org.mongodb.scala.model.IndexOptions import org.mongodb.scala.model.Sorts.ascending import org.mongodb.scala.{ Document, Observable } +import java.sql.SQLException import java.util.concurrent.TimeUnit import scala.collection.mutable import scala.collection.mutable.ArrayBuffer @@ -521,13 +522,19 @@ class MongoSqlQueryHolder { object MongoSqlQueryHolder { def stringToStatement(sql: String, charset: String = "UTF-8") = { - val stream: java.io.InputStream = new 
java.io.ByteArrayInputStream(sql.getBytes(charset)) - val jSqlParser = new CCJSqlParser(new StreamProvider(stream, charset)) - val statements = jSqlParser.Statements().getStatements.asScala - if (statements.size != 1) { - throw new IllegalArgumentException("only one statement is supported") + try { + val stream: java.io.InputStream = new java.io.ByteArrayInputStream(sql.getBytes(charset)) + val jSqlParser = new CCJSqlParser(new StreamProvider(stream, charset)) + val statements = jSqlParser.Statements().getStatements.asScala + if (statements.size != 1) { + throw new IllegalArgumentException("only one statement is supported") + } + statements.head + } + catch { + case e: net.sf.jsqlparser.parser.ParseException => + throw new SQLException("The given SQL is not parsable.", e) } - statements.head } def apply(statement: net.sf.jsqlparser.statement.Statement): MongoSqlQueryHolder = new MongoSqlQueryHolder(statement) From 8e242155af024145d0cb7c378f051a9149bb336e Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Thu, 13 Jun 2024 07:24:35 +0200 Subject: [PATCH 14/22] feat: detect schema --- build.sbt | 4 +- .../schema/schema_stage11_group.json | 213 +++++++++ .../relation/OneToManyRelationship.scala | 5 +- .../relation/OneToOneRelationship.scala | 5 +- .../driver/mongodb/schema/CirceSchema.scala | 162 +++++++ .../driver/mongodb/schema/JsonConverter.scala | 23 + .../driver/mongodb/schema/JsonSchema.scala | 13 + .../mongodb/schema/JsonSchemaDefinition.scala | 9 + .../mongodb/schema/SchemaAnalysis.scala | 5 + .../mongodb/schema/SchemaAnalysisField.scala | 12 + .../schema/SchemaAnalysisFieldType.scala | 3 + .../mongodb/schema/SchemaExplorer.scala | 421 ++++++++++++++++++ .../driver/mongodb/schema/SchemaSpec.scala | 49 ++ .../driver/mongodb/test/TestDatabase.scala | 2 + 14 files changed, 921 insertions(+), 5 deletions(-) create mode 100644 src/main/resources/schema/schema_stage11_group.json create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/schema/CirceSchema.scala 
create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonConverter.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonSchema.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonSchemaDefinition.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaAnalysis.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaAnalysisField.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaAnalysisFieldType.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaExplorer.scala create mode 100644 src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala diff --git a/build.sbt b/build.sbt index e85423ed..496aac36 100644 --- a/build.sbt +++ b/build.sbt @@ -73,7 +73,7 @@ libraryDependencies ++= Seq( "io.circe" %% "circe-core", "io.circe" %% "circe-generic", "io.circe" %% "circe-parser" -).map(_ % circeVersion % Test) +).map(_ % circeVersion) libraryDependencies += "org.mongodb.scala" %% "mongo-scala-driver" % "5.0.1" @@ -101,6 +101,8 @@ libraryDependencies += "com.vdurmont" % "semver4j" % "3.1.0" libraryDependencies += "com.github.jsqlparser" % "jsqlparser" % "4.9" +libraryDependencies += "org.postgresql" % "postgresql" % "42.7.3" + buildInfoPackage := "dev.mongocamp.driver.mongodb" buildInfoOptions += BuildInfoOption.BuildTime diff --git a/src/main/resources/schema/schema_stage11_group.json b/src/main/resources/schema/schema_stage11_group.json new file mode 100644 index 00000000..e095c1ee --- /dev/null +++ b/src/main/resources/schema/schema_stage11_group.json @@ -0,0 +1,213 @@ +{ + "S": { + "$push": { + "$switch": { + "branches": [ + { + "case": { + "$eq": [ + "$_id.sT", + "bS" + ] + }, + "then": { + "c": "$c", + "n": "$_id.n", + "t": "$_id.t" + } + }, + { + "case": { + "$eq": [ + "$_id.sT", + "vS" + ] + }, + "then": { + "ve": { + "a": { + "$cond": [ + { + "$eq": [ + "$_id.t", + "bool" + ] + 
}, + { + "$eq": [ + "$a", + 1 + ] + }, + "$a" + ] + }, + "g": "$g", + "i": { + "$cond": [ + { + "$eq": [ + "$_id.t", + "bool" + ] + }, + { + "$eq": [ + "$i", + 1 + ] + }, + "$i" + ] + } + } + } + }, + { + "case": { + "$eq": [ + "$_id.sT", + "lS" + ] + }, + "then": { + "le": { + "al": "$al", + "gl": "$gl", + "il": "$il" + } + } + }, + { + "case": { + "$eq": [ + "$_id.sT", + "fS" + ] + }, + "then": { + "cu": "$cu", + "lF": "$lF", + "mF": "$mF" + } + }, + { + "case": { + "$eq": [ + "$_id.sT", + "vH" + ] + }, + "then": { + "vH": { + "s": "$s", + "ns": "$ns", + "it": "$it", + "end": { + "$cond": [ + { + "$in": [ + "$_id.t", + [ + "objectId", + "date" + ] + ] + }, + { + "$add": [ + "1970-01-01T01:00:00+01:00", + { + "$multiply": [ + "$end", + 1000 + ] + } + ] + }, + "$end" + ] + }, + "sta": { + "$cond": [ + { + "$in": [ + "$_id.t", + [ + "objectId", + "date" + ] + ] + }, + { + "$add": [ + "1970-01-01T01:00:00+01:00", + { + "$multiply": [ + "$sta", + 1000 + ] + } + ] + }, + "$sta" + ] + }, + "r": "$r" + } + } + }, + { + "case": { + "$eq": [ + "$_id.sT", + "lH" + ] + }, + "then": { + "lH": { + "end": "$end", + "it": "$it", + "ns": "$ns", + "r": "$r", + "s": "$s", + "sta": "$sta" + } + } + }, + { + "case": { + "$eq": [ + "$_id.sT", + "wH" + ] + }, + "then": { + "wH": { + "it": "$it" + } + } + }, + { + "case": { + "$eq": [ + "$_id.sT", + "hH" + ] + }, + "then": { + "hH": { + "it": "$it" + } + } + } + ], + "default": null + } + } + }, + "_id": { + "n": "$_id.n", + "t": "$_id.t" + } +} \ No newline at end of file diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToManyRelationship.scala b/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToManyRelationship.scala index 10aaad15..9b633542 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToManyRelationship.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToManyRelationship.scala @@ -1,14 +1,15 @@ package dev.mongocamp.driver.mongodb.relation -import 
dev.mongocamp.driver.mongodb.MongoDAO +import dev.mongocamp.driver.mongodb.{ GenericObservable, MongoDAO } import dev.mongocamp.driver.mongodb.relation.RelationCache.{ addCachedValue, getCachedValue, hasCachedValue } case class OneToManyRelationship[A](dao: MongoDAO[A], daoKey: String, useCache: Boolean = true) extends Relationship { def relatedRecords(value: Any): List[A] = { val key = "%s_%s".format(id, value) - if (!useCache || !hasCachedValue(key)) + if (!useCache || !hasCachedValue(key)) { addCachedValue(key, dao.find(daoKey, value).resultList()) + } getCachedValue[List[A]](key) } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToOneRelationship.scala b/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToOneRelationship.scala index 1b98b03a..126c34c2 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToOneRelationship.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToOneRelationship.scala @@ -1,14 +1,15 @@ package dev.mongocamp.driver.mongodb.relation -import dev.mongocamp.driver.mongodb.MongoDAO +import dev.mongocamp.driver.mongodb.{ GenericObservable, MongoDAO } import dev.mongocamp.driver.mongodb.relation.RelationCache.{ addCachedValue, getCachedValue, hasCachedValue } case class OneToOneRelationship[A](dao: MongoDAO[A], daoKey: String, useCache: Boolean = true) extends Relationship { def relatedRecord(value: Any): Option[A] = { val key = "%s_%s".format(id, value) - if (!useCache || !hasCachedValue(key)) + if (!useCache || !hasCachedValue(key)) { addCachedValue(key, dao.find(daoKey, value).resultOption()) + } getCachedValue[Option[A]](key) } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/CirceSchema.scala b/src/main/scala/dev/mongocamp/driver/mongodb/schema/CirceSchema.scala new file mode 100644 index 00000000..b6fc8f5f --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/schema/CirceSchema.scala @@ -0,0 +1,162 @@ +package dev.mongocamp.driver.mongodb.schema + 
+import io.circe.Decoder.Result +import io.circe.{ Decoder, Encoder, HCursor, Json } +import org.bson.types.ObjectId +import org.joda.time.DateTime +import org.mongodb.scala.Document + +import java.util.Date + +trait CirceSchema { + + implicit val DateFormat: Encoder[Date] with Decoder[Date] = new Encoder[Date] with Decoder[Date] { + override def apply(a: Date): Json = Encoder.encodeString.apply(a.toInstant.toString) + + override def apply(c: HCursor): Result[Date] = Decoder.decodeString + .map( + s => new DateTime(s).toDate + ) + .apply(c) + } + + implicit val DateTimeFormat: Encoder[DateTime] with Decoder[DateTime] = new Encoder[DateTime] with Decoder[DateTime] { + override def apply(a: DateTime): Json = Encoder.encodeString.apply(a.toInstant.toString) + + override def apply(c: HCursor): Result[DateTime] = Decoder.decodeString + .map( + s => new DateTime(s) + ) + .apply(c) + } + + implicit val ObjectIdFormat: Encoder[ObjectId] with Decoder[ObjectId] = new Encoder[ObjectId] with Decoder[ObjectId] { + override def apply(a: ObjectId): Json = Encoder.encodeString.apply(a.toHexString) + + override def apply(c: HCursor): Result[ObjectId] = Decoder.decodeString + .map( + s => new ObjectId(s) + ) + .apply(c) + } + + implicit val MapStringAnyFormat: Encoder[Map[String, Any]] with Decoder[Map[String, Any]] = new Encoder[Map[String, Any]] with Decoder[Map[String, Any]] { + override def apply(a: Map[String, Any]): Json = encodeMapStringAny(a) + + override def apply(c: HCursor): Result[Map[String, Any]] = Decoder.decodeMap[String, Any].apply(c) + } + + implicit val AnyFormat: Encoder[Any] with Decoder[Any] = new Encoder[Any] with Decoder[Any] { + override def apply(a: Any): Json = encodeAnyToJson(a) + + override def apply(c: HCursor): Result[Any] = { + Decoder.decodeJson + .map( + a => decodeFromJson(a) + ) + .apply(c) + } + } + + def encodeMapStringAny(a: Map[String, Any]): Json = { + Json.obj( + a.keySet + .map( + key => (key, encodeAnyToJson(a(key))) + ) + .toList: _* + ) 
+ } + + def decodeFromJson(json: Json): Any = { + json match { + case a if a.isNumber => + val value = a.asNumber.get + val long = value.toLong + if (long.isDefined) { + long.get + } + else { + value.toDouble + } + case a if a.isString => + val string = a.asString.get + if (string.length == 24 && string.substring(10, 11).equals("T") && string.endsWith("Z")) { + try { + val date = new DateTime(string) + date + } + catch { + case _: Exception => string + } + } + else { + string + } + case a if a.isBoolean => a.asBoolean.getOrElse(false) + case a if a.isArray => + a.asArray.get.toList.map( + e => decodeFromJson(e) + ) + case a if a.isObject => + a.asObject.get.toMap.map( + e => (e._1, decodeFromJson(e._2)) + ) + case a if a.isNull => null + case _ => null + } + } + + def encodeAnyToJson(a: Any, deepth: Int = 0): Json = { + a match { + case s: String => Json.fromString(s) + case b: Boolean => Json.fromBoolean(b) + case l: Long => Json.fromLong(l) + case i: Int => Json.fromInt(i) + case bi: BigInt => Json.fromBigInt(bi) + case bd: BigDecimal => Json.fromBigDecimal(bd) + case d: Double => Json.fromDoubleOrNull(d) + case f: Float => Json.fromFloatOrNull(f) + case d: Date => Encoder.encodeString.apply(d.toInstant.toString) + case d: DateTime => Encoder.encodeString.apply(d.toInstant.toString) + case o: ObjectId => Encoder.encodeString.apply(o.toHexString) + case m: Map[String, _] => encodeMapStringAny(m) + case seq: Seq[_] => + Json.arr( + seq.map( + e => encodeAnyToJson(e, deepth) + ): _* + ) + case set: Set[_] => + Json.arr( + set + .map( + e => encodeAnyToJson(e, deepth) + ) + .toList: _* + ) + case product: Product => + val productElementNames = product.productElementNames.toList + val fieldMap = productElementNames + .map( + key => { + val index = productElementNames.indexOf(key) + (key, product.productElement(index)) + } + ) + .toMap + encodeAnyToJson(fieldMap) + case r: Document => encodeAnyToJson(r.toMap) + case any: Any => + if (deepth < 256) { + 
encodeAnyToJson(any, deepth + 1) + } + else { + Json.Null + } + case _ => + Json.Null + } + } + +} \ No newline at end of file diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonConverter.scala b/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonConverter.scala new file mode 100644 index 00000000..5913f1cd --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonConverter.scala @@ -0,0 +1,23 @@ +package dev.mongocamp.driver.mongodb.schema + +import better.files.Resource +import io.circe.jawn.decode +import io.circe.syntax._ +import io.circe.generic.auto._ +class JsonConverter extends CirceSchema { + + def toJson(s: Any): String = { + s.asJson.noSpaces + } + + def readJsonMap(fileContent: String): Map[String, Any] = { + val decoded = decode[Map[String, Any]](fileContent) + decoded.getOrElse(Map()) + } + + def readJsonMapFromFile(fileName: String): Map[String, Any] = { + val fileContent = Resource.asString(fileName).getOrElse("{}") + readJsonMap(fileContent) + } + +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonSchema.scala b/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonSchema.scala new file mode 100644 index 00000000..9e7e651e --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonSchema.scala @@ -0,0 +1,13 @@ +package dev.mongocamp.driver.mongodb.schema + +case class JsonSchema(`$schema`: String, `$ref`: String, definitions: Map[String, JsonSchemaDefinition]) { + def toJson: String = { + new JsonConverter().toJson(this) + } +} + +object JsonSchema { + def apply(objectName: String, definitions: Map[String, JsonSchemaDefinition]): JsonSchema = { + JsonSchema("https://json-schema.org/draft/2020-12/schema", s"#/definitions/$objectName", definitions) + } +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonSchemaDefinition.scala b/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonSchemaDefinition.scala new file mode 100644 index 00000000..a83eb4f6 --- 
/dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonSchemaDefinition.scala @@ -0,0 +1,9 @@ +package dev.mongocamp.driver.mongodb.schema + +case class JsonSchemaDefinition( + `type`: String, + title: String, + additionalProperties: Boolean, + required: List[String], + properties: Map[String, Map[String, Any]] +) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaAnalysis.scala b/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaAnalysis.scala new file mode 100644 index 00000000..3fd37ea7 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaAnalysis.scala @@ -0,0 +1,5 @@ +package dev.mongocamp.driver.mongodb.schema + +import scala.collection.mutable.ArrayBuffer + +case class SchemaAnalysis(count: Long, sample: Long, percentageOfAnalysed: Double, fields: ArrayBuffer[SchemaAnalysisField]) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaAnalysisField.scala b/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaAnalysisField.scala new file mode 100644 index 00000000..c9013998 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaAnalysisField.scala @@ -0,0 +1,12 @@ +package dev.mongocamp.driver.mongodb.schema + +import scala.collection.mutable.ArrayBuffer + +case class SchemaAnalysisField ( + name: String, + fullName: String, + fieldTypes: List[SchemaAnalysisFieldType], + count: Long, + percentageOfParent: Double, + subFields: ArrayBuffer[SchemaAnalysisField] + ) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaAnalysisFieldType.scala b/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaAnalysisFieldType.scala new file mode 100644 index 00000000..da0f2185 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaAnalysisFieldType.scala @@ -0,0 +1,3 @@ +package dev.mongocamp.driver.mongodb.schema + +case class SchemaAnalysisFieldType(fieldType: String, count: Long, percentageOfParent: Double) \ No newline at 
end of file diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaExplorer.scala b/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaExplorer.scala new file mode 100644 index 00000000..e94eaf49 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaExplorer.scala @@ -0,0 +1,421 @@ +package dev.mongocamp.driver.mongodb.schema + +import better.files.Resource +import dev.mongocamp.driver.mongodb._ +import org.bson.conversions.Bson +import org.mongodb.scala.Document + +import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer +import scala.concurrent.duration.DurationInt +import io.circe.parser.decode + +class SchemaExplorer { + private val NameSeparator: String = "." + private val FieldSplitter: String = "_/_" + private val ArrayItemMark: String = "[]" + private val KeyFieldType = "$$t" + private val ObjectName = "xl" + private val ArrayName = "xa" + private val ArrayElementText = "[array element]" + + private case class PipelineStage(stage: String, value: Any) + + private def schemaAggregation(deepth: Int, sampleSize: Option[Int]): List[PipelineStage] = { + val buffer = ArrayBuffer[PipelineStage]() + buffer.addAll( + sampleSize.map(size => PipelineStage("sample", Map("size" -> size))) + ) + + buffer.addOne(PipelineStage("project", Map("_" -> processObject(deepth, 0, "$$ROOT", List()), "_id" -> 0))) + + (0 to deepth).foreach(_ => { + buffer.addOne(PipelineStage("unwind", Map("path" -> "$_", "preserveNullAndEmptyArrays" -> true))) + buffer.addOne(PipelineStage("replaceRoot", Map("newRoot" -> Map("$cond" -> List(Map("$eq" -> List("$_", null)), "$$ROOT", "$_"))))) + }) + + buffer.addAll( + List( + PipelineStage("project", Map("_" -> 0)), + PipelineStage("project", Map("l" -> "$$REMOVE", "n" -> 1, "t" -> 1, "v" -> "$$REMOVE")), + PipelineStage("group", Map("_id" -> Map("n" -> "$n", "t" -> "$t"), "c" -> Map("$sum" -> 1))), + PipelineStage("facet", Map("bS" -> List(Map("$project" -> Map("_id" -> Map("n" -> 
"$_id.n", "sT" -> "bS", "t" -> "$_id.t"), "c" -> 1))))), + PipelineStage("project", Map("data" -> Map("$concatArrays" -> List("$bS")))), + PipelineStage("unwind", "$data"), + PipelineStage("replaceRoot", Map("newRoot" -> "$data")), + PipelineStage("group", new JsonConverter().readJsonMapFromFile("schema/schema_stage11_group.json")), + PipelineStage("replaceRoot", Map("newRoot" -> Map("$mergeObjects" -> "$S"))), + PipelineStage("sort", Map("t" -> 1)), + PipelineStage("group", Map("T" -> Map("$push" -> "$$ROOT"), "_id" -> Map("n" -> "$n"), "c" -> Map("$sum" -> "$c"))), + PipelineStage("project", Map("T" -> 1, "_id" -> 0, "c" -> 1, "n" -> "$_id.n")), + PipelineStage("sort", Map("n" -> 1)) + ) + ) + buffer.toList + } + + private case class AggregationField(name: String, value: String, level: Int) + + private def createBranch(`case`: Bson, `then`: Bson): Bson = Map("case" -> `case`, "then" -> `then`) + + private def createLet(in: Bson, vars: Bson): Bson = Map("$let" -> Map("in" -> in, "vars" -> vars)) + + private def fieldValue(fieldName: String, fieldLevel: Int) = { + Map("_" -> null, "e" -> fieldLevel, "n" -> generateFieldName(fieldName), "t" -> KeyFieldType) + } + + private def generateFieldName(fieldName: String): Any = { + val field = fieldName + .replace("$$", "") + .replace(ObjectName, FieldSplitter ++ ObjectName) + .replace(ArrayName, FieldSplitter + ArrayName) + .replace(ArrayItemMark, FieldSplitter + ArrayItemMark) + val fields = field + .split(FieldSplitter) + .filterNot(s => s == null || s.isEmpty || s.isBlank) + val responseArray: ArrayBuffer[String] = ArrayBuffer() + fields.toList + .map(string => string.replace(ObjectName, "$$" ++ ObjectName)) + .foreach(string => { + var fieldName = string + if (fieldName.startsWith(NameSeparator)) { + responseArray.addOne(NameSeparator) + fieldName = fieldName.substring(1) + } + val hasEndingSeperator: Boolean = if (fieldName.endsWith(NameSeparator)) { + fieldName = fieldName.substring(0, fieldName.length - 1) + true + 
} + else { + false + } + responseArray.addOne(fieldName) + if (hasEndingSeperator) { + responseArray.addOne(NameSeparator) + } + }) + + if (responseArray.size == 1) { + var result = responseArray.head + if (!result.startsWith("$$")) { + result = "$$%s".format(result) + } + result + } + else { + Map("$concat" -> responseArray.toList) + } + } + + private def processField(maxLevel: Int, field: AggregationField, parents: List[String]): Bson = { + val newParents = addToParents(parents, field.name) + val fullName: String = if (parents.isEmpty) field.name else newParents.mkString + val stringBranch = createBranch(Map("$eq" -> List(KeyFieldType, "string")), fieldValue(fullName, field.level)) + val arrayBranch = createBranch(Map("$eq" -> List(KeyFieldType, "array")), processArrayField(maxLevel, fullName, field, parents)) + val objectBranch = createBranch(Map("$eq" -> List(KeyFieldType, "object")), processObjectField(maxLevel, fullName, field, parents)) + Map( + "$switch" -> Map( + "branches" -> List(stringBranch, arrayBranch, objectBranch), + "default" -> fieldValue(fullName, field.level) + ) + ) + } + + private def processArrayField(maxLevel: Int, fullName: String, field: AggregationField, parents: List[String]): Bson = { + val nestedObject = if (field.level >= maxLevel) { + null + } + else { + Map("$concatArrays" -> List(List(null), processArray(maxLevel, field, parents))) + } + Map("_" -> nestedObject, "n" -> generateFieldName(fullName), "t" -> KeyFieldType, "e" -> field.level) + } + + private def processArray(maxLevel: Int, field: AggregationField, parents: List[String]): Bson = { + val level = field.level + val itemVar = s"$ArrayName$level" + val item = AggregationField(ArrayItemMark, itemVar, level + 1) + Map( + "$map" -> Map( + "as" -> itemVar, + "in" -> createLet(processField(maxLevel, item, addToParents(parents, field.name)), createTypeField(item)), + "input" -> generateFieldName(field.value) + ) + ) + } + + private def addToParents(list: List[String], newElement: 
String): List[String] = { + if (!list.contains(newElement) || newElement.equalsIgnoreCase(ArrayItemMark)) { + if (list.isEmpty) { + List(newElement) + } + else { + list ++ List(NameSeparator, newElement) + } + } + else { + list + } + } + + private def processObjectField(maxLevel: Int, fullName: String, field: AggregationField, parents: List[String]): Bson = { + val nestedObject = if (field.level >= maxLevel) { + null + } + else { + Map("$concatArrays" -> List(List(null), processObject(maxLevel, field.level + 1, field.value, addToParents(parents, field.name)))) + } + Map( + "_" -> nestedObject, + "n" -> generateFieldName(fullName), + "t" -> KeyFieldType, + "e" -> field.level + ) + } + + private def processObject(maxLevel: Int, level: Int, objectName: String, parents: List[String]): Bson = { + val itemVar = s"$ObjectName$level" + val field = AggregationField(s"$itemVar.k", s"$itemVar.v", level) + val objectNameFunction = if (objectName.startsWith("$$")) objectName else "$$" + objectName + Map( + "$map" -> Map( + "as" -> itemVar, + "in" -> createLet(processField(maxLevel, field, parents), createTypeField(field)), + "input" -> Map("$objectToArray" -> objectNameFunction) + ) + ) + } + + private def createTypeField(field: AggregationField): Map[String, Any] = { + val fieldValue = if (field.value.startsWith("$$")) field.value else "$$" + field.value + Map("t" -> Map("$type" -> fieldValue)) + } + + private val emptyField = SchemaAnalysisField("ROOT", "", List(), -1, -1, ArrayBuffer()) + + private def fieldsToJsonSchemaDefinition(map: mutable.Map[String, JsonSchemaDefinition], objectName: String, fields: List[SchemaAnalysisField]): Unit = { + map.put(objectName, null) + val requiredFields = ArrayBuffer[String]() + val properties = mutable.Map[String, Map[String, Any]]() + fields.distinct.foreach(field => { + val fieldMap = mutable.Map[String, Any]() + val fieldObjectName = getObjectName(camelCaseObjectName(field.name), map) + if 
(field.fieldTypes.exists(_.fieldType.equalsIgnoreCase("object"))) { + fieldsToJsonSchemaDefinition(map, fieldObjectName, field.subFields.toList) + } + if (field.percentageOfParent == 1.0) { + requiredFields.addOne(field.name) + } + if (field.fieldTypes.size == 1) { + val t = field.fieldTypes.head + val convertedFieldType = convertFieldType(t.fieldType) + fieldMap.put("type", convertedFieldType.name) + convertedFieldType.pattern.foreach(value => fieldMap.put("pattern", value)) + convertedFieldType.format.foreach(value => fieldMap.put("format", value)) + val mapping: Map[String, Any] = if (t.fieldType.equalsIgnoreCase("array")) { + val items = { + val subField = field.subFields.head + if (subField.fieldTypes.size == 1) { + if (subField.fieldTypes.head.fieldType.equalsIgnoreCase("object")) { + fieldsToJsonSchemaDefinition(map, fieldObjectName, subField.subFields.toList) + Map("$ref" -> s"#/definitions/$fieldObjectName") + } + else { + val convertedFieldType = convertFieldType(subField.fieldTypes.head.fieldType) + val mutableMap = mutable.Map[String, Any]() + mutableMap.put("type", convertedFieldType.name) + convertedFieldType.pattern.foreach(value => mutableMap.put("pattern", value)) + convertedFieldType.format.foreach(value => mutableMap.put("format", value)) + mutableMap.toMap + } + } + else { + Map("oneOf" -> getOneOfMapping(field, fieldObjectName, map)) + } + } + Map("type" -> "array", "items" -> items) + } + else if (t.fieldType.equalsIgnoreCase("object")) { + fieldsToJsonSchemaDefinition(map, fieldObjectName, field.subFields.toList) + Map("$ref" -> s"#/definitions/$fieldObjectName") + } + else { + val convertedFieldType = convertFieldType(t.fieldType) + val mutableMap = mutable.Map[String, Any]() + mutableMap.put("type", convertedFieldType.name) + convertedFieldType.pattern.foreach(value => mutableMap.put("pattern", value)) + convertedFieldType.format.foreach(value => mutableMap.put("format", value)) + mutableMap.toMap + } + mapping.foreach(element => 
fieldMap.put(element._1, element._2)) + } + else { + fieldMap.put("oneOf", getOneOfMapping(field, fieldObjectName, map)) + } + properties.put(field.name, fieldMap.toMap) + }) + val jsonSchemaDefinition = JsonSchemaDefinition("object", objectName, additionalProperties = false, requiredFields.toList, properties.toMap) + map.put(objectName, jsonSchemaDefinition) + } + + private case class JsonSchemaFieldType(name: String, pattern: Option[String] = None, format: Option[String] = None) + + private def convertFieldType(fieldType: String): JsonSchemaFieldType = { + val numberTypes = List("double", "float") + val fullNumberTypes = List("int", "long") + if (fieldType.equalsIgnoreCase("objectId")) { + JsonSchemaFieldType("string", Some("^([a-fA-F0-9]{2})+$")) + } + else if (fieldType.equalsIgnoreCase("bool")) { + JsonSchemaFieldType("boolean") + } + else if (fieldType.equalsIgnoreCase("date")) { + JsonSchemaFieldType("string", None, Some("date-time")) + } + else if (fullNumberTypes.exists(_.equalsIgnoreCase(fieldType))) { + JsonSchemaFieldType("integer") + } + else if (numberTypes.exists(_.equalsIgnoreCase(fieldType))) { + JsonSchemaFieldType("number") + } + else { + JsonSchemaFieldType(fieldType) + } + } + + private def getOneOfMapping(field: SchemaAnalysisField, fieldObjectName: String, map: mutable.Map[String, JsonSchemaDefinition]) = { + field.fieldTypes + .map(t => { + if (t.fieldType.equalsIgnoreCase("array")) { + if (t.fieldType.equalsIgnoreCase("object")) { + fieldsToJsonSchemaDefinition(map, fieldObjectName, field.subFields.toList) + Map("type" -> "array", "items" -> Map("$ref" -> s"#/definitions/$fieldObjectName")) + } + else { + val arrayItemType = field.subFields.find(_.name.equalsIgnoreCase(ArrayElementText)).map(_.fieldTypes.head.fieldType).getOrElse("Error") + val convertedFieldType = convertFieldType(arrayItemType) + val mutableMap = mutable.Map[String, Any]() + mutableMap.put("type", convertedFieldType.name) + convertedFieldType.pattern.foreach(value => 
mutableMap.put("pattern", value)) + convertedFieldType.format.foreach(value => mutableMap.put("format", value)) + mutableMap.toMap + } + } + else if (t.fieldType.equalsIgnoreCase("object")) { + fieldsToJsonSchemaDefinition(map, fieldObjectName, field.subFields.toList) + Map("$ref" -> s"#/definitions/$fieldObjectName") + } + else { + val convertedFieldType = convertFieldType(t.fieldType) + val mutableMap = mutable.Map[String, Any]() + mutableMap.put("type", convertedFieldType.name) + convertedFieldType.pattern.foreach(value => mutableMap.put("pattern", value)) + convertedFieldType.format.foreach(value => mutableMap.put("format", value)) + mutableMap.toMap + } + }) + .distinct + } + + private def getObjectName(objectName: String, map: mutable.Map[String, JsonSchemaDefinition]): String = { + val name = if (map.exists(_._1.equals(objectName))) { + val nameCounterString = objectName.split("_").last + val count = + try nameCounterString.toLong + catch { + case nF: NumberFormatException => 0 + } + val nameCounter: Long = if (count.toString.equalsIgnoreCase(nameCounterString)) { + count + 1 + } + else { + 1 + } + getObjectName(s"${objectName}_$nameCounter", map) + } + else { + objectName + } + name + } + + private def camelCaseObjectName(objectName: String) = { + objectName.split("[^a-zA-Z]").map(_.capitalize).mkString.trim + } + + private def convertToBsonPipeline(pipeline: List[PipelineStage]): Seq[Bson] = { + val response: Seq[Bson] = pipeline.map(element => { + val stage = if (element.stage.startsWith("$")) element.stage else "$" + element.stage + Map(stage -> element.value) + }) + response + } + + def analyzeSchema(dao: MongoDAO[Document], deepth: Int = 10, sample: Option[Int] = None): SchemaAnalysis = { + val dbResponse = dao.findAggregated(convertToBsonPipeline(schemaAggregation(deepth, sample)), allowDiskUse = true).resultList(3.minutes.toSeconds.toInt) + val countResponse = dao.count().result() + val sampledDataCountOption: Option[Long] = dbResponse + 
.find(document => document.getString("n").equalsIgnoreCase("_id")) + .map(_.getLongValue("c")) + val sampledDataCount = sampledDataCountOption.getOrElse(-1L) + val fieldsMap = mutable.Map[String, SchemaAnalysisField]() + fieldsMap.put(emptyField.name, emptyField.copy(count = sampledDataCount)) + dbResponse.foreach(document => { + val documentMap = mapFromDocument(document) + val fullName = documentMap.get("n").map(_.toString).getOrElse("") + var name: String = fullName + var parentName: String = emptyField.name + var percentage: Double = 0 + val fieldCount = document.getLongValue("c") + + if (fullName.contains(NameSeparator)) { + val fieldNames = fullName.split(NameSeparator.charAt(0)) + name = fieldNames.last + val parentFields = fieldNames.splitAt(fieldNames.length - 1) + parentName = parentFields._1.mkString(".") + } + else { + percentage = fieldCount / sampledDataCount.toDouble + } + + val parent = fieldsMap.getOrElse( + parentName, { + val newF = emptyField.copy(name = parentName) + fieldsMap.put(parentName, newF) + newF + } + ) + + if (fullName.contains(NameSeparator)) { + percentage = fieldCount.toDouble / parent.count.toDouble + } + val types: List[SchemaAnalysisFieldType] = documentMap + .get("T") + .map(_.asInstanceOf[List[Map[String, Any]]]) + .getOrElse(List()) + .map(typeDocument => { + val doc = documentFromScalaMap(typeDocument) + val count = doc.getLongValue("c") + val fieldTypePercentage: Double = count.toDouble / parent.count.toDouble + SchemaAnalysisFieldType(doc.getStringValue("t"), count, fieldTypePercentage) + }) + + val newField = SchemaAnalysisField(name.replace(ArrayItemMark, ArrayElementText), fullName, types, fieldCount, percentage, ArrayBuffer()) + + parent.subFields.addOne(newField) + fieldsMap.put(s"$parentName$NameSeparator$name".replace("ROOT.", ""), newField) + }) + + val fieldPercentage: Double = if (countResponse != 0) sampledDataCount / countResponse else 0 + SchemaAnalysis(countResponse, sampledDataCount, fieldPercentage, 
fieldsMap.get("ROOT").map(_.subFields).getOrElse(ArrayBuffer())) + } + + def detectSchema(dao: MongoDAO[Document], deepth: Int = 10, sample: Option[Int] = None): JsonSchema = { + val objectName = camelCaseObjectName(dao.collection.namespace.getCollectionName) + val analysis = analyzeSchema(dao, deepth, sample) + val map = mutable.Map[String, JsonSchemaDefinition]() + fieldsToJsonSchemaDefinition(map, objectName, analysis.fields.toList) + JsonSchema(objectName, map.toMap) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala new file mode 100644 index 00000000..b46d1a33 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala @@ -0,0 +1,49 @@ +package dev.mongocamp.driver.mongodb.schema + +import dev.mongocamp.driver.mongodb.test.TestDatabase.{PersonDAO, PersonDocumentDAO} +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.relation.RelationDemoDatabase._ +import org.specs2.mutable.{Before, Specification} + +class SchemaSpec extends Specification with Before { + + sequential + + "Schema" should { + "detect Json Schema from document dao" in { + val schemaExplorer = new SchemaExplorer() + val schema = schemaExplorer.detectSchema(PersonDocumentDAO) + val schemaJson = schema.toJson + schemaJson.contains("\"$schema\":\"https://json-schema.org/draft/2020-12/schema\"") must beTrue + schemaJson.contains("\"Friends\":") must beTrue + schemaJson.contains("\"title\":\"Friends\"") must beTrue + schemaJson.contains("\"People\":") must beTrue + schemaJson.contains("\"title\":\"People\"") must beTrue + schemaJson.contains("\"_id\":{\"pattern\":\"^([a-fA-F0-9]{2})+$\",\"type\":\"string\"}") must beTrue + schemaJson.contains("\"isActive\":{\"type\":\"boolean\"}") must beTrue + } + + } + + override def before: Any = { + + try { + UserDAO.drop().result() + LoginDAO.drop().result() + SimplePersonDAO.drop().result() + } + catch 
{ + case e: Exception => + } + + val personList = PersonDAO.find().resultList() + personList.foreach { person => + UserDAO.insertOne(User(person.id, person.name, person.guid)).result() + LoginDAO.insertOne(Login(person.guid, person.email, person.email.reverse)).result() + person.friends.foreach { f => + SimplePersonDAO.insertOne(SimplePerson((person.id + 11) * (f.id + 3), f.name, person.id)).result() + } + } + + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/test/TestDatabase.scala b/src/test/scala/dev/mongocamp/driver/mongodb/test/TestDatabase.scala index ad0088e6..fe16b240 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/test/TestDatabase.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/test/TestDatabase.scala @@ -33,6 +33,8 @@ object TestDatabase extends LazyLogging { } } + object PersonDocumentDAO extends MongoDAO[Document](provider, "people") + object PersonDAO extends MongoDAO[Person](provider, "people") object BookDAO extends MongoDAO[Book](provider, "books") From 2ecc2769539fc111a24dd8da5a7f1fe99c52a016 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Wed, 16 Oct 2024 22:57:05 +0200 Subject: [PATCH 15/22] feat: implemented jdbc driver methods --- build.sbt | 2 +- .../mongodb/database/DatabaseProvider.scala | 12 +- .../driver/mongodb/database/MongoConfig.scala | 2 +- .../driver/mongodb/database/MongoIndex.scala | 2 +- .../SqlCommandNotSupportedException.scala | 3 + .../mongodb/jdbc/MongoDatabaseMetaData.scala | 681 +++++++++---- .../mongodb/jdbc/MongoJdbcCloseable.scala | 34 + .../mongodb/jdbc/MongoJdbcConnection.scala | 225 +++-- .../driver/mongodb/jdbc/MongoJdbcDriver.scala | 18 +- .../mongodb/jdbc/MongoPreparedStatement.scala | 306 ------ .../MongodbJdbcDriverPropertyInfoHelper.scala | 32 + .../jdbc/SQLAlreadyClosedException.scala | 5 - .../jdbc/resultSet/MongoDbResultSet.scala | 897 ++++++++++++++++++ .../resultSet/MongoDbResultSetMetaData.scala | 125 +++ .../statement/MongoPreparedStatement.scala | 740 +++++++++++++++ 
.../mongodb/sql/MongoSqlQueryHolder.scala | 133 ++- .../driver/mongodb/sql/SQLCommandType.scala | 2 +- src/test/resources/json/people.json | 2 +- src/test/resources/liquibase/00-init.xml | 64 ++ .../liquibase/02-add-not-null-constraint.xml | 14 + .../resources/liquibase/03-tag-database.xml | 9 + .../resources/liquibase/04-split-table.xml | 92 ++ .../liquibase/05-add-foreign-keys.xml | 18 + .../liquibase/06-change-column-type.xml | 17 + .../resources/liquibase/07-merge-columns.xml | 18 + .../resources/liquibase/08-create-view.xml | 20 + .../liquibase/09-add-default-columns.xml | 36 + .../resources/liquibase/10-add-person.xml | 21 + src/test/resources/liquibase/11-add-note.xml | 79 ++ .../liquibase/12-add-task-relation.xml | 176 ++++ src/test/resources/liquibase/addressbook.csv | 7 + src/test/resources/liquibase/changelog.xml | 36 + .../driver/mongodb/dao/PersonDAOSpec.scala | 2 +- .../driver/mongodb/jdbc/BaseJdbcSpec.scala | 23 + .../driver/mongodb/jdbc/ExploreJdbcSpec.scala | 80 ++ .../mongodb/jdbc/LiquibaseJdbcSpec.scala | 41 + .../driver/mongodb/jdbc/SelectJDBCSpec.scala | 50 + .../driver/mongodb/sql/DeleteSqlSpec.scala | 7 +- .../driver/mongodb/sql/OtherSqlSpec.scala | 15 +- .../driver/mongodb/sql/SelectSqlSpec.scala | 18 +- 40 files changed, 3445 insertions(+), 619 deletions(-) create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/exception/SqlCommandNotSupportedException.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcCloseable.scala delete mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoPreparedStatement.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongodbJdbcDriverPropertyInfoHelper.scala delete mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/jdbc/SQLAlreadyClosedException.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSet.scala create mode 100644 
src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSetMetaData.scala create mode 100644 src/main/scala/dev/mongocamp/driver/mongodb/jdbc/statement/MongoPreparedStatement.scala create mode 100755 src/test/resources/liquibase/00-init.xml create mode 100755 src/test/resources/liquibase/02-add-not-null-constraint.xml create mode 100755 src/test/resources/liquibase/03-tag-database.xml create mode 100755 src/test/resources/liquibase/04-split-table.xml create mode 100755 src/test/resources/liquibase/05-add-foreign-keys.xml create mode 100755 src/test/resources/liquibase/06-change-column-type.xml create mode 100755 src/test/resources/liquibase/07-merge-columns.xml create mode 100755 src/test/resources/liquibase/08-create-view.xml create mode 100755 src/test/resources/liquibase/09-add-default-columns.xml create mode 100755 src/test/resources/liquibase/10-add-person.xml create mode 100755 src/test/resources/liquibase/11-add-note.xml create mode 100755 src/test/resources/liquibase/12-add-task-relation.xml create mode 100755 src/test/resources/liquibase/addressbook.csv create mode 100755 src/test/resources/liquibase/changelog.xml create mode 100644 src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSpec.scala create mode 100644 src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSpec.scala create mode 100644 src/test/scala/dev/mongocamp/driver/mongodb/jdbc/LiquibaseJdbcSpec.scala create mode 100644 src/test/scala/dev/mongocamp/driver/mongodb/jdbc/SelectJDBCSpec.scala diff --git a/build.sbt b/build.sbt index 496aac36..0c009a04 100644 --- a/build.sbt +++ b/build.sbt @@ -101,7 +101,7 @@ libraryDependencies += "com.vdurmont" % "semver4j" % "3.1.0" libraryDependencies += "com.github.jsqlparser" % "jsqlparser" % "4.9" -libraryDependencies += "org.postgresql" % "postgresql" % "42.7.3" +libraryDependencies += "org.liquibase" % "liquibase-core" % "4.28.0" % Test buildInfoPackage := "dev.mongocamp.driver.mongodb" diff --git 
a/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala index 2ba1451b..9e500a75 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala @@ -16,7 +16,17 @@ class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) ext private val cachedMongoDAOMap = new mutable.HashMap[String, MongoDAO[Document]]() private var cachedClient: Option[MongoClient] = None - val DefaultDatabaseName: String = config.database + private var defaultDatabaseName: String = config.database + + def DefaultDatabaseName: String = defaultDatabaseName + + def connectionString = { + s"mongodb://${config.host}:${config.port}/${config.database}" + } + + def setDefaultDatabaseName(databaseName: String): Unit = { + defaultDatabaseName = databaseName + } def client: MongoClient = { if (isClosed) { diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala index 7f2d3368..b8402430 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala @@ -16,7 +16,7 @@ case class MongoConfig( database: String, host: String = DefaultHost, port: Int = DefaultPort, - applicationName: String = DefaultApplicationName, + var applicationName: String = DefaultApplicationName, userName: Option[String] = None, password: Option[String] = None, authDatabase: String = DefaultAuthenticationDatabaseName, diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoIndex.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoIndex.scala index 8318842f..4d0f3e89 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoIndex.scala +++ 
b/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoIndex.scala @@ -51,7 +51,7 @@ object MongoIndex extends ObservableIncludes with LazyLogging { indexOptions.getOrElse("weights", Map()).asInstanceOf[Map[String, _]].keys.toList else indexOptions.getOrElse("key", Map).asInstanceOf[Map[String, _]].keys.toList, - indexOptions.getOrElse("unique", false).asInstanceOf[Boolean], + indexOptions.getOrElse("unique", indexOptions("name").toString.equalsIgnoreCase("_id_")).asInstanceOf[Boolean], indexOptions.getOrElse("v", -1).asInstanceOf[Int], indexOptions.getOrElse("ns", "").toString, indexOptions.getOrElse("key", Map).asInstanceOf[Map[String, _]], diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/exception/SqlCommandNotSupportedException.scala b/src/main/scala/dev/mongocamp/driver/mongodb/exception/SqlCommandNotSupportedException.scala new file mode 100644 index 00000000..c43c65f2 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/exception/SqlCommandNotSupportedException.scala @@ -0,0 +1,3 @@ +package dev.mongocamp.driver.mongodb.exception + +class SqlCommandNotSupportedException(message: String) extends Exception(message) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala index a1eed1b0..01ca0974 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala @@ -1,358 +1,629 @@ package dev.mongocamp.driver.mongodb.jdbc -import java.sql.{Connection, DatabaseMetaData, ResultSet, RowIdLifetime} +import com.vdurmont.semver4j.Semver +import dev.mongocamp.driver.mongodb.database.DatabaseProvider.CollectionSeparator +import dev.mongocamp.driver.mongodb.{ BuildInfo, Converter, GenericObservable } +import dev.mongocamp.driver.mongodb.jdbc.resultSet.MongoDbResultSet +import dev.mongocamp.driver.mongodb.schema.SchemaExplorer 
+import org.mongodb.scala.bson.{ BsonNull, BsonString } +import org.mongodb.scala.bson.collection.immutable.Document -class MongoDatabaseMetaData extends DatabaseMetaData{ +import java.sql.{ Connection, DatabaseMetaData, ResultSet, RowIdLifetime, Types } +import scala.collection.mutable.ArrayBuffer - override def allProceduresAreCallable(): Boolean = ??? +class MongoDatabaseMetaData(connection: MongoJdbcConnection) extends DatabaseMetaData { + private lazy val semVer = new Semver(BuildInfo.version) + private lazy val jdbcSemVer = new Semver("4.2") + private lazy val DatabaseNameKey = "mongodb" - override def allTablesAreSelectable(): Boolean = ??? + override def allProceduresAreCallable() = false - override def getURL: String = ??? + override def allTablesAreSelectable(): Boolean = false - override def getUserName: String = ??? + override def getURL: String = { + connection.getDatabaseProvider.connectionString + } - override def isReadOnly: Boolean = ??? + override def getUserName: String = connection.getDatabaseProvider.config.userName.getOrElse("not set") - override def nullsAreSortedHigh(): Boolean = ??? + override def isReadOnly: Boolean = false - override def nullsAreSortedLow(): Boolean = ??? + override def nullsAreSortedHigh(): Boolean = false - override def nullsAreSortedAtStart(): Boolean = ??? + override def nullsAreSortedLow(): Boolean = false - override def nullsAreSortedAtEnd(): Boolean = ??? + override def nullsAreSortedAtStart(): Boolean = false - override def getDatabaseProductName: String = ??? + override def nullsAreSortedAtEnd(): Boolean = false - override def getDatabaseProductVersion: String = ??? + override def getDatabaseProductName: String = DatabaseNameKey - override def getDriverName: String = ??? + override def getDatabaseProductVersion: String = { + connection.getDatabaseProvider.runCommand(Document("buildInfo" -> 1)).map(doc => doc.getString("version")).result(10) + } - override def getDriverVersion: String = ??? 
+ override def getDriverName: String = BuildInfo.name - override def getDriverMajorVersion: Int = ??? + override def getDriverVersion: String = semVer.getValue - override def getDriverMinorVersion: Int = ??? + override def getDriverMajorVersion: Int = semVer.getMajor - override def usesLocalFiles(): Boolean = ??? + override def getDriverMinorVersion: Int = semVer.getMinor - override def usesLocalFilePerTable(): Boolean = ??? + override def usesLocalFiles(): Boolean = false - override def supportsMixedCaseIdentifiers(): Boolean = ??? + override def usesLocalFilePerTable(): Boolean = false - override def storesUpperCaseIdentifiers(): Boolean = ??? + override def supportsMixedCaseIdentifiers(): Boolean = false - override def storesLowerCaseIdentifiers(): Boolean = ??? + override def storesUpperCaseIdentifiers(): Boolean = false - override def storesMixedCaseIdentifiers(): Boolean = ??? + override def storesLowerCaseIdentifiers(): Boolean = false - override def supportsMixedCaseQuotedIdentifiers(): Boolean = ??? + override def storesMixedCaseIdentifiers(): Boolean = false - override def storesUpperCaseQuotedIdentifiers(): Boolean = ??? + override def supportsMixedCaseQuotedIdentifiers(): Boolean = false - override def storesLowerCaseQuotedIdentifiers(): Boolean = ??? + override def storesUpperCaseQuotedIdentifiers(): Boolean = false - override def storesMixedCaseQuotedIdentifiers(): Boolean = ??? + override def storesLowerCaseQuotedIdentifiers(): Boolean = false - override def getIdentifierQuoteString: String = ??? + override def storesMixedCaseQuotedIdentifiers(): Boolean = false - override def getSQLKeywords: String = ??? + override def getIdentifierQuoteString: String = null - override def getNumericFunctions: String = ??? + override def getSQLKeywords: String = "" - override def getStringFunctions: String = ??? + override def getNumericFunctions: String = null - override def getSystemFunctions: String = ??? 
+ override def getStringFunctions: String = null - override def getTimeDateFunctions: String = ??? + override def getSystemFunctions: String = null - override def getSearchStringEscape: String = ??? + override def getTimeDateFunctions: String = "date" - override def getExtraNameCharacters: String = ??? + override def getSearchStringEscape: String = "\\" - override def supportsAlterTableWithAddColumn(): Boolean = ??? + override def getExtraNameCharacters: String = null - override def supportsAlterTableWithDropColumn(): Boolean = ??? + override def supportsAlterTableWithAddColumn(): Boolean = false - override def supportsColumnAliasing(): Boolean = ??? + override def supportsAlterTableWithDropColumn(): Boolean = false - override def nullPlusNonNullIsNull(): Boolean = ??? + override def supportsColumnAliasing(): Boolean = true - override def supportsConvert(): Boolean = ??? + override def nullPlusNonNullIsNull(): Boolean = false - override def supportsConvert(fromType: Int, toType: Int): Boolean = ??? + override def supportsConvert(): Boolean = false - override def supportsTableCorrelationNames(): Boolean = ??? + override def supportsConvert(fromType: Int, toType: Int): Boolean = false - override def supportsDifferentTableCorrelationNames(): Boolean = ??? + override def supportsTableCorrelationNames(): Boolean = false - override def supportsExpressionsInOrderBy(): Boolean = ??? + override def supportsDifferentTableCorrelationNames(): Boolean = false - override def supportsOrderByUnrelated(): Boolean = ??? + override def supportsExpressionsInOrderBy(): Boolean = false - override def supportsGroupBy(): Boolean = ??? + override def supportsOrderByUnrelated(): Boolean = true - override def supportsGroupByUnrelated(): Boolean = ??? + override def supportsGroupBy(): Boolean = true - override def supportsGroupByBeyondSelect(): Boolean = ??? + override def supportsGroupByUnrelated(): Boolean = true - override def supportsLikeEscapeClause(): Boolean = ??? 
+ override def supportsGroupByBeyondSelect(): Boolean = true - override def supportsMultipleResultSets(): Boolean = ??? + override def supportsLikeEscapeClause(): Boolean = true - override def supportsMultipleTransactions(): Boolean = ??? + override def supportsMultipleResultSets(): Boolean = true - override def supportsNonNullableColumns(): Boolean = ??? + override def supportsMultipleTransactions(): Boolean = false - override def supportsMinimumSQLGrammar(): Boolean = ??? + override def supportsNonNullableColumns(): Boolean = true - override def supportsCoreSQLGrammar(): Boolean = ??? + override def supportsMinimumSQLGrammar(): Boolean = false - override def supportsExtendedSQLGrammar(): Boolean = ??? + override def supportsCoreSQLGrammar(): Boolean = false - override def supportsANSI92EntryLevelSQL(): Boolean = ??? + override def supportsExtendedSQLGrammar(): Boolean = false - override def supportsANSI92IntermediateSQL(): Boolean = ??? + override def supportsANSI92EntryLevelSQL(): Boolean = false - override def supportsANSI92FullSQL(): Boolean = ??? + override def supportsANSI92IntermediateSQL(): Boolean = false - override def supportsIntegrityEnhancementFacility(): Boolean = ??? + override def supportsANSI92FullSQL(): Boolean = false - override def supportsOuterJoins(): Boolean = ??? + override def supportsIntegrityEnhancementFacility(): Boolean = false - override def supportsFullOuterJoins(): Boolean = ??? + override def supportsOuterJoins(): Boolean = false - override def supportsLimitedOuterJoins(): Boolean = ??? + override def supportsFullOuterJoins(): Boolean = false - override def getSchemaTerm: String = ??? + override def supportsLimitedOuterJoins(): Boolean = false - override def getProcedureTerm: String = ??? + override def getSchemaTerm: String = "database" - override def getCatalogTerm: String = ??? + override def getProcedureTerm: String = null - override def isCatalogAtStart: Boolean = ??? 
+ override def getCatalogTerm: String = "database" - override def getCatalogSeparator: String = ??? + override def isCatalogAtStart: Boolean = true - override def supportsSchemasInDataManipulation(): Boolean = ??? + override def getCatalogSeparator: String = "." - override def supportsSchemasInProcedureCalls(): Boolean = ??? + override def supportsSchemasInDataManipulation(): Boolean = false - override def supportsSchemasInTableDefinitions(): Boolean = ??? + override def supportsSchemasInProcedureCalls(): Boolean = false - override def supportsSchemasInIndexDefinitions(): Boolean = ??? + override def supportsSchemasInTableDefinitions(): Boolean = false - override def supportsSchemasInPrivilegeDefinitions(): Boolean = ??? + override def supportsSchemasInIndexDefinitions(): Boolean = false - override def supportsCatalogsInDataManipulation(): Boolean = ??? + override def supportsSchemasInPrivilegeDefinitions(): Boolean = false - override def supportsCatalogsInProcedureCalls(): Boolean = ??? + override def supportsCatalogsInDataManipulation(): Boolean = true - override def supportsCatalogsInTableDefinitions(): Boolean = ??? + override def supportsCatalogsInProcedureCalls(): Boolean = false - override def supportsCatalogsInIndexDefinitions(): Boolean = ??? + override def supportsCatalogsInTableDefinitions(): Boolean = false - override def supportsCatalogsInPrivilegeDefinitions(): Boolean = ??? + override def supportsCatalogsInIndexDefinitions(): Boolean = false - override def supportsPositionedDelete(): Boolean = ??? + override def supportsCatalogsInPrivilegeDefinitions(): Boolean = false - override def supportsPositionedUpdate(): Boolean = ??? + override def supportsPositionedDelete(): Boolean = false - override def supportsSelectForUpdate(): Boolean = ??? + override def supportsPositionedUpdate(): Boolean = false - override def supportsStoredProcedures(): Boolean = ??? 
+ override def supportsSelectForUpdate(): Boolean = false - override def supportsSubqueriesInComparisons(): Boolean = ??? + override def supportsStoredProcedures(): Boolean = false - override def supportsSubqueriesInExists(): Boolean = ??? + override def supportsSubqueriesInComparisons(): Boolean = false - override def supportsSubqueriesInIns(): Boolean = ??? + override def supportsSubqueriesInExists(): Boolean = false - override def supportsSubqueriesInQuantifieds(): Boolean = ??? + override def supportsSubqueriesInIns(): Boolean = false - override def supportsCorrelatedSubqueries(): Boolean = ??? + override def supportsSubqueriesInQuantifieds(): Boolean = false - override def supportsUnion(): Boolean = ??? + override def supportsCorrelatedSubqueries(): Boolean = false - override def supportsUnionAll(): Boolean = ??? + override def supportsUnion(): Boolean = true - override def supportsOpenCursorsAcrossCommit(): Boolean = ??? + override def supportsUnionAll(): Boolean = true - override def supportsOpenCursorsAcrossRollback(): Boolean = ??? + override def supportsOpenCursorsAcrossCommit(): Boolean = false - override def supportsOpenStatementsAcrossCommit(): Boolean = ??? + override def supportsOpenCursorsAcrossRollback(): Boolean = false - override def supportsOpenStatementsAcrossRollback(): Boolean = ??? + override def supportsOpenStatementsAcrossCommit(): Boolean = false - override def getMaxBinaryLiteralLength: Int = ??? + override def supportsOpenStatementsAcrossRollback(): Boolean = false - override def getMaxCharLiteralLength: Int = ??? + override def getMaxBinaryLiteralLength: Int = 0 - override def getMaxColumnNameLength: Int = ??? + override def getMaxCharLiteralLength: Int = 0 - override def getMaxColumnsInGroupBy: Int = ??? + override def getMaxColumnNameLength: Int = 0 - override def getMaxColumnsInIndex: Int = ??? + override def getMaxColumnsInGroupBy: Int = 0 - override def getMaxColumnsInOrderBy: Int = ??? 
+ override def getMaxColumnsInIndex: Int = 0 - override def getMaxColumnsInSelect: Int = ??? + override def getMaxColumnsInOrderBy: Int = 0 - override def getMaxColumnsInTable: Int = ??? + override def getMaxColumnsInSelect: Int = 0 - override def getMaxConnections: Int = ??? + override def getMaxColumnsInTable: Int = 0 - override def getMaxCursorNameLength: Int = ??? + override def getMaxConnections: Int = 0 - override def getMaxIndexLength: Int = ??? + override def getMaxCursorNameLength: Int = 0 - override def getMaxSchemaNameLength: Int = ??? + override def getMaxIndexLength: Int = 0 - override def getMaxProcedureNameLength: Int = ??? + override def getMaxSchemaNameLength: Int = 0 - override def getMaxCatalogNameLength: Int = ??? + override def getMaxProcedureNameLength: Int = 0 - override def getMaxRowSize: Int = ??? + override def getMaxCatalogNameLength: Int = 0 - override def doesMaxRowSizeIncludeBlobs(): Boolean = ??? + override def getMaxRowSize: Int = 0 - override def getMaxStatementLength: Int = ??? + override def doesMaxRowSizeIncludeBlobs(): Boolean = false - override def getMaxStatements: Int = ??? + override def getMaxStatementLength: Int = 0 - override def getMaxTableNameLength: Int = ??? + override def getMaxStatements: Int = 0 - override def getMaxTablesInSelect: Int = ??? + override def getMaxTableNameLength: Int = 90 - override def getMaxUserNameLength: Int = ??? + override def getMaxTablesInSelect: Int = 0 - override def getDefaultTransactionIsolation: Int = ??? + override def getMaxUserNameLength: Int = 0 - override def supportsTransactions(): Boolean = ??? + override def getDefaultTransactionIsolation: Int = Connection.TRANSACTION_NONE - override def supportsTransactionIsolationLevel(level: Int): Boolean = ??? + override def supportsTransactions(): Boolean = false - override def supportsDataDefinitionAndDataManipulationTransactions(): Boolean = ??? 
+ override def supportsTransactionIsolationLevel(level: Int): Boolean = false - override def supportsDataManipulationTransactionsOnly(): Boolean = ??? + override def supportsDataDefinitionAndDataManipulationTransactions(): Boolean = false - override def dataDefinitionCausesTransactionCommit(): Boolean = ??? + override def supportsDataManipulationTransactionsOnly(): Boolean = false - override def dataDefinitionIgnoredInTransactions(): Boolean = ??? + override def dataDefinitionCausesTransactionCommit(): Boolean = false - override def getProcedures(catalog: String, schemaPattern: String, procedureNamePattern: String): ResultSet = ??? + override def dataDefinitionIgnoredInTransactions(): Boolean = false - override def getProcedureColumns(catalog: String, schemaPattern: String, procedureNamePattern: String, columnNamePattern: String): ResultSet = ??? + override def getProcedures(catalog: String, schemaPattern: String, procedureNamePattern: String): ResultSet = { new MongoDbResultSet(null, List.empty, 10) } - override def getTables(catalog: String, schemaPattern: String, tableNamePattern: String, types: Array[String]): ResultSet = ??? + override def getProcedureColumns(catalog: String, schemaPattern: String, procedureNamePattern: String, columnNamePattern: String): ResultSet = { + new MongoDbResultSet(null, List.empty, 10) + } - override def getSchemas: ResultSet = ??? 
+ override def getTables(catalog: String, schemaPattern: String, tableNamePattern: String, types: Array[String]): ResultSet = { + val internalSchemaPattern = Option(schemaPattern).getOrElse("(.*?)") + val internalTableNamePattern = Option(tableNamePattern).getOrElse("(.*?)") + val documents: List[Document] = connection.getDatabaseProvider.databaseNames + .filter(s => internalSchemaPattern.r.findFirstMatchIn(s).nonEmpty) + .flatMap(dbName => { + val collDocuments: List[Document] = connection.getDatabaseProvider + .collectionNames(dbName) + .filter(s => internalTableNamePattern.r.findFirstMatchIn(s).nonEmpty) + .map(collName => { + Document( + "TABLE_CAT" -> BsonString(DatabaseNameKey), + "TABLE_SCHEM" -> BsonString(dbName), + "TABLE_NAME" -> BsonString(collName), + "TABLE_TYPE" -> BsonString("TABLE"), + "REMARKS" -> BsonString("COLLECTION"), + "TYPE_CAT" -> BsonString(DatabaseNameKey), + "TYPE_SCHEM" -> BsonString(dbName), + "TYPE_NAME" -> BsonString("COLLECTION"), + "SELF_REFERENCING_COL_NAME" -> BsonNull(), + "REF_GENERATION" -> BsonNull() + ) + }) + collDocuments + }) + new MongoDbResultSet(null, documents, 10) + } + + override def getSchemas: ResultSet = getSchemas("", "(.*?)") + + override def getCatalogs: ResultSet = { + val documents = List( + Document( + "TABLE_CAT" -> DatabaseNameKey + ) + ) + new MongoDbResultSet(null, documents, 10) + } + + override def getTableTypes: ResultSet = { + val documents = List( + Document( + "TABLE_TYPE" -> "COLLECTION" + ) + ) + new MongoDbResultSet(null, documents, 10) + } + + override def getColumns(catalog: String, schemaPattern: String, tableNamePattern: String, columnNamePattern: String): ResultSet = { + val schemaRegex = schemaPattern.replace("%", "(.*?)").r + val tableNameRegex = tableNamePattern.replace("%", "(.*?)").r + val columnNameRegex = columnNamePattern.replace("%", "(.*?)").r + val databaseNames = connection.getDatabaseProvider.databaseNames.filter(s => schemaRegex.findFirstMatchIn(s).nonEmpty) + val documents 
= ArrayBuffer[Document]() + val schemaExplorer = new SchemaExplorer() + var i = 0 + databaseNames.map(dbName => { + val allCollections = connection.getDatabaseProvider.collectionNames(dbName) + val filtered = allCollections.filter(tbl => tableNameRegex.findFirstMatchIn(tbl).nonEmpty) + filtered.map(table => { + val dao = connection.getDatabaseProvider.dao(s"$dbName$CollectionSeparator$table") + val schemaAnalysis = schemaExplorer.analyzeSchema(dao) + val relevantColumns = schemaAnalysis.fields.filter(field => columnNameRegex.findFirstMatchIn(field.name).nonEmpty) + relevantColumns.foreach(schemaAnalysis => { + val fieldTypeName = schemaAnalysis.fieldTypes.head.fieldType + var decimalDigits: Option[Int] = None + val fieldType = fieldTypeName match { + case "string" => Types.LONGVARCHAR + case "null" => Types.VARCHAR + case "objectId" => Types.VARCHAR + case "date" => Types.DATE + case "int" => + decimalDigits = Some(0) + Types.INTEGER + case "long" => + decimalDigits = Some(0) + Types.BIGINT + case "number" => + decimalDigits = Some(Int.MaxValue) + Types.DOUBLE + case "double" => + decimalDigits = Some(Int.MaxValue) + Types.DOUBLE + case "array" => Types.ARRAY + case "bool" => Types.BOOLEAN + case "object" => Types.JAVA_OBJECT + case _ => + Types.VARCHAR + } + documents += Converter.toDocument( + Map( + "TABLE_CAT" -> DatabaseNameKey, + "TABLE_SCHEM" -> dbName, + "TABLE_NAME" -> table, + "COLUMN_NAME" -> schemaAnalysis.name, + "DATA_TYPE" -> fieldType, + "TYPE_NAME" -> fieldTypeName, + "COLUMN_SIZE" -> null, + "BUFFER_LENGTH" -> null, + "DECIMAL_DIGITS" -> decimalDigits.getOrElse(null), + "NUM_PREC_RADIX" -> null, + "NULLABLE" -> DatabaseMetaData.columnNullable, // how to check + "REMARKS" -> null, + "COLUMN_DEF" -> null, + "SQL_DATA_TYPE" -> null, + "SQL_DATETIME_SUB" -> null, + "CHAR_OCTET_LENGTH" -> null, + "ORDINAL_POSITION" -> i, + "IS_NULLABLE" -> "YES", + "SCOPE_CATLOG" -> null, + "SCOPE_SCHEMA" -> null, + "SCOPE_TABLE" -> null, + "SOURCE_DATA_TYPE" -> null, 
+ "IS_AUTOINCREMENT" -> "NO" + ) + ) + i = i + 1 + }) + }) + }) + new MongoDbResultSet(null, documents.toList, 10) + } + + override def getColumnPrivileges(catalog: String, schema: String, table: String, columnNamePattern: String): ResultSet = { + null + } + + override def getTablePrivileges(catalog: String, schemaPattern: String, tableNamePattern: String): ResultSet = { + null + } + + override def getBestRowIdentifier(catalog: String, schema: String, table: String, scope: Int, nullable: Boolean): ResultSet = { + null + } + + override def getVersionColumns(catalog: String, schema: String, table: String): ResultSet = { + null + } + + override def getPrimaryKeys(catalog: String, schema: String, table: String): ResultSet = { + val dao = connection.getDatabaseProvider.dao(s"$schema$CollectionSeparator$table") + val uniqueIndices = dao.indexList().filter(_.unique) + val pkDocuments = uniqueIndices.map(i => + Map( + "TABLE_CAT" -> DatabaseNameKey, + "TABLE_SCHEM" -> schema, + "TABLE_NAME" -> table, + "COLUMN_NAME" -> i.fields.head, + "KEY_SEQ" -> 0, + "PK_NAME" -> i.name + ) + ) + new MongoDbResultSet(null, pkDocuments.map(i => Converter.toDocument(i)), 10) + + } + + override def getImportedKeys(catalog: String, schema: String, table: String): ResultSet = { + null + } + + override def getExportedKeys(catalog: String, schema: String, table: String): ResultSet = { + null + } + + override def getCrossReference( + parentCatalog: String, + parentSchema: String, + parentTable: String, + foreignCatalog: String, + foreignSchema: String, + foreignTable: String + ): ResultSet = { + null + } + + override def getTypeInfo: ResultSet = { + val objectIdValue = "OBJECT_ID" + val documentValue = "DOCUMENT" + val types = List( + Map( + "TYPE_NAME" -> objectIdValue, + "DATA_TYPE" -> Types.VARCHAR, + "PRECISION" -> "800", + "LITERAL_PREFIX" -> "'", + "LITERAL_SUFFIX" -> "'", + "CREATE_PARAMS" -> null, + "NULLABLE" -> DatabaseMetaData.typeNullable, + "CASE_SENSITIVE" -> true, + "SEARCHABLE" 
-> DatabaseMetaData.typeSearchable, + "UNSIGNED_ATTRIBUTE" -> false, + "FIXED_PREC_SCALE" -> false, + "AUTO_INCREMENT" -> false, + "LOCAL_TYPE_NAME" -> objectIdValue, + "MINIMUM_SCALE" -> 0, + "MAXIMUM_SCALE" -> 0, + "SQL_DATA_TYPE" -> null, + "SQL_DATETIME_SUB" -> null, + "NUM_PREC_RADIX" -> 10 + ), + Map( + "TYPE_NAME" -> documentValue, + "DATA_TYPE" -> Types.CLOB, + "PRECISION" -> "16777216", + "LITERAL_PREFIX" -> "'", + "LITERAL_SUFFIX" -> "'", + "CREATE_PARAMS" -> null, + "NULLABLE" -> DatabaseMetaData.typeNullable, + "CASE_SENSITIVE" -> true, + "SEARCHABLE" -> DatabaseMetaData.typeSearchable, + "UNSIGNED_ATTRIBUTE" -> false, + "FIXED_PREC_SCALE" -> false, + "AUTO_INCREMENT" -> false, + "LOCAL_TYPE_NAME" -> documentValue, + "MINIMUM_SCALE" -> 0, + "MAXIMUM_SCALE" -> 0, + "SQL_DATA_TYPE" -> null, + "SQL_DATETIME_SUB" -> null, + "NUM_PREC_RADIX" -> 10 + ) + ) + new MongoDbResultSet(null, types.map(i => Converter.toDocument(i)), 10) + } + + override def getIndexInfo(catalog: String, schema: String, table: String, unique: Boolean, approximate: Boolean): ResultSet = { + val schemaRegex = schema.r + val tableNameRegex = table.r + val databaseNames = connection.getDatabaseProvider.databaseNames.filter(s => schemaRegex.findFirstMatchIn(s).nonEmpty) + val documents = ArrayBuffer[Document]() + databaseNames.map(dbName => { + val allCollections = connection.getDatabaseProvider.collectionNames(dbName) + allCollections + .filter(tbl => tableNameRegex.findFirstMatchIn(tbl).nonEmpty) + .map(table => { + val dao = connection.getDatabaseProvider.dao(s"$dbName$CollectionSeparator$table") + dao + .indexList() + .map(index => { + val fields = index.fields + fields.zipWithIndex.foreach { case (field, i) => + documents += Converter.toDocument( + Map( + "TABLE_CAT" -> DatabaseNameKey, + "TABLE_SCHEM" -> dbName, + "TABLE_NAME" -> table, + "NON_UNIQUE" -> (if (!index.unique) "YES" else "NO"), + "INDEX_QUALIFIER" -> dbName, + "INDEX_NAME" -> index.name, + "TYPE" -> 0, + 
"ORDINAL_POSITION" -> i, + "COLUMN_NAME" -> field, + "ASC_OR_DESC" -> "A", + "CARDINALITY" -> "0", + "PAGES" -> "0", + "FILTER_CONDITION" -> "" + ) + ) + } + }) + }) + }) + new MongoDbResultSet(null, documents.toList, 10) + } + + override def supportsResultSetType(`type`: Int): Boolean = { + `type` == ResultSet.TYPE_FORWARD_ONLY + } + + override def supportsResultSetConcurrency(`type`: Int, concurrency: Int): Boolean = false + + override def ownUpdatesAreVisible(`type`: Int): Boolean = false + + override def ownDeletesAreVisible(`type`: Int): Boolean = false + + override def ownInsertsAreVisible(`type`: Int): Boolean = false + + override def othersUpdatesAreVisible(`type`: Int): Boolean = false + + override def othersDeletesAreVisible(`type`: Int): Boolean = false + + override def othersInsertsAreVisible(`type`: Int): Boolean = false + + override def updatesAreDetected(`type`: Int): Boolean = false + + override def deletesAreDetected(`type`: Int): Boolean = false + + override def insertsAreDetected(`type`: Int): Boolean = false + + override def supportsBatchUpdates(): Boolean = false + + override def getUDTs(catalog: String, schemaPattern: String, typeNamePattern: String, types: Array[Int]): ResultSet = { + new MongoDbResultSet(null, List.empty, 10) + } + + override def getConnection: Connection = connection - override def getCatalogs: ResultSet = ??? + override def supportsSavepoints(): Boolean = false - override def getTableTypes: ResultSet = ??? + override def supportsNamedParameters(): Boolean = false - override def getColumns(catalog: String, schemaPattern: String, tableNamePattern: String, columnNamePattern: String): ResultSet = ??? + override def supportsMultipleOpenResults(): Boolean = false - override def getColumnPrivileges(catalog: String, schema: String, table: String, columnNamePattern: String): ResultSet = ??? 
+ override def supportsGetGeneratedKeys(): Boolean = false - override def getTablePrivileges(catalog: String, schemaPattern: String, tableNamePattern: String): ResultSet = ??? + override def getSuperTypes(catalog: String, schemaPattern: String, typeNamePattern: String): ResultSet = { new MongoDbResultSet(null, List.empty, 10) } - override def getBestRowIdentifier(catalog: String, schema: String, table: String, scope: Int, nullable: Boolean): ResultSet = ??? + override def getSuperTables(catalog: String, schemaPattern: String, tableNamePattern: String): ResultSet = { new MongoDbResultSet(null, List.empty, 10) } - override def getVersionColumns(catalog: String, schema: String, table: String): ResultSet = ??? + override def getAttributes(catalog: String, schemaPattern: String, typeNamePattern: String, attributeNamePattern: String): ResultSet = { + new MongoDbResultSet(null, List.empty, 10) + } - override def getPrimaryKeys(catalog: String, schema: String, table: String): ResultSet = ??? + override def supportsResultSetHoldability(holdability: Int): Boolean = false - override def getImportedKeys(catalog: String, schema: String, table: String): ResultSet = ??? + override def getResultSetHoldability: Int = ResultSet.HOLD_CURSORS_OVER_COMMIT - override def getExportedKeys(catalog: String, schema: String, table: String): ResultSet = ??? + override def getDatabaseMajorVersion: Int = semVer.getMajor - override def getCrossReference(parentCatalog: String, parentSchema: String, parentTable: String, foreignCatalog: String, foreignSchema: String, foreignTable: String): ResultSet = ??? + override def getDatabaseMinorVersion: Int = semVer.getMinor - override def getTypeInfo: ResultSet = ??? + override def getJDBCMajorVersion: Int = jdbcSemVer.getMajor - override def getIndexInfo(catalog: String, schema: String, table: String, unique: Boolean, approximate: Boolean): ResultSet = ??? 
+ override def getJDBCMinorVersion: Int = jdbcSemVer.getMinor - override def supportsResultSetType(`type`: Int): Boolean = ??? + override def getSQLStateType: Int = DatabaseMetaData.sqlStateXOpen - override def supportsResultSetConcurrency(`type`: Int, concurrency: Int): Boolean = ??? + override def locatorsUpdateCopy(): Boolean = false - override def ownUpdatesAreVisible(`type`: Int): Boolean = ??? + override def supportsStatementPooling(): Boolean = false - override def ownDeletesAreVisible(`type`: Int): Boolean = ??? + override def getRowIdLifetime: RowIdLifetime = null - override def ownInsertsAreVisible(`type`: Int): Boolean = ??? + override def getSchemas(catalog: String, schemaPattern: String): ResultSet = { + val documents = connection.getDatabaseProvider.databaseNames + .filter(s => schemaPattern.r.findFirstMatchIn(s).nonEmpty) + .map(dbName => { + Document( + "TABLE_SCHEM" -> dbName, + "TABLE_CATALOG" -> DatabaseNameKey + ) + }) + new MongoDbResultSet(null, documents, 10) + } - override def othersUpdatesAreVisible(`type`: Int): Boolean = ??? + override def supportsStoredFunctionsUsingCallSyntax(): Boolean = false - override def othersDeletesAreVisible(`type`: Int): Boolean = ??? + override def autoCommitFailureClosesAllResultSets(): Boolean = false - override def othersInsertsAreVisible(`type`: Int): Boolean = ??? + override def getClientInfoProperties: ResultSet = { new MongoDbResultSet(null, List.empty, 10) } - override def updatesAreDetected(`type`: Int): Boolean = ??? + override def getFunctions(catalog: String, schemaPattern: String, functionNamePattern: String): ResultSet = { new MongoDbResultSet(null, List.empty, 10) } - override def deletesAreDetected(`type`: Int): Boolean = ??? + override def getFunctionColumns(catalog: String, schemaPattern: String, functionNamePattern: String, columnNamePattern: String): ResultSet = { + new MongoDbResultSet(null, List.empty, 10) + } - override def insertsAreDetected(`type`: Int): Boolean = ??? 
+ override def getPseudoColumns(catalog: String, schemaPattern: String, tableNamePattern: String, columnNamePattern: String): ResultSet = { + new MongoDbResultSet(null, List.empty, 10) + } - override def supportsBatchUpdates(): Boolean = ??? + override def generatedKeyAlwaysReturned(): Boolean = false - override def getUDTs(catalog: String, schemaPattern: String, typeNamePattern: String, types: Array[Int]): ResultSet = ??? + override def unwrap[T](iface: Class[T]): T = null.asInstanceOf[T] - override def getConnection: Connection = ??? - - override def supportsSavepoints(): Boolean = ??? - - override def supportsNamedParameters(): Boolean = ??? - - override def supportsMultipleOpenResults(): Boolean = ??? - - override def supportsGetGeneratedKeys(): Boolean = ??? - - override def getSuperTypes(catalog: String, schemaPattern: String, typeNamePattern: String): ResultSet = ??? - - override def getSuperTables(catalog: String, schemaPattern: String, tableNamePattern: String): ResultSet = ??? - - override def getAttributes(catalog: String, schemaPattern: String, typeNamePattern: String, attributeNamePattern: String): ResultSet = ??? - - override def supportsResultSetHoldability(holdability: Int): Boolean = ??? - - override def getResultSetHoldability: Int = ??? - - override def getDatabaseMajorVersion: Int = ??? - - override def getDatabaseMinorVersion: Int = ??? - - override def getJDBCMajorVersion: Int = ??? - - override def getJDBCMinorVersion: Int = ??? - - override def getSQLStateType: Int = ??? - - override def locatorsUpdateCopy(): Boolean = ??? - - override def supportsStatementPooling(): Boolean = ??? - - override def getRowIdLifetime: RowIdLifetime = ??? - - override def getSchemas(catalog: String, schemaPattern: String): ResultSet = ??? - - override def supportsStoredFunctionsUsingCallSyntax(): Boolean = ??? - - override def autoCommitFailureClosesAllResultSets(): Boolean = ??? - - override def getClientInfoProperties: ResultSet = ??? 
- - override def getFunctions(catalog: String, schemaPattern: String, functionNamePattern: String): ResultSet = ??? - - override def getFunctionColumns(catalog: String, schemaPattern: String, functionNamePattern: String, columnNamePattern: String): ResultSet = ??? - - override def getPseudoColumns(catalog: String, schemaPattern: String, tableNamePattern: String, columnNamePattern: String): ResultSet = ??? - - override def generatedKeyAlwaysReturned(): Boolean = ??? - - override def unwrap[T](iface: Class[T]): T = ??? - - override def isWrapperFor(iface: Class[_]): Boolean = ??? + override def isWrapperFor(iface: Class[_]): Boolean = false } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcCloseable.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcCloseable.scala new file mode 100644 index 00000000..f09d75a2 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcCloseable.scala @@ -0,0 +1,34 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import java.sql.{SQLException, SQLFeatureNotSupportedException} + +trait MongoJdbcCloseable extends AutoCloseable { + + protected def checkClosed(): Unit = { + if (closed) { + throw new SQLException("Closed " + this.getClass.getSimpleName) + } + } + + private var closed: Boolean = false + + override def close(): Unit = { + checkClosed() + closed = true + } + + def isClosed: Boolean = closed + + def sqlFeatureNotSupported[A <: Any](message: String): A = { + sqlFeatureNotSupported(Option(message).filter(_.trim.nonEmpty)) + } + + def sqlFeatureNotSupported[A <: Any](message: Option[String] = None): A = { + checkClosed() + if (message.nonEmpty) { + throw new SQLFeatureNotSupportedException(message.get) + } + throw new SQLFeatureNotSupportedException() + } + +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcConnection.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcConnection.scala index 04931edb..6b5e5075 100644 --- 
a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcConnection.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcConnection.scala @@ -1,17 +1,22 @@ package dev.mongocamp.driver.mongodb.jdbc -import org.mongodb.scala.MongoClient +import dev.mongocamp.driver.mongodb.Converter +import dev.mongocamp.driver.mongodb.bson.BsonConverter +import dev.mongocamp.driver.mongodb.database.DatabaseProvider +import dev.mongocamp.driver.mongodb.jdbc.statement.MongoPreparedStatement import java.{sql, util} -import java.sql.{Blob, CallableStatement, Clob, Connection, DatabaseMetaData, NClob, PreparedStatement, SQLWarning, SQLXML, Savepoint, Statement, Struct} +import java.sql.{Blob, CallableStatement, Clob, Connection, DatabaseMetaData, NClob, PreparedStatement, SQLException, SQLWarning, SQLXML, Savepoint, Statement, Struct} import java.util.Properties import java.util.concurrent.Executor +import scala.jdk.CollectionConverters._ -class MongoJdbcConnection(client: MongoClient) extends Connection { - private var _isClosed = false +class MongoJdbcConnection(databaseProvider: DatabaseProvider) extends Connection with MongoJdbcCloseable { private var _isReadOnly = false - override def createStatement(): Statement = new MongoPreparedStatement(this, null) + def getDatabaseProvider: DatabaseProvider = databaseProvider + + override def createStatement(): Statement = MongoPreparedStatement(this) override def prepareStatement(sql: String): PreparedStatement = { new MongoPreparedStatement(this, sql) @@ -19,10 +24,14 @@ class MongoJdbcConnection(client: MongoClient) extends Connection { override def prepareCall(sql: String): CallableStatement = { checkClosed() - null + createMongoStatement(Some(sql)) } - override def nativeSQL(sql: String): String = ??? 
+ override def nativeSQL(sql: String): String = { + checkClosed() + // todo: return debug string + sql + } override def setAutoCommit(autoCommit: Boolean): Unit = { checkClosed() @@ -33,25 +42,20 @@ class MongoJdbcConnection(client: MongoClient) extends Connection { true } - override def commit(): Unit = { checkClosed() } - override def rollback(): Unit = { checkClosed() } - override def close(): Unit = { - _isClosed = true - client.close() + super.close() + databaseProvider.client.close() } - override def isClosed: Boolean = _isClosed - - override def getMetaData: DatabaseMetaData = ??? + override def getMetaData: DatabaseMetaData = new MongoDatabaseMetaData(this) override def setReadOnly(readOnly: Boolean): Unit = { checkClosed() @@ -67,13 +71,9 @@ class MongoJdbcConnection(client: MongoClient) extends Connection { override def getCatalog: String = null override def setTransactionIsolation(level: Int): Unit = { - checkClosed() - // Since the only valid value for MongoDB is Connection.TRANSACTION_NONE, and the javadoc for this method - // indicates that this is not a valid value for level here, throw unsupported operation exception. - throw new UnsupportedOperationException("MongoDB provides no support for transactions.") + sqlFeatureNotSupported() } - override def getTransactionIsolation: Int = { checkClosed() Connection.TRANSACTION_NONE @@ -84,91 +84,190 @@ class MongoJdbcConnection(client: MongoClient) extends Connection { null } - override def clearWarnings(): Unit = checkClosed() + override def clearWarnings(): Unit = { + checkClosed() + } - override def createStatement(resultSetType: Int, resultSetConcurrency: Int): Statement = ??? + def createMongoStatement(sqlOption: Option[String] = None): MongoPreparedStatement = { + checkClosed() + val stmt = statement.MongoPreparedStatement(this) + sqlOption.foreach(stmt.setSql) + stmt + } - override def prepareStatement(sql: String, resultSetType: Int, resultSetConcurrency: Int): PreparedStatement = ??? 
+ override def createStatement(resultSetType: Int, resultSetConcurrency: Int): Statement = { + checkClosed() + createMongoStatement() + } - override def prepareCall(sql: String, resultSetType: Int, resultSetConcurrency: Int): CallableStatement = ??? + override def prepareStatement(sql: String, resultSetType: Int, resultSetConcurrency: Int): PreparedStatement = { + checkClosed() + createMongoStatement(Some(sql)) + } - override def getTypeMap: util.Map[String, Class[_]] = ??? + override def prepareCall(sql: String, resultSetType: Int, resultSetConcurrency: Int): CallableStatement = { + checkClosed() + createMongoStatement(Some(sql)) + } - override def setTypeMap(map: util.Map[String, Class[_]]): Unit = ??? + override def getTypeMap: util.Map[String, Class[_]] = { + checkClosed() + null + } - override def setHoldability(holdability: Int): Unit = ??? + override def setTypeMap(map: util.Map[String, Class[_]]): Unit = { + checkClosed() + } - override def getHoldability: Int = ??? + override def setHoldability(holdability: Int): Unit = { + checkClosed() + } - override def setSavepoint(): Savepoint = ??? + override def getHoldability: Int = { + checkClosed() + 0 + } - override def setSavepoint(name: String): Savepoint = ??? + override def setSavepoint(): Savepoint = { + checkClosed() + null + } - override def rollback(savepoint: Savepoint): Unit = ??? + override def setSavepoint(name: String): Savepoint = { + checkClosed() + null + } - override def releaseSavepoint(savepoint: Savepoint): Unit = ??? + override def rollback(savepoint: Savepoint): Unit = { + checkClosed() + } - override def createStatement(resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): Statement = ??? + override def releaseSavepoint(savepoint: Savepoint): Unit = { + checkClosed() + } - override def prepareStatement(sql: String, resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): PreparedStatement = ??? 
+ override def createStatement(resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): Statement = { + createMongoStatement() + } - override def prepareCall(sql: String, resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): CallableStatement = ??? + override def prepareStatement(sql: String, resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): PreparedStatement = { + createMongoStatement(Option(sql)) + } - override def prepareStatement(sql: String, autoGeneratedKeys: Int): PreparedStatement = ??? + override def prepareCall(sql: String, resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): CallableStatement = { + checkClosed() + createMongoStatement(Some(sql)) + } - override def prepareStatement(sql: String, columnIndexes: Array[Int]): PreparedStatement = ??? + override def prepareStatement(sql: String, autoGeneratedKeys: Int): PreparedStatement = { + createMongoStatement(Option(sql)) + } - override def prepareStatement(sql: String, columnNames: Array[String]): PreparedStatement = ??? + override def prepareStatement(sql: String, columnIndexes: Array[Int]): PreparedStatement = { + createMongoStatement(Option(sql)) + } - override def createClob(): Clob = ??? + override def prepareStatement(sql: String, columnNames: Array[String]): PreparedStatement = { + createMongoStatement(Option(sql)) + } - override def createBlob(): Blob = ??? + override def createClob(): Clob = { + checkClosed() + null + } - override def createNClob(): NClob = ??? + override def createBlob(): Blob = { + checkClosed() + null + } + + override def createNClob(): NClob = { + checkClosed() + null + } - override def createSQLXML(): SQLXML = ??? + override def createSQLXML(): SQLXML = { + checkClosed() + null + } - override def isValid(timeout: Int): Boolean = ??? + override def isValid(timeout: Int): Boolean = { + checkClosed() + true + } - override def setClientInfo(name: String, value: String): Unit = ??? 
+ override def setClientInfo(name: String, value: String): Unit = { + checkClosed() + if ("ApplicationName".equalsIgnoreCase(name) || "appName".equalsIgnoreCase(name) || "name".equalsIgnoreCase(name)) { + if (value != null) { + databaseProvider.closeClient() + databaseProvider.config.applicationName = value + } + } + } - override def setClientInfo(properties: Properties): Unit = ??? + override def setClientInfo(properties: Properties): Unit = { + properties.asScala.foreach(entry => setClientInfo(entry._1, entry._2)) + } - override def getClientInfo(name: String): String = ??? + override def getClientInfo(name: String): String = { + checkClosed() + if ("ApplicationName".equalsIgnoreCase(name) || "appName".equalsIgnoreCase(name) || "name".equalsIgnoreCase(name)) { + databaseProvider.config.applicationName + } else { + null + } + } - override def getClientInfo: Properties = ??? + override def getClientInfo: Properties = { + val properties = new Properties() + properties.setProperty("ApplicationName", databaseProvider.config.applicationName) + val document = Converter.toDocument(databaseProvider.config) + BsonConverter.asMap(document).foreach(entry => properties.setProperty(entry._1, entry._2.toString)) + properties + } - override def createArrayOf(typeName: String, elements: Array[AnyRef]): sql.Array = ??? + override def createArrayOf(typeName: String, elements: Array[AnyRef]): sql.Array = { + checkClosed() + null + } - override def createStruct(typeName: String, attributes: Array[AnyRef]): Struct = ??? + override def createStruct(typeName: String, attributes: Array[AnyRef]): Struct = { + checkClosed() + null + } - override def setSchema(schema: String): Unit = ??? + override def setSchema(schema: String): Unit = { + checkClosed() + databaseProvider.setDefaultDatabaseName(schema) + } - override def getSchema: String = ??? + override def getSchema: String = { + checkClosed() + databaseProvider.DefaultDatabaseName + } - override def abort(executor: Executor): Unit = ??? 
+ override def abort(executor: Executor): Unit = { + checkClosed() + } - override def setNetworkTimeout(executor: Executor, milliseconds: Int): Unit = ??? + override def setNetworkTimeout(executor: Executor, milliseconds: Int): Unit = { + checkClosed() + } - override def getNetworkTimeout: Int = ??? + override def getNetworkTimeout: Int = { + checkClosed() + 0 + } - @throws[SQLAlreadyClosedException] override def unwrap[T](iface: Class[T]): T = { checkClosed() null.asInstanceOf[T] } - @throws[SQLAlreadyClosedException] override def isWrapperFor(iface: Class[_]): Boolean = { checkClosed() false } - - @throws[SQLAlreadyClosedException] - private def checkClosed(): Unit = { - if (isClosed) { - throw new SQLAlreadyClosedException(this.getClass.getSimpleName) - } - } } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala index 5b48b83d..ddbc856e 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala @@ -11,10 +11,12 @@ import java.util.logging.Logger import scala.jdk.CollectionConverters.CollectionHasAsScala class MongoJdbcDriver extends java.sql.Driver { + private val propertyInfoHelper = new MongodbJdbcDriverPropertyInfoHelper() private lazy val semVer = new Semver(BuildInfo.version) - /** Connect to the database using a URL like : jdbc:mongodb://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]] The + /** + * Connect to the database using a URL like : jdbc:mongodb://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]] The * URL excepting the jdbc: prefix is passed as it is to the MongoDb native Java driver. 
*/ override def connect(url: String, info: Properties): Connection = { @@ -23,19 +25,21 @@ class MongoJdbcDriver extends java.sql.Driver { } val connectionUrl = url.replaceFirst("^jdbc:", "") - val username = Option(info.getProperty("user")).filter(_.trim.nonEmpty) - val password = Option(info.getProperty("password")).filter(_.trim.nonEmpty) + val username = Option(info.getProperty(MongodbJdbcDriverPropertyInfoHelper.AuthUser)).filter(_.trim.nonEmpty) + val password = Option(info.getProperty(MongodbJdbcDriverPropertyInfoHelper.AuthPassword)).filter(_.trim.nonEmpty) val string = new ConnectionString(connectionUrl) + val database = Option(string.getDatabase).getOrElse(Option(info.getProperty(MongodbJdbcDriverPropertyInfoHelper.Database)).getOrElse("admin")) + val authDb = Option(info.getProperty(MongodbJdbcDriverPropertyInfoHelper.AuthDatabase)).getOrElse(Option(string.getDatabase).getOrElse("admin")) val provider = DatabaseProvider( MongoConfig( - string.getDatabase, + database, MongoConfig.DefaultHost, MongoConfig.DefaultPort, - string.getApplicationName, + Option(string.getApplicationName).filter(_.trim.nonEmpty).getOrElse(info.getProperty(MongodbJdbcDriverPropertyInfoHelper.ApplicationName)), username, password, - string.getDatabase, + authDb, serverAddressList = string.getHosts.asScala.toList.map(h => new ServerAddress(h)) ) ) @@ -47,7 +51,7 @@ class MongoJdbcDriver extends java.sql.Driver { internalUrl.startsWith("mongodb://") || internalUrl.startsWith("mongodb+srv://") } - override def getPropertyInfo(url: String, info: Properties): Array[DriverPropertyInfo] = ??? 
+ override def getPropertyInfo(url: String, info: Properties): Array[DriverPropertyInfo] = propertyInfoHelper.getPropertyInfo override def getMajorVersion: Int = semVer.getMajor diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoPreparedStatement.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoPreparedStatement.scala deleted file mode 100644 index b0f182f5..00000000 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoPreparedStatement.scala +++ /dev/null @@ -1,306 +0,0 @@ -package dev.mongocamp.driver.mongodb.jdbc - -import java.io.{InputStream, Reader} -import java.net.URL -import java.sql -import java.sql.{Blob, Clob, Connection, Date, NClob, ParameterMetaData, PreparedStatement, Ref, ResultSet, ResultSetMetaData, RowId, SQLException, SQLFeatureNotSupportedException, SQLWarning, SQLXML, Time, Timestamp} -import java.util.Calendar - -class MongoPreparedStatement(connection: MongoJdbcConnection, private var query: String) extends PreparedStatement { - private var lastResultSet: ResultSet = null - private var _isClosed = false - private var maxRows = -1 - private var fetchSize = -1 - - override def executeQuery(sql: String): ResultSet = { - checkClosed() - query = sql - if (lastResultSet != null && !lastResultSet.isClosed) { - lastResultSet.close(); - } - if (query == null) { - throw new SQLException("Null statement."); - } - // todo: execute and generate result set - // lastResultSet = connection.getScriptEngine().execute(query, fetchSize); - lastResultSet - } - - override def executeUpdate(sql: String): Int = ??? 
- - override def executeQuery(): ResultSet = { - execute(query) - lastResultSet - } - - override def execute(sql: String): Boolean = { - executeQuery(sql) - lastResultSet != null - } - - override def executeUpdate(): Int = executeUpdate(query) - - override def setNull(parameterIndex: Int, sqlType: Int): Unit = {} - - override def setBoolean(parameterIndex: Int, x: Boolean): Unit = {} - - override def setByte(parameterIndex: Int, x: Byte): Unit = {} - - override def setShort(parameterIndex: Int, x: Short): Unit = {} - - override def setInt(parameterIndex: Int, x: Int): Unit = {} - - override def setLong(parameterIndex: Int, x: Long): Unit = {} - - override def setFloat(parameterIndex: Int, x: Float): Unit = {} - - override def setDouble(parameterIndex: Int, x: Double): Unit = {} - - override def setBigDecimal(parameterIndex: Int, x: java.math.BigDecimal): Unit = {} - - override def setString(parameterIndex: Int, x: String): Unit = {} - - override def setBytes(parameterIndex: Int, x: Array[Byte]): Unit = {} - - override def setDate(parameterIndex: Int, x: Date): Unit = {} - - override def setTime(parameterIndex: Int, x: Time): Unit = {} - - override def setTimestamp(parameterIndex: Int, x: Timestamp): Unit = {} - - override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Int): Unit = {} - - override def setUnicodeStream(parameterIndex: Int, x: InputStream, length: Int): Unit = {} - - override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Int): Unit = {} - - override def clearParameters(): Unit = {} - - override def setObject(parameterIndex: Int, x: Any, targetSqlType: Int): Unit = {} - - override def setObject(parameterIndex: Int, x: Any): Unit = {} - - override def execute(): Boolean = { - query != null && execute(query) - } - - override def addBatch(): Unit = {} - - override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Int): Unit = {} - - override def setRef(parameterIndex: Int, x: Ref): Unit = {} - - 
override def setBlob(parameterIndex: Int, x: Blob): Unit = {} - - override def setClob(parameterIndex: Int, x: Clob): Unit = {} - - override def setArray(parameterIndex: Int, x: sql.Array): Unit = {} - - override def getMetaData: ResultSetMetaData = { - null - } - - override def setDate(parameterIndex: Int, x: Date, cal: Calendar): Unit = {} - - override def setTime(parameterIndex: Int, x: Time, cal: Calendar): Unit = {} - - override def setTimestamp(parameterIndex: Int, x: Timestamp, cal: Calendar): Unit = {} - - override def setNull(parameterIndex: Int, sqlType: Int, typeName: String): Unit = {} - - override def setURL(parameterIndex: Int, x: URL): Unit = {} - - override def getParameterMetaData: ParameterMetaData = null - - override def setRowId(parameterIndex: Int, x: RowId): Unit = {} - - override def setNString(parameterIndex: Int, value: String): Unit = {} - - override def setNCharacterStream(parameterIndex: Int, value: Reader, length: Long): Unit = {} - - override def setNClob(parameterIndex: Int, value: NClob): Unit = {} - - override def setClob(parameterIndex: Int, reader: Reader, length: Long): Unit = {} - - override def setBlob(parameterIndex: Int, inputStream: InputStream, length: Long): Unit = {} - - override def setNClob(parameterIndex: Int, reader: Reader, length: Long): Unit = {} - - override def setSQLXML(parameterIndex: Int, xmlObject: SQLXML): Unit = {} - - override def setObject(parameterIndex: Int, x: Any, targetSqlType: Int, scaleOrLength: Int): Unit = {} - - override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Long): Unit = {} - - override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Long): Unit = {} - - override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Long): Unit = {} - - override def setAsciiStream(parameterIndex: Int, x: InputStream): Unit = {} - - override def setBinaryStream(parameterIndex: Int, x: InputStream): Unit = {} - - override def 
setCharacterStream(parameterIndex: Int, reader: Reader): Unit = {} - - override def setNCharacterStream(parameterIndex: Int, value: Reader): Unit = {} - - override def setClob(parameterIndex: Int, reader: Reader): Unit = {} - - override def setBlob(parameterIndex: Int, inputStream: InputStream): Unit = {} - - override def setNClob(parameterIndex: Int, reader: Reader): Unit = {} - - override def close(): Unit = { - _isClosed = true - if (lastResultSet == null || lastResultSet.isClosed) { - return - } - lastResultSet.close() - } - - override def getMaxFieldSize: Int = { - 0 - } - - override def setMaxFieldSize(max: Int): Unit = {} - - override def getMaxRows: Int = maxRows - - override def setMaxRows(max: Int): Unit = maxRows = max - - override def setEscapeProcessing(enable: Boolean): Unit = {} - - override def getQueryTimeout: Int = { - checkClosed() - throw new SQLFeatureNotSupportedException("MongoDB provides no support for query timeouts.") - } - - override def setQueryTimeout(seconds: Int): Unit = { - checkClosed() - throw new SQLFeatureNotSupportedException("MongoDB provides no support for query timeouts.") - } - - override def cancel(): Unit = { - checkClosed() - throw new SQLFeatureNotSupportedException("MongoDB provides no support for query timeouts.") - } - - override def getWarnings: SQLWarning = { - checkClosed() - null - } - - override def clearWarnings(): Unit = { - checkClosed() - } - - override def setCursorName(name: String): Unit = { - checkClosed() - } - - override def getResultSet: ResultSet = { - checkClosed() - lastResultSet; - } - - override def getUpdateCount: Int = { - checkClosed() - -1 - } - - override def getMoreResults: Boolean = false - - override def setFetchDirection(direction: Int): Unit = {} - - override def getFetchDirection: Int = ResultSet.FETCH_FORWARD - - override def setFetchSize(rows: Int): Unit = { - if (rows <= 1) { - throw new SQLException("Fetch size must be > 1. 
Actual: " + rows) - } - fetchSize = rows - } - - override def getFetchSize: Int = fetchSize - - override def getResultSetConcurrency: Int = throw new SQLFeatureNotSupportedException(); - - override def getResultSetType: Int = ResultSet.TYPE_FORWARD_ONLY - - override def addBatch(sql: String): Unit = {} - - - override def clearBatch(): Unit = {} - - override def executeBatch(): Array[Int] = { - checkClosed() - null - } - - override def getConnection: Connection = { - checkClosed() - connection - } - - override def getMoreResults(current: Int): Boolean = { - checkClosed() - false - } - - override def getGeneratedKeys: ResultSet = { - checkClosed() - null - } - - override def executeUpdate(sql: String, autoGeneratedKeys: Int): Int = { - checkClosed() - 0 - } - - override def executeUpdate(sql: String, columnIndexes: Array[Int]): Int = { - checkClosed() - 0 - } - - override def executeUpdate(sql: String, columnNames: Array[String]): Int = { - checkClosed() - 0 - } - - override def execute(sql: String, autoGeneratedKeys: Int): Boolean = { - checkClosed() - false - } - - override def execute(sql: String, columnIndexes: Array[Int]): Boolean = { - checkClosed() - false - } - - override def execute(sql: String, columnNames: Array[String]): Boolean = { - checkClosed() - false - } - - override def getResultSetHoldability: Int = 0 - - override def isClosed: Boolean = _isClosed - - override def setPoolable(poolable: Boolean): Unit = {} - - override def isPoolable: Boolean = false - - override def closeOnCompletion(): Unit = {} - - override def isCloseOnCompletion: Boolean = false - - override def unwrap[T](iface: Class[T]): T = null.asInstanceOf[T] - - override def isWrapperFor(iface: Class[_]): Boolean = false - - private def checkClosed(): Unit = { - if (isClosed) { - throw new SQLAlreadyClosedException(this.getClass.getSimpleName) - } - } -} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongodbJdbcDriverPropertyInfoHelper.scala 
package dev.mongocamp.driver.mongodb.jdbc
import MongodbJdbcDriverPropertyInfoHelper._

/** Connection-property keys (and their defaults) understood by the MongoDB JDBC driver. */
object MongodbJdbcDriverPropertyInfoHelper {
  val ApplicationName = "appName"
  val Database        = "database"

  val AuthUser       = "user"
  val AuthPassword   = "password"
  val AuthDatabase   = "auth_database"
  val DefaultAuthDB  = "admin"
  val DefaultAppName = "mongodb-driver"
}

/** Builds the [[java.sql.DriverPropertyInfo]] array a JDBC `Driver.getPropertyInfo` implementation advertises. */
case class MongodbJdbcDriverPropertyInfoHelper() {

  /** Returns one `DriverPropertyInfo` per supported connection property, with default values where defined. */
  def getPropertyInfo: Array[java.sql.DriverPropertyInfo] = {
    Array(
      createPropertyInfo(AuthUser, null, Some("The username to authenticate")),
      createPropertyInfo(AuthPassword, null, Some("The password to authenticate")),
      createPropertyInfo(AuthDatabase, DefaultAuthDB, Some("The database where user info is stored. (most cases 'admin')")),
      // fixed typo in the user-visible description: "witch" -> "which"
      createPropertyInfo(ApplicationName, DefaultAppName, Some("The application name which is visible in the MongoDB logs.")),
      createPropertyInfo(Database, null, Some("The default database to connect to for the driver"))
    )
  }

  /** Creates a `DriverPropertyInfo` for `key`/`value`, attaching the description when one is given. */
  private def createPropertyInfo(key: String, value: String, description: Option[String] = None): java.sql.DriverPropertyInfo = {
    val prop = new java.sql.DriverPropertyInfo(key, value)
    description.foreach(prop.description = _)
    prop
  }
}
+import java.util.Calendar +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.jdbc.MongoJdbcCloseable + +import java.nio.charset.StandardCharsets + +class MongoDbResultSet(collectionDao: MongoDAO[Document], data: List[Document], queryTimeOut: Int) extends ResultSet with MongoJdbcCloseable { + private var currentRow: Document = _ + private var index: Int = 0 + + private lazy val metaData = new MongoDbResultSetMetaData(collectionDao, data) + + def getDocument: Document = currentRow + + override def next(): Boolean = { + checkClosed() + if (data == null || data.isEmpty) { + false + } + else { + if (index == 0 || (currentRow != null && index < data.size)) { + currentRow = data(index) + index += 1 + true + } + else { + currentRow = null + false + } + } + } + + override def wasNull(): Boolean = { + checkClosed() + false + } + + override def getString(columnIndex: Int): String = { + checkClosed() + currentRow.getString(metaData.getColumnName(columnIndex)) + } + + override def getBoolean(columnIndex: Int): Boolean = { + checkClosed() + currentRow.getBoolean(metaData.getColumnName(columnIndex)) + } + + override def getByte(columnIndex: Int): Byte = { + checkClosed() + getInt(columnIndex).toByte + } + + override def getShort(columnIndex: Int): Short = { + checkClosed() + getInt(columnIndex).toShort + } + + override def getInt(columnIndex: Int): Int = { + checkClosed() + getLong(columnIndex).toInt + } + + override def getLong(columnIndex: Int): Long = { + checkClosed() + val value = currentRow.getValue(metaData.getColumnName(columnIndex)) + value match { + case b : BsonInt32 => b.longValue() + case b : BsonInt64 => b.longValue() + case _ => Option(value).flatMap(_.toString.toLongOption).getOrElse(0) + } + } + + override def getFloat(columnIndex: Int): Float = { + checkClosed() + getDouble(columnIndex).toFloat + } + + override def getDouble(columnIndex: Int): Double = { + checkClosed() + currentRow.getDouble(metaData.getColumnName(columnIndex)) + } + + 
override def getBigDecimal(columnIndex: Int, scale: Int): java.math.BigDecimal = { + checkClosed() + new java.math.BigDecimal(getDouble(columnIndex)).setScale(scale) + } + + override def getBytes(columnIndex: Int): Array[Byte] = { + checkClosed() + null + } + + override def getDate(columnIndex: Int): Date = { + checkClosed() + val javaDate = currentRow.getDateValue(metaData.getColumnName(columnIndex)) + new Date(javaDate.getTime) + } + + override def getTime(columnIndex: Int): Time = { + checkClosed() + val javaDate = currentRow.getDateValue(metaData.getColumnName(columnIndex)) + new Time(javaDate.getTime) + } + + override def getTimestamp(columnIndex: Int): Timestamp = { + checkClosed() + val javaDate = currentRow.getDateValue(metaData.getColumnName(columnIndex)) + new Timestamp(javaDate.getTime) + } + + override def getAsciiStream(columnIndex: Int): InputStream = { + checkClosed() + null + } + + override def getUnicodeStream(columnIndex: Int): InputStream = { + checkClosed() + null + } + + override def getBinaryStream(columnIndex: Int): InputStream = { + checkClosed() + null + } + + override def getString(columnLabel: String): String = { + checkClosed() + currentRow.get(columnLabel) match { + case Some(value) => + value match { + case v: BsonString => v.getValue + case v: BsonObjectId => v.asObjectId().getValue.toHexString + case _ => BsonConverter.fromBson(value).toString + } + case None => "" + } + } + + override def getBoolean(columnLabel: String): Boolean = { + checkClosed() + currentRow.getBoolean(columnLabel) + } + + override def getByte(columnLabel: String): Byte = { + checkClosed() + getInt(columnLabel).toByte + } + + override def getShort(columnLabel: String): Short = { + checkClosed() + getInt(columnLabel).toShort + } + + override def getInt(columnLabel: String): Int = { + checkClosed() + currentRow.getIntValue(columnLabel) + } + + override def getLong(columnLabel: String): Long = { + checkClosed() + currentRow.getLong(columnLabel) + } + + override def 
getFloat(columnLabel: String): Float = { + checkClosed() + getDouble(columnLabel).toFloat + } + + override def getDouble(columnLabel: String): Double = { + checkClosed() + currentRow.getDouble(columnLabel) + } + + override def getBigDecimal(columnLabel: String, scale: Int): java.math.BigDecimal = { + checkClosed() + new java.math.BigDecimal(getDouble(columnLabel)).setScale(scale) + } + + override def getBytes(columnLabel: String): Array[Byte] = { + checkClosed() + null + } + + override def getDate(columnLabel: String): Date = { + checkClosed() + val javaDate = currentRow.getDateValue(columnLabel) + new Date(javaDate.getTime) + } + + override def getTime(columnLabel: String): Time = { + checkClosed() + val javaDate = currentRow.getDateValue(columnLabel) + new Time(javaDate.getTime) + } + + override def getTimestamp(columnLabel: String): Timestamp = { + checkClosed() + val javaDate = currentRow.getDateValue(columnLabel) + new Timestamp(javaDate.getTime) + } + + override def getAsciiStream(columnLabel: String): InputStream = { + checkClosed() + null + } + + override def getUnicodeStream(columnLabel: String): InputStream = { + checkClosed() + null + } + + override def getBinaryStream(columnLabel: String): InputStream = { + checkClosed() + null + } + + override def getWarnings: SQLWarning = { + checkClosed() + null + } + + override def clearWarnings(): Unit = { + checkClosed() + } + + override def getCursorName: String = { + checkClosed() + null + } + + override def getMetaData: ResultSetMetaData = { + checkClosed() + new MongoDbResultSetMetaData(collectionDao, data) + } + + override def getObject(columnIndex: Int): AnyRef = { + checkClosed() + currentRow.get(metaData.getColumnName(columnIndex)) match { + case Some(value) => BsonConverter.fromBson(value).asInstanceOf[AnyRef] + case None => null + } + } + + override def getObject(columnLabel: String): AnyRef = { + checkClosed() + currentRow.get(columnLabel) match { + case Some(value) => 
BsonConverter.fromBson(value).asInstanceOf[AnyRef] + case None => null + } + } + + override def findColumn(columnLabel: String): Int = { + checkClosed() + metaData.getColumnIndex(columnLabel) + } + + override def getCharacterStream(columnIndex: Int): Reader = { + checkClosed() + null + } + + override def getCharacterStream(columnLabel: String): Reader = { + checkClosed() + null + } + + override def getBigDecimal(columnIndex: Int): java.math.BigDecimal = { + checkClosed() + new java.math.BigDecimal(getDouble(columnIndex)) + } + + override def getBigDecimal(columnLabel: String): java.math.BigDecimal = { + checkClosed() + new java.math.BigDecimal(getDouble(columnLabel)) + } + + override def isBeforeFirst: Boolean = { + checkClosed() + index == 0 + } + + override def isAfterLast: Boolean = { + checkClosed() + index >= data.size + } + + override def isFirst: Boolean = { + checkClosed() + index == 1 + } + + override def isLast: Boolean = { + checkClosed() + index == data.size + } + + override def beforeFirst(): Unit = { + checkClosed() + } + + override def afterLast(): Unit = { + checkClosed() + } + + override def first(): Boolean = isBeforeFirst + + override def last(): Boolean = isLast + + override def getRow: Int = { + checkClosed() + if (currentRow == null) { + 0 + } + else { + index + } + } + + override def absolute(row: Int): Boolean = { + checkClosed() + false + } + + override def relative(rows: Int): Boolean = { + checkClosed() + false + } + + override def previous(): Boolean = { + checkClosed() + false + } + + override def setFetchDirection(direction: Int): Unit = sqlFeatureNotSupported() + + override def getFetchDirection: Int = { + checkClosed() + ResultSet.FETCH_FORWARD + } + + override def setFetchSize(rows: Int): Unit = { + checkClosed() + } + + override def getFetchSize: Int = { + checkClosed() + 1 + } + + override def getType: Int = { + checkClosed() + ResultSet.TYPE_FORWARD_ONLY + } + + override def getConcurrency: Int = sqlFeatureNotSupported() + + 
override def rowUpdated(): Boolean = { + checkClosed() + false + } + + override def rowInserted(): Boolean = { + checkClosed() + false + } + + override def rowDeleted(): Boolean = { + checkClosed() + false + } + + override def updateNull(columnIndex: Int): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonNull()) + } + + override def updateNull(columnLabel: String): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonNull()) + } + + override def updateBoolean(columnIndex: Int, x: Boolean): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonBoolean(x)) + } + + override def updateBoolean(columnLabel: String, x: Boolean): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonBoolean(x)) + } + + override def updateByte(columnIndex: Int, x: Byte): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonNumber(x)) + } + + override def updateByte(columnLabel: String, x: Byte): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonNumber(x)) + } + + override def updateShort(columnIndex: Int, x: Short): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonNumber(x)) + } + + override def updateShort(columnLabel: String, x: Short): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonNumber(x)) + } + + override def updateInt(columnIndex: Int, x: Int): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonNumber(x)) + } + + override def updateInt(columnLabel: String, x: Int): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonNumber(x)) + } + + override def updateLong(columnIndex: Int, x: Long): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonNumber(x)) + } + + override def updateLong(columnLabel: String, x: Long): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonNumber(x)) + } + + override def 
updateFloat(columnIndex: Int, x: Float): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonNumber(x)) + } + + override def updateFloat(columnLabel: String, x: Float): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonNumber(x)) + } + + override def updateDouble(columnIndex: Int, x: Double): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonNumber(x)) + } + + override def updateDouble(columnLabel: String, x: Double): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonNumber(x)) + } + + override def updateBigDecimal(columnIndex: Int, x: java.math.BigDecimal): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonNumber(x.doubleValue())) + } + + override def updateBigDecimal(columnLabel: String, x: java.math.BigDecimal): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonNumber(x.doubleValue())) + } + + override def updateString(columnIndex: Int, x: String): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonString(x)) + } + + override def updateString(columnLabel: String, x: String): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonString(x)) + } + + override def updateBytes(columnIndex: Int, x: Array[Byte]): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonArray(x)) + } + + override def updateBytes(columnLabel: String, x: Array[Byte]): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonArray(x)) + } + + override def updateDate(columnIndex: Int, x: Date): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonDateTime(x)) + } + + override def updateDate(columnLabel: String, x: Date): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonDateTime(x)) + } + + override def updateTime(columnIndex: Int, x: Time): Unit = { + checkClosed() + 
currentRow.updated(metaData.getColumnName(columnIndex), BsonDateTime(x)) + } + + override def updateTime(columnLabel: String, x: Time): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonDateTime(x)) + } + + override def updateTimestamp(columnIndex: Int, x: Timestamp): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonDateTime(x)) + } + + override def updateTimestamp(columnLabel: String, x: Timestamp): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonDateTime(x)) + } + + override def updateAsciiStream(columnIndex: Int, x: InputStream, length: Int): Unit = sqlFeatureNotSupported() + + override def updateAsciiStream(columnLabel: String, x: InputStream, length: Int): Unit = sqlFeatureNotSupported() + + override def updateBinaryStream(columnIndex: Int, x: InputStream, length: Int): Unit = sqlFeatureNotSupported() + + override def updateBinaryStream(columnLabel: String, x: InputStream, length: Int): Unit = sqlFeatureNotSupported() + + override def updateCharacterStream(columnIndex: Int, x: Reader, length: Int): Unit = sqlFeatureNotSupported() + + override def updateCharacterStream(columnLabel: String, reader: Reader, length: Int): Unit = sqlFeatureNotSupported() + + override def updateObject(columnIndex: Int, x: Any, scaleOrLength: Int): Unit = { + checkClosed() + updateObject(columnIndex, x) + } + + override def updateObject(columnLabel: String, x: Any, scaleOrLength: Int): Unit = { + checkClosed() + updateObject(columnLabel, x) + } + + override def updateObject(columnIndex: Int, x: Any): Unit = { + checkClosed() + currentRow.updated(metaData.getColumnName(columnIndex), BsonConverter.toBson(x)) + } + + override def updateObject(columnLabel: String, x: Any): Unit = { + checkClosed() + currentRow.updated(columnLabel, BsonConverter.toBson(x)) + } + + override def insertRow(): Unit = { + checkClosed() + collectionDao.insertOne(currentRow).resultOption(queryTimeOut) + } + + override def updateRow(): Unit = { + 
checkClosed() + collectionDao.replaceOne(currentRow).resultOption(queryTimeOut) + } + + override def deleteRow(): Unit = { + checkClosed() + collectionDao.deleteOne(currentRow).resultOption(queryTimeOut) + } + + override def refreshRow(): Unit = { + checkClosed() + currentRow.get("_id") match { + case Some(id) => + collectionDao.find(Map("_id" -> id)).resultOption(queryTimeOut) match { + case Some(document) => currentRow = document + case None => throw new SQLException("Row not found") + } + case None => throw new SQLException("No _id field in current row") + } + } + + override def cancelRowUpdates(): Unit = sqlFeatureNotSupported() + + override def moveToInsertRow(): Unit = sqlFeatureNotSupported() + + override def moveToCurrentRow(): Unit = sqlFeatureNotSupported() + + override def getStatement: Statement = { + checkClosed() + null + } + + override def getObject(columnIndex: Int, map: util.Map[String, Class[_]]): AnyRef = { + checkClosed() + if (map == null || map.isEmpty) { + getObject(columnIndex) + } + else { + sqlFeatureNotSupported() + } + } + + override def getObject(columnLabel: String, map: util.Map[String, Class[_]]): AnyRef = { + checkClosed() + if (map == null || map.isEmpty) { + getObject(columnLabel) + } + else { + sqlFeatureNotSupported() + } + } + + override def getObject[T](columnIndex: Int, `type`: Class[T]): T = { + checkClosed() + val ref = getObject(columnIndex) + ref match { + case t: T => t + case _ => throw new SQLException("Invalid type") + } + } + + override def getObject[T](columnLabel: String, `type`: Class[T]): T = { + checkClosed() + val ref = getObject(columnLabel) + ref match { + case t: T => t + case _ => throw new SQLException("Invalid type") + } + } + + override def getRef(columnIndex: Int): Ref = sqlFeatureNotSupported() + + override def getRef(columnLabel: String): Ref = sqlFeatureNotSupported() + + override def updateRef(columnIndex: Int, x: Ref): Unit = sqlFeatureNotSupported() + + override def updateRef(columnLabel: String, 
x: Ref): Unit = sqlFeatureNotSupported() + + override def getDate(columnIndex: Int, cal: Calendar): Date = { + checkClosed() + val date = getDate(columnIndex) + convertDateWithCalendar(cal, date) + } + + override def getDate(columnLabel: String, cal: Calendar): Date = { + checkClosed() + val date = getDate(columnLabel) + convertDateWithCalendar(cal, date) + } + + override def getTime(columnIndex: Int, cal: Calendar): Time = { + checkClosed() + val date = getDate(columnIndex, cal) + new Time(date.getTime) + } + + override def getTime(columnLabel: String, cal: Calendar): Time = { + checkClosed() + val date = getDate(columnLabel, cal) + new Time(date.getTime) + } + + override def getTimestamp(columnIndex: Int, cal: Calendar): Timestamp = { + checkClosed() + val date = getDate(columnIndex, cal) + new Timestamp(date.getTime) + } + + override def getTimestamp(columnLabel: String, cal: Calendar): Timestamp = { + checkClosed() + val date = getDate(columnLabel, cal) + new Timestamp(date.getTime) + } + + override def getURL(columnIndex: Int): URL = { + checkClosed() + new URI(getString(columnIndex)).toURL + } + + override def getURL(columnLabel: String): URL = { + checkClosed() + new URI(getString(columnLabel)).toURL + } + + override def getRowId(columnIndex: Int): RowId = sqlFeatureNotSupported() + override def getRowId(columnLabel: String): RowId = sqlFeatureNotSupported() + override def updateRowId(columnIndex: Int, x: RowId): Unit = sqlFeatureNotSupported() + override def updateRowId(columnLabel: String, x: RowId): Unit = sqlFeatureNotSupported() + + override def getHoldability: Int = sqlFeatureNotSupported() + + override def updateNString(columnIndex: Int, nString: String): Unit = sqlFeatureNotSupported() + override def updateNString(columnLabel: String, nString: String): Unit = sqlFeatureNotSupported() + override def getNString(columnIndex: Int): String = sqlFeatureNotSupported() + override def getNString(columnLabel: String): String = sqlFeatureNotSupported() + + 
override def getNClob(columnIndex: Int): NClob = sqlFeatureNotSupported() + override def getNClob(columnLabel: String): NClob = sqlFeatureNotSupported() + override def updateNClob(columnIndex: Int, nClob: NClob): Unit = sqlFeatureNotSupported() + override def updateNClob(columnLabel: String, nClob: NClob): Unit = sqlFeatureNotSupported() + + override def getSQLXML(columnIndex: Int): SQLXML = sqlFeatureNotSupported() + override def getSQLXML(columnLabel: String): SQLXML = sqlFeatureNotSupported() + override def updateSQLXML(columnIndex: Int, xmlObject: SQLXML): Unit = sqlFeatureNotSupported() + override def updateSQLXML(columnLabel: String, xmlObject: SQLXML): Unit = sqlFeatureNotSupported() + + override def getNCharacterStream(columnIndex: Int): Reader = sqlFeatureNotSupported() + override def getNCharacterStream(columnLabel: String): Reader = sqlFeatureNotSupported() + override def updateNCharacterStream(columnIndex: Int, x: Reader, length: Long): Unit = sqlFeatureNotSupported() + override def updateNCharacterStream(columnLabel: String, reader: Reader, length: Long): Unit = sqlFeatureNotSupported() + + override def updateAsciiStream(columnIndex: Int, x: InputStream, length: Long): Unit = { + checkClosed() + val text = new String(x.readAllBytes, StandardCharsets.UTF_8) + updateString(columnIndex, text) + } + + override def updateAsciiStream(columnLabel: String, x: InputStream, length: Long): Unit = { + checkClosed() + val text = new String(x.readAllBytes, StandardCharsets.UTF_8) + updateString(columnLabel, text) + } + + override def updateAsciiStream(columnIndex: Int, x: InputStream): Unit = { + checkClosed() + val text = new String(x.readAllBytes, StandardCharsets.UTF_8) + updateString(columnIndex, text) + } + + override def updateAsciiStream(columnLabel: String, x: InputStream): Unit = { + checkClosed() + val text = new String(x.readAllBytes, StandardCharsets.UTF_8) + updateString(columnLabel, text) + } + + override def updateBinaryStream(columnIndex: Int, x: 
InputStream, length: Long): Unit = { + checkClosed() + val text = new String(x.readAllBytes, StandardCharsets.UTF_8) + updateString(columnIndex, text) + } + + override def updateBinaryStream(columnLabel: String, x: InputStream, length: Long): Unit = { + checkClosed() + val text = new String(x.readAllBytes, StandardCharsets.UTF_8) + updateString(columnLabel, text) + } + + override def updateBinaryStream(columnIndex: Int, x: InputStream): Unit = { + checkClosed() + val text = new String(x.readAllBytes, StandardCharsets.UTF_8) + updateString(columnIndex, text) + } + + override def updateBinaryStream(columnLabel: String, x: InputStream): Unit = { + checkClosed() + val text = new String(x.readAllBytes, StandardCharsets.UTF_8) + updateString(columnLabel, text) + } + + override def updateCharacterStream(columnIndex: Int, x: Reader, length: Long): Unit = sqlFeatureNotSupported() + override def updateCharacterStream(columnLabel: String, reader: Reader, length: Long): Unit = sqlFeatureNotSupported() + override def updateCharacterStream(columnIndex: Int, x: Reader): Unit = sqlFeatureNotSupported() + override def updateCharacterStream(columnLabel: String, reader: Reader): Unit = sqlFeatureNotSupported() + + override def updateNCharacterStream(columnIndex: Int, x: Reader): Unit = sqlFeatureNotSupported() + override def updateNCharacterStream(columnLabel: String, reader: Reader): Unit = sqlFeatureNotSupported() + + override def updateBlob(columnIndex: Int, inputStream: InputStream, length: Long): Unit = { + checkClosed() + val text = new String(inputStream.readAllBytes, StandardCharsets.UTF_8) + updateString(columnIndex, text) + } + override def updateBlob(columnLabel: String, inputStream: InputStream, length: Long): Unit = { + checkClosed() + val text = new String(inputStream.readAllBytes, StandardCharsets.UTF_8) + updateString(columnLabel, text) + } + override def updateBlob(columnIndex: Int, inputStream: InputStream): Unit = { + checkClosed() + val text = new 
String(inputStream.readAllBytes, StandardCharsets.UTF_8) + updateString(columnIndex, text) + } + override def updateBlob(columnLabel: String, inputStream: InputStream): Unit = { + checkClosed() + val text = new String(inputStream.readAllBytes, StandardCharsets.UTF_8) + updateString(columnLabel, text) + } + + override def updateClob(columnIndex: Int, reader: Reader, length: Long): Unit = { + checkClosed() + val text = convertReaderToString(reader) + updateString(columnIndex, text) + } + + override def updateNClob(columnIndex: Int, reader: Reader, length: Long): Unit = { + checkClosed() + val text = convertReaderToString(reader) + updateString(columnIndex, text) + } + + override def updateNClob(columnLabel: String, reader: Reader, length: Long): Unit = { + checkClosed() + val text = convertReaderToString(reader) + updateString(columnLabel, text) + } + + override def updateNClob(columnIndex: Int, reader: Reader): Unit = { + checkClosed() + val text = convertReaderToString(reader) + updateString(columnIndex, text) + } + + override def updateNClob(columnLabel: String, reader: Reader): Unit = { + checkClosed() + val text = convertReaderToString(reader) + updateString(columnLabel, text) + } + + override def getBlob(columnIndex: Int): Blob = sqlFeatureNotSupported() + + override def getBlob(columnLabel: String): Blob = sqlFeatureNotSupported() + + override def updateBlob(columnIndex: Int, x: Blob): Unit = { + checkClosed() + val text = new String(x.getBinaryStream.readAllBytes(), StandardCharsets.UTF_8) + updateString(columnIndex, text) + } + + override def updateBlob(columnLabel: String, x: Blob): Unit = { + checkClosed() + val text = new String(x.getBinaryStream.readAllBytes(), StandardCharsets.UTF_8) + updateString(columnLabel, text) + } + + override def getClob(columnIndex: Int): Clob = sqlFeatureNotSupported() + + override def getClob(columnLabel: String): Clob = sqlFeatureNotSupported() + + override def updateClob(columnIndex: Int, x: Clob): Unit = { + val text = new 
String(x.getAsciiStream.readAllBytes(), StandardCharsets.UTF_8) + updateString(columnIndex, text) + } + + override def updateClob(columnLabel: String, x: Clob): Unit = { + val text = new String(x.getAsciiStream.readAllBytes(), StandardCharsets.UTF_8) + updateString(columnLabel, text) + } + + override def updateClob(columnLabel: String, reader: Reader, length: Long): Unit = { + val text = convertReaderToString(reader) + updateString(columnLabel, text) + } + + override def updateClob(columnIndex: Int, reader: Reader): Unit = { + val text = convertReaderToString(reader) + updateString(columnIndex, text) + } + + override def updateClob(columnLabel: String, reader: Reader): Unit = { + val text = convertReaderToString(reader) + updateString(columnLabel, text) + } + + override def getArray(columnIndex: Int): sql.Array = sqlFeatureNotSupported() + + override def getArray(columnLabel: String): sql.Array = sqlFeatureNotSupported() + + override def updateArray(columnIndex: Int, x: sql.Array): Unit = sqlFeatureNotSupported() + + override def updateArray(columnLabel: String, x: sql.Array): Unit = sqlFeatureNotSupported() + + override def unwrap[T](iface: Class[T]): T = null.asInstanceOf[T] + + override def isWrapperFor(iface: Class[_]): Boolean = false + + private def convertDateWithCalendar(cal: Calendar, date: Date) = { + if (cal != null) { + val calDate = cal.getTime + calDate.setTime(date.getTime) + new Date(calDate.getTime) + } + else { + date + } + } + + private def convertReaderToString(reader: Reader): String = { + val buffer = new StringBuilder + var c = reader.read() + while (c != -1) { + buffer.append(c.toChar) + c = reader.read() + } + buffer.toString() + } + +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSetMetaData.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSetMetaData.scala new file mode 100644 index 00000000..06b1e774 --- /dev/null +++ 
package dev.mongocamp.driver.mongodb.jdbc.resultSet

import dev.mongocamp.driver.mongodb.MongoDAO
import org.mongodb.scala.Document
import org.mongodb.scala.bson.{BsonBoolean, BsonDocument, BsonInt32, BsonInt64, BsonNumber, BsonString}
import dev.mongocamp.driver.mongodb._

import java.sql.{ResultSetMetaData, SQLException}

/**
 * [[java.sql.ResultSetMetaData]] implementation backed by a single representative MongoDB document.
 *
 * The representative document is either supplied directly, taken from a result list (merged so that
 * every key seen anywhere in the list is present), or sampled from the collection via `\$sample`.
 * All `column` parameters follow the JDBC convention and are 1-based.
 */
class MongoDbResultSetMetaData extends ResultSetMetaData {
  private var document: Document               = _
  private var collectionDao: MongoDAO[Document] = _

  /** Builds metadata by sampling up to 500 documents from the collection and merging their keys. */
  def this(dao: MongoDAO[Document]) = {
    this()
    val row: Document = extractDocumentFromDataList(dao.findAggregated(List(Map("$sample" -> Map("size" -> 500)))).resultList())
    this.document = row
    this.collectionDao = dao
  }

  /** Builds metadata from one concrete document. */
  def this(dao: MongoDAO[Document], document: Document) = {
    this()
    this.document = document
    this.collectionDao = dao
  }

  /** Builds metadata from a result list; throws [[SQLException]] when the list is empty. */
  def this(dao: MongoDAO[Document], data: List[Document]) = {
    this()
    val row: Document = extractDocumentFromDataList(data)
    this.document = row
    this.collectionDao = dao
  }

  /**
   * Merges the first document with the first non-empty value found for every key that appears
   * anywhere in `data`, so the metadata covers the union of all keys in the result.
   */
  private def extractDocumentFromDataList(data: List[Document]) = {
    var row          = data.headOption.getOrElse(throw new SQLException("No data in ResultSet")).copy()
    val distinctKeys = data.flatMap(_.keys).distinct
    val missingKeys  = distinctKeys.diff(row.keys.toSeq)
    missingKeys.foreach(key => {
      data
        .find(_.get(key).nonEmpty)
        .map(doc => row = row.updated(key, doc.get(key).get))
    })
    row
  }

  override def getColumnCount: Int = document.size

  override def isAutoIncrement(column: Int): Boolean = false

  override def isCaseSensitive(column: Int): Boolean = true

  override def isSearchable(column: Int): Boolean = true

  override def isCurrency(column: Int): Boolean = false

  override def isNullable(column: Int): Int = ResultSetMetaData.columnNullable

  // NOTE(review): numeric BSON types are signed; reporting false here is questionable but preserved.
  override def isSigned(column: Int): Boolean = false

  override def getColumnDisplaySize(column: Int): Int = Int.MaxValue

  /** Column label for the 1-based `column`, taken from the representative document's key order. */
  override def getColumnLabel(column: Int): String = document.keys.toList(column - 1)

  override def getColumnName(column: Int): String = getColumnLabel(column)

  override def getSchemaName(column: Int): String = collectionDao.databaseName

  override def getPrecision(column: Int): Int = 0

  override def getScale(column: Int): Int = 0

  override def getTableName(column: Int): String = collectionDao.name

  override def getCatalogName(column: Int): String = collectionDao.name

  /** Maps the BSON type of the sampled value to a `java.sql.Types` constant. */
  override def getColumnType(column: Int): Int = {
    // Order matters: BsonInt32/BsonInt64 are BsonNumber subtypes and must be matched first.
    document.values.toList(column - 1) match {
      case _: BsonInt32    => java.sql.Types.INTEGER
      case _: BsonInt64    => java.sql.Types.BIGINT
      case _: BsonNumber   => java.sql.Types.DOUBLE
      case _: BsonString   => java.sql.Types.VARCHAR
      case _: BsonBoolean  => java.sql.Types.BOOLEAN
      // bugfix: document.values yields BsonValue instances, so embedded documents arrive as
      // BsonDocument; the previous `case _: Document` arm could never match.
      case _: BsonDocument => java.sql.Types.STRUCT
      case _               => java.sql.Types.NULL
    }
  }

  override def getColumnTypeName(column: Int): String = {
    getColumnType(column) match {
      case java.sql.Types.INTEGER => "INTEGER"
      case java.sql.Types.BIGINT  => "BIGINT"
      case java.sql.Types.DOUBLE  => "DOUBLE"
      case java.sql.Types.VARCHAR => "VARCHAR"
      case java.sql.Types.BOOLEAN => "BOOLEAN"
      case java.sql.Types.STRUCT  => "STRUCT"
      case _                      => "NULL"
    }
  }

  override def isReadOnly(column: Int): Boolean = false

  override def isWritable(column: Int): Boolean = true

  override def isDefinitelyWritable(column: Int): Boolean = true

  override def getColumnClassName(column: Int): String = {
    getColumnType(column) match {
      case java.sql.Types.INTEGER => classOf[java.lang.Integer].getName
      case java.sql.Types.BIGINT  => classOf[java.lang.Long].getName
      case java.sql.Types.DOUBLE  => classOf[java.lang.Double].getName
      case java.sql.Types.VARCHAR => classOf[java.lang.String].getName
      case java.sql.Types.BOOLEAN => classOf[java.lang.Boolean].getName
      case java.sql.Types.STRUCT  => classOf[java.lang.Object].getName
      case _                      => classOf[java.lang.String].getName
    }
  }

  override def unwrap[T](iface: Class[T]): T = null.asInstanceOf[T]

  override def isWrapperFor(iface: Class[_]): Boolean = false

  /**
   * 1-based index of `columnLabel`, matching the JDBC `ResultSet.findColumn` contract
   * (which delegates here); throws [[SQLException]] when the label is unknown.
   *
   * bugfix: previously returned the raw 0-based `indexOf` and -1 for a missing label,
   * which made `findColumn` violate the JDBC contract.
   */
  def getColumnIndex(columnLabel: String): Int = {
    val zeroBased = document.keys.toList.indexOf(columnLabel)
    if (zeroBased < 0) {
      throw new SQLException(s"Column '$columnLabel' not found in ResultSet")
    }
    zeroBased + 1
  }
}
def execute(sql: String): Boolean = { + checkClosed() + if (sql != null) { + try { + val response = MongoSqlQueryHolder(sql).run(connection.getDatabaseProvider).results(getQueryTimeout) + true + } + catch { + case e: Exception => + false + } + } + else { + false + } + } + + override def executeQuery(sql: String): ResultSet = { + checkClosed() + val queryHolder: MongoSqlQueryHolder = try { + MongoSqlQueryHolder(sql) + } + catch { + case e: SqlCommandNotSupportedException => + logger.error(e.getMessage, e) + null + } + if (queryHolder == null) { + new MongoDbResultSet(null, List.empty, 0) + } else { + var response = queryHolder.run(connection.getDatabaseProvider).results(getQueryTimeout) + if (response.isEmpty && queryHolder.selectFunctionCall) { + val emptyDocument = mutable.Map[String, Any]() + queryHolder.getKeysForEmptyDocument.foreach(key => { + emptyDocument.put(key, null) + }) + val doc = Converter.toDocument(emptyDocument.toMap) + response = Seq(doc) + } + val collectionName = Option(queryHolder.getCollection).map(c => connection.getDatabaseProvider.dao(c)) + if (!sql.toLowerCase().contains("_id")){ + response = response.map(doc => { + val newDoc = Document(doc - "_id") + newDoc + }) + } + val resultSet = new MongoDbResultSet(collectionName.orNull, response.toList, getQueryTimeout) + _lastResultSet = resultSet + resultSet + } + } + + + def setSql(sql: String): Unit = { + _sql = sql + } + + override def executeQuery(): ResultSet = { + checkClosed() + executeQuery(_sql) + } + + override def executeUpdate(): Int = { + executeUpdate(_sql) + } + + override def setNull(parameterIndex: Int, sqlType: Int): Unit = { + checkClosed() + setObject(parameterIndex, null) + } + + override def setArray(parameterIndex: Int, x: java.sql.Array): Unit = { + checkClosed() + } + + override def setBoolean(parameterIndex: Int, x: Boolean): Unit = { + checkClosed() + setObject(parameterIndex, x) + } + + override def setByte(parameterIndex: Int, x: Byte): Unit = { + checkClosed() + 
setObject(parameterIndex, x) + } + + override def setShort(parameterIndex: Int, x: Short): Unit = { + checkClosed() + setObject(parameterIndex, x) + } + + override def setInt(parameterIndex: Int, x: Int): Unit = { + checkClosed() + setObject(parameterIndex, x) + } + + override def setLong(parameterIndex: Int, x: Long): Unit = { + checkClosed() + setObject(parameterIndex, x) + } + + override def setFloat(parameterIndex: Int, x: Float): Unit = { + checkClosed() + setObject(parameterIndex, x) + } + + override def setDouble(parameterIndex: Int, x: Double): Unit = { + checkClosed() + setObject(parameterIndex, x) + } + + override def setBigDecimal(parameterIndex: Int, x: java.math.BigDecimal): Unit = { + checkClosed() + setObject(parameterIndex, x.doubleValue()) + } + + override def setString(parameterIndex: Int, x: String): Unit = { + checkClosed() + setObject(parameterIndex, s"'$x'") + } + + override def setBytes(parameterIndex: Int, x: Array[Byte]): Unit = { + checkClosed() + } + + override def setDate(parameterIndex: Int, x: Date): Unit = { + checkClosed() + setObject(parameterIndex, s"'${x.toInstant.toString}'") + } + + override def setTime(parameterIndex: Int, x: Time): Unit = { + checkClosed() + setObject(parameterIndex, s"'${x.toInstant.toString}'") + } + + override def setTimestamp(parameterIndex: Int, x: Timestamp): Unit = { + checkClosed() + setObject(parameterIndex, s"'${x.toInstant.toString}'") + } + + override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Int): Unit = { + checkClosed() + } + + override def setUnicodeStream(parameterIndex: Int, x: InputStream, length: Int): Unit = { + checkClosed() + } + + override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Int): Unit = { + checkClosed() + } + + override def clearParameters(): Unit = { + checkClosed() + _sql = _org_sql + } + + override def setObject(parameterIndex: Int, x: Any, targetSqlType: Int): Unit = { + setObject(parameterIndex, x) + } + + override def 
setObject(parameterIndex: Int, x: Any): Unit = { + checkClosed() + var newSql = "" + var paramCount = 0 + _org_sql = _sql + _sql.foreach(c => { + var replace = false + if (c == '?') { + if (paramCount == parameterIndex) { + replace = true + } + paramCount += 1 + } + if (replace) { + newSql += x.toString + } + else { + newSql += c + } + }) + _sql = newSql + } + + override def execute(): Boolean = { + execute(_sql) + } + + override def addBatch(): Unit = { + checkClosed() + } + + override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Int): Unit = { + checkClosed() + } + + override def setRef(parameterIndex: Int, x: Ref): Unit = { + checkClosed() + } + + override def setBlob(parameterIndex: Int, x: Blob): Unit = { + checkClosed() + } + + override def setClob(parameterIndex: Int, x: Clob): Unit = { + checkClosed() + } + + override def getMetaData: ResultSetMetaData = { + checkClosed() + null + } + + override def setDate(parameterIndex: Int, x: Date, cal: Calendar): Unit = { + setDate(parameterIndex, x) + } + + override def setTime(parameterIndex: Int, x: Time, cal: Calendar): Unit = { + setTime(parameterIndex, x) + } + + override def setTimestamp(parameterIndex: Int, x: Timestamp, cal: Calendar): Unit = { + setTimestamp(parameterIndex, x) + } + + override def setNull(parameterIndex: Int, sqlType: Int, typeName: String): Unit = { + setNull(parameterIndex, sqlType) + } + + override def setURL(parameterIndex: Int, x: URL): Unit = { + sqlFeatureNotSupported() + } + + override def getParameterMetaData: ParameterMetaData = { + sqlFeatureNotSupported() + } + + override def setRowId(parameterIndex: Int, x: RowId): Unit = { + checkClosed() + } + + override def setNString(parameterIndex: Int, value: String): Unit = { + checkClosed() + } + + override def setNCharacterStream(parameterIndex: Int, value: Reader, length: Long): Unit = { + checkClosed() + } + + override def setNClob(parameterIndex: Int, value: NClob): Unit = { + checkClosed() + } + + override def 
setClob(parameterIndex: Int, reader: Reader, length: Long): Unit = { + checkClosed() + } + + override def setBlob(parameterIndex: Int, inputStream: InputStream, length: Long): Unit = { + checkClosed() + } + + override def setNClob(parameterIndex: Int, reader: Reader, length: Long): Unit = { + checkClosed() + } + + override def setSQLXML(parameterIndex: Int, xmlObject: SQLXML): Unit = { + checkClosed() + } + + override def setObject(parameterIndex: Int, x: Any, targetSqlType: Int, scaleOrLength: Int): Unit = { + setObject(parameterIndex, x) + } + + override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Long): Unit = { + checkClosed() + } + + override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Long): Unit = { + checkClosed() + } + + override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Long): Unit = { + checkClosed() + } + + override def setAsciiStream(parameterIndex: Int, x: InputStream): Unit = { + checkClosed() + } + + override def setBinaryStream(parameterIndex: Int, x: InputStream): Unit = { + checkClosed() + } + + override def setCharacterStream(parameterIndex: Int, reader: Reader): Unit = { + checkClosed() + } + + override def setNCharacterStream(parameterIndex: Int, value: Reader): Unit = { + checkClosed() + } + + override def setClob(parameterIndex: Int, reader: Reader): Unit = { + checkClosed() + } + + override def setBlob(parameterIndex: Int, inputStream: InputStream): Unit = { + checkClosed() + } + + override def setNClob(parameterIndex: Int, reader: Reader): Unit = { + checkClosed() + } + + override def executeUpdate(sql: String): Int = { + checkClosed() + val updateResponse = executeQuery(sql) + updateResponse.next() + val updateCount = updateResponse.getInt("matchedCount") + _lastUpdateCount = updateCount + updateCount + } + + override def getMaxFieldSize: Int = { + checkClosed() + 0 + } + + override def setMaxFieldSize(max: Int): Unit = { + checkClosed() + } + + override def getMaxRows: 
Int = { + sqlFeatureNotSupported() + } + + override def setMaxRows(max: Int): Unit = { + sqlFeatureNotSupported() + } + + override def setEscapeProcessing(enable: Boolean): Unit = { + checkClosed() + } + + override def getQueryTimeout: Int = _queryTimeout + + override def setQueryTimeout(seconds: Int): Unit = _queryTimeout = seconds + + override def cancel(): Unit = { + sqlFeatureNotSupported("cancel not supported at MongoDb Driver") + } + + override def getWarnings: SQLWarning = { + checkClosed() + null + } + + override def clearWarnings(): Unit = { + checkClosed() + } + + override def setCursorName(name: String): Unit = { + checkClosed() + } + + override def getResultSet: ResultSet = { + checkClosed() + _lastResultSet + } + + override def getUpdateCount: Int = { + checkClosed() + _lastUpdateCount + } + + override def getMoreResults: Boolean = { + checkClosed() + false + } + + override def setFetchDirection(direction: Int): Unit = { + sqlFeatureNotSupported() + } + + override def getFetchDirection: Int = { + checkClosed() + ResultSet.FETCH_FORWARD + } + + override def setFetchSize(rows: Int): Unit = { + + } + + override def getFetchSize: Int = { + -1 + } + + override def getResultSetConcurrency: Int = { + sqlFeatureNotSupported() + } + + override def getResultSetType: Int = { + checkClosed() + ResultSet.TYPE_FORWARD_ONLY + } + + override def addBatch(sql: String): Unit = { + checkClosed() + } + + override def clearBatch(): Unit = { + checkClosed() + } + + override def executeBatch(): Array[Int] = { + checkClosed() + null + } + + override def getConnection: Connection = { + checkClosed() + connection + } + + override def getMoreResults(current: Int): Boolean = { + checkClosed() + false + } + + override def getGeneratedKeys: ResultSet = { + checkClosed() + null + } + + override def executeUpdate(sql: String, autoGeneratedKeys: Int): Int = { + executeUpdate(sql) + } + + override def executeUpdate(sql: String, columnIndexes: Array[Int]): Int = { + executeUpdate(sql) + 
} + + override def executeUpdate(sql: String, columnNames: Array[String]): Int = { + executeUpdate(sql) + } + + override def execute(sql: String, autoGeneratedKeys: Int): Boolean = { + execute(sql) + } + + override def execute(sql: String, columnIndexes: Array[Int]): Boolean = { + execute(sql) + } + + override def execute(sql: String, columnNames: Array[String]): Boolean = { + execute(sql) + } + + override def getResultSetHoldability: Int = { + checkClosed() + 0 + } + + override def setPoolable(poolable: Boolean): Unit = { + checkClosed() + 0 + } + + override def isPoolable: Boolean = { + checkClosed() + false + } + + override def closeOnCompletion(): Unit = { + checkClosed() + } + + override def isCloseOnCompletion: Boolean = { + checkClosed() + false + } + + override def unwrap[T](iface: Class[T]): T = null.asInstanceOf[T] + + override def isWrapperFor(iface: Class[_]): Boolean = false + + override def registerOutParameter(parameterIndex: Int, sqlType: Int): Unit = ??? + + override def registerOutParameter(parameterIndex: Int, sqlType: Int, scale: Int): Unit = ??? + + override def wasNull(): Boolean = ??? + + override def getString(parameterIndex: Int): String = ??? + + override def getBoolean(parameterIndex: Int): Boolean = ??? + + override def getByte(parameterIndex: Int): Byte = ??? + + override def getShort(parameterIndex: Int): Short = ??? + + override def getInt(parameterIndex: Int): Int = ??? + + override def getLong(parameterIndex: Int): Long = ??? + + override def getFloat(parameterIndex: Int): Float = ??? + + override def getDouble(parameterIndex: Int): Double = ??? + + override def getBigDecimal(parameterIndex: Int, scale: Int): java.math.BigDecimal = ??? + + override def getBytes(parameterIndex: Int): Array[Byte] = ??? + + override def getDate(parameterIndex: Int): Date = ??? + + override def getTime(parameterIndex: Int): Time = ??? + + override def getTimestamp(parameterIndex: Int): Timestamp = ??? 
+ + override def getObject(parameterIndex: Int): AnyRef = ??? + + override def getBigDecimal(parameterIndex: Int): java.math.BigDecimal = ??? + + override def getObject(parameterIndex: Int, map: util.Map[String, Class[_]]): AnyRef = ??? + + override def getRef(parameterIndex: Int): Ref = ??? + + override def getBlob(parameterIndex: Int): Blob = ??? + + override def getClob(parameterIndex: Int): Clob = ??? + + override def getArray(parameterIndex: Int): sql.Array = ??? + + override def getDate(parameterIndex: Int, cal: Calendar): Date = ??? + + override def getTime(parameterIndex: Int, cal: Calendar): Time = ??? + + override def getTimestamp(parameterIndex: Int, cal: Calendar): Timestamp = ??? + + override def registerOutParameter(parameterIndex: Int, sqlType: Int, typeName: String): Unit = ??? + + override def registerOutParameter(parameterName: String, sqlType: Int): Unit = ??? + + override def registerOutParameter(parameterName: String, sqlType: Int, scale: Int): Unit = ??? + + override def registerOutParameter(parameterName: String, sqlType: Int, typeName: String): Unit = ??? + + override def getURL(parameterIndex: Int): URL = ??? + + override def setURL(parameterName: String, `val`: URL): Unit = ??? + + override def setNull(parameterName: String, sqlType: Int): Unit = ??? + + override def setBoolean(parameterName: String, x: Boolean): Unit = ??? + + override def setByte(parameterName: String, x: Byte): Unit = ??? + + override def setShort(parameterName: String, x: Short): Unit = ??? + + override def setInt(parameterName: String, x: Int): Unit = ??? + + override def setLong(parameterName: String, x: Long): Unit = ??? + + override def setFloat(parameterName: String, x: Float): Unit = ??? + + override def setDouble(parameterName: String, x: Double): Unit = ??? + + override def setBigDecimal(parameterName: String, x: java.math.BigDecimal): Unit = ??? + + override def setString(parameterName: String, x: String): Unit = ??? 
+ + override def setBytes(parameterName: String, x: Array[Byte]): Unit = ??? + + override def setDate(parameterName: String, x: Date): Unit = ??? + + override def setTime(parameterName: String, x: Time): Unit = ??? + + override def setTimestamp(parameterName: String, x: Timestamp): Unit = ??? + + override def setAsciiStream(parameterName: String, x: InputStream, length: Int): Unit = ??? + + override def setBinaryStream(parameterName: String, x: InputStream, length: Int): Unit = ??? + + override def setObject(parameterName: String, x: Any, targetSqlType: Int, scale: Int): Unit = ??? + + override def setObject(parameterName: String, x: Any, targetSqlType: Int): Unit = ??? + + override def setObject(parameterName: String, x: Any): Unit = ??? + + override def setCharacterStream(parameterName: String, reader: Reader, length: Int): Unit = ??? + + override def setDate(parameterName: String, x: Date, cal: Calendar): Unit = ??? + + override def setTime(parameterName: String, x: Time, cal: Calendar): Unit = ??? + + override def setTimestamp(parameterName: String, x: Timestamp, cal: Calendar): Unit = ??? + + override def setNull(parameterName: String, sqlType: Int, typeName: String): Unit = ??? + + override def getString(parameterName: String): String = ??? + + override def getBoolean(parameterName: String): Boolean = ??? + + override def getByte(parameterName: String): Byte = ??? + + override def getShort(parameterName: String): Short = ??? + + override def getInt(parameterName: String): Int = ??? + + override def getLong(parameterName: String): Long = ??? + + override def getFloat(parameterName: String): Float = ??? + + override def getDouble(parameterName: String): Double = ??? + + override def getBytes(parameterName: String): Array[Byte] = ??? + + override def getDate(parameterName: String): Date = ??? + + override def getTime(parameterName: String): Time = ??? + + override def getTimestamp(parameterName: String): Timestamp = ??? 
+ + override def getObject(parameterName: String): AnyRef = ??? + + override def getBigDecimal(parameterName: String): java.math.BigDecimal = ??? + + override def getObject(parameterName: String, map: util.Map[String, Class[_]]): AnyRef = ??? + + override def getRef(parameterName: String): Ref = ??? + + override def getBlob(parameterName: String): Blob = ??? + + override def getClob(parameterName: String): Clob = ??? + + override def getArray(parameterName: String): sql.Array = ??? + + override def getDate(parameterName: String, cal: Calendar): Date = ??? + + override def getTime(parameterName: String, cal: Calendar): Time = ??? + + override def getTimestamp(parameterName: String, cal: Calendar): Timestamp = ??? + + override def getURL(parameterName: String): URL = ??? + + override def getRowId(parameterIndex: Int): RowId = ??? + + override def getRowId(parameterName: String): RowId = ??? + + override def setRowId(parameterName: String, x: RowId): Unit = ??? + + override def setNString(parameterName: String, value: String): Unit = ??? + + override def setNCharacterStream(parameterName: String, value: Reader, length: Long): Unit = ??? + + override def setNClob(parameterName: String, value: NClob): Unit = ??? + + override def setClob(parameterName: String, reader: Reader, length: Long): Unit = ??? + + override def setBlob(parameterName: String, inputStream: InputStream, length: Long): Unit = ??? + + override def setNClob(parameterName: String, reader: Reader, length: Long): Unit = ??? + + override def getNClob(parameterIndex: Int): NClob = ??? + + override def getNClob(parameterName: String): NClob = ??? + + override def setSQLXML(parameterName: String, xmlObject: SQLXML): Unit = ??? + + override def getSQLXML(parameterIndex: Int): SQLXML = ??? + + override def getSQLXML(parameterName: String): SQLXML = ??? + + override def getNString(parameterIndex: Int): String = ??? + + override def getNString(parameterName: String): String = ??? 
+ + override def getNCharacterStream(parameterIndex: Int): Reader = ??? + + override def getNCharacterStream(parameterName: String): Reader = ??? + + override def getCharacterStream(parameterIndex: Int): Reader = ??? + + override def getCharacterStream(parameterName: String): Reader = ??? + + override def setBlob(parameterName: String, x: Blob): Unit = ??? + + override def setClob(parameterName: String, x: Clob): Unit = ??? + + override def setAsciiStream(parameterName: String, x: InputStream, length: Long): Unit = ??? + + override def setBinaryStream(parameterName: String, x: InputStream, length: Long): Unit = ??? + + override def setCharacterStream(parameterName: String, reader: Reader, length: Long): Unit = ??? + + override def setAsciiStream(parameterName: String, x: InputStream): Unit = ??? + + override def setBinaryStream(parameterName: String, x: InputStream): Unit = ??? + + override def setCharacterStream(parameterName: String, reader: Reader): Unit = ??? + + override def setNCharacterStream(parameterName: String, value: Reader): Unit = ??? + + override def setClob(parameterName: String, reader: Reader): Unit = ??? + + override def setBlob(parameterName: String, inputStream: InputStream): Unit = ??? + + override def setNClob(parameterName: String, reader: Reader): Unit = ??? + + override def getObject[T](parameterIndex: Int, `type`: Class[T]): T = ??? + + override def getObject[T](parameterName: String, `type`: Class[T]): T = ??? 
+} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala index d993461e..614ae986 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala @@ -4,6 +4,7 @@ import com.mongodb.client.model.DropIndexOptions import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.database.DatabaseProvider import dev.mongocamp.driver.mongodb.database.DatabaseProvider.CollectionSeparator +import dev.mongocamp.driver.mongodb.exception.SqlCommandNotSupportedException import dev.mongocamp.driver.mongodb.sql.SQLCommandType.SQLCommandType import net.sf.jsqlparser.expression.operators.conditional.{ AndExpression, OrExpression } import net.sf.jsqlparser.expression.operators.relational._ @@ -11,20 +12,25 @@ import net.sf.jsqlparser.expression.{ Expression, Parenthesis } import net.sf.jsqlparser.parser.{ CCJSqlParser, StreamProvider } import net.sf.jsqlparser.schema.Table import net.sf.jsqlparser.statement.UnsupportedStatement +import net.sf.jsqlparser.statement.alter.Alter import net.sf.jsqlparser.statement.create.index.CreateIndex +import net.sf.jsqlparser.statement.create.table.CreateTable import net.sf.jsqlparser.statement.delete.Delete import net.sf.jsqlparser.statement.drop.Drop +import net.sf.jsqlparser.statement.execute.Execute import net.sf.jsqlparser.statement.insert.Insert import net.sf.jsqlparser.statement.select.{ FromItem, PlainSelect, Select, SelectItem } import net.sf.jsqlparser.statement.show.ShowTablesStatement import net.sf.jsqlparser.statement.truncate.Truncate import net.sf.jsqlparser.statement.update.Update import org.bson.conversions.Bson +import org.h2.command.ddl.AlterTable import org.mongodb.scala.model.IndexOptions import org.mongodb.scala.model.Sorts.ascending -import org.mongodb.scala.{ Document, Observable } +import 
org.mongodb.scala.{ Document, Observable, SingleObservable } import java.sql.SQLException +import java.util.Date import java.util.concurrent.TimeUnit import scala.collection.mutable import scala.collection.mutable.ArrayBuffer @@ -39,6 +45,9 @@ class MongoSqlQueryHolder { private var setElement: Option[Bson] = None private val documentsToInsert: ArrayBuffer[Document] = ArrayBuffer.empty private var indexOptions: Option[IndexOptions] = None + private var callFunction: Option[String] = None + private var keepOneDocument: Boolean = false + private val keysForEmptyDocument: mutable.Set[String] = mutable.Set.empty def this(statement: net.sf.jsqlparser.statement.Statement) = { this() @@ -72,14 +81,14 @@ class MongoSqlQueryHolder { case "INDEX" => sqlCommandType = SQLCommandType.DropIndex sqlTable = drop.getName - if (!getCollection.contains(".")) { - throw new IllegalArgumentException("not supported drop index without collection specified in the name") + if (!getCollection.contains(CollectionSeparator)) { + throw new SqlCommandNotSupportedException("not supported drop index without collection specified in the name") } case "DATABASE" => sqlCommandType = SQLCommandType.DropDatabase sqlTable = drop.getName case _ => - throw new IllegalArgumentException("not supported drop command type") + throw new SqlCommandNotSupportedException("not supported drop command type") } } else if (classOf[Truncate].isAssignableFrom(statement.getClass)) { @@ -90,6 +99,18 @@ class MongoSqlQueryHolder { else if (classOf[ShowTablesStatement].isAssignableFrom(statement.getClass)) { sqlCommandType = SQLCommandType.ShowTables } + else if (classOf[Execute].isAssignableFrom(statement.getClass)) { + sqlCommandType = SQLCommandType.Execute + callFunction = Some(statement.asInstanceOf[Execute].getName) + } + else if (classOf[CreateTable].isAssignableFrom(statement.getClass)) { + val createTable = statement.asInstanceOf[CreateTable] + sqlCommandType = SQLCommandType.CreateTable + sqlTable = 
createTable.getTable + } + else if (classOf[Alter].isAssignableFrom(statement.getClass)) { + sqlCommandType = SQLCommandType.AlterTable + } else if (classOf[UnsupportedStatement].isAssignableFrom(statement.getClass)) { val unsupportedStatement = statement.asInstanceOf[UnsupportedStatement] val isShowDatabases = unsupportedStatement.toString.toLowerCase.contains("show databases") @@ -98,17 +119,17 @@ class MongoSqlQueryHolder { sqlCommandType = SQLCommandType.ShowDatabases } else { - throw new IllegalArgumentException("not supported sql command type") + throw new SqlCommandNotSupportedException(s"not supported sql command type <${statement.getClass.getSimpleName}>") } } else { - throw new IllegalArgumentException("not supported sql command type") + throw new SqlCommandNotSupportedException(s"not supported sql command type <${statement.getClass.getSimpleName}>") } "" } def getCollection: String = { - sqlTable.getFullyQualifiedName.replace(".", CollectionSeparator).replace("'", "").replace("`", "") + Option(sqlTable).map(_.getFullyQualifiedName.replace(".", CollectionSeparator).replace("'", "").replace("\"", "").replace("`", "")).orNull } def run(provider: DatabaseProvider, allowDiskUsage: Boolean = true): Observable[Document] = { @@ -124,7 +145,8 @@ class MongoSqlQueryHolder { }) case SQLCommandType.Select => - provider.dao(getCollection).findAggregated(aggregatePipeline.toList, allowDiskUsage) + val originalObservable = provider.dao(getCollection).findAggregated(aggregatePipeline.toList, allowDiskUsage) + originalObservable case SQLCommandType.Update => val updateSet = setElement.getOrElse(throw new IllegalArgumentException("update set element must be defined")) @@ -158,16 +180,41 @@ class MongoSqlQueryHolder { case SQLCommandType.ShowTables => provider.collections() + case SQLCommandType.ShowDatabases => provider.databases + case SQLCommandType.DropTable => provider.dao(getCollection).drop().map(_ => org.mongodb.scala.Document("wasAcknowledged" -> true)) + case 
SQLCommandType.CreateTable => + provider.dao(getCollection).createIndex(Map("_id" -> 1)).map(_ => org.mongodb.scala.Document("created" -> true)) + + case SQLCommandType.AlterTable => + SingleObservable(org.mongodb.scala.Document("changed" -> "true")) + + case SQLCommandType.Execute => + SingleObservable( + callFunction + .map(function => { + if (function.equalsIgnoreCase("current_schema")) { + org.mongodb.scala.Document("currentSchema" -> provider.DefaultDatabaseName) + } + else { + throw new SqlCommandNotSupportedException("not supported function") + } + }) + .getOrElse(Document()) + ) case _ => - throw new IllegalArgumentException("not supported sql command type") + throw new SqlCommandNotSupportedException("not supported sql command type") } } + def getKeysForEmptyDocument: Set[String] = keysForEmptyDocument.toSet + + def selectFunctionCall: Boolean = keepOneDocument + private def getUpdateOrDeleteFilter: Bson = { updateOrDeleteFilter.getOrElse(Map.empty).toMap } @@ -180,7 +227,16 @@ class MongoSqlQueryHolder { case e: net.sf.jsqlparser.expression.DateValue => e.getValue case e: net.sf.jsqlparser.expression.TimeValue => e.getValue case e: net.sf.jsqlparser.expression.TimestampValue => e.getValue - case e: net.sf.jsqlparser.expression.NullValue => null + case _: net.sf.jsqlparser.expression.NullValue => null + case t: net.sf.jsqlparser.expression.TimeKeyExpression => + t.getStringValue.toUpperCase match { + case "CURRENT_TIMESTAMP" => new Date() + case "NOW" => new Date() + case _ => t.getStringValue + } + case e: net.sf.jsqlparser.schema.Column => + val name = e.getColumnName + name.toIntOption.getOrElse(name.toBooleanOption.getOrElse(name)) case _ => throw new IllegalArgumentException("not supported value type") } @@ -241,6 +297,14 @@ class MongoSqlQueryHolder { } val functionName = if (e.isNot) "$nin" else "$in" queryMap.put(e.getLeftExpression.toString, Map(functionName -> value)) + case e: LikeExpression => + val value = Map("$regex" -> 
e.getRightExpression.toString.replace("%", "(.*?)"), "$options" -> "i") + if (e.isNot) { + queryMap.put(e.getLeftExpression.toString, Map("$not" -> value)) + } + else { + queryMap.put(e.getLeftExpression.toString, value) + } case e: IsNullExpression => if (e.isNot) { queryMap.put(e.getLeftExpression.toString, Map("$ne" -> null)) @@ -257,28 +321,65 @@ class MongoSqlQueryHolder { select.getSelectBody match { case plainSelect: PlainSelect => val selectItems = Option(plainSelect.getSelectItems).map(_.asScala).getOrElse(List.empty) - val aliasList = ArrayBuffer[String]() + val maybeDistinct = Option(plainSelect.getDistinct) + + selectItems.foreach(sI => { + if (classOf[net.sf.jsqlparser.expression.Function].isAssignableFrom(sI.getExpression.getClass)) { + keepOneDocument = maybeDistinct.isEmpty + } + }) + val aliasList = ArrayBuffer[String]() sqlCommandType = SQLCommandType.Select - Option(plainSelect.getGroupBy).foreach(gbEl => { + val maybeGroupByElement = Option(plainSelect.getGroupBy) + maybeGroupByElement.foreach(gbEl => { val groupBy = gbEl.getGroupByExpressionList.getExpressions.asScala.map(_.toString).toList val groupId = mutable.Map[String, Any]() val group = mutable.Map[String, Any]() groupBy.foreach(g => groupId += g -> ("$" + g)) selectItems.foreach { case e: SelectItem[Expression] => val expressionName = e.getExpression.toString - if (expressionName.contains("count")) { + if (expressionName.toLowerCase().contains("count")) { group += expressionName -> Map("$sum" -> 1) } else { if (!groupBy.contains(expressionName)) { val espr = expressionName.split('(').map(_.trim.replace(")", "")).map(s => ("$" + s)) - group += expressionName -> Map(espr.head -> espr.last) + if (espr.head.equalsIgnoreCase("max")) { + group += expressionName -> Map(espr.head -> espr.last) + } + else { + group += expressionName -> Map(espr.head -> espr.last) + } } } } val groupMap = Map("_id" -> groupId) ++ group.toMap ++ groupId.keys.map(s => s -> Map("$first" -> ("$" + s))).toMap 
aggregatePipeline += Map("$group" -> groupMap) }) + if (maybeGroupByElement.isEmpty && keepOneDocument) { + val group = mutable.Map[String, Any]() + val idGroupMap = mutable.Map() + selectItems.foreach { case se: SelectItem[Expression] => + val expressionName = se.getExpression.toString + if (expressionName.toLowerCase().contains("count")) { + group += expressionName -> Map("$sum" -> 1) + } + else { + val espr = expressionName.split('(').map(_.trim.replace(")", "")).map(s => ("$" + s)) + val functionName: String = espr.head.toLowerCase match { + case "$max" => "$last" + case "$min" => "$first" + case _ => espr.head + } + val expression = if (functionName.equalsIgnoreCase(espr.last)) Map("$first" -> espr.last) else Map(functionName -> espr.last) + group += expressionName -> expression + } + keysForEmptyDocument += Option(se.getAlias).map(_.getName).getOrElse(expressionName) + } + + val groupMap = Map("_id" -> idGroupMap) ++ group.toMap + aggregatePipeline += Map("$group" -> groupMap) + } def convertFromItemToTable(fromItem: FromItem): Table = { val tableName = Option(fromItem.getAlias).map(a => fromItem.toString.replace(a.toString, "")).getOrElse(fromItem).toString new Table(tableName) @@ -394,7 +495,7 @@ class MongoSqlQueryHolder { "$replaceWith" -> Map("$mergeObjects" -> aliasList.map(string => if (string.startsWith("$")) string else "$" + string).toList) ) } - Option(plainSelect.getDistinct).foreach { distinct => + maybeDistinct.foreach { distinct => val groupMap: mutable.Map[String, Any] = mutable.Map() selectItems.foreach { case e: SelectItem[Expression] => val expressionName = e.getExpression.toString @@ -451,7 +552,7 @@ class MongoSqlQueryHolder { singleDocumentCreated = true } catch { - case _: Throwable => + case t: Throwable => throw new IllegalArgumentException("not supported expression list") } } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sql/SQLCommandType.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sql/SQLCommandType.scala index 
ba60512b..f1052b67 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/sql/SQLCommandType.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sql/SQLCommandType.scala @@ -6,6 +6,6 @@ object SQLCommandType extends Enumeration { type SQLCommandType = Value - val Delete, Select, Update, Insert, CreateIndex, DropTable, DropIndex, DropDatabase, ShowDatabases, ShowTables = Value + val Delete, Select, Update, Insert, CreateIndex, DropTable, DropIndex, DropDatabase, ShowDatabases, ShowTables, Execute, AlterTable, CreateTable = Value } \ No newline at end of file diff --git a/src/test/resources/json/people.json b/src/test/resources/json/people.json index 5ca35848..2c91dfa9 100644 --- a/src/test/resources/json/people.json +++ b/src/test/resources/json/people.json @@ -2,7 +2,7 @@ { "_id" : { "$oid" : "5e9ef66185c0145fa5d3c448" }, "id" : { "$numberLong" : "1" }, "guid" : "19ebe4fe-f860-4cbc-ac0a-664a418e2173", "isActive" : true, "balance" : 2316.0, "picture" : "http://placehold.it/32x32", "age" : 25, "name" : "Bowen Leon", "gender" : "male", "email" : "bowenleon@inrt.com", "phone" : "+1 (904) 457-2017", "address" : "138 Miami Court, Urbana, Kansas, 1034", "about" : "Commodo in mollit laboris incididunt excepteur nulla cillum sunt do occaecat Lorem. Excepteur esse id magna pariatur irure anim officia exercitation veniam anim dolor. Sunt irure est dolore nisi nulla nulla. Nostrud aliquip exercitation ut adipisicing esse ullamco incididunt mollit laborum duis exercitation. Ipsum commodo excepteur nulla sit irure laboris magna ipsum Lorem.\r\n", "registered" : { "$date" : "2014-01-26T16:08:40.000+0000" }, "tags" : [ "ipsum", "qui", "proident", "sunt", "cillum", "veniam", "laboris" ], "friends" : [ { "id" : { "$numberLong" : "0" }, "name" : "Reyes Velasquez" }, { "id" : { "$numberLong" : "1" }, "name" : "Rosalie Hooper" }, { "id" : { "$numberLong" : "2" }, "name" : "Alyssa David" } ], "greeting" : "Hello, Bowen Leon! 
You have 9 unread messages.", "favoriteFruit" : "apple" } { "_id" : { "$oid" : "5e9ef66185c0145fa5d3c449" }, "id" : { "$numberLong" : "2" }, "guid" : "6ee53e07-2e61-48cd-9bc9-b3505a0438f3", "isActive" : false, "balance" : 1527.0, "picture" : "http://placehold.it/32x32", "age" : 40, "name" : "Cecilia Lynn", "gender" : "female", "email" : "cecilialynn@medicroix.com", "phone" : "+1 (875) 525-3138", "address" : "124 Herzl Street, Greenwich, Arkansas, 5309", "about" : "Esse adipisicing ipsum esse consectetur eu ad sunt sit culpa enim velit elit velit deserunt. Aliqua nulla et laboris nulla aute excepteur Lorem. Ut aliquip non excepteur exercitation consectetur anim est ex irure dolore ut. Consequat enim enim dolor excepteur mollit consectetur. Magna sunt reprehenderit est quis.\r\n", "registered" : { "$date" : "2014-02-21T23:13:05.000+0000" }, "tags" : [ "eiusmod", "minim", "magna", "est", "laborum", "nisi", "qui" ], "friends" : [ { "id" : { "$numberLong" : "0" }, "name" : "Erika Harmon" }, { "id" : { "$numberLong" : "1" }, "name" : "Horn Larsen" }, { "id" : { "$numberLong" : "2" }, "name" : "Gertrude Fuller" }, { "id" : { "$numberLong" : "3" }, "name" : "Spencer Hutchinson" }, { "id" : { "$numberLong" : "4" }, "name" : "Beryl Buckley" } ], "greeting" : "Hello, Cecilia Lynn! You have 7 unread messages.", "favoriteFruit" : "strawberry" } { "_id" : { "$oid" : "5e9ef66185c0145fa5d3c44a" }, "id" : { "$numberLong" : "3" }, "guid" : "a01c8bb6-95ac-4235-b6b3-475734f0dd92", "isActive" : false, "balance" : 2682.0, "picture" : "http://placehold.it/32x32", "age" : 24, "name" : "Sylvia Ortega", "gender" : "female", "email" : "sylviaortega@viagrand.com", "phone" : "+1 (983) 470-3157", "address" : "617 Vernon Avenue, Advance, Connecticut, 7787", "about" : "Tempor aliquip dolor excepteur proident ex magna commodo laboris. Ullamco ex esse excepteur nostrud. Duis ex anim pariatur dolore ut irure. Consequat non Lorem laborum esse anim magna consequat voluptate dolor elit. 
Mollit sint consequat ipsum minim id anim aute reprehenderit eu velit voluptate commodo.\r\n", "registered" : { "$date" : "2014-01-13T07:33:15.000+0000" }, "tags" : [ "ut", "culpa", "reprehenderit", "ad", "amet", "officia", "nostrud" ], "friends" : [ { "id" : { "$numberLong" : "0" }, "name" : "Ferrell Rhodes" }, { "id" : { "$numberLong" : "1" }, "name" : "Ana Guy" }, { "id" : { "$numberLong" : "2" }, "name" : "Rosanne Griffin" }, { "id" : { "$numberLong" : "3" }, "name" : "Morrow Adams" }, { "id" : { "$numberLong" : "4" }, "name" : "Keri White" }, { "id" : { "$numberLong" : "5" }, "name" : "Tracey Sykes" } ], "greeting" : "Hello, Sylvia Ortega! You have 9 unread messages.", "favoriteFruit" : "apple" } -{ "_id" : { "$oid" : "5e9ef66185c0145fa5d3c44b" }, "id" : { "$numberLong" : "4" }, "guid" : "4ded35ef-ba63-4eef-996f-d67e38553b0d", "isActive" : false, "balance" : 1159.0, "picture" : "http://placehold.it/32x32", "age" : 31, "name" : "Howe Briggs", "gender" : "male", "email" : "howebriggs@zappix.com", "phone" : "+1 (966) 518-3246", "address" : "963 Roosevelt Court, Bowden, Oregon, 6236", "about" : "Et eu culpa elit eiusmod ea proident ad est culpa elit. Dolor eiusmod officia nisi aliquip. Ut irure laborum qui dolor ut veniam est veniam nostrud consequat voluptate velit do duis. Irure excepteur excepteur reprehenderit nostrud reprehenderit voluptate quis ex aliquip. Sunt amet commodo pariatur fugiat est laborum. Est est non aliqua nisi laboris. 
Irure voluptate aute deserunt commodo nostrud amet anim reprehenderit nostrud cupidatat aliqua veniam anim.\r\n", "registered" : { "$date" : "2014-03-04T23:00:36.000+0000" }, "tags" : [ "veniam", "anim", "fugiat", "dolor", "elit", "nostrud", "quis" ], "friends" : [ { "id" : { "$numberLong" : "0" }, "name" : "Hunter Garner" }, { "id" : { "$numberLong" : "1" }, "name" : "Walls Wright" }, { "id" : { "$numberLong" : "2" }, "name" : "Christie Walker" }, { "id" : { "$numberLong" : "3" }, "name" : "Powell Woods" }, { "id" : { "$numberLong" : "4" }, "name" : "Doreen Carpenter" }, { "id" : { "$numberLong" : "5" }, "name" : "Beach Harrison" }, { "id" : { "$numberLong" : "6" }, "name" : "Perkins Mullins" }, { "id" : { "$numberLong" : "7" }, "name" : "Blake Goff" }, { "id" : { "$numberLong" : "8" }, "name" : "Clarke Spears" } ], "greeting" : "Hello, Howe Briggs! You have 8 unread messages.", "favoriteFruit" : "banana" } +{ "_id" : { "$oid" : "5e9ef66185c0145fa5d3c44b" }, "id" : { "$numberLong" : "4" }, "guid" : "4ded35ef-ba63-4eef-996f-d67e38553b0d", "isActive" : false, "balance" : 1159.0, "picture" : "http://placehold.it/32x32", "age" : 31, "name" : "Howe Briggs", "gender" : "male", "email" : "howebriggs@zappix.com", "phone" : "+1 (966) 518-3246", "address" : "963 Roosevelt Court, Bowden, Oregon, 6236", "about" : "Et eu culpa elit eiusmod ea proident ad est culpa elit. Dolor eiusmod officia nisi aliquip. Ut irure laborum qui dolor ut veniam est veniam nostrud consequat voluptate velit do duis. Irure excepteur excepteur reprehenderit nostrud reprehenderit voluptate quis ex aliquip. Sunt amet commodo pariatur fugiat est laborum. Est est non aliqua nisi laboris. 
Irure voluptate aute deserunt commodo nostrud amet anim reprehenderit nostrud cupidatat aliqua veniam anim.\r\n", "registered" : { "$date" : "2014-03-04T23:00:36.000+0000" }, "tags" : [ "veniam", "anim", "fugiat", "dolor", "elit", "nostrud", "quis" ], "friends" : [ { "id" : { "$numberLong" : "0" }, "name" : "Hunter Garner" }, { "id" : { "$numberLong" : "1" }, "name" : "Walls Wright" }, { "id" : { "$numberLong" : "2" }, "name" : "Christie Walker" }, { "id" : { "$numberLong" : "3" }, "name" : "Powell Woods" }, { "id" : { "$numberLong" : "4" }, "name" : "Doreen Carpenter" }, { "id" : { "$numberLong" : "5" }, "name" : "Beach Harrison" }, { "id" : { "$numberLong" : "6" }, "name" : "Perkins Mullins" }, { "id" : { "$numberLong" : "7" }, "name" : "Blake Goff" }, { "id" : { "$numberLong" : "8" }, "name" : "Clarke Spears" } ], "bestFriend": { "id" : { "$numberLong" : "8" }, "name" : "Clarke Spears" }, "greeting" : "Hello, Howe Briggs! You have 8 unread messages.", "favoriteFruit" : "banana" } { "_id" : { "$oid" : "5e9ef66185c0145fa5d3c44c" }, "id" : { "$numberLong" : "5" }, "guid" : "4791d80d-0079-4dba-9f40-a1b317e1dedd", "isActive" : true, "balance" : 2132.0, "picture" : "http://placehold.it/32x32", "age" : 40, "name" : "Massey Sears", "gender" : "male", "email" : "masseysears@kog.com", "phone" : "+1 (914) 433-2474", "address" : "232 Claver Place, Omar, Kentucky, 1244", "about" : "Laboris nisi pariatur elit culpa tempor cupidatat voluptate officia labore. Magna exercitation amet sunt aliquip reprehenderit incididunt. Cupidatat id cupidatat ea officia duis ex minim. Lorem velit irure mollit non magna non consectetur tempor excepteur. Labore commodo in sit duis laborum ea nulla anim. 
Do voluptate adipisicing sit magna sit.\r\n", "registered" : { "$date" : "2014-04-14T11:26:33.000+0000" }, "tags" : [ "excepteur", "aliqua", "veniam", "pariatur", "incididunt", "sint", "duis" ], "friends" : [ { "id" : { "$numberLong" : "0" }, "name" : "Katie Holden" }, { "id" : { "$numberLong" : "1" }, "name" : "Payne French" }, { "id" : { "$numberLong" : "2" }, "name" : "Nannie Snyder" }, { "id" : { "$numberLong" : "3" }, "name" : "Yang Carey" }, { "id" : { "$numberLong" : "4" }, "name" : "Reilly Valdez" } ], "greeting" : "Hello, Massey Sears! You have 6 unread messages.", "favoriteFruit" : "strawberry" } { "_id" : { "$oid" : "5e9ef66185c0145fa5d3c44d" }, "id" : { "$numberLong" : "6" }, "guid" : "09552721-4ff8-4898-8066-16d4e8bbcea0", "isActive" : false, "balance" : 3872.0, "picture" : "http://placehold.it/32x32", "age" : 22, "name" : "Cecile Rogers", "gender" : "female", "email" : "cecilerogers@momentia.com", "phone" : "+1 (892) 476-2858", "address" : "149 Jerome Avenue, Crawfordsville, New Mexico, 6417", "about" : "Cupidatat ipsum enim eu nulla. Irure fugiat sint in ad dolore sunt duis sit culpa eu. Nisi ea est sint enim enim aliqua dolore labore proident ad. Ullamco cupidatat labore laboris cillum qui duis adipisicing officia cupidatat officia ullamco qui. Ut occaecat non qui labore consequat. Ex occaecat nulla sunt enim. 
Pariatur eu nisi ut non velit incididunt proident eu cillum culpa eu deserunt esse deserunt.\r\n", "registered" : { "$date" : "2014-03-27T19:24:33.000+0000" }, "tags" : [ "nostrud", "mollit", "ea", "eu", "consequat", "in", "veniam" ], "friends" : [ { "id" : { "$numberLong" : "0" }, "name" : "Rosario Spencer" }, { "id" : { "$numberLong" : "1" }, "name" : "Jacobson Sutton" }, { "id" : { "$numberLong" : "2" }, "name" : "Wooten Rivera" }, { "id" : { "$numberLong" : "3" }, "name" : "Guzman Johns" }, { "id" : { "$numberLong" : "4" }, "name" : "Liliana Campbell" }, { "id" : { "$numberLong" : "5" }, "name" : "Pamela Buchanan" }, { "id" : { "$numberLong" : "6" }, "name" : "Avila Dillon" } ], "greeting" : "Hello, Cecile Rogers! You have 8 unread messages.", "favoriteFruit" : "strawberry" } { "_id" : { "$oid" : "5e9ef66185c0145fa5d3c44e" }, "id" : { "$numberLong" : "7" }, "guid" : "943a13b8-bf7f-443b-bb36-280bf2328875", "isActive" : false, "balance" : 3815.0, "picture" : "http://placehold.it/32x32", "age" : 32, "name" : "Zelma Sweet", "gender" : "female", "email" : "zelmasweet@colaire.com", "phone" : "+1 (867) 535-3918", "address" : "358 Furman Street, Williams, South Dakota, 2685", "about" : "Et reprehenderit exercitation sint pariatur excepteur consectetur laboris. Minim aute esse non consectetur sunt aliquip tempor sunt magna aliqua est aliqua laboris. Excepteur do dolor sint mollit qui pariatur deserunt ipsum occaecat. Sunt enim ut mollit aliquip non do esse et nostrud dolor est occaecat. Labore ullamco ipsum ea eiusmod culpa. Cupidatat eu nisi tempor veniam consequat magna velit laborum eu incididunt minim quis. 
Ipsum anim cillum qui eiusmod Lorem aliqua incididunt adipisicing amet consequat velit.\r\n", "registered" : { "$date" : "2014-01-25T12:22:08.000+0000" }, "tags" : [ "minim", "dolore", "minim", "non", "velit", "mollit", "aliquip" ], "friends" : [ { "id" : { "$numberLong" : "0" }, "name" : "Erma Levine" }, { "id" : { "$numberLong" : "1" }, "name" : "Margaret Clayton" }, { "id" : { "$numberLong" : "2" }, "name" : "Norma Middleton" }, { "id" : { "$numberLong" : "3" }, "name" : "Susanne Bullock" }, { "id" : { "$numberLong" : "4" }, "name" : "Frazier Horn" }, { "id" : { "$numberLong" : "5" }, "name" : "Christy Young" }, { "id" : { "$numberLong" : "6" }, "name" : "Margarita Morales" }, { "id" : { "$numberLong" : "7" }, "name" : "Diana Hebert" }, { "id" : { "$numberLong" : "8" }, "name" : "Tonia Bell" }, { "id" : { "$numberLong" : "9" }, "name" : "Brandi Stafford" } ], "greeting" : "Hello, Zelma Sweet! You have 6 unread messages.", "favoriteFruit" : "strawberry" } diff --git a/src/test/resources/liquibase/00-init.xml b/src/test/resources/liquibase/00-init.xml new file mode 100755 index 00000000..6e85d128 --- /dev/null +++ b/src/test/resources/liquibase/00-init.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + Create table addressbook_entries for demonstrating refactorings + + + + + + + + + + + + + + + + + Create table addressbook_entries for demonstrating refactorings + + + + + + + + + + + + + + + + + + Load some Test Data + + + + + + + + + + + + + diff --git a/src/test/resources/liquibase/02-add-not-null-constraint.xml b/src/test/resources/liquibase/02-add-not-null-constraint.xml new file mode 100755 index 00000000..15b62c8d --- /dev/null +++ b/src/test/resources/liquibase/02-add-not-null-constraint.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + diff --git a/src/test/resources/liquibase/03-tag-database.xml b/src/test/resources/liquibase/03-tag-database.xml new file mode 100755 index 00000000..b20a0dee --- /dev/null +++ b/src/test/resources/liquibase/03-tag-database.xml @@ 
-0,0 +1,9 @@ + + + + + + + diff --git a/src/test/resources/liquibase/04-split-table.xml b/src/test/resources/liquibase/04-split-table.xml new file mode 100755 index 00000000..7811f11a --- /dev/null +++ b/src/test/resources/liquibase/04-split-table.xml @@ -0,0 +1,92 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/test/resources/liquibase/05-add-foreign-keys.xml b/src/test/resources/liquibase/05-add-foreign-keys.xml new file mode 100755 index 00000000..8a54e186 --- /dev/null +++ b/src/test/resources/liquibase/05-add-foreign-keys.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + diff --git a/src/test/resources/liquibase/06-change-column-type.xml b/src/test/resources/liquibase/06-change-column-type.xml new file mode 100755 index 00000000..ccdc1aa9 --- /dev/null +++ b/src/test/resources/liquibase/06-change-column-type.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + diff --git a/src/test/resources/liquibase/07-merge-columns.xml b/src/test/resources/liquibase/07-merge-columns.xml new file mode 100755 index 00000000..89aca2b4 --- /dev/null +++ b/src/test/resources/liquibase/07-merge-columns.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + diff --git a/src/test/resources/liquibase/08-create-view.xml b/src/test/resources/liquibase/08-create-view.xml new file mode 100755 index 00000000..9c4d48f1 --- /dev/null +++ b/src/test/resources/liquibase/08-create-view.xml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + SELECT FIRSTNAME,LASTNAME,STREET,POSTCODE,CITY,PHONE FROM ADDRESSBOOK_ENTRIES + JOIN ADDRESS_DATA ON ADDRESSBOOK_ENTRIES.ID = ADDRESS_DATA.ENTRY_ID + JOIN PHONE_DATA ON ADDRESSBOOK_ENTRIES.ID = PHONE_DATA.ENTRY_ID + + + diff --git a/src/test/resources/liquibase/09-add-default-columns.xml b/src/test/resources/liquibase/09-add-default-columns.xml new file mode 100755 index 00000000..8040c9ef --- /dev/null +++ 
b/src/test/resources/liquibase/09-add-default-columns.xml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/test/resources/liquibase/10-add-person.xml b/src/test/resources/liquibase/10-add-person.xml new file mode 100755 index 00000000..2cab2ee2 --- /dev/null +++ b/src/test/resources/liquibase/10-add-person.xml @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + diff --git a/src/test/resources/liquibase/11-add-note.xml b/src/test/resources/liquibase/11-add-note.xml new file mode 100755 index 00000000..96d34b56 --- /dev/null +++ b/src/test/resources/liquibase/11-add-note.xml @@ -0,0 +1,79 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/test/resources/liquibase/12-add-task-relation.xml b/src/test/resources/liquibase/12-add-task-relation.xml new file mode 100755 index 00000000..f538d2b5 --- /dev/null +++ b/src/test/resources/liquibase/12-add-task-relation.xml @@ -0,0 +1,176 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/test/resources/liquibase/addressbook.csv b/src/test/resources/liquibase/addressbook.csv new file mode 100755 index 00000000..2f136d17 --- /dev/null +++ b/src/test/resources/liquibase/addressbook.csv @@ -0,0 +1,7 @@ +id,firstname,lastname,street_name,street_number,postcode,city,phone +1,Nicole,Theiss,Koenigstrasse,77,21279,Drestedt,04186952474 +2,Erik,Fried,Ollenhauer Str.,78,70499,Stuttgart Feuerbach,0711473923 +3,Torsten,Bieber,Albrechtstrasse,98,87413,Kempten,0831241900 +4,Andreas,Meyer,Neuer Jungfernstieg,91,84080,Laberweinting,08772071470 
+5,Doreen,Grunewald,Güntzelstrasse,73,54472,Longkamp,06531854103 +6,Marko,Schneider,Buelowstrasse,79,57645,Nister,02662548331 diff --git a/src/test/resources/liquibase/changelog.xml b/src/test/resources/liquibase/changelog.xml new file mode 100755 index 00000000..fabe02fb --- /dev/null +++ b/src/test/resources/liquibase/changelog.xml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSpec.scala index 3d5cccd7..04b2fcbf 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSpec.scala @@ -20,7 +20,7 @@ class PersonDAOSpec extends PersonSpecification with MongoImplicits { "support columnNames" in { val columnNames = PersonDAO.columnNames(200) - columnNames.size mustEqual 19 + columnNames.size mustEqual 20 } "support results" in { diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSpec.scala new file mode 100644 index 00000000..4bfdcff0 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSpec.scala @@ -0,0 +1,23 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import better.files.{File, Resource} +import dev.mongocamp.driver.mongodb.dao.PersonSpecification +import dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO + +import java.sql.{Connection, DriverManager} +import java.util.Properties + +class BaseJdbcSpec extends PersonSpecification { + var connection : Connection = _ + + override def beforeAll(): Unit = { + super.beforeAll() + val connectionProps = new Properties() + val driver = new MongoJdbcDriver() + DriverManager.registerDriver(driver) + connection = DriverManager.getConnection( + 
"jdbc:mongodb://localhost:27017/mongocamp-unit-test?retryWrites=true&loadBalanced=false&serverSelectionTimeoutMS=5000&connectTimeoutMS=10000", + connectionProps + ) + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSpec.scala new file mode 100644 index 00000000..558b14bf --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSpec.scala @@ -0,0 +1,80 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import dev.mongocamp.driver.mongodb.MongoDAO +import dev.mongocamp.driver.mongodb.dao.PersonSpecification +import dev.mongocamp.driver.mongodb.model.{Grade, Score} +import dev.mongocamp.driver.mongodb.test.TestDatabase +import org.bson.types.ObjectId + +import java.sql.{DriverManager, ResultSet, Types} +import java.util.Properties +import scala.collection.mutable.ArrayBuffer +import better.files.{File, Resource} +import dev.mongocamp.driver.mongodb.{GenericObservable, MongoDAO} +import dev.mongocamp.driver.mongodb.dao.PersonSpecification +import dev.mongocamp.driver.mongodb.model.{Grade, Score} +import dev.mongocamp.driver.mongodb.test.TestDatabase +import dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO +import org.bson.types.ObjectId +import org.specs2.mutable.Specification +import org.specs2.specification.{BeforeAll, BeforeEach} + +class ExploreJdbcSpec extends BaseJdbcSpec { + + "Jdbc Connection" should { + + "get table names" in { + val tableNames = connection.getMetaData.getTables("%", "mongocamp-unit-test", "", Array.empty) + var tables = 0 + var tablePersonFound = false + while (tableNames.next()) { + tableNames.getString("TABLE_NAME") match { + case "people" => + tablePersonFound = true + tableNames.getString("TYPE_CAT") must beEqualTo("mongodb") + tableNames.getString("REMARKS") must beEqualTo("COLLECTION") + tableNames.getString("TABLE_TYPE") must beEqualTo("TABLE") + tableNames.getString("TABLE_SCHEM") must 
beEqualTo("mongocamp-unit-test") + case _ => + } + tables += 1 + } + tables must beGreaterThanOrEqualTo(1) + val columnNames = connection.getMetaData.getColumns("%", "mongocamp-unit-test", "people", "") + var columns = 0 + while (columnNames.next()) { + columnNames.getString("TABLE_CAT") must beEqualTo("mongodb") + columnNames.getString("TABLE_NAME") must beEqualTo("people") + columnNames.getString("TABLE_SCHEM") must beEqualTo("mongocamp-unit-test") + val KeyDataType = "DATA_TYPE" + columnNames.getString("COLUMN_NAME") match { + case "_id" => + columnNames.getInt(KeyDataType) must beEqualTo(Types.VARCHAR) + case "id" => + columnNames.getInt(KeyDataType) must beEqualTo(Types.BIGINT) + columnNames.getInt("DECIMAL_DIGITS") must beEqualTo(0) + case "guid" => + columnNames.getInt(KeyDataType) must beEqualTo(Types.LONGVARCHAR) + case "isActive" => + columnNames.getInt(KeyDataType) must beEqualTo(Types.BOOLEAN) + case "balance" => + columnNames.getInt(KeyDataType) must beEqualTo(Types.DOUBLE) + columnNames.getInt("DECIMAL_DIGITS") must beEqualTo(Int.MaxValue) + case "registered" => + columnNames.getInt(KeyDataType) must beEqualTo(Types.DATE) + case "tags" => + columnNames.getInt(KeyDataType) must beEqualTo(Types.ARRAY) + case "friends" => + columnNames.getInt(KeyDataType) must beEqualTo(Types.ARRAY) + case "bestFriend" => + columnNames.getInt(KeyDataType) must beEqualTo(Types.JAVA_OBJECT) + case _ => + } + columns += 1 + } + columns must beEqualTo(20) + tablePersonFound must beTrue + } + + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/LiquibaseJdbcSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/LiquibaseJdbcSpec.scala new file mode 100644 index 00000000..eb101900 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/LiquibaseJdbcSpec.scala @@ -0,0 +1,41 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import com.typesafe.scalalogging.LazyLogging +import liquibase.database.jvm.JdbcConnection +import 
liquibase.exception.LiquibaseException +import liquibase.resource.ClassLoaderResourceAccessor +import liquibase.{Contexts, LabelExpression, Liquibase} + +import scala.jdk.CollectionConverters._ +import scala.language.implicitConversions + + +class LiquibaseJdbcSpec extends BaseJdbcSpec with LazyLogging { + + "Jdbc Connection" should { + + "migrate database with liquibase" in { + val jdbcConnection = new JdbcConnection(connection) + val liquibase: Liquibase = new Liquibase("liquibase/changelog.xml", new ClassLoaderResourceAccessor(), jdbcConnection ) + val contexts = new Contexts() + val unrunChangesets = liquibase.listUnrunChangeSets(contexts, new LabelExpression()) + val changes = unrunChangesets.asScala.toList + if (changes.isEmpty) { + logger.info("liquibase - nothing to update") + true must beTrue + } + logger.info("liquibase - %s changesets to update".format(changes)) + try { + liquibase.update(contexts) + true must beTrue + } + catch { + case e: LiquibaseException => + logger.error(e.getMessage, e) + false must beTrue + } + + } + + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/SelectJDBCSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/SelectJDBCSpec.scala new file mode 100644 index 00000000..91cadbb7 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/SelectJDBCSpec.scala @@ -0,0 +1,50 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import java.sql.ResultSet +import scala.collection.mutable.ArrayBuffer + +class SelectJDBCSpec extends BaseJdbcSpec { + + "Jdbc Connection" should { + + "execute simple select" in { + val stmt = connection.createStatement() + val result = stmt.executeQuery("select id, guid, name, age, balance from people where age < 30 order by id asc") + var i = 0 + val arrayBuffer = ArrayBuffer[ResultSet]() + while (result.next()) { + i += 1 + arrayBuffer += result + } + arrayBuffer.size must beEqualTo(99) + i must beEqualTo(99) + } + + "execute prepared statement" in { + val preparedStatement 
= connection.prepareStatement("select * from `mongocamp-unit-test`.people where age < ? order by id asc") + preparedStatement.setLong(0, 30) + val result = preparedStatement.executeQuery() + var i = 0 + val arrayBuffer = ArrayBuffer[ResultSet]() + while (result.next()) { + i += 1 + arrayBuffer += result + } + arrayBuffer.size must beEqualTo(99) + i must beEqualTo(99) + } + + "count on empty table" in { + val stmt = connection.createStatement() + val result = stmt.executeQuery("select count(*) as tmp, sum(age) from empty;") + var i = 0 + while (result.next()) { + result.getInt("tmp") must beEqualTo(0) + result.getInt("sum(age)") must beEqualTo(0) + i += 1 + } + i must beEqualTo(1) + } + + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSpec.scala index d8015078..f92ce1f6 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSpec.scala @@ -1,14 +1,11 @@ package dev.mongocamp.driver.mongodb.sql -import better.files.{File, Resource} -import dev.mongocamp.driver.mongodb.{GenericObservable, MongoDAO} -import dev.mongocamp.driver.mongodb.dao.PersonSpecification import dev.mongocamp.driver.mongodb.model.{Grade, Score} import dev.mongocamp.driver.mongodb.test.TestDatabase -import dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO +import dev.mongocamp.driver.mongodb.{GenericObservable, MongoDAO} import org.bson.types.ObjectId import org.specs2.mutable.Specification -import org.specs2.specification.{BeforeAll, BeforeEach} +import org.specs2.specification.BeforeEach class DeleteSqlSpec extends Specification with BeforeEach { sequential diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala index 23f3caaf..2926b9f3 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala 
+++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala @@ -10,6 +10,7 @@ import org.mongodb.scala.model.Sorts.ascending import org.specs2.mutable.Specification import org.specs2.specification.BeforeEach +import java.sql.SQLException import scala.concurrent.duration.DurationInt class OtherSqlSpec extends PersonSpecification with BeforeEach{ @@ -48,6 +49,17 @@ class OtherSqlSpec extends PersonSpecification with BeforeEach{ collections must not contain "universityGrades" } + "catch sql error on converting sql" in { + var errorCaught = false + try { + MongoSqlQueryHolder("blub from universityGrades;") + } catch { + case _: SQLException => + errorCaught = true + } + errorCaught mustEqual true + } + "truncate collection" in { val queryConverter = MongoSqlQueryHolder("TRUNCATE TABLE universityGrades;") val selectResponse = queryConverter.run(TestDatabase.provider).resultList() @@ -99,7 +111,8 @@ class OtherSqlSpec extends PersonSpecification with BeforeEach{ val queryConverter = MongoSqlQueryHolder("show tables;") val selectResponse = queryConverter.run(TestDatabase.provider).resultList() selectResponse.size must be greaterThanOrEqualTo(1) - selectResponse.head.getStringValue("name") mustEqual "mongo-sync-log" + val filteredDocuments = selectResponse.filter(d => d.getStringValue("name").equalsIgnoreCase("people")) + filteredDocuments.head.getStringValue("name") mustEqual "people" } "show databases" in { diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala index 4f750e66..63c539e1 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala @@ -16,11 +16,12 @@ class SelectSqlSpec extends PersonSpecification { } "simple sql with schema" in { - val queryConverter = MongoSqlQueryHolder("select * from `mongocamp-unit-test`.`friend`") + val queryConverter = 
MongoSqlQueryHolder("select * from `mongocamp-unit-test`.`people`") val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1327 - selectResponse.head.getString("name") mustEqual "Castaneda Mccullough" - selectResponse.head.getLong("id") mustEqual 33 + queryConverter.getCollection mustEqual "mongocamp-unit-test:people" + selectResponse.size mustEqual 200 + selectResponse.head.getString("name") mustEqual "Cheryl Hoffman" + selectResponse.head.getLong("id") mustEqual 0 } "sql with in query" in { @@ -100,6 +101,15 @@ class SelectSqlSpec extends PersonSpecification { document.getInteger("age") mustEqual 25 } + "only count" in { + val queryConverter = MongoSqlQueryHolder("select count(*) as tmp, sum(age) from people;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + selectResponse.size mustEqual 1 + val document = selectResponse.head + document.getInteger("tmp") mustEqual 200 + document.getInteger("sum(age)") mustEqual 5961 + } + "group by with count" in { val queryConverter = MongoSqlQueryHolder("select age, count(*) as tmp, sum(age) from people group by age order by age;") val selectResponse = queryConverter.run(TestDatabase.provider).resultList() From 8a39da85d512f133ad861e979e038a75961b81c6 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Thu, 17 Oct 2024 08:30:39 +0200 Subject: [PATCH 16/22] feat: compatibility for scala 2.12 --- build.sbt | 4 +-- .../mongodb/schema/CirceProductSchema.scala | 18 +++++++++++++ .../mongodb/schema/CirceProductSchema.scala | 17 ++++++++++++ .../driver/mongodb/jdbc/MongoJdbcDriver.scala | 8 +++--- .../jdbc/resultSet/MongoDbResultSet.scala | 19 +++++++------- .../statement/MongoPreparedStatement.scala | 4 +-- .../driver/mongodb/schema/CirceSchema.scala | 11 +++++--- .../mongodb/schema/SchemaExplorer.scala | 26 +++++++++---------- .../mongodb/sql/MongoSqlQueryHolder.scala | 7 ++--- .../driver/mongodb/schema/SchemaSpec.scala | 4 ++- 10 files changed, 79 
insertions(+), 39 deletions(-) create mode 100644 src/main/scala-2.12/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala create mode 100644 src/main/scala-2.13/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala diff --git a/build.sbt b/build.sbt index 0c009a04..137c0e55 100644 --- a/build.sbt +++ b/build.sbt @@ -40,9 +40,9 @@ developers := List( licenses += ("Apache-2.0", url("https://www.apache.org/licenses/LICENSE-2.0.html")) -crossScalaVersions := Seq("2.13.13", "2.12.17") +crossScalaVersions := Seq("2.13.15", "2.12.20") -scalaVersion := crossScalaVersions.value.head +scalaVersion := crossScalaVersions.value.last scalacOptions += "-deprecation" diff --git a/src/main/scala-2.12/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala b/src/main/scala-2.12/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala new file mode 100644 index 00000000..c6cbf2e5 --- /dev/null +++ b/src/main/scala-2.12/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala @@ -0,0 +1,18 @@ +package dev.mongocamp.driver.mongodb.schema + +import io.circe.Decoder.Result +import io.circe.{Decoder, Encoder, HCursor, Json} +import jdk.internal.reflect.Reflection +import org.bson.types.ObjectId +import org.joda.time.DateTime +import org.mongodb.scala.Document + +import java.util.Date + +trait CirceProductSchema { + + def productElementNames(internalProduct: Product): Iterator[String] = { + (internalProduct.getClass.getDeclaredFields ++ internalProduct.getClass.getFields).map(_.getName).iterator + } + +} \ No newline at end of file diff --git a/src/main/scala-2.13/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala b/src/main/scala-2.13/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala new file mode 100644 index 00000000..0776b6f7 --- /dev/null +++ b/src/main/scala-2.13/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala @@ -0,0 +1,17 @@ +package dev.mongocamp.driver.mongodb.schema + +import io.circe.Decoder.Result +import 
io.circe.{ Decoder, Encoder, HCursor, Json } +import org.bson.types.ObjectId +import org.joda.time.DateTime +import org.mongodb.scala.Document + +import java.util.Date + +trait CirceProductSchema { + + def productElementNames(internalProduct: Product): Iterator[String] = { + internalProduct.productElementNames + } + +} \ No newline at end of file diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala index ddbc856e..bdad2276 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcDriver.scala @@ -2,13 +2,13 @@ package dev.mongocamp.driver.mongodb.jdbc import com.vdurmont.semver4j.Semver import dev.mongocamp.driver.mongodb.BuildInfo -import dev.mongocamp.driver.mongodb.database.{ DatabaseProvider, MongoConfig } -import org.mongodb.scala.{ ConnectionString, ServerAddress } +import dev.mongocamp.driver.mongodb.database.{DatabaseProvider, MongoConfig} +import org.mongodb.scala.{ConnectionString, ServerAddress} -import java.sql.{ Connection, DriverPropertyInfo } +import java.sql.{Connection, DriverPropertyInfo} import java.util.Properties import java.util.logging.Logger -import scala.jdk.CollectionConverters.CollectionHasAsScala +import scala.jdk.CollectionConverters._ class MongoJdbcDriver extends java.sql.Driver { private val propertyInfoHelper = new MongodbJdbcDriverPropertyInfoHelper() diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSet.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSet.scala index 88a28dd3..36f7c50d 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSet.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSet.scala @@ -2,18 +2,19 @@ package dev.mongocamp.driver.mongodb.jdbc.resultSet import 
dev.mongocamp.driver.mongodb.MongoDAO import dev.mongocamp.driver.mongodb.bson.BsonConverter -import org.mongodb.scala.bson.{BsonArray, BsonBoolean, BsonDateTime, BsonInt32, BsonInt64, BsonNull, BsonNumber, BsonObjectId, BsonString} +import org.mongodb.scala.bson.{ BsonArray, BsonBoolean, BsonDateTime, BsonInt32, BsonInt64, BsonNull, BsonNumber, BsonObjectId, BsonString } import org.mongodb.scala.bson.collection.immutable.Document -import java.io.{InputStream, Reader} -import java.net.{URI, URL} -import java.{sql, util} -import java.sql.{Blob, Clob, Date, NClob, Ref, ResultSet, ResultSetMetaData, RowId, SQLException, SQLWarning, SQLXML, Statement, Time, Timestamp} +import java.io.{ InputStream, Reader } +import java.net.{ URI, URL } +import java.{ sql, util } +import java.sql.{ Blob, Clob, Date, NClob, Ref, ResultSet, ResultSetMetaData, RowId, SQLException, SQLWarning, SQLXML, Statement, Time, Timestamp } import java.util.Calendar import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.jdbc.MongoJdbcCloseable import java.nio.charset.StandardCharsets +import scala.util.Try class MongoDbResultSet(collectionDao: MongoDAO[Document], data: List[Document], queryTimeOut: Int) extends ResultSet with MongoJdbcCloseable { private var currentRow: Document = _ @@ -75,9 +76,9 @@ class MongoDbResultSet(collectionDao: MongoDAO[Document], data: List[Document], checkClosed() val value = currentRow.getValue(metaData.getColumnName(columnIndex)) value match { - case b : BsonInt32 => b.longValue() - case b : BsonInt64 => b.longValue() - case _ => Option(value).flatMap(_.toString.toLongOption).getOrElse(0) + case b: BsonInt32 => b.longValue() + case b: BsonInt64 => b.longValue() + case _ => Option(value).flatMap(v => Try(v.toString.toLong).toOption).getOrElse(0) } } @@ -632,7 +633,7 @@ class MongoDbResultSet(collectionDao: MongoDAO[Document], data: List[Document], override def getRef(columnLabel: String): Ref = sqlFeatureNotSupported() - override def 
updateRef(columnIndex: Int, x: Ref): Unit = sqlFeatureNotSupported() + override def updateRef(columnIndex: Int, x: Ref): Unit = sqlFeatureNotSupported() override def updateRef(columnLabel: String, x: Ref): Unit = sqlFeatureNotSupported() diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/statement/MongoPreparedStatement.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/statement/MongoPreparedStatement.scala index 3251812f..a6f2e9ac 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/statement/MongoPreparedStatement.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/statement/MongoPreparedStatement.scala @@ -70,7 +70,7 @@ case class MongoPreparedStatement(connection: MongoJdbcConnection) extends Calla val collectionName = Option(queryHolder.getCollection).map(c => connection.getDatabaseProvider.dao(c)) if (!sql.toLowerCase().contains("_id")){ response = response.map(doc => { - val newDoc = Document(doc - "_id") + val newDoc = doc - "_id" newDoc }) } @@ -507,7 +507,7 @@ case class MongoPreparedStatement(connection: MongoJdbcConnection) extends Calla checkClosed() false } - +// todo override def unwrap[T](iface: Class[T]): T = null.asInstanceOf[T] override def isWrapperFor(iface: Class[_]): Boolean = false diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/CirceSchema.scala b/src/main/scala/dev/mongocamp/driver/mongodb/schema/CirceSchema.scala index b6fc8f5f..0c5d1121 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/schema/CirceSchema.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/schema/CirceSchema.scala @@ -8,7 +8,7 @@ import org.mongodb.scala.Document import java.util.Date -trait CirceSchema { +trait CirceSchema extends CirceProductSchema { implicit val DateFormat: Encoder[Date] with Decoder[Date] = new Encoder[Date] with Decoder[Date] { override def apply(a: Date): Json = Encoder.encodeString.apply(a.toInstant.toString) @@ -136,11 +136,11 @@ trait CirceSchema { .toList: _* ) case product: Product 
=> - val productElementNames = product.productElementNames.toList - val fieldMap = productElementNames + val productElementKeys = productElementNames(product).toList + val fieldMap = productElementKeys .map( key => { - val index = productElementNames.indexOf(key) + val index = productElementKeys.indexOf(key) (key, product.productElement(index)) } ) @@ -159,4 +159,7 @@ trait CirceSchema { } } + + + } \ No newline at end of file diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaExplorer.scala b/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaExplorer.scala index e94eaf49..fc536f6c 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaExplorer.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaExplorer.scala @@ -23,18 +23,16 @@ class SchemaExplorer { private def schemaAggregation(deepth: Int, sampleSize: Option[Int]): List[PipelineStage] = { val buffer = ArrayBuffer[PipelineStage]() - buffer.addAll( - sampleSize.map(size => PipelineStage("sample", Map("size" -> size))) - ) + buffer ++= sampleSize.map(size => PipelineStage("sample", Map("size" -> size))) - buffer.addOne(PipelineStage("project", Map("_" -> processObject(deepth, 0, "$$ROOT", List()), "_id" -> 0))) + buffer += PipelineStage("project", Map("_" -> processObject(deepth, 0, "$$ROOT", List()), "_id" -> 0)) (0 to deepth).foreach(_ => { - buffer.addOne(PipelineStage("unwind", Map("path" -> "$_", "preserveNullAndEmptyArrays" -> true))) - buffer.addOne(PipelineStage("replaceRoot", Map("newRoot" -> Map("$cond" -> List(Map("$eq" -> List("$_", null)), "$$ROOT", "$_"))))) + buffer += PipelineStage("unwind", Map("path" -> "$_", "preserveNullAndEmptyArrays" -> true)) + buffer += PipelineStage("replaceRoot", Map("newRoot" -> Map("$cond" -> List(Map("$eq" -> List("$_", null)), "$$ROOT", "$_")))) }) - buffer.addAll( + buffer ++= List( PipelineStage("project", Map("_" -> 0)), PipelineStage("project", Map("l" -> "$$REMOVE", "n" -> 1, "t" -> 1, "v" -> 
"$$REMOVE")), @@ -49,7 +47,6 @@ class SchemaExplorer { PipelineStage("group", Map("T" -> Map("$push" -> "$$ROOT"), "_id" -> Map("n" -> "$n"), "c" -> Map("$sum" -> "$c"))), PipelineStage("project", Map("T" -> 1, "_id" -> 0, "c" -> 1, "n" -> "$_id.n")), PipelineStage("sort", Map("n" -> 1)) - ) ) buffer.toList } @@ -79,7 +76,7 @@ class SchemaExplorer { .foreach(string => { var fieldName = string if (fieldName.startsWith(NameSeparator)) { - responseArray.addOne(NameSeparator) + responseArray += NameSeparator fieldName = fieldName.substring(1) } val hasEndingSeperator: Boolean = if (fieldName.endsWith(NameSeparator)) { @@ -89,9 +86,9 @@ class SchemaExplorer { else { false } - responseArray.addOne(fieldName) + responseArray += fieldName if (hasEndingSeperator) { - responseArray.addOne(NameSeparator) + responseArray += NameSeparator } }) @@ -204,7 +201,7 @@ class SchemaExplorer { fieldsToJsonSchemaDefinition(map, fieldObjectName, field.subFields.toList) } if (field.percentageOfParent == 1.0) { - requiredFields.addOne(field.name) + requiredFields += field.name } if (field.fieldTypes.size == 1) { val t = field.fieldTypes.head @@ -346,7 +343,8 @@ class SchemaExplorer { private def convertToBsonPipeline(pipeline: List[PipelineStage]): Seq[Bson] = { val response: Seq[Bson] = pipeline.map(element => { val stage = if (element.stage.startsWith("$")) element.stage else "$" + element.stage - Map(stage -> element.value) + val bson: Bson = Map(stage -> element.value) + bson }) response } @@ -402,7 +400,7 @@ class SchemaExplorer { val newField = SchemaAnalysisField(name.replace(ArrayItemMark, ArrayElementText), fullName, types, fieldCount, percentage, ArrayBuffer()) - parent.subFields.addOne(newField) + parent.subFields.+=(newField) fieldsMap.put(s"$parentName$NameSeparator$name".replace("ROOT.", ""), newField) }) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala index 
614ae986..de2e4406 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala @@ -35,6 +35,7 @@ import java.util.concurrent.TimeUnit import scala.collection.mutable import scala.collection.mutable.ArrayBuffer import scala.jdk.CollectionConverters._ +import scala.util.Try class MongoSqlQueryHolder { private val aggregatePipeline: ArrayBuffer[Document] = ArrayBuffer() @@ -236,7 +237,7 @@ class MongoSqlQueryHolder { } case e: net.sf.jsqlparser.schema.Column => val name = e.getColumnName - name.toIntOption.getOrElse(name.toBooleanOption.getOrElse(name)) + Try(name.toInt).toOption.getOrElse(Try(name.toBoolean).toOption.getOrElse(name)) case _ => throw new IllegalArgumentException("not supported value type") } @@ -320,7 +321,7 @@ class MongoSqlQueryHolder { private def convertSelectStatement(select: Select): Unit = { select.getSelectBody match { case plainSelect: PlainSelect => - val selectItems = Option(plainSelect.getSelectItems).map(_.asScala).getOrElse(List.empty) + val selectItems = Option(plainSelect.getSelectItems).map(_.asScala).getOrElse(List.empty) val maybeDistinct = Option(plainSelect.getDistinct) selectItems.foreach(sI => { @@ -357,7 +358,7 @@ class MongoSqlQueryHolder { aggregatePipeline += Map("$group" -> groupMap) }) if (maybeGroupByElement.isEmpty && keepOneDocument) { - val group = mutable.Map[String, Any]() + val group = mutable.Map[String, Any]() val idGroupMap = mutable.Map() selectItems.foreach { case se: SelectItem[Expression] => val expressionName = se.getExpression.toString diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala index b46d1a33..29ceee04 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala @@ -19,7 +19,9 @@ class SchemaSpec extends 
Specification with Before { schemaJson.contains("\"title\":\"Friends\"") must beTrue schemaJson.contains("\"People\":") must beTrue schemaJson.contains("\"title\":\"People\"") must beTrue - schemaJson.contains("\"_id\":{\"pattern\":\"^([a-fA-F0-9]{2})+$\",\"type\":\"string\"}") must beTrue + val idPattern1 = schemaJson.contains("\"_id\":{\"pattern\":\"^([a-fA-F0-9]{2})+$\",\"type\":\"string\"}") + val idPattern2 = schemaJson.contains("\"_id\":{\"type\":\"string\",\"pattern\":\"^([a-fA-F0-9]{2})+$\"}") + (idPattern1 || idPattern2) must beTrue schemaJson.contains("\"isActive\":{\"type\":\"boolean\"}") must beTrue } From 08e247f60108a1c868309b965f6e35b75ad23f98 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Sun, 20 Oct 2024 21:55:40 +0200 Subject: [PATCH 17/22] test: some errors on rerun tests --- .../mongodb/jdbc/MongoDatabaseMetaData.scala | 6 ++--- .../gridfs/GridfsDatabaseFunctions.scala | 4 +-- .../driver/mongodb/jdbc/ExploreJdbcSpec.scala | 27 +++++-------------- 3 files changed, 11 insertions(+), 26 deletions(-) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala index 01ca0974..7654f8bf 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala @@ -345,9 +345,9 @@ class MongoDatabaseMetaData(connection: MongoJdbcConnection) extends DatabaseMet case "double" => decimalDigits = Some(Int.MaxValue) Types.DOUBLE - case "array" => Types.ARRAY - case "bool" => Types.BOOLEAN - case "object" => Types.JAVA_OBJECT + case "array" => Types.ARRAY + case "bool" => Types.BOOLEAN + case "object" => Types.JAVA_OBJECT case _ => Types.VARCHAR } diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridfsDatabaseFunctions.scala b/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridfsDatabaseFunctions.scala index fbde0e06..573611f9 100644 
--- a/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridfsDatabaseFunctions.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridfsDatabaseFunctions.scala @@ -16,9 +16,9 @@ trait GridfsDatabaseFunctions extends MongoImplicits { def deleteImage(id: ObjectId): Unit = ImageFilesDAO.deleteOne(id) - def dropImages(): Unit = ImageFilesDAO.drop() + def dropImages(): Unit = ImageFilesDAO.drop().result(60) - def imagesCount: Long = ImageFilesDAO.count() + def imagesCount: Long = ImageFilesDAO.count().result(60) def insertImage(path: String, metadata: AnyRef): ObjectId = { val file = File(path) diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSpec.scala index 558b14bf..38430721 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSpec.scala @@ -1,30 +1,15 @@ package dev.mongocamp.driver.mongodb.jdbc -import dev.mongocamp.driver.mongodb.MongoDAO -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -import dev.mongocamp.driver.mongodb.model.{Grade, Score} -import dev.mongocamp.driver.mongodb.test.TestDatabase -import org.bson.types.ObjectId - -import java.sql.{DriverManager, ResultSet, Types} -import java.util.Properties -import scala.collection.mutable.ArrayBuffer -import better.files.{File, Resource} -import dev.mongocamp.driver.mongodb.{GenericObservable, MongoDAO} -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -import dev.mongocamp.driver.mongodb.model.{Grade, Score} -import dev.mongocamp.driver.mongodb.test.TestDatabase -import dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO -import org.bson.types.ObjectId -import org.specs2.mutable.Specification -import org.specs2.specification.{BeforeAll, BeforeEach} +import java.sql.Types class ExploreJdbcSpec extends BaseJdbcSpec { + val schemaPattern: String = "mongocamp-unit-test$" + "Jdbc 
Connection" should { "get table names" in { - val tableNames = connection.getMetaData.getTables("%", "mongocamp-unit-test", "", Array.empty) + val tableNames = connection.getMetaData.getTables("%", schemaPattern, "", Array.empty) var tables = 0 var tablePersonFound = false while (tableNames.next()) { @@ -40,8 +25,8 @@ class ExploreJdbcSpec extends BaseJdbcSpec { tables += 1 } tables must beGreaterThanOrEqualTo(1) - val columnNames = connection.getMetaData.getColumns("%", "mongocamp-unit-test", "people", "") - var columns = 0 + val columnNames = connection.getMetaData.getColumns("%", schemaPattern, "people", "") + var columns = 0 while (columnNames.next()) { columnNames.getString("TABLE_CAT") must beEqualTo("mongodb") columnNames.getString("TABLE_NAME") must beEqualTo("people") From 3948d48029f9389e7b13f0e9961657ef9f4d1fb0 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Sun, 20 Oct 2024 22:03:22 +0200 Subject: [PATCH 18/22] chore: 13 dependency updates for mongodb-driver * ch.qos.logback:logback-classic:test : 1.5.5 -> 1.5.11 * com.github.jsqlparser:jsqlparser : 4.9 -> 5.0 * com.github.luben:zstd-jni:provided : 1.5.6-2 -> 1.5.6-6 * io.circe:circe-core : 0.14.6 -> 0.14.10 * io.circe:circe-generic : 0.14.6 -> 0.14.10 * io.circe:circe-parser : 0.14.6 -> 0.14.10 * joda-time:joda-time : 2.12.7 -> 2.13.0 * org.apache.lucene:lucene-queryparser : 9.10.0 -> 10.0.0 * org.liquibase:liquibase-core:test : 4.28.0 -> 4.29.2 * org.mongodb.scala:mongo-scala-driver : 5.0.1 -> 5.2.0 * org.scala-lang:scala-library : 2.12.20 -> 2.13.15 * org.specs2:specs2-core:test : 4.20.5 -> 4.20.9 * org.xerial.snappy:snappy-java:provided : 1.1.10.5 -> 1.1.10.7 --- build.sbt | 22 +++++++++---------- .../mongodb/lucene/LuceneQueryConverter.scala | 12 +++++----- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/build.sbt b/build.sbt index 137c0e55..cffa1d25 100644 --- a/build.sbt +++ b/build.sbt @@ -42,7 +42,7 @@ licenses += ("Apache-2.0", 
url("https://www.apache.org/licenses/LICENSE-2.0.html crossScalaVersions := Seq("2.13.15", "2.12.20") -scalaVersion := crossScalaVersions.value.last +scalaVersion := crossScalaVersions.value.head scalacOptions += "-deprecation" @@ -61,13 +61,13 @@ resolvers += "Sonatype OSS Snapshots".at("https://oss.sonatype.org/content/repos // Test -libraryDependencies += "org.specs2" %% "specs2-core" % "4.20.5" % Test +libraryDependencies += "org.specs2" %% "specs2-core" % "4.20.9" % Test -libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.5.5" % Test +libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.5.11" % Test -libraryDependencies += "joda-time" % "joda-time" % "2.12.7" +libraryDependencies += "joda-time" % "joda-time" % "2.13.0" -val circeVersion = "0.14.6" +val circeVersion = "0.14.10" libraryDependencies ++= Seq( "io.circe" %% "circe-core", @@ -75,13 +75,13 @@ libraryDependencies ++= Seq( "io.circe" %% "circe-parser" ).map(_ % circeVersion) -libraryDependencies += "org.mongodb.scala" %% "mongo-scala-driver" % "5.0.1" +libraryDependencies += "org.mongodb.scala" %% "mongo-scala-driver" % "5.2.0" -libraryDependencies += "org.xerial.snappy" % "snappy-java" % "1.1.10.5" % Provided +libraryDependencies += "org.xerial.snappy" % "snappy-java" % "1.1.10.7" % Provided -libraryDependencies += "com.github.luben" % "zstd-jni" % "1.5.6-2" % Provided +libraryDependencies += "com.github.luben" % "zstd-jni" % "1.5.6-6" % Provided -libraryDependencies += "org.apache.lucene" % "lucene-queryparser" % "9.10.0" +libraryDependencies += "org.apache.lucene" % "lucene-queryparser" % "10.0.0" val MongoJavaServerVersion = "1.45.0" @@ -99,9 +99,9 @@ libraryDependencies += "org.scala-lang.modules" %% "scala-collection-compat" % " libraryDependencies += "com.vdurmont" % "semver4j" % "3.1.0" -libraryDependencies += "com.github.jsqlparser" % "jsqlparser" % "4.9" +libraryDependencies += "com.github.jsqlparser" % "jsqlparser" % "5.0" -libraryDependencies += "org.liquibase" % 
"liquibase-core" % "4.28.0" % Test +libraryDependencies += "org.liquibase" % "liquibase-core" % "4.29.2" % Test buildInfoPackage := "dev.mongocamp.driver.mongodb" diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/lucene/LuceneQueryConverter.scala b/src/main/scala/dev/mongocamp/driver/mongodb/lucene/LuceneQueryConverter.scala index 2d4227ac..f0d4766f 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/lucene/LuceneQueryConverter.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/lucene/LuceneQueryConverter.scala @@ -58,21 +58,21 @@ object LuceneQueryConverter extends LazyLogging { val listOfOr = ArrayBuffer[Map[String, Any]]() var nextTypeAnd = true subQueries.foreach(c => { - val queryMap = getMongoDbSearchMap(c.getQuery, c.isProhibited, searchWithValueAndString) + val queryMap = getMongoDbSearchMap(c.query(), c.isProhibited, searchWithValueAndString) var thisTypeAnd = true - if (c.getOccur == Occur.MUST) { + if (c.occur == Occur.MUST) { thisTypeAnd = true } - else if (c.getOccur == Occur.SHOULD) { + else if (c.occur == Occur.SHOULD) { thisTypeAnd = false } - else if (c.getOccur == Occur.MUST_NOT) { + else if (c.occur == Occur.MUST_NOT) { // searchMapResponse ++= queryMap } else { - logger.error(s"Unexpected Occur <${c.getOccur.name()}>") - throw new NotSupportedException(s"${c.getOccur.name()} currently not supported") + logger.error(s"Unexpected Occur <${c.occur.name()}>") + throw new NotSupportedException(s"${c.occur.name()} currently not supported") } if (nextTypeAnd && thisTypeAnd) { From 022cc63ed9fa5eaae628c2502b25c140bda7705e Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Mon, 21 Oct 2024 21:49:25 +0200 Subject: [PATCH 19/22] feat: refactor for jsqlparser 5.0 --- .../mongodb/sql/MongoSqlQueryHolder.scala | 33 +++++++++---------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala 
index de2e4406..80cb8998 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala @@ -6,12 +6,12 @@ import dev.mongocamp.driver.mongodb.database.DatabaseProvider import dev.mongocamp.driver.mongodb.database.DatabaseProvider.CollectionSeparator import dev.mongocamp.driver.mongodb.exception.SqlCommandNotSupportedException import dev.mongocamp.driver.mongodb.sql.SQLCommandType.SQLCommandType -import net.sf.jsqlparser.expression.operators.conditional.{ AndExpression, OrExpression } +import net.sf.jsqlparser.expression.operators.conditional.{AndExpression, OrExpression} import net.sf.jsqlparser.expression.operators.relational._ -import net.sf.jsqlparser.expression.{ Expression, Parenthesis } -import net.sf.jsqlparser.parser.{ CCJSqlParser, StreamProvider } +import net.sf.jsqlparser.expression.{Expression, Parenthesis} +import net.sf.jsqlparser.parser.{CCJSqlParser, StreamProvider} import net.sf.jsqlparser.schema.Table -import net.sf.jsqlparser.statement.UnsupportedStatement +import net.sf.jsqlparser.statement.{ShowStatement, UnsupportedStatement} import net.sf.jsqlparser.statement.alter.Alter import net.sf.jsqlparser.statement.create.index.CreateIndex import net.sf.jsqlparser.statement.create.table.CreateTable @@ -19,7 +19,7 @@ import net.sf.jsqlparser.statement.delete.Delete import net.sf.jsqlparser.statement.drop.Drop import net.sf.jsqlparser.statement.execute.Execute import net.sf.jsqlparser.statement.insert.Insert -import net.sf.jsqlparser.statement.select.{ FromItem, PlainSelect, Select, SelectItem } +import net.sf.jsqlparser.statement.select.{FromItem, PlainSelect, Select, SelectItem} import net.sf.jsqlparser.statement.show.ShowTablesStatement import net.sf.jsqlparser.statement.truncate.Truncate import net.sf.jsqlparser.statement.update.Update @@ -27,7 +27,7 @@ import org.bson.conversions.Bson import org.h2.command.ddl.AlterTable import 
org.mongodb.scala.model.IndexOptions import org.mongodb.scala.model.Sorts.ascending -import org.mongodb.scala.{ Document, Observable, SingleObservable } +import org.mongodb.scala.{Document, Observable, SingleObservable} import java.sql.SQLException import java.util.Date @@ -112,15 +112,14 @@ class MongoSqlQueryHolder { else if (classOf[Alter].isAssignableFrom(statement.getClass)) { sqlCommandType = SQLCommandType.AlterTable } - else if (classOf[UnsupportedStatement].isAssignableFrom(statement.getClass)) { - val unsupportedStatement = statement.asInstanceOf[UnsupportedStatement] - val isShowDatabases = unsupportedStatement.toString.toLowerCase.contains("show databases") - val isShowSchemas = unsupportedStatement.toString.toLowerCase.contains("show schemas") - if (isShowDatabases | isShowSchemas) { - sqlCommandType = SQLCommandType.ShowDatabases - } - else { - throw new SqlCommandNotSupportedException(s"not supported sql command type <${statement.getClass.getSimpleName}>") + else if (classOf[ShowStatement].isAssignableFrom(statement.getClass)) { + val unsupportedStatement = statement.asInstanceOf[ShowStatement] + unsupportedStatement.getName.trim.toUpperCase match { + case "DATABASES" => + sqlCommandType = SQLCommandType.ShowDatabases + case "SCHEMAS" => + sqlCommandType = SQLCommandType.ShowDatabases + case _ => } } else { @@ -289,8 +288,8 @@ class MongoSqlQueryHolder { parseWhere(e.getLeftExpression, left) parseWhere(e.getRightExpression, right) queryMap.put("$and", List(left, right)) - case e: Parenthesis => - parseWhere(e.getExpression, queryMap) + case e: ParenthesedExpressionList[Expression] => + e.asScala.foreach(ex => parseWhere(ex, queryMap)) case e: InExpression => val value = e.getRightExpression match { case l: ParenthesedExpressionList[Expression] => l.asScala.map(convertValue) From 6b9dca26eea3f40643b221992c026df948ac1625 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Tue, 22 Oct 2024 08:34:16 +0200 Subject: [PATCH 20/22] docs: documentation for jdbc 
and the dependencies for it (schema analysis and sql execution) --- build.sbt | 15 +-- changelog/config.js | 95 ---------------- changelog/header.hbs | 9 -- docs/.vitepress/config.ts | 14 ++- .../collection/analyse-schema.md | 18 ++++ docs/documentation/sql/index.md | 5 + docs/documentation/sql/jdbc-driver.md | 16 +++ docs/documentation/sql/queryholder.md | 24 +++++ docs/index.md | 4 +- package.json | 44 ++++---- .../statement/MongoPreparedStatement.scala | 2 +- .../mongodb/sql/MongoSqlQueryHolder.scala | 102 +++++++++--------- .../driver/mongodb/jdbc/BaseJdbcSpec.scala | 3 + .../driver/mongodb/schema/SchemaSpec.scala | 27 +++-- .../driver/mongodb/sql/SelectSqlSpec.scala | 21 ++++ 15 files changed, 205 insertions(+), 194 deletions(-) delete mode 100644 changelog/config.js delete mode 100644 changelog/header.hbs create mode 100644 docs/documentation/collection/analyse-schema.md create mode 100644 docs/documentation/sql/index.md create mode 100644 docs/documentation/sql/jdbc-driver.md create mode 100644 docs/documentation/sql/queryholder.md diff --git a/build.sbt b/build.sbt index cffa1d25..e6dd2963 100644 --- a/build.sbt +++ b/build.sbt @@ -75,20 +75,21 @@ libraryDependencies ++= Seq( "io.circe" %% "circe-parser" ).map(_ % circeVersion) -libraryDependencies += "org.mongodb.scala" %% "mongo-scala-driver" % "5.2.0" - -libraryDependencies += "org.xerial.snappy" % "snappy-java" % "1.1.10.7" % Provided - -libraryDependencies += "com.github.luben" % "zstd-jni" % "1.5.6-6" % Provided - -libraryDependencies += "org.apache.lucene" % "lucene-queryparser" % "10.0.0" +libraryDependencies += "org.mongodb.scala" %% "mongo-scala-driver" % "5.1.4" +// MongoDB 5.2.0 not supported for de.bwaldvogel -> https://github.com/bwaldvogel/mongo-java-server/issues/233 val MongoJavaServerVersion = "1.45.0" libraryDependencies += "de.bwaldvogel" % "mongo-java-server" % MongoJavaServerVersion % Provided libraryDependencies += "de.bwaldvogel" % "mongo-java-server-h2-backend" % 
MongoJavaServerVersion % Provided +libraryDependencies += "org.xerial.snappy" % "snappy-java" % "1.1.10.7" % Provided + +libraryDependencies += "com.github.luben" % "zstd-jni" % "1.5.6-6" % Provided + +libraryDependencies += "org.apache.lucene" % "lucene-queryparser" % "10.0.0" + libraryDependencies += "com.github.pathikrit" %% "better-files" % "3.9.2" libraryDependencies += "com.typesafe" % "config" % "1.4.3" diff --git a/changelog/config.js b/changelog/config.js deleted file mode 100644 index 0f5f9b02..00000000 --- a/changelog/config.js +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env node -'use strict' - -const writerOpts = { - transform: (commit, context) => { - context.commit = 'commit'; - - const issues = []; - - commit.notes.forEach(note => { - note.title = `BREAKING CHANGES`; - }); - - if (commit.type === `feat`) { - commit.type = `Features`; - } else if (commit.type === `fix`) { - commit.type = `Bug Fixes`; - } else if (commit.type === `perf`) { - commit.type = `Performance Improvements`; - } else if (commit.type === `revert`) { - commit.type = `Reverts`; - } else if (commit.type === `refactor`) { - commit.type = `Code Refactoring`; - } else if (commit.type === `chore`) { - commit.type = `Maintenance`; - } else { - return; - } - if (commit.scope === `*`) { - commit.scope = ``; - } - - if (typeof commit.hash === `string`) { - commit.shortHash = commit.hash.substring(0, 7); - } - - if (typeof commit.subject === `string`) { - let url = `${context.packageData.bugs.url}/`; - if (url) { - // Issue URLs. - commit.subject = commit.subject.replace(/#([0-9]+)/g, (_, issue) => { - issues.push(issue); - return `[#${issue}](${url}${issue})`; - }); - } - if (context.host) { - // User URLs. 
- commit.subject = commit.subject.replace(/\B@([a-z0-9](?:-?[a-z0-9/]){0,38})/g, (_, username) => { - if (username.includes('/')) { - return `@${username}`; - } - - return `[@${username}](${context.host}/${username})`; - }); - } - } - - // remove references that already appear in the subject - commit.references = commit.references.filter(reference => { - return issues.indexOf(reference.issue) === -1; - }); - return commit; - }, - groupBy: `type`, - commitGroupsSort: `title`, - commitsSort: [`scope`, `subject`], - noteGroupsSort: `title` - -}; - - -var fs = require('fs'), path = require('path'), filePath = path.join(__dirname, 'commit.hbs'); - -fs.readFile(filePath, {encoding: 'utf-8'}, function (err, data) { - if (!err) { - writerOpts.commitPartial = data; - } else { - console.log(err); - } -}); - -var fs2 = require('fs'), path = require('path'), filePath2 = path.join(__dirname, 'header.hbs'); - -fs2.readFile(filePath2, {encoding: 'utf-8'}, function (err, data) { - if (!err) { - writerOpts.headerPartial = data; - } else { - console.log(err); - } -}); - -module.exports = { - writerOpts: writerOpts, - commit: "commit" -}; diff --git a/changelog/header.hbs b/changelog/header.hbs deleted file mode 100644 index d7efc8fb..00000000 --- a/changelog/header.hbs +++ /dev/null @@ -1,9 +0,0 @@ -## {{#if @root.linkCompare~}} - [{{version}}]({{compareUrlFormat}}) -{{~else}} - {{~version}} -{{~/if}} -{{~#if title}} "{{title}}" -{{~/if}} -{{~#if date}} ({{date}}) -{{/if}} diff --git a/docs/.vitepress/config.ts b/docs/.vitepress/config.ts index 316659c9..78283ab3 100644 --- a/docs/.vitepress/config.ts +++ b/docs/.vitepress/config.ts @@ -60,7 +60,8 @@ function nav() { {text: 'MongoDAO', link: '/documentation/mongo-dao/'}, {text: 'GridFsDAO', link: '/documentation/gridfs-dao/'}, {text: 'Collection', link: '/documentation/collection/'}, - {text: 'LocalServer', link: '/documentation/local-server'} + {text: 'LocalServer', link: '/documentation/local-server'}, + {text: 'SQL', link: 
'/documentation/sql'} ] }, { @@ -128,9 +129,18 @@ function sidebarDocumentation() { items: [ {text: 'Introduction', link: '/documentation/collection/'}, {text: 'Aggregation', link: '/documentation/collection/aggregation'}, - {text: 'Pagination', link: '/documentation/collection/pagination'} + {text: 'Pagination', link: '/documentation/collection/pagination'}, + {text: 'Schema analyse', link: '/documentation/collection/analyse-schema'}, ] }, + { + text: 'SQL', items: [ + {text: 'Query Holder', link: '/documentation/sql/queryholder'}, + {text: 'JDBC driver', link: '/documentation/sql/jdbc-driver'}, + ], + collapsible: true, + collapsed: true, + }, { text: 'LocalServer', link: '/documentation/local-server' diff --git a/docs/documentation/collection/analyse-schema.md b/docs/documentation/collection/analyse-schema.md new file mode 100644 index 00000000..6910c041 --- /dev/null +++ b/docs/documentation/collection/analyse-schema.md @@ -0,0 +1,18 @@ +# Analyse Schema +The driver supports an automated detection of the schema of an existing collection. The schema is used to detect the types of the columns. + +## Usage + +### Schema Analysis +Analyse a collection to detect the values for each field and the percentage distribution of the types. + +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala#schema-analysis + +### Detect Schema +The Schema Detector can be used to detect the schema of a collection and is based on [Schema Analysis](analyse-schema.md#schema-analysis). The schema is used to detect the types of the columns and generate a [JSON Schema](https://json-schema.org) for the collection. If a field has multiple types, the JSON Schema generation uses the type with the most elements. + +:::tip +The [JSON Schema](https://json-schema.org) format can be used to validate or generate data, as well as to secure your [Mongo Collection](https://www.mongodb.com/docs/manual/core/schema-validation/). 
+::: + +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala#schema-explorer \ No newline at end of file diff --git a/docs/documentation/sql/index.md b/docs/documentation/sql/index.md new file mode 100644 index 00000000..f5a2ba6e --- /dev/null +++ b/docs/documentation/sql/index.md @@ -0,0 +1,5 @@ +# SQL Support + +Since Version 2.7.1 the driver supports [SQL queries](queryholder.md) on MongoDB. The SQL queries are converted to MongoDB queries and can be executed on the MongoDB database. + +The driver also supports a [JDBC driver](jdbc-driver.md) to use the SQL queries in your application and migrate your database with Liquibase, for example. \ No newline at end of file diff --git a/docs/documentation/sql/jdbc-driver.md b/docs/documentation/sql/jdbc-driver.md new file mode 100644 index 00000000..ac6cf60c --- /dev/null +++ b/docs/documentation/sql/jdbc-driver.md @@ -0,0 +1,16 @@ +# JDBC driver + +The JDBC driver is a way to use the SQL queries in your application and run them like a 'normal' SQL database. The driver is based on the [MongoSqlQueryHolder](queryholder.md) to convert the SQL query to a Mongo query and execute it on the MongoDB database. + +## Usage + +### Register Driver +In some environments you have to register the driver manually. This is the case for example in the tests. + +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSpec.scala#register-driver + +After the driver is registered you can use the driver like a normal [JDBC driver](https://www.baeldung.com/java-jdbc). + +:::tip +Most standard SQL statements are supported, but because of the differences between MongoDB and SQL, the driver can't support SQL statements with subselects. 
+::: \ No newline at end of file diff --git a/docs/documentation/sql/queryholder.md b/docs/documentation/sql/queryholder.md new file mode 100644 index 00000000..f33ebddd --- /dev/null +++ b/docs/documentation/sql/queryholder.md @@ -0,0 +1,24 @@ +# SQL Converter to Mongo Query + +The MongoSqlQueryHolder provides a way to convert a SQL query to a Mongo query and execute it on a Mongo database. + +## Usage + +Initialize the query holder with the SQL query you want to analyse or execute. + +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala#initialize-query-holder + +In most cases you simply want to run the query and get the result as a `Seq[Document]`. +::: tip +The method `run` returns a classical MongoDB Observable; use the implicits to convert it to a `Seq[Document]`. +::: +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala#query-holder-run + +You can also get the information about the collection and the keys that are used in the query. + +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala#extract-collection +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala#select-keys + +In some cases you need the information about the function calls in the query, for example in your own [jdbc driver](jdbc-driver.md) implementation. Because of the differences between MongoDB and SQL, a SQL query such as `select count(*) from empty-collection` returns a list of documents with one element, whereas MongoDB itself returns no document at all. 
+ +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala#has-function-call diff --git a/docs/index.md b/docs/index.md index 0f1a34b4..4e3796d0 100644 --- a/docs/index.md +++ b/docs/index.md @@ -14,11 +14,13 @@ hero: link: /documentation/getting-started - theme: alt text: View on GitHub - link: https://github.com/vuejs/vitepress + link: https://github.com/MongoCamp/mongodb-driver features: - title: Easy Config details: Easy Database Config with provider and MongoConfig + - title: SQL Support + details: Support for SQL Queries on MongoDB and JDBC Driver - title: DAO Pattern details: Implement the DAO Pattern for simple MongoDB usage [MongoDAO. - title: Pagination diff --git a/package.json b/package.json index a4d6f689..05ec5565 100644 --- a/package.json +++ b/package.json @@ -1,28 +1,28 @@ { - "name" : "mongodb-driver", - "organization" : "dev.mongocamp", - "version" : "2.7.1.snapshot", - "author" : "info@mongocamp.dev", - "license" : "Apache-2.0", - "type" : "module", - "repository" : { - "type" : "git", - "url" : "git+https://github.com/MongoCamp/mongodb-driver.git" + "name": "mongodb-driver", + "organization": "dev.mongocamp", + "version": "2.8.0", + "author": "info@mongocamp.dev", + "license": "Apache-2.0", + "type": "module", + "repository": { + "type": "git", + "url": "git+https://github.com/MongoCamp/mongodb-driver.git" }, - "bugs" : { - "url" : "https://github.com/MongoCamp/mongodb-driver/issues" + "bugs": { + "url": "https://github.com/MongoCamp/mongodb-driver/issues" }, - "homepage" : "https://mongodb-driver.mongocamp.dev/", - "scripts" : { - "docs:serve" : "vitepress serve docs --port 5555", - "docs:build" : "pnpm docs:external; vitepress build docs", - "docs:external" : "sh docs/external/fileloader.sh", - "docs:dev" : "pnpm docs:external; vitepress dev docs" + "homepage": "https://mongodb-driver.mongocamp.dev/", + "scripts": { + "docs:serve": "vitepress serve docs --port 5555", + "docs:build": "pnpm docs:external; vitepress build 
docs", + "docs:external": "sh docs/external/fileloader.sh", + "docs:dev": "pnpm docs:external; vitepress dev docs" }, - "devDependencies" : { - "@iconify-json/fluent-emoji" : "^1.1.18", - "@unocss/preset-icons" : "^0.58.5", - "unocss" : "^0.58.5", - "vitepress" : "1.0.0-rc.45" + "devDependencies": { + "@iconify-json/fluent-emoji": "^1.2.1", + "@unocss/preset-icons": "^0.63.4", + "unocss": "^0.63.4", + "vitepress": "1.4.1" } } \ No newline at end of file diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/statement/MongoPreparedStatement.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/statement/MongoPreparedStatement.scala index a6f2e9ac..81f5797c 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/statement/MongoPreparedStatement.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/statement/MongoPreparedStatement.scala @@ -59,7 +59,7 @@ case class MongoPreparedStatement(connection: MongoJdbcConnection) extends Calla new MongoDbResultSet(null, List.empty, 0) } else { var response = queryHolder.run(connection.getDatabaseProvider).results(getQueryTimeout) - if (response.isEmpty && queryHolder.selectFunctionCall) { + if (response.isEmpty && queryHolder.hasFunctionCallInSelect) { val emptyDocument = mutable.Map[String, Any]() queryHolder.getKeysForEmptyDocument.foreach(key => { emptyDocument.put(key, null) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala index 80cb8998..268e2148 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala @@ -213,7 +213,7 @@ class MongoSqlQueryHolder { def getKeysForEmptyDocument: Set[String] = keysForEmptyDocument.toSet - def selectFunctionCall: Boolean = keepOneDocument + def hasFunctionCallInSelect: Boolean = keepOneDocument private def getUpdateOrDeleteFilter: Bson = { 
updateOrDeleteFilter.getOrElse(Map.empty).toMap @@ -330,56 +330,6 @@ class MongoSqlQueryHolder { }) val aliasList = ArrayBuffer[String]() sqlCommandType = SQLCommandType.Select - val maybeGroupByElement = Option(plainSelect.getGroupBy) - maybeGroupByElement.foreach(gbEl => { - val groupBy = gbEl.getGroupByExpressionList.getExpressions.asScala.map(_.toString).toList - val groupId = mutable.Map[String, Any]() - val group = mutable.Map[String, Any]() - groupBy.foreach(g => groupId += g -> ("$" + g)) - selectItems.foreach { case e: SelectItem[Expression] => - val expressionName = e.getExpression.toString - if (expressionName.toLowerCase().contains("count")) { - group += expressionName -> Map("$sum" -> 1) - } - else { - if (!groupBy.contains(expressionName)) { - val espr = expressionName.split('(').map(_.trim.replace(")", "")).map(s => ("$" + s)) - if (espr.head.equalsIgnoreCase("max")) { - group += expressionName -> Map(espr.head -> espr.last) - } - else { - group += expressionName -> Map(espr.head -> espr.last) - } - } - } - } - val groupMap = Map("_id" -> groupId) ++ group.toMap ++ groupId.keys.map(s => s -> Map("$first" -> ("$" + s))).toMap - aggregatePipeline += Map("$group" -> groupMap) - }) - if (maybeGroupByElement.isEmpty && keepOneDocument) { - val group = mutable.Map[String, Any]() - val idGroupMap = mutable.Map() - selectItems.foreach { case se: SelectItem[Expression] => - val expressionName = se.getExpression.toString - if (expressionName.toLowerCase().contains("count")) { - group += expressionName -> Map("$sum" -> 1) - } - else { - val espr = expressionName.split('(').map(_.trim.replace(")", "")).map(s => ("$" + s)) - val functionName: String = espr.head.toLowerCase match { - case "$max" => "$last" - case "$min" => "$first" - case _ => espr.head - } - val expression = if (functionName.equalsIgnoreCase(espr.last)) Map("$first" -> espr.last) else Map(functionName -> espr.last) - group += expressionName -> expression - } - keysForEmptyDocument += 
Option(se.getAlias).map(_.getName).getOrElse(expressionName) - } - - val groupMap = Map("_id" -> idGroupMap) ++ group.toMap - aggregatePipeline += Map("$group" -> groupMap) - } def convertFromItemToTable(fromItem: FromItem): Table = { val tableName = Option(fromItem.getAlias).map(a => fromItem.toString.replace(a.toString, "")).getOrElse(fromItem).toString new Table(tableName) @@ -441,6 +391,56 @@ class MongoSqlQueryHolder { "$match" -> filterQuery ) } + val maybeGroupByElement = Option(plainSelect.getGroupBy) + maybeGroupByElement.foreach(gbEl => { + val groupBy = gbEl.getGroupByExpressionList.getExpressions.asScala.map(_.toString).toList + val groupId = mutable.Map[String, Any]() + val group = mutable.Map[String, Any]() + groupBy.foreach(g => groupId += g -> ("$" + g)) + selectItems.foreach { case e: SelectItem[Expression] => + val expressionName = e.getExpression.toString + if (expressionName.toLowerCase().contains("count")) { + group += expressionName -> Map("$sum" -> 1) + } + else { + if (!groupBy.contains(expressionName)) { + val espr = expressionName.split('(').map(_.trim.replace(")", "")).map(s => ("$" + s)) + if (espr.head.equalsIgnoreCase("max")) { + group += expressionName -> Map(espr.head -> espr.last) + } + else { + group += expressionName -> Map(espr.head -> espr.last) + } + } + } + } + val groupMap = Map("_id" -> groupId) ++ group.toMap ++ groupId.keys.map(s => s -> Map("$first" -> ("$" + s))).toMap + aggregatePipeline += Map("$group" -> groupMap) + }) + if (maybeGroupByElement.isEmpty && keepOneDocument) { + val group = mutable.Map[String, Any]() + val idGroupMap = mutable.Map() + selectItems.foreach { case se: SelectItem[Expression] => + val expressionName = se.getExpression.toString + if (expressionName.toLowerCase().contains("count")) { + group += expressionName -> Map("$sum" -> 1) + } + else { + val espr = expressionName.split('(').map(_.trim.replace(")", "")).map(s => ("$" + s)) + val functionName: String = espr.head.toLowerCase match { + case 
"$max" => "$last" + case "$min" => "$first" + case _ => espr.head + } + val expression = if (functionName.equalsIgnoreCase(espr.last)) Map("$first" -> espr.last) else Map(functionName -> espr.last) + group += expressionName -> expression + } + keysForEmptyDocument += Option(se.getAlias).map(_.getName).getOrElse(expressionName) + } + + val groupMap = Map("_id" -> idGroupMap) ++ group.toMap + aggregatePipeline += Map("$group" -> groupMap) + } Option(plainSelect.getOrderByElements).foreach { orderBy => aggregatePipeline += Map( "$sort" -> orderBy.asScala.map(e => e.getExpression.toString -> (if (e.isAsc) 1 else -1)).toMap diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSpec.scala index 4bfdcff0..8dac6347 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSpec.scala @@ -12,6 +12,7 @@ class BaseJdbcSpec extends PersonSpecification { override def beforeAll(): Unit = { super.beforeAll() + // #region register-driver val connectionProps = new Properties() val driver = new MongoJdbcDriver() DriverManager.registerDriver(driver) @@ -19,5 +20,7 @@ class BaseJdbcSpec extends PersonSpecification { "jdbc:mongodb://localhost:27017/mongocamp-unit-test?retryWrites=true&loadBalanced=false&serverSelectionTimeoutMS=5000&connectTimeoutMS=10000", connectionProps ) + // #endregion register-driver } + } diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala index 29ceee04..afdc6616 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala @@ -5,15 +5,35 @@ import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.relation.RelationDemoDatabase._ import org.specs2.mutable.{Before, Specification} +import 
scala.util.Try + class SchemaSpec extends Specification with Before { sequential "Schema" should { + "analyse Json Schema from document dao" in { + // #region schema-analysis + val schemaExplorer = new SchemaExplorer() + val schemaAnalysis = schemaExplorer.analyzeSchema(PersonDocumentDAO) + // #endregion schema-analysis + schemaJson.contains("\"$schema\":\"https://json-schema.org/draft/2020-12/schema\"") must beTrue + schemaJson.contains("\"Friends\":") must beTrue + schemaJson.contains("\"title\":\"Friends\"") must beTrue + schemaJson.contains("\"People\":") must beTrue + schemaJson.contains("\"title\":\"People\"") must beTrue + val idPattern1 = schemaJson.contains("\"_id\":{\"pattern\":\"^([a-fA-F0-9]{2})+$\",\"type\":\"string\"}") + val idPattern2 = schemaJson.contains("\"_id\":{\"type\":\"string\",\"pattern\":\"^([a-fA-F0-9]{2})+$\"}") + (idPattern1 || idPattern2) must beTrue + schemaJson.contains("\"isActive\":{\"type\":\"boolean\"}") must beTrue + } + "detect Json Schema from document dao" in { + // #region schema-explorer val schemaExplorer = new SchemaExplorer() val schema = schemaExplorer.detectSchema(PersonDocumentDAO) val schemaJson = schema.toJson + // #endregion schema-explorer schemaJson.contains("\"$schema\":\"https://json-schema.org/draft/2020-12/schema\"") must beTrue schemaJson.contains("\"Friends\":") must beTrue schemaJson.contains("\"title\":\"Friends\"") must beTrue @@ -28,16 +48,11 @@ class SchemaSpec extends Specification with Before { } override def before: Any = { - - try { + Try { UserDAO.drop().result() LoginDAO.drop().result() SimplePersonDAO.drop().result() } - catch { - case e: Exception => - } - val personList = PersonDAO.find().resultList() personList.foreach { person => UserDAO.insertOne(User(person.id, person.name, person.guid)).result() diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala index 63c539e1..32fd1be3 100644 --- 
a/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala @@ -2,7 +2,9 @@ package dev.mongocamp.driver.mongodb.sql import dev.mongocamp.driver.mongodb.GenericObservable import dev.mongocamp.driver.mongodb.dao.PersonSpecification +import dev.mongocamp.driver.mongodb.lucene.LuceneQueryConverter import dev.mongocamp.driver.mongodb.test.TestDatabase +import dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO class SelectSqlSpec extends PersonSpecification { @@ -48,6 +50,25 @@ class SelectSqlSpec extends PersonSpecification { selectResponse.head.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" } + "and with count" in { + // #region initialize-query-holder + val queryConverter = MongoSqlQueryHolder("select count(*) as anz from people where age < 30 and (age < 30 or age > 30) order by id asc") + // #endregion initialize-query-holder + // #region query-holder-run + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + // #endregion query-holder-run + selectResponse.head.getInteger("anz") mustEqual 99 + // #region extract-collection + queryConverter.getCollection mustEqual "people" + // #endregion extract-collection + // #region select-keys + queryConverter.getKeysForEmptyDocument mustEqual Set("anz") + // #endregion select-keys + // #region has-function-call + queryConverter.hasFunctionCallInSelect mustEqual true + // #endregion has-function-call + } + "simple select all sql" in { val queryConverter = MongoSqlQueryHolder("select * from people where age < 30 order by id asc") val selectResponse = queryConverter.run(TestDatabase.provider).resultList() From d2f9727f78d74409ab2a2ea0d84df87b96bdc4d0 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Tue, 22 Oct 2024 08:56:47 +0200 Subject: [PATCH 21/22] fix: fix not compiling test for schema --- .../driver/mongodb/schema/SchemaSpec.scala | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 
deletions(-) diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala index afdc6616..e548be1c 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala @@ -1,9 +1,9 @@ package dev.mongocamp.driver.mongodb.schema -import dev.mongocamp.driver.mongodb.test.TestDatabase.{PersonDAO, PersonDocumentDAO} +import dev.mongocamp.driver.mongodb.test.TestDatabase.{ PersonDAO, PersonDocumentDAO } import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.relation.RelationDemoDatabase._ -import org.specs2.mutable.{Before, Specification} +import org.specs2.mutable.{ Before, Specification } import scala.util.Try @@ -14,25 +14,25 @@ class SchemaSpec extends Specification with Before { "Schema" should { "analyse Json Schema from document dao" in { // #region schema-analysis - val schemaExplorer = new SchemaExplorer() + val schemaExplorer = new SchemaExplorer() val schemaAnalysis = schemaExplorer.analyzeSchema(PersonDocumentDAO) // #endregion schema-analysis - schemaJson.contains("\"$schema\":\"https://json-schema.org/draft/2020-12/schema\"") must beTrue - schemaJson.contains("\"Friends\":") must beTrue - schemaJson.contains("\"title\":\"Friends\"") must beTrue - schemaJson.contains("\"People\":") must beTrue - schemaJson.contains("\"title\":\"People\"") must beTrue - val idPattern1 = schemaJson.contains("\"_id\":{\"pattern\":\"^([a-fA-F0-9]{2})+$\",\"type\":\"string\"}") - val idPattern2 = schemaJson.contains("\"_id\":{\"type\":\"string\",\"pattern\":\"^([a-fA-F0-9]{2})+$\"}") - (idPattern1 || idPattern2) must beTrue - schemaJson.contains("\"isActive\":{\"type\":\"boolean\"}") must beTrue + schemaAnalysis.count must beEqualTo(200) + schemaAnalysis.sample must beEqualTo(200) + schemaAnalysis.percentageOfAnalysed must beEqualTo(1.0) + schemaAnalysis.fields.size must beEqualTo(20) + 
val idField = schemaAnalysis.fields.head + idField.name must beEqualTo("_id") + idField.fieldTypes.head.fieldType must beEqualTo("objectId") + idField.fieldTypes.head.count must beEqualTo(200) + idField.fieldTypes.head.percentageOfParent must beEqualTo(1.0) } "detect Json Schema from document dao" in { // #region schema-explorer - val schemaExplorer = new SchemaExplorer() - val schema = schemaExplorer.detectSchema(PersonDocumentDAO) - val schemaJson = schema.toJson + val schemaExplorer = new SchemaExplorer() + val schema = schemaExplorer.detectSchema(PersonDocumentDAO) + val schemaJson = schema.toJson // #endregion schema-explorer schemaJson.contains("\"$schema\":\"https://json-schema.org/draft/2020-12/schema\"") must beTrue schemaJson.contains("\"Friends\":") must beTrue From 51aa92708c5aa5585c1925b6c5d97f5adb9bcbd7 Mon Sep 17 00:00:00 2001 From: QuadStingray Date: Tue, 22 Oct 2024 08:58:39 +0200 Subject: [PATCH 22/22] refactor(java): drop java 11 and 19 support because `org.apache.lucene.queryparser` has dropped it BREAKING CHANGE: Lucene Query Parser has dropped Java 11 and 19 and only support Java 21 Support --- .github/workflows/main_test_release.yml | 2 +- .github/workflows/other_test.yml | 44 ++++++++++++------------- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/.github/workflows/main_test_release.yml b/.github/workflows/main_test_release.yml index 0b2df28a..a887da59 100644 --- a/.github/workflows/main_test_release.yml +++ b/.github/workflows/main_test_release.yml @@ -14,7 +14,7 @@ jobs: strategy: matrix: mongodb-version: ['4.4', '5.0', '6.0', '7.0'] - java: [ '11', '17' ] + java: [ '21', '23' ] steps: - uses: actions/checkout@main - name: Setup TimeZone diff --git a/.github/workflows/other_test.yml b/.github/workflows/other_test.yml index a5fe1482..1e703bf2 100644 --- a/.github/workflows/other_test.yml +++ b/.github/workflows/other_test.yml @@ -13,26 +13,26 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - mongodb-version: ['4.4', 
'5.0', '6.0', '7.0'] - java: [ '11', '17' ] + mongodb-version: [ '4.4', '5.0', '6.0', '7.0' ] + java: [ '21', '23' ] steps: - - uses: actions/checkout@main - - name: Set up JDK ${{ matrix.Java }} - uses: coursier/setup-action@main - with: - jvm: corretto:${{ matrix.Java }} - apps: sbt scala scalac - - name: Setup TimeZone - uses: MathRobin/timezone-action@v1.1 - with: - timezoneLinux: "Europe/Berlin" - timezoneMacos: "Europe/Berlin" - timezoneWindows: "W. Europe Standard Time" - - name: Start MongoDB ${{ matrix.mongodb-version }} - uses: MongoCamp/mongodb-github-action@main - with: - mongodb-version: ${{ matrix.mongodb-version }} - - name: Run tests - run: | - timedatectl - sbt +test \ No newline at end of file + - uses: actions/checkout@main + - name: Set up JDK ${{ matrix.Java }} + uses: coursier/setup-action@main + with: + jvm: corretto:${{ matrix.Java }} + apps: sbt scala scalac + - name: Setup TimeZone + uses: MathRobin/timezone-action@v1.1 + with: + timezoneLinux: "Europe/Berlin" + timezoneMacos: "Europe/Berlin" + timezoneWindows: "W. Europe Standard Time" + - name: Start MongoDB ${{ matrix.mongodb-version }} + uses: MongoCamp/mongodb-github-action@main + with: + mongodb-version: ${{ matrix.mongodb-version }} + - name: Run tests + run: | + timedatectl + sbt +test \ No newline at end of file