diff --git a/.gitignore b/.gitignore index 44fbe0e4..c83ec207 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +databases/* out/* generate_bundles.rb .cache @@ -6,7 +7,7 @@ bin/* .idea* .classpath .project/* -.settings/* +**/.settings/* project/target/* project/project/* postgresql-async/target/* @@ -17,3 +18,6 @@ mysql-async/target/* .ruby-gemset *.jar *.iml +.project +.vagrant/* +vendor/* diff --git a/.travis.yml b/.travis.yml index c04d3e61..3e334f1a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,13 +1,21 @@ language: scala scala: - - 2.10.2 + - 2.10.4 + - 2.11.7 + - 2.12.1 + jdk: - - oraclejdk7 - - openjdk7 - - openjdk6 + - oraclejdk8 + services: - postgresql - mysql +cache: + directories: + - vendor/bundle + - $HOME/.m2 + - $HOME/.ivy2 + - $HOME/.sbt before_script: - ./script/prepare_build.sh diff --git a/CHANGELOG.md b/CHANGELOG.md index 97dab08e..ce4b61ae 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,78 @@ + + + +- [Changelog](#changelog) + - [0.2.19 - 2016-03-17](#0219---2016-03-17) + - [0.2.18 - 2015-08-08](#0218---2015-08-08) + - [0.2.17 - 2015-07-13](#0217---2015-07-13) + - [0.2.16 - 2015-01-04](#0216---2015-01-04) + - [0.2.15 - 2014-09-12](#0215---2014-09-12) + - [0.2.14 - 2014-08-30](#0214---2014-08-30) + - [0.2.13 - 2014-04-07](#0213---2014-04-07) + - [0.2.12 - 2014-01-11](#0212---2014-01-11) + - [0.2.11 - 2014-01-11](#0211---2014-01-11) + - [0.2.10 - 2013-12-18](#0210---2013-12-18) + - [0.2.9 - 2013-12-01](#029---2013-12-01) + - [0.2.8 - 2013-09-24](#028---2013-09-24) + - [0.2.7 - 2013-09-09](#027---2013-09-09) + - [0.2.5](#025) + - [0.2.4 - 2013-07-06](#024---2013-07-06) + - [0.2.3 - 2013-05-21](#023---2013-05-21) + - [0.2.2 - 2013-05-18](#022---2013-05-18) + - [0.1.1 - 2013-04-30](#011---2013-04-30) + - [0.1.0 - 2013-04-29](#010---2013-04-29) + + + # Changelog +## 0.2.20 - 2017-09-17 + +* Building for Scala 2.12; +* Fix SFL4J deprecation warning - #201 - @golem131; + +## 0.2.19 - 2016-03-17 + +* Always use `NUMERIC` when handling numbers in prepared statements in PostgreSQL; +* SSL support for PostgreSQL - @alexdupre - #85; + +## 0.2.18 - 2015-08-08 + +* Timeouts implemented queries for MySQL and PostgreSQL - @lifey - #147 + +## 0.2.17 - 2015-07-13 + +* Fixed pool leak issue - @haski +* Fixed date time formatting issue - #142 + +## 0.2.16 - 2015-01-04 + +* Add support to byte arrays for PostgreSQL 8 and older - @SattaiLanfear - #21; +* Make sure connections are returned to the pool before the result is returned to the user - @haski - #119; +* Support to `SEND_LONG_DATA` to MySQL - @mst-appear - #115; +* Support for `ByteBuffer` and `ByteBuf` for binary data - @mst-appear - #113 #112; +* Fixed encoding backslashes in PostgreSQL arrays - @dylex - #110; +* Included `escape` encoding method for bytes in PostgreSQL - @SattaiLanfear - #107; + +## 0.2.15 - 2014-09-12 + +* Fixes issue where PostgreSQL decoders fail to produce a NULL value if the null is wrapped by a `Some` instance - #99; +* Fixes issue where the 253 case of length encoded fields on MySQL produce a wrong value; + +## 0.2.14 - 2014-08-30 + +* Remove failed prepared statement from cache - @dboissin - #95 +* Added support to zeroed dates on MySQL - #93 +* Cross compilation to Scala 2.11 is functional - @lpiepiora +* Connect to older MySQL versions where auth protocol isn't provided - #37 +* Eclipse project support - @fwbrasil - #89 +* Make timeouts configurable - @fwbrasil - #90 + +## 0.2.13 - 2014-04-07 + +* Accepts MySQL old and unsafe auth methods - #37 +* Do not name every single logger as 
they all leak - @njeuk #86 + ## 0.2.12 - 2014-01-11 * Do not check for handshake requests after a real handshake has happened already - MySQL - #80; diff --git a/LICENCE.txt b/LICENSE.txt similarity index 99% rename from LICENCE.txt rename to LICENSE.txt index 61ca0ac4..fc389d02 100644 --- a/LICENCE.txt +++ b/LICENSE.txt @@ -199,4 +199,4 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file + limitations under the License. diff --git a/Procfile b/Procfile index 6c1b0717..13e2e8fd 100644 --- a/Procfile +++ b/Procfile @@ -1,2 +1,2 @@ -postgresql: postgres -D /Users/mauricio/databases/postgresql +postgresql: postgres -D vendor/postgresql mysql: mysqld --log-warnings --console \ No newline at end of file diff --git a/README.markdown b/README.markdown index b82dd377..79f4b057 100644 --- a/README.markdown +++ b/README.markdown @@ -1,4 +1,28 @@ -# [![Build Status](https://travis-ci.org/mauricio/postgresql-async.png)](https://travis-ci.org/mauricio/postgresql-async) postgresql-async & mysql-async - async, Netty based, database drivers for MySQL and PostgreSQL written in Scala - 2.10 + + + +- This project is not being maintained anymore, feel free to fork and work on it + - [Abstractions and integrations](#abstractions-and-integrations) + - [Include them as dependencies](#include-them-as-dependencies) + - [Database connections and encodings](#database-connections-and-encodings) + - [Prepared statements gotcha](#prepared-statements-gotcha) + - [What are the design goals?](#what-are-the-design-goals) + - [What is missing?](#what-is-missing) + - [How can you help?](#how-can-you-help) + - [Main public interface](#main-public-interface) + - [Connection](#connection) + - [QueryResult](#queryresult) + - [ResultSet](#resultset) + - [Prepared statements](#prepared-statements) + - [Transactions](#transactions) + - [Example usage (for PostgreSQL, but it looks almost the same on MySQL)](#example-usage-for-postgresql-but-it-looks-almost-the-same-on-mysql) + - [LISTEN/NOTIFY support (PostgreSQL only)](#listennotify-support-postgresql-only) + - [Contributing](#contributing) + - [Licence](#licence) + + + +# [![Build Status](https://travis-ci.org/mauricio/postgresql-async.png)](https://travis-ci.org/mauricio/postgresql-async) This project is not being maintained anymore, feel free to fork and work on it The main goal for this project is to implement simple, async, performant and reliable database drivers for PostgreSQL and MySQL in Scala. This is not supposed to be a JDBC replacement, these drivers aim to cover the common @@ -21,13 +45,16 @@ You can view the project's [CHANGELOG here](CHANGELOG.md). driver allowing you to write less SQL and make use of a nice high level database access API; * [mod-mysql-postgresql](https://github.com/vert-x/mod-mysql-postgresql) - [vert.x](http://vertx.io/) module that integrates the driver into a vert.x application; +* [dbmapper](https://github.com/njeuk/dbmapper) - enables SQL queries with automatic mapping from the database table to the Scala + class and a mechanism to create a Table Date Gateway model with very little boiler plate code; +* [Quill](http://getquill.io) - A compile-time language integrated query library for Scala. 
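+
+Before the dependency coordinates, a minimal sketch of the async query flow (the object name, URL, database and
+credentials below are placeholders; the fuller, canonical example appears in the usage section later in this README):
+
+```scala
+import com.github.mauricio.async.db.postgresql.PostgreSQLConnection
+import com.github.mauricio.async.db.postgresql.util.URLParser
+import scala.concurrent.Await
+import scala.concurrent.duration._
+
+object QuickStart extends App {
+  // parse a connection URL into a Configuration and open a single connection
+  val configuration = URLParser.parse(
+    "jdbc:postgresql://localhost:5432/my_database?user=postgres&password=somepassword")
+  val connection = new PostgreSQLConnection(configuration)
+  Await.result(connection.connect, 5.seconds)
+
+  // sendQuery never blocks the caller, it completes a Future[QueryResult]
+  val queryResult = Await.result(connection.sendQuery("SELECT 0"), 5.seconds)
+  println(queryResult.rows.map(_.head(0)))
+
+  Await.result(connection.disconnect, 5.seconds)
+}
+```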
## Include them as dependencies

And if you're in a hurry, you can include them in your build like this, if you're using PostgreSQL:

```scala
-"com.github.mauricio" %% "postgresql-async" % "0.2.12"
+"com.github.mauricio" %% "postgresql-async" % "0.2.21"
```

Or Maven:

@@ -35,15 +62,24 @@ Or Maven:

```xml
<dependency>
  <groupId>com.github.mauricio</groupId>
-  <artifactId>postgresql-async_2.10</artifactId>
-  <version>0.2.12</version>
+  <artifactId>postgresql-async_2.11</artifactId>
+  <version>0.2.21</version>
+</dependency>
+```
+
+or, for Scala 2.12:
+```xml
+<dependency>
+  <groupId>com.github.mauricio</groupId>
+  <artifactId>postgresql-async_2.12</artifactId>
+  <version>0.2.21</version>
</dependency>
```

And if you're into MySQL:

```scala
-"com.github.mauricio" %% "mysql-async" % "0.2.12"
+"com.github.mauricio" %% "mysql-async" % "0.2.21"
```

Or Maven:

@@ -51,8 +87,16 @@ Or Maven:

```xml
<dependency>
  <groupId>com.github.mauricio</groupId>
-  <artifactId>mysql-async_2.10</artifactId>
-  <version>0.2.12</version>
+  <artifactId>mysql-async_2.11</artifactId>
+  <version>0.2.21</version>
+</dependency>
+```
+or, for Scala 2.12:
+```xml
+<dependency>
+  <groupId>com.github.mauricio</groupId>
+  <artifactId>mysql-async_2.12</artifactId>
+  <version>0.2.21</version>
</dependency>
```

@@ -123,7 +167,7 @@ So, prepared statements are awesome, but are not free. Use them judiciously.

### Connection

Represents a connection to the database. This is the **root** object you will be using in your application. You will
-find three classes that implement this trait, `PostgreSQLConnection`, `MySQLConnection` and `ConnectionPool`.
+find three classes that implement this trait, `PostgreSQLConnection`, `MySQLConnection` and `ConnectionPool`.
The difference between them is that `ConnectionPool` is, as the name implies, a pool of connections and you need to
give it a connection factory so it can create connections and manage them.

@@ -243,6 +287,21 @@ disconnect and the connection is closed.

You can also use the `ConnectionPool` provided by the driver to simplify working with database connections in your
app. Check the blog post above for more details and the project's ScalaDocs.

+## LISTEN/NOTIFY support (PostgreSQL only)
+
+LISTEN/NOTIFY is a PostgreSQL-specific feature for database-wide publish-subscribe scenarios. You can listen to database
+notifications as such:
+
+```scala
+  val connection: Connection = ...
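+  // `connection` is obtained elsewhere, e.g. an already-connected PostgreSQLConnection (illustrative)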
+ + connection.sendQuery("LISTEN my_channel") + connection.registerNotifyListener { + message => + println(s"channel: ${message.channel}, payload: ${message.payload}") + } +``` + ## Contributing Contributing to the project is simple, fork it on Github, hack on what you're insterested in seeing done or at the diff --git a/bootstrap.sh b/bootstrap.sh new file mode 100644 index 00000000..451d77cf --- /dev/null +++ b/bootstrap.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash +yum -y install mysql-server +service mysqld start +mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO root;" +mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'mysql_vagrant' IDENTIFIED BY 'generic_password' WITH GRANT OPTION"; \ No newline at end of file diff --git a/build.sbt b/build.sbt deleted file mode 100644 index 4ba2d92a..00000000 --- a/build.sbt +++ /dev/null @@ -1,3 +0,0 @@ -scalaVersion := "2.10.3" - -parallelExecution in ThisBuild := false \ No newline at end of file diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala index 70497e08..cde267cf 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala @@ -17,14 +17,16 @@ package com.github.mauricio.async.db import java.nio.charset.Charset -import scala.Predef._ -import scala.{None, Option, Int} + +import io.netty.buffer.{ByteBufAllocator, PooledByteBufAllocator} import io.netty.util.CharsetUtil -import io.netty.buffer.AbstractByteBufAllocator -import io.netty.buffer.PooledByteBufAllocator + +import scala.concurrent.duration._ object Configuration { val DefaultCharset = CharsetUtil.UTF_8 + + @deprecated("Use com.github.mauricio.async.db.postgresql.util.URLParser.DEFAULT or com.github.mauricio.async.db.mysql.util.URLParser.DEFAULT.", since = "0.2.20") val Default = new Configuration("postgres") } @@ -37,12 +39,18 @@ object Configuration { * @param port database port, defaults to 5432 * @param password password, defaults to no password * @param database database name, defaults to no database + * @param ssl ssl configuration * @param charset charset for the connection, defaults to UTF-8, make sure you know what you are doing if you * change this * @param maximumMessageSize the maximum size a message from the server could possibly have, this limits possible * OOM or eternal loop attacks the client could have, defaults to 16 MB. You can set this * to any value you would like but again, make sure you know what you are doing if you do * change it. 
+ * @param allocator the netty buffer allocator to be used + * @param connectTimeout the timeout for connecting to servers + * @param testTimeout the timeout for connection tests performed by pools + * @param queryTimeout the optional query timeout + * */ case class Configuration(username: String, @@ -50,7 +58,10 @@ case class Configuration(username: String, port: Int = 5432, password: Option[String] = None, database: Option[String] = None, + ssl: SSLConfiguration = SSLConfiguration(), charset: Charset = Configuration.DefaultCharset, maximumMessageSize: Int = 16777216, - allocator: AbstractByteBufAllocator = PooledByteBufAllocator.DEFAULT - ) + allocator: ByteBufAllocator = PooledByteBufAllocator.DEFAULT, + connectTimeout: Duration = 5.seconds, + testTimeout: Duration = 5.seconds, + queryTimeout: Option[Duration] = None) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/SSLConfiguration.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/SSLConfiguration.scala new file mode 100644 index 00000000..9ae657fe --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/SSLConfiguration.scala @@ -0,0 +1,31 @@ +package com.github.mauricio.async.db + +import java.io.File + +import SSLConfiguration.Mode + +/** + * + * Contains the SSL configuration necessary to connect to a database. + * + * @param mode whether and with what priority a SSL connection will be negotiated, default disabled + * @param rootCert path to PEM encoded trusted root certificates, None to use internal JDK cacerts, defaults to None + * + */ +case class SSLConfiguration(mode: Mode.Value = Mode.Disable, rootCert: Option[java.io.File] = None) + +object SSLConfiguration { + + object Mode extends Enumeration { + val Disable = Value("disable") // only try a non-SSL connection + val Prefer = Value("prefer") // first try an SSL connection; if that fails, try a non-SSL connection + val Require = Value("require") // only try an SSL connection, but don't verify Certificate Authority + val VerifyCA = Value("verify-ca") // only try an SSL connection, and verify that the server certificate is issued by a trusted certificate authority (CA) + val VerifyFull = Value("verify-full") // only try an SSL connection, verify that the server certificate is issued by a trusted CA and that the server host name matches that in the certificate + } + + def apply(properties: Map[String, String]): SSLConfiguration = SSLConfiguration( + mode = Mode.withName(properties.get("sslmode").getOrElse("disable")), + rootCert = properties.get("sslrootcert").map(new File(_)) + ) +} diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/column/DateEncoderDecoder.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/DateEncoderDecoder.scala index 63c01c6e..53c2f2dd 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/column/DateEncoderDecoder.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/DateEncoderDecoder.scala @@ -22,11 +22,16 @@ import com.github.mauricio.async.db.exceptions.DateEncoderNotAvailableException object DateEncoderDecoder extends ColumnEncoderDecoder { + private val ZeroedDate = "0000-00-00" + private val formatter = DateTimeFormat.forPattern("yyyy-MM-dd") - override def decode(value: String): LocalDate = { - this.formatter.parseLocalDate(value) - } + override def decode(value: String): LocalDate = + if ( ZeroedDate == value ) { + null + } else { + this.formatter.parseLocalDate(value) + } override def 
encode(value: Any): String = { value match { diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/column/InetAddressEncoderDecoder.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/InetAddressEncoderDecoder.scala new file mode 100644 index 00000000..ecac853d --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/InetAddressEncoderDecoder.scala @@ -0,0 +1,36 @@ +/* + * Copyright 2013 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.github.mauricio.async.db.column + +import java.net.InetAddress +import sun.net.util.IPAddressUtil.{textToNumericFormatV4,textToNumericFormatV6} + +object InetAddressEncoderDecoder extends ColumnEncoderDecoder { + + override def decode(value: String): Any = { + if (value contains ':') { + InetAddress.getByAddress(textToNumericFormatV6(value)) + } else { + InetAddress.getByAddress(textToNumericFormatV4(value)) + } + } + + override def encode(value: Any): String = { + value.asInstanceOf[InetAddress].getHostAddress + } + +} diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/column/LocalDateTimeEncoderDecoder.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/LocalDateTimeEncoderDecoder.scala index 27d50383..6cb67ad9 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/column/LocalDateTimeEncoderDecoder.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/LocalDateTimeEncoderDecoder.scala @@ -21,6 +21,8 @@ import org.joda.time.LocalDateTime object LocalDateTimeEncoderDecoder extends ColumnEncoderDecoder { + private val ZeroedTimestamp = "0000-00-00 00:00:00" + private val optional = new DateTimeFormatterBuilder() .appendPattern(".SSSSSS").toParser @@ -33,5 +35,10 @@ object LocalDateTimeEncoderDecoder extends ColumnEncoderDecoder { format.print(value.asInstanceOf[LocalDateTime]) override def decode(value: String): LocalDateTime = - format.parseLocalDateTime(value) + if (ZeroedTimestamp == value) { + null + } else { + format.parseLocalDateTime(value) + } + } diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/column/TimeEncoderDecoder.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/TimeEncoderDecoder.scala index 9a801775..a7d0c879 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/column/TimeEncoderDecoder.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/TimeEncoderDecoder.scala @@ -33,14 +33,16 @@ class TimeEncoderDecoder extends ColumnEncoderDecoder { .appendOptional(optional) .toFormatter + final private val printer = new DateTimeFormatterBuilder() + .appendPattern("HH:mm:ss.SSSSSS") + .toFormatter + def formatter = format - override def decode(value: String): LocalTime = { + override def decode(value: String): LocalTime = format.parseLocalTime(value) - } - override def encode(value: Any): String = { - 
this.format.print(value.asInstanceOf[LocalTime]) - } + override def encode(value: Any): String = + this.printer.print(value.asInstanceOf[LocalTime]) } diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/column/UUIDEncoderDecoder.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/UUIDEncoderDecoder.scala new file mode 100644 index 00000000..11987835 --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/UUIDEncoderDecoder.scala @@ -0,0 +1,25 @@ +/* + * Copyright 2013 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.github.mauricio.async.db.column + +import java.util.UUID + +object UUIDEncoderDecoder extends ColumnEncoderDecoder { + + override def decode(value: String): UUID = UUID.fromString(value) + +} diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/ConnectionTimeoutedException.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/ConnectionTimeoutedException.scala new file mode 100644 index 00000000..7e02c17c --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/ConnectionTimeoutedException.scala @@ -0,0 +1,6 @@ +package com.github.mauricio.async.db.exceptions + +import com.github.mauricio.async.db.Connection + +class ConnectionTimeoutedException( val connection : Connection ) + extends DatabaseException( "The connection %s has a timeouted query and is being closed".format(connection) ) \ No newline at end of file diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/UnableToParseURLException.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/UnableToParseURLException.scala new file mode 100644 index 00000000..0d2799df --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/UnableToParseURLException.scala @@ -0,0 +1,24 @@ +/* + * Copyright 2016 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.github.mauricio.async.db.exceptions + +/** + * Thrown to indicate that a URL Parser could not understand the provided URL. 
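+ * For example, AbstractURIParser.parseOrDie wraps the underlying URISyntaxException in this exception when the URL is malformed.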
+ */ +class UnableToParseURLException(message: String, base: Throwable) extends RuntimeException(message, base) { + def this(message: String) = this(message, null) +} \ No newline at end of file diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/general/ArrayRowData.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/general/ArrayRowData.scala index c232a12a..fe582481 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/general/ArrayRowData.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/general/ArrayRowData.scala @@ -17,14 +17,10 @@ package com.github.mauricio.async.db.general import com.github.mauricio.async.db.RowData -import scala.collection.mutable -class ArrayRowData( columnCount : Int, row : Int, val mapping : Map[String, Int] ) - extends RowData +class ArrayRowData(row : Int, val mapping : Map[String, Int], val columns : Array[Any]) extends RowData { - private val columns = new Array[Any](columnCount) - /** * * Returns a column value by it's position in the originating query. @@ -51,16 +47,5 @@ class ArrayRowData( columnCount : Int, row : Int, val mapping : Map[String, Int] */ def rowNumber: Int = row - /** - * - * Sets a value to a column in this collection. - * - * @param i - * @param x - */ - - def update(i: Int, x: Any) = columns(i) = x - def length: Int = columns.length - } diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/general/MutableResultSet.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/general/MutableResultSet.scala index 0422a4cf..00cc712b 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/general/MutableResultSet.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/general/MutableResultSet.scala @@ -31,22 +31,18 @@ class MutableResultSet[T <: ColumnData]( private val columnMapping: Map[String, Int] = this.columnTypes.indices.map( index => ( this.columnTypes(index).name, index ) ).toMap - + val columnNames : IndexedSeq[String] = this.columnTypes.map(c => c.name) + val types : IndexedSeq[Int] = this.columnTypes.map(c => c.dataType) + override def length: Int = this.rows.length override def apply(idx: Int): RowData = this.rows(idx) - def addRow( row : Seq[Any] ) { - val realRow = new ArrayRowData( columnTypes.size, this.rows.size, this.columnMapping ) - var x = 0 - while ( x < row.size ) { - realRow(x) = row(x) - x += 1 - } - this.rows += realRow + def addRow(row : Array[Any] ) { + this.rows += new ArrayRowData(this.rows.size, this.columnMapping, row) } -} \ No newline at end of file +} diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala index 39179737..3e4345a8 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala @@ -16,7 +16,7 @@ package com.github.mauricio.async.db.pool -import scala.concurrent.Future +import scala.concurrent.{ExecutionContext, Future, Promise} /** * @@ -70,11 +70,26 @@ trait AsyncObjectPool[T] { * @return f wrapped with take and giveBack */ - def use[A](f : T => Future[A])(implicit executionContext : scala.concurrent.ExecutionContext) : Future[A] = + def use[A](f: (T) => Future[A])(implicit executionContext: ExecutionContext): Future[A] = take.flatMap { item => - f(item).andThen { case _ => - giveBack(item) + val 
p = Promise[A]() + try { + f(item).onComplete { r => + giveBack(item).onComplete { _ => + p.complete(r) + } + } + } catch { + // calling f might throw exception. + // in that case the item will be removed from the pool if identified as invalid by the factory. + // the error returned to the user is the original error thrown by f. + case error: Throwable => + giveBack(item).onComplete { _ => + p.failure(error) + } } + + p.future } } diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PartitionedAsyncObjectPool.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PartitionedAsyncObjectPool.scala new file mode 100644 index 00000000..8561ed54 --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PartitionedAsyncObjectPool.scala @@ -0,0 +1,65 @@ +package com.github.mauricio.async.db.pool + +import scala.concurrent.Future +import com.github.mauricio.async.db.util.ExecutorServiceUtils +import scala.concurrent.Promise +import java.util.concurrent.ConcurrentHashMap +import scala.util.Success +import scala.util.Failure + +class PartitionedAsyncObjectPool[T]( + factory: ObjectFactory[T], + configuration: PoolConfiguration, + numberOfPartitions: Int) + extends AsyncObjectPool[T] { + + import ExecutorServiceUtils.CachedExecutionContext + + private val pools = + (0 until numberOfPartitions) + .map(_ -> new SingleThreadedAsyncObjectPool(factory, partitionConfig)) + .toMap + + private val checkouts = new ConcurrentHashMap[T, SingleThreadedAsyncObjectPool[T]] + + def take: Future[T] = { + val pool = currentPool + pool.take.andThen { + case Success(conn) => + checkouts.put(conn, pool) + case Failure(_) => + } + } + + def giveBack(item: T) = + checkouts + .remove(item) + .giveBack(item) + .map(_ => this) + + def close = + Future.sequence(pools.values.map(_.close)).map { + _ => this + } + + def availables: Traversable[T] = pools.values.map(_.availables).flatten + + def inUse: Traversable[T] = pools.values.map(_.inUse).flatten + + def queued: Traversable[Promise[T]] = pools.values.map(_.queued).flatten + + protected def isClosed = + pools.values.forall(_.isClosed) + + private def currentPool = + pools(currentThreadAffinity) + + private def currentThreadAffinity = + (Thread.currentThread.getId % numberOfPartitions).toInt + + private def partitionConfig = + configuration.copy( + maxObjects = configuration.maxObjects / numberOfPartitions, + maxQueueSize = configuration.maxQueueSize / numberOfPartitions + ) +} \ No newline at end of file diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PartitionedConnectionPool.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PartitionedConnectionPool.scala new file mode 100644 index 00000000..698534f5 --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PartitionedConnectionPool.scala @@ -0,0 +1,33 @@ +package com.github.mauricio.async.db.pool; + +import com.github.mauricio.async.db.util.ExecutorServiceUtils +import com.github.mauricio.async.db.{ QueryResult, Connection } +import scala.concurrent.{ ExecutionContext, Future } + +class PartitionedConnectionPool[T <: Connection]( + factory: ObjectFactory[T], + configuration: PoolConfiguration, + numberOfPartitions: Int, + executionContext: ExecutionContext = ExecutorServiceUtils.CachedExecutionContext) + extends PartitionedAsyncObjectPool[T](factory, configuration, numberOfPartitions) + with Connection { + + def disconnect: Future[Connection] = if (this.isConnected) { + 
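+    // `close` shuts down every partition of the underlying pool; mapping back to `this` returns the pool itself as the now-disconnected Connection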
this.close.map(item => this)(executionContext) + } else { + Future.successful(this) + } + + def connect: Future[Connection] = Future.successful(this) + + def isConnected: Boolean = !this.isClosed + + def sendQuery(query: String): Future[QueryResult] = + this.use(_.sendQuery(query))(executionContext) + + def sendPreparedStatement(query: String, values: Seq[Any] = List()): Future[QueryResult] = + this.use(_.sendPreparedStatement(query, values))(executionContext) + + override def inTransaction[A](f: Connection => Future[A])(implicit context: ExecutionContext = executionContext): Future[A] = + this.use(_.inTransaction[A](f)(context))(executionContext) +} diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PoolConfiguration.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PoolConfiguration.scala index a245de5c..0ac567f2 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PoolConfiguration.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PoolConfiguration.scala @@ -25,7 +25,7 @@ object PoolConfiguration { * Defines specific pieces of a pool's behavior. * * @param maxObjects how many objects this pool will hold - * @param maxIdle how long are objects going to be kept as idle (not in use by clients of the pool) + * @param maxIdle number of milliseconds for which the objects are going to be kept as idle (not in use by clients of the pool) * @param maxQueueSize when there are no more objects, the pool can queue up requests to serve later then there * are objects available, this is the maximum number of enqueued requests * @param validationInterval pools will use this value as the timer period to validate idle objects. diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala index 84387cb0..b4f25ae2 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala @@ -16,11 +16,14 @@ package com.github.mauricio.async.db.pool +import java.util.concurrent.RejectedExecutionException + import com.github.mauricio.async.db.util.{Log, Worker} import java.util.concurrent.atomic.AtomicLong -import java.util.{TimerTask, Timer} -import scala.collection.mutable.ArrayBuffer -import scala.concurrent.{Promise, Future} +import java.util.{Timer, TimerTask} + +import scala.collection.mutable.{ArrayBuffer, Queue} +import scala.concurrent.{Future, Promise} import scala.util.{Failure, Success} object SingleThreadedAsyncObjectPool { @@ -49,9 +52,9 @@ class SingleThreadedAsyncObjectPool[T]( import SingleThreadedAsyncObjectPool.{Counter, log} private val mainPool = Worker() - private val poolables = new ArrayBuffer[PoolableHolder[T]](configuration.maxObjects) + private var poolables = List.empty[PoolableHolder[T]] private val checkouts = new ArrayBuffer[T](configuration.maxObjects) - private val waitQueue = new ArrayBuffer[Promise[T]](configuration.maxQueueSize) + private val waitQueue = new Queue[Promise[T]]() private val timer = new Timer("async-object-pool-timer-" + Counter.incrementAndGet(), true) timer.scheduleAtFixedRate(new TimerTask { def run() { @@ -93,15 +96,30 @@ class SingleThreadedAsyncObjectPool[T]( def giveBack(item: T): Future[AsyncObjectPool[T]] = { val promise = Promise[AsyncObjectPool[T]]() 
this.mainPool.action { - this.checkouts -= item - this.factory.validate(item) match { - case Success(item) => { - this.addBack(item, promise) + // Ensure it came from this pool + val idx = this.checkouts.indexOf(item) + if(idx >= 0) { + this.checkouts.remove(idx) + this.factory.validate(item) match { + case Success(item) => { + this.addBack(item, promise) + } + case Failure(e) => { + this.factory.destroy(item) + promise.failure(e) + } } - case Failure(e) => { - this.checkouts -= item - this.factory.destroy(item) - promise.failure(e) + } else { + // It's already a failure but lets doublecheck why + val isFromOurPool = (item match { + case x: AnyRef => this.poolables.find(holder => x eq holder.item.asInstanceOf[AnyRef]) + case _ => this.poolables.find(holder => item == holder.item) + }).isDefined + + if(isFromOurPool) { + promise.failure(new IllegalStateException("This item has already been returned")) + } else { + promise.failure(new IllegalArgumentException("The returned item did not come from this pool.")) } } } @@ -112,25 +130,28 @@ class SingleThreadedAsyncObjectPool[T]( def isFull: Boolean = this.poolables.isEmpty && this.checkouts.size == configuration.maxObjects def close: Future[AsyncObjectPool[T]] = { - val promise = Promise[AsyncObjectPool[T]]() - - this.mainPool.action { - if (!this.closed) { - try { - this.timer.cancel() - this.mainPool.shutdown - this.closed = true - (this.poolables.map(i => i.item) ++ this.checkouts).foreach(item => factory.destroy(item)) + try { + val promise = Promise[AsyncObjectPool[T]]() + this.mainPool.action { + if (!this.closed) { + try { + this.timer.cancel() + this.mainPool.shutdown + this.closed = true + (this.poolables.map(i => i.item) ++ this.checkouts).foreach(item => factory.destroy(item)) + promise.success(this) + } catch { + case e: Exception => promise.failure(e) + } + } else { promise.success(this) - } catch { - case e: Exception => promise.failure(e) } - } else { - promise.success(this) } + promise.future + } catch { + case e: RejectedExecutionException if this.closed => + Future.successful(this) } - - promise.future } def availables: Traversable[T] = this.poolables.map(item => item.item) @@ -150,10 +171,10 @@ class SingleThreadedAsyncObjectPool[T]( */ private def addBack(item: T, promise: Promise[AsyncObjectPool[T]]) { - this.poolables += new PoolableHolder[T](item) + this.poolables ::= new PoolableHolder[T](item) - if (!this.waitQueue.isEmpty) { - this.checkout(this.waitQueue.remove(0)) + if (this.waitQueue.nonEmpty) { + this.checkout(this.waitQueue.dequeue()) } promise.success(this) @@ -205,7 +226,9 @@ class SingleThreadedAsyncObjectPool[T]( case e: Exception => promise.failure(e) } } else { - val item = this.poolables.remove(0).item + val h :: t = this.poolables + this.poolables = t + val item = h.item this.checkouts += item promise.success(item) } @@ -238,10 +261,11 @@ class SingleThreadedAsyncObjectPool[T]( case Failure(e) => { log.error("Failed to validate object", e) removals += poolable + factory.destroy(poolable.item) } } } - this.poolables --= removals + this.poolables = this.poolables.diff(removals) } private class PoolableHolder[T](val item: T) { diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/TimeoutScheduler.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/TimeoutScheduler.scala new file mode 100644 index 00000000..d97a9ca1 --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/TimeoutScheduler.scala @@ -0,0 +1,63 @@ +package 
com.github.mauricio.async.db.pool + +import java.util.concurrent.atomic.AtomicBoolean +import java.util.concurrent.{TimeUnit, TimeoutException, ScheduledFuture} +import io.netty.channel.EventLoopGroup +import scala.concurrent.{ExecutionContext, Promise} +import scala.concurrent.duration.Duration + +trait TimeoutScheduler { + + private var isTimeoutedBool = new AtomicBoolean(false) + + /** + * + * The event loop group to be used for scheduling. + * + * @return + */ + + def eventLoopGroup : EventLoopGroup + + /** + * Implementors should decide here what they want to do when a timeout occur + */ + + def onTimeout // implementors should decide here what they want to do when a timeout occur + + /** + * + * We need this property as isClosed takes time to complete and + * we don't want the connection to be used again. + * + * @return + */ + + def isTimeouted : Boolean = + isTimeoutedBool.get + + def addTimeout[A]( + promise: Promise[A], + durationOption: Option[Duration]) + (implicit executionContext : ExecutionContext) : Option[ScheduledFuture[_]] = { + durationOption.map { + duration => + val scheduledFuture = schedule( + { + if (promise.tryFailure(new TimeoutException(s"Operation is timeouted after it took too long to return (${duration})"))) { + isTimeoutedBool.set(true) + onTimeout + } + }, + duration) + promise.future.onComplete(x => scheduledFuture.cancel(false)) + + scheduledFuture + } + } + + def schedule(block: => Unit, duration: Duration) : ScheduledFuture[_] = + eventLoopGroup.schedule(new Runnable { + override def run(): Unit = block + }, duration.toMillis, TimeUnit.MILLISECONDS) +} diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/util/AbstractURIParser.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/util/AbstractURIParser.scala new file mode 100644 index 00000000..e18de6e1 --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/util/AbstractURIParser.scala @@ -0,0 +1,175 @@ +/* + * Copyright 2016 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package com.github.mauricio.async.db.util + +import java.net.{URI, URISyntaxException, URLDecoder} +import java.nio.charset.Charset + +import com.github.mauricio.async.db.exceptions.UnableToParseURLException +import com.github.mauricio.async.db.{Configuration, SSLConfiguration} +import org.slf4j.LoggerFactory + +import scala.util.matching.Regex + +/** + * Common parser assisting methods for PG and MySQL URI parsers. 
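+ *
+ * Illustrative URL shapes the concrete parsers accept (values are placeholders, not an exhaustive list):
+ *   jdbc:postgresql://localhost:5432/my_database?user=postgres&password=secret
+ *   postgres://user:password@localhost:5432/my_database
+ *   mysql://user:password@localhost:3306/my_database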
+ */ +abstract class AbstractURIParser { + import AbstractURIParser._ + + protected val logger = LoggerFactory.getLogger(getClass) + + /** + * Parses out userInfo into a tuple of optional username and password + * + * @param userInfo the optional user info string + * @return a tuple of optional username and password + */ + final protected def parseUserInfo(userInfo: Option[String]): (Option[String], Option[String]) = userInfo.map(_.split(":", 2).toList) match { + case Some(user :: pass :: Nil) ⇒ (Some(user), Some(pass)) + case Some(user :: Nil) ⇒ (Some(user), None) + case _ ⇒ (None, None) + } + + /** + * A Regex that will match the base name of the driver scheme, minus jdbc:. + * Eg: postgres(?:ul)? + */ + protected val SCHEME: Regex + + /** + * The default for this particular URLParser, ie: appropriate and specific to PG or MySQL accordingly + */ + val DEFAULT: Configuration + + + /** + * Parses the provided url and returns a Configuration based upon it. On an error, + * @param url the URL to parse. + * @param charset the charset to use. + * @return a Configuration. + */ + @throws[UnableToParseURLException]("if the URL does not match the expected type, or cannot be parsed for any reason") + def parseOrDie(url: String, + charset: Charset = DEFAULT.charset): Configuration = { + try { + val properties = parse(new URI(url).parseServerAuthority) + + assembleConfiguration(properties, charset) + } catch { + case e: URISyntaxException => + throw new UnableToParseURLException(s"Failed to parse URL: $url", e) + } + } + + + /** + * Parses the provided url and returns a Configuration based upon it. On an error, + * a default configuration is returned. + * @param url the URL to parse. + * @param charset the charset to use. + * @return a Configuration. + */ + def parse(url: String, + charset: Charset = DEFAULT.charset + ): Configuration = { + try { + parseOrDie(url, charset) + } catch { + case e: Exception => + logger.warn(s"Connection url '$url' could not be parsed.", e) + // Fallback to default to maintain current behavior + DEFAULT + } + } + + /** + * Assembles a configuration out of the provided property map. This is the generic form, subclasses may override to + * handle additional properties. + * @param properties the extracted properties from the URL. + * @param charset the charset passed in to parse or parseOrDie. 
+ * @return + */ + protected def assembleConfiguration(properties: Map[String, String], charset: Charset): Configuration = { + DEFAULT.copy( + username = properties.getOrElse(USERNAME, DEFAULT.username), + password = properties.get(PASSWORD), + database = properties.get(DBNAME), + host = properties.getOrElse(HOST, DEFAULT.host), + port = properties.get(PORT).map(_.toInt).getOrElse(DEFAULT.port), + ssl = SSLConfiguration(properties), + charset = charset + ) + } + + + protected def parse(uri: URI): Map[String, String] = { + uri.getScheme match { + case SCHEME() => + val userInfo = parseUserInfo(Option(uri.getUserInfo)) + + val port = Some(uri.getPort).filter(_ > 0) + val db = Option(uri.getPath).map(_.stripPrefix("/")).filterNot(_.isEmpty) + val host = Option(uri.getHost) + + val builder = Map.newBuilder[String, String] + builder ++= userInfo._1.map(USERNAME -> _) + builder ++= userInfo._2.map(PASSWORD -> _) + builder ++= port.map(PORT -> _.toString) + builder ++= db.map(DBNAME -> _) + builder ++= host.map(HOST -> unwrapIpv6address(_)) + + // Parse query string parameters and just append them, overriding anything previously set + builder ++= (for { + qs <- Option(uri.getQuery).toSeq + parameter <- qs.split('&') + Array(name, value) = parameter.split('=') + if name.nonEmpty && value.nonEmpty + } yield URLDecoder.decode(name, "UTF-8") -> URLDecoder.decode(value, "UTF-8")) + + + builder.result + case "jdbc" => + handleJDBC(uri) + case _ => + throw new UnableToParseURLException("Unrecognized URI scheme") + } + } + + /** + * This method breaks out handling of the jdbc: prefixed uri's, allowing them to be handled differently + * without reimplementing all of parse. + */ + protected def handleJDBC(uri: URI): Map[String, String] = parse(new URI(uri.getSchemeSpecificPart)) + + + final protected def unwrapIpv6address(server: String): String = { + if (server.startsWith("[")) { + server.substring(1, server.length() - 1) + } else server + } + +} + +object AbstractURIParser { + // Constants and value names + val PORT = "port" + val DBNAME = "database" + val HOST = "host" + val USERNAME = "user" + val PASSWORD = "password" +} + diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/util/ChannelWrapper.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/util/ChannelWrapper.scala index 229dabff..94bca43e 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/util/ChannelWrapper.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/util/ChannelWrapper.scala @@ -43,19 +43,6 @@ class ChannelWrapper( val buffer : ByteBuf ) extends AnyVal { def readUntilEOF( charset: Charset ) = ByteBufferUtils.readUntilEOF(buffer, charset) - def read3BytesInt : Int = { - val first = buffer.readByte() - val second = buffer.readByte() - val third = buffer.readByte() - var i = third << 16 | second << 8 | first - - if ((third & 0x80) == 0x80) { - i |= 0xff000000 - } - - i - } - def readLengthEncodedString( charset : Charset ) : String = { val length = readBinaryLength readFixedString(length.asInstanceOf[Int], charset) @@ -70,7 +57,7 @@ class ChannelWrapper( val buffer : ByteBuf ) extends AnyVal { firstByte match { case MySQL_NULL => -1 case 252 => buffer.readUnsignedShort() - case 253 => read3BytesInt + case 253 => readLongInt case 254 => buffer.readLong() case _ => throw new UnknownLengthException(firstByte) } @@ -78,6 +65,14 @@ class ChannelWrapper( val buffer : ByteBuf ) extends AnyVal { } + def readLongInt : Int = { + val first = buffer.readByte() + val second 
= buffer.readByte() + val third = buffer.readByte() + + ( first & 0xff ) | (( second & 0xff ) << 8) | ((third & 0xff) << 16) + } + def writeLength( length : Long ) { if (length < 251) { buffer.writeByte( length.asInstanceOf[Byte]) @@ -109,4 +104,12 @@ class ChannelWrapper( val buffer : ByteBuf ) extends AnyVal { ByteBufferUtils.writePacketLength(buffer, sequence ) } + def mysqlReadInt() : Int = { + val first = buffer.readByte() + val last = buffer.readByte() + + (first & 0xff) | ((last & 0xff) << 8) + } + + } diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/util/NettyUtils.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/util/NettyUtils.scala index 32f736e3..c9e09f1a 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/util/NettyUtils.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/util/NettyUtils.scala @@ -20,7 +20,7 @@ import io.netty.util.internal.logging.{InternalLoggerFactory, Slf4JLoggerFactory object NettyUtils { - InternalLoggerFactory.setDefaultFactory(new Slf4JLoggerFactory()) + InternalLoggerFactory.setDefaultFactory(Slf4JLoggerFactory.INSTANCE) lazy val DefaultEventLoopGroup = new NioEventLoopGroup(0, DaemonThreadsFactory("db-async-netty")) } \ No newline at end of file diff --git a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/AbstractAsyncObjectPoolSpec.scala b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/AbstractAsyncObjectPoolSpec.scala new file mode 100644 index 00000000..7c8bfdc4 --- /dev/null +++ b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/AbstractAsyncObjectPoolSpec.scala @@ -0,0 +1,229 @@ +package com.github.mauricio.async.db.pool + +import com.github.mauricio.async.db.pool.AbstractAsyncObjectPoolSpec.Widget +import org.mockito.Mockito.reset +import org.specs2.mock.Mockito +import org.specs2.mutable.Specification + +import scala.concurrent.{Await, Future} +import scala.util.Failure + +import scala.reflect.runtime.universe.TypeTag +import scala.util.Try +import scala.concurrent.ExecutionContext.Implicits.global +import scala.concurrent.duration._ + +/** + * This spec is designed abstract to allow testing of any implementation of AsyncObjectPool, against the common + * requirements the interface expects. + * + * @tparam T the AsyncObjectPool being tested. 
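+ *
+ * Concrete specs only need to supply the pool() factory; SingleThreadedAsyncObjectPoolSpec at the bottom of this file is one such subclass.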
+ */ +abstract class AbstractAsyncObjectPoolSpec[T <: AsyncObjectPool[Widget]](implicit tag: TypeTag[T]) + extends Specification + with Mockito { + + import AbstractAsyncObjectPoolSpec._ + + protected def pool(factory: ObjectFactory[Widget] = new TestWidgetFactory, conf: PoolConfiguration = PoolConfiguration.Default): T + + // Evaluates to the type of AsyncObjectPool + s"the ${tag.tpe.erasure} variant of AsyncObjectPool" should { + + "successfully retrieve and return a Widget" in { + val p = pool() + val widget = Await.result(p.take, Duration.Inf) + + widget must not beNull + + val thePool = Await.result(p.giveBack(widget), Duration.Inf) + thePool must be(p) + } + + "reject Widgets that did not come from it" in { + val p = pool() + + Await.result(p.giveBack(Widget(null)), Duration.Inf) must throwAn[IllegalArgumentException] + } + + "scale contents" >> { + sequential + + val factory = spy(new TestWidgetFactory) + + val p = pool( + factory = factory, + conf = PoolConfiguration( + maxObjects = 5, + maxIdle = 2, + maxQueueSize = 5, + validationInterval = 2000 + )) + + + + var taken = Seq.empty[Widget] + "can take up to maxObjects" in { + taken = Await.result(Future.sequence(for (i <- 1 to 5) yield p.take), Duration.Inf) + + taken must have size 5 + taken.head must not beNull; + taken(1) must not beNull; + taken(2) must not beNull; + taken(3) must not beNull; + taken(4) must not beNull + } + + "does not attempt to expire taken items" in { + // Wait 3 seconds to ensure idle check has run at least once + there was after(3.seconds).no(factory).destroy(any[Widget]) + } + + reset(factory) // Considered bad form, but necessary as we depend on previous state in these tests + "takes maxObjects back" in { + val returns = Await.result(Future.sequence(for (widget <- taken) yield p.giveBack(widget)), Duration.Inf) + + returns must have size 5 + + returns.head must be(p) + returns(1) must be(p) + returns(2) must be(p) + returns(3) must be(p) + returns(4) must be(p) + } + + "protest returning an item that was already returned" in { + val resultFuture = p.giveBack(taken.head) + + Await.result(resultFuture, Duration.Inf) must throwAn[IllegalStateException] + } + + "destroy down to maxIdle widgets" in { + Thread.sleep(3000) + there were 5.times(factory).destroy(any[Widget]) + } + } + + "queue requests after running out" in { + val p = pool(conf = PoolConfiguration.Default.copy(maxObjects = 2, maxQueueSize = 1)) + + val widgets = Await.result(Future.sequence(for (i <- 1 to 2) yield p.take), Duration.Inf) + + val future = p.take + + // Wait five seconds + Thread.sleep(5000) + + val failedFuture = p.take + + // Cannot be done, would exceed maxObjects + future.isCompleted must beFalse + + Await.result(failedFuture, Duration.Inf) must throwA[PoolExhaustedException] + + Await.result(p.giveBack(widgets.head), Duration.Inf) must be(p) + + Await.result(future, Duration(5, SECONDS)) must be(widgets.head) + } + + "refuse to allow take after being closed" in { + val p = pool() + + Await.result(p.close, Duration.Inf) must be(p) + + Await.result(p.take, Duration.Inf) must throwA[PoolAlreadyTerminatedException] + } + + "allow being closed more than once" in { + val p = pool() + + Await.result(p.close, Duration.Inf) must be(p) + + Await.result(p.close, Duration.Inf) must be(p) + } + + + "destroy a failed widget" in { + val factory = spy(new TestWidgetFactory) + val p = pool(factory = factory) + + val widget = Await.result(p.take, Duration.Inf) + + widget must not beNull + + factory.validate(widget) returns Failure(new 
RuntimeException("This is a bad widget!")) + + Await.result(p.giveBack(widget), Duration.Inf) must throwA[RuntimeException](message = "This is a bad widget!") + + there was atLeastOne(factory).destroy(widget) + } + + "clean up widgets that die in the pool" in { + val factory = spy(new TestWidgetFactory) + // Deliberately make it impossible to expire (nearly) + val p = pool(factory = factory, conf = PoolConfiguration.Default.copy(maxIdle = Long.MaxValue, validationInterval = 2000)) + + val widget = Await.result(p.take, Duration.Inf) + + widget must not beNull + + Await.result(p.giveBack(widget), Duration.Inf) must be(p) + + there was atLeastOne(factory).validate(widget) + there were no(factory).destroy(widget) + + there was after(3.seconds).atLeastTwo(factory).validate(widget) + + factory.validate(widget) returns Failure(new RuntimeException("Test Exception, Not an Error")) + + there was after(3.seconds).one(factory).destroy(widget) + + Await.ready(p.take, Duration.Inf) + + there was two(factory).create + } + + } + +} + +object AbstractAsyncObjectPoolSpec { + + case class Widget(factory: TestWidgetFactory) + + class TestWidgetFactory extends ObjectFactory[Widget] { + + override def create: Widget = Widget(this) + + override def destroy(item: Widget) = {} + + override def validate(item: Widget): Try[Widget] = Try { + if (item.factory eq this) + item + else + throw new IllegalArgumentException("Not our item") + } + } + +} + + +class SingleThreadedAsyncObjectPoolSpec extends AbstractAsyncObjectPoolSpec[SingleThreadedAsyncObjectPool[Widget]] { + + import AbstractAsyncObjectPoolSpec._ + + override protected def pool(factory: ObjectFactory[Widget], conf: PoolConfiguration) = + new SingleThreadedAsyncObjectPool(factory, conf) + + "SingleThreadedAsyncObjectPool" should { + "successfully record a closed state" in { + val p = pool() + + Await.result(p.close, Duration.Inf) must be(p) + + p.isClosed must beTrue + } + + } + +} diff --git a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/DummyTimeoutScheduler.scala b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/DummyTimeoutScheduler.scala new file mode 100644 index 00000000..6935259e --- /dev/null +++ b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/DummyTimeoutScheduler.scala @@ -0,0 +1,32 @@ +/* + * Copyright 2013 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.github.mauricio.async.db.pool + +import java.util.concurrent.atomic.AtomicInteger +import com.github.mauricio.async.db.util.{NettyUtils, ExecutorServiceUtils} +import io.netty.channel.EventLoopGroup + +/** + * Implementation of TimeoutScheduler used for testing + */ +class DummyTimeoutScheduler extends TimeoutScheduler { + implicit val internalPool = ExecutorServiceUtils.CachedExecutionContext + private val timeOuts = new AtomicInteger + override def onTimeout = timeOuts.incrementAndGet + def timeoutCount = timeOuts.get() + def eventLoopGroup : EventLoopGroup = NettyUtils.DefaultEventLoopGroup +} diff --git a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/PartitionedAsyncObjectPoolSpec.scala b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/PartitionedAsyncObjectPoolSpec.scala new file mode 100644 index 00000000..51d58fb0 --- /dev/null +++ b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/PartitionedAsyncObjectPoolSpec.scala @@ -0,0 +1,289 @@ +package com.github.mauricio.async.db.pool + +import java.util.concurrent.atomic.AtomicInteger + +import org.specs2.mutable.Specification +import scala.util.Try +import scala.concurrent.Await +import scala.concurrent.duration._ +import scala.concurrent.Future +import org.specs2.mutable.SpecificationWithJUnit +import language.reflectiveCalls +import com.github.mauricio.async.db.util.ExecutorServiceUtils +import scala.concurrent.ExecutionContext +import java.util.concurrent.Executors + +class PartitionedAsyncObjectPoolSpec extends SpecificationWithJUnit { + isolated + sequential + + val config = + PoolConfiguration(100, Long.MaxValue, 100, Int.MaxValue) + private var current = new AtomicInteger + val factory = new ObjectFactory[Int] { + var reject = Set[Int]() + var failCreate = false + + def create = + if (failCreate) + throw new IllegalStateException + else { + current.incrementAndGet() + } + def destroy(item: Int) = {} + def validate(item: Int) = + Try { + if (reject.contains(item)) + throw new IllegalStateException + else item + } + } + + val pool = new PartitionedAsyncObjectPool(factory, config, 2) + def maxObjects = config.maxObjects / 2 + def maxIdle = config.maxIdle / 2 + def maxQueueSize = config.maxQueueSize / 2 + + "pool contents" >> { + + "before exceed maxObjects" >> { + + "take one element" in { + takeAndWait(1) + + pool.inUse.size mustEqual 1 + pool.queued.size mustEqual 0 + pool.availables.size mustEqual 0 + } + + "take one element and return it invalid" in { + takeAndWait(1) + factory.reject += 1 + + await(pool.giveBack(1)) must throwA[IllegalStateException] + + pool.inUse.size mustEqual 0 + pool.queued.size mustEqual 0 + pool.availables.size mustEqual 0 + } + + "take one failed element" in { + factory.failCreate = true + takeAndWait(1) must throwA[IllegalStateException] + + pool.inUse.size mustEqual 0 + pool.queued.size mustEqual 0 + pool.availables.size mustEqual 0 + } + + "take maxObjects" in { + takeAndWait(maxObjects) + + pool.inUse.size mustEqual maxObjects + pool.queued.size mustEqual 0 + pool.availables.size mustEqual 0 + } + + "take maxObjects - 1 and take one failed" in { + takeAndWait(maxObjects - 1) + + factory.failCreate = true + takeAndWait(1) must throwA[IllegalStateException] + + pool.inUse.size mustEqual maxObjects - 1 + pool.queued.size mustEqual 0 + pool.availables.size mustEqual 0 + } + + "take maxObjects and receive one back" in { + takeAndWait(maxObjects) + await(pool.giveBack(1)) + + pool.inUse.size mustEqual maxObjects - 1 + pool.queued.size 
mustEqual 0 + pool.availables.size mustEqual 1 + } + + "take maxObjects and receive one invalid back" in { + takeAndWait(maxObjects) + factory.reject += 1 + await(pool.giveBack(1)) must throwA[IllegalStateException] + + pool.inUse.size mustEqual maxObjects - 1 + pool.queued.size mustEqual 0 + pool.availables.size mustEqual 0 + } + } + + "after exceed maxObjects" >> { + + takeAndWait(maxObjects) + + "before exceed maxQueueSize" >> { + + "one take queued" in { + pool.take + + pool.inUse.size mustEqual maxObjects + pool.queued.size mustEqual 1 + pool.availables.size mustEqual 0 + } + + "one take queued and receive one item back" in { + val taking = pool.take + + await(pool.giveBack(1)) + + await(taking) mustEqual 1 + pool.inUse.size mustEqual maxObjects + pool.queued.size mustEqual 0 + pool.availables.size mustEqual 0 + } + + "one take queued and receive one invalid item back" in { + val taking = pool.take + factory.reject += 1 + await(pool.giveBack(1)) must throwA[IllegalStateException] + + pool.inUse.size mustEqual maxObjects - 1 + pool.queued.size mustEqual 1 + pool.availables.size mustEqual 0 + } + + "maxQueueSize takes queued" in { + for (_ <- 0 until maxQueueSize) + pool.take + + pool.inUse.size mustEqual maxObjects + pool.queued.size mustEqual maxQueueSize + pool.availables.size mustEqual 0 + } + + "maxQueueSize takes queued and receive one back" in { + val taking = pool.take + for (_ <- 0 until maxQueueSize - 1) + pool.take + + await(pool.giveBack(10)) + + await(taking) mustEqual 10 + pool.inUse.size mustEqual maxObjects + pool.queued.size mustEqual maxQueueSize - 1 + pool.availables.size mustEqual 0 + } + + "maxQueueSize takes queued and receive one invalid back" in { + for (_ <- 0 until maxQueueSize) + pool.take + + factory.reject += 11 + await(pool.giveBack(11)) must throwA[IllegalStateException] + + pool.inUse.size mustEqual maxObjects - 1 + pool.queued.size mustEqual maxQueueSize + pool.availables.size mustEqual 0 + } + } + + "after exceed maxQueueSize" >> { + + for (_ <- 0 until maxQueueSize) + pool.take + + "start to reject takes" in { + await(pool.take) must throwA[PoolExhaustedException] + + pool.inUse.size mustEqual maxObjects + pool.queued.size mustEqual maxQueueSize + pool.availables.size mustEqual 0 + } + + "receive an object back" in { + await(pool.giveBack(1)) + + pool.inUse.size mustEqual maxObjects + pool.queued.size mustEqual maxQueueSize - 1 + pool.availables.size mustEqual 0 + } + + "receive an invalid object back" in { + factory.reject += 1 + await(pool.giveBack(1)) must throwA[IllegalStateException] + + pool.inUse.size mustEqual maxObjects - 1 + pool.queued.size mustEqual maxQueueSize + pool.availables.size mustEqual 0 + } + + "receive maxQueueSize objects back" in { + for (i <- 1 to maxQueueSize) + await(pool.giveBack(i)) + + pool.inUse.size mustEqual maxObjects + pool.queued.size mustEqual 0 + pool.availables.size mustEqual 0 + } + + "receive maxQueueSize invalid objects back" in { + for (i <- 1 to maxQueueSize) { + factory.reject += i + await(pool.giveBack(i)) must throwA[IllegalStateException] + } + + pool.inUse.size mustEqual maxObjects - maxQueueSize + pool.queued.size mustEqual maxQueueSize + pool.availables.size mustEqual 0 + } + + "receive maxQueueSize + 1 object back" in { + for (i <- 1 to maxQueueSize) + await(pool.giveBack(i)) + + await(pool.giveBack(1)) + pool.inUse.size mustEqual maxObjects - 1 + pool.queued.size mustEqual 0 + pool.availables.size mustEqual 1 + } + + "receive maxQueueSize + 1 invalid object back" in { + for (i <- 1 to maxQueueSize) 
+ await(pool.giveBack(i)) + + factory.reject += 1 + await(pool.giveBack(1)) must throwA[IllegalStateException] + pool.inUse.size mustEqual maxObjects - 1 + pool.queued.size mustEqual 0 + pool.availables.size mustEqual 0 + } + } + } + } + + "gives back the connection to the original pool" in { + val executor = Executors.newFixedThreadPool(20) + implicit val context = ExecutionContext.fromExecutor(executor) + + val takes = + for (_ <- 0 until 30) yield { + Future().flatMap(_ => pool.take) + } + val takesAndReturns = + Future.sequence(takes).flatMap { items => + Future.sequence(items.map(pool.giveBack)) + } + + await(takesAndReturns) + + executor.shutdown + pool.inUse.size mustEqual 0 + pool.queued.size mustEqual 0 + pool.availables.size mustEqual 30 + } + + private def takeAndWait(objects: Int) = + for (_ <- 0 until objects) + await(pool.take) + + private def await[T](future: Future[T]) = + Await.result(future, Duration.Inf) +} diff --git a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala new file mode 100644 index 00000000..0c6d85b4 --- /dev/null +++ b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala @@ -0,0 +1,70 @@ +/* + * Copyright 2013 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +package com.github.mauricio.async.db.pool + +import java.util.concurrent.{ScheduledFuture, TimeoutException} +import com.github.mauricio.async.db.util.{ByteBufferUtils, ExecutorServiceUtils} +import org.specs2.mutable.SpecificationWithJUnit +import scala.concurrent.ExecutionContext.Implicits.global +import scala.concurrent.duration._ +import scala.concurrent.{Future, Promise} + +/** + * Tests for TimeoutScheduler + */ +class TimeoutSchedulerSpec extends SpecificationWithJUnit { + + val TIMEOUT_DID_NOT_PASS = "timeout did not pass" + + "test timeout did not pass" in { + val timeoutScheduler = new DummyTimeoutScheduler() + val promise = Promise[String]() + val scheduledFuture = timeoutScheduler.addTimeout(promise,Some(Duration(1000, MILLISECONDS))) + Thread.sleep(100); + promise.isCompleted === false + promise.success(TIMEOUT_DID_NOT_PASS) + Thread.sleep(1500) + promise.future.value.get.get === TIMEOUT_DID_NOT_PASS + scheduledFuture.get.isCancelled === true + timeoutScheduler.timeoutCount === 0 + } + + "test timeout passed" in { + val timeoutMillis = 100 + val promise = Promise[String]() + val timeoutScheduler = new DummyTimeoutScheduler() + val scheduledFuture = timeoutScheduler.addTimeout(promise,Some(Duration(timeoutMillis, MILLISECONDS))) + Thread.sleep(1000) + promise.isCompleted === true + scheduledFuture.get.isCancelled === false + promise.trySuccess(TIMEOUT_DID_NOT_PASS) + timeoutScheduler.timeoutCount === 1 + promise.future.value.get.get must throwA[TimeoutException](message = s"Operation is timeouted after it took too long to return \\(${timeoutMillis} milliseconds\\)") + } + + "test no timeout" in { + val timeoutScheduler = new DummyTimeoutScheduler() + val promise = Promise[String]() + val scheduledFuture = timeoutScheduler.addTimeout(promise,None) + Thread.sleep(1000) + scheduledFuture === None + promise.isCompleted === false + promise.success(TIMEOUT_DID_NOT_PASS) + promise.future.value.get.get === TIMEOUT_DID_NOT_PASS + timeoutScheduler.timeoutCount === 0 + } +} + diff --git a/mysql-async/README.md b/mysql-async/README.md index c6bcea16..3a152286 100644 --- a/mysql-async/README.md +++ b/mysql-async/README.md @@ -1,7 +1,17 @@ -# mysql-async - an async, Netty based, MySQL driver written in Scala 2.10 + + +**Table of Contents** -This is the MySQL part of the async driver collection. As the PostgreSQL version, it is not supposed to be a JDBC -replacement, but a simpler solution for those that need something that queries and then returns rows. +- [mysql-async - an async, Netty based, MySQL driver written in Scala 2.10 and 2.11](#mysql-async---an-async-netty-based-mysql-driver-written-in-scala-210) + - [What can it do now?](#what-can-it-do-now) + - [Gotchas](#gotchas) + - [Supported types](#supported-types) + + + +# mysql-async - an async, Netty based, MySQL driver written in Scala 2.10 and 2.11 + +This is the MySQL part of the async driver collection. As the PostgreSQL version, it is not supposed to be a JDBC replacement, but a simpler solution for those that need something that queries and then returns rows. You can find more information about the MySQL network protocol [here](http://dev.mysql.com/doc/internals/en/client-server-protocol.html). 
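For readers who just want to see the driver in action, below is a minimal usage sketch of the API surface referenced above (`Configuration`, `MySQLConnection`, `sendQuery`, `sendPreparedStatement`). It is an editor-added illustration, not part of the patch: the host, credentials, database name and `messages` table are made up, and the blocking `Await` calls are only there to keep the example short — real code would compose the returned `Future`s instead.

```scala
// Illustrative only: host, credentials, database and the "messages" table are made up.
import com.github.mauricio.async.db.mysql.MySQLConnection
import com.github.mauricio.async.db.{Configuration, QueryResult}

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._

object BasicUsage extends App {

  val configuration = Configuration(
    username = "mysql_async",
    host     = "localhost",
    port     = 3306,
    password = Some("root"),
    database = Some("mysql_async_tests"))

  // A connection must finish connecting before it accepts queries.
  val connection = Await.result(new MySQLConnection(configuration).connect, 5.seconds)

  // Plain text query - result rows are available as an IndexedSeq of RowData.
  val result: QueryResult = Await.result(connection.sendQuery("SELECT NOW()"), 5.seconds)
  result.rows.foreach(rows => println(rows.head(0)))

  // Prepared statement - values are bound positionally to the ? placeholders.
  val insert: Future[QueryResult] =
    connection.sendPreparedStatement("INSERT INTO messages (content) VALUES (?)", Seq("hello"))
  Await.result(insert, 5.seconds)

  Await.result(connection.disconnect, 5.seconds)
}
```

The PostgreSQL driver exposes the same `Connection` methods from `db-async-common`, so an equivalent example should differ mainly in the connection class and configuration.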
@@ -29,6 +39,7 @@ You can find more information about the MySQL network protocol [here](http://dev sense, that is how it was implemented at the database and as a driver we need to stay true to it, so, while you **can** send `java.sql.Time` and `LocalTime` objects to the database, when reading these values you will always receive a `scala.concurrent.Duration` object since it is the closest thing we have to what a `time` value in MySQL means. +* MySQL can store dates with values like `0000-00-00` or `0000-00-00 00:00:00` but it's not possible to represent dates like this in Java (nor there would actually be a date with a zero day or month, this is just MySQL being lenient on invalid dates) so the driver just returns `null` for any case like that. ## Supported types @@ -78,6 +89,10 @@ java.sql.Timestamp | timestamp java.sql.Time | time String | string Array[Byte] | blob +java.nio.ByteBuffer | blob +io.netty.buffer.ByteBuf | blob + +The maximum size of a blob is 2^24-9 bytes (almost 16 MiB). You don't have to match exact values when sending parameters for your prepared statements, MySQL is usually smart enough to understand that if you have sent an Int to `smallint` column it has to truncate the 4 bytes into 2. diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala index a48e8739..cb4a85b0 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala @@ -16,21 +16,22 @@ package com.github.mauricio.async.db.mysql +import java.util.concurrent.atomic.{AtomicLong, AtomicReference} + import com.github.mauricio.async.db._ import com.github.mauricio.async.db.exceptions._ -import com.github.mauricio.async.db.mysql.codec.{MySQLHandlerDelegate, MySQLConnectionHandler} +import com.github.mauricio.async.db.mysql.codec.{MySQLConnectionHandler, MySQLHandlerDelegate} import com.github.mauricio.async.db.mysql.exceptions.MySQLException import com.github.mauricio.async.db.mysql.message.client._ import com.github.mauricio.async.db.mysql.message.server._ import com.github.mauricio.async.db.mysql.util.CharsetMapper +import com.github.mauricio.async.db.pool.TimeoutScheduler import com.github.mauricio.async.db.util.ChannelFutureTransformer.toFuture import com.github.mauricio.async.db.util._ -import java.util.concurrent.atomic.{AtomicLong,AtomicReference} -import scala.concurrent.{ExecutionContext, Promise, Future} -import io.netty.channel.{EventLoopGroup, ChannelHandlerContext} -import scala.util.Failure -import scala.Some -import scala.util.Success +import io.netty.channel.{ChannelHandlerContext, EventLoopGroup} + +import scala.concurrent.{ExecutionContext, Future, Promise} +import scala.util.{Failure, Success} object MySQLConnection { final val Counter = new AtomicLong() @@ -42,10 +43,11 @@ class MySQLConnection( configuration: Configuration, charsetMapper: CharsetMapper = CharsetMapper.Instance, group : EventLoopGroup = NettyUtils.DefaultEventLoopGroup, - executionContext : ExecutionContext = ExecutorServiceUtils.CachedExecutionContext + implicit val executionContext : ExecutionContext = ExecutorServiceUtils.CachedExecutionContext ) extends MySQLHandlerDelegate with Connection + with TimeoutScheduler { import MySQLConnection.log @@ -53,10 +55,8 @@ class MySQLConnection( // validate that this charset is supported charsetMapper.toInt(configuration.charset) - private 
final val connectionCount = MySQLConnection.Counter.incrementAndGet() private final val connectionId = s"[mysql-connection-$connectionCount]" - private implicit val internalPool = executionContext private final val connectionHandler = new MySQLConnectionHandler( configuration, @@ -78,6 +78,8 @@ class MySQLConnection( def lastException : Throwable = this._lastException def count : Long = this.connectionCount + override def eventLoopGroup : EventLoopGroup = group + def connect: Future[Connection] = { this.connectionHandler.connect.onFailure { case e => this.connectionPromise.tryFailure(e) @@ -185,18 +187,17 @@ class MySQLConnection( def sendQuery(query: String): Future[QueryResult] = { this.validateIsReadyForQuery() - val promise = Promise[QueryResult] + val promise = Promise[QueryResult]() this.setQueryPromise(promise) this.connectionHandler.write(new QueryMessage(query)) + addTimeout(promise, configuration.queryTimeout) promise.future } private def failQueryPromise(t: Throwable) { - this.clearQueryPromise.foreach { _.tryFailure(t) } - } private def succeedQueryPromise(queryResult: QueryResult) { @@ -225,6 +226,7 @@ class MySQLConnection( } def disconnect: Future[Connection] = this.close + override def onTimeout = disconnect def isConnected: Boolean = this.connectionHandler.isConnected @@ -234,9 +236,10 @@ class MySQLConnection( if ( values.length != totalParameters ) { throw new InsufficientParametersException(totalParameters, values) } - val promise = Promise[QueryResult] + val promise = Promise[QueryResult]() this.setQueryPromise(promise) - this.connectionHandler.write(new PreparedStatementMessage(query, values)) + this.connectionHandler.sendPreparedStatement(query, values) + addTimeout(promise,configuration.queryTimeout) promise.future } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLQueryResult.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLQueryResult.scala index 7b9cfe57..e7619685 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLQueryResult.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLQueryResult.scala @@ -19,9 +19,9 @@ package com.github.mauricio.async.db.mysql import com.github.mauricio.async.db.{ResultSet, QueryResult} class MySQLQueryResult( - rowsAffected: Long, - message: String, - lastInsertId: Long, - statusFlags: Int, - warnings: Int, - rows: Option[ResultSet] = None) extends QueryResult(rowsAffected, message, rows) \ No newline at end of file + rowsAffected: Long, + message: String, + val lastInsertId: Long, + val statusFlags: Int, + val warnings: Int, + rows: Option[ResultSet] = None) extends QueryResult(rowsAffected, message, rows) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowDecoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowDecoder.scala index 0f59ca5e..22c6cee5 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowDecoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowDecoder.scala @@ -31,7 +31,7 @@ class BinaryRowDecoder { //import BinaryRowDecoder._ - def decode(buffer: ByteBuf, columns: Seq[ColumnDefinitionMessage]): IndexedSeq[Any] = { + def decode(buffer: ByteBuf, columns: Seq[ColumnDefinitionMessage]): Array[Any] = { //log.debug("columns are {} - {}", buffer.readableBytes(), columns) //log.debug( "decoding row\n{}", MySQLHelper.dumpAsHex(buffer)) @@ -79,7 +79,7 @@ class 
BinaryRowDecoder { throw new BufferNotFullyConsumedException(buffer) } - row + row.toArray } } \ No newline at end of file diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala index c904259e..aff0b36f 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala @@ -16,14 +16,13 @@ package com.github.mauricio.async.db.mysql.binary -import io.netty.buffer.{Unpooled, ByteBuf} +import java.nio.ByteBuffer import java.nio.charset.Charset + import com.github.mauricio.async.db.mysql.binary.encoder._ import com.github.mauricio.async.db.util._ +import io.netty.buffer.ByteBuf import org.joda.time._ -import scala.Some -import com.github.mauricio.async.db.mysql.column.ColumnTypes -import java.nio.ByteOrder object BinaryRowEncoder { final val log = Log.get[BinaryRowEncoder] @@ -31,8 +30,6 @@ object BinaryRowEncoder { class BinaryRowEncoder( charset : Charset ) { - import BinaryRowEncoder.log - private final val stringEncoder = new StringEncoder(charset) private final val encoders = Map[Class[_],BinaryEncoder]( classOf[String] -> this.stringEncoder, @@ -65,48 +62,7 @@ class BinaryRowEncoder( charset : Charset ) { classOf[java.lang.Boolean] -> BooleanEncoder ) - def encode( values : Seq[Any] ) : ByteBuf = { - - val nullBitsCount = (values.size + 7) / 8 - val nullBits = new Array[Byte](nullBitsCount) - val bitMapBuffer = ByteBufferUtils.mysqlBuffer(1 + nullBitsCount) - val parameterTypesBuffer = ByteBufferUtils.mysqlBuffer(values.size * 2) - val parameterValuesBuffer = ByteBufferUtils.mysqlBuffer() - - - var index = 0 - - while ( index < values.length ) { - val value = values(index) - if ( value == null || value == None ) { - nullBits(index / 8) = (nullBits(index / 8) | (1 << (index & 7))).asInstanceOf[Byte] - parameterTypesBuffer.writeShort(ColumnTypes.FIELD_TYPE_NULL) - } else { - value match { - case Some(v) => encode(parameterTypesBuffer, parameterValuesBuffer, v) - case _ => encode(parameterTypesBuffer, parameterValuesBuffer, value) - } - } - index += 1 - } - - bitMapBuffer.writeBytes(nullBits) - if ( values.size > 0 ) { - bitMapBuffer.writeByte(1) - } else { - bitMapBuffer.writeByte(0) - } - - Unpooled.wrappedBuffer( bitMapBuffer, parameterTypesBuffer, parameterValuesBuffer ) - } - - private def encode(parameterTypesBuffer: ByteBuf, parameterValuesBuffer: ByteBuf, value: Any): Unit = { - val encoder = encoderFor(value) - parameterTypesBuffer.writeShort(encoder.encodesTo) - encoder.encode(value, parameterValuesBuffer) - } - - private def encoderFor( v : Any ) : BinaryEncoder = { + def encoderFor( v : Any ) : BinaryEncoder = { this.encoders.get(v.getClass) match { case Some(encoder) => encoder @@ -128,6 +84,8 @@ class BinaryRowEncoder( charset : Charset ) { case v : java.sql.Time => SQLTimeEncoder case v : scala.concurrent.duration.Duration => DurationEncoder case v : java.util.Date => JavaDateEncoder + case v : ByteBuffer => ByteBufferEncoder + case v : ByteBuf => ByteBufEncoder } } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/decoder/DateDecoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/decoder/DateDecoder.scala index 681bfbe0..2d66e792 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/decoder/DateDecoder.scala 
+++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/decoder/DateDecoder.scala @@ -20,5 +20,13 @@ import io.netty.buffer.ByteBuf import org.joda.time.LocalDate object DateDecoder extends BinaryDecoder { - override def decode(buffer: ByteBuf): LocalDate = TimestampDecoder.decode(buffer).toLocalDate + override def decode(buffer: ByteBuf): LocalDate = { + val result = TimestampDecoder.decode(buffer) + + if ( result != null ) { + result.toLocalDate + } else { + null + } + } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/decoder/TimestampDecoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/decoder/TimestampDecoder.scala index b6fc5a00..b7476a7a 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/decoder/TimestampDecoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/decoder/TimestampDecoder.scala @@ -24,9 +24,7 @@ object TimestampDecoder extends BinaryDecoder { val size = buffer.readUnsignedByte() size match { - case 0 => LocalDateTime.now() - .withDate(0, 0, 0) - .withTime(0, 0, 0, 0) + case 0 => null case 4 => new LocalDateTime() .withDate(buffer.readUnsignedShort(), buffer.readUnsignedByte(), buffer.readUnsignedByte()) .withTime(0, 0, 0, 0) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala new file mode 100644 index 00000000..62b62560 --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala @@ -0,0 +1,17 @@ +package com.github.mauricio.async.db.mysql.binary.encoder + +import com.github.mauricio.async.db.mysql.column.ColumnTypes +import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper +import io.netty.buffer.ByteBuf + +object ByteBufEncoder extends BinaryEncoder { + def encode(value: Any, buffer: ByteBuf) { + val bytes = value.asInstanceOf[ByteBuf] + + buffer.writeLength(bytes.readableBytes()) + buffer.writeBytes(bytes) + } + + def encodesTo: Int = ColumnTypes.FIELD_TYPE_BLOB + +} diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala new file mode 100644 index 00000000..329709ad --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala @@ -0,0 +1,19 @@ +package com.github.mauricio.async.db.mysql.binary.encoder + +import java.nio.ByteBuffer + +import com.github.mauricio.async.db.mysql.column.ColumnTypes +import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper +import io.netty.buffer.ByteBuf + +object ByteBufferEncoder extends BinaryEncoder { + def encode(value: Any, buffer: ByteBuf) { + val bytes = value.asInstanceOf[ByteBuffer] + + buffer.writeLength(bytes.remaining()) + buffer.writeBytes(bytes) + } + + def encodesTo: Int = ColumnTypes.FIELD_TYPE_BLOB + +} diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/LittleEndianByteBufAllocator.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/LittleEndianByteBufAllocator.scala index 40b51f24..0fdc790a 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/LittleEndianByteBufAllocator.scala +++ 
b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/LittleEndianByteBufAllocator.scala @@ -66,6 +66,8 @@ class LittleEndianByteBufAllocator extends ByteBufAllocator { def compositeDirectBuffer(maxNumComponents: Int): CompositeByteBuf = allocator.compositeDirectBuffer(maxNumComponents) + def calculateNewCapacity(minNewCapacity: Int, maxCapacity: Int): Int = allocator.calculateNewCapacity(minNewCapacity, maxCapacity) + private def littleEndian(b: ByteBuf) = b.order(ByteOrder.LITTLE_ENDIAN) } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala index d70b3623..792aff77 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala @@ -16,7 +16,12 @@ package com.github.mauricio.async.db.mysql.codec +import java.net.InetSocketAddress +import java.nio.ByteBuffer +import java.util.concurrent.TimeUnit + import com.github.mauricio.async.db.Configuration +import com.github.mauricio.async.db.exceptions.DatabaseException import com.github.mauricio.async.db.general.MutableResultSet import com.github.mauricio.async.db.mysql.binary.BinaryRowDecoder import com.github.mauricio.async.db.mysql.message.client._ @@ -25,16 +30,15 @@ import com.github.mauricio.async.db.mysql.util.CharsetMapper import com.github.mauricio.async.db.util.ChannelFutureTransformer.toFuture import com.github.mauricio.async.db.util._ import io.netty.bootstrap.Bootstrap -import io.netty.buffer.ByteBufAllocator +import io.netty.buffer.{ByteBuf, ByteBufAllocator, Unpooled} import io.netty.channel._ import io.netty.channel.socket.nio.NioSocketChannel import io.netty.handler.codec.CodecException -import java.net.InetSocketAddress -import scala.Some + import scala.annotation.switch import scala.collection.mutable.{ArrayBuffer, HashMap} import scala.concurrent._ -import com.github.mauricio.async.db.exceptions.DatabaseException +import scala.concurrent.duration.Duration class MySQLConnectionHandler( configuration: Configuration, @@ -52,13 +56,14 @@ class MySQLConnectionHandler( private final val connectionPromise = Promise[MySQLConnectionHandler] private final val decoder = new MySQLFrameDecoder(configuration.charset, connectionId) private final val encoder = new MySQLOneToOneEncoder(configuration.charset, charsetMapper) + private final val sendLongDataEncoder = new SendLongDataEncoder() private final val currentParameters = new ArrayBuffer[ColumnDefinitionMessage]() private final val currentColumns = new ArrayBuffer[ColumnDefinitionMessage]() private final val parsedStatements = new HashMap[String,PreparedStatementHolder]() private final val binaryRowDecoder = new BinaryRowDecoder() private var currentPreparedStatementHolder : PreparedStatementHolder = null - private var currentPreparedStatement : PreparedStatementMessage = null + private var currentPreparedStatement : PreparedStatement = null private var currentQuery : MutableResultSet[ColumnDefinitionMessage] = null private var currentContext: ChannelHandlerContext = null @@ -70,6 +75,7 @@ class MySQLConnectionHandler( channel.pipeline.addLast( decoder, encoder, + sendLongDataEncoder, MySQLConnectionHandler.this) } @@ -185,25 +191,27 @@ class MySQLConnectionHandler( writeAndHandleError(message) } - def write( message : PreparedStatementMessage ) { + def 
sendPreparedStatement( query: String, values: Seq[Any] ): Future[ChannelFuture] = { + val preparedStatement = new PreparedStatement(query, values) this.currentColumns.clear() this.currentParameters.clear() - this.currentPreparedStatement = message + this.currentPreparedStatement = preparedStatement - this.parsedStatements.get(message.statement) match { + this.parsedStatements.get(preparedStatement.statement) match { case Some( item ) => { - this.executePreparedStatement(item.statementId, item.columns.size, message.values, item.parameters) + this.executePreparedStatement(item.statementId, item.columns.size, preparedStatement.values, item.parameters) } case None => { decoder.preparedStatementPrepareStarted() - writeAndHandleError( new PreparedStatementPrepareMessage(message.statement) ) + writeAndHandleError( new PreparedStatementPrepareMessage(preparedStatement.statement) ) } } } def write( message : HandshakeResponseMessage ) : ChannelFuture = { + decoder.hasDoneHandshake = true writeAndHandleError(message) } @@ -229,11 +237,60 @@ class MySQLConnectionHandler( } } - private def executePreparedStatement( statementId : Array[Byte], columnsCount : Int, values : Seq[Any], parameters : Seq[ColumnDefinitionMessage] ) { + private def executePreparedStatement( statementId : Array[Byte], columnsCount : Int, values : Seq[Any], parameters : Seq[ColumnDefinitionMessage] ): Future[ChannelFuture] = { decoder.preparedStatementExecuteStarted(columnsCount, parameters.size) this.currentColumns.clear() this.currentParameters.clear() - writeAndHandleError(new PreparedStatementExecuteMessage( statementId, values, parameters )) + + val (nonLongIndicesOpt, longValuesOpt) = values.zipWithIndex.map { + case (Some(value), index) if isLong(value) => (None, Some(index, value)) + case (value, index) if isLong(value) => (None, Some(index, value)) + case (_, index) => (Some(index), None) + }.unzip + val nonLongIndices: Seq[Int] = nonLongIndicesOpt.flatten + val longValues: Seq[(Int, Any)] = longValuesOpt.flatten + + if (longValues.nonEmpty) { + val (firstIndex, firstValue) = longValues.head + var channelFuture: Future[ChannelFuture] = sendLongParameter(statementId, firstIndex, firstValue) + longValues.tail foreach { case (index, value) => + channelFuture = channelFuture.flatMap { _ => + sendLongParameter(statementId, index, value) + } + } + channelFuture flatMap { _ => + writeAndHandleError(new PreparedStatementExecuteMessage(statementId, values, nonLongIndices.toSet, parameters)) + } + } else { + writeAndHandleError(new PreparedStatementExecuteMessage(statementId, values, nonLongIndices.toSet, parameters)) + } + } + + private def isLong(value: Any): Boolean = { + value match { + case v : Array[Byte] => v.length > SendLongDataEncoder.LONG_THRESHOLD + case v : ByteBuffer => v.remaining() > SendLongDataEncoder.LONG_THRESHOLD + case v : ByteBuf => v.readableBytes() > SendLongDataEncoder.LONG_THRESHOLD + + case _ => false + } + } + + private def sendLongParameter(statementId: Array[Byte], index: Int, longValue: Any): Future[ChannelFuture] = { + longValue match { + case v : Array[Byte] => + sendBuffer(Unpooled.wrappedBuffer(v), statementId, index) + + case v : ByteBuffer => + sendBuffer(Unpooled.wrappedBuffer(v), statementId, index) + + case v : ByteBuf => + sendBuffer(v, statementId, index) + } + } + + private def sendBuffer(buffer: ByteBuf, statementId: Array[Byte], paramId: Int): ChannelFuture = { + writeAndHandleError(new SendLongDataMessage(statementId, buffer, paramId)) } private def onPreparedStatementPrepareResponse( 
message : PreparedStatementPrepareResponse ) { @@ -264,17 +321,18 @@ class MySQLConnectionHandler( } private def writeAndHandleError( message : Any ) : ChannelFuture = { - if ( this.currentContext.channel().isActive ) { - val future = this.currentContext.writeAndFlush(message) + val res = this.currentContext.writeAndFlush(message) - future.onFailure { + res.onFailure { case e : Throwable => handleException(e) } - future + res } else { - throw new DatabaseException("This channel is not active and can't take messages") + val error = new DatabaseException("This channel is not active and can't take messages") + handleException(error) + this.currentContext.channel().newFailedFuture(error) } } @@ -296,4 +354,10 @@ class MySQLConnectionHandler( } } + def schedule(block: => Unit, duration: Duration): Unit = { + this.currentContext.channel().eventLoop().schedule(new Runnable { + override def run(): Unit = block + }, duration.toMillis, TimeUnit.MILLISECONDS) + } + } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLFrameDecoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLFrameDecoder.scala index 3d92929f..bd55f4fd 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLFrameDecoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLFrameDecoder.scala @@ -30,7 +30,7 @@ import java.nio.charset.Charset import java.util.concurrent.atomic.AtomicInteger -class MySQLFrameDecoder(charset: Charset, connectionId : String) extends ByteToMessageDecoder { +class MySQLFrameDecoder(charset: Charset, connectionId: String) extends ByteToMessageDecoder { private final val log = Log.getByName(s"[frame-decoder]${connectionId}") private final val messagesCount = new AtomicInteger() @@ -48,6 +48,7 @@ class MySQLFrameDecoder(charset: Charset, connectionId : String) extends ByteToM private[codec] var isPreparedStatementPrepare = false private[codec] var isPreparedStatementExecute = false private[codec] var isPreparedStatementExecuteRows = false + private[codec] var hasDoneHandshake = false private[codec] var totalParams = 0L private[codec] var processedParams = 0L @@ -77,121 +78,133 @@ class MySQLFrameDecoder(charset: Charset, connectionId : String) extends ByteToM val slice = buffer.readSlice(size) - if ( log.isTraceEnabled ) { + if (log.isTraceEnabled) { log.trace(s"Reading message type $messageType - " + - s"(count=$messagesCount,size=$size,isInQuery=$isInQuery,processingColumns=$processingColumns,processingParams=$processingParams,processedColumns=$processedColumns,processedParams=$processedParams)" + + s"(count=$messagesCount,hasDoneHandshake=$hasDoneHandshake,size=$size,isInQuery=$isInQuery,processingColumns=$processingColumns,processingParams=$processingParams,processedColumns=$processedColumns,processedParams=$processedParams)" + s"\n${BufferDumper.dumpAsHex(slice)}}") } slice.readByte() - val decoder = messageType match { - case ServerMessage.ServerProtocolVersion if !isInQuery => this.handshakeDecoder - case ServerMessage.Error => { - this.clear - this.errorDecoder - } - case ServerMessage.EOF => { - - if (this.processingParams && this.totalParams > 0) { - this.processingParams = false - if (this.totalColumns == 0) { - ParamAndColumnProcessingFinishedDecoder - } else { - ParamProcessingFinishedDecoder - } - } else { - if (this.processingColumns) { - this.processingColumns = false - ColumnProcessingFinishedDecoder - } else { - - if ( this.isInQuery ) { - this.clear - 
EOFMessageDecoder - } else { - this.authenticationSwitchDecoder - } - - } - } - - } - case ServerMessage.Ok => { - if (this.isPreparedStatementPrepare) { - this.preparedStatementPrepareDecoder - } else { - if (this.isPreparedStatementExecuteRows) { - null - } else { - this.clear - this.okDecoder - } + if (this.hasDoneHandshake) { + this.handleCommonFlow(messageType, slice, out) + } else { + val decoder = messageType match { + case ServerMessage.Error => { + this.clear + this.errorDecoder } + case _ => this.handshakeDecoder } - case _ => { + this.doDecoding(decoder, slice, out) + } + } else { + buffer.resetReaderIndex() + } - if (this.isInQuery) { - null - } else { - throw new ParserNotAvailableException(messageType) - } + } + } + private def handleCommonFlow(messageType: Byte, slice: ByteBuf, out: java.util.List[Object]) { + val decoder = messageType match { + case ServerMessage.Error => { + this.clear + this.errorDecoder + } + case ServerMessage.EOF => { + + if (this.processingParams && this.totalParams > 0) { + this.processingParams = false + if (this.totalColumns == 0) { + ParamAndColumnProcessingFinishedDecoder + } else { + ParamProcessingFinishedDecoder + } + } else { + if (this.processingColumns) { + this.processingColumns = false + ColumnProcessingFinishedDecoder + } else { + this.clear + EOFMessageDecoder } } - if (decoder == null) { - slice.readerIndex(slice.readerIndex() - 1) - val result = decodeQueryResult(slice) - - if (slice.readableBytes() != 0) { - throw new BufferNotFullyConsumedException(slice) - } - if (result != null) { - out.add(result) + } + case ServerMessage.Ok => { + if (this.isPreparedStatementPrepare) { + this.preparedStatementPrepareDecoder + } else { + if (this.isPreparedStatementExecuteRows) { + null + } else { + this.clear + this.okDecoder } + } + } + case _ => { + + if (this.isInQuery) { + null } else { - val result = decoder.decode(slice) + throw new ParserNotAvailableException(messageType) + } - result match { - case m: PreparedStatementPrepareResponse => { - this.hasReadColumnsCount = true - this.totalColumns = m.columnsCount - this.totalParams = m.paramsCount - } - case m: ParamAndColumnProcessingFinishedMessage => { - this.clear - } - case m: ColumnProcessingFinishedMessage if this.isPreparedStatementPrepare => { - this.clear - } - case m: ColumnProcessingFinishedMessage if this.isPreparedStatementExecute => { - this.isPreparedStatementExecuteRows = true - } - case _ => - } + } + } - if (slice.readableBytes() != 0) { - throw new BufferNotFullyConsumedException(slice) - } + doDecoding(decoder, slice, out) + } + + private def doDecoding(decoder: MessageDecoder, slice: ByteBuf, out: java.util.List[Object]) { + if (decoder == null) { + slice.readerIndex(slice.readerIndex() - 1) + val result = decodeQueryResult(slice) + + if (slice.readableBytes() != 0) { + throw new BufferNotFullyConsumedException(slice) + } + if (result != null) { + out.add(result) + } + } else { + val result = decoder.decode(slice) + + result match { + case m: PreparedStatementPrepareResponse => { + this.hasReadColumnsCount = true + this.totalColumns = m.columnsCount + this.totalParams = m.paramsCount + } + case m: ParamAndColumnProcessingFinishedMessage => { + this.clear + } + case m: ColumnProcessingFinishedMessage if this.isPreparedStatementPrepare => { + this.clear + } + case m: ColumnProcessingFinishedMessage if this.isPreparedStatementExecute => { + this.isPreparedStatementExecuteRows = true + } + case _ => + } - if (result != null) { - result match { - case m : 
PreparedStatementPrepareResponse => { - out.add(result) - if ( m.columnsCount == 0 && m.paramsCount == 0 ) { - this.clear - out.add(new ParamAndColumnProcessingFinishedMessage(new EOFMessage(0, 0)) ) - } - } - case _ => out.add(result) + if (slice.readableBytes() != 0) { + throw new BufferNotFullyConsumedException(slice) + } + + if (result != null) { + result match { + case m: PreparedStatementPrepareResponse => { + out.add(result) + if (m.columnsCount == 0 && m.paramsCount == 0) { + this.clear + out.add(new ParamAndColumnProcessingFinishedMessage(new EOFMessage(0, 0))) } } + case _ => out.add(result) } - } else { - buffer.resetReaderIndex() } - } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala index 074a8b6a..f666cbc8 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala @@ -32,7 +32,8 @@ object MySQLOneToOneEncoder { val log = Log.get[MySQLOneToOneEncoder] } -class MySQLOneToOneEncoder(charset: Charset, charsetMapper: CharsetMapper) extends MessageToMessageEncoder[Any] { +class MySQLOneToOneEncoder(charset: Charset, charsetMapper: CharsetMapper) + extends MessageToMessageEncoder[ClientMessage](classOf[ClientMessage]) { import MySQLOneToOneEncoder.log @@ -45,49 +46,43 @@ class MySQLOneToOneEncoder(charset: Charset, charsetMapper: CharsetMapper) exten private var sequence = 1 - def encode(ctx: ChannelHandlerContext, msg: Any, out: java.util.List[Object]): Unit = { - - msg match { - case message: ClientMessage => { - val encoder = (message.kind: @switch) match { - case ClientMessage.ClientProtocolVersion => this.handshakeResponseEncoder - case ClientMessage.Quit => { - sequence = 0 - QuitMessageEncoder - } - case ClientMessage.Query => { - sequence = 0 - this.queryEncoder - } - case ClientMessage.PreparedStatementExecute => { - sequence = 0 - this.executeEncoder - } - case ClientMessage.PreparedStatementPrepare => { - sequence = 0 - this.prepareEncoder - } - case ClientMessage.AuthSwitchResponse => { - sequence += 1 - this.authenticationSwitchEncoder - } - case _ => throw new EncoderNotAvailableException(message) - } - - val result = encoder.encode(message) + def encode(ctx: ChannelHandlerContext, message: ClientMessage, out: java.util.List[Object]): Unit = { + val encoder = (message.kind: @switch) match { + case ClientMessage.ClientProtocolVersion => this.handshakeResponseEncoder + case ClientMessage.Quit => { + sequence = 0 + QuitMessageEncoder + } + case ClientMessage.Query => { + sequence = 0 + this.queryEncoder + } + case ClientMessage.PreparedStatementExecute => { + sequence = 0 + this.executeEncoder + } + case ClientMessage.PreparedStatementPrepare => { + sequence = 0 + this.prepareEncoder + } + case ClientMessage.AuthSwitchResponse => { + sequence += 1 + this.authenticationSwitchEncoder + } + case _ => throw new EncoderNotAvailableException(message) + } - ByteBufferUtils.writePacketLength(result, sequence) + val result: ByteBuf = encoder.encode(message) - sequence += 1 + ByteBufferUtils.writePacketLength(result, sequence) - if ( log.isTraceEnabled ) { - log.trace(s"Writing message ${message.getClass.getName} - \n${BufferDumper.dumpAsHex(result)}") - } + sequence += 1 - out.add(result) - } + if ( log.isTraceEnabled ) { + log.trace(s"Writing message ${message.getClass.getName} 
- \n${BufferDumper.dumpAsHex(result)}") } + out.add(result) } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementMessage.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/PreparedStatement.scala similarity index 76% rename from mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementMessage.scala rename to mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/PreparedStatement.scala index 0e52dad6..08fb0d9f 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementMessage.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/PreparedStatement.scala @@ -14,7 +14,6 @@ * under the License. */ -package com.github.mauricio.async.db.mysql.message.client +package com.github.mauricio.async.db.mysql.codec -case class PreparedStatementMessage ( statement : String, values : Seq[Any]) - extends ClientMessage( ClientMessage.PreparedStatement ) \ No newline at end of file +case class PreparedStatement ( statement : String, values : Seq[Any]) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala new file mode 100644 index 00000000..ce51140f --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala @@ -0,0 +1,40 @@ +package com.github.mauricio.async.db.mysql.codec + +import com.github.mauricio.async.db.mysql.message.client.{ClientMessage, SendLongDataMessage} +import com.github.mauricio.async.db.util.{ByteBufferUtils, Log} +import io.netty.buffer.Unpooled +import io.netty.channel.ChannelHandlerContext +import io.netty.handler.codec.MessageToMessageEncoder + +object SendLongDataEncoder { + val log = Log.get[SendLongDataEncoder] + + val LONG_THRESHOLD = 1023 +} + +class SendLongDataEncoder + extends MessageToMessageEncoder[SendLongDataMessage](classOf[SendLongDataMessage]) { + + import com.github.mauricio.async.db.mysql.codec.SendLongDataEncoder.log + + def encode(ctx: ChannelHandlerContext, message: SendLongDataMessage, out: java.util.List[Object]): Unit = { + if ( log.isTraceEnabled ) { + log.trace(s"Writing message ${message.toString}") + } + + val sequence = 0 + + val headerBuffer = ByteBufferUtils.mysqlBuffer(3 + 1 + 1 + 4 + 2) + ByteBufferUtils.write3BytesInt(headerBuffer, 1 + 4 + 2 + message.value.readableBytes()) + headerBuffer.writeByte(sequence) + + headerBuffer.writeByte(ClientMessage.PreparedStatementSendLongData) + headerBuffer.writeBytes(message.statementId) + headerBuffer.writeShort(message.paramId) + + val result = Unpooled.wrappedBuffer(headerBuffer, message.value) + + out.add(result) + } + +} diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/decoder/HandshakeV10Decoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/decoder/HandshakeV10Decoder.scala index 81cf2900..05feca12 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/decoder/HandshakeV10Decoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/decoder/HandshakeV10Decoder.scala @@ -16,58 +16,94 @@ package com.github.mauricio.async.db.mysql.decoder -import io.netty.buffer.ByteBuf -import com.github.mauricio.async.db.mysql.message.server.{HandshakeMessage, ServerMessage} -import com.github.mauricio.async.db.util.{Log, ByteBufferUtils} 
import java.nio.charset.Charset +import com.github.mauricio.async.db.mysql.encoder.auth.AuthenticationMethod +import com.github.mauricio.async.db.mysql.message.server.{HandshakeMessage, ServerMessage} +import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper +import com.github.mauricio.async.db.util.Log +import io.netty.buffer.ByteBuf +import io.netty.util.CharsetUtil + object HandshakeV10Decoder { final val log = Log.get[HandshakeV10Decoder] final val SeedSize = 8 final val SeedComplementSize = 12 final val Padding = 10 + final val ASCII = CharsetUtil.US_ASCII } class HandshakeV10Decoder(charset: Charset) extends MessageDecoder { - import HandshakeV10Decoder._ + import com.github.mauricio.async.db.mysql.decoder.HandshakeV10Decoder._ + import com.github.mauricio.async.db.mysql.util.MySQLIO._ def decode(buffer: ByteBuf): ServerMessage = { - val serverVersion = ByteBufferUtils.readCString(buffer, charset) - val connectionId = buffer.readInt() + val serverVersion = buffer.readCString(ASCII) + val connectionId = buffer.readUnsignedInt() - var seed = new Array[Byte]( SeedSize + SeedComplementSize ) + var seed = new Array[Byte](SeedSize + SeedComplementSize) buffer.readBytes(seed, 0, SeedSize) - buffer.readByte() + buffer.readByte() // filler - var serverCapabilityFlags: Int = buffer.readShort() + // read capability flags (lower 2 bytes) + var serverCapabilityFlags = buffer.readUnsignedShort() + /* New protocol with 16 bytes to describe server characteristics */ + // read character set (1 byte) val characterSet = buffer.readByte() & 0xff - val statusFlags = buffer.readShort() + // read status flags (2 bytes) + val statusFlags = buffer.readUnsignedShort() - serverCapabilityFlags += 65536 * buffer.readShort().asInstanceOf[Int] + // read capability flags (upper 2 bytes) + serverCapabilityFlags |= buffer.readUnsignedShort() << 16 - val authPluginDataLength = buffer.readUnsignedByte() - var authenticationMethod: Option[String] = None + var authPluginDataLength = 0 + var authenticationMethod = AuthenticationMethod.Native - if (authPluginDataLength > 0) { - buffer.readerIndex(buffer.readerIndex() + Padding) - buffer.readBytes(seed, SeedSize, SeedComplementSize) + if ((serverCapabilityFlags & CLIENT_PLUGIN_AUTH) != 0) { + // read length of auth-plugin-data (1 byte) + authPluginDataLength = buffer.readByte() & 0xff + } else { + // read filler ([00]) buffer.readByte() - authenticationMethod = Some(ByteBufferUtils.readUntilEOF(buffer, charset)) } - new HandshakeMessage( + // next 10 bytes are reserved (all [00]) + buffer.readerIndex(buffer.readerIndex() + Padding) + + log.debug(s"Auth plugin data length was ${authPluginDataLength}") + + if ((serverCapabilityFlags & CLIENT_SECURE_CONNECTION) != 0) { + val complement = if ( authPluginDataLength > 0 ) { + authPluginDataLength - 1 - SeedSize + } else { + SeedComplementSize + } + + buffer.readBytes(seed, SeedSize, complement) + buffer.readByte() + } + + if ((serverCapabilityFlags & CLIENT_PLUGIN_AUTH) != 0) { + authenticationMethod = buffer.readUntilEOF(ASCII) + } + + val message = new HandshakeMessage( serverVersion, connectionId, seed, serverCapabilityFlags, - characterSet = Some(characterSet), - statusFlags = Some(statusFlags), + characterSet = characterSet, + statusFlags = statusFlags, authenticationMethod = authenticationMethod ) + + log.debug(s"handshake message was ${message}") + + message } } \ No newline at end of file diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/decoder/ResultSetRowDecoder.scala 
b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/decoder/ResultSetRowDecoder.scala index 96288bf3..1a2a9fbd 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/decoder/ResultSetRowDecoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/decoder/ResultSetRowDecoder.scala @@ -16,11 +16,11 @@ package com.github.mauricio.async.db.mysql.decoder -import io.netty.buffer.ByteBuf +import java.nio.charset.Charset + import com.github.mauricio.async.db.mysql.message.server.{ResultSetRowMessage, ServerMessage} import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper -import java.nio.charset.Charset -import java.nio.ByteOrder +import io.netty.buffer.ByteBuf object ResultSetRowDecoder { @@ -28,21 +28,20 @@ object ResultSetRowDecoder { } -class ResultSetRowDecoder( charset : Charset ) extends MessageDecoder { +class ResultSetRowDecoder(charset: Charset) extends MessageDecoder { - import ResultSetRowDecoder.NULL + import com.github.mauricio.async.db.mysql.decoder.ResultSetRowDecoder.NULL def decode(buffer: ByteBuf): ServerMessage = { val row = new ResultSetRowMessage() - while (buffer.isReadable() ) { - if ( buffer.getUnsignedByte(buffer.readerIndex()) == NULL ) { + while (buffer.isReadable()) { + if (buffer.getUnsignedByte(buffer.readerIndex()) == NULL) { buffer.readByte() row += null } else { val length = buffer.readBinaryLength.asInstanceOf[Int] row += buffer.readBytes(length) - } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/HandshakeResponseEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/HandshakeResponseEncoder.scala index b60bf33d..9865e55e 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/HandshakeResponseEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/HandshakeResponseEncoder.scala @@ -16,24 +16,17 @@ package com.github.mauricio.async.db.mysql.encoder -import io.netty.buffer.ByteBuf +import java.nio.charset.Charset + import com.github.mauricio.async.db.exceptions.UnsupportedAuthenticationMethodException -import com.github.mauricio.async.db.mysql.encoder.auth.{AuthenticationMethod, MySQLNativePasswordAuthentication} -import com.github.mauricio.async.db.mysql.message.client.{HandshakeResponseMessage, ClientMessage} +import com.github.mauricio.async.db.mysql.encoder.auth.AuthenticationMethod +import com.github.mauricio.async.db.mysql.message.client.{ClientMessage, HandshakeResponseMessage} import com.github.mauricio.async.db.mysql.util.CharsetMapper -import com.github.mauricio.async.db.util.{Log, ByteBufferUtils} -import java.nio.charset.Charset +import com.github.mauricio.async.db.util.{ByteBufferUtils, Log} +import io.netty.buffer.ByteBuf object HandshakeResponseEncoder { - final val CLIENT_PROTOCOL_41 = 0x0200 - final val CLIENT_SECURE_CONNECTION = 0x8000 - final val CLIENT_CONNECT_WITH_DB = 0x0008 - final val CLIENT_TRANSACTIONS = 0x2000 - final val CLIENT_MULTI_RESULTS = 0x200000 - final val CLIENT_LONG_FLAG = 0x0001 - final val CLIENT_PLUGIN_AUTH = 524288 - final val MAX_3_BYTES = 0x00ffffff final val PADDING: Array[Byte] = List.fill(23) { 0.toByte @@ -45,7 +38,8 @@ object HandshakeResponseEncoder { class HandshakeResponseEncoder(charset: Charset, charsetMapper: CharsetMapper) extends MessageEncoder { - import HandshakeResponseEncoder._ + import com.github.mauricio.async.db.mysql.encoder.HandshakeResponseEncoder._ + import 
com.github.mauricio.async.db.mysql.util.MySQLIO._ private val authenticationMethods = AuthenticationMethod.Availables @@ -75,10 +69,10 @@ class HandshakeResponseEncoder(charset: Charset, charsetMapper: CharsetMapper) e ByteBufferUtils.writeCString( m.username, buffer, charset ) if ( m.password.isDefined ) { - val method = m.authenticationMethod.get + val method = m.authenticationMethod val authenticator = this.authenticationMethods.getOrElse( method, { throw new UnsupportedAuthenticationMethodException(method) }) - val bytes = authenticator.generateAuthentication(charset, m.password, m.seed ) + val bytes = authenticator.generateAuthentication(charset, m.password, m.seed) buffer.writeByte(bytes.length) buffer.writeBytes(bytes) } else { @@ -89,14 +83,9 @@ class HandshakeResponseEncoder(charset: Charset, charsetMapper: CharsetMapper) e ByteBufferUtils.writeCString( m.database.get, buffer, charset ) } - if ( m.authenticationMethod.isDefined ) { - ByteBufferUtils.writeCString( m.authenticationMethod.get, buffer, charset ) - } else { - buffer.writeByte(0) - } + ByteBufferUtils.writeCString( m.authenticationMethod, buffer, charset ) buffer - } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala index e21b15f6..c52658c9 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala @@ -17,6 +17,7 @@ package com.github.mauricio.async.db.mysql.encoder import io.netty.buffer.{ByteBuf, Unpooled} +import com.github.mauricio.async.db.mysql.column.ColumnTypes import com.github.mauricio.async.db.mysql.binary.BinaryRowEncoder import com.github.mauricio.async.db.mysql.message.client.{PreparedStatementExecuteMessage, ClientMessage} import com.github.mauricio.async.db.util.ByteBufferUtils @@ -35,10 +36,49 @@ class PreparedStatementExecuteEncoder( rowEncoder : BinaryRowEncoder ) extends M if ( m.parameters.isEmpty ) { buffer } else { - val parametersBuffer = rowEncoder.encode(m.values) - Unpooled.wrappedBuffer(buffer, parametersBuffer) + Unpooled.wrappedBuffer(buffer, encodeValues(m.values, m.valuesToInclude)) } } + private[encoder] def encodeValues( values : Seq[Any], valuesToInclude: Set[Int] ) : ByteBuf = { + val nullBitsCount = (values.size + 7) / 8 + val nullBits = new Array[Byte](nullBitsCount) + val bitMapBuffer = ByteBufferUtils.mysqlBuffer(1 + nullBitsCount) + val parameterTypesBuffer = ByteBufferUtils.mysqlBuffer(values.size * 2) + val parameterValuesBuffer = ByteBufferUtils.mysqlBuffer() + + var index = 0 + + while ( index < values.length ) { + val value = values(index) + if ( value == null || value == None ) { + nullBits(index / 8) = (nullBits(index / 8) | (1 << (index & 7))).asInstanceOf[Byte] + parameterTypesBuffer.writeShort(ColumnTypes.FIELD_TYPE_NULL) + } else { + value match { + case Some(v) => encodeValue(parameterTypesBuffer, parameterValuesBuffer, v, valuesToInclude(index)) + case _ => encodeValue(parameterTypesBuffer, parameterValuesBuffer, value, valuesToInclude(index)) + } + } + index += 1 + } + + bitMapBuffer.writeBytes(nullBits) + if ( values.size > 0 ) { + bitMapBuffer.writeByte(1) + } else { + bitMapBuffer.writeByte(0) + } + + Unpooled.wrappedBuffer( bitMapBuffer, parameterTypesBuffer, parameterValuesBuffer ) + } + + private def 
encodeValue(parameterTypesBuffer: ByteBuf, parameterValuesBuffer: ByteBuf, value: Any, includeValue: Boolean) : Unit = { + val encoder = rowEncoder.encoderFor(value) + parameterTypesBuffer.writeShort(encoder.encodesTo) + if (includeValue) + encoder.encode(value, parameterValuesBuffer) + } + } \ No newline at end of file diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/auth/AuthenticationMethod.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/auth/AuthenticationMethod.scala index 7ca9829e..50cf1073 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/auth/AuthenticationMethod.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/auth/AuthenticationMethod.scala @@ -19,9 +19,13 @@ package com.github.mauricio.async.db.mysql.encoder.auth import java.nio.charset.Charset object AuthenticationMethod { + + final val Native = "mysql_native_password" + final val Old = "mysql_old_password" + final val Availables = Map( - "mysql_native_password" -> MySQLNativePasswordAuthentication, - "mysql_old_password" -> OldPasswordAuthentication + Native -> MySQLNativePasswordAuthentication, + Old -> OldPasswordAuthentication ) } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/ClientMessage.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/ClientMessage.scala index 72d0be13..2a2a1b1f 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/ClientMessage.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/ClientMessage.scala @@ -20,13 +20,13 @@ import com.github.mauricio.async.db.KindedMessage object ClientMessage { - final val ClientProtocolVersion = 0x09 - final val Quit = 0x01 - final val Query = 0x03 - final val PreparedStatementPrepare = 0x16 - final val PreparedStatementExecute = 0x17 - final val PreparedStatement = 0x18 - final val AuthSwitchResponse = 0xfe + final val ClientProtocolVersion = 0x09 // COM_STATISTICS + final val Quit = 0x01 // COM_QUIT + final val Query = 0x03 // COM_QUERY + final val PreparedStatementPrepare = 0x16 // COM_STMT_PREPARE + final val PreparedStatementExecute = 0x17 // COM_STMT_EXECUTE + final val PreparedStatementSendLongData = 0x18 // COM_STMT_SEND_LONG_DATA + final val AuthSwitchResponse = 0xfe // AuthSwitchRequest } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/HandshakeResponseMessage.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/HandshakeResponseMessage.scala index 77dfaee4..50b7e839 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/HandshakeResponseMessage.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/HandshakeResponseMessage.scala @@ -22,7 +22,7 @@ case class HandshakeResponseMessage( username: String, charset: Charset, seed: Array[Byte], - authenticationMethod: Option[String] = None, + authenticationMethod: String, password: Option[String] = None, database: Option[String] = None ) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementExecuteMessage.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementExecuteMessage.scala index 805ef51e..f87ddede 100644 --- 
a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementExecuteMessage.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementExecuteMessage.scala @@ -21,5 +21,6 @@ import com.github.mauricio.async.db.mysql.message.server.ColumnDefinitionMessage case class PreparedStatementExecuteMessage ( statementId : Array[Byte], values : Seq[Any], + valuesToInclude : Set[Int], parameters : Seq[ColumnDefinitionMessage] ) extends ClientMessage( ClientMessage.PreparedStatementExecute ) \ No newline at end of file diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala new file mode 100644 index 00000000..db66db1f --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala @@ -0,0 +1,8 @@ +package com.github.mauricio.async.db.mysql.message.client + +import io.netty.buffer.ByteBuf + +case class SendLongDataMessage ( + statementId : Array[Byte], + value : ByteBuf, + paramId : Int ) \ No newline at end of file diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/server/HandshakeMessage.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/server/HandshakeMessage.scala index 1bbcf12c..dd16044a 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/server/HandshakeMessage.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/server/HandshakeMessage.scala @@ -18,11 +18,11 @@ package com.github.mauricio.async.db.mysql.message.server case class HandshakeMessage( serverVersion: String, - connectionId: Int, + connectionId: Long, seed: Array[Byte], serverCapabilities: Int, - characterSet: Option[Int] = None, - statusFlags: Option[Short] = None, - authenticationMethod: Option[String] = None + characterSet: Int, + statusFlags: Int, + authenticationMethod : String ) extends ServerMessage(ServerMessage.ServerProtocolVersion) \ No newline at end of file diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/pool/MySQLConnectionFactory.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/pool/MySQLConnectionFactory.scala index 94aadfd8..273e76af 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/pool/MySQLConnectionFactory.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/pool/MySQLConnectionFactory.scala @@ -21,9 +21,8 @@ import com.github.mauricio.async.db.pool.ObjectFactory import com.github.mauricio.async.db.mysql.MySQLConnection import scala.util.Try import scala.concurrent.Await -import scala.concurrent.duration._ import com.github.mauricio.async.db.util.Log -import com.github.mauricio.async.db.exceptions.{ConnectionStillRunningQueryException, ConnectionNotConnectedException} +import com.github.mauricio.async.db.exceptions.{ConnectionTimeoutedException, ConnectionStillRunningQueryException, ConnectionNotConnectedException} object MySQLConnectionFactory { final val log = Log.get[MySQLConnectionFactory] @@ -50,7 +49,7 @@ class MySQLConnectionFactory( configuration : Configuration ) extends ObjectFact */ def create: MySQLConnection = { val connection = new MySQLConnection(configuration) - Await.result(connection.connect, 5.seconds ) + Await.result(connection.connect, configuration.connectTimeout ) connection } @@ 
-90,7 +89,9 @@ class MySQLConnectionFactory( configuration : Configuration ) extends ObjectFact */ def validate(item: MySQLConnection): Try[MySQLConnection] = { Try{ - + if ( item.isTimeouted ) { + throw new ConnectionTimeoutedException(item) + } if ( !item.isConnected ) { throw new ConnectionNotConnectedException(item) } @@ -121,7 +122,7 @@ class MySQLConnectionFactory( configuration : Configuration ) extends ObjectFact */ override def test(item: MySQLConnection): Try[MySQLConnection] = { Try { - Await.result(item.sendQuery("SELECT 0"), 5.seconds) + Await.result(item.sendQuery("SELECT 0"), configuration.testTimeout) item } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/MySQLIO.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/MySQLIO.scala new file mode 100644 index 00000000..3b56ecc0 --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/MySQLIO.scala @@ -0,0 +1,29 @@ +/* + * Copyright 2014 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.github.mauricio.async.db.mysql.util + +object MySQLIO { + + final val CLIENT_PROTOCOL_41 = 0x0200 + final val CLIENT_CONNECT_WITH_DB = 0x0008 + final val CLIENT_TRANSACTIONS = 0x2000 + final val CLIENT_MULTI_RESULTS = 0x20000 + final val CLIENT_LONG_FLAG = 0x0001 + final val CLIENT_PLUGIN_AUTH = 0x00080000 + final val CLIENT_SECURE_CONNECTION = 0x00008000 + +} diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/URLParser.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/URLParser.scala new file mode 100644 index 00000000..ba9c0333 --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/URLParser.scala @@ -0,0 +1,39 @@ +/* + * Copyright 2016 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package com.github.mauricio.async.db.mysql.util + +import com.github.mauricio.async.db.util.AbstractURIParser +import com.github.mauricio.async.db.Configuration + +/** + * The MySQL URL parser. + */ +object URLParser extends AbstractURIParser { + + /** + * The default configuration for MySQL. 
+ */ + override val DEFAULT = Configuration( + username = "root", + host = "127.0.0.1", //Matched JDBC default + port = 3306, + password = None, + database = None + ) + + override protected val SCHEME = "^mysql$".r + +} diff --git a/mysql-async/src/test/resources/logback.xml b/mysql-async/src/test/resources/logback.xml index d8159fac..e0084899 100644 --- a/mysql-async/src/test/resources/logback.xml +++ b/mysql-async/src/test/resources/logback.xml @@ -13,7 +13,7 @@ - + diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala index 22912620..6c7c1313 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala @@ -2,6 +2,8 @@ package com.github.mauricio.async.db.mysql import org.specs2.mutable.Specification import java.util.UUID +import java.nio.ByteBuffer +import io.netty.buffer.Unpooled import io.netty.util.CharsetUtil import com.github.mauricio.async.db.RowData @@ -96,6 +98,52 @@ class BinaryColumnsSpec extends Specification with ConnectionHelper { } + "support BLOB type" in { + + val bytes = (1 to 10).map(_.toByte).toArray + + testBlob(bytes) + + } + + "support BLOB type with large values" in { + + val bytes = (1 to 2100).map(_.toByte).toArray + + testBlob(bytes) + + } + + } + + def testBlob(bytes: Array[Byte]) = { + val create = + """CREATE TEMPORARY TABLE POSTS ( + | id INT NOT NULL, + | blob_column BLOB, + | primary key (id)) + """.stripMargin + + val insert = "INSERT INTO POSTS (id,blob_column) VALUES (?,?)" + val select = "SELECT id,blob_column FROM POSTS ORDER BY id" + + withConnection { + connection => + executeQuery(connection, create) + executePreparedStatement(connection, insert, 1, Some(bytes)) + executePreparedStatement(connection, insert, 2, ByteBuffer.wrap(bytes)) + executePreparedStatement(connection, insert, 3, Unpooled.wrappedBuffer(bytes)) + + val Some(rows) = executeQuery(connection, select).rows + rows(0)("id") === 1 + rows(0)("blob_column") === bytes + rows(1)("id") === 2 + rows(1)("blob_column") === bytes + rows(2)("id") === 3 + rows(2)("blob_column") === bytes + rows.size === 3 + } + } def compareBytes( row : RowData, column : String, expected : String ) = diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BitSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BitSpec.scala new file mode 100644 index 00000000..ade3e6ce --- /dev/null +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BitSpec.scala @@ -0,0 +1,83 @@ +/* + * Copyright 2013 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.github.mauricio.async.db.mysql + +import org.specs2.mutable.Specification + +class BitSpec extends Specification with ConnectionHelper { + + "when processing bit columns" should { + + "result in binary data" in { + + withConnection { + connection => + val create = """CREATE TEMPORARY TABLE binary_test + ( + id INT NOT NULL AUTO_INCREMENT, + some_bit BIT(1) NOT NULL, + PRIMARY KEY (id) + )""" + + executeQuery(connection, create) + executePreparedStatement(connection, + "INSERT INTO binary_test (some_bit) VALUES (B'0'),(B'1')") + + val rows = executePreparedStatement(connection, "select * from binary_test").rows.get + + val bit0 = rows(0)("some_bit") + val bit1 = rows(1)("some_bit") + + bit0 === Array(0) + bit1 === Array(1) + } + + } + + "result in binary data in BIT(2) column" in { + + withConnection { + connection => + val create = """CREATE TEMPORARY TABLE binary_test + ( + id INT NOT NULL AUTO_INCREMENT, + some_bit BIT(2) NOT NULL, + CONSTRAINT bigserial_column_pkey PRIMARY KEY (id) + )""" + + executeQuery(connection, create) + executePreparedStatement(connection, + "INSERT INTO binary_test (some_bit) VALUES (B'00'),(B'01'),(B'10'),(B'11')") + + val rows = executePreparedStatement(connection, "select * from binary_test").rows.get + + val bit0 = rows(0)("some_bit") + val bit1 = rows(1)("some_bit") + val bit2 = rows(2)("some_bit") + val bit3 = rows(3)("some_bit") + + bit0 === Array(0) + bit1 === Array(1) + bit2 === Array(2) + bit3 === Array(3) + } + + } + + } + +} \ No newline at end of file diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ClientPluginAuthDisabledSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ClientPluginAuthDisabledSpec.scala new file mode 100644 index 00000000..c4c8b6c7 --- /dev/null +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ClientPluginAuthDisabledSpec.scala @@ -0,0 +1,61 @@ +package com.github.mauricio.async.db.mysql + +import com.github.mauricio.async.db.Configuration +import org.specs2.mutable.Specification + +/** + * + * To run this spec you have to use the Vagrant file provided with the base project + * and you have to start MySQL there. The expected MySQL version is 5.1.73. + * Make sure the bootstrap.sh script is run, if it isn't, manually run it yourself. 
+ * + */ + +class ClientPluginAuthDisabledSpec extends Specification with ConnectionHelper { + + "connection" should { + + "connect and query the database without a password" in { + + if (System.getenv("TRAVIS") == null) { + withConnection { + connection => + executeQuery(connection, "select version()") + success("did work") + } + } else { + skipped("not to be run on travis") + } + + } + + "connect and query the database with a password" in { + + if (System.getenv("TRAVIS") == null) { + withConfigurableConnection(vagrantConfiguration) { + connection => + executeQuery(connection, "select version()") + success("did work") + } + } else { + skipped("not to be run on travis") + } + + } + + } + + override def defaultConfiguration = new Configuration( + "root", + "localhost", + port = 3307 + ) + + def vagrantConfiguration = new Configuration( + "mysql_vagrant", + "localhost", + port = 3307, + password = Some("generic_password") + ) + +} diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ConnectionHelper.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ConnectionHelper.scala index ce323ce5..8ace95e7 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ConnectionHelper.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ConnectionHelper.scala @@ -115,9 +115,24 @@ trait ConnectionHelper { } - def withConnection[T]( fn : (MySQLConnection) => T ) : T = { + def withConfigurablePool[T]( configuration : Configuration )( fn : (ConnectionPool[MySQLConnection]) => T ) : T = { - val connection = new MySQLConnection(this.defaultConfiguration) + val factory = new MySQLConnectionFactory(configuration) + val pool = new ConnectionPool[MySQLConnection](factory, PoolConfiguration.Default) + + try { + fn(pool) + } finally { + awaitFuture( pool.close ) + } + + } + + def withConnection[T]( fn : (MySQLConnection) => T ) : T = + withConfigurableConnection(this.defaultConfiguration)(fn) + + def withConfigurableConnection[T]( configuration : Configuration )(fn : (MySQLConnection) => T) : T = { + val connection = new MySQLConnection(configuration) try { awaitFuture( connection.connect ) diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/MySQLConnectionSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/MySQLConnectionSpec.scala index aebf18dd..5e5500fa 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/MySQLConnectionSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/MySQLConnectionSpec.scala @@ -30,8 +30,8 @@ class MySQLConnectionSpec extends Specification { database = Some("mysql_async_tests") ) - val rootConfiguration = new Configuration( - "root", + val configurationWithoutPassword = new Configuration( + "mysql_async_nopw", "localhost", port = 3306, password = None, @@ -39,7 +39,7 @@ class MySQLConnectionSpec extends Specification { ) val configurationWithoutDatabase = new Configuration( - "root", + "mysql_async_nopw", "localhost", port = 3306, password = None, @@ -69,7 +69,7 @@ class MySQLConnectionSpec extends Specification { withNonConnectedConnection({ connection => awaitFuture(connection.connect) === connection - }) (rootConfiguration) + }) (configurationWithoutPassword) } "connect to a MySQL instance without a database" in { diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/OldPasswordSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/OldPasswordSpec.scala deleted file mode 100644 
index fb001e06..00000000 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/OldPasswordSpec.scala +++ /dev/null @@ -1,35 +0,0 @@ -package com.github.mauricio.async.db.mysql - -import org.specs2.mutable.Specification -import com.github.mauricio.async.db.Configuration -import com.github.mauricio.async.db.util.FutureUtils.awaitFuture -import com.github.mauricio.async.db.mysql.exceptions.MySQLException - -class OldPasswordSpec extends Specification with ConnectionHelper { - - "connection" should { - - "connect and query the database" in { - val connection = new MySQLConnection(defaultConfiguration) - try { - awaitFuture(connection.connect) - success - } catch { - case e : MySQLException => { - e.errorMessage.errorCode === 1275 - success - } - } - } - - } - - override def defaultConfiguration = new Configuration( - "mysql_async_old", - "localhost", - port = 3306, - password = Some("do_not_use_this"), - database = Some("mysql_async_tests") - ) - -} diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QuerySpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QuerySpec.scala index 2f505169..4e249387 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QuerySpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QuerySpec.scala @@ -93,7 +93,7 @@ class QuerySpec extends Specification with ConnectionHelper { timestamp.getSecondOfMinute === 7 - result("created_at_time") === Duration( 3, TimeUnit.HOURS ) + Duration( 14, TimeUnit.MINUTES ) + Duration( 7, TimeUnit.SECONDS ) + result("created_at_time") === Duration(3, TimeUnit.HOURS) + Duration(14, TimeUnit.MINUTES) + Duration(7, TimeUnit.SECONDS) val year = result("created_at_year").asInstanceOf[Short] @@ -150,21 +150,21 @@ class QuerySpec extends Specification with ConnectionHelper { | primary key (id) )""".stripMargin val createIdeas = """CREATE TEMPORARY TABLE ideas ( - | id INT NOT NULL AUTO_INCREMENT, - | some_idea VARCHAR(255) NOT NULL, - | primary key (id) )""".stripMargin + | id INT NOT NULL AUTO_INCREMENT, + | some_idea VARCHAR(255) NOT NULL, + | primary key (id) )""".stripMargin val select = "SELECT * FROM posts" val selectIdeas = "SELECT * FROM ideas" - val matcher : QueryResult => List[MatchResult[IndexedSeq[String]]] = { result => + val matcher: QueryResult => List[MatchResult[IndexedSeq[String]]] = { result => val columns = result.rows.get.columnNames - List(columns must contain(allOf("id", "some_bytes")).inOrder, columns must have size(2)) + List(columns must contain(allOf("id", "some_bytes")).inOrder, columns must have size (2)) } - val ideasMatcher : QueryResult => List[MatchResult[IndexedSeq[String]]] = { result => + val ideasMatcher: QueryResult => List[MatchResult[IndexedSeq[String]]] = { result => val columns = result.rows.get.columnNames - List(columns must contain(allOf("id", "some_idea")).inOrder, columns must have size(2)) + List(columns must contain(allOf("id", "some_idea")).inOrder, columns must have size (2)) } withConnection { @@ -181,6 +181,7 @@ class QuerySpec extends Specification with ConnectionHelper { matcher(executeQuery(connection, select)) ideasMatcher(executeQuery(connection, selectIdeas)) + success("completed") } } @@ -203,10 +204,10 @@ class QuerySpec extends Specification with ConnectionHelper { executeQuery(connection, insert) val rows = executeQuery(connection, select).rows.get - rows(0)("bit_column") === Array(0,0,-128) + rows(0)("bit_column") === Array(0, 0, -128) val preparedRows = 
executePreparedStatement(connection, select).rows.get - preparedRows(0)("bit_column") === Array(0,0,-128) + preparedRows(0)("bit_column") === Array(0, 0, -128) } } @@ -263,6 +264,30 @@ class QuerySpec extends Specification with ConnectionHelper { } + "select from a large text column" in { + + val create = "create temporary table bombs (id char(4), bomb mediumtext character set ascii)" + + val insert = """ insert bombs values + | ('bomb', repeat(' ',65536+16384+8192+4096+2048+1024+512+256+128)), + | ('good', repeat(' ',65536+16384+8192+4096+2048+1024+512+256+128-1))""".stripMargin + + + withConnection { + connection => + executeQuery(connection, create) + executeQuery(connection, insert) + val result = executeQuery(connection, "select bomb from bombs").rows.get + + result.size === 2 + + result(0)("bomb").asInstanceOf[String].length === 98176 + result(1)("bomb").asInstanceOf[String].length === 98175 + } + + } + + } } diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QueryTimeoutSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QueryTimeoutSpec.scala new file mode 100644 index 00000000..65827432 --- /dev/null +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QueryTimeoutSpec.scala @@ -0,0 +1,80 @@ +/* + * Copyright 2013 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.github.mauricio.async.db.mysql + +import java.util.concurrent.TimeoutException +import com.github.mauricio.async.db.Configuration +import org.specs2.execute.{AsResult, Success, ResultExecution} +import org.specs2.mutable.Specification +import scala.concurrent.Await +import scala.concurrent.duration._ + +class QueryTimeoutSpec extends Specification with ConnectionHelper { + implicit def unitAsResult: AsResult[Unit] = new AsResult[Unit] { + def asResult(r: =>Unit) = + ResultExecution.execute(r)(_ => Success()) + } + "Simple query with 1 nanosec timeout" in { + withConfigurablePool(shortTimeoutConfiguration) { + pool => { + val connection = Await.result(pool.take, Duration(10,SECONDS)) + connection.isTimeouted === false + connection.isConnected === true + val queryResultFuture = connection.sendQuery("select sleep(1)") + Await.result(queryResultFuture, Duration(10,SECONDS)) must throwA[TimeoutException]() + connection.isTimeouted === true + Await.ready(pool.giveBack(connection), Duration(10,SECONDS)) + pool.availables.count(_ == connection) === 0 // connection removed from pool + // we do not know when the connection will be closed. 
+ } + } + } + + "Simple query with 5 sec timeout" in { + withConfigurablePool(longTimeoutConfiguration) { + pool => { + val connection = Await.result(pool.take, Duration(10,SECONDS)) + connection.isTimeouted === false + connection.isConnected === true + val queryResultFuture = connection.sendQuery("select sleep(1)") + Await.result(queryResultFuture, Duration(10,SECONDS)).rows.get.size === 1 + connection.isTimeouted === false + connection.isConnected === true + Await.ready(pool.giveBack(connection), Duration(10,SECONDS)) + pool.availables.count(_ == connection) === 1 // connection returned to pool + } + } + } + + def shortTimeoutConfiguration = new Configuration( + "mysql_async", + "localhost", + port = 3306, + password = Some("root"), + database = Some("mysql_async_tests"), + queryTimeout = Some(Duration(1,NANOSECONDS)) + ) + + def longTimeoutConfiguration = new Configuration( + "mysql_async", + "localhost", + port = 3306, + password = Some("root"), + database = Some("mysql_async_tests"), + queryTimeout = Some(Duration(5,SECONDS)) + ) +} diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala new file mode 100644 index 00000000..d8ff2142 --- /dev/null +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala @@ -0,0 +1,133 @@ +/* + * Copyright 2013 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.github.mauricio.async.db.mysql + +import com.github.mauricio.async.db.ResultSet +import com.github.mauricio.async.db.util.FutureUtils._ +import org.specs2.mutable.Specification +import scala.concurrent.ExecutionContext.Implicits.global + +class StoredProceduresSpec extends Specification with ConnectionHelper { + + "connection" should { + + "be able to execute create stored procedure" in { + withConnection { + connection => + val future = for( + drop <- connection.sendQuery("DROP PROCEDURE IF exists helloWorld;"); + create <- connection.sendQuery( + """ + CREATE PROCEDURE helloWorld(OUT param1 VARCHAR(20)) + BEGIN + SELECT 'hello' INTO param1; + END + """ + ) + ) yield create + awaitFuture(future).statusMessage === "" + } + } + + "be able to call stored procedure" in { + withConnection { + connection => + val future = for( + drop <- connection.sendQuery("DROP PROCEDURE IF exists constTest;"); + create <- connection.sendQuery( + """ + CREATE PROCEDURE constTest(OUT param INT) + BEGIN + SELECT 125 INTO param; + END + """ + ); + call <- connection.sendQuery("CALL constTest(@arg)"); + arg <- connection.sendQuery("SELECT @arg") + ) yield arg + val result: Option[ResultSet] = awaitFuture(future).rows + result.isDefined === true + val rows = result.get + rows.size === 1 + rows(0)(rows.columnNames.head) === 125 + } + } + + "be able to call stored procedure with input parameter" in { + withConnection { + connection => + val future = for( + drop <- connection.sendQuery("DROP PROCEDURE IF exists addTest;"); + create <- connection.sendQuery( + """ + CREATE PROCEDURE addTest(IN a INT, IN b INT, OUT sum INT) + BEGIN + SELECT a+b INTO sum; + END + """ + ); + call <- connection.sendQuery("CALL addTest(132, 245, @sm)"); + res <- connection.sendQuery("SELECT @sm") + ) yield res + val result: Option[ResultSet] = awaitFuture(future).rows + result.isDefined === true + val rows = result.get + rows.size === 1 + rows(0)(rows.columnNames.head) === 377 + } + } + + "be able to remove stored procedure" in { + withConnection { + connection => + val createResult: Option[ResultSet] = awaitFuture( + for( + drop <- connection.sendQuery("DROP PROCEDURE IF exists remTest;"); + create <- connection.sendQuery( + """ + CREATE PROCEDURE remTest(OUT cnst INT) + BEGIN + SELECT 987 INTO cnst; + END + """ + ); + routine <- connection.sendQuery( + """ + SELECT routine_name FROM INFORMATION_SCHEMA.ROUTINES WHERE routine_name="remTest" + """ + ) + ) yield routine + ).rows + createResult.isDefined === true + createResult.get.size === 1 + createResult.get(0)("routine_name") === "remTest" + val removeResult: Option[ResultSet] = awaitFuture( + for( + drop <- connection.sendQuery("DROP PROCEDURE remTest;"); + routine <- connection.sendQuery( + """ + SELECT routine_name FROM INFORMATION_SCHEMA.ROUTINES WHERE routine_name="remTest" + """ + ) + ) yield routine + ).rows + removeResult.isDefined === true + removeResult.get.isEmpty === true + } + } + } +} diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/TransactionSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/TransactionSpec.scala index 9e209fff..83548c9b 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/TransactionSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/TransactionSpec.scala @@ -1,15 +1,29 @@ package com.github.mauricio.async.db.mysql +import java.util.UUID +import java.util.concurrent.TimeUnit + import org.specs2.mutable.Specification -import 
com.github.mauricio.async.db.util.ExecutorServiceUtils._ import com.github.mauricio.async.db.util.FutureUtils.awaitFuture import com.github.mauricio.async.db.mysql.exceptions.MySQLException import com.github.mauricio.async.db.Connection +import scala.concurrent.duration.Duration +import scala.concurrent.{Await, Future} +import scala.concurrent.ExecutionContext.Implicits.global +import scala.util.{Success, Failure} + +object TransactionSpec { + + val BrokenInsert = """INSERT INTO users (id, name) VALUES (1, 'Maurício Aragão')""" + val InsertUser = """INSERT INTO users (name) VALUES (?)""" + val TransactionInsert = "insert into transaction_test (id) values (?)" + +} + class TransactionSpec extends Specification with ConnectionHelper { - val brokenInsert = """INSERT INTO users (id, name) VALUES (1, 'Maurício Aragão')""" - val insertUser = """INSERT INTO users (name) VALUES (?)""" + import TransactionSpec._ "connection in transaction" should { @@ -43,12 +57,12 @@ class TransactionSpec extends Specification with ConnectionHelper { val future = connection.inTransaction { c => - c.sendQuery(this.insert).flatMap(r => c.sendQuery(brokenInsert)) + c.sendQuery(this.insert).flatMap(r => c.sendQuery(BrokenInsert)) } try { awaitFuture(future) - ko("Should not have arrived here") + failure("should not have arrived here") } catch { case e : MySQLException => { @@ -58,7 +72,7 @@ class TransactionSpec extends Specification with ConnectionHelper { val result = executePreparedStatement(connection, this.select).rows.get result.size === 1 result(0)("name") === "Maurício Aragão" - ok("success") + success("correct result") } } } @@ -78,19 +92,51 @@ class TransactionSpec extends Specification with ConnectionHelper { val future = pool.inTransaction { c => connection = c - c.sendQuery(this.brokenInsert) + c.sendQuery(BrokenInsert) } try { awaitFuture(future) - ko("this should not be reached") + failure("this should not be reached") } catch { case e : MySQLException => { pool.availables must have size(0) pool.availables must not contain(connection.asInstanceOf[MySQLConnection]) - ok("success") + success("success") + } + } + + } + + } + + "runs commands for a transaction in a single connection" in { + + val id = UUID.randomUUID().toString + + withPool { + pool => + val operations = pool.inTransaction { + connection => + connection.sendPreparedStatement(TransactionInsert, List(id)).flatMap { + result => + connection.sendPreparedStatement(TransactionInsert, List(id)).map { + failure => + List(result, failure) + } + } + } + + Await.ready(operations, Duration(5, TimeUnit.SECONDS)) + + operations.value.get match { + case Success(e) => failure("should not have executed") + case Failure(e) => { + e.asInstanceOf[MySQLException].errorMessage.errorCode === 1062 + executePreparedStatement(pool, "select * from transaction_test where id = ?", id).rows.get.size === 0 + success("ok") } } diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ZeroDatesSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ZeroDatesSpec.scala new file mode 100644 index 00000000..b5a06aec --- /dev/null +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ZeroDatesSpec.scala @@ -0,0 +1,58 @@ +package com.github.mauricio.async.db.mysql + +import org.specs2.mutable.Specification +import scala.concurrent.duration.Duration +import com.github.mauricio.async.db.RowData + +class ZeroDatesSpec extends Specification with ConnectionHelper { + + val createStatement = + """CREATE TEMPORARY TABLE dates ( + 
|`name` varchar (255) NOT NULL, + |`timestamp_column` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00', + |`date_column` date NOT NULL DEFAULT '0000-00-00', + |`datetime_column` datetime NOT NULL DEFAULT '0000-00-00 00:00:00', + |`time_column` time NOT NULL DEFAULT '00:00:00', + |`year_column` year NOT NULL DEFAULT '0000' + |) + |ENGINE=MyISAM DEFAULT CHARSET=utf8;""".stripMargin + + val insertStatement = "INSERT INTO dates (name) values ('Joe')" + val selectStatement = "SELECT * FROM dates" + + def matchValues( result : RowData ) = { + result("name") === "Joe" + result("timestamp_column") must beNull + result("datetime_column") must beNull + result("date_column") must beNull + result("year_column") === 0 + result("time_column") === Duration.Zero + } + + "client" should { + + "correctly parse the MySQL zeroed dates as NULL values in text protocol" in { + + withConnection { + connection => + executeQuery(connection, createStatement) + executeQuery(connection, insertStatement) + + matchValues(executeQuery(connection, selectStatement).rows.get(0)) + } + } + + "correctly parse the MySQL zeroed dates as NULL values in binary protocol" in { + + withConnection { + connection => + executeQuery(connection, createStatement) + executeQuery(connection, insertStatement) + + matchValues(executePreparedStatement(connection, selectStatement).rows.get(0)) + } + } + + } + +} diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/codec/MySQLFrameDecoderSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/codec/MySQLFrameDecoderSpec.scala index 5ff9a563..8d8790e5 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/codec/MySQLFrameDecoderSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/codec/MySQLFrameDecoderSpec.scala @@ -69,6 +69,7 @@ class MySQLFrameDecoderSpec extends Specification { "on a query process it should correctly send an OK" in { val decoder = new MySQLFrameDecoder(charset, "[mysql-connection]") + decoder.hasDoneHandshake = true val embedder = new EmbeddedChannel(decoder) embedder.config.setAllocator(LittleEndianByteBufAllocator.INSTANCE) @@ -89,6 +90,7 @@ class MySQLFrameDecoderSpec extends Specification { "on query process it should correctly send an error" in { val decoder = new MySQLFrameDecoder(charset, "[mysql-connection]") + decoder.hasDoneHandshake = true val embedder = new EmbeddedChannel(decoder) embedder.config.setAllocator(LittleEndianByteBufAllocator.INSTANCE) @@ -112,6 +114,7 @@ class MySQLFrameDecoderSpec extends Specification { "on query process it should correctly handle a result set" in { val decoder = new MySQLFrameDecoder(charset, "[mysql-connection]") + decoder.hasDoneHandshake = true val embedder = new EmbeddedChannel(decoder) embedder.config.setAllocator(LittleEndianByteBufAllocator.INSTANCE) @@ -165,7 +168,9 @@ class MySQLFrameDecoderSpec extends Specification { } def createPipeline(): EmbeddedChannel = { - val channel = new EmbeddedChannel(new MySQLFrameDecoder(charset, "[mysql-connection]")) + val decoder = new MySQLFrameDecoder(charset, "[mysql-connection]") + decoder.hasDoneHandshake = true + val channel = new EmbeddedChannel(decoder) channel.config.setAllocator(LittleEndianByteBufAllocator.INSTANCE) channel } diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoderSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala similarity index 62% rename from 
mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoderSpec.scala rename to mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala index 78bce249..427dde17 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoderSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala @@ -14,28 +14,29 @@ * under the License. */ -package com.github.mauricio.async.db.mysql.binary +package com.github.mauricio.async.db.mysql.encoder -import org.specs2.mutable.Specification +import com.github.mauricio.async.db.mysql.binary.BinaryRowEncoder import io.netty.util.CharsetUtil +import org.specs2.mutable.Specification -class BinaryRowEncoderSpec extends Specification { +class PreparedStatementExecuteEncoderSpec extends Specification { - val encoder = new BinaryRowEncoder(CharsetUtil.UTF_8) + val encoder = new PreparedStatementExecuteEncoder(new BinaryRowEncoder(CharsetUtil.UTF_8)) "binary row encoder" should { "encode Some(value) like value" in { - val actual = encoder.encode(List(Some(1l), Some("foo"))) - val expected = encoder.encode(List(1l, "foo")) + val actual = encoder.encodeValues(List(Some(1l), Some("foo")), Set(0, 1)) + val expected = encoder.encodeValues(List(1l, "foo"), Set(0, 1)) actual mustEqual expected } "encode None as null" in { - val actual = encoder.encode(List(None)) - val expected = encoder.encode(List(null)) + val actual = encoder.encodeValues(List(None), Set(0)) + val expected = encoder.encodeValues(List(null), Set(0)) actual mustEqual expected } diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/util/URLParserSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/util/URLParserSpec.scala new file mode 100644 index 00000000..b15ab779 --- /dev/null +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/util/URLParserSpec.scala @@ -0,0 +1,264 @@ +/* + * Copyright 2016 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.github.mauricio.async.db.mysql.util + +import java.nio.charset.Charset + +import com.github.mauricio.async.db.{Configuration, SSLConfiguration} +import com.github.mauricio.async.db.exceptions.UnableToParseURLException +import io.netty.buffer.{ByteBufAllocator, PooledByteBufAllocator} +import org.specs2.mutable.Specification + +import scala.concurrent.duration.Duration + +class URLParserSpec extends Specification { + + "mysql URLParser" should { + import URLParser.{DEFAULT, parse, parseOrDie} + + + "have a reasonable default" in { + // This is a deliberate extra step, protecting the DEFAULT from frivolous changes. + // Any change to DEFAULT should require a change to this test. 
+ + DEFAULT === Configuration( + username = "root", + host = "127.0.0.1", //Matched JDBC default + port = 3306, + password = None, + database = None + ) + } + + + // Divided into sections + // =========== jdbc:mysql =========== + + "create a jdbc:mysql connection with the available fields" in { + val connectionUri = "jdbc:mysql://128.167.54.90:9987/my_database?user=john&password=doe" + + parse(connectionUri) === DEFAULT.copy( + username = "john", + password = Some("doe"), + database = Some("my_database"), + host = "128.167.54.90", + port = 9987 + ) + } + + "create a connection without port" in { + val connectionUri = "jdbc:mysql://128.167.54.90/my_database?user=john&password=doe" + + parse(connectionUri) === DEFAULT.copy( + username = "john", + password = Some("doe"), + database = Some("my_database"), + host = "128.167.54.90" + ) + } + + + "create a connection without username and password" in { + val connectionUri = "jdbc:mysql://128.167.54.90:9987/my_database" + + parse(connectionUri) === DEFAULT.copy( + database = Some("my_database"), + host = "128.167.54.90", + port = 9987 + ) + } + + "create a connection from a heroku like URL using 'mysql' protocol" in { + val connectionUri = "mysql://john:doe@128.167.54.90:9987/my_database" + + parse(connectionUri) === DEFAULT.copy( + username = "john", + password = Some("doe"), + database = Some("my_database"), + host = "128.167.54.90", + port = 9987 + ) + } + + "create a connection with the available fields and named server" in { + val connectionUri = "jdbc:mysql://localhost:9987/my_database?user=john&password=doe" + + parse(connectionUri) === DEFAULT.copy( + username = "john", + password = Some("doe"), + database = Some("my_database"), + host = "localhost", + port = 9987 + ) + } + + "create a connection from a heroku like URL with named server" in { + val connectionUri = "mysql://john:doe@psql.heroku.com:9987/my_database" + + val configuration = parse(connectionUri) + configuration.username === "john" + configuration.password === Some("doe") + configuration.database === Some("my_database") + configuration.host === "psql.heroku.com" + configuration.port === 9987 + } + + "create a connection with the available fields and ipv6" in { + val connectionUri = "jdbc:mysql://[::1]:9987/my_database?user=john&password=doe" + + val configuration = parse(connectionUri) + + configuration.username === "john" + configuration.password === Some("doe") + configuration.database === Some("my_database") + configuration.host === "::1" + configuration.port === 9987 + } + + "create a connection from a heroku like URL and with ipv6" in { + val connectionUri = "mysql://john:doe@[::1]:9987/my_database" + + val configuration = parse(connectionUri) + configuration.username === "john" + configuration.password === Some("doe") + configuration.database === Some("my_database") + configuration.host === "::1" + configuration.port === 9987 + } + + "create a connection with a missing hostname" in { + val connectionUri = "jdbc:mysql:/my_database?user=john&password=doe" + + parse(connectionUri) === DEFAULT.copy( + username = "john", + password = Some("doe"), + database = Some("my_database") + ) + } + + "create a connection with a missing database name" in { + val connectionUri = "jdbc:mysql://[::1]:9987/?user=john&password=doe" + + val configuration = parse(connectionUri) + + configuration.username === "john" + configuration.password === Some("doe") + configuration.database === None + configuration.host === "::1" + configuration.port === 9987 + } + + "create a connection with all default 
fields" in { + val connectionUri = "jdbc:mysql:" + + val configuration = parse(connectionUri) + + configuration.username === "root" + configuration.password === None + configuration.database === None + configuration.host === "127.0.0.1" + configuration.port === 3306 + } + + "create a connection with an empty (invalid) url" in { + val connectionUri = "" + + val configuration = parse(connectionUri) + + configuration.username === "root" + configuration.password === None + configuration.database === None + configuration.host === "127.0.0.1" + configuration.port === 3306 + } + + + "recognise a mysql:// uri" in { + parse("mysql://localhost:425/dbname") mustEqual DEFAULT.copy( + username = "root", + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "recognize a jdbc:mysql:// uri" in { + parse("jdbc:mysql://localhost:425/dbname") mustEqual DEFAULT.copy( + username = "root", + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "pull the username and password from URI credentials" in { + parse("jdbc:mysql://user:password@localhost:425/dbname") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "pull the username and password from query string" in { + parse("jdbc:mysql://localhost:425/dbname?user=user&password=password") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + // Included for consistency, so later changes aren't allowed to change behavior + "use the query string parameters to override URI credentials" in { + parse("jdbc:mysql://baduser:badpass@localhost:425/dbname?user=user&password=password") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "successfully default the port to the mysql port" in { + parse("jdbc:mysql://baduser:badpass@localhost/dbname?user=user&password=password") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 3306, + host = "localhost" + ) + } + + "reject malformed ip addresses" in { + val connectionUri = "mysql://john:doe@128.567.54.90:9987/my_database" + + val configuration = parse(connectionUri) + configuration.username === "root" + configuration.password === None + configuration.database === None + configuration.host === "127.0.0.1" + configuration.port === 3306 + + parseOrDie(connectionUri) must throwA[UnableToParseURLException] + } + + } + +} diff --git a/postgresql-async/README.md b/postgresql-async/README.md index dc4fde05..7702e907 100644 --- a/postgresql-async/README.md +++ b/postgresql-async/README.md @@ -1,4 +1,15 @@ -# postgresql-async - an async Netty based PostgreSQL driver written in Scala 2.10 + + +**Table of Contents** + +- [postgresql-async - an async Netty based PostgreSQL driver written in Scala 2.10 and 2.11](#postgresql-async---an-async-netty-based-postgresql-driver-written-in-scala-210) + - [What can it do now?](#what-can-it-do-now) + - [What is missing?](#what-is-missing) + - [Supported Scala/Java types and their destination types on PostgreSQL](#supported-scalajava-types-and-their-destination-types-on-postgresql) + + + +# postgresql-async - an async Netty based PostgreSQL driver written in Scala 2.10 and 2.11 The main goal of this project is to implement a performant and fully functional async PostgreSQL driver. 
This project has no interest in JDBC, it's supposed to be a clean room implementation for people interested in talking directly @@ -32,7 +43,7 @@ This driver contains Java code from the [JDBC PostgreSQL](http://jdbc.postgresql ## Supported Scala/Java types and their destination types on PostgreSQL -All types also support their array versions, but they are returned as `IndexedSeq` of the type and not +All types also support their array versions, but they are returned as `IndexedSeq` of the type and not pure `Array` types. PostgreSQL type | Scala/Java type @@ -53,7 +64,7 @@ date | LocalDate time | LocalTime bytea | Array[Byte] (PostgreSQL 9.0 and above only) -All other types are returned as String. +All other types are returned as String. Now from Scala/Java types to PostgreSQL types (when using prepared statements): @@ -70,6 +81,8 @@ BigInteger | numeric BigDecimal | numeric String | varchar Array[Byte] | bytea (PostgreSQL 9.0 and above only) +java.nio.ByteBuffer | bytea (PostgreSQL 9.0 and above only) +io.netty.buffer.ByteBuf | bytea (PostgreSQL 9.0 and above only) java.util.Date | timestamp_with_timezone java.sql.Timestamp | timestamp_with_timezone java.sql.Date | date @@ -79,6 +92,6 @@ LocalDateTime | timestamp DateTime | timestamp_with_timezone LocalTime | time -Array types are encoded with the kind of object they hold and not the array type itself. Java `Collection` and -Scala `Traversable` objects are also assumed to be arrays of the types they hold and will be sent to PostgreSQL +Array types are encoded with the kind of object they hold and not the array type itself. Java `Collection` and +Scala `Traversable` objects are also assumed to be arrays of the types they hold and will be sent to PostgreSQL like that. diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala index 70902513..470700c4 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala @@ -17,22 +17,26 @@ package com.github.mauricio.async.db.postgresql import com.github.mauricio.async.db.QueryResult -import com.github.mauricio.async.db.column.{ColumnEncoderRegistry, ColumnDecoderRegistry} -import com.github.mauricio.async.db.exceptions.{InsufficientParametersException, ConnectionStillRunningQueryException} +import com.github.mauricio.async.db.column.{ColumnDecoderRegistry, ColumnEncoderRegistry} +import com.github.mauricio.async.db.exceptions.{ConnectionStillRunningQueryException, InsufficientParametersException} import com.github.mauricio.async.db.general.MutableResultSet +import com.github.mauricio.async.db.pool.TimeoutScheduler import com.github.mauricio.async.db.postgresql.codec.{PostgreSQLConnectionDelegate, PostgreSQLConnectionHandler} import com.github.mauricio.async.db.postgresql.column.{PostgreSQLColumnDecoderRegistry, PostgreSQLColumnEncoderRegistry} import com.github.mauricio.async.db.postgresql.exceptions._ import com.github.mauricio.async.db.util._ import com.github.mauricio.async.db.{Configuration, Connection} -import java.util.concurrent.atomic.{AtomicLong,AtomicInteger,AtomicReference} +import java.util.concurrent.atomic.{AtomicInteger, AtomicLong, AtomicReference} + import messages.backend._ import messages.frontend._ -import scala.Some + import scala.concurrent._ import 
io.netty.channel.EventLoopGroup import java.util.concurrent.CopyOnWriteArrayList +import com.github.mauricio.async.db.postgresql.util.URLParser + object PostgreSQLConnection { final val Counter = new AtomicLong() final val ServerVersionKey = "server_version" @@ -41,14 +45,15 @@ object PostgreSQLConnection { class PostgreSQLConnection ( - configuration: Configuration = Configuration.Default, + configuration: Configuration = URLParser.DEFAULT, encoderRegistry: ColumnEncoderRegistry = PostgreSQLColumnEncoderRegistry.Instance, decoderRegistry: ColumnDecoderRegistry = PostgreSQLColumnDecoderRegistry.Instance, group : EventLoopGroup = NettyUtils.DefaultEventLoopGroup, - executionContext : ExecutionContext = ExecutorServiceUtils.CachedExecutionContext + implicit val executionContext : ExecutionContext = ExecutorServiceUtils.CachedExecutionContext ) extends PostgreSQLConnectionDelegate - with Connection { + with Connection + with TimeoutScheduler { import PostgreSQLConnection._ @@ -63,7 +68,6 @@ class PostgreSQLConnection private final val currentCount = Counter.incrementAndGet() private final val preparedStatementsCounter = new AtomicInteger() - private final implicit val internalExecutionContext = executionContext private val parameterStatus = new scala.collection.mutable.HashMap[String, String]() private val parsedStatements = new scala.collection.mutable.HashMap[String, PreparedStatementHolder]() @@ -80,6 +84,7 @@ class PostgreSQLConnection private var queryResult: Option[QueryResult] = None + override def eventLoopGroup : EventLoopGroup = group def isReadyForQuery: Boolean = this.queryPromise.isEmpty def connect: Future[Connection] = { @@ -91,6 +96,7 @@ class PostgreSQLConnection } override def disconnect: Future[Connection] = this.connectionHandler.disconnect.map( c => this ) + override def onTimeout = disconnect override def isConnected: Boolean = this.connectionHandler.isConnected @@ -103,7 +109,7 @@ class PostgreSQLConnection this.setQueryPromise(promise) write(new QueryMessage(query)) - + addTimeout(promise,configuration.queryTimeout) promise.future } @@ -130,7 +136,7 @@ class PostgreSQLConnection holder.prepared = true new PreparedStatementOpeningMessage(holder.statementId, holder.realQuery, values, this.encoderRegistry) }) - + addTimeout(promise,configuration.queryTimeout) promise.future } @@ -150,6 +156,7 @@ class PostgreSQLConnection this.disconnect } + this.currentPreparedStatement.map(p => this.parsedStatements.remove(p.query)) this.currentPreparedStatement = None this.failQueryPromise(e) } @@ -187,11 +194,16 @@ class PostgreSQLConnection var x = 0 while ( x < m.values.size ) { - items(x) = if ( m.values(x) == null ) { + val buf = m.values(x) + items(x) = if ( buf == null ) { null } else { - val columnType = this.currentQuery.get.columnTypes(x) - this.decoderRegistry.decode(columnType, m.values(x), configuration.charset) + try { + val columnType = this.currentQuery.get.columnTypes(x) + this.decoderRegistry.decode(columnType, buf, configuration.charset) + } finally { + buf.release() + } } x += 1 } @@ -302,6 +314,7 @@ class PostgreSQLConnection private def succeedQueryPromise(result: QueryResult) { this.queryResult = None + this.currentQuery = None this.clearQueryPromise.foreach { _.success(result) } diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PreparedStatementHolder.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PreparedStatementHolder.scala index 41f6ac40..f8b78bcf 100644 --- 
a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PreparedStatementHolder.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PreparedStatementHolder.scala @@ -18,7 +18,7 @@ package com.github.mauricio.async.db.postgresql import com.github.mauricio.async.db.postgresql.messages.backend.PostgreSQLColumnData -class PreparedStatementHolder( query : String, val statementId : Int ) { +class PreparedStatementHolder(val query : String, val statementId : Int ) { val (realQuery, paramsCount) = { val result = new StringBuilder(query.length+16) diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageDecoder.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageDecoder.scala index 8a3d9fa5..5f210f72 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageDecoder.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageDecoder.scala @@ -17,7 +17,7 @@ package com.github.mauricio.async.db.postgresql.codec import com.github.mauricio.async.db.postgresql.exceptions.{MessageTooLongException} -import com.github.mauricio.async.db.postgresql.messages.backend.ServerMessage +import com.github.mauricio.async.db.postgresql.messages.backend.{ServerMessage, SSLResponseMessage} import com.github.mauricio.async.db.postgresql.parsers.{AuthenticationStartupParser, MessageParsersRegistry} import com.github.mauricio.async.db.util.{BufferDumper, Log} import java.nio.charset.Charset @@ -31,15 +31,21 @@ object MessageDecoder { val DefaultMaximumSize = 16777216 } -class MessageDecoder(charset: Charset, maximumMessageSize : Int = MessageDecoder.DefaultMaximumSize) extends ByteToMessageDecoder { +class MessageDecoder(sslEnabled: Boolean, charset: Charset, maximumMessageSize : Int = MessageDecoder.DefaultMaximumSize) extends ByteToMessageDecoder { import MessageDecoder.log private val parser = new MessageParsersRegistry(charset) + private var sslChecked = false + override def decode(ctx: ChannelHandlerContext, b: ByteBuf, out: java.util.List[Object]): Unit = { - if (b.readableBytes() >= 5) { + if (sslEnabled & !sslChecked) { + val code = b.readByte() + sslChecked = true + out.add(new SSLResponseMessage(code == 'S')) + } else if (b.readableBytes() >= 5) { b.markReaderIndex() diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageEncoder.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageEncoder.scala index 5cf5d480..30195a11 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageEncoder.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageEncoder.scala @@ -44,12 +44,13 @@ class MessageEncoder(charset: Charset, encoderRegistry: ColumnEncoderRegistry) e override def encode(ctx: ChannelHandlerContext, msg: AnyRef, out: java.util.List[Object]) = { val buffer = msg match { + case SSLRequestMessage => SSLMessageEncoder.encode() + case message: StartupMessage => startupEncoder.encode(message) case message: ClientMessage => { val encoder = (message.kind: @switch) match { case ServerMessage.Close => CloseMessageEncoder case ServerMessage.Execute => this.executeEncoder case ServerMessage.Parse => this.openEncoder - case ServerMessage.Startup => this.startupEncoder case ServerMessage.Query => this.queryEncoder case ServerMessage.PasswordMessage => 
this.credentialEncoder case _ => throw new EncoderNotAvailableException(message) diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/PostgreSQLConnectionHandler.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/PostgreSQLConnectionHandler.scala index b53821ee..733cc5d1 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/PostgreSQLConnectionHandler.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/PostgreSQLConnectionHandler.scala @@ -17,6 +17,7 @@ package com.github.mauricio.async.db.postgresql.codec import com.github.mauricio.async.db.Configuration +import com.github.mauricio.async.db.SSLConfiguration.Mode import com.github.mauricio.async.db.column.{ColumnDecoderRegistry, ColumnEncoderRegistry} import com.github.mauricio.async.db.postgresql.exceptions._ import com.github.mauricio.async.db.postgresql.messages.backend._ @@ -38,6 +39,12 @@ import com.github.mauricio.async.db.postgresql.messages.backend.RowDescriptionMe import com.github.mauricio.async.db.postgresql.messages.backend.ParameterStatusMessage import io.netty.channel.socket.nio.NioSocketChannel import io.netty.handler.codec.CodecException +import io.netty.handler.ssl.{SslContextBuilder, SslHandler} +import io.netty.handler.ssl.util.InsecureTrustManagerFactory +import io.netty.util.concurrent.FutureListener +import javax.net.ssl.{SSLParameters, TrustManagerFactory} +import java.security.KeyStore +import java.io.FileInputStream object PostgreSQLConnectionHandler { final val log = Log.get[PostgreSQLConnectionHandler] @@ -79,7 +86,7 @@ class PostgreSQLConnectionHandler override def initChannel(ch: channel.Channel): Unit = { ch.pipeline.addLast( - new MessageDecoder(configuration.charset, configuration.maximumMessageSize), + new MessageDecoder(configuration.ssl.mode != Mode.Disable, configuration.charset, configuration.maximumMessageSize), new MessageEncoder(configuration.charset, encoderRegistry), PostgreSQLConnectionHandler.this) } @@ -120,13 +127,61 @@ class PostgreSQLConnectionHandler } override def channelActive(ctx: ChannelHandlerContext): Unit = { - ctx.writeAndFlush(new StartupMessage(this.properties)) + if (configuration.ssl.mode == Mode.Disable) + ctx.writeAndFlush(new StartupMessage(this.properties)) + else + ctx.writeAndFlush(SSLRequestMessage) } override def channelRead0(ctx: ChannelHandlerContext, msg: Object): Unit = { msg match { + case SSLResponseMessage(supported) => + if (supported) { + val ctxBuilder = SslContextBuilder.forClient() + if (configuration.ssl.mode >= Mode.VerifyCA) { + configuration.ssl.rootCert.fold { + val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()) + val ks = KeyStore.getInstance(KeyStore.getDefaultType()) + val cacerts = new FileInputStream(System.getProperty("java.home") + "/lib/security/cacerts") + try { + ks.load(cacerts, "changeit".toCharArray) + } finally { + cacerts.close() + } + tmf.init(ks) + ctxBuilder.trustManager(tmf) + } { path => + ctxBuilder.trustManager(path) + } + } else { + ctxBuilder.trustManager(InsecureTrustManagerFactory.INSTANCE) + } + val sslContext = ctxBuilder.build() + val sslEngine = sslContext.newEngine(ctx.alloc(), configuration.host, configuration.port) + if (configuration.ssl.mode >= Mode.VerifyFull) { + val sslParams = sslEngine.getSSLParameters() + sslParams.setEndpointIdentificationAlgorithm("HTTPS") + sslEngine.setSSLParameters(sslParams) + } + val handler = new 
SslHandler(sslEngine) + ctx.pipeline().addFirst(handler) + handler.handshakeFuture.addListener(new FutureListener[channel.Channel]() { + def operationComplete(future: io.netty.util.concurrent.Future[channel.Channel]) { + if (future.isSuccess()) { + ctx.writeAndFlush(new StartupMessage(properties)) + } else { + connectionDelegate.onError(future.cause()) + } + } + }) + } else if (configuration.ssl.mode < Mode.Require) { + ctx.writeAndFlush(new StartupMessage(properties)) + } else { + connectionDelegate.onError(new IllegalArgumentException("SSL is not supported on server")) + } + case m: ServerMessage => { (m.kind : @switch) match { diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ArrayDecoder.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ArrayDecoder.scala index d69eeba4..b62e9629 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ArrayDecoder.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ArrayDecoder.scala @@ -19,7 +19,7 @@ package com.github.mauricio.async.db.postgresql.column import com.github.mauricio.async.db.column.ColumnDecoder import com.github.mauricio.async.db.postgresql.util.{ArrayStreamingParserDelegate, ArrayStreamingParser} import scala.collection.IndexedSeq -import scala.collection.mutable.{ArrayBuffer, Stack} +import scala.collection.mutable.ArrayBuffer import com.github.mauricio.async.db.general.ColumnData import io.netty.buffer.{Unpooled, ByteBuf} import java.nio.charset.Charset @@ -32,12 +32,13 @@ class ArrayDecoder(private val decoder: ColumnDecoder) extends ColumnDecoder { buffer.readBytes(bytes) val value = new String(bytes, charset) - val stack = new Stack[ArrayBuffer[Any]]() + var stack = List.empty[ArrayBuffer[Any]] var current: ArrayBuffer[Any] = null var result: IndexedSeq[Any] = null val delegate = new ArrayStreamingParserDelegate { override def arrayEnded { - result = stack.pop() + result = stack.head + stack = stack.tail } override def elementFound(element: String) { @@ -63,7 +64,7 @@ class ArrayDecoder(private val decoder: ColumnDecoder) extends ColumnDecoder { case None => {} } - stack.push(current) + stack ::= current } } diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ByteArrayEncoderDecoder.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ByteArrayEncoderDecoder.scala index bfaed46e..2ae1e7a4 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ByteArrayEncoderDecoder.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ByteArrayEncoderDecoder.scala @@ -18,7 +18,10 @@ package com.github.mauricio.async.db.postgresql.column import com.github.mauricio.async.db.column.ColumnEncoderDecoder import com.github.mauricio.async.db.postgresql.exceptions.ByteArrayFormatNotSupportedException -import com.github.mauricio.async.db.util.{Log, HexCodec} +import com.github.mauricio.async.db.util.{ Log, HexCodec } +import java.nio.ByteBuffer + +import io.netty.buffer.ByteBuf object ByteArrayEncoderDecoder extends ColumnEncoderDecoder { @@ -31,13 +34,72 @@ object ByteArrayEncoderDecoder extends ColumnEncoderDecoder { if (value.startsWith(HexStart)) { HexCodec.decode(value, 2) } else { - throw new ByteArrayFormatNotSupportedException() + // Default encoding is 'escape' + + // Size the buffer to the length of the string, the data can't be bigger + val buffer 
= ByteBuffer.allocate(value.length) + + val ci = value.iterator + + while (ci.hasNext) { + ci.next match { + case '\\' ⇒ getCharOrDie(ci) match { + case '\\' ⇒ buffer.put('\\'.toByte) + case firstDigit ⇒ + val secondDigit = getCharOrDie(ci) + val thirdDigit = getCharOrDie(ci) + // Must always be in triplets + buffer.put( + Integer.decode( + new String(Array('0', firstDigit, secondDigit, thirdDigit))).toByte) + } + case c ⇒ buffer.put(c.toByte) + } + } + + buffer.flip + val finalArray = new Array[Byte](buffer.remaining()) + buffer.get(finalArray) + + finalArray } } + /** + * This is required since {@link Iterator#next} when {@linke Iterator#hasNext} is false is undefined. + * @param ci the iterator source of the data + * @return the next character + * @throws IllegalArgumentException if there is no next character + */ + private [this] def getCharOrDie(ci: Iterator[Char]): Char = { + if (ci.hasNext) { + ci.next() + } else { + throw new IllegalArgumentException("Expected escape sequence character, found nothing") + } + } + override def encode(value: Any): String = { - HexCodec.encode(value.asInstanceOf[Array[Byte]], HexStartChars) + val array = value match { + case byteArray: Array[Byte] => byteArray + + case byteBuffer: ByteBuffer if byteBuffer.hasArray => byteBuffer.array() + + case byteBuffer: ByteBuffer => + val arr = new Array[Byte](byteBuffer.remaining()) + byteBuffer.get(arr) + arr + + case byteBuf: ByteBuf if byteBuf.hasArray => byteBuf.array() + + case byteBuf: ByteBuf => + val arr = new Array[Byte](byteBuf.readableBytes()) + byteBuf.getBytes(0, arr) + arr + } + + HexCodec.encode(array, HexStartChars) } } diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ColumnTypes.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ColumnTypes.scala index 7f15b0f6..93fef482 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ColumnTypes.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ColumnTypes.scala @@ -63,9 +63,12 @@ object ColumnTypes { final val MoneyArray = 791 final val NameArray = 1003 + final val UUID = 2950 final val UUIDArray = 2951 final val XMLArray = 143 + final val Inet = 869 + final val InetArray = 1041 } /* diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnDecoderRegistry.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnDecoderRegistry.scala index 734c0902..5b4a47a7 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnDecoderRegistry.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnDecoderRegistry.scala @@ -45,6 +45,8 @@ class PostgreSQLColumnDecoderRegistry( charset : Charset = CharsetUtil.UTF_8 ) e private final val timeArrayDecoder = new ArrayDecoder(TimeEncoderDecoder.Instance) private final val timeWithTimestampArrayDecoder = new ArrayDecoder(TimeWithTimezoneEncoderDecoder) private final val intervalArrayDecoder = new ArrayDecoder(PostgreSQLIntervalEncoderDecoder) + private final val uuidArrayDecoder = new ArrayDecoder(UUIDEncoderDecoder) + private final val inetAddressArrayDecoder = new ArrayDecoder(InetAddressEncoderDecoder) override def decode(kind: ColumnData, value: ByteBuf, charset: Charset): Any = { decoderFor(kind.dataType).decode(kind, value, charset) @@ -108,10 +110,14 @@ class 
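A note on the escape-format fallback introduced above: when a bytea value does not start with the `\x` hex marker, the decoder treats `\\` as a literal backslash byte and any other backslash escape as an octal triplet. A minimal usage sketch (the input literal is illustrative, not taken from the test suite):

```scala
import com.github.mauricio.async.db.postgresql.column.ByteArrayEncoderDecoder

// 'a', 'b', 'c' pass through unchanged, \000 is the octal triplet for byte 0x00,
// and \\ collapses to a single literal backslash byte (0x5C).
val decoded = ByteArrayEncoderDecoder.decode("""abc\000\\""")
// decoded == Array[Byte](97, 98, 99, 0, 92)
```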
PostgreSQLColumnDecoderRegistry( charset : Charset = CharsetUtil.UTF_8 ) e case MoneyArray => this.stringArrayDecoder case NameArray => this.stringArrayDecoder - case UUIDArray => this.stringArrayDecoder + case UUID => UUIDEncoderDecoder + case UUIDArray => this.uuidArrayDecoder case XMLArray => this.stringArrayDecoder case ByteA => ByteArrayEncoderDecoder + case Inet => InetAddressEncoderDecoder + case InetArray => this.inetAddressArrayDecoder + case _ => StringEncoderDecoder } } diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala index b5f32735..c9f95f43 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala @@ -16,9 +16,12 @@ package com.github.mauricio.async.db.postgresql.column +import java.nio.ByteBuffer + import com.github.mauricio.async.db.column._ +import io.netty.buffer.ByteBuf import org.joda.time._ -import scala.Some + import scala.collection.JavaConversions._ object PostgreSQLColumnEncoderRegistry { @@ -28,27 +31,31 @@ object PostgreSQLColumnEncoderRegistry { class PostgreSQLColumnEncoderRegistry extends ColumnEncoderRegistry { private val classesSequence_ : List[(Class[_], (ColumnEncoder, Int))] = List( - classOf[Int] -> (IntegerEncoderDecoder -> ColumnTypes.Integer), - classOf[java.lang.Integer] -> (IntegerEncoderDecoder -> ColumnTypes.Integer), + classOf[Int] -> (IntegerEncoderDecoder -> ColumnTypes.Numeric), + classOf[java.lang.Integer] -> (IntegerEncoderDecoder -> ColumnTypes.Numeric), - classOf[java.lang.Short] -> (ShortEncoderDecoder -> ColumnTypes.Smallint), - classOf[Short] -> (ShortEncoderDecoder -> ColumnTypes.Smallint), + classOf[java.lang.Short] -> (ShortEncoderDecoder -> ColumnTypes.Numeric), + classOf[Short] -> (ShortEncoderDecoder -> ColumnTypes.Numeric), - classOf[Long] -> (LongEncoderDecoder -> ColumnTypes.Bigserial), - classOf[java.lang.Long] -> (LongEncoderDecoder -> ColumnTypes.Bigserial), + classOf[Long] -> (LongEncoderDecoder -> ColumnTypes.Numeric), + classOf[java.lang.Long] -> (LongEncoderDecoder -> ColumnTypes.Numeric), classOf[String] -> (StringEncoderDecoder -> ColumnTypes.Varchar), classOf[java.lang.String] -> (StringEncoderDecoder -> ColumnTypes.Varchar), - classOf[Float] -> (FloatEncoderDecoder -> ColumnTypes.Real), - classOf[java.lang.Float] -> (FloatEncoderDecoder -> ColumnTypes.Real), + classOf[Float] -> (FloatEncoderDecoder -> ColumnTypes.Numeric), + classOf[java.lang.Float] -> (FloatEncoderDecoder -> ColumnTypes.Numeric), - classOf[Double] -> (DoubleEncoderDecoder -> ColumnTypes.Double), - classOf[java.lang.Double] -> (DoubleEncoderDecoder -> ColumnTypes.Double), + classOf[Double] -> (DoubleEncoderDecoder -> ColumnTypes.Numeric), + classOf[java.lang.Double] -> (DoubleEncoderDecoder -> ColumnTypes.Numeric), classOf[BigDecimal] -> (BigDecimalEncoderDecoder -> ColumnTypes.Numeric), classOf[java.math.BigDecimal] -> (BigDecimalEncoderDecoder -> ColumnTypes.Numeric), + classOf[java.net.InetAddress] -> (InetAddressEncoderDecoder -> ColumnTypes.Inet), + + classOf[java.util.UUID] -> (UUIDEncoderDecoder -> ColumnTypes.UUID), + classOf[LocalDate] -> ( DateEncoderDecoder -> ColumnTypes.Date ), classOf[LocalDateTime] -> (TimestampEncoderDecoder.Instance -> 
ColumnTypes.Timestamp), classOf[DateTime] -> (TimestampWithTimezoneEncoderDecoder -> ColumnTypes.TimestampWithTimezone), @@ -64,7 +71,9 @@ class PostgreSQLColumnEncoderRegistry extends ColumnEncoderRegistry { classOf[java.sql.Timestamp] -> (TimestampWithTimezoneEncoderDecoder -> ColumnTypes.TimestampWithTimezone), classOf[java.util.Calendar] -> (TimestampWithTimezoneEncoderDecoder -> ColumnTypes.TimestampWithTimezone), classOf[java.util.GregorianCalendar] -> (TimestampWithTimezoneEncoderDecoder -> ColumnTypes.TimestampWithTimezone), - classOf[Array[Byte]] -> ( ByteArrayEncoderDecoder -> ColumnTypes.ByteA ) + classOf[Array[Byte]] -> ( ByteArrayEncoderDecoder -> ColumnTypes.ByteA ), + classOf[ByteBuffer] -> ( ByteArrayEncoderDecoder -> ColumnTypes.ByteA ), + classOf[ByteBuf] -> ( ByteArrayEncoderDecoder -> ColumnTypes.ByteA ) ) private final val classesSequence = (classOf[LocalTime] -> (TimeEncoderDecoder.Instance -> ColumnTypes.Time)) :: @@ -97,17 +106,12 @@ class PostgreSQLColumnEncoderRegistry extends ColumnEncoderRegistry { if (encoder.isDefined) { encoder.get._1.encode(value) } else { - - val view: Option[Traversable[Any]] = value match { - case i: java.lang.Iterable[_] => Some(i.toIterable) - case i: Traversable[_] => Some(i) - case i: Array[_] => Some(i.toIterable) - case _ => None - } - - view match { - case Some(collection) => encodeArray(collection) - case None => { + value match { + case i: java.lang.Iterable[_] => encodeArray(i.toIterable) + case i: Traversable[_] => encodeArray(i) + case i: Array[_] => encodeArray(i.toIterable) + case p: Product => encodeComposite(p) + case _ => { this.classesSequence.find(entry => entry._1.isAssignableFrom(value.getClass)) match { case Some(parent) => parent._2._1.encode(value) case None => value.toString @@ -119,30 +123,34 @@ class PostgreSQLColumnEncoderRegistry extends ColumnEncoderRegistry { } - private def encodeArray(collection: Traversable[_]): String = { - val builder = new StringBuilder() - - builder.append('{') - - val result = collection.map { + private def encodeComposite(p: Product): String = { + p.productIterator.map { item => - - if (item == null) { + if (item == null || item == None) { "NULL" } else { if (this.shouldQuote(item)) { - "\"" + this.encode(item).replaceAllLiterally("\"", """\"""") + "\"" + "\"" + this.encode(item).replaceAllLiterally("\\", """\\""").replaceAllLiterally("\"", """\"""") + "\"" } else { this.encode(item) } } + }.mkString("(", ",", ")") + } - }.mkString(",") - - builder.append(result) - builder.append('}') - - builder.toString() + private def encodeArray(collection: Traversable[_]): String = { + collection.map { + item => + if (item == null || item == None) { + "NULL" + } else { + if (this.shouldQuote(item)) { + "\"" + this.encode(item).replaceAllLiterally("\\", """\\""").replaceAllLiterally("\"", """\"""") + "\"" + } else { + this.encode(item) + } + } + }.mkString("{", ",", "}") } private def shouldQuote(value: Any): Boolean = { @@ -177,4 +185,5 @@ class PostgreSQLColumnEncoderRegistry extends ColumnEncoderRegistry { } } } + } diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/ExecutePreparedStatementEncoder.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/ExecutePreparedStatementEncoder.scala index 375b5043..f1c605c2 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/ExecutePreparedStatementEncoder.scala +++ 
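The mappings above now declare every integer and floating-point parameter as `ColumnTypes.Numeric` rather than a type-specific OID, so a prepared statement whose parameter types are fixed on first execution can later receive either whole or fractional values for the same `NUMERIC` column; the new `NumericSpec` further down exercises exactly this. A minimal sketch of that usage, assuming a `connection` and an `id` are already in scope and the table comes from that spec:

```scala
// Both binds are declared as NUMERIC, so the parameter type recorded when the
// statement is first prepared still matches on the later execution.
connection.sendPreparedStatement("UPDATE numeric_test SET numcol = ? WHERE id = ?", Seq(1234, id))
connection.sendPreparedStatement("UPDATE numeric_test SET numcol = ? WHERE id = ?", Seq(123.123, id))
```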
b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/ExecutePreparedStatementEncoder.scala @@ -16,11 +16,10 @@ package com.github.mauricio.async.db.postgresql.encoders +import java.nio.charset.Charset + import com.github.mauricio.async.db.column.ColumnEncoderRegistry -import com.github.mauricio.async.db.postgresql.messages.backend.ServerMessage import com.github.mauricio.async.db.postgresql.messages.frontend.{ClientMessage, PreparedStatementExecuteMessage} -import com.github.mauricio.async.db.util.ByteBufferUtils -import java.nio.charset.Charset import io.netty.buffer.ByteBuf class ExecutePreparedStatementEncoder( diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/PreparedStatementEncoderHelper.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/PreparedStatementEncoderHelper.scala index 2ab3df15..4f0716b9 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/PreparedStatementEncoderHelper.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/PreparedStatementEncoderHelper.scala @@ -33,13 +33,17 @@ trait PreparedStatementEncoderHelper { def writeExecutePortal( statementIdBytes: Array[Byte], - query : String, + query: String, values: Seq[Any], encoder: ColumnEncoderRegistry, charset: Charset, writeDescribe: Boolean = false ): ByteBuf = { + if (log.isDebugEnabled) { + log.debug(s"Preparing execute portal to statement ($query) - values (${values.mkString(", ")}) - ${charset}") + } + val bindBuffer = Unpooled.buffer(1024) bindBuffer.writeByte(ServerMessage.Bind) @@ -54,14 +58,14 @@ trait PreparedStatementEncoderHelper { bindBuffer.writeShort(values.length) - val decodedValues = if ( log.isDebugEnabled ) { + val decodedValues = if (log.isDebugEnabled) { new ArrayBuffer[String](values.size) } else { null } for (value <- values) { - if (value == null || value == None) { + if (isNull(value)) { bindBuffer.writeInt(-1) if (log.isDebugEnabled) { @@ -70,25 +74,30 @@ trait PreparedStatementEncoderHelper { } else { val encodedValue = encoder.encode(value) - if ( log.isDebugEnabled ) { + if (log.isDebugEnabled) { decodedValues += encodedValue } - val content = encodedValue.getBytes(charset) - bindBuffer.writeInt(content.length) - bindBuffer.writeBytes( content ) + if (isNull(encodedValue)) { + bindBuffer.writeInt(-1) + } else { + val content = encodedValue.getBytes(charset) + bindBuffer.writeInt(content.length) + bindBuffer.writeBytes(content) + } + } } if (log.isDebugEnabled) { - log.debug(s"Executing query - statement id (${statementIdBytes.mkString("-")}) - statement ($query) - encoded values (${decodedValues.mkString(", ")}) - original values (${values.mkString(", ")})") + log.debug(s"Executing portal - statement id (${statementIdBytes.mkString("-")}) - statement ($query) - encoded values (${decodedValues.mkString(", ")}) - original values (${values.mkString(", ")})") } bindBuffer.writeShort(0) ByteBufferUtils.writeLength(bindBuffer) - if ( writeDescribe ) { + if (writeDescribe) { val describeLength = 1 + 4 + 1 + statementIdBytes.length + 1 val describeBuffer = bindBuffer describeBuffer.writeByte(ServerMessage.Describe) @@ -122,4 +131,6 @@ trait PreparedStatementEncoderHelper { } + def isNull(value: Any): Boolean = value == null || value == None + } diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/SSLMessageEncoder.scala 
b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/SSLMessageEncoder.scala
new file mode 100644
index 00000000..aeec7435
--- /dev/null
+++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/SSLMessageEncoder.scala
@@ -0,0 +1,16 @@
+package com.github.mauricio.async.db.postgresql.encoders
+
+import io.netty.buffer.ByteBuf
+import io.netty.buffer.Unpooled
+
+object SSLMessageEncoder {
+
+  def encode(): ByteBuf = {
+    val buffer = Unpooled.buffer()
+    buffer.writeInt(8)
+    buffer.writeShort(1234)
+    buffer.writeShort(5679)
+    buffer
+  }
+
+}
diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/StartupMessageEncoder.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/StartupMessageEncoder.scala
index b8c97843..206fd2d3 100644
--- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/StartupMessageEncoder.scala
+++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/StartupMessageEncoder.scala
@@ -21,13 +21,11 @@
 import com.github.mauricio.async.db.util.ByteBufferUtils
 import java.nio.charset.Charset
 import io.netty.buffer.{Unpooled, ByteBuf}
-class StartupMessageEncoder(charset: Charset) extends Encoder {
+class StartupMessageEncoder(charset: Charset) {
   //private val log = Log.getByName("StartupMessageEncoder")
-  override def encode(message: ClientMessage): ByteBuf = {
-
-    val startup = message.asInstanceOf[StartupMessage]
+  def encode(startup: StartupMessage): ByteBuf = {
     val buffer = Unpooled.buffer()
     buffer.writeInt(0)
diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/SSLResponseMessage.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/SSLResponseMessage.scala
new file mode 100644
index 00000000..905ab688
--- /dev/null
+++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/SSLResponseMessage.scala
@@ -0,0 +1,3 @@
+package com.github.mauricio.async.db.postgresql.messages.backend
+
+case class SSLResponseMessage(supported: Boolean)
diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/ServerMessage.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/ServerMessage.scala
index c413ef4e..1fa5b9a2 100644
--- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/ServerMessage.scala
+++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/ServerMessage.scala
@@ -43,7 +43,6 @@ object ServerMessage {
   final val Query = 'Q'
   final val RowDescription = 'T'
   final val ReadyForQuery = 'Z'
-  final val Startup = '0'
   final val Sync = 'S'
 }
diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/InitialClientMessage.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/InitialClientMessage.scala
new file mode 100644
index 00000000..228c5e65
--- /dev/null
+++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/InitialClientMessage.scala
@@ -0,0 +1,3 @@
+package com.github.mauricio.async.db.postgresql.messages.frontend
+
+trait InitialClientMessage
diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/SSLRequestMessage.scala
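The `SSLMessageEncoder` above writes the protocol-level SSLRequest packet: a 4-byte length of 8 followed by the magic request code 80877103, emitted as the two 16-bit halves 1234 and 5679. A one-line check of that arithmetic:

```scala
// 1234 in the high 16 bits plus 5679 in the low 16 bits yields the
// SSLRequest code defined by the PostgreSQL frontend/backend protocol.
assert(((1234 << 16) | 5679) == 80877103)
```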
b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/SSLRequestMessage.scala new file mode 100644 index 00000000..c3bf84ff --- /dev/null +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/SSLRequestMessage.scala @@ -0,0 +1,5 @@ +package com.github.mauricio.async.db.postgresql.messages.frontend + +import com.github.mauricio.async.db.postgresql.messages.backend.ServerMessage + +object SSLRequestMessage extends InitialClientMessage diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/StartupMessage.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/StartupMessage.scala index e4bb34c4..bb53390f 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/StartupMessage.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/StartupMessage.scala @@ -16,6 +16,4 @@ package com.github.mauricio.async.db.postgresql.messages.frontend -import com.github.mauricio.async.db.postgresql.messages.backend.ServerMessage - -class StartupMessage(val parameters: List[(String, Any)]) extends ClientMessage(ServerMessage.Startup) \ No newline at end of file +class StartupMessage(val parameters: List[(String, Any)]) extends InitialClientMessage diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/pool/PostgreSQLConnectionFactory.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/pool/PostgreSQLConnectionFactory.scala index de06a671..ae3c5255 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/pool/PostgreSQLConnectionFactory.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/pool/PostgreSQLConnectionFactory.scala @@ -17,6 +17,7 @@ package com.github.mauricio.async.db.postgresql.pool import com.github.mauricio.async.db.Configuration +import com.github.mauricio.async.db.exceptions.ConnectionTimeoutedException import com.github.mauricio.async.db.pool.ObjectFactory import com.github.mauricio.async.db.postgresql.PostgreSQLConnection import com.github.mauricio.async.db.util.Log @@ -50,7 +51,7 @@ class PostgreSQLConnectionFactory( def create: PostgreSQLConnection = { val connection = new PostgreSQLConnection(configuration, group = group, executionContext = executionContext) - Await.result(connection.connect, 5.seconds) + Await.result(connection.connect, configuration.connectTimeout) connection } @@ -69,6 +70,9 @@ class PostgreSQLConnectionFactory( def validate( item : PostgreSQLConnection ) : Try[PostgreSQLConnection] = { Try { + if ( item.isTimeouted ) { + throw new ConnectionTimeoutedException(item) + } if ( !item.isConnected || item.hasRecentError ) { throw new ClosedChannelException() } @@ -87,7 +91,7 @@ class PostgreSQLConnectionFactory( override def test(item: PostgreSQLConnection): Try[PostgreSQLConnection] = { val result : Try[PostgreSQLConnection] = Try({ - Await.result( item.sendQuery("SELECT 0"), 5.seconds ) + Await.result( item.sendQuery("SELECT 0"), configuration.testTimeout ) item }) diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/ParserURL.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/ParserURL.scala deleted file mode 100644 index ce5fa180..00000000 --- 
a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/ParserURL.scala +++ /dev/null @@ -1,56 +0,0 @@ -/** - * - */ -package com.github.mauricio.async.db.postgresql.util - -import org.slf4j.LoggerFactory - -/** - * @author gciuloaica - * - */ -object ParserURL { - - private val logger = LoggerFactory.getLogger(ParserURL.getClass()) - - val PGPORT = "port" - val PGDBNAME = "database" - val PGHOST = "host" - val PGUSERNAME = "username" - val PGPASSWORD = "password" - - val DEFAULT_PORT = "5432" - - private val pgurl1 = """(jdbc:postgresql):(?://([^/:]*|\[.+\])(?::(\d+))?)?(?:/([^/?]*))?(?:\?user=(.*)&password=(.*))?""".r - private val pgurl2 = """(postgres|postgresql)://(.*):(.*)@(.*):(\d+)/(.*)""".r - - def parse(connectionURL: String): Map[String, String] = { - val properties: Map[String, String] = Map() - - connectionURL match { - case pgurl1(protocol, server, port, dbname, username, password) => { - var result = properties - if (server != null) result += (PGHOST -> unwrapIpv6address(server)) - if (dbname != null && dbname.nonEmpty) result += (PGDBNAME -> dbname) - if(port != null) result += (PGPORT -> port) - if(username != null) result = (result + (PGUSERNAME -> username) + (PGPASSWORD -> password)) - result - } - case pgurl2(protocol, username, password, server, port, dbname) => { - properties + (PGHOST -> unwrapIpv6address(server)) + (PGPORT -> port) + (PGDBNAME -> dbname) + (PGUSERNAME -> username) + (PGPASSWORD -> password) - } - case _ => { - logger.warn(s"Connection url '$connectionURL' could not be parsed.") - properties - } - } - - } - - private def unwrapIpv6address(server: String): String = { - if (server.startsWith("[")) { - server.substring(1, server.length() - 1) - } else server - } - -} diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/URLParser.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/URLParser.scala index f39f24ac..fcb9b3cf 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/URLParser.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/URLParser.scala @@ -1,48 +1,72 @@ -/* - * Copyright 2013 Maurício Linhares +/** * - * Maurício Linhares licenses this file to you under the Apache License, - * version 2.0 (the "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at: - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. */ - package com.github.mauricio.async.db.postgresql.util -import com.github.mauricio.async.db.Configuration +import java.net.URI import java.nio.charset.Charset -object URLParser { +import com.github.mauricio.async.db.{Configuration, SSLConfiguration} +import com.github.mauricio.async.db.util.AbstractURIParser - private val Username = "username" - private val Password = "password" +/** + * The PostgreSQL URL parser. 
+ */ +object URLParser extends AbstractURIParser { + import AbstractURIParser._ - import Configuration.Default + // Alias these for anyone still making use of them + @deprecated("Use com.github.mauricio.async.db.AbstractURIParser.PORT", since = "0.2.20") + val PGPORT = PORT - def parse(url: String, - charset: Charset = Default.charset - ): Configuration = { + @deprecated("Use com.github.mauricio.async.db.AbstractURIParser.DBNAME", since = "0.2.20") + val PGDBNAME = DBNAME - val properties = ParserURL.parse(url) + @deprecated("Use com.github.mauricio.async.db.AbstractURIParser.HOST", since = "0.2.20") + val PGHOST = HOST - val port = properties.get(ParserURL.PGPORT).getOrElse(ParserURL.DEFAULT_PORT).toInt + @deprecated("Use com.github.mauricio.async.db.AbstractURIParser.USERNAME", since = "0.2.20") + val PGUSERNAME = USERNAME - new Configuration( - username = properties.get(Username).getOrElse(Default.username), - password = properties.get(Password), - database = properties.get(ParserURL.PGDBNAME), - host = properties.getOrElse(ParserURL.PGHOST, Default.host), - port = port, - charset = charset - ) + @deprecated("Use com.github.mauricio.async.db.AbstractURIParser.PASSWORD", since = "0.2.20") + val PGPASSWORD = PASSWORD + + @deprecated("Use com.github.mauricio.async.db.postgresql.util.URLParser.DEFAULT.port", since = "0.2.20") + val DEFAULT_PORT = "5432" + /** + * The default configuration for PostgreSQL. + */ + override val DEFAULT = Configuration( + username = "postgres", + host = "localhost", + port = 5432, + password = None, + database = None, + ssl = SSLConfiguration() + ) + + override protected val SCHEME = "^postgres(?:ql)?$".r + + private val simplePGDB = "^postgresql:(\\w+)$".r + + override protected def handleJDBC(uri: URI): Map[String, String] = uri.getSchemeSpecificPart match { + case simplePGDB(db) => Map(DBNAME -> db) + case x => parse(new URI(x)) } + /** + * Assembles a configuration out of the provided property map. This is the generic form, subclasses may override to + * handle additional properties. + * + * @param properties the extracted properties from the URL. + * @param charset the charset passed in to parse or parseOrDie. + * @return + */ + override protected def assembleConfiguration(properties: Map[String, String], charset: Charset): Configuration = { + // Add SSL Configuration + super.assembleConfiguration(properties, charset).copy( + ssl = SSLConfiguration(properties) + ) + } } diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/package.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/package.scala new file mode 100644 index 00000000..5d321170 --- /dev/null +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/package.scala @@ -0,0 +1,29 @@ +/* + * Copyright 2016 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +package com.github.mauricio.async.db.postgresql + +/** + * Contains package level aliases and type renames. + */ +package object util { + + /** + * Alias to help compatibility. + */ + @deprecated("Use com.github.mauricio.async.db.postgresql.util.URLParser", since = "0.2.20") + val ParserURL = URLParser + +} diff --git a/postgresql-async/src/test/resources/logback.xml b/postgresql-async/src/test/resources/logback.xml index 30bf6dbe..3ddb1518 100644 --- a/postgresql-async/src/test/resources/logback.xml +++ b/postgresql-async/src/test/resources/logback.xml @@ -13,8 +13,9 @@ - + + \ No newline at end of file diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/ArrayTypesSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/ArrayTypesSpec.scala index 7396aeb3..5391588c 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/ArrayTypesSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/ArrayTypesSpec.scala @@ -16,31 +16,45 @@ package com.github.mauricio.async.db.postgresql -import com.github.mauricio.async.db.column.TimestampWithTimezoneEncoderDecoder +import com.github.mauricio.async.db.column.{TimestampWithTimezoneEncoderDecoder, InetAddressEncoderDecoder} import org.specs2.mutable.Specification +import java.net.InetAddress class ArrayTypesSpec extends Specification with DatabaseTestHelper { - - val simpleCreate = """create temp table type_test_table ( - bigserial_column bigserial not null, - smallint_column integer[] not null, - text_column text[] not null, - timestamp_column timestamp with time zone[] not null, - constraint bigserial_column_pkey primary key (bigserial_column) - )""" + // `uniq` allows sbt to run the tests concurrently as there is no CREATE TEMP TYPE + def simpleCreate(uniq: String) = s"""DROP TYPE IF EXISTS dir_$uniq; + CREATE TYPE direction_$uniq AS ENUM ('in','out'); + DROP TYPE IF EXISTS endpoint_$uniq; + CREATE TYPE endpoint_$uniq AS (ip inet, port integer); + create temp table type_test_table_$uniq ( + bigserial_column bigserial not null, + smallint_column integer[] not null, + text_column text[] not null, + inet_column inet[] not null, + direction_column direction_$uniq[] not null, + endpoint_column endpoint_$uniq[] not null, + timestamp_column timestamp with time zone[] not null, + constraint bigserial_column_pkey primary key (bigserial_column) + )""" + def simpleDrop(uniq: String) = s"""drop table if exists type_test_table_$uniq; + drop type if exists endpoint_$uniq; + drop type if exists direction_$uniq""" val insert = - """insert into type_test_table - (smallint_column, text_column, timestamp_column) + """insert into type_test_table_cptat + (smallint_column, text_column, inet_column, direction_column, endpoint_column, timestamp_column) values ( '{1,2,3,4}', - '{"some,\"comma,separated,text","another line of text",NULL}', + '{"some,\"comma,separated,text","another line of text","fake\,backslash","real\\,backslash\\",NULL}', + '{"127.0.0.1","2002:15::1"}', + '{"in","out"}', + '{"(\"127.0.0.1\",80)","(\"2002:15::1\",443)"}', '{"2013-04-06 01:15:10.528-03","2013-04-06 01:15:08.528-03"}' )""" - val insertPreparedStatement = """insert into type_test_table - (smallint_column, text_column, timestamp_column) - values (?,?,?)""" + val insertPreparedStatement = """insert into type_test_table_csaups + (smallint_column, text_column, inet_column, direction_column, endpoint_column, timestamp_column) + values (?,?,?,?,?,?)""" "connection" should { 
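The `insert` statement above spells out PostgreSQL array and composite literals by hand; the `PostgreSQLColumnEncoderRegistry` changes earlier in this diff produce the same shapes from plain Scala values, which is what lets the prepared-statement variant of this spec pass collections, tuples and case classes directly. A rough sketch of the rendering (the commented outputs are illustrative of the format, not captured from a run):

```scala
import com.github.mauricio.async.db.postgresql.column.PostgreSQLColumnEncoderRegistry

val registry = new PostgreSQLColumnEncoderRegistry()

registry.encode(List(1, 2, 3)) // "{1,2,3}"    collections become array literals
registry.encode((8080, 443))   // "(8080,443)" Products (tuples, case classes) become composite literals
```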
@@ -48,41 +62,62 @@ class ArrayTypesSpec extends Specification with DatabaseTestHelper { withHandler { handler => - executeDdl(handler, simpleCreate) - executeDdl(handler, insert, 1) - val result = executeQuery(handler, "select * from type_test_table").rows.get - result(0)("smallint_column") === List(1,2,3,4) - result(0)("text_column") === List("some,\"comma,separated,text", "another line of text", null ) - result(0)("timestamp_column") === List( - TimestampWithTimezoneEncoderDecoder.decode("2013-04-06 01:15:10.528-03"), - TimestampWithTimezoneEncoderDecoder.decode("2013-04-06 01:15:08.528-03") - ) + try { + executeDdl(handler, simpleCreate("cptat")) + executeDdl(handler, insert, 1) + val result = executeQuery(handler, "select * from type_test_table_cptat").rows.get + result(0)("smallint_column") === List(1,2,3,4) + result(0)("text_column") === List("some,\"comma,separated,text", "another line of text", "fake,backslash", "real\\,backslash\\", null ) + result(0)("timestamp_column") === List( + TimestampWithTimezoneEncoderDecoder.decode("2013-04-06 01:15:10.528-03"), + TimestampWithTimezoneEncoderDecoder.decode("2013-04-06 01:15:08.528-03") + ) + } finally { + executeDdl(handler, simpleDrop("cptat")) + } } } "correctly send arrays using prepared statements" in { + case class Endpoint(ip: InetAddress, port: Int) val timestamps = List( TimestampWithTimezoneEncoderDecoder.decode("2013-04-06 01:15:10.528-03"), TimestampWithTimezoneEncoderDecoder.decode("2013-04-06 01:15:08.528-03") ) + val inets = List( + InetAddressEncoderDecoder.decode("127.0.0.1"), + InetAddressEncoderDecoder.decode("2002:15::1") + ) + val directions = List("in", "out") + val endpoints = List( + Endpoint(InetAddress.getByName("127.0.0.1"), 80), // case class + (InetAddress.getByName("2002:15::1"), 443) // tuple + ) val numbers = List(1,2,3,4) - val texts = List("some,\"comma,separated,text", "another line of text", null ) + val texts = List("some,\"comma,separated,text", "another line of text", "fake,backslash", "real\\,backslash\\", null ) withHandler { handler => - executeDdl(handler, simpleCreate) - executePreparedStatement( - handler, - this.insertPreparedStatement, - Array( numbers, texts, timestamps ) ) - - val result = executeQuery(handler, "select * from type_test_table").rows.get - - result(0)("smallint_column") === numbers - result(0)("text_column") === texts - result(0)("timestamp_column") === timestamps + try { + executeDdl(handler, simpleCreate("csaups")) + executePreparedStatement( + handler, + this.insertPreparedStatement, + Array( numbers, texts, inets, directions, endpoints, timestamps ) ) + + val result = executeQuery(handler, "select * from type_test_table_csaups").rows.get + + result(0)("smallint_column") === numbers + result(0)("text_column") === texts + result(0)("inet_column") === inets + result(0)("direction_column") === "{in,out}" // user type decoding not supported + result(0)("endpoint_column") === """{"(127.0.0.1,80)","(2002:15::1,443)"}""" // user type decoding not supported + result(0)("timestamp_column") === timestamps + } finally { + executeDdl(handler, simpleDrop("csaups")) + } } } diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/BitSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/BitSpec.scala new file mode 100644 index 00000000..8c17f9af --- /dev/null +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/BitSpec.scala @@ -0,0 +1,83 @@ +/* + * Copyright 2013 Maurício Linhares + * + * Maurício 
Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.github.mauricio.async.db.postgresql + +import org.specs2.mutable.Specification + +class BitSpec extends Specification with DatabaseTestHelper { + + "when processing bit columns" should { + + "result in binary data" in { + + withHandler { + handler => + val create = """CREATE TEMP TABLE binary_test + ( + id bigserial NOT NULL, + some_bit BYTEA NOT NULL, + CONSTRAINT bigserial_column_pkey PRIMARY KEY (id) + )""" + + executeDdl(handler, create) + executePreparedStatement(handler, + "INSERT INTO binary_test (some_bit) VALUES (E'\\\\000'),(E'\\\\001')") + + val rows = executePreparedStatement(handler, "select * from binary_test").rows.get + + val bit0 = rows(0)("some_bit") + val bit1 = rows(1)("some_bit") + + bit0 === Array(0) + bit1 === Array(1) + } + + } + + "result in binary data in BIT(2) column" in { + + withHandler { + handler => + val create = """CREATE TEMP TABLE binary_test + ( + id bigserial NOT NULL, + some_bit BYTEA NOT NULL, + CONSTRAINT bigserial_column_pkey PRIMARY KEY (id) + )""" + + executeDdl(handler, create) + executePreparedStatement(handler, + "INSERT INTO binary_test (some_bit) VALUES (E'\\\\000'),(E'\\\\001'),(E'\\\\002'),(E'\\\\003')") + + val rows = executePreparedStatement(handler, "select * from binary_test").rows.get + + val bit0 = rows(0)("some_bit") + val bit1 = rows(1)("some_bit") + val bit2 = rows(2)("some_bit") + val bit3 = rows(3)("some_bit") + + bit0 === Array(0) + bit1 === Array(1) + bit2 === Array(2) + bit3 === Array(3) + } + + } + + } + +} \ No newline at end of file diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/DatabaseTestHelper.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/DatabaseTestHelper.scala index 40b35549..2659d372 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/DatabaseTestHelper.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/DatabaseTestHelper.scala @@ -18,10 +18,12 @@ package com.github.mauricio.async.db.postgresql import com.github.mauricio.async.db.util.Log import com.github.mauricio.async.db.{Connection, Configuration} +import java.io.File import java.util.concurrent.{TimeoutException, TimeUnit} -import scala.Some import scala.concurrent.duration._ import scala.concurrent.{Future, Await} +import com.github.mauricio.async.db.SSLConfiguration +import com.github.mauricio.async.db.SSLConfiguration.Mode object DatabaseTestHelper { val log = Log.get[DatabaseTestHelper] @@ -54,6 +56,16 @@ trait DatabaseTestHelper { withHandler(this.timeTestConfiguration, fn) } + def withSSLHandler[T](mode: SSLConfiguration.Mode.Value, host: String = "localhost", rootCert: Option[File] = Some(new File("script/server.crt")))(fn: (PostgreSQLConnection) => T): T = { + val config = new Configuration( + host = host, + port = databasePort, + username = "postgres", + database = databaseName, + ssl = SSLConfiguration(mode = mode, 
rootCert = rootCert)) + withHandler(config, fn) + } + def withHandler[T](configuration: Configuration, fn: (PostgreSQLConnection) => T): T = { val handler = new PostgreSQLConnection(configuration) diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/MessageDecoderSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/MessageDecoderSpec.scala index 14f0bed2..a033e3ee 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/MessageDecoderSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/MessageDecoderSpec.scala @@ -27,7 +27,7 @@ import java.util class MessageDecoderSpec extends Specification { - val decoder = new MessageDecoder(CharsetUtil.UTF_8) + val decoder = new MessageDecoder(false, CharsetUtil.UTF_8) "message decoder" should { diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/NumericSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/NumericSpec.scala new file mode 100644 index 00000000..ad38a64e --- /dev/null +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/NumericSpec.scala @@ -0,0 +1,57 @@ +package com.github.mauricio.async.db.postgresql + +import org.specs2.mutable.Specification + +class NumericSpec extends Specification with DatabaseTestHelper { + + "when processing numeric columns" should { + + "support first update of num column with floating" in { + + withHandler { + handler => + executeDdl(handler, "CREATE TEMP TABLE numeric_test (id BIGSERIAL, numcol NUMERIC)") + + val id = executePreparedStatement(handler, "INSERT INTO numeric_test DEFAULT VALUES RETURNING id").rows.get(0)("id") + executePreparedStatement(handler, "UPDATE numeric_test SET numcol = ? WHERE id = ?", Array[Any](123.123, id)) + executePreparedStatement(handler, "UPDATE numeric_test SET numcol = ? WHERE id = ?", Array[Any](1234, id)) + executePreparedStatement(handler, "UPDATE numeric_test SET numcol = ? WHERE id = ?", Array[Any](123.123, id)) + + id === 1 + } + + } + + "support first update of num column with integer" in { + + withHandler { + handler => + executeDdl(handler, "CREATE TEMP TABLE numeric_test (id BIGSERIAL, numcol NUMERIC)") + + val id = executePreparedStatement(handler, "INSERT INTO numeric_test DEFAULT VALUES RETURNING id").rows.get(0)("id") + executePreparedStatement(handler, "UPDATE numeric_test SET numcol = ? WHERE id = ?", Array[Any](1234, id)) + executePreparedStatement(handler, "UPDATE numeric_test SET numcol = ? 
WHERE id = ?", Array[Any](123.123, id)) + + id === 1 + } + + } + + "support using first update with queries instead of prepared statements" in { + + withHandler { + handler => + executeDdl(handler, "CREATE TEMP TABLE numeric_test (id BIGSERIAL, numcol NUMERIC)") + + val id = executeQuery(handler, "INSERT INTO numeric_test DEFAULT VALUES RETURNING id").rows.get(0)("id") + executeQuery(handler, s"UPDATE numeric_test SET numcol = 1234 WHERE id = $id") + executeQuery(handler, s"UPDATE numeric_test SET numcol = 123.123 WHERE id = $id") + + id === 1 + } + + } + + } + +} diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLColumnEncoderRegistrySpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLColumnEncoderRegistrySpec.scala index e30b7494..9e1b5e94 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLColumnEncoderRegistrySpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLColumnEncoderRegistrySpec.scala @@ -46,6 +46,16 @@ class PostgreSQLColumnEncoderRegistrySpec extends Specification { actual mustEqual expected } + "encodes Some(null) as null" in { + val actual = encoder.encode(Some(null)) + actual mustEqual null + } + + "encodes null as null" in { + val actual = encoder.encode(null) + actual mustEqual null + } + } } \ No newline at end of file diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala index 93244111..0e050477 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala @@ -14,20 +14,23 @@ * under the License. 
*/ -package com.github.mauricio.postgresql +package com.github.mauricio.async.db.postgresql -import com.github.mauricio.async.db.column.{TimestampEncoderDecoder, TimeEncoderDecoder, DateEncoderDecoder} +import java.nio.ByteBuffer + +import com.github.mauricio.async.db.column.{DateEncoderDecoder, TimeEncoderDecoder, TimestampEncoderDecoder} import com.github.mauricio.async.db.exceptions.UnsupportedAuthenticationMethodException -import com.github.mauricio.async.db.postgresql.exceptions.{QueryMustNotBeNullOrEmptyException, GenericDatabaseException} +import com.github.mauricio.async.db.postgresql.exceptions.{GenericDatabaseException, QueryMustNotBeNullOrEmptyException} import com.github.mauricio.async.db.postgresql.messages.backend.InformationMessage -import com.github.mauricio.async.db.postgresql.{PostgreSQLConnection, DatabaseTestHelper} import com.github.mauricio.async.db.util.Log -import com.github.mauricio.async.db.{Configuration, QueryResult, Connection} -import concurrent.{Future, Await} +import com.github.mauricio.async.db.{Configuration, Connection, QueryResult} +import io.netty.buffer.Unpooled +import org.joda.time.LocalDateTime import org.specs2.mutable.Specification -import scala.concurrent.ExecutionContext.Implicits.global + import scala.concurrent.duration._ -import org.joda.time.LocalDateTime +import scala.concurrent.{Await, Future} +import scala.concurrent.ExecutionContext.Implicits.global object PostgreSQLConnectionSpec { val log = Log.get[PostgreSQLConnectionSpec] @@ -152,7 +155,7 @@ class PostgreSQLConnectionSpec extends Specification with DatabaseTestHelper { row(10) === DateEncoderDecoder.decode("1984-08-06") row(11) === TimeEncoderDecoder.Instance.decode("22:13:45.888888") row(12) === true - row(13) must beAnInstanceOf[java.lang.Long] + row(13).asInstanceOf[AnyRef] must beAnInstanceOf[java.lang.Long] row(13).asInstanceOf[Long] must beGreaterThan(0L) @@ -282,16 +285,12 @@ class PostgreSQLConnectionSpec extends Specification with DatabaseTestHelper { try { withHandler(configuration, { handler => - executeQuery(handler, "SELECT 0") - throw new IllegalStateException("should not have come here") + val result = executeQuery(handler, "SELECT 0") + throw new IllegalStateException("should not have arrived") }) } catch { - case e: GenericDatabaseException => { + case e: GenericDatabaseException => e.errorMessage.fields(InformationMessage.Routine) === "auth_failed" - } - case e: Exception => { - throw new IllegalStateException("should not have come here") - } } } @@ -406,10 +405,14 @@ class PostgreSQLConnectionSpec extends Specification with DatabaseTestHelper { executeDdl(handler, create) log.debug("executed create") executePreparedStatement(handler, insert, Array( sampleArray )) + executePreparedStatement(handler, insert, Array( ByteBuffer.wrap(sampleArray) )) + executePreparedStatement(handler, insert, Array( Unpooled.copiedBuffer(sampleArray) )) log.debug("executed prepared statement") val rows = executeQuery(handler, select).rows.get rows(0)("content").asInstanceOf[Array[Byte]] === sampleArray + rows(1)("content").asInstanceOf[Array[Byte]] === sampleArray + rows(2)("content").asInstanceOf[Array[Byte]] === sampleArray } } @@ -428,6 +431,20 @@ class PostgreSQLConnectionSpec extends Specification with DatabaseTestHelper { } + "insert without return after select" in { + + withHandler { + handler => + executeDdl(handler, this.preparedStatementCreate) + executeDdl(handler, this.preparedStatementInsert, 1) + executeDdl(handler, this.preparedStatementSelect, 1) + val result = 
executeQuery(handler, this.preparedStatementInsert2) + + result.rows === None + } + + } + } } diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLSSLConnectionSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLSSLConnectionSpec.scala new file mode 100644 index 00000000..2e38adbb --- /dev/null +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLSSLConnectionSpec.scala @@ -0,0 +1,51 @@ +package com.github.mauricio.async.db.postgresql + +import org.specs2.mutable.Specification +import com.github.mauricio.async.db.SSLConfiguration.Mode +import javax.net.ssl.SSLHandshakeException + +class PostgreSQLSSLConnectionSpec extends Specification with DatabaseTestHelper { + + "ssl handler" should { + + "connect to the database in ssl without verifying CA" in { + + withSSLHandler(Mode.Require, "127.0.0.1", None) { handler => + handler.isReadyForQuery must beTrue + } + + } + + "connect to the database in ssl verifying CA" in { + + withSSLHandler(Mode.VerifyCA, "127.0.0.1") { handler => + handler.isReadyForQuery must beTrue + } + + } + + "connect to the database in ssl verifying CA and hostname" in { + + withSSLHandler(Mode.VerifyFull) { handler => + handler.isReadyForQuery must beTrue + } + + } + + "throws exception when CA verification fails" in { + + withSSLHandler(Mode.VerifyCA, rootCert = None) { handler => + } must throwA[SSLHandshakeException] + + } + + "throws exception when hostname verification fails" in { + + withSSLHandler(Mode.VerifyFull, "127.0.0.1") { handler => + } must throwA[SSLHandshakeException] + + } + + } + +} diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PreparedStatementSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PreparedStatementSpec.scala index 66f7a57b..660c1411 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PreparedStatementSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PreparedStatementSpec.scala @@ -20,6 +20,8 @@ import org.specs2.mutable.Specification import org.joda.time.LocalDate import com.github.mauricio.async.db.util.Log import com.github.mauricio.async.db.exceptions.InsufficientParametersException +import java.util.UUID +import com.github.mauricio.async.db.postgresql.exceptions.GenericDatabaseException class PreparedStatementSpec extends Specification with DatabaseTestHelper { @@ -38,6 +40,7 @@ class PreparedStatementSpec extends Specification with DatabaseTestHelper { val messagesInsertReverted = s"INSERT INTO messages $filler (moment,content) VALUES (?,?) RETURNING id" val messagesUpdate = "UPDATE messages SET content = ?, moment = ? WHERE id = ?" val messagesSelectOne = "SELECT id, content, moment FROM messages WHERE id = ?" + val messagesSelectByMoment = "SELECT id, content, moment FROM messages WHERE moment = ?" val messagesSelectAll = "SELECT id, content, moment FROM messages" val messagesSelectEscaped = "SELECT id, content, moment FROM messages WHERE content LIKE '%??%' AND id > ?" 
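The `PostgreSQLSSLConnectionSpec` above drives the new SSL support through the `withSSLHandler` test helper; outside the test suite the same thing is expressed by putting an `SSLConfiguration` into the `Configuration` passed to the connection. A minimal sketch, with placeholder host, database name and certificate path:

```scala
import java.io.File
import com.github.mauricio.async.db.{Configuration, SSLConfiguration}
import com.github.mauricio.async.db.SSLConfiguration.Mode
import com.github.mauricio.async.db.postgresql.PostgreSQLConnection

val configuration = new Configuration(
  username = "postgres",
  host     = "localhost",
  port     = 5432,
  database = Some("mydb"),
  ssl      = SSLConfiguration(mode = Mode.VerifyCA, rootCert = Some(new File("script/server.crt")))
)

// connect returns a Future; await or map it before sending queries.
val connection = new PostgreSQLConnection(configuration)
connection.connect
```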
@@ -116,7 +119,7 @@ class PreparedStatementSpec extends Specification with DatabaseTestHelper { executeDdl(handler, this.messagesCreate) executeDdl(handler, create) - 1.until(4).map { + foreach(1.until(4)) { x => executePreparedStatement(handler, this.messagesInsert, Array(message, moment)) executePreparedStatement(handler, insert, Array(otherMoment, otherMessage)) @@ -132,7 +135,6 @@ class PreparedStatementSpec extends Specification with DatabaseTestHelper { otherResult.columnNames must contain(allOf("id", "other_moment", "other_content")).inOrder otherResult(x - 1)("other_moment") === otherMoment otherResult(x - 1)("other_content") === otherMessage - } } @@ -162,7 +164,40 @@ class PreparedStatementSpec extends Specification with DatabaseTestHelper { rows(1)("id") === 2 rows(1)("content") === secondContent rows(1)("moment") === date + } + } + + "supports sending null first and then an actual value for the fields" in { + withHandler { + handler => + + val firstContent = "Some Moment" + val secondContent = "Some Other Moment" + val date = LocalDate.now() + + executeDdl(handler, this.messagesCreate) + executePreparedStatement(handler, this.messagesInsert, Array(firstContent, null)) + executePreparedStatement(handler, this.messagesInsert, Array(secondContent, date)) + + val rows = executePreparedStatement(handler, this.messagesSelectByMoment, Array(null)).rows.get + rows.size === 0 + + /* + PostgreSQL does not know how to handle NULL parameters for a query in a prepared statement, + you have to use IS NULL if you want to make use of it. + + rows.length === 1 + + rows(0)("id") === 1 + rows(0)("content") === firstContent + rows(0)("moment") === null + */ + val rowsWithoutNull = executePreparedStatement(handler, this.messagesSelectByMoment, Array(date)).rows.get + rowsWithoutNull.size === 1 + rowsWithoutNull(0)("id") === 2 + rowsWithoutNull(0)("content") === secondContent + rowsWithoutNull(0)("moment") === date } } @@ -250,7 +285,89 @@ class PreparedStatementSpec extends Specification with DatabaseTestHelper { handler => val string = "someString" val result = executePreparedStatement(handler, "SELECT CAST(? AS VARCHAR)", Array(string)).rows.get - result(0)(0) == string + result(0)(0) === string + } + } + + "fail if prepared statement has more variables than it was given" in { + withHandler { + handler => + executeDdl(handler, messagesCreate) + + handler.sendPreparedStatement( + "SELECT * FROM messages WHERE content = ? AND moment = ?", + Array("some content")) must throwAn[InsufficientParametersException] + } + } + + "run prepared statement twice with bad and good values" in { + withHandler { + handler => + val content = "Some Moment" + + val query = "SELECT content FROM messages WHERE id = ?" + + executeDdl(handler, messagesCreate) + executePreparedStatement(handler, this.messagesInsert, Array(Some(content), None)) + + executePreparedStatement(handler, query, Array("undefined")) must throwA[GenericDatabaseException] + val result = executePreparedStatement(handler, query, Array(1)).rows.get + result(0)(0) === content + } + } + + "support UUID" in { + if ( System.getenv("TRAVIS") == null ) { + withHandler { + handler => + val create = """create temp table uuids + |( + |id bigserial primary key, + |my_id uuid + |);""".stripMargin + + val insert = "INSERT INTO uuids (my_id) VALUES (?) 
RETURNING id" + val select = "SELECT * FROM uuids" + + val uuid = UUID.randomUUID() + + executeDdl(handler, create) + executePreparedStatement(handler, insert, Array(uuid) ) + val result = executePreparedStatement(handler, select).rows.get + + result(0)("my_id").asInstanceOf[UUID] === uuid + } + success + } else { + pending + } + } + + "support UUID array" in { + if ( System.getenv("TRAVIS") == null ) { + withHandler { + handler => + val create = """create temp table uuids + |( + |id bigserial primary key, + |my_id uuid[] + |);""".stripMargin + + val insert = "INSERT INTO uuids (my_id) VALUES (?) RETURNING id" + val select = "SELECT * FROM uuids" + + val uuid1 = UUID.randomUUID() + val uuid2 = UUID.randomUUID() + + executeDdl(handler, create) + executePreparedStatement(handler, insert, Array(Array(uuid1, uuid2)) ) + val result = executePreparedStatement(handler, select).rows.get + + result(0)("my_id").asInstanceOf[Seq[UUID]] === Seq(uuid1, uuid2) + } + success + } else { + pending } } diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala index 0d6d98c9..03703f21 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala @@ -35,7 +35,7 @@ class TimeAndDateSpec extends Specification with DatabaseTestHelper { )""" executeDdl(handler, create) - executeQuery(handler, "INSERT INTO messages (moment) VALUES ('04:05:06')") + executePreparedStatement(handler, "INSERT INTO messages (moment) VALUES (?)", Array[Any](new LocalTime(4, 5, 6))) val rows = executePreparedStatement(handler, "select * from messages").rows.get @@ -60,7 +60,7 @@ class TimeAndDateSpec extends Specification with DatabaseTestHelper { )""" executeDdl(handler, create) - executeQuery(handler, "INSERT INTO messages (moment) VALUES ('04:05:06.134')") + executePreparedStatement(handler, "INSERT INTO messages (moment) VALUES (?)", Array[Any](new LocalTime(4, 5, 6, 134))) val rows = executePreparedStatement(handler, "select * from messages").rows.get @@ -128,7 +128,7 @@ class TimeAndDateSpec extends Specification with DatabaseTestHelper { "support timestamp with timezone and microseconds" in { - 1.until(6).inclusive.map { + foreach(1.until(6)) { index => withHandler { handler => @@ -156,8 +156,6 @@ class TimeAndDateSpec extends Specification with DatabaseTestHelper { dateTime.getMillis must be_>=(915779106000L) dateTime.getMillis must be_<(915779107000L) } - - } } @@ -190,14 +188,47 @@ class TimeAndDateSpec extends Specification with DatabaseTestHelper { withTimeHandler { conn => - val date1 = new DateTime(2190319) + val date = new DateTime(2190319) + + executePreparedStatement(conn, "CREATE TEMP TABLE TEST(T TIMESTAMP)") + executePreparedStatement(conn, "INSERT INTO TEST(T) VALUES(?)", Array(date)) + val result = executePreparedStatement(conn, "SELECT T FROM TEST") + val date2 = result.rows.get.head(0) + date2 === date.toDateTime(DateTimeZone.UTC).toLocalDateTime + } + + } + + "supports sending a local date and later a date time object for the same field" in { + + withTimeHandler { + conn => + val date = new LocalDate(2016, 3, 5) + + executePreparedStatement(conn, "CREATE TEMP TABLE TEST(T TIMESTAMP)") + executePreparedStatement(conn, "INSERT INTO TEST(T) VALUES(?)", Array(date)) + val result = executePreparedStatement(conn, "SELECT T FROM TEST 
WHERE T = ?", Array(date)) + result.rows.get.size === 1 + + val dateTime = new LocalDateTime(2016, 3, 5, 0, 0, 0, 0) + val dateTimeResult = executePreparedStatement(conn, "SELECT T FROM TEST WHERE T = ?", Array(dateTime)) + dateTimeResult.rows.get.size === 1 + } + + } + + "handle sending a LocalDateTime and return a LocalDateTime for a timestamp without timezone column" in { + + withTimeHandler { + conn => + val date1 = new LocalDateTime(2190319) await(conn.sendPreparedStatement("CREATE TEMP TABLE TEST(T TIMESTAMP)")) await(conn.sendPreparedStatement("INSERT INTO TEST(T) VALUES(?)", Seq(date1))) val result = await(conn.sendPreparedStatement("SELECT T FROM TEST")) val date2 = result.rows.get.head(0) - date2 === date1.toDateTime(DateTimeZone.UTC).toLocalDateTime + date2 === date1 } } diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/column/ByteArrayDecoderSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/column/ByteArrayDecoderSpec.scala new file mode 100644 index 00000000..328e872f --- /dev/null +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/column/ByteArrayDecoderSpec.scala @@ -0,0 +1,67 @@ +/* + * Copyright 2013 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.github.mauricio.async.db.postgresql.column + +import org.specs2.mutable.Specification + +class ByteArrayDecoderSpec extends Specification { + + val escapeTestData = + """\000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027""" + + """\030\031\032\033\034\035\036\037 !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^""" + + """_`abcdefghijklmnopqrstuvwxyz{|}~\177\200\201\202\203\204\205\206\207\210\211\212\213\214\215\216""" + + """\217\220\221\222\223\224\225\226\227\230\231\232\233\234\235\236\237\240\241\242\243\244\245\246""" + + """\247\250\251\252\253\254\255\256\257\260\261\262\263\264\265\266\267\270\271\272\273\274\275\276""" + + """\277\300\301\302\303\304\305\306\307\310\311\312\313\314\315\316\317\320\321\322\323\324\325\326""" + + """\327\330\331\332\333\334\335\336\337\340\341\342\343\344\345\346\347\350\351\352\353\354\355\356""" + + """\357\360\361\362\363\364\365\366\367\370\371\372\373\374\375\376\377\377\376\375\374\373\372\371""" + + """\370\367\366\365\364\363\362\361\360\357\356\355\354\353\352\351\350\347\346\345\344\343\342\341""" + + """\340\337\336\335\334\333\332\331\330\327\326\325\324\323\322\321\320\317\316\315\314\313\312\311""" + + """\310\307\306\305\304\303\302\301\300\277\276\275\274\273\272\271\270\267\266\265\264\263\262\261""" + + """\260\257\256\255\254\253\252\251\250\247\246\245\244\243\242\241\240\237\236\235\234\233\232\231""" + + """\230\227\226\225\224\223\222\221\220\217\216\215\214\213\212\211\210\207\206\205\204\203\202\201""" + + """\200\177~}|{zyxwvutsrqponmlkjihgfedcba`_^]\\[ZYXWVUTSRQPONMLKJIHGFEDCBA@?>=<;:9876543210/.-,+*)(""" + + """'&%$#"! 
\037\036\035\034\033\032\031\030\027\026\025\024\023\022\021\020\017\016\015\014\013\012""" + + """\011\010\007\006\005\004\003\002\001\000""" + + val hexTestData = + """\x000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e""" + + """2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e""" + + """5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e""" + + """8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbe""" + + """bfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedee""" + + """eff0f1f2f3f4f5f6f7f8f9fafbfcfdfefffffefdfcfbfaf9f8f7f6f5f4f3f2f1f0efeeedecebeae9e8e7e6e5e4e3e2e1""" + + """e0dfdedddcdbdad9d8d7d6d5d4d3d2d1d0cfcecdcccbcac9c8c7c6c5c4c3c2c1c0bfbebdbcbbbab9b8b7b6b5b4b3b2b1""" + + """b0afaeadacabaaa9a8a7a6a5a4a3a2a1a09f9e9d9c9b9a999897969594939291908f8e8d8c8b8a898887868584838281""" + + """807f7e7d7c7b7a797877767574737271706f6e6d6c6b6a696867666564636261605f5e5d5c5b5a595857565554535251""" + + """504f4e4d4c4b4a494847464544434241403f3e3d3c3b3a393837363534333231302f2e2d2c2b2a292827262524232221""" + + """201f1e1d1c1b1a191817161514131211100f0e0d0c0b0a09080706050403020100""" + + val originalData = ((0 to 255) ++ ((0 to 255).reverse)).map(_.toByte).toArray + + "decoder" should { + + "parse escape data" in { + ByteArrayEncoderDecoder.decode(escapeTestData) === originalData + } + + "parse hex data" in { + ByteArrayEncoderDecoder.decode(hexTestData) === originalData + } + } + +} diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/column/DefaultColumnEncoderRegistrySpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/column/DefaultColumnEncoderRegistrySpec.scala index 88965d49..1b41f447 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/column/DefaultColumnEncoderRegistrySpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/column/DefaultColumnEncoderRegistrySpec.scala @@ -26,7 +26,7 @@ class DefaultColumnEncoderRegistrySpec extends Specification { "correctly render an array of strings with nulls" in { val items = Array( "some", """text \ hoes " here to be seen""", null, "all, right" ) - registry.encode( items ) === """{"some","text \ hoes \" here to be seen",NULL,"all, right"}""" + registry.encode( items ) === """{"some","text \\ hoes \" here to be seen",NULL,"all, right"}""" } "correctly render an array of numbers" in { diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/encoders/ExecutePreparedStatementEncoderSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/encoders/ExecutePreparedStatementEncoderSpec.scala new file mode 100644 index 00000000..9342a703 --- /dev/null +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/encoders/ExecutePreparedStatementEncoderSpec.scala @@ -0,0 +1,30 @@ +package com.github.mauricio.async.db.postgresql.encoders + +import com.github.mauricio.async.db.postgresql.column.PostgreSQLColumnEncoderRegistry +import com.github.mauricio.async.db.postgresql.messages.frontend.PreparedStatementExecuteMessage +import io.netty.util.CharsetUtil +import org.specs2.mutable.Specification + +class ExecutePreparedStatementEncoderSpec extends Specification { + + val registry = new PostgreSQLColumnEncoderRegistry() + val encoder = new 
ExecutePreparedStatementEncoder(CharsetUtil.UTF_8, registry) + val sampleMessage = Array[Byte](66,0,0,0,18,49,0,49,0,0,0,0,1,-1,-1,-1,-1,0,0,69,0,0,0,10,49,0,0,0,0,0,83,0,0,0,4,67,0,0,0,7,80,49,0) + + "encoder" should { + + "correctly handle the case where an encoder returns null" in { + + val message = new PreparedStatementExecuteMessage(1, "select * from users", List(Some(null)), registry) + + val result = encoder.encode(message) + + val bytes = new Array[Byte](result.readableBytes()) + result.readBytes(bytes) + + bytes === sampleMessage + } + + } + +} diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/parsers/ParserKSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/parsers/ParserKSpec.scala index e6b29dd4..3f2788fc 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/parsers/ParserKSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/parsers/ParserKSpec.scala @@ -34,11 +34,10 @@ class ParserKSpec extends Specification { val data = parser.parseMessage(buffer).asInstanceOf[ProcessData] - List( - data.kind === ServerMessage.BackendKeyData, - data.processId === 10, - data.secretKey === 20 - ) + data.kind === ServerMessage.BackendKeyData + data.processId === 10 + data.secretKey === 20 + } } diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala index 02295b16..c2471a75 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala @@ -16,12 +16,22 @@ package com.github.mauricio.async.db.postgresql.pool +import java.util.UUID + import com.github.mauricio.async.db.pool.{ConnectionPool, PoolConfiguration} +import com.github.mauricio.async.db.postgresql.exceptions.GenericDatabaseException import com.github.mauricio.async.db.postgresql.{PostgreSQLConnection, DatabaseTestHelper} import org.specs2.mutable.Specification +import scala.concurrent.ExecutionContext.Implicits.global + +object ConnectionPoolSpec { + val Insert = "insert into transaction_test (id) values (?)" +} class ConnectionPoolSpec extends Specification with DatabaseTestHelper { + import ConnectionPoolSpec.Insert + "pool" should { "give you a connection when sending statements" in { @@ -51,6 +61,29 @@ class ConnectionPoolSpec extends Specification with DatabaseTestHelper { } } + "runs commands for a transaction in a single connection" in { + + val id = UUID.randomUUID().toString + + withPool { + pool => + val operations = pool.inTransaction { + connection => + connection.sendPreparedStatement(Insert, List(id)).flatMap { + result => + connection.sendPreparedStatement(Insert, List(id)).map { + failure => + List(result, failure) + } + } + } + + await(operations) must throwA[GenericDatabaseException] + + } + + } + } def withPool[R]( fn : (ConnectionPool[PostgreSQLConnection]) => R ) : R = { diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/SingleThreadedAsyncObjectPoolSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/SingleThreadedAsyncObjectPoolSpec.scala index d99a60d1..75da1ebd 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/SingleThreadedAsyncObjectPoolSpec.scala 
+++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/SingleThreadedAsyncObjectPoolSpec.scala @@ -16,12 +16,14 @@ package com.github.mauricio.async.db.postgresql.pool -import com.github.mauricio.async.db.pool.{SingleThreadedAsyncObjectPool, PoolExhaustedException, PoolConfiguration} +import com.github.mauricio.async.db.pool.{AsyncObjectPool, PoolConfiguration, PoolExhaustedException, SingleThreadedAsyncObjectPool} import com.github.mauricio.async.db.postgresql.{DatabaseTestHelper, PostgreSQLConnection} import java.nio.channels.ClosedChannelException import java.util.concurrent.TimeUnit + import org.specs2.mutable.Specification -import scala.concurrent.Await + +import scala.concurrent.{Await, Future} import scala.concurrent.duration._ import scala.language.postfixOps import com.github.mauricio.async.db.exceptions.ConnectionStillRunningQueryException @@ -47,23 +49,36 @@ class SingleThreadedAsyncObjectPoolSpec extends Specification with DatabaseTestH pool => val connection = get(pool) - val promises = List(pool.take, pool.take, pool.take) + val promises: List[Future[PostgreSQLConnection]] = List(pool.take, pool.take, pool.take) pool.availables.size === 0 pool.inUse.size === 1 + pool.queued.size must be_<=(3) + + /* pool.take call checkout that call this.mainPool.action, + so enqueuePromise called in executorService, + so there is no guaranties that all promises in queue at that moment + */ + val deadline = 5.seconds.fromNow + while(pool.queued.size < 3 || deadline.hasTimeLeft) { + Thread.sleep(50) + } + pool.queued.size === 3 executeTest(connection) pool.giveBack(connection) - promises.foreach { + val pools: List[Future[AsyncObjectPool[PostgreSQLConnection]]] = promises.map { promise => val connection = Await.result(promise, Duration(5, TimeUnit.SECONDS)) executeTest(connection) pool.giveBack(connection) } + Await.ready(pools.last, Duration(5, TimeUnit.SECONDS)) + pool.availables.size === 1 pool.inUse.size === 0 pool.queued.size === 0 diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/util/URLParserSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/util/URLParserSpec.scala index 1e542f52..9d2d2828 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/util/URLParserSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/util/URLParserSpec.scala @@ -17,72 +17,102 @@ package com.github.mauricio.async.db.postgresql.util import org.specs2.mutable.Specification -import com.github.mauricio.async.db.Configuration +import com.github.mauricio.async.db.SSLConfiguration.Mode +import com.github.mauricio.async.db.exceptions.UnableToParseURLException class URLParserSpec extends Specification { - "parser" should { + "postgresql URLParser" should { + import URLParser.{parse, parseOrDie, DEFAULT} - "create a connection with the available fields" in { - val connectionUri = "jdbc:postgresql://128.567.54.90:9987/my_database?user=john&password=doe" + // Divided into sections + // =========== jdbc:postgresql =========== - val configuration = URLParser.parse(connectionUri) + // https://jdbc.postgresql.org/documentation/80/connect.html + "recognize a jdbc:postgresql:dbname uri" in { + val connectionUri = "jdbc:postgresql:dbname" + + parse(connectionUri) mustEqual DEFAULT.copy( + database = Some("dbname") + ) + } + + "create a jdbc:postgresql connection with the available fields" in { + val connectionUri = 
"jdbc:postgresql://128.167.54.90:9987/my_database?user=john&password=doe" + + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") - configuration.host === "128.567.54.90" + configuration.host === "128.167.54.90" configuration.port === 9987 } "create a connection without port" in { - val connectionUri = "jdbc:postgresql://128.567.54.90/my_database?user=john&password=doe" + val connectionUri = "jdbc:postgresql://128.167.54.90/my_database?user=john&password=doe" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") - configuration.host === "128.567.54.90" + configuration.host === "128.167.54.90" configuration.port === 5432 } "create a connection without username and password" in { - val connectionUri = "jdbc:postgresql://128.567.54.90:9987/my_database" + val connectionUri = "jdbc:postgresql://128.167.54.90:9987/my_database" - val configuration = URLParser.parse(connectionUri) - configuration.username === Configuration.Default.username + val configuration = parse(connectionUri) + configuration.username === DEFAULT.username configuration.password === None configuration.database === Some("my_database") - configuration.host === "128.567.54.90" + configuration.host === "128.167.54.90" configuration.port === 9987 } + //========== postgresql:// ============== + "create a connection from a heroku like URL using 'postgresql' protocol" in { - val connectionUri = "postgresql://john:doe@128.567.54.90:9987/my_database" + val connectionUri = "postgresql://john:doe@128.167.54.90:9987/my_database" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") - configuration.host === "128.567.54.90" + configuration.host === "128.167.54.90" configuration.port === 9987 } - "create a connection from a heroku like URL using 'postgres' protocol" in { - val connectionUri = "postgres://john:doe@128.567.54.90:9987/my_database" + "create a connection with SSL enabled" in { + val connectionUri = "jdbc:postgresql://128.167.54.90:9987/my_database?sslmode=verify-full" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) + configuration.username === DEFAULT.username + configuration.password === None + configuration.database === Some("my_database") + configuration.host === "128.167.54.90" + configuration.port === 9987 + configuration.ssl.mode === Mode.VerifyFull + } + + "create a connection with SSL enabled and root CA from a heroku like URL using 'postgresql' protocol" in { + val connectionUri = "postgresql://john:doe@128.167.54.90:9987/my_database?sslmode=verify-ca&sslrootcert=server.crt" + + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") - configuration.host === "128.567.54.90" + configuration.host === "128.167.54.90" configuration.port === 9987 + configuration.ssl.mode === Mode.VerifyCA + configuration.ssl.rootCert.map(_.getPath) === Some("server.crt") } "create a connection with the available fields and named server" in { val connectionUri = "jdbc:postgresql://localhost:9987/my_database?user=john&password=doe" - val configuration = 
URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") @@ -93,7 +123,7 @@ class URLParserSpec extends Specification { "create a connection from a heroku like URL with named server" in { val connectionUri = "postgresql://john:doe@psql.heroku.com:9987/my_database" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") @@ -104,7 +134,7 @@ class URLParserSpec extends Specification { "create a connection with the available fields and ipv6" in { val connectionUri = "jdbc:postgresql://[::1]:9987/my_database?user=john&password=doe" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") @@ -116,7 +146,7 @@ class URLParserSpec extends Specification { "create a connection from a heroku like URL and with ipv6" in { val connectionUri = "postgresql://john:doe@[::1]:9987/my_database" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") @@ -127,7 +157,7 @@ class URLParserSpec extends Specification { "create a connection with a missing hostname" in { val connectionUri = "jdbc:postgresql:/my_database?user=john&password=doe" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") @@ -139,7 +169,7 @@ class URLParserSpec extends Specification { "create a connection with a missing database name" in { val connectionUri = "jdbc:postgresql://[::1]:9987/?user=john&password=doe" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") @@ -151,7 +181,7 @@ class URLParserSpec extends Specification { "create a connection with all default fields" in { val connectionUri = "jdbc:postgresql:" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "postgres" configuration.password === None @@ -163,7 +193,7 @@ class URLParserSpec extends Specification { "create a connection with an empty (invalid) url" in { val connectionUri = "" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "postgres" configuration.password === None @@ -172,6 +202,88 @@ class URLParserSpec extends Specification { configuration.port === 5432 } + + "recognise a postgresql:// uri" in { + parse("postgresql://localhost:425/dbname") mustEqual DEFAULT.copy( + username = "postgres", + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "recognise a postgres:// uri" in { + parse("postgres://localhost:425/dbname") mustEqual DEFAULT.copy( + username = "postgres", + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "recognize a jdbc:postgresql:// uri" in { + parse("jdbc:postgresql://localhost:425/dbname") mustEqual DEFAULT.copy( + username = "postgres", + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "pull the username and password from URI credentials" in { + 
parse("jdbc:postgresql://user:password@localhost:425/dbname") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "pull the username and password from query string" in { + parse("jdbc:postgresql://localhost:425/dbname?user=user&password=password") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + // Included for consistency, so later changes aren't allowed to change behavior + "use the query string parameters to override URI credentials" in { + parse("jdbc:postgresql://baduser:badpass@localhost:425/dbname?user=user&password=password") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "successfully default the port to the PostgreSQL port" in { + parse("jdbc:postgresql://baduser:badpass@localhost/dbname?user=user&password=password") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 5432, + host = "localhost" + ) + } + + "reject malformed ip addresses" in { + val connectionUri = "postgresql://john:doe@128.567.54.90:9987/my_database" + + val configuration = parse(connectionUri) + configuration.username === "postgres" + configuration.password === None + configuration.database === None + configuration.host === "localhost" + configuration.port === 5432 + + parseOrDie(connectionUri) must throwA[UnableToParseURLException] + } + } } diff --git a/project/Build.scala b/project/Build.scala index 5fa7e83b..b543b050 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -10,7 +10,11 @@ object ProjectBuild extends Build { lazy val root = Project( id = "db-async-base", base = file("."), - settings = Configuration.baseSettings, + settings = Configuration.baseSettings ++ Seq( + publish := (), + publishLocal := (), + publishArtifact := false + ), aggregate = Seq(common, postgresql, mysql) ) @@ -19,7 +23,7 @@ object ProjectBuild extends Build { base = file(commonName), settings = Configuration.baseSettings ++ Seq( name := commonName, - libraryDependencies := Configuration.commonDependencies + libraryDependencies ++= Configuration.commonDependencies ) ) @@ -30,7 +34,7 @@ object ProjectBuild extends Build { name := postgresqlName, libraryDependencies ++= Configuration.implementationDependencies ) - ) aggregate (common) dependsOn (common) + ) dependsOn (common) lazy val mysql = Project( id = mysqlName, @@ -39,26 +43,30 @@ object ProjectBuild extends Build { name := mysqlName, libraryDependencies ++= Configuration.implementationDependencies ) - ) aggregate (common) dependsOn (common) + ) dependsOn (common) } object Configuration { - val commonVersion = "0.2.13" - val projectScalaVersion = "2.10.3" + val commonVersion = "0.2.22-SNAPSHOT" + val projectScalaVersion = "2.12.1" + val specs2Version = "3.8.6" - val specs2Dependency = "org.specs2" %% "specs2" % "2.3.4" % "test" - val logbackDependency = "ch.qos.logback" % "logback-classic" % "1.0.13" % "test" + val specs2Dependency = "org.specs2" %% "specs2-core" % specs2Version % "test" + val specs2JunitDependency = "org.specs2" %% "specs2-junit" % specs2Version % "test" + val specs2MockDependency = "org.specs2" %% "specs2-mock" % specs2Version % "test" + val logbackDependency = "ch.qos.logback" % "logback-classic" % "1.1.8" % "test" val commonDependencies = Seq( - "org.slf4j" % "slf4j-api" % "1.7.5", - 
"joda-time" % "joda-time" % "2.3", - "org.joda" % "joda-convert" % "1.5", - "org.scala-lang" % "scala-library" % projectScalaVersion, - "io.netty" % "netty-all" % "4.0.17.Final", - "org.javassist" % "javassist" % "3.18.1-GA", + "org.slf4j" % "slf4j-api" % "1.7.22", + "joda-time" % "joda-time" % "2.9.7", + "org.joda" % "joda-convert" % "1.8.1", + "io.netty" % "netty-all" % "4.1.6.Final", + "org.javassist" % "javassist" % "3.21.0-GA", specs2Dependency, + specs2JunitDependency, + specs2MockDependency, logbackDependency ) @@ -74,11 +82,13 @@ object Configuration { :+ Opts.compile.unchecked :+ "-feature" , + testOptions in Test += Tests.Argument(TestFrameworks.Specs2, "sequential"), scalacOptions in doc := Seq("-doc-external-doc:scala=http://www.scala-lang.org/archives/downloads/distrib/files/nightly/docs/library/"), - scalaVersion := projectScalaVersion, - javacOptions := Seq("-source", "1.5", "-target", "1.5", "-encoding", "UTF8"), + crossScalaVersions := Seq(projectScalaVersion, "2.10.6", "2.11.8"), + javacOptions := Seq("-source", "1.6", "-target", "1.6", "-encoding", "UTF8"), organization := "com.github.mauricio", version := commonVersion, + parallelExecution := false, publishArtifact in Test := false, publishMavenStyle := true, pomIncludeRepository := { diff --git a/project/build.properties b/project/build.properties index 0974fce4..e0cbc71d 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.0 +sbt.version = 0.13.13 \ No newline at end of file diff --git a/project/plugins.sbt b/project/plugins.sbt new file mode 100644 index 00000000..0e9ec632 --- /dev/null +++ b/project/plugins.sbt @@ -0,0 +1,11 @@ +addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.5.0") + +addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.6.0") + +addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0") + +addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.3.0") + +resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases" + +// pgpSigningKey := Some(0xB98761578C650D77L) diff --git a/script/prepare_build.sh b/script/prepare_build.sh index 9992e442..068ab389 100755 --- a/script/prepare_build.sh +++ b/script/prepare_build.sh @@ -1,32 +1,46 @@ #!/usr/bin/env sh +SCRIPTDIR=`dirname $0` + echo "Preparing MySQL configs" mysql -u root -e 'create database mysql_async_tests;' +mysql -u root -e "create table mysql_async_tests.transaction_test (id varchar(255) not null, primary key (id))" mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'mysql_async'@'localhost' IDENTIFIED BY 'root' WITH GRANT OPTION"; mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'mysql_async_old'@'localhost' WITH GRANT OPTION"; mysql -u root -e "UPDATE mysql.user SET Password = OLD_PASSWORD('do_not_use_this'), plugin = 'mysql_old_password' where User = 'mysql_async_old'; flush privileges;"; +mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'mysql_async_nopw'@'localhost' WITH GRANT OPTION"; echo "preparing postgresql configs" -psql -c 'create database netty_driver_test;' -U postgres -psql -c 'create database netty_driver_time_test;' -U postgres -psql -c "alter database netty_driver_time_test set timezone to 'GMT'" -U postgres -psql -c "CREATE USER postgres_md5 WITH PASSWORD 'postgres_md5'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_md5;" -U postgres -psql -c "CREATE USER postgres_cleartext WITH PASSWORD 'postgres_cleartext'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_cleartext;" -U postgres -psql -c "CREATE USER postgres_kerberos WITH PASSWORD 
'postgres_kerberos'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_kerberos;" -U postgres -psql -d "netty_driver_test" -c "CREATE TYPE example_mood AS ENUM ('sad', 'ok', 'happy');" +PGUSER=postgres +PGCONF=/etc/postgresql/9.1/main +PGDATA=/var/ramfs/postgresql/9.1/main + +psql -d "postgres" -c 'create database netty_driver_test;' -U $PGUSER +psql -d "postgres" -c 'create database netty_driver_time_test;' -U $PGUSER +psql -d "postgres" -c "alter database netty_driver_time_test set timezone to 'GMT'" -U $PGUSER +psql -d "netty_driver_test" -c "create table transaction_test ( id varchar(255) not null, constraint id_unique primary key (id))" -U $PGUSER +psql -d "postgres" -c "CREATE USER postgres_md5 WITH PASSWORD 'postgres_md5'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_md5;" -U $PGUSER +psql -d "postgres" -c "CREATE USER postgres_cleartext WITH PASSWORD 'postgres_cleartext'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_cleartext;" -U $PGUSER +psql -d "postgres" -c "CREATE USER postgres_kerberos WITH PASSWORD 'postgres_kerberos'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_kerberos;" -U $PGUSER +psql -d "netty_driver_test" -c "CREATE TYPE example_mood AS ENUM ('sad', 'ok', 'happy');" -U $PGUSER -sudo chmod 777 /etc/postgresql/9.1/main/pg_hba.conf +sudo chmod 666 $PGCONF/pg_hba.conf echo "pg_hba.conf goes as follows" -cat "/etc/postgresql/9.1/main/pg_hba.conf" +cat "$PGCONF/pg_hba.conf" -sudo echo "host all postgres 127.0.0.1/32 trust" > /etc/postgresql/9.1/main/pg_hba.conf -sudo echo "host all postgres_md5 127.0.0.1/32 md5" >> /etc/postgresql/9.1/main/pg_hba.conf -sudo echo "host all postgres_cleartext 127.0.0.1/32 password" >> /etc/postgresql/9.1/main/pg_hba.conf -sudo echo "host all postgres_kerberos 127.0.0.1/32 krb5" >> /etc/postgresql/9.1/main/pg_hba.conf +sudo echo "local all all trust" > $PGCONF/pg_hba.conf +sudo echo "host all postgres 127.0.0.1/32 trust" >> $PGCONF/pg_hba.conf +sudo echo "host all postgres_md5 127.0.0.1/32 md5" >> $PGCONF/pg_hba.conf +sudo echo "host all postgres_cleartext 127.0.0.1/32 password" >> $PGCONF/pg_hba.conf +sudo echo "host all postgres_kerberos 127.0.0.1/32 krb5" >> $PGCONF/pg_hba.conf echo "pg_hba.conf is now like" -cat "/etc/postgresql/9.1/main/pg_hba.conf" +cat "$PGCONF/pg_hba.conf" + +sudo chmod 600 $PGCONF/pg_hba.conf + +sudo cp -f $SCRIPTDIR/server.crt $SCRIPTDIR/server.key $PGDATA -sudo /etc/init.d/postgresql restart \ No newline at end of file +sudo /etc/init.d/postgresql restart diff --git a/script/server.crt b/script/server.crt new file mode 100644 index 00000000..aeef86f2 --- /dev/null +++ b/script/server.crt @@ -0,0 +1,75 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 9913731310682600948 (0x8994a61a13e775f4) + Signature Algorithm: sha1WithRSAEncryption + Issuer: CN=localhost + Validity + Not Before: Mar 6 08:12:28 2016 GMT + Not After : Apr 5 08:12:28 2016 GMT + Subject: CN=localhost + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:ce:26:60:f9:0d:0f:f1:d6:ed:3e:79:91:55:6a: + 18:63:23:96:f2:60:50:3d:e3:dd:72:e8:c2:54:17: + 50:be:f0:9c:32:95:39:75:b1:04:a7:bb:f5:10:a4: + eb:d0:10:e2:17:45:d3:f9:35:8e:b4:8f:14:97:8f: + 27:93:d7:20:05:e2:dc:68:64:bc:fd:f2:19:17:94: + e8:2f:a6:b2:54:3f:df:3e:e7:8f:f1:52:15:7a:30: + 81:4d:bb:6f:22:8c:ca:e1:cb:6a:72:6d:fa:89:50: + e7:ee:07:d1:84:8a:71:07:dc:3f:6f:1f:db:10:e9: + 93:ad:01:c5:2b:51:ce:58:ef:12:95:00:16:e8:d4: + 46:07:35:ee:10:47:c4:f7:ff:47:17:52:a5:bb:5c: + 
cb:3c:f6:6b:c8:e7:d9:7c:18:39:a1:8f:e0:45:82: + 88:b5:27:f3:58:cb:ba:30:c0:8a:77:5b:00:bf:09: + 10:b1:ad:aa:f4:1b:2c:a1:f9:a5:59:57:c8:ef:de: + 54:ad:35:af:67:7e:29:bc:9a:2a:d2:f0:b1:9c:34: + 3c:bc:64:c9:4c:93:2c:7d:29:f4:1a:ac:f3:44:42: + a4:c9:06:1e:a4:73:e6:aa:67:d0:e4:02:02:ba:51: + 1e:97:44:b8:4b:4e:55:cd:e6:24:49:08:ac:9b:09: + 19:31 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Subject Key Identifier: + 2E:20:4D:E1:12:2A:B0:6F:52:7F:62:90:D4:78:7B:E3:7D:D5:60:10 + X509v3 Authority Key Identifier: + keyid:2E:20:4D:E1:12:2A:B0:6F:52:7F:62:90:D4:78:7B:E3:7D:D5:60:10 + + X509v3 Basic Constraints: + CA:TRUE + Signature Algorithm: sha1WithRSAEncryption + 9b:e8:50:8b:86:0f:bf:22:c6:b4:ef:3e:c9:a2:55:fb:69:fc: + ae:93:7b:5e:6a:b6:ed:5b:27:c2:9e:36:d6:f1:f1:0f:67:65: + 87:de:05:21:6e:0e:f4:df:ac:72:61:47:f8:fd:16:9b:3d:54: + ef:21:cf:b7:31:ba:bf:c9:1b:2c:a0:f9:f1:6b:45:5a:98:25: + b9:01:99:cf:e1:79:c5:6a:20:ce:ca:ca:3f:6d:56:f3:65:51: + 31:98:01:b9:96:99:04:9c:ab:ae:fb:3f:f8:ad:60:66:77:54: + b2:81:e3:7c:6b:c4:36:ae:ae:5c:c6:1a:09:5c:d6:13:da:2b: + ba:ef:3f:3e:b2:13:f2:51:15:c5:1b:9c:22:be:b4:55:9b:15: + 70:60:3d:98:6e:ef:53:4c:c7:20:60:3f:17:f3:cc:76:47:96: + 27:05:84:0e:db:21:e1:76:b7:9c:38:35:19:ef:52:d4:fc:bd: + ec:95:2e:eb:4b:5b:0b:c8:86:d7:23:c2:76:14:f3:93:6f:c0: + a9:b6:ca:f8:47:3e:9d:af:11:5d:73:79:68:70:26:f9:fd:39: + 60:c1:c3:c7:a9:fc:48:b5:c0:e6:b4:2e:07:de:6a:ca:ed:04: + 67:31:b8:0b:d0:48:fd:3b:4c:12:8a:34:5c:18:3f:38:85:f2: + 1c:96:39:50 +-----BEGIN CERTIFICATE----- +MIIC+zCCAeOgAwIBAgIJAImUphoT53X0MA0GCSqGSIb3DQEBBQUAMBQxEjAQBgNV +BAMMCWxvY2FsaG9zdDAeFw0xNjAzMDYwODEyMjhaFw0xNjA0MDUwODEyMjhaMBQx +EjAQBgNVBAMMCWxvY2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAM4mYPkND/HW7T55kVVqGGMjlvJgUD3j3XLowlQXUL7wnDKVOXWxBKe79RCk +69AQ4hdF0/k1jrSPFJePJ5PXIAXi3GhkvP3yGReU6C+mslQ/3z7nj/FSFXowgU27 +byKMyuHLanJt+olQ5+4H0YSKcQfcP28f2xDpk60BxStRzljvEpUAFujURgc17hBH +xPf/RxdSpbtcyzz2a8jn2XwYOaGP4EWCiLUn81jLujDAindbAL8JELGtqvQbLKH5 +pVlXyO/eVK01r2d+KbyaKtLwsZw0PLxkyUyTLH0p9Bqs80RCpMkGHqRz5qpn0OQC +ArpRHpdEuEtOVc3mJEkIrJsJGTECAwEAAaNQME4wHQYDVR0OBBYEFC4gTeESKrBv +Un9ikNR4e+N91WAQMB8GA1UdIwQYMBaAFC4gTeESKrBvUn9ikNR4e+N91WAQMAwG +A1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAJvoUIuGD78ixrTvPsmiVftp +/K6Te15qtu1bJ8KeNtbx8Q9nZYfeBSFuDvTfrHJhR/j9Fps9VO8hz7cxur/JGyyg ++fFrRVqYJbkBmc/hecVqIM7Kyj9tVvNlUTGYAbmWmQScq677P/itYGZ3VLKB43xr +xDaurlzGGglc1hPaK7rvPz6yE/JRFcUbnCK+tFWbFXBgPZhu71NMxyBgPxfzzHZH +licFhA7bIeF2t5w4NRnvUtT8veyVLutLWwvIhtcjwnYU85NvwKm2yvhHPp2vEV1z +eWhwJvn9OWDBw8ep/Ei1wOa0LgfeasrtBGcxuAvQSP07TBKKNFwYPziF8hyWOVA= +-----END CERTIFICATE----- diff --git a/script/server.key b/script/server.key new file mode 100644 index 00000000..0e226429 --- /dev/null +++ b/script/server.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAziZg+Q0P8dbtPnmRVWoYYyOW8mBQPePdcujCVBdQvvCcMpU5 +dbEEp7v1EKTr0BDiF0XT+TWOtI8Ul48nk9cgBeLcaGS8/fIZF5ToL6ayVD/fPueP +8VIVejCBTbtvIozK4ctqcm36iVDn7gfRhIpxB9w/bx/bEOmTrQHFK1HOWO8SlQAW +6NRGBzXuEEfE9/9HF1Klu1zLPPZryOfZfBg5oY/gRYKItSfzWMu6MMCKd1sAvwkQ +sa2q9BssofmlWVfI795UrTWvZ34pvJoq0vCxnDQ8vGTJTJMsfSn0GqzzREKkyQYe +pHPmqmfQ5AICulEel0S4S05VzeYkSQismwkZMQIDAQABAoIBAH80v3Hu1X/tl8eN +TFjgdtv2Ahbdx6XpDaTya7doC7NG1ZuA6UvuR2kZWkdC/SAOyvSBaiPFIKHaCGLd +OxbHEEORkV/5iYVJ9qHOiNeejTvfjepLCU9nz0ju1VsZ5aH0LtzVoIGry4UgH32J +5YdbxhOLnLj9dzggabe/9+KbQDEveGTzkIvSJ1nbts7c8IRp6t/1nBz54BhawUjJ +IbaEbCH/mEmiCOUP914SCAUEfmgbMhdx8dc4V9nyxK+bulF3WIEpVZU1zj5Rpyni +P8gQ1geI64Erd8oa4DJ5C77eLuKKk0JBCkgh5x3hiAxuvN0zxHxW2Q75c6x9uDr5 
+DXi20GECgYEA+NRW6heYBJw7Lt7+cQCRG5/WFOX9TmmK9EAidVPULWO4NN4wLZxa +exW/epg8w1Y+u+BHOzFq9idJaHsoLZCmoNWMkZsP+AzeEkklee6wgur3/Zs1HqHZ +1VA3EmvOecz++3o69zcjd0nzgk9ADhjA2dAahKTnn5RESD1dFBWU2+sCgYEA1Bcv +PiQe6ce86FlSPr0TBFvIJl2dfjrQijL3dhZMo+1Y5VTShGBoAQKfBhJITSbsmaEz +UQ/4rBMyTN9bwvSwsDpQZw/Y0YKiSQIOr4J0jyotY5RN2AH3AlCX8CrhoOmBaLUd +n2SGx5keodnXn1/GPkuGPIa7xnGib/gdL2AaZFMCgYBV5AX0XByPStZrAXJW01lD +bdLZ9+GOFYRvd0vtr/gHiupk5WU/+T6KSiGEUdR3oOeatnogBpjjSwBd3lUqFUpP +LieNgzbp6pclPLaA9lFbf3wGwHJ/lmK47S11YF0vUgGaEMEV4KSPYql5i52SwByh +kuH0c2+4d9dyECx26FQv7QKBgQDBtX83oWP+n6hhCpu8o5IH7BAtQlmDHhKz9oLf +/tP28OO9abBwqWC0c4Fs2SviE4gLdRjak9zKxSmu3l3//N6XxlsDFo0wJcE1L0Tc +dikhTSNxjNVgUcMaASQUfgXfowXH7YvltboH+UjqCH4QmTgGU5KCG4jLYaQ74gA9 +8eeI8wKBgDfclcMsJnY6FpFoR0Ub9VOrdbKtD9nXSxhTSFKjrp4JM7SBN3u6NPJK +FgKZyQxd1bX/RBioN1prrZ3rbg+9awc65KhyfwtNxiurCBZhYObhKJv7lZyjNgsT +EALMKvB+fdpMtPZOVtUl0MbHEBblrJ+oy4TPT/kvMuCudF/5arcZ +-----END RSA PRIVATE KEY-----