Skip to content

Commit

Permalink
Merge pull request #1774 from mgibowski/assertions
Browse files Browse the repository at this point in the history
Replace ShouldMatchers in tests with assertions
  • Loading branch information
mpilquist authored Feb 11, 2020
2 parents ff1ab05 + e183e9e commit 3fa7e24
Show file tree
Hide file tree
Showing 24 changed files with 1,309 additions and 1,071 deletions.
18 changes: 9 additions & 9 deletions core/jvm/src/test/scala/fs2/CompressSpec.scala
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ class CompressSpec extends Fs2Spec {
)
.toVector

actual should equal(expected)
assert(actual == expected)
}

"inflate input" in forAll(strings, intsBetween(0, 9), booleans) {
Expand All @@ -76,7 +76,7 @@ class CompressSpec extends Fs2Spec {
.through(inflate(nowrap = nowrap))
.compile
.toVector
actualInflated should equal(Right(expectedInflated))
assert(actualInflated == Right(expectedInflated))
}

expectEqual(actualDeflated.toArray, expectedDeflated.toArray)
Expand All @@ -89,7 +89,7 @@ class CompressSpec extends Fs2Spec {
.through(compress.inflate())
.compile
.toVector
.asserting(_ shouldBe s.toVector)
.asserting(it => assert(it == s.toVector))
}

"deflate.compresses input" in {
Expand All @@ -102,7 +102,7 @@ class CompressSpec extends Fs2Spec {
val compressed =
Stream.chunk(Chunk.bytes(uncompressed)).through(deflate(9)).toVector

compressed.length should be < uncompressed.length
assert(compressed.length < uncompressed.length)
}

"gzip |> gunzip ~= id" in forAll { s: Stream[Pure, Byte] =>
Expand All @@ -111,7 +111,7 @@ class CompressSpec extends Fs2Spec {
.through(compress.gunzip[IO](8192))
.compile
.toVector
.asserting(_ shouldBe s.toVector)
.asserting(it => assert(it == s.toVector))
}

"gzip |> gunzip ~= id (mutually prime chunk sizes, compression larger)" in forAll {
Expand All @@ -121,7 +121,7 @@ class CompressSpec extends Fs2Spec {
.through(compress.gunzip[IO](509))
.compile
.toVector
.asserting(_ shouldBe s.toVector)
.asserting(it => assert(it == s.toVector))
}

"gzip |> gunzip ~= id (mutually prime chunk sizes, decompression larger)" in forAll {
Expand All @@ -131,7 +131,7 @@ class CompressSpec extends Fs2Spec {
.through(compress.gunzip[IO](1031))
.compile
.toVector
.asserting(_ shouldBe s.toVector)
.asserting(it => assert(it == s.toVector))
}

"gzip |> GZIPInputStream ~= id" in forAll { s: Stream[Pure, Byte] =>
Expand All @@ -150,7 +150,7 @@ class CompressSpec extends Fs2Spec {
read = gzis.read()
}

buffer.toVector shouldBe s.toVector
assert(buffer.toVector == s.toVector)
}

"gzip.compresses input" in {
Expand All @@ -166,7 +166,7 @@ class CompressSpec extends Fs2Spec {
.compile
.toVector

compressed.length should be < uncompressed.length
assert(compressed.length < uncompressed.length)
}
}
}
13 changes: 8 additions & 5 deletions core/jvm/src/test/scala/fs2/HashSpec.scala
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ class HashSpec extends Fs2Spec {
.foldLeft(Stream.empty.covaryOutput[Byte])((acc, c) => acc ++ Stream.chunk(Chunk.bytes(c))
)

s.through(h).toList shouldBe digest(algo, str)
assert(s.through(h).toList == digest(algo, str))
}

"digests" - {
Expand All @@ -47,13 +47,16 @@ class HashSpec extends Fs2Spec {
}

"empty input" in {
Stream.empty.through(sha1).toList should have size (20)
assert(Stream.empty.through(sha1).toList.size == 20)
}

"zero or one output" in forAll { (lb: List[Array[Byte]]) =>
lb.foldLeft(Stream.empty.covaryOutput[Byte])((acc, b) => acc ++ Stream.chunk(Chunk.bytes(b)))
val size = lb
.foldLeft(Stream.empty.covaryOutput[Byte])((acc, b) => acc ++ Stream.chunk(Chunk.bytes(b)))
.through(sha1)
.toList should have size (20)
.toList
.size
assert(size == 20)
}

"thread-safety" in {
Expand All @@ -66,7 +69,7 @@ class HashSpec extends Fs2Spec {
once <- s.compile.toVector
oneHundred <- Vector.fill(100)(s.compile.toVector).parSequence
} yield (once, oneHundred)).asserting {
case (once, oneHundred) => oneHundred shouldBe Vector.fill(100)(once)
case (once, oneHundred) => assert(oneHundred == Vector.fill(100)(once))
}
}
}
28 changes: 14 additions & 14 deletions core/shared/src/test/scala/fs2/ChunkQueueSpec.scala
Original file line number Diff line number Diff line change
Expand Up @@ -7,51 +7,51 @@ class ChunkQueueSpec extends Fs2Spec {
"take" in {
forAll { (chunks: List[Chunk[Int]], n: Int) =>
val result = Chunk.Queue(chunks: _*).take(n)
result.toChunk.toList shouldBe chunks.flatMap(_.toList).take(n)
result.chunks.size should be <= chunks.size
assert(result.toChunk.toList == chunks.flatMap(_.toList).take(n))
assert(result.chunks.size <= chunks.size)
}
}

"drop" in {
forAll { (chunks: List[Chunk[Int]], n: Int) =>
val result = Chunk.Queue(chunks: _*).drop(n)
result.toChunk.toList shouldBe chunks.flatMap(_.toList).drop(n)
result.chunks.size should be <= chunks.size
assert(result.toChunk.toList == chunks.flatMap(_.toList).drop(n))
assert(result.chunks.size <= chunks.size)
}
}

"takeRight" in {
forAll { (chunks: List[Chunk[Int]], n: Int) =>
val result = Chunk.Queue(chunks: _*).takeRight(n)
result.toChunk.toList shouldBe chunks.flatMap(_.toList).takeRight(n)
result.chunks.size should be <= chunks.size
assert(result.toChunk.toList == chunks.flatMap(_.toList).takeRight(n))
assert(result.chunks.size <= chunks.size)
}
}

"dropRight" in {
forAll { (chunks: List[Chunk[Int]], n: Int) =>
val result = Chunk.Queue(chunks: _*).dropRight(n)
result.toChunk.toList shouldBe chunks.flatMap(_.toList).dropRight(n)
result.chunks.size should be <= chunks.size
assert(result.toChunk.toList == chunks.flatMap(_.toList).dropRight(n))
assert(result.chunks.size <= chunks.size)
}
}

"equals" in {
forAll { (chunks: List[Chunk[Int]]) =>
val cq = Chunk.Queue(chunks: _*)
cq shouldBe cq
cq shouldBe Chunk.Queue(chunks: _*)
if (cq.size > 1) cq.drop(1) should not be cq
assert(cq == cq)
assert(cq == Chunk.Queue(chunks: _*))
if (cq.size > 1) assert(cq.drop(1) != cq)
else Succeeded
}
}

"hashCode" in {
forAll { (chunks: List[Chunk[Int]]) =>
val cq = Chunk.Queue(chunks: _*)
cq.hashCode shouldBe cq.hashCode
cq.hashCode shouldBe Chunk.Queue(chunks: _*).hashCode
if (cq.size > 1) cq.drop(1).hashCode should not be cq.hashCode
assert(cq.hashCode == cq.hashCode)
assert(cq.hashCode == Chunk.Queue(chunks: _*).hashCode)
if (cq.size > 1) assert(cq.drop(1).hashCode != cq.hashCode)
else Succeeded
}
}
Expand Down
71 changes: 36 additions & 35 deletions core/shared/src/test/scala/fs2/ChunkSpec.scala
Original file line number Diff line number Diff line change
Expand Up @@ -18,40 +18,40 @@ class ChunkSpec extends Fs2Spec {

"Chunk" - {
"chunk-formation (1)" in {
Chunk.empty.toList shouldBe List()
Chunk.singleton(23).toList shouldBe List(23)
assert(Chunk.empty.toList == List())
assert(Chunk.singleton(23).toList == List(23))
}

"chunk-formation (2)" in forAll { (c: Vector[Int]) =>
Chunk.seq(c).toVector shouldBe c
Chunk.seq(c).toList shouldBe c.toList
Chunk.indexedSeq(c).toVector shouldBe c
Chunk.indexedSeq(c).toList shouldBe c.toList
assert(Chunk.seq(c).toVector == c)
assert(Chunk.seq(c).toList == c.toList)
assert(Chunk.indexedSeq(c).toVector == c)
assert(Chunk.indexedSeq(c).toList == c.toList)
}

"Chunk.apply is optimized" in {
Chunk(1) shouldBe a[Chunk.Singleton[_]]
Chunk("Hello") shouldBe a[Chunk.Singleton[_]]
assert(Chunk(1).isInstanceOf[Chunk.Singleton[_]])
assert(Chunk("Hello").isInstanceOf[Chunk.Singleton[_]])
// Varargs on Scala.js use a scala.scalajs.js.WrappedArray, which
// ends up falling through to the Chunk.indexedSeq constructor
if (isJVM) {
Chunk(1, 2, 3) shouldBe a[Chunk.Ints]
Chunk("Hello", "world") shouldBe a[Chunk.Boxed[_]]
assert(Chunk(1, 2, 3).isInstanceOf[Chunk.Ints])
assert(Chunk("Hello", "world").isInstanceOf[Chunk.Boxed[_]])
} else {
Succeeded
}
}

"Chunk.seq is optimized" in {
Chunk.seq(List(1)) shouldBe a[Chunk.Singleton[_]]
assert(Chunk.seq(List(1)).isInstanceOf[Chunk.Singleton[_]])
}

"Array casts in Chunk.seq are safe" in {
val as = collection.mutable.ArraySeq[Int](0, 1, 2)
val c = Chunk.seq(as)
try c shouldBe a[Chunk.Boxed[_]] // 2.11/2.12
try assert(c.isInstanceOf[Chunk.Boxed[_]]) // 2.11/2.12
catch {
case NonFatal(_) => c shouldBe a[Chunk.Ints] // 2.13+
case NonFatal(_) => assert(c.isInstanceOf[Chunk.Ints]) // 2.13+
}
}
}
Expand All @@ -65,54 +65,55 @@ class ChunkSpec extends Fs2Spec {
s"$name" - {
implicit val implicitChunkGenerator: Generator[Chunk[A]] = genChunk
"size" in forAll { (c: Chunk[A]) =>
c.size shouldBe c.toList.size
assert(c.size == c.toList.size)
}
"take" in forAll { (c: Chunk[A], n: PosZInt) =>
c.take(n).toVector shouldBe c.toVector.take(n)
assert(c.take(n).toVector == c.toVector.take(n))
}
"drop" in forAll { (c: Chunk[A], n: PosZInt) =>
c.drop(n).toVector shouldBe c.toVector.drop(n)
assert(c.drop(n).toVector == c.toVector.drop(n))
}
"isEmpty" in forAll { (c: Chunk[A]) =>
c.isEmpty shouldBe c.toList.isEmpty
assert(c.isEmpty == c.toList.isEmpty)
}
"toArray" in forAll { c: Chunk[A] =>
c.toArray.toVector shouldBe c.toVector
assert(c.toArray.toVector == c.toVector)
// Do it twice to make sure the first time didn't mutate state
c.toArray.toVector shouldBe c.toVector
assert(c.toArray.toVector == c.toVector)
}
"copyToArray" in forAll { c: Chunk[A] =>
val arr = new Array[A](c.size * 2)
c.copyToArray(arr, 0)
c.copyToArray(arr, c.size)
arr.toVector shouldBe (c.toVector ++ c.toVector)
assert(arr.toVector == (c.toVector ++ c.toVector))
}
"concat" in forAll { (c1: Chunk[A], c2: Chunk[A]) =>
Chunk
val result = Chunk
.concat(List(Chunk.empty, c1, Chunk.empty, c2))
.toVector shouldBe (c1.toVector ++ c2.toVector)
.toVector
assert(result == (c1.toVector ++ c2.toVector))
}
"concat empty" in {
Chunk.concat[A](List(Chunk.empty, Chunk.empty)) shouldEqual Chunk.empty
assert(Chunk.concat[A](List(Chunk.empty, Chunk.empty)) == Chunk.empty)
}
"scanLeft" in forAll { c: Chunk[A] =>
def step(acc: List[A], item: A) = acc :+ item
c.scanLeft(List[A]())(step).toList shouldBe (c.toList.scanLeft(List[A]())(step))
assert(c.scanLeft(List[A]())(step).toList == (c.toList.scanLeft(List[A]())(step)))
}
"scanLeftCarry" in forAll { c: Chunk[A] =>
def step(acc: List[A], item: A) = acc :+ item
val listScan = c.toList.scanLeft(List[A]())(step)
val (chunkScan, chunkCarry) = c.scanLeftCarry(List[A]())(step)

(chunkScan.toList, chunkCarry) shouldBe ((listScan.tail, listScan.last))
assert((chunkScan.toList, chunkCarry) == ((listScan.tail, listScan.last)))
}

if (implicitly[ClassTag[A]] == ClassTag.Byte)
"toByteBuffer.byte" in forAll { c: Chunk[A] =>
implicit val ev: A =:= Byte = null
val arr = new Array[Byte](c.size)
c.toByteBuffer.get(arr, 0, c.size)
arr.toVector shouldBe c.toArray.toVector
assert(arr.toVector == c.toArray.toVector)
}

import org.scalacheck.GeneratorCompat._
Expand Down Expand Up @@ -156,19 +157,19 @@ class ChunkSpec extends Fs2Spec {

"scanLeftCarry" - {
"returns empty and zero for empty Chunk" in {
Chunk[Int]().scanLeftCarry(0)(_ + _) shouldBe ((Chunk.empty, 0))
assert(Chunk[Int]().scanLeftCarry(0)(_ + _) == ((Chunk.empty, 0)))
}
"returns first result and first result for singleton" in {
Chunk(2).scanLeftCarry(1)(_ + _) shouldBe ((Chunk(3), 3))
assert(Chunk(2).scanLeftCarry(1)(_ + _) == ((Chunk(3), 3)))
}
"returns all results and last result for multiple elements" in {
Chunk(2, 3).scanLeftCarry(1)(_ + _) shouldBe ((Chunk(3, 6), 6))
assert(Chunk(2, 3).scanLeftCarry(1)(_ + _) == ((Chunk(3, 6), 6)))
}
}

"concat primitives" - {
def testEmptyConcat[A](mkChunk: List[Chunk[A]] => Chunk[A]) =
mkChunk(List(Chunk.empty, Chunk.empty)) shouldEqual Chunk.empty
assert(mkChunk(List(Chunk.empty, Chunk.empty)) === Chunk.empty)

"booleans" in testEmptyConcat(Chunk.concatBooleans)
"bytes" in testEmptyConcat(Chunk.concatBytes)
Expand All @@ -182,24 +183,24 @@ class ChunkSpec extends Fs2Spec {

"map andThen toArray" in {
val arr: Array[Int] = Chunk(0, 0).map(identity).toArray
arr shouldBe Array(0, 0)
assert(arr === Array(0, 0))
}

"mapAccumulate andThen toArray" in {
val arr: Array[Int] = Chunk(0, 0).mapAccumulate(0)((s, o) => (s, o))._2.toArray
arr shouldBe Array(0, 0)
assert(arr === Array(0, 0))
}

"scanLeft andThen toArray" in {
val arr: Array[Int] = Chunk(0, 0).scanLeft(0)((_, o) => o).toArray
arr shouldBe Array(0, 0, 0)
assert(arr === Array(0, 0, 0))
}

"zip andThen toArray" in {
val arr: Array[(Int, Int)] = Chunk(0, 0).zip(Chunk(0, 0)).toArray
arr shouldBe Array((0, 0), (0, 0))
assert(arr === Array((0, 0), (0, 0)))
val arr2: Array[Int] = Chunk(0, 0).zip(Chunk(0, 0)).map(_._1).toArray
arr2 shouldBe Array(0, 0)
assert(arr2 === Array(0, 0))
}

"Boxed toArray - regression #1745" in {
Expand Down
4 changes: 2 additions & 2 deletions core/shared/src/test/scala/fs2/CompositeFailureTest.scala
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,8 @@ class CompositeFailureTest extends Fs2Spec {
err(3),
List(CompositeFailure(err(4), err(5)))
)
compositeFailure.all.map(_.getMessage) shouldBe NonEmptyList.of("1", "2", "3", "4", "5")
compositeFailure.all.collect { case cf: CompositeFailure => cf } shouldBe empty
assert(compositeFailure.all.map(_.getMessage) == NonEmptyList.of("1", "2", "3", "4", "5"))
assert(compositeFailure.all.collect { case cf: CompositeFailure => cf }.isEmpty)
}
}
}
Loading

0 comments on commit 3fa7e24

Please sign in to comment.