Merge pull request #42 from scalableminds/scala213
scala 2.13, sbt 1.9, jvm 11, upgraded dependencies
fm3 authored Oct 16, 2023
2 parents 3e5cecc + 02eb7d7 commit b875de1
Showing 13 changed files with 51 additions and 55 deletions.
18 changes: 2 additions & 16 deletions .circleci/config.yml
@@ -4,20 +4,13 @@ jobs:
machine:
image: ubuntu-2004:202111-02
environment:
SBT_VERSION_TAG: sbt-0.13.15_mongo-3.2.17_node-8.x_jdk-8
SBT_VERSION_TAG: master__6469160879
USER_UID: 1000
USER_GID: 1000
TARGET_DIR: target/scala-2.12
TARGET_DIR: target/scala-2.13
steps:
- checkout

- run:
name: Prepare dependency folders
command: mkdir -p ~/.m2 ~/.ivy2 ~/.sbt

- restore_cache:
key: cache-{{ .Branch }}

- run:
name: Build server
command: docker-compose run -T -e CI=$CI sbt sbt assembly
@@ -26,13 +19,6 @@
name: Get FossilDB version
command: docker-compose run -T sbt java -jar $TARGET_DIR/fossildb.jar --version > $TARGET_DIR/version

- save_cache:
key: cache-{{ .Branch }}
paths:
- "~/.m2"
- "~/.ivy2"
- "~/.sbt"

- run:
name: Build server docker image
command: |
1 change: 1 addition & 0 deletions Changelog.md
@@ -3,6 +3,7 @@
## Breaking Changes

- The `GetMultipleKeys` call now takes a `startAfterKey` instead of a `key` for pagination. The returned list will only start *after* this key. [#38](https://github.com/scalableminds/fossildb/pull/38)
- Now needs Java 11+

## Fixes

6 changes: 3 additions & 3 deletions Dockerfile
@@ -1,10 +1,10 @@
FROM openjdk:8-jdk
FROM openjdk:11-jdk

RUN apt-get update && apt-get install -y --no-install-recommends gosu && rm -rf /var/lib/apt/lists/*
RUN mkdir -p /fossildb
WORKDIR /fossildb

COPY target/scala-2.12/fossildb.jar .
COPY target/scala-2.13/fossildb.jar .
COPY fossildb .

RUN groupadd -r fossildb \
@@ -13,7 +13,7 @@ RUN groupadd -r fossildb \
&& chmod 777 . \
&& chown -R fossildb .

RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
RUN GRPC_HEALTH_PROBE_VERSION=v0.4.20 && \
wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
chmod +x /bin/grpc_health_probe

24 changes: 17 additions & 7 deletions build.sbt
@@ -14,19 +14,24 @@ def getVersionFromGit: String = {
}
}

ThisBuild / scalacOptions ++= Seq(
"-feature",
"-deprecation"
)

version := getVersionFromGit

scalaVersion := "2.12.15"
scalaVersion := "2.13.12"

libraryDependencies ++= Seq(
"ch.qos.logback" % "logback-classic" % "1.2.3",
"com.typesafe.scala-logging" %% "scala-logging" % "3.7.2",
"org.scalatest" % "scalatest_2.12" % "3.0.4" % "test",
"ch.qos.logback" % "logback-classic" % "1.4.7",
"com.typesafe.scala-logging" %% "scala-logging" % "3.9.5",
"org.scalatest" % "scalatest_2.13" % "3.2.15" % "test",
"io.grpc" % "grpc-netty" % scalapb.compiler.Version.grpcJavaVersion,
"io.grpc" % "grpc-services" % scalapb.compiler.Version.grpcJavaVersion,
"com.thesamet.scalapb" %% "scalapb-runtime-grpc" % scalapb.compiler.Version.scalapbVersion,
"org.rocksdb" % "rocksdbjni" % "5.11.3",
"com.github.scopt" %% "scopt" % "3.7.0"
"org.rocksdb" % "rocksdbjni" % "7.10.2",
"com.github.scopt" %% "scopt" % "4.1.0"
)

Compile / managedSourceDirectories += target.value / "protobuf-generated"
@@ -39,6 +44,8 @@ Compile / mainClass := Some("com.scalableminds.fossildb.FossilDB")

assembly / assemblyMergeStrategy := {
case x if x.endsWith("io.netty.versions.properties") => MergeStrategy.first
// compare https://stackoverflow.com/questions/54834125/sbt-assembly-deduplicate-module-info-class
case x if x.endsWith("module-info.class") => MergeStrategy.concat
case x =>
val oldStrategy = (assembly / assemblyMergeStrategy).value
oldStrategy(x)
@@ -53,7 +60,10 @@ lazy val buildInfoSettings = Seq(
override def toString: String = {
try {
val extracted = new java.io.InputStreamReader(java.lang.Runtime.getRuntime.exec("git rev-parse HEAD").getInputStream)
new java.io.BufferedReader(extracted).readLine()
val str = new java.io.BufferedReader(extracted).readLine()
if (str == null) {
"get git hash failed"
} else str
} catch {
case t: Throwable => "get git hash failed"
}
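
Two of the build.sbt changes are behavioural rather than plain version bumps: the module-info.class merge rule stops sbt-assembly from failing on the Java module descriptors that JDK-9+-built dependencies ship in their jars, and the buildInfo git-hash lookup now guards against BufferedReader.readLine() returning null when git rev-parse HEAD prints nothing (for example in a source tarball without a .git directory). A small standalone sketch of that guard, written with Option(...) instead of the explicit if purely for illustration; the object and method names are invented:

object GitHashSketch {
  // Reads the first output line of `git rev-parse HEAD`; readLine() returns null when
  // the process prints nothing, so the result is wrapped in Option before use.
  def currentGitHash: String =
    try {
      val reader = new java.io.BufferedReader(
        new java.io.InputStreamReader(
          java.lang.Runtime.getRuntime.exec("git rev-parse HEAD").getInputStream))
      Option(reader.readLine()).getOrElse("get git hash failed")
    } catch {
      case _: Throwable => "get git hash failed"
    }

  def main(args: Array[String]): Unit = println(currentGitHash)
}
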
4 changes: 1 addition & 3 deletions docker-compose.yaml
@@ -36,8 +36,6 @@ services:
- USER_GID
- TZ
working_dir: /fossildb
user: ${USER_UID:-1000}:${USER_GID:-1000}
volumes:
- ".:/fossildb"
- "~/.m2:/home/sbt-user/.m2"
- "~/.ivy2:/home/sbt-user/.ivy2"
- "~/.sbt:/home/sbt-user/.sbt"
2 changes: 1 addition & 1 deletion project/build.properties
@@ -1 +1 @@
sbt.version = 1.6.2
sbt.version = 1.9.6
4 changes: 2 additions & 2 deletions project/scalapb.sbt
@@ -1,3 +1,3 @@
addSbtPlugin("com.thesamet" % "sbt-protoc" % "0.99.18")
addSbtPlugin("com.thesamet" % "sbt-protoc" % "1.0.2")

libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.7.4"
libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.11.13"
2 changes: 1 addition & 1 deletion src/main/scala/com/scalableminds/fossildb/FossilDB.scala
@@ -48,7 +48,7 @@ object FossilDB extends LazyLogging {
opt[String]('b', "backupDir").valueName("<path>").action( (x, c) =>
c.copy(backupDir = x) ).text("backup directory. Default: " + ConfigDefaults.backupDir)

opt[Seq[String]]('c', "columnFamilies").required.valueName("<cf1>,<cf2>...").action( (x, c) =>
opt[Seq[String]]('c', "columnFamilies").required().valueName("<cf1>,<cf2>...").action( (x, c) =>
c.copy(columnFamilies = x.toList) ).text("column families of the database (created if there is no db yet)")

opt[String]('r', "rocksOptionsFile").valueName("<filepath>").action( (x, c) =>
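
The switch from required to required() in the scopt parser is forced by the compiler upgrade: Scala 2.13 deprecates auto-application, i.e. calling a method declared with an empty parameter list without writing the parentheses. A minimal standalone sketch, with invented names:

class Flag {
  def required(): Flag = this // declared with an empty parameter list
}

object AutoApplicationSketch {
  def main(args: Array[String]): Unit = {
    val f = new Flag
    f.required()   // fine on 2.12 and 2.13
    // f.required  // compiled silently on 2.12; deprecated on 2.13 (an error under -Xfatal-warnings)
    println(f)
  }
}
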
src/main/scala/com/scalableminds/fossildb/FossilDBServer.scala
@@ -6,7 +6,7 @@ import io.grpc.health.v1.HealthCheckResponse
import com.typesafe.scalalogging.LazyLogging
import io.grpc.Server
import io.grpc.netty.NettyServerBuilder
import io.grpc.services.HealthStatusManager
import io.grpc.protobuf.services.HealthStatusManager

import scala.concurrent.ExecutionContext

@@ -17,7 +17,7 @@ class FossilDBServer(storeManager: StoreManager, port: Int, executionContext: Ex

def start(): Unit = {
healthStatusManager = new HealthStatusManager()
server = NettyServerBuilder.forPort(port).maxMessageSize(Int.MaxValue)
server = NettyServerBuilder.forPort(port).maxInboundMessageSize(Int.MaxValue)
.addService(FossilDBGrpc.bindService(new FossilDBGrpcImpl(storeManager), executionContext))
.addService(healthStatusManager.getHealthService)
.build.start
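
Both edits in this file track renames in newer grpc-java releases: HealthStatusManager moved from io.grpc.services to io.grpc.protobuf.services (still provided by the grpc-services artifact), and maxMessageSize on the server builder was replaced by maxInboundMessageSize. A standalone sketch of the same wiring; the object name, port, and the SERVING status call are illustrative and not taken from this file:

import io.grpc.health.v1.HealthCheckResponse
import io.grpc.netty.NettyServerBuilder
import io.grpc.protobuf.services.HealthStatusManager

object HealthServerSketch {
  def main(args: Array[String]): Unit = {
    val healthStatusManager = new HealthStatusManager()
    val server = NettyServerBuilder
      .forPort(7155)                         // example port
      .maxInboundMessageSize(Int.MaxValue)   // replaces the removed maxMessageSize
      .addService(healthStatusManager.getHealthService)
      .build()
      .start()
    // Mark the default service ("") as healthy so grpc_health_probe reports SERVING
    healthStatusManager.setStatus("", HealthCheckResponse.ServingStatus.SERVING)
    server.awaitTermination()
  }
}
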
10 changes: 5 additions & 5 deletions src/main/scala/com/scalableminds/fossildb/db/RocksDBStore.scala
@@ -5,9 +5,9 @@ import org.rocksdb._

import java.nio.file.{Files, Path}
import java.util
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.concurrent.Future
import scala.jdk.CollectionConverters.{ListHasAsScala, BufferHasAsJava, SeqHasAsJava}
import scala.language.postfixOps

case class BackupInfo(id: Int, timestamp: Long, size: Long)
@@ -56,7 +56,7 @@ class RocksDBManager(dataDir: Path, columnFamilies: List[String], optionsFilePat
Files.createDirectories(backupDir)

RocksDB.loadLibrary()
val backupEngine = BackupEngine.open(Env.getDefault, new BackupableDBOptions(backupDir.toString))
val backupEngine = BackupEngine.open(Env.getDefault, new BackupEngineOptions(backupDir.toString))
backupEngine.createNewBackup(db)
backupEngine.purgeOldBackups(1)
backupEngine.getBackupInfo.asScala.headOption.map(info => BackupInfo(info.backupId, info.timestamp, info.size))
@@ -66,7 +66,7 @@
logger.info("Restoring from backup. RocksDB temporarily unavailable")
close()
RocksDB.loadLibrary()
val backupEngine = BackupEngine.open(Env.getDefault, new BackupableDBOptions(backupDir.toString))
val backupEngine = BackupEngine.open(Env.getDefault, new BackupEngineOptions(backupDir.toString))
backupEngine.restoreDbFromLatestBackup(dataDir.toString, dataDir.toString, new RestoreOptions(true))
logger.info("Restoring from backup complete. Reopening RocksDB")
}
@@ -101,7 +101,7 @@ class RocksDBKeyIterator(it: RocksIterator, prefix: Option[String]) extends Iter

override def hasNext: Boolean = it.isValid && prefix.forall(it.key().startsWith(_))

override def next: String = {
override def next(): String = {
val key = new String(it.key().map(_.toChar))
it.next()
key
@@ -117,7 +117,7 @@ class RocksDBIterator(it: RocksIterator, prefix: Option[String]) extends Iterato

override def hasNext: Boolean = it.isValid && prefix.forall(it.key().startsWith(_))

override def next: KeyValuePair[Array[Byte]] = {
override def next(): KeyValuePair[Array[Byte]] = {
val value = KeyValuePair(new String(it.key().map(_.toChar)), it.value())
it.next()
value
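
Two upgrades meet in this file: on Scala 2.13 the asScala/asJava conversions come from scala.jdk.CollectionConverters rather than the deprecated scala.collection.JavaConverters, and RocksDB 7 renamed BackupableDBOptions to BackupEngineOptions. A standalone sketch of a backup using both; the paths and the object name are placeholders:

import org.rocksdb.{BackupEngine, BackupEngineOptions, Env, Options, RocksDB}

import java.nio.file.{Files, Paths}
import scala.jdk.CollectionConverters.ListHasAsScala

object BackupSketch {
  def main(args: Array[String]): Unit = {
    RocksDB.loadLibrary()
    val db = RocksDB.open(new Options().setCreateIfMissing(true), "/tmp/rocksdb-sketch-data")
    Files.createDirectories(Paths.get("/tmp/rocksdb-sketch-backup"))

    val backupEngine = BackupEngine.open(Env.getDefault, new BackupEngineOptions("/tmp/rocksdb-sketch-backup"))
    backupEngine.createNewBackup(db)
    // getBackupInfo returns a java.util.List; ListHasAsScala provides asScala on it
    backupEngine.getBackupInfo.asScala.foreach(info =>
      println(s"backup ${info.backupId} at ${info.timestamp}, ${info.size} bytes"))

    backupEngine.close()
    db.close()
  }
}
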
src/main/scala/com/scalableminds/fossildb/db/VersionedKeyValueStore.scala
@@ -73,14 +73,14 @@ class KeyOnlyIterator[T](underlying: RocksDBStore, startAfterKey: Option[String]

override def hasNext: Boolean = {
val it = underlying.scanKeysOnly(compositeKeyFor(currentKey), None)
if (it.hasNext && currentKey.isDefined && currentKey.contains(VersionedKey(it.peek).get.key)) it.next
if (it.hasNext && currentKey.isDefined && currentKey.contains(VersionedKey(it.peek).get.key)) it.next()
it.hasNext
}

override def next(): String = {
val it = underlying.scanKeysOnly(compositeKeyFor(currentKey), None)
if (it.hasNext && currentKey.isDefined && currentKey.contains(VersionedKey(it.peek).get.key)) it.next
val nextKey = VersionedKey(it.next).get.key
if (it.hasNext && currentKey.isDefined && currentKey.contains(VersionedKey(it.peek).get.key)) it.next()
val nextKey = VersionedKey(it.next()).get.key
currentKey = Some(nextKey)
nextKey
}
@@ -91,7 +91,7 @@ class VersionedKeyValueStore(underlying: RocksDBStore) {
class VersionedKeyValueStore(underlying: RocksDBStore) {

def get(key: String, version: Option[Long] = None): Option[VersionedKeyValuePair[Array[Byte]]] =
scanVersionValuePairs(key, version).toStream.headOption
scanVersionValuePairs(key, version).nextOption()

def getMultipleVersions(key: String, oldestVersion: Option[Long] = None, newestVersion: Option[Long] = None): (List[Array[Byte]], List[Long]) = {

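
The get change swaps toStream.headOption for nextOption(): Stream is deprecated in Scala 2.13 and Iterator gained nextOption(), which returns the first element as an Option without building an intermediate collection. A tiny standalone illustration:

object NextOptionSketch {
  def main(args: Array[String]): Unit = {
    val nonEmpty = Iterator("first", "second")
    val empty = Iterator.empty[String]

    println(nonEmpty.nextOption()) // Some(first)
    println(empty.nextOption())    // None
  }
}
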
11 changes: 6 additions & 5 deletions src/test/scala/com/scalableminds/fossildb/FossilDBSuite.scala
@@ -1,18 +1,19 @@
package com.scalableminds.fossildb

import java.io.File
import java.nio.file.{Files, Paths}
import java.nio.file.Paths
import com.google.protobuf.ByteString
import com.scalableminds.fossildb.db.StoreManager
import com.scalableminds.fossildb.proto.fossildbapi._
import com.typesafe.scalalogging.LazyLogging
import io.grpc.health.v1._
import io.grpc.netty.NettyChannelBuilder
import org.scalatest.{BeforeAndAfterEach, FlatSpec}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.flatspec.AnyFlatSpec

import scala.concurrent.ExecutionContext

class FossilDBSuite extends FlatSpec with BeforeAndAfterEach with TestHelpers with LazyLogging {
class FossilDBSuite extends AnyFlatSpec with BeforeAndAfterEach with TestHelpers with LazyLogging {
private val testTempDir = "testData1"
private val dataDir = Paths.get(testTempDir, "data")
private val backupDir = Paths.get(testTempDir, "backup")
@@ -33,7 +34,7 @@ class FossilDBSuite extends FlatSpec with BeforeAndAfterEach with TestHelpers wi
private val aNotherKey = "aNotherKey"
private val aThirdKey = "aThirdKey"

override def beforeEach: Unit = {
override def beforeEach(): Unit = {
deleteRecursively(new File(testTempDir))
new File(testTempDir).mkdir()

@@ -46,7 +47,7 @@ class FossilDBSuite extends FlatSpec with BeforeAndAfterEach with TestHelpers wi
serverOpt.foreach(_.start())
}

override def afterEach: Unit = {
override def afterEach(): Unit = {
serverOpt.foreach(_.stop())
deleteRecursively(new File(testTempDir))
}
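
The test changes follow the ScalaTest 3.1/3.2 reorganisation: the FlatSpec style now lives at org.scalatest.flatspec.AnyFlatSpec, and the BeforeAndAfterEach hooks are overridden with an explicit empty parameter list. A minimal standalone suite in the new style, not part of this project's tests:

import org.scalatest.BeforeAndAfterEach
import org.scalatest.flatspec.AnyFlatSpec

class UpgradeStyleSuite extends AnyFlatSpec with BeforeAndAfterEach {
  private var counter = 0

  override def beforeEach(): Unit = { counter = 0 }

  override def afterEach(): Unit = { counter = 0 }

  "A counter" should "start from zero in every test" in {
    counter += 1
    assert(counter == 1)
  }
}
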
src/test/scala/com/scalableminds/fossildb/RocksOptionsSuite.scala
@@ -2,16 +2,16 @@ package com.scalableminds.fossildb

import java.io.File
import java.nio.file.Paths

import com.scalableminds.fossildb.db.StoreManager
import org.rocksdb.{ColumnFamilyDescriptor, DBOptions, Env}
import org.scalatest.{BeforeAndAfterEach, FlatSpec}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.flatspec.AnyFlatSpec

import scala.collection.mutable
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.BufferHasAsJava


class RocksOptionsSuite extends FlatSpec with BeforeAndAfterEach with TestHelpers {
class RocksOptionsSuite extends AnyFlatSpec with BeforeAndAfterEach with TestHelpers {

private val testTempDir = "testData2"
private val dataDir = Paths.get(testTempDir, "data")
@@ -23,12 +23,12 @@ class RocksOptionsSuite extends FlatSpec with BeforeAndAfterEach with TestHelper
private val columnFamilies = List(collectionA, collectionB)


override def beforeEach: Unit = {
override def beforeEach(): Unit = {
deleteRecursively(new File(testTempDir))
new File(testTempDir).mkdir()
}

override def afterEach: Unit = {
override def afterEach(): Unit = {
deleteRecursively(new File(testTempDir))
}

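
The selective BufferHasAsJava import replaces the old JavaConverters wildcard, presumably because this suite hands a Scala mutable buffer of column family descriptors to the Java RocksDB API. A standalone sketch of that conversion against the rocksdbjni version from this commit; the database path and column family name are placeholders:

import org.rocksdb.{ColumnFamilyDescriptor, ColumnFamilyHandle, DBOptions, RocksDB}

import scala.collection.mutable
import scala.jdk.CollectionConverters.BufferHasAsJava

object ColumnFamilyOpenSketch {
  def main(args: Array[String]): Unit = {
    RocksDB.loadLibrary()
    val options = new DBOptions()
      .setCreateIfMissing(true)
      .setCreateMissingColumnFamilies(true)

    val descriptors: mutable.Buffer[ColumnFamilyDescriptor] = mutable.Buffer(
      new ColumnFamilyDescriptor(RocksDB.DEFAULT_COLUMN_FAMILY),
      new ColumnFamilyDescriptor("collectionA".getBytes)
    )
    val handles = new java.util.ArrayList[ColumnFamilyHandle]()

    // BufferHasAsJava provides the asJava conversion the Java open() signature expects
    val db = RocksDB.open(options, "/tmp/rocksdb-sketch-cf", descriptors.asJava, handles)
    db.close()
  }
}
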
