diff --git a/content-api/collection-csv-actors/pom.xml b/content-api/collection-csv-actors/pom.xml
index aeeb27ed9..57c017164 100644
--- a/content-api/collection-csv-actors/pom.xml
+++ b/content-api/collection-csv-actors/pom.xml
@@ -28,7 +28,7 @@
         org.sunbird
-        graph-engine_2.12
+        graph-engine_2.13
         1.0-SNAPSHOT
         jar
@@ -78,9 +78,9 @@
         test
-        com.typesafe.akka
-        akka-testkit_${scala.maj.version}
-        2.5.22
+        org.apache.pekko
+        pekko-testkit_${scala.maj.version}
+        1.0.3
         test
diff --git a/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/actors/CollectionCSVActor.scala b/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/actors/CollectionCSVActor.scala
index e3c520983..3d9737c25 100644
--- a/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/actors/CollectionCSVActor.scala
+++ b/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/actors/CollectionCSVActor.scala
@@ -13,8 +13,8 @@
 import org.sunbird.managers.HierarchyManager
 import org.sunbird.telemetry.logger.TelemetryManager
 import javax.inject.Inject
-import scala.collection.JavaConverters.mapAsJavaMapConverter
 import scala.collection.immutable.{HashMap, Map}
+import scala.jdk.CollectionConverters._
 import scala.concurrent.{ExecutionContext, Future}
 class CollectionCSVActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageService) extends BaseActor {
diff --git a/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/manager/CollectionCSVManager.scala b/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/manager/CollectionCSVManager.scala
index eddfdf7af..4d93e1d8d 100644
--- a/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/manager/CollectionCSVManager.scala
+++ b/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/manager/CollectionCSVManager.scala
@@ -23,13 +23,11 @@
 import java.nio.charset.StandardCharsets
 import java.util
 import java.util.logging.Logger
 import scala.collection.immutable.{ListMap, Map}
-import scala.collection.convert.ImplicitConversions._
-import scala.collection.JavaConverters.{asJavaIterableConverter, mapAsScalaMapConverter}
+import scala.jdk.CollectionConverters._
 import scala.collection.mutable
 import scala.collection.mutable.ListBuffer
 import scala.concurrent.{ExecutionContext, Future}
-import scala.collection.JavaConverters._
 object CollectionCSVManager extends CollectionInputFileReader {
@@ -239,7 +237,7 @@
   }
   private def populateFolderInfoMap(folderInfoMap: mutable.Map[String, AnyRef], csvRecords: util.List[CSVRecord], mode: String): Unit = {
-    csvRecords.map(csvRecord => {
+    csvRecords.asScala.map(csvRecord => {
       val csvRecordFolderHierarchyMap: Map[String, String] = csvRecord.toMap.asScala.toMap.filter(colData => {
         folderHierarchyHdrColumnsList.contains(colData._1) && colData._2.nonEmpty
       })
@@ -258,11 +256,11 @@
       if(nodeInfoMap.contains(CollectionTOCConstants.CHILDREN)) {
         var childrenSet = nodeInfoMap(CollectionTOCConstants.CHILDREN).asInstanceOf[Seq[String]]
-        childrenSet ++= Seq(getCode(sortedFoldersDataList.get(sortedFoldersDataKey.indexOf(folderData._1)+1)))
+        childrenSet ++= Seq(getCode(sortedFoldersDataList(sortedFoldersDataKey.indexOf(folderData._1)+1)))
         nodeInfoMap(CollectionTOCConstants.CHILDREN) = childrenSet
       }
       else {
-        val childrenList =
Seq(getCode(sortedFoldersDataList.get(sortedFoldersDataKey.indexOf(folderData._1)+1))) + val childrenList = Seq(getCode(sortedFoldersDataList(sortedFoldersDataKey.indexOf(folderData._1)+1))) nodeInfoMap += (CollectionTOCConstants.CHILDREN -> childrenList) } folderInfoMap(folderDataHashCode) = nodeInfoMap @@ -304,7 +302,7 @@ object CollectionCSVManager extends CollectionInputFileReader { else { val childrenList = { if((sortedFoldersDataKey.indexOf(folderData._1)+1) != sortedFoldersDataList.size) - Seq(getCode(sortedFoldersDataList.get(sortedFoldersDataKey.indexOf(folderData._1)+1))) + Seq(getCode(sortedFoldersDataList(sortedFoldersDataKey.indexOf(folderData._1)+1))) else Seq.empty[String] } scala.collection.mutable.Map(CollectionTOCConstants.NAME -> folderData._2, CollectionTOCConstants.CHILDREN -> childrenList, CollectionTOCConstants.LEVEL -> folderData._1) @@ -470,9 +468,9 @@ object CollectionCSVManager extends CollectionInputFileReader { if(nodeInfo(CollectionTOCConstants.DIAL_CODES) != null && nodeInfo(CollectionTOCConstants.DIAL_CODES).toString.nonEmpty) new util.HashMap[String, String]{put(CollectionTOCConstants.IDENTIFIER, nodeInfo(CollectionTOCConstants.IDENTIFIER).toString); put(CollectionTOCConstants.DIALCODE, nodeInfo(CollectionTOCConstants.DIAL_CODES).toString)} else new util.HashMap[String, String]() - }).filter(record => record.nonEmpty).toList + }).filter(record => !record.isEmpty).toList - if(linkDIALCodeReqMap.nonEmpty) linkDIALCode(channelID, collectionID, linkDIALCodeReqMap) + if(!linkDIALCodeReqMap.isEmpty) linkDIALCode(channelID, collectionID, linkDIALCodeReqMap) } diff --git a/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/util/CollectionTOCUtil.scala b/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/util/CollectionTOCUtil.scala index c9cebe724..cdb5eccda 100644 --- a/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/util/CollectionTOCUtil.scala +++ b/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/util/CollectionTOCUtil.scala @@ -7,9 +7,9 @@ import org.sunbird.graph.OntologyEngineContext import org.sunbird.telemetry.logger.TelemetryManager import java.util -import scala.collection.JavaConverters._ import java.text.MessageFormat import scala.collection.immutable.Map +import scala.jdk.CollectionConverters._ import scala.concurrent.ExecutionContext diff --git a/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/validator/CollectionCSVValidator.scala b/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/validator/CollectionCSVValidator.scala index da0bee6ed..f3bc1042a 100644 --- a/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/validator/CollectionCSVValidator.scala +++ b/content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/validator/CollectionCSVValidator.scala @@ -16,8 +16,7 @@ import java.io.{File, FileInputStream, IOException, InputStreamReader} import java.nio.charset.StandardCharsets import java.text.MessageFormat import java.util -import scala.collection.convert.ImplicitConversions._ -import scala.collection.JavaConverters.{asScalaBufferConverter, mapAsJavaMapConverter, mapAsScalaMapConverter} +import scala.jdk.CollectionConverters._ import scala.collection.immutable.{HashMap, ListMap, Map} import scala.concurrent.ExecutionContext @@ -26,17 +25,17 @@ object CollectionCSVValidator { val allowedNumberOfRecord: Integer = 
Platform.getInteger(CollectionTOCConstants.COLLECTION_TOC_MAX_CSV_ROWS,6500) val createCSVHeaders: Map[String, Integer] = Platform.getAnyRef(CollectionTOCConstants.COLLECTION_CREATION_CSV_TOC_HEADERS, Map[String, Integer]("Level 1 Folder"->0,"Level 2 Folder"->1,"Level 3 Folder"->2,"Level 4 Folder"->3,"Description"->4).asJava).asInstanceOf[util.Map[String, Integer]].asScala.toMap val updateCSVHeaders: Map[String, Integer] = Platform.getAnyRef(CollectionTOCConstants.COLLECTION_UPDATE_CSV_TOC_HEADERS, Map[String, Integer]("Collection Name"->0,"Folder Identifier"->1,"Level 1 Folder"->2,"Level 2 Folder"->3,"Level 3 Folder"->4,"Level 4 Folder"->5,"Description"->6,"Mapped Topics"->7,"Keywords"->8,"QR Code Required?"->9,"QR Code"->10,"Linked Content 1"->11,"Linked Content 2"->12,"Linked Content 3"->13,"Linked Content 4"->14,"Linked Content 5"->15,"Linked Content 6"->16,"Linked Content 7"->17,"Linked Content 8"->18,"Linked Content 9"->19,"Linked Content 10"->20,"Linked Content 11"->21,"Linked Content 12"->22,"Linked Content 13"->23,"Linked Content 14"->24,"Linked Content 15"->25,"Linked Content 16"->26,"Linked Content 17"->27,"Linked Content 18"->28,"Linked Content 19"->29,"Linked Content 20"->30,"Linked Content 21"->31,"Linked Content 22"->32,"Linked Content 23"->33,"Linked Content 24"->34,"Linked Content 25"->35,"Linked Content 26"->36,"Linked Content 27"->37,"Linked Content 28"->38,"Linked Content 29"->39,"Linked Content 30"->40).asJava).asInstanceOf[util.Map[String, Integer]].asScala.toMap - val createCSVMandatoryHeaderCols: List[String] = Platform.getStringList(CollectionTOCConstants.COLLECTION_TOC_CREATE_CSV_MANDATORY_FIELDS, java.util.Arrays.asList("Level 1 Folder")).toList - val updateCSVMandatoryHeaderCols: List[String] = Platform.getStringList(CollectionTOCConstants.COLLECTION_TOC_UPDATE_CSV_MANDATORY_FIELDS, java.util.Arrays.asList("Collection Name","Folder Identifier")).toList - val qrCodeHdrColsList: List[String] = Platform.getStringList(CollectionTOCConstants.COLLECTION_CSV_QR_COLUMNS, java.util.Arrays.asList("QR Code Required?","QR Code")).toList - val folderHierarchyHdrColumnsList: List[String] = Platform.getStringList(CollectionTOCConstants.FOLDER_HIERARCHY_COLUMNS, java.util.Arrays.asList("Level 1 Folder","Level 2 Folder","Level 3 Folder","Level 4 Folder")).toList - val linkedContentHdrColumnsList: List[String] = Platform.getStringList(CollectionTOCConstants.COLLECTION_CSV_LINKED_CONTENT_FIELDS, java.util.Arrays.asList("Linked Content 1","Linked Content 2","Linked Content 3","Linked Content 4","Linked Content 5","Linked Content 6","Linked Content 7","Linked Content 8","Linked Content 9","Linked Content 10","Linked Content 11","Linked Content 12","Linked Content 13","Linked Content 14","Linked Content 15","Linked Content 16","Linked Content 17","Linked Content 18","Linked Content 19","Linked Content 20","Linked Content 21","Linked Content 22","Linked Content 23","Linked Content 24","Linked Content 25","Linked Content 26","Linked Content 27","Linked Content 28","Linked Content 29","Linked Content 30")).toList + val createCSVMandatoryHeaderCols: List[String] = Platform.getStringList(CollectionTOCConstants.COLLECTION_TOC_CREATE_CSV_MANDATORY_FIELDS, java.util.Arrays.asList("Level 1 Folder")).asScala.toList + val updateCSVMandatoryHeaderCols: List[String] = Platform.getStringList(CollectionTOCConstants.COLLECTION_TOC_UPDATE_CSV_MANDATORY_FIELDS, java.util.Arrays.asList("Collection Name","Folder Identifier")).asScala.toList + val qrCodeHdrColsList: List[String] = 
Platform.getStringList(CollectionTOCConstants.COLLECTION_CSV_QR_COLUMNS, java.util.Arrays.asList("QR Code Required?","QR Code")).asScala.toList + val folderHierarchyHdrColumnsList: List[String] = Platform.getStringList(CollectionTOCConstants.FOLDER_HIERARCHY_COLUMNS, java.util.Arrays.asList("Level 1 Folder","Level 2 Folder","Level 3 Folder","Level 4 Folder")).asScala.toList + val linkedContentHdrColumnsList: List[String] = Platform.getStringList(CollectionTOCConstants.COLLECTION_CSV_LINKED_CONTENT_FIELDS, java.util.Arrays.asList("Linked Content 1","Linked Content 2","Linked Content 3","Linked Content 4","Linked Content 5","Linked Content 6","Linked Content 7","Linked Content 8","Linked Content 9","Linked Content 10","Linked Content 11","Linked Content 12","Linked Content 13","Linked Content 14","Linked Content 15","Linked Content 16","Linked Content 17","Linked Content 18","Linked Content 19","Linked Content 20","Linked Content 21","Linked Content 22","Linked Content 23","Linked Content 24","Linked Content 25","Linked Content 26","Linked Content 27","Linked Content 28","Linked Content 29","Linked Content 30")).asScala.toList val linkedContentColumnHeadersSeq: Map[String, Integer] = Platform.getAnyRef(CollectionTOCConstants.COLLECTION_CSV_LINKED_CONTENT_SEQ, Map[String, Integer]("Linked Content 1"->0,"Linked Content 2"->1,"Linked Content 3"->2,"Linked Content 4"->3,"Linked Content 5"->4,"Linked Content 6"->5,"Linked Content 7"->6,"Linked Content 8"->7,"Linked Content 9"->8,"Linked Content 10"->9,"Linked Content 11"->10,"Linked Content 12"->11,"Linked Content 13"->12,"Linked Content 14"->13,"Linked Content 15"->14,"Linked Content 16"->15,"Linked Content 17"->16,"Linked Content 18"->17,"Linked Content 19"->18,"Linked Content 20"->19,"Linked Content 21"->20,"Linked Content 22"->21,"Linked Content 23"->22,"Linked Content 24"->23,"Linked Content 25"->24,"Linked Content 26"->25,"Linked Content 27"->26,"Linked Content 28"->27,"Linked Content 29"->28,"Linked Content 30"->29).asJava).asInstanceOf[util.Map[String, Integer]].asScala.toMap - val collectionNameHeader: List[String] = Platform.getStringList(CollectionTOCConstants.CSV_COLLECTION_NAME_HEADER, java.util.Arrays.asList("Collection Name")).toList - val mappedTopicsHeader: List[String] = Platform.getStringList(CollectionTOCConstants.MAPPED_TOPICS_HEADER, java.util.Arrays.asList("Mapped Topics")).toList - val collectionNodeIdentifierHeader: List[String] = Platform.getStringList(CollectionTOCConstants.COLLECTION_CSV_IDENTIFIER_HEADER, java.util.Arrays.asList("Folder Identifier")).toList + val collectionNameHeader: List[String] = Platform.getStringList(CollectionTOCConstants.CSV_COLLECTION_NAME_HEADER, java.util.Arrays.asList("Collection Name")).asScala.toList + val mappedTopicsHeader: List[String] = Platform.getStringList(CollectionTOCConstants.MAPPED_TOPICS_HEADER, java.util.Arrays.asList("Mapped Topics")).asScala.toList + val collectionNodeIdentifierHeader: List[String] = Platform.getStringList(CollectionTOCConstants.COLLECTION_CSV_IDENTIFIER_HEADER, java.util.Arrays.asList("Folder Identifier")).asScala.toList val contentTypeToUnitTypeMapping: Map[String, String] = Platform.getAnyRef(CollectionTOCConstants.COLLECTION_TYPE_TO_UNIT_TYPE, Map[String, String]("TextBook"-> "TextBookUnit", "Course"-> "CourseUnit", "Collection"->"CollectionUnit").asJava).asInstanceOf[util.Map[String, String]].asScala.toMap - val collectionOutputTocHeaders: List[String] = Platform.getStringList(CollectionTOCConstants.COLLECTION_OUTPUT_TOC_HEADERS, 
java.util.Arrays.asList("Collection Name","Folder Identifier","Level 1 Folder","Level 2 Folder","Level 3 Folder","Level 4 Folder","Description","Mapped Topics","Keywords","QR Code Required?","QR Code","Linked Content 1","Linked Content 2","Linked Content 3","Linked Content 4","Linked Content 5","Linked Content 6","Linked Content 7","Linked Content 8","Linked Content 9","Linked Content 10","Linked Content 11","Linked Content 12","Linked Content 13","Linked Content 14","Linked Content 15","Linked Content 16","Linked Content 17","Linked Content 18","Linked Content 19","Linked Content 20","Linked Content 21","Linked Content 22","Linked Content 23","Linked Content 24","Linked Content 25","Linked Content 26","Linked Content 27","Linked Content 28","Linked Content 29","Linked Content 30")).toList + val collectionOutputTocHeaders: List[String] = Platform.getStringList(CollectionTOCConstants.COLLECTION_OUTPUT_TOC_HEADERS, java.util.Arrays.asList("Collection Name","Folder Identifier","Level 1 Folder","Level 2 Folder","Level 3 Folder","Level 4 Folder","Description","Mapped Topics","Keywords","QR Code Required?","QR Code","Linked Content 1","Linked Content 2","Linked Content 3","Linked Content 4","Linked Content 5","Linked Content 6","Linked Content 7","Linked Content 8","Linked Content 9","Linked Content 10","Linked Content 11","Linked Content 12","Linked Content 13","Linked Content 14","Linked Content 15","Linked Content 16","Linked Content 17","Linked Content 18","Linked Content 19","Linked Content 20","Linked Content 21","Linked Content 22","Linked Content 23","Linked Content 24","Linked Content 25","Linked Content 26","Linked Content 27","Linked Content 28","Linked Content 29","Linked Content 30")).asScala.toList val maxFolderLevels: Int = folderHierarchyHdrColumnsList.size val maxUnitFieldLength: Int = Platform.getInteger(CollectionTOCConstants.COLLECTION_UNIT_FIELD_MAX_LENGTH, 50) val maxDescFieldLength: Int = Platform.getInteger(CollectionTOCConstants.COLLECTION_DESC_FIELD_MAX_LENGTH, 250) @@ -61,7 +60,7 @@ object CollectionCSVValidator { if (null == csvHeaders || csvHeaders.isEmpty) throw new ClientException("BLANK_CSV_DATA", "Not data found in the file. Please correct and upload again.") //Check if the input CSV is 'CREATE' TOC file format or 'UPDATE' TOC file format - val mode = if (csvHeaders.containsKey(collectionNodeIdentifierHeader.head)) CollectionTOCConstants.UPDATE else CollectionTOCConstants.CREATE + val mode = if (csvHeaders.contains(collectionNodeIdentifierHeader.head)) CollectionTOCConstants.UPDATE else CollectionTOCConstants.CREATE TelemetryManager.log("CollectionCSVActor --> uploadTOC --> mode identified: " + mode) //Validate the headers format of the input CSV @@ -96,7 +95,7 @@ object CollectionCSVValidator { //Check if CSV Records are empty if (null == csvRecords || csvRecords.isEmpty) throw new ClientException("BLANK_CSV_DATA", "Not data found in the file. Please correct and upload again.") // check if records are more than allowed csv rows - if (csvRecords.nonEmpty && csvRecords.size > allowedNumberOfRecord) throw new ClientException("CSV_ROWS_EXCEEDS", s"Number of rows in the file exceeds the limit $allowedNumberOfRecord. Please reduce the number of folders and upload again.") + if (!csvRecords.isEmpty && csvRecords.size > allowedNumberOfRecord) throw new ClientException("CSV_ROWS_EXCEEDS", s"Number of rows in the file exceeds the limit $allowedNumberOfRecord. 
Please reduce the number of folders and upload again.") validateMandatoryHeaderCols(csvRecords, mode) validateDuplicateRows(csvRecords) @@ -140,7 +139,7 @@ object CollectionCSVValidator { // Check if data exists in mandatory columns - START val mandatoryDataHdrCols = if(mode.equals(CollectionTOCConstants.CREATE)) createCSVMandatoryHeaderCols else updateCSVMandatoryHeaderCols - val mandatoryMissingDataList = csvRecords.flatMap(csvRecord => { + val mandatoryMissingDataList = csvRecords.asScala.flatMap(csvRecord => { csvRecord.toMap.asScala.toMap.map(colData => { if(mandatoryDataHdrCols.contains(colData._1) && colData._2.trim.isEmpty) MessageFormat.format("\nRow {0} - column: {1}", (csvRecord.getRecordNumber+1).toString,colData._1) @@ -156,7 +155,7 @@ object CollectionCSVValidator { // Check if data exists in hierarchy folder columns - START val hierarchyHeaders: Map[String, Integer] = if(mode.equals(CollectionTOCConstants.CREATE)) createCSVHeaders else updateCSVHeaders - val missingDataList = csvRecords.flatMap(csvRecord => { + val missingDataList = csvRecords.asScala.flatMap(csvRecord => { val csvRecordFolderHierarchyData = csvRecord.toMap.asScala.toMap.filter(colData => { folderHierarchyHdrColumnsList.contains(colData._1) && colData._2.trim.nonEmpty }) @@ -176,8 +175,8 @@ object CollectionCSVValidator { private def validateDuplicateRows(csvRecords: util.List[CSVRecord]): Unit = { // Verify if there are any duplicate hierarchy folder structure - START - val dupRecordsList = csvRecords.filter(csvRecord => { - csvRecords.exists(record => { + val dupRecordsList = csvRecords.asScala.filter(csvRecord => { + csvRecords.asScala.exists(record => { val csvRecordFolderHierarchy = csvRecord.toMap.asScala.toMap.map(colData => { if(folderHierarchyHdrColumnsList.contains(colData._1)) colData @@ -199,7 +198,7 @@ object CollectionCSVValidator { private def validateQRCodeColumns(csvRecords: util.List[CSVRecord]): Unit = { // Verify if there are any QR Codes data entry issues - START - val qrDataErrorMessage = csvRecords.map(csvRecord => { + val qrDataErrorMessage = csvRecords.asScala.map(csvRecord => { val csvRecordMap = csvRecord.toMap.asScala.toMap if((csvRecordMap(qrCodeHdrColsList.head).equalsIgnoreCase(CollectionTOCConstants.NO) || csvRecordMap(qrCodeHdrColsList.head).isEmpty) && csvRecordMap(qrCodeHdrColsList(1)).nonEmpty) @@ -212,8 +211,8 @@ object CollectionCSVValidator { throw new ClientException("ERROR_QR_CODE_ENTRY", "Following rows have incorrect QR Code entries. “QR Code Required?” should be “Yes” if there is a value in QR Code column. 
Please correct and upload again: " + qrDataErrorMessage) // Verify if there are any QR Codes data entry issues - END // Verify if there are any duplicate QR Codes - START - val dupQRListMsg = csvRecords.filter(csvRecord => { - csvRecords.exists(record => { + val dupQRListMsg = csvRecords.asScala.filter(csvRecord => { + csvRecords.asScala.exists(record => { record.get(CollectionTOCConstants.QR_CODE).nonEmpty && csvRecord.get(CollectionTOCConstants.QR_CODE).nonEmpty && record.get(CollectionTOCConstants.QR_CODE).equals(csvRecord.get(CollectionTOCConstants.QR_CODE)) && !csvRecord.getRecordNumber.equals(record.getRecordNumber) }) @@ -228,7 +227,7 @@ object CollectionCSVValidator { private def validateLinkedContentsColumns(csvRecords: util.List[CSVRecord]): Unit = { // Check if data exists in Linked content columns - START - val missingLinkedContentDataList = csvRecords.flatMap(csvRecord => { + val missingLinkedContentDataList = csvRecords.asScala.flatMap(csvRecord => { val csvRecordLinkedContentsData = csvRecord.toMap.asScala.toMap.filter(colData => { linkedContentHdrColumnsList.contains(colData._1) && colData._2.nonEmpty }) @@ -248,7 +247,7 @@ object CollectionCSVValidator { private def validateCollectionName(csvRecords: util.List[CSVRecord], collectionHierarchy: Map[String, AnyRef]): Unit = { // validate collection name column in CSV - START - val invalidCollectionNameErrorMessage = csvRecords.flatMap(csvRecord => { + val invalidCollectionNameErrorMessage = csvRecords.asScala.flatMap(csvRecord => { csvRecord.toMap.asScala.toMap.map(colData => { if (collectionNameHeader.contains(colData._1) && (colData._2.trim.isEmpty || !colData._2.trim.equalsIgnoreCase(collectionHierarchy(CollectionTOCConstants.NAME).toString))) MessageFormat.format("\nRow {0}", (csvRecord.getRecordNumber + 1).toString + " - " + colData._2) @@ -269,7 +268,7 @@ object CollectionCSVValidator { private def validateUnitFieldLength(csvRecords: util.List[CSVRecord]): Unit = { // validate Units' name length - val invalidUnitLengthErrorMessage = csvRecords.flatMap(csvRecord => { + val invalidUnitLengthErrorMessage = csvRecords.asScala.flatMap(csvRecord => { csvRecord.toMap.asScala.toMap.map(colData => { if (folderHierarchyHdrColumnsList.contains(colData._1) && (colData._2.trim.nonEmpty && colData._2.trim.length>maxUnitFieldLength)) MessageFormat.format("\nRow {0}", (csvRecord.getRecordNumber + 1).toString + " - " + colData._1) else "" @@ -281,7 +280,7 @@ object CollectionCSVValidator { private def validateDescFieldLength(csvRecords: util.List[CSVRecord]): Unit = { // validate Description column data length in CSV - val invalidDescLengthErrorMessage = csvRecords.flatMap(csvRecord => { + val invalidDescLengthErrorMessage = csvRecords.asScala.flatMap(csvRecord => { csvRecord.toMap.asScala.toMap.map(colData => { if (colData._1.trim.equalsIgnoreCase("Description") && (colData._2.trim.nonEmpty && colData._2.trim.length>maxDescFieldLength)) MessageFormat.format("\nRow {0}", (csvRecord.getRecordNumber + 1).toString) else "" @@ -295,7 +294,7 @@ object CollectionCSVValidator { // validate Folder Identifier column in CSV - START val collectionChildNodes = collectionHierarchy(CollectionTOCConstants.CHILD_NODES).asInstanceOf[List[String]] - val invalidCollectionNodeIDErrorMessage = csvRecords.flatMap(csvRecord => { + val invalidCollectionNodeIDErrorMessage = csvRecords.asScala.flatMap(csvRecord => { csvRecord.toMap.asScala.toMap.map(colData => { if (collectionNodeIdentifierHeader.contains(colData._1) && (colData._2.isEmpty || 
!collectionChildNodes.contains(colData._2.trim))) MessageFormat.format("\nRow {0}", (csvRecord.getRecordNumber + 1).toString) @@ -306,15 +305,15 @@ object CollectionCSVValidator { if (invalidCollectionNodeIDErrorMessage.trim.nonEmpty) throw new ClientException("CSV_INVALID_COLLECTION_NODE_ID", "Following rows have invalid “Folder Identifier”. Please correct and upload again: " + invalidCollectionNodeIDErrorMessage) - val folderIdentifierList = csvRecords.flatMap(csvRecord => { + val folderIdentifierList = csvRecords.asScala.flatMap(csvRecord => { csvRecord.toMap.asScala.toMap.map(colData => { if (collectionNodeIdentifierHeader.contains(colData._1) && collectionChildNodes.contains(colData._2.trim)) colData._2.trim else "" }) }).filter(msg => msg.nonEmpty) // Verify if there are any duplicate folder identifier - START - val dupFolderIdentifierList = csvRecords.filter(csvRecord => { - csvRecords.exists(record => { + val dupFolderIdentifierList = csvRecords.asScala.filter(csvRecord => { + csvRecords.asScala.exists(record => { val csvRecordFolderIdentifier = csvRecord.toMap.asScala.toMap.map(colData => { if(collectionNodeIdentifierHeader.contains(colData._1)) colData @@ -339,14 +338,14 @@ object CollectionCSVValidator { private def validateQRCodes(csvRecords: util.List[CSVRecord], collectionHierarchy: Map[String, AnyRef])(implicit oec: OntologyEngineContext, ec: ExecutionContext): Unit = { // Validate QR Codes with reserved DIAL codes - START - val csvQRCodesList: List[String] = csvRecords.map(csvRecord => { + val csvQRCodesList: List[String] = csvRecords.asScala.map(csvRecord => { csvRecord.toMap.asScala.toMap.get(qrCodeHdrColsList(1)).get.trim }).filter(msg => msg.nonEmpty).toList if(csvQRCodesList.nonEmpty) { val returnDIALCodes = validateDialCodes(collectionHierarchy(CollectionTOCConstants.CHANNEL).toString, csvQRCodesList) - val invalidQRCodeErrorMessage = csvRecords.flatMap(csvRecord => { + val invalidQRCodeErrorMessage = csvRecords.asScala.flatMap(csvRecord => { csvRecord.toMap.asScala.toMap.map(colData => { if (qrCodeHdrColsList.contains(colData._1) && (csvQRCodesList diff returnDIALCodes).contains(colData._2.trim)) MessageFormat.format("\nRow {0}", (csvRecord.getRecordNumber + 1).toString) @@ -363,7 +362,7 @@ object CollectionCSVValidator { private def validateMappedTopics(csvRecords: util.List[CSVRecord], collectionHierarchy: Map[String, AnyRef])(implicit oec: OntologyEngineContext, ec: ExecutionContext): Unit = { // Validate Mapped Topics with Collection Framework data - START - val mappedTopicsList = csvRecords.flatMap(csvRecord => { + val mappedTopicsList = csvRecords.asScala.flatMap(csvRecord => { csvRecord.toMap.asScala.toMap.map(colData => { if (mappedTopicsHeader.contains(colData._1) && colData._2.nonEmpty) colData._2.trim.split(",").mkString(",") else "" }) @@ -376,17 +375,17 @@ object CollectionCSVValidator { val frameworkId = collectionHierarchy(CollectionTOCConstants.FRAMEWORK).toString val frameworkGetResponse = getFrameworkTopics(frameworkId) val frameworkGetResult = frameworkGetResponse.getResult.getOrDefault(CollectionTOCConstants.FRAMEWORK, new util.HashMap[String, AnyRef]()).asInstanceOf[util.HashMap[String, AnyRef]].asScala.toMap[String, AnyRef] - val frameworkCategories = frameworkGetResult.getOrDefault(CollectionTOCConstants.CATEGORIES, new util.ArrayList[util.Map[String,AnyRef]]()).asInstanceOf[util.ArrayList[util.Map[String, AnyRef]]] + val frameworkCategories = frameworkGetResult.getOrElse(CollectionTOCConstants.CATEGORIES, new 
util.ArrayList[util.Map[String,AnyRef]]()).asInstanceOf[util.ArrayList[util.Map[String, AnyRef]]] - val frameworkTopicList = frameworkCategories.flatMap(categoryData => { - categoryData.map(colData => { - if (categoryData(CollectionTOCConstants.CODE).equals(CollectionTOCConstants.TOPIC) && colData._1.equalsIgnoreCase(CollectionTOCConstants.TERMS)) + val frameworkTopicList = frameworkCategories.asScala.flatMap(categoryData => { + categoryData.asScala.map(colData => { + if (categoryData.get(CollectionTOCConstants.CODE).equals(CollectionTOCConstants.TOPIC) && colData._1.equalsIgnoreCase(CollectionTOCConstants.TERMS)) colData._2.asInstanceOf[util.ArrayList[util.Map[String, AnyRef]]].asScala.toList.map(rec => rec.asScala.toMap[String,AnyRef]).map(_.getOrElse(CollectionTOCConstants.NAME, "")).asInstanceOf[List[String]] else List.empty }) }).filter(topic => topic.nonEmpty).flatten - val invalidTopicsErrorMessage = csvRecords.flatMap(csvRecord => { + val invalidTopicsErrorMessage = csvRecords.asScala.flatMap(csvRecord => { csvRecord.toMap.asScala.toMap.map(colData => { if (mappedTopicsHeader.contains(colData._1) && colData._2.trim.nonEmpty) { val topicsDataList: List[String] = colData._2.trim.split(",").toList @@ -406,7 +405,7 @@ object CollectionCSVValidator { private def validateLinkedContents(csvRecords: util.List[CSVRecord], collectionHierarchy: Map[String, AnyRef])(implicit oec: OntologyEngineContext, ec: ExecutionContext): List[Map[String, AnyRef]] = { // Validate Linked Contents authenticity - START - val csvLinkedContentsList: List[String] = csvRecords.flatMap(csvRecord => { + val csvLinkedContentsList: List[String] = csvRecords.asScala.flatMap(csvRecord => { csvRecord.toMap.asScala.toMap.map(colData => { if (linkedContentHdrColumnsList.contains(colData._1) && colData._2.trim.nonEmpty) colData._2.trim else "" }) @@ -420,7 +419,7 @@ object CollectionCSVValidator { TelemetryManager.info("CollectionCSVActor --> validateCSVRecordsDataAuthenticity --> linked content search response:: returnedLinkedContentsResult:: " + returnedLinkedContentsResult.toString()) val returnedLinkedContentsIdentifierList = returnedLinkedContentsResult.map(_.getOrElse(CollectionTOCConstants.IDENTIFIER, "")).asInstanceOf[List[String]] TelemetryManager.info("CollectionCSVActor --> validateCSVRecordsDataAuthenticity --> linked content search response:: csvLinkedContentsList:: " + csvLinkedContentsList.toString() + " || returnedLinkedContentsIdentifierList:: " + returnedLinkedContentsIdentifierList.toString()) - val invalidLinkedContentsErrorMessage = csvRecords.flatMap(csvRecord => { + val invalidLinkedContentsErrorMessage = csvRecords.asScala.flatMap(csvRecord => { csvRecord.toMap.asScala.toMap.map(colData => { if (linkedContentHdrColumnsList.contains(colData._1) && (csvLinkedContentsList.toSet.toList diff returnedLinkedContentsIdentifierList).contains(colData._2)) MessageFormat.format("\nRow {0}", (csvRecord.getRecordNumber + 1).toString + " - " + colData._2) diff --git a/content-api/collection-csv-actors/src/test/resources/application.conf b/content-api/collection-csv-actors/src/test/resources/application.conf index e49fdc833..b5654a37d 100644 --- a/content-api/collection-csv-actors/src/test/resources/application.conf +++ b/content-api/collection-csv-actors/src/test/resources/application.conf @@ -2,7 +2,7 @@ schema.base_path = "../../schemas" # Configuration graph.dir=/data/testingGraphDB -akka.request_timeout=30 +pekko.request_timeout=30 environment.id=10000000 graph.ids=["domain"] 
graph.passport.key.base=31b6fd1c4d64e745c867e61a45edc34a
diff --git a/content-api/collection-csv-actors/src/test/scala/org/sunbird/collectioncsv/TestCollectionCSVActor.scala b/content-api/collection-csv-actors/src/test/scala/org/sunbird/collectioncsv/TestCollectionCSVActor.scala
index 39cc30e96..39df79b35 100644
--- a/content-api/collection-csv-actors/src/test/scala/org/sunbird/collectioncsv/TestCollectionCSVActor.scala
+++ b/content-api/collection-csv-actors/src/test/scala/org/sunbird/collectioncsv/TestCollectionCSVActor.scala
@@ -1,7 +1,7 @@
 package org.sunbird.collectioncsv
-import akka.actor.{ActorSystem, Props}
-import akka.testkit.TestKit
+import org.apache.pekko.actor.{ActorSystem, Props}
+import org.apache.pekko.testkit.TestKit
 import org.scalamock.scalatest.MockFactory
 import org.scalatest.{FlatSpec, Matchers}
 import org.sunbird.cloudstore.StorageService
diff --git a/content-api/content-actors/pom.xml b/content-api/content-actors/pom.xml
index 45e1e286a..ca08ef154 100644
--- a/content-api/content-actors/pom.xml
+++ b/content-api/content-actors/pom.xml
@@ -28,7 +28,7 @@
         org.sunbird
-        graph-engine_2.12
+        graph-engine_2.13
         1.0-SNAPSHOT
         jar
@@ -88,9 +88,9 @@
         test
-        com.typesafe.akka
-        akka-testkit_${scala.maj.version}
-        2.5.22
+        org.apache.pekko
+        pekko-testkit_${scala.maj.version}
+        1.0.3
         test
diff --git a/content-api/content-actors/src/main/scala/org/sunbird/channel/managers/ChannelManager.scala b/content-api/content-actors/src/main/scala/org/sunbird/channel/managers/ChannelManager.scala
index c0cabc2fe..82cdde8b6 100644
--- a/content-api/content-actors/src/main/scala/org/sunbird/channel/managers/ChannelManager.scala
+++ b/content-api/content-actors/src/main/scala/org/sunbird/channel/managers/ChannelManager.scala
@@ -13,8 +13,7 @@
 import org.apache.commons.collections4.CollectionUtils
 import org.apache.commons.lang3.StringUtils
 import org.sunbird.common.JsonUtils
-import scala.collection.JavaConverters._
-import scala.collection.convert.ImplicitConversions._
+import scala.jdk.CollectionConverters._
 import scala.collection.mutable.ListBuffer
 object ChannelManager {
@@ -52,7 +51,7 @@
   }
   def validateObjectCategory(request: Request) = {
-    if (!util.Collections.disjoint(request.getRequest.keySet(), ChannelConstants.categoryKeyList)) {
+    if (!util.Collections.disjoint(request.getRequest.keySet(), ChannelConstants.categoryKeyList.asJava)) {
       val masterCategoriesList: List[String] = getMasterCategoryList()
       val errMsg: ListBuffer[String] = ListBuffer()
       compareWithMasterCategory(request, masterCategoriesList, errMsg)
@@ -65,7 +64,7 @@
     ChannelConstants.categoryKeyList.map(cat => {
       if (request.getRequest.containsKey(cat)) {
         val requestedCategoryList: util.List[String] = getRequestedCategoryList(request, cat)
-        if (!masterCat.containsAll(requestedCategoryList))
+        if (!masterCat.asJava.containsAll(requestedCategoryList))
           errMsg += cat
       }
     })
@@ -122,11 +121,13 @@
     if (CollectionUtils.isEmpty(channelPrimaryCategories)) globalPrimaryCategories
     else {
-      val idsToIgnore = channelPrimaryCategories.map(cat => cat.get("identifier").asInstanceOf[String])
+      val idsToIgnore = channelPrimaryCategories.asScala.map(cat => cat.get("identifier").asInstanceOf[String])
         .map(id => id.replace("_"+channel, "_all"))
-      globalPrimaryCategories.filter(cat => {
+      val result = new util.ArrayList[util.Map[String, AnyRef]](globalPrimaryCategories.asScala.filter(cat => {
         !idsToIgnore.contains(cat.get("identifier").asInstanceOf[String])
-      }) ++ channelPrimaryCategories
+      }).asJava)
+      result.addAll(channelPrimaryCategories)
+      result
     }
   }
@@ -136,7 +137,7 @@
     if (200 != httpResponse.status) throw new ServerException("ERR_FETCHING_OBJECT_CATEGORY_DEFINITION", "Error while fetching primary categories.")
     val response: Response = JsonUtils.deserialize(httpResponse.body, classOf[Response])
     val objectCategoryList: util.List[util.Map[String, AnyRef]] = response.getResult.getOrDefault(ChannelConstants.objectCategoryDefinitionKey, new util.ArrayList[util.Map[String, AnyRef]]).asInstanceOf[util.ArrayList[util.Map[String, AnyRef]]]
-    objectCategoryList.asScala.map(cat => (cat - "objectType").asJava).asJava
+    objectCategoryList.asScala.map(cat => (cat.asScala - "objectType").asJava).toList.asJava
   }
   def getMasterCategoryList(): List[String] = {
@@ -148,6 +149,6 @@
     val objectCategoryList: util.List[util.Map[String, AnyRef]] = response.getResult.getOrDefault(ChannelConstants.OBJECT_CATEGORY, new util.ArrayList[util.Map[String, AnyRef]]).asInstanceOf[util.ArrayList[util.Map[String, AnyRef]]]
     if (objectCategoryList.isEmpty) throw new ClientException("ERR_NO_MASTER_OBJECT_CATEGORY_DEFINED", "Master category object not present")
-    objectCategoryList.map(a => a.getOrDefault("name", "").asInstanceOf[String]).toList
+    objectCategoryList.asScala.map(a => a.getOrDefault("name", "").asInstanceOf[String]).toList
   }
 }
diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/AppActor.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/AppActor.scala
index 8bb6e5aea..13178a5e8 100644
--- a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/AppActor.scala
+++ b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/AppActor.scala
@@ -13,8 +13,8 @@
 import org.sunbird.util.RequestUtil
 import java.util
 import javax.inject.Inject
-import scala.collection.JavaConverters
 import scala.concurrent.{ExecutionContext, Future}
+import scala.jdk.CollectionConverters._
 /***
  * TODO: rewrite this Actor after merging the Event and EventSet code.
@@ -42,7 +42,7 @@ class AppActor @Inject() (implicit oec: OntologyEngineContext) extends BaseActor @throws[Exception] private def read(request: Request): Future[Response] = { - val fields: util.List[String] = JavaConverters.seqAsJavaListConverter(request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null"))).asJava + val fields: util.List[String] = request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null")).toList.asJava request.getRequest.put("fields", fields) DataNode.read(request).map(node => { if (NodeUtil.isRetired(node)) ResponseHandler.ERROR(ResponseCode.RESOURCE_NOT_FOUND, ResponseCode.RESOURCE_NOT_FOUND.name, "App not found with identifier: " + node.getIdentifier) diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/CategoryActor.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/CategoryActor.scala index 5c466bebe..da40fbc4c 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/CategoryActor.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/CategoryActor.scala @@ -14,7 +14,7 @@ import org.sunbird.graph.nodes.DataNode import org.sunbird.graph.utils.NodeUtil import org.sunbird.util.RequestUtil -import scala.collection.JavaConverters +import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} class CategoryActor @Inject()(implicit oec: OntologyEngineContext) extends BaseActor { @@ -43,7 +43,7 @@ class CategoryActor @Inject()(implicit oec: OntologyEngineContext) extends BaseA @throws[Exception] private def read(request: Request): Future[Response] = { - val fields: util.List[String] = JavaConverters.seqAsJavaListConverter(request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null"))).asJava + val fields: util.List[String] = request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null")).toList.asJava request.getRequest.put("fields", fields) DataNode.read(request).map(node => { if (NodeUtil.isRetired(node)) ResponseHandler.ERROR(ResponseCode.RESOURCE_NOT_FOUND, ResponseCode.RESOURCE_NOT_FOUND.name, "Category not found with identifier: " + node.getIdentifier) diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/ContentActor.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/ContentActor.scala index 837810741..946c1b167 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/ContentActor.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/ContentActor.scala @@ -19,6 +19,7 @@ import org.sunbird.graph.dac.model.Node import org.sunbird.graph.nodes.DataNode import org.sunbird.graph.utils.NodeUtil import org.sunbird.managers.HierarchyManager +import scala.jdk.CollectionConverters._ import org.sunbird.managers.HierarchyManager.hierarchyPrefix import org.sunbird.telemetry.logger.TelemetryManager import org.sunbird.util.RequestUtil @@ -27,8 +28,6 @@ import java.io.File import java.util import java.util.concurrent.CompletionException import javax.inject.Inject -import scala.collection.JavaConverters -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} class 
ContentActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageService) extends BaseActor { @@ -90,7 +89,7 @@ class ContentActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageSe def read(request: Request): Future[Response] = { val responseSchemaName: String = request.getContext.getOrDefault(ContentConstants.RESPONSE_SCHEMA_NAME, "").asInstanceOf[String] - val fields: util.List[String] = JavaConverters.seqAsJavaListConverter(request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null"))).asJava + val fields: util.List[String] = request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null")).toList.asJava request.getRequest.put("fields", fields) DataNode.read(request).map(node => { val metadata: util.Map[String, AnyRef] = NodeUtil.serialize(node, fields, node.getObjectType.toLowerCase.replace("image", ""), request.getContext.get("version").asInstanceOf[String]) @@ -113,7 +112,7 @@ class ContentActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageSe def privateRead(request: Request): Future[Response] = { val responseSchemaName: String = request.getContext.getOrDefault(ContentConstants.RESPONSE_SCHEMA_NAME, "").asInstanceOf[String] - val fields: util.List[String] = JavaConverters.seqAsJavaListConverter(request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null"))).asJava + val fields: util.List[String] = request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null")).toList.asJava request.getRequest.put("fields", fields) if (StringUtils.isBlank(request.getRequest.getOrDefault("channel", "").asInstanceOf[String])) throw new ClientException("ERR_INVALID_CHANNEL", "Please Provide Channel!") DataNode.read(request).map(node => { diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/EventActor.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/EventActor.scala index a1d672b9b..2cf4b56c2 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/EventActor.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/EventActor.scala @@ -11,7 +11,7 @@ import org.sunbird.graph.nodes.DataNode import java.util import javax.inject.Inject -import scala.collection.JavaConverters.asScalaBufferConverter +import scala.jdk.CollectionConverters._ import scala.concurrent.Future class EventActor @Inject()(implicit oec: OntologyEngineContext, ss: StorageService) extends ContentActor { diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/EventSetActor.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/EventSetActor.scala index 2894cc228..f2a968d59 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/EventSetActor.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/EventSetActor.scala @@ -15,7 +15,7 @@ import org.sunbird.utils.HierarchyConstants import java.util import javax.inject.Inject -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.Future class EventSetActor @Inject()(implicit oec: OntologyEngineContext, ss: StorageService) extends ContentActor { @@ -128,7 +128,7 @@ class 
EventSetActor @Inject()(implicit oec: OntologyEngineContext, ss: StorageSe } def getHierarchy(request: Request): Future[Response] = { - val fields: util.List[String] = seqAsJavaListConverter(request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null"))).asJava + val fields: util.List[String] = request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null")).toList.asJava request.getRequest.put("fields", fields) DataNode.read(request).map(node => { val outRelations = if (node.getOutRelations == null) List[Relation]() else node.getOutRelations.asScala diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/LicenseActor.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/LicenseActor.scala index b637c6704..b1c925c26 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/LicenseActor.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/LicenseActor.scala @@ -14,7 +14,7 @@ import org.sunbird.graph.nodes.DataNode import org.sunbird.graph.utils.NodeUtil import org.sunbird.util.RequestUtil -import scala.collection.JavaConverters +import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} class LicenseActor @Inject() (implicit oec: OntologyEngineContext) extends BaseActor { @@ -43,7 +43,7 @@ class LicenseActor @Inject() (implicit oec: OntologyEngineContext) extends BaseA @throws[Exception] private def read(request: Request): Future[Response] = { - val fields: util.List[String] = JavaConverters.seqAsJavaListConverter(request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null"))).asJava + val fields: util.List[String] = request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null")).toList.asJava request.getRequest.put("fields", fields) DataNode.read(request).map(node => { if (NodeUtil.isRetired(node)) ResponseHandler.ERROR(ResponseCode.RESOURCE_NOT_FOUND, ResponseCode.RESOURCE_NOT_FOUND.name, "License not found with identifier: " + node.getIdentifier) diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/ObjectActor.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/ObjectActor.scala index 5a570c854..8a64eb23d 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/ObjectActor.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/ObjectActor.scala @@ -10,7 +10,7 @@ import org.sunbird.graph.nodes.DataNode import org.sunbird.graph.utils.NodeUtil import java.util import javax.inject.Inject -import scala.collection.JavaConverters +import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} class ObjectActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageService) extends BaseActor { @@ -25,7 +25,7 @@ class ObjectActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageSer @throws[Exception] private def read(request: Request): Future[Response] = { - val fields: util.List[String] = JavaConverters.seqAsJavaListConverter(request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null"))).asJava + val 
fields: util.List[String] = request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null")).toList.asJava request.getRequest.put("fields", fields) DataNode.read(request).map(node => { if (NodeUtil.isRetired(node)) ResponseHandler.ERROR(ResponseCode.RESOURCE_NOT_FOUND, ResponseCode.RESOURCE_NOT_FOUND.name, "Object not found with identifier: " + node.getIdentifier) diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/dial/DIALManager.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/dial/DIALManager.scala index 9a67cb324..99d00146e 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/dial/DIALManager.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/dial/DIALManager.scala @@ -17,11 +17,10 @@ import org.sunbird.telemetry.logger.TelemetryManager import java.io.File import java.util import java.util.UUID -import scala.collection.JavaConverters._ import scala.collection.immutable.{HashMap, Map} import scala.collection.mutable.{Map => Mmap} import scala.concurrent.{ExecutionContext, Future} -import scala.util.parsing.json.JSON +import scala.jdk.CollectionConverters._ object DIALManager { @@ -345,9 +344,10 @@ object DIALManager { def createRequest(data: Map[String, AnyRef], channel: String, publisher: Option[String], rspObj: Response, request: Request)(implicit oec: OntologyEngineContext, ec: ExecutionContext) = { val qrCodeSpecString = request.getRequestString("qrcodespec", "") // Assuming this is a JSON string - val qrCodeSpec = JSON.parseFull(qrCodeSpecString) match { - case Some(map: Map[String, Any]) => map - case _ => Map.empty[String, Any] + val qrCodeSpec = if (StringUtils.isNotBlank(qrCodeSpecString)) { + ScalaJsonUtils.deserialize[Map[String, Any]](qrCodeSpecString) + } else { + Map.empty[String, Any] } val mergedConfig: Mmap[String, Any] = defaultConfig.++(qrCodeSpec) diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/review/mgr/ReviewManager.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/review/mgr/ReviewManager.scala index 64297805b..072117588 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/review/mgr/ReviewManager.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/review/mgr/ReviewManager.scala @@ -6,8 +6,8 @@ import org.sunbird.graph.dac.model.Node import org.sunbird.graph.nodes.DataNode import org.sunbird.mimetype.factory.MimeTypeManagerFactory +import scala.jdk.CollectionConverters._ import scala.collection.Map -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} object ReviewManager { diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/upload/mgr/UploadManager.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/upload/mgr/UploadManager.scala index e6a26dbe3..aa711a51d 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/upload/mgr/UploadManager.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/upload/mgr/UploadManager.scala @@ -15,7 +15,7 @@ import org.sunbird.graph.nodes.DataNode import org.sunbird.mimetype.factory.MimeTypeManagerFactory import org.sunbird.telemetry.util.LogTelemetryEventUtil -import scala.collection.JavaConversions.mapAsJavaMap +import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} import org.sunbird.kafka.client.KafkaClient @@ -54,7 
+54,7 @@ object UploadManager { if (StringUtils.isNotBlank(artifactUrl)) { val updateReq = new Request(request) updateReq.getContext().put("identifier", identifier) - updateReq.getRequest.putAll(mapAsJavaMap(updatedResult)) + updateReq.getRequest.putAll(updatedResult.asJava) if( size > CONTENT_ARTIFACT_ONLINE_SIZE) updateReq.put("contentDisposition", "online-only") if (StringUtils.equalsIgnoreCase("Asset", objectType) && MEDIA_TYPE_LIST.contains(mediaType)) diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/util/CopyManager.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/util/CopyManager.scala index d7ce9130f..13619f934 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/util/CopyManager.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/util/CopyManager.scala @@ -26,7 +26,7 @@ import org.sunbird.managers.{HierarchyManager, UpdateHierarchyManager} import org.sunbird.mimetype.factory.MimeTypeManagerFactory import org.sunbird.mimetype.mgr.impl.H5PMimeTypeMgrImpl -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/util/FlagManager.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/util/FlagManager.scala index 3de36faaf..80540c708 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/util/FlagManager.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/util/FlagManager.scala @@ -15,9 +15,8 @@ import org.sunbird.graph.nodes.DataNode import org.sunbird.graph.utils.NodeUtil import org.sunbird.telemetry.logger.TelemetryManager import org.sunbird.utils.HierarchyConstants +import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} -import scala.collection.convert.ImplicitConversions._ -import scala.collection.JavaConverters._ object FlagManager { private val FLAGGABLE_STATUS: util.List[String] = util.Arrays.asList("Live", "Unlisted", "Flagged") @@ -81,7 +80,7 @@ object FlagManager { private def fetchHierarchy(request: Request)(implicit ec: ExecutionContext, oec: OntologyEngineContext): Future[Any] = { oec.graphService.readExternalProps(request, List(HierarchyConstants.HIERARCHY)).map(resp => { - resp.getResult.toMap.getOrElse(HierarchyConstants.HIERARCHY, "").asInstanceOf[String] + resp.getResult.asScala.toMap.getOrElse(HierarchyConstants.HIERARCHY, "").asInstanceOf[String] }) recover { case e: ResourceNotFoundException => TelemetryManager.log("No hierarchy is present in cassandra for identifier:" + request.get(HierarchyConstants.IDENTIFIER)) } } @@ -105,7 +104,7 @@ object FlagManager { dataList }else{ responseDataList.addAll(dataList) - new util.ArrayList[String](responseDataList.toSet) + new util.ArrayList[String](responseDataList.asScala.toSet.asJava) } } } \ No newline at end of file diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/util/RetireManager.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/util/RetireManager.scala index 499bcc1d5..02f546cd2 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/util/RetireManager.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/util/RetireManager.scala @@ -22,8 +22,7 @@ import org.sunbird.parseq.Task import org.sunbird.telemetry.logger.TelemetryManager import org.sunbird.utils.HierarchyConstants -import 
scala.collection.JavaConverters._ -import scala.collection.convert.ImplicitConversions._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable.ListBuffer import scala.concurrent.{ExecutionContext, Future} @@ -38,10 +37,10 @@ object RetireManager { if(CollectionUtils.isNotEmpty(shallowIds)){ throw new ClientException(ContentConstants.ERR_CONTENT_RETIRE, s"Content With Identifier [" + request.get(ContentConstants.IDENTIFIER) + "] Can Not Be Retired. It Has Been Adopted By Other Users.") } else { - val updateMetadataMap = Map(ContentConstants.STATUS -> "Retired", HierarchyConstants.LAST_UPDATED_ON -> DateUtils.formatCurrentDate, HierarchyConstants.LAST_STATUS_CHANGED_ON -> DateUtils.formatCurrentDate) + val updateMetadataMap = Map[String, AnyRef](ContentConstants.STATUS -> "Retired", HierarchyConstants.LAST_UPDATED_ON -> DateUtils.formatCurrentDate, HierarchyConstants.LAST_STATUS_CHANGED_ON -> DateUtils.formatCurrentDate) val futureList = Task.parallel[Response]( handleCollectionToRetire(node, request, updateMetadataMap), - updateNodesToRetire(request, mapAsJavaMap[String,AnyRef](updateMetadataMap))) + updateNodesToRetire(request, updateMetadataMap.asJava)) futureList.map(f => { val response = ResponseHandler.OK() response.put(ContentConstants.IDENTIFIER, request.get(ContentConstants.IDENTIFIER)) @@ -79,16 +78,16 @@ object RetireManager { req.getContext.put(ContentConstants.SCHEMA_NAME, ContentConstants.COLLECTION_SCHEMA_NAME) req.put(ContentConstants.IDENTIFIER, request.get(ContentConstants.IDENTIFIER)) oec.graphService.readExternalProps(req, List(HierarchyConstants.HIERARCHY)).flatMap(resp => { - val hierarchyString = resp.getResult.toMap.getOrElse(HierarchyConstants.HIERARCHY, "").asInstanceOf[String] + val hierarchyString = resp.getResult.asScala.toMap.getOrElse(HierarchyConstants.HIERARCHY, "").asInstanceOf[String] if (StringUtils.isNotBlank(hierarchyString)) { val hierarchyMap = JsonUtils.deserialize(hierarchyString, classOf[util.HashMap[String, AnyRef]]) val childIds = getChildrenIdentifiers(hierarchyMap) if (CollectionUtils.isNotEmpty(childIds)) { val topicName = Platform.getString("kafka.topics.graph.event", "sunbirddev.learning.graph.events") - childIds.foreach(id => kfClient.send(ScalaJsonUtils.serialize(getLearningGraphEvent(request, id)), topicName)) - RedisCache.delete(childIds.map(id => "hierarchy_" + id): _*) + childIds.asScala.foreach(id => kfClient.send(ScalaJsonUtils.serialize(getLearningGraphEvent(request, id)), topicName)) + RedisCache.delete(childIds.asScala.map(id => "hierarchy_" + id).toSeq: _*) } - hierarchyMap.putAll(updateMetadataMap) + hierarchyMap.putAll(updateMetadataMap.asJava) req.put(HierarchyConstants.HIERARCHY, ScalaJsonUtils.serialize(hierarchyMap)) oec.graphService.saveExternalProps(req) } else Future(ResponseHandler.OK()) @@ -124,9 +123,9 @@ object RetireManager { if (httpResponse.getStatus == 200) { val response: Response = JsonUtils.deserialize(httpResponse.getBody, classOf[Response]) if(response.get("count").asInstanceOf[Integer] > 0){ - response.get("content").asInstanceOf[util.ArrayList[util.Map[String, AnyRef]]].map(content => { + response.get("content").asInstanceOf[util.ArrayList[util.Map[String, AnyRef]]].asScala.map(content => { val originData = ScalaJsonUtils.deserialize[Map[String, AnyRef]](content.get("originData").asInstanceOf[String]) - val copyType = originData.getOrDefault("copyType", "").asInstanceOf[String] + val copyType = originData.getOrElse("copyType", "").asInstanceOf[String] if(StringUtils.isNotBlank(copyType) 
&& StringUtils.equalsIgnoreCase(copyType , "shallow")){ result.add(content.get("identifier").asInstanceOf[String]) } else { @@ -143,13 +142,13 @@ object RetireManager { private def getChildrenIdentifiers(hierarchyMap: util.HashMap[String, AnyRef]): util.List[String] = { val childIds: ListBuffer[String] = ListBuffer[String]() - addChildIds(hierarchyMap.getOrElse(HierarchyConstants.CHILDREN, new util.ArrayList[util.HashMap[String, AnyRef]]()).asInstanceOf[util.ArrayList[util.HashMap[String, AnyRef]]], childIds) - bufferAsJavaList(childIds) + addChildIds(hierarchyMap.getOrDefault(HierarchyConstants.CHILDREN, new util.ArrayList[util.HashMap[String, AnyRef]]()).asInstanceOf[util.ArrayList[util.HashMap[String, AnyRef]]], childIds) + childIds.toList.asJava } private def addChildIds(childrenMaps: util.ArrayList[util.HashMap[String, AnyRef]], childrenIds: ListBuffer[String]): Unit = { if (CollectionUtils.isNotEmpty(childrenMaps)) { - childrenMaps.filter(child => StringUtils.equalsIgnoreCase(HierarchyConstants.PARENT, child.get(HierarchyConstants.VISIBILITY).asInstanceOf[String])).foreach(child => { + childrenMaps.asScala.filter(child => StringUtils.equalsIgnoreCase(HierarchyConstants.PARENT, child.get(HierarchyConstants.VISIBILITY).asInstanceOf[String])).foreach(child => { childrenIds += child.get(HierarchyConstants.IDENTIFIER).asInstanceOf[String] addChildIds(child.get(HierarchyConstants.CHILDREN).asInstanceOf[util.ArrayList[util.HashMap[String, AnyRef]]], childrenIds) }) diff --git a/content-api/content-actors/src/main/scala/org/sunbird/util/HttpUtil.scala b/content-api/content-actors/src/main/scala/org/sunbird/util/HttpUtil.scala index 9284421ae..2ed352a84 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/util/HttpUtil.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/util/HttpUtil.scala @@ -1,7 +1,7 @@ package org.sunbird.util import com.mashape.unirest.http.Unirest -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ /** * diff --git a/content-api/content-actors/src/test/resources/application.conf b/content-api/content-actors/src/test/resources/application.conf index 76bbc40c5..c43276724 100644 --- a/content-api/content-actors/src/test/resources/application.conf +++ b/content-api/content-actors/src/test/resources/application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. 
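Every hunk above follows the same replacement recipe, so a minimal, self-contained sketch of the scala.jdk.CollectionConverters idiom may help when reviewing the rest of the diff. The object and value names below (ConvertersSketch, updateMetadata, ids, deleteKeys) are illustrative, not taken from the codebase; only the idiom mirrors these changes: conversions are explicit in Scala 2.13, the value type has to be widened to AnyRef where the Java side expects util.Map[String, AnyRef] (as the putAll call sites in these hunks appear to require), and varargs calls want an immutable Seq, hence the trailing .toSeq.

import java.util
import scala.jdk.CollectionConverters._

object ConvertersSketch extends App {
  // Scala -> Java: annotate the value type as AnyRef, otherwise .asJava infers
  // util.Map[String, String] and a call site expecting util.Map[String, AnyRef]
  // will not compile.
  val updateMetadata: Map[String, AnyRef] = Map("status" -> "Retired", "versionKey" -> "test_123")
  val javaMetadata: util.Map[String, AnyRef] = updateMetadata.asJava

  // Java -> Scala: .asScala yields a mutable Buffer; the conversion is now explicit.
  val ids: util.List[String] = util.Arrays.asList("do_111", "do_222")
  val cacheKeys = ids.asScala.map(id => "hierarchy_" + id)

  // Scala 2.13 varargs expect an immutable Seq, hence the .toSeq before `: _*`.
  def deleteKeys(keys: String*): Unit = println("deleting " + keys.mkString(", "))
  deleteKeys(cacheKeys.toSeq: _*)

  println(javaMetadata.get("status"))
}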
# @@ -338,7 +338,7 @@ plugin.media.base.url="https://dev.open-sunbird.org" # Configuration graph.dir=/data/testingGraphDB -akka.request_timeout=30 +pekko.request_timeout=30 environment.id=10000000 graph.ids=["domain"] graph.passport.key.base=31b6fd1c4d64e745c867e61a45edc34a diff --git a/content-api/content-actors/src/test/scala/org/sunbird/channel/TestChannelManager.scala b/content-api/content-actors/src/test/scala/org/sunbird/channel/TestChannelManager.scala index 4d58324f9..6779699c7 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/channel/TestChannelManager.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/channel/TestChannelManager.scala @@ -1,4 +1,5 @@ package org.sunbird.channel +import scala.jdk.CollectionConverters._ import org.scalatest.{AsyncFlatSpec, Matchers} import org.sunbird.common.dto.Request diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/BaseSpec.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/BaseSpec.scala index 30c4742de..7fb85c7aa 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/BaseSpec.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/BaseSpec.scala @@ -1,10 +1,11 @@ package org.sunbird.content.actors +import scala.jdk.CollectionConverters._ import java.util import java.util.concurrent.TimeUnit -import akka.actor.{ActorSystem, Props} -import akka.testkit.TestKit +import org.apache.pekko.actor.{ActorSystem, Props} +import org.apache.pekko.testkit.TestKit import org.scalatest.{FlatSpec, Matchers} import org.sunbird.common.dto.{Request, Response} import org.sunbird.graph.OntologyEngineContext @@ -77,7 +78,7 @@ class BaseSpec extends FlatSpec with Matchers { put("status", "Live") } }) - val masterCategories: scala.collection.immutable.Map[String, AnyRef] = Map( + val masterCategories: scala.collection.immutable.Map[String, AnyRef] = Map[String,AnyRef]( node.getMetadata.getOrDefault("code", "").asInstanceOf[String] -> Map[String, AnyRef]("code" -> node.getMetadata.getOrDefault("code", "").asInstanceOf[String], "orgIdFieldName" -> node.getMetadata.getOrDefault("orgIdFieldName", "").asInstanceOf[String], diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestAppActor.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestAppActor.scala index 3bc1d1327..4b777b075 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestAppActor.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestAppActor.scala @@ -1,6 +1,7 @@ package org.sunbird.content.actors +import scala.jdk.CollectionConverters._ -import akka.actor.Props +import org.apache.pekko.actor.Props import org.apache.commons.lang3.StringUtils import org.scalamock.scalatest.MockFactory import org.sunbird.cloudstore.StorageService @@ -11,8 +12,7 @@ import org.sunbird.graph.{GraphService, OntologyEngineContext} import scala.concurrent.ExecutionContext.Implicits.global import java.util -import scala.collection.JavaConverters._ -import scala.collection.JavaConversions.mapAsJavaMap +// import scala.jdk.CollectionConverters.mapAsJavaMap replaced with .asJava) import scala.concurrent.Future class TestAppActor extends BaseSpec with MockFactory { @@ -39,9 +39,9 @@ class TestAppActor extends BaseSpec with MockFactory { request.getRequest.put("name", "Test Integration App") request.getRequest.put("logo", "logo url") request.getRequest.put("description", "Description 
of Test Integration App") - request.getRequest.put("provider", Map("name" -> "Test Organisation", "copyright" -> "CC BY 4.0").asJava) + request.getRequest.put("provider", Map[String,AnyRef]("name" -> "Test Organisation", "copyright" -> "CC BY 4.0").asJava) request.getRequest.put("osType", "Android") - request.getRequest.put("osMetadata", Map("packageId" -> "org.test.integration", "appVersion" -> "1.0", "compatibilityVer" -> "1.0").asJava) + request.getRequest.put("osMetadata", Map[String,AnyRef]("packageId" -> "org.test.integration", "appVersion" -> "1.0", "compatibilityVer" -> "1.0").asJava) request.setOperation("create") val response = callActor(request, Props(new AppActor())) assert("successful".equals(response.getParams.getStatus)) @@ -69,7 +69,7 @@ class TestAppActor extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(nodes)).anyNumberOfTimes() val request = getRequest() - request.putAll(mapAsJavaMap(Map("description" -> "test desc"))) + request.putAll(Map[String,AnyRef]("description" -> "test desc").asJava) request.setOperation("update") val response = callActor(request, Props(new AppActor())) assert("successful".equals(response.getParams.getStatus)) @@ -83,7 +83,7 @@ class TestAppActor extends BaseSpec with MockFactory { val node = getValidNode() (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes() val request = getRequest() - request.putAll(mapAsJavaMap(Map("fields" -> ""))) + request.putAll(Map[String,AnyRef]("fields" -> "").asJava) request.setOperation("read") val response = callActor(request, Props(new AppActor())) assert("successful".equals(response.getParams.getStatus)) @@ -118,9 +118,9 @@ class TestAppActor extends BaseSpec with MockFactory { put("name", "Test Integration App") put("logo", "logo url") put("description", "Description of Test Integration App") - put("provider", Map("name" -> "Test Organisation", "copyright" -> "CC BY 4.0").asJava) + put("provider", Map[String,AnyRef]("name" -> "Test Organisation", "copyright" -> "CC BY 4.0").asJava) put("osType", "Android") - put("osMetadata", Map("packageId" -> "org.test.sunbird.integration", "appVersion" -> "1.0", "compatibilityVer" -> "1.0").asJava) + put("osMetadata", Map[String,AnyRef]("packageId" -> "org.test.sunbird.integration", "appVersion" -> "1.0", "compatibilityVer" -> "1.0").asJava) } }) node diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestAssetActor.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestAssetActor.scala index fcf4fa898..763208d5a 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestAssetActor.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestAssetActor.scala @@ -1,6 +1,7 @@ package org.sunbird.content.actors +import scala.jdk.CollectionConverters._ -import akka.actor.Props +import org.apache.pekko.actor.Props import org.scalamock.scalatest.MockFactory import org.sunbird.cloudstore.StorageService import org.sunbird.common.dto.{Request, Response} @@ -9,7 +10,7 @@ import org.sunbird.graph.dac.model.{Node, SearchCriteria} import org.sunbird.graph.{GraphService, OntologyEngineContext} import java.util -import scala.collection.JavaConversions.mapAsJavaMap +// import scala.jdk.CollectionConverters.mapAsJavaMap replaced with .asJava) import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future @@ 
-170,7 +171,7 @@ class TestAssetActor extends BaseSpec with MockFactory { node.setNodeType("DATA_NODE") node.setObjectType("Framework") node.setGraphId("domain") - node.setMetadata(mapAsJavaMap(Map("name"-> "NCF"))) + node.setMetadata(Map[String,AnyRef]("name"-> "NCF").asJava) node } @@ -180,7 +181,7 @@ class TestAssetActor extends BaseSpec with MockFactory { node.setNodeType("DATA_NODE") node.setObjectType("Term") node.setGraphId("domain") - node.setMetadata(mapAsJavaMap(Map("name"-> "CBSE"))) + node.setMetadata(Map[String,AnyRef]("name"-> "CBSE").asJava) node } } diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestCategoryActor.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestCategoryActor.scala index 59cd97fdc..edda7f3ec 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestCategoryActor.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestCategoryActor.scala @@ -1,8 +1,9 @@ package org.sunbird.content.actors +import scala.jdk.CollectionConverters._ import java.util -import akka.actor.Props +import org.apache.pekko.actor.Props import org.apache.hadoop.util.StringUtils import org.scalamock.scalatest.MockFactory import org.sunbird.cloudstore.StorageService @@ -11,7 +12,7 @@ import org.sunbird.common.exception.ResponseCode import org.sunbird.graph.{GraphService, OntologyEngineContext} import org.sunbird.graph.dac.model.{Node, SearchCriteria} -import scala.collection.JavaConversions.mapAsJavaMap +// import scala.jdk.CollectionConverters.mapAsJavaMap replaced with .asJava) import scala.concurrent.Future import scala.concurrent.ExecutionContext.Implicits.global @@ -37,7 +38,7 @@ class TestCategoryActor extends BaseSpec with MockFactory{ request.getRequest.put("targetIdFieldName", "targetStateIds") request.getRequest.put("searchIdFieldName", "se_stateIds") request.getRequest.put("searchLabelFieldName", "se_states") - request.putAll(mapAsJavaMap(Map("name" -> "do_1234", "code" -> "do_1234"))) + request.putAll(Map[String,AnyRef]("name" -> "do_1234", "code" -> "do_1234").asJava) request.setOperation("createCategory") val response = callActor(request, Props(new CategoryActor())) assert(response.get("identifier") != null) @@ -53,7 +54,7 @@ class TestCategoryActor extends BaseSpec with MockFactory{ request.getRequest.put("targetIdFieldName", "targetStateIds") request.getRequest.put("searchIdFieldName", "se_stateIds") request.getRequest.put("searchLabelFieldName", "se_states") - request.putAll(mapAsJavaMap(Map("code" -> "do_1234"))) + request.putAll(Map[String,AnyRef]("code" -> "do_1234").asJava) val response = callActor(request, Props(new CategoryActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) assert(StringUtils.equalsIgnoreCase(response.get("messages").asInstanceOf[util.ArrayList[String]].get(0).asInstanceOf[String], "Required Metadata name not set")) @@ -67,7 +68,7 @@ class TestCategoryActor extends BaseSpec with MockFactory{ request.getRequest.put("targetIdFieldName", "targetStateIds") request.getRequest.put("searchIdFieldName", "se_stateIds") request.getRequest.put("searchLabelFieldName", "se_states") - request.putAll(mapAsJavaMap(Map("identifier" -> "do_1234", "code" -> "do_1234"))) + request.putAll(Map[String,AnyRef]("identifier" -> "do_1234", "code" -> "do_1234").asJava) request.setOperation("createCategory") val response = callActor(request, Props(new CategoryActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -87,7 
+88,7 @@ class TestCategoryActor extends BaseSpec with MockFactory{ implicit val ss = mock[StorageService] val request = getCategoryRequest() request.getContext.put("identifier","do_1234") - request.putAll(mapAsJavaMap(Map("description" -> "test desc", "code" -> "do_1234"))) + request.putAll(Map[String,AnyRef]("description" -> "test desc", "code" -> "do_1234").asJava) request.setOperation("updateCategory") val response = callActor(request, Props(new CategoryActor())) assert("successful".equals(response.getParams.getStatus)) @@ -103,7 +104,7 @@ class TestCategoryActor extends BaseSpec with MockFactory{ implicit val ss = mock[StorageService] val request = getCategoryRequest() request.getContext.put("identifier","do_1234") - request.putAll(mapAsJavaMap(Map("fields" -> ""))) + request.putAll(Map[String,AnyRef]("fields" -> "").asJava) request.setOperation("readCategory") val response = callActor(request, Props(new CategoryActor())) assert("successful".equals(response.getParams.getStatus)) @@ -122,7 +123,7 @@ class TestCategoryActor extends BaseSpec with MockFactory{ implicit val ss = mock[StorageService] val request = getCategoryRequest() request.getContext.put("identifier","do_1234") - request.putAll(mapAsJavaMap(Map("identifier" -> "do_1234", "code" -> "do_1234"))) + request.putAll(Map[String,AnyRef]("identifier" -> "do_1234", "code" -> "do_1234").asJava) request.setOperation("retireCategory") val response = callActor(request, Props(new CategoryActor())) assert("successful".equals(response.getParams.getStatus)) diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestChannelActor.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestChannelActor.scala index c261e6bdf..562ec7f3a 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestChannelActor.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestChannelActor.scala @@ -1,15 +1,15 @@ package org.sunbird.content.actors +import scala.jdk.CollectionConverters._ import java.util -import akka.actor.Props +import org.apache.pekko.actor.Props import org.scalamock.scalatest.MockFactory import org.sunbird.channel.actors.ChannelActor import org.sunbird.common.dto.Request import org.sunbird.graph.dac.model.{Node, SearchCriteria} import org.sunbird.graph.{GraphService, OntologyEngineContext} -import scala.collection.JavaConverters._ import scala.concurrent.Future import scala.concurrent.ExecutionContext.Implicits.global @@ -53,7 +53,7 @@ class TestChannelActor extends BaseSpec with MockFactory { implicit val oec: OntologyEngineContext = mock[OntologyEngineContext] val graphDB = mock[GraphService] (oec.graphService _).expects().returns(graphDB) - val node = new Node("domain",mapAsJavaMap(Map("identifier" -> "channel_test", "nodeType"->"DATA_NODE", "objectType"->"Channel"))) + val node = new Node("domain", Map[String,AnyRef]("identifier" -> "channel_test", "nodeType"->"DATA_NODE", "objectType"->"Channel").asJava) node.setIdentifier("channel_test") node.setObjectType("Channel") (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)) @@ -68,7 +68,7 @@ class TestChannelActor extends BaseSpec with MockFactory { implicit val oec: OntologyEngineContext = mock[OntologyEngineContext] val graphDB = mock[GraphService] (oec.graphService _).expects().returns(graphDB) - val node = new Node("domain",mapAsJavaMap(Map("identifier" -> "channel_test", "nodeType"->"DATA_NODE", 
"objectType"->"Channel"))) + val node = new Node("domain", Map[String,AnyRef]("identifier" -> "channel_test", "nodeType"->"DATA_NODE", "objectType"->"Channel").asJava) node.setIdentifier("channel_test") node.setObjectType("Channel") (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)) diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestCollectionActor.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestCollectionActor.scala index 6782b4308..cd1d3792c 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestCollectionActor.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestCollectionActor.scala @@ -1,8 +1,9 @@ package org.sunbird.content.actors +import scala.jdk.CollectionConverters._ import java.util -import akka.actor.Props +import org.apache.pekko.actor.Props import org.sunbird.common.dto.Request import org.sunbird.graph.OntologyEngineContext diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestContentActor.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestContentActor.scala index 2c8b9057a..769f3830f 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestContentActor.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestContentActor.scala @@ -1,6 +1,7 @@ package org.sunbird.content.actors +import scala.jdk.CollectionConverters._ -import akka.actor.Props +import org.apache.pekko.actor.Props import org.scalamock.scalatest.MockFactory import org.sunbird.cloudstore.StorageService import org.sunbird.common.dto.{Property, Request, Response, ResponseHandler} @@ -12,7 +13,6 @@ import org.sunbird.graph.{GraphService, OntologyEngineContext} import org.sunbird.kafka.client.KafkaClient import java.util -import scala.collection.JavaConverters._ import scala.collection.convert.ImplicitConversions._ import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future @@ -29,7 +29,7 @@ class TestContentActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] implicit val oec: OntologyEngineContext = mock[OntologyEngineContext] val request = getContentRequest() - val content = mapAsJavaMap(Map("name" -> "New Content", "code" -> "1234", "mimeType"-> "application/pdf", "contentType" -> "Resource", + val content = (Map[String,AnyRef]("name" -> "New Content", "code" -> "1234", "mimeType"-> "application/pdf", "contentType" -> "Resource", "framework" -> "NCF", "organisationBoardIds" -> new util.ArrayList[String](){{add("ncf_board_cbse")}})) request.put("content", content) assert(true) @@ -51,7 +51,9 @@ class TestContentActor extends BaseSpec with MockFactory { })) val request = getContentRequest() - request.getRequest.putAll( mapAsJavaMap(Map("channel"-> "in.ekstep","name" -> "New", "code" -> "1234", "mimeType"-> "application/vnd.ekstep.content-collection", "contentType" -> "Course", "primaryCategory" -> "Learning Resource", "channel" -> "in.ekstep", "targetBoardIds" -> new util.ArrayList[String](){{add("ncf_board_cbse")}}))) + val targetBoardIds = new util.ArrayList[String]() + targetBoardIds.add("ncf_board_cbse") + request.getRequest.putAll( (Map[String,AnyRef]("channel"-> "in.ekstep","name" -> "New", "code" -> "1234", "mimeType"-> "application/vnd.ekstep.content-collection", "contentType" -> "Course", "primaryCategory" -> "Learning Resource", "channel" 
-> "in.ekstep", "targetBoardIds" -> targetBoardIds))) request.setOperation("createContent") val response = callActor(request, Props(new ContentActor())) assert(response.get("identifier") != null) @@ -69,7 +71,9 @@ class TestContentActor extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(nodes)).anyNumberOfTimes() val request = getContentRequest() - request.getRequest.putAll( mapAsJavaMap(Map("name" -> "New Content", "code" -> "1234", "mimeType"-> "application/vnd.ekstep.plugin-archive", "contentType" -> "Course", "primaryCategory" -> "Learning Resource", "channel" -> "in.ekstep", "framework"-> "NCF", "organisationBoardIds" -> new util.ArrayList[String](){{add("ncf_board_cbse")}}))) + val organisationBoardIds = new util.ArrayList[String]() + organisationBoardIds.add("ncf_board_cbse") + request.getRequest.putAll( (Map[String,AnyRef]("name" -> "New Content", "code" -> "1234", "mimeType"-> "application/vnd.ekstep.plugin-archive", "contentType" -> "Course", "primaryCategory" -> "Learning Resource", "channel" -> "in.ekstep", "framework"-> "NCF", "organisationBoardIds" -> organisationBoardIds))) request.setOperation("createContent") val response = callActor(request, Props(new ContentActor())) assert(response.get("identifier") != null) @@ -81,7 +85,7 @@ class TestContentActor extends BaseSpec with MockFactory { implicit val oec: OntologyEngineContext = mock[OntologyEngineContext] val graphDB = mock[GraphService] val request = getContentRequest() - request.getRequest.putAll( mapAsJavaMap(Map("name" -> "New Content", "mimeType"-> "application/vnd.ekstep.plugin-archive", "contentType" -> "Course", "primaryCategory" -> "Learning Resource", "channel" -> "in.ekstep"))) + request.getRequest.putAll(Map[String,AnyRef]("name" -> "New Content", "mimeType"-> "application/vnd.ekstep.plugin-archive", "contentType" -> "Course", "primaryCategory" -> "Learning Resource", "channel" -> "in.ekstep").asJava) request.setOperation("createContent") val response = callActor(request, Props(new ContentActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -95,7 +99,7 @@ class TestContentActor extends BaseSpec with MockFactory { implicit val ss: StorageService = mock[StorageService] (ss.getSignedURL(_: String, _: Option[Int], _: Option[String])).expects(*, *, *).returns("cloud store url") val request = getContentRequest() - request.getRequest.putAll(mapAsJavaMap(Map("fileName" -> "presigned_url", "filePath" -> "/data/cloudstore/", "type" -> "assets", "identifier" -> "do_1234"))) + request.getRequest.putAll(Map[String,AnyRef]("fileName" -> "presigned_url", "filePath" -> "/data/cloudstore/", "type" -> "assets", "identifier" -> "do_1234").asJava) request.setOperation("uploadPreSignedUrl") val response = callActor(request, Props(new ContentActor())) assert(response.get("identifier") != null) @@ -111,7 +115,7 @@ class TestContentActor extends BaseSpec with MockFactory { (graphDB.deleteNode(_: String, _: String, _: Request)).expects(*, *, *).returns(Future(true)) implicit val ss = mock[StorageService] val request = getContentRequest() - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_12346"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_12346").asJava) request.setOperation("discardContent") val response = callActor(request, Props(new ContentActor())) assert(response.getResponseCode == ResponseCode.OK) @@ -127,7 +131,7 @@ class TestContentActor extends BaseSpec with MockFactory { 
(graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(getInValidNodeToDiscard())) implicit val ss = mock[StorageService] val request = getContentRequest() - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_12346"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_12346").asJava) request.setOperation("discardContent") val response = callActor(request, Props(new ContentActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -138,7 +142,7 @@ class TestContentActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do_1234.img") - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_1234.img"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_1234.img").asJava) request.setOperation("retireContent") val response = callActor(request, Props(new ContentActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -224,7 +228,7 @@ class TestContentActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do1234") - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_1234"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_1234").asJava) request.setOperation("retireContent") val response = callActor(request, Props(new ContentActor())) assert("successful".equals(response.getParams.getStatus)) @@ -239,7 +243,7 @@ class TestContentActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do1234") - request.putAll(mapAsJavaMap(Map("identifier" -> "do_1234", "fields" -> ""))) + request.putAll(Map[String,AnyRef]("identifier" -> "do_1234", "fields" -> "").asJava) request.setOperation("readContent") val response = callActor(request, Props(new ContentActor())) assert("successful".equals(response.getParams.getStatus)) @@ -260,7 +264,7 @@ class TestContentActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do1234") - request.putAll(mapAsJavaMap(Map("identifier" -> "do_1234", "fields" -> ""))) + request.putAll(Map[String,AnyRef]("identifier" -> "do_1234", "fields" -> "").asJava) request.setOperation("readContent") val response = callActor(request, Props(new ContentActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -281,7 +285,7 @@ class TestContentActor extends BaseSpec with MockFactory { val request = getContentRequest() request.getContext.put("identifier","do1234") request.getRequest.put("channel", "abc-123") - request.putAll(mapAsJavaMap(Map("identifier" -> "do_1234", "fields" -> ""))) + request.putAll(Map[String,AnyRef]("identifier" -> "do_1234", "fields" -> "").asJava) request.setOperation("readPrivateContent") val response = callActor(request, Props(new ContentActor())) assert("successful".equals(response.getParams.getStatus)) @@ -293,7 +297,7 @@ class TestContentActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do1234") - request.putAll(mapAsJavaMap(Map("identifier" -> "do_1234", "fields" -> ""))) + request.putAll(Map[String,AnyRef]("identifier" -> "do_1234", "fields" -> "").asJava) request.setOperation("readPrivateContent") val 
response = callActor(request, Props(new ContentActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -317,7 +321,7 @@ class TestContentActor extends BaseSpec with MockFactory { val request = getContentRequest() request.getContext.put("identifier","do1234") request.getRequest.put("channel", "abc") - request.putAll(mapAsJavaMap(Map("identifier" -> "do_1234", "fields" -> ""))) + request.putAll(Map[String,AnyRef]("identifier" -> "do_1234", "fields" -> "").asJava) request.setOperation("readPrivateContent") val response = callActor(request, Props(new ContentActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -339,7 +343,7 @@ class TestContentActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do_1234") - request.putAll(mapAsJavaMap(Map("description" -> "test desc", "versionKey" -> "test_123"))) + request.putAll(Map[String,AnyRef]("description" -> "test desc", "versionKey" -> "test_123").asJava) request.setOperation("updateContent") val response = callActor(request, Props(new ContentActor())) assert("successful".equals(response.getParams.getStatus)) @@ -360,7 +364,7 @@ class TestContentActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do_1234") - request.putAll(mapAsJavaMap(Map("description" -> "test desc", "versionKey" -> "test_123"))) + request.putAll(Map[String,AnyRef]("description" -> "test desc", "versionKey" -> "test_123").asJava) request.setOperation("updateContent") val response = callActor(request, Props(new ContentActor())) assert("failed".equals(response.getParams.getStatus)) @@ -373,7 +377,9 @@ class TestContentActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do1234") - request.putAll(mapAsJavaMap(Map("description" -> "updated description","framework" -> "NCF", "organisationBoardIds" -> new util.ArrayList[String](){{add("ncf_board_cbse")}}))) + val organisationBoardIds2 = new util.ArrayList[String]() + organisationBoardIds2.add("ncf_board_cbse") + request.putAll(Map[String,AnyRef]("description" -> "updated description","framework" -> "NCF", "organisationBoardIds" -> organisationBoardIds2).asJava) request.setOperation("updateContent") val response = callActor(request, Props(new ContentActor())) assert("failed".equals(response.getParams.getStatus)) @@ -395,9 +401,9 @@ class TestContentActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do1234") - request.putAll(mapAsJavaMap(Map("identifier" -> "do_1234", "createdBy" -> "username_1", + request.putAll(Map[String,AnyRef]("identifier" -> "do_1234", "createdBy" -> "username_1", "createdFor" -> new util.ArrayList[String]() {{ add("NCF2") }}, "framework" -> "NCF", - "organisation" -> new util.ArrayList[String]() {{ add("NCF2") }}))) + "organisation" -> new util.ArrayList[String]() {{ add("NCF2") }}).asJava) request.setOperation("copy") val response = callActor(request, Props(new ContentActor())) assert("successful".equals(response.getParams.getStatus)) @@ -450,7 +456,7 @@ class TestContentActor extends BaseSpec with MockFactory { (graphDB.upsertNode(_: String, _: Node, _: Request)).expects(*, *, *).returns(Future(node)) val request = getContentRequest() request.getContext.put("identifier", "do_1234") - 
request.putAll(mapAsJavaMap(Map("identifier" -> "do_1234", "createdBy" -> "username_1", + request.putAll(Map[String,AnyRef]("identifier" -> "do_1234", "createdBy" -> "username_1", "createdFor" -> new util.ArrayList[String]() {{ add("NCF2") }}, "framework" -> "NCF", "organisation" -> new util.ArrayList[String]() {{ add("NCF2") }}))) request.put("file", new File(Resources.getResource("jpegImage.jpeg").toURI)) @@ -476,7 +482,7 @@ class TestContentActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do_1234") - request.putAll(mapAsJavaMap(Map("description" -> "test desc", "versionKey" -> "test_123"))) + request.putAll(Map[String,AnyRef]("description" -> "test desc", "versionKey" -> "test_123").asJava) request.setOperation("systemUpdate") val response = callActor(request, Props(new ContentActor())) assert("successful".equals(response.getParams.getStatus)) @@ -558,7 +564,7 @@ class TestContentActor extends BaseSpec with MockFactory { (graphDB.upsertNode(_: String, _: Node, _: Request)).expects(*, *, *).returns(Future(node)).anyNumberOfTimes() val request = getContentRequest() request.getContext.put("identifier","do1234") - request.putAll(mapAsJavaMap(Map("description" -> "test desc", "versionKey" -> "test_123"))) + request.putAll(Map[String,AnyRef]("description" -> "test desc", "versionKey" -> "test_123").asJava) request.setOperation("rejectContent") val response = callActor(request, Props(new ContentActor())) assert("successful".equals(response.getParams.getStatus)) @@ -576,7 +582,7 @@ class TestContentActor extends BaseSpec with MockFactory { (graphDB.upsertNode(_: String, _: Node, _: Request)).expects(*, *, *).returns(Future(node)).anyNumberOfTimes() val request = getContentRequest() request.getContext.put("identifier","do_1234") - request.putAll(mapAsJavaMap(Map("description" -> "test desc", "versionKey" -> "test_123"))) + request.putAll(Map[String,AnyRef]("description" -> "test desc", "versionKey" -> "test_123").asJava) request.setOperation("rejectContent") val response = callActor(request, Props(new ContentActor())) assert("failed".equals(response.getParams.getStatus)) @@ -598,7 +604,7 @@ class TestContentActor extends BaseSpec with MockFactory { (graphDB.upsertNode(_: String, _: Node, _: Request)).expects(*, *, *).returns(Future(node)).anyNumberOfTimes() val request = getContentRequest() request.getContext.put("identifier","do_1234") - request.putAll(mapAsJavaMap(Map("rejectComment"->"Testing reject comment", "versionKey" -> "test_123","rejectReasons" -> Array("Incorrect Content")))) + request.putAll(Map[String,AnyRef]("rejectComment"->"Testing reject comment", "versionKey" -> "test_123","rejectReasons" -> Array("Incorrect Content").toList.asJava).asJava) request.setOperation("rejectContent") val response = callActor(request, Props(new ContentActor())) assert("successful".equals(response.getParams.getStatus)) @@ -831,8 +837,8 @@ class TestContentActor extends BaseSpec with MockFactory { node.setNodeType("DATA_NODE") node.setObjectType("Content") node.setGraphId("domain") - node.setMetadata(mapAsJavaMap( - ScalaJsonUtils.deserialize[Map[String,AnyRef]]("{\n \"objectCategoryDefinition\": {\n \"name\": \"Learning Resource\",\n \"description\": \"Content Playlist\",\n \"categoryId\": \"obj-cat:learning-resource\",\n \"targetObjectType\": \"Content\",\n \"objectMetadata\": {\n \"config\": {},\n \"schema\": {\n \"required\": [\n \"author\",\n \"copyright\",\n \"license\",\n \"audience\"\n ],\n 
\"properties\": {\n \"audience\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"string\",\n \"enum\": [\n \"Student\",\n \"Teacher\"\n ]\n },\n \"default\": [\n \"Student\"\n ]\n },\n \"mimeType\": {\n \"type\": \"string\",\n \"enum\": [\n \"application/pdf\"\n ]\n }\n }\n }\n }\n }\n }"))) + node.setMetadata( + ScalaJsonUtils.deserialize[Map[String,AnyRef]]("{\n \"objectCategoryDefinition\": {\n \"name\": \"Learning Resource\",\n \"description\": \"Content Playlist\",\n \"categoryId\": \"obj-cat:learning-resource\",\n \"targetObjectType\": \"Content\",\n \"objectMetadata\": {\n \"config\": {},\n \"schema\": {\n \"required\": [\n \"author\",\n \"copyright\",\n \"license\",\n \"audience\"\n ],\n \"properties\": {\n \"audience\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"string\",\n \"enum\": [\n \"Student\",\n \"Teacher\"\n ]\n },\n \"default\": [\n \"Student\"\n ]\n },\n \"mimeType\": {\n \"type\": \"string\",\n \"enum\": [\n \"application/pdf\"\n ]\n }\n }\n }\n }\n }\n }").asJava) node } @@ -852,7 +858,7 @@ class TestContentActor extends BaseSpec with MockFactory { node.setNodeType("DATA_NODE") node.setObjectType("Framework") node.setGraphId("domain") - node.setMetadata(mapAsJavaMap(Map("name"-> "NCF"))) + node.setMetadata(Map[String,AnyRef]("name"-> "NCF").asJava) node } @@ -862,7 +868,7 @@ class TestContentActor extends BaseSpec with MockFactory { node.setNodeType("DATA_NODE") node.setObjectType("Term") node.setGraphId("domain") - node.setMetadata(mapAsJavaMap(Map("name"-> "CBSE"))) + node.setMetadata(Map[String,AnyRef]("name"-> "CBSE").asJava) node } diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestEventActor.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestEventActor.scala index 048aa6f46..0b44f63b2 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestEventActor.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestEventActor.scala @@ -1,6 +1,7 @@ package org.sunbird.content.actors +import scala.jdk.CollectionConverters._ -import akka.actor.Props +import org.apache.pekko.actor.Props import org.scalamock.scalatest.MockFactory import org.sunbird.cloudstore.StorageService import org.sunbird.common.dto.Request @@ -9,7 +10,6 @@ import org.sunbird.graph.dac.model.{Node, SearchCriteria} import org.sunbird.graph.{GraphService, OntologyEngineContext} import java.util -import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future @@ -23,7 +23,7 @@ class TestEventActor extends BaseSpec with MockFactory { (graphDB.deleteNode(_: String, _: String, _: Request)).expects(*, *, *).returns(Future(true)) implicit val ss = mock[StorageService] val request = getContentRequest() - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_12346"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_12346").asJava) request.getContext.put("objectType","Content") request.setOperation("discardContent") val response = callActor(request, Props(new EventActor())) @@ -46,7 +46,7 @@ class TestEventActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier", "do_1234") - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_1234"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_1234").asJava) request.setOperation("publishContent") val response = 
callActor(request, Props(new EventActor())) assert(response.getResponseCode == ResponseCode.OK) @@ -61,7 +61,7 @@ class TestEventActor extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(getInValidNodeToDiscard())).anyNumberOfTimes() implicit val ss = mock[StorageService] val request = getContentRequest() - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_12346"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_12346").asJava) request.getContext.put("objectType","Content") request.setOperation("discardContent") val response = callActor(request, Props(new EventActor())) @@ -73,7 +73,7 @@ class TestEventActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do_1234.img") - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_1234.img"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_1234.img").asJava) request.getContext.put("objectType","Content") request.setOperation("retireContent") val graphDB = mock[GraphService] @@ -93,7 +93,7 @@ class TestEventActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do1234") - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_1234"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_1234").asJava) request.getContext.put("objectType","Content") request.setOperation("retireContent") val response = callActor(request, Props(new EventActor())) @@ -114,9 +114,9 @@ class TestEventActor extends BaseSpec with MockFactory { (graphDB.upsertNode(_: String, _: Node, _: Request)).expects(*, *, *).returns(Future(node)) val request = getContentRequest() request.getContext.put("identifier","do_1234") - request.putAll(mapAsJavaMap(Map("name" -> "New Content", "code" -> "1234", + request.putAll(Map[String,AnyRef]("name" -> "New Content", "code" -> "1234", "startDate" -> "2021-03-04", "endDate" -> "2021-03-04", "startTime" -> "11:00:00Z", "endTime" -> "11:00:00Z", - "registrationEndDate" -> "2021-03-04", "eventType" -> "Online", "versionKey" -> "test_123"))) + "registrationEndDate" -> "2021-03-04", "eventType" -> "Online", "versionKey" -> "test_123").asJava) request.setOperation("updateContent") val response = callActor(request, Props(new EventActor())) assert("successful".equals(response.getParams.getStatus)) diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestEventSetActor.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestEventSetActor.scala index c0a239b14..f3a89d534 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestEventSetActor.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestEventSetActor.scala @@ -1,6 +1,7 @@ package org.sunbird.content.actors +import scala.jdk.CollectionConverters._ -import akka.actor.Props +import org.apache.pekko.actor.Props import org.scalamock.scalatest.MockFactory import org.sunbird.cloudstore.StorageService import org.sunbird.common.JsonUtils @@ -11,8 +12,7 @@ import org.sunbird.graph.dac.model.{Node, Relation, SearchCriteria} import org.sunbird.graph.{GraphService, OntologyEngineContext} import java.util -import scala.collection.JavaConversions.mapAsJavaMap -import scala.collection.JavaConverters.seqAsJavaListConverter +// import 
scala.jdk.CollectionConverters.mapAsJavaMap replaced with .asJava) import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future @@ -28,17 +28,17 @@ class TestEventSetActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] implicit val oec: OntologyEngineContext = mock[OntologyEngineContext] val request = getContentRequest() - val eventSet = mapAsJavaMap(Map( + val eventSet = Map[String,AnyRef]( "name" -> "New Content", "code" -> "1234", "startDate"-> "2021/01/03", //wrong format "endDate"-> "2021-01-03", "schedule" -> - mapAsJavaMap(Map("type" -> "NON_RECURRING", - "value" -> List(mapAsJavaMap(Map("startDate" -> "2021-01-03", "endDate" -> "2021-01-03", "startTime" -> "11:00:00Z", "endTime" -> "13:00:00Z"))).asJava)), - "onlineProvider" -> "Zoom", + Map[String,AnyRef]("type" -> "NON_RECURRING", + "value" -> List(Map[String,AnyRef]("startDate" -> "2021-01-03", "endDate" -> "2021-01-03", "startTime" -> "11:00:00Z", "endTime" -> "13:00:00Z")).asJava), + "onlineProvider" -> "Zoom", "registrationEndDate" -> "2021-02-25", - "eventType" -> "Online")) - request.putAll(eventSet) + "eventType" -> "Online") + request.putAll(eventSet.asJava) assert(true) val response = callActor(request, Props(new EventSetActor())) println("Response: " + JsonUtils.serialize(response)) @@ -67,17 +67,17 @@ class TestEventSetActor extends BaseSpec with MockFactory { })).anyNumberOfTimes() (graphDB.createRelation _).expects(*, *).returns(Future(new Response())) val request = getContentRequest() - val eventSet = mapAsJavaMap(Map( + val eventSet = Map[String,AnyRef]( "name" -> "New Content", "code" -> "1234", "startDate"-> "2021-01-03", //wrong format "endDate"-> "2021-01-03", "schedule" -> - mapAsJavaMap(Map("type" -> "NON_RECURRING", - "value" -> List(mapAsJavaMap(Map("startDate" -> "2021-01-03", "endDate" -> "2021-01-03", "startTime" -> "11:00:00Z", "endTime" -> "13:00:00Z"))).asJava)), + Map[String,AnyRef]("type" -> "NON_RECURRING", + "value" -> List(Map[String,AnyRef]("startDate" -> "2021-01-03", "endDate" -> "2021-01-03", "startTime" -> "11:00:00Z", "endTime" -> "13:00:00Z")).asJava), "onlineProvider" -> "Zoom", "registrationEndDate" -> "2021-02-25", - "eventType" -> "Online")) - request.putAll(eventSet) + "eventType" -> "Online") + request.putAll(eventSet.asJava) request.setOperation("createContent") val response = callActor(request, Props(new EventSetActor())) assert(response.get("identifier") != null) @@ -112,18 +112,18 @@ class TestEventSetActor extends BaseSpec with MockFactory { val request = getContentRequest() - val eventSet = mapAsJavaMap(Map( + val eventSet = Map[String,AnyRef]( "name" -> "New Content", "code" -> "1234", "startDate"-> "2021-01-03", //wrong format "endDate"-> "2021-01-03", "schedule" -> - mapAsJavaMap(Map("type" -> "NON_RECURRING", - "value" -> List(mapAsJavaMap(Map("startDate" -> "2021-01-03", "endDate" -> "2021-01-03", "startTime" -> "11:00:00Z", "endTime" -> "13:00:00Z"))).asJava)), + Map[String,AnyRef]("type" -> "NON_RECURRING", + "value" -> List(Map[String,AnyRef]("startDate" -> "2021-01-03", "endDate" -> "2021-01-03", "startTime" -> "11:00:00Z", "endTime" -> "13:00:00Z")).asJava), "onlineProvider" -> "Zoom", "registrationEndDate" -> "2021-02-25", "eventType" -> "Online", - "versionKey" -> "test_123")) - request.putAll(eventSet) + "versionKey" -> "test_123") + request.putAll(eventSet.asJava) request.setOperation("updateContent") val response = callActor(request, Props(new EventSetActor())) assert(response.get("identifier") != null) @@ 
-139,7 +139,7 @@ class TestEventSetActor extends BaseSpec with MockFactory { (graphDB.deleteNode(_: String, _: String, _: Request)).expects(*, *, *).returns(Future(true)) implicit val ss = mock[StorageService] val request = getContentRequest() - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_12346"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_12346").asJava) request.setOperation("discardContent") val response = callActor(request, Props(new EventSetActor())) assert(response.getResponseCode == ResponseCode.OK) @@ -160,7 +160,7 @@ class TestEventSetActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_12346"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_12346").asJava) request.setOperation("publishContent") val response = callActor(request, Props(new EventSetActor())) assert(response.getResponseCode == ResponseCode.OK) @@ -177,7 +177,7 @@ class TestEventSetActor extends BaseSpec with MockFactory { (graphDB.updateNodes(_: String, _: util.List[String], _: util.HashMap[String, AnyRef])).expects(*, *, *).returns(Future(new util.HashMap[String, Node])).anyNumberOfTimes() implicit val ss = mock[StorageService] val request = getContentRequest() - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_12346"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_12346").asJava) request.setOperation("discardContent") val response = callActor(request, Props(new EventSetActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -194,7 +194,7 @@ class TestEventSetActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do1234") - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_1234"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_1234").asJava) request.setOperation("retireContent") val response = callActor(request, Props(new EventSetActor())) assert("successful".equals(response.getParams.getStatus)) @@ -209,7 +209,7 @@ class TestEventSetActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do1234") - request.putAll(mapAsJavaMap(Map("identifier" -> "do_1234", "fields" -> ""))) + request.putAll(Map[String,AnyRef]("identifier" -> "do_1234", "fields" -> "").asJava) request.setOperation("readContent") val response = callActor(request, Props(new EventSetActor())) assert("successful".equals(response.getParams.getStatus)) @@ -224,7 +224,7 @@ class TestEventSetActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getContentRequest() request.getContext.put("identifier","do1234") - request.putAll(mapAsJavaMap(Map("identifier" -> "do_1234", "fields" -> ""))) + request.putAll(Map[String,AnyRef]("identifier" -> "do_1234", "fields" -> "").asJava) request.setOperation("getHierarchy") val response = callActor(request, Props(new EventSetActor())) assert("successful".equals(response.getParams.getStatus)) @@ -317,11 +317,11 @@ class TestEventSetActor extends BaseSpec with MockFactory { put("registrationEndDate", "2021-01-02") put("eventType", "Online") put("schedule", - mapAsJavaMap(Map("type" -> "NON_RECURRING", - "value" -> List(mapAsJavaMap(Map("startDate" -> "2021-01-03", + Map[String,AnyRef]("type" -> "NON_RECURRING", + "value" 
-> List(Map[String,AnyRef]("startDate" -> "2021-01-03", "endDate" -> "2021-01-03", "startTime" -> "11:00:00Z", - "endTime" -> "13:00:00Z"))).asJava))) + "endTime" -> "13:00:00Z")).asJava).asJava) } }) @@ -353,12 +353,12 @@ class TestEventSetActor extends BaseSpec with MockFactory { put("registrationEndDate", "2021-01-02") put("eventType", "Online") put("schedule", - mapAsJavaMap(Map("type" -> "NON_RECURRING", - "value" -> List(mapAsJavaMap(Map("startDate" -> "2021-01-03", + Map[String,AnyRef]("type" -> "NON_RECURRING", + "value" -> List(Map[String,AnyRef]("startDate" -> "2021-01-03", "endDate" -> "2021-01-03", "startTime" -> "11:00:00Z", "endTime" -> "13:00:00Z", - "status" -> "Draft"))).asJava))) + "status" -> "Draft")).asJava).asJava) } }) @@ -388,12 +388,12 @@ class TestEventSetActor extends BaseSpec with MockFactory { put("registrationEndDate", "2021-01-02") put("eventType", "Online") put("schedule", - mapAsJavaMap(Map("type" -> "NON_RECURRING", - "value" -> List(mapAsJavaMap(Map("startDate" -> "2021-01-03", + Map[String,AnyRef]("type" -> "NON_RECURRING", + "value" -> List(Map[String,AnyRef]("startDate" -> "2021-01-03", "endDate" -> "2021-01-03", "startTime" -> "11:00:00Z", "endTime" -> "13:00:00Z", - "status" -> "Live"))).asJava))) + "status" -> "Live")).asJava).asJava) } }) diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestLicenseActor.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestLicenseActor.scala index 5f57d6b0b..c6962d86d 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestLicenseActor.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestLicenseActor.scala @@ -1,8 +1,9 @@ package org.sunbird.content.actors +import scala.jdk.CollectionConverters._ import java.util -import akka.actor.Props +import org.apache.pekko.actor.Props import org.apache.hadoop.util.StringUtils import org.scalamock.scalatest.MockFactory import org.sunbird.cloudstore.StorageService @@ -11,7 +12,7 @@ import org.sunbird.common.exception.ResponseCode import org.sunbird.graph.{GraphService, OntologyEngineContext} import org.sunbird.graph.dac.model.{Node, SearchCriteria} -import scala.collection.JavaConversions.mapAsJavaMap +// import scala.jdk.CollectionConverters.mapAsJavaMap replaced with .asJava) import scala.concurrent.Future import scala.concurrent.ExecutionContext.Implicits.global @@ -54,7 +55,7 @@ class TestLicenseActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] implicit val oec: OntologyEngineContext = mock[OntologyEngineContext] val request = getLicenseRequest() - request.putAll(mapAsJavaMap(Map("identifier" -> "do_1234"))) + request.putAll(Map[String,AnyRef]("identifier" -> "do_1234").asJava) request.setOperation("createLicense") val response = callActor(request, Props(new LicenseActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -75,7 +76,7 @@ class TestLicenseActor extends BaseSpec with MockFactory { implicit val ss = mock[StorageService] val request = getLicenseRequest() - request.putAll(mapAsJavaMap(Map("description" -> "test desc"))) + request.putAll(Map[String,AnyRef]("description" -> "test desc").asJava) request.setOperation("updateLicense") val response = callActor(request, Props(new LicenseActor())) assert("successful".equals(response.getParams.getStatus)) @@ -90,7 +91,7 @@ class TestLicenseActor extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: 
Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes() implicit val ss = mock[StorageService] val request = getLicenseRequest() - request.putAll(mapAsJavaMap(Map("fields" -> ""))) + request.putAll(Map[String,AnyRef]("fields" -> "").asJava) request.setOperation("readLicense") val response = callActor(request, Props(new LicenseActor())) assert("successful".equals(response.getParams.getStatus)) diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestObjectActor.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestObjectActor.scala index 348adec88..a947b3072 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestObjectActor.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/actors/TestObjectActor.scala @@ -1,12 +1,13 @@ package org.sunbird.content.actors +import scala.jdk.CollectionConverters._ -import akka.actor.Props +import org.apache.pekko.actor.Props import org.scalamock.scalatest.MockFactory import org.sunbird.cloudstore.StorageService import org.sunbird.common.dto.Request import org.sunbird.graph.{GraphService, OntologyEngineContext} import java.util -import scala.collection.JavaConversions.mapAsJavaMap +// import scala.jdk.CollectionConverters.mapAsJavaMap replaced with .asJava) import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future @@ -27,7 +28,7 @@ class TestObjectActor extends BaseSpec with MockFactory{ implicit val ss = mock[StorageService] val request = getRequest() request.getContext.put("identifier","do1234") - request.putAll(mapAsJavaMap(Map("identifier" -> "do_1234", "fields" -> ""))) + request.putAll(Map[String,AnyRef]("identifier" -> "do_1234", "fields" -> "").asJava) request.setOperation("readObject") val response = callActor(request, Props(new ObjectActor())) assert("successful".equals(response.getParams.getStatus)) diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/dial/DIALManagerTest.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/dial/DIALManagerTest.scala index c764ba56a..b3a31ce9f 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/dial/DIALManagerTest.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/dial/DIALManagerTest.scala @@ -1,4 +1,5 @@ package org.sunbird.content.dial +import scala.jdk.CollectionConverters._ import org.scalamock.matchers.Matchers import org.scalamock.scalatest.AsyncMockFactory @@ -214,7 +215,7 @@ class DIALManagerTest extends AsyncFlatSpec with Matchers with AsyncMockFactory "validateDuplicateDIALCodes with duplicate dial codes" should "throw client exception" in { val exception = intercept[ClientException] { - DIALManager.validateDuplicateDIALCodes(Map("do_2222" -> List("E8B7Z6", "R4X2P2"), "do_1111" -> List("N4Z7D5", "E8B7Z6", "L4A6W8", "D2E1J9", "R4X2P2"))) + DIALManager.validateDuplicateDIALCodes(Map[String,AnyRef]("do_2222" -> List("E8B7Z6", "R4X2P2"), "do_1111" -> List("N4Z7D5", "E8B7Z6", "L4A6W8", "D2E1J9", "R4X2P2"))) } assert(exception.getErrCode == "ERR_DUPLICATE_DIAL_CODES") } diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/upload/mgr/UploadManagerTest.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/upload/mgr/UploadManagerTest.scala index 44ac88da3..7b55c2861 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/upload/mgr/UploadManagerTest.scala +++ 
b/content-api/content-actors/src/test/scala/org/sunbird/content/upload/mgr/UploadManagerTest.scala @@ -1,4 +1,5 @@ package org.sunbird.content.upload.mgr +import scala.jdk.CollectionConverters._ import java.util diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/util/CopyManagerTest.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/util/CopyManagerTest.scala index d9137d6f4..5ce970d3c 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/util/CopyManagerTest.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/util/CopyManagerTest.scala @@ -1,4 +1,5 @@ package org.sunbird.content.util +import scala.jdk.CollectionConverters._ import org.apache.commons.collections4.MapUtils import org.scalamock.scalatest.AsyncMockFactory @@ -11,7 +12,7 @@ import org.sunbird.graph.utils.ScalaJsonUtils import org.sunbird.graph.{GraphService, OntologyEngineContext} import java.util -import scala.collection.JavaConversions.mapAsJavaMap +// import scala.jdk.CollectionConverters.mapAsJavaMap replaced with .asJava) import scala.concurrent.Future class CopyManagerTest extends AsyncFlatSpec with Matchers with AsyncMockFactory { @@ -44,7 +45,7 @@ class CopyManagerTest extends AsyncFlatSpec with Matchers with AsyncMockFactory implicit val ss = mock[StorageService] val request = getInvalidCopyRequest_2() request.getContext.put("identifier","do_1234") - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_1234"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_1234").asJava) val exception = intercept[ClientException] { CopyManager.validateRequest(request) } @@ -56,7 +57,7 @@ class CopyManagerTest extends AsyncFlatSpec with Matchers with AsyncMockFactory implicit val ss = mock[StorageService] val request = getInvalidCopyRequest_1() request.getContext.put("identifier","do_1234") - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_1234"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_1234").asJava) val exception = intercept[ClientException] { CopyManager.validateRequest(request) } @@ -68,7 +69,7 @@ class CopyManagerTest extends AsyncFlatSpec with Matchers with AsyncMockFactory implicit val ss = mock[StorageService] val request = getInvalidCopyRequest_3() request.getContext.put("identifier","do_1234") - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_1234"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_1234").asJava) val exception = intercept[ClientException] { CopyManager.validateRequest(request) } @@ -80,7 +81,7 @@ class CopyManagerTest extends AsyncFlatSpec with Matchers with AsyncMockFactory implicit val ss = mock[StorageService] val request = getInvalidCopyRequest_3() request.getContext.put("identifier","do_1234") - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "do_1234"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "do_1234").asJava) val metadata = new util.HashMap[String,Object]() CopyManager.updateToCopySchemeContentType(getValidCopyRequest_1(), "TextBook", metadata) assert(MapUtils.isNotEmpty(metadata)) @@ -293,8 +294,8 @@ class CopyManagerTest extends AsyncFlatSpec with Matchers with AsyncMockFactory node.setNodeType("DATA_NODE") node.setObjectType("Content") node.setGraphId("domain") - node.setMetadata(mapAsJavaMap( - ScalaJsonUtils.deserialize[Map[String,AnyRef]]("{\n \"objectCategoryDefinition\": {\n \"name\": \"Learning Resource\",\n \"description\": \"Content Playlist\",\n \"categoryId\": 
\"obj-cat:learning-resource\",\n \"targetObjectType\": \"Content\",\n \"objectMetadata\": {\n \"config\": {},\n \"schema\": {\n \"required\": [\n \"author\",\n \"copyright\",\n \"audience\"\n ],\n \"properties\": {\n \"audience\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"string\",\n \"enum\": [\n \"Student\",\n \"Teacher\"\n ]\n },\n \"default\": [\n \"Student\"\n ]\n },\n \"mimeType\": {\n \"type\": \"string\",\n \"enum\": [\n \"application/pdf\"\n ]\n }\n }\n }\n }\n }\n }"))) + node.setMetadata( + ScalaJsonUtils.deserialize[Map[String,AnyRef]]("{\n \"objectCategoryDefinition\": {\n \"name\": \"Learning Resource\",\n \"description\": \"Content Playlist\",\n \"categoryId\": \"obj-cat:learning-resource\",\n \"targetObjectType\": \"Content\",\n \"objectMetadata\": {\n \"config\": {},\n \"schema\": {\n \"required\": [\n \"author\",\n \"copyright\",\n \"audience\"\n ],\n \"properties\": {\n \"audience\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"string\",\n \"enum\": [\n \"Student\",\n \"Teacher\"\n ]\n },\n \"default\": [\n \"Student\"\n ]\n },\n \"mimeType\": {\n \"type\": \"string\",\n \"enum\": [\n \"application/pdf\"\n ]\n }\n }\n }\n }\n }\n }").asJava) node } @@ -304,8 +305,8 @@ class CopyManagerTest extends AsyncFlatSpec with Matchers with AsyncMockFactory node.setNodeType("DATA_NODE") node.setObjectType("Content") node.setGraphId("domain") - node.setMetadata(mapAsJavaMap( - ScalaJsonUtils.deserialize[Map[String,AnyRef]]("{\n \"objectCategoryDefinition\": {\n \"name\": \"Learning Resource\",\n \"description\": \"Content Playlist\",\n \"categoryId\": \"obj-cat:learning-resource\",\n \"targetObjectType\": \"Content\",\n \"objectMetadata\": {\n \"config\": {},\n \"schema\": {\n \"required\": [\n \"author\",\n \"copyright\",\n \"audience\"\n ],\n \"properties\": {\n \"audience\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"string\",\n \"enum\": [\n \"Student\",\n \"Teacher\"\n ]\n },\n \"default\": [\n \"Student\"\n ]\n },\n \"mimeType\": {\n \"type\": \"string\",\n \"enum\": [\n \"application/pdf\"\n ]\n }\n }\n }\n }\n }\n }"))) + node.setMetadata( + ScalaJsonUtils.deserialize[Map[String,AnyRef]]("{\n \"objectCategoryDefinition\": {\n \"name\": \"Learning Resource\",\n \"description\": \"Content Playlist\",\n \"categoryId\": \"obj-cat:learning-resource\",\n \"targetObjectType\": \"Content\",\n \"objectMetadata\": {\n \"config\": {},\n \"schema\": {\n \"required\": [\n \"author\",\n \"copyright\",\n \"audience\"\n ],\n \"properties\": {\n \"audience\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"string\",\n \"enum\": [\n \"Student\",\n \"Teacher\"\n ]\n },\n \"default\": [\n \"Student\"\n ]\n },\n \"mimeType\": {\n \"type\": \"string\",\n \"enum\": [\n \"application/pdf\"\n ]\n }\n }\n }\n }\n }\n }").asJava) node } } diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/util/DiscardManagerTest.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/util/DiscardManagerTest.scala index db8d1704c..177832357 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/util/DiscardManagerTest.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/util/DiscardManagerTest.scala @@ -1,4 +1,5 @@ package org.sunbird.content.util +import scala.jdk.CollectionConverters._ import org.scalamock.scalatest.MockFactory import org.sunbird.common.dto.Request @@ -8,14 +9,14 @@ import org.sunbird.graph.OntologyEngineContext import org.sunbird.graph.dac.model.Node import java.util -import 
scala.collection.JavaConversions.mapAsJavaMap +// import scala.jdk.CollectionConverters.mapAsJavaMap replaced with .asJava) class DiscardManagerTest extends BaseSpec with MockFactory { it should "discard node in Live state should return client error" in { implicit val oec: OntologyEngineContext = new OntologyEngineContext val request = getContentRequest() - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> ""))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "").asJava) request.setOperation("discardContent") val exception = intercept[ClientException] { DiscardManager.validateRequest(request) diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/util/FlagManagerTest.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/util/FlagManagerTest.scala index 06344afbb..618c87e97 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/util/FlagManagerTest.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/util/FlagManagerTest.scala @@ -1,4 +1,5 @@ package org.sunbird.content.util +import scala.jdk.CollectionConverters._ import java.util diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/util/RequestUtilTest.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/util/RequestUtilTest.scala index b3cff3429..270fd40ce 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/util/RequestUtilTest.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/util/RequestUtilTest.scala @@ -1,4 +1,5 @@ package org.sunbird.content.util +import scala.jdk.CollectionConverters._ import java.util diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/util/TestAcceptFlagManager.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/util/TestAcceptFlagManager.scala index f75aa1765..a0b7ae091 100644 --- a/content-api/content-actors/src/test/scala/org/sunbird/content/util/TestAcceptFlagManager.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/util/TestAcceptFlagManager.scala @@ -1,8 +1,9 @@ package org.sunbird.content.util +import scala.jdk.CollectionConverters._ import java.util -import akka.actor.Props +import org.apache.pekko.actor.Props import org.scalamock.scalatest.MockFactory import org.sunbird.cloudstore.StorageService import org.sunbird.common.dto.{Property, Request, Response} @@ -10,7 +11,7 @@ import org.sunbird.content.actors.{BaseSpec, ContentActor} import org.sunbird.graph.{GraphService, OntologyEngineContext} import org.sunbird.graph.dac.model.{Node, SearchCriteria} -import scala.collection.JavaConversions.mapAsJavaMap +// import scala.jdk.CollectionConverters.mapAsJavaMap replaced with .asJava) import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future @@ -42,7 +43,7 @@ class TestAcceptFlagManager extends BaseSpec with MockFactory { val request = getRequest() request.getContext.put("identifier","domain") - request.getRequest.putAll(mapAsJavaMap(Map("identifier" -> "domain"))) + request.getRequest.putAll(Map[String,AnyRef]("identifier" -> "domain").asJava) request.setOperation("acceptFlag") val response = callActor(request, Props(new ContentActor())) assert("successful".equals(response.getParams.getStatus)) diff --git a/content-api/content-actors/src/test/scala/org/sunbird/content/util/TestAssetManager.scala b/content-api/content-actors/src/test/scala/org/sunbird/content/util/TestAssetManager.scala index 62dc10c11..6a4b34203 100644 --- 
a/content-api/content-actors/src/test/scala/org/sunbird/content/util/TestAssetManager.scala +++ b/content-api/content-actors/src/test/scala/org/sunbird/content/util/TestAssetManager.scala @@ -1,4 +1,5 @@ package org.sunbird.content.util +import scala.jdk.CollectionConverters._ import java.util @@ -10,7 +11,7 @@ import org.sunbird.graph.{GraphService, OntologyEngineContext} import org.sunbird.graph.dac.model.Node import org.sunbird.graph.utils.ScalaJsonUtils -import scala.collection.JavaConversions.mapAsJavaMap +// import scala.jdk.CollectionConverters.mapAsJavaMap replaced with .asJava) import scala.concurrent.Future class TestAssetManager extends AsyncFlatSpec with Matchers with AsyncMockFactory { @@ -82,8 +83,8 @@ class TestAssetManager extends AsyncFlatSpec with Matchers with AsyncMockFactory node.setNodeType("DATA_NODE") node.setObjectType("Asset") node.setGraphId("domain") - node.setMetadata(mapAsJavaMap( - ScalaJsonUtils.deserialize[Map[String, AnyRef]]("{\n \"objectCategoryDefinition\": {\n \"name\": \"Learning Resource\",\n \"description\": \"Content Playlist\",\n \"categoryId\": \"obj-cat:learning-resource\",\n \"targetObjectType\": \"Content\",\n \"objectMetadata\": {\n \"config\": {},\n \"schema\": {\n \"required\": [\n \"author\",\n \"copyright\",\n \"audience\"\n ],\n \"properties\": {\n \"audience\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"string\",\n \"enum\": [\n \"Student\",\n \"Teacher\"\n ]\n },\n \"default\": [\n \"Student\"\n ]\n },\n \"mimeType\": {\n \"type\": \"string\",\n \"enum\": [\n \"application/pdf\"\n ]\n }\n }\n }\n }\n }\n }"))) + node.setMetadata( + ScalaJsonUtils.deserialize[Map[String, AnyRef]]("{\n \"objectCategoryDefinition\": {\n \"name\": \"Learning Resource\",\n \"description\": \"Content Playlist\",\n \"categoryId\": \"obj-cat:learning-resource\",\n \"targetObjectType\": \"Content\",\n \"objectMetadata\": {\n \"config\": {},\n \"schema\": {\n \"required\": [\n \"author\",\n \"copyright\",\n \"audience\"\n ],\n \"properties\": {\n \"audience\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"string\",\n \"enum\": [\n \"Student\",\n \"Teacher\"\n ]\n },\n \"default\": [\n \"Student\"\n ]\n },\n \"mimeType\": {\n \"type\": \"string\",\n \"enum\": [\n \"application/pdf\"\n ]\n }\n }\n }\n }\n }\n }").asJava) node } diff --git a/content-api/content-service/app/controllers/BaseController.scala b/content-api/content-service/app/controllers/BaseController.scala index 4e9b059db..8fc9e1624 100644 --- a/content-api/content-service/app/controllers/BaseController.scala +++ b/content-api/content-service/app/controllers/BaseController.scala @@ -5,16 +5,16 @@ import java.io.File import java.util import java.util.UUID -import akka.actor.ActorRef -import akka.pattern.Patterns +import org.apache.pekko.actor.ActorRef +import org.apache.pekko.pattern.Patterns import org.apache.commons.lang3.StringUtils import org.sunbird.common.{DateUtils, Platform} import org.sunbird.common.dto.{Response, ResponseHandler} import org.sunbird.common.exception.{ClientException, ResponseCode} import play.api.mvc._ import utils.{Constants, JavaJsonUtils} +import scala.jdk.CollectionConverters._ -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} abstract class BaseController(protected val cc: ControllerComponents)(implicit exec: ExecutionContext) extends AbstractController(cc) { @@ -63,7 +63,7 @@ abstract class BaseController(protected val cc: ControllerComponents)(implicit e } def commonHeaders(ignoreHeaders: 
Option[List[String]] = Option(List()))(implicit request: Request[AnyContent]): java.util.Map[String, Object] = { - val customHeaders = Map("x-channel-id" -> "channel", "X-Consumer-ID" -> "consumerId", "X-App-Id" -> "appId").filterKeys(key => !ignoreHeaders.getOrElse(List()).contains(key)) + val customHeaders = Map("x-channel-id" -> "channel", "X-Consumer-ID" -> "consumerId", "X-App-Id" -> "appId").view.filterKeys(key => !ignoreHeaders.getOrElse(List()).contains(key)).toMap customHeaders.map(ch => { val value = request.headers.get(ch._1) if (value.isDefined && !value.isEmpty) { diff --git a/content-api/content-service/app/controllers/HealthController.scala b/content-api/content-service/app/controllers/HealthController.scala index d278b7681..675af0259 100644 --- a/content-api/content-service/app/controllers/HealthController.scala +++ b/content-api/content-service/app/controllers/HealthController.scala @@ -1,6 +1,8 @@ package controllers -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} +import scala.jdk.CollectionConverters._ +import controllers.BaseController import handlers.SignalHandler import javax.inject._ import org.sunbird.common.JsonUtils diff --git a/content-api/content-service/app/controllers/v3/CategoryController.scala b/content-api/content-service/app/controllers/v3/CategoryController.scala index 59bcce644..16572c7b8 100644 --- a/content-api/content-service/app/controllers/v3/CategoryController.scala +++ b/content-api/content-service/app/controllers/v3/CategoryController.scala @@ -1,6 +1,7 @@ package controllers.v3 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} +import scala.jdk.CollectionConverters._ import com.google.inject.Singleton import controllers.BaseController import javax.inject.{Inject, Named} @@ -8,7 +9,6 @@ import org.sunbird.content.util.CategoryConstants import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId} -import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext @Singleton diff --git a/content-api/content-service/app/controllers/v3/ChannelController.scala b/content-api/content-service/app/controllers/v3/ChannelController.scala index c798ca511..b888193f4 100644 --- a/content-api/content-service/app/controllers/v3/ChannelController.scala +++ b/content-api/content-service/app/controllers/v3/ChannelController.scala @@ -1,6 +1,7 @@ package controllers.v3 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} +import scala.jdk.CollectionConverters._ import com.google.inject.Singleton import controllers.BaseController import javax.inject.{Inject, Named} diff --git a/content-api/content-service/app/controllers/v3/ContentController.scala b/content-api/content-service/app/controllers/v3/ContentController.scala index 27ff125eb..c17af6d95 100644 --- a/content-api/content-service/app/controllers/v3/ContentController.scala +++ b/content-api/content-service/app/controllers/v3/ContentController.scala @@ -1,6 +1,7 @@ package controllers.v3 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} +import scala.jdk.CollectionConverters._ import com.google.inject.Singleton import controllers.BaseController import javax.inject.{Inject, Named} @@ -9,7 +10,6 @@ import org.sunbird.common.dto.ResponseHandler import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId, JavaJsonUtils} -import scala.collection.JavaConverters._ import 
scala.concurrent.{ExecutionContext, Future} diff --git a/content-api/content-service/app/controllers/v3/LicenseController.scala b/content-api/content-service/app/controllers/v3/LicenseController.scala index 9543f2759..267c32a22 100644 --- a/content-api/content-service/app/controllers/v3/LicenseController.scala +++ b/content-api/content-service/app/controllers/v3/LicenseController.scala @@ -1,6 +1,7 @@ package controllers.v3 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} +import scala.jdk.CollectionConverters._ import com.google.inject.Singleton import controllers.BaseController import javax.inject.{Inject, Named} @@ -8,7 +9,6 @@ import org.sunbird.content.util.LicenseConstants import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId} -import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext @Singleton diff --git a/content-api/content-service/app/controllers/v4/AppController.scala b/content-api/content-service/app/controllers/v4/AppController.scala index 54e9fc8cb..aab57bc7c 100644 --- a/content-api/content-service/app/controllers/v4/AppController.scala +++ b/content-api/content-service/app/controllers/v4/AppController.scala @@ -1,6 +1,7 @@ package controllers.v4 -import akka.actor.ActorRef +import org.apache.pekko.actor.ActorRef +import scala.jdk.CollectionConverters._ import com.google.inject.Singleton import controllers.BaseController import play.api.mvc.ControllerComponents @@ -9,7 +10,6 @@ import utils.{ActorNames, ApiId} import javax.inject.{Inject, Named} import scala.concurrent.ExecutionContext -import scala.collection.JavaConverters._ /*** * TODO: Re-write this controller after merging the Event and EventSet Controller. diff --git a/content-api/content-service/app/controllers/v4/AssetController.scala b/content-api/content-service/app/controllers/v4/AssetController.scala index 6fb5d26ce..96254c209 100644 --- a/content-api/content-service/app/controllers/v4/AssetController.scala +++ b/content-api/content-service/app/controllers/v4/AssetController.scala @@ -1,15 +1,15 @@ package controllers.v4 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} +import scala.jdk.CollectionConverters._ import com.google.inject.Singleton import controllers.BaseController import javax.inject.{Inject, Named} import org.sunbird.models.UploadParams import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId} -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext} @Singleton class AssetController @Inject()(@Named(ActorNames.CONTENT_ACTOR) contentActor: ActorRef, @Named(ActorNames.ASSET_ACTOR) assetActor: ActorRef, cc: ControllerComponents, actorSystem: ActorSystem)(implicit exec: ExecutionContext) extends BaseController(cc) { val objectType = "Asset" diff --git a/content-api/content-service/app/controllers/v4/CollectionController.scala b/content-api/content-service/app/controllers/v4/CollectionController.scala index f16a43c01..403869b24 100644 --- a/content-api/content-service/app/controllers/v4/CollectionController.scala +++ b/content-api/content-service/app/controllers/v4/CollectionController.scala @@ -1,6 +1,7 @@ package controllers.v4 -import 
akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} +import scala.jdk.CollectionConverters._ import com.google.inject.Singleton import controllers.BaseController import org.sunbird.collectioncsv.util.CollectionTOCConstants @@ -9,7 +10,6 @@ import javax.inject.{Inject, Named} import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId} -import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext @Singleton class CollectionController @Inject()(@Named(ActorNames.CONTENT_ACTOR) contentActor: ActorRef, @Named(ActorNames.COLLECTION_ACTOR) collectionActor: ActorRef, @Named(ActorNames.COLLECTION_CSV_ACTOR) collectionCSVActor: ActorRef, cc: ControllerComponents, actorSystem: ActorSystem)(implicit exec: ExecutionContext) extends BaseController(cc) { diff --git a/content-api/content-service/app/controllers/v4/ContentController.scala b/content-api/content-service/app/controllers/v4/ContentController.scala index c961d7928..0268655a6 100644 --- a/content-api/content-service/app/controllers/v4/ContentController.scala +++ b/content-api/content-service/app/controllers/v4/ContentController.scala @@ -1,6 +1,7 @@ package controllers.v4 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} +import scala.jdk.CollectionConverters._ import com.google.inject.Singleton import controllers.BaseController import javax.inject.{Inject, Named} @@ -8,7 +9,6 @@ import org.sunbird.models.UploadParams import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId} -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext} diff --git a/content-api/content-service/app/controllers/v4/EventController.scala b/content-api/content-service/app/controllers/v4/EventController.scala index 1c5f2cc86..4dc7c41c6 100644 --- a/content-api/content-service/app/controllers/v4/EventController.scala +++ b/content-api/content-service/app/controllers/v4/EventController.scala @@ -1,12 +1,12 @@ package controllers.v4 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} +import scala.jdk.CollectionConverters._ import com.google.inject.Singleton import play.api.mvc.{Action, AnyContent, ControllerComponents} import utils.{ActorNames, ApiId, Constants} import javax.inject.{Inject, Named} -import scala.collection.JavaConverters.mapAsJavaMapConverter import scala.concurrent.ExecutionContext @Singleton @@ -67,4 +67,4 @@ class EventController @Inject()(@Named(ActorNames.EVENT_ACTOR) eventActor: Actor getResult(ApiId.PUBLISH_EVENT, eventActor, contentRequest, version = apiVersion) } -} \ No newline at end of file +} diff --git a/content-api/content-service/app/controllers/v4/EventSetController.scala b/content-api/content-service/app/controllers/v4/EventSetController.scala index c1dd509c3..d6ab5e46e 100644 --- a/content-api/content-service/app/controllers/v4/EventSetController.scala +++ b/content-api/content-service/app/controllers/v4/EventSetController.scala @@ -1,12 +1,12 @@ package controllers.v4 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} +import scala.jdk.CollectionConverters._ import com.google.inject.Singleton import play.api.mvc.{Action, AnyContent, ControllerComponents} import utils.{ActorNames, ApiId, Constants} import javax.inject.{Inject, Named} -import scala.collection.JavaConverters.mapAsJavaMapConverter import scala.concurrent.ExecutionContext @Singleton @@ -75,4 +75,4 @@ class EventSetController 
@Inject()(@Named(ActorNames.EVENT_SET_ACTOR) eventSetAc } -} \ No newline at end of file +} diff --git a/content-api/content-service/app/controllers/v4/ObjectController.scala b/content-api/content-service/app/controllers/v4/ObjectController.scala index efb97610a..c55135659 100644 --- a/content-api/content-service/app/controllers/v4/ObjectController.scala +++ b/content-api/content-service/app/controllers/v4/ObjectController.scala @@ -1,11 +1,11 @@ package controllers.v4 -import akka.actor.ActorRef +import org.apache.pekko.actor.ActorRef +import scala.jdk.CollectionConverters._ import controllers.BaseController import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId} import javax.inject.{Inject, Named} -import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext class ObjectController @Inject()(@Named(ActorNames.OBJECT_ACTOR) objectActor: ActorRef, cc: ControllerComponents)(implicit exec: ExecutionContext) extends BaseController(cc) { @@ -22,4 +22,4 @@ class ObjectController @Inject()(@Named(ActorNames.OBJECT_ACTOR) objectActor: A setRequestContext(readRequest, version,"Content","content") getResult(ApiId.READ_OBJECT, objectActor, readRequest, version = apiVersion) } -} \ No newline at end of file +} diff --git a/content-api/content-service/app/filters/AccessLogFilter.scala b/content-api/content-service/app/filters/AccessLogFilter.scala index 5e30b5579..93c698b48 100644 --- a/content-api/content-service/app/filters/AccessLogFilter.scala +++ b/content-api/content-service/app/filters/AccessLogFilter.scala @@ -1,6 +1,6 @@ package filters -import akka.util.ByteString +import org.apache.pekko.util.ByteString import javax.inject.Inject import org.sunbird.telemetry.util.TelemetryAccessEventUtil import play.api.Logging @@ -8,7 +8,7 @@ import play.api.libs.streams.Accumulator import play.api.mvc._ import scala.concurrent.ExecutionContext -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class AccessLogFilter @Inject() (implicit ec: ExecutionContext) extends EssentialFilter with Logging { @@ -27,7 +27,7 @@ class AccessLogFilter @Inject() (implicit ec: ExecutionContext) extends Essentia val path = requestHeader.uri if(!path.contains("/health")){ - val headers = requestHeader.headers.headers.groupBy(_._1).mapValues(_.map(_._2)) + val headers = requestHeader.headers.headers.groupBy(_._1).view.mapValues(_.map(_._2)).toMap val appHeaders = headers.filter(header => xHeaderNames.keySet.contains(header._1.toLowerCase)) .map(entry => (xHeaderNames.get(entry._1.toLowerCase()).get, entry._2.head)) val otherDetails = Map[String, Any]("StartTime" -> startTime, "env" -> "content", diff --git a/content-api/content-service/app/handlers/SignalHandler.scala b/content-api/content-service/app/handlers/SignalHandler.scala index 4cad301c1..3945a5e32 100644 --- a/content-api/content-service/app/handlers/SignalHandler.scala +++ b/content-api/content-service/app/handlers/SignalHandler.scala @@ -2,13 +2,14 @@ package handlers import java.util.concurrent.TimeUnit -import akka.actor.ActorSystem +import org.apache.pekko.actor.ActorSystem import javax.inject.{Inject, Singleton} import org.slf4j.LoggerFactory import play.api.inject.DefaultApplicationLifecycle import sun.misc.Signal import scala.concurrent.duration.Duration +import scala.concurrent.ExecutionContext @Singleton class SignalHandler @Inject()(implicit actorSystem: ActorSystem, lifecycle: DefaultApplicationLifecycle) { @@ -22,10 +23,10 @@ class SignalHandler @Inject()(implicit actorSystem: 
ActorSystem, lifecycle: Defa // $COVERAGE-OFF$ Disabling scoverage as this code is impossible to test isShuttingDown = true println("Termination required, swallowing SIGTERM to allow current requests to finish. : " + System.currentTimeMillis()) - actorSystem.scheduler.scheduleOnce(STOP_DELAY)(() => { + actorSystem.scheduler.scheduleOnce(STOP_DELAY) { println("ApplicationLifecycle stop triggered... : " + System.currentTimeMillis()) lifecycle.stop() - })(actorSystem.dispatcher) + }(ExecutionContext.global) // $COVERAGE-ON } }) diff --git a/content-api/content-service/app/modules/ContentModule.scala b/content-api/content-service/app/modules/ContentModule.scala index 1615e7f6f..49fbc1796 100644 --- a/content-api/content-service/app/modules/ContentModule.scala +++ b/content-api/content-service/app/modules/ContentModule.scala @@ -1,29 +1,31 @@ package modules import com.google.inject.AbstractModule +import com.google.inject.name.Names +import org.apache.pekko.actor.ActorRef import org.sunbird.channel.actors.ChannelActor import org.sunbird.collectioncsv.actors.CollectionCSVActor import org.sunbird.content.actors.{AppActor, AssetActor, CategoryActor, CollectionActor, ContentActor, EventActor, EventSetActor, HealthActor, LicenseActor, ObjectActor} -import play.libs.akka.AkkaGuiceSupport +import play.api.libs.concurrent.PekkoGuiceSupport import utils.ActorNames -class ContentModule extends AbstractModule with AkkaGuiceSupport { +class ContentModule extends AbstractModule with PekkoGuiceSupport { override def configure() = { // $COVERAGE-OFF$ Disabling scoverage as this code is impossible to test - //super.configure() - bindActor(classOf[HealthActor], ActorNames.HEALTH_ACTOR) - bindActor(classOf[ContentActor], ActorNames.CONTENT_ACTOR) - bindActor(classOf[LicenseActor], ActorNames.LICENSE_ACTOR) - bindActor(classOf[CollectionActor], ActorNames.COLLECTION_ACTOR) - bindActor(classOf[EventActor], ActorNames.EVENT_ACTOR) - bindActor(classOf[EventSetActor], ActorNames.EVENT_SET_ACTOR) - bindActor(classOf[ChannelActor], ActorNames.CHANNEL_ACTOR) - bindActor(classOf[CategoryActor], ActorNames.CATEGORY_ACTOR) - bindActor(classOf[AssetActor], ActorNames.ASSET_ACTOR) - bindActor(classOf[AppActor], ActorNames.APP_ACTOR) - bindActor(classOf[ObjectActor], ActorNames.OBJECT_ACTOR) - bindActor(classOf[CollectionCSVActor], ActorNames.COLLECTION_CSV_ACTOR) + super.configure() + bindActor[HealthActor](ActorNames.HEALTH_ACTOR) + bindActor[ContentActor](ActorNames.CONTENT_ACTOR) + bindActor[LicenseActor](ActorNames.LICENSE_ACTOR) + bindActor[CollectionActor](ActorNames.COLLECTION_ACTOR) + bindActor[EventActor](ActorNames.EVENT_ACTOR) + bindActor[EventSetActor](ActorNames.EVENT_SET_ACTOR) + bindActor[ChannelActor](ActorNames.CHANNEL_ACTOR) + bindActor[CategoryActor](ActorNames.CATEGORY_ACTOR) + bindActor[AssetActor](ActorNames.ASSET_ACTOR) + bindActor[AppActor](ActorNames.APP_ACTOR) + bindActor[ObjectActor](ActorNames.OBJECT_ACTOR) + bindActor[CollectionCSVActor](ActorNames.COLLECTION_CSV_ACTOR) println("Initialized application actors...") // $COVERAGE-ON } diff --git a/content-api/content-service/conf/application.conf b/content-api/content-service/conf/application.conf index eab5cbede..011ee337c 100644 --- a/content-api/content-service/conf/application.conf +++ b/content-api/content-service/conf/application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. 
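Aside on the SignalHandler hunk above (illustrative only, not part of the patch): Pekko's classic Scheduler keeps Akka's scheduleOnce(delay)(block)(implicit ec) signature, where the second parameter list is by-name. Passing a () => Unit lambda to it, as the removed line did, would only build a function value that is never invoked; the added line switches to the by-name block form and supplies the ExecutionContext explicitly. A minimal sketch of that idiom follows, assuming an ActorSystem is in scope; the object name, the 10-second delay and the onStop callback are made up for illustration.

    import org.apache.pekko.actor.{ActorSystem, Cancellable}
    import scala.concurrent.ExecutionContext
    import scala.concurrent.duration._

    object ShutdownScheduleSketch {
      // Schedules a one-shot task on the system scheduler.
      // The braces form a by-name argument, so the body runs only when the delay elapses;
      // the ExecutionContext fills the implicit executor parameter, passed explicitly here
      // to mirror the patch (which uses ExecutionContext.global instead of the dispatcher).
      def scheduleStop(actorSystem: ActorSystem)(onStop: => Unit): Cancellable =
        actorSystem.scheduler.scheduleOnce(10.seconds) {
          onStop
        }(ExecutionContext.global)
    }
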
akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. # @@ -234,9 +234,9 @@ play.server.netty { # Whether the Netty wire should be logged log.wire = true - # If you run Play on Linux, you can use Netty's native socket transport - # for higher performance with less garbage. - transport = "native" + # Use NIO transport instead of native for better container compatibility + # Changed from "native" to "jdk" to avoid TCP_FASTOPEN issues in containers + transport = "jdk" } ## WS (HTTP Client) @@ -353,7 +353,7 @@ play.filters { } play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB +pekko.http.parsing.max-content-length = 50MB schema.base_path="../../schemas/" content.hierarchy.removed_props_for_leafNodes=["collections","children","usedByContent","item_sets","methods","libraries","editorState"] @@ -443,7 +443,7 @@ plugin.media.base.url="https://dev.open-sunbird.org" # Configuration graph.dir=/data/testingGraphDB -akka.request_timeout=30 +pekko.request_timeout=30 environment.id=10000000 graph.ids=["domain"] graph.passport.key.base=31b6fd1c4d64e745c867e61a45edc34a diff --git a/content-api/content-service/conf/logback.xml b/content-api/content-service/conf/logback.xml index 73529d622..ce441915b 100644 --- a/content-api/content-service/conf/logback.xml +++ b/content-api/content-service/conf/logback.xml @@ -4,10 +4,11 @@ - + + - %d %msg%n + %d{HH:mm:ss.SSS} %coloredLevel %logger{36} - %msg%n @@ -15,12 +16,17 @@ - + + + + + + diff --git a/content-api/content-service/conf/routes b/content-api/content-service/conf/routes index 2d7382948..09ec6c4a3 100644 --- a/content-api/content-service/conf/routes +++ b/content-api/content-service/conf/routes @@ -1,11 +1,11 @@ # Routes # This file defines all application routes (Higher priority routes first) # ~~~~ -GET /health controllers.HealthController.health -GET /service/health controllers.HealthController.serviceHealth +GET /health controllers.HealthController.health() +GET /service/health controllers.HealthController.serviceHealth() # Content APIs -POST /content/v3/create controllers.v3.ContentController.create +POST /content/v3/create controllers.v3.ContentController.create() PATCH /content/v3/update/:identifier controllers.v3.ContentController.update(identifier:String) GET /content/v3/read/:identifier controllers.v3.ContentController.read(identifier:String, mode:Option[String], fields:Option[String]) POST /content/v3/upload/url/:identifier controllers.v3.ContentController.uploadPreSigned(identifier:String, type: Option[String]) @@ -16,7 +16,7 @@ POST /content/v3/import controllers.v3.ContentController.imp # Content APIs - with mock response POST /content/v3/flag/:identifier controllers.v3.ContentController.flag(identifier:String) -POST /content/v3/bundle controllers.v3.ContentController.bundle +POST /content/v3/bundle controllers.v3.ContentController.bundle() POST /content/v3/flag/accept/:identifier controllers.v3.ContentController.acceptFlag(identifier:String) POST /content/v3/flag/reject/:identifier controllers.v3.ContentController.rejectFlag(identifier:String) POST /content/v3/publish/:identifier controllers.v3.ContentController.publish(identifier:String) @@ -30,27 +30,27 @@ POST 
/content/v3/unlisted/publish/:identifier controllers.v3.ContentControl GET /content/v3/process/status/:processid controllers.v3.ContentController.getProcessIdStatus(processid: String) # Collection APIs -PATCH /content/v3/hierarchy/add controllers.v3.ContentController.addHierarchy -DELETE /content/v3/hierarchy/remove controllers.v3.ContentController.removeHierarchy -PATCH /content/v3/hierarchy/update controllers.v3.ContentController.updateHierarchy +PATCH /content/v3/hierarchy/add controllers.v3.ContentController.addHierarchy() +DELETE /content/v3/hierarchy/remove controllers.v3.ContentController.removeHierarchy() +PATCH /content/v3/hierarchy/update controllers.v3.ContentController.updateHierarchy() GET /content/v3/hierarchy/:identifier controllers.v3.ContentController.getHierarchy(identifier:String, mode:Option[String]) GET /content/v3/hierarchy/:identifier/:bookmarkId controllers.v3.ContentController.getBookmarkHierarchy(identifier: String, bookmarkId: String, mode: Option[String]) POST /collection/v3/dialcode/link/:identifier @controllers.v3.ContentController.collectionLinkDialCode(identifier:String) #License APIs -POST /license/v3/create controllers.v3.LicenseController.create +POST /license/v3/create controllers.v3.LicenseController.create() GET /license/v3/read/:identifier controllers.v3.LicenseController.read(identifier: String, fields:Option[String]) PATCH /license/v3/update/:identifier controllers.v3.LicenseController.update(identifier: String) DELETE /license/v3/retire/:identifier controllers.v3.LicenseController.retire(identifier: String) #These are routes for Channel -POST /channel/v3/create controllers.v3.ChannelController.create +POST /channel/v3/create controllers.v3.ChannelController.create() PATCH /channel/v3/update/:identifier controllers.v3.ChannelController.update(identifier: String) GET /channel/v3/read/:identifier controllers.v3.ChannelController.read(identifier: String) DELETE /channel/v3/retire/:identifier controllers.v3.ChannelController.retire(identifier: String) # Category APIs -POST /category/v3/create controllers.v3.CategoryController.create +POST /category/v3/create controllers.v3.CategoryController.create() GET /category/v3/read/:identifier controllers.v3.CategoryController.read(identifier: String, fields:Option[String]) PATCH /category/v3/update/:identifier controllers.v3.CategoryController.update(identifier: String) DELETE /category/v3/retire/:identifier controllers.v3.CategoryController.retire(identifier: String) @@ -59,7 +59,7 @@ DELETE /category/v3/retire/:identifier controllers.v3.CategoryControll POST /asset/v3/validate controllers.v4.AssetController.licenceValidate(field:Option[String]) #Asset V4 Api's -POST /asset/v4/create controllers.v4.AssetController.create +POST /asset/v4/create controllers.v4.AssetController.create() PATCH /asset/v4/update/:identifier controllers.v4.AssetController.update(identifier:String) GET /asset/v4/read/:identifier controllers.v4.AssetController.read(identifier:String, mode:Option[String], fields:Option[String]) POST /asset/v4/upload/:identifier controllers.v4.AssetController.upload(identifier:String, fileFormat: Option[String], validation: Option[String]) @@ -68,7 +68,7 @@ POST /asset/v4/copy/:identifier controllers.v4.AssetController. 
POST /asset/v4/validate controllers.v4.AssetController.licenceValidate(field:Option[String]) # Collection v4 Api's -POST /collection/v4/create controllers.v4.CollectionController.create +POST /collection/v4/create controllers.v4.CollectionController.create() PATCH /collection/v4/update/:identifier controllers.v4.CollectionController.update(identifier:String) GET /collection/v4/read/:identifier controllers.v4.CollectionController.read(identifier:String, mode:Option[String], fields:Option[String]) GET /collection/v4/private/read/:identifier controllers.v4.CollectionController.privateRead(identifier:String, mode:Option[String], fields:Option[String]) @@ -76,9 +76,9 @@ POST /collection/v4/flag/:identifier controllers.v4.CollectionCont POST /collection/v4/flag/accept/:identifier controllers.v4.CollectionController.acceptFlag(identifier:String) DELETE /collection/v4/discard/:identifier controllers.v4.CollectionController.discard(identifier:String) DELETE /collection/v4/retire/:identifier controllers.v4.CollectionController.retire(identifier:String) -PATCH /collection/v4/hierarchy/add controllers.v4.CollectionController.addHierarchy -DELETE /collection/v4/hierarchy/remove controllers.v4.CollectionController.removeHierarchy -PATCH /collection/v4/hierarchy/update controllers.v4.CollectionController.updateHierarchy +PATCH /collection/v4/hierarchy/add controllers.v4.CollectionController.addHierarchy() +DELETE /collection/v4/hierarchy/remove controllers.v4.CollectionController.removeHierarchy() +PATCH /collection/v4/hierarchy/update controllers.v4.CollectionController.updateHierarchy() GET /collection/v4/hierarchy/:identifier controllers.v4.CollectionController.getHierarchy(identifier:String, mode:Option[String]) GET /collection/v4/hierarchy/:identifier/:bookmarkId controllers.v4.CollectionController.getBookmarkHierarchy(identifier: String, bookmarkId: String, mode: Option[String]) POST /collection/v4/dialcode/link/:identifier controllers.v4.CollectionController.collectionLinkDialCode(identifier:String) @@ -89,7 +89,7 @@ POST /collection/v4/publish/:identifier controllers.v4.CollectionCon POST /collection/v4/unlisted/publish/:identifier controllers.v4.CollectionController.publishUnlisted(identifier:String) # Content v4 APIs -POST /content/v4/create controllers.v4.ContentController.create +POST /content/v4/create controllers.v4.ContentController.create() PATCH /content/v4/update/:identifier controllers.v4.ContentController.update(identifier:String) GET /content/v4/read/:identifier controllers.v4.ContentController.read(identifier:String, mode:Option[String], fields:Option[String]) GET /content/v4/private/read/:identifier controllers.v4.ContentController.privateRead(identifier:String, mode:Option[String], fields:Option[String]) @@ -112,12 +112,12 @@ POST /content/v4/unlisted/publish/:identifier controllers.v4.ContentContro GET /content/v4/process/status/:processid controllers.v4.ContentController.getProcessIdStatus(processid: String) # App v4 APIs -POST /app/v4/register controllers.v4.AppController.register +POST /app/v4/register controllers.v4.AppController.register() PATCH /app/v4/update/:identifier controllers.v4.AppController.update(identifier:String) GET /app/v4/read/:identifier controllers.v4.AppController.read(identifier:String, fields:Option[String]) # Event APIs -POST /event/v4/create controllers.v4.EventController.create +POST /event/v4/create controllers.v4.EventController.create() PATCH /event/v4/update/:identifier controllers.v4.EventController.update(identifier:String) POST 
/event/v4/publish/:identifier controllers.v4.EventController.publish(identifier:String) GET /event/v4/read/:identifier controllers.v4.EventController.read(identifier:String, mode:Option[String], fields:Option[String]) @@ -125,7 +125,7 @@ DELETE /event/v4/discard/:identifier controllers.v4.EventControll DELETE /private/event/v4/retire/:identifier controllers.v4.EventController.retire(identifier:String) # EventSet v4 Api's -POST /eventset/v4/create controllers.v4.EventSetController.create +POST /eventset/v4/create controllers.v4.EventSetController.create() PUT /eventset/v4/update/:identifier controllers.v4.EventSetController.update(identifier:String) POST /eventset/v4/publish/:identifier controllers.v4.EventSetController.publish(identifier:String) GET /eventset/v4/hierarchy/:identifier controllers.v4.EventSetController.getHierarchy(identifier:String, mode:Option[String], fields:Option[String]) diff --git a/content-api/content-service/pom.xml b/content-api/content-service/pom.xml index b940c08a6..9c7b170ff 100755 --- a/content-api/content-service/pom.xml +++ b/content-api/content-service/pom.xml @@ -11,9 +11,17 @@ - scalaz-bintray - Scalaz Bintray - releases - https://dl.bintray.com/scalaz/releases/ + maven-central + Maven Central Repository + https://repo1.maven.org/maven2 + + false + + + + typesafe-releases + Typesafe Repository + https://repo.typesafe.com/typesafe/releases/ false @@ -29,30 +37,128 @@ - 2.7.2 + 3.0.5 1.0.0-rc5 1.0.0 - 2.12 + 2.13 + 4.1.112.Final + + + + + io.netty + netty-common + ${netty.version} + + + io.netty + netty-buffer + ${netty.version} + + + io.netty + netty-transport + ${netty.version} + + + io.netty + netty-handler + ${netty.version} + + + io.netty + netty-codec + ${netty.version} + + + io.netty + netty-codec-http + ${netty.version} + + + io.netty + netty-resolver + ${netty.version} + + + io.netty + netty-transport-native-epoll + ${netty.version} + + + io.netty + netty-transport-native-unix-common + ${netty.version} + + + io.netty + netty-transport-classes-epoll + ${netty.version} + + + + com.google.guava guava - 18.0 + 32.1.3-jre + + + + + commons-io + commons-io + 2.6 + + + + + org.apache.tika + tika-core + 1.22 + + + + + io.netty + netty-codec-http + ${netty.version} + + + io.netty + netty-codec + ${netty.version} - com.google.inject - guice - 3.0 + io.netty + netty-handler + ${netty.version} - com.google.inject.extensions - guice-assistedinject - 3.0 + io.netty + netty-transport + ${netty.version} - com.typesafe.play + io.netty + netty-common + ${netty.version} + + + io.netty + netty-buffer + ${netty.version} + + + io.netty + netty-resolver + ${netty.version} + + + org.playframework play_${scala.major.version} ${play2.version} @@ -83,7 +189,7 @@ - com.typesafe.play + org.playframework play-guice_${scala.major.version} ${play2.version} @@ -137,33 +243,23 @@ ${fasterxml.jackson.version} - com.typesafe.play - filters-helpers_${scala.major.version} + org.playframework + play-filters-helpers_${scala.major.version} ${play2.version} - com.typesafe.play + org.playframework play-logback_${scala.major.version} ${play2.version} runtime - - - ch.qos.logback - logback-classic - - - ch.qos.logback - logback-core - - - ch.qos.logback - logback-classic - 1.2.13 + org.slf4j + slf4j-api + 2.0.16 - com.typesafe.play + org.playframework play-netty-server_${scala.major.version} ${play2.version} runtime @@ -174,15 +270,11 @@ - + io.netty netty-codec-http - 4.1.44.Final + ${netty.version} - - io.netty - netty-codec - io.netty netty-handler @@ -192,12 +284,12 @@ io.netty 
netty-codec - 4.1.68.Final + ${netty.version} io.netty netty-handler - 4.1.46.Final + ${netty.version} org.scala-lang @@ -224,6 +316,17 @@ + + ${CLOUD_STORE_GROUP_ID} + ${CLOUD_STORE_ARTIFACT_ID} + ${CLOUD_STORE_VERSION} + + + com.google.inject.extensions + guice-multibindings + + + com.fasterxml.jackson.module jackson-module-scala_${scala.maj.version} @@ -252,7 +355,7 @@ test - com.typesafe.play + org.playframework play-specs2_${scala.maj.version} ${play2.version} test diff --git a/content-api/content-service/test/controllers/base/BaseSpec.scala b/content-api/content-service/test/controllers/base/BaseSpec.scala index 24f3f4070..da776d38a 100644 --- a/content-api/content-service/test/controllers/base/BaseSpec.scala +++ b/content-api/content-service/test/controllers/base/BaseSpec.scala @@ -12,11 +12,11 @@ import play.api.test.{FakeHeaders, FakeRequest} import scala.concurrent.Future class BaseSpec extends Specification { - implicit val app = new GuiceApplicationBuilder() + implicit val app: play.api.Application = new GuiceApplicationBuilder() .disable(classOf[modules.ContentModule]) .bindings(new TestModule) - .build - implicit val config = ConfigFactory.load(); + .build() + implicit val config: com.typesafe.config.Config = ConfigFactory.load(); def post(apiURL: String, request: String, h: FakeHeaders = FakeHeaders(Seq())) : Future[Result] = { @@ -24,13 +24,13 @@ class BaseSpec extends Specification { route(app, FakeRequest(POST, apiURL, headers, Json.toJson(Json.parse(request)))).get } - def isOK(response: Future[Result]) { + def isOK(response: Future[Result]): Unit = { status(response) must equalTo(OK) contentType(response) must beSome.which(_ == "application/json") contentAsString(response) must contain(""""status":"successful"""") } - def hasClientError(response: Future[Result]) { + def hasClientError(response: Future[Result]): Unit = { status(response) must equalTo(BAD_REQUEST) contentType(response) must beSome.which(_ == "application/json") contentAsString(response) must contain(""""err":"CLIENT_ERROR","status":"failed"""") diff --git a/content-api/content-service/test/controllers/v3/BadRequestSpec.scala b/content-api/content-service/test/controllers/v3/BadRequestSpec.scala index 3eb4c7218..8f7c99d68 100644 --- a/content-api/content-service/test/controllers/v3/BadRequestSpec.scala +++ b/content-api/content-service/test/controllers/v3/BadRequestSpec.scala @@ -10,7 +10,7 @@ import play.api.test.FakeRequest @RunWith(classOf[JUnitRunner]) class BadRequestSpec extends Specification { - implicit val app = new GuiceApplicationBuilder().build + implicit val app: play.api.Application = new GuiceApplicationBuilder().build() "Application" should { "send 404 on a bad request - /boum" in { route(app, FakeRequest(GET, "/boum")) must beSome.which (status(_) == NOT_FOUND) diff --git a/content-api/content-service/test/modules/TestModule.scala b/content-api/content-service/test/modules/TestModule.scala index 00d1a7acf..b56f81dc1 100644 --- a/content-api/content-service/test/modules/TestModule.scala +++ b/content-api/content-service/test/modules/TestModule.scala @@ -3,26 +3,26 @@ package modules import com.google.inject.AbstractModule import org.sunbird.actor.core.BaseActor import org.sunbird.common.dto.{Request, Response, ResponseHandler} -import play.libs.akka.AkkaGuiceSupport +import play.api.libs.concurrent.PekkoGuiceSupport import utils.ActorNames import scala.concurrent.{ExecutionContext, Future} -class TestModule extends AbstractModule with AkkaGuiceSupport { +class TestModule extends 
AbstractModule with PekkoGuiceSupport { override def configure(): Unit = { - bindActor(classOf[TestActor], ActorNames.HEALTH_ACTOR) - bindActor(classOf[TestActor], ActorNames.CONTENT_ACTOR) - bindActor(classOf[TestActor], ActorNames.LICENSE_ACTOR) - bindActor(classOf[TestActor], ActorNames.COLLECTION_ACTOR) - bindActor(classOf[TestActor], ActorNames.CHANNEL_ACTOR) - bindActor(classOf[TestActor], ActorNames.CATEGORY_ACTOR) - bindActor(classOf[TestActor], ActorNames.ASSET_ACTOR) - bindActor(classOf[TestActor], ActorNames.APP_ACTOR) - bindActor(classOf[TestActor], ActorNames.EVENT_SET_ACTOR) - bindActor(classOf[TestActor], ActorNames.EVENT_ACTOR) - bindActor(classOf[TestActor], ActorNames.OBJECT_ACTOR) - bindActor(classOf[TestActor], ActorNames.COLLECTION_CSV_ACTOR) + bindActor[TestActor](ActorNames.HEALTH_ACTOR) + bindActor[TestActor](ActorNames.CONTENT_ACTOR) + bindActor[TestActor](ActorNames.LICENSE_ACTOR) + bindActor[TestActor](ActorNames.COLLECTION_ACTOR) + bindActor[TestActor](ActorNames.CHANNEL_ACTOR) + bindActor[TestActor](ActorNames.CATEGORY_ACTOR) + bindActor[TestActor](ActorNames.ASSET_ACTOR) + bindActor[TestActor](ActorNames.APP_ACTOR) + bindActor[TestActor](ActorNames.EVENT_SET_ACTOR) + bindActor[TestActor](ActorNames.EVENT_ACTOR) + bindActor[TestActor](ActorNames.OBJECT_ACTOR) + bindActor[TestActor](ActorNames.COLLECTION_CSV_ACTOR) println("Test Module is initialized...") } } diff --git a/content-api/hierarchy-manager/pom.xml b/content-api/hierarchy-manager/pom.xml index 52deb8505..185690277 100644 --- a/content-api/hierarchy-manager/pom.xml +++ b/content-api/hierarchy-manager/pom.xml @@ -14,7 +14,7 @@ org.sunbird - graph-engine_2.12 + graph-engine_2.13 1.0-SNAPSHOT jar diff --git a/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/HierarchyManager.scala b/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/HierarchyManager.scala index 74fe19e72..6d4bf6f8d 100644 --- a/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/HierarchyManager.scala +++ b/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/HierarchyManager.scala @@ -12,10 +12,7 @@ import org.sunbird.graph.dac.model.Node import org.sunbird.graph.nodes.DataNode import org.sunbird.graph.utils.{NodeUtil, ScalaJsonUtils} -import scala.collection.convert.ImplicitConversions._ -import scala.collection.JavaConverters -import scala.collection.JavaConverters._ -import scala.collection.JavaConverters.{asJavaIterableConverter, mapAsScalaMapConverter} +import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} import com.mashape.unirest.http.HttpResponse import com.mashape.unirest.http.Unirest @@ -65,7 +62,7 @@ object HierarchyManager { val leafNodesFuture = fetchLeafNodes(request) leafNodesFuture.map(leafNodes => { updateRootNode(rootNode, request, "add").map(node => { - val updateResponse = updateHierarchy(unitId, hierarchy, leafNodes, node, request, "add") + val updateResponse = updateHierarchy(unitId, hierarchy.asJava, leafNodes, node, request, "add") updateResponse.map(response => { if(!ResponseHandler.checkError(response)) { ResponseHandler.OK @@ -100,7 +97,7 @@ object HierarchyManager { Future{ResponseHandler.ERROR(ResponseCode.SERVER_ERROR, ResponseCode.SERVER_ERROR.name(), "hierarchy is empty")} } else { updateRootNode(rootNode, request, "remove").map(node =>{ - val updateResponse = updateHierarchy(unitId, hierarchy, null, node, request, "remove") + val updateResponse = updateHierarchy(unitId, hierarchy.asJava, null, node, request, 
"remove") updateResponse.map(response => { if(!ResponseHandler.checkError(response)) { ResponseHandler.OK.put("rootId", node.getIdentifier.replaceAll(imgSuffix, "")) @@ -140,7 +137,7 @@ object HierarchyManager { //TODO: Remove content Mapping for backward compatibility HierarchyBackwardCompatibilityUtil.setContentAndCategoryTypes(metadata) hierarchy.map(hierarchy => { - val children = hierarchy.getOrDefault("children", new util.ArrayList[java.util.Map[String, AnyRef]]).asInstanceOf[util.ArrayList[java.util.Map[String, AnyRef]]] + val children = hierarchy.getOrElse("children", new util.ArrayList[java.util.Map[String, AnyRef]]).asInstanceOf[util.ArrayList[java.util.Map[String, AnyRef]]] //updating relationalMetadata of children if(collRelationalMetadata.nonEmpty) { @@ -206,7 +203,7 @@ object HierarchyManager { def getPublishedHierarchy(request: Request)(implicit ec: ExecutionContext, oec: OntologyEngineContext): Future[Response] = { val redisHierarchy = RedisCache.get(hierarchyPrefix + request.get("rootId")) val hierarchyFuture = if (StringUtils.isNotEmpty(redisHierarchy)) { - Future(mapAsJavaMap(Map("content" -> JsonUtils.deserialize(redisHierarchy, classOf[java.util.Map[String, AnyRef]])))) + Future(Map("content" -> JsonUtils.deserialize(redisHierarchy, classOf[java.util.Map[String, AnyRef]])).asJava) } else getCassandraHierarchy(request) hierarchyFuture.map(result => { if (!result.isEmpty) { @@ -215,7 +212,7 @@ object HierarchyManager { if (StringUtils.isEmpty(bookmarkId)) { ResponseHandler.OK.put("content", rootHierarchy) } else { - val children = rootHierarchy.getOrElse("children", new util.ArrayList[util.Map[String, AnyRef]]()).asInstanceOf[util.List[util.Map[String, AnyRef]]] + val children = rootHierarchy.getOrDefault("children", new util.ArrayList[util.Map[String, AnyRef]]()).asInstanceOf[util.List[util.Map[String, AnyRef]]] val bookmarkHierarchy = filterBookmarkHierarchy(children, bookmarkId) if (MapUtils.isEmpty(bookmarkHierarchy)) { ResponseHandler.ERROR(ResponseCode.RESOURCE_NOT_FOUND, ResponseCode.RESOURCE_NOT_FOUND.name(), "bookmarkId " + bookmarkId + " does not exist") @@ -258,10 +255,10 @@ object HierarchyManager { req.put("identifiers", leafNodes) val nodes = DataNode.list(req).map(nodes => { if(nodes.size() != leafNodes.size()) { - leafNodes.toList.filter(id => !nodes.contains(id)) + leafNodes.asScala.toList.filter(id => !nodes.asScala.contains(id)) throw new ClientException(ErrorCodes.ERR_BAD_REQUEST.name(), "Children which are not available are: " + leafNodes) } - else nodes.toList + else nodes.asScala.toList }) nodes } @@ -271,19 +268,19 @@ object HierarchyManager { val nodeMap:java.util.Map[String,AnyRef] = NodeUtil.serialize(node, null, node.getObjectType.toLowerCase().replace("image", ""), schemaVersion) nodeMap.keySet().removeAll(keyTobeRemoved) nodeMap - }) + }).asJava } def addChildrenToUnit(children: java.util.List[java.util.Map[String,AnyRef]], unitId:String, leafNodes: java.util.List[java.util.Map[String, AnyRef]], leafNodeIds: java.util.List[String], request: Request)(implicit oec: OntologyEngineContext, ec: ExecutionContext): Unit = { - val childNodes = children.filter(child => "Parent".equalsIgnoreCase(child.get("visibility").asInstanceOf[String]) && unitId.equalsIgnoreCase(child.get("identifier").asInstanceOf[String])).toList + val childNodes = children.asScala.filter(child => "Parent".equalsIgnoreCase(child.get("visibility").asInstanceOf[String]) && unitId.equalsIgnoreCase(child.get("identifier").asInstanceOf[String])).toList if(null != childNodes && 
childNodes.nonEmpty){ - val child = childNodes.get(0) - leafNodes.toList.foreach(leafNode => validateLeafNodes(child, leafNode, request)) + val child = childNodes.head + leafNodes.asScala.toList.foreach(leafNode => validateLeafNodes(child, leafNode, request)) val childList = child.get("children").asInstanceOf[java.util.List[java.util.Map[String,AnyRef]]] val restructuredChildren: java.util.List[java.util.Map[String,AnyRef]] = restructureUnit(childList, leafNodes, leafNodeIds, child.get("depth").asInstanceOf[Integer] + 1, unitId) child.put("children", restructuredChildren) } else { - for(child <- children) { + for(child <- children.asScala) { if(null !=child.get("children") && !child.get("children").asInstanceOf[java.util.List[java.util.Map[String,AnyRef]]].isEmpty) addChildrenToUnit(child.get("children").asInstanceOf[java.util.List[java.util.Map[String,AnyRef]]], unitId, leafNodes, leafNodeIds, request) } @@ -291,22 +288,22 @@ object HierarchyManager { } def removeChildrenFromUnit(children: java.util.List[java.util.Map[String, AnyRef]], unitId: String, leafNodeIds: java.util.List[String]):Unit = { - val childNodes = children.filter(child => "Parent".equalsIgnoreCase(child.get("visibility").asInstanceOf[String]) && unitId.equalsIgnoreCase(child.get("identifier").asInstanceOf[String])).toList + val childNodes = children.asScala.filter(child => "Parent".equalsIgnoreCase(child.get("visibility").asInstanceOf[String]) && unitId.equalsIgnoreCase(child.get("identifier").asInstanceOf[String])).toList if(null != childNodes && childNodes.nonEmpty){ - val child = childNodes.get(0) + val child = childNodes.head if(null != child.get("children") && !child.get("children").asInstanceOf[java.util.List[java.util.Map[String,AnyRef]]].isEmpty) { - val filteredLeafNodes = child.get("children").asInstanceOf[java.util.List[java.util.Map[String, AnyRef]]].filter(existingLeafNode => { - !leafNodeIds.contains(existingLeafNode.get("identifier").asInstanceOf[String]) - }) + val filteredLeafNodes = child.get("children").asInstanceOf[java.util.List[java.util.Map[String, AnyRef]]].asScala.filter(existingLeafNode => { + !leafNodeIds.asScala.contains(existingLeafNode.get("identifier").asInstanceOf[String]) + }).asJava var index: Integer = 1 - filteredLeafNodes.toList.sortBy(x => x.get("index").asInstanceOf[Integer]).foreach(node => { + filteredLeafNodes.asScala.toList.sortBy(x => x.get("index").asInstanceOf[Integer]).foreach(node => { node.put("index", index) index += 1 }) child.put("children", filteredLeafNodes) } } else { - for(child <- children) { + for(child <- children.asScala) { if(null !=child.get("children") && !child.get("children").asInstanceOf[java.util.List[java.util.Map[String,AnyRef]]].isEmpty) removeChildrenFromUnit(child.get("children").asInstanceOf[java.util.List[java.util.Map[String,AnyRef]]], unitId, leafNodeIds) } @@ -317,12 +314,12 @@ object HierarchyManager { val req = new Request(request) val leafNodes = request.get("children").asInstanceOf[java.util.List[String]] val childNodes = new java.util.ArrayList[String]() - childNodes.addAll(rootNode.getMetadata.get("childNodes").asInstanceOf[Array[String]].toList) + childNodes.addAll(rootNode.getMetadata.get("childNodes").asInstanceOf[Array[String]].toList.asJava) if(operation.equalsIgnoreCase("add")) childNodes.addAll(leafNodes) if(operation.equalsIgnoreCase("remove")) childNodes.removeAll(leafNodes) - req.put("childNodes", childNodes.distinct.toArray) + req.put("childNodes", childNodes.asScala.toList.distinct.toArray) req.getContext.put("identifier", 
rootNode.getIdentifier.replaceAll(imgSuffix, "")) req.getContext.put("skipValidation", java.lang.Boolean.TRUE) DataNode.update(req) @@ -337,13 +334,13 @@ object HierarchyManager { val leafNodesMap: java.util.List[java.util.Map[String, AnyRef]] = convertNodeToMap(leafNodes) addChildrenToUnit(children, unitId, leafNodesMap, leafNodeIds, request) //add relationalMetadata for unit - if(collRelationalMetadata.nonEmpty && unitsHierarchyMetadata.nonEmpty) { + if(collRelationalMetadata.nonEmpty && !unitsHierarchyMetadata.isEmpty) { unitsHierarchyMetadata.get("children").asInstanceOf[java.util.List[String]].addAll(leafNodeIds) if (request.get("relationalMetadata") != null) { val rmSchemaValidator = SchemaValidatorFactory.getInstance(HierarchyConstants.RELATIONAL_METADATA.toLowerCase(), "1.0") val requestRM = request.get("relationalMetadata").asInstanceOf[java.util.Map[String, AnyRef]] - requestRM.foreach(rmChild => { - rmSchemaValidator.validate(rmChild._2.asInstanceOf[Map[String, AnyRef]]) + requestRM.asScala.foreach(rmChild => { + rmSchemaValidator.validate(rmChild._2.asInstanceOf[java.util.Map[String, AnyRef]]) }) if (unitsHierarchyMetadata.containsKey("relationalMetadata")) { unitsHierarchyMetadata.get("relationalMetadata").asInstanceOf[java.util.Map[String, AnyRef]].putAll(requestRM) @@ -356,9 +353,9 @@ object HierarchyManager { if ("remove".equalsIgnoreCase(operation)) { removeChildrenFromUnit(children, unitId, leafNodeIds) //remove relationalMetadata for unit - if(collRelationalMetadata.nonEmpty && unitsHierarchyMetadata.nonEmpty) { + if(collRelationalMetadata.nonEmpty && !unitsHierarchyMetadata.isEmpty) { unitsHierarchyMetadata.get("children").asInstanceOf[java.util.List[String]].removeAll(leafNodeIds) - leafNodeIds.foreach(rec => unitsHierarchyMetadata.get("relationalMetadata").asInstanceOf[java.util.Map[String, AnyRef]].remove(rec)) + leafNodeIds.asScala.foreach(rec => unitsHierarchyMetadata.get("relationalMetadata").asInstanceOf[java.util.Map[String, AnyRef]].remove(rec)) if (unitsHierarchyMetadata.get("relationalMetadata").asInstanceOf[java.util.Map[String, AnyRef]].size() == 0) unitsHierarchyMetadata.remove("relationalMetadata") } } @@ -374,8 +371,8 @@ object HierarchyManager { oec.graphService.saveExternalProps(req) }).flatMap(f => f).recoverWith { case clientException: ClientException => if(clientException.getMessage.equalsIgnoreCase("Validation Errors")) { - Future(ResponseHandler.ERROR(ResponseCode.CLIENT_ERROR, ResponseCode.CLIENT_ERROR.name(), clientException.getMessages.mkString(","))) - } else throw clientException + Future(ResponseHandler.ERROR(ResponseCode.CLIENT_ERROR, ResponseCode.CLIENT_ERROR.name(), clientException.getMessages.asScala.mkString(","))) + } else throw clientException case e: Exception => Future(ResponseHandler.ERROR(ResponseCode.SERVER_ERROR, ResponseCode.SERVER_ERROR.name(), e.getMessage)) } @@ -384,22 +381,22 @@ object HierarchyManager { def restructureUnit(childList: java.util.List[java.util.Map[String, AnyRef]], leafNodes: java.util.List[java.util.Map[String, AnyRef]], leafNodeIds: java.util.List[String], depth: Integer, parent: String): java.util.List[java.util.Map[String, AnyRef]] = { var maxIndex:Integer = 0 val leafNodeMap: java.util.Map[String, java.util.Map[String, AnyRef]] = new util.HashMap[String, java.util.Map[String, AnyRef]]() - for(leafNode <- leafNodes){ - leafNodeMap.put(leafNode.get("identifier").asInstanceOf[String], JavaConverters.mapAsJavaMapConverter(leafNode).asJava) + for(leafNode <- leafNodes.asScala){ + 
leafNodeMap.put(leafNode.get("identifier").asInstanceOf[String], leafNode) } var filteredLeafNodes: java.util.List[java.util.Map[String, AnyRef]] = new util.ArrayList[java.util.Map[String, AnyRef]]() if(null != childList && !childList.isEmpty) { - val childMap:Map[String, java.util.Map[String, AnyRef]] = childList.toList.map(f => f.get("identifier").asInstanceOf[String] -> f).toMap - val existingLeafNodes = childMap.filter(p => leafNodeIds.contains(p._1)) + val childMap:Map[String, java.util.Map[String, AnyRef]] = childList.asScala.toList.map(f => f.get("identifier").asInstanceOf[String] -> f).toMap + val existingLeafNodes = childMap.filter(p => leafNodeIds.asScala.contains(p._1)) existingLeafNodes.map(en => { leafNodeMap.get(en._1).put("index", en._2.get("index").asInstanceOf[Integer]) }) - filteredLeafNodes = bufferAsJavaList(childList.filter(existingLeafNode => { - !leafNodeIds.contains(existingLeafNode.get("identifier").asInstanceOf[String]) - })) + filteredLeafNodes = new util.ArrayList[java.util.Map[String, AnyRef]](childList.asScala.filter(existingLeafNode => { + !leafNodeIds.asScala.contains(existingLeafNode.get("identifier").asInstanceOf[String]) + }).toList.asJava) maxIndex = childMap.values.toList.map(child => child.get("index").asInstanceOf[Integer]).max } - leafNodeIds.foreach(id => { + leafNodeIds.asScala.foreach(id => { val node = leafNodeMap.get(id) node.put("parent", parent) node.put("depth", depth) @@ -419,9 +416,9 @@ object HierarchyManager { val responseFuture = oec.graphService.readExternalProps(req, List("hierarchy")) responseFuture.map(response => { if (!ResponseHandler.checkError(response)) { - val hierarchyString = response.getResult.toMap.getOrDefault("hierarchy", "").asInstanceOf[String] + val hierarchyString = response.getResult.asScala.toMap.getOrElse("hierarchy", "").asInstanceOf[String] if (StringUtils.isNotEmpty(hierarchyString)) { - Future(JsonUtils.deserialize(hierarchyString, classOf[java.util.Map[String, AnyRef]]).toMap) + Future(JsonUtils.deserialize(hierarchyString, classOf[java.util.Map[String, AnyRef]]).asScala.toMap) } else Future(Map[String, AnyRef]()) } else if (ResponseHandler.checkError(response) && response.getResponseCode.code() == 404 && Platform.config.hasPath("collection.image.migration.enabled") && Platform.config.getBoolean("collection.image.migration.enabled")) { @@ -429,9 +426,9 @@ object HierarchyManager { val responseFuture = oec.graphService.readExternalProps(req, List("hierarchy")) responseFuture.map(response => { if (!ResponseHandler.checkError(response)) { - val hierarchyString = response.getResult.toMap.getOrDefault("hierarchy", "").asInstanceOf[String] + val hierarchyString = response.getResult.asScala.toMap.getOrElse("hierarchy", "").asInstanceOf[String] if (StringUtils.isNotEmpty(hierarchyString)) { - JsonUtils.deserialize(hierarchyString, classOf[java.util.Map[String, AnyRef]]).toMap + JsonUtils.deserialize(hierarchyString, classOf[java.util.Map[String, AnyRef]]).asScala.toMap } else Map[String, AnyRef]() } else if (ResponseHandler.checkError(response) && response.getResponseCode.code() == 404) @@ -453,11 +450,11 @@ object HierarchyManager { val responseFuture = oec.graphService.readExternalProps(req, List("relational_metadata")) responseFuture.map(response => { if (!ResponseHandler.checkError(response)) { - val relationalMetadataString = response.getResult.toMap.getOrDefault("relational_metadata", "").asInstanceOf[String] + val relationalMetadataString = response.getResult.asScala.toMap.getOrElse("relational_metadata", 
"").asInstanceOf[String] if (StringUtils.isNotEmpty(relationalMetadataString) && !relationalMetadataString.trim.isBlank) { val relMetadataJavaMap = JsonUtils.deserialize(relationalMetadataString, classOf[java.util.Map[String, AnyRef]]) - if(relMetadataJavaMap != null && relMetadataJavaMap.size()>0) Future(relMetadataJavaMap.toMap) else Future(Map[String, AnyRef]()) - } else + if(relMetadataJavaMap != null && relMetadataJavaMap.size()>0) Future(relMetadataJavaMap.asScala.toMap) else Future(Map[String, AnyRef]()) + } else Future(Map[String, AnyRef]()) } else { val req = new Request(request) @@ -465,9 +462,9 @@ object HierarchyManager { val responseFuture = oec.graphService.readExternalProps(req, List("relational_metadata")) responseFuture.map(response => { if (!ResponseHandler.checkError(response)) { - val relationalMetadataString = response.getResult.toMap.getOrDefault("relational_metadata", "").asInstanceOf[String] + val relationalMetadataString = response.getResult.asScala.toMap.getOrElse("relational_metadata", "").asInstanceOf[String] if (StringUtils.isNotEmpty(relationalMetadataString) && !relationalMetadataString.trim.isBlank) { - Future(JsonUtils.deserialize(relationalMetadataString, classOf[java.util.Map[String, AnyRef]]).toMap) + Future(JsonUtils.deserialize(relationalMetadataString, classOf[java.util.Map[String, AnyRef]]).asScala.toMap) } else Future(Map[String, AnyRef]()) } else Future(Map[String, AnyRef]()) @@ -482,9 +479,9 @@ object HierarchyManager { val hierarchy = fetchHierarchy(request, request.getRequest.get("rootId").asInstanceOf[String]) hierarchy.map(hierarchy => { if (hierarchy.nonEmpty) { - if (StringUtils.isNotEmpty(hierarchy.getOrDefault("status", "").asInstanceOf[String]) && statusList.contains(hierarchy.getOrDefault("status", "").asInstanceOf[String])) { + if (StringUtils.isNotEmpty(hierarchy.getOrElse("status", "").asInstanceOf[String]) && statusList.contains(hierarchy.getOrElse("status", "").asInstanceOf[String])) { //TODO: Remove mapping - val hierarchyMap = mapPrimaryCategories(hierarchy) + val hierarchyMap = mapPrimaryCategories(hierarchy.asJava) rootHierarchy.put("content", hierarchyMap) RedisCache.set(hierarchyPrefix + request.get("rootId"), JsonUtils.serialize(hierarchyMap)) Future(rootHierarchy) @@ -499,8 +496,8 @@ object HierarchyManager { val parentHierarchy = fetchHierarchy(request, response.get("identifier").asInstanceOf[String]) parentHierarchy.map(hierarchy => { if (hierarchy.nonEmpty) { - if (StringUtils.isNoneEmpty(hierarchy.getOrDefault("status", "").asInstanceOf[String]) && statusList.contains(hierarchy.getOrDefault("status", "").asInstanceOf[String]) && CollectionUtils.isNotEmpty(mapAsJavaMap(hierarchy).get("children").asInstanceOf[util.ArrayList[util.HashMap[String, AnyRef]]])) { - val bookmarkHierarchy = filterBookmarkHierarchy(mapAsJavaMap(hierarchy).get("children").asInstanceOf[util.ArrayList[util.Map[String, AnyRef]]], request.get("rootId").asInstanceOf[String]) + if (StringUtils.isNoneEmpty(hierarchy.getOrElse("status", "").asInstanceOf[String]) && statusList.contains(hierarchy.getOrElse("status", "").asInstanceOf[String]) && CollectionUtils.isNotEmpty(hierarchy.asJava.get("children").asInstanceOf[util.ArrayList[util.HashMap[String, AnyRef]]])) { + val bookmarkHierarchy = filterBookmarkHierarchy(hierarchy.asJava.get("children").asInstanceOf[util.ArrayList[util.Map[String, AnyRef]]], request.get("rootId").asInstanceOf[String]) if (!bookmarkHierarchy.isEmpty) { //TODO: Remove mapping val hierarchyMap = mapPrimaryCategories(bookmarkHierarchy) 
@@ -565,15 +562,15 @@ object HierarchyManager { @tailrec def filterBookmarkHierarchy(children: util.List[util.Map[String, AnyRef]], bookmarkId: String)(implicit ec: ExecutionContext): util.Map[String, AnyRef] = { if (CollectionUtils.isNotEmpty(children)) { - val response = children.filter(_.get("identifier") == bookmarkId).toList - if (CollectionUtils.isNotEmpty(response)) { - response.get(0) + val response = children.asScala.filter(_.get("identifier") == bookmarkId).toList + if (response.nonEmpty) { + response.head } else { - val nextChildren = bufferAsJavaList(children.flatMap(child => { + val nextChildren = children.asScala.flatMap(child => { if (!child.isEmpty && CollectionUtils.isNotEmpty(child.get("children").asInstanceOf[util.List[util.Map[String, AnyRef]]])) - child.get("children").asInstanceOf[util.List[util.Map[String, AnyRef]]] - else new util.ArrayList[util.Map[String, AnyRef]] - })) + child.get("children").asInstanceOf[util.List[util.Map[String, AnyRef]]].asScala + else new util.ArrayList[util.Map[String, AnyRef]]().asScala + }).toList.asJava filterBookmarkHierarchy(nextChildren, bookmarkId) } } else { @@ -585,8 +582,8 @@ object HierarchyManager { if (StringUtils.isNotEmpty(identifier)) { val parentHierarchy = fetchHierarchy(request, identifier + imgSuffix) parentHierarchy.map(hierarchy => { - if (hierarchy.nonEmpty && CollectionUtils.isNotEmpty(mapAsJavaMap(hierarchy).get("children").asInstanceOf[util.ArrayList[util.Map[String, AnyRef]]])) { - val bookmarkHierarchy = filterBookmarkHierarchy(mapAsJavaMap(hierarchy).get("children").asInstanceOf[util.ArrayList[util.Map[String, AnyRef]]], request.get("rootId").asInstanceOf[String]) + if (hierarchy.nonEmpty && CollectionUtils.isNotEmpty(hierarchy.asJava.get("children").asInstanceOf[util.ArrayList[util.Map[String, AnyRef]]])) { + val bookmarkHierarchy = filterBookmarkHierarchy(hierarchy.asJava.get("children").asInstanceOf[util.ArrayList[util.Map[String, AnyRef]]], request.get("rootId").asInstanceOf[String]) if (!bookmarkHierarchy.isEmpty) { bookmarkHierarchy } else { @@ -603,7 +600,7 @@ object HierarchyManager { def validateShallowCopied(rootNodeMap: util.Map[String, AnyRef], operation: String, identifier: String): Unit = { val originData = rootNodeMap.getOrDefault("originData", new util.HashMap[String, AnyRef]()).asInstanceOf[util.Map[String, AnyRef]] - if (StringUtils.equalsIgnoreCase(originData.getOrElse("copyType", "").asInstanceOf[String], HierarchyConstants.COPY_TYPE_SHALLOW)) { + if (StringUtils.equalsIgnoreCase(originData.getOrDefault("copyType", "").asInstanceOf[String], HierarchyConstants.COPY_TYPE_SHALLOW)) { operation match { case "add"=> throw new ClientException(HierarchyErrorCodes.ERR_ADD_HIERARCHY_DENIED, "Add Hierarchy is not allowed for partially (shallow) copied content : " + identifier) case "remove"=> throw new ClientException(HierarchyErrorCodes.ERR_REMOVE_HIERARCHY_DENIED, "Remove Hierarchy is not allowed for partially (shallow) copied content : " + identifier) @@ -613,7 +610,7 @@ object HierarchyManager { } def updateLatestLeafNodes(children: util.List[util.Map[String, AnyRef]], leafNodeMap: util.Map[String, AnyRef]): List[Any] = { - children.toList.map(content => { + children.asScala.toList.map(content => { if(StringUtils.equalsIgnoreCase("Default", content.getOrDefault("visibility", "").asInstanceOf[String])) { val metadata: util.Map[String, AnyRef] = leafNodeMap.getOrDefault(content.get("identifier").asInstanceOf[String], new java.util.HashMap[String, AnyRef]()).asInstanceOf[util.Map[String, AnyRef]] 
if(HierarchyConstants.RETIRED_STATUS.equalsIgnoreCase(metadata.getOrDefault("status", HierarchyConstants.RETIRED_STATUS).asInstanceOf[String])){ @@ -630,7 +627,7 @@ object HierarchyManager { } def fetchAllLeafNodes(children: util.List[util.Map[String, AnyRef]], leafNodeIds: util.List[String]): List[Any] = { - children.toList.map(content => { + children.asScala.toList.map(content => { if(StringUtils.equalsIgnoreCase("Default", content.getOrDefault("visibility", "").asInstanceOf[String])) { leafNodeIds.add(content.get("identifier").asInstanceOf[String]) leafNodeIds @@ -650,19 +647,19 @@ object HierarchyManager { }) request.put("identifiers", leafNodeIds) DataNode.list(request).map(nodes => { - val leafNodeMap: Map[String, AnyRef] = nodes.toList.map(node => (node.getIdentifier, NodeUtil.serialize(node, null, node.getObjectType.toLowerCase.replace("image", ""), HierarchyConstants.SCHEMA_VERSION, withoutRelations = true).asInstanceOf[AnyRef])).toMap - val imageNodeIds: util.List[String] = JavaConverters.seqAsJavaListConverter(leafNodeIds.toList.map(id => id + HierarchyConstants.IMAGE_SUFFIX)).asJava + val leafNodeMap: Map[String, AnyRef] = nodes.asScala.toList.map(node => (node.getIdentifier, NodeUtil.serialize(node, null, node.getObjectType.toLowerCase.replace("image", ""), HierarchyConstants.SCHEMA_VERSION, withoutRelations = true).asInstanceOf[AnyRef])).toMap + val imageNodeIds: util.List[String] = leafNodeIds.asScala.toList.map(id => id + HierarchyConstants.IMAGE_SUFFIX).asJava request.put("identifiers", imageNodeIds) DataNode.list(request).map(imageNodes => { //val imageLeafNodeMap: Map[String, AnyRef] = imageNodes.toList.map(imageNode => (imageNode.getIdentifier.replaceAll(HierarchyConstants.IMAGE_SUFFIX, ""), NodeUtil.serialize(imageNode, null, HierarchyConstants.CONTENT_SCHEMA_NAME, HierarchyConstants.SCHEMA_VERSION, true).asInstanceOf[AnyRef])).toMap - val imageLeafNodeMap: Map[String, AnyRef] = imageNodes.toList.map(imageNode => { + val imageLeafNodeMap: Map[String, AnyRef] = imageNodes.asScala.toList.map(imageNode => { val identifier = imageNode.getIdentifier.replaceAll(HierarchyConstants.IMAGE_SUFFIX, "") val metadata = NodeUtil.serialize(imageNode, null, imageNode.getObjectType.toLowerCase.replace("image", ""), HierarchyConstants.SCHEMA_VERSION, withoutRelations = true) metadata.replace("identifier", identifier) (identifier, metadata.asInstanceOf[AnyRef]) }).toMap val updatedMap = leafNodeMap ++ imageLeafNodeMap - JavaConverters.mapAsJavaMapConverter(updatedMap).asJava + updatedMap.asJava }) }).flatMap(f => f) } else { @@ -672,7 +669,7 @@ object HierarchyManager { } def updateRelationMetadataMappingInChildren(children: util.List[util.Map[String, AnyRef]], colRelationalMetadata: Map[String,AnyRef]): List[Any] = { - children.toList.map(content => { + children.asScala.toList.map(content => { if(colRelationalMetadata.contains(content.get("identifier")+"::"+content.get("parent"))) { val contentRelMetadata = colRelationalMetadata(content.get("identifier")+"::"+content.get("parent")) content.put(HierarchyConstants.RELATIONAL_METADATA,contentRelMetadata) @@ -682,7 +679,7 @@ object HierarchyManager { } def updateContentMappingInChildren(children: util.List[util.Map[String, AnyRef]]): List[Any] = { - children.toList.map(content => { + children.asScala.toList.map(content => { if (mapPrimaryCategoriesEnabled) HierarchyBackwardCompatibilityUtil.setContentAndCategoryTypes(content, content.get("objectType").asInstanceOf[String]) if (objectTypeAsContentEnabled) @@ -711,7 +708,7 @@ object 
HierarchyManager { val objectCategoryDefinition: ObjectCategoryDefinition = DefinitionNode.getObjectCategoryDefinition(primaryCategory, request.getContext.get(HierarchyConstants.SCHEMA_NAME).asInstanceOf[String].toLowerCase(), channel) val outRelations = DefinitionNode.getOutRelations(HierarchyConstants.GRAPH_ID, "1.0", request.getContext.get(HierarchyConstants.SCHEMA_NAME).asInstanceOf[String].toLowerCase(), objectCategoryDefinition) - val configObjTypes: List[String] = outRelations.find(_.keySet.contains("children")).orNull.getOrElse("children", Map()).asInstanceOf[java.util.Map[String, AnyRef]].getOrElse("objects", new util.ArrayList[String]()).asInstanceOf[java.util.List[String]].toList + val configObjTypes: List[String] = outRelations.find(_.keySet.contains("children")).map(_.getOrElse("children", Map()).asInstanceOf[java.util.Map[String, AnyRef]].getOrDefault("objects", new util.ArrayList[String]()).asInstanceOf[java.util.List[String]].asScala.toList).getOrElse(List()) if(configObjTypes.nonEmpty && !configObjTypes.contains(childNode.getOrDefault("objectType", "").asInstanceOf[String])) throw new ClientException("ERR_INVALID_CHILDREN", "Invalid Children objectType "+childNode.get("objectType")+" found for : "+childNode.get("identifier") + "| Please provide children having one of the objectType from "+ configObjTypes.asJava) } diff --git a/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/UpdateHierarchyManager.scala b/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/UpdateHierarchyManager.scala index 7ec8c99d8..3d15bd970 100644 --- a/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/UpdateHierarchyManager.scala +++ b/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/UpdateHierarchyManager.scala @@ -14,10 +14,10 @@ import org.sunbird.graph.schema.DefinitionNode import org.sunbird.graph.utils.{NodeUtil, ScalaJsonUtils} import org.sunbird.schema.dto.ValidationResult import org.sunbird.schema.{ISchemaValidator, SchemaValidatorFactory} +import scala.jdk.CollectionConverters._ import org.sunbird.telemetry.logger.TelemetryManager import org.sunbird.utils.{HierarchyBackwardCompatibilityUtil, HierarchyConstants, HierarchyErrorCodes} -import scala.collection.JavaConverters._ import scala.collection.convert.ImplicitConversions._ import scala.collection.mutable import scala.concurrent.{ExecutionContext, Future} @@ -56,7 +56,7 @@ object UpdateHierarchyManager { val response = ResponseHandler.OK() response.put(HierarchyConstants.CONTENT_ID, rootId) idMap.remove(rootId) - response.put(HierarchyConstants.IDENTIFIERS, mapAsJavaMap(idMap)) + response.put(HierarchyConstants.IDENTIFIERS, idMap.asJava) if (request.getContext.getOrDefault("shouldImageDelete", false.asInstanceOf[AnyRef]).asInstanceOf[Boolean]) deleteHierarchy(request) Future(response) @@ -288,11 +288,16 @@ object UpdateHierarchyManager { if (CollectionUtils.isNotEmpty(nextLevel) && MapUtils.isNotEmpty(currentLevelNodes)) { nextLevel.foreach(e => { val parentId = e.get("parent").asInstanceOf[String] - currentLevelNodes.getOrDefault(parentId, List[java.util.Map[String, AnyRef]]()).foreach(parent => { - val children = parent.getOrDefault(HierarchyConstants.CHILDREN, new java.util.ArrayList[java.util.Map[String, AnyRef]]()).asInstanceOf[java.util.List[java.util.Map[String, AnyRef]]] - children.add(e) - parent.put(HierarchyConstants.CHILDREN, sortByIndex(children)) - }) + currentLevelNodes.get(parentId) match { + case Some(parentList) => parentList.foreach(parent => { + val 
existingChildren = parent.getOrDefault(HierarchyConstants.CHILDREN, new java.util.ArrayList[java.util.Map[String, AnyRef]]()).asInstanceOf[java.util.List[java.util.Map[String, AnyRef]]] + // Ensure we have a mutable ArrayList + val children = new java.util.ArrayList[java.util.Map[String, AnyRef]](existingChildren) + children.add(e) + parent.put(HierarchyConstants.CHILDREN, sortByIndex(children)) + }) + case None => // Parent not found, skip this node + } }) } } @@ -524,7 +529,10 @@ object UpdateHierarchyManager { } def sortByIndex(childrenMaps: java.util.List[java.util.Map[String, AnyRef]]): java.util.List[java.util.Map[String, AnyRef]] = { - bufferAsJavaList(childrenMaps.sortBy(_.get("index").asInstanceOf[Int])) + import scala.jdk.CollectionConverters._ + val sortedList = new java.util.ArrayList[java.util.Map[String, AnyRef]]() + childrenMaps.asScala.sortBy(_.get("index").asInstanceOf[Int]).foreach(sortedList.add) + sortedList } diff --git a/content-api/hierarchy-manager/src/main/scala/org/sunbird/utils/HierarchyBackwardCompatibilityUtil.scala b/content-api/hierarchy-manager/src/main/scala/org/sunbird/utils/HierarchyBackwardCompatibilityUtil.scala index 83aef6b53..9bcc743b3 100644 --- a/content-api/hierarchy-manager/src/main/scala/org/sunbird/utils/HierarchyBackwardCompatibilityUtil.scala +++ b/content-api/hierarchy-manager/src/main/scala/org/sunbird/utils/HierarchyBackwardCompatibilityUtil.scala @@ -5,8 +5,8 @@ import java.util import org.apache.commons.lang3.StringUtils import org.sunbird.common.Platform import org.sunbird.graph.dac.model.Node +import scala.jdk.CollectionConverters._ -import scala.collection.JavaConverters._ object HierarchyBackwardCompatibilityUtil { diff --git a/content-api/hierarchy-manager/src/test/resources/application.conf b/content-api/hierarchy-manager/src/test/resources/application.conf index 046ae1a25..617da11e1 100644 --- a/content-api/hierarchy-manager/src/test/resources/application.conf +++ b/content-api/hierarchy-manager/src/test/resources/application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. 
# @@ -338,7 +338,7 @@ plugin.media.base.url="https://dev.open-sunbird.org" # Configuration graph.dir=/data/testingGraphDB -akka.request_timeout=30 +pekko.request_timeout=30 environment.id=10000000 graph.ids=["domain"] graph.passport.key.base=31b6fd1c4d64e745c867e61a45edc34a diff --git a/content-api/hierarchy-manager/src/test/scala/org/sunbird/managers/TestHierarchy.scala b/content-api/hierarchy-manager/src/test/scala/org/sunbird/managers/TestHierarchy.scala index c19f9ad66..2ffaa200e 100644 --- a/content-api/hierarchy-manager/src/test/scala/org/sunbird/managers/TestHierarchy.scala +++ b/content-api/hierarchy-manager/src/test/scala/org/sunbird/managers/TestHierarchy.scala @@ -9,7 +9,7 @@ import org.sunbird.common.dto.Request import org.sunbird.common.exception.{ClientException, ResourceNotFoundException} import org.sunbird.graph.OntologyEngineContext -import scala.collection.JavaConversions._ +import scala.jdk.CollectionConverters._ class TestHierarchy extends BaseSpec { @@ -54,7 +54,7 @@ class TestHierarchy extends BaseSpec { request.put("rootId", "do_11283193441064550414") request.put("unitId", "do_11283193463014195215") request.put("children", util.Arrays.asList("do_11340096165525094411")) - request.put("relationalMetadata",mapAsJavaMap(Map("do_11340096165525094411" -> Map("name" -> "Test Name RM", "keywords" -> Array("Overwriting content Keywords") )))) + request.put("relationalMetadata",Map("do_11340096165525094411" -> Map("name" -> "Test Name RM", "keywords" -> Array("Overwriting content Keywords") )).asJava) request.put("mode","edit") val future = HierarchyManager.addLeafNodesToHierarchy(request) future.map(response => { diff --git a/content-api/pom.xml b/content-api/pom.xml index 052a4d0fe..900f0dee2 100755 --- a/content-api/pom.xml +++ b/content-api/pom.xml @@ -14,7 +14,7 @@ UTF-8 UTF-8 - 2.12.11 + 2.13.12 content-service diff --git a/knowlg-automation/helm_charts/content/content-service_application.conf b/knowlg-automation/helm_charts/content/content-service_application.conf index d73b3cc59..336d86502 100644 --- a/knowlg-automation/helm_charts/content/content-service_application.conf +++ b/knowlg-automation/helm_charts/content/content-service_application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. 
# @@ -340,7 +340,7 @@ play.filters { } play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB +pekko.http.parsing.max-content-length = 50MB schema.base_path="https://sunbirddevbbpublic.blob.core.windows.net/sunbird-content-staging-knowlg/schemas/local" # Cassandra Configuration @@ -422,7 +422,7 @@ cloud_storage { } # Configuration -akka.request_timeout: 30 +pekko.request_timeout: 30 environment.id: 20000000 graph { dir: "/data/graphDB" diff --git a/knowlg-automation/helm_charts/dial/dial-service_application.conf b/knowlg-automation/helm_charts/dial/dial-service_application.conf index a82686311..2827b1aa9 100644 --- a/knowlg-automation/helm_charts/dial/dial-service_application.conf +++ b/knowlg-automation/helm_charts/dial/dial-service_application.conf @@ -6,7 +6,7 @@ # https://www.playframework.com/documentation/latest/JavaAkka#Configuration # ~~~~~ akka { - #loggers =["akka.event.Logging$DefaultLogger"] + #loggers =["pekko.event.Logging$DefaultLogger"] #log-config-on-start = true } diff --git a/knowlg-automation/helm_charts/learning/learning-service_application.conf b/knowlg-automation/helm_charts/learning/learning-service_application.conf index cb6887435..663f335de 100644 --- a/knowlg-automation/helm_charts/learning/learning-service_application.conf +++ b/knowlg-automation/helm_charts/learning/learning-service_application.conf @@ -139,7 +139,7 @@ defaultTokenCountAfterWord=10 # Neo4j Graph Configuration graph.dir="/data/graphDB" -akka.request_timeout=30 +pekko.request_timeout=30 environment.id="20000000" graph.passport.key.base=" graph_passport_key " route.domain="bolt://neo4j-db.knowlg-db.svc.cluster.local:7687" diff --git a/knowlg-automation/helm_charts/search/search-service_application.conf b/knowlg-automation/helm_charts/search/search-service_application.conf index e6f0753e5..272f904f8 100644 --- a/knowlg-automation/helm_charts/search/search-service_application.conf +++ b/knowlg-automation/helm_charts/search/search-service_application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. # @@ -278,7 +278,7 @@ play.filters { } play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB +pekko.http.parsing.max-content-length = 50MB schema.base_path = "../../schemas/" diff --git a/knowlg-automation/helm_charts/taxonomy/taxonomy-service_application.conf b/knowlg-automation/helm_charts/taxonomy/taxonomy-service_application.conf index 68b072a6d..dbd7c3c1d 100644 --- a/knowlg-automation/helm_charts/taxonomy/taxonomy-service_application.conf +++ b/knowlg-automation/helm_charts/taxonomy/taxonomy-service_application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. 
# # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. # @@ -316,7 +316,7 @@ play.filters { } play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB +pekko.http.parsing.max-content-length = 50MB schema.base_path="https://sunbirddev.blob.core.windows.net/sunbird-content-dev/schemas/local" # Cassandra Configuration @@ -337,7 +337,7 @@ redis { } # Configuration -akka.request_timeout: 30 +pekko.request_timeout: 30 environment.id: 10000000 graph { dir: "/data/graphDB" diff --git a/kubernetes/content/content-service_application.conf b/kubernetes/content/content-service_application.conf index 34f18a850..ee4ac8bac 100644 --- a/kubernetes/content/content-service_application.conf +++ b/kubernetes/content/content-service_application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. # @@ -340,7 +340,7 @@ play.filters { } play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB +pekko.http.parsing.max-content-length = 50MB schema.base_path="https://sunbirddevbbpublic.blob.core.windows.net/sunbird-content-staging-knowlg/schemas/local" # Cassandra Configuration @@ -422,7 +422,7 @@ cloud_storage { } # Configuration -akka.request_timeout: 30 +pekko.request_timeout: 30 environment.id: 20000000 graph { dir: "/data/graphDB" diff --git a/kubernetes/taxonomy/taxonomy-service_application.conf b/kubernetes/taxonomy/taxonomy-service_application.conf index b48fa7441..441d17123 100644 --- a/kubernetes/taxonomy/taxonomy-service_application.conf +++ b/kubernetes/taxonomy/taxonomy-service_application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. 
# @@ -316,7 +316,7 @@ play.filters { } play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB +pekko.http.parsing.max-content-length = 50MB schema.base_path="https://sunbirddev.blob.core.windows.net/sunbird-content-dev/schemas/local" # Cassandra Configuration @@ -337,7 +337,7 @@ redis { } # Configuration -akka.request_timeout: 30 +pekko.request_timeout: 30 environment.id: 10000000 graph { dir: "/data/graphDB" diff --git a/ontology-engine/graph-common/src/test/resources/application.conf b/ontology-engine/graph-common/src/test/resources/application.conf index 6592a72e8..f132f362e 100644 --- a/ontology-engine/graph-common/src/test/resources/application.conf +++ b/ontology-engine/graph-common/src/test/resources/application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. # @@ -338,7 +338,7 @@ plugin.media.base.url="https://dev.open-sunbird.org" # Configuration graph.dir=/data/testingGraphDB -akka.request_timeout=30 +pekko.request_timeout=30 environment.id=10000000 graph.ids=["domain"] graph.passport.key.base=31b6fd1c4d64e745c867e61a45edc34a @@ -463,7 +463,7 @@ framework.cache.read=true max.thumbnail.size.pixels=150 play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB +pekko.http.parsing.max-content-length = 50MB schema.base_path="../../schemas/" //schema.base_path = "https://sunbirddev.blob.core.windows.net/sunbird-content-dev/schemas/" diff --git a/ontology-engine/graph-core_2.12/src/test/resources/application.conf b/ontology-engine/graph-core_2.12/src/test/resources/application.conf deleted file mode 100644 index ea8638fdc..000000000 --- a/ontology-engine/graph-core_2.12/src/test/resources/application.conf +++ /dev/null @@ -1,495 +0,0 @@ -# This is the main configuration file for the application. -# https://www.playframework.com/documentation/latest/ConfigFile -# ~~~~~ -# Play uses HOCON as its configuration file format. HOCON has a number -# of advantages over other config formats, but there are two things that -# can be used when modifying settings. -# -# You can include other configuration files in this main application.conf file: -#include "extra-config.conf" -# -# You can declare variables and substitute for them: -#mykey = ${some.value} -# -# And if an environment variable exists when there is no other substitution, then -# HOCON will fall back to substituting environment variable: -#mykey = ${JAVA_HOME} - -## Akka -# https://www.playframework.com/documentation/latest/ScalaAkka#Configuration -# https://www.playframework.com/documentation/latest/JavaAkka#Configuration -# ~~~~~ -# Play uses Akka internally and exposes Akka Streams and actors in Websockets and -# other streaming HTTP responses. -akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete - # configuration at INFO level, including defaults and overrides, so it s worth - # putting at the very top. - # - # Put the following in your conf/logback.xml file: - # - # - # - # And then uncomment this line to debug the configuration. 
- # - #log-config-on-start = true -} - -## Secret key -# http://www.playframework.com/documentation/latest/ApplicationSecret -# ~~~~~ -# The secret key is used to sign Play's session cookie. -# This must be changed for production, but we don't recommend you change it in this file. -play.http.secret.key = a-long-secret-to-calm-the-rage-of-the-entropy-gods - -## Modules -# https://www.playframework.com/documentation/latest/Modules -# ~~~~~ -# Control which modules are loaded when Play starts. Note that modules are -# the replacement for "GlobalSettings", which are deprecated in 2.5.x. -# Please see https://www.playframework.com/documentation/latest/GlobalSettings -# for more information. -# -# You can also extend Play functionality by using one of the publically available -# Play modules: https://playframework.com/documentation/latest/ModuleDirectory -play.modules { - # By default, Play will load any class called Module that is defined - # in the root package (the "app" directory), or you can define them - # explicitly below. - # If there are any built-in modules that you want to enable, you can list them here. - #enabled += my.application.Module - - # If there are any built-in modules that you want to disable, you can list them here. - #disabled += "" -} - -## IDE -# https://www.playframework.com/documentation/latest/IDE -# ~~~~~ -# Depending on your IDE, you can add a hyperlink for errors that will jump you -# directly to the code location in the IDE in dev mode. The following line makes -# use of the IntelliJ IDEA REST interface: -#play.editor="http://localhost:63342/api/file/?file=%s&line=%s" - -## Internationalisation -# https://www.playframework.com/documentation/latest/JavaI18N -# https://www.playframework.com/documentation/latest/ScalaI18N -# ~~~~~ -# Play comes with its own i18n settings, which allow the user's preferred language -# to map through to internal messages, or allow the language to be stored in a cookie. -play.i18n { - # The application languages - langs = [ "en" ] - - # Whether the language cookie should be secure or not - #langCookieSecure = true - - # Whether the HTTP only attribute of the cookie should be set to true - #langCookieHttpOnly = true -} - -## Play HTTP settings -# ~~~~~ -play.http { - ## Router - # https://www.playframework.com/documentation/latest/JavaRouting - # https://www.playframework.com/documentation/latest/ScalaRouting - # ~~~~~ - # Define the Router object to use for this application. - # This router will be looked up first when the application is starting up, - # so make sure this is the entry point. - # Furthermore, it's assumed your route file is named properly. - # So for an application router like `my.application.Router`, - # you may need to define a router file `conf/my.application.routes`. - # Default to Routes in the root package (aka "apps" folder) (and conf/routes) - #router = my.application.Router - - ## Action Creator - # https://www.playframework.com/documentation/latest/JavaActionCreator - # ~~~~~ - #actionCreator = null - - ## ErrorHandler - # https://www.playframework.com/documentation/latest/JavaRouting - # https://www.playframework.com/documentation/latest/ScalaRouting - # ~~~~~ - # If null, will attempt to load a class called ErrorHandler in the root package, - #errorHandler = null - - ## Session & Flash - # https://www.playframework.com/documentation/latest/JavaSessionFlash - # https://www.playframework.com/documentation/latest/ScalaSessionFlash - # ~~~~~ - session { - # Sets the cookie to be sent only over HTTPS. 
- #secure = true - - # Sets the cookie to be accessed only by the server. - #httpOnly = true - - # Sets the max-age field of the cookie to 5 minutes. - # NOTE: this only sets when the browser will discard the cookie. Play will consider any - # cookie value with a valid signature to be a valid session forever. To implement a server side session timeout, - # you need to put a timestamp in the session and check it at regular intervals to possibly expire it. - #maxAge = 300 - - # Sets the domain on the session cookie. - #domain = "example.com" - } - - flash { - # Sets the cookie to be sent only over HTTPS. - #secure = true - - # Sets the cookie to be accessed only by the server. - #httpOnly = true - } -} - -## Netty Provider -# https://www.playframework.com/documentation/latest/SettingsNetty -# ~~~~~ -play.server.netty { - # Whether the Netty wire should be logged - log.wire = true - - # If you run Play on Linux, you can use Netty's native socket transport - # for higher performance with less garbage. - transport = "native" -} - -## WS (HTTP Client) -# https://www.playframework.com/documentation/latest/ScalaWS#Configuring-WS -# ~~~~~ -# The HTTP client primarily used for REST APIs. The default client can be -# configured directly, but you can also create different client instances -# with customized settings. You must enable this by adding to build.sbt: -# -# libraryDependencies += ws // or javaWs if using java -# -play.ws { - # Sets HTTP requests not to follow 302 requests - #followRedirects = false - - # Sets the maximum number of open HTTP connections for the client. - #ahc.maxConnectionsTotal = 50 - - ## WS SSL - # https://www.playframework.com/documentation/latest/WsSSL - # ~~~~~ - ssl { - # Configuring HTTPS with Play WS does not require programming. You can - # set up both trustManager and keyManager for mutual authentication, and - # turn on JSSE debugging in development with a reload. - #debug.handshake = true - #trustManager = { - # stores = [ - # { type = "JKS", path = "exampletrust.jks" } - # ] - #} - } -} - -## Cache -# https://www.playframework.com/documentation/latest/JavaCache -# https://www.playframework.com/documentation/latest/ScalaCache -# ~~~~~ -# Play comes with an integrated cache API that can reduce the operational -# overhead of repeated requests. You must enable this by adding to build.sbt: -# -# libraryDependencies += cache -# -play.cache { - # If you want to bind several caches, you can bind the individually - #bindCaches = ["db-cache", "user-cache", "session-cache"] -} - -## Filter Configuration -# https://www.playframework.com/documentation/latest/Filters -# ~~~~~ -# There are a number of built-in filters that can be enabled and configured -# to give Play greater security. -# -play.filters { - - # Enabled filters are run automatically against Play. - # CSRFFilter, AllowedHostFilters, and SecurityHeadersFilters are enabled by default. - enabled = [] - - # Disabled filters remove elements from the enabled list. - # disabled += filters.CSRFFilter - - - ## CORS filter configuration - # https://www.playframework.com/documentation/latest/CorsFilter - # ~~~~~ - # CORS is a protocol that allows web applications to make requests from the browser - # across different domains. - # NOTE: You MUST apply the CORS configuration before the CSRF filter, as CSRF has - # dependencies on CORS settings. - cors { - # Filter paths by a whitelist of path prefixes - #pathPrefixes = ["/some/path", ...] - - # The allowed origins. If null, all origins are allowed. 
- #allowedOrigins = ["http://www.example.com"] - - # The allowed HTTP methods. If null, all methods are allowed - #allowedHttpMethods = ["GET", "POST"] - } - - ## Security headers filter configuration - # https://www.playframework.com/documentation/latest/SecurityHeaders - # ~~~~~ - # Defines security headers that prevent XSS attacks. - # If enabled, then all options are set to the below configuration by default: - headers { - # The X-Frame-Options header. If null, the header is not set. - #frameOptions = "DENY" - - # The X-XSS-Protection header. If null, the header is not set. - #xssProtection = "1; mode=block" - - # The X-Content-Type-Options header. If null, the header is not set. - #contentTypeOptions = "nosniff" - - # The X-Permitted-Cross-Domain-Policies header. If null, the header is not set. - #permittedCrossDomainPolicies = "master-only" - - # The Content-Security-Policy header. If null, the header is not set. - #contentSecurityPolicy = "default-src 'self'" - } - - ## Allowed hosts filter configuration - # https://www.playframework.com/documentation/latest/AllowedHostsFilter - # ~~~~~ - # Play provides a filter that lets you configure which hosts can access your application. - # This is useful to prevent cache poisoning attacks. - hosts { - # Allow requests to example.com, its subdomains, and localhost:9000. - #allowed = [".example.com", "localhost:9000"] - } -} - -# Learning-Service Configuration -content.metadata.visibility.parent=["textbookunit", "courseunit", "lessonplanunit"] - -# Cassandra Configuration -content.keyspace.name=content_store -content.keyspace.table=content_data -#TODO: Add Configuration for assessment. e.g: question_data -orchestrator.keyspace.name=script_store -orchestrator.keyspace.table=script_data -cassandra.lp.connection="127.0.0.1:9042,127.0.0.2:9042,127.0.0.3:9042" -cassandra.lpa.connection="127.0.0.1:9042,127.0.0.2:9042,127.0.0.3:9042" - -# Redis Configuration -redis.host=localhost -redis.port=6379 -redis.maxConnections=128 - -#Condition to enable publish locally -content.publish_task.enabled=true - -#directory location where store unzip file -dist.directory=/data/tmp/dist/ -output.zipfile=/data/tmp/story.zip -source.folder=/data/tmp/temp2/ -save.directory=/data/tmp/temp/ - -# Content 2 vec analytics URL -CONTENT_TO_VEC_URL="http://172.31.27.233:9000/content-to-vec" - -# FOR CONTENT WORKFLOW PIPELINE (CWP) - -#--Content Workflow Pipeline Mode -OPERATION_MODE=TEST - -#--Maximum Content Package File Size Limit in Bytes (50 MB) -MAX_CONTENT_PACKAGE_FILE_SIZE_LIMIT=52428800 - -#--Maximum Asset File Size Limit in Bytes (20 MB) -MAX_ASSET_FILE_SIZE_LIMIT=20971520 - -#--No of Retry While File Download Fails -RETRY_ASSET_DOWNLOAD_COUNT=1 - -#Google-vision-API -google.vision.tagging.enabled = false - -#Orchestrator env properties -env="https://dev.ekstep.in/api/learning" - -#Current environment -cloud_storage.env=dev - - -#Folder configuration -cloud_storage.content.folder=content -cloud_storage.asset.folder=assets -cloud_storage.artefact.folder=artifact -cloud_storage.bundle.folder=bundle -cloud_storage.media.folder=media -cloud_storage.ecar.folder=ecar_files - -# Media download configuration -content.media.base.url="https://dev.open-sunbird.org" -plugin.media.base.url="https://dev.open-sunbird.org" - -# Configuration -graph.dir=/data/testingGraphDB -akka.request_timeout=30 -environment.id=10000000 -graph.ids=["domain"] -graph.passport.key.base=31b6fd1c4d64e745c867e61a45edc34a -route.domain="bolt://localhost:7687" 
-route.bolt.write.domain="bolt://localhost:7687" -route.bolt.read.domain="bolt://localhost:7687" -route.bolt.comment.domain="bolt://localhost:7687" -route.all="bolt://localhost:7687" -route.bolt.write.all="bolt://localhost:7687" -route.bolt.read.all="bolt://localhost:7687" -route.bolt.comment.all="bolt://localhost:7687" - -shard.id=1 -platform.auth.check.enabled=false -platform.cache.ttl=3600000 - -# Elasticsearch properties -search.es_conn_info="localhost:9200" -search.fields.query=["name^100","title^100","lemma^100","code^100","tags^100","domain","subject","description^10","keywords^25","ageGroup^10","filter^10","theme^10","genre^10","objects^25","contentType^100","language^200","teachingMode^25","skills^10","learningObjective^10","curriculum^100","gradeLevel^100","developer^100","attributions^10","owner^50","text","words","releaseNotes"] -search.fields.date=["lastUpdatedOn","createdOn","versionDate","lastSubmittedOn","lastPublishedOn"] -search.batch.size=500 -search.connection.timeout=30 -platform-api-url="http://localhost:8080/language-service" -MAX_ITERATION_COUNT_FOR_SAMZA_JOB=2 - - -# DIAL Code Configuration -dialcode.keyspace.name="dialcode_store" -dialcode.keyspace.table="dial_code" -dialcode.max_count=1000 - -# System Configuration -system.config.keyspace.name="dialcode_store" -system.config.table="system_config" - -#Publisher Configuration -publisher.keyspace.name="dialcode_store" -publisher.keyspace.table="publisher" - -#DIAL Code Generator Configuration -dialcode.strip.chars="0" -dialcode.length=6.0 -dialcode.large.prime_number=1679979167 - -#DIAL Code ElasticSearch Configuration -dialcode.index=true -dialcode.object_type="DialCode" - -framework.max_term_creation_limit=200 - -# Enable Suggested Framework in Get Channel API. -channel.fetch.suggested_frameworks=true - -# Kafka configuration details -kafka.topics.instruction="local.learning.job.request" -kafka.urls="localhost:9092" - -#Youtube Standard Licence Validation -learning.content.youtube.validate.license=true -learning.content.youtube.application.name=fetch-youtube-license -youtube.license.regex.pattern=["\\?vi?=([^&]*)", "watch\\?.*v=([^&]*)", "(?:embed|vi?)/([^/?]*)","^([A-Za-z0-9\\-\\_]*)"] - -#Top N Config for Search Telemetry -telemetry_env=dev -telemetry.search.topn=5 - -installation.id=ekstep - - -channel.default="in.ekstep" - -# DialCode Link API Config -learning.content.link_dialcode_validation=true -dialcode.api.search.url="http://localhost:8080/learning-service/v3/dialcode/search" -dialcode.api.authorization=auth_key - -# Language-Code Configuration -platform.language.codes=["as","bn","en","gu","hi","hoc","jun","ka","mai","mr","unx","or","san","sat","ta","te","urd", "pj"] - -# Kafka send event to topic enable -kafka.topic.send.enable=false - -learning.valid_license=["creativeCommon"] -learning.service_provider=["youtube"] - -stream.mime.type=video/mp4 -compositesearch.index.name="compositesearch" - -hierarchy.keyspace.name=hierarchy_store -content.hierarchy.table=content_hierarchy -framework.hierarchy.table=framework_hierarchy - -# Kafka topic for definition update event. 
-kafka.topic.system.command="dev.system.command" - -learning.reserve_dialcode.content_type=["TextBook"] -# restrict.metadata.objectTypes=["Content", "ContentImage", "AssessmentItem", "Channel", "Framework", "Category", "CategoryInstance", "Term"] - -#restrict.metadata.objectTypes="Content,ContentImage" - -publish.collection.fullecar.disable=true - -# Consistency Level for Multi Node Cassandra cluster -cassandra.lp.consistency.level=QUORUM - - - - -content.nested.fields="badgeAssertions,targets,badgeAssociations" - -content.cache.ttl=86400 -content.cache.enable=true -collection.cache.enable=true -content.discard.status=["Draft","FlagDraft"] - -framework.categories_cached=["subject", "medium", "gradeLevel", "board"] -framework.cache.ttl=86400 -framework.cache.read=true - - -# Max size(width/height) of thumbnail in pixels -max.thumbnail.size.pixels=150 - -play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB -schema.base_path="../../schemas/" -//schema.base_path = "https://sunbirddev.blob.core.windows.net/sunbird-content-dev/schemas/" - -collection.image.migration.enabled=true -collection.keyspace = "hierarchy_store" -content.keyspace = "content_store" -lock.keyspace = "lock_db" -languageCode { - assamese : "as" - bengali : "bn" - english : "en" - gujarati : "gu" - hindi : "hi" - kannada : "ka" - marathi : "mr" - odia : "or" - tamil : "ta" - telugu : "te" -} - -platform.language.codes=["as","bn","en","gu","hi","hoc","jun","ka","mai","mr","unx","or","san","sat","ta","te","urd"] -objectcategorydefinition.keyspace=category_store - -cloud_storage_container="sunbird-content-dev" -cloudstorage.metadata.replace_absolute_path=true -cloudstorage.relative_path_prefix= "CONTENT_STORAGE_BASE_PATH" -cloudstorage.read_base_path="https://sunbirddev.blob.core.windows.net" -cloudstorage.write_base_path=["https://sunbirddev.blob.core.windows.net","https://obj.dev.sunbird.org"] -cloudstorage.metadata.list=["appIcon","posterImage","artifactUrl","downloadUrl","variants","previewUrl","pdfUrl", "streamingUrl", "toc_url"] \ No newline at end of file diff --git a/ontology-engine/graph-core_2.12/pom.xml b/ontology-engine/graph-core_2.13/pom.xml similarity index 99% rename from ontology-engine/graph-core_2.12/pom.xml rename to ontology-engine/graph-core_2.13/pom.xml index 729c430e4..3941bf96d 100644 --- a/ontology-engine/graph-core_2.12/pom.xml +++ b/ontology-engine/graph-core_2.13/pom.xml @@ -7,7 +7,7 @@ 1.0-SNAPSHOT org.sunbird - graph-core_2.12 + graph-core_2.13 1.0-SNAPSHOT diff --git a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/GraphService.scala b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/GraphService.scala similarity index 100% rename from ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/GraphService.scala rename to ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/GraphService.scala diff --git a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/OntologyEngineContext.scala b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/OntologyEngineContext.scala similarity index 100% rename from ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/OntologyEngineContext.scala rename to ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/OntologyEngineContext.scala diff --git a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/exception/GraphErrorCodes.scala 
b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/exception/GraphErrorCodes.scala similarity index 100% rename from ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/exception/GraphErrorCodes.scala rename to ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/exception/GraphErrorCodes.scala diff --git a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/ExternalPropsManager.scala b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/ExternalPropsManager.scala similarity index 99% rename from ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/ExternalPropsManager.scala rename to ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/ExternalPropsManager.scala index af4e27689..3b0050825 100644 --- a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/ExternalPropsManager.scala +++ b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/ExternalPropsManager.scala @@ -7,8 +7,8 @@ import org.sunbird.graph.external.store.ExternalStoreFactory import org.sunbird.schema.SchemaValidatorFactory import java.util.UUID -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} +import scala.jdk.CollectionConverters._ object ExternalPropsManager { def saveProps(request: Request)(implicit ec: ExecutionContext): Future[Response] = { diff --git a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/dial/DialGraphService.scala b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/dial/DialGraphService.scala similarity index 100% rename from ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/dial/DialGraphService.scala rename to ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/dial/DialGraphService.scala diff --git a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/dial/DialPropsManager.scala b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/dial/DialPropsManager.scala similarity index 97% rename from ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/dial/DialPropsManager.scala rename to ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/dial/DialPropsManager.scala index 2f4562d5c..5cf85d1e5 100644 --- a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/dial/DialPropsManager.scala +++ b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/dial/DialPropsManager.scala @@ -4,8 +4,8 @@ import org.sunbird.common.dto.{Request, Response} import org.sunbird.schema.SchemaValidatorFactory import java.util -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} +import scala.jdk.CollectionConverters._ // $COVERAGE-OFF$ Disabling scoverage object DialPropsManager { def saveProps(request: Request)(implicit ec: ExecutionContext): Future[Response] = { diff --git a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/dial/DialStore.scala b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/dial/DialStore.scala similarity index 96% rename from ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/dial/DialStore.scala rename to ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/dial/DialStore.scala index 5ccaa67c5..367705e5e 100644 --- 
a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/dial/DialStore.scala +++ b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/dial/DialStore.scala @@ -13,6 +13,7 @@ import java.sql.Timestamp import java.util import java.util.{Date, UUID} import scala.concurrent.{ExecutionContext, Future, Promise} +import scala.jdk.CollectionConverters._ // $COVERAGE-OFF$ Disabling scoverage class DialStore(keySpace: String, table: String, primaryKey: java.util.List[String]) extends CassandraStore(keySpace, table, primaryKey) { @@ -28,8 +29,7 @@ class DialStore(keySpace: String, table: String, primaryKey: java.util.List[Stri request.remove("last_updated_on") if (propsMapping.keySet.contains("last_updated_on")) insertQuery.value("last_updated_on", new Timestamp(new Date().getTime)) - import scala.collection.JavaConverters._ - for ((key, value) <- request.asScala) { + for ((key: String, value: AnyRef) <- request.asScala) { propsMapping.getOrElse(key, "") match { case "blob" => value match { case value: String => insertQuery.value(key, QueryBuilder.fcall("textAsBlob", value)) diff --git a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/dial/DialStoreFactory.scala b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/dial/DialStoreFactory.scala similarity index 100% rename from ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/dial/DialStoreFactory.scala rename to ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/dial/DialStoreFactory.scala diff --git a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/store/ExternalStore.scala b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/store/ExternalStore.scala similarity index 95% rename from ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/store/ExternalStore.scala rename to ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/store/ExternalStore.scala index 9bb8f75e1..961ebdb33 100644 --- a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/store/ExternalStore.scala +++ b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/store/ExternalStore.scala @@ -13,6 +13,7 @@ import java.sql.Timestamp import java.util import java.util.Date import scala.concurrent.{ExecutionContext, Future, Promise} +import scala.jdk.CollectionConverters._ class ExternalStore(keySpace: String , table: String , primaryKey: java.util.List[String]) extends CassandraStore(keySpace, table, primaryKey) { @@ -24,8 +25,7 @@ class ExternalStore(keySpace: String , table: String , primaryKey: java.util.Lis request.remove("last_updated_on") if(propsMapping.keySet.contains("last_updated_on")) insertQuery.value("last_updated_on", new Timestamp(new Date().getTime)) - import scala.collection.JavaConverters._ - for ((key, value) <- request.asScala) { + for ((key: String, value: AnyRef) <- request.asScala) { propsMapping.getOrElse(key, "") match { case "blob" => value match { case value: String => insertQuery.value(key, QueryBuilder.fcall("textAsBlob", value)) @@ -60,9 +60,8 @@ class ExternalStore(keySpace: String , table: String , primaryKey: java.util.Lis request.remove("last_updated_on") if (propsMapping.keySet.contains("last_updated_on")) insertQuery.value("last_updated_on", new Timestamp(new Date().getTime)) - import scala.collection.JavaConverters._ insertQuery.using(QueryBuilder.ttl(ttl)) - for ((key, value) <- 
request.asScala) { + for ((key: String, value: AnyRef) <- request.asScala) { propsMapping.getOrElse(key, "") match { case "blob" => value match { case value: String => insertQuery.value(key, QueryBuilder.fcall("textAsBlob", value)) @@ -117,7 +116,6 @@ class ExternalStore(keySpace: String , table: String , primaryKey: java.util.Lis val row = resultSet.one() val externalMetadataMap = extProps.map(prop => prop -> row.getObject(prop)).toMap val response = ResponseHandler.OK() - import scala.collection.JavaConverters._ response.putAll(externalMetadataMap.asJava) response } else { @@ -145,9 +143,7 @@ class ExternalStore(keySpace: String , table: String , primaryKey: java.util.Lis }) } val selectQuery = select.from(keySpace, table) - import scala.collection.JavaConverters._ - import scala.collection.convert.ImplicitConversions._ - val clause: Clause = QueryBuilder.in(primaryKey.get(0), seqAsJavaList(identifiers)) + val clause: Clause = QueryBuilder.in(primaryKey.get(0), identifiers.asJava) selectQuery.where.and(clause) try { val session: Session = CassandraConnector.getSession @@ -155,11 +151,10 @@ class ExternalStore(keySpace: String , table: String , primaryKey: java.util.Lis futureResult.asScala.map(resultSet => { if (resultSet.iterator().hasNext) { val response = ResponseHandler.OK() - resultSet.iterator().toStream.map(row => { - import scala.collection.JavaConverters._ + resultSet.iterator().asScala.foreach(row => { val externalMetadataMap = extProps.map(prop => prop -> row.getObject(prop)).toMap.asJava response.put(row.getString(primaryKey.get(0)), externalMetadataMap) - }).toList + }) response } else { TelemetryManager.error("Entry is not found in external-store for object with identifiers: " + identifiers) @@ -176,8 +171,7 @@ class ExternalStore(keySpace: String , table: String , primaryKey: java.util.Lis def delete(identifiers: List[String])(implicit ec: ExecutionContext): Future[Response] = { val delete = QueryBuilder.delete() - import scala.collection.JavaConverters._ - val deleteQuery = delete.from(keySpace, table).where(QueryBuilder.in(primaryKey.get(0), seqAsJavaList(identifiers))) + val deleteQuery = delete.from(keySpace, table).where(QueryBuilder.in(primaryKey.get(0), identifiers.asJava)) try { val session: Session = CassandraConnector.getSession session.executeAsync(deleteQuery).asScala.map(resultSet => { diff --git a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/store/ExternalStoreFactory.scala b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/store/ExternalStoreFactory.scala similarity index 100% rename from ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/external/store/ExternalStoreFactory.scala rename to ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/external/store/ExternalStoreFactory.scala diff --git a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/util/CSPMetaUtil.scala b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/util/CSPMetaUtil.scala similarity index 99% rename from ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/util/CSPMetaUtil.scala rename to ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/util/CSPMetaUtil.scala index c8c4b51d3..a4455ec7e 100644 --- a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/util/CSPMetaUtil.scala +++ b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/util/CSPMetaUtil.scala @@ -8,8 +8,8 @@ import org.slf4j.LoggerFactory import 
org.sunbird.common.dto.Property import org.sunbird.common.{JsonUtils, Platform} import org.sunbird.graph.dac.model.Node +import scala.jdk.CollectionConverters._ -import scala.collection.JavaConverters._ import scala.collection.immutable.Map object CSPMetaUtil { diff --git a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/util/ScalaJsonUtil.scala b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/util/ScalaJsonUtil.scala similarity index 100% rename from ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/util/ScalaJsonUtil.scala rename to ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/util/ScalaJsonUtil.scala diff --git a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/validator/NodeValidator.scala b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/validator/NodeValidator.scala similarity index 98% rename from ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/validator/NodeValidator.scala rename to ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/validator/NodeValidator.scala index 3d533f7dd..847fe0a83 100644 --- a/ontology-engine/graph-core_2.12/src/main/scala/org/sunbird/graph/validator/NodeValidator.scala +++ b/ontology-engine/graph-core_2.13/src/main/scala/org/sunbird/graph/validator/NodeValidator.scala @@ -9,9 +9,9 @@ import org.sunbird.graph.common.enums.SystemProperties import org.sunbird.graph.dac.model.{Filter, MetadataCriterion, Node, SearchConditions, SearchCriteria} import org.sunbird.graph.exception.GraphErrorCodes import org.sunbird.graph.service.operation.SearchAsyncOperations +import scala.jdk.CollectionConverters._ import scala.collection.convert.ImplicitConversions._ -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} object NodeValidator { diff --git a/ontology-engine/graph-core_2.12/src/test/resources/cassandra-unit.yaml b/ontology-engine/graph-core_2.13/src/test/resources/cassandra-unit.yaml similarity index 100% rename from ontology-engine/graph-core_2.12/src/test/resources/cassandra-unit.yaml rename to ontology-engine/graph-core_2.13/src/test/resources/cassandra-unit.yaml diff --git a/ontology-engine/graph-core_2.12/src/test/resources/logback.xml b/ontology-engine/graph-core_2.13/src/test/resources/logback.xml similarity index 100% rename from ontology-engine/graph-core_2.12/src/test/resources/logback.xml rename to ontology-engine/graph-core_2.13/src/test/resources/logback.xml diff --git a/ontology-engine/graph-core_2.12/src/test/scala/org/sunbird/graph/BaseSpec.scala b/ontology-engine/graph-core_2.13/src/test/scala/org/sunbird/graph/BaseSpec.scala similarity index 100% rename from ontology-engine/graph-core_2.12/src/test/scala/org/sunbird/graph/BaseSpec.scala rename to ontology-engine/graph-core_2.13/src/test/scala/org/sunbird/graph/BaseSpec.scala diff --git a/ontology-engine/graph-core_2.12/src/test/scala/org/sunbird/graph/external/ExternalPropsManagerTest.scala b/ontology-engine/graph-core_2.13/src/test/scala/org/sunbird/graph/external/ExternalPropsManagerTest.scala similarity index 100% rename from ontology-engine/graph-core_2.12/src/test/scala/org/sunbird/graph/external/ExternalPropsManagerTest.scala rename to ontology-engine/graph-core_2.13/src/test/scala/org/sunbird/graph/external/ExternalPropsManagerTest.scala diff --git a/ontology-engine/graph-core_2.12/src/test/scala/org/sunbird/graph/util/CSPMetaUtilTest.scala 
b/ontology-engine/graph-core_2.13/src/test/scala/org/sunbird/graph/util/CSPMetaUtilTest.scala similarity index 99% rename from ontology-engine/graph-core_2.12/src/test/scala/org/sunbird/graph/util/CSPMetaUtilTest.scala rename to ontology-engine/graph-core_2.13/src/test/scala/org/sunbird/graph/util/CSPMetaUtilTest.scala index 70c75dbbd..b534889cc 100644 --- a/ontology-engine/graph-core_2.12/src/test/scala/org/sunbird/graph/util/CSPMetaUtilTest.scala +++ b/ontology-engine/graph-core_2.13/src/test/scala/org/sunbird/graph/util/CSPMetaUtilTest.scala @@ -4,7 +4,8 @@ import org.sunbird.graph.BaseSpec import org.sunbird.graph.dac.model.Node import java.util -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ + class CSPMetaUtilTest extends BaseSpec { "saveExternalRelativePath" should "return map with relative Paths for question data" in { diff --git a/ontology-engine/graph-core_2.12/src/test/scala/org/sunbird/graph/util/ScalaJsonUtilTest.scala b/ontology-engine/graph-core_2.13/src/test/scala/org/sunbird/graph/util/ScalaJsonUtilTest.scala similarity index 100% rename from ontology-engine/graph-core_2.12/src/test/scala/org/sunbird/graph/util/ScalaJsonUtilTest.scala rename to ontology-engine/graph-core_2.13/src/test/scala/org/sunbird/graph/util/ScalaJsonUtilTest.scala diff --git a/ontology-engine/graph-dac-api/src/test/resources/application.conf b/ontology-engine/graph-dac-api/src/test/resources/application.conf index c2de3a014..ae7f7ded1 100644 --- a/ontology-engine/graph-dac-api/src/test/resources/application.conf +++ b/ontology-engine/graph-dac-api/src/test/resources/application.conf @@ -1,6 +1,6 @@ # Configuration graph.dir=/data/testingGraphDB -akka.request_timeout=30 +pekko.request_timeout=30 environment.id=10000000 graph.ids=["domain"] graph.passport.key.base=31b6fd1c4d64e745c867e61a45edc34a diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/FrameworkMasterCategoryMap.scala b/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/FrameworkMasterCategoryMap.scala deleted file mode 100644 index f0b3f9ae9..000000000 --- a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/FrameworkMasterCategoryMap.scala +++ /dev/null @@ -1,24 +0,0 @@ -package org.sunbird.graph.schema - -import com.twitter.storehaus.cache.Cache -import com.twitter.util.Duration -import org.sunbird.common.Platform - -object FrameworkMasterCategoryMap { - - val ttlMS = Platform.getLong("master.category.cache.ttl", 10000l) - var cache = Cache.ttl[String, Map[String, AnyRef]](Duration.fromMilliseconds(ttlMS)) - - def get(id: String):Map[String, AnyRef] = { - cache.getNonExpired(id).getOrElse(null) - } - - def put(id: String, data: Map[String, AnyRef]): Unit = { - val updated = cache.putClocked(id, data)._2 - cache = updated - } - - def containsKey(id: String): Boolean = { - cache.contains(id) - } -} diff --git a/ontology-engine/graph-engine_2.12/src/test/resources/application.conf b/ontology-engine/graph-engine_2.12/src/test/resources/application.conf deleted file mode 100644 index 8446d3dd6..000000000 --- a/ontology-engine/graph-engine_2.12/src/test/resources/application.conf +++ /dev/null @@ -1,488 +0,0 @@ -# This is the main configuration file for the application. -# https://www.playframework.com/documentation/latest/ConfigFile -# ~~~~~ -# Play uses HOCON as its configuration file format. HOCON has a number -# of advantages over other config formats, but there are two things that -# can be used when modifying settings. 
-# -# You can include other configuration files in this main application.conf file: -#include "extra-config.conf" -# -# You can declare variables and substitute for them: -#mykey = ${some.value} -# -# And if an environment variable exists when there is no other substitution, then -# HOCON will fall back to substituting environment variable: -#mykey = ${JAVA_HOME} - -## Akka -# https://www.playframework.com/documentation/latest/ScalaAkka#Configuration -# https://www.playframework.com/documentation/latest/JavaAkka#Configuration -# ~~~~~ -# Play uses Akka internally and exposes Akka Streams and actors in Websockets and -# other streaming HTTP responses. -akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete - # configuration at INFO level, including defaults and overrides, so it s worth - # putting at the very top. - # - # Put the following in your conf/logback.xml file: - # - # - # - # And then uncomment this line to debug the configuration. - # - #log-config-on-start = true -} - -## Secret key -# http://www.playframework.com/documentation/latest/ApplicationSecret -# ~~~~~ -# The secret key is used to sign Play's session cookie. -# This must be changed for production, but we don't recommend you change it in this file. -play.http.secret.key = a-long-secret-to-calm-the-rage-of-the-entropy-gods - -## Modules -# https://www.playframework.com/documentation/latest/Modules -# ~~~~~ -# Control which modules are loaded when Play starts. Note that modules are -# the replacement for "GlobalSettings", which are deprecated in 2.5.x. -# Please see https://www.playframework.com/documentation/latest/GlobalSettings -# for more information. -# -# You can also extend Play functionality by using one of the publically available -# Play modules: https://playframework.com/documentation/latest/ModuleDirectory -play.modules { - # By default, Play will load any class called Module that is defined - # in the root package (the "app" directory), or you can define them - # explicitly below. - # If there are any built-in modules that you want to enable, you can list them here. - #enabled += my.application.Module - - # If there are any built-in modules that you want to disable, you can list them here. - #disabled += "" -} - -## IDE -# https://www.playframework.com/documentation/latest/IDE -# ~~~~~ -# Depending on your IDE, you can add a hyperlink for errors that will jump you -# directly to the code location in the IDE in dev mode. The following line makes -# use of the IntelliJ IDEA REST interface: -#play.editor="http://localhost:63342/api/file/?file=%s&line=%s" - -## Internationalisation -# https://www.playframework.com/documentation/latest/JavaI18N -# https://www.playframework.com/documentation/latest/ScalaI18N -# ~~~~~ -# Play comes with its own i18n settings, which allow the user's preferred language -# to map through to internal messages, or allow the language to be stored in a cookie. -play.i18n { - # The application languages - langs = [ "en" ] - - # Whether the language cookie should be secure or not - #langCookieSecure = true - - # Whether the HTTP only attribute of the cookie should be set to true - #langCookieHttpOnly = true -} - -## Play HTTP settings -# ~~~~~ -play.http { - ## Router - # https://www.playframework.com/documentation/latest/JavaRouting - # https://www.playframework.com/documentation/latest/ScalaRouting - # ~~~~~ - # Define the Router object to use for this application. 
- # This router will be looked up first when the application is starting up, - # so make sure this is the entry point. - # Furthermore, it's assumed your route file is named properly. - # So for an application router like `my.application.Router`, - # you may need to define a router file `conf/my.application.routes`. - # Default to Routes in the root package (aka "apps" folder) (and conf/routes) - #router = my.application.Router - - ## Action Creator - # https://www.playframework.com/documentation/latest/JavaActionCreator - # ~~~~~ - #actionCreator = null - - ## ErrorHandler - # https://www.playframework.com/documentation/latest/JavaRouting - # https://www.playframework.com/documentation/latest/ScalaRouting - # ~~~~~ - # If null, will attempt to load a class called ErrorHandler in the root package, - #errorHandler = null - - ## Session & Flash - # https://www.playframework.com/documentation/latest/JavaSessionFlash - # https://www.playframework.com/documentation/latest/ScalaSessionFlash - # ~~~~~ - session { - # Sets the cookie to be sent only over HTTPS. - #secure = true - - # Sets the cookie to be accessed only by the server. - #httpOnly = true - - # Sets the max-age field of the cookie to 5 minutes. - # NOTE: this only sets when the browser will discard the cookie. Play will consider any - # cookie value with a valid signature to be a valid session forever. To implement a server side session timeout, - # you need to put a timestamp in the session and check it at regular intervals to possibly expire it. - #maxAge = 300 - - # Sets the domain on the session cookie. - #domain = "example.com" - } - - flash { - # Sets the cookie to be sent only over HTTPS. - #secure = true - - # Sets the cookie to be accessed only by the server. - #httpOnly = true - } -} - -## Netty Provider -# https://www.playframework.com/documentation/latest/SettingsNetty -# ~~~~~ -play.server.netty { - # Whether the Netty wire should be logged - log.wire = true - - # If you run Play on Linux, you can use Netty's native socket transport - # for higher performance with less garbage. - transport = "native" -} - -## WS (HTTP Client) -# https://www.playframework.com/documentation/latest/ScalaWS#Configuring-WS -# ~~~~~ -# The HTTP client primarily used for REST APIs. The default client can be -# configured directly, but you can also create different client instances -# with customized settings. You must enable this by adding to build.sbt: -# -# libraryDependencies += ws // or javaWs if using java -# -play.ws { - # Sets HTTP requests not to follow 302 requests - #followRedirects = false - - # Sets the maximum number of open HTTP connections for the client. - #ahc.maxConnectionsTotal = 50 - - ## WS SSL - # https://www.playframework.com/documentation/latest/WsSSL - # ~~~~~ - ssl { - # Configuring HTTPS with Play WS does not require programming. You can - # set up both trustManager and keyManager for mutual authentication, and - # turn on JSSE debugging in development with a reload. - #debug.handshake = true - #trustManager = { - # stores = [ - # { type = "JKS", path = "exampletrust.jks" } - # ] - #} - } -} - -## Cache -# https://www.playframework.com/documentation/latest/JavaCache -# https://www.playframework.com/documentation/latest/ScalaCache -# ~~~~~ -# Play comes with an integrated cache API that can reduce the operational -# overhead of repeated requests. 
You must enable this by adding to build.sbt: -# -# libraryDependencies += cache -# -play.cache { - # If you want to bind several caches, you can bind the individually - #bindCaches = ["db-cache", "user-cache", "session-cache"] -} - -## Filter Configuration -# https://www.playframework.com/documentation/latest/Filters -# ~~~~~ -# There are a number of built-in filters that can be enabled and configured -# to give Play greater security. -# -play.filters { - - # Enabled filters are run automatically against Play. - # CSRFFilter, AllowedHostFilters, and SecurityHeadersFilters are enabled by default. - enabled = [] - - # Disabled filters remove elements from the enabled list. - # disabled += filters.CSRFFilter - - - ## CORS filter configuration - # https://www.playframework.com/documentation/latest/CorsFilter - # ~~~~~ - # CORS is a protocol that allows web applications to make requests from the browser - # across different domains. - # NOTE: You MUST apply the CORS configuration before the CSRF filter, as CSRF has - # dependencies on CORS settings. - cors { - # Filter paths by a whitelist of path prefixes - #pathPrefixes = ["/some/path", ...] - - # The allowed origins. If null, all origins are allowed. - #allowedOrigins = ["http://www.example.com"] - - # The allowed HTTP methods. If null, all methods are allowed - #allowedHttpMethods = ["GET", "POST"] - } - - ## Security headers filter configuration - # https://www.playframework.com/documentation/latest/SecurityHeaders - # ~~~~~ - # Defines security headers that prevent XSS attacks. - # If enabled, then all options are set to the below configuration by default: - headers { - # The X-Frame-Options header. If null, the header is not set. - #frameOptions = "DENY" - - # The X-XSS-Protection header. If null, the header is not set. - #xssProtection = "1; mode=block" - - # The X-Content-Type-Options header. If null, the header is not set. - #contentTypeOptions = "nosniff" - - # The X-Permitted-Cross-Domain-Policies header. If null, the header is not set. - #permittedCrossDomainPolicies = "master-only" - - # The Content-Security-Policy header. If null, the header is not set. - #contentSecurityPolicy = "default-src 'self'" - } - - ## Allowed hosts filter configuration - # https://www.playframework.com/documentation/latest/AllowedHostsFilter - # ~~~~~ - # Play provides a filter that lets you configure which hosts can access your application. - # This is useful to prevent cache poisoning attacks. - hosts { - # Allow requests to example.com, its subdomains, and localhost:9000. - #allowed = [".example.com", "localhost:9000"] - } -} - -# Learning-Service Configuration -content.metadata.visibility.parent=["textbookunit", "courseunit", "lessonplanunit"] - -# Cassandra Configuration -content.keyspace.name=content_store -content.keyspace.table=content_data -#TODO: Add Configuration for assessment. 
e.g: question_data -orchestrator.keyspace.name=script_store -orchestrator.keyspace.table=script_data -cassandra.lp.connection="127.0.0.1:9042,127.0.0.2:9042,127.0.0.3:9042" -cassandra.lpa.connection="127.0.0.1:9042,127.0.0.2:9042,127.0.0.3:9042" - -# Redis Configuration -redis.host=localhost -redis.port=6379 -redis.maxConnections=128 - -#Condition to enable publish locally -content.publish_task.enabled=true - -#directory location where store unzip file -dist.directory=/data/tmp/dist/ -output.zipfile=/data/tmp/story.zip -source.folder=/data/tmp/temp2/ -save.directory=/data/tmp/temp/ - -# Content 2 vec analytics URL -CONTENT_TO_VEC_URL="http://172.31.27.233:9000/content-to-vec" - -# FOR CONTENT WORKFLOW PIPELINE (CWP) - -#--Content Workflow Pipeline Mode -OPERATION_MODE=TEST - -#--Maximum Content Package File Size Limit in Bytes (50 MB) -MAX_CONTENT_PACKAGE_FILE_SIZE_LIMIT=52428800 - -#--Maximum Asset File Size Limit in Bytes (20 MB) -MAX_ASSET_FILE_SIZE_LIMIT=20971520 - -#--No of Retry While File Download Fails -RETRY_ASSET_DOWNLOAD_COUNT=1 - -#Google-vision-API -google.vision.tagging.enabled = false - -#Orchestrator env properties -env="https://dev.ekstep.in/api/learning" - -#Current environment -cloud_storage.env=dev - - -#Folder configuration -cloud_storage.content.folder=content -cloud_storage.asset.folder=assets -cloud_storage.artefact.folder=artifact -cloud_storage.bundle.folder=bundle -cloud_storage.media.folder=media -cloud_storage.ecar.folder=ecar_files - -# Media download configuration -content.media.base.url="https://dev.open-sunbird.org" -plugin.media.base.url="https://dev.open-sunbird.org" - -# Configuration -graph.dir=/data/testingGraphDB -akka.request_timeout=30 -environment.id=10000000 -graph.ids=["domain"] -graph.passport.key.base=31b6fd1c4d64e745c867e61a45edc34a -route.domain="bolt://localhost:7687" -route.bolt.write.domain="bolt://localhost:7687" -route.bolt.read.domain="bolt://localhost:7687" -route.bolt.comment.domain="bolt://localhost:7687" -route.all="bolt://localhost:7687" -route.bolt.write.all="bolt://localhost:7687" -route.bolt.read.all="bolt://localhost:7687" -route.bolt.comment.all="bolt://localhost:7687" - -shard.id=1 -platform.auth.check.enabled=false -platform.cache.ttl=3600000 - -# Elasticsearch properties -search.es_conn_info="localhost:9200" -search.fields.query=["name^100","title^100","lemma^100","code^100","tags^100","domain","subject","description^10","keywords^25","ageGroup^10","filter^10","theme^10","genre^10","objects^25","contentType^100","language^200","teachingMode^25","skills^10","learningObjective^10","curriculum^100","gradeLevel^100","developer^100","attributions^10","owner^50","text","words","releaseNotes"] -search.fields.date=["lastUpdatedOn","createdOn","versionDate","lastSubmittedOn","lastPublishedOn"] -search.batch.size=500 -search.connection.timeout=30 -platform-api-url="http://localhost:8080/language-service" -MAX_ITERATION_COUNT_FOR_SAMZA_JOB=2 - - -# DIAL Code Configuration -dialcode.keyspace.name="dialcode_store" -dialcode.keyspace.table="dial_code" -dialcode.max_count=1000 - -# System Configuration -system.config.keyspace.name="dialcode_store" -system.config.table="system_config" - -#Publisher Configuration -publisher.keyspace.name="dialcode_store" -publisher.keyspace.table="publisher" - -#DIAL Code Generator Configuration -dialcode.strip.chars="0" -dialcode.length=6.0 -dialcode.large.prime_number=1679979167 - -#DIAL Code ElasticSearch Configuration -dialcode.index=true -dialcode.object_type="DialCode" - 
-framework.max_term_creation_limit=200 - -# Enable Suggested Framework in Get Channel API. -channel.fetch.suggested_frameworks=true - -# Kafka configuration details -kafka.topics.instruction="local.learning.job.request" -kafka.urls="localhost:9092" - -#Youtube Standard Licence Validation -learning.content.youtube.validate.license=true -learning.content.youtube.application.name=fetch-youtube-license -youtube.license.regex.pattern=["\\?vi?=([^&]*)", "watch\\?.*v=([^&]*)", "(?:embed|vi?)/([^/?]*)","^([A-Za-z0-9\\-\\_]*)"] - -#Top N Config for Search Telemetry -telemetry_env=dev -telemetry.search.topn=5 - -installation.id=ekstep - - -channel.default="in.ekstep" - -# DialCode Link API Config -learning.content.link_dialcode_validation=true -dialcode.api.search.url="http://localhost:8080/learning-service/v3/dialcode/search" -dialcode.api.authorization=auth_key - -# Language-Code Configuration -platform.language.codes=["as","bn","en","gu","hi","hoc","jun","ka","mai","mr","unx","or","san","sat","ta","te","urd", "pj"] - -# Kafka send event to topic enable -kafka.topic.send.enable=false - -learning.valid_license=["creativeCommon"] -learning.service_provider=["youtube"] - -stream.mime.type=video/mp4 -compositesearch.index.name="compositesearch" - -hierarchy.keyspace.name=hierarchy_store -content.hierarchy.table=content_hierarchy -framework.hierarchy.table=framework_hierarchy - -# Kafka topic for definition update event. -kafka.topic.system.command="dev.system.command" - -learning.reserve_dialcode.content_type=["TextBook"] -# restrict.metadata.objectTypes=["Content", "ContentImage", "AssessmentItem", "Channel", "Framework", "Category", "CategoryInstance", "Term"] - -#restrict.metadata.objectTypes="Content,ContentImage" - -publish.collection.fullecar.disable=true - -# Consistency Level for Multi Node Cassandra cluster -cassandra.lp.consistency.level=QUORUM - - - - -content.nested.fields="badgeAssertions,targets,badgeAssociations" - -content.cache.ttl=86400 -content.cache.enable=true -collection.cache.enable=true -content.discard.status=["Draft","FlagDraft"] - -framework.categories_cached=["subject", "medium", "gradeLevel", "board"] -framework.cache.ttl=86400 -framework.cache.read=true - - -# Max size(width/height) of thumbnail in pixels -max.thumbnail.size.pixels=150 - -play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB -schema.base_path="../../schemas/" -//schema.base_path = "https://sunbirddev.blob.core.windows.net/sunbird-content-dev/schemas/" - -collection.image.migration.enabled=true -collection.keyspace = "hierarchy_store" -content.keyspace = "content_store" - -languageCode { - assamese : "as" - bengali : "bn" - english : "en" - gujarati : "gu" - hindi : "hi" - kannada : "ka" - marathi : "mr" - odia : "or" - tamil : "ta" - telugu : "te" -} - -platform.language.codes=["as","bn","en","gu","hi","hoc","jun","ka","mai","mr","unx","or","san","sat","ta","te","urd"] -objectcategorydefinition.keyspace=category_store diff --git a/ontology-engine/graph-engine_2.12/pom.xml b/ontology-engine/graph-engine_2.13/pom.xml similarity index 95% rename from ontology-engine/graph-engine_2.12/pom.xml rename to ontology-engine/graph-engine_2.13/pom.xml index 640531761..0333b8240 100644 --- a/ontology-engine/graph-engine_2.12/pom.xml +++ b/ontology-engine/graph-engine_2.13/pom.xml @@ -9,12 +9,12 @@ 4.0.0 - graph-engine_2.12 + graph-engine_2.13 org.sunbird - graph-core_2.12 + graph-core_2.13 1.0-SNAPSHOT @@ -33,9 +33,9 @@ 1.0-SNAPSHOT - com.twitter - storehaus-cache_${scala.maj.version} - 
0.15.0 + com.github.ben-manes.caffeine + caffeine + 3.1.8 org.scalatest @@ -125,7 +125,7 @@ com.dimafeng - testcontainers-scala_2.12 + testcontainers-scala_2.13 0.39.5 test diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/engine/CaseClasses.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/engine/CaseClasses.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/engine/CaseClasses.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/engine/CaseClasses.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/external/store/ExternalStore.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/external/store/ExternalStore.scala similarity index 94% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/external/store/ExternalStore.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/external/store/ExternalStore.scala index 65b53264e..bb1b16b62 100644 --- a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/external/store/ExternalStore.scala +++ b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/external/store/ExternalStore.scala @@ -15,6 +15,7 @@ import org.sunbird.common.exception.{ErrorCodes, ResponseCode, ServerException} import org.sunbird.telemetry.logger.TelemetryManager import scala.concurrent.{ExecutionContext, Future, Promise} +import scala.jdk.CollectionConverters._ class ExternalStore(keySpace: String , table: String , primaryKey: java.util.List[String]) extends CassandraStore(keySpace, table, primaryKey) { @@ -26,8 +27,7 @@ class ExternalStore(keySpace: String , table: String , primaryKey: java.util.Lis request.remove("last_updated_on") if(propsMapping.keySet.contains("last_updated_on")) insertQuery.value("last_updated_on", new Timestamp(new Date().getTime)) - import scala.collection.JavaConverters._ - for ((key, value) <- request.asScala) { + for ((key: String, value: AnyRef) <- request.asScala) { propsMapping.getOrElse(key, "") match { case "blob" => value match { case value: String => insertQuery.value(key, QueryBuilder.fcall("textAsBlob", value)) @@ -83,7 +83,6 @@ class ExternalStore(keySpace: String , table: String , primaryKey: java.util.Lis val row = resultSet.one() val externalMetadataMap = extProps.map(prop => prop -> row.getObject(prop)).toMap val response = ResponseHandler.OK() - import scala.collection.JavaConverters._ response.putAll(externalMetadataMap.asJava) response } else { @@ -100,8 +99,7 @@ class ExternalStore(keySpace: String , table: String , primaryKey: java.util.Lis } def delete(identifiers: List[String])(implicit ec: ExecutionContext): Future[Response] = { val delete = QueryBuilder.delete() - import scala.collection.JavaConverters._ - val deleteQuery = delete.from(keySpace, table).where(QueryBuilder.in(primaryKey.get(0), seqAsJavaList(identifiers))) + val deleteQuery = delete.from(keySpace, table).where(QueryBuilder.in(primaryKey.get(0), identifiers.asJava)) try { val session: Session = CassandraConnector.getSession session.executeAsync(deleteQuery).asScala.map(resultSet => { @@ -128,9 +126,7 @@ class ExternalStore(keySpace: String , table: String , primaryKey: java.util.Lis }) } val selectQuery = select.from(keySpace, table) - import scala.collection.JavaConverters._ - import scala.collection.convert.ImplicitConversions._ - val clause: Clause = QueryBuilder.in(primaryKey.get(0), 
seqAsJavaList(identifiers)) + val clause: Clause = QueryBuilder.in(primaryKey.get(0), identifiers.asJava) selectQuery.where.and(clause) try { val session: Session = CassandraConnector.getSession @@ -138,11 +134,10 @@ class ExternalStore(keySpace: String , table: String , primaryKey: java.util.Lis futureResult.asScala.map(resultSet => { if (resultSet.iterator().hasNext) { val response = ResponseHandler.OK() - resultSet.iterator().toStream.map(row => { - import scala.collection.JavaConverters._ + resultSet.iterator().asScala.foreach(row => { val externalMetadataMap = extProps.map(prop => prop -> row.getObject(prop)).toMap.asJava response.put(row.getString(primaryKey.get(0)), externalMetadataMap) - }).toList + }) response } else { TelemetryManager.error("Entry is not found in external-store for object with identifiers: " + identifiers) diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/health/HealthCheckManager.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/health/HealthCheckManager.scala similarity index 94% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/health/HealthCheckManager.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/health/HealthCheckManager.scala index ec0943ec7..1249bec6a 100644 --- a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/health/HealthCheckManager.scala +++ b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/health/HealthCheckManager.scala @@ -9,9 +9,8 @@ import org.sunbird.common.dto.{Request, Response, ResponseHandler} import org.sunbird.graph.OntologyEngineContext import org.sunbird.graph.service.operation.NodeAsyncOperations -import scala.collection.JavaConverters -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} +import scala.jdk.CollectionConverters._ object HealthCheckManager extends CassandraConnector with RedisConnector { val CONNECTION_SUCCESS: String = "connection check is Successful" @@ -28,7 +27,7 @@ object HealthCheckManager extends CassandraConnector with RedisConnector { val allChecks: List[Map[String, Any]] = redisHealth ++ graphHealth ++ (if (cassandraEnabled) List(checkCassandraHealth()) else List()) val overAllHealth = allChecks.map(check => check.getOrElse("healthy",false).asInstanceOf[Boolean]).foldLeft(true)(_ && _) val response = ResponseHandler.OK() - response.put("checks", allChecks.map(m => JavaConverters.mapAsJavaMapConverter(m).asJava).asJava) + response.put("checks", allChecks.map(m => m.asJava).asJava) response.put("healthy", overAllHealth) Future(response) } diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/nodes/DataNode.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/nodes/DataNode.scala similarity index 99% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/nodes/DataNode.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/nodes/DataNode.scala index 1cd022a11..f9430e162 100644 --- a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/nodes/DataNode.scala +++ b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/nodes/DataNode.scala @@ -9,13 +9,13 @@ import org.sunbird.common.DateUtils import org.sunbird.common.dto.{Request, Response} import org.sunbird.common.exception.{ClientException, ErrorCodes, ResponseCode} import org.sunbird.graph.OntologyEngineContext +import 
scala.jdk.CollectionConverters._ import org.sunbird.graph.common.enums.SystemProperties import org.sunbird.graph.dac.model.{Filter, MetadataCriterion, Node, Relation, SearchConditions, SearchCriteria} import org.sunbird.graph.schema.{DefinitionDTO, DefinitionFactory, DefinitionNode} import org.sunbird.parseq.Task import scala.collection.convert.ImplicitConversions._ -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/path/DataSubGraph.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/path/DataSubGraph.scala similarity index 97% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/path/DataSubGraph.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/path/DataSubGraph.scala index e7605e4e4..192390e46 100644 --- a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/path/DataSubGraph.scala +++ b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/path/DataSubGraph.scala @@ -9,13 +9,13 @@ import org.sunbird.graph.dac.model.{Node, Relation, SubGraph} import org.sunbird.graph.nodes.DataNode import org.sunbird.graph.schema.{DefinitionFactory, DefinitionNode, ObjectCategoryDefinition} import org.sunbird.graph.utils.NodeUtil +import scala.jdk.CollectionConverters._ import org.sunbird.graph.utils.NodeUtil.{convertJsonProperties, handleKeyNames} import java.util import java.util.{ArrayList, HashMap, Map} -import scala.collection.JavaConversions.mapAsScalaMap -import scala.collection.JavaConverters._ -import scala.concurrent.{ExecutionContext, Future, future} +import scala.jdk.CollectionConverters._ +import scala.concurrent.{ExecutionContext, Future} import scala.util.Try object DataSubGraph { @@ -58,7 +58,7 @@ object DataSubGraph { finalDataMap.put(n.getIdentifier, newDataMap) finalDataMap }) - finalDataMap.map(entry => { + finalDataMap.asScala.map(entry => { val mapData = entry._2.asInstanceOf[java.util.Map[String, AnyRef]].asScala println("mapData " + mapData.toString()) val outRelations: util.List[Relation] = mapData.getOrElse("outRelations", new util.ArrayList[Relation]).asInstanceOf[util.List[Relation]] diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/relations/AbstractRelation.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/relations/AbstractRelation.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/relations/AbstractRelation.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/relations/AbstractRelation.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/relations/AssociationRelation.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/relations/AssociationRelation.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/relations/AssociationRelation.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/relations/AssociationRelation.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/relations/IRelation.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/relations/IRelation.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/relations/IRelation.scala rename to 
ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/relations/IRelation.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/relations/RelationHandler.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/relations/RelationHandler.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/relations/RelationHandler.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/relations/RelationHandler.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/relations/SequenceMembershipRelation.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/relations/SequenceMembershipRelation.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/relations/SequenceMembershipRelation.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/relations/SequenceMembershipRelation.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/CategoryDefinitionValidator.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/CategoryDefinitionValidator.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/CategoryDefinitionValidator.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/CategoryDefinitionValidator.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/CoreDomainObject.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/CoreDomainObject.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/CoreDomainObject.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/CoreDomainObject.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/DefinitionDTO.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/DefinitionDTO.scala similarity index 97% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/DefinitionDTO.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/DefinitionDTO.scala index 70413b352..6443a46ab 100644 --- a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/DefinitionDTO.scala +++ b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/DefinitionDTO.scala @@ -9,10 +9,11 @@ import org.sunbird.graph.OntologyEngineContext import org.sunbird.graph.common.Identifier import org.sunbird.graph.dac.enums.SystemNodeTypes import org.sunbird.graph.dac.model.Node +import scala.jdk.CollectionConverters._ import org.sunbird.graph.schema.validator._ -import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext +import scala.jdk.CollectionConverters._ class DefinitionDTO(graphId: String, schemaName: String, version: String = "1.0", ocd: ObjectCategoryDefinition = ObjectCategoryDefinition())(implicit ec: ExecutionContext, oec: OntologyEngineContext) extends BaseDefinitionNode(graphId, schemaName, version, ocd) with VersionKeyValidator with VersioningNode with RelationValidator with FrameworkValidator with PropAsEdgeValidator with SchemaValidator { @@ -35,7 +36,7 @@ class DefinitionDTO(graphId: String, schemaName: String, version: String 
= "1.0" def getExternalProps(): List[String] = { if (schemaValidator.getConfig.hasPath("external.properties")) { val propsSet = Set.empty ++ schemaValidator.getConfig.getObject("external.properties").keySet().asScala - (for (prop <- propsSet) yield prop) (collection.breakOut) + (for (prop <- propsSet) yield prop).toList } else List() @@ -98,7 +99,7 @@ class DefinitionDTO(graphId: String, schemaName: String, version: String = "1.0" def getAllCopySchemes(): List[String] = schemaValidator.getConfig.hasPath("copy.scheme") match { case true => val copySchemeSet = Set.empty ++ schemaValidator.getConfig.getObject("copy.scheme").keySet().asScala - (for (prop <- copySchemeSet) yield prop) (collection.breakOut) + (for (prop <- copySchemeSet) yield prop).toList case false => List() } diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/DefinitionFactory.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/DefinitionFactory.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/DefinitionFactory.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/DefinitionFactory.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/DefinitionNode.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/DefinitionNode.scala similarity index 95% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/DefinitionNode.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/DefinitionNode.scala index 7471d5635..a7f8946ad 100644 --- a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/DefinitionNode.scala +++ b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/DefinitionNode.scala @@ -9,11 +9,11 @@ import org.apache.commons.lang3.StringUtils import org.sunbird.cache.impl.RedisCache import org.sunbird.common.JsonUtils import org.sunbird.common.dto.Request +import scala.jdk.CollectionConverters._ import org.sunbird.graph.OntologyEngineContext import org.sunbird.graph.dac.model.{Node, Relation} -import scala.collection.JavaConversions._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} object DefinitionNode { @@ -111,10 +111,10 @@ object DefinitionNode { else dbNode.setExternalData(inputNode.getExternalData) } - if (!removeProps.isEmpty) removeProps.toList.foreach(prop => dbNode.getMetadata.remove(prop)) + if (!removeProps.isEmpty) removeProps.asScala.foreach(prop => dbNode.getMetadata.remove(prop)) val validatedNode = if (!skipValidation) categoryDefinition.validate(dbNode, "update") else Future(dbNode) validatedNode.map(node => { - if (!removeProps.isEmpty) removeProps.toList.foreach(prop => dbNode.getMetadata.put(prop, null)) + if (!removeProps.isEmpty) removeProps.asScala.foreach(prop => dbNode.getMetadata.put(prop, null)) node }) @@ -177,13 +177,13 @@ object DefinitionNode { private def getNewRelationsList(dbRelations: util.List[Relation], newRelations: util.List[Relation], addRels: util.List[Relation], delRels: util.List[Relation]): Unit = { val relList = new util.ArrayList[String] - for (rel <- newRelations) { + for (rel <- newRelations.asScala) { addRels.add(rel) val relKey = rel.getStartNodeId + rel.getRelationType + rel.getEndNodeId if (!relList.contains(relKey)) relList.add(relKey) } if (null != 
dbRelations && !dbRelations.isEmpty) { - for (rel <- dbRelations) { + for (rel <- dbRelations.asScala) { val relKey = rel.getStartNodeId + rel.getRelationType + rel.getEndNodeId if (!relList.contains(relKey)) delRels.add(rel) } @@ -193,7 +193,7 @@ object DefinitionNode { def updateRelationMetadata(node: Node): Unit = { var relOcr = new util.HashMap[String, Integer]() val rels = node.getAddedRelations - for (rel <- rels) { + for (rel <- rels.asScala) { val relKey = rel.getStartNodeObjectType + rel.getRelationType + rel.getEndNodeObjectType if (relOcr.containsKey(relKey)) relOcr.put(relKey, relOcr.get(relKey) + 1) @@ -209,7 +209,7 @@ object DefinitionNode { def resetJsonProperties(node: Node, graphId: String, version: String, schemaName: String, ocd: ObjectCategoryDefinition = ObjectCategoryDefinition())(implicit ec: ExecutionContext, oec: OntologyEngineContext): Node = { val jsonPropList = fetchJsonProps(graphId, version, schemaName, ocd) if (!jsonPropList.isEmpty) { - node.getMetadata.entrySet().map(entry => { + node.getMetadata.entrySet().asScala.map(entry => { if (jsonPropList.contains(entry.getKey)) { entry.getValue match { case value: String => entry.setValue(JsonUtils.deserialize(value.asInstanceOf[String], classOf[Object])) @@ -227,9 +227,9 @@ object DefinitionNode { val relDefMap = getRelationDefinitionMap(graphId, version, schemaName, ocd); if (null != dbNode) { if (CollectionUtils.isNotEmpty(dbNode.getInRelations)) { - for (inRel <- dbNode.getInRelations()) { + for (inRel <- dbNode.getInRelations().asScala) { val key = inRel.getRelationType() + "_in_" + inRel.getStartNodeObjectType() - if (relDefMap.containsKey(key)) { + if (relDefMap.contains(key)) { val value = relDefMap.get(key).get if (!request.containsKey(value)) { inRelations.add(inRel) @@ -238,9 +238,9 @@ object DefinitionNode { } } if (CollectionUtils.isNotEmpty(dbNode.getOutRelations)) { - for (outRel <- dbNode.getOutRelations()) { + for (outRel <- dbNode.getOutRelations().asScala) { val key = outRel.getRelationType() + "_out_" + outRel.getEndNodeObjectType() - if (relDefMap.containsKey(key)) { + if (relDefMap.contains(key)) { val value = relDefMap.get(key).get if (!request.containsKey(value)) { outRelations.add(outRel) @@ -271,7 +271,7 @@ object DefinitionNode { val schema = node.getObjectType.toLowerCase.replace("image", "") val jsonProps = fetchJsonProps(graphId, version, schema) val metadata = node.getMetadata - metadata.filter(entry => jsonProps.contains(entry._1)).map(entry => node.getMetadata.put(entry._1, convertJsonProperties(entry, jsonProps))) + metadata.asScala.filter(entry => jsonProps.contains(entry._1)).map(entry => node.getMetadata.put(entry._1, convertJsonProperties(entry, jsonProps))) }) } diff --git a/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/FrameworkMasterCategoryMap.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/FrameworkMasterCategoryMap.scala new file mode 100644 index 000000000..d7bb02fd7 --- /dev/null +++ b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/FrameworkMasterCategoryMap.scala @@ -0,0 +1,25 @@ +package org.sunbird.graph.schema + +import com.github.benmanes.caffeine.cache.{Cache, Caffeine} +import org.sunbird.common.Platform +import java.util.concurrent.TimeUnit + +object FrameworkMasterCategoryMap { + + val ttlMS = Platform.getLong("master.category.cache.ttl", 10000l) + val cache: Cache[String, Map[String, AnyRef]] = Caffeine.newBuilder() + .expireAfterWrite(ttlMS, TimeUnit.MILLISECONDS) + 
.build[String, Map[String, AnyRef]]() + + def get(id: String): Map[String, AnyRef] = { + Option(cache.getIfPresent(id)).orNull + } + + def put(id: String, data: Map[String, AnyRef]): Unit = { + cache.put(id, data) + } + + def containsKey(id: String): Boolean = { + cache.getIfPresent(id) != null + } +} diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/IDefinition.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/IDefinition.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/IDefinition.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/IDefinition.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/ObjectCategoryDefinition.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/ObjectCategoryDefinition.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/ObjectCategoryDefinition.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/ObjectCategoryDefinition.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/ObjectCategoryDefinitionMap.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/ObjectCategoryDefinitionMap.scala similarity index 55% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/ObjectCategoryDefinitionMap.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/ObjectCategoryDefinitionMap.scala index 91cae048c..78c5303a8 100644 --- a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/ObjectCategoryDefinitionMap.scala +++ b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/ObjectCategoryDefinitionMap.scala @@ -1,25 +1,27 @@ package org.sunbird.graph.schema -import com.twitter.storehaus.cache.Cache -import com.twitter.util.Duration +import com.github.benmanes.caffeine.cache.{Cache, Caffeine} import org.sunbird.common.{Platform, Slug} +import java.util.concurrent.TimeUnit +import scala.jdk.CollectionConverters._ object ObjectCategoryDefinitionMap { val ttlMS = Platform.getLong("object.categoryDefinition.cache.ttl", 10000l) - var cache = Cache.ttl[String, Map[String, AnyRef]](Duration.fromMilliseconds(ttlMS)) + val cache: Cache[String, Map[String, AnyRef]] = Caffeine.newBuilder() + .expireAfterWrite(ttlMS, TimeUnit.MILLISECONDS) + .build[String, Map[String, AnyRef]]() - def get(id: String):Map[String, AnyRef] = { - cache.getNonExpired(id).getOrElse(null) + def get(id: String): Map[String, AnyRef] = { + Option(cache.getIfPresent(id)).orNull } def put(id: String, data: Map[String, AnyRef]): Unit = { - val updated = cache.putClocked(id, data)._2 - cache = updated + cache.put(id, data) } def containsKey(id: String): Boolean = { - cache.contains(id) + cache.getIfPresent(id) != null } def prepareCategoryId(categoryName: String, objectType: String, channel: String = "all") = { diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/BaseDefinitionNode.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/BaseDefinitionNode.scala similarity index 99% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/BaseDefinitionNode.scala rename to 
ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/BaseDefinitionNode.scala index 10a11200d..f598886b2 100644 --- a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/BaseDefinitionNode.scala +++ b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/BaseDefinitionNode.scala @@ -11,8 +11,8 @@ import org.sunbird.graph.dac.enums.SystemNodeTypes import org.sunbird.graph.dac.model.{Node, Relation} import org.sunbird.graph.schema.{IDefinition, ObjectCategoryDefinition} -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} +import scala.jdk.CollectionConverters._ class BaseDefinitionNode(graphId: String, schemaName: String, version: String = "1.0", ocd: ObjectCategoryDefinition = ObjectCategoryDefinition())(implicit ec: ExecutionContext, oec: OntologyEngineContext) extends IDefinition(graphId, schemaName, version, ocd)(ec, oec) { diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/FrameworkValidator.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/FrameworkValidator.scala similarity index 94% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/FrameworkValidator.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/FrameworkValidator.scala index 286140179..0d4a73bff 100644 --- a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/FrameworkValidator.scala +++ b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/FrameworkValidator.scala @@ -9,14 +9,14 @@ import org.sunbird.cache.impl.RedisCache import org.sunbird.common.Platform import org.sunbird.common.exception.{ClientException, ResourceNotFoundException, ServerException} import org.sunbird.graph.OntologyEngineContext +import scala.jdk.CollectionConverters._ import org.sunbird.graph.common.enums.SystemProperties import org.sunbird.graph.dac.model._ import org.sunbird.graph.schema.{FrameworkMasterCategoryMap, IDefinition} -import scala.collection.convert.ImplicitConversions._ -import scala.collection.JavaConverters._ import scala.collection.Map import scala.concurrent.{ExecutionContext, Future} +import scala.jdk.CollectionConverters._ trait FrameworkValidator extends IDefinition { @@ -36,7 +36,7 @@ trait FrameworkValidator extends IDefinition { val framework: String = node.getMetadata.getOrDefault("framework", "").asInstanceOf[String] if (null != fwCategories && fwCategories.nonEmpty && framework.nonEmpty) { //prepare data for validation - val fwMetadata: Map[String, AnyRef] = node.getMetadata.asScala.filterKeys(key => fwCategories.contains(key)) + val fwMetadata: Map[String, AnyRef] = node.getMetadata.asScala.filter(entry => fwCategories.contains(entry._1)).toMap //validate data from cache if (fwMetadata.nonEmpty) { val errors: util.List[String] = new util.ArrayList[String] @@ -73,12 +73,12 @@ trait FrameworkValidator extends IDefinition { getValidatedTerms(node, orgFwTerms).map(orgTermMap => { val jsonPropsType = schemaValidator.getAllPropsType.asScala masterCategories.map(masterCategory => { - val orgIdFieldName = masterCategory.getOrDefault("orgIdFieldName", "").asInstanceOf[String] - val code = masterCategory.getOrDefault("code", "").asInstanceOf[String] + val orgIdFieldName = masterCategory.getOrElse("orgIdFieldName", "").asInstanceOf[String] + val code = 
masterCategory.getOrElse("code", "").asInstanceOf[String] if(StringUtils.isNotBlank(orgIdFieldName)){ val categoryData = fetchValidatedList(getList(orgIdFieldName, node), orgTermMap) if (CollectionUtils.isNotEmpty(categoryData) && StringUtils.isNotBlank(code)) { - val typeInfo = jsonPropsType.getOrDefault(code, "").asInstanceOf[String] + val typeInfo = jsonPropsType.getOrElse(code, "").asInstanceOf[String] if(StringUtils.isNotBlank(typeInfo) && typeInfo == "array"){ node.getMetadata.put(code, categoryData) } else { @@ -104,8 +104,8 @@ trait FrameworkValidator extends IDefinition { private def getOrgAndTargetFWData(graphId: String, objectType: String)(implicit ec: ExecutionContext, oec: OntologyEngineContext):Future[(List[String], List[String],List[Map[String, AnyRef]])] = { val masterCategories: Future[List[Map[String, AnyRef]]] = getMasterCategory(graphId, objectType) masterCategories.map(result => { - (result.map(cat => cat.getOrDefault("orgIdFieldName", "").asInstanceOf[String]), - result.map(cat => cat.getOrDefault("targetIdFieldName", "").asInstanceOf[String]), result.map(cat => cat.asInstanceOf[Map[String, AnyRef]])) + (result.map(cat => cat.getOrElse("orgIdFieldName", "").asInstanceOf[String]), + result.map(cat => cat.getOrElse("targetIdFieldName", "").asInstanceOf[String]), result.map(cat => cat.asInstanceOf[Map[String, AnyRef]])) }) } @@ -121,7 +121,7 @@ trait FrameworkValidator extends IDefinition { logger.warn(s"ALERT!... There are no master framework category objects[$objectType] defined. This will not enable framework category properties validation.") List[Map[String, AnyRef]]() } else { - val masterCategories: scala.collection.immutable.Map[String, AnyRef] = dataNodes.map( + val masterCategories: scala.collection.immutable.Map[String, AnyRef] = dataNodes.asScala.map( node => node.getMetadata.getOrDefault("code", "").asInstanceOf[String] -> Map[String, AnyRef]( "code" -> node.getMetadata.getOrDefault("code", "").asInstanceOf[String], "orgIdFieldName" -> node.getMetadata.getOrDefault("orgIdFieldName", "").asInstanceOf[String], diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/PropAsEdgeValidator.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/PropAsEdgeValidator.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/PropAsEdgeValidator.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/PropAsEdgeValidator.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/RelationValidator.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/RelationValidator.scala similarity index 98% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/RelationValidator.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/RelationValidator.scala index 64c113917..be85b003e 100644 --- a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/RelationValidator.scala +++ b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/RelationValidator.scala @@ -15,8 +15,8 @@ import org.sunbird.graph.relations.{IRelation, RelationHandler} import org.sunbird.graph.schema.IDefinition import org.sunbird.graph.validator.NodeValidator -import scala.collection.JavaConverters._ import 
scala.concurrent.{ExecutionContext, Future} +import scala.jdk.CollectionConverters._ trait RelationValidator extends IDefinition { diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/SchemaValidator.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/SchemaValidator.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/SchemaValidator.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/SchemaValidator.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/VersionKeyValidator.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/VersionKeyValidator.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/VersionKeyValidator.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/VersionKeyValidator.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/VersioningNode.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/VersioningNode.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/schema/validator/VersioningNode.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/schema/validator/VersioningNode.scala diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/utils/NodeUtil.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/utils/NodeUtil.scala similarity index 95% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/utils/NodeUtil.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/utils/NodeUtil.scala index dc0d53869..2249097cd 100644 --- a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/utils/NodeUtil.scala +++ b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/utils/NodeUtil.scala @@ -12,9 +12,8 @@ import org.sunbird.graph.common.enums.SystemProperties import org.sunbird.graph.dac.model.{Node, Relation} import org.sunbird.graph.schema.{DefinitionNode, ObjectCategoryDefinition, ObjectCategoryDefinitionMap} -import scala.collection.JavaConverters -import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext +import scala.jdk.CollectionConverters._ object NodeUtil { val mapper: ObjectMapper = new ObjectMapper() @@ -98,7 +97,7 @@ object NodeUtil { if(MapUtils.isNotEmpty(nodeMap)) { node.setIdentifier(nodeMap.get("identifier").asInstanceOf[String]) node.setObjectType(nodeMap.get("objectType").asInstanceOf[String]) - val filteredMetadata: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef](JavaConverters.mapAsJavaMapConverter(nodeMap.asScala.filterNot(entry => relationMap.containsKey(entry._1)).toMap).asJava) + val filteredMetadata: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef](nodeMap.asScala.filterNot(entry => relationMap.containsKey(entry._1)).toMap.asJava) node.setMetadata(filteredMetadata) setRelation(node, nodeMap, relationMap) } @@ -165,14 +164,14 @@ object NodeUtil { "relation" -> rel.getRelationType) ++ relationObjectAttributes(objectType).map(key => (key -> rel.getEndNodeMetadata.get(key))).toMap val indexMap = 
if(rel.getRelationType.equals("hasSequenceMember")) Map("index" -> rel.getMetadata.getOrDefault("IL_SEQUENCE_INDEX",1.asInstanceOf[Number]).asInstanceOf[Number]) else Map() val completeRelData = relData ++ indexMap - mapAsJavaMap(completeRelData) + completeRelData.asJava } else { val objectType = rel.getStartNodeObjectType.replace("Image", "") val relData = Map("identifier" -> rel.getStartNodeId.replace(".img", ""), "name" -> rel.getStartNodeName, "objectType" -> objectType, "relation" -> rel.getRelationType) ++ relationObjectAttributes(objectType).map(key => (key -> rel.getStartNodeMetadata.get(key))).toMap - mapAsJavaMap(relData) + relData.asJava } } @@ -185,7 +184,7 @@ object NodeUtil { case _ => new util.ArrayList[String]() } if(CollectionUtils.isNotEmpty(languages)){ - JavaConverters.bufferAsJavaListConverter(languages.asScala.map(lang => if(Platform.config.hasPath("languageCode." + lang.toLowerCase)) Platform.config.getString("languageCode." + lang.toLowerCase) else "")).asJava + languages.asScala.map(lang => if(Platform.config.hasPath("languageCode." + lang.toLowerCase)) Platform.config.getString("languageCode." + lang.toLowerCase) else "").asJava }else{ languages } diff --git a/ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/utils/ScalaJsonUtils.scala b/ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/utils/ScalaJsonUtils.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/main/scala/org/sunbird/graph/utils/ScalaJsonUtils.scala rename to ontology-engine/graph-engine_2.13/src/main/scala/org/sunbird/graph/utils/ScalaJsonUtils.scala diff --git a/ontology-engine/graph-engine_2.12/src/test/resources/cassandra-unit.yaml b/ontology-engine/graph-engine_2.13/src/test/resources/cassandra-unit.yaml similarity index 100% rename from ontology-engine/graph-engine_2.12/src/test/resources/cassandra-unit.yaml rename to ontology-engine/graph-engine_2.13/src/test/resources/cassandra-unit.yaml diff --git a/ontology-engine/graph-engine_2.12/src/test/resources/logback.xml b/ontology-engine/graph-engine_2.13/src/test/resources/logback.xml similarity index 100% rename from ontology-engine/graph-engine_2.12/src/test/resources/logback.xml rename to ontology-engine/graph-engine_2.13/src/test/resources/logback.xml diff --git a/ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/BaseSpec.scala b/ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/BaseSpec.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/BaseSpec.scala rename to ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/BaseSpec.scala diff --git a/ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/health/TestHealthCheckManager.scala b/ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/health/TestHealthCheckManager.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/health/TestHealthCheckManager.scala rename to ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/health/TestHealthCheckManager.scala diff --git a/ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/nodes/TestDataNode.scala b/ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/nodes/TestDataNode.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/nodes/TestDataNode.scala rename to 
ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/nodes/TestDataNode.scala diff --git a/ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/schema/TestDefinitionNode.scala b/ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/schema/TestDefinitionNode.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/schema/TestDefinitionNode.scala rename to ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/schema/TestDefinitionNode.scala diff --git a/ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/schema/TestObjectCategoryDefinitionMap.scala b/ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/schema/TestObjectCategoryDefinitionMap.scala similarity index 83% rename from ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/schema/TestObjectCategoryDefinitionMap.scala rename to ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/schema/TestObjectCategoryDefinitionMap.scala index 0a61f1007..b9f71c4ed 100644 --- a/ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/schema/TestObjectCategoryDefinitionMap.scala +++ b/ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/schema/TestObjectCategoryDefinitionMap.scala @@ -5,14 +5,14 @@ class TestObjectCategoryDefinitionMap extends BaseSpec { "CategoryDefinitionMap" should "store cache for given id and value" in { ObjectCategoryDefinitionMap.put("test-definition", Map("schema" -> Map(), "config" -> Map())) - ObjectCategoryDefinitionMap.cache.occupancy shouldBe(1) + ObjectCategoryDefinitionMap.cache.estimatedSize() shouldBe(1) } it should "store cache with default ttl 10 sec" in { val tempKey = "test-definition" val tempValue = Map("schema" -> Map(), "config" -> Map()) ObjectCategoryDefinitionMap.put(tempKey, tempValue) - ObjectCategoryDefinitionMap.cache.occupancy shouldBe(1) + ObjectCategoryDefinitionMap.cache.estimatedSize() shouldBe(1) ObjectCategoryDefinitionMap.get(tempKey) shouldBe(tempValue) Thread.sleep(10000) ObjectCategoryDefinitionMap.get(tempKey) shouldBe (null) diff --git a/ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/schema/validator/TestSchemaValidator.scala b/ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/schema/validator/TestSchemaValidator.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/schema/validator/TestSchemaValidator.scala rename to ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/schema/validator/TestSchemaValidator.scala diff --git a/ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/utils/NodeUtilTest.scala b/ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/utils/NodeUtilTest.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/utils/NodeUtilTest.scala rename to ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/utils/NodeUtilTest.scala diff --git a/ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/utils/ScalaJsonUtilsTest.scala b/ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/utils/ScalaJsonUtilsTest.scala similarity index 100% rename from ontology-engine/graph-engine_2.12/src/test/scala/org/sunbird/graph/utils/ScalaJsonUtilsTest.scala rename to ontology-engine/graph-engine_2.13/src/test/scala/org/sunbird/graph/utils/ScalaJsonUtilsTest.scala diff --git 
a/ontology-engine/pom.xml b/ontology-engine/pom.xml index 3dc8826f6..91729a448 100644 --- a/ontology-engine/pom.xml +++ b/ontology-engine/pom.xml @@ -13,8 +13,8 @@ graph-common graph-dac-api - graph-core_2.12 - graph-engine_2.12 + graph-core_2.13 + graph-engine_2.13 parseq diff --git a/platform-core/actor-core/pom.xml b/platform-core/actor-core/pom.xml index 1eee0fb0f..94d5497c6 100644 --- a/platform-core/actor-core/pom.xml +++ b/platform-core/actor-core/pom.xml @@ -12,7 +12,7 @@ actor-core - 2.5.22 + 1.0.3 @@ -22,14 +22,14 @@ 1.0-SNAPSHOT - com.typesafe.akka - akka-actor_${scala.maj.version} - ${typesafe.akka.version} + org.apache.pekko + pekko-actor_${scala.maj.version} + ${pekko.version} - com.typesafe.akka - akka-slf4j_${scala.maj.version} - ${typesafe.akka.version} + org.apache.pekko + pekko-slf4j_${scala.maj.version} + ${pekko.version} org.reflections diff --git a/platform-core/actor-core/src/main/java/org/sunbird/actor/core/BaseActor.java b/platform-core/actor-core/src/main/java/org/sunbird/actor/core/BaseActor.java index e1d2371f9..17bf4b611 100644 --- a/platform-core/actor-core/src/main/java/org/sunbird/actor/core/BaseActor.java +++ b/platform-core/actor-core/src/main/java/org/sunbird/actor/core/BaseActor.java @@ -1,10 +1,10 @@ package org.sunbird.actor.core; -import akka.actor.AbstractActor; -import akka.actor.ActorRef; -import akka.dispatch.Futures; -import akka.dispatch.Recover; -import akka.pattern.Patterns; +import org.apache.pekko.actor.AbstractActor; +import org.apache.pekko.actor.ActorRef; +import org.apache.pekko.dispatch.Futures; +import org.apache.pekko.dispatch.Recover; +import org.apache.pekko.pattern.Patterns; import org.sunbird.common.dto.Request; import org.sunbird.common.dto.Response; import org.sunbird.common.dto.ResponseParams; diff --git a/platform-core/kafka-client/pom.xml b/platform-core/kafka-client/pom.xml index 322b145dd..69937139b 100644 --- a/platform-core/kafka-client/pom.xml +++ b/platform-core/kafka-client/pom.xml @@ -49,13 +49,13 @@ org.scalatest scalatest_${scala.maj.version} - 3.0.8 + 3.2.15 test - net.manub - scalatest-embedded-kafka_${scala.maj.version} - 1.1.0-kafka1.1-nosr + io.github.embeddedkafka + embedded-kafka_${scala.maj.version} + 3.4.0 test diff --git a/platform-core/kafka-client/src/test/scala/org/sunbird/kafka/test/BaseTest.scala b/platform-core/kafka-client/src/test/scala/org/sunbird/kafka/test/BaseTest.scala index aae895d3f..bb036d6b2 100644 --- a/platform-core/kafka-client/src/test/scala/org/sunbird/kafka/test/BaseTest.scala +++ b/platform-core/kafka-client/src/test/scala/org/sunbird/kafka/test/BaseTest.scala @@ -1,9 +1,11 @@ package org.sunbird.kafka.test -import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig} +import io.github.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig} -import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers} +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import org.scalatest.{BeforeAndAfterAll} -class BaseTest extends FlatSpec with Matchers with BeforeAndAfterAll with EmbeddedKafka { +class BaseTest extends AnyFlatSpec with Matchers with BeforeAndAfterAll with EmbeddedKafka { implicit val config = EmbeddedKafkaConfig(kafkaPort = 9092) diff --git a/platform-core/platform-cache/src/main/scala/org/sunbird/cache/impl/RedisCache.scala b/platform-core/platform-cache/src/main/scala/org/sunbird/cache/impl/RedisCache.scala index 7b739952d..8ee216d66 100644 --- a/platform-core/platform-cache/src/main/scala/org/sunbird/cache/impl/RedisCache.scala +++ 
b/platform-core/platform-cache/src/main/scala/org/sunbird/cache/impl/RedisCache.scala @@ -4,8 +4,8 @@ import org.apache.commons.lang3.StringUtils import org.slf4j.{Logger, LoggerFactory} import org.sunbird.cache.util.RedisConnector -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} +import scala.jdk.CollectionConverters._ /** * This Utility Object Provide Methods To Perform CRUD Operation With Redis diff --git a/platform-core/platform-common/pom.xml b/platform-core/platform-common/pom.xml index 5b119b0ed..312e5bf18 100644 --- a/platform-core/platform-common/pom.xml +++ b/platform-core/platform-common/pom.xml @@ -35,7 +35,7 @@ commons-io commons-io - 2.14.0 + 2.6 com.fasterxml.jackson.module diff --git a/platform-core/schema-validator/src/test/resources/application.conf b/platform-core/schema-validator/src/test/resources/application.conf index 577191fd1..6f912a959 100644 --- a/platform-core/schema-validator/src/test/resources/application.conf +++ b/platform-core/schema-validator/src/test/resources/application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. # @@ -338,7 +338,7 @@ plugin.media.base.url="https://dev.open-sunbird.org" # Configuration graph.dir=/data/testingGraphDB -akka.request_timeout=30 +pekko.request_timeout=30 environment.id=10000000 graph.ids=["domain"] graph.passport.key.base=31b6fd1c4d64e745c867e61a45edc34a @@ -463,7 +463,7 @@ framework.cache.read=true max.thumbnail.size.pixels=150 play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB +pekko.http.parsing.max-content-length = 50MB schema.base_path="../../schemas/" //schema.base_path = "https://sunbirddev.blob.core.windows.net/sunbird-content-dev/schemas/" diff --git a/platform-modules/import-manager/pom.xml b/platform-modules/import-manager/pom.xml index f9f36de48..f9e1a847e 100644 --- a/platform-modules/import-manager/pom.xml +++ b/platform-modules/import-manager/pom.xml @@ -24,7 +24,7 @@ org.sunbird - graph-engine_2.12 + graph-engine_2.13 1.0-SNAPSHOT jar diff --git a/platform-modules/import-manager/src/main/scala/org/sunbird/object/importer/ImportManager.scala b/platform-modules/import-manager/src/main/scala/org/sunbird/object/importer/ImportManager.scala index 561cee2c1..98287310f 100644 --- a/platform-modules/import-manager/src/main/scala/org/sunbird/object/importer/ImportManager.scala +++ b/platform-modules/import-manager/src/main/scala/org/sunbird/object/importer/ImportManager.scala @@ -15,8 +15,7 @@ import org.sunbird.graph.common.Identifier import org.sunbird.graph.utils.ScalaJsonUtils import org.sunbird.telemetry.util.LogTelemetryEventUtil -import scala.collection.JavaConversions.mapAsJavaMap -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable import scala.concurrent.{ExecutionContext, Future} @@ -110,9 +109,9 @@ class ImportManager(config: ImportConfig) { def validateStage(stage: String, validObjectStage: util.List[String]): Boolean = if(StringUtils.isNotBlank(stage)) 
validObjectStage.contains(stage) else true def getInstructionEvent(identifier: String, source: String, metadata: util.Map[String, AnyRef], collection: util.List[util.Map[String, AnyRef]], stage: String, originData: util.Map[String, AnyRef]): String = { - val actor = mapAsJavaMap[String, AnyRef](Map[String, AnyRef]("id" -> "Auto Creator", "type" -> "System")) - val context = mapAsJavaMap[String, AnyRef](Map[String, AnyRef]("pdata" -> mapAsJavaMap(Map[String, AnyRef]("id" -> "org.sunbird.platform", "ver" -> "1.0", "env" -> Platform.getString("cloud_storage.env", "dev"))), ImportConstants.CHANNEL -> metadata.getOrDefault(ImportConstants.CHANNEL, ""))) - val objectData = mapAsJavaMap[String, AnyRef](Map[String, AnyRef]("id" -> identifier, "ver" -> metadata.get(ImportConstants.VERSION_KEY))) + val actor = Map[String, AnyRef]("id" -> "Auto Creator", "type" -> "System").asJava + val context = Map[String, AnyRef]("pdata" -> Map[String, AnyRef]("id" -> "org.sunbird.platform", "ver" -> "1.0", "env" -> Platform.getString("cloud_storage.env", "dev")).asJava, ImportConstants.CHANNEL -> metadata.getOrDefault(ImportConstants.CHANNEL, "")).asJava + val objectData = Map[String, AnyRef]("id" -> identifier, "ver" -> metadata.get(ImportConstants.VERSION_KEY)).asJava val edata = mutable.Map[String, AnyRef]("action" -> "auto-create", "iteration" -> 1.asInstanceOf[AnyRef], ImportConstants.OBJECT_TYPE -> metadata.getOrDefault(ImportConstants.OBJECT_TYPE, "").asInstanceOf[String], if (StringUtils.isNotBlank(source)) ImportConstants.REPOSITORY -> source else ImportConstants.IDENTIFIER -> identifier, ImportConstants.METADATA -> metadata, if (CollectionUtils.isNotEmpty(collection)) ImportConstants.COLLECTION -> collection else ImportConstants.COLLECTION -> List().asJava, ImportConstants.STAGE -> stage, if(StringUtils.isNotBlank(source) && MapUtils.isNotEmpty(originData)) ImportConstants.ORIGIN_DATA -> originData else ImportConstants.ORIGIN_DATA -> new util.HashMap[String, AnyRef]()).asJava diff --git a/platform-modules/import-manager/src/test/scala/org/sunbird/object/importer/ImportManagerTest.scala b/platform-modules/import-manager/src/test/scala/org/sunbird/object/importer/ImportManagerTest.scala index 8cf27c05b..0c5a4576a 100644 --- a/platform-modules/import-manager/src/test/scala/org/sunbird/object/importer/ImportManagerTest.scala +++ b/platform-modules/import-manager/src/test/scala/org/sunbird/object/importer/ImportManagerTest.scala @@ -9,10 +9,10 @@ import org.scalamock.scalatest.AsyncMockFactory import org.sunbird.common.{HttpUtil, JsonUtils} import org.sunbird.common.dto.{Request, Response, ResponseHandler} import org.sunbird.common.exception.ClientException +import scala.jdk.CollectionConverters._ import org.sunbird.graph.OntologyEngineContext import org.sunbird.kafka.client.KafkaClient -import scala.collection.JavaConverters._ class ImportManagerTest extends AsyncFlatSpec with Matchers with AsyncMockFactory { diff --git a/platform-modules/mimetype-manager/pom.xml b/platform-modules/mimetype-manager/pom.xml index e58542a92..d16c6a6c3 100644 --- a/platform-modules/mimetype-manager/pom.xml +++ b/platform-modules/mimetype-manager/pom.xml @@ -10,8 +10,8 @@ 4.0.0 mimetype-manager - 2.12 - 2.7.2 + 2.13 + 3.0.5 @@ -23,7 +23,7 @@ org.sunbird - graph-engine_2.12 + graph-engine_2.13 1.0-SNAPSHOT jar @@ -142,7 +142,7 @@ org.apache.tika tika-core - 1.22 + 2.9.1 org.scalatest diff --git a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/ecml/processor/AssetsLicenseValidatorProcessor.scala 
b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/ecml/processor/AssetsLicenseValidatorProcessor.scala index 8c81bd6bc..85c993ecc 100644 --- a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/ecml/processor/AssetsLicenseValidatorProcessor.scala +++ b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/ecml/processor/AssetsLicenseValidatorProcessor.scala @@ -6,7 +6,6 @@ import org.sunbird.common.exception.ClientException import org.sunbird.telemetry.logger.TelemetryManager import org.sunbird.url.mgr.impl.URLFactoryManager -import scala.collection.JavaConverters._ trait AssetsLicenseValidatorProcessor extends IProcessor { diff --git a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/BaseMimeTypeManager.scala b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/BaseMimeTypeManager.scala index 94f9d2368..7259769c9 100644 --- a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/BaseMimeTypeManager.scala +++ b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/BaseMimeTypeManager.scala @@ -9,13 +9,13 @@ import org.apache.commons.io.{FileUtils, FilenameUtils} import org.apache.commons.lang3.StringUtils import org.apache.commons.validator.routines.UrlValidator import org.apache.tika.Tika +import scala.jdk.CollectionConverters._ import org.sunbird.cloudstore.StorageService import org.sunbird.common.exception.{ClientException, ServerException} import org.sunbird.common.{DateUtils, HttpUtil, Platform, Slug} import org.sunbird.graph.dac.model.Node import org.sunbird.telemetry.logger.TelemetryManager -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} diff --git a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/CollectionMimeTypeMgrImpl.scala b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/CollectionMimeTypeMgrImpl.scala index b1c7bd8bc..d05b1554a 100644 --- a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/CollectionMimeTypeMgrImpl.scala +++ b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/CollectionMimeTypeMgrImpl.scala @@ -9,12 +9,12 @@ import org.sunbird.common.Platform import org.sunbird.common.dto.{Request, ResponseHandler} import org.sunbird.common.exception.{ClientException, ServerException} import org.sunbird.graph.OntologyEngineContext +import scala.jdk.CollectionConverters._ import org.sunbird.graph.dac.model.Node import org.sunbird.graph.nodes.DataNode import org.sunbird.graph.utils.ScalaJsonUtils import org.sunbird.mimetype.mgr.{BaseMimeTypeManager, MimeTypeManager} -import scala.collection.JavaConverters._ import scala.collection.convert.ImplicitConversions._ import scala.concurrent.{ExecutionContext, Future} diff --git a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/DocumentMimeTypeMgrImpl.scala b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/DocumentMimeTypeMgrImpl.scala index e2ffe0612..8f1711cc1 100644 --- a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/DocumentMimeTypeMgrImpl.scala +++ b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/DocumentMimeTypeMgrImpl.scala @@ -9,11 +9,11 @@ import org.sunbird.models.UploadParams import org.sunbird.cloudstore.StorageService import org.sunbird.common.Platform import 
org.sunbird.common.exception.ClientException +import scala.jdk.CollectionConverters._ import org.sunbird.graph.OntologyEngineContext import org.sunbird.graph.dac.model.Node import org.sunbird.mimetype.mgr.{BaseMimeTypeManager, MimeTypeManager} -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} class DocumentMimeTypeMgrImpl(implicit ss: StorageService) extends BaseMimeTypeManager with MimeTypeManager { diff --git a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/EcmlMimeTypeMgrImpl.scala b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/EcmlMimeTypeMgrImpl.scala index e1c3c77f0..b9f7d8e7e 100644 --- a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/EcmlMimeTypeMgrImpl.scala +++ b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/EcmlMimeTypeMgrImpl.scala @@ -9,6 +9,7 @@ import org.sunbird.common.Platform import org.sunbird.common.dto.{Request, ResponseHandler} import org.sunbird.common.exception.{ClientException, ServerException} import org.sunbird.graph.OntologyEngineContext +import scala.jdk.CollectionConverters._ import org.sunbird.graph.dac.model.Node import org.sunbird.graph.utils.ScalaJsonUtils import org.sunbird.mimetype.ecml.{ECMLExtractor, ECMLProcessor} @@ -17,7 +18,6 @@ import org.sunbird.mimetype.mgr.{BaseMimeTypeManager, MimeTypeManager} import org.sunbird.telemetry.logger.TelemetryManager import org.xml.sax.{InputSource, SAXException} -import scala.collection.JavaConverters._ import scala.collection.convert.ImplicitConversions._ import scala.concurrent.{ExecutionContext, Future} diff --git a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/H5PMimeTypeMgrImpl.scala b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/H5PMimeTypeMgrImpl.scala index 5cb004f48..5421786db 100644 --- a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/H5PMimeTypeMgrImpl.scala +++ b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/H5PMimeTypeMgrImpl.scala @@ -9,6 +9,8 @@ import org.sunbird.cloudstore.StorageService import org.sunbird.common.Slug import org.sunbird.common.exception.ClientException import org.sunbird.graph.OntologyEngineContext +import scala.jdk.CollectionConverters._ +import scala.util.{Failure, Success} import org.sunbird.graph.dac.model.Node import org.sunbird.mimetype.mgr.{BaseMimeTypeManager, MimeTypeManager} import org.sunbird.telemetry.logger.TelemetryManager @@ -40,7 +42,7 @@ class H5PMimeTypeMgrImpl(implicit ss: StorageService) extends BaseMimeTypeManage Future { extractH5PPackageInCloud(objectId, extractionBasePath, node, "snapshot", false).map(resp => TelemetryManager.info("H5P content snapshot folder upload success for " + objectId) - ) onFailure { case e: Throwable => + ).recover { case e: Throwable => TelemetryManager.error("H5P content snapshot folder upload failed for " + objectId, e.getCause) } } diff --git a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/PluginMimeTypeMgrImpl.scala b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/PluginMimeTypeMgrImpl.scala index 2fb9215e9..e1db2aefd 100644 --- a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/PluginMimeTypeMgrImpl.scala +++ b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/PluginMimeTypeMgrImpl.scala @@ -9,12 +9,11 @@ 
import org.sunbird.common.JsonUtils import org.sunbird.common.exception.ClientException import org.sunbird.cloudstore.StorageService import org.sunbird.graph.OntologyEngineContext +import scala.jdk.CollectionConverters._ import org.sunbird.graph.dac.model.Node import org.sunbird.graph.utils.ScalaJsonUtils import org.sunbird.mimetype.mgr.{BaseMimeTypeManager, MimeTypeManager} -import scala.collection.JavaConverters -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} class PluginMimeTypeMgrImpl(implicit ss: StorageService) extends BaseMimeTypeManager with MimeTypeManager { diff --git a/platform-modules/mimetype-manager/src/test/resources/application.conf b/platform-modules/mimetype-manager/src/test/resources/application.conf index 5fa5a0884..2d9af2c43 100644 --- a/platform-modules/mimetype-manager/src/test/resources/application.conf +++ b/platform-modules/mimetype-manager/src/test/resources/application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. # @@ -338,7 +338,7 @@ plugin.media.base.url="https://dev.sunbirded.org" # Configuration graph.dir=/data/testingGraphDB -akka.request_timeout=30 +pekko.request_timeout=30 environment.id=10000000 graph.ids=["domain"] graph.passport.key.base=31b6fd1c4d64e745c867e61a45edc34a diff --git a/pom.xml b/pom.xml index 709d4455f..603359543 100644 --- a/pom.xml +++ b/pom.xml @@ -12,10 +12,10 @@ UTF-8 1.4.1 - 2.12 - 2.12.11 + 2.13 + 2.13.12 3.0.8 - 2.13.5 + 2.15.3 diff --git a/search-api/pom.xml b/search-api/pom.xml index 95915182b..3b778d5f5 100644 --- a/search-api/pom.xml +++ b/search-api/pom.xml @@ -11,7 +11,7 @@ UTF-8 UTF-8 - 2.12 + 2.13 search-api diff --git a/search-api/search-actors/pom.xml b/search-api/search-actors/pom.xml index 7ca4ef1b0..29372e391 100644 --- a/search-api/search-actors/pom.xml +++ b/search-api/search-actors/pom.xml @@ -29,9 +29,9 @@ 1.0-SNAPSHOT - com.typesafe.akka - akka-testkit_${scala.maj.version} - 2.5.22 + org.apache.pekko + pekko-testkit_${scala.maj.version} + 1.0.3 test diff --git a/search-api/search-actors/src/main/java/org/sunbird/actors/AuditHistoryActor.java b/search-api/search-actors/src/main/java/org/sunbird/actors/AuditHistoryActor.java index b11d83977..f3c40fe6b 100644 --- a/search-api/search-actors/src/main/java/org/sunbird/actors/AuditHistoryActor.java +++ b/search-api/search-actors/src/main/java/org/sunbird/actors/AuditHistoryActor.java @@ -1,6 +1,6 @@ package org.sunbird.actors; -import akka.dispatch.Mapper; +import org.apache.pekko.dispatch.Mapper; import org.apache.commons.lang3.StringUtils; import org.sunbird.common.JsonUtils; import org.sunbird.common.dto.Request; diff --git a/search-api/search-actors/src/main/java/org/sunbird/actors/HealthActor.java b/search-api/search-actors/src/main/java/org/sunbird/actors/HealthActor.java index e4a01a449..a42fae082 100644 --- a/search-api/search-actors/src/main/java/org/sunbird/actors/HealthActor.java +++ b/search-api/search-actors/src/main/java/org/sunbird/actors/HealthActor.java @@ -1,6 +1,6 @@ package org.sunbird.actors; -import 
akka.dispatch.Futures; +import org.apache.pekko.dispatch.Futures; import org.sunbird.actor.core.BaseActor; import org.sunbird.common.Platform; import org.sunbird.common.dto.Request; diff --git a/search-api/search-actors/src/main/java/org/sunbird/actors/SearchActor.java b/search-api/search-actors/src/main/java/org/sunbird/actors/SearchActor.java index 8858fc5e2..655276717 100644 --- a/search-api/search-actors/src/main/java/org/sunbird/actors/SearchActor.java +++ b/search-api/search-actors/src/main/java/org/sunbird/actors/SearchActor.java @@ -1,9 +1,9 @@ package org.sunbird.actors; -import akka.dispatch.Futures; -import akka.dispatch.Mapper; -import akka.dispatch.Recover; -import akka.util.Timeout; +import org.apache.pekko.dispatch.Futures; +import org.apache.pekko.dispatch.Mapper; +import org.apache.pekko.dispatch.Recover; +import org.apache.pekko.util.Timeout; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang3.StringUtils; diff --git a/search-api/search-actors/src/main/java/org/sunbird/actors/SearchBaseActor.java b/search-api/search-actors/src/main/java/org/sunbird/actors/SearchBaseActor.java index d071e62ad..23f77f58c 100644 --- a/search-api/search-actors/src/main/java/org/sunbird/actors/SearchBaseActor.java +++ b/search-api/search-actors/src/main/java/org/sunbird/actors/SearchBaseActor.java @@ -1,10 +1,10 @@ package org.sunbird.actors; -import akka.actor.AbstractActor; -import akka.actor.ActorRef; -import akka.dispatch.Futures; -import akka.dispatch.Recover; -import akka.pattern.Patterns; +import org.apache.pekko.actor.AbstractActor; +import org.apache.pekko.actor.ActorRef; +import org.apache.pekko.dispatch.Futures; +import org.apache.pekko.dispatch.Recover; +import org.apache.pekko.pattern.Patterns; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang3.StringUtils; diff --git a/search-api/search-actors/src/test/java/org/sunbird/actors/AuditHistoryActorTest.java b/search-api/search-actors/src/test/java/org/sunbird/actors/AuditHistoryActorTest.java index 2d14a96b0..68e17e4a7 100644 --- a/search-api/search-actors/src/test/java/org/sunbird/actors/AuditHistoryActorTest.java +++ b/search-api/search-actors/src/test/java/org/sunbird/actors/AuditHistoryActorTest.java @@ -1,6 +1,6 @@ package org.sunbird.actors; -import akka.testkit.TestKit; +import org.apache.pekko.testkit.TestKit; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; diff --git a/search-api/search-actors/src/test/java/org/sunbird/actors/HealthActorTest.java b/search-api/search-actors/src/test/java/org/sunbird/actors/HealthActorTest.java index a61193457..7b88cf672 100644 --- a/search-api/search-actors/src/test/java/org/sunbird/actors/HealthActorTest.java +++ b/search-api/search-actors/src/test/java/org/sunbird/actors/HealthActorTest.java @@ -1,7 +1,7 @@ package org.sunbird.actors; -import akka.actor.Props; -import akka.testkit.TestKit; +import org.apache.pekko.actor.Props; +import org.apache.pekko.testkit.TestKit; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Test; diff --git a/search-api/search-actors/src/test/java/org/sunbird/actors/SearchActorTest.java b/search-api/search-actors/src/test/java/org/sunbird/actors/SearchActorTest.java index c8ca505cd..2562fe317 100644 --- a/search-api/search-actors/src/test/java/org/sunbird/actors/SearchActorTest.java +++ 
b/search-api/search-actors/src/test/java/org/sunbird/actors/SearchActorTest.java @@ -1,6 +1,6 @@ package org.sunbird.actors; -import akka.testkit.TestKit; +import org.apache.pekko.testkit.TestKit; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.junit.After; diff --git a/search-api/search-actors/src/test/java/org/sunbird/actors/SearchBaseActorTest.java b/search-api/search-actors/src/test/java/org/sunbird/actors/SearchBaseActorTest.java index cd64e1201..2586dc125 100644 --- a/search-api/search-actors/src/test/java/org/sunbird/actors/SearchBaseActorTest.java +++ b/search-api/search-actors/src/test/java/org/sunbird/actors/SearchBaseActorTest.java @@ -1,9 +1,9 @@ package org.sunbird.actors; -import akka.actor.ActorRef; -import akka.actor.ActorSystem; -import akka.actor.Props; -import akka.pattern.Patterns; +import org.apache.pekko.actor.ActorRef; +import org.apache.pekko.actor.ActorSystem; +import org.apache.pekko.actor.Props; +import org.apache.pekko.pattern.Patterns; import org.apache.commons.lang.math.RandomUtils; import org.apache.commons.lang3.StringUtils; import org.junit.*; diff --git a/search-api/search-actors/src/test/resources/application.conf b/search-api/search-actors/src/test/resources/application.conf index 030260f66..f7c2b3fc8 100644 --- a/search-api/search-actors/src/test/resources/application.conf +++ b/search-api/search-actors/src/test/resources/application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. 
# @@ -278,7 +278,7 @@ play.filters { } play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB +pekko.http.parsing.max-content-length = 50MB schema.base_path = "../../schemas" diff --git a/search-api/search-core/pom.xml b/search-api/search-core/pom.xml index 3af6a5685..2f1076af4 100644 --- a/search-api/search-core/pom.xml +++ b/search-api/search-core/pom.xml @@ -13,15 +13,15 @@ UTF-8 - 2.5.22 - 2.12 + 1.0.3 + 2.13 - com.typesafe.akka - akka-actor_${scala.maj.version} - ${akka.version} + org.apache.pekko + pekko-actor_${scala.maj.version} + ${pekko.version} org.sunbird diff --git a/search-api/search-core/src/main/java/org/sunbird/search/client/ElasticSearchUtil.java b/search-api/search-core/src/main/java/org/sunbird/search/client/ElasticSearchUtil.java index c967ba4fe..a1f6d071e 100644 --- a/search-api/search-core/src/main/java/org/sunbird/search/client/ElasticSearchUtil.java +++ b/search-api/search-core/src/main/java/org/sunbird/search/client/ElasticSearchUtil.java @@ -3,7 +3,7 @@ */ package org.sunbird.search.client; -import akka.dispatch.Futures; +import org.apache.pekko.dispatch.Futures; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.collections.CollectionUtils; diff --git a/search-api/search-core/src/main/java/org/sunbird/search/processor/SearchProcessor.java b/search-api/search-core/src/main/java/org/sunbird/search/processor/SearchProcessor.java index e8a6fd58c..7012948f2 100644 --- a/search-api/search-core/src/main/java/org/sunbird/search/processor/SearchProcessor.java +++ b/search-api/search-core/src/main/java/org/sunbird/search/processor/SearchProcessor.java @@ -1,6 +1,6 @@ package org.sunbird.search.processor; -import akka.dispatch.Mapper; +import org.apache.pekko.dispatch.Mapper; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.collections.CollectionUtils; diff --git a/search-api/search-core/src/test/java/org/sunbird/search/SearchProcessorTest.java b/search-api/search-core/src/test/java/org/sunbird/search/SearchProcessorTest.java index 4581896c1..0d4061372 100644 --- a/search-api/search-core/src/test/java/org/sunbird/search/SearchProcessorTest.java +++ b/search-api/search-core/src/test/java/org/sunbird/search/SearchProcessorTest.java @@ -1,19 +1,13 @@ -/** - * - */ package org.sunbird.search; -import akka.dispatch.OnSuccess; -import org.apache.commons.lang.math.RandomUtils; -import org.apache.commons.lang3.StringUtils; -import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; -import org.sunbird.search.dto.SearchDTO; -import org.sunbird.search.processor.SearchProcessor; -import org.sunbird.search.util.SearchConstants; -import scala.concurrent.ExecutionContext; +import org.junit.Assert; + +import scala.concurrent.Await; import scala.concurrent.Future; +import scala.concurrent.duration.Duration; + import java.util.ArrayList; import java.util.Arrays; import java.util.Date; @@ -22,1069 +16,1045 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; + +import org.apache.commons.lang.math.RandomUtils; +import org.apache.commons.lang3.StringUtils; + +import org.sunbird.search.dto.SearchDTO; +import org.sunbird.search.processor.SearchProcessor; +import org.sunbird.search.util.SearchConstants; /** - * @author pradyumna - * + * Restored SearchProcessorTest. 
The original test used asynchronous onSuccess/onComplete + * handlers; to simplify migration and keep tests deterministic we synchronously Await + * the scala.concurrent.Future results and assert on returned maps. This keeps changes + * confined to test sources only and avoids Pekko onSuccess->onComplete migration issues. */ - public class SearchProcessorTest extends BaseSearchTest { - private static SearchProcessor searchprocessor = new SearchProcessor(); - - @BeforeClass - public static void beforeTest() throws Exception { - createCompositeSearchIndex(); - insertTestRecords(); - Thread.sleep(3000); - - } - - private static void insertTestRecords() throws Exception { - for (int i = 1; i <= 30; i++) { - Map content = getContentTestRecord(null, i); - String id = (String) content.get("identifier"); - addToIndex(id, content); - } - Map content = getContentTestRecord("do_10000031", 31); - content.put("name", "31 check name match"); - content.put("description", "हिन्दी description"); - content.put("subject", Arrays.asList("English", "Mathematics")); - addToIndex("do_10000031", content); - - content = getContentTestRecord("do_10000032", 32); - content.put("name", "check ends with value32"); - content.put("subject", Arrays.asList("Mathematics")); - addToIndex("do_10000032", content); - } - - private static Map getContentTestRecord(String id, int index) { - String objectType = "Content"; - Date d = new Date(); - Map map = getTestRecord(id, index, "do", objectType); - map.put("name", "Content_" + System.currentTimeMillis() + "_name"); - map.put("code", "code_" + System.currentTimeMillis()); - map.put("contentType", getContentType()); - map.put("createdOn", new Date().toString()); - map.put("lastUpdatedOn", new Date().toString()); - if (index % 5 == 0) { - map.put("lastPublishedOn", d.toString()); - map.put("status", "Live"); - map.put("size", 1000432); - } else { - map.put("status", "Draft"); - if (index % 3 == 0) - map.put("size", 564738); - } - Set tagList = getTags(); - if (null != tagList && !tagList.isEmpty() && index % 7 != 0) - map.put("tags", tagList); - map.put("downloads", index); - return map; - } - - private static Map getTestRecord(String id, int index, String prefix, String objectType) { - Map map = new HashMap(); - if (StringUtils.isNotBlank(id)) - map.put("identifier", id); - else { - long suffix = 10000000 + index; - map.put("identifier", prefix + "_" + suffix); - } - map.put("objectType", objectType); - return map; - } - - private static String[] contentTypes = new String[] { "Story", "Worksheet", "Game", "Collection", "Asset" }; - - private static String getContentType() { - return contentTypes[RandomUtils.nextInt(5)]; - } - - private static String[] tags = new String[] { "hindi story", "NCERT", "Pratham", "एकस्टेप", "हिन्दी", - "हाथी और भालू", "worksheet", "test" }; - - private static Set getTags() { - Set list = new HashSet(); - int count = RandomUtils.nextInt(9); - for (int i = 0; i < count; i++) { - list.add(tags[RandomUtils.nextInt(8)]); - } - return list; - } - - @SuppressWarnings({ "rawtypes", "unchecked" }) - @Test - public void testSearchByQuery() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE); - property.put(SearchConstants.propertyName, "*"); - property.put(SearchConstants.values, Arrays.asList("हिन्दी")); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, 
Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - boolean found = false; - for (Object obj : results) { - Map content = (Map) obj; - String desc = (String) content.get("description"); - if (null != desc && desc.contains("हिन्दी")) - found = true; - } - Assert.assertTrue(found); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testSearchByQueryForNotEquals() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE); - property.put(SearchConstants.propertyName, "*"); - property.put(SearchConstants.values, Arrays.asList("हिन्दी")); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("31 check name match")); - property.put(SearchConstants.propertyName, "name"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_NOT_EQUAL); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() > 0); - boolean found = false; - for (Object obj : results) { - Map content = (Map) obj; - String desc = (String) content.get("name"); - if (null != desc && !StringUtils.equalsIgnoreCase("31 check name match", desc)) - found = true; - } - Assert.assertTrue(found); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testSearchByQueryForNotIn() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE); - property.put(SearchConstants.propertyName, "*"); - property.put(SearchConstants.values, Arrays.asList("हिन्दी")); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("31 check name match")); - property.put(SearchConstants.propertyName, "name"); - property.put(SearchConstants.operation, 
SearchConstants.SEARCH_OPERATION_NOT_IN); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() > 0); - boolean found = true; - for (Object obj : results) { - Map content = (Map) obj; - String desc = (String) content.get("name"); - if (null != desc && StringUtils.equalsIgnoreCase("31 check name match", desc)) - found = false; - } - Assert.assertTrue(found); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testSearchByQueryFields() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE); - property.put(SearchConstants.propertyName, "*"); - property.put(SearchConstants.values, Arrays.asList("हिन्दी")); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - List fields = new ArrayList(); - fields.add("description"); - searchObj.setFields(fields); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() > 0); - boolean found = false; - for (Object obj : results) { - Map content = (Map) obj; - String desc = (String) content.get("description"); - if (null != desc && desc.contains("हिन्दी")) - found = true; - } - Assert.assertTrue(found); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testSearchArrayFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - List names = new ArrayList(); - names.add("31 check name match"); - names.add("check ends with value32"); - property = new HashMap(); - property.put(SearchConstants.values, names); - property.put(SearchConstants.propertyName, "name"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() == 2); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - 
@SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testSearchStartsWithFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, "31 check"); - property.put(SearchConstants.propertyName, "name"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_STARTS_WITH); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() == 1); - Map content = (Map) results.get(0); - String identifier = (String) content.get("identifier"); - Assert.assertEquals("do_10000031", identifier); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testSearchEndsWithFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, "Value32"); - property.put(SearchConstants.propertyName, "name"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_ENDS_WITH); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() == 1); - Map content = (Map) results.get(0); - String identifier = (String) content.get("identifier"); - Assert.assertEquals("do_10000032", identifier); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testSearchLTFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, 1000432); - property.put(SearchConstants.propertyName, "size"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LESS_THAN); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new 
OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - for (Object obj : results) { - Map content = (Map) obj; - Integer identifier = (Integer) content.get("size"); - if (null != identifier) - Assert.assertTrue(identifier < 1000432); - } - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testSearchLEGEFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, 1000432); - property.put(SearchConstants.propertyName, "size"); - property.put(SearchConstants.operation, - SearchConstants.SEARCH_OPERATION_LESS_THAN_EQUALS); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, 1000432); - property.put(SearchConstants.propertyName, "size"); - property.put(SearchConstants.operation, - SearchConstants.SEARCH_OPERATION_GREATER_THAN_EQUALS); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - for (Object obj : results) { - Map content = (Map) obj; - Integer identifier = (Integer) content.get("size"); - if (null != identifier) - Assert.assertTrue(identifier == 1000432); - } - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testSearchGTFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, 564738); - property.put(SearchConstants.propertyName, "size"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_GREATER_THAN); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - for (Object obj : results) { - Map content = (Map) obj; - Integer identifier = (Integer) content.get("size"); - if (null != identifier) - Assert.assertTrue(identifier > 564738); - } - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testSearchContainsFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List 
properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, "check"); - property.put(SearchConstants.propertyName, "name"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_CONTAINS); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - for (Object obj : results) { - Map content = (Map) obj; - String identifier = (String) content.get("name"); - Assert.assertTrue(identifier.contains("check")); - } - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testSearchExistsCondition() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, "objectType"); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EXISTS); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - for (Object obj : results) { - Map content = (Map) obj; - String objectType = (String) content.get("objectType"); - Assert.assertNotNull(objectType); - } - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testSoftConstraints() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - searchObj.setProperties(properties); - List fields = new ArrayList(); - fields.add("name"); - fields.add("medium"); - fields.add("subject"); - fields.add("contentType"); - searchObj.setFields(fields); - Map softConstraints = new HashMap(); - softConstraints.put("name", - Arrays.asList(100, Arrays.asList("31 check name match", "check ends with value32"))); - searchObj.setSoftConstraints(softConstraints); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - searchObj.setLimit(100); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - 
public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertEquals("31 check name match", results.get(0).get("name")); - Assert.assertEquals("check ends with value32", results.get(1).get("name")); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testSearchFacets() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - searchObj.setProperties(properties); - List exists = new ArrayList(); - exists.add("size"); - exists.add("contentType"); - searchObj.setFacets(exists); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - searchObj.setLimit(100); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List list = (List) response.get("facets"); - Assert.assertNotNull(list); - Assert.assertTrue(list.size() > 1); - Map facet = (Map) list.get(0); - Assert.assertEquals("size", facet.get("name").toString()); - List values = (List) facet.get("values"); - Assert.assertEquals(2, values.size()); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings("rawtypes") - @Test - public void testSearchCount() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - searchObj.setLimit(100); - Map response = searchprocessor.processCount(searchObj); - Integer count = (Integer) response.get("count"); - Assert.assertNotNull(count); - - } - - /** - * - */ - - @SuppressWarnings({ "rawtypes", "unchecked" }) - @Test - public void testFuzzySearchByQuery() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE); - property.put(SearchConstants.propertyName, "*"); - property.put(SearchConstants.values, Arrays.asList("हिन्दी")); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - searchObj.setFuzzySearch(true); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - boolean found = false; - for (Object obj : results) { - Map content = (Map) obj; - String desc = (String) content.get("description"); - if (null != 
desc && desc.contains("हिन्दी")) - found = true; - } - Assert.assertTrue(found); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testFuzzySearchByQueryForNotEquals() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE); - property.put(SearchConstants.propertyName, "*"); - property.put(SearchConstants.values, Arrays.asList("हिन्दी")); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("31 check name match")); - property.put(SearchConstants.propertyName, "name"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_NOT_EQUAL); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setFuzzySearch(true); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() > 0); - boolean found = false; - for (Object obj : results) { - Map content = (Map) obj; - String desc = (String) content.get("name"); - if (null != desc && !StringUtils.equalsIgnoreCase("31 check name match", desc)) - found = true; - } - Assert.assertTrue(found); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testFuzzySearchByQueryForNotIn() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE); - property.put(SearchConstants.propertyName, "*"); - property.put(SearchConstants.values, Arrays.asList("हिन्दी")); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("31 check name match")); - property.put(SearchConstants.propertyName, "name"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_NOT_IN); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setFuzzySearch(true); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() > 0); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testFuzzySearchByQueryFields() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List 
properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE); - property.put(SearchConstants.propertyName, "*"); - property.put(SearchConstants.values, Arrays.asList("हिन्दी")); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - List fields = new ArrayList(); - fields.add("description"); - searchObj.setFields(fields); - searchObj.setFuzzySearch(true); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() > 0); - boolean found = false; - for (Object obj : results) { - Map content = (Map) obj; - String desc = (String) content.get("description"); - if (null != desc && desc.contains("हिन्दी")) - found = true; - } - Assert.assertTrue(found); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testFuzzySearchArrayFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - List names = new ArrayList(); - names.add("31 check name match"); - names.add("check ends with value32"); - property = new HashMap(); - property.put(SearchConstants.values, names); - property.put(SearchConstants.propertyName, "name"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setFuzzySearch(true); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 2); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testFuzzySearchStartsWithFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, "31 check"); - property.put(SearchConstants.propertyName, "name"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_STARTS_WITH); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setFuzzySearch(true); - 
searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testFuzzySearchEndsWithFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, "value32"); - property.put(SearchConstants.propertyName, "name"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_ENDS_WITH); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setFuzzySearch(true); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testFuzzySearchLTFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, 1000432); - property.put(SearchConstants.propertyName, "size"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LESS_THAN); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setFuzzySearch(true); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testFuzzySearchLEGEFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, 1000432); - property.put(SearchConstants.propertyName, "size"); - property.put(SearchConstants.operation, - SearchConstants.SEARCH_OPERATION_LESS_THAN_EQUALS); - properties.add(property); - property = new HashMap(); - 
property.put(SearchConstants.values, 1000432); - property.put(SearchConstants.propertyName, "size"); - property.put(SearchConstants.operation, - SearchConstants.SEARCH_OPERATION_GREATER_THAN_EQUALS); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setFuzzySearch(true); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testFuzzySearchGTFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, 564738); - property.put(SearchConstants.propertyName, "size"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_GREATER_THAN); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setFuzzySearch(true); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testFuzzySearchContainsFilter() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, "check"); - property.put(SearchConstants.propertyName, "name"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_CONTAINS); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setFuzzySearch(true); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testFuzzySearchExistsCondition() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, 
SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, "objectType"); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EXISTS); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setFuzzySearch(true); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() >= 1); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test - public void testFuzzySoftConstraints() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - searchObj.setProperties(properties); - List fields = new ArrayList(); - fields.add("name"); - fields.add("medium"); - fields.add("subject"); - fields.add("contentType"); - searchObj.setFields(fields); - Map softConstraints = new HashMap(); - softConstraints.put("name", Arrays.asList(100, "31 check name match")); - searchObj.setSoftConstraints(softConstraints); - searchObj.setLimit(100); - searchObj.setFuzzySearch(true); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } - - @SuppressWarnings({ "rawtypes", "unchecked" }) - @Test - public void testSearchAndFilters() throws Exception { - SearchDTO searchObj = new SearchDTO(); - List properties = new ArrayList(); - Map property = new HashMap(); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_AND); - property.put(SearchConstants.propertyName, "subject"); - property.put(SearchConstants.values, Arrays.asList("English", "Mathematics")); - properties.add(property); - property = new HashMap(); - property.put(SearchConstants.values, Arrays.asList("Content")); - property.put(SearchConstants.propertyName, "objectType"); - property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); - properties.add(property); - searchObj.setProperties(properties); - searchObj.setLimit(100); - searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); - Future> res = searchprocessor.processSearch(searchObj, true); - res.onSuccess(new OnSuccess>() { - public void onSuccess(Map response) { - List results = (List) response.get("results"); - Assert.assertNotNull(results); - Assert.assertTrue(results.size() == 1); - boolean found = false; - for (Object obj : results) { - Map content = (Map) obj; - List desc = (List) content.get("subject"); - if (null != desc && desc.contains("English") && desc.contains("Mathematics")) - found = true; - } - Assert.assertTrue(found); - } - }, ExecutionContext.Implicits$.MODULE$.global()); - } -} + private static SearchProcessor searchprocessor = new SearchProcessor(); + private static final Duration AWAIT_TIMEOUT = 
Duration.create(10, TimeUnit.SECONDS);
+
+	@BeforeClass
+	public static void beforeTest() throws Exception {
+		createCompositeSearchIndex();
+		insertTestRecords();
+		Thread.sleep(3000);
+	}
+
+	private static void insertTestRecords() throws Exception {
+		for (int i = 1; i <= 30; i++) {
+			Map content = getContentTestRecord(null, i);
+			String id = (String) content.get("identifier");
+			addToIndex(id, content);
+		}
+		Map content = getContentTestRecord("do_10000031", 31);
+		content.put("name", "31 check name match");
+		content.put("description", "हिन्दी description");
+		content.put("subject", Arrays.asList("English", "Mathematics"));
+		addToIndex("do_10000031", content);
+
+		content = getContentTestRecord("do_10000032", 32);
+		content.put("name", "check ends with value32");
+		content.put("subject", Arrays.asList("Mathematics"));
+		addToIndex("do_10000032", content);
+	}
+
+	private static Map getContentTestRecord(String id, int index) {
+		String objectType = "Content";
+		Date d = new Date();
+		Map map = getTestRecord(id, index, "do", objectType);
+		map.put("name", "Content_" + System.currentTimeMillis() + "_name");
+		map.put("code", "code_" + System.currentTimeMillis());
+		map.put("contentType", getContentType());
+		map.put("createdOn", new Date().toString());
+		map.put("lastUpdatedOn", new Date().toString());
+		if (index % 5 == 0) {
+			map.put("lastPublishedOn", d.toString());
+			map.put("status", "Live");
+			map.put("size", 1000432);
+		} else {
+			map.put("status", "Draft");
+			if (index % 3 == 0)
+				map.put("size", 564738);
+		}
+		Set tagList = getTags();
+		if (null != tagList && !tagList.isEmpty() && index % 7 != 0)
+			map.put("tags", tagList);
+		map.put("downloads", index);
+		return map;
+	}
+
+	private static Map getTestRecord(String id, int index, String prefix, String objectType) {
+		Map map = new HashMap();
+		if (StringUtils.isNotBlank(id))
+			map.put("identifier", id);
+		else {
+			long suffix = 10000000 + index;
+			map.put("identifier", prefix + "_" + suffix);
+		}
+		map.put("objectType", objectType);
+		return map;
+	}
+
+	private static String[] contentTypes = new String[] { "Story", "Worksheet", "Game", "Collection", "Asset" };
+
+	private static String getContentType() {
+		return contentTypes[RandomUtils.nextInt(5)];
+	}
+
+	private static String[] tags = new String[] { "hindi story", "NCERT", "Pratham", "एकस्टेप", "हिन्दी",
+			"हाथी और भालू", "worksheet", "test" };
+
+	private static Set getTags() {
+		Set list = new HashSet();
+		int count = RandomUtils.nextInt(9);
+		for (int i = 0; i < count; i++) {
+			list.add(tags[RandomUtils.nextInt(8)]);
+		}
+		return list;
+	}
+
+	@SuppressWarnings({ "rawtypes", "unchecked" })
+	@Test
+	public void testSearchByQuery() throws Exception {
+		SearchDTO searchObj = new SearchDTO();
+		List properties = new ArrayList();
+		Map property = new HashMap();
+		property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE);
+		property.put(SearchConstants.propertyName, "*");
+		property.put(SearchConstants.values, Arrays.asList("हिन्दी"));
+		properties.add(property);
+		property = new HashMap();
+		property.put(SearchConstants.values, Arrays.asList("Content"));
+		property.put(SearchConstants.propertyName, "objectType");
+		property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL);
+		properties.add(property);
+		searchObj.setProperties(properties);
+		searchObj.setLimit(100);
+		searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND);
+		Future<Map<String, Object>> res = searchprocessor.processSearch(searchObj, true);
+
+		Map success = Await.result(res, AWAIT_TIMEOUT);
+		Assert.assertNotNull(success);
+		List results = (List) success.get("results");
+		Assert.assertNotNull(results);
+		Assert.assertTrue(results.size() >= 1);
+		boolean found = false;
+		for (Object obj : results) {
+			Map content = (Map) obj;
+			String desc = (String) content.get("description");
+			if (null != desc && desc.contains("हिन्दी"))
+				found = true;
+		}
+		Assert.assertTrue(found);
+	}
+
+	@SuppressWarnings({ "unchecked", "rawtypes" })
+	@Test
+	public void testSearchByQueryForNotEquals() throws Exception {
+		SearchDTO searchObj = new SearchDTO();
+		List properties = new ArrayList();
+		Map property = new HashMap();
+		property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE);
+		property.put(SearchConstants.propertyName, "*");
+		property.put(SearchConstants.values, Arrays.asList("हिन्दी"));
+		properties.add(property);
+		property = new HashMap();
+		property.put(SearchConstants.values, Arrays.asList("Content"));
+		property.put(SearchConstants.propertyName, "objectType");
+		property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL);
+		properties.add(property);
+		property = new HashMap();
+		property.put(SearchConstants.values, Arrays.asList("31 check name match"));
+		property.put(SearchConstants.propertyName, "name");
+		property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_NOT_EQUAL);
+		properties.add(property);
+		searchObj.setProperties(properties);
+		searchObj.setLimit(100);
+		searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND);
+		Future<Map<String, Object>> res = searchprocessor.processSearch(searchObj, true);
+
+		Map success = Await.result(res, AWAIT_TIMEOUT);
+		Assert.assertNotNull(success);
+		List results = (List) success.get("results");
+		Assert.assertNotNull(results);
+		Assert.assertTrue(results.size() > 0);
+		boolean found = false;
+		for (Object obj : results) {
+			Map content = (Map) obj;
+			String desc = (String) content.get("name");
+			if (null != desc && !StringUtils.equalsIgnoreCase("31 check name match", desc))
+				found = true;
+		}
+		Assert.assertTrue(found);
+	}
+
+	@SuppressWarnings({ "unchecked", "rawtypes" })
+	@Test
+	public void testSearchByQueryForNotIn() throws Exception {
+		SearchDTO searchObj = new SearchDTO();
+		List properties = new ArrayList();
+		Map property = new HashMap();
+		property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE);
+		property.put(SearchConstants.propertyName, "*");
+		property.put(SearchConstants.values, Arrays.asList("हिन्दी"));
+		properties.add(property);
+		property = new HashMap();
+		property.put(SearchConstants.values, Arrays.asList("Content"));
+		property.put(SearchConstants.propertyName, "objectType");
+		property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL);
+		properties.add(property);
+		property = new HashMap();
+		property.put(SearchConstants.values, Arrays.asList("31 check name match"));
+		property.put(SearchConstants.propertyName, "name");
+		property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_NOT_IN);
+		properties.add(property);
+		searchObj.setProperties(properties);
+		searchObj.setLimit(100);
+		searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND);
+		Future<Map<String, Object>> res = searchprocessor.processSearch(searchObj, true);
+
+		Map success = Await.result(res, AWAIT_TIMEOUT);
+		Assert.assertNotNull(success);
+		List results = (List) success.get("results");
+		Assert.assertNotNull(results);
+		Assert.assertTrue(results.size() > 0);
+		boolean found = true;
+		for (Object obj : results) {
+			Map content = (Map) obj;
+
String desc = (String) content.get("name"); + if (null != desc && StringUtils.equalsIgnoreCase("31 check name match", desc)) + found = false; + } + Assert.assertTrue(found); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testSearchByQueryFields() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE); + property.put(SearchConstants.propertyName, "*"); + property.put(SearchConstants.values, Arrays.asList("हिन्दी")); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + List fields = new ArrayList(); + fields.add("description"); + searchObj.setFields(fields); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() > 0); + boolean found = false; + for (Object obj : results) { + Map content = (Map) obj; + String desc = (String) content.get("description"); + if (null != desc && desc.contains("हिन्दी")) + found = true; + } + Assert.assertTrue(found); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testSearchArrayFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + List names = new ArrayList(); + names.add("31 check name match"); + names.add("check ends with value32"); + property = new HashMap(); + property.put(SearchConstants.values, names); + property.put(SearchConstants.propertyName, "name"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() == 2); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testSearchStartsWithFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, "31 check"); + property.put(SearchConstants.propertyName, "name"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_STARTS_WITH); + 
properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() == 1); + Map content = (Map) results.get(0); + String identifier = (String) content.get("identifier"); + Assert.assertEquals("do_10000031", identifier); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testSearchEndsWithFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, "Value32"); + property.put(SearchConstants.propertyName, "name"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_ENDS_WITH); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() == 1); + Map content = (Map) results.get(0); + String identifier = (String) content.get("identifier"); + Assert.assertEquals("do_10000032", identifier); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testSearchLTFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, 1000432); + property.put(SearchConstants.propertyName, "size"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LESS_THAN); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() >= 1); + for (Object obj : results) { + Map content = (Map) obj; + Integer identifier = (Integer) content.get("size"); + if (null != identifier) + Assert.assertTrue(identifier < 1000432); + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testSearchLEGEFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, 
SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, 1000432); + property.put(SearchConstants.propertyName, "size"); + property.put(SearchConstants.operation, + SearchConstants.SEARCH_OPERATION_LESS_THAN_EQUALS); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, 1000432); + property.put(SearchConstants.propertyName, "size"); + property.put(SearchConstants.operation, + SearchConstants.SEARCH_OPERATION_GREATER_THAN_EQUALS); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() >= 1); + for (Object obj : results) { + Map content = (Map) obj; + Integer identifier = (Integer) content.get("size"); + if (null != identifier) + Assert.assertTrue(identifier == 1000432); + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testSearchGTFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, 564738); + property.put(SearchConstants.propertyName, "size"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_GREATER_THAN); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() >= 1); + for (Object obj : results) { + Map content = (Map) obj; + Integer identifier = (Integer) content.get("size"); + if (null != identifier) + Assert.assertTrue(identifier > 564738); + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testSearchContainsFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, "check"); + property.put(SearchConstants.propertyName, "name"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_CONTAINS); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + 
Assert.assertTrue(results.size() >= 1); + for (Object obj : results) { + Map content = (Map) obj; + String identifier = (String) content.get("name"); + Assert.assertTrue(identifier.contains("check")); + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testSearchExistsCondition() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, "objectType"); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EXISTS); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() >= 1); + for (Object obj : results) { + Map content = (Map) obj; + String objectType = (String) content.get("objectType"); + Assert.assertNotNull(objectType); + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testSoftConstraints() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + searchObj.setProperties(properties); + List fields = new ArrayList(); + fields.add("name"); + fields.add("medium"); + fields.add("subject"); + fields.add("contentType"); + searchObj.setFields(fields); + Map softConstraints = new HashMap(); + softConstraints.put("name", + Arrays.asList(100, Arrays.asList("31 check name match", "check ends with value32"))); + searchObj.setSoftConstraints(softConstraints); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + searchObj.setLimit(100); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertEquals("31 check name match", results.get(0).get("name")); + Assert.assertEquals("check ends with value32", results.get(1).get("name")); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testSearchFacets() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + searchObj.setProperties(properties); + List exists = new ArrayList(); + exists.add("size"); + exists.add("contentType"); + searchObj.setFacets(exists); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + searchObj.setLimit(100); + Future> 
res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List list = (List) success.get("facets"); + Assert.assertNotNull(list); + Assert.assertTrue(list.size() > 1); + Map facet = (Map) list.get(0); + Assert.assertEquals("size", facet.get("name").toString()); + List values = (List) facet.get("values"); + Assert.assertEquals(2, values.size()); + } + + @SuppressWarnings("rawtypes") + @Test + public void testSearchCount() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + searchObj.setLimit(100); + Map response = searchprocessor.processCount(searchObj); + Integer count = (Integer) response.get("count"); + Assert.assertNotNull(count); + + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testFuzzySearchByQuery() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE); + property.put(SearchConstants.propertyName, "*"); + property.put(SearchConstants.values, Arrays.asList("हिन्दी")); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + searchObj.setFuzzySearch(true); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() >= 1); + boolean found = false; + for (Object obj : results) { + Map content = (Map) obj; + String desc = (String) content.get("description"); + if (null != desc && desc.contains("हिन्दी")) + found = true; + } + Assert.assertTrue(found); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testFuzzySearchByQueryForNotEquals() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE); + property.put(SearchConstants.propertyName, "*"); + property.put(SearchConstants.values, Arrays.asList("हिन्दी")); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("31 check name match")); + property.put(SearchConstants.propertyName, "name"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_NOT_EQUAL); + properties.add(property); + 
searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setFuzzySearch(true); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() > 0); + boolean found = false; + for (Object obj : results) { + Map content = (Map) obj; + String desc = (String) content.get("name"); + if (null != desc && !StringUtils.equalsIgnoreCase("31 check name match", desc)) + found = true; + } + Assert.assertTrue(found); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testFuzzySearchByQueryForNotIn() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE); + property.put(SearchConstants.propertyName, "*"); + property.put(SearchConstants.values, Arrays.asList("हिन्दी")); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("31 check name match")); + property.put(SearchConstants.propertyName, "name"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_NOT_IN); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setFuzzySearch(true); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() > 0); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testFuzzySearchByQueryFields() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LIKE); + property.put(SearchConstants.propertyName, "*"); + property.put(SearchConstants.values, Arrays.asList("हिन्दी")); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + List fields = new ArrayList(); + fields.add("description"); + searchObj.setFields(fields); + searchObj.setFuzzySearch(true); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() > 0); + boolean found = false; + for (Object obj : results) { + Map content = (Map) obj; + String desc = (String) content.get("description"); + if (null != desc && 
desc.contains("हिन्दी")) + found = true; + } + Assert.assertTrue(found); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testFuzzySearchArrayFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + List names = new ArrayList(); + names.add("31 check name match"); + names.add("check ends with value32"); + property = new HashMap(); + property.put(SearchConstants.values, names); + property.put(SearchConstants.propertyName, "name"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setFuzzySearch(true); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() >= 2); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testFuzzySearchStartsWithFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, "31 check"); + property.put(SearchConstants.propertyName, "name"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_STARTS_WITH); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setFuzzySearch(true); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() >= 1); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testFuzzySearchEndsWithFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, "value32"); + property.put(SearchConstants.propertyName, "name"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_ENDS_WITH); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setFuzzySearch(true); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) 
success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() >= 1); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testFuzzySearchLTFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, 1000432); + property.put(SearchConstants.propertyName, "size"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_LESS_THAN); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setFuzzySearch(true); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() >= 1); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testFuzzySearchLEGEFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, 1000432); + property.put(SearchConstants.propertyName, "size"); + property.put(SearchConstants.operation, + SearchConstants.SEARCH_OPERATION_LESS_THAN_EQUALS); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, 1000432); + property.put(SearchConstants.propertyName, "size"); + property.put(SearchConstants.operation, + SearchConstants.SEARCH_OPERATION_GREATER_THAN_EQUALS); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setFuzzySearch(true); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() >= 1); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testFuzzySearchGTFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, 564738); + property.put(SearchConstants.propertyName, "size"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_GREATER_THAN); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setFuzzySearch(true); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + 
Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() >= 1); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testFuzzySearchContainsFilter() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, "check"); + property.put(SearchConstants.propertyName, "name"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_CONTAINS); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setFuzzySearch(true); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() >= 1); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testFuzzySearchExistsCondition() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, "objectType"); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EXISTS); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setFuzzySearch(true); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() >= 1); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testFuzzySoftConstraints() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + searchObj.setProperties(properties); + List fields = new ArrayList(); + fields.add("name"); + fields.add("medium"); + fields.add("subject"); + fields.add("contentType"); + searchObj.setFields(fields); + Map softConstraints = new HashMap(); + softConstraints.put("name", Arrays.asList(100, "31 check name match")); + searchObj.setSoftConstraints(softConstraints); + searchObj.setLimit(100); + searchObj.setFuzzySearch(true); + Future> res = searchprocessor.processSearch(searchObj, true); + 
+ Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Test + public void testSearchAndFilters() throws Exception { + SearchDTO searchObj = new SearchDTO(); + List properties = new ArrayList(); + Map property = new HashMap(); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_AND); + property.put(SearchConstants.propertyName, "subject"); + property.put(SearchConstants.values, Arrays.asList("English", "Mathematics")); + properties.add(property); + property = new HashMap(); + property.put(SearchConstants.values, Arrays.asList("Content")); + property.put(SearchConstants.propertyName, "objectType"); + property.put(SearchConstants.operation, SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + searchObj.setProperties(properties); + searchObj.setLimit(100); + searchObj.setOperation(SearchConstants.SEARCH_OPERATION_AND); + Future> res = searchprocessor.processSearch(searchObj, true); + + Map success = Await.result(res, AWAIT_TIMEOUT); + Assert.assertNotNull(success); + List results = (List) success.get("results"); + Assert.assertNotNull(results); + Assert.assertTrue(results.size() == 1); + boolean found = false; + for (Object obj : results) { + Map content = (Map) obj; + List desc = (List) content.get("subject"); + if (null != desc && desc.contains("English") && desc.contains("Mathematics")) + found = true; + } + Assert.assertTrue(found); + } +} \ No newline at end of file diff --git a/search-api/search-core/src/test/resources/application.conf b/search-api/search-core/src/test/resources/application.conf index 87db66626..48cbdbdab 100644 --- a/search-api/search-core/src/test/resources/application.conf +++ b/search-api/search-core/src/test/resources/application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. 
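Note: the fuzzy-search tests above all build the same filter shape — a list of property maps carrying values, propertyName and operation — before handing it to SearchDTO.setProperties. A minimal Scala sketch of that shape, using only the SearchConstants keys and literal values seen in the tests (the SearchDTO call itself is omitted, and whether setProperties accepts exactly this generic type is not asserted here):

import org.sunbird.search.util.SearchConstants
import scala.jdk.CollectionConverters._

// One filter per property; SearchDTO AND-s them together when the operation is SEARCH_OPERATION_AND.
val objectTypeFilter: Map[String, AnyRef] = Map(
  SearchConstants.values       -> java.util.Arrays.asList("Content"),
  SearchConstants.propertyName -> "objectType",
  SearchConstants.operation    -> SearchConstants.SEARCH_OPERATION_EQUAL)
val sizeFilter: Map[String, AnyRef] = Map(
  SearchConstants.values       -> Integer.valueOf(564738),
  SearchConstants.propertyName -> "size",
  SearchConstants.operation    -> SearchConstants.SEARCH_OPERATION_GREATER_THAN)
// java.util.List[java.util.Map[String, AnyRef]] in the same shape the tests pass to setProperties.
val properties = List(objectTypeFilter.asJava, sizeFilter.asJava).asJava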
# @@ -278,7 +278,7 @@ play.filters { } play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB +pekko.http.parsing.max-content-length = 50MB schema.base_path="../../schemas/" diff --git a/search-api/search-service/app/controllers/AuditHistoryController.scala b/search-api/search-service/app/controllers/AuditHistoryController.scala index 7f5e433cd..e8a052fa5 100644 --- a/search-api/search-service/app/controllers/AuditHistoryController.scala +++ b/search-api/search-service/app/controllers/AuditHistoryController.scala @@ -1,6 +1,6 @@ package controllers -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} import com.google.inject.Inject import com.google.inject.name.Named import handlers.LoggingAction @@ -8,13 +8,13 @@ import managers.SearchManager import org.sunbird.search.util.SearchConstants import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId} +import scala.jdk.CollectionConverters._ import java.util import org.apache.commons.lang3.StringUtils import org.sunbird.graph.common.enums.AuditProperties import org.sunbird.telemetry.logger.TelemetryManager -import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext class AuditHistoryController @Inject()(@Named(ActorNames.AUDIT_HISTORY_ACTOR) auditHistoryActor: ActorRef, loggingAction: LoggingAction, cc: ControllerComponents, actorSystem: ActorSystem)(implicit exec: ExecutionContext) extends SearchBaseController(cc) { diff --git a/search-api/search-service/app/controllers/HealthController.scala b/search-api/search-service/app/controllers/HealthController.scala index c5813ba20..a78f08877 100644 --- a/search-api/search-service/app/controllers/HealthController.scala +++ b/search-api/search-service/app/controllers/HealthController.scala @@ -1,6 +1,6 @@ package controllers -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} import handlers.SignalHandler import javax.inject.{Inject, Named} import org.sunbird.common.JsonUtils diff --git a/search-api/search-service/app/controllers/SearchBaseController.scala b/search-api/search-service/app/controllers/SearchBaseController.scala index faecaf067..679dcccaf 100644 --- a/search-api/search-service/app/controllers/SearchBaseController.scala +++ b/search-api/search-service/app/controllers/SearchBaseController.scala @@ -1,7 +1,7 @@ package controllers -import akka.actor.ActorRef -import akka.pattern.Patterns +import org.apache.pekko.actor.ActorRef +import org.apache.pekko.pattern.Patterns import org.apache.commons.lang3.StringUtils import org.sunbird.common.dto.{RequestParams, Response, ResponseHandler} import org.sunbird.common.exception.ResponseCode @@ -11,8 +11,8 @@ import play.api.mvc._ import java.util import java.util.UUID -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} +import scala.jdk.CollectionConverters._ abstract class SearchBaseController(protected val cc: ControllerComponents)(implicit exec: ExecutionContext) extends AbstractController(cc) { @@ -26,15 +26,15 @@ abstract class SearchBaseController(protected val cc: ControllerComponents)(impl def commonHeaders()(implicit request: Request[AnyContent]): java.util.Map[String, Object] = { val customHeaders = Map("x-channel-id" -> "CHANNEL_ID", "x-consumer-id" -> "CONSUMER_ID", "x-app-id" -> "APP_ID", "x-session-id" -> "SESSION_ID", "x-device-id" -> "DEVICE_ID") - val headers = request.headers.headers.groupBy(_._1).mapValues(_.map(_._2)) + val headers = 
request.headers.headers.groupBy(_._1).view.mapValues(_.map(_._2)).toMap val appHeaders = headers.filter(header => customHeaders.keySet.contains(header._1.toLowerCase)) .map(entry => (customHeaders.get(entry._1.toLowerCase()).get, entry._2.head)) - val contextMap = { - if(appHeaders.contains("CHANNEL_ID")) - appHeaders - else appHeaders + ("CHANNEL_ID"-> DEFAULT_CHANNEL_ID) + val contextMap: Map[String, String] = { + if(appHeaders.toMap.contains("CHANNEL_ID")) + appHeaders.toMap + else appHeaders.toMap + ("CHANNEL_ID" -> DEFAULT_CHANNEL_ID) } - mapAsJavaMap(contextMap) + contextMap.asJava.asInstanceOf[java.util.Map[String, Object]] } def getRequest(input: java.util.Map[String, AnyRef], context: java.util.Map[String, AnyRef], operation: String): org.sunbird.common.dto.Request = { diff --git a/search-api/search-service/app/controllers/SearchController.scala b/search-api/search-service/app/controllers/SearchController.scala index 0b94f1f05..b31369d90 100644 --- a/search-api/search-service/app/controllers/SearchController.scala +++ b/search-api/search-service/app/controllers/SearchController.scala @@ -1,6 +1,6 @@ package controllers -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} import com.google.inject.Inject import com.google.inject.name.Named import handlers.LoggingAction @@ -8,12 +8,12 @@ import managers.SearchManager import org.sunbird.search.util.SearchConstants import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId} +import scala.jdk.CollectionConverters._ import java.util import org.apache.commons.lang3.StringUtils import org.sunbird.common.Platform -import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext class SearchController @Inject()(@Named(ActorNames.SEARCH_ACTOR) searchActor: ActorRef, loggingAction: LoggingAction, cc: ControllerComponents, actorSystem: ActorSystem)(implicit exec: ExecutionContext) extends SearchBaseController(cc) { diff --git a/search-api/search-service/app/filters/AccessLogFilter.scala b/search-api/search-service/app/filters/AccessLogFilter.scala index 96e027a7a..1b801f9f5 100644 --- a/search-api/search-service/app/filters/AccessLogFilter.scala +++ b/search-api/search-service/app/filters/AccessLogFilter.scala @@ -1,14 +1,14 @@ package filters -import akka.util.ByteString +import org.apache.pekko.util.ByteString import javax.inject.Inject import play.api.Logging import org.sunbird.telemetry.util.TelemetryAccessEventUtil import play.api.libs.streams.Accumulator import play.api.mvc._ -import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext +import scala.jdk.CollectionConverters._ class AccessLogFilter @Inject() (implicit ec: ExecutionContext) extends EssentialFilter with Logging { @@ -27,7 +27,7 @@ class AccessLogFilter @Inject() (implicit ec: ExecutionContext) extends Essentia val path = requestHeader.uri if(!path.contains("/health")){ - val headers = requestHeader.headers.headers.groupBy(_._1).mapValues(_.map(_._2)) + val headers = requestHeader.headers.headers.groupBy(_._1).view.mapValues(_.map(_._2)).toMap val appHeaders = headers.filter(header => xHeaderNames.keySet.contains(header._1.toLowerCase)) .map(entry => (xHeaderNames.get(entry._1.toLowerCase()).get, entry._2.head)) val otherDetails = Map[String, Any]("StartTime" -> startTime, "env" -> "search", diff --git a/search-api/search-service/app/handlers/SignalHandler.scala b/search-api/search-service/app/handlers/SignalHandler.scala index 4cad301c1..3945a5e32 100644 --- 
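Note: the header-grouping change in SearchBaseController and AccessLogFilter above is the standard Scala 2.13 adjustment — Map#mapValues now returns a lazy MapView, so the result is materialised with .view ... .toMap, and the java.util.Map handed back to Java callers is produced with scala.jdk.CollectionConverters instead of the removed mapAsJavaMap. A self-contained sketch of the same idiom (the header names and values are made up for illustration):

import scala.jdk.CollectionConverters._

val rawHeaders: Seq[(String, String)] =
  Seq("x-channel-id" -> "channel-1", "x-device-id" -> "device-1", "x-device-id" -> "device-2")

// Scala 2.13: mapValues yields a view, so force it back into a strict Map.
val grouped: Map[String, Seq[String]] =
  rawHeaders.groupBy(_._1).view.mapValues(_.map(_._2)).toMap

// asJava replaces mapAsJavaMap; the cast mirrors the controller code, which exposes
// the result to Java as java.util.Map[String, Object].
val forJava: java.util.Map[String, Object] =
  grouped.map { case (k, v) => k -> v.head }.asJava.asInstanceOf[java.util.Map[String, Object]]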
a/search-api/search-service/app/handlers/SignalHandler.scala +++ b/search-api/search-service/app/handlers/SignalHandler.scala @@ -2,13 +2,14 @@ package handlers import java.util.concurrent.TimeUnit -import akka.actor.ActorSystem +import org.apache.pekko.actor.ActorSystem import javax.inject.{Inject, Singleton} import org.slf4j.LoggerFactory import play.api.inject.DefaultApplicationLifecycle import sun.misc.Signal import scala.concurrent.duration.Duration +import scala.concurrent.ExecutionContext @Singleton class SignalHandler @Inject()(implicit actorSystem: ActorSystem, lifecycle: DefaultApplicationLifecycle) { @@ -22,10 +23,10 @@ class SignalHandler @Inject()(implicit actorSystem: ActorSystem, lifecycle: Defa // $COVERAGE-OFF$ Disabling scoverage as this code is impossible to test isShuttingDown = true println("Termination required, swallowing SIGTERM to allow current requests to finish. : " + System.currentTimeMillis()) - actorSystem.scheduler.scheduleOnce(STOP_DELAY)(() => { + actorSystem.scheduler.scheduleOnce(STOP_DELAY) { println("ApplicationLifecycle stop triggered... : " + System.currentTimeMillis()) lifecycle.stop() - })(actorSystem.dispatcher) + }(ExecutionContext.global) // $COVERAGE-ON } }) diff --git a/search-api/search-service/app/managers/SearchManager.java b/search-api/search-service/app/managers/SearchManager.java index bd5cdf4e7..2efa09768 100644 --- a/search-api/search-service/app/managers/SearchManager.java +++ b/search-api/search-service/app/managers/SearchManager.java @@ -1,10 +1,10 @@ package managers; -import akka.actor.ActorRef; -import akka.dispatch.ExecutionContexts; -import akka.dispatch.Futures; -import akka.dispatch.Mapper; -import akka.util.Timeout; +import org.apache.pekko.actor.ActorRef; +import org.apache.pekko.dispatch.ExecutionContexts; +import org.apache.pekko.dispatch.Futures; +import org.apache.pekko.dispatch.Mapper; +import org.apache.pekko.util.Timeout; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.collections.CollectionUtils; @@ -35,7 +35,7 @@ import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import static akka.pattern.Patterns.ask; +import static org.apache.pekko.pattern.Patterns.ask; public class SearchManager { protected ObjectMapper mapper = new ObjectMapper(); diff --git a/search-api/search-service/app/modules/SearchModule.scala b/search-api/search-service/app/modules/SearchModule.scala index 1bfdeb695..c1a5b3582 100644 --- a/search-api/search-service/app/modules/SearchModule.scala +++ b/search-api/search-service/app/modules/SearchModule.scala @@ -3,16 +3,16 @@ package modules import com.google.inject.AbstractModule import org.sunbird.actors.{AuditHistoryActor, HealthActor, SearchActor} import org.sunbird.telemetry.TelemetryGenerator -import play.libs.akka.AkkaGuiceSupport +import play.api.libs.concurrent.PekkoGuiceSupport import utils.ActorNames -class SearchModule extends AbstractModule with AkkaGuiceSupport { +class SearchModule extends AbstractModule with PekkoGuiceSupport { override def configure() = { super.configure() - bindActor(classOf[HealthActor], ActorNames.HEALTH_ACTOR) - bindActor(classOf[SearchActor], ActorNames.SEARCH_ACTOR) - bindActor(classOf[AuditHistoryActor], ActorNames.AUDIT_HISTORY_ACTOR) + bindActor[HealthActor](ActorNames.HEALTH_ACTOR) + bindActor[SearchActor](ActorNames.SEARCH_ACTOR) + bindActor[AuditHistoryActor](ActorNames.AUDIT_HISTORY_ACTOR) 
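Note: the SearchModule change above is the Pekko flavour of Play's Guice actor support — play.api.libs.concurrent.PekkoGuiceSupport replaces play.libs.akka.AkkaGuiceSupport, and actors are bound with a type parameter instead of a Class[_] argument. A minimal sketch with a throwaway actor (PingActor and the "ping-actor" name are illustrative only, not part of this codebase):

import com.google.inject.AbstractModule
import org.apache.pekko.actor.Actor
import play.api.libs.concurrent.PekkoGuiceSupport

// Illustrative actor; any org.apache.pekko.actor.Actor is bound the same way.
class PingActor extends Actor {
  override def receive: Receive = { case msg => sender() ! msg }
}

class PingModule extends AbstractModule with PekkoGuiceSupport {
  override def configure(): Unit = {
    // bindActor[T](name) makes the ActorRef injectable via @Named("ping-actor").
    bindActor[PingActor]("ping-actor")
  }
}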
TelemetryGenerator.setComponent("search-service") println("Initialized application actors for search-service") } diff --git a/search-api/search-service/conf/application.conf b/search-api/search-service/conf/application.conf index 9f9efe7e6..4483f237d 100644 --- a/search-api/search-service/conf/application.conf +++ b/search-api/search-service/conf/application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. # @@ -278,7 +278,7 @@ play.filters { } play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB +pekko.http.parsing.max-content-length = 50MB schema.base_path="../../schemas/" diff --git a/search-api/search-service/conf/logback.xml b/search-api/search-service/conf/logback.xml index 73529d622..ce441915b 100644 --- a/search-api/search-service/conf/logback.xml +++ b/search-api/search-service/conf/logback.xml @@ -4,10 +4,11 @@ - + + - %d %msg%n + %d{HH:mm:ss.SSS} %coloredLevel %logger{36} - %msg%n @@ -15,12 +16,17 @@ - + + + + + + diff --git a/search-api/search-service/pom.xml b/search-api/search-service/pom.xml index 4cd7576bd..f23aff7a1 100644 --- a/search-api/search-service/pom.xml +++ b/search-api/search-service/pom.xml @@ -34,14 +34,83 @@ - 2.8.20 + 3.0.5 1.0.0-rc5 1.0.0 + 4.1.112.Final + + + + + io.netty + netty-bom + ${netty.version} + pom + import + + + io.netty + netty-common + ${netty.version} + + + io.netty + netty-buffer + ${netty.version} + + + io.netty + netty-transport + ${netty.version} + + + io.netty + netty-handler + ${netty.version} + + + io.netty + netty-codec + ${netty.version} + + + io.netty + netty-codec-http + ${netty.version} + + + io.netty + netty-codec-http2 + ${netty.version} + + + io.netty + netty-resolver + ${netty.version} + + + io.netty + netty-transport-native-epoll + ${netty.version} + + + io.netty + netty-transport-native-unix-common + ${netty.version} + + + io.netty + netty-transport-classes-epoll + ${netty.version} + + + + - com.typesafe.play + org.playframework play_${scala.major.version} ${play2.version} @@ -68,7 +137,7 @@ - com.typesafe.play + org.playframework play-guice_${scala.major.version} ${play2.version} @@ -87,29 +156,23 @@ - com.typesafe.play - filters-helpers_${scala.major.version} + org.playframework + play-filters-helpers_${scala.major.version} ${play2.version} - com.typesafe.play + org.playframework play-logback_${scala.major.version} ${play2.version} runtime - - - ch.qos.logback - logback-classic - - - ch.qos.logback - logback-classic - 1.2.13 + org.slf4j + slf4j-api + 2.0.16 - com.typesafe.play + org.playframework play-netty-server_${scala.major.version} ${play2.version} runtime @@ -123,7 +186,31 @@ io.netty netty-handler - 4.1.118.Final + + + + io.netty + netty-codec + + + io.netty + netty-codec-http + + + io.netty + netty-common + + + io.netty + netty-buffer + + + io.netty + netty-transport + + + io.netty + netty-resolver com.fasterxml.jackson.dataformat @@ -184,7 +271,7 @@ test - com.typesafe.play + org.playframework play-specs2_${scala.maj.version} ${play2.version} test diff --git 
a/search-api/search-service/test/controllers/BaseSpec.scala b/search-api/search-service/test/controllers/BaseSpec.scala index fa68c9ccc..7aeb75e9e 100644 --- a/search-api/search-service/test/controllers/BaseSpec.scala +++ b/search-api/search-service/test/controllers/BaseSpec.scala @@ -14,11 +14,11 @@ import scala.concurrent.Future class BaseSpec extends Specification { - implicit val app = new GuiceApplicationBuilder() + implicit val app: play.api.Application = new GuiceApplicationBuilder() .disable(classOf[modules.SearchModule]) .bindings(new TestModule) - .build - implicit val config = ConfigFactory.load(); + .build() + implicit val config: com.typesafe.config.Config = ConfigFactory.load(); def post(apiURL: String, request: String, h: FakeHeaders = FakeHeaders(Seq())) : Future[Result] = { @@ -26,13 +26,13 @@ class BaseSpec extends Specification { route(app, FakeRequest(POST, apiURL, headers, Json.toJson(Json.parse(request)))).get } - def isOK(response: Future[Result]) { + def isOK(response: Future[Result]): Unit = { status(response) must equalTo(OK) contentType(response) must beSome.which(_ == "application/json") contentAsString(response) must contain(""""status":"successful"""") } - def hasClientError(response: Future[Result]) { + def hasClientError(response: Future[Result]): Unit = { status(response) must equalTo(BAD_REQUEST) contentType(response) must beSome.which(_ == "application/json") contentAsString(response) must contain(""""err":"CLIENT_ERROR","status":"failed"""") diff --git a/search-api/search-service/test/modules/TestModule.scala b/search-api/search-service/test/modules/TestModule.scala index 0b68711db..9f3d349ad 100644 --- a/search-api/search-service/test/modules/TestModule.scala +++ b/search-api/search-service/test/modules/TestModule.scala @@ -3,18 +3,18 @@ package modules import com.google.inject.AbstractModule import org.sunbird.actor.core.BaseActor import org.sunbird.common.dto.{Request, Response, ResponseHandler} -import play.libs.akka.AkkaGuiceSupport +import play.api.libs.concurrent.PekkoGuiceSupport import utils.ActorNames import scala.concurrent.{ExecutionContext, Future} -class TestModule extends AbstractModule with AkkaGuiceSupport{ +class TestModule extends AbstractModule with PekkoGuiceSupport{ override def configure() = { super.configure() - bindActor(classOf[TestActor], ActorNames.HEALTH_ACTOR) - bindActor(classOf[TestActor], ActorNames.SEARCH_ACTOR) - bindActor(classOf[TestActor], ActorNames.AUDIT_HISTORY_ACTOR) + bindActor[TestActor](ActorNames.HEALTH_ACTOR) + bindActor[TestActor](ActorNames.SEARCH_ACTOR) + bindActor[TestActor](ActorNames.AUDIT_HISTORY_ACTOR) println("Initialized application actors for search-service") } } diff --git a/taxonomy-api/pom.xml b/taxonomy-api/pom.xml index 021ae730a..bd1f25432 100644 --- a/taxonomy-api/pom.xml +++ b/taxonomy-api/pom.xml @@ -19,7 +19,7 @@ UTF-8 UTF-8 - 2.12 + 2.13 diff --git a/taxonomy-api/taxonomy-actors/pom.xml b/taxonomy-api/taxonomy-actors/pom.xml index f37c10c7f..cb06780f7 100644 --- a/taxonomy-api/taxonomy-actors/pom.xml +++ b/taxonomy-api/taxonomy-actors/pom.xml @@ -23,7 +23,7 @@ org.sunbird - graph-engine_2.12 + graph-engine_2.13 1.0-SNAPSHOT jar @@ -40,9 +40,9 @@ test - com.typesafe.akka - akka-testkit_${scala.maj.version} - 2.5.22 + org.apache.pekko + pekko-testkit_${scala.maj.version} + 1.0.3 test diff --git a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/CategoryInstanceActor.scala b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/CategoryInstanceActor.scala index 
6833c37f7..9d6378787 100644 --- a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/CategoryInstanceActor.scala +++ b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/CategoryInstanceActor.scala @@ -14,10 +14,10 @@ import org.sunbird.graph.utils.NodeUtil import org.sunbird.utils.{Constants, RequestUtil} import java.util +import scala.jdk.CollectionConverters._ import java.util.Map import javax.inject.Inject import scala.concurrent.{ExecutionContext, Future} -import scala.collection.JavaConverters._ class CategoryInstanceActor @Inject()(implicit oec: OntologyEngineContext) extends BaseActor { implicit val ec: ExecutionContext = getContext().dispatcher diff --git a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/FrameworkActor.scala b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/FrameworkActor.scala index e528ad0e7..5e739f683 100644 --- a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/FrameworkActor.scala +++ b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/FrameworkActor.scala @@ -17,9 +17,8 @@ import org.sunbird.utils.{Constants, RequestUtil} import java.util import javax.inject.Inject -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} -import scala.collection.JavaConversions._ +import scala.jdk.CollectionConverters._ class FrameworkActor @Inject()(implicit oec: OntologyEngineContext) extends BaseActor { @@ -70,7 +69,7 @@ class FrameworkActor @Inject()(implicit oec: OntologyEngineContext) extends Base @throws[Exception] private def read(request: Request): Future[Response] = { val frameworkId = request.get("identifier").asInstanceOf[String] - val returnCategories: java.util.List[String] = seqAsJavaListConverter(request.get("categories").asInstanceOf[String].split(",").filter(category => StringUtils.isNotBlank(category) && !StringUtils.equalsIgnoreCase(category, "null"))).asJava + val returnCategories: java.util.List[String] = request.get("categories").asInstanceOf[String].split(",").filter(category => StringUtils.isNotBlank(category) && !StringUtils.equalsIgnoreCase(category, "null")).toList.asJava request.getRequest.put("categories", returnCategories) if (StringUtils.isNotBlank(frameworkId)) { val framework = FrameworkCache.get(frameworkId, returnCategories) @@ -82,7 +81,7 @@ class FrameworkActor @Inject()(implicit oec: OntologyEngineContext) extends Base val frameworkData: Future[Map[String, AnyRef]] = if (Platform.getBoolean("service.db.cassandra.enabled", true)) FrameworkManager.getFrameworkHierarchy(request) else { val frameworkStr = RedisCache.get("fw:"+frameworkId, (key: String) => "{}") - Future(JsonUtils.deserialize(frameworkStr, classOf[java.util.Map[String, AnyRef]]).toMap) + Future(JsonUtils.deserialize(frameworkStr, classOf[java.util.Map[String, AnyRef]]).asScala.toMap) } frameworkData.map(framework => { if (framework.isEmpty) { @@ -94,9 +93,10 @@ class FrameworkActor @Inject()(implicit oec: OntologyEngineContext) extends Base }) } else { Future { - val filterFrameworkData = FrameworkManager.filterFrameworkCategories(framework, returnCategories) + val filterFrameworkData: Map[String, AnyRef] = FrameworkManager.filterFrameworkCategories(framework.asJava, returnCategories) FrameworkCache.save(filterFrameworkData, returnCategories) - ResponseHandler.OK.put(Constants.FRAMEWORK, filterFrameworkData.asJava) + val javaMap: java.util.Map[String, AnyRef] = filterFrameworkData.asJava + ResponseHandler.OK.put(Constants.FRAMEWORK, javaMap) } } }).flatMap(f 
=> f) diff --git a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/LockActor.scala b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/LockActor.scala index e25b75197..4f36f002c 100644 --- a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/LockActor.scala +++ b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/LockActor.scala @@ -6,8 +6,7 @@ import org.apache.commons.lang3.StringUtils import java.sql.Timestamp import java.util import java.util.{Date, TimeZone, UUID} -import scala.collection.JavaConverters._ -import scala.collection.JavaConversions._ +import scala.jdk.CollectionConverters._ import org.sunbird.common.{JsonUtils, Platform} import org.sunbird.common.dto.{Request, Response, ResponseHandler} import org.sunbird.common.exception.{ClientException, ServerException} @@ -53,14 +52,14 @@ class LockActor @Inject()(implicit oec: OntologyEngineContext) extends BaseActor val externalProps = DefinitionNode.getExternalProps(request.getContext.get("graph_id").asInstanceOf[String], request.getContext.get("version").asInstanceOf[String], request.getContext.get("schemaName").asInstanceOf[String]) oec.graphService.readExternalProps(request, externalProps).map(response => { if (!ResponseHandler.checkError(response)) { - if(request.getRequest.get("userId") == response.getResult.toMap.getOrDefault("createdby", "") && - request.getRequest.get("deviceId") == response.getResult.toMap.getOrDefault("deviceid", "") && - request.getRequest.get("resourceType") == response.getResult.toMap.getOrDefault("resourcetype", "")){ + if(request.getRequest.get("userId") == response.getResult.asScala.toMap.getOrElse("createdby", "") && + request.getRequest.get("deviceId") == response.getResult.asScala.toMap.getOrElse("deviceid", "") && + request.getRequest.get("resourceType") == response.getResult.asScala.toMap.getOrElse("resourcetype", "")){ Future { - ResponseHandler.OK.put("lockKey", response.getResult.toMap.getOrDefault("lockid", "")).put("expiresAt", formatExpiryDate(response.getResult.toMap.getOrDefault("expiresat", "").asInstanceOf[Date])).put("expiresIn", defaultLockExpiryTime / 60) + ResponseHandler.OK.put("lockKey", response.getResult.asScala.toMap.getOrElse("lockid", "")).put("expiresAt", formatExpiryDate(response.getResult.asScala.toMap.getOrElse("expiresat", "").asInstanceOf[Date])).put("expiresIn", defaultLockExpiryTime / 60) } } - else if (request.getRequest.get("userId") == response.getResult.toMap.getOrDefault("createdby", "")) + else if (request.getRequest.get("userId") == response.getResult.asScala.toMap.getOrElse("createdby", "")) throw new ClientException("RESOURCE_SELF_LOCKED", "Error due to self lock , Resource already locked by user ") else { val creatorInfoStr = response.getResult.get("creatorinfo").asInstanceOf[String] @@ -123,8 +122,8 @@ class LockActor @Inject()(implicit oec: OntologyEngineContext) extends BaseActor else throw new ClientException("ERR_LOCK_REFRESHING_FAILED", "no data found from db for refreshing lock") } else { - val lockId = response.getResult.toMap.getOrDefault("lockid", "") - val createdBy = response.getResult.toMap.getOrDefault("createdby", "") + val lockId = response.getResult.asScala.toMap.getOrElse("lockid", "") + val createdBy = response.getResult.asScala.toMap.getOrElse("createdby", "") if (createdBy != userId) throw new ClientException("ERR_LOCK_REFRESHING_FAILED", "Unauthorized to refresh this lock") request.put("fields", List("expiresat")) @@ -154,7 +153,7 @@ class LockActor @Inject()(implicit oec: 
OntologyEngineContext) extends BaseActor val externalProps = DefinitionNode.getExternalProps(request.getContext.get("graph_id").asInstanceOf[String], request.getContext.get("version").asInstanceOf[String], request.getContext.get("schemaName").asInstanceOf[String]) oec.graphService.readExternalProps(request, externalProps).map(response => { if (!ResponseHandler.checkError(response)) { - val createdBy = response.getResult.toMap.getOrDefault("createdby", "") + val createdBy = response.getResult.asScala.toMap.getOrElse("createdby", "") if (createdBy != userId) throw new ClientException("ERR_LOCK_RETIRING_FAILED", "Unauthorized to retire lock") request.put("identifiers", List(resourceId)) @@ -183,7 +182,7 @@ class LockActor @Inject()(implicit oec: OntologyEngineContext) extends BaseActor request.getRequest.put("identifier", resourceId) } else { - request.getRequest.put("identifiers", resourceId.asInstanceOf[java.util.List[String]].toList) + request.getRequest.put("identifiers", resourceId.asInstanceOf[java.util.List[String]].asScala.toList) } val externalProps = DefinitionNode.getExternalProps(request.getContext.get("graph_id").asInstanceOf[String], request.getContext.get("version").asInstanceOf[String], request.getContext.get("schemaName").asInstanceOf[String]) oec.graphService.readExternalProps(request, externalProps).map(response => { diff --git a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/ObjectCategoryActor.scala b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/ObjectCategoryActor.scala index 4b91c9df9..d4a69096e 100644 --- a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/ObjectCategoryActor.scala +++ b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/ObjectCategoryActor.scala @@ -13,8 +13,8 @@ import org.sunbird.graph.nodes.DataNode import org.sunbird.graph.utils.NodeUtil import org.sunbird.utils.{Constants, RequestUtil} -import scala.collection.JavaConverters import scala.concurrent.{ExecutionContext, Future} +import scala.jdk.CollectionConverters._ class ObjectCategoryActor @Inject()(implicit oec: OntologyEngineContext) extends BaseActor { @@ -42,7 +42,7 @@ class ObjectCategoryActor @Inject()(implicit oec: OntologyEngineContext) extends @throws[Exception] private def read(request: Request): Future[Response] = { - val fields: util.List[String] = JavaConverters.seqAsJavaListConverter(request.get(Constants.FIELDS).asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null"))).asJava + val fields: util.List[String] = request.get(Constants.FIELDS).asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null")).toList.asJava request.getRequest.put(Constants.FIELDS, fields) DataNode.read(request).map(node => { val metadata: util.Map[String, AnyRef] = NodeUtil.serialize(node, fields, request.getContext.get(Constants.SCHEMA_NAME).asInstanceOf[String], request.getContext.get(Constants.VERSION).asInstanceOf[String]) diff --git a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/ObjectCategoryDefinitionActor.scala b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/ObjectCategoryDefinitionActor.scala index 7112d62a4..c3a1ec36b 100644 --- a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/ObjectCategoryDefinitionActor.scala +++ b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/ObjectCategoryDefinitionActor.scala @@ -15,9 +15,8 @@ import 
org.sunbird.graph.schema.DefinitionNode import org.sunbird.graph.utils.NodeUtil import org.sunbird.utils.{Constants, RequestUtil} -import scala.collection.JavaConverters -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} +import scala.jdk.CollectionConverters._ class ObjectCategoryDefinitionActor @Inject()(implicit oec: OntologyEngineContext) extends BaseActor { @@ -73,7 +72,7 @@ class ObjectCategoryDefinitionActor @Inject()(implicit oec: OntologyEngineContex val identifier = Constants.CATEGORY_PREFIX + Slug.makeSlug(categoryName) + "_" + Slug.makeSlug(objectType) + "_" + Slug.makeSlug(channel) request.getRequest.put(Constants.IDENTIFIER, identifier) } - val fields: util.List[String] = JavaConverters.seqAsJavaListConverter(request.get(Constants.FIELDS).asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null"))).asJava + val fields: util.List[String] = request.get(Constants.FIELDS).asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null")).toList.asJava request.getRequest.put(Constants.FIELDS, fields) DataNode.read(request) recoverWith { case e: ResourceNotFoundException => { @@ -85,6 +84,15 @@ class ObjectCategoryDefinitionActor @Inject()(implicit oec: OntologyEngineContex } else throw e } + case e: CompletionException if e.getCause.isInstanceOf[ResourceNotFoundException] => { + val id = request.get(Constants.IDENTIFIER).asInstanceOf[String] + println("ObjectCategoryDefinitionActor ::: read ::: node not found with id :" + id + " | Fetching node with _all") + if (StringUtils.equalsAnyIgnoreCase("POST", requestMethod) && !StringUtils.endsWithIgnoreCase(id, "_all")) { + request.put(Constants.IDENTIFIER, id.replace(id.substring(id.lastIndexOf("_") + 1), "all")) + DataNode.read(request) + } else + throw e.getCause + } case ex: Throwable => throw ex } map (node => { val metadata: util.Map[String, AnyRef] = NodeUtil.serialize(node, fields, request.getContext.get(Constants.SCHEMA_NAME).asInstanceOf[String], request.getContext.get(Constants.VERSION).asInstanceOf[String]) diff --git a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/TermActor.scala b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/TermActor.scala index 32f892d3f..91e71b8ca 100644 --- a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/TermActor.scala +++ b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/actors/TermActor.scala @@ -13,8 +13,8 @@ import org.sunbird.graph.utils.NodeUtil import org.sunbird.utils.{Constants, RequestUtil} import java.util import javax.inject.Inject -import scala.collection.JavaConverters.asScalaBufferConverter import scala.concurrent.{ExecutionContext, Future} +import scala.jdk.CollectionConverters._ class TermActor @Inject()(implicit oec: OntologyEngineContext) extends BaseActor { implicit val ec: ExecutionContext = getContext().dispatcher diff --git a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/mangers/FrameworkManager.scala b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/mangers/FrameworkManager.scala index dd6773a85..ce6fb0f98 100644 --- a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/mangers/FrameworkManager.scala +++ b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/mangers/FrameworkManager.scala @@ -17,9 +17,7 @@ import org.sunbird.graph.utils.NodeUtil.{convertJsonProperties, handleKeyNames} import java.util import 
java.util.{Collections, Optional} import java.util.concurrent.{CompletionException, Executors} -import scala.collection.JavaConverters -import scala.collection.JavaConverters._ -import scala.collection.JavaConversions._ +import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} import org.sunbird.utils.Constants @@ -37,33 +35,33 @@ object FrameworkManager { def filterFrameworkCategories(framework: util.Map[String, AnyRef], categoryNames: util.List[String]): Map[String, AnyRef] = { val categories = framework.getOrDefault("categories", new util.ArrayList[util.Map[String, AnyRef]]).asInstanceOf[util.List[util.Map[String, AnyRef]]] - val newCategoryNames = categoryNames.map(_.toLowerCase) + val newCategoryNames = categoryNames.asScala.map(_.toLowerCase) if (!categories.isEmpty && !newCategoryNames.isEmpty) { - val filteredCategories = categories.filter(category => { + val filteredCategories = categories.asScala.filter(category => { val code = category.get("code").asInstanceOf[String] newCategoryNames.contains(code.toLowerCase()) }).toList.asJava - val filteredData = framework.-("categories") ++ Map("categories" -> filteredCategories) - val finalCategories = removeAssociations(filteredData.toMap, newCategoryNames) - (filteredData.-("categories") ++ Map("categories" -> finalCategories)).toMap + val filteredData = framework.asScala.toMap - "categories" + ("categories" -> filteredCategories) + val finalCategories = removeAssociations(filteredData, newCategoryNames.asJava) + (filteredData - "categories" + ("categories" -> finalCategories)) } else { - framework.toMap + framework.asScala.toMap } } private def removeAssociations(responseMap: Map[String, AnyRef], returnCategories: java.util.List[String]): util.List[util.Map[String, AnyRef]] = { - val categories = responseMap.getOrDefault("categories", new util.ArrayList[util.Map[String, AnyRef]]).asInstanceOf[util.List[util.Map[String, AnyRef]]] - categories.map( category => { + val categories = responseMap.getOrElse("categories", new util.ArrayList[util.Map[String, AnyRef]]).asInstanceOf[util.List[util.Map[String, AnyRef]]] + categories.asScala.map( category => { removeTermAssociations(category.getOrDefault("terms", new util.ArrayList[util.Map[String, AnyRef]]).asInstanceOf[util.List[util.Map[String, AnyRef]]], returnCategories) }) categories } private def removeTermAssociations(terms: util.List[util.Map[String, AnyRef]], returnCategories: java.util.List[String]): Unit = { - terms.map(term => { + terms.asScala.map(term => { val associations = term.getOrDefault("associations", new util.ArrayList[util.Map[String, AnyRef]]).asInstanceOf[util.List[util.Map[String, AnyRef]]] - if (associations.nonEmpty) { - val filteredAssociations = associations.filter(p => p != null && returnCategories.contains(p.get("category"))) + if (!associations.isEmpty) { + val filteredAssociations = associations.asScala.filter(p => p != null && returnCategories.contains(p.get("category"))).asJava term.put("associations", filteredAssociations) if (filteredAssociations.isEmpty) term.remove("associations") @@ -81,25 +79,25 @@ object FrameworkManager { val channel = node.getMetadata.getOrDefault("channel", "all").asInstanceOf[String] val definition: ObjectCategoryDefinition = DefinitionNode.getObjectCategoryDefinition("", objectType, channel) val jsonProps = DefinitionNode.fetchJsonProps(node.getGraphId, schemaVersion, objectType, definition) - val updatedMetadata: util.Map[String, AnyRef] = metadata.entrySet().asScala.filter(entry => null != 
entry.getValue) + val updatedMetadata: util.Map[String, AnyRef] = (metadata.entrySet().asScala.filter(entry => null != entry.getValue) .map((entry: util.Map.Entry[String, AnyRef]) => handleKeyNames(entry, null) -> convertJsonProperties(entry, jsonProps)).toMap ++ - Map("objectType" -> node.getObjectType, "identifier" -> node.getIdentifier, "languageCode" -> NodeUtil.getLanguageCodes(node)) + Map("objectType" -> node.getObjectType, "identifier" -> node.getIdentifier, "languageCode" -> NodeUtil.getLanguageCodes(node))).asJava val fields =DefinitionNode.getMetadataFields(node.getGraphId, schemaVersion, objectType, definition) - val filteredData: util.Map[String, AnyRef] = if(fields.nonEmpty) updatedMetadata.filterKeys(key => fields.contains(key)) else updatedMetadata + val filteredData: util.Map[String, AnyRef] = if(fields.nonEmpty) updatedMetadata.asScala.filter(entry => fields.contains(entry._1)).asJava else updatedMetadata val relationDef = DefinitionNode.getRelationDefinitionMap(node.getGraphId, schemaVersion, objectType, definition) - val outRelations = relations.filter((rel: Relation) => { + val outRelations = relations.asScala.filter((rel: Relation) => { StringUtils.equals(rel.getStartNodeId.toString(), node.getIdentifier) - }).sortBy((rel: Relation) => rel.getMetadata.get("IL_SEQUENCE_INDEX").asInstanceOf[Long])(Ordering.Long).toList + }).sortBy((rel: Relation) => rel.getMetadata.get("IL_SEQUENCE_INDEX").asInstanceOf[Long])(Ordering.Long).toList.asJava if(includeRelations){ val relMetadata = getRelationAsMetadata(relationDef, outRelations, "out") - val childHierarchy = relMetadata.map(x => (x._1, x._2.map(a => { + val childHierarchy = relMetadata.map(x => (x._1, x._2.asScala.map(a => { val identifier = a.getOrElse("identifier", "") val childNode = nodes.get(identifier) val index = a.getOrElse("index", 1).asInstanceOf[Number] - val metaData = (childNode.getMetadata ++ Map("index" -> index)).asJava + val metaData = (childNode.getMetadata.asScala ++ Map("index" -> index)).asJava childNode.setMetadata(metaData) if("associations".equalsIgnoreCase(x._1)){ getCompleteMetadata(childNode.getIdentifier, subGraph, false) @@ -107,7 +105,7 @@ object FrameworkManager { getCompleteMetadata(childNode.getIdentifier, subGraph, true) } }).toList.asJava)) - (filteredData ++ childHierarchy).asJava + (filteredData.asScala ++ childHierarchy).asJava } else { filteredData } @@ -118,18 +116,18 @@ object FrameworkManager { { val endObjectType = rel.getEndNodeObjectType.replace("Image", "") val relKey: String = rel.getRelationType + "_" + direction + "_" + endObjectType - if (definitionMap.containsKey(relKey)) { + if (definitionMap.contains(relKey)) { val relData =Map[String, Object]("identifier" -> rel.getEndNodeId.replace(".img", ""), "name"-> rel.getEndNodeName, "objectType"-> endObjectType, "relation"-> rel.getRelationType, - "KEY" -> definitionMap.getOrDefault(relKey, "").asInstanceOf[String] + "KEY" -> definitionMap.getOrElse(relKey, "").asInstanceOf[String] ) ++ rel.getMetadata.asScala val indexMap = if(rel.getRelationType.equals("hasSequenceMember")) Map("index" -> rel.getMetadata.getOrDefault("IL_SEQUENCE_INDEX",1.asInstanceOf[Number]).asInstanceOf[Number]) else Map() relData ++ indexMap } else Map[String, Object]() }).filter(x => x.nonEmpty) - .groupBy(x => x.getOrDefault("KEY", "").asInstanceOf[String]) + .groupBy(x => x.getOrElse("KEY", "").asInstanceOf[String]) .map(x => (x._1, (x._2.toList.map(x => { x.-("KEY") x.-("IL_SEQUENCE_INDEX") @@ -147,9 +145,9 @@ object FrameworkManager { val 
responseFuture = oec.graphService.readExternalProps(request, externalProps) responseFuture.map(response => { if (!ResponseHandler.checkError(response)) { - val hierarchyString = response.getResult.toMap.getOrDefault("hierarchy", "").asInstanceOf[String] + val hierarchyString = response.getResult.asScala.toMap.getOrElse("hierarchy", "").asInstanceOf[String] if (StringUtils.isNotEmpty(hierarchyString)) { - Future(JsonUtils.deserialize(hierarchyString, classOf[java.util.Map[String, AnyRef]]).toMap) + Future(JsonUtils.deserialize(hierarchyString, classOf[java.util.Map[String, AnyRef]]).asScala.toMap) } else Future(Map[String, AnyRef]()) } else if (ResponseHandler.checkError(response) && response.getResponseCode.code() == 404) @@ -190,7 +188,7 @@ object FrameworkManager { val definition: ObjectCategoryDefinition = DefinitionNode.getObjectCategoryDefinition("", objectType, channel) val relationDef = DefinitionNode.getRelationDefinitionMap(node.getGraphId, schemaVersion, objectType, definition) val frameworkId = request.getContext.getOrDefault("frameworkId", "").asInstanceOf[String] - val outRelations = node.getOutRelations.filter((rel: Relation) => { + val outRelations = node.getOutRelations.asScala.filter((rel: Relation) => { StringUtils.equals(rel.getStartNodeId, node.getIdentifier) }).toList @@ -212,7 +210,7 @@ object FrameworkManager { val relKey: String = rel.getRelationType + "_out_" + endObjectType var endNodeId = rel.getEndNodeId() endNodeId = endNodeId.replaceFirst(oldId.toLowerCase(), newId.toLowerCase()) - if (relationDef.containsKey(relKey)) { + if (relationDef.contains(relKey)) { val relReq = new Request(request) relReq.getContext.put(Constants.SCHEMA_NAME, rel.getEndNodeObjectType) relReq.getContext.put(Constants.VERSION, schemaVersion) @@ -244,11 +242,11 @@ object FrameworkManager { var relMap = request.getContext.getOrDefault("relationMap", new util.HashMap[String, Object]()).asInstanceOf[util.Map[String, Object]] if (!relMap.isEmpty) { val relKey = relMap.getOrDefault("KEY", "").asInstanceOf[String] - relMap = relMap.toMap.-("KEY") - if (!relationDef.getOrDefault(relKey, "").asInstanceOf[String].isEmpty) { + relMap = (relMap.asScala.toMap - "KEY").asJava + if (!relationDef.getOrElse(relKey, "").asInstanceOf[String].isEmpty) { val tempArr = new util.ArrayList[util.Map[String, Object]]() tempArr.add(relMap) - req.put(relationDef.getOrDefault(relKey, "").asInstanceOf[String], tempArr) + req.put(relationDef.getOrElse(relKey, "").asInstanceOf[String], tempArr) } } req.getContext.remove("relationMap") diff --git a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/utils/CategoryCache.scala b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/utils/CategoryCache.scala index 754b3bdda..4601eb511 100644 --- a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/utils/CategoryCache.scala +++ b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/utils/CategoryCache.scala @@ -3,14 +3,14 @@ package org.sunbird.utils import java.util import org.sunbird.cache.impl.RedisCache import org.sunbird.telemetry.logger.TelemetryManager -import scala.collection.JavaConversions._ +import scala.jdk.CollectionConverters._ object CategoryCache{ def setFramework(id: String, framework: util.Map[String, AnyRef]): Unit = { if (null != framework && !framework.isEmpty) { - val categories = framework.getOrDefault("categories", new util.ArrayList[util.Map[String, AnyRef]]).asInstanceOf[util.List[util.Map[String, AnyRef]]].toList + val categories = framework.getOrDefault("categories", new 
util.ArrayList[util.Map[String, AnyRef]]).asInstanceOf[util.List[util.Map[String, AnyRef]]].asScala.toList categories.map(category => { val catName = category.get("code").asInstanceOf[String] val terms = getTerms(category, "terms") @@ -27,20 +27,20 @@ object CategoryCache{ private def getTerms(category: util.Map[String, AnyRef], key: String): List[String] = { val returnTerms = new util.ArrayList[String] - if (category != null && category.nonEmpty) { - val terms = category.getOrDefault(key, new util.ArrayList[util.Map[String, AnyRef]]).asInstanceOf[util.List[util.Map[String, AnyRef]]].toList + if (category != null && !category.isEmpty) { + val terms = category.getOrDefault(key, new util.ArrayList[util.Map[String, AnyRef]]).asInstanceOf[util.List[util.Map[String, AnyRef]]].asScala.toList if (terms != null && terms.nonEmpty) { for (term <- terms) { - val termName = term.getOrElse("name", "").asInstanceOf[String] + val termName = term.getOrDefault("name", "").asInstanceOf[String] if (termName != null && termName.trim.nonEmpty) { - returnTerms += termName + returnTerms.add(termName) val childTerms = getTerms(term, "associations") if (childTerms.nonEmpty) - returnTerms ++= childTerms + returnTerms.addAll(childTerms.asJava) } } } } - returnTerms.toList + returnTerms.asScala.toList } } \ No newline at end of file diff --git a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/utils/FrameworkCache.scala b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/utils/FrameworkCache.scala index 788334b59..f4b98dada 100644 --- a/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/utils/FrameworkCache.scala +++ b/taxonomy-api/taxonomy-actors/src/main/scala/org/sunbird/utils/FrameworkCache.scala @@ -6,7 +6,7 @@ import org.sunbird.common.{JsonUtils, Platform} import org.sunbird.graph.util.ScalaJsonUtil import java.util import java.util.Collections -import scala.collection.JavaConversions.asScalaBuffer +import scala.jdk.CollectionConverters._ object FrameworkCache{ @@ -17,12 +17,12 @@ object FrameworkCache{ def getFwCacheKey(identifier: String, categoryNames: util.List[String]): String = { Collections.sort(categoryNames) - CACHE_PREFIX + identifier.toLowerCase + "_" + categoryNames.map(_.toLowerCase).mkString("_") + CACHE_PREFIX + identifier.toLowerCase + "_" + categoryNames.asScala.map(_.toLowerCase).mkString("_") } def get(id: String, returnCategories: util.List[String]): util.Map[String, Object] = { if (cacheEnabled) { - if (returnCategories.nonEmpty) { + if (!returnCategories.isEmpty) { val categories = new util.ArrayList[String](returnCategories) Collections.sort(categories) val cachedCategories: String = RedisCache.get(getFwCacheKey(id, categories)) @@ -40,7 +40,7 @@ object FrameworkCache{ def save(framework: Map[String, AnyRef], categoryNames: util.List[String]): Unit = { val identifier = framework.getOrElse("identifier", "").asInstanceOf[String] - if (cacheEnabled && !framework.isEmpty && StringUtils.isNotBlank(identifier) && categoryNames.nonEmpty) { + if (cacheEnabled && !framework.isEmpty && StringUtils.isNotBlank(identifier) && !categoryNames.isEmpty) { val categories = new util.ArrayList[String](categoryNames) Collections.sort(categories) val key: String = getFwCacheKey(identifier, categories) diff --git a/taxonomy-api/taxonomy-actors/src/test/resources/application.conf b/taxonomy-api/taxonomy-actors/src/test/resources/application.conf index 178170722..fa6f37bf1 100644 --- a/taxonomy-api/taxonomy-actors/src/test/resources/application.conf +++ 
b/taxonomy-api/taxonomy-actors/src/test/resources/application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. # @@ -338,7 +338,7 @@ plugin.media.base.url="https://dev.open-sunbird.org" # Configuration graph.dir=/data/testingGraphDB -akka.request_timeout=30 +pekko.request_timeout=30 environment.id=10000000 graph.ids=["domain"] graph.passport.key.base=31b6fd1c4d64e745c867e61a45edc34a diff --git a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/BaseSpec.scala b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/BaseSpec.scala index 745c4d3eb..54ad4d539 100644 --- a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/BaseSpec.scala +++ b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/BaseSpec.scala @@ -3,8 +3,8 @@ package org.sunbird.actors import java.util import java.util.concurrent.TimeUnit -import akka.actor.{ActorSystem, Props} -import akka.testkit.TestKit +import org.apache.pekko.actor.{ActorSystem, Props} +import org.apache.pekko.testkit.TestKit import org.scalatest.{FlatSpec, Matchers} import org.sunbird.common.dto.{Request, Response} import org.sunbird.graph.OntologyEngineContext diff --git a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/CategoryActorTest.scala b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/CategoryActorTest.scala index 913b85174..8737c5e80 100644 --- a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/CategoryActorTest.scala +++ b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/CategoryActorTest.scala @@ -2,14 +2,15 @@ package org.sunbird.actors import java.util -import akka.actor.Props +import org.apache.pekko.actor.Props import org.scalamock.scalatest.MockFactory import org.sunbird.common.dto.Request import org.sunbird.graph.{GraphService, OntologyEngineContext} import org.sunbird.graph.dac.model.{Node, SearchCriteria} import org.sunbird.utils.Constants -import scala.collection.JavaConversions.mapAsJavaMap +import scala.jdk.CollectionConverters._ +import scala.collection.mutable import scala.concurrent.Future import scala.concurrent.ExecutionContext.Implicits.global @@ -117,7 +118,7 @@ class CategoryActorTest extends BaseSpec with MockFactory{ (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)) val request = getCategoryRequest() request.getContext.put("identifier", "category_test") - request.putAll(mapAsJavaMap(Map("identifier" -> "category_test"))) + request.putAll(mutable.Map[String, AnyRef]("identifier" -> "category_test").asJava) request.setOperation("readCategory") val response = callActor(request, Props(new CategoryActor())) assert("successful".equals(response.getParams.getStatus)) @@ -134,7 +135,7 @@ class CategoryActorTest extends BaseSpec with MockFactory{ (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(nodes)).anyNumberOfTimes() val request = getCategoryRequest() - request.putAll(mapAsJavaMap(Map("description" -> "test desc"))) + 
request.putAll(mutable.Map[String, AnyRef]("description" -> "test desc").asJava) request.setOperation(Constants.UPDATE_CATEGORY) val response = callActor(request, Props(new CategoryActor())) assert("successful".equals(response.getParams.getStatus)) @@ -145,7 +146,7 @@ class CategoryActorTest extends BaseSpec with MockFactory{ val graphDB = mock[GraphService] (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes() val request = getCategoryRequest() - request.putAll(mapAsJavaMap(Map("description" -> "test desc", "identifier"-> "category_test"))) + request.putAll(mutable.Map[String, AnyRef]("description" -> "test desc", "identifier"-> "category_test").asJava) request.setOperation(Constants.UPDATE_CATEGORY) val response = callActor(request, Props(new CategoryActor())) assert("failed".equals(response.getParams.getStatus)) @@ -156,7 +157,7 @@ class CategoryActorTest extends BaseSpec with MockFactory{ val graphDB = mock[GraphService] (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes() val request = getCategoryRequest() - request.putAll(mapAsJavaMap(Map("description" -> "test desc", "code" -> "category_test"))) + request.putAll(mutable.Map[String, AnyRef]("description" -> "test desc", "code" -> "category_test").asJava) request.setOperation(Constants.UPDATE_CATEGORY) val response = callActor(request, Props(new CategoryActor())) assert("failed".equals(response.getParams.getStatus)) diff --git a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/CategoryInstanceActorTest.scala b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/CategoryInstanceActorTest.scala index 6ee90251a..9fb0a456c 100644 --- a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/CategoryInstanceActorTest.scala +++ b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/CategoryInstanceActorTest.scala @@ -1,7 +1,7 @@ package org.sunbird.actors import java.util -import akka.actor.Props +import org.apache.pekko.actor.Props import org.apache.commons.lang3.StringUtils import org.scalamock.scalatest.MockFactory import org.sunbird.common.dto.{Request, Response} @@ -12,7 +12,8 @@ import org.sunbird.graph.dac.model.{Node, SearchCriteria} import org.sunbird.utils.Constants import scala.concurrent.Future import scala.concurrent.ExecutionContext.Implicits.global -import scala.collection.JavaConversions.mapAsJavaMap +import scala.jdk.CollectionConverters._ +import scala.collection.mutable class CategoryInstanceActorTest extends BaseSpec with MockFactory { @@ -60,7 +61,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { (graphDB.createRelation _).expects(*, *).returns(Future(new Response())) val request = getCategoryInstanceRequest() - request.putAll(mapAsJavaMap(Map("framework" -> "NCF","code" -> "board" ,"name" -> "Board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF","code" -> "board" ,"name" -> "Board").asJava) request.setOperation(Constants.CREATE_CATEGORY_INSTANCE) val response = callActor(request, Props(new CategoryInstanceActor())) assert("successful".equals(response.getParams.getStatus)) @@ -86,7 +87,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes() val request = getCategoryInstanceRequest() - request.putAll(mapAsJavaMap(Map("framework" -> "NCF", "code" -> "board", "name" -> "Board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF", "code" -> 
"board", "name" -> "Board").asJava) request.setOperation(Constants.CREATE_CATEGORY_INSTANCE) val response = callActor(request, Props(new CategoryInstanceActor())) assert("failed".equals(response.getParams.getStatus)) @@ -110,7 +111,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes() val request = getCategoryInstanceRequest() - request.putAll(mapAsJavaMap(Map("framework" -> "NCF", "code" -> "", "name" -> "Board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF", "code" -> "", "name" -> "Board").asJava) request.setOperation(Constants.CREATE_CATEGORY_INSTANCE) val response = callActor(request, Props(new CategoryInstanceActor())) assert("failed".equals(response.getParams.getStatus)) @@ -134,7 +135,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes() val request = getCategoryInstanceRequest() - request.putAll(mapAsJavaMap(Map("framework" -> "NCF", "code" -> "board", "name" -> "Board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF", "code" -> "board", "name" -> "Board").asJava) request.setOperation(Constants.CREATE_CATEGORY_INSTANCE) val response = callActor(request, Props(new CategoryInstanceActor())) assert("failed".equals(response.getParams.getStatus)) @@ -145,7 +146,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { val graphDB = mock[GraphService] (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes() val request = getCategoryInstanceRequest() - request.putAll(mapAsJavaMap(Map("code" -> "board", "name" -> "Board"))) + request.putAll(mutable.Map[String, AnyRef]("code" -> "board", "name" -> "Board").asJava) request.setOperation(Constants.CREATE_CATEGORY_INSTANCE) val response = callActor(request, Props(new CategoryInstanceActor())) assert("failed".equals(response.getParams.getStatus)) @@ -157,7 +158,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { val graphDB = mock[GraphService] (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes() val request = getCategoryInstanceRequest() - request.putAll(mapAsJavaMap(Map("framework" -> "", "code" -> "board", "name" -> "Board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "", "code" -> "board", "name" -> "Board").asJava) request.setOperation(Constants.CREATE_CATEGORY_INSTANCE) val response = callActor(request, Props(new CategoryInstanceActor())) assert("failed".equals(response.getParams.getStatus)) @@ -186,7 +187,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)) val request = getCategoryInstanceRequest() request.getContext.put("identifier", "ncf_board") - request.putAll(mapAsJavaMap(Map("framework" -> "NCF", "name" -> "Board", "description" -> "Board", "code" -> "board", "identifier" -> "ncf_board", "channel" -> "sunbird", "category" -> "board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF", "name" -> "Board", "description" -> "Board", "code" -> "board", "identifier" -> "ncf_board", "channel" -> "sunbird", "category" -> "board").asJava) request.setOperation("readCategoryInstance") val response = callActor(request, Props(new CategoryInstanceActor())) 
assert("failed".equals(response.getParams.getStatus)) @@ -197,7 +198,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { val graphDB = mock[GraphService] (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes() val request = getCategoryInstanceRequest() - request.putAll(mapAsJavaMap(Map("framework" -> "NCF", "name" -> "Board", "frameworks" -> "[{identifier=NCF_TEST1}]}]"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF", "name" -> "Board", "frameworks" -> "[{identifier=NCF_TEST1}]}]").asJava) request.setOperation(Constants.CREATE_CATEGORY_INSTANCE) val response = callActor(request, Props(new CategoryInstanceActor())) assert("failed".equals(response.getParams.getStatus)) @@ -210,7 +211,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes() val request = getCategoryInstanceRequest() - request.putAll(mapAsJavaMap(Map("framework" -> "NCF","code" -> "board","status" -> "Live", "name" -> "Board", "frameworks" -> "[{identifier=NCF_TEST1}]}]"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF","code" -> "board","status" -> "Live", "name" -> "Board", "frameworks" -> "[{identifier=NCF_TEST1}]}]").asJava) request.setOperation(Constants.CREATE_CATEGORY_INSTANCE) val response = callActor(request, Props(new CategoryInstanceActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -226,7 +227,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)) val request = getCategoryInstanceRequest() request.getContext.put("identifier", "ncf_board") - request.putAll(mapAsJavaMap(Map("framework" -> "NCF","name" -> "Board", "description" -> "Board", "code" -> "board","identifier" -> "ncf_board", "channel" ->"sunbird","category" -> "board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF","name" -> "Board", "description" -> "Board", "code" -> "board","identifier" -> "ncf_board", "channel" ->"sunbird","category" -> "board").asJava) request.setOperation("readCategoryInstance") val response = callActor(request, Props(new CategoryInstanceActor())) assert("successful".equals(response.getParams.getStatus)) @@ -236,7 +237,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { implicit val oec: OntologyEngineContext = mock[OntologyEngineContext] val request = getCategoryInstanceRequest() request.getContext.put("identifier", "ncf_board") - request.putAll(mapAsJavaMap(Map("framework" -> "ncf", "name" -> "Board", "description" -> "Board", "code" -> "board", "identifier" -> "ncf_board", "channel" -> "sunbird", "category" -> ""))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "ncf", "name" -> "Board", "description" -> "Board", "code" -> "board", "identifier" -> "ncf_board", "channel" -> "sunbird", "category" -> "").asJava) request.setOperation("readCategoryInstance") val response = callActor(request, Props(new CategoryInstanceActor())) assert("failed".equals(response.getParams.getStatus)) @@ -246,7 +247,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { implicit val oec: OntologyEngineContext = mock[OntologyEngineContext] val request = getCategoryInstanceRequest() request.getContext.put("identifier", "ncf_board") - request.putAll(mapAsJavaMap(Map("framework" -> "", "name" -> "Board", "description" -> "Board", "code" -> "board", "identifier" -> "ncf_board", 
"channel" -> "sunbird", "category" -> "board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "", "name" -> "Board", "description" -> "Board", "code" -> "board", "identifier" -> "ncf_board", "channel" -> "sunbird", "category" -> "board").asJava) request.setOperation("readCategoryInstance") val response = callActor(request, Props(new CategoryInstanceActor())) assert("failed".equals(response.getParams.getStatus)) @@ -263,7 +264,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(nodes)).anyNumberOfTimes() val request = getCategoryInstanceRequest() - request.putAll(mapAsJavaMap(Map("framework" -> "NCF","name" -> "Board", "description" -> "Board", "code" -> "board", "channel" ->"sunbird","category" -> "board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF","name" -> "Board", "description" -> "Board", "code" -> "board", "channel" ->"sunbird","category" -> "board").asJava) request.setOperation(Constants.UPDATE_CATEGORY_INSTANCE) val response = callActor(request, Props(new CategoryInstanceActor())) assert("successful".equals(response.getParams.getStatus)) @@ -277,7 +278,7 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { val graphDB = mock[GraphService] (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes() val request = getCategoryInstanceRequest() - request.putAll(mapAsJavaMap(Map("framework" -> "NCF", "name" -> "Board", "identifier"->"ncf_board", "description" -> "Board", "code" -> "board", "channel" -> "sunbird", "category" -> "board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF", "name" -> "Board", "identifier"->"ncf_board", "description" -> "Board", "code" -> "board", "channel" -> "sunbird", "category" -> "board").asJava) request.setOperation(Constants.UPDATE_CATEGORY_INSTANCE) val response = callActor(request, Props(new CategoryInstanceActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -296,8 +297,8 @@ class CategoryInstanceActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(nodes)).anyNumberOfTimes() val request = getCategoryInstanceRequest() request.getContext.put("identifier", "ncf_board") - request.putAll(mapAsJavaMap(Map("framework" -> "NCF","name" -> "Board", "description" -> "Board", "code" -> "board", "channel" ->"sunbird","category" -> "board"))) - request.putAll(mapAsJavaMap(Map("identifier" -> "ncf_board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF","name" -> "Board", "description" -> "Board", "code" -> "board", "channel" ->"sunbird","category" -> "board").asJava) + request.putAll(mutable.Map[String, AnyRef]("identifier" -> "ncf_board").asJava) request.setOperation("retireCategoryInstance") val response = callActor(request, Props(new CategoryInstanceActor())) assert("successful".equals(response.getParams.getStatus)) diff --git a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/FrameworkActorTest.scala b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/FrameworkActorTest.scala index ba37125ca..df98b0e9c 100644 --- a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/FrameworkActorTest.scala +++ b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/FrameworkActorTest.scala @@ -1,7 +1,7 @@ package org.sunbird.actors import java.util -import akka.actor.Props +import org.apache.pekko.actor.Props import 
org.apache.commons.lang3.StringUtils import org.scalamock.scalatest.MockFactory import org.sunbird.cache.impl.RedisCache @@ -10,7 +10,8 @@ import org.sunbird.common.exception.ResponseCode import org.sunbird.graph.{GraphService, OntologyEngineContext} import org.sunbird.graph.dac.model.{Node, Relation, SearchCriteria, SubGraph} import org.sunbird.utils.Constants -import scala.collection.JavaConversions.mapAsJavaMap +import scala.jdk.CollectionConverters._ +import scala.collection.mutable import scala.concurrent.Future import scala.concurrent.ExecutionContext.Implicits.global @@ -42,7 +43,7 @@ class FrameworkActorTest extends BaseSpec with MockFactory { (graphDB.addNode(_: String, _: Node)).expects(*, *).returns(Future(getFrameworkOfNode())) val request = getFrameworkRequest() - request.putAll(mapAsJavaMap(Map("name" ->"framework_test", "code"-> "framework_test", "description" -> "desc_test", "channel"->"channel_test"))) + request.putAll(mutable.Map[String, AnyRef]("name" ->"framework_test", "code"-> "framework_test", "description" -> "desc_test", "channel"->"channel_test").asJava) request.setOperation(Constants.CREATE_FRAMEWORK) val response = callActor(request, Props(new FrameworkActor())) assert("successful".equals(response.getParams.getStatus)) @@ -54,7 +55,7 @@ class FrameworkActorTest extends BaseSpec with MockFactory { val graphDB = mock[GraphService] (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes() val request = getFrameworkRequest() - request.putAll(mapAsJavaMap(Map("name" -> "framework_test", "code" -> "", "description" -> "desc_test", "channel" -> "channel_test"))) + request.putAll(mutable.Map[String, AnyRef]("name" -> "framework_test", "code" -> "", "description" -> "desc_test", "channel" -> "channel_test").asJava) request.setOperation(Constants.CREATE_FRAMEWORK) val response = callActor(request, Props(new FrameworkActor())) assert("failed".equals(response.getParams.getStatus)) @@ -65,7 +66,7 @@ class FrameworkActorTest extends BaseSpec with MockFactory { val graphDB = mock[GraphService] (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes() val request = getFrameworkRequest() - request.putAll(mapAsJavaMap(Map("name" -> "framework_test", "code" -> "framework_test", "description" -> "desc_test"))) + request.putAll(mutable.Map[String, AnyRef]("name" -> "framework_test", "code" -> "framework_test", "description" -> "desc_test").asJava) request.setOperation(Constants.CREATE_FRAMEWORK) val response = callActor(request, Props(new FrameworkActor())) assert("failed".equals(response.getParams.getStatus)) @@ -92,7 +93,7 @@ class FrameworkActorTest extends BaseSpec with MockFactory { translations.put("sta", "trnm") val request = getFrameworkRequest() request.put("translations", translations) - request.putAll(mapAsJavaMap(Map("name" -> "framework_test", "code" -> "framework_test", "description" -> "desc_test", "channel" -> "channel_test"))) + request.putAll(mutable.Map[String, AnyRef]("name" -> "framework_test", "code" -> "framework_test", "description" -> "desc_test", "channel" -> "channel_test").asJava) request.setOperation(Constants.CREATE_FRAMEWORK) val response = callActor(request, Props(new FrameworkActor())) assert("failed".equals(response.getParams.getStatus)) @@ -116,7 +117,7 @@ class FrameworkActorTest extends BaseSpec with MockFactory { }) (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes() val request = getFrameworkRequest() - request.putAll(mapAsJavaMap(Map("name" -> 
"framework_test", "code" -> "framework_test", "description" -> "desc_test", "channel" -> "channel_test"))) + request.putAll(mutable.Map[String, AnyRef]("name" -> "framework_test", "code" -> "framework_test", "description" -> "desc_test", "channel" -> "channel_test").asJava) request.setOperation(Constants.CREATE_FRAMEWORK) val response = callActor(request, Props(new FrameworkActor())) assert("failed".equals(response.getParams.getStatus)) @@ -133,7 +134,7 @@ class FrameworkActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(nodes)).anyNumberOfTimes() val request = getFrameworkRequest() - request.putAll(mapAsJavaMap(Map("description" -> "test desc"))) + request.putAll(mutable.Map[String, AnyRef]("description" -> "test desc").asJava) request.setOperation(Constants.UPDATE_FRAMEWORK) val response = callActor(request, Props(new FrameworkActor())) assert("successful".equals(response.getParams.getStatus)) @@ -170,7 +171,7 @@ class FrameworkActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(nodes)).anyNumberOfTimes() val request = getFrameworkRequest() - request.putAll(mapAsJavaMap(Map(Constants.IDENTIFIER -> "NCF", "createdBy" -> "username_1", Constants.CODE -> "NCF_COPY"))) + request.putAll(mutable.Map[String, AnyRef](Constants.IDENTIFIER -> "NCF", "createdBy" -> "username_1", Constants.CODE -> "NCF_COPY").asJava) request.setOperation(Constants.COPY_FRAMEWORK) val response = callActor(request, Props(new FrameworkActor())) assert("successful".equals(response.getParams.getStatus)) @@ -188,7 +189,7 @@ class FrameworkActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(nodes)).anyNumberOfTimes() val request = getFrameworkRequest() - request.putAll(mapAsJavaMap(Map(Constants.IDENTIFIER -> "NCF"))) + request.putAll(mutable.Map[String, AnyRef](Constants.IDENTIFIER -> "NCF").asJava) request.setOperation(Constants.COPY_FRAMEWORK) val response = callActor(request, Props(new FrameworkActor())) assert("failed".equals(response.getParams.getStatus)) @@ -206,7 +207,7 @@ class FrameworkActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(nodes)).anyNumberOfTimes() val request = getFrameworkRequest() - request.putAll(mapAsJavaMap(Map(Constants.IDENTIFIER -> "NCF", Constants.CODE -> "NCF"))) + request.putAll(mutable.Map[String, AnyRef](Constants.IDENTIFIER -> "NCF", Constants.CODE -> "NCF").asJava) request.setOperation(Constants.COPY_FRAMEWORK) val response = callActor(request, Props(new FrameworkActor())) assert("failed".equals(response.getParams.getStatus)) @@ -234,7 +235,7 @@ class FrameworkActorTest extends BaseSpec with MockFactory { val request = getFrameworkRequest() request.getContext.put(Constants.IDENTIFIER, "framework_test") - request.putAll(mapAsJavaMap(Map(Constants.IDENTIFIER -> "framework_test", "channel" -> "sunbird"))) + request.putAll(mutable.Map[String, AnyRef](Constants.IDENTIFIER -> "framework_test", "channel" -> "sunbird").asJava) request.setOperation(Constants.PUBLISH_FRAMEWORK) val response = callActor(request, Props(new FrameworkActor())) assert("successful".equals(response.getParams.getStatus)) @@ -252,7 +253,7 @@ class FrameworkActorTest extends BaseSpec with MockFactory { // RedisCache.set(cacheKey, frameworkMetadata) val request = getFrameworkRequest() 
request.getContext.put("identifier", "frameworkTest") - request.putAll(mapAsJavaMap(Map("identifier" -> "framework_test", "channel" -> "sunbird", Constants.CATEGORIES -> ""))) + request.putAll(mutable.Map[String, AnyRef]("identifier" -> "framework_test", "channel" -> "sunbird", Constants.CATEGORIES -> "").asJava) request.setOperation(Constants.READ_FRAMEWORK) val response = callActor(request, Props(new FrameworkActor())) assert("successful".equals(response.getParams.getStatus)) @@ -325,9 +326,9 @@ class FrameworkActorTest extends BaseSpec with MockFactory { } def getSubGraphData(): SubGraph = { - val nodeMap: Map[String, Node] = Map("framework_test" -> getValidNode()) + val nodeMap: Map[String, Node] = Map[String, Node]("framework_test" -> getValidNode()) val relationsList: util.List[Relation] = new util.ArrayList[Relation]() - val subGraphFData = new SubGraph(nodeMap, relationsList) + val subGraphFData = new SubGraph(nodeMap.asJava, relationsList) subGraphFData } diff --git a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/LockActorTest.scala b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/LockActorTest.scala index e1c2e595a..864844c98 100644 --- a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/LockActorTest.scala +++ b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/LockActorTest.scala @@ -1,7 +1,7 @@ package org.sunbird.actors import java.util -import akka.actor.Props +import org.apache.pekko.actor.Props import org.scalamock.scalatest.MockFactory import org.sunbird.common.dto.{Request, Response, ResponseParams} import org.sunbird.common.exception.ResponseCode diff --git a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/ObjectCategoryActorTest.scala b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/ObjectCategoryActorTest.scala index b904dd686..339cb723d 100644 --- a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/ObjectCategoryActorTest.scala +++ b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/ObjectCategoryActorTest.scala @@ -2,7 +2,7 @@ package org.sunbird.actors import java.util -import akka.actor.Props +import org.apache.pekko.actor.Props import org.apache.commons.lang3.StringUtils import org.scalamock.scalatest.MockFactory import org.sunbird.common.dto.Request @@ -11,7 +11,8 @@ import org.sunbird.graph.{GraphService, OntologyEngineContext} import org.sunbird.graph.dac.model.{Node, SearchCriteria} import org.sunbird.utils.Constants -import scala.collection.JavaConversions.mapAsJavaMap +import scala.jdk.CollectionConverters._ +import scala.collection.mutable import scala.concurrent.Future import scala.concurrent.ExecutionContext.Implicits.global @@ -31,7 +32,7 @@ class ObjectCategoryActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(nodes)).anyNumberOfTimes() val request = getCategoryRequest() - request.putAll(mapAsJavaMap(Map("name" -> "1234"))) + request.putAll(Map[String,AnyRef]("name" -> "1234").asJava) request.setOperation(Constants.CREATE_OBJECT_CATEGORY) val response = callActor(request, Props(new ObjectCategoryActor())) assert(response.get(Constants.IDENTIFIER) != null) @@ -41,7 +42,7 @@ class ObjectCategoryActorTest extends BaseSpec with MockFactory { it should "return exception for categoryNode without name" in { implicit val oec: OntologyEngineContext = mock[OntologyEngineContext] val request = getCategoryRequest() - request.putAll(mapAsJavaMap(Map("translations" -> 
Map("en" -> "english", "hi" -> "hindi")))) + request.putAll(Map[String,AnyRef]("translations" -> Map[String,AnyRef]("en" -> "english", "hi" -> "hindi").asJava).asJava) request.setOperation(Constants.CREATE_OBJECT_CATEGORY) val response = callActor(request, Props(new ObjectCategoryActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -60,7 +61,7 @@ class ObjectCategoryActorTest extends BaseSpec with MockFactory { val request = getCategoryRequest() request.getContext.put(Constants.IDENTIFIER, "obj-cat:1234") - request.putAll(mapAsJavaMap(Map("description" -> "test desc"))) + request.putAll(Map[String,AnyRef]("description" -> "test desc").asJava) request.setOperation(Constants.UPDATE_OBJECT_CATEGORY) val response = callActor(request, Props(new ObjectCategoryActor())) assert("successful".equals(response.getParams.getStatus)) @@ -75,7 +76,7 @@ class ObjectCategoryActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes() val request = getCategoryRequest() request.getContext.put(Constants.IDENTIFIER, "obj-cat:1234") - request.putAll(mapAsJavaMap(Map("fields" -> ""))) + request.putAll(Map[String,AnyRef]("fields" -> "").asJava) request.setOperation(Constants.READ_OBJECT_CATEGORY) val response = callActor(request, Props(new ObjectCategoryActor())) assert("successful".equals(response.getParams.getStatus)) diff --git a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/ObjectCategoryDefinitionActorTest.scala b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/ObjectCategoryDefinitionActorTest.scala index 79f56bddc..ebd88843d 100644 --- a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/ObjectCategoryDefinitionActorTest.scala +++ b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/ObjectCategoryDefinitionActorTest.scala @@ -2,7 +2,7 @@ package org.sunbird.actors import java.util -import akka.actor.Props +import org.apache.pekko.actor.Props import org.apache.commons.lang3.StringUtils import org.scalamock.scalatest.MockFactory import org.sunbird.common.dto.{Request, Response} @@ -11,7 +11,8 @@ import org.sunbird.graph.{GraphService, OntologyEngineContext} import org.sunbird.graph.dac.model.{Node, SearchCriteria} import org.sunbird.utils.Constants -import scala.collection.JavaConversions.mapAsJavaMap +import scala.jdk.CollectionConverters._ +import scala.collection.mutable import scala.concurrent.Future import scala.concurrent.ExecutionContext.Implicits.global @@ -44,10 +45,10 @@ class ObjectCategoryDefinitionActorTest extends BaseSpec with MockFactory { val request = getCategoryDefintionRequest() val objectMetadata = new util.HashMap[String, AnyRef](){{ - put("schema", new util.HashMap()) - put("config", new util.HashMap()) + put("schema", new util.HashMap[String, AnyRef]()) + put("config", new util.HashMap[String, AnyRef]()) }} - request.putAll(mapAsJavaMap(Map("targetObjectType" -> "Content", "categoryId" -> "obj-cat:1234", "objectMetadata" -> objectMetadata))) + request.putAll(mutable.Map[String, AnyRef]("targetObjectType" -> "Content", "categoryId" -> "obj-cat:1234", "objectMetadata" -> objectMetadata).asJava) request.setOperation(Constants.CREATE_OBJECT_CATEGORY_DEFINITION) val response = callActor(request, Props(new ObjectCategoryDefinitionActor())) assert(response.get(Constants.IDENTIFIER) != null) @@ -60,7 +61,7 @@ class ObjectCategoryDefinitionActorTest extends BaseSpec with MockFactory { 
(oec.graphService _).expects().returns(graphDB).anyNumberOfTimes() (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(getCategoryDefinitionNode())) val request = getCategoryDefintionRequest() - request.putAll(mapAsJavaMap(Map("targetObjectType" -> "Content", "categoryId" -> "obj-cat:1234", "objectMetadata" -> Map("schema" -> Map()), "config" -> Map()))) + request.putAll(mutable.Map[String, AnyRef]("targetObjectType" -> "Content", "categoryId" -> "obj-cat:1234", "objectMetadata" -> mutable.Map[String, AnyRef]("schema" -> mutable.Map[String, AnyRef]().asJava, "config" -> mutable.Map[String, AnyRef]().asJava)).asJava) request.setOperation(Constants.CREATE_OBJECT_CATEGORY_DEFINITION) val response = callActor(request, Props(new ObjectCategoryDefinitionActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -70,7 +71,7 @@ class ObjectCategoryDefinitionActorTest extends BaseSpec with MockFactory { it should "should throw exception for blank categoryId" in { implicit val oec: OntologyEngineContext = mock[OntologyEngineContext] val request = getCategoryDefintionRequest() - request.putAll(mapAsJavaMap(Map("tagetObjectType" -> "Content", "categoryId" -> ""))) + request.putAll(mutable.Map[String, AnyRef]("tagetObjectType" -> "Content", "categoryId" -> "").asJava) request.setOperation(Constants.CREATE_OBJECT_CATEGORY_DEFINITION) val response = callActor(request, Props(new ObjectCategoryDefinitionActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -85,7 +86,7 @@ class ObjectCategoryDefinitionActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes() val request = getCategoryDefintionRequest() request.getContext.put(Constants.IDENTIFIER, "obj-cat:1234_content_all") - request.putAll(mapAsJavaMap(Map("fields" -> ""))) + request.putAll(mutable.Map[String, AnyRef]("fields" -> "").asJava) request.setOperation(Constants.READ_OBJECT_CATEGORY_DEFINITION) val response = callActor(request, Props(new ObjectCategoryDefinitionActor())) val objectCategoryDefinition = response.getResult.getOrDefault("objectCategoryDefinition", new util.HashMap[String, AnyRef]()).asInstanceOf[util.Map[String, AnyRef]] @@ -100,7 +101,7 @@ class ObjectCategoryDefinitionActorTest extends BaseSpec with MockFactory { val node = getCategoryDefinitionNodeForRead() (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes() val request = getCategoryDefintionRequest() - request.putAll(mapAsJavaMap(Map("fields" -> "", "REQ_METHOD" -> "POST", "objectType" -> "Content", "name" -> "1234"))) + request.putAll(mutable.Map[String, AnyRef]("fields" -> "", "REQ_METHOD" -> "POST", "objectType" -> "Content", "name" -> "1234").asJava) request.setOperation(Constants.READ_OBJECT_CATEGORY_DEFINITION) val response = callActor(request, Props(new ObjectCategoryDefinitionActor())) val objectCategoryDefinition = response.getResult.getOrDefault("objectCategoryDefinition", new util.HashMap[String, AnyRef]()).asInstanceOf[util.Map[String, AnyRef]] @@ -117,7 +118,7 @@ class ObjectCategoryDefinitionActorTest extends BaseSpec with MockFactory { (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, "obj-cat:1234_content_all", *, *) .returns(Future(node)).anyNumberOfTimes() val request = getCategoryDefintionRequest() - 
request.putAll(mapAsJavaMap(Map("fields" -> "", "REQ_METHOD" -> "POST", "objectType" -> "Content", "name" -> "1234", "channel" -> "test"))) + request.putAll(mutable.Map[String, AnyRef]("fields" -> "", "REQ_METHOD" -> "POST", "objectType" -> "Content", "name" -> "1234", "channel" -> "test").asJava) request.setOperation(Constants.READ_OBJECT_CATEGORY_DEFINITION) val response = callActor(request, Props(new ObjectCategoryDefinitionActor())) val objectCategoryDefinition = response.getResult.getOrDefault("objectCategoryDefinition", new util.HashMap[String, AnyRef]()).asInstanceOf[util.Map[String, AnyRef]] @@ -149,7 +150,7 @@ class ObjectCategoryDefinitionActorTest extends BaseSpec with MockFactory { val request = getCategoryDefintionRequest() request.getContext.put(Constants.IDENTIFIER, "obj-cat:1234_content_all") - request.putAll(mapAsJavaMap(Map("description" -> "test desc"))) + request.putAll(mutable.Map[String, AnyRef]("description" -> "test desc").asJava) request.setOperation(Constants.UPDATE_OBJECT_CATEGORY_DEFINITION) val response = callActor(request, Props(new ObjectCategoryDefinitionActor())) assert("successful".equals(response.getParams.getStatus)) @@ -173,7 +174,7 @@ class ObjectCategoryDefinitionActorTest extends BaseSpec with MockFactory { }) val request = getCategoryDefintionRequest() request.getContext.put(Constants.IDENTIFIER, "obj-cat:1234_content_all") - request.putAll(mapAsJavaMap(Map("description" -> "test desc", "categoryId" -> "obj-cat:test-1234", "channel" -> "abc"))) + request.putAll(mutable.Map[String, AnyRef]("description" -> "test desc", "categoryId" -> "obj-cat:test-1234", "channel" -> "abc").asJava) request.setOperation(Constants.UPDATE_OBJECT_CATEGORY_DEFINITION) val response = callActor(request, Props(new ObjectCategoryDefinitionActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -210,8 +211,8 @@ class ObjectCategoryDefinitionActorTest extends BaseSpec with MockFactory { put("targetObjectType", "Content") put("objectMetadata", new util.HashMap[String, AnyRef]() { { - put("config", new util.HashMap()) - put("schema", new util.HashMap()) + put("config", new util.HashMap[String, AnyRef]()) + put("schema", new util.HashMap[String, AnyRef]()) } }) } diff --git a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/TermActorTest.scala b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/TermActorTest.scala index ce9a4e39e..5ac46d446 100644 --- a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/TermActorTest.scala +++ b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/actors/TermActorTest.scala @@ -1,7 +1,7 @@ package org.sunbird.actors import java.util -import akka.actor.Props +import org.apache.pekko.actor.Props import org.scalamock.scalatest.MockFactory import org.sunbird.common.dto.{Request, Response} import org.sunbird.common.exception.ResponseCode @@ -10,7 +10,8 @@ import org.sunbird.graph.{GraphService, OntologyEngineContext} import org.sunbird.graph.dac.model.{Node, SearchCriteria} import org.sunbird.utils.Constants -import scala.collection.JavaConversions.mapAsJavaMap +import scala.jdk.CollectionConverters._ +import scala.collection.mutable import scala.concurrent.Future import scala.concurrent.ExecutionContext.Implicits.global @@ -46,7 +47,7 @@ class TermActorTest extends BaseSpec with MockFactory{ (graphDB.createRelation _).expects(*, *).returns(Future(new Response())) val request = getTermRequest() - request.putAll(mapAsJavaMap(Map("term"-> mapAsJavaMap(Map("code"->"class1", 
"name"->"Class1","description"->"Class1")), "framework"->"NCF", "category"->"board"))) + request.putAll(mutable.Map[String, AnyRef]("term"-> mutable.Map[String, AnyRef]("code"->"class1", "name"->"Class1","description"->"Class1").asJava, "framework"->"NCF", "category"->"board").asJava) request.setOperation(Constants.CREATE_TERM) val response = callActor(request, Props(new TermActor())) assert(response.get(Constants.NODE_ID).equals(new util.ArrayList[String]() { add("ncf_board_class1") })) @@ -68,7 +69,7 @@ class TermActorTest extends BaseSpec with MockFactory{ }) (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes() val request = getTermRequest() - request.putAll(mapAsJavaMap(Map("code"->"class1", "name"->"Class1", "description"->"Class1", "framework"->"NCF", "category"->"board"))) + request.putAll(mutable.Map[String, AnyRef]("code"->"class1", "name"->"Class1", "description"->"Class1", "framework"->"NCF", "category"->"board").asJava) request.setOperation(Constants.CREATE_TERM) val response = callActor(request, Props(new TermActor())) assert("failed".equals(response.getParams.getStatus)) @@ -90,7 +91,7 @@ class TermActorTest extends BaseSpec with MockFactory{ }) (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes() val request = getTermRequest() - request.putAll(mapAsJavaMap(Map("code" -> "class1", "name" -> "Class1", "description" -> "Class1", "framework" -> "NCF", "category" -> "board"))) + request.putAll(mutable.Map[String, AnyRef]("code" -> "class1", "name" -> "Class1", "description" -> "Class1", "framework" -> "NCF", "category" -> "board").asJava) request.setOperation(Constants.CREATE_TERM) val response = callActor(request, Props(new TermActor())) assert("failed".equals(response.getParams.getStatus)) @@ -112,7 +113,7 @@ class TermActorTest extends BaseSpec with MockFactory{ }) (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes() val request = getTermRequest() - request.putAll(mapAsJavaMap(Map("code" -> "class1", "name" -> "Class1", "description" -> "Class1", "framework" -> "NCF", "category" ->"", "channel" -> "sunbird"))) + request.putAll(mutable.Map[String, AnyRef]("code" -> "class1", "name" -> "Class1", "description" -> "Class1", "framework" -> "NCF", "category" ->"", "channel" -> "sunbird").asJava) request.setOperation(Constants.CREATE_TERM) val response = callActor(request, Props(new TermActor())) assert("failed".equals(response.getParams.getStatus)) @@ -134,7 +135,7 @@ class TermActorTest extends BaseSpec with MockFactory{ }) (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes() val request = getTermRequest() - request.putAll(mapAsJavaMap(Map("code" -> "class1", "name" -> "Class1", "description" -> "Class1", "framework" -> "", "category" -> "board"))) + request.putAll(mutable.Map[String, AnyRef]("code" -> "class1", "name" -> "Class1", "description" -> "Class1", "framework" -> "", "category" -> "board").asJava) request.setOperation(Constants.CREATE_TERM) val response = callActor(request, Props(new TermActor())) assert("failed".equals(response.getParams.getStatus)) @@ -158,7 +159,7 @@ class TermActorTest extends BaseSpec with MockFactory{ (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, 
*).returns(Future(node)).anyNumberOfTimes() val request = getTermRequest() - request.putAll(mapAsJavaMap(Map("code" -> "", "name" -> "Class1", "description" -> "Class1", "framework" -> "NCF", "category" -> "board"))) + request.putAll(mutable.Map[String, AnyRef]("code" -> "", "name" -> "Class1", "description" -> "Class1", "framework" -> "NCF", "category" -> "board").asJava) request.setOperation(Constants.CREATE_TERM) val response = callActor(request, Props(new TermActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -176,7 +177,7 @@ class TermActorTest extends BaseSpec with MockFactory{ (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, "ncf_board_class1", *, *).returns(Future(termNode)) val request = getTermRequest() request.getContext.put("identifier", "ncf_board_class1") - request.putAll(mapAsJavaMap(Map("framework" -> "NCF", "term" -> "class1", "category" -> "board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF", "term" -> "class1", "category" -> "board").asJava) request.setOperation(Constants.READ_TERM) val response = callActor(request, Props(new TermActor())) assert("successful".equals(response.getParams.getStatus)) @@ -205,7 +206,7 @@ class TermActorTest extends BaseSpec with MockFactory{ (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, "ncf_board_class1", *, *).returns(Future(node)) val request = getTermRequest() request.getContext.put("identifier", "ncf_board_class1") - request.putAll(mapAsJavaMap(Map("framework" -> "NCF", "term" -> "class1", "category" -> "board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF", "term" -> "class1", "category" -> "board").asJava) request.setOperation(Constants.READ_TERM) val response = callActor(request, Props(new TermActor())) assert("failed".equals(response.getParams.getStatus)) @@ -219,7 +220,7 @@ class TermActorTest extends BaseSpec with MockFactory{ (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, "ncf_board", *, *).returns(Future(categoryInstanceNode)) val request = getTermRequest() request.getContext.put("identifier", "ncf_board_class1") - request.putAll(mapAsJavaMap(Map("framework" -> "NCF", "term" -> "", "category" -> "board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF", "term" -> "", "category" -> "board").asJava) request.setOperation(Constants.READ_TERM) val response = callActor(request, Props(new TermActor())) assert("failed".equals(response.getParams.getStatus)) @@ -238,7 +239,7 @@ class TermActorTest extends BaseSpec with MockFactory{ val request = getTermRequest() request.getContext.put(Constants.IDENTIFIER, "ncf_board_class1") - request.putAll(mapAsJavaMap(Map("framework" -> "NCF", "name" -> "Board", "description" -> "Board", "code" -> "board", "term" ->"class1", "category" -> "board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF", "name" -> "Board", "description" -> "Board", "code" -> "board", "term" ->"class1", "category" -> "board").asJava) request.setOperation(Constants.UPDATE_TERM) val response = callActor(request, Props(new TermActor())) assert("successful".equals(response.getParams.getStatus)) @@ -251,7 +252,7 @@ class TermActorTest extends BaseSpec with MockFactory{ val request = getTermRequest() request.getContext.put(Constants.IDENTIFIER, "ncf_board_class1") - request.putAll(mapAsJavaMap(Map("framework" -> "NCF", "identifier"->"ncf_board_class1","name" -> "Board", "description" -> "Board", "code" -> "board", 
"term" -> "class1", "channel" -> "sunbird", "category" -> "board"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF", "identifier"->"ncf_board_class1","name" -> "Board", "description" -> "Board", "code" -> "board", "term" -> "class1", "channel" -> "sunbird", "category" -> "board").asJava) request.setOperation(Constants.UPDATE_TERM) val response = callActor(request, Props(new TermActor())) assert(response.getResponseCode == ResponseCode.CLIENT_ERROR) @@ -270,8 +271,8 @@ class TermActorTest extends BaseSpec with MockFactory{ (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(nodes)).anyNumberOfTimes() val request = getTermRequest() request.getContext.put("identifier", "ncf_board_class1") - request.putAll(mapAsJavaMap(Map("framework" -> "NCF", "name" -> "Board", "description" -> "Board", "code" -> "board", "term" ->"class1" , "category" -> "board"))) - request.putAll(mapAsJavaMap(Map("identifier" -> "ncf_board_class1"))) + request.putAll(mutable.Map[String, AnyRef]("framework" -> "NCF", "name" -> "Board", "description" -> "Board", "code" -> "board", "term" ->"class1" , "category" -> "board").asJava) + request.putAll(mutable.Map[String, AnyRef]("identifier" -> "ncf_board_class1").asJava) request.setOperation(Constants.RETIRE_TERM) val response = callActor(request, Props(new TermActor())) assert("successful".equals(response.getParams.getStatus)) diff --git a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/managers/FrameworkManagerTest.scala b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/managers/FrameworkManagerTest.scala index 0eb008eba..efd2b0fea 100644 --- a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/managers/FrameworkManagerTest.scala +++ b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/managers/FrameworkManagerTest.scala @@ -24,7 +24,7 @@ class FrameworkManagerTest extends FlatSpec with Matchers with MockFactory{ framework.put("systemDefault", "No") framework.put("objectType", "Framework") framework.put("status", "Live") - framework.put("categories", new util.ArrayList[util.Map[String, AnyRef]]()) + framework.put("categories", new util.ArrayList[util.Map[String,AnyRef]]()) framework.put("owner", "in.ekstep") framework.put("type", "K-12") @@ -34,7 +34,7 @@ class FrameworkManagerTest extends FlatSpec with Matchers with MockFactory{ val category2 = new util.HashMap[String, AnyRef]() category2.put("name", "Grade") category2.put("code", "grade") - val categories = new util.ArrayList[util.Map[String, AnyRef]]() + val categories = new util.ArrayList[util.Map[String,AnyRef]]() categories.add(category1) categories.add(category2) @@ -46,7 +46,7 @@ class FrameworkManagerTest extends FlatSpec with Matchers with MockFactory{ val term1 = new util.HashMap[String, AnyRef]() term1.put("name", "Term1") term1.put("code", "term1") - val associations1 = new util.ArrayList[util.Map[String, AnyRef]]() + val associations1 = new util.ArrayList[util.Map[String,AnyRef]]() val association1 = new util.HashMap[String, AnyRef]() association1.put("category", "Category1") associations1.add(association1) @@ -55,15 +55,15 @@ class FrameworkManagerTest extends FlatSpec with Matchers with MockFactory{ val term2 = new util.HashMap[String, AnyRef]() term2.put("name", "Term2") term2.put("code", "term2") - val associations2 = new util.ArrayList[util.Map[String, AnyRef]]() + val associations2 = new util.ArrayList[util.Map[String,AnyRef]]() val association2 = new util.HashMap[String, AnyRef]() association2.put("category", "Category2") 
associations2.add(association2) term2.put("associations", associations2) - category1.put("terms", new util.ArrayList[util.Map[String, AnyRef]]()) - category1.get("terms").asInstanceOf[util.List[util.Map[String, AnyRef]]].add(term1) - category1.get("terms").asInstanceOf[util.List[util.Map[String, AnyRef]]].add(term2) + category1.put("terms", new util.ArrayList[util.Map[String,AnyRef]]()) + category1.get("terms").asInstanceOf[util.List[util.Map[String,AnyRef]]].add(term1) + category1.get("terms").asInstanceOf[util.List[util.Map[String,AnyRef]]].add(term2) val returnCategories = new util.ArrayList[String]() returnCategories.add("Category1") @@ -73,9 +73,9 @@ class FrameworkManagerTest extends FlatSpec with Matchers with MockFactory{ val frameworkWithAssociationsRemoved = filterFrameworkCategories(framework, categoryNames) val filteredTerms = frameworkWithAssociationsRemoved - .getOrElse("categories", new util.ArrayList[util.Map[String, AnyRef]]()) - .asInstanceOf[util.List[util.Map[String, AnyRef]]] - .flatMap(_.getOrDefault("terms", new util.ArrayList[util.Map[String, AnyRef]]).asInstanceOf[util.List[util.Map[String, AnyRef]]]) + .getOrElse("categories", new util.ArrayList[util.Map[String,AnyRef]]()) + .asInstanceOf[util.List[util.Map[String,AnyRef]]] + .flatMap[util.Map[String,AnyRef]](_.getOrDefault("terms", new util.ArrayList[util.Map[String,AnyRef]]).asInstanceOf[util.List[util.Map[String,AnyRef]]]) assert(filteredTerms.contains(term1)) assert(filteredTerms.contains(term2)) diff --git a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/utils/CategoryCacheTest.scala b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/utils/CategoryCacheTest.scala index d2c5a3188..2cee467a3 100644 --- a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/utils/CategoryCacheTest.scala +++ b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/utils/CategoryCacheTest.scala @@ -3,7 +3,6 @@ package org.sunbird.utils import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FlatSpec, Matchers} import org.sunbird.cache.impl.RedisCache import java.util -import scala.collection.JavaConverters._ class CategoryCacheTest extends FlatSpec with Matchers with BeforeAndAfterAll with BeforeAndAfterEach { @@ -20,9 +19,9 @@ class CategoryCacheTest extends FlatSpec with Matchers with BeforeAndAfterAll wi term1.put("name", "Term1") val term2 = new util.HashMap[String, AnyRef]() term2.put("name", "Term2") - val terms1 = List(term1, term2).asJava + val terms1 = util.Arrays.asList(term1, term2) category1.put("terms", terms1) - framework.put("categories", List(category1).asJava) + framework.put("categories", util.Arrays.asList(category1)) CategoryCache.setFramework(frameworkId, framework) val cachedTerms = RedisCache.getList("cat_framework_idCategory1") cachedTerms should contain theSameElementsAs List("Term1", "Term2") @@ -51,7 +50,7 @@ class CategoryCacheTest extends FlatSpec with Matchers with BeforeAndAfterAll wi val category1 = new util.HashMap[String, AnyRef]() category1.put("name", "Category1") category1.put("terms", new util.ArrayList[util.Map[String, AnyRef]]()) - framework.put("categories", List(category1).asJava) + framework.put("categories", util.Arrays.asList(category1)) CategoryCache.setFramework(frameworkId, framework) val cachedTerms = RedisCache.getList("cat_framework_idCategory1") cachedTerms shouldBe empty diff --git a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/utils/FrameworkCacheTest.scala 
b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/utils/FrameworkCacheTest.scala index 1dd356b21..c500c8bbb 100644 --- a/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/utils/FrameworkCacheTest.scala +++ b/taxonomy-api/taxonomy-actors/src/test/scala/org/sunbird/utils/FrameworkCacheTest.scala @@ -4,6 +4,7 @@ import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers} import org.sunbird.cache.impl.RedisCache import org.sunbird.graph.util.ScalaJsonUtil import java.util +import scala.jdk.CollectionConverters._ class FrameworkCacheTest extends FlatSpec with Matchers with BeforeAndAfterAll{ @@ -43,10 +44,10 @@ class FrameworkCacheTest extends FlatSpec with Matchers with BeforeAndAfterAll{ "save" should "save framework metadata to cache" in { FrameworkCache.cacheEnabled = true - val framework = Map("identifier" -> "framework_id", "name" -> "Framework1") + val framework = Map[String,AnyRef]("identifier" -> "framework_id", "name" -> "Framework1") val categoryNames = new util.ArrayList[String]() categoryNames.add("Category1") - val identifier = framework.getOrElse("identifier", "") + val identifier = framework.getOrElse("identifier", "").asInstanceOf[String] val cacheKey = FrameworkCache.getFwCacheKey(identifier, categoryNames) RedisCache.set(cacheKey, "sample-cache-data", FrameworkCache.cacheTtl) FrameworkCache.save(framework, categoryNames) diff --git a/taxonomy-api/taxonomy-service/app/controllers/BaseController.scala b/taxonomy-api/taxonomy-service/app/controllers/BaseController.scala index 2e6cdd05f..7f282c5ad 100644 --- a/taxonomy-api/taxonomy-service/app/controllers/BaseController.scala +++ b/taxonomy-api/taxonomy-service/app/controllers/BaseController.scala @@ -2,16 +2,17 @@ package controllers import java.util.UUID -import akka.actor.ActorRef -import akka.pattern.Patterns +import org.apache.pekko.actor.ActorRef +import org.apache.pekko.pattern.Patterns import org.sunbird.common.DateUtils import org.sunbird.common.dto.{Response, ResponseHandler} import org.sunbird.common.exception.ResponseCode import play.api.mvc._ -import utils.JavaJsonUtils +import utils.{Constants, JavaJsonUtils} + +import scala.jdk.CollectionConverters._ -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.concurrent.{ExecutionContext, Future} diff --git a/taxonomy-api/taxonomy-service/app/controllers/HealthController.scala b/taxonomy-api/taxonomy-service/app/controllers/HealthController.scala index a495a9f40..b6f9f4420 100644 --- a/taxonomy-api/taxonomy-service/app/controllers/HealthController.scala +++ b/taxonomy-api/taxonomy-service/app/controllers/HealthController.scala @@ -1,6 +1,7 @@ package controllers -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} +import controllers.BaseController import handlers.SignalHandler import javax.inject._ import play.api.mvc._ diff --git a/taxonomy-api/taxonomy-service/app/controllers/v3/CategoryController.scala b/taxonomy-api/taxonomy-service/app/controllers/v3/CategoryController.scala index c238d5494..93cf3f87d 100644 --- a/taxonomy-api/taxonomy-service/app/controllers/v3/CategoryController.scala +++ b/taxonomy-api/taxonomy-service/app/controllers/v3/CategoryController.scala @@ -1,12 +1,12 @@ package controllers.v3 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} import scala.concurrent.{ExecutionContext, Future} import controllers.BaseController import javax.inject.{Inject, Named, Singleton} -import org.sunbird.utils.Constants +import 
utils.Constants import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId, JavaJsonUtils} diff --git a/taxonomy-api/taxonomy-service/app/controllers/v3/CategoryInstanceController.scala b/taxonomy-api/taxonomy-service/app/controllers/v3/CategoryInstanceController.scala index deb7e6584..0fdeaa3d9 100644 --- a/taxonomy-api/taxonomy-service/app/controllers/v3/CategoryInstanceController.scala +++ b/taxonomy-api/taxonomy-service/app/controllers/v3/CategoryInstanceController.scala @@ -1,15 +1,14 @@ package controllers.v3 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} import controllers.BaseController import org.apache.commons.lang3.StringUtils import org.sunbird.common.exception.ClientException -import org.sunbird.utils.Constants +import utils.Constants import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId} import javax.inject.{Inject, Named} -import scala.collection.JavaConverters.asJavaIterableConverter import scala.concurrent.ExecutionContext class CategoryInstanceController @Inject()(@Named(ActorNames.CATEGORY_INSTANCE_ACTOR) categoryInstanceActor: ActorRef, cc: ControllerComponents, actorSystem: ActorSystem)(implicit exec: ExecutionContext) extends BaseController(cc) { diff --git a/taxonomy-api/taxonomy-service/app/controllers/v3/FrameworkController.scala b/taxonomy-api/taxonomy-service/app/controllers/v3/FrameworkController.scala index e86c43763..b4642545d 100644 --- a/taxonomy-api/taxonomy-service/app/controllers/v3/FrameworkController.scala +++ b/taxonomy-api/taxonomy-service/app/controllers/v3/FrameworkController.scala @@ -1,13 +1,15 @@ package controllers.v3 -import akka.actor.{ActorRef, ActorSystem} +import scala.concurrent.{ExecutionContext, Future} +import scala.jdk.CollectionConverters._ + +import org.apache.pekko.actor.{ActorRef, ActorSystem} import com.google.inject.Singleton import controllers.BaseController import javax.inject.{Inject, Named} -import org.sunbird.utils.Constants +import utils.Constants import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId, JavaJsonUtils} -import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext,Future} import org.sunbird.common.dto.ResponseHandler diff --git a/taxonomy-api/taxonomy-service/app/controllers/v3/FrameworkTermController.scala b/taxonomy-api/taxonomy-service/app/controllers/v3/FrameworkTermController.scala index 090eff314..2b3a248a3 100644 --- a/taxonomy-api/taxonomy-service/app/controllers/v3/FrameworkTermController.scala +++ b/taxonomy-api/taxonomy-service/app/controllers/v3/FrameworkTermController.scala @@ -1,8 +1,8 @@ package controllers.v3 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} import controllers.BaseController -import org.sunbird.utils.Constants +import utils.Constants import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId} diff --git a/taxonomy-api/taxonomy-service/app/controllers/v3/LockController.scala b/taxonomy-api/taxonomy-service/app/controllers/v3/LockController.scala index 5ae72d6e3..68a57ca0f 100644 --- a/taxonomy-api/taxonomy-service/app/controllers/v3/LockController.scala +++ b/taxonomy-api/taxonomy-service/app/controllers/v3/LockController.scala @@ -1,9 +1,9 @@ package controllers.v3 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} import com.google.inject.Singleton import controllers.BaseController -import org.sunbird.utils.Constants +import utils.Constants import 
play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId} diff --git a/taxonomy-api/taxonomy-service/app/controllers/v4/ObjectCategoryController.scala b/taxonomy-api/taxonomy-service/app/controllers/v4/ObjectCategoryController.scala index 415f0c423..43ab6f79b 100644 --- a/taxonomy-api/taxonomy-service/app/controllers/v4/ObjectCategoryController.scala +++ b/taxonomy-api/taxonomy-service/app/controllers/v4/ObjectCategoryController.scala @@ -1,16 +1,17 @@ package controllers.v4 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} import com.google.inject.Singleton import controllers.BaseController import javax.inject.{Inject, Named} -import org.sunbird.utils.Constants +import utils.Constants import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId} -import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext +import scala.jdk.CollectionConverters._ + @Singleton class ObjectCategoryController @Inject()(@Named(ActorNames.OBJECT_CATEGORY_ACTOR) objectCategoryActor: ActorRef, cc: ControllerComponents, actorSystem: ActorSystem)(implicit exec: ExecutionContext) extends BaseController(cc) { diff --git a/taxonomy-api/taxonomy-service/app/controllers/v4/ObjectCategoryDefinitionController.scala b/taxonomy-api/taxonomy-service/app/controllers/v4/ObjectCategoryDefinitionController.scala index 630464ee2..58fa5ace5 100644 --- a/taxonomy-api/taxonomy-service/app/controllers/v4/ObjectCategoryDefinitionController.scala +++ b/taxonomy-api/taxonomy-service/app/controllers/v4/ObjectCategoryDefinitionController.scala @@ -1,17 +1,18 @@ package controllers.v4 -import akka.actor.{ActorRef, ActorSystem} +import org.apache.pekko.actor.{ActorRef, ActorSystem} import controllers.BaseController import javax.inject.{Inject, Named} import org.apache.commons.lang3.StringUtils import org.sunbird.common.exception.ClientException -import org.sunbird.utils.Constants +import utils.Constants import play.api.mvc.ControllerComponents import utils.{ActorNames, ApiId} -import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext +import scala.jdk.CollectionConverters._ + class ObjectCategoryDefinitionController @Inject()(@Named(ActorNames.OBJECT_CATEGORY_DEFINITION_ACTOR) objCategoryDefinitionActor: ActorRef, cc: ControllerComponents, actorSystem: ActorSystem)(implicit exec: ExecutionContext) extends BaseController(cc) { val OBJECT_TYPE = "ObjectCategoryDefinition" diff --git a/taxonomy-api/taxonomy-service/app/filters/AccessLogFilter.scala b/taxonomy-api/taxonomy-service/app/filters/AccessLogFilter.scala index aad9f8419..3b968119a 100644 --- a/taxonomy-api/taxonomy-service/app/filters/AccessLogFilter.scala +++ b/taxonomy-api/taxonomy-service/app/filters/AccessLogFilter.scala @@ -1,6 +1,6 @@ package filters -import akka.util.ByteString +import org.apache.pekko.util.ByteString import javax.inject.Inject import org.sunbird.telemetry.util.TelemetryAccessEventUtil import play.api.Logging @@ -8,7 +8,8 @@ import play.api.libs.streams.Accumulator import play.api.mvc._ import scala.concurrent.ExecutionContext -import scala.collection.JavaConverters._ + +import scala.jdk.CollectionConverters._ class AccessLogFilter @Inject() (implicit ec: ExecutionContext) extends EssentialFilter with Logging { @@ -27,7 +28,7 @@ class AccessLogFilter @Inject() (implicit ec: ExecutionContext) extends Essentia val path = requestHeader.uri if(!path.contains("/health")){ - val headers = requestHeader.headers.headers.groupBy(_._1).mapValues(_.map(_._2)) 
+ val headers = requestHeader.headers.headers.groupBy(_._1).view.mapValues(_.map(_._2)).toMap val appHeaders = headers.filter(header => xHeaderNames.keySet.contains(header._1.toLowerCase)) .map(entry => (xHeaderNames.get(entry._1.toLowerCase()).get, entry._2.head)) val otherDetails = Map[String, Any]("StartTime" -> startTime, "env" -> "assessment", diff --git a/taxonomy-api/taxonomy-service/app/handlers/SignalHandler.scala b/taxonomy-api/taxonomy-service/app/handlers/SignalHandler.scala index 4cad301c1..3945a5e32 100644 --- a/taxonomy-api/taxonomy-service/app/handlers/SignalHandler.scala +++ b/taxonomy-api/taxonomy-service/app/handlers/SignalHandler.scala @@ -2,13 +2,14 @@ package handlers import java.util.concurrent.TimeUnit -import akka.actor.ActorSystem +import org.apache.pekko.actor.ActorSystem import javax.inject.{Inject, Singleton} import org.slf4j.LoggerFactory import play.api.inject.DefaultApplicationLifecycle import sun.misc.Signal import scala.concurrent.duration.Duration +import scala.concurrent.ExecutionContext @Singleton class SignalHandler @Inject()(implicit actorSystem: ActorSystem, lifecycle: DefaultApplicationLifecycle) { @@ -22,10 +23,10 @@ class SignalHandler @Inject()(implicit actorSystem: ActorSystem, lifecycle: Defa // $COVERAGE-OFF$ Disabling scoverage as this code is impossible to test isShuttingDown = true println("Termination required, swallowing SIGTERM to allow current requests to finish. : " + System.currentTimeMillis()) - actorSystem.scheduler.scheduleOnce(STOP_DELAY)(() => { + actorSystem.scheduler.scheduleOnce(STOP_DELAY) { println("ApplicationLifecycle stop triggered... : " + System.currentTimeMillis()) lifecycle.stop() - })(actorSystem.dispatcher) + }(ExecutionContext.global) // $COVERAGE-ON } }) diff --git a/taxonomy-api/taxonomy-service/app/modules/TaxonomyModule.scala b/taxonomy-api/taxonomy-service/app/modules/TaxonomyModule.scala index 8f9271efa..400ec2a53 100644 --- a/taxonomy-api/taxonomy-service/app/modules/TaxonomyModule.scala +++ b/taxonomy-api/taxonomy-service/app/modules/TaxonomyModule.scala @@ -2,21 +2,21 @@ package modules import com.google.inject.AbstractModule import org.sunbird.actors.{CategoryActor, CategoryInstanceActor, FrameworkActor, HealthActor, LockActor, ObjectCategoryActor, ObjectCategoryDefinitionActor, TermActor} -import play.libs.akka.AkkaGuiceSupport +import play.api.libs.concurrent.PekkoGuiceSupport import utils.ActorNames -class TaxonomyModule extends AbstractModule with AkkaGuiceSupport { +class TaxonomyModule extends AbstractModule with PekkoGuiceSupport { override def configure() = { super.configure() - bindActor(classOf[HealthActor], ActorNames.HEALTH_ACTOR) - bindActor(classOf[ObjectCategoryActor], ActorNames.OBJECT_CATEGORY_ACTOR) - bindActor(classOf[ObjectCategoryDefinitionActor], ActorNames.OBJECT_CATEGORY_DEFINITION_ACTOR) - bindActor(classOf[FrameworkActor], ActorNames.FRAMEWORK_ACTOR) - bindActor(classOf[CategoryActor], ActorNames.CATEGORY_ACTOR) - bindActor(classOf[CategoryInstanceActor], ActorNames.CATEGORY_INSTANCE_ACTOR) - bindActor(classOf[TermActor], ActorNames.TERM_ACTOR) - bindActor(classOf[LockActor], ActorNames.LOCK_ACTOR) + bindActor[HealthActor](ActorNames.HEALTH_ACTOR) + bindActor[ObjectCategoryActor](ActorNames.OBJECT_CATEGORY_ACTOR) + bindActor[ObjectCategoryDefinitionActor](ActorNames.OBJECT_CATEGORY_DEFINITION_ACTOR) + bindActor[FrameworkActor](ActorNames.FRAMEWORK_ACTOR) + bindActor[CategoryActor](ActorNames.CATEGORY_ACTOR) + bindActor[CategoryInstanceActor](ActorNames.CATEGORY_INSTANCE_ACTOR) + 
bindActor[TermActor](ActorNames.TERM_ACTOR) + bindActor[LockActor](ActorNames.LOCK_ACTOR) println("Initialized application actors for taxonomy service") } } diff --git a/taxonomy-api/taxonomy-service/app/utils/Constants.scala b/taxonomy-api/taxonomy-service/app/utils/Constants.scala new file mode 100644 index 000000000..4076687a3 --- /dev/null +++ b/taxonomy-api/taxonomy-service/app/utils/Constants.scala @@ -0,0 +1,83 @@ +package utils + +object Constants { + val CREATE_OBJECT_CATEGORY: String = "createObjectCategory" + val READ_OBJECT_CATEGORY: String = "readObjectCategory" + val UPDATE_OBJECT_CATEGORY: String = "updateObjectCategory" + val IDENTIFIER: String = "identifier" + val NAME: String = "name" + val NODE_ID: String = "node_id" + val CATEGORY_PREFIX: String = "obj-cat:" + val VERSION: String = "version" + val SCHEMA_NAME: String = "schemaName" + val OBJECT_CATEGORY: String = "objectCategory" + val FIELDS: String = "fields" + val OBJECT_CATEGORY_SCHEMA_NAME="objectcategory" + val OBJECT_CATEGORY_SCHEMA_VERSION="1.0" + + // Actor Operations For ObjectCategoryDefinition + val CREATE_OBJECT_CATEGORY_DEFINITION: String = "createObjectCategoryDefinition" + val READ_OBJECT_CATEGORY_DEFINITION: String = "readObjectCategoryDefinition" + val UPDATE_OBJECT_CATEGORY_DEFINITION: String = "updateObjectCategoryDefinition" + // Constants For ObjectCategoryDefinition + val CATEGORY_ID = "categoryId" + val CHANNEL = "channel"; + val TARGET_OBJECT_TYPE = "targetObjectType" + val OBJECT_CATEGORY_DEFINITION: String = "objectCategoryDefinition" + + // Constants For Framework + val FRAMEWORK: String = "framework"; + val CREATE_FRAMEWORK: String = "createFramework"; + val READ_FRAMEWORK: String = "readFramework" + val UPDATE_FRAMEWORK: String = "updateFramework"; + val RETIRE_FRAMEWORK: String = "retireFramework"; + val COPY_FRAMEWORK: String = "copyFramework"; + val PUBLISH_FRAMEWORK: String = "publishFramework"; + val FRAMEWORK_SCHEMA_VERSION = "1.0"; + val FRAMEWORK_SCHEMA_NAME="framework" + val CATEGORIES: String = "categories" + val CHANNEL_SCHEMA_NAME="channel" + val CHANNEL_SCHEMA_VERSION = "1.0"; + val PUBLISH_STATUS: String = "publishStatus" + + val CATEGORY: String = "category"; + val CREATE_CATEGORY: String = "createCategory"; + val READ_CATEGORY: String = "readCategory" + val UPDATE_CATEGORY: String = "updateCategory" + val RETIRE_CATEGORY: String = "retireCategory" + val CATEGORY_SCHEMA_VERSION = "1.0"; + val CATEGORY_SCHEMA_NAME = "category" + val CODE: String = "code"; + + val CREATE_CATEGORY_INSTANCE: String = "createCategoryInstance"; + val READ_CATEGORY_INSTANCE: String = "readCategoryInstance" + val UPDATE_CATEGORY_INSTANCE: String = "updateCategoryInstance" + val RETIRE_CATEGORY_INSTANCE: String = "retireCategoryInstance" + val CATEGORY_INSTANCE_SCHEMA_VERSION: String = "1.0"; + val CATEGORY_INSTANCE_SCHEMA_NAME: String = "CategoryInstance" + val VERSION_KEY: String = "versionKey" + + val TERM: String = "term"; + val CREATE_TERM: String = "createTerm"; + val READ_TERM: String = "readTerm" + val UPDATE_TERM: String = "updateTerm" + val RETIRE_TERM: String = "retireTerm" + val TERM_SCHEMA_VERSION: String = "1.0"; + val TERM_SCHEMA_NAME: String = "term" + + // Constants For Lock + val LOCK: String = "lock"; + val CREATE_LOCK: String = "createLock"; + val REFRESH_LOCK: String = "refreshLock"; + val RETIRE_LOCK: String = "retireLock"; + val LIST_LOCK: String = "listLock"; + val LOCK_SCHEMA_VERSION = "1.0"; + val LOCK_SCHEMA_NAME="lock" + val X_DEVICE_ID="deviceId" + val 
X_AUTHENTICATED_USER_ID="userId" + val RESOURCE_ID="resourceId" + val RESOURCE_TYPE= "resourceType" + val RESOURCE_INFO= "resourceInfo" + val CREATOR_INFO= "creatorInfo" + val CREATED_BY= "createdBy" +} \ No newline at end of file diff --git a/taxonomy-api/taxonomy-service/conf/application.conf b/taxonomy-api/taxonomy-service/conf/application.conf index 955d5c53d..661eeb51c 100644 --- a/taxonomy-api/taxonomy-service/conf/application.conf +++ b/taxonomy-api/taxonomy-service/conf/application.conf @@ -22,13 +22,13 @@ # Play uses Akka internally and exposes Akka Streams and actors in Websockets and # other streaming HTTP responses. akka { - # "akka.log-config-on-start" is extraordinarly useful because it log the complete + # "pekko.log-config-on-start" is extraordinarly useful because it log the complete # configuration at INFO level, including defaults and overrides, so it s worth # putting at the very top. # # Put the following in your conf/logback.xml file: # - # + # # # And then uncomment this line to debug the configuration. # @@ -278,13 +278,13 @@ play.filters { } play.http.parser.maxMemoryBuffer = 50MB -akka.http.parsing.max-content-length = 50MB +pekko.http.parsing.max-content-length = 50MB schema.base_path="../../schemas/" # Graph Configuration graph.dir=/data/testingGraphDB -akka.request_timeout=30 +pekko.request_timeout=30 environment.id=10000000 graph.ids=["domain"] graph.passport.key.base=31b6fd1c4d64e745c867e61a45edc34a diff --git a/taxonomy-api/taxonomy-service/conf/logback.xml b/taxonomy-api/taxonomy-service/conf/logback.xml index 73529d622..ce441915b 100644 --- a/taxonomy-api/taxonomy-service/conf/logback.xml +++ b/taxonomy-api/taxonomy-service/conf/logback.xml @@ -4,10 +4,11 @@ - + + - %d %msg%n + %d{HH:mm:ss.SSS} %coloredLevel %logger{36} - %msg%n @@ -15,12 +16,17 @@ - + + + + + + diff --git a/taxonomy-api/taxonomy-service/conf/routes b/taxonomy-api/taxonomy-service/conf/routes index afbc8b8f0..2976b113e 100644 --- a/taxonomy-api/taxonomy-service/conf/routes +++ b/taxonomy-api/taxonomy-service/conf/routes @@ -1,19 +1,19 @@ # Routes # This file defines all application routes (Higher priority routes first) # ~~~~ -GET /health controllers.HealthController.health +GET /health controllers.HealthController.health() # Framework API's -POST /framework/v3/create controllers.v3.FrameworkController.createFramework +POST /framework/v3/create controllers.v3.FrameworkController.createFramework() GET /framework/v3/read/:identifier controllers.v3.FrameworkController.readFramework(identifier:String, fields: Option[String],categories: Option[String]) PATCH /framework/v3/update/:identifier controllers.v3.FrameworkController.updateFramework(identifier:String) -POST /framework/v3/list controllers.v3.FrameworkController.listFramework +POST /framework/v3/list controllers.v3.FrameworkController.listFramework() DELETE /framework/v3/retire/:identifier controllers.v3.FrameworkController.retire(identifier:String) POST /framework/v3/copy/:identifier controllers.v3.FrameworkController.copyFramework(identifier:String) POST /framework/v3/publish/:identifier controllers.v3.FrameworkController.publish(identifier:String) # Category API's -POST /framework/v3/category/master/create controllers.v3.CategoryController.createCategory +POST /framework/v3/category/master/create controllers.v3.CategoryController.createCategory() GET /framework/v3/category/master/read/:identifier controllers.v3.CategoryController.readCategory(identifier:String) PATCH /framework/v3/category/master/update/:identifier 
controllers.v3.CategoryController.updateCategory(identifier:String) DELETE /framework/v3/category/master/retire/:identifier controllers.v3.CategoryController.retireCategory(identifier:String) @@ -31,18 +31,18 @@ PATCH /framework/v3/term/update/:termId controllers.v3.Framewor DELETE /framework/v3/term/retire/:termId controllers.v3.FrameworkTermController.retireFrameworkTerm(termId: String, framework: String, category: String) # Object Category API's -POST /object/category/v4/create controllers.v4.ObjectCategoryController.create +POST /object/category/v4/create controllers.v4.ObjectCategoryController.create() PATCH /object/category/v4/update/:identifier controllers.v4.ObjectCategoryController.update(identifier:String) GET /object/category/v4/read/:identifier controllers.v4.ObjectCategoryController.read(identifier:String, fields:Option[String]) # ObjectCategoryDefinition API's -POST /object/category/definition/v4/create controllers.v4.ObjectCategoryDefinitionController.create +POST /object/category/definition/v4/create controllers.v4.ObjectCategoryDefinitionController.create() GET /object/category/definition/v4/read/:identifier controllers.v4.ObjectCategoryDefinitionController.read(identifier:String, fields:Option[String]) PATCH /object/category/definition/v4/update/:identifier controllers.v4.ObjectCategoryDefinitionController.update(identifier:String) POST /object/category/definition/v4/read controllers.v4.ObjectCategoryDefinitionController.readCategoryDefinition(fields:Option[String]) # Lock API's -POST /v1/lock/create controllers.v3.LockController.createLock -PATCH /v1/lock/refresh controllers.v3.LockController.refreshLock -DELETE /v1/lock/retire controllers.v3.LockController.retireLock -POST /v1/lock/list controllers.v3.LockController.listLock \ No newline at end of file +POST /v1/lock/create controllers.v3.LockController.createLock() +PATCH /v1/lock/refresh controllers.v3.LockController.refreshLock() +DELETE /v1/lock/retire controllers.v3.LockController.retireLock() +POST /v1/lock/list controllers.v3.LockController.listLock() \ No newline at end of file diff --git a/taxonomy-api/taxonomy-service/pom.xml b/taxonomy-api/taxonomy-service/pom.xml index 5e495b8da..fa1e786e9 100644 --- a/taxonomy-api/taxonomy-service/pom.xml +++ b/taxonomy-api/taxonomy-service/pom.xml @@ -31,14 +31,83 @@ - 2.8.20 + 3.0.5 1.0.0-rc5 1.0.0 + 4.1.112.Final + + + + + io.netty + netty-bom + ${netty.version} + pom + import + + + io.netty + netty-common + ${netty.version} + + + io.netty + netty-buffer + ${netty.version} + + + io.netty + netty-transport + ${netty.version} + + + io.netty + netty-handler + ${netty.version} + + + io.netty + netty-codec + ${netty.version} + + + io.netty + netty-codec-http + ${netty.version} + + + io.netty + netty-codec-http2 + ${netty.version} + + + io.netty + netty-resolver + ${netty.version} + + + io.netty + netty-transport-native-epoll + ${netty.version} + + + io.netty + netty-transport-native-unix-common + ${netty.version} + + + io.netty + netty-transport-classes-epoll + ${netty.version} + + + + - com.typesafe.play + org.playframework play_${scala.major.version} ${play2.version} @@ -65,7 +134,7 @@ - com.typesafe.play + org.playframework play-guice_${scala.major.version} ${play2.version} @@ -84,33 +153,23 @@ - com.typesafe.play - filters-helpers_${scala.major.version} + org.playframework + play-filters-helpers_${scala.major.version} ${play2.version} - com.typesafe.play + org.playframework play-logback_${scala.major.version} ${play2.version} runtime - - - ch.qos.logback - logback-classic 
- - - ch.qos.logback - logback-core - - - ch.qos.logback - logback-classic - 1.2.13 + org.slf4j + slf4j-api + 2.0.16 - com.typesafe.play + org.playframework play-netty-server_${scala.major.version} ${play2.version} runtime @@ -124,7 +183,31 @@ io.netty netty-handler - 4.1.118.Final + + + + io.netty + netty-codec + + + io.netty + netty-codec-http + + + io.netty + netty-common + + + io.netty + netty-buffer + + + io.netty + netty-transport + + + io.netty + netty-resolver com.fasterxml.jackson.dataformat @@ -175,7 +258,7 @@ test - com.typesafe.play + org.playframework play-specs2_${scala.maj.version} ${play2.version} test diff --git a/taxonomy-api/taxonomy-service/test/controllers/base/BaseSpec.scala b/taxonomy-api/taxonomy-service/test/controllers/base/BaseSpec.scala index 443b98a17..d328df022 100644 --- a/taxonomy-api/taxonomy-service/test/controllers/base/BaseSpec.scala +++ b/taxonomy-api/taxonomy-service/test/controllers/base/BaseSpec.scala @@ -12,11 +12,11 @@ import play.api.test.{FakeHeaders, FakeRequest} import scala.concurrent.Future class BaseSpec extends Specification { - implicit val app = new GuiceApplicationBuilder() + implicit val app: play.api.Application = new GuiceApplicationBuilder() .disable(classOf[modules.TaxonomyModule]) .bindings(new TestModule) - .build - implicit val config = ConfigFactory.load() + .build() + implicit val config: com.typesafe.config.Config = ConfigFactory.load() def post(apiURL: String, request: String, h: FakeHeaders = FakeHeaders(Seq())) : Future[Result] = { @@ -24,13 +24,13 @@ class BaseSpec extends Specification { route(app, FakeRequest(POST, apiURL, headers, Json.toJson(Json.parse(request)))).get } - def isOK(response: Future[Result]) { + def isOK(response: Future[Result]): Unit = { status(response) must equalTo(OK) contentType(response) must beSome.which(_ == "application/json") contentAsString(response) must contain(""""status":"successful"""") } - def hasClientError(response: Future[Result]) { + def hasClientError(response: Future[Result]): Unit = { status(response) must equalTo(BAD_REQUEST) contentType(response) must beSome.which(_ == "application/json") contentAsString(response) must contain(""""err":"CLIENT_ERROR","status":"failed"""") diff --git a/taxonomy-api/taxonomy-service/test/modules/TestModule.scala b/taxonomy-api/taxonomy-service/test/modules/TestModule.scala index b6451ed49..bf12bac3f 100644 --- a/taxonomy-api/taxonomy-service/test/modules/TestModule.scala +++ b/taxonomy-api/taxonomy-service/test/modules/TestModule.scala @@ -1,24 +1,25 @@ package modules import com.google.inject.AbstractModule +import org.apache.pekko.actor.Props import org.sunbird.actor.core.BaseActor import org.sunbird.actors.ObjectCategoryActor import org.sunbird.common.dto.{Request, Response, ResponseHandler} -import play.libs.akka.AkkaGuiceSupport +import play.api.libs.concurrent.PekkoGuiceSupport import utils.ActorNames import scala.concurrent.{ExecutionContext, Future} -class TestModule extends AbstractModule with AkkaGuiceSupport { +class TestModule extends AbstractModule with PekkoGuiceSupport { override def configure(): Unit = { - bindActor(classOf[TestActor], ActorNames.HEALTH_ACTOR) - bindActor(classOf[TestActor], ActorNames.OBJECT_CATEGORY_ACTOR) - bindActor(classOf[TestActor], ActorNames.OBJECT_CATEGORY_DEFINITION_ACTOR) - bindActor(classOf[TestActor], ActorNames.FRAMEWORK_ACTOR) - bindActor(classOf[TestActor], ActorNames.CATEGORY_ACTOR) - bindActor(classOf[TestActor], ActorNames.CATEGORY_INSTANCE_ACTOR) - bindActor(classOf[TestActor], 
ActorNames.TERM_ACTOR)
-    bindActor(classOf[TestActor], ActorNames.LOCK_ACTOR)
+    bindActor(ActorNames.HEALTH_ACTOR, _ => Props(classOf[TestActor]))
+    bindActor(ActorNames.OBJECT_CATEGORY_ACTOR, _ => Props(classOf[TestActor]))
+    bindActor(ActorNames.OBJECT_CATEGORY_DEFINITION_ACTOR, _ => Props(classOf[TestActor]))
+    bindActor(ActorNames.FRAMEWORK_ACTOR, _ => Props(classOf[TestActor]))
+    bindActor(ActorNames.CATEGORY_ACTOR, _ => Props(classOf[TestActor]))
+    bindActor(ActorNames.CATEGORY_INSTANCE_ACTOR, _ => Props(classOf[TestActor]))
+    bindActor(ActorNames.TERM_ACTOR, _ => Props(classOf[TestActor]))
+    bindActor(ActorNames.LOCK_ACTOR, _ => Props(classOf[TestActor]))
     println("Test Module is initialized...")
   }
 }
diff --git a/taxonomy-service-sbt/app/controllers/BaseController.scala b/taxonomy-service-sbt/app/controllers/BaseController.scala
index 07271a486..a602421da 100644
--- a/taxonomy-service-sbt/app/controllers/BaseController.scala
+++ b/taxonomy-service-sbt/app/controllers/BaseController.scala
@@ -2,8 +2,8 @@ package controllers
 
 import java.util.UUID
 
-import akka.actor.ActorRef
-import akka.pattern.Patterns
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.pattern.Patterns
 import org.sunbird.common.DateUtils
 import org.sunbird.common.dto.{Response, ResponseHandler}
 import org.sunbird.common.exception.ResponseCode
@@ -11,7 +11,6 @@ import play.api.mvc._
 import utils.JavaJsonUtils
 
-import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.concurrent.{ExecutionContext, Future}
diff --git a/taxonomy-service-sbt/app/controllers/HealthController.scala b/taxonomy-service-sbt/app/controllers/HealthController.scala
index 2c6749650..f0a2175bb 100644
--- a/taxonomy-service-sbt/app/controllers/HealthController.scala
+++ b/taxonomy-service-sbt/app/controllers/HealthController.scala
@@ -1,6 +1,6 @@
 package controllers
 
-import akka.actor.{ActorRef, ActorSystem}
+import org.apache.pekko.actor.{ActorRef, ActorSystem}
 import javax.inject._
 import play.api.mvc._
diff --git a/taxonomy-service-sbt/app/controllers/v4/ObjectCategoryController.scala b/taxonomy-service-sbt/app/controllers/v4/ObjectCategoryController.scala
index 415f0c423..f09fdfd1f 100644
--- a/taxonomy-service-sbt/app/controllers/v4/ObjectCategoryController.scala
+++ b/taxonomy-service-sbt/app/controllers/v4/ObjectCategoryController.scala
@@ -1,6 +1,6 @@
 package controllers.v4
 
-import akka.actor.{ActorRef, ActorSystem}
+import org.apache.pekko.actor.{ActorRef, ActorSystem}
 import com.google.inject.Singleton
 import controllers.BaseController
 import javax.inject.{Inject, Named}
@@ -8,7 +8,6 @@ import org.sunbird.utils.Constants
 import play.api.mvc.ControllerComponents
 import utils.{ActorNames, ApiId}
 
-import scala.collection.JavaConverters._
 import scala.concurrent.ExecutionContext
 
 @Singleton
diff --git a/taxonomy-service-sbt/app/controllers/v4/ObjectCategoryDefinitionController.scala b/taxonomy-service-sbt/app/controllers/v4/ObjectCategoryDefinitionController.scala
index 630464ee2..0013b7c74 100644
--- a/taxonomy-service-sbt/app/controllers/v4/ObjectCategoryDefinitionController.scala
+++ b/taxonomy-service-sbt/app/controllers/v4/ObjectCategoryDefinitionController.scala
@@ -1,6 +1,6 @@
 package controllers.v4
 
-import akka.actor.{ActorRef, ActorSystem}
+import org.apache.pekko.actor.{ActorRef, ActorSystem}
 import controllers.BaseController
 import javax.inject.{Inject, Named}
 import org.apache.commons.lang3.StringUtils
@@ -9,7 +9,6 @@ import org.sunbird.utils.Constants
 import play.api.mvc.ControllerComponents
 import utils.{ActorNames, ApiId}
 
-import scala.collection.JavaConverters._
 import scala.concurrent.ExecutionContext
 
 class ObjectCategoryDefinitionController @Inject()(@Named(ActorNames.OBJECT_CATEGORY_DEFINITION_ACTOR) objCategoryDefinitionActor: ActorRef, cc: ControllerComponents, actorSystem: ActorSystem)(implicit exec: ExecutionContext) extends BaseController(cc) {
diff --git a/taxonomy-service-sbt/app/filters/AccessLogFilter.scala b/taxonomy-service-sbt/app/filters/AccessLogFilter.scala
index aad9f8419..92500bf9d 100644
--- a/taxonomy-service-sbt/app/filters/AccessLogFilter.scala
+++ b/taxonomy-service-sbt/app/filters/AccessLogFilter.scala
@@ -1,6 +1,6 @@
 package filters
 
-import akka.util.ByteString
+import org.apache.pekko.util.ByteString
 import javax.inject.Inject
 import org.sunbird.telemetry.util.TelemetryAccessEventUtil
 import play.api.Logging
@@ -8,7 +8,6 @@ import play.api.libs.streams.Accumulator
 import play.api.mvc._
 
 import scala.concurrent.ExecutionContext
-import scala.collection.JavaConverters._
 
 class AccessLogFilter @Inject() (implicit ec: ExecutionContext) extends EssentialFilter with Logging {
diff --git a/taxonomy-service-sbt/app/handlers/SignalHandler.scala b/taxonomy-service-sbt/app/handlers/SignalHandler.scala
index 4cad301c1..db6ff3b9d 100644
--- a/taxonomy-service-sbt/app/handlers/SignalHandler.scala
+++ b/taxonomy-service-sbt/app/handlers/SignalHandler.scala
@@ -2,7 +2,7 @@ package handlers
 
 import java.util.concurrent.TimeUnit
 
-import akka.actor.ActorSystem
+import org.apache.pekko.actor.ActorSystem
 import javax.inject.{Inject, Singleton}
 import org.slf4j.LoggerFactory
 import play.api.inject.DefaultApplicationLifecycle
diff --git a/taxonomy-service-sbt/app/modules/TaxonomyModule.scala b/taxonomy-service-sbt/app/modules/TaxonomyModule.scala
index 25f2cd660..a1479cd39 100644
--- a/taxonomy-service-sbt/app/modules/TaxonomyModule.scala
+++ b/taxonomy-service-sbt/app/modules/TaxonomyModule.scala
@@ -2,10 +2,10 @@ package modules
 
 import com.google.inject.AbstractModule
 import org.sunbird.actors.{HealthActor, ObjectCategoryActor, ObjectCategoryDefinitionActor}
-import play.libs.akka.AkkaGuiceSupport
+import play.api.libs.concurrent.PekkoGuiceSupport
 import utils.ActorNames
 
-class TaxonomyModule extends AbstractModule with AkkaGuiceSupport {
+class TaxonomyModule extends AbstractModule with PekkoGuiceSupport {
 
     override def configure() = {
         super.configure()
diff --git a/taxonomy-service-sbt/build.sbt b/taxonomy-service-sbt/build.sbt
index 57d79a760..a12a92080 100644
--- a/taxonomy-service-sbt/build.sbt
+++ b/taxonomy-service-sbt/build.sbt
@@ -7,7 +7,7 @@ lazy val root = (project in file("."))
   .settings(
     name := "taxonomy-service-sbt",
     version := "1.0-SNAPSHOT",
-    scalaVersion := "2.12.8",
+    scalaVersion := "2.13.12",
    javacOptions ++= Seq("-source", "11", "-target", "11"),
     libraryDependencies ++= Seq(
       guice,
@@ -16,15 +16,15 @@ lazy val root = (project in file("."))
       "org.sunbird" % "taxonomy-actors" % "1.0-SNAPSHOT",
       "io.lemonlabs" %% "scala-uri" % "1.4.10",
       "net.codingwell" %% "scala-guice" % "4.2.5",
-      "com.typesafe.play" %% "play-specs2" % "2.7.9",
-      "org.scalatestplus.play" %% "scalatestplus-play" % "4.0.3" % Test
+      "org.playframework" %% "play-specs2" % "3.0.5",
+      "org.scalatestplus.play" %% "scalatestplus-play" % "7.0.1" % Test
     )
   )
   .settings(
     libraryDependencies += ("org.sunbird" % "taxonomy-actors" % "1.0-SNAPSHOT")
-      .exclude("com.typesafe.akka","akka-actor_2.11")
-      .exclude("org.scala-lang.modules","scala-java8-compat_2.11")
-      .exclude("org.scala-lang.modules","scala-parser-combinators_2.11")
-      .exclude("com.typesafe.akka","akka-slf4j_2.11")
+      .exclude("com.typesafe.akka","akka-actor_2.13")
+      .exclude("org.scala-lang.modules","scala-java8-compat_2.13")
+      .exclude("org.scala-lang.modules","scala-parser-combinators_2.13")
+      .exclude("com.typesafe.akka","akka-slf4j_2.13")
   )
 
 resolvers += "Local Maven Repository" at "file:///"+Path.userHome+"/.m2/repository"
diff --git a/taxonomy-service-sbt/conf/application.conf b/taxonomy-service-sbt/conf/application.conf
index b67ddee9a..d930f9535 100644
--- a/taxonomy-service-sbt/conf/application.conf
+++ b/taxonomy-service-sbt/conf/application.conf
@@ -35,13 +35,13 @@ repository.dispatcher {
 # Play uses Akka internally and exposes Akka Streams and actors in Websockets and
 # other streaming HTTP responses.
 akka {
-  # "akka.log-config-on-start" is extraordinarly useful because it log the complete
+  # "pekko.log-config-on-start" is extraordinarly useful because it log the complete
   # configuration at INFO level, including defaults and overrides, so it s worth
   # putting at the very top.
   #
   # Put the following in your conf/logback.xml file:
   #
-  #
+  #
   #
   # And then uncomment this line to debug the configuration.
   #
@@ -291,13 +291,13 @@ play.filters {
 }
 
 play.http.parser.maxMemoryBuffer = 50MB
-akka.http.parsing.max-content-length = 50MB
+pekko.http.parsing.max-content-length = 50MB
 
 schema.base_path = "../../schemas/"
 
 # Graph Configuration
 graph.dir=/data/testingGraphDB
-akka.request_timeout=30
+pekko.request_timeout=30
 environment.id=10000000
 graph.ids=["domain"]
 graph.passport.key.base=31b6fd1c4d64e745c867e61a45edc34a
diff --git a/taxonomy-service-sbt/conf/routes b/taxonomy-service-sbt/conf/routes
index 4a1c21232..4f553f3c9 100644
--- a/taxonomy-service-sbt/conf/routes
+++ b/taxonomy-service-sbt/conf/routes
@@ -1,13 +1,13 @@
 # Routes
 # This file defines all application routes (Higher priority routes first)
 # ~~~~
-GET     /health                                      controllers.HealthController.health
+GET     /health                                      controllers.HealthController.health()
 
 # Framework API's
-POST    /framework/v3/create                         controllers.v3.FrameworkController.createFramework
+POST    /framework/v3/create                         controllers.v3.FrameworkController.createFramework()
 GET     /framework/v3/read/:identifier               controllers.v3.FrameworkController.readFramework(identifier:String, categories: Option[String])
 PATCH   /framework/v3/update/:identifier             controllers.v3.FrameworkController.updateFramework(identifier:String)
-POST    /framework/v3/list                           controllers.v3.FrameworkController.listFramework
+POST    /framework/v3/list                           controllers.v3.FrameworkController.listFramework()
 DELETE  /framework/v3/retire/:identifier             controllers.v3.FrameworkController.retire(identifier:String)
 POST    /framework/v3/copy/:identifier               controllers.v3.FrameworkController.copyFramework(identifier:String)
 POST    /framework/v3/publish/:identifier            controllers.v3.FrameworkController.publish(identifier:String)
@@ -19,12 +19,12 @@ POST    /framework/v3/publish/:identifier            controllers.v3.Framewo
 
 # Object Category API's
-POST    /object/category/v4/create                   controllers.v4.ObjectCategoryController.create
+POST    /object/category/v4/create                   controllers.v4.ObjectCategoryController.create()
 PATCH   /object/category/v4/update/:identifier       controllers.v4.ObjectCategoryController.update(identifier:String)
 GET     /object/category/v4/read/:identifier         controllers.v4.ObjectCategoryController.read(identifier:String, fields:Option[String])
 
 # ObjectCategoryDefinition API's
-POST    /object/category/definition/v4/create        controllers.v4.ObjectCategoryDefinitionController.create
+POST    /object/category/definition/v4/create        controllers.v4.ObjectCategoryDefinitionController.create()
 GET     /object/category/definition/v4/read/:identifier     controllers.v4.ObjectCategoryDefinitionController.read(identifier:String, fields:Option[String])
 PATCH   /object/category/definition/v4/update/:identifier   controllers.v4.ObjectCategoryDefinitionController.update(identifier:String)
 POST    /object/category/definition/v4/read          controllers.v4.ObjectCategoryDefinitionController.readCategoryDefinition(fields:Option[String])
\ No newline at end of file
diff --git a/taxonomy-service-sbt/project/plugins.sbt b/taxonomy-service-sbt/project/plugins.sbt
index a027c82c9..8eb6db335 100644
--- a/taxonomy-service-sbt/project/plugins.sbt
+++ b/taxonomy-service-sbt/project/plugins.sbt
@@ -1,5 +1,5 @@
 // The Play plugin
-addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.7.9")
+addSbtPlugin("org.playframework" % "sbt-plugin" % "3.0.5")
 
 // sbt-paradox, used for documentation
 addSbtPlugin("com.lightbend.paradox" % "sbt-paradox" % "0.4.4")
diff --git a/taxonomy-service-sbt/test/modules/TestModule.scala b/taxonomy-service-sbt/test/modules/TestModule.scala
index 71a40937b..719f00b74 100644
--- a/taxonomy-service-sbt/test/modules/TestModule.scala
+++ b/taxonomy-service-sbt/test/modules/TestModule.scala
@@ -4,12 +4,12 @@ import com.google.inject.AbstractModule
 import org.sunbird.actor.core.BaseActor
 import org.sunbird.actors.ObjectCategoryActor
 import org.sunbird.common.dto.{Request, Response, ResponseHandler}
-import play.libs.akka.AkkaGuiceSupport
+import play.api.libs.concurrent.PekkoGuiceSupport
 import utils.ActorNames
 
 import scala.concurrent.{ExecutionContext, Future}
 
-class TestModule extends AbstractModule with AkkaGuiceSupport {
+class TestModule extends AbstractModule with PekkoGuiceSupport {
 
   override def configure(): Unit = {
     bindActor(classOf[TestActor], ActorNames.HEALTH_ACTOR)
     bindActor(classOf[TestActor], ActorNames.OBJECT_CATEGORY_ACTOR)
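
For orientation, a minimal sketch of how the migrated pieces above interact: a Guice module mixes in PekkoGuiceSupport and binds a named actor, and a controller injected with that @Named ActorRef queries it via the Pekko ask pattern with an explicit Timeout. SampleActor, SampleController and the name "sample-actor" are hypothetical placeholders, not identifiers from this change set.

// Illustrative sketch only; SampleActor, SampleController and "sample-actor" are hypothetical names.
import com.google.inject.AbstractModule
import javax.inject.{Inject, Named, Singleton}
import org.apache.pekko.actor.{Actor, ActorRef}
import org.apache.pekko.pattern.ask
import org.apache.pekko.util.Timeout
import play.api.libs.concurrent.PekkoGuiceSupport
import play.api.mvc.{AbstractController, ControllerComponents}
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._

// A trivial actor that answers a single "read" message.
class SampleActor extends Actor {
  override def receive: Receive = { case "read" => sender() ! "ok" }
}

// Play 3 style binding: type parameter instead of classOf, via PekkoGuiceSupport.
class SampleModule extends AbstractModule with PekkoGuiceSupport {
  override def configure(): Unit = bindActor[SampleActor]("sample-actor")
}

// The controller asks the named actor and maps the reply to an HTTP response.
@Singleton
class SampleController @Inject()(@Named("sample-actor") sampleActor: ActorRef, cc: ControllerComponents)(implicit ec: ExecutionContext)
  extends AbstractController(cc) {
  implicit val timeout: Timeout = Timeout(30.seconds)

  def read() = Action.async {
    (sampleActor ? "read").mapTo[String].map(reply => Ok(reply))
  }
}

The module would still need to be registered via play.modules.enabled in application.conf, and a route entry pointing at SampleController.read(), matching the explicit parentheses used in the updated routes files above.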