Skip to content

Commit

Permalink
Merge branch 'develop'
Browse files Browse the repository at this point in the history
  • Loading branch information
cjllanwarne committed Apr 16, 2021
2 parents dd4a21d + 64592db commit 14bb3ba
Show file tree
Hide file tree
Showing 41 changed files with 989 additions and 245 deletions.
67 changes: 67 additions & 0 deletions .github/workflows/combine_scalasteward_prs.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
name: combine-scalasteward-prs

# With a thousand thanks to Qi who showed us this in Leonardo: https://github.com/DataBiosphere/leonardo/pull/1975
# Allows manual triggering of the workflow on a selected branch via the GitHub Actions tab.
# GitHub blog demo: https://github.blog/changelog/2020-07-06-github-actions-manual-triggers-with-workflow_dispatch/.

on:
  workflow_dispatch:
    inputs:
      prLimit:
        description: 'How many scala-steward PRs to consolidate'
        required: true
        default: 1000

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - name: Merge scala-steward PRs
        id: msp
        run: |
          export GITHUB_TOKEN=${{ secrets.BROADBOT_GITHUB_TOKEN }}
          # NOTE(review): only user.name is configured; git normally also requires user.email
          # to create merge commits on a fresh runner — confirm broadbot's email and set it here.
          git config user.name "broadbot"
          echo "Bringing in PRs:"
          gh pr list --limit 1000 | grep 'scala-steward:' | tac | head -n${{ github.event.inputs.prLimit }}
          PR_LIST=$(gh pr list --limit 1000 | grep 'scala-steward:' | cut -d$'\t' -f 1 | tac | head -n${{ github.event.inputs.prLimit }})
          BRANCH="consolidated-scala-steward-prs-$(date +'%Y-%m-%d_%H-%M')"
          SUCCESSFUL_PRS=()
          UNSUCCESSFUL_PRS=()
          git fetch
          git checkout develop
          git checkout -B $BRANCH
          while IFS= read -r pr
          do
            echo "Bringing in: $pr"
            git fetch origin pull/${pr}/head:pr_${pr}
            git checkout $BRANCH
            # Capture the merge's exit status without letting a failure abort the whole step.
            git merge -m "merging PR #${pr}" pr_${pr} --allow-unrelated-histories && EXIT_CODE=$? || EXIT_CODE=$?
            if [ "${EXIT_CODE}" == "0" ]
            then
              SUCCESSFUL_PRS+=( "
          * #$pr" )
            else
              echo "Unexpected exit code: ${EXIT_CODE}"
              # Throw away the half-applied merge so the next PR starts from a clean tree.
              git reset --hard HEAD
              UNSUCCESSFUL_PRS+=( "
          * #$pr" )
            fi
          done <<< "$PR_LIST"
          git checkout $BRANCH
          git push origin $BRANCH
          gh pr create \
            --title '[!! NEEDS Jira ID !!] Scala-steward shepherding consolidation' \
            --body "This PR was generated automatically by the github action: https://github.com/broadinstitute/cromwell/actions/workflows/combine_scalasteward_prs.yml
          Consolidates scala-steward PRs ${SUCCESSFUL_PRS[*]}.
          Note that the following PRs were not able to be consolidated: ${UNSUCCESSFUL_PRS[*]}"
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
# Cromwell Change Log

## 61 Release Notes

### No labels update for Archived workflows

If **- and ONLY if -** you have metadata archiving turned on, then for a workflow whose metadata has been archived by Cromwell
according to the lifecycle policy, Cromwell will no longer add new labels or update existing labels for that workflow
via requests coming in through the PATCH `/labels` endpoint.

## 60 Release Notes

### Java 11
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -444,6 +444,7 @@ trait StandardAsyncExecutionActor
|$out="$${tmpDir}/out.$$$$" $err="$${tmpDir}/err.$$$$"
|mkfifo "$$$out" "$$$err"
|trap 'rm "$$$out" "$$$err"' EXIT
|touch $stdoutRedirection $stderrRedirection
|tee $stdoutRedirection < "$$$out" &
|tee $stderrRedirection < "$$$err" >&2 &
|(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,19 +5,20 @@ task large_dockerhub_image {
String docker_image
}
command {
which jq > /dev/null || (apt-get update > /dev/null && apt-get install -y jq > /dev/null)
curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
which jq || (apt-get update && apt-get install -y jq)
NAME=`curl -s -H "Metadata-Flavor: Google" http://metadata.google.internal/computeMetadata/v1/instance/name`
ZONE=`basename \`curl -s -H "Metadata-Flavor: Google" http://metadata.google.internal/computeMetadata/v1/instance/zone\``
PROJECT=`curl -s -H "Metadata-Flavor: Google" http://metadata.google.internal/computeMetadata/v1/project/project-id`
curl -s -H "Authorization: Bearer `gcloud auth print-access-token`" "https://www.googleapis.com/compute/v1/projects/$PROJECT/zones/$ZONE/disks/$NAME?fields=sizeGb" | jq -r '.sizeGb'
curl -s -H "Authorization: Bearer `gcloud auth print-access-token`" "https://www.googleapis.com/compute/v1/projects/$PROJECT/zones/$ZONE/disks/$NAME?fields=sizeGb" | jq -r '.sizeGb' > disksize.txt
}

runtime {
docker: docker_image
}

output {
Int bootDiskSize = read_int(stdout())
Int bootDiskSize = read_int("disksize.txt")
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,19 +5,20 @@ task large_gcr_image {
String docker_image
}
command {
which jq > /dev/null || (apt-get update > /dev/null && apt-get install -y jq > /dev/null)
curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
which jq || (apt-get update && apt-get install -y jq)
NAME=`curl -s -H "Metadata-Flavor: Google" http://metadata.google.internal/computeMetadata/v1/instance/name`
ZONE=`basename \`curl -s -H "Metadata-Flavor: Google" http://metadata.google.internal/computeMetadata/v1/instance/zone\``
PROJECT=`curl -s -H "Metadata-Flavor: Google" http://metadata.google.internal/computeMetadata/v1/project/project-id`
curl -s -H "Authorization: Bearer `gcloud auth print-access-token`" "https://www.googleapis.com/compute/v1/projects/$PROJECT/zones/$ZONE/disks/$NAME?fields=sizeGb" | jq -r '.sizeGb'
curl -s -H "Authorization: Bearer `gcloud auth print-access-token`" "https://www.googleapis.com/compute/v1/projects/$PROJECT/zones/$ZONE/disks/$NAME?fields=sizeGb" | jq -r '.sizeGb' > disksize.txt
}

runtime {
docker: docker_image
}

output {
Int bootDiskSize = read_int(stdout())
Int bootDiskSize = read_int("disksize.txt")
}
}

Expand Down
2 changes: 2 additions & 0 deletions core/src/main/scala/cromwell/core/WorkflowMetadataKeys.scala
Original file line number Diff line number Diff line change
Expand Up @@ -35,4 +35,6 @@ object WorkflowMetadataKeys {
val SummaryNameDecreasing = "WORKFLOW_METADATA_SUMMARY_ENTRY_DECREASING"

// Metadata key under which workflow labels are stored.
val Labels = "labels"
// Key reporting a workflow's metadata archive status.
val MetadataArchiveStatus = "metadataArchiveStatus"
// Generic message key — presumably used for error/info payloads; TODO confirm at call sites.
val Message = "message"
}
Original file line number Diff line number Diff line change
Expand Up @@ -127,14 +127,15 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config)
runTransaction(action, timeout = timeout)
}

/**
  * Streams metadata entries for the given workflow execution uuid with a forward-only,
  * read-only result set so rows are not buffered in memory.
  */
override def streamMetadataEntries(workflowExecutionUuid: String): DatabasePublisher[MetadataEntry] = {
  val action = dataAccess.metadataEntriesForWorkflowExecutionUuid(workflowExecutionUuid)
    .result
    .withStatementParameters(
      rsType = ResultSetType.ForwardOnly,
      rsConcurrency = ResultSetConcurrency.ReadOnly,
      // Magic number alert: fetchSize is set to MIN_VALUE for MySQL to stream rather than cache in memory first.
      // Inspired by: https://github.com/slick/slick/issues/1218
      fetchSize = Integer.MIN_VALUE)
  database.stream(action)
}

Expand Down Expand Up @@ -383,7 +384,7 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config)
}

/** Sets (or clears, via None) the archive status on the summary row for exactly this workflow id. */
override def updateMetadataArchiveStatus(workflowExecutionUuid: String, newArchiveStatus: Option[String]): Future[Int] = {
  val action = dataAccess.metadataArchiveStatusByWorkflowId(workflowExecutionUuid).update(newArchiveStatus)
  runTransaction(action)
}

Expand Down Expand Up @@ -459,11 +460,11 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config)
runTransaction(action)
}

/**
  * In a single transaction, deletes all metadata entries for the given workflow id and records
  * the new archive status on its summary row. Returns the number of deleted metadata rows.
  */
override def deleteAllMetadataForWorkflowAndUpdateArchiveStatus(workflowId: String, newArchiveStatus: Option[String])(implicit ec: ExecutionContext): Future[Int] = {
  runTransaction {
    for {
      numDeleted <- dataAccess.metadataEntriesForWorkflowExecutionUuid(workflowId).delete
      // Update the status last so a failed delete rolls back without marking the workflow archived.
      _ <- dataAccess.metadataArchiveStatusByWorkflowId(workflowId).update(newArchiveStatus)
    } yield numDeleted
  }
}
Expand All @@ -480,9 +481,9 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config)
)
}

/**
  * Returns up to `batchSize` workflow ids whose archive status matches and whose end timestamp
  * is on or before the threshold.
  */
override def queryWorkflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp(archiveStatus: Option[String], thresholdTimestamp: Timestamp, batchSize: Long)(implicit ec: ExecutionContext): Future[Seq[String]] = {
  runAction(
    dataAccess.workflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp((archiveStatus, thresholdTimestamp, batchSize)).result
  )
}

Expand All @@ -497,4 +498,8 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config)
countSummaryQueueEntries()
)

// Looks up the archive status for the given workflow id. Returns None when the workflow has no
// summary row or no archive status recorded (hence flattening the Option[Option[String]]).
override def getMetadataArchiveStatus(workflowId: String)(implicit ec: ExecutionContext): Future[Option[String]] = {
val action = dataAccess.metadataArchiveStatusByWorkflowId(workflowId).result.headOption
runTransaction(action).map(_.flatten)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -77,22 +77,6 @@ trait MetadataEntryComponent {
}.size
)

val metadataEntriesWithoutLabelsForRootWorkflowId = Compiled(
(rootWorkflowId: Rep[String]) => {
val targetWorkflowIds = for {
summary <- workflowMetadataSummaryEntries
// Uses `IX_WORKFLOW_METADATA_SUMMARY_ENTRY_RWEU`, `UC_WORKFLOW_METADATA_SUMMARY_ENTRY_WEU`
if summary.rootWorkflowExecutionUuid === rootWorkflowId || summary.workflowExecutionUuid === rootWorkflowId
} yield summary.workflowExecutionUuid

for {
metadata <- metadataEntries
if metadata.workflowExecutionUuid in targetWorkflowIds // Uses `METADATA_WORKFLOW_IDX`
if !(metadata.metadataKey like "labels:%")
} yield metadata
}
)

val metadataEntryExistsForWorkflowExecutionUuid = Compiled(
(workflowExecutionUuid: Rep[String]) => (for {
metadataEntry <- metadataEntries
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,11 +71,10 @@ trait WorkflowMetadataSummaryEntryComponent {
val workflowMetadataSummaryEntryIdsAutoInc = workflowMetadataSummaryEntries returning
workflowMetadataSummaryEntries.map(_.workflowMetadataSummaryEntryId)

val rootWorkflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp = Compiled(
val workflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp = Compiled(
(metadataArchiveStatus: Rep[Option[String]], workflowEndTimestampThreshold: Rep[Timestamp], batchSize: ConstColumn[Long]) => {
(for {
summary <- workflowMetadataSummaryEntries
if summary.rootWorkflowExecutionUuid.isEmpty && summary.parentWorkflowExecutionUuid.isEmpty // is root workflow entry
if summary.metadataArchiveStatus === metadataArchiveStatus
if summary.endTimestamp <= workflowEndTimestampThreshold
} yield summary.workflowExecutionUuid).take(batchSize)
Expand Down Expand Up @@ -129,10 +128,10 @@ trait WorkflowMetadataSummaryEntryComponent {
}
)

// Pre-compiled query selecting the archive-status column of the summary row whose
// workflowExecutionUuid matches exactly (no longer matches on rootWorkflowExecutionUuid).
val metadataArchiveStatusByWorkflowId = Compiled(
  (workflowExecutionUuid: Rep[String]) => for {
    workflowMetadataSummaryEntry <- workflowMetadataSummaryEntries
    if workflowMetadataSummaryEntry.workflowExecutionUuid === workflowExecutionUuid
  } yield workflowMetadataSummaryEntry.metadataArchiveStatus)

def concat(a: SQLActionBuilder, b: SQLActionBuilder): SQLActionBuilder = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,7 @@ trait MetadataSqlDatabase extends SqlDatabase {
timeout: Duration)
(implicit ec: ExecutionContext): Future[Seq[MetadataEntry]]

/** Streams all metadata entries for the given workflow execution uuid. */
def streamMetadataEntries(workflowExecutionUuid: String): DatabasePublisher[MetadataEntry]

def countMetadataEntries(workflowExecutionUuid: String,
expandSubWorkflows: Boolean,
Expand Down Expand Up @@ -178,15 +177,17 @@ trait MetadataSqlDatabase extends SqlDatabase {
includeSubworkflows: Boolean)
(implicit ec: ExecutionContext): Future[Int]

/** Deletes all metadata for the workflow and records the new archive status; returns rows deleted. */
// NOTE(review): the implementation names this parameter `workflowId` while the trait says
// `rootWorkflowId` — confirm and align, since Scala named arguments follow the declared name.
def deleteAllMetadataForWorkflowAndUpdateArchiveStatus(rootWorkflowId: String, newArchiveStatus: Option[String])(implicit ec: ExecutionContext): Future[Int]

def isRootWorkflow(rootWorkflowId: String)(implicit ec: ExecutionContext): Future[Option[Boolean]]

def getRootWorkflowId(workflowId: String)(implicit ec: ExecutionContext): Future[Option[String]]

/** Returns up to `batchSizeOpt` workflow ids matching the archive status that ended on or before the threshold. */
def queryWorkflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp(archiveStatus: Option[String], thresholdTimestamp: Timestamp, batchSizeOpt: Long)(implicit ec: ExecutionContext): Future[Seq[String]]

def countRootWorkflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp(archiveStatus: Option[String], thresholdTimestamp: Timestamp)(implicit ec: ExecutionContext): Future[Int]

def getSummaryQueueSize()(implicit ec: ExecutionContext): Future[Int]

def getMetadataArchiveStatus(workflowId: String)(implicit ec: ExecutionContext): Future[Option[String]]
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ object LabelsManagerActor {
// Responses emitted by LabelsManagerActor for label-update requests.
sealed abstract class LabelsManagerActorResponse
// Labels were applied successfully; `response` carries the JSON payload for the caller.
final case class BuiltLabelsManagerResponse(response: JsObject) extends LabelsManagerActorResponse
// The label update failed; `reason` carries the causing error.
final case class FailedLabelsManagerResponse(reason: Throwable) extends LabelsManagerActorResponse
// The update was rejected because the workflow's metadata has been archived.
final case class WorkflowArchivedLabelsManagerResponse(response: JsObject) extends LabelsManagerActorResponse
}

class LabelsManagerActor(serviceRegistryActor: ActorRef) extends Actor with ActorLogging with DefaultJsonProtocol {
Expand Down
Loading

0 comments on commit 14bb3ba

Please sign in to comment.