Showing 7 changed files with 182 additions and 182 deletions.
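This commit replaces the hand-rolled SourceFunction[Event] implementation of PhakerSourceFunction with Flink's built-in DataGeneratorSource, moving the actual event generation into a new RandomGenerator subclass, PhakerSourceGenerator. The first diff removes the old implementation: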
@@ -1,149 +1,14 @@
package io.github.yuxiqian.phaker
package source

import source.PhakerDatabase.{colCount, idCount}

import org.apache.flink.cdc.common.event._
import org.apache.flink.cdc.common.schema.Column
import org.apache.flink.cdc.runtime.typeutils.BinaryRecordDataGenerator
import org.apache.flink.streaming.api.functions.source.SourceFunction

import java.util
class PhakerSourceFunction(
  tableId: TableId,
  rejectedTypes: Set[String],
  schemaEvolve: Boolean,
  maxColumnCount: Int,
  batchCount: Int,
  sleepTime: Int
) extends SourceFunction[Event] {

  private type Context = SourceFunction.SourceContext[Event]

  private var isRunning = true

  override def run(ctx: Context): Unit = {
    // Announce the table schema before emitting any data change events.
    ctx.collect(
      new CreateTableEvent(
        tableId,
        PhakerDatabase.genSchema
      )
    )

    // Each round emits a batch of inserts, updates, and deletes,
    // interleaved with optional schema evolution events.
    while (isRunning) {
      PhakerDatabase.synchronized {
        println("Emitting insert events...")
        emitInsertEvents(ctx, batchCount)
        emitSchemaEvolutionEvents(ctx)

        println("Emitting update events...")
        emitUpdateEvents(ctx, batchCount)
        emitSchemaEvolutionEvents(ctx)

        println("Emitting delete events...")
        emitDeleteEvents(ctx, batchCount)
        emitSchemaEvolutionEvents(ctx)
      }
      Thread.sleep(sleepTime)
    }
  }

  private def emitInsertEvents(ctx: Context, count: Int): Unit = {
    for (_ <- 0 until count) {
      val insertedData = genRecord()
      ctx.collect(
        DataChangeEvent.insertEvent(tableId, insertedData)
      )
    }
  }

  private def emitUpdateEvents(ctx: Context, count: Int): Unit = {
    for (_ <- 0 until count) {
      val updateBeforeData = genRecord()
      ctx.collect(
        DataChangeEvent.insertEvent(tableId, updateBeforeData)
      )

      // Roll back the id counter so the update-after record is
      // generated with the same id as the before record.
      idCount.synchronized {
        idCount -= 1
      }

      val updateAfterData = genRecord()
      ctx.collect(
        DataChangeEvent.updateEvent(tableId, updateBeforeData, updateAfterData)
      )
    }
  }

  private def emitDeleteEvents(ctx: Context, count: Int): Unit = {
    for (_ <- 0 until count) {
      val deleteBeforeData = genRecord()
      ctx.collect(
        DataChangeEvent.insertEvent(tableId, deleteBeforeData)
      )

      // Roll back the id counter so the delete refers to the
      // record that was just inserted.
      idCount.synchronized {
        idCount -= 1
      }

      ctx.collect(
        DataChangeEvent.deleteEvent(tableId, deleteBeforeData)
      )
    }
  }

  private def genRecord() = {
    // Generate one row that matches the current (possibly evolved) schema.
    val generator = new BinaryRecordDataGenerator(
      PhakerDatabase.columnList.map(_._2)
    )
    val rowData = PhakerDatabase.columnList
      .map(col => PhakeDataGenerator.randomData(col._1, col._2))

    println(s"Generated data record: ${rowData.mkString("Array(", ", ", ")")}")
    generator.generate(rowData)
  }

  private def emitSchemaEvolutionEvents(ctx: Context): Unit = {
    if (!schemaEvolve) { return }
    if (colCount > maxColumnCount) { return }

    println("Emitting schema change events...")

    // Append a fresh column of a random (non-rejected) type and
    // broadcast the corresponding AddColumnEvent.
    val addedColumnName = colCount.synchronized {
      colCount += 1
      s"column$colCount"
    }
    val addedColumnType = PhakeDataGenerator.randomType(rejectedTypes)

    PhakerDatabase.columnList.synchronized {
      PhakerDatabase.columnList :+= (addedColumnName, addedColumnType)
      ctx.collect(
        new AddColumnEvent(
          tableId,
          util.Arrays.asList(
            new AddColumnEvent.ColumnWithPosition(
              Column.physicalColumn(addedColumnName, addedColumnType)
            )
          )
        )
      )
    }

    println(s"Done, new schema: ${PhakerDatabase.genSchema}")
  }

  override def cancel(): Unit = {
    isRunning = false
  }
}
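After this commit, all that remains of PhakerSourceFunction is a thin wrapper that delegates event generation to a DataGenerator[Event] at a configurable rate: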
package io.github.yuxiqian.phaker
package source

import org.apache.flink.cdc.common.event.Event
import org.apache.flink.streaming.api.functions.source.datagen.{DataGenerator, DataGeneratorSource}

class PhakerSourceFunction(
  generator: DataGenerator[Event],
  rowsPerSecond: Long
) extends DataGeneratorSource[Event](
    generator,
    rowsPerSecond,
    null // no upper bound on the number of generated rows
  ) {}
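A minimal wiring sketch of how the two classes fit together (not part of this commit; the table name, parameter values, and env are hypothetical):

// Hypothetical usage sketch: names and values are illustrative only.
val generator = new PhakerSourceGenerator(
  TableId.tableId("default_namespace", "default_schema", "phaker_table"),
  rejectedTypes = Set.empty,
  schemaEvolve = true,
  maxColumnCount = 50
)
// rowsPerSecond throttles emission; DataGeneratorSource drives the run loop.
val source = new PhakerSourceFunction(generator, rowsPerSecond = 10L)
env.addSource(source) // env: an existing StreamExecutionEnvironment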
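The second diff adds the new source file. PhakerSourceGenerator carries over the event-generation logic as a RandomGenerator[Event] that buffers one batch of events at a time and hands them out one by one: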
@@ -0,0 +1,141 @@
package io.github.yuxiqian.phaker
package source

import source.PhakerDatabase.{colCount, idCount}

import org.apache.flink.cdc.common.event._
import org.apache.flink.cdc.common.schema.Column
import org.apache.flink.cdc.runtime.typeutils.BinaryRecordDataGenerator
import org.apache.flink.streaming.api.functions.source.datagen.RandomGenerator

import java.util

class PhakerSourceGenerator(
  tableId: TableId,
  rejectedTypes: Set[String],
  schemaEvolve: Boolean,
  maxColumnCount: Int
) extends RandomGenerator[Event] {

  // Event buffer, seeded with the initial CreateTableEvent so the
  // schema is announced before any data change events.
  private val cachedEvents: util.List[Event] = {
    val cache = new util.ArrayList[Event]
    cache.add(
      new CreateTableEvent(
        tableId,
        PhakerDatabase.genSchema
      )
    )
    cache
  }

  // Hand out one buffered event per call, refilling the buffer
  // with a fresh batch whenever it runs dry.
  override def next(): Event = {
    cachedEvents.synchronized {
      if (cachedEvents.isEmpty) {
        pushEvents()
      }
      cachedEvents.remove(0)
    }
  }

  private def pushEvents(): Unit = {
    PhakerDatabase.synchronized {
      println("Emitting insert events...")

      // One plain insert.
      {
        val insertedData = genRecord()
        cachedEvents.add(
          DataChangeEvent.insertEvent(tableId, insertedData)
        )
      }

      // One insert followed by an update of the same record; the id
      // counter is rolled back so the update-after record reuses the id.
      {
        val updateBeforeData = genRecord()
        cachedEvents.add(
          DataChangeEvent.insertEvent(tableId, updateBeforeData)
        )

        idCount.synchronized {
          idCount -= 1
        }

        val updateAfterData = genRecord()
        cachedEvents.add(
          DataChangeEvent.updateEvent(
            tableId,
            updateBeforeData,
            updateAfterData
          )
        )
      }

      // One insert followed by a delete of the same record.
      {
        val deleteBeforeData = genRecord()
        cachedEvents.add(
          DataChangeEvent.insertEvent(tableId, deleteBeforeData)
        )

        idCount.synchronized {
          idCount -= 1
        }

        cachedEvents.add(
          DataChangeEvent.deleteEvent(tableId, deleteBeforeData)
        )
      }

      // At most one schema change event per batch.
      {
        emitSchemaEvolutionEvents().foreach(cachedEvents.add)
      }
    }
  }

  private def genRecord() = {
    // Generate one row that matches the current (possibly evolved) schema.
    val generator = new BinaryRecordDataGenerator(
      PhakerDatabase.columnList.map(_._2)
    )
    val rowData = PhakerDatabase.columnList
      .map(col => PhakeDataGenerator.randomData(col._1, col._2))

    println(s"Generated data record: ${rowData.mkString("Array(", ", ", ")")}")
    generator.generate(rowData)
  }

  private def emitSchemaEvolutionEvents(): Option[Event] = {
    if (!schemaEvolve) { return None }
    if (colCount > maxColumnCount) { return None }

    println("Emitting schema change events...")

    // Append a fresh column of a random (non-rejected) type and
    // return the corresponding AddColumnEvent.
    val addedColumnName = colCount.synchronized {
      colCount += 1
      s"column$colCount"
    }
    val addedColumnType = PhakeDataGenerator.randomType(rejectedTypes)

    PhakerDatabase.columnList.synchronized {
      PhakerDatabase.columnList :+= (addedColumnName, addedColumnType)
      println(s"Done, new schema: ${PhakerDatabase.genSchema}")
      Some(
        new AddColumnEvent(
          tableId,
          util.Arrays.asList(
            new AddColumnEvent.ColumnWithPosition(
              Column.physicalColumn(addedColumnName, addedColumnType)
            )
          )
        )
      )
    }
  }
}
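Compared with the old SourceFunction loop, throttling now comes from DataGeneratorSource's rowsPerSecond parameter instead of Thread.sleep, and batching is implicit: each refill of cachedEvents contributes one insert, one insert-plus-update pair, one insert-plus-delete pair, and at most one schema change event.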