Add some constructor for chunk capacity #180
base: develop
Changes from all commits
75a0e94
159ae5f
6132996
8ab7569
7ffb3bc
5577823
6d39057
4f2ee7a
c866e8a
556fd6c
6cc4a15
ecbfbd0
ee88206
f9fd755
c3b7f37
5fe593f
Changes to AlignedChunkWriterImpl:
@@ -80,6 +80,36 @@ public AlignedChunkWriterImpl(VectorMeasurementSchema schema) {
    this.remainingPointsNumber = timeChunkWriter.getRemainingPointNumberForCurrentPage();
  }

  public AlignedChunkWriterImpl(VectorMeasurementSchema schema, int rowCount) {

Review comment: empty line

    List<String> valueMeasurementIdList = schema.getSubMeasurementsList();
    List<TSDataType> valueTSDataTypeList = schema.getSubMeasurementsTSDataTypeList();
    List<TSEncoding> valueTSEncodingList = schema.getSubMeasurementsTSEncodingList();
    List<Encoder> valueEncoderList = schema.getSubMeasurementsEncoderList();

    valueChunkWriterList = new ArrayList<>(valueMeasurementIdList.size());
    for (int i = 0; i < valueMeasurementIdList.size(); i++) {
      valueChunkWriterList.add(
          new ValueChunkWriter(
              valueMeasurementIdList.get(i),
              schema.getCompressor(),
              valueTSDataTypeList.get(i),
              valueTSEncodingList.get(i),
              valueEncoderList.get(i),
              rowCount));
    }
    timeChunkWriter =
        new TimeChunkWriter(
            schema.getMeasurementId(),
            schema.getCompressor(),
            schema.getTimeTSEncoding(),
            schema.getTimeEncoder(),
            rowCount);

    this.valueIndex = 0;
    this.remainingPointsNumber = timeChunkWriter.getRemainingPointNumberForCurrentPage();
  }
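For orientation, a minimal sketch of the call shape for this new overload (illustrative only; the schema and the expected row count are assumed to come from the caller and are not part of this diff):

```java
// Sketch only: forwards an expected row count so the TimeChunkWriter and the
// per-column ValueChunkWriters built above can pre-size their page buffers,
// rather than starting from the default capacity.
static AlignedChunkWriterImpl presizedWriter(VectorMeasurementSchema schema, int expectedRows) {
  return new AlignedChunkWriterImpl(schema, expectedRows);
}
```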
  /**
   * This is used to rewrite file. The encoding and compression of the time column should be the
   * same as the source file.
@@ -111,6 +141,32 @@ public AlignedChunkWriterImpl(
    this.remainingPointsNumber = timeChunkWriter.getRemainingPointNumberForCurrentPage();
  }

  public AlignedChunkWriterImpl(
      IMeasurementSchema timeSchema, List<IMeasurementSchema> valueSchemaList, int rowCount) {

    valueChunkWriterList = new ArrayList<>(valueSchemaList.size());
    for (int i = 0; i < valueSchemaList.size(); i++) {
      valueChunkWriterList.add(
          new ValueChunkWriter(
              valueSchemaList.get(i).getMeasurementId(),
              valueSchemaList.get(i).getCompressor(),
              valueSchemaList.get(i).getType(),
              valueSchemaList.get(i).getEncodingType(),
              valueSchemaList.get(i).getValueEncoder(),
              rowCount));
    }
    timeChunkWriter =
        new TimeChunkWriter(
            timeSchema.getMeasurementId(),
            timeSchema.getCompressor(),
            timeSchema.getEncodingType(),
            timeSchema.getTimeEncoder(),
            rowCount);

    this.valueIndex = 0;
    this.remainingPointsNumber = timeChunkWriter.getRemainingPointNumberForCurrentPage();
  }

  /**
   * This is used to write 0-level file. The compression of the time column is 'LZ4' in the
   * configuration by default. The encoding of the time column is 'TS_2DIFF' in the configuration by
@@ -146,6 +202,37 @@ public AlignedChunkWriterImpl(List<IMeasurementSchema> schemaList) {
    this.remainingPointsNumber = timeChunkWriter.getRemainingPointNumberForCurrentPage();
  }

  public AlignedChunkWriterImpl(List<IMeasurementSchema> schemaList, int rowCount) {
    TSEncoding timeEncoding =
        TSEncoding.valueOf(TSFileDescriptor.getInstance().getConfig().getTimeEncoder());
    TSDataType timeType = TSFileDescriptor.getInstance().getConfig().getTimeSeriesDataType();
    CompressionType timeCompression = TSFileDescriptor.getInstance().getConfig().getCompressor();

    valueChunkWriterList = new ArrayList<>(schemaList.size());
    for (int i = 0; i < schemaList.size(); i++) {
      valueChunkWriterList.add(
          new ValueChunkWriter(
              schemaList.get(i).getMeasurementId(),
              schemaList.get(i).getCompressor(),
              schemaList.get(i).getType(),
              schemaList.get(i).getEncodingType(),
              schemaList.get(i).getValueEncoder(),
              rowCount));
    }

    timeChunkWriter =
        new TimeChunkWriter(
            "",
            timeCompression,
            timeEncoding,
            TSEncodingBuilder.getEncodingBuilder(timeEncoding).getEncoder(timeType),
            rowCount);

    this.valueIndex = 0;

Review comment: empty line

    this.remainingPointsNumber = timeChunkWriter.getRemainingPointNumberForCurrentPage();
  }

  public void write(long time, int value, boolean isNull) {
    valueChunkWriterList.get(valueIndex++).write(time, value, isNull);
  }
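A slightly fuller usage sketch for the List-based overload above. Everything here is illustrative: the schemas and data come from the caller, and the trailing write(time) call that advances the time column is assumed from the existing AlignedChunkWriterImpl API rather than taken from this diff.

```java
// Illustrative sketch, not part of this PR: writes a block of INT32 columns into a
// chunk writer that was pre-sized for exactly `times.length` rows.
static void writePresizedChunk(List<IMeasurementSchema> schemaList, long[] times, int[][] columns) {
  int rowCount = times.length;
  // New overload: the TimeChunkWriter and every ValueChunkWriter are sized for rowCount rows.
  AlignedChunkWriterImpl writer = new AlignedChunkWriterImpl(schemaList, rowCount);
  for (int row = 0; row < rowCount; row++) {
    for (int col = 0; col < schemaList.size(); col++) {
      // One value per column; the writer advances its internal valueIndex.
      writer.write(times[row], columns[col][row], false);
    }
    // Assumed existing API: records the timestamp once all value columns are written.
    writer.write(times[row]);
  }
}
```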
Changes to ValueChunkWriter:
@@ -106,6 +106,40 @@ public ValueChunkWriter(
            new ValuePageWriter(valueEncoder, ICompressor.getCompressor(compressionType), dataType);
  }

  public ValueChunkWriter(
      String measurementId,
      CompressionType compressionType,
      TSDataType dataType,
      TSEncoding encodingType,
      Encoder valueEncoder,
      int rowCount) {
    this.measurementId = measurementId;
    this.encodingType = encodingType;
    this.dataType = dataType;
    this.compressionType = compressionType;
    this.pageSizeThreshold = TSFileDescriptor.getInstance().getConfig().getPageSizeInByte();
    this.maxNumberOfPointsInPage =
        TSFileDescriptor.getInstance().getConfig().getMaxNumberOfPointsInPage();
    this.valueCountInOnePageForNextCheck = MINIMUM_RECORD_COUNT_FOR_CHECK;

    // init statistics for this chunk and page
    this.statistics = Statistics.getStatsByType(dataType);

    int bufferCount =
        rowCount * dataType.getDataTypeSize()
            + PageHeader.estimateMaxPageHeaderSizeWithoutStatistics();

Review comment: abstract function and add comments

    this.pageBuffer = new PublicBAOS((bufferCount + 31) >> 5);
    int pageCapacity =
        Math.min(
            Math.min((int) pageSizeThreshold, bufferCount),
            MINIMUM_RECORD_COUNT_FOR_CHECK * rowCount
                + PageHeader.estimateMaxPageHeaderSizeWithoutStatistics());
    pageCapacity = (pageCapacity + 31) >> 5;
    this.pageWriter =
        new ValuePageWriter(
            valueEncoder, ICompressor.getCompressor(compressionType), dataType, pageCapacity);
  }

  public void write(long time, long value, boolean isNull) {
    pageWriter.write(time, value, isNull);
  }
Review comment: redundant?
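On the review comment above about the bufferCount calculation ("abstract function and add comments"): the constructor first estimates the byte size of a full page (rowCount values at dataType.getDataTypeSize() bytes each, plus a maximal page header), then starts each buffer at 1/32 of an estimate, since (x + 31) >> 5 is a round-up division by 32. As a rough worked example, assuming getDataTypeSize() reports 8 bytes for an INT64 column, rowCount = 10,000 gives an estimate of 80,000 bytes plus the header allowance, so the initial page buffer is roughly 2,500 bytes. One way the reviewer's suggestion could be addressed is a small, documented helper; the names below are hypothetical and not from the codebase:

```java
// Hypothetical refactor sketch (not part of this PR): centralizes the
// "estimate the bytes, then start at 1/32 of the estimate" logic used twice
// in the new constructor. Uses the same types the constructor already references.
final class ChunkBufferSizing {

  private ChunkBufferSizing() {}

  /** Estimated bytes of a full page: rowCount values plus a maximal page header. */
  static int estimatePageBytes(int rowCount, TSDataType dataType) {
    return rowCount * dataType.getDataTypeSize()
        + PageHeader.estimateMaxPageHeaderSizeWithoutStatistics();
  }

  /** Initial buffer capacity: 1/32 of the estimate, rounded up, so buffers start small and grow. */
  static int initialCapacity(int estimatedBytes) {
    return (estimatedBytes + 31) >> 5;
  }
}
```

The constructor could then call new PublicBAOS(ChunkBufferSizing.initialCapacity(bufferCount)) and ChunkBufferSizing.initialCapacity(pageCapacity), which also gives a natural home for the explanatory comments the reviewer asked for.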