diff --git a/code-style.xml b/code-style.xml new file mode 100644 index 00000000..7bb6804e --- /dev/null +++ b/code-style.xml @@ -0,0 +1,337 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/pom.xml b/pom.xml index 84ebedd5..e43abfaa 100644 --- a/pom.xml +++ b/pom.xml @@ -1,5 +1,6 @@ - + 4.0.0 cn.edu.tsinghua tsfile @@ -57,7 +58,7 @@ scm:git:git://github.com/thulab/tsfile.git scm:git:ssh://github.com:thulab/tsfile.git http://github.com/thulab/tsfile/tree/master - v0.8.0 + v0.8.0 @@ -136,6 +137,22 @@ + + net.revelc.code.formatter + formatter-maven-plugin + 2.7.5 + + + + format + + + UTF-8 + ${project.basedir}/code-style.xml + + + + org.codehaus.mojo build-helper-maven-plugin diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/conf/TSFileConfig.java b/src/main/java/cn/edu/tsinghua/tsfile/common/conf/TSFileConfig.java index e64eb12f..7247b1d7 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/conf/TSFileConfig.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/conf/TSFileConfig.java @@ -1,141 +1,140 @@ -package cn.edu.tsinghua.tsfile.common.conf; - -/** - * TSFileConfig is a configure class. Every variables is public and has default - * value. - * - * @author kangrong - */ -public class TSFileConfig { - // Memory configuration - /** - * Memory size threshold for flushing to disk or HDFS, default value is 128MB - */ - public int groupSizeInByte = 128 * 1024 * 1024; - /** - * The memory size for each series writer to pack page, default value is 64KB - */ - public int pageSizeInByte = 64 * 1024; - /** - * The maximum number of data points in a page, defalut value is 1024 * 1024 - */ - public int maxNumberOfPointsInPage = 1024 * 1024; - - // Data type configuration - /** - * Data type for input timestamp, TsFile supports INT32 or INT64 - */ - public String timeSeriesDataType = "INT64"; - /** - * Max length limitation of input string - */ - public int maxStringLength = 128; - /** - * Floating-point precision - */ - public int floatPrecision = 2; - - // Encoder configuration - /** - * Encoder of time series, TsFile supports TS_2DIFF, PLAIN and RLE(run-length encoding) - * Default value is TS_2DIFF - */ - public String timeSeriesEncoder = "TS_2DIFF"; - /** - * Encoder of value series. default value is PLAIN. - * For int, long data type, TsFile also supports TS_2DIFF and RLE(run-length encoding). - * For float, double data type, TsFile also supports TS_2DIFF, RLE(run-length encoding) and GORILLA. - * For text data type, TsFile only supports PLAIN. 
- */ - public String valueEncoder = "PLAIN"; - - // RLE configuration - /** - * Default bit width of RLE encoding is 8 - */ - public int rleBitWidth = 8; - public final int RLE_MIN_REPEATED_NUM = 8; - public final int RLE_MAX_REPEATED_NUM = 0x7FFF; - public final int RLE_MAX_BIT_PACKED_NUM = 63; - - // Gorilla encoding configuration - public final static int FLOAT_LENGTH = 32; - public final static int FLAOT_LEADING_ZERO_LENGTH = 5; - public final static int FLOAT_VALUE_LENGTH = 6; - public final static int DOUBLE_LENGTH = 64; - public final static int DOUBLE_LEADING_ZERO_LENGTH = 6; - public final static int DOUBLE_VALUE_LENGTH = 7; - - // TS_2DIFF configuration - /** - * Default block size of two-diff. delta encoding is 128 - */ - public int deltaBlockSize = 128; - - // Bitmap configuration - public final int BITMAP_BITWIDTH = 1; - - // Freq encoder configuration - /** - * Default frequency type is SINGLE_FREQ - */ - public String freqType = "SINGLE_FREQ"; - /** - * Default PLA max error is 100 - */ - public double plaMaxError = 100; - /** - * Default SDT max error is 100 - */ - public double sdtMaxError = 100; - /** - * Default DFT satisfy rate is 0.1 - */ - public double dftSatisfyRate = 0.1; - - // Compression configuration - /** - * Data compression method, TsFile supports UNCOMPRESSED or SNAPPY. - * Default value is UNCOMPRESSED which means no compression - */ - public String compressor = "UNCOMPRESSED"; - - // Don't change the following configuration - - /** - * Line count threshold for checking page memory occupied size - */ - public int pageCheckSizeThreshold = 100; - - /** - * Current version is 3 - */ - public static int currentVersion = 3; - - /** - * Default endian value is LITTLE_ENDIAN - */ - public String endian = "LITTLE_ENDIAN"; - - /** - * String encoder with UTF-8 encodes a character to at most 4 bytes. - */ - public static final int BYTE_SIZE_PER_CHAR = 4; - - public static final String STRING_ENCODING = "UTF-8"; - - public static final String CONFIG_FILE_NAME = "tsfile-format.properties"; - - /** - * The default grow size of class DynamicOneColumnData - */ - public static int dynamicDataSize = 1000; - - public static final String MAGIC_STRING = "TsFilev0.7.0"; - /** - * only can be used by TsFileDescriptor - */ - protected TSFileConfig() { - - } -} +package cn.edu.tsinghua.tsfile.common.conf; + +/** + * TSFileConfig is a configure class. Every variables is public and has default value. + * + * @author kangrong + */ +public class TSFileConfig { + // Memory configuration + /** + * Memory size threshold for flushing to disk or HDFS, default value is 128MB + */ + public int groupSizeInByte = 128 * 1024 * 1024; + /** + * The memory size for each series writer to pack page, default value is 64KB + */ + public int pageSizeInByte = 64 * 1024; + /** + * The maximum number of data points in a page, defalut value is 1024 * 1024 + */ + public int maxNumberOfPointsInPage = 1024 * 1024; + + // Data type configuration + /** + * Data type for input timestamp, TsFile supports INT32 or INT64 + */ + public String timeSeriesDataType = "INT64"; + /** + * Max length limitation of input string + */ + public int maxStringLength = 128; + /** + * Floating-point precision + */ + public int floatPrecision = 2; + + // Encoder configuration + /** + * Encoder of time series, TsFile supports TS_2DIFF, PLAIN and RLE(run-length encoding) Default + * value is TS_2DIFF + */ + public String timeSeriesEncoder = "TS_2DIFF"; + /** + * Encoder of value series. default value is PLAIN. 
For int, long data type, TsFile also supports + * TS_2DIFF and RLE(run-length encoding). For float, double data type, TsFile also supports + * TS_2DIFF, RLE(run-length encoding) and GORILLA. For text data type, TsFile only supports PLAIN. + */ + public String valueEncoder = "PLAIN"; + + // RLE configuration + /** + * Default bit width of RLE encoding is 8 + */ + public int rleBitWidth = 8; + public final int RLE_MIN_REPEATED_NUM = 8; + public final int RLE_MAX_REPEATED_NUM = 0x7FFF; + public final int RLE_MAX_BIT_PACKED_NUM = 63; + + // Gorilla encoding configuration + public final static int FLOAT_LENGTH = 32; + public final static int FLAOT_LEADING_ZERO_LENGTH = 5; + public final static int FLOAT_VALUE_LENGTH = 6; + public final static int DOUBLE_LENGTH = 64; + public final static int DOUBLE_LEADING_ZERO_LENGTH = 6; + public final static int DOUBLE_VALUE_LENGTH = 7; + + // TS_2DIFF configuration + /** + * Default block size of two-diff. delta encoding is 128 + */ + public int deltaBlockSize = 128; + + // Bitmap configuration + public final int BITMAP_BITWIDTH = 1; + + // Freq encoder configuration + /** + * Default frequency type is SINGLE_FREQ + */ + public String freqType = "SINGLE_FREQ"; + /** + * Default PLA max error is 100 + */ + public double plaMaxError = 100; + /** + * Default SDT max error is 100 + */ + public double sdtMaxError = 100; + /** + * Default DFT satisfy rate is 0.1 + */ + public double dftSatisfyRate = 0.1; + + // Compression configuration + /** + * Data compression method, TsFile supports UNCOMPRESSED or SNAPPY. Default value is UNCOMPRESSED + * which means no compression + */ + public String compressor = "UNCOMPRESSED"; + + // Don't change the following configuration + + /** + * Line count threshold for checking page memory occupied size + */ + public int pageCheckSizeThreshold = 100; + + /** + * Current version is 3 + */ + public static int currentVersion = 3; + + /** + * Default endian value is LITTLE_ENDIAN + */ + public String endian = "LITTLE_ENDIAN"; + + /** + * String encoder with UTF-8 encodes a character to at most 4 bytes. 
+ */ + public static final int BYTE_SIZE_PER_CHAR = 4; + + public static final String STRING_ENCODING = "UTF-8"; + + public static final String CONFIG_FILE_NAME = "tsfile-format.properties"; + + /** + * The default grow size of class DynamicOneColumnData + */ + public static int dynamicDataSize = 1000; + + public static final String MAGIC_STRING = "TsFilev0.7.0"; + + /** + * only can be used by TsFileDescriptor + */ + protected TSFileConfig() { + + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/conf/TSFileDescriptor.java b/src/main/java/cn/edu/tsinghua/tsfile/common/conf/TSFileDescriptor.java index 07e8e893..26ee3970 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/conf/TSFileDescriptor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/conf/TSFileDescriptor.java @@ -1,116 +1,121 @@ -package cn.edu.tsinghua.tsfile.common.conf; - -import cn.edu.tsinghua.tsfile.common.constant.SystemConstant; -import cn.edu.tsinghua.tsfile.timeseries.utils.Loader; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; -import java.net.URL; -import java.util.Properties; -import java.util.Set; - -/** - * TSFileDescriptor is used to load TSFileConfig and provide configure - * information - * - * @author kangrong - */ -public class TSFileDescriptor { - private static final Logger LOGGER = LoggerFactory.getLogger(TSFileDescriptor.class); - - private static class TsfileDescriptorHolder { - private static final TSFileDescriptor INSTANCE = new TSFileDescriptor(); - } - - private TSFileConfig conf = new TSFileConfig(); - - private TSFileDescriptor() { - loadProps(); - } - - public static final TSFileDescriptor getInstance() { - return TsfileDescriptorHolder.INSTANCE; - } - - public TSFileConfig getConfig() { - return conf; - } - - private void multiplicityWarning(String resource, ClassLoader classLoader) { - try { - Set urlSet = Loader.getResources(resource, classLoader); - if (urlSet != null && urlSet.size() > 1) { - LOGGER.warn("Resource [{}] occurs multiple times on the classpath", resource); - for (URL url : urlSet) { - LOGGER.warn("Resource [{}] occurs at [{}]", resource, url.toString()); - } - } - } catch (IOException e) { - LOGGER.error("Failed to get url list for {}", resource); - } - } - - private URL getResource(String filename, ClassLoader classLoader){ - return Loader.getResource(filename, classLoader); - } - - /** - * load an .properties file and set TSFileConfig variables - */ - private void loadProps() { - InputStream inputStream = null; - String url = System.getProperty(SystemConstant.TSFILE_CONF, null); - if (url == null) { - url = System.getProperty(SystemConstant.TSFILE_HOME, null); - if (url != null) { - url = url + File.separator + "conf" + File.separator + TSFileConfig.CONFIG_FILE_NAME; - } else { - ClassLoader classLoader = Loader.getClassLoaderOfObject(this); - URL u = getResource(TSFileConfig.CONFIG_FILE_NAME, classLoader); - if(u == null){ - LOGGER.warn("Failed to find config file {} at classpath, use default configuration", TSFileConfig.CONFIG_FILE_NAME); - return; - } else{ - multiplicityWarning(TSFileConfig.CONFIG_FILE_NAME, classLoader); - url = u.getFile(); - } - } - } - try { - inputStream = new FileInputStream(new File(url)); - } catch (FileNotFoundException e) { - LOGGER.warn("Fail to find config file {}", url); - return; - } - - LOGGER.info("Start to read config file {}", url); - Properties properties = new Properties(); - try { - properties.load(inputStream); - conf.groupSizeInByte = 
Integer.parseInt(properties.getProperty("group_size_in_byte", conf.groupSizeInByte + "")); - conf.pageSizeInByte = Integer.parseInt(properties.getProperty("page_size_in_byte", conf.pageSizeInByte + "")); - conf.maxNumberOfPointsInPage = Integer.parseInt(properties.getProperty("max_number_of_points_in_page", conf.maxNumberOfPointsInPage + "")); - conf.timeSeriesDataType = properties.getProperty("time_series_data_type", conf.timeSeriesDataType); - conf.maxStringLength = Integer.parseInt(properties.getProperty("max_string_length", conf.maxStringLength + "")); - conf.floatPrecision = Integer.parseInt(properties.getProperty("float_precision", conf.floatPrecision + "")); - conf.timeSeriesEncoder = properties.getProperty("time_series_encoder", conf.timeSeriesEncoder); - conf.valueEncoder = properties.getProperty("value_encoder", conf.valueEncoder); - conf.compressor = properties.getProperty("compressor", conf.compressor); - } catch (IOException e) { - LOGGER.warn("Cannot load config file because {}, use default configuration", e.getMessage()); - } catch (Exception e) { - LOGGER.error("Loading settings {} failed because {}", url, e.getMessage()); - } finally { - if (inputStream != null) { - try { - inputStream.close(); - inputStream = null; - } catch (IOException e) { - LOGGER.error("Failed to close stream for loading config because {}", e.getMessage()); - } - } - } - } -} +package cn.edu.tsinghua.tsfile.common.conf; + +import cn.edu.tsinghua.tsfile.common.constant.SystemConstant; +import cn.edu.tsinghua.tsfile.timeseries.utils.Loader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.*; +import java.net.URL; +import java.util.Properties; +import java.util.Set; + +/** + * TSFileDescriptor is used to load TSFileConfig and provide configure information + * + * @author kangrong + */ +public class TSFileDescriptor { + private static final Logger LOGGER = LoggerFactory.getLogger(TSFileDescriptor.class); + + private static class TsfileDescriptorHolder { + private static final TSFileDescriptor INSTANCE = new TSFileDescriptor(); + } + + private TSFileConfig conf = new TSFileConfig(); + + private TSFileDescriptor() { + loadProps(); + } + + public static final TSFileDescriptor getInstance() { + return TsfileDescriptorHolder.INSTANCE; + } + + public TSFileConfig getConfig() { + return conf; + } + + private void multiplicityWarning(String resource, ClassLoader classLoader) { + try { + Set urlSet = Loader.getResources(resource, classLoader); + if (urlSet != null && urlSet.size() > 1) { + LOGGER.warn("Resource [{}] occurs multiple times on the classpath", resource); + for (URL url : urlSet) { + LOGGER.warn("Resource [{}] occurs at [{}]", resource, url.toString()); + } + } + } catch (IOException e) { + LOGGER.error("Failed to get url list for {}", resource); + } + } + + private URL getResource(String filename, ClassLoader classLoader) { + return Loader.getResource(filename, classLoader); + } + + /** + * load an .properties file and set TSFileConfig variables + */ + private void loadProps() { + InputStream inputStream = null; + String url = System.getProperty(SystemConstant.TSFILE_CONF, null); + if (url == null) { + url = System.getProperty(SystemConstant.TSFILE_HOME, null); + if (url != null) { + url = url + File.separator + "conf" + File.separator + TSFileConfig.CONFIG_FILE_NAME; + } else { + ClassLoader classLoader = Loader.getClassLoaderOfObject(this); + URL u = getResource(TSFileConfig.CONFIG_FILE_NAME, classLoader); + if (u == null) { + LOGGER.warn("Failed to find config file {} at 
classpath, use default configuration", + TSFileConfig.CONFIG_FILE_NAME); + return; + } else { + multiplicityWarning(TSFileConfig.CONFIG_FILE_NAME, classLoader); + url = u.getFile(); + } + } + } + try { + inputStream = new FileInputStream(new File(url)); + } catch (FileNotFoundException e) { + LOGGER.warn("Fail to find config file {}", url); + return; + } + + LOGGER.info("Start to read config file {}", url); + Properties properties = new Properties(); + try { + properties.load(inputStream); + conf.groupSizeInByte = + Integer.parseInt(properties.getProperty("group_size_in_byte", conf.groupSizeInByte + "")); + conf.pageSizeInByte = + Integer.parseInt(properties.getProperty("page_size_in_byte", conf.pageSizeInByte + "")); + conf.maxNumberOfPointsInPage = Integer.parseInt(properties + .getProperty("max_number_of_points_in_page", conf.maxNumberOfPointsInPage + "")); + conf.timeSeriesDataType = + properties.getProperty("time_series_data_type", conf.timeSeriesDataType); + conf.maxStringLength = + Integer.parseInt(properties.getProperty("max_string_length", conf.maxStringLength + "")); + conf.floatPrecision = + Integer.parseInt(properties.getProperty("float_precision", conf.floatPrecision + "")); + conf.timeSeriesEncoder = + properties.getProperty("time_series_encoder", conf.timeSeriesEncoder); + conf.valueEncoder = properties.getProperty("value_encoder", conf.valueEncoder); + conf.compressor = properties.getProperty("compressor", conf.compressor); + } catch (IOException e) { + LOGGER.warn("Cannot load config file because {}, use default configuration", e.getMessage()); + } catch (Exception e) { + LOGGER.error("Loading settings {} failed because {}", url, e.getMessage()); + } finally { + if (inputStream != null) { + try { + inputStream.close(); + inputStream = null; + } catch (IOException e) { + LOGGER.error("Failed to close stream for loading config because {}", e.getMessage()); + } + } + } + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/constant/JsonFormatConstant.java b/src/main/java/cn/edu/tsinghua/tsfile/common/constant/JsonFormatConstant.java index 02d5c251..8e4e137f 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/constant/JsonFormatConstant.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/constant/JsonFormatConstant.java @@ -1,29 +1,28 @@ -package cn.edu.tsinghua.tsfile.common.constant; - -/*** - * this class define several constant string variables used in tsfile schema's - * keys. 
- * - * @author kangrong - * - */ -public class JsonFormatConstant { - public static final String JSON_SCHEMA = "schema"; - public static final String DELTA_TYPE = "delta_type"; - public static final String MEASUREMENT_UID = "measurement_id"; - public static final String DATA_TYPE = "data_type"; - public static final String MEASUREMENT_ENCODING = "encoding"; - public static final String ENUM_VALUES = "enum_values"; - public static final String ENUM_VALUES_SEPARATOR = ","; - public static final String MAX_POINT_NUMBER = "max_point_number"; - public static final String COMPRESS_TYPE = "compressor"; - public static final String FreqType = "freq_type"; - public static final String TSRECORD_SEPARATOR = ","; - public static final String MAX_STRING_LENGTH = "max_string_length"; - - public static final String ROW_GROUP_SIZE = "row_group_size"; - public static final String PAGE_SIZE = "page_size"; - - public static final String defaultDeltaType = "default_delta_type"; - public static final String PROPERTIES = "properties"; -} +package cn.edu.tsinghua.tsfile.common.constant; + +/*** + * this class define several constant string variables used in tsfile schema's keys. + * + * @author kangrong + * + */ +public class JsonFormatConstant { + public static final String JSON_SCHEMA = "schema"; + public static final String DELTA_TYPE = "delta_type"; + public static final String MEASUREMENT_UID = "measurement_id"; + public static final String DATA_TYPE = "data_type"; + public static final String MEASUREMENT_ENCODING = "encoding"; + public static final String ENUM_VALUES = "enum_values"; + public static final String ENUM_VALUES_SEPARATOR = ","; + public static final String MAX_POINT_NUMBER = "max_point_number"; + public static final String COMPRESS_TYPE = "compressor"; + public static final String FreqType = "freq_type"; + public static final String TSRECORD_SEPARATOR = ","; + public static final String MAX_STRING_LENGTH = "max_string_length"; + + public static final String ROW_GROUP_SIZE = "row_group_size"; + public static final String PAGE_SIZE = "page_size"; + + public static final String defaultDeltaType = "default_delta_type"; + public static final String PROPERTIES = "properties"; +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/constant/QueryConstant.java b/src/main/java/cn/edu/tsinghua/tsfile/common/constant/QueryConstant.java index 3f7138ca..94513737 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/constant/QueryConstant.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/constant/QueryConstant.java @@ -1,8 +1,8 @@ package cn.edu.tsinghua.tsfile.common.constant; public class QueryConstant { - // The start offset for the partition - public static final String PARTITION_START_OFFSET = "partition_start_offset"; - // The end offset for the partition - public static final String PARTITION_END_OFFSET = "partition_end_offset"; + // The start offset for the partition + public static final String PARTITION_START_OFFSET = "partition_start_offset"; + // The end offset for the partition + public static final String PARTITION_END_OFFSET = "partition_end_offset"; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/constant/StatisticConstant.java b/src/main/java/cn/edu/tsinghua/tsfile/common/constant/StatisticConstant.java index 87600b35..efde94ca 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/constant/StatisticConstant.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/constant/StatisticConstant.java @@ -2,20 +2,20 @@ public class StatisticConstant { - // names of 
statistics (aggregations), shared with IoTDB - public static final String MIN_TIME = "min_time"; - public static final String MAX_TIME = "max_time"; + // names of statistics (aggregations), shared with IoTDB + public static final String MIN_TIME = "min_time"; + public static final String MAX_TIME = "max_time"; - public static final String MAX_VALUE = "max_value"; - public static final String MIN_VALUE = "min_value"; + public static final String MAX_VALUE = "max_value"; + public static final String MIN_VALUE = "min_value"; - public static final String COUNT = "count"; + public static final String COUNT = "count"; - public static final String FIRST = "first"; + public static final String FIRST = "first"; - public static final String MEAN = "mean"; + public static final String MEAN = "mean"; - public static final String SUM = "sum"; - - public static final String LAST = "last"; + public static final String SUM = "sum"; + + public static final String LAST = "last"; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/constant/SystemConstant.java b/src/main/java/cn/edu/tsinghua/tsfile/common/constant/SystemConstant.java index 44bde215..178567b3 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/constant/SystemConstant.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/constant/SystemConstant.java @@ -1,9 +1,9 @@ -package cn.edu.tsinghua.tsfile.common.constant; - -public class SystemConstant { - public static final String TSFILE_HOME = "TSFILE_HOME"; - public static final String TSFILE_CONF = "TSFILE_CONF"; - public static final String PATH_SEPARATOR = "."; - public static final String PATH_SEPARATER_NO_REGEX = "\\."; - public static final String DEFAULT_DELTA_TYPE = "default_delta_type"; -} +package cn.edu.tsinghua.tsfile.common.constant; + +public class SystemConstant { + public static final String TSFILE_HOME = "TSFILE_HOME"; + public static final String TSFILE_CONF = "TSFILE_CONF"; + public static final String PATH_SEPARATOR = "."; + public static final String PATH_SEPARATER_NO_REGEX = "\\."; + public static final String DEFAULT_DELTA_TYPE = "default_delta_type"; +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/BadConfigurationException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/BadConfigurationException.java index c3d78c00..4275b1b4 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/BadConfigurationException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/BadConfigurationException.java @@ -4,20 +4,19 @@ * Thrown when the input/output formats are not properly configured */ public class BadConfigurationException extends TSFileRuntimeException { - private static final long serialVersionUID = -5342992200738090898L; + private static final long serialVersionUID = -5342992200738090898L; - public BadConfigurationException() { - } + public BadConfigurationException() {} - public BadConfigurationException(String message, Throwable cause) { - super(message, cause); - } + public BadConfigurationException(String message, Throwable cause) { + super(message, cause); + } - public BadConfigurationException(String message) { - super(message); - } + public BadConfigurationException(String message) { + super(message); + } - public BadConfigurationException(Throwable cause) { - super(cause); - } + public BadConfigurationException(Throwable cause) { + super(cause); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/CompressionTypeNotSupportedException.java 
b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/CompressionTypeNotSupportedException.java index 01eb702f..1071308d 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/CompressionTypeNotSupportedException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/CompressionTypeNotSupportedException.java @@ -5,20 +5,20 @@ * matching type defined in CompressionCodecName */ public class CompressionTypeNotSupportedException extends RuntimeException { - private static final long serialVersionUID = -2244072267816916609L; - private final Class codecClass; + private static final long serialVersionUID = -2244072267816916609L; + private final Class codecClass; - public CompressionTypeNotSupportedException(Class codecClass) { - super("codec not supported: " + codecClass.getName()); - this.codecClass = codecClass; - } + public CompressionTypeNotSupportedException(Class codecClass) { + super("codec not supported: " + codecClass.getName()); + this.codecClass = codecClass; + } - public CompressionTypeNotSupportedException(String codecType) { - super("codec not supported: " + codecType); - this.codecClass = null; - } + public CompressionTypeNotSupportedException(String codecType) { + super("codec not supported: " + codecType); + this.codecClass = null; + } - public Class getCodecClass() { - return codecClass; - } + public Class getCodecClass() { + return codecClass; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/DecoderNotFoundException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/DecoderNotFoundException.java index 71b8d02e..55d2592b 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/DecoderNotFoundException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/DecoderNotFoundException.java @@ -7,9 +7,9 @@ * @author kangrong */ public class DecoderNotFoundException extends Exception { - private static final long serialVersionUID = -310868735953605021L; + private static final long serialVersionUID = -310868735953605021L; - public DecoderNotFoundException(String message) { - super(message); - } + public DecoderNotFoundException(String message) { + super(message); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/FilterDataTypeException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/FilterDataTypeException.java index ce33e6cc..375abab5 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/FilterDataTypeException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/FilterDataTypeException.java @@ -3,24 +3,23 @@ /** * This Exception is used while filter data type is not consistent with the series type.
* e.g. you want to get result from a Double series used Integer filter.
- * This Exception extends super class - * {@link FilterDataTypeException} + * This Exception extends super class {@link FilterDataTypeException} * * @author CGF */ public class FilterDataTypeException extends RuntimeException { - private static final long serialVersionUID = 1888878519023495363L; + private static final long serialVersionUID = 1888878519023495363L; - public FilterDataTypeException(String message, Throwable cause) { - super(message, cause); - } + public FilterDataTypeException(String message, Throwable cause) { + super(message, cause); + } - public FilterDataTypeException(String message) { - super(message); - } + public FilterDataTypeException(String message) { + super(message); + } - public FilterDataTypeException(Throwable cause) { - super(cause); - } + public FilterDataTypeException(Throwable cause) { + super(cause); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/FilterInvokeException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/FilterInvokeException.java index dc6dbea9..c8606010 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/FilterInvokeException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/FilterInvokeException.java @@ -2,24 +2,23 @@ /** * This Exception is used while invoke UnarySeriesFilter's accept method.
- * This Exception extends super class - * {@link FilterInvokeException} + * This Exception extends super class {@link FilterInvokeException} * * @author CGF */ public class FilterInvokeException extends RuntimeException { - private static final long serialVersionUID = 1888878519023495363L; + private static final long serialVersionUID = 1888878519023495363L; - public FilterInvokeException(String message, Throwable cause) { - super(message, cause); - } + public FilterInvokeException(String message, Throwable cause) { + super(message, cause); + } - public FilterInvokeException(String message) { - super(message); - } + public FilterInvokeException(String message) { + super(message); + } - public FilterInvokeException(Throwable cause) { - super(cause); - } + public FilterInvokeException(Throwable cause) { + super(cause); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/ProcessorException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/ProcessorException.java index 80def028..54863aa3 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/ProcessorException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/ProcessorException.java @@ -7,21 +7,21 @@ */ public class ProcessorException extends Exception { - private static final long serialVersionUID = 4137638418544201605L; + private static final long serialVersionUID = 4137638418544201605L; - public ProcessorException(String msg) { - super(msg); - } + public ProcessorException(String msg) { + super(msg); + } - public ProcessorException(Throwable e) { - super(e); - } + public ProcessorException(Throwable e) { + super(e); + } - public ProcessorException(Exception e) { - super(e); - } + public ProcessorException(Exception e) { + super(e); + } - public ProcessorException() { - super(); - } + public ProcessorException() { + super(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/TSFileDecodingException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/TSFileDecodingException.java index 5dd6ea60..c96fda82 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/TSFileDecodingException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/TSFileDecodingException.java @@ -2,26 +2,24 @@ /** * This Exception is used while decoding failed.
- * This Exception extends super class - * {@link TSFileRuntimeException} + * This Exception extends super class {@link TSFileRuntimeException} * * @author kangrong */ public class TSFileDecodingException extends TSFileRuntimeException { - private static final long serialVersionUID = -8632392900655017028L; + private static final long serialVersionUID = -8632392900655017028L; - public TSFileDecodingException() { - } + public TSFileDecodingException() {} - public TSFileDecodingException(String message, Throwable cause) { - super(message, cause); - } + public TSFileDecodingException(String message, Throwable cause) { + super(message, cause); + } - public TSFileDecodingException(String message) { - super(message); - } + public TSFileDecodingException(String message) { + super(message); + } - public TSFileDecodingException(Throwable cause) { - super(cause); - } + public TSFileDecodingException(Throwable cause) { + super(cause); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/TSFileEncodingException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/TSFileEncodingException.java index eb34b785..a0d6879c 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/TSFileEncodingException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/TSFileEncodingException.java @@ -2,26 +2,24 @@ /** * This Exception is used while encoding failed.
- * This Exception extends super class - * {@link TSFileRuntimeException} + * This Exception extends super class {@link TSFileRuntimeException} * * @author kangrong */ public class TSFileEncodingException extends TSFileRuntimeException { - private static final long serialVersionUID = -7225811149696714845L; + private static final long serialVersionUID = -7225811149696714845L; - public TSFileEncodingException() { - } + public TSFileEncodingException() {} - public TSFileEncodingException(String message, Throwable cause) { - super(message, cause); - } + public TSFileEncodingException(String message, Throwable cause) { + super(message, cause); + } - public TSFileEncodingException(String message) { - super(message); - } + public TSFileEncodingException(String message) { + super(message); + } - public TSFileEncodingException(Throwable cause) { - super(cause); - } + public TSFileEncodingException(Throwable cause) { + super(cause); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/TSFileRuntimeException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/TSFileRuntimeException.java index 286850bf..85a11055 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/TSFileRuntimeException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/TSFileRuntimeException.java @@ -7,21 +7,21 @@ * @author kangrong */ abstract public class TSFileRuntimeException extends RuntimeException { - private static final long serialVersionUID = 6455048223316780984L; + private static final long serialVersionUID = 6455048223316780984L; - public TSFileRuntimeException() { - super(); - } + public TSFileRuntimeException() { + super(); + } - public TSFileRuntimeException(String message, Throwable cause) { - super(message, cause); - } + public TSFileRuntimeException(String message, Throwable cause) { + super(message, cause); + } - public TSFileRuntimeException(String message) { - super(message); - } + public TSFileRuntimeException(String message) { + super(message); + } - public TSFileRuntimeException(Throwable cause) { - super(cause); - } + public TSFileRuntimeException(Throwable cause) { + super(cause); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/UnSupportedDataTypeException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/UnSupportedDataTypeException.java index d8a7a392..b03cd8a8 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/UnSupportedDataTypeException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/UnSupportedDataTypeException.java @@ -5,9 +5,9 @@ */ public class UnSupportedDataTypeException extends RuntimeException { - private static final long serialVersionUID = 6399248887091915203L; + private static final long serialVersionUID = 6399248887091915203L; - public UnSupportedDataTypeException(String dataTypeName) { - super("UnSupported dataType: " + dataTypeName); - } + public UnSupportedDataTypeException(String dataTypeName) { + super("UnSupported dataType: " + dataTypeName); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/UnknownColumnTypeException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/UnknownColumnTypeException.java index c58a9fb4..9e69181e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/UnknownColumnTypeException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/UnknownColumnTypeException.java @@ -2,17 +2,16 @@ /** * This Exception is used while getting an unknown column type.
- * This Exception extends super class - * {@link TSFileRuntimeException} + * This Exception extends super class {@link TSFileRuntimeException} * * @author kangrong */ public class UnknownColumnTypeException extends TSFileRuntimeException { - private static final long serialVersionUID = -4003170165687174659L; - public String type; + private static final long serialVersionUID = -4003170165687174659L; + public String type; - public UnknownColumnTypeException(String type) { - super("Column type not found: " + type); - this.type = type; - } + public UnknownColumnTypeException(String type) { + super("Column type not found: " + type); + this.type = type; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/cache/CacheException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/cache/CacheException.java index 333fec3a..17075722 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/cache/CacheException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/cache/CacheException.java @@ -4,22 +4,22 @@ * Created by zhangjinrui on 2017/12/25. */ public class CacheException extends Exception { - public CacheException() { - } + public CacheException() {} - public CacheException(String message) { - super(message); - } + public CacheException(String message) { + super(message); + } - public CacheException(String message, Throwable cause) { - super(message, cause); - } + public CacheException(String message, Throwable cause) { + super(message, cause); + } - public CacheException(Throwable cause) { - super(cause); - } + public CacheException(Throwable cause) { + super(cause); + } - public CacheException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - } + public CacheException(String message, Throwable cause, boolean enableSuppression, + boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/filter/FilterDataTypeException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/filter/FilterDataTypeException.java index 79791417..8f8d6e6a 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/filter/FilterDataTypeException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/filter/FilterDataTypeException.java @@ -3,24 +3,23 @@ /** * This Exception is used while filter data type is not consistent with the series type.
* e.g. you want to get result from a Double series used Integer filter.
- * This Exception extends super class - * {@link FilterDataTypeException} + * This Exception extends super class {@link FilterDataTypeException} * * @author CGF */ public class FilterDataTypeException extends RuntimeException { - private static final long serialVersionUID = 1888878519023495363L; + private static final long serialVersionUID = 1888878519023495363L; - public FilterDataTypeException(String message, Throwable cause) { - super(message, cause); - } + public FilterDataTypeException(String message, Throwable cause) { + super(message, cause); + } - public FilterDataTypeException(String message) { - super(message); - } + public FilterDataTypeException(String message) { + super(message); + } - public FilterDataTypeException(Throwable cause) { - super(cause); - } + public FilterDataTypeException(Throwable cause) { + super(cause); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/filter/FilterInvokeException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/filter/FilterInvokeException.java index 92e2497d..c7eecacf 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/filter/FilterInvokeException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/filter/FilterInvokeException.java @@ -2,24 +2,23 @@ /** * This Exception is used while invoke UnarySeriesFilter's accept method.
- * This Exception extends super class - * {@link FilterInvokeException} + * This Exception extends super class {@link FilterInvokeException} * * @author CGF */ public class FilterInvokeException extends RuntimeException { - private static final long serialVersionUID = 1888878519023495363L; + private static final long serialVersionUID = 1888878519023495363L; - public FilterInvokeException(String message, Throwable cause) { - super(message, cause); - } + public FilterInvokeException(String message, Throwable cause) { + super(message, cause); + } - public FilterInvokeException(String message) { - super(message); - } + public FilterInvokeException(String message) { + super(message); + } - public FilterInvokeException(Throwable cause) { - super(cause); - } + public FilterInvokeException(Throwable cause) { + super(cause); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/filter/UnSupportFilterDataTypeException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/filter/UnSupportFilterDataTypeException.java index f2f6e1a8..4ff7688a 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/filter/UnSupportFilterDataTypeException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/filter/UnSupportFilterDataTypeException.java @@ -5,15 +5,15 @@ */ public class UnSupportFilterDataTypeException extends RuntimeException { - public UnSupportFilterDataTypeException(String message, Throwable cause) { - super(message, cause); - } + public UnSupportFilterDataTypeException(String message, Throwable cause) { + super(message, cause); + } - public UnSupportFilterDataTypeException(String message) { - super(message); - } + public UnSupportFilterDataTypeException(String message) { + super(message); + } - public UnSupportFilterDataTypeException(Throwable cause) { - super(cause); - } + public UnSupportFilterDataTypeException(Throwable cause) { + super(cause); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/metadata/MetadataArgsErrorException.java b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/metadata/MetadataArgsErrorException.java index d213bfb7..a1200304 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/exception/metadata/MetadataArgsErrorException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/exception/metadata/MetadataArgsErrorException.java @@ -1,17 +1,17 @@ package cn.edu.tsinghua.tsfile.common.exception.metadata; /** - * If query metadata constructs schema but passes illegal parameters to - * EncodingConvertor or DataTypeConvertor,this exception will be threw. + * If query metadata constructs schema but passes illegal parameters to EncodingConvertor or + * DataTypeConvertor,this exception will be threw. 
* * @author kangrong */ public class MetadataArgsErrorException extends Exception { - private static final long serialVersionUID = 3415275599091623570L; + private static final long serialVersionUID = 3415275599091623570L; - public MetadataArgsErrorException(String msg) { - super(msg); - } + public MetadataArgsErrorException(String msg) { + super(msg); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/Binary.java b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/Binary.java index 019ddeb1..33ad18f8 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/Binary.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/Binary.java @@ -1,93 +1,92 @@ package cn.edu.tsinghua.tsfile.common.utils; import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; - import java.io.Serializable; import java.nio.charset.Charset; import java.util.Arrays; /** - * Override compareTo() and equals() function to Binary class. This class is - * used to accept Java String type + * Override compareTo() and equals() function to Binary class. This class is used to accept Java + * String type * * @author xuyi */ public class Binary implements Comparable, Serializable { - private static final long serialVersionUID = 6394197743397020735L; - public byte[] values; - private String textEncodingType = TSFileConfig.STRING_ENCODING; - - public Binary(byte[] v) { - this.values = v; - } - - public Binary(String s) { - this.values = (s == null) ? null : s.getBytes(Charset.forName(this.textEncodingType)); - } - - public static Binary valueOf(String value) { - return new Binary(BytesUtils.StringToBytes(value)); - } - - @Override - public int compareTo(Binary other) { - if (other == null) { - if (this.values == null) { - return 0; - } else { - return 1; - } - } - - int i = 0; - while (i < getLength() && i < other.getLength()) { - if (this.values[i] == other.values[i]) { - i++; - continue; - } - return this.values[i] - other.values[i]; - } - return getLength() - other.getLength(); - } - - @Override - public boolean equals(Object other) { - if (this == other) - return true; - if (other == null) - return false; - if (getClass() != other.getClass()) - return false; - - if (compareTo((Binary) other) == 0) - return true; - return false; - } - - @Override - public int hashCode() { - return Arrays.hashCode(values); - } - - public int getLength() { - if (this.values == null) - return -1; - return this.values.length; - } - - public String getStringValue() { - return new String(this.values, Charset.forName(this.textEncodingType)); - } - - public String getTextEncodingType() { - return textEncodingType; - } - - public void setTextEncodingType(String textEncodingType) { - this.textEncodingType = textEncodingType; + private static final long serialVersionUID = 6394197743397020735L; + public byte[] values; + private String textEncodingType = TSFileConfig.STRING_ENCODING; + + public Binary(byte[] v) { + this.values = v; + } + + public Binary(String s) { + this.values = (s == null) ? 
null : s.getBytes(Charset.forName(this.textEncodingType)); + } + + public static Binary valueOf(String value) { + return new Binary(BytesUtils.StringToBytes(value)); + } + + @Override + public int compareTo(Binary other) { + if (other == null) { + if (this.values == null) { + return 0; + } else { + return 1; + } } - public String toString() { - return getStringValue(); + int i = 0; + while (i < getLength() && i < other.getLength()) { + if (this.values[i] == other.values[i]) { + i++; + continue; + } + return this.values[i] - other.values[i]; } + return getLength() - other.getLength(); + } + + @Override + public boolean equals(Object other) { + if (this == other) + return true; + if (other == null) + return false; + if (getClass() != other.getClass()) + return false; + + if (compareTo((Binary) other) == 0) + return true; + return false; + } + + @Override + public int hashCode() { + return Arrays.hashCode(values); + } + + public int getLength() { + if (this.values == null) + return -1; + return this.values.length; + } + + public String getStringValue() { + return new String(this.values, Charset.forName(this.textEncodingType)); + } + + public String getTextEncodingType() { + return textEncodingType; + } + + public void setTextEncodingType(String textEncodingType) { + this.textEncodingType = textEncodingType; + } + + public String toString() { + return getStringValue(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/BytesUtils.java b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/BytesUtils.java index aea83c4f..49a7ea34 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/BytesUtils.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/BytesUtils.java @@ -3,15 +3,14 @@ import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.util.List; /** - * BytesUtils is a utility class. It provide conversion among byte array and - * other type including integer, long, float, boolean, double and string.
+ * BytesUtils is a utility class. It provide conversion among byte array and other type including
+ * integer, long, float, boolean, double and string.
* It also provide other usable function as follow:
* reading function which receives InputStream.
* concat function to join a list of byte array to one.
@@ -20,761 +19,746 @@ * @author kangrong */ public class BytesUtils { - private static final Logger LOG = LoggerFactory.getLogger(BytesUtils.class); - - /** - * integer convert to byte[4] - * - * @param i integer to convert - * @return byte[4] for integer - */ - public static byte[] intToBytes(int i) { - byte[] result = new byte[4]; - result[0] = (byte) ((i >> 24) & 0xFF); - result[1] = (byte) ((i >> 16) & 0xFF); - result[2] = (byte) ((i >> 8) & 0xFF); - result[3] = (byte) (i & 0xFF); - return result; - } - - public static byte[] intToTwoBytes(int i) { - assert i <= 0xFFFF; - byte[] ret = new byte[2]; - ret[1] = (byte) (i & 0xFF); - ret[0] = (byte) ((i >> 8) & 0xFF); - return ret; - } - - public static int twoBytesToInt(byte[] ret) { - assert ret.length == 2; - int value = 0; - value |= ret[0]; - value = value << 8; - value |= ret[1]; - return value; - } - - /** - * integer convert to byte array, then write four bytes to parameter desc - * start from index:offset - * - * @param i integer to convert - * @param desc byte array be written - * @param offset position in desc byte array that conversion result should start - * @return byte array - */ - public static byte[] intToBytes(int i, byte[] desc, int offset) { - assert desc.length - offset >= 4; - desc[0 + offset] = (byte) ((i >> 24) & 0xFF); - desc[1 + offset] = (byte) ((i >> 16) & 0xFF); - desc[2 + offset] = (byte) ((i >> 8) & 0xFF); - desc[3 + offset] = (byte) (i & 0xFF); - return desc; - } - - /** - * byte[4] convert to integer - * - * @param bytes input byte[] - * @return integer - */ - public static int bytesToInt(byte[] bytes) { - int value = 0; - // high bit to low - for (int i = 0; i < 4; i++) { - int shift = (4 - 1 - i) * 8; - value += (bytes[i] & 0x000000FF) << shift; - } - return value; - } - - /** - * convert four-bytes byte array cut from parameters to integer. - * - * @param bytes source bytes which length should be greater than 4 - * @param offset position in parameter byte array that conversion result should start - * @return integer - */ - public static int bytesToInt(byte[] bytes, int offset) { - assert bytes.length - offset >= 4; - int value = 0; - // high bit to low - for (int i = 0; i < 4; i++) { - int shift = (4 - 1 - i) * 8; - value += (bytes[offset + i] & 0x000000FF) << shift; - } - return value; - } - - /** - * convert float to byte array - * - * @param x float - * @return byte[4] - */ - public static byte[] floatToBytes(float x) { - byte[] b = new byte[4]; - int l = Float.floatToIntBits(x); - for (int i = 3; i >= 0; i--) { - b[i] = new Integer(l).byteValue(); - l = l >> 8; - } - return b; - } - - /** - * float convert to boolean, then write four bytes to parameter desc start - * from index:offset - * - * @param x float - * @param desc byte array be written - * @param offset position in desc byte array that conversion result should start - */ - public static void floatToBytes(float x, byte[] desc, int offset) { - assert desc.length - offset >= 4; - int l = Float.floatToIntBits(x); - for (int i = 3 + offset; i >= offset; i--) { - desc[i] = new Integer(l).byteValue(); - l = l >> 8; - } - } - - /** - * convert byte[4] to float - * - * @param b byte[4] - * @return float - */ - public static float bytesToFloat(byte[] b) { - assert b.length == 4; - int l; - l = b[3]; - l &= 0xff; - l |= ((long) b[2] << 8); - l &= 0xffff; - l |= ((long) b[1] << 16); - l &= 0xffffff; - l |= ((long) b[0] << 24); - return Float.intBitsToFloat(l); - } - - /** - * convert four-bytes byte array cut from parameters to float. 
- * - * @param b source bytes which length should be greater than 4 - * @param offset position in parameter byte array that conversion result should start - * @return float - */ - public static float bytesToFloat(byte[] b, int offset) { - assert b.length - offset >= 4; - int l; - l = b[offset + 3]; - l &= 0xff; - l |= ((long) b[offset + 2] << 8); - l &= 0xffff; - l |= ((long) b[offset + 1] << 16); - l &= 0xffffff; - l |= ((long) b[offset] << 24); - return Float.intBitsToFloat(l); - } - - /** - * convert double to byte array - * - * @param data double - * @return byte[8] - */ - public static byte[] doubleToBytes(double data) { - byte[] bytes = new byte[8]; - long value = Double.doubleToLongBits(data); - for (int i = 7; i >= 0; i--) { - bytes[i] = new Long(value).byteValue(); - value = value >> 8; - } - return bytes; - } - - /** - * convert double to byte into the given byte array started from offset. - * @param d input double - * @param bytes target byte[] - * @param offset start pos - */ - public static void doubleToBytes(double d, byte[] bytes, int offset) { - assert bytes.length - offset >= 8; - long value = Double.doubleToLongBits(d); - for (int i = 7; i >= 0; i--) { - bytes[offset + i] = new Long(value).byteValue(); - value = value >> 8; - } - } - - /** - * convert byte array to double - * - * @param bytes byte[8] - * @return double - */ - public static double bytesToDouble(byte[] bytes) { - long value = bytes[7]; - value &= 0xff; - value |= ((long) bytes[6] << 8); - value &= 0xffff; - value |= ((long) bytes[5] << 16); - value &= 0xffffff; - value |= ((long) bytes[4] << 24); - value &= 0xffffffffL; - value |= ((long) bytes[3] << 32); - value &= 0xffffffffffL; - value |= ((long) bytes[2] << 40); - value &= 0xffffffffffffL; - value |= ((long) bytes[1] << 48); - value &= 0xffffffffffffffL; - value |= ((long) bytes[0] << 56); - return Double.longBitsToDouble(value); - } - - /** - * convert eight-bytes byte array cut from parameters to double. 
- * - * @param bytes source bytes which length should be greater than 8 - * @param offset position in parameter byte array that conversion result should start - * @return double - */ - public static double bytesToDouble(byte[] bytes, int offset) { - assert bytes.length - offset >= 8; - long value = bytes[offset + 7]; - value &= 0xff; - value |= ((long) bytes[offset + 6] << 8); - value &= 0xffff; - value |= ((long) bytes[offset + 5] << 16); - value &= 0xffffff; - value |= ((long) bytes[offset + 4] << 24); - value &= 0xffffffffL; - value |= ((long) bytes[offset + 3] << 32); - value &= 0xffffffffffL; - value |= ((long) bytes[offset + 2] << 40); - value &= 0xffffffffffffL; - value |= ((long) bytes[offset + 1] << 48); - value &= 0xffffffffffffffL; - value |= ((long) bytes[offset] << 56); - return Double.longBitsToDouble(value); - } - - /** - * convert boolean to byte[1] - * - * @param x boolean - * @return byte[] - */ - public static byte[] boolToBytes(boolean x) { - byte[] b = new byte[1]; - if (x) - b[0] = 1; - else - b[0] = 0; - return b; - } - - /** - * boolean convert to byte array, then write four bytes to parameter desc - * start from index:offset - * - * @param x input boolean - * @param desc byte array be written - * @param offset position in desc byte array that conversion result should start - * @return byte[1] - */ - public static byte[] boolToBytes(boolean x, byte[] desc, int offset) { - if (x) - desc[offset] = 1; - else - desc[offset] = 0; - return desc; - } - - /** - * byte array to boolean - * - * @param b input byte[1] - * @return boolean - */ - public static boolean bytesToBool(byte[] b) { - assert b.length == 1; - if (b[0] == 0) - return false; - else - return true; - } - - /** - * convert one-bytes byte array cut from parameters to boolean. - * - * @param b source bytes which length should be greater than 1 - * @param offset position in parameter byte array that conversion result should start - * @return boolean - */ - public static boolean bytesToBool(byte[] b, int offset) { - assert b.length - offset >= 1; - if (b[offset] == 0) - return false; - else - return true; - } - - /** - * long to byte array with default converting length 8. It means the length - * of result byte array is 8 - * - * @param num long variable to be converted - * @return byte[8] - */ - public static byte[] longToBytes(long num) { - return longToBytes(num, 8); - } - - /** - * specify the result array length. then, convert long to Big-Endian byte - * from low to high.
- * e.g.
- * the binary presentation of long number 1000L is {6 bytes equal 0000000}
- * 00000011 11101000
- * if len = 2, it will return byte array :{00000011 11101000}(Big-Endian) if - * len = 1, it will return byte array :{11101000} - * - * @param num long variable to be converted - * @param len length of result byte array - * @return byte array which length equals with parameter len - */ - public static byte[] longToBytes(long num, int len) { - byte[] byteNum = new byte[len]; - for (int ix = 0; ix < len; ix++) { - byteNum[len - ix - 1] = (byte) ((num >> ix * 8) & 0xFF); - } - return byteNum; - } - - /** - * long convert to byte array, then write four bytes to parameter desc start - * from index:offset - * - * @param num input long variable - * @param desc byte array be written - * @param offset_ position in desc byte array that conversion result should start - * @return byte array - */ - public static byte[] longToBytes(long num, byte[] desc, int offset_) { - for (int ix = 0; ix < 8; ++ix) { - int offset = 64 - (ix + 1) * 8; - desc[ix + offset_] = (byte) ((num >> offset) & 0xff); - } - return desc; - } - - /** - * convert byte array to long with default length 8. namely - * - * @param byteNum input byte array - * @return long - */ - public static long bytesToLong(byte[] byteNum) { - assert byteNum.length == 8; - return bytesToLong(byteNum, 8); - } - - /** - * specify the input byte array length. then, convert byte array to long - * value from low to high.
- * e.g.
- * the input byte array is {00000011 11101000}. if len = 2, return 1000 if - * len = 1, return 232(only calculate the low byte) - * - * @param byteNum byte array to be converted - * @param len length of input byte array to be converted - * @return long - */ - public static long bytesToLong(byte[] byteNum, int len) { - long num = 0; - for (int ix = 0; ix < len; ix++) { - num <<= 8; - num |= (byteNum[ix] & 0xff); - } - return num; - } - - /** - * convert eight-bytes byte array cut from parameters to long. - * - * @param byteNum source bytes which length should be greater than 8 - * @param len length of input byte array to be converted - * @param offset position in parameter byte array that conversion result should start - * @return long - */ - public static long bytesToLongFromOffset(byte[] byteNum, int len, int offset) { - assert byteNum.length - offset >= len; - long num = 0; - for (int ix = 0; ix < len; ix++) { - num <<= 8; - num |= (byteNum[offset + ix] & 0xff); - } - return num; - } - - /** - * convert string to byte array using UTF-8 encoding - * - * @param str input string - * @return byte array - */ - public static byte[] StringToBytes(String str) { - try { - return str.getBytes(TSFileConfig.STRING_ENCODING); - } catch (UnsupportedEncodingException e) { - LOG.error("catch UnsupportedEncodingException {}", str, e); - return null; - } - } - - public static String bytesToString(byte[] byteStr) { - try { - return new String(byteStr, TSFileConfig.STRING_ENCODING); - } catch (UnsupportedEncodingException e) { - LOG.error("catch UnsupportedEncodingException {}", byteStr, e); - return null; - } - } - - /** - * join two byte arrays to one - * - * @param a one of byte array - * @param b another byte array - * @return byte array after joining - */ - public static byte[] concatByteArray(byte[] a, byte[] b) { - byte[] c = new byte[a.length + b.length]; - System.arraycopy(a, 0, c, 0, a.length); - System.arraycopy(b, 0, c, a.length, b.length); - return c; - } - - /** - * join a list of byte arrays into one array - * - * @param list a list of byte array to join - * @return byte array after joining - */ - public static byte[] concatByteArrayList(List list) { - int size = list.size(); - int len = 0; - for (byte[] cs : list) { - len += cs.length; - } - byte[] result = new byte[len]; - int pos = 0; - for (int i = 0; i < size; i++) { - int l = list.get(i).length; - System.arraycopy(list.get(i), 0, result, pos, l); - pos += l; - } - return result; - } - - /** - * cut out specified length byte array from parameter start from input byte - * array src and return - * - * @param src input byte array - * @param start start index of src - * @param length cut off length - * @return byte array - */ - public static byte[] subBytes(byte[] src, int start, int length) { - if ((start + length) > src.length) - return null; - if (length <= 0) - return null; - byte[] result = new byte[length]; - for (int i = 0; i < length; i++) { - result[i] = src[start + i]; - } - return result; - } - - /** - * get one bit in input integer. the offset is from low to high and start - * with 0
- * e.g.
- * data:1000(00000000 00000000 00000011 11101000), if offset is 4, return - * 0(111 "0" 1000) if offset is 9, return 1(00000 "1" 1 11101000) - * - * @param data input int variable - * @param offset bit offset - * @return 0 or 1 - */ - public static int getIntN(int data, int offset) { - offset %= 32; - if ((data & (1 << (offset))) != 0) - return 1; - else - return 0; - } - - /** - * set one bit in input integer. the offset is from low to high and start - * with index 0
- * e.g.
- * data:1000({00000000 00000000 00000011 11101000}), if offset is 4, value - * is 1, return 1016({00000000 00000000 00000011 111 "1" 1000}) if offset is - * 9, value is 0 return 488({00000000 00000000 000000 "0" 1 11101000}) if - * offset is 0, value is 0 return 1000(no change) - * - * @param data input int variable - * @param offset bit offset - * @param value value to set - * @return int variable - */ - public static int setIntN(int data, int offset, int value) { - offset %= 32; - if (value == 1) - return (data | (1 << (offset))); - else - return (data & ~(1 << (offset))); - } - - /** - * get one bit in input byte. the offset is from low to high and start with - * 0
- * e.g.
- * data:16(00010000), if offset is 4, return 1(000 "1" 0000) if offset is 7, - * return 0("0" 0010000) - * - * @param data input byte variable - * @param offset bit offset - * @return 0/1 - */ - public static int getByteN(byte data, int offset) { - offset %= 8; - if ((data & (1 << (7 - offset))) != 0) - return 1; - else - return 0; - } - - /** - * set one bit in input byte. the offset is from low to high and start with - * index 0
- * e.g.
- * data:16(00010000), if offset is 4, value is 0, return 0({000 "0" 0000}) - * if offset is 1, value is 1, return 18({00010010}) if offset is 0, value - * is 0, return 16(no change) - * - * @param data input byte variable - * @param offset bit offset - * @param value value to set - * @return byte variable - */ - public static byte setByteN(byte data, int offset, int value) { - offset %= 8; - if (value == 1) - return (byte) (data | (1 << (7 - offset))); - else - return (byte) (data & ~(1 << (7 - offset))); - } - - /** - * get one bit in input long. the offset is from low to high and start with - * 0
- * - * @param data input long variable - * @param offset bit offset - * @return 0/1 - */ - public static int getLongN(long data, int offset) { - offset %= 64; - if ((data & (1l << (offset))) != 0) - return 1; - else - return 0; - } - - /** - * set one bit in input long. the offset is from low to high and start with - * index 0
- * - * @param data input long variable - * @param offset bit offset - * @param value value to set - * @return long variable - */ - public static long setLongN(long data, int offset, int value) { - offset %= 64; - if (value == 1) - return (data | (1l << (offset))); - else - return (data & ~(1l << (offset))); - } - - /** - * given a byte array, read width bits from specified position bits and - * convert it to an integer - * - * @param result input byte array - * @param pos bit offset rather than byte offset - * @param width bit-width - * @return integer variable - */ - public static int bytesToInt(byte[] result, int pos, int width) { - - int value = 0; - int temp = 0; - - for (int i = 0; i < width; i++) { - temp = (pos + width - 1 - i) / 8; - value = setIntN(value, i, getByteN(result[temp], pos + width - 1 - i)); - } - return value; - } - - /** - * convert an integer to a byte array which length is width, then copy this - * array to the parameter result from pos - * - * @param srcNum input integer variable - * @param result byte array to convert - * @param pos start position - * @param width bit-width - */ - public static void intToBytes(int srcNum, byte[] result, int pos, int width) { - int temp = 0; - for (int i = 0; i < width; i++) { - temp = (pos + width - 1 - i) / 8; - try { - result[temp] = setByteN(result[temp], pos + width - 1 - i, getIntN(srcNum, i)); - } catch (Exception e) { - LOG.error("tsfile-common BytesUtils: cannot convert an integer {} to a byte array, pos {}, width {}", - srcNum, pos, width, e); - } - - } - } - - /** - * convert an long to a byte array which length is width, then copy this - * array to the parameter result from pos - * - * @param srcNum input long variable - * @param result byte array to convert - * @param pos start position - * @param width bit-width - */ - public static void longToBytes(long srcNum, byte[] result, int pos, int width) { - int temp = 0; - for (int i = 0; i < width; i++) { - temp = (pos + width - 1 - i) / 8; - try { - result[temp] = setByteN(result[temp], pos + width - 1 - i, getLongN(srcNum, i)); - } catch (Exception e) { - LOG.error("tsfile-common BytesUtils: cannot convert a long {} to a byte array, pos {}, width {}", - srcNum, pos, width, e); - } - - } - } - - /** - * given a byte array, read width bits from specified pos bits and convert - * it to an long - * - * @param result input byte array - * @param pos bit offset rather than byte offset - * @param width bit-width - * @return long variable - */ - public static long bytesToLong(byte[] result, int pos, int width) { - long value = 0; - int temp = 0; - for (int i = 0; i < width; i++) { - temp = (pos + width - 1 - i) / 8; - value = setLongN(value, i, getByteN(result[temp], pos + width - 1 - i)); - } - return value; - } - - /** - * read 8-byte array from an InputStream and convert it to a double number - * - * @param in InputStream - * @return double - * @throws IOException cannot read double from InputStream - */ - public static double readDouble(InputStream in) throws IOException { - byte[] b = safeReadInputStreamToBytes(8, in); - return BytesUtils.bytesToDouble(b); - } - - /** - * read 4-byte array from an InputStream and convert it to a float number - * - * @param in InputStream - * @return float - * @throws IOException cannot read float from InputStream - */ - public static float readFloat(InputStream in) throws IOException { - byte[] b = safeReadInputStreamToBytes(4, in); - return BytesUtils.bytesToFloat(b); - } - - /** - * read 1-byte array from an InputStream and convert it to 
a integer number - * - * @param in InputStream - * @return boolean - * @throws IOException cannot read boolean from InputStream - */ - public static boolean readBool(InputStream in) throws IOException { - byte[] b = safeReadInputStreamToBytes(1, in); - return BytesUtils.bytesToBool(b); - } - - /** - * read 4-byte array from an InputStream and convert it to a integer number - * - * @param in InputStream - * @return integer - * @throws IOException cannot read int from InputStream - */ - public static int readInt(InputStream in) throws IOException { - byte[] b = safeReadInputStreamToBytes(4, in); - return BytesUtils.bytesToInt(b); - } - - /** - * read 8-byte array from an InputStream and convert it to a long number - * - * @param in InputStream - * @return long - * @throws IOException cannot read long from InputStream - */ - public static long readLong(InputStream in) throws IOException { - byte[] b = safeReadInputStreamToBytes(8, in); - return BytesUtils.bytesToLong(b); - } - - /** - * read bytes specified length from InputStream safely. - * - * @param count number of byte to read - * @param in InputStream - * @return byte array - * @throws IOException cannot read from InputStream - */ - public static byte[] safeReadInputStreamToBytes(int count, InputStream in) throws IOException { - byte[] bytes = new byte[count]; - int readCount = 0; - while (readCount < count) { - readCount += in.read(bytes, readCount, count - readCount); - } - return bytes; - } + private static final Logger LOG = LoggerFactory.getLogger(BytesUtils.class); + + /** + * integer convert to byte[4] + * + * @param i integer to convert + * @return byte[4] for integer + */ + public static byte[] intToBytes(int i) { + byte[] result = new byte[4]; + result[0] = (byte) ((i >> 24) & 0xFF); + result[1] = (byte) ((i >> 16) & 0xFF); + result[2] = (byte) ((i >> 8) & 0xFF); + result[3] = (byte) (i & 0xFF); + return result; + } + + public static byte[] intToTwoBytes(int i) { + assert i <= 0xFFFF; + byte[] ret = new byte[2]; + ret[1] = (byte) (i & 0xFF); + ret[0] = (byte) ((i >> 8) & 0xFF); + return ret; + } + + public static int twoBytesToInt(byte[] ret) { + assert ret.length == 2; + int value = 0; + value |= ret[0]; + value = value << 8; + value |= ret[1]; + return value; + } + + /** + * integer convert to byte array, then write four bytes to parameter desc start from index:offset + * + * @param i integer to convert + * @param desc byte array be written + * @param offset position in desc byte array that conversion result should start + * @return byte array + */ + public static byte[] intToBytes(int i, byte[] desc, int offset) { + assert desc.length - offset >= 4; + desc[0 + offset] = (byte) ((i >> 24) & 0xFF); + desc[1 + offset] = (byte) ((i >> 16) & 0xFF); + desc[2 + offset] = (byte) ((i >> 8) & 0xFF); + desc[3 + offset] = (byte) (i & 0xFF); + return desc; + } + + /** + * byte[4] convert to integer + * + * @param bytes input byte[] + * @return integer + */ + public static int bytesToInt(byte[] bytes) { + int value = 0; + // high bit to low + for (int i = 0; i < 4; i++) { + int shift = (4 - 1 - i) * 8; + value += (bytes[i] & 0x000000FF) << shift; + } + return value; + } + + /** + * convert four-bytes byte array cut from parameters to integer. 
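+ * e.g. for bytes {0x00, 0x00, 0x03, 0xE8} and offset 0, the result is 1000.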
+ * + * @param bytes source bytes which length should be greater than 4 + * @param offset position in parameter byte array that conversion result should start + * @return integer + */ + public static int bytesToInt(byte[] bytes, int offset) { + assert bytes.length - offset >= 4; + int value = 0; + // high bit to low + for (int i = 0; i < 4; i++) { + int shift = (4 - 1 - i) * 8; + value += (bytes[offset + i] & 0x000000FF) << shift; + } + return value; + } + + /** + * convert float to byte array + * + * @param x float + * @return byte[4] + */ + public static byte[] floatToBytes(float x) { + byte[] b = new byte[4]; + int l = Float.floatToIntBits(x); + for (int i = 3; i >= 0; i--) { + b[i] = new Integer(l).byteValue(); + l = l >> 8; + } + return b; + } + + /** + * float convert to boolean, then write four bytes to parameter desc start from index:offset + * + * @param x float + * @param desc byte array be written + * @param offset position in desc byte array that conversion result should start + */ + public static void floatToBytes(float x, byte[] desc, int offset) { + assert desc.length - offset >= 4; + int l = Float.floatToIntBits(x); + for (int i = 3 + offset; i >= offset; i--) { + desc[i] = new Integer(l).byteValue(); + l = l >> 8; + } + } + + /** + * convert byte[4] to float + * + * @param b byte[4] + * @return float + */ + public static float bytesToFloat(byte[] b) { + assert b.length == 4; + int l; + l = b[3]; + l &= 0xff; + l |= ((long) b[2] << 8); + l &= 0xffff; + l |= ((long) b[1] << 16); + l &= 0xffffff; + l |= ((long) b[0] << 24); + return Float.intBitsToFloat(l); + } + + /** + * convert four-bytes byte array cut from parameters to float. + * + * @param b source bytes which length should be greater than 4 + * @param offset position in parameter byte array that conversion result should start + * @return float + */ + public static float bytesToFloat(byte[] b, int offset) { + assert b.length - offset >= 4; + int l; + l = b[offset + 3]; + l &= 0xff; + l |= ((long) b[offset + 2] << 8); + l &= 0xffff; + l |= ((long) b[offset + 1] << 16); + l &= 0xffffff; + l |= ((long) b[offset] << 24); + return Float.intBitsToFloat(l); + } + + /** + * convert double to byte array + * + * @param data double + * @return byte[8] + */ + public static byte[] doubleToBytes(double data) { + byte[] bytes = new byte[8]; + long value = Double.doubleToLongBits(data); + for (int i = 7; i >= 0; i--) { + bytes[i] = new Long(value).byteValue(); + value = value >> 8; + } + return bytes; + } + + /** + * convert double to byte into the given byte array started from offset. 
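+ * e.g. doubleToBytes(d, bytes, 2) writes the eight bytes of Double.doubleToLongBits(d), highest byte first, into bytes[2]..bytes[9].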
+ * + * @param d input double + * @param bytes target byte[] + * @param offset start pos + */ + public static void doubleToBytes(double d, byte[] bytes, int offset) { + assert bytes.length - offset >= 8; + long value = Double.doubleToLongBits(d); + for (int i = 7; i >= 0; i--) { + bytes[offset + i] = new Long(value).byteValue(); + value = value >> 8; + } + } + + /** + * convert byte array to double + * + * @param bytes byte[8] + * @return double + */ + public static double bytesToDouble(byte[] bytes) { + long value = bytes[7]; + value &= 0xff; + value |= ((long) bytes[6] << 8); + value &= 0xffff; + value |= ((long) bytes[5] << 16); + value &= 0xffffff; + value |= ((long) bytes[4] << 24); + value &= 0xffffffffL; + value |= ((long) bytes[3] << 32); + value &= 0xffffffffffL; + value |= ((long) bytes[2] << 40); + value &= 0xffffffffffffL; + value |= ((long) bytes[1] << 48); + value &= 0xffffffffffffffL; + value |= ((long) bytes[0] << 56); + return Double.longBitsToDouble(value); + } + + /** + * convert eight-bytes byte array cut from parameters to double. + * + * @param bytes source bytes which length should be greater than 8 + * @param offset position in parameter byte array that conversion result should start + * @return double + */ + public static double bytesToDouble(byte[] bytes, int offset) { + assert bytes.length - offset >= 8; + long value = bytes[offset + 7]; + value &= 0xff; + value |= ((long) bytes[offset + 6] << 8); + value &= 0xffff; + value |= ((long) bytes[offset + 5] << 16); + value &= 0xffffff; + value |= ((long) bytes[offset + 4] << 24); + value &= 0xffffffffL; + value |= ((long) bytes[offset + 3] << 32); + value &= 0xffffffffffL; + value |= ((long) bytes[offset + 2] << 40); + value &= 0xffffffffffffL; + value |= ((long) bytes[offset + 1] << 48); + value &= 0xffffffffffffffL; + value |= ((long) bytes[offset] << 56); + return Double.longBitsToDouble(value); + } + + /** + * convert boolean to byte[1] + * + * @param x boolean + * @return byte[] + */ + public static byte[] boolToBytes(boolean x) { + byte[] b = new byte[1]; + if (x) + b[0] = 1; + else + b[0] = 0; + return b; + } + + /** + * boolean convert to byte array, then write four bytes to parameter desc start from index:offset + * + * @param x input boolean + * @param desc byte array be written + * @param offset position in desc byte array that conversion result should start + * @return byte[1] + */ + public static byte[] boolToBytes(boolean x, byte[] desc, int offset) { + if (x) + desc[offset] = 1; + else + desc[offset] = 0; + return desc; + } + + /** + * byte array to boolean + * + * @param b input byte[1] + * @return boolean + */ + public static boolean bytesToBool(byte[] b) { + assert b.length == 1; + if (b[0] == 0) + return false; + else + return true; + } + + /** + * convert one-bytes byte array cut from parameters to boolean. + * + * @param b source bytes which length should be greater than 1 + * @param offset position in parameter byte array that conversion result should start + * @return boolean + */ + public static boolean bytesToBool(byte[] b, int offset) { + assert b.length - offset >= 1; + if (b[offset] == 0) + return false; + else + return true; + } + + /** + * long to byte array with default converting length 8. It means the length of result byte array + * is 8 + * + * @param num long variable to be converted + * @return byte[8] + */ + public static byte[] longToBytes(long num) { + return longToBytes(num, 8); + } + + /** + * specify the result array length. 
then, convert long to Big-Endian byte from low to high.
+ * e.g.
+ * the binary presentation of long number 1000L is {6 bytes equal 00000000} 00000011 11101000
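+ * (for reference, 1000L in hexadecimal is 0x3E8, i.e. the bytes 0x03 and 0xE8)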
+ * if len = 2, it will return byte array :{00000011 11101000}(Big-Endian) if len = 1, it will + * return byte array :{11101000} + * + * @param num long variable to be converted + * @param len length of result byte array + * @return byte array which length equals with parameter len + */ + public static byte[] longToBytes(long num, int len) { + byte[] byteNum = new byte[len]; + for (int ix = 0; ix < len; ix++) { + byteNum[len - ix - 1] = (byte) ((num >> ix * 8) & 0xFF); + } + return byteNum; + } + + /** + * long convert to byte array, then write four bytes to parameter desc start from index:offset + * + * @param num input long variable + * @param desc byte array be written + * @param offset_ position in desc byte array that conversion result should start + * @return byte array + */ + public static byte[] longToBytes(long num, byte[] desc, int offset_) { + for (int ix = 0; ix < 8; ++ix) { + int offset = 64 - (ix + 1) * 8; + desc[ix + offset_] = (byte) ((num >> offset) & 0xff); + } + return desc; + } + + /** + * convert byte array to long with default length 8. namely + * + * @param byteNum input byte array + * @return long + */ + public static long bytesToLong(byte[] byteNum) { + assert byteNum.length == 8; + return bytesToLong(byteNum, 8); + } + + /** + * specify the input byte array length. then, convert byte array to long value from low to high. + *
+ * e.g.
+ * the input byte array is {00000011 11101000}. if len = 2, return 1000 if len = 1, return + * 232(only calculate the low byte) + * + * @param byteNum byte array to be converted + * @param len length of input byte array to be converted + * @return long + */ + public static long bytesToLong(byte[] byteNum, int len) { + long num = 0; + for (int ix = 0; ix < len; ix++) { + num <<= 8; + num |= (byteNum[ix] & 0xff); + } + return num; + } + + /** + * convert eight-bytes byte array cut from parameters to long. + * + * @param byteNum source bytes which length should be greater than 8 + * @param len length of input byte array to be converted + * @param offset position in parameter byte array that conversion result should start + * @return long + */ + public static long bytesToLongFromOffset(byte[] byteNum, int len, int offset) { + assert byteNum.length - offset >= len; + long num = 0; + for (int ix = 0; ix < len; ix++) { + num <<= 8; + num |= (byteNum[offset + ix] & 0xff); + } + return num; + } + + /** + * convert string to byte array using UTF-8 encoding + * + * @param str input string + * @return byte array + */ + public static byte[] StringToBytes(String str) { + try { + return str.getBytes(TSFileConfig.STRING_ENCODING); + } catch (UnsupportedEncodingException e) { + LOG.error("catch UnsupportedEncodingException {}", str, e); + return null; + } + } + + public static String bytesToString(byte[] byteStr) { + try { + return new String(byteStr, TSFileConfig.STRING_ENCODING); + } catch (UnsupportedEncodingException e) { + LOG.error("catch UnsupportedEncodingException {}", byteStr, e); + return null; + } + } + + /** + * join two byte arrays to one + * + * @param a one of byte array + * @param b another byte array + * @return byte array after joining + */ + public static byte[] concatByteArray(byte[] a, byte[] b) { + byte[] c = new byte[a.length + b.length]; + System.arraycopy(a, 0, c, 0, a.length); + System.arraycopy(b, 0, c, a.length, b.length); + return c; + } + + /** + * join a list of byte arrays into one array + * + * @param list a list of byte array to join + * @return byte array after joining + */ + public static byte[] concatByteArrayList(List list) { + int size = list.size(); + int len = 0; + for (byte[] cs : list) { + len += cs.length; + } + byte[] result = new byte[len]; + int pos = 0; + for (int i = 0; i < size; i++) { + int l = list.get(i).length; + System.arraycopy(list.get(i), 0, result, pos, l); + pos += l; + } + return result; + } + + /** + * cut out specified length byte array from parameter start from input byte array src and return + * + * @param src input byte array + * @param start start index of src + * @param length cut off length + * @return byte array + */ + public static byte[] subBytes(byte[] src, int start, int length) { + if ((start + length) > src.length) + return null; + if (length <= 0) + return null; + byte[] result = new byte[length]; + for (int i = 0; i < length; i++) { + result[i] = src[start + i]; + } + return result; + } + + /** + * get one bit in input integer. the offset is from low to high and start with 0
+ * e.g.
+ * data:1000(00000000 00000000 00000011 11101000), if offset is 4, return 0(111 "0" 1000) if + * offset is 9, return 1(00000 "1" 1 11101000) + * + * @param data input int variable + * @param offset bit offset + * @return 0 or 1 + */ + public static int getIntN(int data, int offset) { + offset %= 32; + if ((data & (1 << (offset))) != 0) + return 1; + else + return 0; + } + + /** + * set one bit in input integer. the offset is from low to high and start with index 0
+ * e.g.
+ * data:1000({00000000 00000000 00000011 11101000}), if offset is 4, value is 1, return + * 1016({00000000 00000000 00000011 111 "1" 1000}) if offset is 9, value is 0 return 488({00000000 + * 00000000 000000 "0" 1 11101000}) if offset is 0, value is 0 return 1000(no change) + * + * @param data input int variable + * @param offset bit offset + * @param value value to set + * @return int variable + */ + public static int setIntN(int data, int offset, int value) { + offset %= 32; + if (value == 1) + return (data | (1 << (offset))); + else + return (data & ~(1 << (offset))); + } + + /** + * get one bit in input byte. the offset is from low to high and start with 0
+ * e.g.
+ * data:16(00010000), if offset is 4, return 1(000 "1" 0000) if offset is 7, return 0("0" 0010000) + * + * @param data input byte variable + * @param offset bit offset + * @return 0/1 + */ + public static int getByteN(byte data, int offset) { + offset %= 8; + if ((data & (1 << (7 - offset))) != 0) + return 1; + else + return 0; + } + + /** + * set one bit in input byte. the offset is from low to high and start with index 0
+ * e.g.
+ * data:16(00010000), if offset is 4, value is 0, return 0({000 "0" 0000}) if offset is 1, value + * is 1, return 18({00010010}) if offset is 0, value is 0, return 16(no change) + * + * @param data input byte variable + * @param offset bit offset + * @param value value to set + * @return byte variable + */ + public static byte setByteN(byte data, int offset, int value) { + offset %= 8; + if (value == 1) + return (byte) (data | (1 << (7 - offset))); + else + return (byte) (data & ~(1 << (7 - offset))); + } + + /** + * get one bit in input long. the offset is from low to high and start with 0
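+ * e.g.
+ * as with getIntN: data:1000L(... 00000000 00000011 11101000), if offset is 4, return 0; if offset is 9, return 1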
+ * + * @param data input long variable + * @param offset bit offset + * @return 0/1 + */ + public static int getLongN(long data, int offset) { + offset %= 64; + if ((data & (1l << (offset))) != 0) + return 1; + else + return 0; + } + + /** + * set one bit in input long. the offset is from low to high and start with index 0
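+ * e.g.
+ * as with setIntN: data:1000L, if offset is 4, value is 1, return 1016; if offset is 9, value is 0, return 488; if offset is 0, value is 0, return 1000 (no change)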
+ * + * @param data input long variable + * @param offset bit offset + * @param value value to set + * @return long variable + */ + public static long setLongN(long data, int offset, int value) { + offset %= 64; + if (value == 1) + return (data | (1l << (offset))); + else + return (data & ~(1l << (offset))); + } + + /** + * given a byte array, read width bits from specified position bits and convert it to an integer + * + * @param result input byte array + * @param pos bit offset rather than byte offset + * @param width bit-width + * @return integer variable + */ + public static int bytesToInt(byte[] result, int pos, int width) { + + int value = 0; + int temp = 0; + + for (int i = 0; i < width; i++) { + temp = (pos + width - 1 - i) / 8; + value = setIntN(value, i, getByteN(result[temp], pos + width - 1 - i)); + } + return value; + } + + /** + * convert an integer to a byte array which length is width, then copy this array to the parameter + * result from pos + * + * @param srcNum input integer variable + * @param result byte array to convert + * @param pos start position + * @param width bit-width + */ + public static void intToBytes(int srcNum, byte[] result, int pos, int width) { + int temp = 0; + for (int i = 0; i < width; i++) { + temp = (pos + width - 1 - i) / 8; + try { + result[temp] = setByteN(result[temp], pos + width - 1 - i, getIntN(srcNum, i)); + } catch (Exception e) { + LOG.error( + "tsfile-common BytesUtils: cannot convert an integer {} to a byte array, pos {}, width {}", + srcNum, pos, width, e); + } + + } + } + + /** + * convert an long to a byte array which length is width, then copy this array to the parameter + * result from pos + * + * @param srcNum input long variable + * @param result byte array to convert + * @param pos start position + * @param width bit-width + */ + public static void longToBytes(long srcNum, byte[] result, int pos, int width) { + int temp = 0; + for (int i = 0; i < width; i++) { + temp = (pos + width - 1 - i) / 8; + try { + result[temp] = setByteN(result[temp], pos + width - 1 - i, getLongN(srcNum, i)); + } catch (Exception e) { + LOG.error( + "tsfile-common BytesUtils: cannot convert a long {} to a byte array, pos {}, width {}", + srcNum, pos, width, e); + } + + } + } + + /** + * given a byte array, read width bits from specified pos bits and convert it to an long + * + * @param result input byte array + * @param pos bit offset rather than byte offset + * @param width bit-width + * @return long variable + */ + public static long bytesToLong(byte[] result, int pos, int width) { + long value = 0; + int temp = 0; + for (int i = 0; i < width; i++) { + temp = (pos + width - 1 - i) / 8; + value = setLongN(value, i, getByteN(result[temp], pos + width - 1 - i)); + } + return value; + } + + /** + * read 8-byte array from an InputStream and convert it to a double number + * + * @param in InputStream + * @return double + * @throws IOException cannot read double from InputStream + */ + public static double readDouble(InputStream in) throws IOException { + byte[] b = safeReadInputStreamToBytes(8, in); + return BytesUtils.bytesToDouble(b); + } + + /** + * read 4-byte array from an InputStream and convert it to a float number + * + * @param in InputStream + * @return float + * @throws IOException cannot read float from InputStream + */ + public static float readFloat(InputStream in) throws IOException { + byte[] b = safeReadInputStreamToBytes(4, in); + return BytesUtils.bytesToFloat(b); + } + + /** + * read 1-byte array from an InputStream and convert it to a 
integer number + * + * @param in InputStream + * @return boolean + * @throws IOException cannot read boolean from InputStream + */ + public static boolean readBool(InputStream in) throws IOException { + byte[] b = safeReadInputStreamToBytes(1, in); + return BytesUtils.bytesToBool(b); + } + + /** + * read 4-byte array from an InputStream and convert it to a integer number + * + * @param in InputStream + * @return integer + * @throws IOException cannot read int from InputStream + */ + public static int readInt(InputStream in) throws IOException { + byte[] b = safeReadInputStreamToBytes(4, in); + return BytesUtils.bytesToInt(b); + } + + /** + * read 8-byte array from an InputStream and convert it to a long number + * + * @param in InputStream + * @return long + * @throws IOException cannot read long from InputStream + */ + public static long readLong(InputStream in) throws IOException { + byte[] b = safeReadInputStreamToBytes(8, in); + return BytesUtils.bytesToLong(b); + } + + /** + * read bytes specified length from InputStream safely. + * + * @param count number of byte to read + * @param in InputStream + * @return byte array + * @throws IOException cannot read from InputStream + */ + public static byte[] safeReadInputStreamToBytes(int count, InputStream in) throws IOException { + byte[] bytes = new byte[count]; + int readCount = 0; + while (readCount < count) { + readCount += in.read(bytes, readCount, count - readCount); + } + return bytes; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ITsRandomAccessFileReader.java b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ITsRandomAccessFileReader.java index 647ebdb2..b54d007e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ITsRandomAccessFileReader.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ITsRandomAccessFileReader.java @@ -9,15 +9,15 @@ */ public interface ITsRandomAccessFileReader { - void seek(long offset) throws IOException; + void seek(long offset) throws IOException; - int read() throws IOException; + int read() throws IOException; - int read(byte[] b, int off, int len) throws IOException; + int read(byte[] b, int off, int len) throws IOException; - long length() throws IOException; + long length() throws IOException; - int readInt() throws IOException; + int readInt() throws IOException; - void close() throws IOException; + void close() throws IOException; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ITsRandomAccessFileWriter.java b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ITsRandomAccessFileWriter.java index 7d0474c4..61da2379 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ITsRandomAccessFileWriter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ITsRandomAccessFileWriter.java @@ -13,15 +13,15 @@ * @author kangrong */ public interface ITsRandomAccessFileWriter { - long getPos() throws IOException; + long getPos() throws IOException; - void seek(long offset) throws IOException; + void seek(long offset) throws IOException; - void write(byte[] b) throws IOException; + void write(byte[] b) throws IOException; - void write(int b) throws IOException; + void write(int b) throws IOException; - void close() throws IOException; + void close() throws IOException; - OutputStream getOutputStream(); + OutputStream getOutputStream(); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ListByteArrayOutputStream.java b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ListByteArrayOutputStream.java index 2b318156..e3f7b271 
100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ListByteArrayOutputStream.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ListByteArrayOutputStream.java @@ -13,76 +13,76 @@ * */ public class ListByteArrayOutputStream { - private List list; - private int totalSize = 0; + private List list; + private int totalSize = 0; - public ListByteArrayOutputStream(PublicBAOS... param) { - list = new ArrayList<>(); - for (PublicBAOS out : param) { - list.add(out); - totalSize += out.size(); - } + public ListByteArrayOutputStream(PublicBAOS... param) { + list = new ArrayList<>(); + for (PublicBAOS out : param) { + list.add(out); + totalSize += out.size(); } + } - /** - * Constructs ListByteArrayOutputStream using ByteArrayOutputStream. - * - * @param out the data source for constructing a ListByteArrayOutputStream - * @return a new ListByteArrayOutputStream containing data in out. - */ - public static ListByteArrayOutputStream from(PublicBAOS out) { - return new ListByteArrayOutputStream(out); - } + /** + * Constructs ListByteArrayOutputStream using ByteArrayOutputStream. + * + * @param out the data source for constructing a ListByteArrayOutputStream + * @return a new ListByteArrayOutputStream containing data in out. + */ + public static ListByteArrayOutputStream from(PublicBAOS out) { + return new ListByteArrayOutputStream(out); + } - /** - * Inputs an OutputStream as parameter. Writes the complete contents in list to - * the specified output stream argument. - * - * @param out the output stream to write the data. - * @throws IOException if an I/O error occurs. - */ - public void writeAllTo(OutputStream out) throws IOException { - for (PublicBAOS baos : list) - baos.writeTo(out); - } + /** + * Inputs an OutputStream as parameter. Writes the complete contents in list to the + * specified output stream argument. + * + * @param out the output stream to write the data. + * @throws IOException if an I/O error occurs. + */ + public void writeAllTo(OutputStream out) throws IOException { + for (PublicBAOS baos : list) + baos.writeTo(out); + } - /** - * get the total size of this class - * - * @return total size - */ - public int size() { - return totalSize; - } + /** + * get the total size of this class + * + * @return total size + */ + public int size() { + return totalSize; + } - /** - * Creates a new PublicBAOS which specified size is the current - * total size and write the current contents in list into it. - * - * @return the current contents of this class, as a byte array. - * @throws IOException if an I/O error occurs. - */ - public byte[] toByteArray() throws IOException { - PublicBAOS baos = new PublicBAOS(totalSize); - this.writeAllTo(baos); - return baos.getBuf(); - } + /** + * Creates a new PublicBAOS which specified size is the current total size and write + * the current contents in list into it. + * + * @return the current contents of this class, as a byte array. + * @throws IOException if an I/O error occurs. + */ + public byte[] toByteArray() throws IOException { + PublicBAOS baos = new PublicBAOS(totalSize); + this.writeAllTo(baos); + return baos.getBuf(); + } - /** - * Appends a ByteArrayOutputStream into this class. - * - * @param out a output stream to be appended. - */ - public void append(PublicBAOS out) { - list.add(out); - totalSize += out.size(); - } + /** + * Appends a ByteArrayOutputStream into this class. + * + * @param out a output stream to be appended. 
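+ * The stream itself is added to the internal list and its size() is added to the total size.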
+ */ + public void append(PublicBAOS out) { + list.add(out); + totalSize += out.size(); + } - /** - * Resets the list and totalSize fields. - */ - public void reset() { - list.clear(); - totalSize = 0; - } + /** + * Resets the list and totalSize fields. + */ + public void reset() { + list.clear(); + totalSize = 0; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/Pair.java b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/Pair.java index f9584716..f899d0a5 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/Pair.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/Pair.java @@ -9,47 +9,47 @@ * @author kangrong */ public class Pair { - public L left; - public R right; + public L left; + public R right; - public Pair(L l, R r) { - left = l; - right = r; - } + public Pair(L l, R r) { + left = l; + right = r; + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((left == null) ? 0 : left.hashCode()); - result = prime * result + ((right == null) ? 0 : right.hashCode()); - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((left == null) ? 0 : left.hashCode()); + result = prime * result + ((right == null) ? 0 : right.hashCode()); + return result; + } - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - Pair other = (Pair) obj; - if (left == null) { - if (other.left != null) - return false; - } else if (!left.equals(other.left)) - return false; - if (right == null) { - if (other.right != null) - return false; - } else if (!right.equals(other.right)) - return false; - return true; - } + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Pair other = (Pair) obj; + if (left == null) { + if (other.left != null) + return false; + } else if (!left.equals(other.left)) + return false; + if (right == null) { + if (other.right != null) + return false; + } else if (!right.equals(other.right)) + return false; + return true; + } - @Override - public String toString() { - return "<" + left + "," + right + ">"; - } + @Override + public String toString() { + return "<" + left + "," + right + ">"; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/PublicBAOS.java b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/PublicBAOS.java index 976a96e5..1bf9a0d9 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/PublicBAOS.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/PublicBAOS.java @@ -4,36 +4,36 @@ import java.io.ByteArrayOutputStream; /** - * A subclass extending ByteArrayOutputStream. It's used to return the byte array directly. - * Note that the size of byte array is large than actual size of valid contents, thus it's used cooperating - * with size() or capacity = size + * A subclass extending ByteArrayOutputStream. It's used to return the byte array + * directly. 
Note that the size of byte array is large than actual size of valid contents, thus it's + * used cooperating with size() or capacity = size */ public class PublicBAOS extends ByteArrayOutputStream { - public PublicBAOS(int size) { - super(size); - } + public PublicBAOS(int size) { + super(size); + } - public PublicBAOS() { - super(); - } + public PublicBAOS() { + super(); + } - /** - * get current all bytes data - * - * @return all bytes data - */ - public byte[] getBuf() { + /** + * get current all bytes data + * + * @return all bytes data + */ + public byte[] getBuf() { - return this.buf; - } + return this.buf; + } - /** - * Construct one {@link ByteArrayInputStream} from the buff data - * - * @return one {@link ByteArrayInputStream} have all buff data - */ - public ByteArrayInputStream transformToInputStream() { - return new ByteArrayInputStream(this.buf, 0, size()); - } + /** + * Construct one {@link ByteArrayInputStream} from the buff data + * + * @return one {@link ByteArrayInputStream} have all buff data + */ + public ByteArrayInputStream transformToInputStream() { + return new ByteArrayInputStream(this.buf, 0, size()); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ReadWriteStreamUtils.java b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ReadWriteStreamUtils.java index 5151c9cc..9fae8c62 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ReadWriteStreamUtils.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/ReadWriteStreamUtils.java @@ -10,180 +10,185 @@ */ public class ReadWriteStreamUtils { - /** - * check all number in a int list and find max bit width - * - * @param list input list - * @return max bit width - */ - public static int getIntMaxBitWidth(List list) { - int max = 1; - for (int num : list) { - int bitWidth = 32 - Integer.numberOfLeadingZeros(num); - max = bitWidth > max ? bitWidth : max; - } - return max; + /** + * check all number in a int list and find max bit width + * + * @param list input list + * @return max bit width + */ + public static int getIntMaxBitWidth(List list) { + int max = 1; + for (int num : list) { + int bitWidth = 32 - Integer.numberOfLeadingZeros(num); + max = bitWidth > max ? bitWidth : max; } + return max; + } - /** - * check all number in a long list and find max bit width - * - * @param list input list - * @return max bit width - */ - public static int getLongMaxBitWidth(List list) { - int max = 1; - for (long num : list) { - int bitWidth = 64 - Long.numberOfLeadingZeros(num); - max = bitWidth > max ? bitWidth : max; - } - return max; + /** + * check all number in a long list and find max bit width + * + * @param list input list + * @return max bit width + */ + public static int getLongMaxBitWidth(List list) { + int max = 1; + for (long num : list) { + int bitWidth = 64 - Long.numberOfLeadingZeros(num); + max = bitWidth > max ? 
bitWidth : max; } + return max; + } - public static byte[] getUnsignedVarInt(int value) { - int preValue = value; - int length = 0; - while ((value & 0xFFFFFF80) != 0L) { - length++; - value >>>= 7; - } - length++; + public static byte[] getUnsignedVarInt(int value) { + int preValue = value; + int length = 0; + while ((value & 0xFFFFFF80) != 0L) { + length++; + value >>>= 7; + } + length++; - byte[] res = new byte[length]; - value = preValue; - int i = 0; - while ((value & 0xFFFFFF80) != 0L) { - res[i] = (byte) ((value & 0x7F) | 0x80); - value >>>= 7; - i++; - } - res[i] = (byte) (value & 0x7F); - return res; + byte[] res = new byte[length]; + value = preValue; + int i = 0; + while ((value & 0xFFFFFF80) != 0L) { + res[i] = (byte) ((value & 0x7F) | 0x80); + value >>>= 7; + i++; } + res[i] = (byte) (value & 0x7F); + return res; + } - /** - * read an unsigned var int in stream and transform it to int format - * - * @param in stream to read an unsigned var int - * @return integer value - * @throws IOException exception in IO - */ - public static int readUnsignedVarInt(InputStream in) throws IOException { - int value = 0; - int i = 0; - int b; - while (((b = in.read()) & 0x80) != 0) { - value |= (b & 0x7F) << i; - i += 7; - } - return value | (b << i); + /** + * read an unsigned var int in stream and transform it to int format + * + * @param in stream to read an unsigned var int + * @return integer value + * @throws IOException exception in IO + */ + public static int readUnsignedVarInt(InputStream in) throws IOException { + int value = 0; + int i = 0; + int b; + while (((b = in.read()) & 0x80) != 0) { + value |= (b & 0x7F) << i; + i += 7; } + return value | (b << i); + } - /** - * write a value to stream using unsigned var int format. for example, int - * 123456789 has its binary format 111010-1101111-0011010-0010101, function - * writeUnsignedVarInt will split every seven bits and write them to stream - * from low bit to high bit like: 1-0010101 1-0011010 1-1101111 0-0111010 1 - * represents has next byte to write, 0 represents number end - * - * @param value value to write into stream - * @param out output stream - * @throws IOException exception in IO - */ - public static void writeUnsignedVarInt(int value, OutputStream out) throws IOException { - while ((value & 0xFFFFFF80) != 0L) { - out.write((value & 0x7F) | 0x80); - value >>>= 7; - } - out.write(value & 0x7F); + /** + * write a value to stream using unsigned var int format. 
for example, int 123456789 has its + * binary format 111010-1101111-0011010-0010101, function writeUnsignedVarInt will split every + * seven bits and write them to stream from low bit to high bit like: 1-0010101 1-0011010 + * 1-1101111 0-0111010 1 represents has next byte to write, 0 represents number end + * + * @param value value to write into stream + * @param out output stream + * @throws IOException exception in IO + */ + public static void writeUnsignedVarInt(int value, OutputStream out) throws IOException { + while ((value & 0xFFFFFF80) != 0L) { + out.write((value & 0x7F) | 0x80); + value >>>= 7; } + out.write(value & 0x7F); + } - /** - * write integer value using special bit to output stream - * - * @param value value to write to stream - * @param out output stream - * @param bitWidth bit length - * @throws IOException exception in IO - */ - public static void writeIntLittleEndianPaddedOnBitWidth(int value, OutputStream out, int bitWidth) - throws IOException { - int paddedByteNum = (bitWidth + 7) / 8; - if (paddedByteNum > 4) { - throw new IOException(String.format( - "tsfile-common BytesUtils: encountered value (%d) that requires more than 4 bytes", paddedByteNum)); - } - int offset = 0; - while (paddedByteNum > 0) { - out.write((value >>> offset) & 0xFF); - offset += 8; - paddedByteNum--; - } + /** + * write integer value using special bit to output stream + * + * @param value value to write to stream + * @param out output stream + * @param bitWidth bit length + * @throws IOException exception in IO + */ + public static void writeIntLittleEndianPaddedOnBitWidth(int value, OutputStream out, int bitWidth) + throws IOException { + int paddedByteNum = (bitWidth + 7) / 8; + if (paddedByteNum > 4) { + throw new IOException(String.format( + "tsfile-common BytesUtils: encountered value (%d) that requires more than 4 bytes", + paddedByteNum)); + } + int offset = 0; + while (paddedByteNum > 0) { + out.write((value >>> offset) & 0xFF); + offset += 8; + paddedByteNum--; } + } - /** - * write long value using special bit to output stream - * - * @param value value to write to stream - * @param out output stream - * @param bitWidth bit length - * @throws IOException exception in IO - */ - public static void writeLongLittleEndianPaddedOnBitWidth(long value, OutputStream out, int bitWidth) - throws IOException { - int paddedByteNum = (bitWidth + 7) / 8; - if (paddedByteNum > 8) { - throw new IOException(String.format( - "tsfile-common BytesUtils: encountered value (%d) that requires more than 4 bytes", paddedByteNum)); - } - out.write(BytesUtils.longToBytes(value, paddedByteNum)); + /** + * write long value using special bit to output stream + * + * @param value value to write to stream + * @param out output stream + * @param bitWidth bit length + * @throws IOException exception in IO + */ + public static void writeLongLittleEndianPaddedOnBitWidth(long value, OutputStream out, + int bitWidth) throws IOException { + int paddedByteNum = (bitWidth + 7) / 8; + if (paddedByteNum > 8) { + throw new IOException(String.format( + "tsfile-common BytesUtils: encountered value (%d) that requires more than 4 bytes", + paddedByteNum)); } + out.write(BytesUtils.longToBytes(value, paddedByteNum)); + } - /** - * read integer value using special bit from input stream - * - * @param in input stream - * @param bitWidth bit length - * @return integer value - * @throws IOException exception in IO - */ - public static int readIntLittleEndianPaddedOnBitWidth(InputStream in, int bitWidth) throws IOException { - int 
paddedByteNum = (bitWidth + 7) / 8; - if (paddedByteNum > 4) { - throw new IOException(String.format( - "tsfile-common BytesUtils: encountered value (%d) that requires more than 4 bytes", paddedByteNum)); - } - int result = 0; - int offset = 0; - while (paddedByteNum > 0) { - int ch = in.read(); - result += ch << offset; - offset += 8; - paddedByteNum--; - } - return result; + /** + * read integer value using special bit from input stream + * + * @param in input stream + * @param bitWidth bit length + * @return integer value + * @throws IOException exception in IO + */ + public static int readIntLittleEndianPaddedOnBitWidth(InputStream in, int bitWidth) + throws IOException { + int paddedByteNum = (bitWidth + 7) / 8; + if (paddedByteNum > 4) { + throw new IOException(String.format( + "tsfile-common BytesUtils: encountered value (%d) that requires more than 4 bytes", + paddedByteNum)); } + int result = 0; + int offset = 0; + while (paddedByteNum > 0) { + int ch = in.read(); + result += ch << offset; + offset += 8; + paddedByteNum--; + } + return result; + } - /** - * read long value using special bit from input stream - * - * @param in input stream - * @param bitWidth bit length - * @return long long value - * @throws IOException exception in IO - */ - public static long readLongLittleEndianPaddedOnBitWidth(InputStream in, int bitWidth) throws IOException { - int paddedByteNum = (bitWidth + 7) / 8; - if (paddedByteNum > 8) { - throw new IOException(String.format( - "tsfile-common BytesUtils: encountered value (%d) that requires more than 4 bytes", paddedByteNum)); - } - long result = 0; - for (int i = 0; i < paddedByteNum; i++) { - int ch = in.read(); - result <<= 8; - result |= (ch & 0xff); - } - return result; + /** + * read long value using special bit from input stream + * + * @param in input stream + * @param bitWidth bit length + * @return long long value + * @throws IOException exception in IO + */ + public static long readLongLittleEndianPaddedOnBitWidth(InputStream in, int bitWidth) + throws IOException { + int paddedByteNum = (bitWidth + 7) / 8; + if (paddedByteNum > 8) { + throw new IOException(String.format( + "tsfile-common BytesUtils: encountered value (%d) that requires more than 4 bytes", + paddedByteNum)); + } + long result = 0; + for (int i = 0; i < paddedByteNum; i++) { + int ch = in.read(); + result <<= 8; + result |= (ch & 0xff); } + return result; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/TsRandomAccessFileWriter.java b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/TsRandomAccessFileWriter.java index 37362be3..4ee5837d 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/common/utils/TsRandomAccessFileWriter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/common/utils/TsRandomAccessFileWriter.java @@ -6,7 +6,8 @@ import java.io.RandomAccessFile; /** - * RandomAccessOutputStream implements the tsfile file writer interface and extends OutputStream.
+ * RandomAccessOutputStream implements the tsfile file writer interface and extends OutputStream. + *
* The main difference between RandomAccessOutputStream and general OutputStream * is:RandomAccessOutputStream provide method {@code getPos} for random accessing. It also * implements {@code getOutputStream} to return an OutputStream supporting tsfile-format @@ -14,64 +15,66 @@ * @author kangrong */ public class TsRandomAccessFileWriter implements ITsRandomAccessFileWriter { - private static final String DEFAULT_FILE_MODE = "rw"; - private RandomAccessFile out; - private OutputStream outputStream; + private static final String DEFAULT_FILE_MODE = "rw"; + private RandomAccessFile out; + private OutputStream outputStream; - public TsRandomAccessFileWriter(File file) throws IOException { - this(file, DEFAULT_FILE_MODE); - } + public TsRandomAccessFileWriter(File file) throws IOException { + this(file, DEFAULT_FILE_MODE); + } - public TsRandomAccessFileWriter(File file, String mode) throws IOException { - out = new RandomAccessFile(file, mode); - outputStream=new OutputStream() { - @Override - public void write(int b) throws IOException { - TsRandomAccessFileWriter.this.write(b); - } - @Override - public void write(byte b[], int off, int len) throws IOException { - out.write(b, off, len); - } - @Override - public void write(byte b[]) throws IOException { - TsRandomAccessFileWriter.this.write(b); - } + public TsRandomAccessFileWriter(File file, String mode) throws IOException { + out = new RandomAccessFile(file, mode); + outputStream = new OutputStream() { + @Override + public void write(int b) throws IOException { + TsRandomAccessFileWriter.this.write(b); + } - @Override - public void close() throws IOException { - TsRandomAccessFileWriter.this.close(); - } - }; - } - - @Override - public void write(int b) throws IOException { - out.write(b); - } + @Override + public void write(byte b[], int off, int len) throws IOException { + out.write(b, off, len); + } - @Override - public void write(byte b[]) throws IOException { - out.write(b); - } + @Override + public void write(byte b[]) throws IOException { + TsRandomAccessFileWriter.this.write(b); + } - @Override - public long getPos() throws IOException { - return out.length(); - } + @Override + public void close() throws IOException { + TsRandomAccessFileWriter.this.close(); + } + }; + } - @Override - public void seek(long offset) throws IOException { - out.seek(offset); - } + @Override + public void write(int b) throws IOException { + out.write(b); + } - @Override - public void close() throws IOException { - out.close(); - } + @Override + public void write(byte b[]) throws IOException { + out.write(b); + } - @Override - public OutputStream getOutputStream() { - return outputStream; - } + @Override + public long getPos() throws IOException { + return out.length(); + } + + @Override + public void seek(long offset) throws IOException { + out.seek(offset); + } + + @Override + public void close() throws IOException { + out.close(); + } + + @Override + public OutputStream getOutputStream() { + return outputStream; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/compress/Compressor.java b/src/main/java/cn/edu/tsinghua/tsfile/compress/Compressor.java index cccf1028..04ba977f 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/compress/Compressor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/compress/Compressor.java @@ -7,75 +7,74 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xerial.snappy.Snappy; - import java.io.IOException; /** * compress data according to type in schema */ public abstract class Compressor { - public 
static Compressor getCompressor(String name) { - return getCompressor(CompressionTypeName.valueOf(name)); - } + public static Compressor getCompressor(String name) { + return getCompressor(CompressionTypeName.valueOf(name)); + } - public static Compressor getCompressor(CompressionTypeName name) { - if (name == null) { - throw new CompressionTypeNotSupportedException("NULL"); - } - switch (name) { - case UNCOMPRESSED: - return new NoCompressor(); - case SNAPPY: - return new SnappyCompressor(); - default: - throw new CompressionTypeNotSupportedException(name.toString()); - } + public static Compressor getCompressor(CompressionTypeName name) { + if (name == null) { + throw new CompressionTypeNotSupportedException("NULL"); + } + switch (name) { + case UNCOMPRESSED: + return new NoCompressor(); + case SNAPPY: + return new SnappyCompressor(); + default: + throw new CompressionTypeNotSupportedException(name.toString()); } + } - public abstract ListByteArrayOutputStream compress(ListByteArrayOutputStream ListByteArray); + public abstract ListByteArrayOutputStream compress(ListByteArrayOutputStream ListByteArray); - public abstract CompressionTypeName getCodecName(); + public abstract CompressionTypeName getCodecName(); - /** - * NoCompressor will do nothing for data and return the input data directly. - * - * @author kangrong - */ - static public class NoCompressor extends Compressor { + /** + * NoCompressor will do nothing for data and return the input data directly. + * + * @author kangrong + */ + static public class NoCompressor extends Compressor { - @Override - public ListByteArrayOutputStream compress(ListByteArrayOutputStream ListByteArray) { - return ListByteArray; - } + @Override + public ListByteArrayOutputStream compress(ListByteArrayOutputStream ListByteArray) { + return ListByteArray; + } - @Override - public CompressionTypeName getCodecName() { - return CompressionTypeName.UNCOMPRESSED; - } + @Override + public CompressionTypeName getCodecName() { + return CompressionTypeName.UNCOMPRESSED; } + } - static public class SnappyCompressor extends Compressor { - private static final Logger LOGGER = LoggerFactory.getLogger(SnappyCompressor.class); + static public class SnappyCompressor extends Compressor { + private static final Logger LOGGER = LoggerFactory.getLogger(SnappyCompressor.class); - @Override - public ListByteArrayOutputStream compress(ListByteArrayOutputStream listByteArray) { - if (listByteArray == null) { - return null; - } - PublicBAOS out = new PublicBAOS(); - try { - out.write(Snappy.compress(listByteArray.toByteArray())); - } catch (IOException e) { - LOGGER.error( - "tsfile-compression SnappyCompressor: errors occurs when compress input byte, ListByteArray is {}, ByteArrayOutputStream is {}", - listByteArray, out, e); - } - return ListByteArrayOutputStream.from(out); - } + @Override + public ListByteArrayOutputStream compress(ListByteArrayOutputStream listByteArray) { + if (listByteArray == null) { + return null; + } + PublicBAOS out = new PublicBAOS(); + try { + out.write(Snappy.compress(listByteArray.toByteArray())); + } catch (IOException e) { + LOGGER.error( + "tsfile-compression SnappyCompressor: errors occurs when compress input byte, ListByteArray is {}, ByteArrayOutputStream is {}", + listByteArray, out, e); + } + return ListByteArrayOutputStream.from(out); + } - @Override - public CompressionTypeName getCodecName() { - return CompressionTypeName.SNAPPY; - } + @Override + public CompressionTypeName getCodecName() { + return CompressionTypeName.SNAPPY; } + } } 
diff --git a/src/main/java/cn/edu/tsinghua/tsfile/compress/UnCompressor.java b/src/main/java/cn/edu/tsinghua/tsfile/compress/UnCompressor.java index f4bac317..636481bf 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/compress/UnCompressor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/compress/UnCompressor.java @@ -5,66 +5,65 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xerial.snappy.Snappy; - import java.io.IOException; /** * uncompress data according to type in metadata */ public abstract class UnCompressor { - public static UnCompressor getUnCompressor(CompressionTypeName name) { - if (name == null) { - throw new CompressionTypeNotSupportedException("NULL"); - } - switch (name) { - case UNCOMPRESSED: - return new NoUnCompressor(); - case SNAPPY: - return new SnappyUnCompressor(); - default: - throw new CompressionTypeNotSupportedException(name.toString()); - } + public static UnCompressor getUnCompressor(CompressionTypeName name) { + if (name == null) { + throw new CompressionTypeNotSupportedException("NULL"); + } + switch (name) { + case UNCOMPRESSED: + return new NoUnCompressor(); + case SNAPPY: + return new SnappyUnCompressor(); + default: + throw new CompressionTypeNotSupportedException(name.toString()); } + } - public abstract byte[] uncompress(byte[] byteArray); + public abstract byte[] uncompress(byte[] byteArray); - public abstract CompressionTypeName getCodecName(); + public abstract CompressionTypeName getCodecName(); - static public class NoUnCompressor extends UnCompressor { + static public class NoUnCompressor extends UnCompressor { - @Override - public byte[] uncompress(byte[] byteArray) { - return byteArray; - } + @Override + public byte[] uncompress(byte[] byteArray) { + return byteArray; + } - @Override - public CompressionTypeName getCodecName() { - return CompressionTypeName.UNCOMPRESSED; - } + @Override + public CompressionTypeName getCodecName() { + return CompressionTypeName.UNCOMPRESSED; } + } - static public class SnappyUnCompressor extends UnCompressor { - private static final Logger LOGGER = LoggerFactory.getLogger(SnappyUnCompressor.class); + static public class SnappyUnCompressor extends UnCompressor { + private static final Logger LOGGER = LoggerFactory.getLogger(SnappyUnCompressor.class); - @Override - public byte[] uncompress(byte[] bytes) { - if (bytes == null) { - return null; - } + @Override + public byte[] uncompress(byte[] bytes) { + if (bytes == null) { + return null; + } - try { - return Snappy.uncompress(bytes); - } catch (IOException e) { - LOGGER.error( - "tsfile-compression SnappyUnCompressor: errors occurs when uncompress input byte, bytes is {}", - bytes, e); - } - return null; - } + try { + return Snappy.uncompress(bytes); + } catch (IOException e) { + LOGGER.error( + "tsfile-compression SnappyUnCompressor: errors occurs when uncompress input byte, bytes is {}", + bytes, e); + } + return null; + } - @Override - public CompressionTypeName getCodecName() { - return CompressionTypeName.SNAPPY; - } + @Override + public CompressionTypeName getCodecName() { + return CompressionTypeName.SNAPPY; } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/IntPacker.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/IntPacker.java index 02ada119..fcc6042c 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/IntPacker.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/IntPacker.java @@ -1,152 +1,154 @@ package cn.edu.tsinghua.tsfile.encoding.bitpacking; /** - * 
This class is used to encode(decode) Integer in Java with specified bit-width. - * User need to guarantee that the length of every given Integer in binary mode - * is less than or equal to the bit-width. - *

- * e.g., if bit-width is 4, then Integer '16'(10000)b is not allowed but '15'(1111)b is allowed. - *

- * For a full example, - * Width: 3 - * Input: 5 4 7 3 0 1 3 2 - *

- * Output: - *

- * +-----------------------+ +-----------------------+ +-----------------------+ - * |1 |0 |1 |1 |0 |0 |1 |1 | |1 |0 |1 |1 |0 |0 |0 |0 | |0 |1 |0 |1 |1 |0 |1 |0 | - * +-----------------------+ +-----------------------+ +-----------------------+ - * +-----+ +-----+ +---------+ +-----+ +-----+ +---------+ +-----+ +-----+ - * 5 4 7 3 0 1 3 2 + * This class is used to encode(decode) Integer in Java with specified bit-width. User need to + * guarantee that the length of every given Integer in binary mode is less than or equal to the + * bit-width. + *

+ * e.g., if bit-width is 4, then Integer '16'(10000)b is not allowed but '15'(1111)b is allowed. + *

+ * For a full example, Width: 3 Input: 5 4 7 3 0 1 3 2 + *

+ * Output: + *

+ * +-----------------------+ +-----------------------+ +-----------------------+ |1 |0 |1 |1 |0 |0 + * |1 |1 | |1 |0 |1 |1 |0 |0 |0 |0 | |0 |1 |0 |1 |1 |0 |1 |0 | +-----------------------+ + * +-----------------------+ +-----------------------+ +-----+ +-----+ +---------+ +-----+ +-----+ + * +---------+ +-----+ +-----+ 5 4 7 3 0 1 3 2 * - * @author Zhang Jinrui + * @author Zhang Jinrui */ public class IntPacker { - /** - * Number of Integers for each pack operation - */ - private static final int NUM_OF_INTS = 8; - /** - * bit-width - */ - private int width; + /** + * Number of Integers for each pack operation + */ + private static final int NUM_OF_INTS = 8; + /** + * bit-width + */ + private int width; - public IntPacker(int width) { - this.width = width; - } + public IntPacker(int width) { + this.width = width; + } - /** - * Encode 8 ({@link IntPacker#NUM_OF_INTS}) Integers from the array 'values' with specified bit-width to bytes - * - * @param values - array where '8 Integers' are in - * @param offset - the offset of first Integer to be encoded - * @param buf - encoded bytes, buf size must be equal to ({@link IntPacker#NUM_OF_INTS} * {@link IntPacker#width} / 8) - */ - public void pack8Values(int[] values, int offset, byte[] buf) { - int bufIdx = 0; - int valueIdx = offset; - //remaining bits for the current unfinished Integer - int leftBit = 0; + /** + * Encode 8 ({@link IntPacker#NUM_OF_INTS}) Integers from the array 'values' with specified + * bit-width to bytes + * + * @param values - array where '8 Integers' are in + * @param offset - the offset of first Integer to be encoded + * @param buf - encoded bytes, buf size must be equal to ({@link IntPacker#NUM_OF_INTS} * + * {@link IntPacker#width} / 8) + */ + public void pack8Values(int[] values, int offset, byte[] buf) { + int bufIdx = 0; + int valueIdx = offset; + // remaining bits for the current unfinished Integer + int leftBit = 0; - while (valueIdx < NUM_OF_INTS + offset) { - // buffer is used for saving 32 bits as a part of result - int buffer = 0; - // remaining size of bits in the 'buffer' - int leftSize = 32; + while (valueIdx < NUM_OF_INTS + offset) { + // buffer is used for saving 32 bits as a part of result + int buffer = 0; + // remaining size of bits in the 'buffer' + int leftSize = 32; - // encode the left bits of current Integer to 'buffer' - if (leftBit > 0) { - buffer |= (values[valueIdx] << (32 - leftBit)); - leftSize -= leftBit; - leftBit = 0; - valueIdx++; - } + // encode the left bits of current Integer to 'buffer' + if (leftBit > 0) { + buffer |= (values[valueIdx] << (32 - leftBit)); + leftSize -= leftBit; + leftBit = 0; + valueIdx++; + } - while (leftSize >= width && valueIdx < NUM_OF_INTS + offset) { - //encode one Integer to the 'buffer' - buffer |= (values[valueIdx] << (leftSize - width)); - leftSize -= width; - valueIdx++; - } - // If the remaining space of the buffer can not save the bits for one Integer, - if (leftSize > 0 && valueIdx < NUM_OF_INTS + offset) { - // put the first 'leftSize' bits of the Integer into remaining space of the buffer - buffer |= (values[valueIdx] >>> (width - leftSize)); - leftBit = width - leftSize; - leftSize = 0; - } + while (leftSize >= width && valueIdx < NUM_OF_INTS + offset) { + // encode one Integer to the 'buffer' + buffer |= (values[valueIdx] << (leftSize - width)); + leftSize -= width; + valueIdx++; + } + // If the remaining space of the buffer can not save the bits for one Integer, + if (leftSize > 0 && valueIdx < NUM_OF_INTS + offset) { + // put the first 
'leftSize' bits of the Integer into remaining space of the buffer + buffer |= (values[valueIdx] >>> (width - leftSize)); + leftBit = width - leftSize; + leftSize = 0; + } - // put the buffer into the final result - for (int j = 0; j < 4; j++) { - buf[bufIdx] = (byte) ((buffer >>> ((3 - j) * 8)) & 0xFF); - bufIdx++; - if (bufIdx >= width) { - return; - } - } + // put the buffer into the final result + for (int j = 0; j < 4; j++) { + buf[bufIdx] = (byte) ((buffer >>> ((3 - j) * 8)) & 0xFF); + bufIdx++; + if (bufIdx >= width) { + return; } + } } + } - /** - * decode Integers from byte array. - * - * @param buf - array where bytes are in. - * @param offset - offset of first byte to be decoded in buf - * @param values - decoded result , the length of 'values' should be @{link IntPacker#NUM_OF_INTS} - */ - public void unpack8Values(byte[] buf, int offset, int[] values) { - int byteIdx = offset; - long buffer = 0; - //total bits which have read from 'buf' to 'buffer'. i.e., number of available bits to be decoded. - int totalBits = 0; - int valueIdx = 0; + /** + * decode Integers from byte array. + * + * @param buf - array where bytes are in. + * @param offset - offset of first byte to be decoded in buf + * @param values - decoded result , the length of 'values' should be @{link IntPacker#NUM_OF_INTS} + */ + public void unpack8Values(byte[] buf, int offset, int[] values) { + int byteIdx = offset; + long buffer = 0; + // total bits which have read from 'buf' to 'buffer'. i.e., number of available bits to be + // decoded. + int totalBits = 0; + int valueIdx = 0; - while (valueIdx < NUM_OF_INTS) { - //If current available bits are not enough to decode one Integer, then add next byte from buf to 'buffer' - //until totalBits >= width - while (totalBits < width) { - buffer = ((buffer << 8) | (buf[byteIdx] & 0xFF)); - byteIdx++; - totalBits += 8; - } + while (valueIdx < NUM_OF_INTS) { + // If current available bits are not enough to decode one Integer, then add next byte from buf + // to 'buffer' + // until totalBits >= width + while (totalBits < width) { + buffer = ((buffer << 8) | (buf[byteIdx] & 0xFF)); + byteIdx++; + totalBits += 8; + } - //If current available bits are enough to decode one Integer, then decode one Integer one by one - //until left bits in 'buffer' is not enough to decode one Integer. - while (totalBits >= width && valueIdx < 8) { - values[valueIdx] = (int) (buffer >>> (totalBits - width)); - valueIdx++; - totalBits -= width; - buffer = (buffer & ((1 << totalBits) - 1)); - } - } + // If current available bits are enough to decode one Integer, then decode one Integer one by + // one + // until left bits in 'buffer' is not enough to decode one Integer. + while (totalBits >= width && valueIdx < 8) { + values[valueIdx] = (int) (buffer >>> (totalBits - width)); + valueIdx++; + totalBits -= width; + buffer = (buffer & ((1 << totalBits) - 1)); + } } + } - /** - * decode all values from 'buf' with specified offset and length - * decoded result will be saved in the array named 'values'. - * - * @param buf: array where all bytes are in. - * @param offset: the offset of first byte to be decoded in buf. - * @param length: length of bytes to be decoded in buf. - * @param values: decoded result. 
- */ - public void unpackAllValues(byte[] buf, int offset, int length, int[] values) { - int idx = 0; - int k = 0; - while (idx < length) { - int[] tv = new int[8]; - //decode 8 values one time, current result will be saved in the array named 'tv' - unpack8Values(buf, idx, tv); - for (int i = 0; i < 8; i++) { - values[k + i] = tv[i]; - } - idx += width; - k += 8; - } + /** + * decode all values from 'buf' with specified offset and length decoded result will be saved in + * the array named 'values'. + * + * @param buf: array where all bytes are in. + * @param offset: the offset of first byte to be decoded in buf. + * @param length: length of bytes to be decoded in buf. + * @param values: decoded result. + */ + public void unpackAllValues(byte[] buf, int offset, int length, int[] values) { + int idx = 0; + int k = 0; + while (idx < length) { + int[] tv = new int[8]; + // decode 8 values one time, current result will be saved in the array named 'tv' + unpack8Values(buf, idx, tv); + for (int i = 0; i < 8; i++) { + values[k + i] = tv[i]; + } + idx += width; + k += 8; } + } - public void setWidth(int width) { - this.width = width; - } + public void setWidth(int width) { + this.width = width; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/LongPacker.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/LongPacker.java index a2bcfeff..289263f0 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/LongPacker.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/LongPacker.java @@ -1,168 +1,168 @@ package cn.edu.tsinghua.tsfile.encoding.bitpacking; /** - * This class is used to encode(decode) Long in Java with specified bit-width. - * User need to guarantee that the length of every given Long in binary mode - * is less than or equal to the bit-width. + * This class is used to encode(decode) Long in Java with specified bit-width. User need to + * guarantee that the length of every given Long in binary mode is less than or equal to the + * bit-width. *

- * e.g., if bit-width is 31, then Long '2147483648'(2^31) is not allowed but '2147483647'(2^31-1) is allowed. + * e.g., if bit-width is 31, then Long '2147483648'(2^31) is not allowed but '2147483647'(2^31-1) is + * allowed. *

- * For a full example, - * Width: 3 - * Input: 5 4 7 3 0 1 3 2 + * For a full example, Width: 3 Input: 5 4 7 3 0 1 3 2 *

* Output: *

- * +-----------------------+ +-----------------------+ +-----------------------+ - * |1 |0 |1 |1 |0 |0 |1 |1 | |1 |0 |1 |1 |0 |0 |0 |0 | |0 |1 |0 |1 |1 |0 |1 |0 | - * +-----------------------+ +-----------------------+ +-----------------------+ - * +-----+ +-----+ +---------+ +-----+ +-----+ +---------+ +-----+ +-----+ - * 5 4 7 3 0 1 3 2 + * +-----------------------+ +-----------------------+ +-----------------------+ |1 |0 |1 |1 |0 |0 + * |1 |1 | |1 |0 |1 |1 |0 |0 |0 |0 | |0 |1 |0 |1 |1 |0 |1 |0 | +-----------------------+ + * +-----------------------+ +-----------------------+ +-----+ +-----+ +---------+ +-----+ +-----+ + * +---------+ +-----+ +-----+ 5 4 7 3 0 1 3 2 * * @author Zhang Jinrui */ public class LongPacker { - /** - * Number of Long values for each pack operation - */ - private static final int NUM_OF_LONGS = 8; - /** - * bit-width - */ - private int width; + /** + * Number of Long values for each pack operation + */ + private static final int NUM_OF_LONGS = 8; + /** + * bit-width + */ + private int width; - public LongPacker(int width) { - this.width = width; - } + public LongPacker(int width) { + this.width = width; + } - /** - * Encode 8 ({@link LongPacker#NUM_OF_LONGS}) Longs from the array 'values' with specified bit-width to bytes - * - * @param values - array where '8 Longs' are in - * @param offset - the offset of first Long to be encoded - * @param buf - encoded bytes, buf size must be equal to ({@link LongPacker#NUM_OF_LONGS}} * {@link IntPacker#width} / 8) - */ - public void pack8Values(long[] values, int offset, byte[] buf) { + /** + * Encode 8 ({@link LongPacker#NUM_OF_LONGS}) Longs from the array 'values' with specified + * bit-width to bytes + * + * @param values - array where '8 Longs' are in + * @param offset - the offset of first Long to be encoded + * @param buf - encoded bytes, buf size must be equal to ({@link LongPacker#NUM_OF_LONGS}} * + * {@link IntPacker#width} / 8) + */ + public void pack8Values(long[] values, int offset, byte[] buf) { - int bufIdx = 0; - int valueIdx = offset; - //remaining bits for the current unfinished Integer - int leftBit = 0; + int bufIdx = 0; + int valueIdx = offset; + // remaining bits for the current unfinished Integer + int leftBit = 0; - while (valueIdx < NUM_OF_LONGS + offset) { - // buffer is used for saving 64 bits as a part of result - long buffer = 0; - //remaining size of bits in the 'buffer' - int leftSize = 64; + while (valueIdx < NUM_OF_LONGS + offset) { + // buffer is used for saving 64 bits as a part of result + long buffer = 0; + // remaining size of bits in the 'buffer' + int leftSize = 64; - // encode the left bits of current Long to 'buffer' - if (leftBit > 0) { - buffer |= (values[valueIdx] << (64 - leftBit)); - leftSize -= leftBit; - leftBit = 0; - valueIdx++; - } + // encode the left bits of current Long to 'buffer' + if (leftBit > 0) { + buffer |= (values[valueIdx] << (64 - leftBit)); + leftSize -= leftBit; + leftBit = 0; + valueIdx++; + } - while (leftSize >= width && valueIdx < NUM_OF_LONGS + offset) { - //encode one Long to the 'buffer' - buffer |= (values[valueIdx] << (leftSize - width)); - leftSize -= width; - valueIdx++; - } - // If the remaining space of the buffer can not save the bits for one Long - if (leftSize > 0 && valueIdx < NUM_OF_LONGS + offset) { - // put the first 'leftSize' bits of the Long into remaining space of the buffer - buffer |= (values[valueIdx] >>> (width - leftSize)); - leftBit = width - leftSize; - leftSize = 0; - } + while (leftSize >= width && valueIdx < NUM_OF_LONGS + 
offset) { + // encode one Long to the 'buffer' + buffer |= (values[valueIdx] << (leftSize - width)); + leftSize -= width; + valueIdx++; + } + // If the remaining space of the buffer can not save the bits for one Long + if (leftSize > 0 && valueIdx < NUM_OF_LONGS + offset) { + // put the first 'leftSize' bits of the Long into remaining space of the buffer + buffer |= (values[valueIdx] >>> (width - leftSize)); + leftBit = width - leftSize; + leftSize = 0; + } - // put the buffer into the final result - for (int j = 0; j < 8; j++) { - buf[bufIdx] = (byte) ((buffer >>> ((8 - j - 1) * 8)) & 0xFF); - bufIdx++; - if (bufIdx >= width * 8 / 8) { - return; - } - } + // put the buffer into the final result + for (int j = 0; j < 8; j++) { + buf[bufIdx] = (byte) ((buffer >>> ((8 - j - 1) * 8)) & 0xFF); + bufIdx++; + if (bufIdx >= width * 8 / 8) { + return; } + } } + } - /** - * decode values from byte array. - * - * @param buf - array where bytes are in. - * @param offset - offset of first byte to be decoded in buf - * @param values - decoded result , the size of values should be 8 - */ - public void unpack8Values(byte[] buf, int offset, long[] values) { - int byteIdx = offset; - int valueIdx = 0; - //left bit(s) available for current byte in 'buf' - int leftBits = 8; - //bits that has been read for current long value which is to be decoded - int totalBits = 0; + /** + * decode values from byte array. + * + * @param buf - array where bytes are in. + * @param offset - offset of first byte to be decoded in buf + * @param values - decoded result , the size of values should be 8 + */ + public void unpack8Values(byte[] buf, int offset, long[] values) { + int byteIdx = offset; + int valueIdx = 0; + // left bit(s) available for current byte in 'buf' + int leftBits = 8; + // bits that has been read for current long value which is to be decoded + int totalBits = 0; - //decode long value one by one - while (valueIdx < 8) { - //set all the 64 bits in current value to '0' - values[valueIdx] = 0; - //read until 'totalBits' is equal to width - while (totalBits < width) { - //If 'leftBits' in current byte belongs to current long value - if (width - totalBits >= leftBits) { - //then put left bits in current byte to current long value - values[valueIdx] = values[valueIdx] << leftBits; - values[valueIdx] = (values[valueIdx] | ((((1L << leftBits) - 1)) & buf[byteIdx])); - totalBits += leftBits; - //get next byte - byteIdx++; - //set 'leftBits' in next byte to 8 because the next byte has not been used - leftBits = 8; - //Else take part of bits in 'leftBits' to current value. 
- } else { - //numbers of bits to be take - int t = width - totalBits; - values[valueIdx] = values[valueIdx] << t; - values[valueIdx] = (values[valueIdx] - | ((((1L << leftBits) - 1)) & buf[byteIdx]) >>> (leftBits - t)); - leftBits -= t; - totalBits += t; - } - } - //Start to decode next long value - valueIdx++; - totalBits = 0; + // decode long value one by one + while (valueIdx < 8) { + // set all the 64 bits in current value to '0' + values[valueIdx] = 0; + // read until 'totalBits' is equal to width + while (totalBits < width) { + // If 'leftBits' in current byte belongs to current long value + if (width - totalBits >= leftBits) { + // then put left bits in current byte to current long value + values[valueIdx] = values[valueIdx] << leftBits; + values[valueIdx] = (values[valueIdx] | ((((1L << leftBits) - 1)) & buf[byteIdx])); + totalBits += leftBits; + // get next byte + byteIdx++; + // set 'leftBits' in next byte to 8 because the next byte has not been used + leftBits = 8; + // Else take part of bits in 'leftBits' to current value. + } else { + // numbers of bits to be take + int t = width - totalBits; + values[valueIdx] = values[valueIdx] << t; + values[valueIdx] = + (values[valueIdx] | ((((1L << leftBits) - 1)) & buf[byteIdx]) >>> (leftBits - t)); + leftBits -= t; + totalBits += t; } - + } + // Start to decode next long value + valueIdx++; + totalBits = 0; } - /** - * decode all values from 'buf' with specified offset and length - * decoded result will be saved in array named 'values'. - * - * @param buf: array where all bytes are in. - * @param offset: the offset of first byte to be decoded in buf. - * @param length: length of bytes to be decoded in buf. - * @param values: decoded result - */ - public void unpackAllValues(byte[] buf, int offset, int length, long[] values) { - int idx = 0; - int k = 0; - while (idx < length) { - long[] tv = new long[8]; - //decode 8 values one time, current result will be saved in the array named 'tv' - unpack8Values(buf, idx, tv); - for (int i = 0; i < 8; i++) { - values[k + i] = tv[i]; - } - idx += width; - k += 8; - } - } + } - public void setWidth(int width) { - this.width = width; + /** + * decode all values from 'buf' with specified offset and length decoded result will be saved in + * array named 'values'. + * + * @param buf: array where all bytes are in. + * @param offset: the offset of first byte to be decoded in buf. + * @param length: length of bytes to be decoded in buf. 
+ * @param values: decoded result + */ + public void unpackAllValues(byte[] buf, int offset, int length, long[] values) { + int idx = 0; + int k = 0; + while (idx < length) { + long[] tv = new long[8]; + // decode 8 values one time, current result will be saved in the array named 'tv' + unpack8Values(buf, idx, tv); + for (int i = 0; i < 8; i++) { + values[k + i] = tv[i]; + } + idx += width; + k += 8; } + } + + public void setWidth(int width) { + this.width = width; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/common/EncodingConfig.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/common/EncodingConfig.java index 1b4239d3..667dfdea 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/common/EncodingConfig.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/common/EncodingConfig.java @@ -6,12 +6,12 @@ * @author xuyi */ public class EncodingConfig { - // if number n repeats more than 8(>= 8), use rle encoding, otherwise use bit-packing - public static final int RLE_MAX_REPEATED_NUM = 8; + // if number n repeats more than 8(>= 8), use rle encoding, otherwise use bit-packing + public static final int RLE_MAX_REPEATED_NUM = 8; - // when to start a new bit-pacing group - public static final int RLE_MAX_BIT_PACKED_NUM = 63; + // when to start a new bit-pacing group + public static final int RLE_MAX_BIT_PACKED_NUM = 63; - // bit width for Bitmap Encoding - public static final int BITMAP_BITWIDTH = 1; + // bit width for Bitmap Encoding + public static final int BITMAP_BITWIDTH = 1; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/common/EndianType.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/common/EndianType.java index 1ba2dcf8..1f8826f7 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/common/EndianType.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/common/EndianType.java @@ -6,5 +6,5 @@ * @author xuyi */ public enum EndianType { - BIG_ENDIAN, LITTLE_ENDIAN + BIG_ENDIAN, LITTLE_ENDIAN } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/BitmapDecoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/BitmapDecoder.java index 70a1b00e..d5abaafc 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/BitmapDecoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/BitmapDecoder.java @@ -8,7 +8,6 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -23,202 +22,202 @@ * {@code } */ public class BitmapDecoder extends Decoder { - private static final Logger LOGGER = LoggerFactory.getLogger(BitmapDecoder.class); - - /** - * how many bytes for all encoded data in inputstream - */ - private int length; - - /** - * number of encoded data - */ - private int number; - - /** - * number of data left for reading in current buffer - */ - private int currentCount; - - /** - * each time decoder receives a inputstream, decoder creates a buffer to save all encoded data - */ - private ByteArrayInputStream byteCache; - - /** - * decoder reads all bitmap index from byteCache and save in Map - */ - private Map buffer; - - /** - * @param endianType deprecated - */ - public BitmapDecoder(EndianType endianType) { - super(TSEncoding.BITMAP); - byteCache = new ByteArrayInputStream(new byte[0]); - buffer = new HashMap<>(); - length = 0; - number = 0; - currentCount = 0; - LOGGER.debug("tsfile-encoding BitmapDecoder: init bitmap decoder"); + 
private static final Logger LOGGER = LoggerFactory.getLogger(BitmapDecoder.class); + + /** + * how many bytes for all encoded data in inputstream + */ + private int length; + + /** + * number of encoded data + */ + private int number; + + /** + * number of data left for reading in current buffer + */ + private int currentCount; + + /** + * each time decoder receives a inputstream, decoder creates a buffer to save all encoded data + */ + private ByteArrayInputStream byteCache; + + /** + * decoder reads all bitmap index from byteCache and save in Map + */ + private Map buffer; + + /** + * @param endianType deprecated + */ + public BitmapDecoder(EndianType endianType) { + super(TSEncoding.BITMAP); + byteCache = new ByteArrayInputStream(new byte[0]); + buffer = new HashMap<>(); + length = 0; + number = 0; + currentCount = 0; + LOGGER.debug("tsfile-encoding BitmapDecoder: init bitmap decoder"); + } + + @Override + public int readInt(InputStream in) { + if (currentCount == 0) { + try { + reset(); + getLengthAndNumber(in); + readNext(); + } catch (IOException e) { + LOGGER.error( + "tsfile-encoding BitmapDecoder: error occurs when reading next number. lenght {}, number {}, current number {}, result buffer {}", + length, number, currentCount, buffer, e); + } } - - @Override - public int readInt(InputStream in) { - if (currentCount == 0) { - try { - reset(); - getLengthAndNumber(in); - readNext(); - } catch (IOException e) { - LOGGER.error( - "tsfile-encoding BitmapDecoder: error occurs when reading next number. lenght {}, number {}, current number {}, result buffer {}", - length, number, currentCount, buffer, e); - } - } - int result = 0; - int index = (number - currentCount) / 8; - int offset = 7 - ((number - currentCount) % 8); - for (Map.Entry entry : buffer.entrySet()) { - byte[] tmp = entry.getValue(); - if ((tmp[index] & ((byte) 1 << offset)) != 0) { - result = entry.getKey(); - } - } - currentCount--; - return result; + int result = 0; + int index = (number - currentCount) / 8; + int offset = 7 - ((number - currentCount) % 8); + for (Map.Entry entry : buffer.entrySet()) { + byte[] tmp = entry.getValue(); + if ((tmp[index] & ((byte) 1 << offset)) != 0) { + result = entry.getKey(); + } } - - private void getLengthAndNumber(InputStream in) throws IOException { - this.length = ReadWriteStreamUtils.readUnsignedVarInt(in); - this.number = ReadWriteStreamUtils.readUnsignedVarInt(in); - byte[] tmp = new byte[length]; - in.read(tmp, 0, length); - this.byteCache = new ByteArrayInputStream(tmp); - } - - /** - * Decode all data from buffer and save them - */ - private void readNext() throws IOException { - int len = (this.number + 7) / 8; - while (byteCache.available() > 0) { - int value = ReadWriteStreamUtils.readUnsignedVarInt(byteCache); - byte[] tmp = new byte[len]; - byteCache.read(tmp, 0, len); - buffer.put(value, tmp); - } - currentCount = number; + currentCount--; + return result; + } + + private void getLengthAndNumber(InputStream in) throws IOException { + this.length = ReadWriteStreamUtils.readUnsignedVarInt(in); + this.number = ReadWriteStreamUtils.readUnsignedVarInt(in); + byte[] tmp = new byte[length]; + in.read(tmp, 0, length); + this.byteCache = new ByteArrayInputStream(tmp); + } + + /** + * Decode all data from buffer and save them + */ + private void readNext() throws IOException { + int len = (this.number + 7) / 8; + while (byteCache.available() > 0) { + int value = ReadWriteStreamUtils.readUnsignedVarInt(byteCache); + byte[] tmp = new byte[len]; + byteCache.read(tmp, 0, len); + 
buffer.put(value, tmp); } - - private void reset() { - this.length = 0; - this.number = 0; - this.currentCount = 0; - if (this.byteCache == null) { - new ByteArrayInputStream(new byte[0]); - } else { - this.byteCache.reset(); - } - if (this.buffer == null) { - this.buffer = new HashMap<>(); - } else { - this.buffer.clear(); - } + currentCount = number; + } + + private void reset() { + this.length = 0; + this.number = 0; + this.currentCount = 0; + if (this.byteCache == null) { + new ByteArrayInputStream(new byte[0]); + } else { + this.byteCache.reset(); } - - /** - * For special value in page list, get its bitmap index - * - * @param target value to get its bitmap index - * @param pageList input page list - * @return List(Pair of (length, bitmap index) ) - */ - public List> decodeAll(int target, List pageList) { - List> resultList = new ArrayList<>(); - for (InputStream inputStream : pageList) { - try { - reset(); - getLengthAndNumber(inputStream); - int byteArrayLength = (this.number + 7) / 8; - byte[] tmp = new byte[byteArrayLength]; - while (byteCache.available() > 0) { - int value = ReadWriteStreamUtils.readUnsignedVarInt(byteCache); - if (value == target) { - byteCache.read(tmp, 0, byteArrayLength); - break; - } else { - byteCache.skip(byteArrayLength); - } - } - - resultList.add(new Pair<>(this.number, tmp)); - LOGGER.debug("tsfile-encoding BitmapDecoder: number {} in current page, byte length {}", - this.number, byteArrayLength); - } catch (IOException e) { - LOGGER.error( - "tsfile-encoding BitmapDecoder: error occurs when decoding all numbers in page {}, number {}", - inputStream, this.number, e); - } - } - return resultList; + if (this.buffer == null) { + this.buffer = new HashMap<>(); + } else { + this.buffer.clear(); } - - /** - * Check whether there is number left for reading - * - * @param in : decoded data saved in InputStream - * @return true or false to indicate whether there is number left - * @throws IOException cannot read next value - * @see Decoder#hasNext(java.io.InputStream) - */ - @Override - public boolean hasNext(InputStream in) throws IOException { - if (currentCount > 0 || in.available() > 0) { - return true; + } + + /** + * For special value in page list, get its bitmap index + * + * @param target value to get its bitmap index + * @param pageList input page list + * @return List(Pair of (length, bitmap index) ) + */ + public List> decodeAll(int target, List pageList) { + List> resultList = new ArrayList<>(); + for (InputStream inputStream : pageList) { + try { + reset(); + getLengthAndNumber(inputStream); + int byteArrayLength = (this.number + 7) / 8; + byte[] tmp = new byte[byteArrayLength]; + while (byteCache.available() > 0) { + int value = ReadWriteStreamUtils.readUnsignedVarInt(byteCache); + if (value == target) { + byteCache.read(tmp, 0, byteArrayLength); + break; + } else { + byteCache.skip(byteArrayLength); + } } - return false; - } - - /** - * In current version, boolean value is equal to Enums value in schema - * - * @param in : decoded data saved in InputStream - * @throws TSFileDecodingException cannot read next value - * @see Decoder#readBoolean(java.io.InputStream) - */ - @Override - public boolean readBoolean(InputStream in) { - throw new TSFileDecodingException("Method readBoolean is not supported by BitmapDecoder"); - } - - @Override - public short readShort(InputStream in) { - throw new TSFileDecodingException("Method readShort is not supported by BitmapDecoder"); - } - @Override - public long readLong(InputStream in) { - throw new 
TSFileDecodingException("Method readLong is not supported by BitmapDecoder"); + resultList.add(new Pair<>(this.number, tmp)); + LOGGER.debug("tsfile-encoding BitmapDecoder: number {} in current page, byte length {}", + this.number, byteArrayLength); + } catch (IOException e) { + LOGGER.error( + "tsfile-encoding BitmapDecoder: error occurs when decoding all numbers in page {}, number {}", + inputStream, this.number, e); + } } - - @Override - public float readFloat(InputStream in) { - throw new TSFileDecodingException("Method readFloat is not supported by BitmapDecoder"); - } - - @Override - public double readDouble(InputStream in) { - throw new TSFileDecodingException("Method readDouble is not supported by BitmapDecoder"); - } - - @Override - public Binary readBinary(InputStream in) { - throw new TSFileDecodingException("Method readBinary is not supported by BitmapDecoder"); - } - - @Override - public BigDecimal readBigDecimal(InputStream in) { - throw new TSFileDecodingException("Method readBigDecimal is not supported by BitmapDecoder"); + return resultList; + } + + /** + * Check whether there is number left for reading + * + * @param in : decoded data saved in InputStream + * @return true or false to indicate whether there is number left + * @throws IOException cannot read next value + * @see Decoder#hasNext(java.io.InputStream) + */ + @Override + public boolean hasNext(InputStream in) throws IOException { + if (currentCount > 0 || in.available() > 0) { + return true; } + return false; + } + + /** + * In current version, boolean value is equal to Enums value in schema + * + * @param in : decoded data saved in InputStream + * @throws TSFileDecodingException cannot read next value + * @see Decoder#readBoolean(java.io.InputStream) + */ + @Override + public boolean readBoolean(InputStream in) { + throw new TSFileDecodingException("Method readBoolean is not supported by BitmapDecoder"); + } + + @Override + public short readShort(InputStream in) { + throw new TSFileDecodingException("Method readShort is not supported by BitmapDecoder"); + } + + @Override + public long readLong(InputStream in) { + throw new TSFileDecodingException("Method readLong is not supported by BitmapDecoder"); + } + + @Override + public float readFloat(InputStream in) { + throw new TSFileDecodingException("Method readFloat is not supported by BitmapDecoder"); + } + + @Override + public double readDouble(InputStream in) { + throw new TSFileDecodingException("Method readDouble is not supported by BitmapDecoder"); + } + + @Override + public Binary readBinary(InputStream in) { + throw new TSFileDecodingException("Method readBinary is not supported by BitmapDecoder"); + } + + @Override + public BigDecimal readBigDecimal(InputStream in) { + throw new TSFileDecodingException("Method readBigDecimal is not supported by BitmapDecoder"); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/Decoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/Decoder.java index cd82a327..1506374a 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/Decoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/Decoder.java @@ -6,7 +6,6 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import cn.edu.tsinghua.tsfile.format.Encoding; - import java.io.IOException; import java.io.InputStream; import java.math.BigDecimal; @@ -15,70 +14,72 @@ * @author Zhang Jinrui */ public abstract class Decoder { - public TSEncoding 
type; + public TSEncoding type; - public Decoder(TSEncoding type) { - this.type = type; - } + public Decoder(TSEncoding type) { + this.type = type; + } - public static Decoder getDecoderByType(Encoding type, TSDataType dataType) { - // PLA and DFT encoding are not supported in current version - if (type == Encoding.PLAIN) { - return new PlainDecoder(EndianType.LITTLE_ENDIAN); - } else if (type == Encoding.RLE && dataType == TSDataType.BOOLEAN) { - return new IntRleDecoder(EndianType.LITTLE_ENDIAN); - } else if (type == Encoding.TS_2DIFF && dataType == TSDataType.INT32) { - return new DeltaBinaryDecoder.IntDeltaDecoder(); - } else if (type == Encoding.TS_2DIFF && dataType == TSDataType.INT64) { - return new DeltaBinaryDecoder.LongDeltaDecoder(); - } else if (type == Encoding.RLE && dataType == TSDataType.INT32) { - return new IntRleDecoder(EndianType.LITTLE_ENDIAN); - } else if (type == Encoding.RLE && dataType == TSDataType.INT64) { - return new LongRleDecoder(EndianType.LITTLE_ENDIAN); - } else if (type == Encoding.BITMAP && dataType == TSDataType.ENUMS) { - return new BitmapDecoder(EndianType.LITTLE_ENDIAN); - } else if ((dataType == TSDataType.FLOAT || dataType == TSDataType.DOUBLE) && (type == Encoding.RLE || type == Encoding.TS_2DIFF) ) { - return new FloatDecoder(TSEncoding.valueOf(type.toString()), dataType); - } else if (type == Encoding.GORILLA && dataType == TSDataType.FLOAT) { - return new SinglePrecisionDecoder(); - } else if (type == Encoding.GORILLA && dataType == TSDataType.DOUBLE) { - return new DoublePrecisionDecoder(); - } else { - throw new TSFileDecodingException("Decoder not found:" + type + " , DataType is :" + dataType); - } + public static Decoder getDecoderByType(Encoding type, TSDataType dataType) { + // PLA and DFT encoding are not supported in current version + if (type == Encoding.PLAIN) { + return new PlainDecoder(EndianType.LITTLE_ENDIAN); + } else if (type == Encoding.RLE && dataType == TSDataType.BOOLEAN) { + return new IntRleDecoder(EndianType.LITTLE_ENDIAN); + } else if (type == Encoding.TS_2DIFF && dataType == TSDataType.INT32) { + return new DeltaBinaryDecoder.IntDeltaDecoder(); + } else if (type == Encoding.TS_2DIFF && dataType == TSDataType.INT64) { + return new DeltaBinaryDecoder.LongDeltaDecoder(); + } else if (type == Encoding.RLE && dataType == TSDataType.INT32) { + return new IntRleDecoder(EndianType.LITTLE_ENDIAN); + } else if (type == Encoding.RLE && dataType == TSDataType.INT64) { + return new LongRleDecoder(EndianType.LITTLE_ENDIAN); + } else if (type == Encoding.BITMAP && dataType == TSDataType.ENUMS) { + return new BitmapDecoder(EndianType.LITTLE_ENDIAN); + } else if ((dataType == TSDataType.FLOAT || dataType == TSDataType.DOUBLE) + && (type == Encoding.RLE || type == Encoding.TS_2DIFF)) { + return new FloatDecoder(TSEncoding.valueOf(type.toString()), dataType); + } else if (type == Encoding.GORILLA && dataType == TSDataType.FLOAT) { + return new SinglePrecisionDecoder(); + } else if (type == Encoding.GORILLA && dataType == TSDataType.DOUBLE) { + return new DoublePrecisionDecoder(); + } else { + throw new TSFileDecodingException( + "Decoder not found:" + type + " , DataType is :" + dataType); } + } - public int readInt(InputStream in) { - throw new TSFileDecodingException("Method readInt is not supproted by Decoder"); - } + public int readInt(InputStream in) { + throw new TSFileDecodingException("Method readInt is not supproted by Decoder"); + } - public boolean readBoolean(InputStream in) { - throw new TSFileDecodingException("Method 
readBoolean is not supproted by Decoder"); - } + public boolean readBoolean(InputStream in) { + throw new TSFileDecodingException("Method readBoolean is not supproted by Decoder"); + } - public short readShort(InputStream in) { - throw new TSFileDecodingException("Method readShort is not supproted by Decoder"); - } + public short readShort(InputStream in) { + throw new TSFileDecodingException("Method readShort is not supproted by Decoder"); + } - public long readLong(InputStream in) { - throw new TSFileDecodingException("Method readLong is not supproted by Decoder"); - } + public long readLong(InputStream in) { + throw new TSFileDecodingException("Method readLong is not supproted by Decoder"); + } - public float readFloat(InputStream in) { - throw new TSFileDecodingException("Method readFloat is not supproted by Decoder"); - } + public float readFloat(InputStream in) { + throw new TSFileDecodingException("Method readFloat is not supproted by Decoder"); + } - public double readDouble(InputStream in) { - throw new TSFileDecodingException("Method readDouble is not supproted by Decoder"); - } + public double readDouble(InputStream in) { + throw new TSFileDecodingException("Method readDouble is not supproted by Decoder"); + } - public Binary readBinary(InputStream in) { - throw new TSFileDecodingException("Method readBinary is not supproted by Decoder"); - } + public Binary readBinary(InputStream in) { + throw new TSFileDecodingException("Method readBinary is not supproted by Decoder"); + } - public BigDecimal readBigDecimal(InputStream in) { - throw new TSFileDecodingException("Method readBigDecimal is not supproted by Decoder"); - } + public BigDecimal readBigDecimal(InputStream in) { + throw new TSFileDecodingException("Method readBigDecimal is not supproted by Decoder"); + } - public abstract boolean hasNext(InputStream in) throws IOException; + public abstract boolean hasNext(InputStream in) throws IOException; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/DeltaBinaryDecoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/DeltaBinaryDecoder.java index eb464c0b..649642b1 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/DeltaBinaryDecoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/DeltaBinaryDecoder.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; import java.io.InputStream; @@ -17,225 +16,225 @@ * @see DeltaBinaryEncoder */ public abstract class DeltaBinaryDecoder extends Decoder { - private static final Logger LOG = LoggerFactory.getLogger(DeltaBinaryDecoder.class); - protected long count = 0; - protected byte[] deltaBuf; - + private static final Logger LOG = LoggerFactory.getLogger(DeltaBinaryDecoder.class); + protected long count = 0; + protected byte[] deltaBuf; + + /** + * the first value in one pack. 
+ */ + protected int readIntTotalCount = 0; + protected int nextReadIndex = 0; + /** + * max bit length of all value in a pack + */ + protected int packWidth; + /** + * data number in this pack + */ + protected int packNum; + + /** + * how many bytes data takes after encoding + */ + protected int encodingLength; + + public DeltaBinaryDecoder() { + super(TSEncoding.TS_2DIFF); + } + + protected abstract void readHeader(InputStream in) throws IOException; + + protected abstract void allocateDataArray(); + + protected abstract void readValue(int i); + + /** + * calculate the bytes length containing v bits + * + * @param v - number of bits + * @return number of bytes + */ + protected int ceil(int v) { + return (int) Math.ceil((double) (v) / 8.0); + } + + + @Override + public boolean hasNext(InputStream in) throws IOException { + return (nextReadIndex < readIntTotalCount) || in.available() > 0; + } + + + public static class IntDeltaDecoder extends DeltaBinaryDecoder { + private int firstValue; + private int[] data; + private int previous; /** - * the first value in one pack. + * minimum value for all difference */ - protected int readIntTotalCount = 0; - protected int nextReadIndex = 0; - /** - * max bit length of all value in a pack - */ - protected int packWidth; + private int minDeltaBase; + + public IntDeltaDecoder() { + super(); + } + /** - * data number in this pack + * if there's no decoded data left, decode next pack into {@code data} + * + * @param in InputStream + * @return int + * @throws IOException cannot read T from InputStream */ - protected int packNum; + protected int readT(InputStream in) throws IOException { + if (nextReadIndex == readIntTotalCount) + return loadIntBatch(in); + return data[nextReadIndex++]; + } + + @Override + public int readInt(InputStream in) { + try { + return readT(in); + } catch (IOException e) { + LOG.warn("meet IOException when load batch from InputStream, return 0"); + return 0; + } + } /** - * how many bytes data takes after encoding + * if remaining data has been run out, load next pack from InputStream + * + * @param in InputStream + * @return int + * @throws IOException cannot load batch from InputStream */ - protected int encodingLength; + protected int loadIntBatch(InputStream in) throws IOException { + packNum = BytesUtils.readInt(in); + packWidth = BytesUtils.readInt(in); + count++; + readHeader(in); + + encodingLength = ceil(packNum * packWidth); + deltaBuf = BytesUtils.safeReadInputStreamToBytes(encodingLength, in); + allocateDataArray(); + + previous = firstValue; + readIntTotalCount = packNum; + nextReadIndex = 0; + readPack(); + return firstValue; + } + + private void readPack() throws IOException { + for (int i = 0; i < packNum; i++) { + readValue(i); + previous = data[i]; + } + } - public DeltaBinaryDecoder() { - super(TSEncoding.TS_2DIFF); + @Override + protected void readHeader(InputStream in) throws IOException { + minDeltaBase = BytesUtils.readInt(in); + firstValue = BytesUtils.readInt(in); + } + + @Override + protected void allocateDataArray() { + data = new int[packNum]; } - protected abstract void readHeader(InputStream in) throws IOException; + @Override + protected void readValue(int i) { + int v = BytesUtils.bytesToInt(deltaBuf, packWidth * i, packWidth); + data[i] = previous + minDeltaBase + v; + } + } - protected abstract void allocateDataArray(); + public static class LongDeltaDecoder extends DeltaBinaryDecoder { + private long firstValue; + private long[] data; + private long previous; + /** + * minimum value for all 
difference + */ + private long minDeltaBase; - protected abstract void readValue(int i); + public LongDeltaDecoder() { + super(); + } /** - * calculate the bytes length containing v bits + * if there's no decoded data left, decode next pack into {@code data} * - * @param v - number of bits - * @return number of bytes + * @param in InputStream + * @return long value + * @throws IOException cannot read T from InputStream */ - protected int ceil(int v) { - return (int) Math.ceil((double) (v) / 8.0); + protected long readT(InputStream in) throws IOException { + if (nextReadIndex == readIntTotalCount) + return loadIntBatch(in); + return data[nextReadIndex++]; + } + + /*** + * if remaining data has been run out, load next pack from InputStream + * + * @param in InputStream + * @return long value + * @throws IOException cannot load batch from InputStream + */ + protected long loadIntBatch(InputStream in) throws IOException { + packNum = BytesUtils.readInt(in); + packWidth = BytesUtils.readInt(in); + count++; + readHeader(in); + + encodingLength = ceil(packNum * packWidth); + deltaBuf = BytesUtils.safeReadInputStreamToBytes(encodingLength, in); + allocateDataArray(); + + previous = firstValue; + readIntTotalCount = packNum; + nextReadIndex = 0; + readPack(); + return firstValue; + } + + + private void readPack() throws IOException { + for (int i = 0; i < packNum; i++) { + readValue(i); + previous = data[i]; + } + } + + @Override + public long readLong(InputStream in) { + try { + return readT(in); + } catch (IOException e) { + LOG.warn("meet IOException when load batch from InputStream, return 0"); + return 0; + } } + @Override + protected void readHeader(InputStream in) throws IOException { + minDeltaBase = BytesUtils.readLong(in); + firstValue = BytesUtils.readLong(in); + } @Override - public boolean hasNext(InputStream in) throws IOException { - return (nextReadIndex < readIntTotalCount) || in.available() > 0; - } - - - public static class IntDeltaDecoder extends DeltaBinaryDecoder { - private int firstValue; - private int[] data; - private int previous; - /** - * minimum value for all difference - */ - private int minDeltaBase; - - public IntDeltaDecoder() { - super(); - } - - /** - * if there's no decoded data left, decode next pack into {@code data} - * - * @param in InputStream - * @return int - * @throws IOException cannot read T from InputStream - */ - protected int readT(InputStream in) throws IOException { - if (nextReadIndex == readIntTotalCount) - return loadIntBatch(in); - return data[nextReadIndex++]; - } - - @Override - public int readInt(InputStream in) { - try { - return readT(in); - } catch (IOException e) { - LOG.warn("meet IOException when load batch from InputStream, return 0"); - return 0; - } - } - - /** - * if remaining data has been run out, load next pack from InputStream - * - * @param in InputStream - * @return int - * @throws IOException cannot load batch from InputStream - */ - protected int loadIntBatch(InputStream in) throws IOException { - packNum = BytesUtils.readInt(in); - packWidth = BytesUtils.readInt(in); - count++; - readHeader(in); - - encodingLength = ceil(packNum * packWidth); - deltaBuf = BytesUtils.safeReadInputStreamToBytes(encodingLength, in); - allocateDataArray(); - - previous = firstValue; - readIntTotalCount = packNum; - nextReadIndex = 0; - readPack(); - return firstValue; - } - - private void readPack() throws IOException { - for (int i = 0; i < packNum; i++) { - readValue(i); - previous = data[i]; - } - } - - @Override - protected void 
readHeader(InputStream in) throws IOException { - minDeltaBase = BytesUtils.readInt(in); - firstValue = BytesUtils.readInt(in); - } - - @Override - protected void allocateDataArray() { - data = new int[packNum]; - } - - @Override - protected void readValue(int i) { - int v = BytesUtils.bytesToInt(deltaBuf, packWidth * i, packWidth); - data[i] = previous + minDeltaBase + v; - } - } - - public static class LongDeltaDecoder extends DeltaBinaryDecoder { - private long firstValue; - private long[] data; - private long previous; - /** - * minimum value for all difference - */ - private long minDeltaBase; - - public LongDeltaDecoder() { - super(); - } - - /** - * if there's no decoded data left, decode next pack into {@code data} - * - * @param in InputStream - * @return long value - * @throws IOException cannot read T from InputStream - */ - protected long readT(InputStream in) throws IOException { - if (nextReadIndex == readIntTotalCount) - return loadIntBatch(in); - return data[nextReadIndex++]; - } - - /*** - * if remaining data has been run out, load next pack from InputStream - * - * @param in InputStream - * @return long value - * @throws IOException cannot load batch from InputStream - */ - protected long loadIntBatch(InputStream in) throws IOException { - packNum = BytesUtils.readInt(in); - packWidth = BytesUtils.readInt(in); - count++; - readHeader(in); - - encodingLength = ceil(packNum * packWidth); - deltaBuf = BytesUtils.safeReadInputStreamToBytes(encodingLength, in); - allocateDataArray(); - - previous = firstValue; - readIntTotalCount = packNum; - nextReadIndex = 0; - readPack(); - return firstValue; - } - - - private void readPack() throws IOException { - for (int i = 0; i < packNum; i++) { - readValue(i); - previous = data[i]; - } - } - - @Override - public long readLong(InputStream in) { - try { - return readT(in); - } catch (IOException e) { - LOG.warn("meet IOException when load batch from InputStream, return 0"); - return 0; - } - } - - @Override - protected void readHeader(InputStream in) throws IOException { - minDeltaBase = BytesUtils.readLong(in); - firstValue = BytesUtils.readLong(in); - } - - @Override - protected void allocateDataArray() { - data = new long[packNum]; - } - - @Override - protected void readValue(int i) { - long v = BytesUtils.bytesToLong(deltaBuf, packWidth * i, packWidth); - data[i] = previous + minDeltaBase + v; - } + protected void allocateDataArray() { + data = new long[packNum]; + } + @Override + protected void readValue(int i) { + long v = BytesUtils.bytesToLong(deltaBuf, packWidth * i, packWidth); + data[i] = previous + minDeltaBase + v; } + + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/DoublePrecisionDecoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/DoublePrecisionDecoder.java index 3c2385a2..a5614568 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/DoublePrecisionDecoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/DoublePrecisionDecoder.java @@ -2,92 +2,91 @@ import java.io.IOException; import java.io.InputStream; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; /** * Decoder for value value using gorilla */ -public class DoublePrecisionDecoder extends GorillaDecoder{ - private static final Logger LOGGER = LoggerFactory.getLogger(DoublePrecisionDecoder.class); - private long preValue; - - public DoublePrecisionDecoder() { - } - - @Override - public double readDouble(InputStream in) { - if 
(!flag) { - flag = true; - try { - int[] buf = new int[8]; - for (int i = 0; i < 8; i++) - buf[i] = in.read(); - long res = 0L; - for (int i = 0; i < 8; i++) { - res += ((long) buf[i] << (i * 8)); - } - preValue = res; - double tmp = Double.longBitsToDouble(preValue); - leadingZeroNum = Long.numberOfLeadingZeros(preValue); - tailingZeroNum = Long.numberOfTrailingZeros(preValue); - fillBuffer(in); - getNextValue(in); - return tmp; - } catch (IOException e) { - LOGGER.error("DoublePrecisionDecoder cannot read first double number because: {}", e.getMessage()); - } - } else { - try { - double tmp = Double.longBitsToDouble(preValue); - getNextValue(in); - return tmp; - } catch (IOException e) { - LOGGER.error("DoublePrecisionDecoder cannot read following double number because: {}", e.getMessage()); - } - } - return Double.NaN; - } - - /** - * check whether there is any value to encode left - * - * @param in stream to read - * @throws IOException cannot read from stream - */ - private void getNextValue(InputStream in) throws IOException { - nextFlag1 = readBit(in); - // case: '0' - if (!nextFlag1) { - return; - } - nextFlag2 = readBit(in); - - if (!nextFlag2) { - // case: '10' - long tmp = 0; - for (int i = 0; i < TSFileConfig.DOUBLE_LENGTH - leadingZeroNum - tailingZeroNum; i++) { - long bit = readBit(in) ? 1 : 0; - tmp |= (bit << (TSFileConfig.DOUBLE_LENGTH - 1 - leadingZeroNum - i)); - } - tmp ^= preValue; - preValue = tmp; - } else { - // case: '11' - int leadingZeroNumTmp = readIntFromStream(in, TSFileConfig.DOUBLE_LEADING_ZERO_LENGTH); - int lenTmp = readIntFromStream(in, TSFileConfig.DOUBLE_VALUE_LENGTH); - long tmp = readLongFromStream(in, lenTmp); - tmp <<= (TSFileConfig.DOUBLE_LENGTH - leadingZeroNumTmp - lenTmp); - tmp ^= preValue; - preValue = tmp; - } - leadingZeroNum = Long.numberOfLeadingZeros(preValue); - tailingZeroNum = Long.numberOfTrailingZeros(preValue); - if(Double.isNaN(Double.longBitsToDouble(preValue))){ - isEnd = true; - } - } +public class DoublePrecisionDecoder extends GorillaDecoder { + private static final Logger LOGGER = LoggerFactory.getLogger(DoublePrecisionDecoder.class); + private long preValue; + + public DoublePrecisionDecoder() {} + + @Override + public double readDouble(InputStream in) { + if (!flag) { + flag = true; + try { + int[] buf = new int[8]; + for (int i = 0; i < 8; i++) + buf[i] = in.read(); + long res = 0L; + for (int i = 0; i < 8; i++) { + res += ((long) buf[i] << (i * 8)); + } + preValue = res; + double tmp = Double.longBitsToDouble(preValue); + leadingZeroNum = Long.numberOfLeadingZeros(preValue); + tailingZeroNum = Long.numberOfTrailingZeros(preValue); + fillBuffer(in); + getNextValue(in); + return tmp; + } catch (IOException e) { + LOGGER.error("DoublePrecisionDecoder cannot read first double number because: {}", + e.getMessage()); + } + } else { + try { + double tmp = Double.longBitsToDouble(preValue); + getNextValue(in); + return tmp; + } catch (IOException e) { + LOGGER.error("DoublePrecisionDecoder cannot read following double number because: {}", + e.getMessage()); + } + } + return Double.NaN; + } + + /** + * check whether there is any value to encode left + * + * @param in stream to read + * @throws IOException cannot read from stream + */ + private void getNextValue(InputStream in) throws IOException { + nextFlag1 = readBit(in); + // case: '0' + if (!nextFlag1) { + return; + } + nextFlag2 = readBit(in); + + if (!nextFlag2) { + // case: '10' + long tmp = 0; + for (int i = 0; i < TSFileConfig.DOUBLE_LENGTH - leadingZeroNum - 
tailingZeroNum; i++) { + long bit = readBit(in) ? 1 : 0; + tmp |= (bit << (TSFileConfig.DOUBLE_LENGTH - 1 - leadingZeroNum - i)); + } + tmp ^= preValue; + preValue = tmp; + } else { + // case: '11' + int leadingZeroNumTmp = readIntFromStream(in, TSFileConfig.DOUBLE_LEADING_ZERO_LENGTH); + int lenTmp = readIntFromStream(in, TSFileConfig.DOUBLE_VALUE_LENGTH); + long tmp = readLongFromStream(in, lenTmp); + tmp <<= (TSFileConfig.DOUBLE_LENGTH - leadingZeroNumTmp - lenTmp); + tmp ^= preValue; + preValue = tmp; + } + leadingZeroNum = Long.numberOfLeadingZeros(preValue); + tailingZeroNum = Long.numberOfTrailingZeros(preValue); + if (Double.isNaN(Double.longBitsToDouble(preValue))) { + isEnd = true; + } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/FloatDecoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/FloatDecoder.java index 620473f7..404a7471 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/FloatDecoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/FloatDecoder.java @@ -9,122 +9,120 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; import java.io.InputStream; /** - * Decoder for float or double value using rle or two diff. For - * more info about encoding pattern, see {@link FloatEncoder} + * Decoder for float or double value using rle or two diff. For more info about encoding pattern, + * see {@link FloatEncoder} */ public class FloatDecoder extends Decoder { - private static final Logger LOGGER = LoggerFactory.getLogger(FloatDecoder.class); - private Decoder decoder; + private static final Logger LOGGER = LoggerFactory.getLogger(FloatDecoder.class); + private Decoder decoder; - /** - * maxPointValue = 10^(maxPointNumer) maxPointNumber can be read from stream - */ - private double maxPointValue; + /** + * maxPointValue = 10^(maxPointNumer) maxPointNumber can be read from stream + */ + private double maxPointValue; - /** - * flag to indicate whether we have read maxPointNumber and calculate - * maxPointValue - */ - private boolean isMaxPointNumberRead; + /** + * flag to indicate whether we have read maxPointNumber and calculate maxPointValue + */ + private boolean isMaxPointNumberRead; - public FloatDecoder(TSEncoding encodingType, TSDataType dataType) { - super(encodingType); - if (encodingType == TSEncoding.RLE) { - if (dataType == TSDataType.FLOAT) { - decoder = new IntRleDecoder(EndianType.LITTLE_ENDIAN); - LOGGER.debug("tsfile-encoding FloatDecoder: init decoder using int-rle and float"); - } else if (dataType == TSDataType.DOUBLE) { - decoder = new LongRleDecoder(EndianType.LITTLE_ENDIAN); - LOGGER.debug("tsfile-encoding FloatDecoder: init decoder using long-rle and double"); - } else { - throw new TSFileDecodingException( - String.format("data type %s is not supported by FloatDecoder", dataType)); - } - } else if (encodingType == TSEncoding.TS_2DIFF) { - if (dataType == TSDataType.FLOAT) { - decoder = new DeltaBinaryDecoder.IntDeltaDecoder(); - LOGGER.debug("tsfile-encoding FloatDecoder: init decoder using int-delta and float"); - } else if (dataType == TSDataType.DOUBLE) { - decoder = new DeltaBinaryDecoder.LongDeltaDecoder(); - LOGGER.debug("tsfile-encoding FloatDecoder: init decoder using long-delta and double"); - } else { - throw new TSFileDecodingException( - String.format("data type %s is not supported by FloatDecoder", dataType)); - } - } else { - throw new TSFileDecodingException( - 
String.format("%s encoding is not supported by FloatDecoder", encodingType)); - } - isMaxPointNumberRead = false; + public FloatDecoder(TSEncoding encodingType, TSDataType dataType) { + super(encodingType); + if (encodingType == TSEncoding.RLE) { + if (dataType == TSDataType.FLOAT) { + decoder = new IntRleDecoder(EndianType.LITTLE_ENDIAN); + LOGGER.debug("tsfile-encoding FloatDecoder: init decoder using int-rle and float"); + } else if (dataType == TSDataType.DOUBLE) { + decoder = new LongRleDecoder(EndianType.LITTLE_ENDIAN); + LOGGER.debug("tsfile-encoding FloatDecoder: init decoder using long-rle and double"); + } else { + throw new TSFileDecodingException( + String.format("data type %s is not supported by FloatDecoder", dataType)); + } + } else if (encodingType == TSEncoding.TS_2DIFF) { + if (dataType == TSDataType.FLOAT) { + decoder = new DeltaBinaryDecoder.IntDeltaDecoder(); + LOGGER.debug("tsfile-encoding FloatDecoder: init decoder using int-delta and float"); + } else if (dataType == TSDataType.DOUBLE) { + decoder = new DeltaBinaryDecoder.LongDeltaDecoder(); + LOGGER.debug("tsfile-encoding FloatDecoder: init decoder using long-delta and double"); + } else { + throw new TSFileDecodingException( + String.format("data type %s is not supported by FloatDecoder", dataType)); + } + } else { + throw new TSFileDecodingException( + String.format("%s encoding is not supported by FloatDecoder", encodingType)); } + isMaxPointNumberRead = false; + } - @Override - public float readFloat(InputStream in) { - readMaxPointValue(in); - int value = decoder.readInt(in); - double result = value / maxPointValue; - return (float) result; - } + @Override + public float readFloat(InputStream in) { + readMaxPointValue(in); + int value = decoder.readInt(in); + double result = value / maxPointValue; + return (float) result; + } - @Override - public double readDouble(InputStream in) { - readMaxPointValue(in); - long value = decoder.readLong(in); - double result = value / maxPointValue; - return result; - } + @Override + public double readDouble(InputStream in) { + readMaxPointValue(in); + long value = decoder.readLong(in); + double result = value / maxPointValue; + return result; + } - private void readMaxPointValue(InputStream in) { - try { - if (!isMaxPointNumberRead) { - int maxPointNumber = ReadWriteStreamUtils.readUnsignedVarInt(in); - if (maxPointNumber <= 0) { - maxPointValue = 1; - } else { - maxPointValue = Math.pow(10, maxPointNumber); - } - isMaxPointNumberRead = true; - } - } catch (IOException e) { - LOGGER.error("tsfile-encoding FloatDecoder: error occurs when reading maxPointValue", e); + private void readMaxPointValue(InputStream in) { + try { + if (!isMaxPointNumberRead) { + int maxPointNumber = ReadWriteStreamUtils.readUnsignedVarInt(in); + if (maxPointNumber <= 0) { + maxPointValue = 1; + } else { + maxPointValue = Math.pow(10, maxPointNumber); } + isMaxPointNumberRead = true; + } + } catch (IOException e) { + LOGGER.error("tsfile-encoding FloatDecoder: error occurs when reading maxPointValue", e); } + } - @Override - public boolean hasNext(InputStream in) throws IOException { - if (decoder == null) { - return false; - } - return decoder.hasNext(in); + @Override + public boolean hasNext(InputStream in) throws IOException { + if (decoder == null) { + return false; } + return decoder.hasNext(in); + } - @Override - public Binary readBinary(InputStream in) { - throw new TSFileDecodingException("Method readBinary is not supproted by FloatDecoder"); - } + @Override + public Binary 
readBinary(InputStream in) { + throw new TSFileDecodingException("Method readBinary is not supproted by FloatDecoder"); + } - @Override - public boolean readBoolean(InputStream in) { - throw new TSFileDecodingException("Method readBoolean is not supproted by FloatDecoder"); - } + @Override + public boolean readBoolean(InputStream in) { + throw new TSFileDecodingException("Method readBoolean is not supproted by FloatDecoder"); + } - @Override - public short readShort(InputStream in) { - throw new TSFileDecodingException("Method readShort is not supproted by FloatDecoder"); - } + @Override + public short readShort(InputStream in) { + throw new TSFileDecodingException("Method readShort is not supproted by FloatDecoder"); + } - @Override - public int readInt(InputStream in) { - throw new TSFileDecodingException("Method readInt is not supproted by FloatDecoder"); - } + @Override + public int readInt(InputStream in) { + throw new TSFileDecodingException("Method readInt is not supproted by FloatDecoder"); + } - @Override - public long readLong(InputStream in) { - throw new TSFileDecodingException("Method readLong is not supproted by FloatDecoder"); - } + @Override + public long readLong(InputStream in) { + throw new TSFileDecodingException("Method readLong is not supproted by FloatDecoder"); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/GorillaDecoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/GorillaDecoder.java index 4a91301b..ddae4e69 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/GorillaDecoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/GorillaDecoder.java @@ -2,98 +2,100 @@ import java.io.IOException; import java.io.InputStream; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; public abstract class GorillaDecoder extends Decoder { - private static final Logger LOGGER = LoggerFactory.getLogger(GorillaDecoder.class); - protected static final int EOF = -1; - // flag to indicate whether the first value is read from stream - protected boolean flag; - protected int leadingZeroNum, tailingZeroNum; - protected boolean isEnd; - // 8-bit buffer of bits to write out - protected int buffer; - // number of bits remaining in buffer - protected int numberLeftInBuffer; - - protected boolean nextFlag1; - protected boolean nextFlag2; + private static final Logger LOGGER = LoggerFactory.getLogger(GorillaDecoder.class); + protected static final int EOF = -1; + // flag to indicate whether the first value is read from stream + protected boolean flag; + protected int leadingZeroNum, tailingZeroNum; + protected boolean isEnd; + // 8-bit buffer of bits to write out + protected int buffer; + // number of bits remaining in buffer + protected int numberLeftInBuffer; - public GorillaDecoder() { - super(TSEncoding.GORILLA); - this.flag = false; - this.isEnd = false; - } + protected boolean nextFlag1; + protected boolean nextFlag2; - @Override - public boolean hasNext(InputStream in) throws IOException { - if (in.available() > 0 || !isEnd) { - return true; - } - return false; - } + public GorillaDecoder() { + super(TSEncoding.GORILLA); + this.flag = false; + this.isEnd = false; + } - protected boolean isEmpty() { - return buffer == EOF; + @Override + public boolean hasNext(InputStream in) throws IOException { + if (in.available() > 0 || !isEnd) { + return true; } - - protected boolean readBit(InputStream in) throws IOException { - if(numberLeftInBuffer == 0 && 
!isEnd){ - fillBuffer(in); - } - if (isEmpty()) throw new IOException("Reading from empty input stream"); - numberLeftInBuffer--; - return ((buffer >> numberLeftInBuffer) & 1) == 1; + return false; + } + + protected boolean isEmpty() { + return buffer == EOF; + } + + protected boolean readBit(InputStream in) throws IOException { + if (numberLeftInBuffer == 0 && !isEnd) { + fillBuffer(in); } - - /** - * read one byte and save in buffer - * @param in stream to read - */ - protected void fillBuffer(InputStream in) { - try { - buffer = in.read(); - numberLeftInBuffer = 8; - } catch (IOException e) { - LOGGER.error("Failed to fill a new buffer, because {}",e.getMessage()); - buffer = EOF; - numberLeftInBuffer = -1; - } + if (isEmpty()) + throw new IOException("Reading from empty input stream"); + numberLeftInBuffer--; + return ((buffer >> numberLeftInBuffer) & 1) == 1; + } + + /** + * read one byte and save in buffer + * + * @param in stream to read + */ + protected void fillBuffer(InputStream in) { + try { + buffer = in.read(); + numberLeftInBuffer = 8; + } catch (IOException e) { + LOGGER.error("Failed to fill a new buffer, because {}", e.getMessage()); + buffer = EOF; + numberLeftInBuffer = -1; + } + } + + /** + * read some bits and convert them to a int value + * + * @param in stream to read + * @param len number of bit to read + * @return converted int value + * @throws IOException cannot read from stream + */ + protected int readIntFromStream(InputStream in, int len) throws IOException { + int num = 0; + for (int i = 0; i < len; i++) { + int bit = readBit(in) ? 1 : 0; + num |= bit << (len - 1 - i); + } + return num; + } + + /** + * read some bits and convert them to a long value + * + * @param in stream to read + * @param len number of bit to read + * @return converted long value + * @throws IOException cannot read from stream + */ + protected long readLongFromStream(InputStream in, int len) throws IOException { + long num = 0; + for (int i = 0; i < len; i++) { + long bit = (long) (readBit(in) ? 1 : 0); + num |= bit << (len - 1 - i); } - - /** - * read some bits and convert them to a int value - * @param in stream to read - * @param len number of bit to read - * @return converted int value - * @throws IOException cannot read from stream - */ - protected int readIntFromStream(InputStream in, int len) throws IOException{ - int num = 0; - for (int i = 0; i < len; i++) { - int bit = readBit(in) ? 1 : 0; - num |= bit << (len - 1 - i); - } - return num; - } - - /** - * read some bits and convert them to a long value - * @param in stream to read - * @param len number of bit to read - * @return converted long value - * @throws IOException cannot read from stream - */ - protected long readLongFromStream(InputStream in, int len) throws IOException{ - long num = 0; - for (int i = 0; i < len; i++) { - long bit = (long)(readBit(in) ? 
1 : 0); - num |= bit << (len - 1 - i); - } - return num; - } + return num; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/IntRleDecoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/IntRleDecoder.java index 4a79134a..05c267be 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/IntRleDecoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/IntRleDecoder.java @@ -1,109 +1,112 @@ -package cn.edu.tsinghua.tsfile.encoding.decoder; - -import cn.edu.tsinghua.tsfile.common.exception.TSFileDecodingException; -import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; -import cn.edu.tsinghua.tsfile.encoding.bitpacking.IntPacker; -import cn.edu.tsinghua.tsfile.encoding.common.EndianType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.InputStream; - -/** - * Decoder for int value using rle or bit-packing - */ -public class IntRleDecoder extends RleDecoder { - private static final Logger LOGGER = LoggerFactory.getLogger(IntRleDecoder.class); - - /** - * current value for rle repeated value - */ - private int currentValue; - - /** - * buffer to save all values in group using bit-packing - */ - private int[] currentBuffer; - - /** - * packer for unpacking int value - */ - private IntPacker packer; - - public IntRleDecoder(EndianType endianType) { - super(endianType); - currentValue = 0; - } - - @Override - public boolean readBoolean(InputStream in) { - return this.readInt(in) == 0 ? false : true; - } - - /** - * read a int value from InputStream - * - * @param in - InputStream - * @return value - current valid value - */ - @Override - public int readInt(InputStream in) { - if (!isLengthAndBitWidthReaded) { - //start to read a new rle+bit-packing pattern - try { - readLengthAndBitWidth(in); - } catch (IOException e) { - LOGGER.error("tsfile-encoding IntRleDecoder: error occurs when reading length", e); - } - } - - if (currentCount == 0) { - try { - readNext(); - } catch (IOException e) { - LOGGER.error("tsfile-encoding IntRleDecoder: error occurs when reading all encoding number, length is {}, bit width is {}", length, bitWidth, e); - } - } - --currentCount; - int result = 0; - switch (mode) { - case RLE: - result = currentValue; - break; - case BIT_PACKED: - result = currentBuffer[bitPackingNum - currentCount - 1]; - break; - default: - throw new TSFileDecodingException(String.format("tsfile-encoding IntRleDecoder: not a valid mode %s", mode)); - } - - if (!hasNextPackage()) { - isLengthAndBitWidthReaded = false; - } - return result; - } - - @Override - protected void initPacker() { - packer = new IntPacker(bitWidth); - } - - @Override - protected void readNumberInRLE() throws IOException { - currentValue = ReadWriteStreamUtils.readIntLittleEndianPaddedOnBitWidth(byteCache, bitWidth); - } - - @Override - protected void readBitPackingBuffer(int bitPackedGroupCount, int lastBitPackedNum) throws IOException { - currentBuffer = new int[bitPackedGroupCount * config.RLE_MIN_REPEATED_NUM]; - byte[] bytes = new byte[bitPackedGroupCount * bitWidth]; - int bytesToRead = bitPackedGroupCount * bitWidth; - bytesToRead = Math.min(bytesToRead, byteCache.available()); -// new DataInputStream(byteCache).readFully(bytes, 0, bytesToRead); - byteCache.read(bytes, 0, bytesToRead); - - // save all int values in currentBuffer - packer.unpackAllValues(bytes, 0, bytesToRead, currentBuffer); - } -} +package cn.edu.tsinghua.tsfile.encoding.decoder; + +import 
cn.edu.tsinghua.tsfile.common.exception.TSFileDecodingException; +import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; +import cn.edu.tsinghua.tsfile.encoding.bitpacking.IntPacker; +import cn.edu.tsinghua.tsfile.encoding.common.EndianType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.io.InputStream; + +/** + * Decoder for int value using rle or bit-packing + */ +public class IntRleDecoder extends RleDecoder { + private static final Logger LOGGER = LoggerFactory.getLogger(IntRleDecoder.class); + + /** + * current value for rle repeated value + */ + private int currentValue; + + /** + * buffer to save all values in group using bit-packing + */ + private int[] currentBuffer; + + /** + * packer for unpacking int value + */ + private IntPacker packer; + + public IntRleDecoder(EndianType endianType) { + super(endianType); + currentValue = 0; + } + + @Override + public boolean readBoolean(InputStream in) { + return this.readInt(in) == 0 ? false : true; + } + + /** + * read a int value from InputStream + * + * @param in - InputStream + * @return value - current valid value + */ + @Override + public int readInt(InputStream in) { + if (!isLengthAndBitWidthReaded) { + // start to read a new rle+bit-packing pattern + try { + readLengthAndBitWidth(in); + } catch (IOException e) { + LOGGER.error("tsfile-encoding IntRleDecoder: error occurs when reading length", e); + } + } + + if (currentCount == 0) { + try { + readNext(); + } catch (IOException e) { + LOGGER.error( + "tsfile-encoding IntRleDecoder: error occurs when reading all encoding number, length is {}, bit width is {}", + length, bitWidth, e); + } + } + --currentCount; + int result = 0; + switch (mode) { + case RLE: + result = currentValue; + break; + case BIT_PACKED: + result = currentBuffer[bitPackingNum - currentCount - 1]; + break; + default: + throw new TSFileDecodingException( + String.format("tsfile-encoding IntRleDecoder: not a valid mode %s", mode)); + } + + if (!hasNextPackage()) { + isLengthAndBitWidthReaded = false; + } + return result; + } + + @Override + protected void initPacker() { + packer = new IntPacker(bitWidth); + } + + @Override + protected void readNumberInRLE() throws IOException { + currentValue = ReadWriteStreamUtils.readIntLittleEndianPaddedOnBitWidth(byteCache, bitWidth); + } + + @Override + protected void readBitPackingBuffer(int bitPackedGroupCount, int lastBitPackedNum) + throws IOException { + currentBuffer = new int[bitPackedGroupCount * config.RLE_MIN_REPEATED_NUM]; + byte[] bytes = new byte[bitPackedGroupCount * bitWidth]; + int bytesToRead = bitPackedGroupCount * bitWidth; + bytesToRead = Math.min(bytesToRead, byteCache.available()); + // new DataInputStream(byteCache).readFully(bytes, 0, bytesToRead); + byteCache.read(bytes, 0, bytesToRead); + + // save all int values in currentBuffer + packer.unpackAllValues(bytes, 0, bytesToRead, currentBuffer); + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/LongRleDecoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/LongRleDecoder.java index 819076b9..6301e6f7 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/LongRleDecoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/LongRleDecoder.java @@ -1,104 +1,107 @@ -package cn.edu.tsinghua.tsfile.encoding.decoder; - -import cn.edu.tsinghua.tsfile.common.exception.TSFileDecodingException; -import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; -import 
cn.edu.tsinghua.tsfile.encoding.bitpacking.LongPacker; -import cn.edu.tsinghua.tsfile.encoding.common.EndianType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.InputStream; - -/** - * Decoder for long value using rle or bit-packing - */ -public class LongRleDecoder extends RleDecoder { - private static final Logger LOGGER = LoggerFactory.getLogger(LongRleDecoder.class); - - /** - * current value for rle repeated value - */ - private long currentValue; - - /** - * buffer to save all values in group using bit-packing - */ - private long[] currentBuffer; - - /** - * packer for unpacking long value - */ - private LongPacker packer; - - public LongRleDecoder(EndianType endianType) { - super(endianType); - currentValue = 0; - } - - /** - * read a long value from InputStream - * - * @param in - InputStream - * @return value - current valid value - */ - @Override - public long readLong(InputStream in) { - if (!isLengthAndBitWidthReaded) { - //start to read a new rle+bit-packing pattern - try { - readLengthAndBitWidth(in); - } catch (IOException e) { - LOGGER.error("tsfile-encoding IntRleDecoder: error occurs when reading length", e); - } - } - - if (currentCount == 0) { - try { - readNext(); - } catch (IOException e) { - LOGGER.error("tsfile-encoding IntRleDecoder: error occurs when reading all encoding number, length is {}, bit width is {}", length, bitWidth, e); - } - } - --currentCount; - long result = 0; - switch (mode) { - case RLE: - result = currentValue; - break; - case BIT_PACKED: - result = currentBuffer[bitPackingNum - currentCount - 1]; - break; - default: - throw new TSFileDecodingException(String.format("tsfile-encoding LongRleDecoder: not a valid mode %s", mode)); - } - - if (!hasNextPackage()) { - isLengthAndBitWidthReaded = false; - } - return result; - } - - @Override - protected void initPacker() { - packer = new LongPacker(bitWidth); - } - - @Override - protected void readNumberInRLE() throws IOException { - currentValue = ReadWriteStreamUtils.readLongLittleEndianPaddedOnBitWidth(byteCache, bitWidth); - } - - @Override - protected void readBitPackingBuffer(int bitPackedGroupCount, int lastBitPackedNum) throws IOException { - currentBuffer = new long[bitPackedGroupCount * config.RLE_MIN_REPEATED_NUM]; - byte[] bytes = new byte[bitPackedGroupCount * bitWidth]; - int bytesToRead = bitPackedGroupCount * bitWidth; - bytesToRead = Math.min(bytesToRead, byteCache.available()); -// new DataInputStream(byteCache).readFully(bytes, 0, bytesToRead); - byteCache.read(bytes, 0, bytesToRead); - - // save all long values in currentBuffer - packer.unpackAllValues(bytes, 0, bytesToRead, currentBuffer); - } -} +package cn.edu.tsinghua.tsfile.encoding.decoder; + +import cn.edu.tsinghua.tsfile.common.exception.TSFileDecodingException; +import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; +import cn.edu.tsinghua.tsfile.encoding.bitpacking.LongPacker; +import cn.edu.tsinghua.tsfile.encoding.common.EndianType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.io.InputStream; + +/** + * Decoder for long value using rle or bit-packing + */ +public class LongRleDecoder extends RleDecoder { + private static final Logger LOGGER = LoggerFactory.getLogger(LongRleDecoder.class); + + /** + * current value for rle repeated value + */ + private long currentValue; + + /** + * buffer to save all values in group using bit-packing + */ + private long[] currentBuffer; + + /** + * packer for 
unpacking long value + */ + private LongPacker packer; + + public LongRleDecoder(EndianType endianType) { + super(endianType); + currentValue = 0; + } + + /** + * read a long value from InputStream + * + * @param in - InputStream + * @return value - current valid value + */ + @Override + public long readLong(InputStream in) { + if (!isLengthAndBitWidthReaded) { + // start to read a new rle+bit-packing pattern + try { + readLengthAndBitWidth(in); + } catch (IOException e) { + LOGGER.error("tsfile-encoding IntRleDecoder: error occurs when reading length", e); + } + } + + if (currentCount == 0) { + try { + readNext(); + } catch (IOException e) { + LOGGER.error( + "tsfile-encoding IntRleDecoder: error occurs when reading all encoding number, length is {}, bit width is {}", + length, bitWidth, e); + } + } + --currentCount; + long result = 0; + switch (mode) { + case RLE: + result = currentValue; + break; + case BIT_PACKED: + result = currentBuffer[bitPackingNum - currentCount - 1]; + break; + default: + throw new TSFileDecodingException( + String.format("tsfile-encoding LongRleDecoder: not a valid mode %s", mode)); + } + + if (!hasNextPackage()) { + isLengthAndBitWidthReaded = false; + } + return result; + } + + @Override + protected void initPacker() { + packer = new LongPacker(bitWidth); + } + + @Override + protected void readNumberInRLE() throws IOException { + currentValue = ReadWriteStreamUtils.readLongLittleEndianPaddedOnBitWidth(byteCache, bitWidth); + } + + @Override + protected void readBitPackingBuffer(int bitPackedGroupCount, int lastBitPackedNum) + throws IOException { + currentBuffer = new long[bitPackedGroupCount * config.RLE_MIN_REPEATED_NUM]; + byte[] bytes = new byte[bitPackedGroupCount * bitWidth]; + int bytesToRead = bitPackedGroupCount * bitWidth; + bytesToRead = Math.min(bytesToRead, byteCache.available()); + // new DataInputStream(byteCache).readFully(bytes, 0, bytesToRead); + byteCache.read(bytes, 0, bytesToRead); + + // save all long values in currentBuffer + packer.unpackAllValues(bytes, 0, bytesToRead, currentBuffer); + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/PlainDecoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/PlainDecoder.java index 7b3a843d..0f6e0257 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/PlainDecoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/PlainDecoder.java @@ -6,7 +6,6 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; import java.io.InputStream; import java.math.BigDecimal; @@ -16,111 +15,111 @@ */ public class PlainDecoder extends Decoder { - private static final Logger LOGGER = LoggerFactory.getLogger(PlainDecoder.class); - public EndianType endianType; + private static final Logger LOGGER = LoggerFactory.getLogger(PlainDecoder.class); + public EndianType endianType; - public PlainDecoder(EndianType endianType) { - super(TSEncoding.PLAIN); - this.endianType = endianType; - } + public PlainDecoder(EndianType endianType) { + super(TSEncoding.PLAIN); + this.endianType = endianType; + } - @Override - public boolean readBoolean(InputStream in) { - try { - int ch1 = in.read(); - if (ch1 == 0) { - return false; - } else { - return true; - } - } catch (IOException e) { - LOGGER.error("tsfile-encoding PlainDecoder: errors whewn read boolean", e); - } + @Override + public boolean readBoolean(InputStream in) { + try { + int ch1 = in.read(); + if (ch1 == 0) { return 
false; + } else { + return true; + } + } catch (IOException e) { + LOGGER.error("tsfile-encoding PlainDecoder: errors whewn read boolean", e); } + return false; + } - @Override - public short readShort(InputStream in) { - try { - int ch1 = in.read(); - int ch2 = in.read(); - if (this.endianType == EndianType.LITTLE_ENDIAN) { - return (short) ((ch2 << 8) + ch1); - } else { - LOGGER.error( - "tsfile-encoding PlainEncoder: current version does not support short value decoding"); - } - } catch (IOException e) { - LOGGER.error("tsfile-encoding PlainDecoder: errors whewn read short", e); - } - return -1; + @Override + public short readShort(InputStream in) { + try { + int ch1 = in.read(); + int ch2 = in.read(); + if (this.endianType == EndianType.LITTLE_ENDIAN) { + return (short) ((ch2 << 8) + ch1); + } else { + LOGGER.error( + "tsfile-encoding PlainEncoder: current version does not support short value decoding"); + } + } catch (IOException e) { + LOGGER.error("tsfile-encoding PlainDecoder: errors whewn read short", e); } + return -1; + } - @Override - public int readInt(InputStream in) { - try { - int ch1 = in.read(); - int ch2 = in.read(); - int ch3 = in.read(); - int ch4 = in.read(); - if (this.endianType == EndianType.LITTLE_ENDIAN) { - return ch1 + (ch2 << 8) + (ch3 << 16) + (ch4 << 24); - } else { - LOGGER.error( - "tsfile-encoding PlainEncoder: current version does not support int value encoding"); - } - } catch (IOException e) { - LOGGER.error("tsfile-encoding PlainDecoder: errors whewn read int", e); - } - return -1; + @Override + public int readInt(InputStream in) { + try { + int ch1 = in.read(); + int ch2 = in.read(); + int ch3 = in.read(); + int ch4 = in.read(); + if (this.endianType == EndianType.LITTLE_ENDIAN) { + return ch1 + (ch2 << 8) + (ch3 << 16) + (ch4 << 24); + } else { + LOGGER.error( + "tsfile-encoding PlainEncoder: current version does not support int value encoding"); + } + } catch (IOException e) { + LOGGER.error("tsfile-encoding PlainDecoder: errors whewn read int", e); } + return -1; + } - @Override - public long readLong(InputStream in) { - int[] buf = new int[8]; - try { - for (int i = 0; i < 8; i++) - buf[i] = in.read(); - } catch (IOException e) { - LOGGER.error("tsfile-encoding PlainDecoder: errors whewn read long", e); - } - - Long res = 0L; - for (int i = 0; i < 8; i++) { - res += ((long) buf[i] << (i * 8)); - } - return res; + @Override + public long readLong(InputStream in) { + int[] buf = new int[8]; + try { + for (int i = 0; i < 8; i++) + buf[i] = in.read(); + } catch (IOException e) { + LOGGER.error("tsfile-encoding PlainDecoder: errors whewn read long", e); } - @Override - public float readFloat(InputStream in) { - return Float.intBitsToFloat(readInt(in)); + Long res = 0L; + for (int i = 0; i < 8; i++) { + res += ((long) buf[i] << (i * 8)); } + return res; + } - @Override - public double readDouble(InputStream in) { - return Double.longBitsToDouble(readLong(in)); - } + @Override + public float readFloat(InputStream in) { + return Float.intBitsToFloat(readInt(in)); + } - @Override - public Binary readBinary(InputStream in) { - int length = readInt(in); - byte[] buf = new byte[length]; - try { - in.read(buf, 0, buf.length); - } catch (IOException e) { - LOGGER.error("tsfile-encoding PlainDecoder: errors whewn read binary", e); - } - return new Binary(buf); - } + @Override + public double readDouble(InputStream in) { + return Double.longBitsToDouble(readLong(in)); + } - @Override - public boolean hasNext(InputStream in) throws IOException { - return 
in.available() > 0; + @Override + public Binary readBinary(InputStream in) { + int length = readInt(in); + byte[] buf = new byte[length]; + try { + in.read(buf, 0, buf.length); + } catch (IOException e) { + LOGGER.error("tsfile-encoding PlainDecoder: errors whewn read binary", e); } + return new Binary(buf); + } - @Override - public BigDecimal readBigDecimal(InputStream in) { - throw new TSFileDecodingException("Method readBigDecimal is not supproted by PlainDecoder"); - } + @Override + public boolean hasNext(InputStream in) throws IOException { + return in.available() > 0; + } + + @Override + public BigDecimal readBigDecimal(InputStream in) { + throw new TSFileDecodingException("Method readBigDecimal is not supproted by PlainDecoder"); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/RleDecoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/RleDecoder.java index 69ba33fa..2b15f406 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/RleDecoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/RleDecoder.java @@ -1,216 +1,214 @@ -package cn.edu.tsinghua.tsfile.encoding.decoder; - -import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; -import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; -import cn.edu.tsinghua.tsfile.common.exception.TSFileDecodingException; -import cn.edu.tsinghua.tsfile.common.utils.Binary; -import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; -import cn.edu.tsinghua.tsfile.encoding.common.EndianType; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.math.BigDecimal; - -/** - * Abstract class for all rle decoder. Decoding values according to - * following grammar: {@code }. For more - * information about rle format, see RleEncoder - */ -public abstract class RleDecoder extends Decoder { - // private static final Logger LOGGER = LoggerFactory.getLogger(RleDecoder.class); - public EndianType endianType; - protected TSFileConfig config = TSFileDescriptor.getInstance().getConfig(); - /** - * mode to indicate current encoding type - * 0 - RLE - * 1 - BIT_PACKED - */ - protected MODE mode; - /** - * bit width for bit-packing and rle to decode - */ - protected int bitWidth; - /** - * number of data left for reading in current buffer - */ - protected int currentCount; - /** - * how many bytes for all encoded data like [{@code }] in - * inputstream - */ - protected int length; - /** - * a flag to indicate whether current pattern is end. 
false - need to start - * reading a new page true - current page isn't over - */ - protected boolean isLengthAndBitWidthReaded; - /** - * buffer to save data format like [{@code }] for decoder - */ - protected ByteArrayInputStream byteCache; - /** - * number of bit-packing group in which is saved in header - */ - protected int bitPackingNum; - - public RleDecoder(EndianType endianType) { - super(TSEncoding.RLE); - this.endianType = endianType; - currentCount = 0; - isLengthAndBitWidthReaded = false; - bitPackingNum = 0; - byteCache = new ByteArrayInputStream(new byte[0]); - // LOGGER.debug("tsfile-encoding RleDecoder: init rle decoder"); - } - - /** - * get header for both rle and bit-packing current encode mode which is - * saved in first bit of header - * @return int value - * @throws IOException cannot get header - */ - public int getHeader() throws IOException { - int header = ReadWriteStreamUtils.readUnsignedVarInt(byteCache); - mode = (header & 1) == 0 ? MODE.RLE : MODE.BIT_PACKED; - return header; - } - - /** - * get all encoded data according to mode - * - * @throws IOException cannot read next value - */ - protected void readNext() throws IOException { - int header = getHeader(); - switch (mode) { - case RLE: - currentCount = header >> 1; - readNumberInRLE(); - break; - case BIT_PACKED: - int bitPackedGroupCount = header >> 1; - // in last bit-packing group, there may be some useless value, - // lastBitPackedNum indicates how many values is useful - int lastBitPackedNum = byteCache.read(); - if (bitPackedGroupCount > 0) { - - currentCount = (bitPackedGroupCount - 1) * config.RLE_MIN_REPEATED_NUM + lastBitPackedNum; - bitPackingNum = currentCount; - } else { - throw new TSFileDecodingException(String.format( - "tsfile-encoding IntRleDecoder: bitPackedGroupCount %d, smaller than 1", bitPackedGroupCount)); - } - readBitPackingBuffer(bitPackedGroupCount, lastBitPackedNum); - break; - default: - throw new TSFileDecodingException( - String.format("tsfile-encoding IntRleDecoder: unknown encoding mode %s", mode)); - } - } - - /** - * read length and bit width of current package before we decode number - * - * @param in InputStream - * @throws IOException cannot read length and bit-width - */ - protected void readLengthAndBitWidth(InputStream in) throws IOException { - // long st = System.currentTimeMillis(); - length = ReadWriteStreamUtils.readUnsignedVarInt(in); - byte[] tmp = new byte[length]; - in.read(tmp, 0, length); - byteCache = new ByteArrayInputStream(tmp); - isLengthAndBitWidthReaded = true; - bitWidth = byteCache.read(); - initPacker(); - // long et = System.currentTimeMillis(); - } - - /** - * Check whether there is number left for reading - * - * @param in decoded data saved in InputStream - * @return true or false to indicate whether there is number left - * @throws IOException cannot check next value - */ - @Override - public boolean hasNext(InputStream in) throws IOException { - if (currentCount > 0 || in.available() > 0 || hasNextPackage()) { - return true; - } - return false; - } - - /** - * Check whether there is another pattern left for reading - * - * @return true or false to indicate whether there is another pattern left - */ - protected boolean hasNextPackage() { - return currentCount > 0 || byteCache.available() > 0; - } - - protected abstract void initPacker(); - - /** - * Read rle package and save them in buffer - * - * @throws IOException cannot read number - */ - protected abstract void readNumberInRLE() throws IOException; - - /** - * Read bit-packing package 
and save them in buffer - * - * @param bitPackedGroupCount number of group number - * @param lastBitPackedNum number of useful value in last group - * @throws IOException cannot read bit pack - */ - protected abstract void readBitPackingBuffer(int bitPackedGroupCount, int lastBitPackedNum) throws IOException; - - @Override - public boolean readBoolean(InputStream in) { - throw new TSFileDecodingException("Method readBoolean is not supproted by RleDecoder"); - } - - @Override - public short readShort(InputStream in) { - throw new TSFileDecodingException("Method readShort is not supproted by RleDecoder"); - } - - @Override - public int readInt(InputStream in) { - throw new TSFileDecodingException("Method readInt is not supproted by RleDecoder"); - } - - @Override - public long readLong(InputStream in) { - throw new TSFileDecodingException("Method readLong is not supproted by RleDecoder"); - } - - @Override - public float readFloat(InputStream in) { - throw new TSFileDecodingException("Method readFloat is not supproted by RleDecoder"); - } - - @Override - public double readDouble(InputStream in) { - throw new TSFileDecodingException("Method readDouble is not supproted by RleDecoder"); - } - - @Override - public Binary readBinary(InputStream in) { - throw new TSFileDecodingException("Method readBinary is not supproted by RleDecoder"); - } - - @Override - public BigDecimal readBigDecimal(InputStream in) { - throw new TSFileDecodingException("Method readBigDecimal is not supproted by RleDecoder"); - } - - protected static enum MODE { - RLE, BIT_PACKED - } -} +package cn.edu.tsinghua.tsfile.encoding.decoder; + +import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; +import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; +import cn.edu.tsinghua.tsfile.common.exception.TSFileDecodingException; +import cn.edu.tsinghua.tsfile.common.utils.Binary; +import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; +import cn.edu.tsinghua.tsfile.encoding.common.EndianType; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.math.BigDecimal; + +/** + * Abstract class for all rle decoder. Decoding values according to following grammar: + * {@code }. For more information about rle format, see RleEncoder + */ +public abstract class RleDecoder extends Decoder { + // private static final Logger LOGGER = LoggerFactory.getLogger(RleDecoder.class); + public EndianType endianType; + protected TSFileConfig config = TSFileDescriptor.getInstance().getConfig(); + /** + * mode to indicate current encoding type 0 - RLE 1 - BIT_PACKED + */ + protected MODE mode; + /** + * bit width for bit-packing and rle to decode + */ + protected int bitWidth; + /** + * number of data left for reading in current buffer + */ + protected int currentCount; + /** + * how many bytes for all encoded data like [{@code }] in inputstream + */ + protected int length; + /** + * a flag to indicate whether current pattern is end. 
false - need to start reading a new page + * true - current page isn't over + */ + protected boolean isLengthAndBitWidthReaded; + /** + * buffer to save data format like [{@code }] for decoder + */ + protected ByteArrayInputStream byteCache; + /** + * number of bit-packing group in which is saved in header + */ + protected int bitPackingNum; + + public RleDecoder(EndianType endianType) { + super(TSEncoding.RLE); + this.endianType = endianType; + currentCount = 0; + isLengthAndBitWidthReaded = false; + bitPackingNum = 0; + byteCache = new ByteArrayInputStream(new byte[0]); + // LOGGER.debug("tsfile-encoding RleDecoder: init rle decoder"); + } + + /** + * get header for both rle and bit-packing current encode mode which is saved in first bit of + * header + * + * @return int value + * @throws IOException cannot get header + */ + public int getHeader() throws IOException { + int header = ReadWriteStreamUtils.readUnsignedVarInt(byteCache); + mode = (header & 1) == 0 ? MODE.RLE : MODE.BIT_PACKED; + return header; + } + + /** + * get all encoded data according to mode + * + * @throws IOException cannot read next value + */ + protected void readNext() throws IOException { + int header = getHeader(); + switch (mode) { + case RLE: + currentCount = header >> 1; + readNumberInRLE(); + break; + case BIT_PACKED: + int bitPackedGroupCount = header >> 1; + // in last bit-packing group, there may be some useless value, + // lastBitPackedNum indicates how many values is useful + int lastBitPackedNum = byteCache.read(); + if (bitPackedGroupCount > 0) { + + currentCount = (bitPackedGroupCount - 1) * config.RLE_MIN_REPEATED_NUM + lastBitPackedNum; + bitPackingNum = currentCount; + } else { + throw new TSFileDecodingException( + String.format("tsfile-encoding IntRleDecoder: bitPackedGroupCount %d, smaller than 1", + bitPackedGroupCount)); + } + readBitPackingBuffer(bitPackedGroupCount, lastBitPackedNum); + break; + default: + throw new TSFileDecodingException( + String.format("tsfile-encoding IntRleDecoder: unknown encoding mode %s", mode)); + } + } + + /** + * read length and bit width of current package before we decode number + * + * @param in InputStream + * @throws IOException cannot read length and bit-width + */ + protected void readLengthAndBitWidth(InputStream in) throws IOException { + // long st = System.currentTimeMillis(); + length = ReadWriteStreamUtils.readUnsignedVarInt(in); + byte[] tmp = new byte[length]; + in.read(tmp, 0, length); + byteCache = new ByteArrayInputStream(tmp); + isLengthAndBitWidthReaded = true; + bitWidth = byteCache.read(); + initPacker(); + // long et = System.currentTimeMillis(); + } + + /** + * Check whether there is number left for reading + * + * @param in decoded data saved in InputStream + * @return true or false to indicate whether there is number left + * @throws IOException cannot check next value + */ + @Override + public boolean hasNext(InputStream in) throws IOException { + if (currentCount > 0 || in.available() > 0 || hasNextPackage()) { + return true; + } + return false; + } + + /** + * Check whether there is another pattern left for reading + * + * @return true or false to indicate whether there is another pattern left + */ + protected boolean hasNextPackage() { + return currentCount > 0 || byteCache.available() > 0; + } + + protected abstract void initPacker(); + + /** + * Read rle package and save them in buffer + * + * @throws IOException cannot read number + */ + protected abstract void readNumberInRLE() throws IOException; + + /** + * Read bit-packing 
package and save them in buffer + * + * @param bitPackedGroupCount number of group number + * @param lastBitPackedNum number of useful value in last group + * @throws IOException cannot read bit pack + */ + protected abstract void readBitPackingBuffer(int bitPackedGroupCount, int lastBitPackedNum) + throws IOException; + + @Override + public boolean readBoolean(InputStream in) { + throw new TSFileDecodingException("Method readBoolean is not supproted by RleDecoder"); + } + + @Override + public short readShort(InputStream in) { + throw new TSFileDecodingException("Method readShort is not supproted by RleDecoder"); + } + + @Override + public int readInt(InputStream in) { + throw new TSFileDecodingException("Method readInt is not supproted by RleDecoder"); + } + + @Override + public long readLong(InputStream in) { + throw new TSFileDecodingException("Method readLong is not supproted by RleDecoder"); + } + + @Override + public float readFloat(InputStream in) { + throw new TSFileDecodingException("Method readFloat is not supproted by RleDecoder"); + } + + @Override + public double readDouble(InputStream in) { + throw new TSFileDecodingException("Method readDouble is not supproted by RleDecoder"); + } + + @Override + public Binary readBinary(InputStream in) { + throw new TSFileDecodingException("Method readBinary is not supproted by RleDecoder"); + } + + @Override + public BigDecimal readBigDecimal(InputStream in) { + throw new TSFileDecodingException("Method readBigDecimal is not supproted by RleDecoder"); + } + + protected static enum MODE { + RLE, BIT_PACKED + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/SinglePrecisionDecoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/SinglePrecisionDecoder.java index c5e1280c..ba31cdf2 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/SinglePrecisionDecoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/decoder/SinglePrecisionDecoder.java @@ -2,89 +2,88 @@ import java.io.IOException; import java.io.InputStream; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; /** * Decoder for value value using gorilla */ public class SinglePrecisionDecoder extends GorillaDecoder { - private static final Logger LOGGER = LoggerFactory.getLogger(SinglePrecisionDecoder.class); - private int preValue; + private static final Logger LOGGER = LoggerFactory.getLogger(SinglePrecisionDecoder.class); + private int preValue; - public SinglePrecisionDecoder() { - } + public SinglePrecisionDecoder() {} - @Override - public float readFloat(InputStream in) { - if (!flag) { - flag = true; - try { - int ch1 = in.read(); - int ch2 = in.read(); - int ch3 = in.read(); - int ch4 = in.read(); - preValue = ch1 + (ch2 << 8) + (ch3 << 16) + (ch4 << 24); - leadingZeroNum = Integer.numberOfLeadingZeros(preValue); - tailingZeroNum = Integer.numberOfTrailingZeros(preValue); - float tmp = Float.intBitsToFloat(preValue); - fillBuffer(in); - getNextValue(in); - return tmp; - } catch (IOException e) { - LOGGER.error("SinglePrecisionDecoder cannot read first float number because: {}", e.getMessage()); - } - } else { - try { - float tmp = Float.intBitsToFloat(preValue); - getNextValue(in); - return tmp; - } catch (IOException e) { - LOGGER.error("SinglePrecisionDecoder cannot read following float number because: {}", e.getMessage()); - } - } - return Float.NaN; - } + @Override + public float readFloat(InputStream in) { + if (!flag) { + flag = true; + try { + int ch1 = 
in.read(); + int ch2 = in.read(); + int ch3 = in.read(); + int ch4 = in.read(); + preValue = ch1 + (ch2 << 8) + (ch3 << 16) + (ch4 << 24); + leadingZeroNum = Integer.numberOfLeadingZeros(preValue); + tailingZeroNum = Integer.numberOfTrailingZeros(preValue); + float tmp = Float.intBitsToFloat(preValue); + fillBuffer(in); + getNextValue(in); + return tmp; + } catch (IOException e) { + LOGGER.error("SinglePrecisionDecoder cannot read first float number because: {}", + e.getMessage()); + } + } else { + try { + float tmp = Float.intBitsToFloat(preValue); + getNextValue(in); + return tmp; + } catch (IOException e) { + LOGGER.error("SinglePrecisionDecoder cannot read following float number because: {}", + e.getMessage()); + } + } + return Float.NaN; + } - /** - * check whether there is any value to encode left - * - * @param in stream to read - * @throws IOException cannot read from stream - */ - private void getNextValue(InputStream in) throws IOException { - nextFlag1 = readBit(in); - // case: '0' - if (!nextFlag1) { - return; - } - nextFlag2 = readBit(in); + /** + * check whether there is any value to encode left + * + * @param in stream to read + * @throws IOException cannot read from stream + */ + private void getNextValue(InputStream in) throws IOException { + nextFlag1 = readBit(in); + // case: '0' + if (!nextFlag1) { + return; + } + nextFlag2 = readBit(in); - if (!nextFlag2) { - // case: '10' - int tmp = 0; - for (int i = 0; i < TSFileConfig.FLOAT_LENGTH - leadingZeroNum - tailingZeroNum; i++) { - int bit = readBit(in) ? 1 : 0; - tmp |= bit << (TSFileConfig.FLOAT_LENGTH - 1 - leadingZeroNum - i); - } - tmp ^= preValue; - preValue = tmp; - } else { - // case: '11' - int leadingZeroNumTmp = readIntFromStream(in, TSFileConfig.FLAOT_LEADING_ZERO_LENGTH); - int lenTmp = readIntFromStream(in, TSFileConfig.FLOAT_VALUE_LENGTH); - int tmp = readIntFromStream(in, lenTmp); - tmp <<= (TSFileConfig.FLOAT_LENGTH - leadingZeroNumTmp - lenTmp); - tmp ^= preValue; - preValue = tmp; - } - leadingZeroNum = Integer.numberOfLeadingZeros(preValue); - tailingZeroNum = Integer.numberOfTrailingZeros(preValue); - if(Float.isNaN(Float.intBitsToFloat(preValue))){ - isEnd = true; - } - } + if (!nextFlag2) { + // case: '10' + int tmp = 0; + for (int i = 0; i < TSFileConfig.FLOAT_LENGTH - leadingZeroNum - tailingZeroNum; i++) { + int bit = readBit(in) ? 
1 : 0; + tmp |= bit << (TSFileConfig.FLOAT_LENGTH - 1 - leadingZeroNum - i); + } + tmp ^= preValue; + preValue = tmp; + } else { + // case: '11' + int leadingZeroNumTmp = readIntFromStream(in, TSFileConfig.FLAOT_LEADING_ZERO_LENGTH); + int lenTmp = readIntFromStream(in, TSFileConfig.FLOAT_VALUE_LENGTH); + int tmp = readIntFromStream(in, lenTmp); + tmp <<= (TSFileConfig.FLOAT_LENGTH - leadingZeroNumTmp - lenTmp); + tmp ^= preValue; + preValue = tmp; + } + leadingZeroNum = Integer.numberOfLeadingZeros(preValue); + tailingZeroNum = Integer.numberOfTrailingZeros(preValue); + if (Float.isNaN(Float.intBitsToFloat(preValue))) { + isEnd = true; + } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/BitmapEncoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/BitmapEncoder.java index 72c39c4c..1228f669 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/BitmapEncoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/BitmapEncoder.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.ArrayList; @@ -15,6 +14,7 @@ /** * Encodes values using bitmap, according to the following grammar: + * *

  * {@code
  * bitmap-encoding:   
@@ -28,87 +28,88 @@
  * 
*/ public class BitmapEncoder extends Encoder { - private static final Logger LOGGER = LoggerFactory.getLogger(BitmapEncoder.class); + private static final Logger LOGGER = LoggerFactory.getLogger(BitmapEncoder.class); - /** - * Bitmap Encoder stores all current values in a list temporally - */ - private List values; + /** + * Bitmap Encoder stores all current values in a list temporally + */ + private List values; - /** - * @param endianType deprecated - */ - public BitmapEncoder(EndianType endianType) { - super(TSEncoding.BITMAP); - this.values = new ArrayList(); - LOGGER.debug("tsfile-encoding BitmapEncoder: init bitmap encoder"); - } + /** + * @param endianType deprecated + */ + public BitmapEncoder(EndianType endianType) { + super(TSEncoding.BITMAP); + this.values = new ArrayList(); + LOGGER.debug("tsfile-encoding BitmapEncoder: init bitmap encoder"); + } - /** - * Each time encoder receives a value, encoder doesn't write it to OutputStream immediately. - * Encoder stores current value in a list. When all value is received, flush() method will be - * invoked. Encoder encodes all values and writes them to OutputStream - * - * @param value value to encode - * @param out OutputStream to write encoded stream - * @throws IOException cannot encode value - * @see Encoder#encode(int, java.io.ByteArrayOutputStream) - */ - @Override - public void encode(int value, ByteArrayOutputStream out) throws IOException { - values.add(value); - } + /** + * Each time encoder receives a value, encoder doesn't write it to OutputStream immediately. + * Encoder stores current value in a list. When all value is received, flush() method will be + * invoked. Encoder encodes all values and writes them to OutputStream + * + * @param value value to encode + * @param out OutputStream to write encoded stream + * @throws IOException cannot encode value + * @see Encoder#encode(int, java.io.ByteArrayOutputStream) + */ + @Override + public void encode(int value, ByteArrayOutputStream out) throws IOException { + values.add(value); + } - /** - * When all data received, encoder now encodes values in list and write them to OutputStream - * - * @param out OutputStream to write encoded stream - * @throws IOException cannot flush to OutputStream - * @see Encoder#flush(java.io.ByteArrayOutputStream) - */ - @Override - public void flush(ByteArrayOutputStream out) throws IOException { - // byteCache stores all and we know its size - ByteArrayOutputStream byteCache = new ByteArrayOutputStream(); - Set valueType = new HashSet(values); - int byteNum = (values.size() + 7) / 8; - if (byteNum == 0) { - reset(); - return; - } - int len = values.size(); -// LOGGER.debug("tsfile-encoding BitmapEncoder: number of data in list is {}", len); - for (int value : valueType) { - byte[] buffer = new byte[byteNum]; - for (int i = 0; i < len; i++) { - if (values.get(i) == value) { - int index = i / 8; - int offset = 7 - (i % 8); - // Encoder use 1 bit in byte to indicate that value appears - buffer[index] |= ((byte) 1 << offset); - } - } - ReadWriteStreamUtils.writeUnsignedVarInt(value, byteCache); - byteCache.write(buffer); -// LOGGER.debug("tsfile-encoding BitmapEncoder: encode value {}, bitmap index {}", value, buffer); + /** + * When all data received, encoder now encodes values in list and write them to OutputStream + * + * @param out OutputStream to write encoded stream + * @throws IOException cannot flush to OutputStream + * @see Encoder#flush(java.io.ByteArrayOutputStream) + */ + @Override + public void flush(ByteArrayOutputStream out) throws 
IOException { + // byteCache stores all and we know its size + ByteArrayOutputStream byteCache = new ByteArrayOutputStream(); + Set valueType = new HashSet(values); + int byteNum = (values.size() + 7) / 8; + if (byteNum == 0) { + reset(); + return; + } + int len = values.size(); + // LOGGER.debug("tsfile-encoding BitmapEncoder: number of data in list is {}", len); + for (int value : valueType) { + byte[] buffer = new byte[byteNum]; + for (int i = 0; i < len; i++) { + if (values.get(i) == value) { + int index = i / 8; + int offset = 7 - (i % 8); + // Encoder use 1 bit in byte to indicate that value appears + buffer[index] |= ((byte) 1 << offset); } - ReadWriteStreamUtils.writeUnsignedVarInt(byteCache.size(), out); - ReadWriteStreamUtils.writeUnsignedVarInt(len, out); - out.write(byteCache.toByteArray()); - reset(); + } + ReadWriteStreamUtils.writeUnsignedVarInt(value, byteCache); + byteCache.write(buffer); + // LOGGER.debug("tsfile-encoding BitmapEncoder: encode value {}, bitmap index {}", value, + // buffer); } + ReadWriteStreamUtils.writeUnsignedVarInt(byteCache.size(), out); + ReadWriteStreamUtils.writeUnsignedVarInt(len, out); + out.write(byteCache.toByteArray()); + reset(); + } - private void reset() { - values.clear(); - } + private void reset() { + values.clear(); + } - public int getOneItemMaxSize() { - return 1; - } + public int getOneItemMaxSize() { + return 1; + } - public long getMaxByteSize() { - //byteCacheSize + byteDictSize + (byte array + array length) * byteDictSize - return 4 + 4 + ((values.size() + 7) / 8 + 4) * values.size(); - } + public long getMaxByteSize() { + // byteCacheSize + byteDictSize + (byte array + array length) * byteDictSize + return 4 + 4 + ((values.size() + 7) / 8 + 4) * values.size(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/DeltaBinaryEncoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/DeltaBinaryEncoder.java index 0d7b5b6a..9fdbd378 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/DeltaBinaryEncoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/DeltaBinaryEncoder.java @@ -4,7 +4,6 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -28,285 +27,285 @@ * @author kangrong */ abstract public class DeltaBinaryEncoder extends Encoder { - protected static final int BLOCK_DEFAULT_SIZE = 128; - private static final Logger LOG = LoggerFactory.getLogger(DeltaBinaryEncoder.class); - protected ByteArrayOutputStream out; - protected int blockSize; - // input value is stored in deltaBlackBuffer temporarily - protected byte[] encodingBlockBuffer; + protected static final int BLOCK_DEFAULT_SIZE = 128; + private static final Logger LOG = LoggerFactory.getLogger(DeltaBinaryEncoder.class); + protected ByteArrayOutputStream out; + protected int blockSize; + // input value is stored in deltaBlackBuffer temporarily + protected byte[] encodingBlockBuffer; - protected int writeIndex = -1; - protected int writeWidth = 0; + protected int writeIndex = -1; + protected int writeWidth = 0; - /** - * @param size - the number how many numbers to be packed into a block. - */ - public DeltaBinaryEncoder(int size) { - super(TSEncoding.TS_2DIFF); - blockSize = size; + /** + * @param size - the number how many numbers to be packed into a block. 
+ */ + public DeltaBinaryEncoder(int size) { + super(TSEncoding.TS_2DIFF); + blockSize = size; + } + + protected abstract void writeHeader() throws IOException; + + protected abstract void writeValueToBytes(int i); + + protected abstract void calcTwoDiff(int i); + + protected abstract void reset(); + + protected abstract int calculateBitWidthsForDeltaBlockBuffer(); + + + /** + * write all data into {@code encodingBlockBuffer}. + */ + private void writeDataWithMinWidth() { + for (int i = 0; i < writeIndex; i++) { + writeValueToBytes(i); + } + int encodingLength = (int) Math.ceil((double) (writeIndex * writeWidth) / 8.0); + out.write(encodingBlockBuffer, 0, encodingLength); + } + + private void writeHeaderToBytes() throws IOException { + out.write(BytesUtils.intToBytes(writeIndex)); + out.write(BytesUtils.intToBytes(writeWidth)); + writeHeader(); + } + + + private void flushBlockBuffer(ByteArrayOutputStream out) throws IOException { + if (writeIndex == -1) + return; + // since we store the min delta, the deltas will be converted to be the + // difference to min delta and all positive + this.out = out; + for (int i = 0; i < writeIndex; i++) { + calcTwoDiff(i); } + writeWidth = calculateBitWidthsForDeltaBlockBuffer(); + // System.out.println("write width:"+writeWidth); + writeHeaderToBytes(); + writeDataWithMinWidth(); + + reset(); + writeIndex = -1; + } + + + /** + * calling this method to flush all values which haven't encoded to result byte array + */ + @Override + public void flush(ByteArrayOutputStream out) { + try { + flushBlockBuffer(out); + } catch (IOException e) { + LOG.error("flush data to stream failed!"); + } + } - protected abstract void writeHeader() throws IOException; + static public class IntDeltaEncoder extends DeltaBinaryEncoder { + private int[] deltaBlockBuffer; + private int firstValue; + private int previousValue; + private int minDeltaBase; - protected abstract void writeValueToBytes(int i); + public IntDeltaEncoder() { + this(BLOCK_DEFAULT_SIZE); + } - protected abstract void calcTwoDiff(int i); + public IntDeltaEncoder(int size) { + super(size); + deltaBlockBuffer = new int[this.blockSize]; + encodingBlockBuffer = new byte[blockSize * 4]; + reset(); + } - protected abstract void reset(); + protected int calculateBitWidthsForDeltaBlockBuffer() { + int width = 0; + for (int i = 0; i < writeIndex; i++) { + width = Math.max(width, getValueWidth(deltaBlockBuffer[i])); + } + return width; + } - protected abstract int calculateBitWidthsForDeltaBlockBuffer(); + private void calcDelta(Integer value) { + Integer delta = value - previousValue;// calculate delta + if (delta < minDeltaBase) { + minDeltaBase = delta; + } + deltaBlockBuffer[writeIndex++] = delta; + } /** - * write all data into {@code encodingBlockBuffer}. 
+ * input a integer + * + * @param value value to encode + * @param out the ByteArrayOutputStream which data encode into */ - private void writeDataWithMinWidth() { - for (int i = 0; i < writeIndex; i++) { - writeValueToBytes(i); - } - int encodingLength = (int) Math.ceil((double) (writeIndex * writeWidth) / 8.0); - out.write(encodingBlockBuffer, 0, encodingLength); + public void encodeValue(int value, ByteArrayOutputStream out) { + if (writeIndex == -1) { + writeIndex++; + firstValue = value; + previousValue = firstValue; + return; + } + calcDelta(value); + previousValue = value; + if (writeIndex == blockSize) { + flush(out); + } } - private void writeHeaderToBytes() throws IOException { - out.write(BytesUtils.intToBytes(writeIndex)); - out.write(BytesUtils.intToBytes(writeWidth)); - writeHeader(); + @Override + protected void reset() { + firstValue = 0; + previousValue = 0; + minDeltaBase = Integer.MAX_VALUE; + for (int i = 0; i < blockSize; i++) { + encodingBlockBuffer[i] = 0; + deltaBlockBuffer[i] = 0; + } } - private void flushBlockBuffer(ByteArrayOutputStream out) throws IOException { - if (writeIndex == -1) - return; - // since we store the min delta, the deltas will be converted to be the - // difference to min delta and all positive - this.out = out; - for (int i = 0; i < writeIndex; i++) { - calcTwoDiff(i); - } - writeWidth = calculateBitWidthsForDeltaBlockBuffer(); - // System.out.println("write width:"+writeWidth); - writeHeaderToBytes(); - writeDataWithMinWidth(); - - reset(); - writeIndex = -1; + private int getValueWidth(int v) { + return 32 - Integer.numberOfLeadingZeros(v); } + @Override + protected void writeValueToBytes(int i) { + BytesUtils.intToBytes(deltaBlockBuffer[i], encodingBlockBuffer, writeWidth * i, writeWidth); + } + + @Override + protected void calcTwoDiff(int i) { + deltaBlockBuffer[i] = deltaBlockBuffer[i] - minDeltaBase; + } + + @Override + protected void writeHeader() throws IOException { + out.write(BytesUtils.intToBytes(minDeltaBase)); + out.write(BytesUtils.intToBytes(firstValue)); + } + + @Override + public void encode(int value, ByteArrayOutputStream out) { + encodeValue(value, out); + } + + @Override + public int getOneItemMaxSize() { + return 4; + } - /** - * calling this method to flush all values which haven't encoded to result byte array - */ @Override - public void flush(ByteArrayOutputStream out) { - try { - flushBlockBuffer(out); - } catch (IOException e) { - LOG.error("flush data to stream failed!"); - } + public long getMaxByteSize() { + // The meaning of 24 is: index(4)+width(4)+minDeltaBase(4)+firstValue(4) + return 24 + writeIndex * 4; } + } - static public class IntDeltaEncoder extends DeltaBinaryEncoder { - private int[] deltaBlockBuffer; - private int firstValue; - private int previousValue; - private int minDeltaBase; - - public IntDeltaEncoder() { - this(BLOCK_DEFAULT_SIZE); - } - - public IntDeltaEncoder(int size) { - super(size); - deltaBlockBuffer = new int[this.blockSize]; - encodingBlockBuffer = new byte[blockSize * 4]; - reset(); - } - - protected int calculateBitWidthsForDeltaBlockBuffer() { - int width = 0; - for (int i = 0; i < writeIndex; i++) { - width = Math.max(width, getValueWidth(deltaBlockBuffer[i])); - } - return width; - } - - private void calcDelta(Integer value) { - Integer delta = value - previousValue;// calculate delta - if (delta < minDeltaBase) { - minDeltaBase = delta; - } - deltaBlockBuffer[writeIndex++] = delta; - - } - - /** - * input a integer - * - * @param value value to encode - * @param out the 
ByteArrayOutputStream which data encode into - */ - public void encodeValue(int value, ByteArrayOutputStream out) { - if (writeIndex == -1) { - writeIndex++; - firstValue = value; - previousValue = firstValue; - return; - } - calcDelta(value); - previousValue = value; - if (writeIndex == blockSize) { - flush(out); - } - } - - @Override - protected void reset() { - firstValue = 0; - previousValue = 0; - minDeltaBase = Integer.MAX_VALUE; - for (int i = 0; i < blockSize; i++) { - encodingBlockBuffer[i] = 0; - deltaBlockBuffer[i] = 0; - } - } - - - private int getValueWidth(int v) { - return 32 - Integer.numberOfLeadingZeros(v); - } - - @Override - protected void writeValueToBytes(int i) { - BytesUtils.intToBytes(deltaBlockBuffer[i], encodingBlockBuffer, writeWidth * i, writeWidth); - } - - @Override - protected void calcTwoDiff(int i) { - deltaBlockBuffer[i] = deltaBlockBuffer[i] - minDeltaBase; - } - - @Override - protected void writeHeader() throws IOException { - out.write(BytesUtils.intToBytes(minDeltaBase)); - out.write(BytesUtils.intToBytes(firstValue)); - } - - @Override - public void encode(int value, ByteArrayOutputStream out) { - encodeValue(value, out); - } - - @Override - public int getOneItemMaxSize() { - return 4; - } - - @Override - public long getMaxByteSize() { - // The meaning of 24 is: index(4)+width(4)+minDeltaBase(4)+firstValue(4) - return 24 + writeIndex * 4; - } + static public class LongDeltaEncoder extends DeltaBinaryEncoder { + private long[] deltaBlockBuffer; + private long firstValue; + private long previousValue; + private long minDeltaBase; + + public LongDeltaEncoder() { + this(BLOCK_DEFAULT_SIZE); + } + + public LongDeltaEncoder(int size) { + super(size); + deltaBlockBuffer = new long[this.blockSize]; + encodingBlockBuffer = new byte[blockSize * 8]; + reset(); + } + + private void calcDelta(Long value) { + Long delta = value - previousValue;// calculate delta + if (delta < minDeltaBase) { + minDeltaBase = delta; + } + deltaBlockBuffer[writeIndex++] = delta; + } + + @Override + protected void reset() { + firstValue = 0l; + previousValue = 0l; + minDeltaBase = Long.MAX_VALUE; + for (int i = 0; i < blockSize; i++) { + encodingBlockBuffer[i] = 0; + deltaBlockBuffer[i] = 0l; + } + } + + private int getValueWidth(Long v) { + return 64 - Long.numberOfLeadingZeros(v); + } + + @Override + protected void writeValueToBytes(int i) { + BytesUtils.longToBytes(deltaBlockBuffer[i], encodingBlockBuffer, writeWidth * i, writeWidth); + } + + @Override + protected void calcTwoDiff(int i) { + deltaBlockBuffer[i] = deltaBlockBuffer[i] - minDeltaBase; + } + + @Override + protected void writeHeader() throws IOException { + out.write(BytesUtils.longToBytes(minDeltaBase)); + out.write(BytesUtils.longToBytes(firstValue)); + } + + @Override + public void encode(long value, ByteArrayOutputStream out) { + encodeValue(value, out); + } + + @Override + public int getOneItemMaxSize() { + return 8; + } + + @Override + public long getMaxByteSize() { + // The meaning of 24 is: index(4)+width(4)+minDeltaBase(8)+firstValue(8) + return 24 + writeIndex * 8; + } + + /** + * input a integer or long value. 
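To make the TS_2DIFF block layout above concrete, here is a small, self-contained sketch of the same arithmetic IntDeltaEncoder performs for one block: store the first value, take consecutive deltas, shift them by the minimum delta so they are all non-negative, and pick the smallest bit width that fits. The class name and sample values are invented for illustration.

public class DeltaArithmeticSketch {
  public static void main(String[] args) {
    int[] values = {5, 7, 6, 10};
    int firstValue = values[0];                    // stored verbatim in the block header
    int[] deltas = new int[values.length - 1];
    int minDeltaBase = Integer.MAX_VALUE;
    for (int i = 1; i < values.length; i++) {
      deltas[i - 1] = values[i] - values[i - 1];   // 2, -1, 4
      minDeltaBase = Math.min(minDeltaBase, deltas[i - 1]);
    }
    int width = 0;
    for (int i = 0; i < deltas.length; i++) {
      deltas[i] -= minDeltaBase;                   // shifted deltas: 3, 0, 5 (all non-negative)
      width = Math.max(width, 32 - Integer.numberOfLeadingZeros(deltas[i]));
    }
    // block header: count(4B) + width(4B) + minDeltaBase(4B) + firstValue(4B),
    // followed by the deltas packed at `width` bits each
    System.out.printf("first=%d min=%d width=%d%n", firstValue, minDeltaBase, width);
  }
}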
+ * + * @param value value to encode + * @param out - the ByteArrayOutputStream which data encode into + */ + public void encodeValue(long value, ByteArrayOutputStream out) { + if (writeIndex == -1) { + writeIndex++; + firstValue = value; + previousValue = firstValue; + return; + } + calcDelta(value); + previousValue = value; + if (writeIndex == blockSize) { + flush(out); + } } - static public class LongDeltaEncoder extends DeltaBinaryEncoder { - private long[] deltaBlockBuffer; - private long firstValue; - private long previousValue; - private long minDeltaBase; - - public LongDeltaEncoder() { - this(BLOCK_DEFAULT_SIZE); - } - - public LongDeltaEncoder(int size) { - super(size); - deltaBlockBuffer = new long[this.blockSize]; - encodingBlockBuffer = new byte[blockSize * 8]; - reset(); - } - - private void calcDelta(Long value) { - Long delta = value - previousValue;// calculate delta - if (delta < minDeltaBase) { - minDeltaBase = delta; - } - deltaBlockBuffer[writeIndex++] = delta; - } - - @Override - protected void reset() { - firstValue = 0l; - previousValue = 0l; - minDeltaBase = Long.MAX_VALUE; - for (int i = 0; i < blockSize; i++) { - encodingBlockBuffer[i] = 0; - deltaBlockBuffer[i] = 0l; - } - } - - private int getValueWidth(Long v) { - return 64 - Long.numberOfLeadingZeros(v); - } - - @Override - protected void writeValueToBytes(int i) { - BytesUtils.longToBytes(deltaBlockBuffer[i], encodingBlockBuffer, writeWidth * i, writeWidth); - } - - @Override - protected void calcTwoDiff(int i) { - deltaBlockBuffer[i] = deltaBlockBuffer[i] - minDeltaBase; - } - - @Override - protected void writeHeader() throws IOException { - out.write(BytesUtils.longToBytes(minDeltaBase)); - out.write(BytesUtils.longToBytes(firstValue)); - } - - @Override - public void encode(long value, ByteArrayOutputStream out) { - encodeValue(value, out); - } - - @Override - public int getOneItemMaxSize() { - return 8; - } - - @Override - public long getMaxByteSize() { - // The meaning of 24 is: index(4)+width(4)+minDeltaBase(8)+firstValue(8) - return 24 + writeIndex * 8; - } - - /** - * input a integer or long value. 
- * - * @param value value to encode - * @param out - the ByteArrayOutputStream which data encode into - */ - public void encodeValue(long value, ByteArrayOutputStream out) { - if (writeIndex == -1) { - writeIndex++; - firstValue = value; - previousValue = firstValue; - return; - } - calcDelta(value); - previousValue = value; - if (writeIndex == blockSize) { - flush(out); - } - } - - protected int calculateBitWidthsForDeltaBlockBuffer() { - int width = 0; - for (int i = 0; i < writeIndex; i++) { - width = Math.max(width, getValueWidth(deltaBlockBuffer[i])); - } - return width; - } + protected int calculateBitWidthsForDeltaBlockBuffer() { + int width = 0; + for (int i = 0; i < writeIndex; i++) { + width = Math.max(width, getValueWidth(deltaBlockBuffer[i])); + } + return width; } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/DoublePrecisionEncoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/DoublePrecisionEncoder.java index c47ef66f..a86fe8f2 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/DoublePrecisionEncoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/DoublePrecisionEncoder.java @@ -2,7 +2,6 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; - import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; /** @@ -10,80 +9,83 @@ * */ public class DoublePrecisionEncoder extends GorillaEncoder { - private long preValue; - - public DoublePrecisionEncoder() { - } + private long preValue; - @Override - public void encode(double value, ByteArrayOutputStream out) throws IOException { - if (!flag) { - // case: write first 8 byte value without any encoding - flag = true; - preValue = Double.doubleToLongBits(value); - leadingZeroNum = Long.numberOfLeadingZeros(preValue); - tailingZeroNum = Long.numberOfTrailingZeros(preValue); - byte[] bufferBig = new byte[8]; - byte[] bufferLittle = new byte[8]; + public DoublePrecisionEncoder() {} - for (int i = 0; i < 8; i++) { - bufferLittle[i] = (byte) (((preValue) >> (i * 8)) & 0xFF); - bufferBig[8 - i - 1] = (byte) (((preValue) >> (i * 8)) & 0xFF); - } - out.write(bufferLittle); - } else { - long nextValue = Double.doubleToLongBits(value); - long tmp = nextValue ^ preValue; - if (tmp == 0) { - // case: write '0' - writeBit(false, out); - } else { - int leadingZeroNumTmp = Long.numberOfLeadingZeros(tmp); - int tailingZeroNumTmp = Long.numberOfTrailingZeros(tmp); - if (leadingZeroNumTmp >= leadingZeroNum && tailingZeroNumTmp >= tailingZeroNum) { - // case: write '10' and effective bits without first leadingZeroNum '0' and last tailingZeroNum '0' - writeBit(true, out); - writeBit(false, out); - writeBits(tmp, out, TSFileConfig.DOUBLE_LENGTH - 1 - leadingZeroNum, tailingZeroNum); - } else { - // case: write '11', leading zero num of value, effective bits len and effective bit value - writeBit(true, out); - writeBit(true, out); - writeBits(leadingZeroNumTmp, out, TSFileConfig.DOUBLE_LEADING_ZERO_LENGTH - 1, 0); - writeBits(TSFileConfig.DOUBLE_LENGTH - leadingZeroNumTmp - tailingZeroNumTmp, out, TSFileConfig.DOUBLE_VALUE_LENGTH - 1, 0); - writeBits(tmp, out, TSFileConfig.DOUBLE_LENGTH - 1 - leadingZeroNumTmp, tailingZeroNumTmp); - } - } - preValue = nextValue; - leadingZeroNum = Long.numberOfLeadingZeros(preValue); - tailingZeroNum = Long.numberOfTrailingZeros(preValue); - } - } + @Override + public void encode(double value, ByteArrayOutputStream out) throws IOException { + if (!flag) { + // case: write first 8 byte value without any encoding + flag = true; + preValue = 
Double.doubleToLongBits(value); + leadingZeroNum = Long.numberOfLeadingZeros(preValue); + tailingZeroNum = Long.numberOfTrailingZeros(preValue); + byte[] bufferBig = new byte[8]; + byte[] bufferLittle = new byte[8]; - private void writeBits(long num, ByteArrayOutputStream out, int start, int end) { - for (int i = start; i >= end; i--) { - long bit = num & (1L << i); - writeBit(bit, out); - } - } - - @Override - public void flush(ByteArrayOutputStream out) throws IOException { - encode(Double.NaN, out); - clearBuffer(out); - reset(); - } - - @Override - public int getOneItemMaxSize() { - // case '11' - // 2bit + 6bit + 7bit + 64bit = 79bit - return 10; + for (int i = 0; i < 8; i++) { + bufferLittle[i] = (byte) (((preValue) >> (i * 8)) & 0xFF); + bufferBig[8 - i - 1] = (byte) (((preValue) >> (i * 8)) & 0xFF); + } + out.write(bufferLittle); + } else { + long nextValue = Double.doubleToLongBits(value); + long tmp = nextValue ^ preValue; + if (tmp == 0) { + // case: write '0' + writeBit(false, out); + } else { + int leadingZeroNumTmp = Long.numberOfLeadingZeros(tmp); + int tailingZeroNumTmp = Long.numberOfTrailingZeros(tmp); + if (leadingZeroNumTmp >= leadingZeroNum && tailingZeroNumTmp >= tailingZeroNum) { + // case: write '10' and effective bits without first leadingZeroNum '0' and last + // tailingZeroNum '0' + writeBit(true, out); + writeBit(false, out); + writeBits(tmp, out, TSFileConfig.DOUBLE_LENGTH - 1 - leadingZeroNum, tailingZeroNum); + } else { + // case: write '11', leading zero num of value, effective bits len and effective bit value + writeBit(true, out); + writeBit(true, out); + writeBits(leadingZeroNumTmp, out, TSFileConfig.DOUBLE_LEADING_ZERO_LENGTH - 1, 0); + writeBits(TSFileConfig.DOUBLE_LENGTH - leadingZeroNumTmp - tailingZeroNumTmp, out, + TSFileConfig.DOUBLE_VALUE_LENGTH - 1, 0); + writeBits(tmp, out, TSFileConfig.DOUBLE_LENGTH - 1 - leadingZeroNumTmp, + tailingZeroNumTmp); + } + } + preValue = nextValue; + leadingZeroNum = Long.numberOfLeadingZeros(preValue); + tailingZeroNum = Long.numberOfTrailingZeros(preValue); } + } - @Override - public long getMaxByteSize() { - // max(first 8 byte, case '11' 2bit + 6bit + 7bit + 64bit = 79bit ) + NaN(2bit + 6bit + 7bit + 64bit = 79bit) = 158bit - return 20; + private void writeBits(long num, ByteArrayOutputStream out, int start, int end) { + for (int i = start; i >= end; i--) { + long bit = num & (1L << i); + writeBit(bit, out); } + } + + @Override + public void flush(ByteArrayOutputStream out) throws IOException { + encode(Double.NaN, out); + clearBuffer(out); + reset(); + } + + @Override + public int getOneItemMaxSize() { + // case '11' + // 2bit + 6bit + 7bit + 64bit = 79bit + return 10; + } + + @Override + public long getMaxByteSize() { + // max(first 8 byte, case '11' 2bit + 6bit + 7bit + 64bit = 79bit ) + NaN(2bit + 6bit + 7bit + + // 64bit = 79bit) = 158bit + return 20; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/Encoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/Encoder.java index f19183fe..c355d51e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/Encoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/Encoder.java @@ -3,7 +3,6 @@ import cn.edu.tsinghua.tsfile.common.exception.TSFileEncodingException; import cn.edu.tsinghua.tsfile.common.utils.Binary; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.math.BigDecimal; @@ -16,62 +15,62 @@ */ public 
abstract class Encoder { - public TSEncoding type; - - public Encoder(TSEncoding type) { - this.type = type; - } - - public void encode(boolean value, ByteArrayOutputStream out) throws IOException { - throw new TSFileEncodingException("Method encode boolean is not supported by Encoder"); - } - - public void encode(short value, ByteArrayOutputStream out) throws IOException { - throw new TSFileEncodingException("Method encode short is not supported by Encoder"); - } - - public void encode(int value, ByteArrayOutputStream out) throws IOException { - throw new TSFileEncodingException("Method encode int is not supported by Encoder"); - } - - public void encode(long value, ByteArrayOutputStream out) throws IOException { - throw new TSFileEncodingException("Method encode long is not supported by Encoder"); - } - - public void encode(float value, ByteArrayOutputStream out) throws IOException { - throw new TSFileEncodingException("Method encode float is not supported by Encoder"); - } - - public void encode(double value, ByteArrayOutputStream out) throws IOException { - throw new TSFileEncodingException("Method encode double is not supported by Encoder"); - } - - public void encode(Binary value, ByteArrayOutputStream out) throws IOException { - throw new TSFileEncodingException("Method encode Binary is not supported by Encoder"); - } - - public void encode(BigDecimal value, ByteArrayOutputStream out) throws IOException { - throw new TSFileEncodingException("Method encode BigDecimal is not supported by Encoder"); - } - - public abstract void flush(ByteArrayOutputStream out) throws IOException; - - /** - * return the maximal possible size of one data item. - * - * @return the maximal possible size of one data item encoded by this encoder - */ - public int getOneItemMaxSize() { - throw new UnsupportedOperationException(); - } - - /** - * this function returns the maximal possible memory size occupied by current Encoder. This statistic is extra - * memory size for Encoder and doesn't involve OutputStream. 
- * - * @return the maximal size of possible memory occupied by current encoder - */ - public long getMaxByteSize() { - throw new UnsupportedOperationException(); - } + public TSEncoding type; + + public Encoder(TSEncoding type) { + this.type = type; + } + + public void encode(boolean value, ByteArrayOutputStream out) throws IOException { + throw new TSFileEncodingException("Method encode boolean is not supported by Encoder"); + } + + public void encode(short value, ByteArrayOutputStream out) throws IOException { + throw new TSFileEncodingException("Method encode short is not supported by Encoder"); + } + + public void encode(int value, ByteArrayOutputStream out) throws IOException { + throw new TSFileEncodingException("Method encode int is not supported by Encoder"); + } + + public void encode(long value, ByteArrayOutputStream out) throws IOException { + throw new TSFileEncodingException("Method encode long is not supported by Encoder"); + } + + public void encode(float value, ByteArrayOutputStream out) throws IOException { + throw new TSFileEncodingException("Method encode float is not supported by Encoder"); + } + + public void encode(double value, ByteArrayOutputStream out) throws IOException { + throw new TSFileEncodingException("Method encode double is not supported by Encoder"); + } + + public void encode(Binary value, ByteArrayOutputStream out) throws IOException { + throw new TSFileEncodingException("Method encode Binary is not supported by Encoder"); + } + + public void encode(BigDecimal value, ByteArrayOutputStream out) throws IOException { + throw new TSFileEncodingException("Method encode BigDecimal is not supported by Encoder"); + } + + public abstract void flush(ByteArrayOutputStream out) throws IOException; + + /** + * return the maximal possible size of one data item. + * + * @return the maximal possible size of one data item encoded by this encoder + */ + public int getOneItemMaxSize() { + throw new UnsupportedOperationException(); + } + + /** + * this function returns the maximal possible memory size occupied by current Encoder. This + * statistic is extra memory size for Encoder and doesn't involve OutputStream. + * + * @return the maximal size of possible memory occupied by current encoder + */ + public long getMaxByteSize() { + throw new UnsupportedOperationException(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/FloatEncoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/FloatEncoder.java index b8a5bacf..a6b14ed7 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/FloatEncoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/FloatEncoder.java @@ -7,13 +7,12 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.ByteArrayOutputStream; import java.io.IOException; /** - * Encoder for float or double value using rle or two diff - * according to following grammar: + * Encoder for float or double value using rle or two diff according to following grammar: + * *
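The Encoder base class reformatted above follows a dispatch-by-type pattern: every encode overload throws TSFileEncodingException unless a subclass overrides it, and flush() is the only abstract method. A minimal sketch of a concrete subclass is shown below; it is illustrative only and does not exist in the codebase, and it reuses the existing TSEncoding.PLAIN constant purely for the example.

import cn.edu.tsinghua.tsfile.encoding.encoder.Encoder;
import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class SingleByteEncoderSketch extends Encoder {
  public SingleByteEncoderSketch() {
    super(TSEncoding.PLAIN); // reuse an existing enum constant for the sketch
  }

  @Override
  public void encode(int value, ByteArrayOutputStream out) throws IOException {
    out.write(value & 0xFF); // toy encoding: low byte only; other overloads still throw
  }

  @Override
  public void flush(ByteArrayOutputStream out) throws IOException {
    // nothing is buffered in this toy encoder, so flush has no work to do
  }
}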
  * {@code
  * float encoder:  
@@ -23,113 +22,113 @@
  * 
*/ public class FloatEncoder extends Encoder { - private static final Logger LOGGER = LoggerFactory.getLogger(FloatEncoder.class); - private Encoder encoder; - - /** - * number for accuracy of decimal places - */ - private int maxPointNumber; - - /** - * maxPointValue = 10^(maxPointNumber) - */ - private double maxPointValue; - - /** - * flag to check whether maxPointNumber is saved in stream - */ - private boolean isMaxPointNumberSaved; - - public FloatEncoder(TSEncoding encodingType, TSDataType dataType, int maxPointNumber) { - super(encodingType); - this.maxPointNumber = maxPointNumber; - calculateMaxPonitNum(); - isMaxPointNumberSaved = false; - if (encodingType == TSEncoding.RLE) { - if (dataType == TSDataType.FLOAT) { - encoder = new IntRleEncoder(EndianType.LITTLE_ENDIAN); - LOGGER.debug("tsfile-encoding FloatEncoder: init encoder using int-rle and float"); - } else if (dataType == TSDataType.DOUBLE) { - encoder = new LongRleEncoder(EndianType.LITTLE_ENDIAN); - LOGGER.debug("tsfile-encoding FloatEncoder: init encoder using long-rle and double"); - } else { - throw new TSFileEncodingException( - String.format("data type %s is not supported by FloatEncoder", dataType)); - } - } else if (encodingType == TSEncoding.TS_2DIFF) { - if (dataType == TSDataType.FLOAT) { - encoder = new DeltaBinaryEncoder.IntDeltaEncoder(); - LOGGER.debug("tsfile-encoding FloatEncoder: init encoder using int-delta and float"); - } else if (dataType == TSDataType.DOUBLE) { - encoder = new DeltaBinaryEncoder.LongDeltaEncoder(); - LOGGER.debug("tsfile-encoding FloatEncoder: init encoder using long-delta and double"); - } else { - throw new TSFileEncodingException( - String.format("data type %s is not supported by FloatEncoder", dataType)); - } - } else { - throw new TSFileEncodingException( - String.format("%s encoding is not supported by FloatEncoder", encodingType)); - } + private static final Logger LOGGER = LoggerFactory.getLogger(FloatEncoder.class); + private Encoder encoder; + + /** + * number for accuracy of decimal places + */ + private int maxPointNumber; + + /** + * maxPointValue = 10^(maxPointNumber) + */ + private double maxPointValue; + + /** + * flag to check whether maxPointNumber is saved in stream + */ + private boolean isMaxPointNumberSaved; + + public FloatEncoder(TSEncoding encodingType, TSDataType dataType, int maxPointNumber) { + super(encodingType); + this.maxPointNumber = maxPointNumber; + calculateMaxPonitNum(); + isMaxPointNumberSaved = false; + if (encodingType == TSEncoding.RLE) { + if (dataType == TSDataType.FLOAT) { + encoder = new IntRleEncoder(EndianType.LITTLE_ENDIAN); + LOGGER.debug("tsfile-encoding FloatEncoder: init encoder using int-rle and float"); + } else if (dataType == TSDataType.DOUBLE) { + encoder = new LongRleEncoder(EndianType.LITTLE_ENDIAN); + LOGGER.debug("tsfile-encoding FloatEncoder: init encoder using long-rle and double"); + } else { + throw new TSFileEncodingException( + String.format("data type %s is not supported by FloatEncoder", dataType)); + } + } else if (encodingType == TSEncoding.TS_2DIFF) { + if (dataType == TSDataType.FLOAT) { + encoder = new DeltaBinaryEncoder.IntDeltaEncoder(); + LOGGER.debug("tsfile-encoding FloatEncoder: init encoder using int-delta and float"); + } else if (dataType == TSDataType.DOUBLE) { + encoder = new DeltaBinaryEncoder.LongDeltaEncoder(); + LOGGER.debug("tsfile-encoding FloatEncoder: init encoder using long-delta and double"); + } else { + throw new TSFileEncodingException( + String.format("data type %s is not supported by 
FloatEncoder", dataType)); + } + } else { + throw new TSFileEncodingException( + String.format("%s encoding is not supported by FloatEncoder", encodingType)); } - - @Override - public void encode(float value, ByteArrayOutputStream out) throws IOException { - saveMaxPointNumber(out); - int valueInt = convertFloatToInt(value); - encoder.encode(valueInt, out); + } + + @Override + public void encode(float value, ByteArrayOutputStream out) throws IOException { + saveMaxPointNumber(out); + int valueInt = convertFloatToInt(value); + encoder.encode(valueInt, out); + } + + @Override + public void encode(double value, ByteArrayOutputStream out) throws IOException { + saveMaxPointNumber(out); + long valueLong = convertDoubleToLong(value); + encoder.encode(valueLong, out); + } + + private void calculateMaxPonitNum() { + if (maxPointNumber <= 0) { + maxPointNumber = 0; + maxPointValue = 1; + } else { + maxPointValue = Math.pow(10, maxPointNumber); } - - @Override - public void encode(double value, ByteArrayOutputStream out) throws IOException { - saveMaxPointNumber(out); - long valueLong = convertDoubleToLong(value); - encoder.encode(valueLong, out); + } + + private int convertFloatToInt(float value) { + return (int) Math.round(value * maxPointValue); + } + + private long convertDoubleToLong(double value) { + return Math.round(value * maxPointValue); + } + + @Override + public void flush(ByteArrayOutputStream out) throws IOException { + encoder.flush(out); + reset(); + } + + private void reset() { + isMaxPointNumberSaved = false; + } + + private void saveMaxPointNumber(ByteArrayOutputStream out) throws IOException { + if (!isMaxPointNumberSaved) { + ReadWriteStreamUtils.writeUnsignedVarInt(maxPointNumber, out); + isMaxPointNumberSaved = true; } + } - private void calculateMaxPonitNum() { - if (maxPointNumber <= 0) { - maxPointNumber = 0; - maxPointValue = 1; - } else { - maxPointValue = Math.pow(10, maxPointNumber); - } - } + @Override + public int getOneItemMaxSize() { + return encoder.getOneItemMaxSize(); + } - private int convertFloatToInt(float value) { - return (int) Math.round(value * maxPointValue); - } - - private long convertDoubleToLong(double value) { - return Math.round(value * maxPointValue); - } - - @Override - public void flush(ByteArrayOutputStream out) throws IOException { - encoder.flush(out); - reset(); - } - - private void reset() { - isMaxPointNumberSaved = false; - } - - private void saveMaxPointNumber(ByteArrayOutputStream out) throws IOException { - if (!isMaxPointNumberSaved) { - ReadWriteStreamUtils.writeUnsignedVarInt(maxPointNumber, out); - isMaxPointNumberSaved = true; - } - } - - @Override - public int getOneItemMaxSize() { - return encoder.getOneItemMaxSize(); - } - - @Override - public long getMaxByteSize() { - return encoder.getMaxByteSize(); - } + @Override + public long getMaxByteSize() { + return encoder.getMaxByteSize(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/GorillaEncoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/GorillaEncoder.java index 3feff798..62e29efe 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/GorillaEncoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/GorillaEncoder.java @@ -1,64 +1,67 @@ package cn.edu.tsinghua.tsfile.encoding.encoder; import java.io.ByteArrayOutputStream; - import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; /** - * Gorilla encoding. 
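The FloatEncoder code above reduces floating-point encoding to integer encoding by fixed-point scaling: maxPointValue = 10^maxPointNumber, each value is multiplied by it and rounded, and maxPointNumber itself is written once as an unsigned var int before the first value. The snippet below is a hypothetical illustration of that conversion only (class name and sample values are invented).

public class FloatScalingSketch {
  public static void main(String[] args) {
    int maxPointNumber = 2;
    double maxPointValue = Math.pow(10, maxPointNumber); // 100.0
    float[] samples = {3.14159f, 2.71828f, -0.5f};
    for (float v : samples) {
      int scaled = (int) Math.round(v * maxPointValue);  // 314, 272, -50
      System.out.println(v + " -> " + scaled);           // these ints go to the delegate encoder
    }
  }
}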
For more information about how it works, - * please see http://www.vldb.org/pvldb/vol8/p1816-teller.pdf + * Gorilla encoding. For more information about how it works, please see + * http://www.vldb.org/pvldb/vol8/p1816-teller.pdf */ -public abstract class GorillaEncoder extends Encoder{ - // flag to indicate whether the first value is saved - protected boolean flag; - protected int leadingZeroNum, tailingZeroNum; - // 8-bit buffer of bits to write out - protected byte buffer; - // number of bits remaining in buffer - protected int numberLeftInBuffer; - - public GorillaEncoder() { - super(TSEncoding.GORILLA); - this.flag = false; - } +public abstract class GorillaEncoder extends Encoder { + // flag to indicate whether the first value is saved + protected boolean flag; + protected int leadingZeroNum, tailingZeroNum; + // 8-bit buffer of bits to write out + protected byte buffer; + // number of bits remaining in buffer + protected int numberLeftInBuffer; + + public GorillaEncoder() { + super(TSEncoding.GORILLA); + this.flag = false; + } + + protected void writeBit(boolean b, ByteArrayOutputStream out) { + // add bit to buffer + buffer <<= 1; + if (b) + buffer |= 1; + + // if buffer is full (8 bits), write out as a single byte + numberLeftInBuffer++; + if (numberLeftInBuffer == 8) + clearBuffer(out); + } + + protected void writeBit(int i, ByteArrayOutputStream out) { + if (i == 0) { + writeBit(false, out); + } else { + writeBit(true, out); + } + } + + protected void writeBit(long i, ByteArrayOutputStream out) { + if (i == 0) { + writeBit(false, out); + } else { + writeBit(true, out); + } + } - protected void writeBit(boolean b, ByteArrayOutputStream out){ - // add bit to buffer - buffer <<= 1; - if (b) buffer |= 1; + protected void clearBuffer(ByteArrayOutputStream out) { + if (numberLeftInBuffer == 0) + return; + if (numberLeftInBuffer > 0) + buffer <<= (8 - numberLeftInBuffer); + out.write(buffer); + numberLeftInBuffer = 0; + buffer = 0; + } - // if buffer is full (8 bits), write out as a single byte - numberLeftInBuffer++; - if (numberLeftInBuffer == 8) clearBuffer(out); - } - - protected void writeBit(int i, ByteArrayOutputStream out){ - if(i == 0){ - writeBit(false, out); - } else{ - writeBit(true, out); - } - } - - protected void writeBit(long i, ByteArrayOutputStream out){ - if(i == 0){ - writeBit(false, out); - } else{ - writeBit(true, out); - } - } - - protected void clearBuffer(ByteArrayOutputStream out){ - if (numberLeftInBuffer == 0) return; - if (numberLeftInBuffer > 0) buffer <<= (8 - numberLeftInBuffer); - out.write(buffer); - numberLeftInBuffer = 0; - buffer = 0; - } - - protected void reset(){ - this.flag = false; - this.numberLeftInBuffer = 0; - this.buffer = 0; - } + protected void reset() { + this.flag = false; + this.numberLeftInBuffer = 0; + this.buffer = 0; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/IntRleEncoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/IntRleEncoder.java index 5575e8fb..a79f34f9 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/IntRleEncoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/IntRleEncoder.java @@ -1,118 +1,116 @@ -package cn.edu.tsinghua.tsfile.encoding.encoder; - -import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; -import cn.edu.tsinghua.tsfile.encoding.bitpacking.IntPacker; -import cn.edu.tsinghua.tsfile.encoding.common.EndianType; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.ArrayList; - - -/** - * 
Encoder for int value using rle or bit-packing - */ -public class IntRleEncoder extends RleEncoder { - - /** - * Packer for packing int value - */ - private IntPacker packer; - - public IntRleEncoder(EndianType endianType) { - super(endianType); - bufferedValues = new Integer[config.RLE_MIN_REPEATED_NUM]; - preValue = 0; - values = new ArrayList(); - } - - @Override - public void encode(int value, ByteArrayOutputStream out) { - values.add(value); - } - - @Override - public void encode(boolean value, ByteArrayOutputStream out) { - if (value) { - this.encode(1, out); - } else { - this.encode(0, out); - } - } - - /** - * write all values buffered in cache to OutputStream - * - * @param out - byteArrayOutputStream - * @throws IOException cannot flush to OutputStream - */ - @Override - public void flush(ByteArrayOutputStream out) throws IOException { - // we get bit width after receiving all data - this.bitWidth = ReadWriteStreamUtils.getIntMaxBitWidth(values); - packer = new IntPacker(bitWidth); - for (Integer value : values) { - encodeValue(value); - } - super.flush(out); - } - - @Override - protected void reset() { - super.reset(); - preValue = 0; - } - - /** - * write bytes to OutputStream using rle - * rle format: [header][value] - */ - @Override - protected void writeRleRun() throws IOException { - endPreviousBitPackedRun(config.RLE_MIN_REPEATED_NUM); - ReadWriteStreamUtils.writeUnsignedVarInt(repeatCount << 1, byteCache); - ReadWriteStreamUtils.writeIntLittleEndianPaddedOnBitWidth(preValue, byteCache, bitWidth); - repeatCount = 0; - numBufferedValues = 0; - } - - @Override - protected void clearBuffer() { - - for (int i = numBufferedValues; i < config.RLE_MIN_REPEATED_NUM; i++) { - bufferedValues[i] = 0; - } - } - - @Override - protected void convertBuffer() { - byte[] bytes = new byte[bitWidth]; - - int[] tmpBuffer = new int[config.RLE_MIN_REPEATED_NUM]; - for (int i = 0; i < config.RLE_MIN_REPEATED_NUM; i++) { - tmpBuffer[i] = (int) bufferedValues[i]; - } - packer.pack8Values(tmpBuffer, 0, bytes); - // we'll not write bit-packing group to OutputStream immediately - // we buffer them in list - bytesBuffer.add(bytes); - } - - @Override - public int getOneItemMaxSize() { - // The meaning of 45 is: - // 4 + 4 + max(4+4,1 + 4 + 4 * 8) - // length + bitwidth + max(rle-header + num, bit-header + lastNum + 8packer) - return 45; - } - - @Override - public long getMaxByteSize() { - if (values == null) { - return 0; - } - // try to caculate max value - int groupNum = (values.size() / 8 + 1) / 63 + 1; - return 8 + groupNum * 5 + values.size() * 4; - } -} \ No newline at end of file +package cn.edu.tsinghua.tsfile.encoding.encoder; + +import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; +import cn.edu.tsinghua.tsfile.encoding.bitpacking.IntPacker; +import cn.edu.tsinghua.tsfile.encoding.common.EndianType; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.ArrayList; + + +/** + * Encoder for int value using rle or bit-packing + */ +public class IntRleEncoder extends RleEncoder { + + /** + * Packer for packing int value + */ + private IntPacker packer; + + public IntRleEncoder(EndianType endianType) { + super(endianType); + bufferedValues = new Integer[config.RLE_MIN_REPEATED_NUM]; + preValue = 0; + values = new ArrayList(); + } + + @Override + public void encode(int value, ByteArrayOutputStream out) { + values.add(value); + } + + @Override + public void encode(boolean value, ByteArrayOutputStream out) { + if (value) { + this.encode(1, out); + } else { + 
this.encode(0, out); + } + } + + /** + * write all values buffered in cache to OutputStream + * + * @param out - byteArrayOutputStream + * @throws IOException cannot flush to OutputStream + */ + @Override + public void flush(ByteArrayOutputStream out) throws IOException { + // we get bit width after receiving all data + this.bitWidth = ReadWriteStreamUtils.getIntMaxBitWidth(values); + packer = new IntPacker(bitWidth); + for (Integer value : values) { + encodeValue(value); + } + super.flush(out); + } + + @Override + protected void reset() { + super.reset(); + preValue = 0; + } + + /** + * write bytes to OutputStream using rle rle format: [header][value] + */ + @Override + protected void writeRleRun() throws IOException { + endPreviousBitPackedRun(config.RLE_MIN_REPEATED_NUM); + ReadWriteStreamUtils.writeUnsignedVarInt(repeatCount << 1, byteCache); + ReadWriteStreamUtils.writeIntLittleEndianPaddedOnBitWidth(preValue, byteCache, bitWidth); + repeatCount = 0; + numBufferedValues = 0; + } + + @Override + protected void clearBuffer() { + + for (int i = numBufferedValues; i < config.RLE_MIN_REPEATED_NUM; i++) { + bufferedValues[i] = 0; + } + } + + @Override + protected void convertBuffer() { + byte[] bytes = new byte[bitWidth]; + + int[] tmpBuffer = new int[config.RLE_MIN_REPEATED_NUM]; + for (int i = 0; i < config.RLE_MIN_REPEATED_NUM; i++) { + tmpBuffer[i] = (int) bufferedValues[i]; + } + packer.pack8Values(tmpBuffer, 0, bytes); + // we'll not write bit-packing group to OutputStream immediately + // we buffer them in list + bytesBuffer.add(bytes); + } + + @Override + public int getOneItemMaxSize() { + // The meaning of 45 is: + // 4 + 4 + max(4+4,1 + 4 + 4 * 8) + // length + bitwidth + max(rle-header + num, bit-header + lastNum + 8packer) + return 45; + } + + @Override + public long getMaxByteSize() { + if (values == null) { + return 0; + } + // try to caculate max value + int groupNum = (values.size() / 8 + 1) / 63 + 1; + return 8 + groupNum * 5 + values.size() * 4; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/LongRleEncoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/LongRleEncoder.java index 709497a2..5290fa2e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/LongRleEncoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/LongRleEncoder.java @@ -1,105 +1,105 @@ -package cn.edu.tsinghua.tsfile.encoding.encoder; - -import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; -import cn.edu.tsinghua.tsfile.encoding.bitpacking.LongPacker; -import cn.edu.tsinghua.tsfile.encoding.common.EndianType; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.ArrayList; - -/** - * Encoder for long value using rle or bit-packing - */ -public class LongRleEncoder extends RleEncoder { - - /** - * Packer for packing long value - */ - private LongPacker packer; - - public LongRleEncoder(EndianType endianType) { - super(endianType); - bufferedValues = new Long[config.RLE_MIN_REPEATED_NUM]; - preValue = (long) 0; - values = new ArrayList(); - } - - @Override - public void encode(long value, ByteArrayOutputStream out) { - values.add(value); - } - - /** - * write all values buffered in cache to OutputStream - * - * @param out - byteArrayOutputStream - * @throws IOException cannot flush to OutputStream - */ - @Override - public void flush(ByteArrayOutputStream out) throws IOException { - // we get bit width after receiving all data - this.bitWidth = ReadWriteStreamUtils.getLongMaxBitWidth(values); - 
packer = new LongPacker(bitWidth); - for (Long value : values) { - encodeValue(value); - } - super.flush(out); - } - - @Override - protected void reset() { - super.reset(); - preValue = (long) 0; - } - - /** - * write bytes to OutputStream using rle rle format: [header][value] - * @throws IOException cannot write rle run - */ - @Override - protected void writeRleRun() throws IOException { - endPreviousBitPackedRun(config.RLE_MIN_REPEATED_NUM); - ReadWriteStreamUtils.writeUnsignedVarInt(repeatCount << 1, byteCache); - ReadWriteStreamUtils.writeLongLittleEndianPaddedOnBitWidth(preValue, byteCache, bitWidth); - repeatCount = 0; - numBufferedValues = 0; - } - - @Override - protected void clearBuffer() { - for (int i = numBufferedValues; i < config.RLE_MIN_REPEATED_NUM; i++) { - bufferedValues[i] = (long) 0; - } - } - - @Override - protected void convertBuffer() { - byte[] bytes = new byte[bitWidth]; - long[] tmpBuffer = new long[config.RLE_MIN_REPEATED_NUM]; - for (int i = 0; i < config.RLE_MIN_REPEATED_NUM; i++) { - tmpBuffer[i] = (long) bufferedValues[i]; - } - packer.pack8Values(tmpBuffer, 0, bytes); - // we'll not write bit-packing group to OutputStream immediately - // we buffer them in list - bytesBuffer.add(bytes); - } - - @Override - public int getOneItemMaxSize() { - // 4 + 4 + max(4+8,1 + 4 + 8 * 8) - // length + bitwidth + max(rle-header + num, bit-header + lastNum + 8packer) - return 77; - } - - @Override - public long getMaxByteSize() { - if (values == null) { - return 0; - } - // try to caculate max value - int groupNum = (values.size() / 8 + 1) / 63 + 1; - return 8 + groupNum * 5 + values.size() * 8; - } -} \ No newline at end of file +package cn.edu.tsinghua.tsfile.encoding.encoder; + +import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; +import cn.edu.tsinghua.tsfile.encoding.bitpacking.LongPacker; +import cn.edu.tsinghua.tsfile.encoding.common.EndianType; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.ArrayList; + +/** + * Encoder for long value using rle or bit-packing + */ +public class LongRleEncoder extends RleEncoder { + + /** + * Packer for packing long value + */ + private LongPacker packer; + + public LongRleEncoder(EndianType endianType) { + super(endianType); + bufferedValues = new Long[config.RLE_MIN_REPEATED_NUM]; + preValue = (long) 0; + values = new ArrayList(); + } + + @Override + public void encode(long value, ByteArrayOutputStream out) { + values.add(value); + } + + /** + * write all values buffered in cache to OutputStream + * + * @param out - byteArrayOutputStream + * @throws IOException cannot flush to OutputStream + */ + @Override + public void flush(ByteArrayOutputStream out) throws IOException { + // we get bit width after receiving all data + this.bitWidth = ReadWriteStreamUtils.getLongMaxBitWidth(values); + packer = new LongPacker(bitWidth); + for (Long value : values) { + encodeValue(value); + } + super.flush(out); + } + + @Override + protected void reset() { + super.reset(); + preValue = (long) 0; + } + + /** + * write bytes to OutputStream using rle rle format: [header][value] + * + * @throws IOException cannot write rle run + */ + @Override + protected void writeRleRun() throws IOException { + endPreviousBitPackedRun(config.RLE_MIN_REPEATED_NUM); + ReadWriteStreamUtils.writeUnsignedVarInt(repeatCount << 1, byteCache); + ReadWriteStreamUtils.writeLongLittleEndianPaddedOnBitWidth(preValue, byteCache, bitWidth); + repeatCount = 0; + numBufferedValues = 0; + } + + @Override + protected void 
clearBuffer() { + for (int i = numBufferedValues; i < config.RLE_MIN_REPEATED_NUM; i++) { + bufferedValues[i] = (long) 0; + } + } + + @Override + protected void convertBuffer() { + byte[] bytes = new byte[bitWidth]; + long[] tmpBuffer = new long[config.RLE_MIN_REPEATED_NUM]; + for (int i = 0; i < config.RLE_MIN_REPEATED_NUM; i++) { + tmpBuffer[i] = (long) bufferedValues[i]; + } + packer.pack8Values(tmpBuffer, 0, bytes); + // we'll not write bit-packing group to OutputStream immediately + // we buffer them in list + bytesBuffer.add(bytes); + } + + @Override + public int getOneItemMaxSize() { + // 4 + 4 + max(4+8,1 + 4 + 8 * 8) + // length + bitwidth + max(rle-header + num, bit-header + lastNum + 8packer) + return 77; + } + + @Override + public long getMaxByteSize() { + if (values == null) { + return 0; + } + // try to caculate max value + int groupNum = (values.size() / 8 + 1) / 63 + 1; + return 8 + groupNum * 5 + values.size() * 8; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/PlainEncoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/PlainEncoder.java index 2750c5b1..db919796 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/PlainEncoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/PlainEncoder.java @@ -8,7 +8,6 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.math.BigDecimal; @@ -18,144 +17,145 @@ * @author Zhang Jinrui */ public class PlainEncoder extends Encoder { - private static final Logger LOGGER = LoggerFactory.getLogger(PlainEncoder.class); - public EndianType endianType; - private TSDataType dataType; - private int maxStringLength; - - public PlainEncoder(EndianType endianType, TSDataType dataType, int maxStringLength) { - super(TSEncoding.PLAIN); - this.endianType = endianType; - this.dataType = dataType; - this.maxStringLength = maxStringLength; - } - - @Override - public void encode(boolean value, ByteArrayOutputStream out) { - if (value) { - out.write(1); - } else { - out.write(0); - } + private static final Logger LOGGER = LoggerFactory.getLogger(PlainEncoder.class); + public EndianType endianType; + private TSDataType dataType; + private int maxStringLength; + + public PlainEncoder(EndianType endianType, TSDataType dataType, int maxStringLength) { + super(TSEncoding.PLAIN); + this.endianType = endianType; + this.dataType = dataType; + this.maxStringLength = maxStringLength; + } + + @Override + public void encode(boolean value, ByteArrayOutputStream out) { + if (value) { + out.write(1); + } else { + out.write(0); } - - @Override - public void encode(short value, ByteArrayOutputStream out) { - if (this.endianType == EndianType.LITTLE_ENDIAN) { - out.write((value >> 0) & 0xFF); - out.write((value >> 8) & 0xFF); - } else if (this.endianType == EndianType.BIG_ENDIAN) { - LOGGER.error( - "tsfile-encoding PlainEncoder: current version does not support short value encoding"); - throw new TSFileEncodingException( - "tsfile-encoding PlainEncoder: current version does not support short value encoding"); - // out.write((value >> 8) & 0xFF); - // out.write((value >> 0) & 0xFF); - } + } + + @Override + public void encode(short value, ByteArrayOutputStream out) { + if (this.endianType == EndianType.LITTLE_ENDIAN) { + out.write((value >> 0) & 0xFF); + out.write((value >> 8) & 0xFF); + } else if (this.endianType == EndianType.BIG_ENDIAN) { + LOGGER.error( + 
"tsfile-encoding PlainEncoder: current version does not support short value encoding"); + throw new TSFileEncodingException( + "tsfile-encoding PlainEncoder: current version does not support short value encoding"); + // out.write((value >> 8) & 0xFF); + // out.write((value >> 0) & 0xFF); } - - @Override - public void encode(int value, ByteArrayOutputStream out) { - if (this.endianType == EndianType.LITTLE_ENDIAN) { - out.write((value >> 0) & 0xFF); - out.write((value >> 8) & 0xFF); - out.write((value >> 16) & 0xFF); - out.write((value >> 24) & 0xFF); - } else if (this.endianType == EndianType.BIG_ENDIAN) { - LOGGER.error( - "tsfile-encoding PlainEncoder: current version does not support int value encoding"); - throw new TSFileEncodingException( - "tsfile-encoding PlainEncoder: current version does not support int value encoding"); - // out.write((value >> 24) & 0xFF); - // out.write((value >> 16) & 0xFF); - // out.write((value >> 8) & 0xFF); - // out.write((value >> 0) & 0xFF); - } + } + + @Override + public void encode(int value, ByteArrayOutputStream out) { + if (this.endianType == EndianType.LITTLE_ENDIAN) { + out.write((value >> 0) & 0xFF); + out.write((value >> 8) & 0xFF); + out.write((value >> 16) & 0xFF); + out.write((value >> 24) & 0xFF); + } else if (this.endianType == EndianType.BIG_ENDIAN) { + LOGGER.error( + "tsfile-encoding PlainEncoder: current version does not support int value encoding"); + throw new TSFileEncodingException( + "tsfile-encoding PlainEncoder: current version does not support int value encoding"); + // out.write((value >> 24) & 0xFF); + // out.write((value >> 16) & 0xFF); + // out.write((value >> 8) & 0xFF); + // out.write((value >> 0) & 0xFF); } + } - @Override - public void encode(long value, ByteArrayOutputStream out) { - byte[] bufferBig = new byte[8]; - byte[] bufferLittle = new byte[8]; - - for (int i = 0; i < 8; i++) { - bufferLittle[i] = (byte) (((value) >> (i * 8)) & 0xFF); - bufferBig[8 - i - 1] = (byte) (((value) >> (i * 8)) & 0xFF); - } - try { - if (this.endianType == EndianType.LITTLE_ENDIAN) { - out.write(bufferLittle); - } else if (this.endianType == EndianType.BIG_ENDIAN) { - LOGGER.error( - "tsfile-encoding PlainEncoder: current version does not support long value encoding"); - throw new TSFileEncodingException( - "tsfile-encoding PlainEncoder: current version does not support long value encoding"); -// out.write(bufferBig); - } - } catch (IOException e) { - LOGGER.error("tsfile-encoding PlainEncoder: error occurs when encode long value {}", value, - e); - } - } + @Override + public void encode(long value, ByteArrayOutputStream out) { + byte[] bufferBig = new byte[8]; + byte[] bufferLittle = new byte[8]; - @Override - public void encode(float value, ByteArrayOutputStream out) { - encode(Float.floatToIntBits(value), out); + for (int i = 0; i < 8; i++) { + bufferLittle[i] = (byte) (((value) >> (i * 8)) & 0xFF); + bufferBig[8 - i - 1] = (byte) (((value) >> (i * 8)) & 0xFF); } - - @Override - public void encode(double value, ByteArrayOutputStream out) { - encode(Double.doubleToLongBits(value), out); + try { + if (this.endianType == EndianType.LITTLE_ENDIAN) { + out.write(bufferLittle); + } else if (this.endianType == EndianType.BIG_ENDIAN) { + LOGGER.error( + "tsfile-encoding PlainEncoder: current version does not support long value encoding"); + throw new TSFileEncodingException( + "tsfile-encoding PlainEncoder: current version does not support long value encoding"); + // out.write(bufferBig); + } + } catch (IOException e) { + 
LOGGER.error("tsfile-encoding PlainEncoder: error occurs when encode long value {}", value, + e); } - - @Override - public void encode(Binary value, ByteArrayOutputStream out) { - try { - // write the length of the bytes - encode(value.getLength(), out); - // write value - out.write(value.values); - } catch (IOException e) { - LOGGER.error("tsfile-encoding PlainEncoder: error occurs when encode Binary value {}", value, e); - } + } + + @Override + public void encode(float value, ByteArrayOutputStream out) { + encode(Float.floatToIntBits(value), out); + } + + @Override + public void encode(double value, ByteArrayOutputStream out) { + encode(Double.doubleToLongBits(value), out); + } + + @Override + public void encode(Binary value, ByteArrayOutputStream out) { + try { + // write the length of the bytes + encode(value.getLength(), out); + // write value + out.write(value.values); + } catch (IOException e) { + LOGGER.error("tsfile-encoding PlainEncoder: error occurs when encode Binary value {}", value, + e); } - - @Override - public void flush(ByteArrayOutputStream out) { - } - - @Override - public int getOneItemMaxSize() { - switch (dataType) { - case BOOLEAN: - return 1; - case INT32: - return 4; - case INT64: - return 8; - case FLOAT: - return 4; - case DOUBLE: - return 8; - case TEXT: - // refer to encode(Binary,ByteArrayOutputStream) - return 4 + TSFileConfig.BYTE_SIZE_PER_CHAR * maxStringLength; - case ENUMS: - return 4; - case BIGDECIMAL: - return 8; - default: - throw new UnsupportedOperationException(dataType.toString()); - } - } - - @Override - public long getMaxByteSize() { - return 0; - } - - @Override - public void encode(BigDecimal value, ByteArrayOutputStream out) throws IOException { - throw new TSFileEncodingException("tsfile-encoding PlainEncoder: current version does not support BigDecimal value encoding"); + } + + @Override + public void flush(ByteArrayOutputStream out) {} + + @Override + public int getOneItemMaxSize() { + switch (dataType) { + case BOOLEAN: + return 1; + case INT32: + return 4; + case INT64: + return 8; + case FLOAT: + return 4; + case DOUBLE: + return 8; + case TEXT: + // refer to encode(Binary,ByteArrayOutputStream) + return 4 + TSFileConfig.BYTE_SIZE_PER_CHAR * maxStringLength; + case ENUMS: + return 4; + case BIGDECIMAL: + return 8; + default: + throw new UnsupportedOperationException(dataType.toString()); } + } + + @Override + public long getMaxByteSize() { + return 0; + } + + @Override + public void encode(BigDecimal value, ByteArrayOutputStream out) throws IOException { + throw new TSFileEncodingException( + "tsfile-encoding PlainEncoder: current version does not support BigDecimal value encoding"); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/RleEncoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/RleEncoder.java index 5c79a239..c3a7d8d1 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/RleEncoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/RleEncoder.java @@ -1,312 +1,315 @@ -package cn.edu.tsinghua.tsfile.encoding.encoder; - -import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; -import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; -import cn.edu.tsinghua.tsfile.common.exception.TSFileEncodingException; -import cn.edu.tsinghua.tsfile.common.utils.Binary; -import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; -import cn.edu.tsinghua.tsfile.encoding.common.EndianType; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; -import 
org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.math.BigDecimal; -import java.util.ArrayList; -import java.util.List; - -/** - * Encodes values using a combination of run length encoding and bit packing, - * according to the following grammar: - *
- * {@code
- * rle-bit-packing-hybrid: <length> <bitwidth> <encoded-data>
- * length := length of the <bitwidth> <encoded-data> in bytes stored as 4 bytes little endian
- * bitwidth := bitwidth for all encoded data in <encoded-data>
- * encoded-data := <run>*
- * run := <bit-packed-run> | <rle-run>
- * bit-packed-run := <bit-packed-header> <lastBitPackedNum> <bit-packed-values>
- * bit-packed-header := varint-encode(<bit-packed-group-count> << 1 | 1)
- * lastBitPackedNum := the number of useful value in last bit-pack may be less than 8, so lastBitPackedNum indicates how many values are useful
- * bit-packed-values := bit packed values
- * rle-run := <rle-header> <repeated-value>
- * rle-header := varint-encode( (number of times repeated) << 1)
- * repeated-value := value that is repeated, using a fixed-width of round-up-to-next-byte(bit-width)
- * }
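The two header forms in the grammar above are distinguished by the low bit of the varint: 0 marks an RLE run, 1 marks a bit-packed run, exactly as writeRleRun and endPreviousBitPackedRun compute them below. A small illustrative sketch of that arithmetic (not part of the patch):

public class RleHeaderSketch {
  public static void main(String[] args) {
    int repeatCount = 100;                                 // one value repeated 100 times
    int rleHeader = repeatCount << 1;                      // 200, low bit 0 -> RLE run
    int bitPackedGroupCount = 3;                           // three groups of 8 bit-packed values
    int bitPackedHeader = (bitPackedGroupCount << 1) | 1;  // 7, low bit 1 -> bit-packed run
    System.out.println("rle header = " + rleHeader + ", bit-packed header = " + bitPackedHeader);
  }
}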
- * 
- * - * @param data type T for RLE - */ -public abstract class RleEncoder> extends Encoder { - private static final Logger LOGGER = LoggerFactory.getLogger(RleEncoder.class); - public EndianType endianType; - - /** - * we save all value in a list and calculate its bitwidth - */ - protected List values; - - /** - * the bit width used for bit-packing and rle - */ - protected int bitWidth; - - /** - * for a given value now buffered, how many times it occurs - */ - protected int repeatCount; - - /** - * the number of group which using bit packing, it is saved in header - */ - protected int bitPackedGroupCount; - - /** - * the number of buffered value in array - */ - protected int numBufferedValues; - - /** - * we will write all bytes using bit-packing to OutputStream once. Before that, all bytes are - * saved in list - */ - protected List bytesBuffer; - - /** - * flag which indicate encoding mode false -- rle true -- bit-packing - */ - protected boolean isBitPackRun; - - /** - * previous value written, used to detect repeated values - */ - protected T preValue; - - /** - * array to buffer values temporarily - */ - protected T[] bufferedValues; - - protected boolean isBitWidthSaved; - - /** - * output stream to buffer {@code } - */ - protected ByteArrayOutputStream byteCache; - - protected TSFileConfig config = TSFileDescriptor.getInstance().getConfig(); - - public RleEncoder(EndianType endianType) { - super(TSEncoding.RLE); - this.endianType = endianType; - bytesBuffer = new ArrayList(); - isBitPackRun = false; - isBitWidthSaved = false; - byteCache = new ByteArrayOutputStream(); - } - - protected void reset() { - numBufferedValues = 0; - repeatCount = 0; - bitPackedGroupCount = 0; - bytesBuffer.clear(); - isBitPackRun = false; - isBitWidthSaved = false; - byteCache.reset(); - values.clear(); - } - - /** - * Write all values buffered in cache to OutputStream - * - * @param out - byteArrayOutputStream - * @throws IOException cannot flush to OutputStream - */ - @Override - public void flush(ByteArrayOutputStream out) throws IOException { - int lastBitPackedNum = numBufferedValues; - if (repeatCount >= config.RLE_MIN_REPEATED_NUM) { - try { - writeRleRun(); - } catch (IOException e) { - LOGGER.error( - "tsfile-encoding RleEncoder : error occurs when writing nums to OutputStram when flushing left nums. " - + "numBufferedValues {}, repeatCount {}, bitPackedGroupCount{}, isBitPackRun {}, isBitWidthSaved {}", - numBufferedValues, repeatCount, bitPackedGroupCount, isBitPackRun, isBitWidthSaved, e); - throw e; - } - } else if (numBufferedValues > 0) { - clearBuffer(); - writeOrAppendBitPackedRun(); - endPreviousBitPackedRun(lastBitPackedNum); - } else { - endPreviousBitPackedRun(config.RLE_MIN_REPEATED_NUM); - } - //write length - ReadWriteStreamUtils.writeUnsignedVarInt(byteCache.size(), out); - out.write(byteCache.toByteArray()); - reset(); - } - - /** - * Write bytes to OutputStream using rle. 
- * rle format: {@code - * [header][value] - * header: (repeated value) << 1} - * @throws IOException cannot write RLE run - */ - protected abstract void writeRleRun() throws IOException; - - /** - * Start a bit-packing run transform values to bytes and buffer them in cache - */ - public void writeOrAppendBitPackedRun() { - if (bitPackedGroupCount >= config.RLE_MAX_BIT_PACKED_NUM) { - // we've packed as many values as we can for this run, - // end it and start a new one - endPreviousBitPackedRun(config.RLE_MIN_REPEATED_NUM); - } - if (!isBitPackRun) { - isBitPackRun = true; - } - - convertBuffer(); - - numBufferedValues = 0; - repeatCount = 0; - ++bitPackedGroupCount; - } - - /** - * End a bit-packing run write all bit-packing group to OutputStream bit-packing format: - * {@code - * [header][lastBitPackedNum][bit-packing group]+ - * [bit-packing group]+ are saved in List bytesBuffer - * } - * @param lastBitPackedNum - in last bit-packing group, it may have useful values less than 8. - * This param indicates how many values are useful - */ - protected void endPreviousBitPackedRun(int lastBitPackedNum) { - if (!isBitPackRun) { - return; - } - byte bitPackHeader = (byte) ((bitPackedGroupCount << 1) | 1); - byteCache.write(bitPackHeader); - byteCache.write(lastBitPackedNum); - for (byte[] bytes : bytesBuffer) { - byteCache.write(bytes, 0, bytes.length); - } - bytesBuffer.clear(); - isBitPackRun = false; - bitPackedGroupCount = 0; - } - - /** - * Encode T value using rle or bit-packing. - * It may not write to OutputStream immediately - * - * @param value - value to encode - */ - protected void encodeValue(T value) { - if (!isBitWidthSaved) { - // save bit width in header, - // perpare for read - byteCache.write(bitWidth); - isBitWidthSaved = true; - } - if (value.equals(preValue)) { - repeatCount++; - if (repeatCount >= config.RLE_MIN_REPEATED_NUM && repeatCount <= config.RLE_MAX_REPEATED_NUM) { - // value occurs more than RLE_MIN_REPEATED_NUM times but less than EncodingConfig.RLE_MAX_REPEATED_NUM - // we'll use rle, so just keep on counting repeats for now - // we'll write current value to OutputStream when we encounter a different value - return; - } else if (repeatCount == config.RLE_MAX_REPEATED_NUM + 1) { - // value occurs more than EncodingConfig.RLE_MAX_REPEATED_NUM - // we'll write current rle run to stream and keep on counting current value - repeatCount = config.RLE_MAX_REPEATED_NUM; - try { - writeRleRun(); - LOGGER.debug("tsfile-encoding RleEncoder : write full rle run to stream"); - } catch (IOException e) { - LOGGER.error( - " error occurs when writing full rle run to OutputStram when repeatCount = {}." - + "numBufferedValues {}, repeatCount {}, bitPackedGroupCount{}, isBitPackRun {}, isBitWidthSaved {}", - config.RLE_MAX_REPEATED_NUM + 1, numBufferedValues, repeatCount, bitPackedGroupCount, isBitPackRun, isBitWidthSaved, e); - } - repeatCount = 1; - preValue = value; - } - - } else { - // we encounter a differnt value - if (repeatCount >= config.RLE_MIN_REPEATED_NUM) { - try { - writeRleRun(); - } catch (IOException e) { - LOGGER.error( - "tsfile-encoding RleEncoder : error occurs when writing num to OutputStram when repeatCount > {}." 
- + "numBufferedValues {}, repeatCount {}, bitPackedGroupCount{}, isBitPackRun {}, isBitWidthSaved {}", - config.RLE_MIN_REPEATED_NUM, numBufferedValues, repeatCount, bitPackedGroupCount, isBitPackRun, isBitWidthSaved, e); - } - } - repeatCount = 1; - preValue = value; - } - bufferedValues[numBufferedValues] = value; - numBufferedValues++; - // if none of value we encountered occurs more MAX_REPEATED_NUM times - // we'll use bit-packing - if (numBufferedValues == config.RLE_MIN_REPEATED_NUM) { - writeOrAppendBitPackedRun(); - } - } - - /** - * clean all useless value in bufferedValues and set 0 - */ - protected abstract void clearBuffer(); - - protected abstract void convertBuffer(); - - @Override - public void encode(boolean value, ByteArrayOutputStream out) { - throw new TSFileEncodingException(getClass().getName()); - } - - @Override - public void encode(short value, ByteArrayOutputStream out) { - throw new TSFileEncodingException(getClass().getName()); - } - - @Override - public void encode(int value, ByteArrayOutputStream out) { - throw new TSFileEncodingException(getClass().getName()); - } - - @Override - public void encode(long value, ByteArrayOutputStream out) { - throw new TSFileEncodingException(getClass().getName()); - } - - @Override - public void encode(float value, ByteArrayOutputStream out) { - throw new TSFileEncodingException(getClass().getName()); - } - - @Override - public void encode(double value, ByteArrayOutputStream out) { - throw new TSFileEncodingException(getClass().getName()); - } - - @Override - public void encode(Binary value, ByteArrayOutputStream out) { - throw new TSFileEncodingException(getClass().getName()); - } - - @Override - public void encode(BigDecimal value, ByteArrayOutputStream out) { - throw new TSFileEncodingException(getClass().getName()); - } -} +package cn.edu.tsinghua.tsfile.encoding.encoder; + +import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; +import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; +import cn.edu.tsinghua.tsfile.common.exception.TSFileEncodingException; +import cn.edu.tsinghua.tsfile.common.utils.Binary; +import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; +import cn.edu.tsinghua.tsfile.encoding.common.EndianType; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.math.BigDecimal; +import java.util.ArrayList; +import java.util.List; + +/** + * Encodes values using a combination of run length encoding and bit packing, according to the + * following grammar: + * + *
+ * {@code
+ * rle-bit-packing-hybrid: <length> <bitwidth> <encoded-data>
+ * length := length of the <bitwidth> <encoded-data> in bytes stored as 4 bytes little endian
+ * bitwidth := bitwidth for all encoded data in <encoded-data>
+ * encoded-data := <run>*
+ * run := <bit-packed-run> | <rle-run>
+ * bit-packed-run := <bit-packed-header> <lastBitPackedNum> <bit-packed-values>
+ * bit-packed-header := varint-encode(<bit-packed-group-count> << 1 | 1)
+ * lastBitPackedNum := the number of useful value in last bit-pack may be less than 8, so lastBitPackedNum indicates how many values are useful
+ * bit-packed-values :=  bit packed
+ * rle-run := <rle-header> <repeated-value>
+ * rle-header := varint-encode( (number of times repeated) << 1)
+ * repeated-value := value that is repeated, using a fixed-width of round-up-to-next-byte(bit-width)
+ * }
+ * 
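+ *
+ * Worked example (editor's illustration, not part of the original javadoc), assuming a bit width
+ * of 3: a run of eight equal values v is emitted as an rle-run, i.e. rle-header
+ * varint-encode(8 << 1) = 16 followed by v stored in one byte (3 bits rounded up to the next
+ * byte); eight values with no long repeat, e.g. 2,3,2,3,2,3,2,3, are emitted as one
+ * bit-packed-run, i.e. bit-packed-header (1 << 1) | 1 = 3, then lastBitPackedNum = 8, then three
+ * packed bytes (8 values * 3 bits). The bit width byte precedes the encoded data, and flush()
+ * prepends the total payload length before copying everything to the given output stream.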
+ * + * @param data type T for RLE + */ +public abstract class RleEncoder> extends Encoder { + private static final Logger LOGGER = LoggerFactory.getLogger(RleEncoder.class); + public EndianType endianType; + + /** + * we save all value in a list and calculate its bitwidth + */ + protected List values; + + /** + * the bit width used for bit-packing and rle + */ + protected int bitWidth; + + /** + * for a given value now buffered, how many times it occurs + */ + protected int repeatCount; + + /** + * the number of group which using bit packing, it is saved in header + */ + protected int bitPackedGroupCount; + + /** + * the number of buffered value in array + */ + protected int numBufferedValues; + + /** + * we will write all bytes using bit-packing to OutputStream once. Before that, all bytes are + * saved in list + */ + protected List bytesBuffer; + + /** + * flag which indicate encoding mode false -- rle true -- bit-packing + */ + protected boolean isBitPackRun; + + /** + * previous value written, used to detect repeated values + */ + protected T preValue; + + /** + * array to buffer values temporarily + */ + protected T[] bufferedValues; + + protected boolean isBitWidthSaved; + + /** + * output stream to buffer {@code } + */ + protected ByteArrayOutputStream byteCache; + + protected TSFileConfig config = TSFileDescriptor.getInstance().getConfig(); + + public RleEncoder(EndianType endianType) { + super(TSEncoding.RLE); + this.endianType = endianType; + bytesBuffer = new ArrayList(); + isBitPackRun = false; + isBitWidthSaved = false; + byteCache = new ByteArrayOutputStream(); + } + + protected void reset() { + numBufferedValues = 0; + repeatCount = 0; + bitPackedGroupCount = 0; + bytesBuffer.clear(); + isBitPackRun = false; + isBitWidthSaved = false; + byteCache.reset(); + values.clear(); + } + + /** + * Write all values buffered in cache to OutputStream + * + * @param out - byteArrayOutputStream + * @throws IOException cannot flush to OutputStream + */ + @Override + public void flush(ByteArrayOutputStream out) throws IOException { + int lastBitPackedNum = numBufferedValues; + if (repeatCount >= config.RLE_MIN_REPEATED_NUM) { + try { + writeRleRun(); + } catch (IOException e) { + LOGGER.error( + "tsfile-encoding RleEncoder : error occurs when writing nums to OutputStram when flushing left nums. " + + "numBufferedValues {}, repeatCount {}, bitPackedGroupCount{}, isBitPackRun {}, isBitWidthSaved {}", + numBufferedValues, repeatCount, bitPackedGroupCount, isBitPackRun, isBitWidthSaved, e); + throw e; + } + } else if (numBufferedValues > 0) { + clearBuffer(); + writeOrAppendBitPackedRun(); + endPreviousBitPackedRun(lastBitPackedNum); + } else { + endPreviousBitPackedRun(config.RLE_MIN_REPEATED_NUM); + } + // write length + ReadWriteStreamUtils.writeUnsignedVarInt(byteCache.size(), out); + out.write(byteCache.toByteArray()); + reset(); + } + + /** + * Write bytes to OutputStream using rle. 
rle format: {@code + * [header][value] + * header: (repeated value) << 1} + * + * @throws IOException cannot write RLE run + */ + protected abstract void writeRleRun() throws IOException; + + /** + * Start a bit-packing run transform values to bytes and buffer them in cache + */ + public void writeOrAppendBitPackedRun() { + if (bitPackedGroupCount >= config.RLE_MAX_BIT_PACKED_NUM) { + // we've packed as many values as we can for this run, + // end it and start a new one + endPreviousBitPackedRun(config.RLE_MIN_REPEATED_NUM); + } + if (!isBitPackRun) { + isBitPackRun = true; + } + + convertBuffer(); + + numBufferedValues = 0; + repeatCount = 0; + ++bitPackedGroupCount; + } + + /** + * End a bit-packing run write all bit-packing group to OutputStream bit-packing format: {@code + * [header][lastBitPackedNum][bit-packing group]+ + * [bit-packing group]+ are saved in List bytesBuffer + * } + * + * @param lastBitPackedNum - in last bit-packing group, it may have useful values less than 8. + * This param indicates how many values are useful + */ + protected void endPreviousBitPackedRun(int lastBitPackedNum) { + if (!isBitPackRun) { + return; + } + byte bitPackHeader = (byte) ((bitPackedGroupCount << 1) | 1); + byteCache.write(bitPackHeader); + byteCache.write(lastBitPackedNum); + for (byte[] bytes : bytesBuffer) { + byteCache.write(bytes, 0, bytes.length); + } + bytesBuffer.clear(); + isBitPackRun = false; + bitPackedGroupCount = 0; + } + + /** + * Encode T value using rle or bit-packing. It may not write to OutputStream immediately + * + * @param value - value to encode + */ + protected void encodeValue(T value) { + if (!isBitWidthSaved) { + // save bit width in header, + // perpare for read + byteCache.write(bitWidth); + isBitWidthSaved = true; + } + if (value.equals(preValue)) { + repeatCount++; + if (repeatCount >= config.RLE_MIN_REPEATED_NUM + && repeatCount <= config.RLE_MAX_REPEATED_NUM) { + // value occurs more than RLE_MIN_REPEATED_NUM times but less than + // EncodingConfig.RLE_MAX_REPEATED_NUM + // we'll use rle, so just keep on counting repeats for now + // we'll write current value to OutputStream when we encounter a different value + return; + } else if (repeatCount == config.RLE_MAX_REPEATED_NUM + 1) { + // value occurs more than EncodingConfig.RLE_MAX_REPEATED_NUM + // we'll write current rle run to stream and keep on counting current value + repeatCount = config.RLE_MAX_REPEATED_NUM; + try { + writeRleRun(); + LOGGER.debug("tsfile-encoding RleEncoder : write full rle run to stream"); + } catch (IOException e) { + LOGGER.error( + " error occurs when writing full rle run to OutputStram when repeatCount = {}." + + "numBufferedValues {}, repeatCount {}, bitPackedGroupCount{}, isBitPackRun {}, isBitWidthSaved {}", + config.RLE_MAX_REPEATED_NUM + 1, numBufferedValues, repeatCount, bitPackedGroupCount, + isBitPackRun, isBitWidthSaved, e); + } + repeatCount = 1; + preValue = value; + } + + } else { + // we encounter a differnt value + if (repeatCount >= config.RLE_MIN_REPEATED_NUM) { + try { + writeRleRun(); + } catch (IOException e) { + LOGGER.error( + "tsfile-encoding RleEncoder : error occurs when writing num to OutputStram when repeatCount > {}." 
+ + "numBufferedValues {}, repeatCount {}, bitPackedGroupCount{}, isBitPackRun {}, isBitWidthSaved {}", + config.RLE_MIN_REPEATED_NUM, numBufferedValues, repeatCount, bitPackedGroupCount, + isBitPackRun, isBitWidthSaved, e); + } + } + repeatCount = 1; + preValue = value; + } + bufferedValues[numBufferedValues] = value; + numBufferedValues++; + // if none of value we encountered occurs more MAX_REPEATED_NUM times + // we'll use bit-packing + if (numBufferedValues == config.RLE_MIN_REPEATED_NUM) { + writeOrAppendBitPackedRun(); + } + } + + /** + * clean all useless value in bufferedValues and set 0 + */ + protected abstract void clearBuffer(); + + protected abstract void convertBuffer(); + + @Override + public void encode(boolean value, ByteArrayOutputStream out) { + throw new TSFileEncodingException(getClass().getName()); + } + + @Override + public void encode(short value, ByteArrayOutputStream out) { + throw new TSFileEncodingException(getClass().getName()); + } + + @Override + public void encode(int value, ByteArrayOutputStream out) { + throw new TSFileEncodingException(getClass().getName()); + } + + @Override + public void encode(long value, ByteArrayOutputStream out) { + throw new TSFileEncodingException(getClass().getName()); + } + + @Override + public void encode(float value, ByteArrayOutputStream out) { + throw new TSFileEncodingException(getClass().getName()); + } + + @Override + public void encode(double value, ByteArrayOutputStream out) { + throw new TSFileEncodingException(getClass().getName()); + } + + @Override + public void encode(Binary value, ByteArrayOutputStream out) { + throw new TSFileEncodingException(getClass().getName()); + } + + @Override + public void encode(BigDecimal value, ByteArrayOutputStream out) { + throw new TSFileEncodingException(getClass().getName()); + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/SinglePrecisionEncoder.java b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/SinglePrecisionEncoder.java index c542d82e..53003fe6 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/SinglePrecisionEncoder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/encoding/encoder/SinglePrecisionEncoder.java @@ -2,83 +2,84 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; - import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; /** * Encoder for int value using gorilla encoding * */ -public class SinglePrecisionEncoder extends GorillaEncoder{ - private int preValue; +public class SinglePrecisionEncoder extends GorillaEncoder { + private int preValue; + + public SinglePrecisionEncoder() {} - public SinglePrecisionEncoder() { - } - - @Override - public void encode(float value, ByteArrayOutputStream out) throws IOException { - if(!flag){ - flag = true; - preValue = Float.floatToIntBits(value); - leadingZeroNum = Integer.numberOfLeadingZeros(preValue); - tailingZeroNum = Integer.numberOfTrailingZeros(preValue); - out.write((preValue >> 0) & 0xFF); - out.write((preValue >> 8) & 0xFF); - out.write((preValue >> 16) & 0xFF); - out.write((preValue >> 24) & 0xFF); - } else{ - int nextValue = Float.floatToIntBits(value); - int tmp = nextValue ^ preValue; - if(tmp == 0){ - // case: write '0' - writeBit(false, out); - } else{ - int leadingZeroNumTmp = Integer.numberOfLeadingZeros(tmp); - int tailingZeroNumTmp = Integer.numberOfTrailingZeros(tmp); - if(leadingZeroNumTmp >= leadingZeroNum && tailingZeroNumTmp >= tailingZeroNum){ - // case: write '10' and effective bits without first leadingZeroNum '0' and last 
tailingZeroNum '0' - writeBit(true, out); - writeBit(false, out); - writeBits(tmp, out, TSFileConfig.FLOAT_LENGTH - 1 - leadingZeroNum, tailingZeroNum); - } else{ - // case: write '11', leading zero num of value, effective bits len and effective bit value - writeBit(true, out); - writeBit(true, out); - writeBits(leadingZeroNumTmp, out, TSFileConfig.FLAOT_LEADING_ZERO_LENGTH - 1, 0); - writeBits(TSFileConfig.FLOAT_LENGTH - leadingZeroNumTmp - tailingZeroNumTmp, out, TSFileConfig.FLOAT_VALUE_LENGTH - 1, 0); - writeBits(tmp, out, TSFileConfig.FLOAT_LENGTH - 1 - leadingZeroNumTmp, tailingZeroNumTmp); - } - } - preValue = nextValue; - leadingZeroNum = Integer.numberOfLeadingZeros(preValue); - tailingZeroNum = Integer.numberOfTrailingZeros(preValue); + @Override + public void encode(float value, ByteArrayOutputStream out) throws IOException { + if (!flag) { + flag = true; + preValue = Float.floatToIntBits(value); + leadingZeroNum = Integer.numberOfLeadingZeros(preValue); + tailingZeroNum = Integer.numberOfTrailingZeros(preValue); + out.write((preValue >> 0) & 0xFF); + out.write((preValue >> 8) & 0xFF); + out.write((preValue >> 16) & 0xFF); + out.write((preValue >> 24) & 0xFF); + } else { + int nextValue = Float.floatToIntBits(value); + int tmp = nextValue ^ preValue; + if (tmp == 0) { + // case: write '0' + writeBit(false, out); + } else { + int leadingZeroNumTmp = Integer.numberOfLeadingZeros(tmp); + int tailingZeroNumTmp = Integer.numberOfTrailingZeros(tmp); + if (leadingZeroNumTmp >= leadingZeroNum && tailingZeroNumTmp >= tailingZeroNum) { + // case: write '10' and effective bits without first leadingZeroNum '0' and last + // tailingZeroNum '0' + writeBit(true, out); + writeBit(false, out); + writeBits(tmp, out, TSFileConfig.FLOAT_LENGTH - 1 - leadingZeroNum, tailingZeroNum); + } else { + // case: write '11', leading zero num of value, effective bits len and effective bit value + writeBit(true, out); + writeBit(true, out); + writeBits(leadingZeroNumTmp, out, TSFileConfig.FLAOT_LEADING_ZERO_LENGTH - 1, 0); + writeBits(TSFileConfig.FLOAT_LENGTH - leadingZeroNumTmp - tailingZeroNumTmp, out, + TSFileConfig.FLOAT_VALUE_LENGTH - 1, 0); + writeBits(tmp, out, TSFileConfig.FLOAT_LENGTH - 1 - leadingZeroNumTmp, tailingZeroNumTmp); } + } + preValue = nextValue; + leadingZeroNum = Integer.numberOfLeadingZeros(preValue); + tailingZeroNum = Integer.numberOfTrailingZeros(preValue); } - - @Override - public void flush(ByteArrayOutputStream out) throws IOException { - encode(Float.NaN, out); - clearBuffer(out); - reset(); - } - - private void writeBits(int num, ByteArrayOutputStream out, int start, int end){ - for(int i = start; i >= end; i--){ - int bit = num & (1 << i); - writeBit(bit, out); - } - } - - @Override - public int getOneItemMaxSize() { - // case '11' - // 2bit + 5bit + 6bit + 32bit = 45bit - return 6; - } + } - @Override - public long getMaxByteSize() { - // max(first 4 byte, case '11' bit + 5bit + 6bit + 32bit = 45bit) + NaN(case '11' bit + 5bit + 6bit + 32bit = 45bit) = 90bit - return 12; + @Override + public void flush(ByteArrayOutputStream out) throws IOException { + encode(Float.NaN, out); + clearBuffer(out); + reset(); + } + + private void writeBits(int num, ByteArrayOutputStream out, int start, int end) { + for (int i = start; i >= end; i--) { + int bit = num & (1 << i); + writeBit(bit, out); } + } + + @Override + public int getOneItemMaxSize() { + // case '11' + // 2bit + 5bit + 6bit + 32bit = 45bit + return 6; + } + + @Override + public long getMaxByteSize() { + // max(first 4 byte, 
case '11' bit + 5bit + 6bit + 32bit = 45bit) + NaN(case '11' bit + 5bit + + // 6bit + 32bit = 45bit) = 90bit + return 12; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupMetaData.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupMetaData.java index 3fa078cc..af34b3fb 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupMetaData.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupMetaData.java @@ -1,195 +1,197 @@ -package cn.edu.tsinghua.tsfile.file.metadata; - -import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -/** - * For more information, see RowGroupMetaData in cn.edu.thu.tsfile.format package - */ -public class RowGroupMetaData implements IConverter { - private static final Logger LOGGER = LoggerFactory.getLogger(RowGroupMetaData.class); - - private String deltaObjectID; - - /** - * Number of rows in this row group - */ - private long numOfRows; - - /** - * Total byte size of all the uncompressed time series data in this row group - */ - private long totalByteSize; - - /** - * This path is relative to the current file. - */ - private String path; - - private List timeSeriesChunkMetaDataList; - - /** - * which schema/group does the delta object belongs to - */ - private String deltaObjectType; - - /** - * The time when endRowgroup() is called. - */ - private long writtenTime; - - public RowGroupMetaData() { - timeSeriesChunkMetaDataList = new ArrayList(); - } - - public RowGroupMetaData(String deltaObjectID, long numOfRows, long totalByteSize, - List timeSeriesChunkMetaDataList, String deltaObjectType) { - this.deltaObjectID = deltaObjectID; - this.numOfRows = numOfRows; - this.totalByteSize = totalByteSize; - this.timeSeriesChunkMetaDataList = timeSeriesChunkMetaDataList; - this.deltaObjectType = deltaObjectType; - } - - /** - * add time series chunk metadata to list. THREAD NOT SAFE - * - * @param metadata time series metadata to add - */ - public void addTimeSeriesChunkMetaData(TimeSeriesChunkMetaData metadata) { - if (timeSeriesChunkMetaDataList == null) { - timeSeriesChunkMetaDataList = new ArrayList(); - } - timeSeriesChunkMetaDataList.add(metadata); - } - - public List getMetaDatas() { - return timeSeriesChunkMetaDataList == null ? 
null - : Collections.unmodifiableList(timeSeriesChunkMetaDataList); - } - - @Override - public cn.edu.tsinghua.tsfile.format.RowGroupMetaData convertToThrift() { - try { - List timeSeriesChunkMetaDataListInThrift = null; - if (timeSeriesChunkMetaDataList != null) { - timeSeriesChunkMetaDataListInThrift = new ArrayList<>(); - for (TimeSeriesChunkMetaData timeSeriesChunkMetaData : timeSeriesChunkMetaDataList) { - timeSeriesChunkMetaDataListInThrift.add(timeSeriesChunkMetaData.convertToThrift()); - } - } - cn.edu.tsinghua.tsfile.format.RowGroupMetaData metaDataInThrift = - new cn.edu.tsinghua.tsfile.format.RowGroupMetaData(timeSeriesChunkMetaDataListInThrift, - deltaObjectID, totalByteSize, numOfRows, deltaObjectType, writtenTime); - metaDataInThrift.setFile_path(path); - return metaDataInThrift; - } catch (Exception e) { - if (LOGGER.isErrorEnabled()) - LOGGER.error( - "tsfile-file RowGroupMetaData: failed to convert row group metadata from TSFile to thrift, row group metadata:{}", - this, e); - throw e; - } - } - - @Override - public void convertToTSF(cn.edu.tsinghua.tsfile.format.RowGroupMetaData metaDataInThrift) { - try { - deltaObjectID = metaDataInThrift.getDelta_object_id(); - numOfRows = metaDataInThrift.getMax_num_rows(); - totalByteSize = metaDataInThrift.getTotal_byte_size(); - path = metaDataInThrift.getFile_path(); - deltaObjectType = metaDataInThrift.getDelta_object_type(); - writtenTime = metaDataInThrift.getWrittenTime(); - List timeSeriesChunkMetaDataListInThrift = metaDataInThrift.getTsc_metadata(); - if (timeSeriesChunkMetaDataListInThrift == null) { - timeSeriesChunkMetaDataList = null; - } else { - if (timeSeriesChunkMetaDataList == null) { - timeSeriesChunkMetaDataList = new ArrayList<>(); - } - timeSeriesChunkMetaDataList.clear(); - for (cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData timeSeriesChunkMetaDataInThrift : timeSeriesChunkMetaDataListInThrift) { - TimeSeriesChunkMetaData timeSeriesChunkMetaData = new TimeSeriesChunkMetaData(); - timeSeriesChunkMetaData.convertToTSF(timeSeriesChunkMetaDataInThrift); - timeSeriesChunkMetaDataList.add(timeSeriesChunkMetaData); - } - } - } catch (Exception e) { - if (LOGGER.isErrorEnabled()) - LOGGER.error( - "tsfile-file RowGroupMetaData: failed to convert row group metadata from thrift to TSFile, row group metadata:{}", - metaDataInThrift, e); - throw e; - } - } - - @Override - public String toString() { - return String.format( - "RowGroupMetaData{ delta object id: %s, number of rows: %d, total byte size: %d, time series chunk list: %s }", - deltaObjectID, numOfRows, totalByteSize, timeSeriesChunkMetaDataList); - } - - public long getNumOfRows() { - return numOfRows; - } - - public void setNumOfRows(long numOfRows) { - this.numOfRows = numOfRows; - } - - public long getTotalByteSize() { - return totalByteSize; - } - - public void setTotalByteSize(long totalByteSize) { - this.totalByteSize = totalByteSize; - } - - public String getPath() { - return path; - } - - public void setPath(String path) { - this.path = path; - } - - public String getDeltaObjectID() { - return deltaObjectID; - } - - public void setDeltaObjectID(String deltaObjectUID) { - this.deltaObjectID = deltaObjectUID; - } - - public List getTimeSeriesChunkMetaDataList() { - return timeSeriesChunkMetaDataList; - } - - public void setTimeSeriesChunkMetaDataList( - List timeSeriesChunkMetaDataList) { - this.timeSeriesChunkMetaDataList = timeSeriesChunkMetaDataList; - } - - public String getDeltaObjectType() { - return deltaObjectType; - } - - public void 
setDeltaObjectType(String deltaObjectType) { - this.deltaObjectType = deltaObjectType; - } - - public long getWrittenTime() { - return writtenTime; - } - - public void setWrittenTime(long writtenTime) { - this.writtenTime = writtenTime; - } -} +package cn.edu.tsinghua.tsfile.file.metadata; + +import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * For more information, see RowGroupMetaData in cn.edu.thu.tsfile.format package + */ +public class RowGroupMetaData + implements IConverter { + private static final Logger LOGGER = LoggerFactory.getLogger(RowGroupMetaData.class); + + private String deltaObjectID; + + /** + * Number of rows in this row group + */ + private long numOfRows; + + /** + * Total byte size of all the uncompressed time series data in this row group + */ + private long totalByteSize; + + /** + * This path is relative to the current file. + */ + private String path; + + private List timeSeriesChunkMetaDataList; + + /** + * which schema/group does the delta object belongs to + */ + private String deltaObjectType; + + /** + * The time when endRowgroup() is called. + */ + private long writtenTime; + + public RowGroupMetaData() { + timeSeriesChunkMetaDataList = new ArrayList(); + } + + public RowGroupMetaData(String deltaObjectID, long numOfRows, long totalByteSize, + List timeSeriesChunkMetaDataList, String deltaObjectType) { + this.deltaObjectID = deltaObjectID; + this.numOfRows = numOfRows; + this.totalByteSize = totalByteSize; + this.timeSeriesChunkMetaDataList = timeSeriesChunkMetaDataList; + this.deltaObjectType = deltaObjectType; + } + + /** + * add time series chunk metadata to list. THREAD NOT SAFE + * + * @param metadata time series metadata to add + */ + public void addTimeSeriesChunkMetaData(TimeSeriesChunkMetaData metadata) { + if (timeSeriesChunkMetaDataList == null) { + timeSeriesChunkMetaDataList = new ArrayList(); + } + timeSeriesChunkMetaDataList.add(metadata); + } + + public List getMetaDatas() { + return timeSeriesChunkMetaDataList == null ? 
null + : Collections.unmodifiableList(timeSeriesChunkMetaDataList); + } + + @Override + public cn.edu.tsinghua.tsfile.format.RowGroupMetaData convertToThrift() { + try { + List timeSeriesChunkMetaDataListInThrift = + null; + if (timeSeriesChunkMetaDataList != null) { + timeSeriesChunkMetaDataListInThrift = new ArrayList<>(); + for (TimeSeriesChunkMetaData timeSeriesChunkMetaData : timeSeriesChunkMetaDataList) { + timeSeriesChunkMetaDataListInThrift.add(timeSeriesChunkMetaData.convertToThrift()); + } + } + cn.edu.tsinghua.tsfile.format.RowGroupMetaData metaDataInThrift = + new cn.edu.tsinghua.tsfile.format.RowGroupMetaData(timeSeriesChunkMetaDataListInThrift, + deltaObjectID, totalByteSize, numOfRows, deltaObjectType, writtenTime); + metaDataInThrift.setFile_path(path); + return metaDataInThrift; + } catch (Exception e) { + if (LOGGER.isErrorEnabled()) + LOGGER.error( + "tsfile-file RowGroupMetaData: failed to convert row group metadata from TSFile to thrift, row group metadata:{}", + this, e); + throw e; + } + } + + @Override + public void convertToTSF(cn.edu.tsinghua.tsfile.format.RowGroupMetaData metaDataInThrift) { + try { + deltaObjectID = metaDataInThrift.getDelta_object_id(); + numOfRows = metaDataInThrift.getMax_num_rows(); + totalByteSize = metaDataInThrift.getTotal_byte_size(); + path = metaDataInThrift.getFile_path(); + deltaObjectType = metaDataInThrift.getDelta_object_type(); + writtenTime = metaDataInThrift.getWrittenTime(); + List timeSeriesChunkMetaDataListInThrift = + metaDataInThrift.getTsc_metadata(); + if (timeSeriesChunkMetaDataListInThrift == null) { + timeSeriesChunkMetaDataList = null; + } else { + if (timeSeriesChunkMetaDataList == null) { + timeSeriesChunkMetaDataList = new ArrayList<>(); + } + timeSeriesChunkMetaDataList.clear(); + for (cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData timeSeriesChunkMetaDataInThrift : timeSeriesChunkMetaDataListInThrift) { + TimeSeriesChunkMetaData timeSeriesChunkMetaData = new TimeSeriesChunkMetaData(); + timeSeriesChunkMetaData.convertToTSF(timeSeriesChunkMetaDataInThrift); + timeSeriesChunkMetaDataList.add(timeSeriesChunkMetaData); + } + } + } catch (Exception e) { + if (LOGGER.isErrorEnabled()) + LOGGER.error( + "tsfile-file RowGroupMetaData: failed to convert row group metadata from thrift to TSFile, row group metadata:{}", + metaDataInThrift, e); + throw e; + } + } + + @Override + public String toString() { + return String.format( + "RowGroupMetaData{ delta object id: %s, number of rows: %d, total byte size: %d, time series chunk list: %s }", + deltaObjectID, numOfRows, totalByteSize, timeSeriesChunkMetaDataList); + } + + public long getNumOfRows() { + return numOfRows; + } + + public void setNumOfRows(long numOfRows) { + this.numOfRows = numOfRows; + } + + public long getTotalByteSize() { + return totalByteSize; + } + + public void setTotalByteSize(long totalByteSize) { + this.totalByteSize = totalByteSize; + } + + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } + + public String getDeltaObjectID() { + return deltaObjectID; + } + + public void setDeltaObjectID(String deltaObjectUID) { + this.deltaObjectID = deltaObjectUID; + } + + public List getTimeSeriesChunkMetaDataList() { + return timeSeriesChunkMetaDataList; + } + + public void setTimeSeriesChunkMetaDataList( + List timeSeriesChunkMetaDataList) { + this.timeSeriesChunkMetaDataList = timeSeriesChunkMetaDataList; + } + + public String getDeltaObjectType() { + return deltaObjectType; + } + + public void 
setDeltaObjectType(String deltaObjectType) { + this.deltaObjectType = deltaObjectType; + } + + public long getWrittenTime() { + return writtenTime; + } + + public void setWrittenTime(long writtenTime) { + this.writtenTime = writtenTime; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TInTimeSeriesChunkMetaData.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TInTimeSeriesChunkMetaData.java index 732c304b..50c55001 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TInTimeSeriesChunkMetaData.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TInTimeSeriesChunkMetaData.java @@ -1,134 +1,134 @@ -package cn.edu.tsinghua.tsfile.file.metadata; - -import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSFreqType; -import cn.edu.tsinghua.tsfile.format.DataType; -import cn.edu.tsinghua.tsfile.format.FreqType; -import cn.edu.tsinghua.tsfile.format.TimeInTimeSeriesChunkMetaData; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; - -/** - * For more information, see TimeInTimeSeriesChunkMetaData - * in cn.edu.thu.tsfile.format package - */ -public class TInTimeSeriesChunkMetaData implements IConverter { - private static final Logger LOGGER = LoggerFactory.getLogger(TInTimeSeriesChunkMetaData.class); - - private TSDataType dataType; - private long startTime; - private long endTime; - - private TSFreqType freqType; - private List frequencies; - - /** - * If values for data consist of enum values, metadata will store all possible values in time - * series - */ - private List enumValues; - - public TInTimeSeriesChunkMetaData() { - } - - public TInTimeSeriesChunkMetaData(TSDataType dataType, long startTime, long endTime) { - this.dataType = dataType; - this.startTime = startTime; - this.endTime = endTime; - } - - @Override - public TimeInTimeSeriesChunkMetaData convertToThrift() { - try { - TimeInTimeSeriesChunkMetaData tTimeSeriesChunkMetaDataInThrift = - new TimeInTimeSeriesChunkMetaData( - dataType == null ? null : DataType.valueOf(dataType.toString()), startTime, endTime); - tTimeSeriesChunkMetaDataInThrift.setFreq_type(freqType == null ? null : FreqType.valueOf(freqType.toString())); - tTimeSeriesChunkMetaDataInThrift.setFrequencies(frequencies); - tTimeSeriesChunkMetaDataInThrift.setEnum_values(enumValues); - return tTimeSeriesChunkMetaDataInThrift; - } catch (Exception e) { - if (LOGGER.isErrorEnabled()) - LOGGER.error( - "tsfile-file TInTimeSeriesChunkMetaData: failed to convert TimeInTimeSeriesChunkMetaData from TSFile to thrift, content is {}", - this, e); - throw e; - } - } - - @Override - public void convertToTSF(TimeInTimeSeriesChunkMetaData tTimeSeriesChunkMetaDataInThrift) { - try { - dataType = tTimeSeriesChunkMetaDataInThrift.getData_type() == null ? null : TSDataType.valueOf(tTimeSeriesChunkMetaDataInThrift.getData_type().toString()); - freqType = tTimeSeriesChunkMetaDataInThrift.getFreq_type() == null ? 
null : TSFreqType.valueOf(tTimeSeriesChunkMetaDataInThrift.getFreq_type().toString()); - frequencies = tTimeSeriesChunkMetaDataInThrift.getFrequencies(); - startTime = tTimeSeriesChunkMetaDataInThrift.getStartime(); - endTime = tTimeSeriesChunkMetaDataInThrift.getEndtime(); - enumValues = tTimeSeriesChunkMetaDataInThrift.getEnum_values(); - } catch (Exception e) { - if (LOGGER.isErrorEnabled()) - LOGGER.error( - "tsfile-file TInTimeSeriesChunkMetaData: failed to convert TimeInTimeSeriesChunkMetaData from thrift to TSFile, content is {}", - tTimeSeriesChunkMetaDataInThrift, e); - throw e; - } - } - - @Override - public String toString() { - return String.format( - "TInTimeSeriesChunkMetaData{ TSDataType %s, TSFreqType %s, frequencies %s, starttime %d, endtime %d, enumValues %s }", - dataType, freqType, frequencies, startTime, endTime, enumValues); - } - - public TSDataType getDataType() { - return dataType; - } - - public void setDataType(TSDataType dataType) { - this.dataType = dataType; - } - - public TSFreqType getFreqType() { - return freqType; - } - - public void setFreqType(TSFreqType freqType) { - this.freqType = freqType; - } - - public List getFrequencies() { - return frequencies; - } - - public void setFrequencies(List frequencies) { - this.frequencies = frequencies; - } - - public long getStartTime() { - return startTime; - } - - public void setStartTime(long startTime) { - this.startTime = startTime; - } - - public long getEndTime() { - return endTime; - } - - public void setEndTime(long endTime) { - this.endTime = endTime; - } - - public List getEnumValues() { - return enumValues; - } - - public void setEnumValues(List enumValues) { - this.enumValues = enumValues; - } -} +package cn.edu.tsinghua.tsfile.file.metadata; + +import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSFreqType; +import cn.edu.tsinghua.tsfile.format.DataType; +import cn.edu.tsinghua.tsfile.format.FreqType; +import cn.edu.tsinghua.tsfile.format.TimeInTimeSeriesChunkMetaData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.util.List; + +/** + * For more information, see TimeInTimeSeriesChunkMetaData in cn.edu.thu.tsfile.format package + */ +public class TInTimeSeriesChunkMetaData implements IConverter { + private static final Logger LOGGER = LoggerFactory.getLogger(TInTimeSeriesChunkMetaData.class); + + private TSDataType dataType; + private long startTime; + private long endTime; + + private TSFreqType freqType; + private List frequencies; + + /** + * If values for data consist of enum values, metadata will store all possible values in time + * series + */ + private List enumValues; + + public TInTimeSeriesChunkMetaData() {} + + public TInTimeSeriesChunkMetaData(TSDataType dataType, long startTime, long endTime) { + this.dataType = dataType; + this.startTime = startTime; + this.endTime = endTime; + } + + @Override + public TimeInTimeSeriesChunkMetaData convertToThrift() { + try { + TimeInTimeSeriesChunkMetaData tTimeSeriesChunkMetaDataInThrift = + new TimeInTimeSeriesChunkMetaData( + dataType == null ? null : DataType.valueOf(dataType.toString()), startTime, endTime); + tTimeSeriesChunkMetaDataInThrift + .setFreq_type(freqType == null ? 
null : FreqType.valueOf(freqType.toString())); + tTimeSeriesChunkMetaDataInThrift.setFrequencies(frequencies); + tTimeSeriesChunkMetaDataInThrift.setEnum_values(enumValues); + return tTimeSeriesChunkMetaDataInThrift; + } catch (Exception e) { + if (LOGGER.isErrorEnabled()) + LOGGER.error( + "tsfile-file TInTimeSeriesChunkMetaData: failed to convert TimeInTimeSeriesChunkMetaData from TSFile to thrift, content is {}", + this, e); + throw e; + } + } + + @Override + public void convertToTSF(TimeInTimeSeriesChunkMetaData tTimeSeriesChunkMetaDataInThrift) { + try { + dataType = tTimeSeriesChunkMetaDataInThrift.getData_type() == null ? null + : TSDataType.valueOf(tTimeSeriesChunkMetaDataInThrift.getData_type().toString()); + freqType = tTimeSeriesChunkMetaDataInThrift.getFreq_type() == null ? null + : TSFreqType.valueOf(tTimeSeriesChunkMetaDataInThrift.getFreq_type().toString()); + frequencies = tTimeSeriesChunkMetaDataInThrift.getFrequencies(); + startTime = tTimeSeriesChunkMetaDataInThrift.getStartime(); + endTime = tTimeSeriesChunkMetaDataInThrift.getEndtime(); + enumValues = tTimeSeriesChunkMetaDataInThrift.getEnum_values(); + } catch (Exception e) { + if (LOGGER.isErrorEnabled()) + LOGGER.error( + "tsfile-file TInTimeSeriesChunkMetaData: failed to convert TimeInTimeSeriesChunkMetaData from thrift to TSFile, content is {}", + tTimeSeriesChunkMetaDataInThrift, e); + throw e; + } + } + + @Override + public String toString() { + return String.format( + "TInTimeSeriesChunkMetaData{ TSDataType %s, TSFreqType %s, frequencies %s, starttime %d, endtime %d, enumValues %s }", + dataType, freqType, frequencies, startTime, endTime, enumValues); + } + + public TSDataType getDataType() { + return dataType; + } + + public void setDataType(TSDataType dataType) { + this.dataType = dataType; + } + + public TSFreqType getFreqType() { + return freqType; + } + + public void setFreqType(TSFreqType freqType) { + this.freqType = freqType; + } + + public List getFrequencies() { + return frequencies; + } + + public void setFrequencies(List frequencies) { + this.frequencies = frequencies; + } + + public long getStartTime() { + return startTime; + } + + public void setStartTime(long startTime) { + this.startTime = startTime; + } + + public long getEndTime() { + return endTime; + } + + public void setEndTime(long endTime) { + this.endTime = endTime; + } + + public List getEnumValues() { + return enumValues; + } + + public void setEnumValues(List enumValues) { + this.enumValues = enumValues; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesChunkMetaData.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesChunkMetaData.java index 7c5965cf..294f3cc5 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesChunkMetaData.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesChunkMetaData.java @@ -1,253 +1,254 @@ -package cn.edu.tsinghua.tsfile.file.metadata; - -import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; -import cn.edu.tsinghua.tsfile.file.metadata.enums.CompressionTypeName; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSChunkType; -import cn.edu.tsinghua.tsfile.format.CompressionType; -import cn.edu.tsinghua.tsfile.format.TimeSeriesChunkType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.List; - -/** - * For more information, see TimeSeriesChunkMetaData in cn.edu.thu.tsfile.format package - */ -public class TimeSeriesChunkMetaData - implements IConverter 
{ - private static final Logger LOGGER = LoggerFactory.getLogger(TimeSeriesChunkMetaData.class); - - private TimeSeriesChunkProperties properties; - - private long numRows; - - /** - * total byte size of all uncompressed pages in this time series chunk (including the headers) - */ - private long totalByteSize; - - /** - * Optional json metadata - */ - private List jsonMetaData; - - /** - * Byte offset from beginning of file to first data page - */ - private long dataPageOffset; - - /** - * Byte offset from beginning of file to root index page - */ - private long indexPageOffset; - - /** - * Byte offset from the beginning of file to first (only) dictionary page - */ - private long dictionaryPageOffset; - - /** - * one of TSeriesMetaData and VSeriesMetaData is not null - */ - private TInTimeSeriesChunkMetaData tInTimeSeriesChunkMetaData; - private VInTimeSeriesChunkMetaData vInTimeSeriesChunkMetaData; - - /** - * The maximum time of the tombstones that take effect on this chunk. Only data with larger timestamps than this - * should be exposed to user. - */ - private long maxTombstoneTime; - - /** - * The time when the RowGroup of this chunk is closed. This will not be written out and will only be set when read together - * with its RowGroup during querying. - */ - private long writtenTime; - - public TimeSeriesChunkMetaData() { - properties = new TimeSeriesChunkProperties(); - jsonMetaData = new ArrayList(); - } - - public TimeSeriesChunkMetaData(String measurementUID, TSChunkType tsChunkGroup, long fileOffset, - CompressionTypeName compression) { - this(); - this.properties = new TimeSeriesChunkProperties(measurementUID, tsChunkGroup, fileOffset, compression); - } - - public TimeSeriesChunkProperties getProperties() { - return properties; - } - - public void setProperties(TimeSeriesChunkProperties properties) { - this.properties = properties; - } - - @Override - public cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData convertToThrift() { - try { - cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData metadataInThrift = initTimeSeriesChunkMetaDataInThrift(); - if (tInTimeSeriesChunkMetaData != null) { - metadataInThrift.setTime_tsc(tInTimeSeriesChunkMetaData.convertToThrift()); - } - if (vInTimeSeriesChunkMetaData != null) { - metadataInThrift.setValue_tsc(vInTimeSeriesChunkMetaData.convertToThrift()); - } - return metadataInThrift; - } catch (Exception e) { - if (LOGGER.isErrorEnabled()) - LOGGER.error( - "tsfile-file TimeSeriesChunkMetaData: failed to convert TimeSeriesChunkMetaData from TSFile to thrift, content is {}", - this, e); - } - return null; - } - - @Override - public void convertToTSF(cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData metadataInThrift) { - try { - initTimeSeriesChunkMetaDataInTSFile(metadataInThrift); - if (metadataInThrift.getTime_tsc() == null) { - tInTimeSeriesChunkMetaData = null; - } else { - if (tInTimeSeriesChunkMetaData == null) { - tInTimeSeriesChunkMetaData = new TInTimeSeriesChunkMetaData(); - } - tInTimeSeriesChunkMetaData.convertToTSF(metadataInThrift.getTime_tsc()); - } - if (metadataInThrift.getValue_tsc() == null) { - vInTimeSeriesChunkMetaData = null; - } else { - if (vInTimeSeriesChunkMetaData == null) { - vInTimeSeriesChunkMetaData = new VInTimeSeriesChunkMetaData(); - } - vInTimeSeriesChunkMetaData.convertToTSF(metadataInThrift.getValue_tsc()); - } - } catch (Exception e) { - if (LOGGER.isErrorEnabled()) - LOGGER.error( - "tsfile-file TimeSeriesChunkMetaData: failed to convert TimeSeriesChunkMetaData from thrift to TSFile, content is 
{}", - metadataInThrift, e); - } - } - - private cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData initTimeSeriesChunkMetaDataInThrift() { - cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData metadataInThrift = - new cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData( - properties.getMeasurementUID(), - properties.getTsChunkType() == null ? null : TimeSeriesChunkType.valueOf(properties.getTsChunkType().toString()), - properties.getFileOffset(), - properties.getCompression() == null ? null : CompressionType.valueOf(properties.getCompression().toString())); - metadataInThrift.setNum_rows(numRows); - metadataInThrift.setTotal_byte_size(totalByteSize); - metadataInThrift.setJson_metadata(jsonMetaData); - metadataInThrift.setData_page_offset(dataPageOffset); - metadataInThrift.setIndex_page_offset(indexPageOffset); - metadataInThrift.setDictionary_page_offset(dictionaryPageOffset); - return metadataInThrift; - } - - private void initTimeSeriesChunkMetaDataInTSFile( - cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData metadataInThrift) { - properties = new TimeSeriesChunkProperties( - metadataInThrift.getMeasurement_uid(), - metadataInThrift.getTimeseries_chunk_type() == null ? null : TSChunkType.valueOf(metadataInThrift.getTimeseries_chunk_type().toString()), - metadataInThrift.getFile_offset(), - metadataInThrift.getCompression_type() == null ? null : CompressionTypeName.valueOf(metadataInThrift.getCompression_type().toString())); - numRows = metadataInThrift.getNum_rows(); - totalByteSize = metadataInThrift.getTotal_byte_size(); - jsonMetaData = metadataInThrift.getJson_metadata(); - dataPageOffset = metadataInThrift.getData_page_offset(); - indexPageOffset = metadataInThrift.getIndex_page_offset(); - dictionaryPageOffset = metadataInThrift.getDictionary_page_offset(); - } - - @Override - public String toString() { - return String.format( - "TimeSeriesChunkProperties %s, numRows %d, totalByteSize %d, jsonMetaData %s, dataPageOffset %d, indexPageOffset %d, dictionaryPageOffset %s", - properties, numRows, totalByteSize, jsonMetaData, dataPageOffset, indexPageOffset, - dictionaryPageOffset); - } - - public long getNumRows() { - return numRows; - } - - public void setNumRows(long numRows) { - this.numRows = numRows; - } - - public long getTotalByteSize() { - return totalByteSize; - } - - public void setTotalByteSize(long totalByteSize) { - this.totalByteSize = totalByteSize; - } - - public List getJsonMetaData() { - return jsonMetaData; - } - - public void setJsonMetaData(List jsonMetaData) { - this.jsonMetaData = jsonMetaData; - } - - public long getDataPageOffset() { - return dataPageOffset; - } - - public void setDataPageOffset(long dataPageOffset) { - this.dataPageOffset = dataPageOffset; - } - - public long getIndexPageOffset() { - return indexPageOffset; - } - - public void setIndexPageOffset(long indexPageOffset) { - this.indexPageOffset = indexPageOffset; - } - - public long getDictionaryPageOffset() { - return dictionaryPageOffset; - } - - public void setDictionaryPageOffset(long dictionaryPageOffset) { - this.dictionaryPageOffset = dictionaryPageOffset; - } - - public TInTimeSeriesChunkMetaData getTInTimeSeriesChunkMetaData() { - return tInTimeSeriesChunkMetaData; - } - - public void setTInTimeSeriesChunkMetaData(TInTimeSeriesChunkMetaData tInTimeSeriesChunkMetaData) { - this.tInTimeSeriesChunkMetaData = tInTimeSeriesChunkMetaData; - } - - public VInTimeSeriesChunkMetaData getVInTimeSeriesChunkMetaData() { - return vInTimeSeriesChunkMetaData; - } - - public void 
setVInTimeSeriesChunkMetaData(VInTimeSeriesChunkMetaData vInTimeSeriesChunkMetaData) { - this.vInTimeSeriesChunkMetaData = vInTimeSeriesChunkMetaData; - } - - public long getMaxTombstoneTime() { - return maxTombstoneTime; - } - - public void setMaxTombstoneTime(long maxTombstoneTime) { - this.maxTombstoneTime = maxTombstoneTime; - } - - public long getWrittenTime() { - return writtenTime; - } - - public void setWrittenTime(long writtenTime) { - this.writtenTime = writtenTime; - } -} +package cn.edu.tsinghua.tsfile.file.metadata; + +import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; +import cn.edu.tsinghua.tsfile.file.metadata.enums.CompressionTypeName; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSChunkType; +import cn.edu.tsinghua.tsfile.format.CompressionType; +import cn.edu.tsinghua.tsfile.format.TimeSeriesChunkType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.util.ArrayList; +import java.util.List; + +/** + * For more information, see TimeSeriesChunkMetaData in cn.edu.thu.tsfile.format package + */ +public class TimeSeriesChunkMetaData + implements IConverter { + private static final Logger LOGGER = LoggerFactory.getLogger(TimeSeriesChunkMetaData.class); + + private TimeSeriesChunkProperties properties; + + private long numRows; + + /** + * total byte size of all uncompressed pages in this time series chunk (including the headers) + */ + private long totalByteSize; + + /** + * Optional json metadata + */ + private List jsonMetaData; + + /** + * Byte offset from beginning of file to first data page + */ + private long dataPageOffset; + + /** + * Byte offset from beginning of file to root index page + */ + private long indexPageOffset; + + /** + * Byte offset from the beginning of file to first (only) dictionary page + */ + private long dictionaryPageOffset; + + /** + * one of TSeriesMetaData and VSeriesMetaData is not null + */ + private TInTimeSeriesChunkMetaData tInTimeSeriesChunkMetaData; + private VInTimeSeriesChunkMetaData vInTimeSeriesChunkMetaData; + + /** + * The maximum time of the tombstones that take effect on this chunk. Only data with larger + * timestamps than this should be exposed to user. + */ + private long maxTombstoneTime; + + /** + * The time when the RowGroup of this chunk is closed. This will not be written out and will only + * be set when read together with its RowGroup during querying. 
+ */ + private long writtenTime; + + public TimeSeriesChunkMetaData() { + properties = new TimeSeriesChunkProperties(); + jsonMetaData = new ArrayList(); + } + + public TimeSeriesChunkMetaData(String measurementUID, TSChunkType tsChunkGroup, long fileOffset, + CompressionTypeName compression) { + this(); + this.properties = + new TimeSeriesChunkProperties(measurementUID, tsChunkGroup, fileOffset, compression); + } + + public TimeSeriesChunkProperties getProperties() { + return properties; + } + + public void setProperties(TimeSeriesChunkProperties properties) { + this.properties = properties; + } + + @Override + public cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData convertToThrift() { + try { + cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData metadataInThrift = + initTimeSeriesChunkMetaDataInThrift(); + if (tInTimeSeriesChunkMetaData != null) { + metadataInThrift.setTime_tsc(tInTimeSeriesChunkMetaData.convertToThrift()); + } + if (vInTimeSeriesChunkMetaData != null) { + metadataInThrift.setValue_tsc(vInTimeSeriesChunkMetaData.convertToThrift()); + } + return metadataInThrift; + } catch (Exception e) { + if (LOGGER.isErrorEnabled()) + LOGGER.error( + "tsfile-file TimeSeriesChunkMetaData: failed to convert TimeSeriesChunkMetaData from TSFile to thrift, content is {}", + this, e); + } + return null; + } + + @Override + public void convertToTSF(cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData metadataInThrift) { + try { + initTimeSeriesChunkMetaDataInTSFile(metadataInThrift); + if (metadataInThrift.getTime_tsc() == null) { + tInTimeSeriesChunkMetaData = null; + } else { + if (tInTimeSeriesChunkMetaData == null) { + tInTimeSeriesChunkMetaData = new TInTimeSeriesChunkMetaData(); + } + tInTimeSeriesChunkMetaData.convertToTSF(metadataInThrift.getTime_tsc()); + } + if (metadataInThrift.getValue_tsc() == null) { + vInTimeSeriesChunkMetaData = null; + } else { + if (vInTimeSeriesChunkMetaData == null) { + vInTimeSeriesChunkMetaData = new VInTimeSeriesChunkMetaData(); + } + vInTimeSeriesChunkMetaData.convertToTSF(metadataInThrift.getValue_tsc()); + } + } catch (Exception e) { + if (LOGGER.isErrorEnabled()) + LOGGER.error( + "tsfile-file TimeSeriesChunkMetaData: failed to convert TimeSeriesChunkMetaData from thrift to TSFile, content is {}", + metadataInThrift, e); + } + } + + private cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData initTimeSeriesChunkMetaDataInThrift() { + cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData metadataInThrift = + new cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData(properties.getMeasurementUID(), + properties.getTsChunkType() == null ? null + : TimeSeriesChunkType.valueOf(properties.getTsChunkType().toString()), + properties.getFileOffset(), properties.getCompression() == null ? null + : CompressionType.valueOf(properties.getCompression().toString())); + metadataInThrift.setNum_rows(numRows); + metadataInThrift.setTotal_byte_size(totalByteSize); + metadataInThrift.setJson_metadata(jsonMetaData); + metadataInThrift.setData_page_offset(dataPageOffset); + metadataInThrift.setIndex_page_offset(indexPageOffset); + metadataInThrift.setDictionary_page_offset(dictionaryPageOffset); + return metadataInThrift; + } + + private void initTimeSeriesChunkMetaDataInTSFile( + cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData metadataInThrift) { + properties = new TimeSeriesChunkProperties(metadataInThrift.getMeasurement_uid(), + metadataInThrift.getTimeseries_chunk_type() == null ? 
null + : TSChunkType.valueOf(metadataInThrift.getTimeseries_chunk_type().toString()), + metadataInThrift.getFile_offset(), metadataInThrift.getCompression_type() == null ? null + : CompressionTypeName.valueOf(metadataInThrift.getCompression_type().toString())); + numRows = metadataInThrift.getNum_rows(); + totalByteSize = metadataInThrift.getTotal_byte_size(); + jsonMetaData = metadataInThrift.getJson_metadata(); + dataPageOffset = metadataInThrift.getData_page_offset(); + indexPageOffset = metadataInThrift.getIndex_page_offset(); + dictionaryPageOffset = metadataInThrift.getDictionary_page_offset(); + } + + @Override + public String toString() { + return String.format( + "TimeSeriesChunkProperties %s, numRows %d, totalByteSize %d, jsonMetaData %s, dataPageOffset %d, indexPageOffset %d, dictionaryPageOffset %s", + properties, numRows, totalByteSize, jsonMetaData, dataPageOffset, indexPageOffset, + dictionaryPageOffset); + } + + public long getNumRows() { + return numRows; + } + + public void setNumRows(long numRows) { + this.numRows = numRows; + } + + public long getTotalByteSize() { + return totalByteSize; + } + + public void setTotalByteSize(long totalByteSize) { + this.totalByteSize = totalByteSize; + } + + public List getJsonMetaData() { + return jsonMetaData; + } + + public void setJsonMetaData(List jsonMetaData) { + this.jsonMetaData = jsonMetaData; + } + + public long getDataPageOffset() { + return dataPageOffset; + } + + public void setDataPageOffset(long dataPageOffset) { + this.dataPageOffset = dataPageOffset; + } + + public long getIndexPageOffset() { + return indexPageOffset; + } + + public void setIndexPageOffset(long indexPageOffset) { + this.indexPageOffset = indexPageOffset; + } + + public long getDictionaryPageOffset() { + return dictionaryPageOffset; + } + + public void setDictionaryPageOffset(long dictionaryPageOffset) { + this.dictionaryPageOffset = dictionaryPageOffset; + } + + public TInTimeSeriesChunkMetaData getTInTimeSeriesChunkMetaData() { + return tInTimeSeriesChunkMetaData; + } + + public void setTInTimeSeriesChunkMetaData(TInTimeSeriesChunkMetaData tInTimeSeriesChunkMetaData) { + this.tInTimeSeriesChunkMetaData = tInTimeSeriesChunkMetaData; + } + + public VInTimeSeriesChunkMetaData getVInTimeSeriesChunkMetaData() { + return vInTimeSeriesChunkMetaData; + } + + public void setVInTimeSeriesChunkMetaData(VInTimeSeriesChunkMetaData vInTimeSeriesChunkMetaData) { + this.vInTimeSeriesChunkMetaData = vInTimeSeriesChunkMetaData; + } + + public long getMaxTombstoneTime() { + return maxTombstoneTime; + } + + public void setMaxTombstoneTime(long maxTombstoneTime) { + this.maxTombstoneTime = maxTombstoneTime; + } + + public long getWrittenTime() { + return writtenTime; + } + + public void setWrittenTime(long writtenTime) { + this.writtenTime = writtenTime; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesChunkProperties.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesChunkProperties.java index 2afefd4f..10227c83 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesChunkProperties.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesChunkProperties.java @@ -1,56 +1,55 @@ -package cn.edu.tsinghua.tsfile.file.metadata; - -import cn.edu.tsinghua.tsfile.file.metadata.enums.CompressionTypeName; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSChunkType; - -/** - * store required members in TimeSeriesChunkMetaData - */ -public class TimeSeriesChunkProperties { - private String 
measurementUID; - - /** - * Type of this time series - */ - @Deprecated - private TSChunkType tsChunkType; - - /** - * Byte offset in file_path to the RowGroupMetaData - */ - private long fileOffset; - private CompressionTypeName compression; - - public TimeSeriesChunkProperties() { - } - - public TimeSeriesChunkProperties(String measurementUID, TSChunkType tsChunkType, long fileOffset, - CompressionTypeName compression) { - this.measurementUID = measurementUID; - this.tsChunkType = tsChunkType; - this.fileOffset = fileOffset; - this.compression = compression; - } - - public TSChunkType getTsChunkType() { - return tsChunkType; - } - - public long getFileOffset() { - return fileOffset; - } - - public CompressionTypeName getCompression() { - return compression; - } - - public String getMeasurementUID() { - return measurementUID; - } - - @Override - public String toString() { - return String.format("measurementUID %s, TSChunkType %s, fileOffset %d, CompressionTypeName %s", - measurementUID, tsChunkType, fileOffset, compression); - } -} +package cn.edu.tsinghua.tsfile.file.metadata; + +import cn.edu.tsinghua.tsfile.file.metadata.enums.CompressionTypeName; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSChunkType; + +/** + * store required members in TimeSeriesChunkMetaData + */ +public class TimeSeriesChunkProperties { + private String measurementUID; + + /** + * Type of this time series + */ + @Deprecated + private TSChunkType tsChunkType; + + /** + * Byte offset in file_path to the RowGroupMetaData + */ + private long fileOffset; + private CompressionTypeName compression; + + public TimeSeriesChunkProperties() {} + + public TimeSeriesChunkProperties(String measurementUID, TSChunkType tsChunkType, long fileOffset, + CompressionTypeName compression) { + this.measurementUID = measurementUID; + this.tsChunkType = tsChunkType; + this.fileOffset = fileOffset; + this.compression = compression; + } + + public TSChunkType getTsChunkType() { + return tsChunkType; + } + + public long getFileOffset() { + return fileOffset; + } + + public CompressionTypeName getCompression() { + return compression; + } + + public String getMeasurementUID() { + return measurementUID; + } + + @Override + public String toString() { + return String.format("measurementUID %s, TSChunkType %s, fileOffset %d, CompressionTypeName %s", + measurementUID, tsChunkType, fileOffset, compression); + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesMetadata.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesMetadata.java index 8503b563..a697627c 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesMetadata.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesMetadata.java @@ -1,141 +1,141 @@ -package cn.edu.tsinghua.tsfile.file.metadata; - -import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSFreqType; -import cn.edu.tsinghua.tsfile.format.DataType; -import cn.edu.tsinghua.tsfile.format.FreqType; -import cn.edu.tsinghua.tsfile.format.TimeSeries; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; - -/** - * For more information, see TimeSeries in cn.edu.thu.tsfile.format package - */ -public class TimeSeriesMetadata implements IConverter { - private static final Logger LOGGER = LoggerFactory.getLogger(TimeSeriesMetadata.class); - - private String measurementUID; - - private TSDataType 
type; - - /** - * If type is FIXED_LEN_BYTE_ARRAY, this is the byte length of the values. Otherwise, if - * specified, this is the maximum bit length to store any of the values. (e.g. a low cardinality - * INT timeseries could have this set to 32). Note that this is in the schema, and therefore fixed - * for the entire file. - */ - private int typeLength; - - private TSFreqType freqType; - private List frequencies; - - /** - * If values for data consist of enum values, metadata will store all possible values in time - * series - */ - private List enumValues; - - public TimeSeriesMetadata() { - } - - public TimeSeriesMetadata(String measurementUID, TSDataType dataType) { - this.measurementUID = measurementUID; - this.type = dataType; - } - - @Override - public TimeSeries convertToThrift() { - try { - TimeSeries timeSeriesInThrift = new TimeSeries(measurementUID, - type == null ? null : DataType.valueOf(type.toString()), "");//FIXME remove deltaType from TimeSeries.java - timeSeriesInThrift.setType_length(typeLength); - timeSeriesInThrift.setFreq_type(freqType == null ? null : FreqType.valueOf(freqType.toString())); - timeSeriesInThrift.setFrequencies(frequencies); - timeSeriesInThrift.setEnum_values(enumValues); - return timeSeriesInThrift; - } catch (Exception e) { - if (LOGGER.isErrorEnabled()) - LOGGER.error( - "tsfile-file TimeSeriesMetadata: failed to convert TimeSeriesMetadata from TSFile to thrift, content is {}", - this, e); - throw e; - } - } - - @Override - public void convertToTSF(TimeSeries timeSeriesInThrift) { - try { - measurementUID = timeSeriesInThrift.getMeasurement_uid(); - type = timeSeriesInThrift.getType() == null ? null - : TSDataType.valueOf(timeSeriesInThrift.getType().toString()); - typeLength = timeSeriesInThrift.getType_length(); - freqType = timeSeriesInThrift.getFreq_type() == null ? 
null - : TSFreqType.valueOf(timeSeriesInThrift.getFreq_type().toString()); - frequencies = timeSeriesInThrift.getFrequencies(); - enumValues = timeSeriesInThrift.getEnum_values(); - } catch (Exception e) { - if (LOGGER.isErrorEnabled()) - LOGGER.error( - "tsfile-file TimeSeriesMetadata: failed to convert TimeSeriesMetadata from TSFile to thrift, content is {}", - timeSeriesInThrift, e); - } - } - - public String getMeasurementUID() { - return measurementUID; - } - - public void setMeasurementUID(String measurementUID) { - this.measurementUID = measurementUID; - } - - public int getTypeLength() { - return typeLength; - } - - public void setTypeLength(int typeLength) { - this.typeLength = typeLength; - } - - public TSDataType getType() { - return type; - } - - public void setType(TSDataType type) { - this.type = type; - } - - public TSFreqType getFreqType() { - return freqType; - } - - public void setFreqType(TSFreqType freqType) { - this.freqType = freqType; - } - - public List getFrequencies() { - return frequencies; - } - - public void setFrequencies(List frequencies) { - this.frequencies = frequencies; - } - - public List getEnumValues() { - return enumValues; - } - - public void setEnumValues(List enumValues) { - this.enumValues = enumValues; - } - - @Override - public String toString() { - return String.format( - "TimeSeriesMetadata: measurementUID %s, type length %d, DataType %s, FreqType %s,frequencies %s", - measurementUID, typeLength, type, freqType, frequencies); - } -} +package cn.edu.tsinghua.tsfile.file.metadata; + +import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSFreqType; +import cn.edu.tsinghua.tsfile.format.DataType; +import cn.edu.tsinghua.tsfile.format.FreqType; +import cn.edu.tsinghua.tsfile.format.TimeSeries; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.util.List; + +/** + * For more information, see TimeSeries in cn.edu.thu.tsfile.format package + */ +public class TimeSeriesMetadata implements IConverter { + private static final Logger LOGGER = LoggerFactory.getLogger(TimeSeriesMetadata.class); + + private String measurementUID; + + private TSDataType type; + + /** + * If type is FIXED_LEN_BYTE_ARRAY, this is the byte length of the values. Otherwise, if + * specified, this is the maximum bit length to store any of the values. (e.g. a low cardinality + * INT timeseries could have this set to 32). Note that this is in the schema, and therefore fixed + * for the entire file. + */ + private int typeLength; + + private TSFreqType freqType; + private List frequencies; + + /** + * If values for data consist of enum values, metadata will store all possible values in time + * series + */ + private List enumValues; + + public TimeSeriesMetadata() {} + + public TimeSeriesMetadata(String measurementUID, TSDataType dataType) { + this.measurementUID = measurementUID; + this.type = dataType; + } + + @Override + public TimeSeries convertToThrift() { + try { + TimeSeries timeSeriesInThrift = new TimeSeries(measurementUID, + type == null ? null : DataType.valueOf(type.toString()), "");// FIXME remove deltaType + // from TimeSeries.java + timeSeriesInThrift.setType_length(typeLength); + timeSeriesInThrift + .setFreq_type(freqType == null ? 
null : FreqType.valueOf(freqType.toString())); + timeSeriesInThrift.setFrequencies(frequencies); + timeSeriesInThrift.setEnum_values(enumValues); + return timeSeriesInThrift; + } catch (Exception e) { + if (LOGGER.isErrorEnabled()) + LOGGER.error( + "tsfile-file TimeSeriesMetadata: failed to convert TimeSeriesMetadata from TSFile to thrift, content is {}", + this, e); + throw e; + } + } + + @Override + public void convertToTSF(TimeSeries timeSeriesInThrift) { + try { + measurementUID = timeSeriesInThrift.getMeasurement_uid(); + type = timeSeriesInThrift.getType() == null ? null + : TSDataType.valueOf(timeSeriesInThrift.getType().toString()); + typeLength = timeSeriesInThrift.getType_length(); + freqType = timeSeriesInThrift.getFreq_type() == null ? null + : TSFreqType.valueOf(timeSeriesInThrift.getFreq_type().toString()); + frequencies = timeSeriesInThrift.getFrequencies(); + enumValues = timeSeriesInThrift.getEnum_values(); + } catch (Exception e) { + if (LOGGER.isErrorEnabled()) + LOGGER.error( + "tsfile-file TimeSeriesMetadata: failed to convert TimeSeriesMetadata from TSFile to thrift, content is {}", + timeSeriesInThrift, e); + } + } + + public String getMeasurementUID() { + return measurementUID; + } + + public void setMeasurementUID(String measurementUID) { + this.measurementUID = measurementUID; + } + + public int getTypeLength() { + return typeLength; + } + + public void setTypeLength(int typeLength) { + this.typeLength = typeLength; + } + + public TSDataType getType() { + return type; + } + + public void setType(TSDataType type) { + this.type = type; + } + + public TSFreqType getFreqType() { + return freqType; + } + + public void setFreqType(TSFreqType freqType) { + this.freqType = freqType; + } + + public List getFrequencies() { + return frequencies; + } + + public void setFrequencies(List frequencies) { + this.frequencies = frequencies; + } + + public List getEnumValues() { + return enumValues; + } + + public void setEnumValues(List enumValues) { + this.enumValues = enumValues; + } + + @Override + public String toString() { + return String.format( + "TimeSeriesMetadata: measurementUID %s, type length %d, DataType %s, FreqType %s,frequencies %s", + measurementUID, typeLength, type, freqType, frequencies); + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsDeltaObject.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsDeltaObject.java index 0e70407b..ab6e7578 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsDeltaObject.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsDeltaObject.java @@ -3,36 +3,36 @@ import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; import cn.edu.tsinghua.tsfile.format.DeltaObject; -public class TsDeltaObject implements IConverter{ - /** start position of RowGroupMetadataBlock in file **/ - public long offset; - - /** size of RowGroupMetadataBlock in byte **/ - public int metadataBlockSize; - - /** start time for a delta object **/ - public long startTime; - - /** end time for a delta object **/ - public long endTime; - - public TsDeltaObject(long offset, int metadataBlockSize, long startTime, long endTime){ - this.offset = offset; - this.metadataBlockSize = metadataBlockSize; - this.startTime = startTime; - this.endTime = endTime; - } - - @Override - public DeltaObject convertToThrift() { - return new DeltaObject(offset, metadataBlockSize, startTime, endTime); - } - - @Override - public void convertToTSF(DeltaObject metadata) { - this.offset = metadata.getOffset(); - 
this.metadataBlockSize = metadata.getMetadata_block_size(); - this.startTime = metadata.getStart_time(); - this.endTime = metadata.getEnd_time(); - } +public class TsDeltaObject implements IConverter { + /** start position of RowGroupMetadataBlock in file **/ + public long offset; + + /** size of RowGroupMetadataBlock in byte **/ + public int metadataBlockSize; + + /** start time for a delta object **/ + public long startTime; + + /** end time for a delta object **/ + public long endTime; + + public TsDeltaObject(long offset, int metadataBlockSize, long startTime, long endTime) { + this.offset = offset; + this.metadataBlockSize = metadataBlockSize; + this.startTime = startTime; + this.endTime = endTime; + } + + @Override + public DeltaObject convertToThrift() { + return new DeltaObject(offset, metadataBlockSize, startTime, endTime); + } + + @Override + public void convertToTSF(DeltaObject metadata) { + this.offset = metadata.getOffset(); + this.metadataBlockSize = metadata.getMetadata_block_size(); + this.startTime = metadata.getStart_time(); + this.endTime = metadata.getEnd_time(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsDigest.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsDigest.java index 0b0e8eaf..a8571e1f 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsDigest.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsDigest.java @@ -1,79 +1,77 @@ -package cn.edu.tsinghua.tsfile.file.metadata; - -import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; -import cn.edu.tsinghua.tsfile.format.Digest; - -import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Map; - -/** - * For more information, see Digest in cn.edu.thu.tsfile.format package - */ -public class TsDigest implements IConverter { - /** - * Digest/statistics per row group and per page. - */ - public Map statistics; - - public TsDigest() { - } - - public TsDigest(Map statistics) { - this.statistics = statistics; - } - - public void setStatistics(Map statistics) { - this.statistics = statistics; - } - - public Map getStatistics(){ - return this.statistics; - } - - public void addStatistics(String key, ByteBuffer value) { - if(statistics == null) { - statistics = new HashMap<>(); - } - statistics.put(key, value); - } - - @Override - public String toString() { - return statistics != null ? 
statistics.toString() : ""; - } - - @Override - public Digest convertToThrift() { - Digest digest = new Digest(); - if (statistics != null) { - Map statisticsInThrift = new HashMap<>(); - for (String key : statistics.keySet()) { - statisticsInThrift.put(key, statistics.get(key)); - } - digest.setStatistics(statisticsInThrift); - } - return digest; - } - - @Override - public void convertToTSF(Digest digestInThrift) { - if (digestInThrift != null) { - Map statisticsInThrift = digestInThrift.getStatistics(); - if (statisticsInThrift != null) { - statistics = new HashMap<>(); - for (String key : statisticsInThrift.keySet()) { - statistics.put(key, byteBufferDeepCopy(statisticsInThrift.get(key))); - } - } else { - statistics = null; - } - } - } - - public ByteBuffer byteBufferDeepCopy(ByteBuffer src) { - ByteBuffer copy = ByteBuffer.allocate(src.remaining()).put(src.slice()); - copy.flip(); - return copy; - } -} +package cn.edu.tsinghua.tsfile.file.metadata; + +import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; +import cn.edu.tsinghua.tsfile.format.Digest; +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +/** + * For more information, see Digest in cn.edu.thu.tsfile.format package + */ +public class TsDigest implements IConverter { + /** + * Digest/statistics per row group and per page. + */ + public Map statistics; + + public TsDigest() {} + + public TsDigest(Map statistics) { + this.statistics = statistics; + } + + public void setStatistics(Map statistics) { + this.statistics = statistics; + } + + public Map getStatistics() { + return this.statistics; + } + + public void addStatistics(String key, ByteBuffer value) { + if (statistics == null) { + statistics = new HashMap<>(); + } + statistics.put(key, value); + } + + @Override + public String toString() { + return statistics != null ? 
statistics.toString() : ""; + } + + @Override + public Digest convertToThrift() { + Digest digest = new Digest(); + if (statistics != null) { + Map statisticsInThrift = new HashMap<>(); + for (String key : statistics.keySet()) { + statisticsInThrift.put(key, statistics.get(key)); + } + digest.setStatistics(statisticsInThrift); + } + return digest; + } + + @Override + public void convertToTSF(Digest digestInThrift) { + if (digestInThrift != null) { + Map statisticsInThrift = digestInThrift.getStatistics(); + if (statisticsInThrift != null) { + statistics = new HashMap<>(); + for (String key : statisticsInThrift.keySet()) { + statistics.put(key, byteBufferDeepCopy(statisticsInThrift.get(key))); + } + } else { + statistics = null; + } + } + } + + public ByteBuffer byteBufferDeepCopy(ByteBuffer src) { + ByteBuffer copy = ByteBuffer.allocate(src.remaining()).put(src.slice()); + copy.flip(); + return copy; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsFileMetaData.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsFileMetaData.java index 8d24c4a3..7287c77d 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsFileMetaData.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsFileMetaData.java @@ -1,263 +1,271 @@ -package cn.edu.tsinghua.tsfile.file.metadata; - -import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import cn.edu.tsinghua.tsfile.format.DeltaObject; -import cn.edu.tsinghua.tsfile.format.FileMetaData; -import cn.edu.tsinghua.tsfile.format.TimeSeries; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.*; - -/** - * TSFileMetaData collects all metadata info and saves in its data structure - */ -public class TsFileMetaData implements IConverter { - private static final Logger LOGGER = LoggerFactory.getLogger(TsFileMetaData.class); - - private Map deltaObjectMap; - - /** - * TSFile schema for this file. This schema contains metadata for all the time series. The schema - * is represented as a list. - */ - private List timeSeriesList; - - /** - * Version of this file - */ - private int currentVersion; - - /** - * Optional json metadata - */ - private List jsonMetaData; - - /** - * String for application that wrote this file. This should be in the format version - * (build ). e.g. impala version 1.0 (build SHA-1_hash_code) - */ - private String createdBy; - - /** - * User specified props - */ - private Map props; - - public TsFileMetaData() { - } - - /** - * @param timeSeriesList - time series info list - * @param currentVersion - current version - */ - public TsFileMetaData(Map deltaObjectMap, List timeSeriesList, int currentVersion) { - this.props = new HashMap<>(); - this.deltaObjectMap = deltaObjectMap; - this.timeSeriesList = timeSeriesList; - this.currentVersion = currentVersion; - } - - /** - * add time series metadata to list. 
THREAD NOT SAFE - * @param timeSeries series metadata to add - */ - public void addTimeSeriesMetaData(TimeSeriesMetadata timeSeries) { - if (timeSeriesList == null) { - timeSeriesList = new ArrayList<>(); - } - timeSeriesList.add(timeSeries); - } - -// /** -// * get all delta object uid and their types -// * -// * @return set of {@code Pair} -// */ -// public Set> getAllDeltaObjects() { -// // Pair -// Set> deltaObjectSet = new HashSet>(); -// if (rowGroupMetadataList != null) { -// for (RowGroupMetaData rowGroup : rowGroupMetadataList) { -// deltaObjectSet.add( -// new Pair(rowGroup.getDeltaObjectUID(), rowGroup.getDeltaObjectType())); -// } -// } -// return deltaObjectSet; -// } - - @Override - public String toString() { - return String.format("TSFMetaData { DeltaOjectMap: %s, timeSeries list %s, current version %d }", deltaObjectMap, - timeSeriesList, currentVersion); - } - - /** - * create file metadata in thrift format. For more information about file metadata - * in cn.edu.thu.tsfile.format package, see FileMetaData in tsfile-format - * - * @return file metadata in thrift format - */ - @Override - public FileMetaData convertToThrift() { - try { - List timeSeriesListInThrift = null; - if (timeSeriesList != null) { - timeSeriesListInThrift = new ArrayList(); - for (TimeSeriesMetadata timeSeries : timeSeriesList) { - timeSeriesListInThrift.add(timeSeries.convertToThrift()); - } - } - - Map deltaObjectMapInThrift = null; - if( deltaObjectMap != null){ - deltaObjectMapInThrift = new HashMap<>(); - for(Map.Entry entry : deltaObjectMap.entrySet()){ - TsDeltaObject object = entry.getValue(); - deltaObjectMapInThrift.put(entry.getKey(), new DeltaObject(object.offset, - object.metadataBlockSize, object.startTime, object.endTime)); - } - } - - FileMetaData metaDataInThrift = new FileMetaData(currentVersion, deltaObjectMapInThrift, timeSeriesListInThrift); - metaDataInThrift.setCreated_by(createdBy); - metaDataInThrift.setJson_metadata(jsonMetaData); - metaDataInThrift.setProperties(props); - return metaDataInThrift; - } catch (Exception e) { - LOGGER.error("TsFileMetaData: failed to convert file metadata from TSFile to thrift, content is {}", this, e); - throw e; - } - } - - /** - * receive file metadata in thrift format and convert it to tsfile format - * @param metadataInThrift - file metadata in thrift format - */ - @Override - public void convertToTSF(FileMetaData metadataInThrift) { - try { - if (metadataInThrift.getTimeseries_list() == null) { - timeSeriesList = null; - } else { - timeSeriesList = new ArrayList(); - - for (TimeSeries timeSeriesInThrift : metadataInThrift.getTimeseries_list()) { - TimeSeriesMetadata timeSeriesInTSFile = new TimeSeriesMetadata(); - timeSeriesInTSFile.convertToTSF(timeSeriesInThrift); - timeSeriesList.add(timeSeriesInTSFile); - } - } - - if(metadataInThrift.getDelta_object_map() == null){ - deltaObjectMap = null; - } else { - deltaObjectMap = new HashMap<>(); - for (Map.Entry entry : metadataInThrift.getDelta_object_map().entrySet()){ - DeltaObject object = entry.getValue(); - deltaObjectMap.put(entry.getKey(), new TsDeltaObject(object.getOffset(), - object.getMetadata_block_size(), object.getStart_time(), object.getEnd_time())); - } - } - - currentVersion = metadataInThrift.getVersion(); - createdBy = metadataInThrift.getCreated_by(); - jsonMetaData = metadataInThrift.getJson_metadata(); - props = metadataInThrift.getProperties(); - } catch (Exception e) { - LOGGER.error("TsFileMetaData: failed to convert file metadata from thrift to TSFile, content is 
{}",metadataInThrift, e); - throw e; - } - - } - - public List getTimeSeriesList() { - return timeSeriesList; - } - - public void setTimeSeriesList(List timeSeriesList) { - this.timeSeriesList = timeSeriesList; - } - - public int getCurrentVersion() { - return currentVersion; - } - - public void setCurrentVersion(int currentVersion) { - this.currentVersion = currentVersion; - } - - public List getJsonMetaData() { - return jsonMetaData; - } - - public void setJsonMetaData(List jsonMetaData) { - this.jsonMetaData = jsonMetaData; - } - - public String getCreatedBy() { - return createdBy; - } - - public void setCreatedBy(String createdBy) { - this.createdBy = createdBy; - } - - public void addProp(String key, String value) { - props.put(key, value); - } - - public Map getProps() { - return props; - } - - public void setProps(Map properties) { - this.props.clear(); - this.props.putAll(properties); - } - - public String getProp(String key) { - if (props.containsKey(key)) - return props.get(key); - else - return null; - } - - public Map getDeltaObjectMap() { - return deltaObjectMap; - } - - public void setDeltaObjectMap(Map deltaObjectMap) { - this.deltaObjectMap = deltaObjectMap; - } - - public boolean containsDeltaObject(String DeltaObjUID) { - return this.deltaObjectMap.containsKey(DeltaObjUID); - } - - public TsDeltaObject getDeltaObject(String DeltaObjUID) { - return this.deltaObjectMap.get(DeltaObjUID); - } - - //For Tsfile-Spark-Connector - public boolean containsMeasurement(String measurement) { - for(TimeSeriesMetadata ts: timeSeriesList ){ - if(ts.getMeasurementUID().equals(measurement)) { - return true; - } - } - return false; - } - - //For Tsfile-Spark-Connector - public TSDataType getType(String measurement) throws IOException{ - for(TimeSeriesMetadata ts: timeSeriesList ){ - if(ts.getMeasurementUID().equals(measurement)) { - return ts.getType(); - } - } - throw new IOException("Measurement " + measurement + " does not exist in the current file."); - } -} +package cn.edu.tsinghua.tsfile.file.metadata; + +import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.format.DeltaObject; +import cn.edu.tsinghua.tsfile.format.FileMetaData; +import cn.edu.tsinghua.tsfile.format.TimeSeries; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.util.*; + +/** + * TSFileMetaData collects all metadata info and saves in its data structure + */ +public class TsFileMetaData implements IConverter { + private static final Logger LOGGER = LoggerFactory.getLogger(TsFileMetaData.class); + + private Map deltaObjectMap; + + /** + * TSFile schema for this file. This schema contains metadata for all the time series. The schema + * is represented as a list. + */ + private List timeSeriesList; + + /** + * Version of this file + */ + private int currentVersion; + + /** + * Optional json metadata + */ + private List jsonMetaData; + + /** + * String for application that wrote this file. This should be in the format version + * (build ). e.g. 
impala version 1.0 (build SHA-1_hash_code) + */ + private String createdBy; + + /** + * User specified props + */ + private Map props; + + public TsFileMetaData() {} + + /** + * @param timeSeriesList - time series info list + * @param currentVersion - current version + */ + public TsFileMetaData(Map deltaObjectMap, + List timeSeriesList, int currentVersion) { + this.props = new HashMap<>(); + this.deltaObjectMap = deltaObjectMap; + this.timeSeriesList = timeSeriesList; + this.currentVersion = currentVersion; + } + + /** + * add time series metadata to list. THREAD NOT SAFE + * + * @param timeSeries series metadata to add + */ + public void addTimeSeriesMetaData(TimeSeriesMetadata timeSeries) { + if (timeSeriesList == null) { + timeSeriesList = new ArrayList<>(); + } + timeSeriesList.add(timeSeries); + } + + // /** + // * get all delta object uid and their types + // * + // * @return set of {@code Pair} + // */ + // public Set> getAllDeltaObjects() { + // // Pair + // Set> deltaObjectSet = new HashSet>(); + // if (rowGroupMetadataList != null) { + // for (RowGroupMetaData rowGroup : rowGroupMetadataList) { + // deltaObjectSet.add( + // new Pair(rowGroup.getDeltaObjectUID(), rowGroup.getDeltaObjectType())); + // } + // } + // return deltaObjectSet; + // } + + @Override + public String toString() { + return String.format( + "TSFMetaData { DeltaOjectMap: %s, timeSeries list %s, current version %d }", deltaObjectMap, + timeSeriesList, currentVersion); + } + + /** + * create file metadata in thrift format. For more information about file metadata in + * cn.edu.thu.tsfile.format package, see FileMetaData in tsfile-format + * + * @return file metadata in thrift format + */ + @Override + public FileMetaData convertToThrift() { + try { + List timeSeriesListInThrift = null; + if (timeSeriesList != null) { + timeSeriesListInThrift = new ArrayList(); + for (TimeSeriesMetadata timeSeries : timeSeriesList) { + timeSeriesListInThrift.add(timeSeries.convertToThrift()); + } + } + + Map deltaObjectMapInThrift = null; + if (deltaObjectMap != null) { + deltaObjectMapInThrift = new HashMap<>(); + for (Map.Entry entry : deltaObjectMap.entrySet()) { + TsDeltaObject object = entry.getValue(); + deltaObjectMapInThrift.put(entry.getKey(), new DeltaObject(object.offset, + object.metadataBlockSize, object.startTime, object.endTime)); + } + } + + FileMetaData metaDataInThrift = + new FileMetaData(currentVersion, deltaObjectMapInThrift, timeSeriesListInThrift); + metaDataInThrift.setCreated_by(createdBy); + metaDataInThrift.setJson_metadata(jsonMetaData); + metaDataInThrift.setProperties(props); + return metaDataInThrift; + } catch (Exception e) { + LOGGER.error( + "TsFileMetaData: failed to convert file metadata from TSFile to thrift, content is {}", + this, e); + throw e; + } + } + + /** + * receive file metadata in thrift format and convert it to tsfile format + * + * @param metadataInThrift - file metadata in thrift format + */ + @Override + public void convertToTSF(FileMetaData metadataInThrift) { + try { + if (metadataInThrift.getTimeseries_list() == null) { + timeSeriesList = null; + } else { + timeSeriesList = new ArrayList(); + + for (TimeSeries timeSeriesInThrift : metadataInThrift.getTimeseries_list()) { + TimeSeriesMetadata timeSeriesInTSFile = new TimeSeriesMetadata(); + timeSeriesInTSFile.convertToTSF(timeSeriesInThrift); + timeSeriesList.add(timeSeriesInTSFile); + } + } + + if (metadataInThrift.getDelta_object_map() == null) { + deltaObjectMap = null; + } else { + deltaObjectMap = new HashMap<>(); + for 
(Map.Entry entry : metadataInThrift.getDelta_object_map() + .entrySet()) { + DeltaObject object = entry.getValue(); + deltaObjectMap.put(entry.getKey(), new TsDeltaObject(object.getOffset(), + object.getMetadata_block_size(), object.getStart_time(), object.getEnd_time())); + } + } + + currentVersion = metadataInThrift.getVersion(); + createdBy = metadataInThrift.getCreated_by(); + jsonMetaData = metadataInThrift.getJson_metadata(); + props = metadataInThrift.getProperties(); + } catch (Exception e) { + LOGGER.error( + "TsFileMetaData: failed to convert file metadata from thrift to TSFile, content is {}", + metadataInThrift, e); + throw e; + } + + } + + public List getTimeSeriesList() { + return timeSeriesList; + } + + public void setTimeSeriesList(List timeSeriesList) { + this.timeSeriesList = timeSeriesList; + } + + public int getCurrentVersion() { + return currentVersion; + } + + public void setCurrentVersion(int currentVersion) { + this.currentVersion = currentVersion; + } + + public List getJsonMetaData() { + return jsonMetaData; + } + + public void setJsonMetaData(List jsonMetaData) { + this.jsonMetaData = jsonMetaData; + } + + public String getCreatedBy() { + return createdBy; + } + + public void setCreatedBy(String createdBy) { + this.createdBy = createdBy; + } + + public void addProp(String key, String value) { + props.put(key, value); + } + + public Map getProps() { + return props; + } + + public void setProps(Map properties) { + this.props.clear(); + this.props.putAll(properties); + } + + public String getProp(String key) { + if (props.containsKey(key)) + return props.get(key); + else + return null; + } + + public Map getDeltaObjectMap() { + return deltaObjectMap; + } + + public void setDeltaObjectMap(Map deltaObjectMap) { + this.deltaObjectMap = deltaObjectMap; + } + + public boolean containsDeltaObject(String DeltaObjUID) { + return this.deltaObjectMap.containsKey(DeltaObjUID); + } + + public TsDeltaObject getDeltaObject(String DeltaObjUID) { + return this.deltaObjectMap.get(DeltaObjUID); + } + + // For Tsfile-Spark-Connector + public boolean containsMeasurement(String measurement) { + for (TimeSeriesMetadata ts : timeSeriesList) { + if (ts.getMeasurementUID().equals(measurement)) { + return true; + } + } + return false; + } + + // For Tsfile-Spark-Connector + public TSDataType getType(String measurement) throws IOException { + for (TimeSeriesMetadata ts : timeSeriesList) { + if (ts.getMeasurementUID().equals(measurement)) { + return ts.getType(); + } + } + throw new IOException("Measurement " + measurement + " does not exist in the current file."); + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsRowGroupBlockMetaData.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsRowGroupBlockMetaData.java index 4e4798a9..43588a8e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsRowGroupBlockMetaData.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/TsRowGroupBlockMetaData.java @@ -2,84 +2,85 @@ import java.util.ArrayList; import java.util.List; - import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; import cn.edu.tsinghua.tsfile.format.RowGroupBlockMetaData; -public class TsRowGroupBlockMetaData implements IConverter{ - /** - * Row groups in this file - */ - private List rowGroupMetadataList; - - private String deltaObjectID; - - public TsRowGroupBlockMetaData(){ - rowGroupMetadataList = new ArrayList<>(); - } - - public TsRowGroupBlockMetaData(List rowGroupMetadataList){ - this.rowGroupMetadataList = 
rowGroupMetadataList; - } - - /** - * add row group metadata to rowGroups. THREAD NOT SAFE - * @param rowGroup - row group metadata to add - */ - public void addRowGroupMetaData(RowGroupMetaData rowGroup) { - if (rowGroupMetadataList == null) { - rowGroupMetadataList = new ArrayList(); - } - rowGroupMetadataList.add(rowGroup); - } - - public List getRowGroups() { - return rowGroupMetadataList; - } +public class TsRowGroupBlockMetaData implements IConverter { + /** + * Row groups in this file + */ + private List rowGroupMetadataList; + + private String deltaObjectID; - public void setRowGroups(List rowGroupMetadataList) { - this.rowGroupMetadataList = rowGroupMetadataList; + public TsRowGroupBlockMetaData() { + rowGroupMetadataList = new ArrayList<>(); + } + + public TsRowGroupBlockMetaData(List rowGroupMetadataList) { + this.rowGroupMetadataList = rowGroupMetadataList; + } + + /** + * add row group metadata to rowGroups. THREAD NOT SAFE + * + * @param rowGroup - row group metadata to add + */ + public void addRowGroupMetaData(RowGroupMetaData rowGroup) { + if (rowGroupMetadataList == null) { + rowGroupMetadataList = new ArrayList(); } + rowGroupMetadataList.add(rowGroup); + } + + public List getRowGroups() { + return rowGroupMetadataList; + } - @Override - public RowGroupBlockMetaData convertToThrift() { -// long numOfRows = 0; - List rowGroupMetaDataListInThrift = null; - if (rowGroupMetadataList != null) { - rowGroupMetaDataListInThrift = - new ArrayList(); - for (RowGroupMetaData rowGroupMetaData : rowGroupMetadataList) { -// numOfRows += rowGroupMetaData.getNumOfRows(); - rowGroupMetaDataListInThrift.add(rowGroupMetaData.convertToThrift()); - } - } - RowGroupBlockMetaData rowGroupBlockMetaData= new RowGroupBlockMetaData(rowGroupMetaDataListInThrift); - rowGroupBlockMetaData.setDelta_object_id(deltaObjectID); - return rowGroupBlockMetaData; - } + public void setRowGroups(List rowGroupMetadataList) { + this.rowGroupMetadataList = rowGroupMetadataList; + } - @Override - public void convertToTSF(RowGroupBlockMetaData metadataInThrift) { - List rowGroupMetaDataListInThrift = - metadataInThrift.getRow_groups_metadata(); - if (rowGroupMetaDataListInThrift == null) { - rowGroupMetadataList = null; - } else { - rowGroupMetadataList = new ArrayList(); - for (cn.edu.tsinghua.tsfile.format.RowGroupMetaData rowGroupMetaDataInThrift : rowGroupMetaDataListInThrift) { - RowGroupMetaData rowGroupMetaDataInTSFile = new RowGroupMetaData(); - rowGroupMetaDataInTSFile.convertToTSF(rowGroupMetaDataInThrift); - rowGroupMetadataList.add(rowGroupMetaDataInTSFile); - } - } - this.deltaObjectID = metadataInThrift.getDelta_object_id(); - } + @Override + public RowGroupBlockMetaData convertToThrift() { + // long numOfRows = 0; + List rowGroupMetaDataListInThrift = null; + if (rowGroupMetadataList != null) { + rowGroupMetaDataListInThrift = + new ArrayList(); + for (RowGroupMetaData rowGroupMetaData : rowGroupMetadataList) { + // numOfRows += rowGroupMetaData.getNumOfRows(); + rowGroupMetaDataListInThrift.add(rowGroupMetaData.convertToThrift()); + } + } + RowGroupBlockMetaData rowGroupBlockMetaData = + new RowGroupBlockMetaData(rowGroupMetaDataListInThrift); + rowGroupBlockMetaData.setDelta_object_id(deltaObjectID); + return rowGroupBlockMetaData; + } + + @Override + public void convertToTSF(RowGroupBlockMetaData metadataInThrift) { + List rowGroupMetaDataListInThrift = + metadataInThrift.getRow_groups_metadata(); + if (rowGroupMetaDataListInThrift == null) { + rowGroupMetadataList = null; + } else { + 
rowGroupMetadataList = new ArrayList(); + for (cn.edu.tsinghua.tsfile.format.RowGroupMetaData rowGroupMetaDataInThrift : rowGroupMetaDataListInThrift) { + RowGroupMetaData rowGroupMetaDataInTSFile = new RowGroupMetaData(); + rowGroupMetaDataInTSFile.convertToTSF(rowGroupMetaDataInThrift); + rowGroupMetadataList.add(rowGroupMetaDataInTSFile); + } + } + this.deltaObjectID = metadataInThrift.getDelta_object_id(); + } - public String getDeltaObjectID() { - return deltaObjectID; - } + public String getDeltaObjectID() { + return deltaObjectID; + } - public void setDeltaObjectID(String deltaObjectID) { - this.deltaObjectID = deltaObjectID; - } + public void setDeltaObjectID(String deltaObjectID) { + this.deltaObjectID = deltaObjectID; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/VInTimeSeriesChunkMetaData.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/VInTimeSeriesChunkMetaData.java index 3d3ce88c..37c0611a 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/VInTimeSeriesChunkMetaData.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/VInTimeSeriesChunkMetaData.java @@ -1,113 +1,113 @@ -package cn.edu.tsinghua.tsfile.file.metadata; - -import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import cn.edu.tsinghua.tsfile.format.DataType; -import cn.edu.tsinghua.tsfile.format.ValueInTimeSeriesChunkMetaData; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; - -/** - * For more information, see ValueInTimeSeriesChunkMetaData - * in cn.edu.thu.tsfile.format package - */ -public class VInTimeSeriesChunkMetaData implements IConverter { - private static final Logger LOGGER = LoggerFactory.getLogger(VInTimeSeriesChunkMetaData.class); - - private TSDataType dataType; - - private TsDigest digest; - private int maxError; - - /** - * If values for data consist of enum values, metadata will store all possible values in time - * series - */ - private List enumValues; - - public VInTimeSeriesChunkMetaData() { - } - - public VInTimeSeriesChunkMetaData(TSDataType dataType) { - this.dataType = dataType; - } - - @Override - public ValueInTimeSeriesChunkMetaData convertToThrift() { - try { - ValueInTimeSeriesChunkMetaData vTimeSeriesChunkMetaDataInThrift = new ValueInTimeSeriesChunkMetaData( - dataType == null ? null : DataType.valueOf(dataType.toString())); - vTimeSeriesChunkMetaDataInThrift.setMax_error(maxError); - vTimeSeriesChunkMetaDataInThrift.setEnum_values(enumValues); - vTimeSeriesChunkMetaDataInThrift.setDigest(digest == null ? null : digest.convertToThrift()); - return vTimeSeriesChunkMetaDataInThrift; - } catch (Exception e) { - if (LOGGER.isErrorEnabled()) - LOGGER.error( - "tsfile-file VInTimeSeriesChunkMetaData: failed to convert ValueInTimeSeriesChunkMetaData from TSFile to thrift, content is {}", - this, e); - throw e; - } - } - - @Override - public void convertToTSF(ValueInTimeSeriesChunkMetaData vTimeSeriesChunkMetaDataInThrift) { - try { - this.dataType = vTimeSeriesChunkMetaDataInThrift.getData_type() == null ? 
null : TSDataType.valueOf(vTimeSeriesChunkMetaDataInThrift.getData_type().toString()); - this.maxError = vTimeSeriesChunkMetaDataInThrift.getMax_error(); - this.enumValues = vTimeSeriesChunkMetaDataInThrift.getEnum_values(); - if (vTimeSeriesChunkMetaDataInThrift.getDigest() == null) { - this.digest = null; - } else { - this.digest = new TsDigest(); - this.digest.convertToTSF(vTimeSeriesChunkMetaDataInThrift.getDigest()); - } - } catch (Exception e) { - if (LOGGER.isErrorEnabled()) - LOGGER.error( - "tsfile-file VInTimeSeriesChunkMetaData: failed to convert ValueInTimeSeriesChunkMetaData from thrift to TSFile, content is {}", - vTimeSeriesChunkMetaDataInThrift, e); - throw e; - } - } - - @Override - public String toString() { - return String.format("VInTimeSeriesChunkMetaData{ TSDataType %s, TSDigest %s, maxError %d, enumValues %s }", dataType, digest, - maxError, enumValues); - } - - public TSDataType getDataType() { - return dataType; - } - - public void setDataType(TSDataType dataType) { - this.dataType = dataType; - } - - public TsDigest getDigest() { - return digest; - } - - public void setDigest(TsDigest digest) { - this.digest = digest; - } - - public int getMaxError() { - return maxError; - } - - public void setMaxError(int maxError) { - this.maxError = maxError; - } - - public List getEnumValues() { - return enumValues; - } - - public void setEnumValues(List enumValues) { - this.enumValues = enumValues; - } -} +package cn.edu.tsinghua.tsfile.file.metadata; + +import cn.edu.tsinghua.tsfile.file.metadata.converter.IConverter; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.format.DataType; +import cn.edu.tsinghua.tsfile.format.ValueInTimeSeriesChunkMetaData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.util.List; + +/** + * For more information, see ValueInTimeSeriesChunkMetaData in cn.edu.thu.tsfile.format package + */ +public class VInTimeSeriesChunkMetaData implements IConverter { + private static final Logger LOGGER = LoggerFactory.getLogger(VInTimeSeriesChunkMetaData.class); + + private TSDataType dataType; + + private TsDigest digest; + private int maxError; + + /** + * If values for data consist of enum values, metadata will store all possible values in time + * series + */ + private List enumValues; + + public VInTimeSeriesChunkMetaData() {} + + public VInTimeSeriesChunkMetaData(TSDataType dataType) { + this.dataType = dataType; + } + + @Override + public ValueInTimeSeriesChunkMetaData convertToThrift() { + try { + ValueInTimeSeriesChunkMetaData vTimeSeriesChunkMetaDataInThrift = + new ValueInTimeSeriesChunkMetaData( + dataType == null ? null : DataType.valueOf(dataType.toString())); + vTimeSeriesChunkMetaDataInThrift.setMax_error(maxError); + vTimeSeriesChunkMetaDataInThrift.setEnum_values(enumValues); + vTimeSeriesChunkMetaDataInThrift.setDigest(digest == null ? null : digest.convertToThrift()); + return vTimeSeriesChunkMetaDataInThrift; + } catch (Exception e) { + if (LOGGER.isErrorEnabled()) + LOGGER.error( + "tsfile-file VInTimeSeriesChunkMetaData: failed to convert ValueInTimeSeriesChunkMetaData from TSFile to thrift, content is {}", + this, e); + throw e; + } + } + + @Override + public void convertToTSF(ValueInTimeSeriesChunkMetaData vTimeSeriesChunkMetaDataInThrift) { + try { + this.dataType = vTimeSeriesChunkMetaDataInThrift.getData_type() == null ? 
null + : TSDataType.valueOf(vTimeSeriesChunkMetaDataInThrift.getData_type().toString()); + this.maxError = vTimeSeriesChunkMetaDataInThrift.getMax_error(); + this.enumValues = vTimeSeriesChunkMetaDataInThrift.getEnum_values(); + if (vTimeSeriesChunkMetaDataInThrift.getDigest() == null) { + this.digest = null; + } else { + this.digest = new TsDigest(); + this.digest.convertToTSF(vTimeSeriesChunkMetaDataInThrift.getDigest()); + } + } catch (Exception e) { + if (LOGGER.isErrorEnabled()) + LOGGER.error( + "tsfile-file VInTimeSeriesChunkMetaData: failed to convert ValueInTimeSeriesChunkMetaData from thrift to TSFile, content is {}", + vTimeSeriesChunkMetaDataInThrift, e); + throw e; + } + } + + @Override + public String toString() { + return String.format( + "VInTimeSeriesChunkMetaData{ TSDataType %s, TSDigest %s, maxError %d, enumValues %s }", + dataType, digest, maxError, enumValues); + } + + public TSDataType getDataType() { + return dataType; + } + + public void setDataType(TSDataType dataType) { + this.dataType = dataType; + } + + public TsDigest getDigest() { + return digest; + } + + public void setDigest(TsDigest digest) { + this.digest = digest; + } + + public int getMaxError() { + return maxError; + } + + public void setMaxError(int maxError) { + this.maxError = maxError; + } + + public List getEnumValues() { + return enumValues; + } + + public void setEnumValues(List enumValues) { + this.enumValues = enumValues; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/converter/IConverter.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/converter/IConverter.java index d03324c6..ffcec759 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/converter/IConverter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/converter/IConverter.java @@ -1,22 +1,22 @@ -package cn.edu.tsinghua.tsfile.file.metadata.converter; - -/** - * convert metadata between TSFile format and thrift format - * - * @param TsFile-defined type - */ -public interface IConverter { - /** - * convert TSFile format metadata to thrift format - * - * @return metadata in thrift format - */ - T convertToThrift(); - - /** - * convert thrift format metadata to TSFile format - * - * @param metadata metadata in thrift format - */ - void convertToTSF(T metadata); -} +package cn.edu.tsinghua.tsfile.file.metadata.converter; + +/** + * convert metadata between TSFile format and thrift format + * + * @param TsFile-defined type + */ +public interface IConverter { + /** + * convert TSFile format metadata to thrift format + * + * @return metadata in thrift format + */ + T convertToThrift(); + + /** + * convert thrift format metadata to TSFile format + * + * @param metadata metadata in thrift format + */ + void convertToTSF(T metadata); +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/converter/TsFileMetaDataConverter.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/converter/TsFileMetaDataConverter.java index 4e114b48..d3b66d7d 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/converter/TsFileMetaDataConverter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/converter/TsFileMetaDataConverter.java @@ -1,55 +1,58 @@ -package cn.edu.tsinghua.tsfile.file.metadata.converter; - -import cn.edu.tsinghua.tsfile.file.metadata.TsFileMetaData; -import cn.edu.tsinghua.tsfile.format.FileMetaData; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * converter for file metadata - */ -public class TsFileMetaDataConverter { - private static 
final Logger LOGGER = LoggerFactory.getLogger(TsFileMetaDataConverter.class); - - /** - * convert tsfile format file matadata to thrift format file matadata - * - * @param fileMetadataInTsFile file metadata in tsfile format - * @return file metadata in thrift format - */ - public FileMetaData toThriftFileMetadata(TsFileMetaData fileMetadataInTsFile) { - try { - return fileMetadataInTsFile.convertToThrift(); - } catch (Exception e) { - LOGGER.error("TsFileMetaDataConverter: failed to convert metadata from TsFile to thrift, content is {}", - fileMetadataInTsFile, e); - } - return null; - } - - /** - * convert thrift format file matadata to tsfile format file matadata - * - * @param fileMetaDataInThrift file metadata in thrift format - * @return file metadata in tsfile format - */ - public TsFileMetaData toTsFileMetadata(FileMetaData fileMetaDataInThrift) { - TsFileMetaData fileMetaDataInTSFile = new TsFileMetaData(); - try { - fileMetaDataInTSFile.convertToTSF(fileMetaDataInThrift); - } catch (Exception e) { - LOGGER.error("TsFileMetaDataConverter: failed to convert metadata from thrift to TSFile, content is {}", - fileMetaDataInThrift, e); - } - return fileMetaDataInTSFile; - } - - public int calculatePageHeaderSize(int digestSize) { - //PageHeader: PageType--4, uncompressedSize--4,compressedSize--4 - //DatapageHeader: numValues--4, numNulls--4, numRows--4, Encoding--4, isCompressed--1, maxTimestamp--8, minTimestamp--8 - //Digest: max ByteBuffer, min ByteBuffer - // * 2 to caculate max object size in memory - - return 2 * (45 + digestSize); - } -} +package cn.edu.tsinghua.tsfile.file.metadata.converter; + +import cn.edu.tsinghua.tsfile.file.metadata.TsFileMetaData; +import cn.edu.tsinghua.tsfile.format.FileMetaData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * converter for file metadata + */ +public class TsFileMetaDataConverter { + private static final Logger LOGGER = LoggerFactory.getLogger(TsFileMetaDataConverter.class); + + /** + * convert tsfile format file matadata to thrift format file matadata + * + * @param fileMetadataInTsFile file metadata in tsfile format + * @return file metadata in thrift format + */ + public FileMetaData toThriftFileMetadata(TsFileMetaData fileMetadataInTsFile) { + try { + return fileMetadataInTsFile.convertToThrift(); + } catch (Exception e) { + LOGGER.error( + "TsFileMetaDataConverter: failed to convert metadata from TsFile to thrift, content is {}", + fileMetadataInTsFile, e); + } + return null; + } + + /** + * convert thrift format file matadata to tsfile format file matadata + * + * @param fileMetaDataInThrift file metadata in thrift format + * @return file metadata in tsfile format + */ + public TsFileMetaData toTsFileMetadata(FileMetaData fileMetaDataInThrift) { + TsFileMetaData fileMetaDataInTSFile = new TsFileMetaData(); + try { + fileMetaDataInTSFile.convertToTSF(fileMetaDataInThrift); + } catch (Exception e) { + LOGGER.error( + "TsFileMetaDataConverter: failed to convert metadata from thrift to TSFile, content is {}", + fileMetaDataInThrift, e); + } + return fileMetaDataInTSFile; + } + + public int calculatePageHeaderSize(int digestSize) { + // PageHeader: PageType--4, uncompressedSize--4,compressedSize--4 + // DatapageHeader: numValues--4, numNulls--4, numRows--4, Encoding--4, isCompressed--1, + // maxTimestamp--8, minTimestamp--8 + // Digest: max ByteBuffer, min ByteBuffer + // * 2 to caculate max object size in memory + + return 2 * (45 + digestSize); + } +} diff --git 
a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/CompressionTypeName.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/CompressionTypeName.java index 4caee864..2ac7cf21 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/CompressionTypeName.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/CompressionTypeName.java @@ -1,53 +1,50 @@ -package cn.edu.tsinghua.tsfile.file.metadata.enums; - -import cn.edu.tsinghua.tsfile.common.exception.CompressionTypeNotSupportedException; -import cn.edu.tsinghua.tsfile.format.CompressionType; - -public enum CompressionTypeName { - UNCOMPRESSED(CompressionType.UNCOMPRESSED, ""), - SNAPPY(CompressionType.SNAPPY, ".snappy"), - GZIP(CompressionType.GZIP, ".gz"), - LZO(CompressionType.LZO, ".lzo"), - SDT(CompressionType.SDT, ".sdt"), - PAA(CompressionType.PAA, ".paa"), - PLA(CompressionType.PLA, ".pla"); - - private final CompressionType tsfileCompressionType; - private final String extension; - private CompressionTypeName(CompressionType tsfileCompressionType, String extension) { - this.tsfileCompressionType = tsfileCompressionType; - this.extension = extension; - } - - public static CompressionTypeName fromConf(String name) { - if (name == null) { - return UNCOMPRESSED; - } - switch (name.trim().toUpperCase()) { - case "UNCOMPRESSED": - return UNCOMPRESSED; - case "SNAPPY": - return SNAPPY; - case "GZIP": - return GZIP; - case "LZO": - return LZO; - case "SDT": - return SDT; - case "PAA": - return PAA; - case "PLA": - return PLA; - default: - throw new CompressionTypeNotSupportedException(name); - } - } - - public CompressionType getTsfileCompressionCodec() { - return tsfileCompressionType; - } - - public String getExtension() { - return extension; - } -} \ No newline at end of file +package cn.edu.tsinghua.tsfile.file.metadata.enums; + +import cn.edu.tsinghua.tsfile.common.exception.CompressionTypeNotSupportedException; +import cn.edu.tsinghua.tsfile.format.CompressionType; + +public enum CompressionTypeName { + UNCOMPRESSED(CompressionType.UNCOMPRESSED, ""), SNAPPY(CompressionType.SNAPPY, ".snappy"), GZIP( + CompressionType.GZIP, ".gz"), LZO(CompressionType.LZO, ".lzo"), SDT(CompressionType.SDT, + ".sdt"), PAA(CompressionType.PAA, ".paa"), PLA(CompressionType.PLA, ".pla"); + + private final CompressionType tsfileCompressionType; + private final String extension; + + private CompressionTypeName(CompressionType tsfileCompressionType, String extension) { + this.tsfileCompressionType = tsfileCompressionType; + this.extension = extension; + } + + public static CompressionTypeName fromConf(String name) { + if (name == null) { + return UNCOMPRESSED; + } + switch (name.trim().toUpperCase()) { + case "UNCOMPRESSED": + return UNCOMPRESSED; + case "SNAPPY": + return SNAPPY; + case "GZIP": + return GZIP; + case "LZO": + return LZO; + case "SDT": + return SDT; + case "PAA": + return PAA; + case "PLA": + return PLA; + default: + throw new CompressionTypeNotSupportedException(name); + } + } + + public CompressionType getTsfileCompressionCodec() { + return tsfileCompressionType; + } + + public String getExtension() { + return extension; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSChunkType.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSChunkType.java index 30147cdd..9f9188ca 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSChunkType.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSChunkType.java @@ -1,5 +1,5 @@ -package 
cn.edu.tsinghua.tsfile.file.metadata.enums; - -public enum TSChunkType { - TIME, VALUE -} +package cn.edu.tsinghua.tsfile.file.metadata.enums; + +public enum TSChunkType { + TIME, VALUE +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSDataType.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSDataType.java index b10caec1..3b8c0c43 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSDataType.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSDataType.java @@ -1,5 +1,5 @@ -package cn.edu.tsinghua.tsfile.file.metadata.enums; - -public enum TSDataType { - BOOLEAN, INT32, INT64, INT96, FLOAT, DOUBLE, TEXT, FIXED_LEN_BYTE_ARRAY, ENUMS, BIGDECIMAL -} +package cn.edu.tsinghua.tsfile.file.metadata.enums; + +public enum TSDataType { + BOOLEAN, INT32, INT64, INT96, FLOAT, DOUBLE, TEXT, FIXED_LEN_BYTE_ARRAY, ENUMS, BIGDECIMAL +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSEncoding.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSEncoding.java index 6c6f3d07..222cc814 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSEncoding.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSEncoding.java @@ -1,5 +1,5 @@ -package cn.edu.tsinghua.tsfile.file.metadata.enums; - -public enum TSEncoding { - PLAIN, PLAIN_DICTIONARY, RLE, DIFF, TS_2DIFF, BITMAP, GORILLA -} +package cn.edu.tsinghua.tsfile.file.metadata.enums; + +public enum TSEncoding { + PLAIN, PLAIN_DICTIONARY, RLE, DIFF, TS_2DIFF, BITMAP, GORILLA +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSFreqType.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSFreqType.java index e8a469a8..61e882d5 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSFreqType.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSFreqType.java @@ -1,5 +1,5 @@ -package cn.edu.tsinghua.tsfile.file.metadata.enums; - -public enum TSFreqType { - SINGLE_FREQ, MULTI_FREQ, IRREGULAR_FREQ -} +package cn.edu.tsinghua.tsfile.file.metadata.enums; + +public enum TSFreqType { + SINGLE_FREQ, MULTI_FREQ, IRREGULAR_FREQ +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSPageType.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSPageType.java index 4c3b77ba..84cc1df4 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSPageType.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/enums/TSPageType.java @@ -1,5 +1,5 @@ -package cn.edu.tsinghua.tsfile.file.metadata.enums; - -public enum TSPageType { - DATA, DICTIONARY, INDEX -} +package cn.edu.tsinghua.tsfile.file.metadata.enums; + +public enum TSPageType { + DATA, DICTIONARY, INDEX +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BigDecimalStatistics.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BigDecimalStatistics.java index 758128a6..3b86c4e4 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BigDecimalStatistics.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BigDecimalStatistics.java @@ -1,7 +1,6 @@ package cn.edu.tsinghua.tsfile.file.metadata.statistics; import cn.edu.tsinghua.tsfile.common.utils.BytesUtils; - import java.math.BigDecimal; /** @@ -10,114 +9,116 @@ * @author kangrong */ public class BigDecimalStatistics extends Statistics { - private BigDecimal max; - private BigDecimal min; - private BigDecimal first; - private double sum; - 
private BigDecimal last; - - @Override - public void updateStats(BigDecimal value) { - if (this.isEmpty) { - initializeStats(value, value, value, value.doubleValue(), value); - isEmpty = false; - } else { - updateStats(value, value, value, value.doubleValue(), value); - } - } - - private void updateStats(BigDecimal minValue, BigDecimal maxValue, BigDecimal firstValue, double sumValue, - BigDecimal lastValue) { - if (minValue.doubleValue() < min.doubleValue()) { - min = minValue; - } - if (maxValue.doubleValue() > max.doubleValue()) { - max = maxValue; - } - sum += sumValue; - this.last = lastValue; - } - - @Override - public BigDecimal getMax() { - return max; - } - - @Override - public BigDecimal getMin() { - return min; - } - - @Override - public BigDecimal getFirst() { - return first; - } - - @Override - public double getSum() { - return sum; - } - - @Override - public BigDecimal getLast() { - return last; - } - - @Override - protected void mergeStatisticsValue(Statistics stats) { - BigDecimalStatistics bigDecimalStats = (BigDecimalStatistics) stats; - if (this.isEmpty) { - initializeStats(bigDecimalStats.getMin(), bigDecimalStats.getMax(), bigDecimalStats.getFirst(), - bigDecimalStats.getSum(), bigDecimalStats.getLast()); - isEmpty = false; - } else { - updateStats(bigDecimalStats.getMin(), bigDecimalStats.getMax(), bigDecimalStats.getFirst(), - bigDecimalStats.getSum(), bigDecimalStats.getLast()); - } - - } - - public void initializeStats(BigDecimal min, BigDecimal max, BigDecimal first, double sum, BigDecimal last) { - this.min = min; - this.max = max; - this.first = first; - this.sum = sum; - this.last = last; - } - - @Override - public byte[] getMaxBytes() { - return BytesUtils.doubleToBytes(max.doubleValue()); - } - - @Override - public byte[] getMinBytes() { - return BytesUtils.doubleToBytes(min.doubleValue()); - } - - @Override - public byte[] getFirstBytes() { - return BytesUtils.doubleToBytes(first.doubleValue()); - } - - @Override - public byte[] getSumBytes() { - return BytesUtils.doubleToBytes(sum); - } - - @Override - public byte[] getLastBytes(){ - return BytesUtils.doubleToBytes(last.doubleValue()); - } - - @Override - public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { - max = new BigDecimal(BytesUtils.bytesToDouble(maxBytes)); - min = new BigDecimal(BytesUtils.bytesToDouble(minBytes)); - } - - @Override - public String toString() { - return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + "]"; - } + private BigDecimal max; + private BigDecimal min; + private BigDecimal first; + private double sum; + private BigDecimal last; + + @Override + public void updateStats(BigDecimal value) { + if (this.isEmpty) { + initializeStats(value, value, value, value.doubleValue(), value); + isEmpty = false; + } else { + updateStats(value, value, value, value.doubleValue(), value); + } + } + + private void updateStats(BigDecimal minValue, BigDecimal maxValue, BigDecimal firstValue, + double sumValue, BigDecimal lastValue) { + if (minValue.doubleValue() < min.doubleValue()) { + min = minValue; + } + if (maxValue.doubleValue() > max.doubleValue()) { + max = maxValue; + } + sum += sumValue; + this.last = lastValue; + } + + @Override + public BigDecimal getMax() { + return max; + } + + @Override + public BigDecimal getMin() { + return min; + } + + @Override + public BigDecimal getFirst() { + return first; + } + + @Override + public double getSum() { + return sum; + } + + @Override + public BigDecimal getLast() { + return last; + } + + 
@Override + protected void mergeStatisticsValue(Statistics stats) { + BigDecimalStatistics bigDecimalStats = (BigDecimalStatistics) stats; + if (this.isEmpty) { + initializeStats(bigDecimalStats.getMin(), bigDecimalStats.getMax(), + bigDecimalStats.getFirst(), bigDecimalStats.getSum(), bigDecimalStats.getLast()); + isEmpty = false; + } else { + updateStats(bigDecimalStats.getMin(), bigDecimalStats.getMax(), bigDecimalStats.getFirst(), + bigDecimalStats.getSum(), bigDecimalStats.getLast()); + } + + } + + public void initializeStats(BigDecimal min, BigDecimal max, BigDecimal first, double sum, + BigDecimal last) { + this.min = min; + this.max = max; + this.first = first; + this.sum = sum; + this.last = last; + } + + @Override + public byte[] getMaxBytes() { + return BytesUtils.doubleToBytes(max.doubleValue()); + } + + @Override + public byte[] getMinBytes() { + return BytesUtils.doubleToBytes(min.doubleValue()); + } + + @Override + public byte[] getFirstBytes() { + return BytesUtils.doubleToBytes(first.doubleValue()); + } + + @Override + public byte[] getSumBytes() { + return BytesUtils.doubleToBytes(sum); + } + + @Override + public byte[] getLastBytes() { + return BytesUtils.doubleToBytes(last.doubleValue()); + } + + @Override + public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { + max = new BigDecimal(BytesUtils.bytesToDouble(maxBytes)); + min = new BigDecimal(BytesUtils.bytesToDouble(minBytes)); + } + + @Override + public String toString() { + return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + + "]"; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BinaryStatistics.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BinaryStatistics.java index 2816f6b7..99f57641 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BinaryStatistics.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BinaryStatistics.java @@ -9,110 +9,114 @@ * @author CGF */ public class BinaryStatistics extends Statistics { - private Binary max = new Binary(""); - private Binary min = new Binary(""); - private Binary first = new Binary(""); - private double sum; - private Binary last = new Binary(""); - - @Override - public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { - max = new Binary(maxBytes); - min = new Binary(minBytes); - } - - @Override - public Binary getMin() { - return min; - } - - @Override - public Binary getMax() { - return max; - } - - @Override - public Binary getFirst() { - return first; - } - - @Override - public double getSum() { - return sum; - } - - @Override - public Binary getLast(){ - return last; - } - - public void initializeStats(Binary min, Binary max, Binary first, double sum,Binary last) { - this.min = min; - this.max = max; - this.first = first; - this.sum = sum; - this.last = last; - } - - @Override - protected void mergeStatisticsValue(Statistics stats) { - BinaryStatistics stringStats = (BinaryStatistics) stats; - if (isEmpty) { - initializeStats(stringStats.getMin(), stringStats.getMax(), stringStats.getFirst(), stringStats.getSum(),stringStats.getLast()); - isEmpty = false; - } else { - updateStats(stringStats.getMin(), stringStats.getMax(), stringStats.getFirst(), stringStats.getSum(),stringStats.getLast()); - } - } - - @Override - public void updateStats(Binary value) { - if (isEmpty) { - initializeStats(value, value, value, 0,value); - isEmpty = false; - } else { - updateStats(value, value, value, 0,value); - isEmpty = 
false; - } - } - - private void updateStats(Binary minValue, Binary maxValue, Binary firstValue, double sum,Binary lastValue) { - if (minValue.compareTo(min) < 0) { - min = minValue; - } - if (maxValue.compareTo(max) > 0) { - max = maxValue; - } - this.last = lastValue; - } - - @Override - public byte[] getMaxBytes() { - return BytesUtils.StringToBytes(max.getStringValue()); - } - - @Override - public byte[] getMinBytes() { - return BytesUtils.StringToBytes(min.getStringValue()); - } - - @Override - public byte[] getFirstBytes() { - return BytesUtils.StringToBytes(first.getStringValue()); - } - - @Override - public byte[] getSumBytes() { - return BytesUtils.doubleToBytes(sum); - } - - @Override - public byte[] getLastBytes(){ - return BytesUtils.StringToBytes(last.getStringValue()); - } - - @Override - public String toString(){ - return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + "]"; - } + private Binary max = new Binary(""); + private Binary min = new Binary(""); + private Binary first = new Binary(""); + private double sum; + private Binary last = new Binary(""); + + @Override + public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { + max = new Binary(maxBytes); + min = new Binary(minBytes); + } + + @Override + public Binary getMin() { + return min; + } + + @Override + public Binary getMax() { + return max; + } + + @Override + public Binary getFirst() { + return first; + } + + @Override + public double getSum() { + return sum; + } + + @Override + public Binary getLast() { + return last; + } + + public void initializeStats(Binary min, Binary max, Binary first, double sum, Binary last) { + this.min = min; + this.max = max; + this.first = first; + this.sum = sum; + this.last = last; + } + + @Override + protected void mergeStatisticsValue(Statistics stats) { + BinaryStatistics stringStats = (BinaryStatistics) stats; + if (isEmpty) { + initializeStats(stringStats.getMin(), stringStats.getMax(), stringStats.getFirst(), + stringStats.getSum(), stringStats.getLast()); + isEmpty = false; + } else { + updateStats(stringStats.getMin(), stringStats.getMax(), stringStats.getFirst(), + stringStats.getSum(), stringStats.getLast()); + } + } + + @Override + public void updateStats(Binary value) { + if (isEmpty) { + initializeStats(value, value, value, 0, value); + isEmpty = false; + } else { + updateStats(value, value, value, 0, value); + isEmpty = false; + } + } + + private void updateStats(Binary minValue, Binary maxValue, Binary firstValue, double sum, + Binary lastValue) { + if (minValue.compareTo(min) < 0) { + min = minValue; + } + if (maxValue.compareTo(max) > 0) { + max = maxValue; + } + this.last = lastValue; + } + + @Override + public byte[] getMaxBytes() { + return BytesUtils.StringToBytes(max.getStringValue()); + } + + @Override + public byte[] getMinBytes() { + return BytesUtils.StringToBytes(min.getStringValue()); + } + + @Override + public byte[] getFirstBytes() { + return BytesUtils.StringToBytes(first.getStringValue()); + } + + @Override + public byte[] getSumBytes() { + return BytesUtils.doubleToBytes(sum); + } + + @Override + public byte[] getLastBytes() { + return BytesUtils.StringToBytes(last.getStringValue()); + } + + @Override + public String toString() { + return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + + "]"; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BooleanStatistics.java 
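The enum and statistics hunks above only re-wrap and re-indent the code; the public API of the statistics classes is unchanged. As a minimal usage sketch of that API (illustrative only, not part of this patch; the example class name is invented), a per-column statistics object is filled one value at a time and then queried for its digest fields:

import cn.edu.tsinghua.tsfile.file.metadata.statistics.BigDecimalStatistics;
import java.math.BigDecimal;

// Hypothetical example class, not part of the TsFile sources.
public class BigDecimalStatisticsSketch {
  public static void main(String[] args) {
    BigDecimalStatistics stats = new BigDecimalStatistics();
    // Each call folds one value into max/min/first/sum/last.
    stats.updateStats(new BigDecimal("3.5"));
    stats.updateStats(new BigDecimal("1.2"));
    stats.updateStats(new BigDecimal("7.8"));
    // Prints: [max:7.8,min:1.2,first:3.5,sum:12.5,last:7.8]
    System.out.println(stats);
  }
}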
b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BooleanStatistics.java index df458335..36b169f6 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BooleanStatistics.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BooleanStatistics.java @@ -6,112 +6,114 @@ * @author CGF */ public class BooleanStatistics extends Statistics { - private boolean max; - private boolean min; - private boolean first; - private double sum; - private boolean last; - - @Override - public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { - max = BytesUtils.bytesToBool(maxBytes); - min = BytesUtils.bytesToBool(minBytes); - } - - @Override - public void updateStats(boolean value) { - if (isEmpty) { - initializeStats(value, value, value, 0, value); - isEmpty = false; - } else { - updateStats(value, value, value, 0, value); - isEmpty = false; - } - } - - private void updateStats(boolean minValue, boolean maxValue, boolean firstValue, double sumValue, - boolean lastValue) { - if (!minValue && min) { - min = minValue; - } - if (maxValue && !max) { - max = maxValue; - } - this.last = lastValue; - } - - @Override - public Boolean getMax() { - return max; - } - - @Override - public Boolean getMin() { - return min; - } - - @Override - public Boolean getFirst() { - return first; - } - - @Override - public double getSum() { - return sum; - } - - @Override - public Boolean getLast(){ - return last; - } - - @Override - protected void mergeStatisticsValue(Statistics stats) { - BooleanStatistics boolStats = (BooleanStatistics) stats; - if (isEmpty) { - initializeStats(boolStats.getMin(), boolStats.getMax(), boolStats.getFirst(), boolStats.getSum(), - boolStats.getLast()); - isEmpty = false; - } else { - updateStats(boolStats.getMin(), boolStats.getMax(), boolStats.getFirst(), boolStats.getSum(), - boolStats.getLast()); - } - } - - public void initializeStats(boolean min, boolean max, boolean firstValue, double sumValue, boolean lastValue) { - this.min = min; - this.max = max; - this.first = firstValue; - this.last = lastValue; - } - - @Override - public byte[] getMaxBytes() { - return BytesUtils.boolToBytes(max); - } - - @Override - public byte[] getMinBytes() { - return BytesUtils.boolToBytes(min); - } - - @Override - public byte[] getFirstBytes() { - return BytesUtils.boolToBytes(first); - } - - @Override - public byte[] getSumBytes() { - return BytesUtils.doubleToBytes(sum); - } - - @Override - public byte[] getLastBytes() { - return BytesUtils.boolToBytes(last); - } - - @Override - public String toString() { - return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + "]"; - } + private boolean max; + private boolean min; + private boolean first; + private double sum; + private boolean last; + + @Override + public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { + max = BytesUtils.bytesToBool(maxBytes); + min = BytesUtils.bytesToBool(minBytes); + } + + @Override + public void updateStats(boolean value) { + if (isEmpty) { + initializeStats(value, value, value, 0, value); + isEmpty = false; + } else { + updateStats(value, value, value, 0, value); + isEmpty = false; + } + } + + private void updateStats(boolean minValue, boolean maxValue, boolean firstValue, double sumValue, + boolean lastValue) { + if (!minValue && min) { + min = minValue; + } + if (maxValue && !max) { + max = maxValue; + } + this.last = lastValue; + } + + @Override + public Boolean getMax() { + return max; + } + + @Override + public Boolean 
getMin() { + return min; + } + + @Override + public Boolean getFirst() { + return first; + } + + @Override + public double getSum() { + return sum; + } + + @Override + public Boolean getLast() { + return last; + } + + @Override + protected void mergeStatisticsValue(Statistics stats) { + BooleanStatistics boolStats = (BooleanStatistics) stats; + if (isEmpty) { + initializeStats(boolStats.getMin(), boolStats.getMax(), boolStats.getFirst(), + boolStats.getSum(), boolStats.getLast()); + isEmpty = false; + } else { + updateStats(boolStats.getMin(), boolStats.getMax(), boolStats.getFirst(), boolStats.getSum(), + boolStats.getLast()); + } + } + + public void initializeStats(boolean min, boolean max, boolean firstValue, double sumValue, + boolean lastValue) { + this.min = min; + this.max = max; + this.first = firstValue; + this.last = lastValue; + } + + @Override + public byte[] getMaxBytes() { + return BytesUtils.boolToBytes(max); + } + + @Override + public byte[] getMinBytes() { + return BytesUtils.boolToBytes(min); + } + + @Override + public byte[] getFirstBytes() { + return BytesUtils.boolToBytes(first); + } + + @Override + public byte[] getSumBytes() { + return BytesUtils.doubleToBytes(sum); + } + + @Override + public byte[] getLastBytes() { + return BytesUtils.boolToBytes(last); + } + + @Override + public String toString() { + return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + + "]"; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/DoubleStatistics.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/DoubleStatistics.java index 740fcce1..c1965bc3 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/DoubleStatistics.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/DoubleStatistics.java @@ -8,112 +8,116 @@ * @author kangrong */ public class DoubleStatistics extends Statistics { - private double max; - private double min; - private double first; - private double sum; - private double last; - - @Override - public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { - max = BytesUtils.bytesToDouble(maxBytes); - min = BytesUtils.bytesToDouble(minBytes); - } - - @Override - public void updateStats(double value) { - if (this.isEmpty) { - initializeStats(value, value, value, value,value); - isEmpty = false; - } else { - updateStats(value, value, value, value,value); - } - } - - private void updateStats(double minValue, double maxValue, double firstValue, double sumValue,double lastValue) { - if (minValue < min) { - min = minValue; - } - if (maxValue > max) { - max = maxValue; - } - sum += sumValue; - this.last = lastValue; - } - - @Override - public Double getMax() { - return max; - } - - @Override - public Double getMin() { - return min; - } - - @Override - public Double getFirst() { - return first; - } - - @Override - public double getSum() { - return sum; - } - - @Override - public Double getLast(){ - return last; - } - - @Override - protected void mergeStatisticsValue(Statistics stats) { - DoubleStatistics doubleStats = (DoubleStatistics) stats; - if (this.isEmpty) { - initializeStats(doubleStats.getMin(), doubleStats.getMax(), doubleStats.getFirst(), doubleStats.getSum(),doubleStats.getLast()); - isEmpty = false; - } else { - updateStats(doubleStats.getMin(), doubleStats.getMax(), doubleStats.getFirst(), doubleStats.getSum(),doubleStats.getLast()); - } - - } - - public void initializeStats(double min, double max, double first, double sum,double last) { - 
this.min = min; - this.max = max; - this.first = first; - this.sum = sum; - this.last = last; - } - - @Override - public byte[] getMaxBytes() { - return BytesUtils.doubleToBytes(max); - } - - @Override - public byte[] getMinBytes() { - return BytesUtils.doubleToBytes(min); - } - - @Override - public byte[] getFirstBytes() { - return BytesUtils.doubleToBytes(first); - } - - @Override - public byte[] getSumBytes() { - return BytesUtils.doubleToBytes(sum); - } - - @Override - public byte[] getLastBytes(){ - return BytesUtils.doubleToBytes(last); - } - - @Override - public String toString() { - return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + "]"; - } + private double max; + private double min; + private double first; + private double sum; + private double last; + + @Override + public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { + max = BytesUtils.bytesToDouble(maxBytes); + min = BytesUtils.bytesToDouble(minBytes); + } + + @Override + public void updateStats(double value) { + if (this.isEmpty) { + initializeStats(value, value, value, value, value); + isEmpty = false; + } else { + updateStats(value, value, value, value, value); + } + } + + private void updateStats(double minValue, double maxValue, double firstValue, double sumValue, + double lastValue) { + if (minValue < min) { + min = minValue; + } + if (maxValue > max) { + max = maxValue; + } + sum += sumValue; + this.last = lastValue; + } + + @Override + public Double getMax() { + return max; + } + + @Override + public Double getMin() { + return min; + } + + @Override + public Double getFirst() { + return first; + } + + @Override + public double getSum() { + return sum; + } + + @Override + public Double getLast() { + return last; + } + + @Override + protected void mergeStatisticsValue(Statistics stats) { + DoubleStatistics doubleStats = (DoubleStatistics) stats; + if (this.isEmpty) { + initializeStats(doubleStats.getMin(), doubleStats.getMax(), doubleStats.getFirst(), + doubleStats.getSum(), doubleStats.getLast()); + isEmpty = false; + } else { + updateStats(doubleStats.getMin(), doubleStats.getMax(), doubleStats.getFirst(), + doubleStats.getSum(), doubleStats.getLast()); + } + + } + + public void initializeStats(double min, double max, double first, double sum, double last) { + this.min = min; + this.max = max; + this.first = first; + this.sum = sum; + this.last = last; + } + + @Override + public byte[] getMaxBytes() { + return BytesUtils.doubleToBytes(max); + } + + @Override + public byte[] getMinBytes() { + return BytesUtils.doubleToBytes(min); + } + + @Override + public byte[] getFirstBytes() { + return BytesUtils.doubleToBytes(first); + } + + @Override + public byte[] getSumBytes() { + return BytesUtils.doubleToBytes(sum); + } + + @Override + public byte[] getLastBytes() { + return BytesUtils.doubleToBytes(last); + } + + @Override + public String toString() { + return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + + "]"; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/FloatStatistics.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/FloatStatistics.java index edea6889..b66ab7d5 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/FloatStatistics.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/FloatStatistics.java @@ -8,111 +8,115 @@ * @author kangrong */ public class FloatStatistics extends Statistics { - private float max; - private float 
min; - private float first; - private double sum; - private float last; - - @Override - public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { - max = BytesUtils.bytesToFloat(maxBytes); - min = BytesUtils.bytesToFloat(minBytes); - } - - @Override - public void updateStats(float value) { - if (this.isEmpty) { - initializeStats(value, value, value, value,value); - isEmpty = false; - } else { - updateStats(value, value, value, value,value); - } - } - - private void updateStats(float minValue, float maxValue, float firstValue, double sumValue,float last) { - if (minValue < min) { - min = minValue; - } - if (maxValue > max) { - max = maxValue; - } - sum += sumValue; - this.last = last; - } - - @Override - public Float getMax() { - return max; - } - - @Override - public Float getMin() { - return min; - } - - @Override - public Float getFirst() { - return first; - } - - @Override - public double getSum() { - return sum; - } - - @Override - public Float getLast(){ - return last; - } - - @Override - protected void mergeStatisticsValue(Statistics stats) { - FloatStatistics floatStats = (FloatStatistics) stats; - if (isEmpty) { - initializeStats(floatStats.getMin(), floatStats.getMax(), floatStats.getFirst(), floatStats.getSum(),floatStats.getLast()); - isEmpty = false; - } else { - updateStats(floatStats.getMin(), floatStats.getMax(), floatStats.getFirst(), floatStats.getSum(),floatStats.getLast()); - } - - } - - public void initializeStats(float min, float max, float first, double sum,float last) { - this.min = min; - this.max = max; - this.first = first; - this.sum = sum; - this.last = last; - } - - @Override - public byte[] getMaxBytes() { - return BytesUtils.floatToBytes(max); - } - - @Override - public byte[] getMinBytes() { - return BytesUtils.floatToBytes(min); - } - - @Override - public byte[] getFirstBytes() { - return BytesUtils.floatToBytes(first); - } - - @Override - public byte[] getSumBytes() { - return BytesUtils.doubleToBytes(sum); - } - - @Override - public byte[] getLastBytes(){ - return BytesUtils.floatToBytes(last); - } - - @Override - public String toString() { - return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + "]"; - } + private float max; + private float min; + private float first; + private double sum; + private float last; + + @Override + public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { + max = BytesUtils.bytesToFloat(maxBytes); + min = BytesUtils.bytesToFloat(minBytes); + } + + @Override + public void updateStats(float value) { + if (this.isEmpty) { + initializeStats(value, value, value, value, value); + isEmpty = false; + } else { + updateStats(value, value, value, value, value); + } + } + + private void updateStats(float minValue, float maxValue, float firstValue, double sumValue, + float last) { + if (minValue < min) { + min = minValue; + } + if (maxValue > max) { + max = maxValue; + } + sum += sumValue; + this.last = last; + } + + @Override + public Float getMax() { + return max; + } + + @Override + public Float getMin() { + return min; + } + + @Override + public Float getFirst() { + return first; + } + + @Override + public double getSum() { + return sum; + } + + @Override + public Float getLast() { + return last; + } + + @Override + protected void mergeStatisticsValue(Statistics stats) { + FloatStatistics floatStats = (FloatStatistics) stats; + if (isEmpty) { + initializeStats(floatStats.getMin(), floatStats.getMax(), floatStats.getFirst(), + floatStats.getSum(), floatStats.getLast()); + isEmpty 
= false; + } else { + updateStats(floatStats.getMin(), floatStats.getMax(), floatStats.getFirst(), + floatStats.getSum(), floatStats.getLast()); + } + + } + + public void initializeStats(float min, float max, float first, double sum, float last) { + this.min = min; + this.max = max; + this.first = first; + this.sum = sum; + this.last = last; + } + + @Override + public byte[] getMaxBytes() { + return BytesUtils.floatToBytes(max); + } + + @Override + public byte[] getMinBytes() { + return BytesUtils.floatToBytes(min); + } + + @Override + public byte[] getFirstBytes() { + return BytesUtils.floatToBytes(first); + } + + @Override + public byte[] getSumBytes() { + return BytesUtils.doubleToBytes(sum); + } + + @Override + public byte[] getLastBytes() { + return BytesUtils.floatToBytes(last); + } + + @Override + public String toString() { + return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + + "]"; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/IntegerStatistics.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/IntegerStatistics.java index a9a9c069..7b880717 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/IntegerStatistics.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/IntegerStatistics.java @@ -8,114 +8,116 @@ * @author kangrong */ public class IntegerStatistics extends Statistics { - private int max; - private int min; - private int first; - private double sum; - private int last; - - @Override - public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { - max = BytesUtils.bytesToInt(maxBytes); - min = BytesUtils.bytesToInt(minBytes); - } - - @Override - public void updateStats(int value) { - if (isEmpty) { - initializeStats(value, value, value, value, value); - isEmpty = false; - } else { - updateStats(value, value, value, value, value); - isEmpty = false; - } - } - - private void updateStats(int minValue, int maxValue, int firstValue, double sumValue, int lastValue) { - if (minValue < min) { - min = minValue; - } - if (maxValue > max) { - max = maxValue; - } - sum += sumValue; - this.last = lastValue; - } - - @Override - public Integer getMax() { - return max; - } - - @Override - public Integer getMin() { - return min; - } - - @Override - public Integer getFirst() { - return first; - } - - @Override - public double getSum() { - return sum; - } - - @Override - public Integer getLast() { - return last; - } - - @Override - protected void mergeStatisticsValue(Statistics stats) { - IntegerStatistics intStats = (IntegerStatistics) stats; - if (isEmpty) { - initializeStats(intStats.getMin(), intStats.getMax(), intStats.getFirst(), intStats.getSum(), - intStats.getLast()); - isEmpty = false; - } else { - updateStats(intStats.getMin(), intStats.getMax(), intStats.getFirst(), intStats.getSum(), - intStats.getLast()); - } - - } - - private void initializeStats(int min, int max, int first, double sum, int last) { - this.min = min; - this.max = max; - this.first = first; - this.sum = sum; - this.last = last; - } - - @Override - public byte[] getMaxBytes() { - return BytesUtils.intToBytes(max); - } - - @Override - public byte[] getMinBytes() { - return BytesUtils.intToBytes(min); - } - - @Override - public byte[] getFirstBytes() { - return BytesUtils.intToBytes(first); - } - - @Override - public byte[] getSumBytes() { - return BytesUtils.doubleToBytes(sum); - } - - @Override - public byte[] getLastBytes() { - return BytesUtils.intToBytes(last); - } - - 
@Override - public String toString() { - return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + "]"; - } + private int max; + private int min; + private int first; + private double sum; + private int last; + + @Override + public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { + max = BytesUtils.bytesToInt(maxBytes); + min = BytesUtils.bytesToInt(minBytes); + } + + @Override + public void updateStats(int value) { + if (isEmpty) { + initializeStats(value, value, value, value, value); + isEmpty = false; + } else { + updateStats(value, value, value, value, value); + isEmpty = false; + } + } + + private void updateStats(int minValue, int maxValue, int firstValue, double sumValue, + int lastValue) { + if (minValue < min) { + min = minValue; + } + if (maxValue > max) { + max = maxValue; + } + sum += sumValue; + this.last = lastValue; + } + + @Override + public Integer getMax() { + return max; + } + + @Override + public Integer getMin() { + return min; + } + + @Override + public Integer getFirst() { + return first; + } + + @Override + public double getSum() { + return sum; + } + + @Override + public Integer getLast() { + return last; + } + + @Override + protected void mergeStatisticsValue(Statistics stats) { + IntegerStatistics intStats = (IntegerStatistics) stats; + if (isEmpty) { + initializeStats(intStats.getMin(), intStats.getMax(), intStats.getFirst(), intStats.getSum(), + intStats.getLast()); + isEmpty = false; + } else { + updateStats(intStats.getMin(), intStats.getMax(), intStats.getFirst(), intStats.getSum(), + intStats.getLast()); + } + + } + + private void initializeStats(int min, int max, int first, double sum, int last) { + this.min = min; + this.max = max; + this.first = first; + this.sum = sum; + this.last = last; + } + + @Override + public byte[] getMaxBytes() { + return BytesUtils.intToBytes(max); + } + + @Override + public byte[] getMinBytes() { + return BytesUtils.intToBytes(min); + } + + @Override + public byte[] getFirstBytes() { + return BytesUtils.intToBytes(first); + } + + @Override + public byte[] getSumBytes() { + return BytesUtils.doubleToBytes(sum); + } + + @Override + public byte[] getLastBytes() { + return BytesUtils.intToBytes(last); + } + + @Override + public String toString() { + return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + + "]"; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/LongStatistics.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/LongStatistics.java index 63776961..eaaac143 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/LongStatistics.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/LongStatistics.java @@ -8,124 +8,126 @@ * @author kangrong */ public class LongStatistics extends Statistics { - private long max; - private long min; - private long first; - private double sum; - private long last; - - @Override - public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { - max = BytesUtils.bytesToLong(maxBytes); - min = BytesUtils.bytesToLong(minBytes); - } - - @Override - public Long getMin() { - return min; - } - - @Override - public Long getMax() { - return max; - } - - @Override - public Long getFirst() { - return first; - } - - @Override - public double getSum() { - return sum; - } - - @Override - public Long getLast() { - return last; - } - - @Override - public void updateStats(long value) { - if (isEmpty) { - initializeStats(value, value, 
value, value, value); - isEmpty = false; - } else { - updateStats(value, value, value, value, value); - } - } - - private void updateStats(long minValue, long maxValue, long firstValue, double sumValue, long lastValue) { - if (minValue < min) { - min = minValue; - } - if (maxValue > max) { - max = maxValue; - } - sum += sumValue; - this.last = lastValue; - } - - @Override - protected void mergeStatisticsValue(Statistics stats) { - LongStatistics longStats = (LongStatistics) stats; - if (isEmpty) { - initializeStats(longStats.getMin(), longStats.getMax(), longStats.getFirst(), longStats.getSum(), - longStats.getLast()); - isEmpty = false; - } else { - updateStats(longStats.getMin(), longStats.getMax(), longStats.getFirst(), longStats.getSum(), - longStats.getLast()); - } - - } - - private void initializeStats(long min, long max, long firstValue, double sum, long last) { - this.min = min; - this.max = max; - this.first = firstValue; - this.sum += sum; - this.last = last; - } - - @Override - public byte[] getMaxBytes() { - return BytesUtils.longToBytes(max); - } - - @Override - public byte[] getMinBytes() { - return BytesUtils.longToBytes(min); - } - - @Override - public byte[] getFirstBytes() { - return BytesUtils.longToBytes(first); - } - - @Override - public byte[] getSumBytes() { - return BytesUtils.doubleToBytes(sum); - } - - @Override - public byte[] getLastBytes() { - return BytesUtils.longToBytes(last); - } - - @Override - public String toString() { - return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + "]"; - } - - @Override - public void updateStats(long minValue, long maxValue) { - if (minValue < min) { - min = minValue; - } - if (maxValue > max) { - max = maxValue; - } - } + private long max; + private long min; + private long first; + private double sum; + private long last; + + @Override + public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { + max = BytesUtils.bytesToLong(maxBytes); + min = BytesUtils.bytesToLong(minBytes); + } + + @Override + public Long getMin() { + return min; + } + + @Override + public Long getMax() { + return max; + } + + @Override + public Long getFirst() { + return first; + } + + @Override + public double getSum() { + return sum; + } + + @Override + public Long getLast() { + return last; + } + + @Override + public void updateStats(long value) { + if (isEmpty) { + initializeStats(value, value, value, value, value); + isEmpty = false; + } else { + updateStats(value, value, value, value, value); + } + } + + private void updateStats(long minValue, long maxValue, long firstValue, double sumValue, + long lastValue) { + if (minValue < min) { + min = minValue; + } + if (maxValue > max) { + max = maxValue; + } + sum += sumValue; + this.last = lastValue; + } + + @Override + protected void mergeStatisticsValue(Statistics stats) { + LongStatistics longStats = (LongStatistics) stats; + if (isEmpty) { + initializeStats(longStats.getMin(), longStats.getMax(), longStats.getFirst(), + longStats.getSum(), longStats.getLast()); + isEmpty = false; + } else { + updateStats(longStats.getMin(), longStats.getMax(), longStats.getFirst(), longStats.getSum(), + longStats.getLast()); + } + + } + + private void initializeStats(long min, long max, long firstValue, double sum, long last) { + this.min = min; + this.max = max; + this.first = firstValue; + this.sum += sum; + this.last = last; + } + + @Override + public byte[] getMaxBytes() { + return BytesUtils.longToBytes(max); + } + + @Override + public byte[] getMinBytes() { + 
return BytesUtils.longToBytes(min); + } + + @Override + public byte[] getFirstBytes() { + return BytesUtils.longToBytes(first); + } + + @Override + public byte[] getSumBytes() { + return BytesUtils.doubleToBytes(sum); + } + + @Override + public byte[] getLastBytes() { + return BytesUtils.longToBytes(last); + } + + @Override + public String toString() { + return "[max:" + max + ",min:" + min + ",first:" + first + ",sum:" + sum + ",last:" + last + + "]"; + } + + @Override + public void updateStats(long minValue, long maxValue) { + if (minValue < min) { + min = minValue; + } + if (maxValue > max) { + max = maxValue; + } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/NoStatistics.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/NoStatistics.java index 1b4a396e..94b536fb 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/NoStatistics.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/NoStatistics.java @@ -10,87 +10,81 @@ * @author kangrong */ public class NoStatistics extends Statistics { - @Override - public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) { - } - - @Override - public Long getMin() { - return null; - } - - @Override - public Long getMax() { - return null; - } - - @Override - public void updateStats(boolean value) { - } - - @Override - public void updateStats(int value) { - } - - @Override - public void updateStats(long value) { - } - - @Override - public void updateStats(Binary value) { - } - - @Override - protected void mergeStatisticsValue(Statistics stats) { - } - - @Override - public byte[] getMaxBytes() { - return new byte[0]; - } - - @Override - public byte[] getMinBytes() { - return new byte[0]; - } - - @Override - public String toString() { - return "no stats"; - } - - @Override - public Long getFirst() { - return null; - } - - @Override - public double getSum() { - return 0; - } - - @Override - public Long getLast(){ - return null; - } - - @Override - public byte[] getFirstBytes() { - return new byte[0]; - } - - @Override - public byte[] getSumBytes() { - return new byte[0]; - } - - @Override - public byte[] getLastBytes(){ - return new byte[0]; - } - - @Override - public void updateStats(long min, long max) { - throw new UnsupportedOperationException(); - } + @Override + public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes) {} + + @Override + public Long getMin() { + return null; + } + + @Override + public Long getMax() { + return null; + } + + @Override + public void updateStats(boolean value) {} + + @Override + public void updateStats(int value) {} + + @Override + public void updateStats(long value) {} + + @Override + public void updateStats(Binary value) {} + + @Override + protected void mergeStatisticsValue(Statistics stats) {} + + @Override + public byte[] getMaxBytes() { + return new byte[0]; + } + + @Override + public byte[] getMinBytes() { + return new byte[0]; + } + + @Override + public String toString() { + return "no stats"; + } + + @Override + public Long getFirst() { + return null; + } + + @Override + public double getSum() { + return 0; + } + + @Override + public Long getLast() { + return null; + } + + @Override + public byte[] getFirstBytes() { + return new byte[0]; + } + + @Override + public byte[] getSumBytes() { + return new byte[0]; + } + + @Override + public byte[] getLastBytes() { + return new byte[0]; + } + + @Override + public void updateStats(long min, long max) { + throw new UnsupportedOperationException(); + } } diff --git 
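LongStatistics and NoStatistics above are likewise formatting-only changes. Two digests of the same concrete type can be combined with mergeStatistics, defined in the Statistics base class reformatted just below; a rough sketch of that flow (the example class name is invented, and the page/group framing is only illustrative):

import cn.edu.tsinghua.tsfile.file.metadata.statistics.LongStatistics;

// Hypothetical example class, not part of the TsFile sources.
public class LongStatisticsMergeSketch {
  public static void main(String[] args) {
    LongStatistics pageStats = new LongStatistics();
    pageStats.updateStats(100L);
    pageStats.updateStats(300L);

    LongStatistics groupStats = new LongStatistics();
    groupStats.updateStats(200L);

    // Fold one digest into the other; a mismatched Statistics subclass
    // would trigger StatisticsClassException instead.
    groupStats.mergeStatistics(pageStats);

    // After the merge: min = 100, max = 300, last = 300.
    System.out.println(groupStats);
  }
}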
a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/Statistics.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/Statistics.java index 732f5f64..c9445c0f 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/Statistics.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/Statistics.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.math.BigDecimal; /** @@ -19,132 +18,131 @@ * @since 0.0.1 */ public abstract class Statistics { - private static final Logger LOG = LoggerFactory.getLogger(Statistics.class); - // isEmpty being false means this statistic has been initialized and the max - // and min is not null; - protected boolean isEmpty = true; - - /** - * static method providing statistic instance for respective data type. - * - * @param type - data type - * @return Statistics - */ - public static Statistics getStatsByType(TSDataType type) { - switch (type) { - case INT32: - return new IntegerStatistics(); - case INT64: - return new LongStatistics(); - case TEXT: - return new BinaryStatistics(); - case ENUMS: - return new NoStatistics(); - case BOOLEAN: - return new BooleanStatistics(); - case DOUBLE: - return new DoubleStatistics(); - case FLOAT: - return new FloatStatistics(); - case BIGDECIMAL: - return new BigDecimalStatistics(); - default: - throw new UnknownColumnTypeException(type.toString()); - } + private static final Logger LOG = LoggerFactory.getLogger(Statistics.class); + // isEmpty being false means this statistic has been initialized and the max + // and min is not null; + protected boolean isEmpty = true; + + /** + * static method providing statistic instance for respective data type. + * + * @param type - data type + * @return Statistics + */ + public static Statistics getStatsByType(TSDataType type) { + switch (type) { + case INT32: + return new IntegerStatistics(); + case INT64: + return new LongStatistics(); + case TEXT: + return new BinaryStatistics(); + case ENUMS: + return new NoStatistics(); + case BOOLEAN: + return new BooleanStatistics(); + case DOUBLE: + return new DoubleStatistics(); + case FLOAT: + return new FloatStatistics(); + case BIGDECIMAL: + return new BigDecimalStatistics(); + default: + throw new UnknownColumnTypeException(type.toString()); } + } - abstract public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes); - - abstract public T getMin(); - - abstract public T getMax(); - - abstract public T getFirst(); - - abstract public double getSum(); - - abstract public T getLast(); - - /** - * merge parameter to this statistic. Including - * - * @param stats input statistics - * @throws StatisticsClassException cannot merge statistics - */ - public void mergeStatistics(Statistics stats) throws StatisticsClassException { - if (stats == null) { - LOG.warn("tsfile-file parameter stats is null"); - return; - } - if (this.getClass() == stats.getClass()) { - if (!stats.isEmpty) { - mergeStatisticsValue(stats); - isEmpty = false; - } - } else { - LOG.warn("tsfile-file Statistics classes mismatched,no merge: " - + this.getClass().toString() + " vs. 
" + stats.getClass().toString()); - - throw new StatisticsClassException(this.getClass(), stats.getClass()); - } - } + abstract public void setMinMaxFromBytes(byte[] minBytes, byte[] maxBytes); - abstract protected void mergeStatisticsValue(Statistics stats); + abstract public T getMin(); - public boolean isEmpty() { - return isEmpty; - } + abstract public T getMax(); - public void updateStats(boolean value) { - throw new UnsupportedOperationException(); - } + abstract public T getFirst(); - public void updateStats(int value) { - throw new UnsupportedOperationException(); - } + abstract public double getSum(); - public void updateStats(long value) { - throw new UnsupportedOperationException(); - } + abstract public T getLast(); - /** - * This method with two parameters is only used by {@code overflow} which - * updates/inserts/deletes timestamp. - * - * @param min min timestamp - * @param max max timestamp - */ - public void updateStats(long min, long max){ - throw new UnsupportedOperationException(); + /** + * merge parameter to this statistic. Including + * + * @param stats input statistics + * @throws StatisticsClassException cannot merge statistics + */ + public void mergeStatistics(Statistics stats) throws StatisticsClassException { + if (stats == null) { + LOG.warn("tsfile-file parameter stats is null"); + return; } - - - public void updateStats(float value) { - throw new UnsupportedOperationException(); + if (this.getClass() == stats.getClass()) { + if (!stats.isEmpty) { + mergeStatisticsValue(stats); + isEmpty = false; + } + } else { + LOG.warn("tsfile-file Statistics classes mismatched,no merge: " + this.getClass().toString() + + " vs. " + stats.getClass().toString()); + + throw new StatisticsClassException(this.getClass(), stats.getClass()); } + } - public void updateStats(double value) { - throw new UnsupportedOperationException(); - } + abstract protected void mergeStatisticsValue(Statistics stats); - public void updateStats(BigDecimal value) { - throw new UnsupportedOperationException(); - } + public boolean isEmpty() { + return isEmpty; + } - public void updateStats(Binary value) { - throw new UnsupportedOperationException(); - } + public void updateStats(boolean value) { + throw new UnsupportedOperationException(); + } - public void reset() { - } + public void updateStats(int value) { + throw new UnsupportedOperationException(); + } + + public void updateStats(long value) { + throw new UnsupportedOperationException(); + } + + /** + * This method with two parameters is only used by {@code overflow} which updates/inserts/deletes + * timestamp. 
+ * + * @param min min timestamp + * @param max max timestamp + */ + public void updateStats(long min, long max) { + throw new UnsupportedOperationException(); + } + + + public void updateStats(float value) { + throw new UnsupportedOperationException(); + } + + public void updateStats(double value) { + throw new UnsupportedOperationException(); + } + + public void updateStats(BigDecimal value) { + throw new UnsupportedOperationException(); + } + + public void updateStats(Binary value) { + throw new UnsupportedOperationException(); + } + + public void reset() {} + + abstract public byte[] getMaxBytes(); + + abstract public byte[] getMinBytes(); + + abstract public byte[] getFirstBytes(); - abstract public byte[] getMaxBytes(); + abstract public byte[] getSumBytes(); - abstract public byte[] getMinBytes(); - - abstract public byte[] getFirstBytes(); - - abstract public byte[] getSumBytes(); - - abstract public byte[] getLastBytes(); + abstract public byte[] getLastBytes(); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/StatisticsClassException.java b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/StatisticsClassException.java index 1ffec644..d1037c5e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/StatisticsClassException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/StatisticsClassException.java @@ -3,10 +3,9 @@ import cn.edu.tsinghua.tsfile.common.exception.TSFileRuntimeException; public class StatisticsClassException extends TSFileRuntimeException { - private static final long serialVersionUID = -5445795844780183770L; + private static final long serialVersionUID = -5445795844780183770L; - public StatisticsClassException(Class className1, Class className2) { - super("tsfile-file Statistics classes mismatched: " + className1 + " vs. " - + className2); - } + public StatisticsClassException(Class className1, Class className2) { + super("tsfile-file Statistics classes mismatched: " + className1 + " vs. 
" + className2); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/file/utils/ReadWriteThriftFormatUtils.java b/src/main/java/cn/edu/tsinghua/tsfile/file/utils/ReadWriteThriftFormatUtils.java index 444cec84..16103d3f 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/file/utils/ReadWriteThriftFormatUtils.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/file/utils/ReadWriteThriftFormatUtils.java @@ -1,259 +1,228 @@ -package cn.edu.tsinghua.tsfile.file.utils; - -import cn.edu.tsinghua.tsfile.common.constant.StatisticConstant; -import cn.edu.tsinghua.tsfile.common.utils.ITsRandomAccessFileReader; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; -import cn.edu.tsinghua.tsfile.file.metadata.statistics.Statistics; -import cn.edu.tsinghua.tsfile.format.DataPageHeader; -import cn.edu.tsinghua.tsfile.format.DictionaryPageHeader; -import cn.edu.tsinghua.tsfile.format.Digest; -import cn.edu.tsinghua.tsfile.format.Encoding; -import cn.edu.tsinghua.tsfile.format.FileMetaData; -import cn.edu.tsinghua.tsfile.format.PageHeader; -import cn.edu.tsinghua.tsfile.format.PageType; -import cn.edu.tsinghua.tsfile.format.RowGroupBlockMetaData; - -import org.apache.commons.io.IOUtils; -import org.apache.thrift.TBase; -import org.apache.thrift.TDeserializer; -import org.apache.thrift.TException; -import org.apache.thrift.TSerializer; -import org.apache.thrift.protocol.TCompactProtocol; -import org.apache.thrift.protocol.TProtocol; -import org.apache.thrift.transport.TIOStreamTransport; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Map; - -/** - * ConverterUtils is a utility class. It provide conversion between tsfile and - * thrift metadata class. It also provides function that read/write page header - * from/to stream - * - * @author XuYi xuyi556677@163.com - */ -public class ReadWriteThriftFormatUtils { - private static final Logger LOGGER = LoggerFactory.getLogger(ReadWriteThriftFormatUtils.class); - - /** - * write file metadata(thrift format) to stream - * - * @param fileMetadata - * file metadata to write - * @param to - * OutputStream - * @throws IOException - * cannot write file metadata to OutputStream - */ - public static void writeFileMetaData(FileMetaData fileMetadata, OutputStream to) throws IOException { - write(fileMetadata, to); - } - - /** - * read file metadata(thrift format) from stream - * - * @param from - * InputStream - * @return metadata of TsFile - * @throws IOException - * cannot read file metadata from OutputStream - */ - public static FileMetaData readFileMetaData(InputStream from) throws IOException { - return read(from, new FileMetaData()); - } - - public static void writeRowGroupBlockMetadata(RowGroupBlockMetaData metadata, OutputStream to) throws IOException { - write(metadata, to); - } - - public static RowGroupBlockMetaData readRowGroupBlockMetaData(InputStream from) throws IOException { - return read(from, new RowGroupBlockMetaData()); - } - - public static RowGroupBlockMetaData readRowGroupBlockMetaData(ITsRandomAccessFileReader reader, long offset, - int size) throws IOException { - reader.seek(offset); - byte[] buf = new byte[size]; - reader.read(buf, 0, buf.length); - ByteArrayInputStream bais = new ByteArrayInputStream(buf); - - return readRowGroupBlockMetaData(bais); - } - - /** - * write DataPageHeader to output stream. 
For more information about - * DataPageHeader, see PageHeader and DataPageHeader in tsfile-format - * - * @param uncompressedSize - * uncompressed size in byte of one page size - * @param compressedSize - * compressed size in byte of one page size - * @param numValues - * number of value - * @param statistics - * statistics - * @param numRows - * number of row - * @param encoding - * encoding type - * @param to - * Outputstream - * @param max_timestamp - * max timestamp - * @param min_timestamp - * min timestamp - * @throws IOException - * cannot write data page header to OutputStream - */ - public static void writeDataPageHeader(int uncompressedSize, int compressedSize, int numValues, - Statistics statistics, int numRows, TSEncoding encoding, OutputStream to, long max_timestamp, - long min_timestamp) throws IOException { - ReadWriteThriftFormatUtils.writePageHeader(newDataPageHeader(uncompressedSize, compressedSize, numValues, - statistics, numRows, encoding, max_timestamp, min_timestamp), to); - } - - private static PageHeader newDataPageHeader(int uncompressedSize, int compressedSize, int numValues, - Statistics statistics, int numRows, TSEncoding encoding, long max_timestamp, long min_timestamp) { - PageHeader pageHeader = new PageHeader(PageType.DATA_PAGE, uncompressedSize, compressedSize); - // TODO: pageHeader crc uncomplete - - pageHeader.setData_page_header(new DataPageHeader(numValues, numRows, Encoding.valueOf(encoding.toString()), - max_timestamp, min_timestamp)); - if (!statistics.isEmpty()) { - Digest digest = new Digest(); - Map statisticsMap = new HashMap<>(); - // TODO add your statistics - statisticsMap.put(StatisticConstant.MAX_VALUE, ByteBuffer.wrap(statistics.getMaxBytes())); - statisticsMap.put(StatisticConstant.MIN_VALUE, ByteBuffer.wrap(statistics.getMinBytes())); - statisticsMap.put(StatisticConstant.FIRST, ByteBuffer.wrap(statistics.getFirstBytes())); - statisticsMap.put(StatisticConstant.SUM, ByteBuffer.wrap(statistics.getSumBytes())); - statisticsMap.put(StatisticConstant.LAST, ByteBuffer.wrap(statistics.getLastBytes())); - digest.setStatistics(statisticsMap); - - pageHeader.getData_page_header().setDigest(digest); - } - return pageHeader; - } - - /** - * write page header(thrift format) to stream - * - * @param pageHeader - * input page header - * @param to - * OutputStream - * @throws IOException - * cannot write page header to OutputStream - */ - public static void writePageHeader(PageHeader pageHeader, OutputStream to) throws IOException { - try { - pageHeader.write(protocol(to)); - } catch (TException e) { - LOGGER.error("tsfile-file Utils: can not write {}", pageHeader, e); - throw new IOException(e); - } - } - - /** - * read one page header from stream - * - * @param from - * InputStream - * @return page header - * @throws IOException - * cannot read page header from InputStream - */ - public static PageHeader readPageHeader(InputStream from) throws IOException { - return readPageHeader(from, new PageHeader()); - } - - private static PageHeader readPageHeader(InputStream from, PageHeader header) throws IOException { - try { - header.read(protocol(from)); - return header; - } catch (TException e) { - LOGGER.error("tsfile-file Utils: can not read {}", header, e); - throw new IOException(e); - } - } - - /** - * @param tbase - * input class in thrift format - * @param to - * OutputStream - * @throws IOException - * exception in IO - */ - public static void write(TBase tbase, OutputStream to) throws IOException { - try { - TSerializer serializer = new 
TSerializer(new TCompactProtocol.Factory()); - to.write(serializer.serialize(tbase)); - } catch (TException e) { - LOGGER.error("tsfile-file Utils: can not write {}", tbase, e); - throw new IOException(e); - } - } - - /** - * @param from - * InputStream - * @param tbase - * output class in thrift format - * @param - * class in thrift-format - * @return Class in thrift format - * @throws IOException - * exception in IO - */ - public static > T read(InputStream from, T tbase) throws IOException { - try { - TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory()); - deserializer.deserialize(tbase, IOUtils.toByteArray(from)); - return tbase; - } catch (TException e) { - LOGGER.error("tsfile-file Utils: can not read {}", tbase, e); - throw new IOException(e); - } - } - - private static TProtocol protocol(OutputStream to) { - return new TCompactProtocol((new TIOStreamTransport(to))); - } - - private static TProtocol protocol(InputStream from) { - return new TCompactProtocol((new TIOStreamTransport(from))); - } - - /** - * In current version, DictionaryPageHeader is not used - * - * @param uncompressedSize - * uncompressed size in byte of one page size - * @param compressedSize - * compressed size in byte of one page size - * @param numValues - * number of value - * @param encoding - * encoding type - * @param to - * Outputstream - * @throws IOException - * cannot write dictionary page header to OutputStream - */ - @Deprecated - public void writeDictionaryPageHeader(int uncompressedSize, int compressedSize, int numValues, TSEncoding encoding, - OutputStream to) throws IOException { - PageHeader pageHeader = new PageHeader(PageType.DICTIONARY_PAGE, uncompressedSize, compressedSize); - pageHeader - .setDictionary_page_header(new DictionaryPageHeader(numValues, Encoding.valueOf(encoding.toString()))); - ReadWriteThriftFormatUtils.writePageHeader(pageHeader, to); - } -} +package cn.edu.tsinghua.tsfile.file.utils; + +import cn.edu.tsinghua.tsfile.common.constant.StatisticConstant; +import cn.edu.tsinghua.tsfile.common.utils.ITsRandomAccessFileReader; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; +import cn.edu.tsinghua.tsfile.file.metadata.statistics.Statistics; +import cn.edu.tsinghua.tsfile.format.DataPageHeader; +import cn.edu.tsinghua.tsfile.format.DictionaryPageHeader; +import cn.edu.tsinghua.tsfile.format.Digest; +import cn.edu.tsinghua.tsfile.format.Encoding; +import cn.edu.tsinghua.tsfile.format.FileMetaData; +import cn.edu.tsinghua.tsfile.format.PageHeader; +import cn.edu.tsinghua.tsfile.format.PageType; +import cn.edu.tsinghua.tsfile.format.RowGroupBlockMetaData; +import org.apache.commons.io.IOUtils; +import org.apache.thrift.TBase; +import org.apache.thrift.TDeserializer; +import org.apache.thrift.TException; +import org.apache.thrift.TSerializer; +import org.apache.thrift.protocol.TCompactProtocol; +import org.apache.thrift.protocol.TProtocol; +import org.apache.thrift.transport.TIOStreamTransport; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +/** + * ConverterUtils is a utility class. It provide conversion between tsfile and thrift metadata + * class. 
It also provides function that read/write page header from/to stream + * + * @author XuYi xuyi556677@163.com + */ +public class ReadWriteThriftFormatUtils { + private static final Logger LOGGER = LoggerFactory.getLogger(ReadWriteThriftFormatUtils.class); + + /** + * write file metadata(thrift format) to stream + * + * @param fileMetadata file metadata to write + * @param to OutputStream + * @throws IOException cannot write file metadata to OutputStream + */ + public static void writeFileMetaData(FileMetaData fileMetadata, OutputStream to) + throws IOException { + write(fileMetadata, to); + } + + /** + * read file metadata(thrift format) from stream + * + * @param from InputStream + * @return metadata of TsFile + * @throws IOException cannot read file metadata from OutputStream + */ + public static FileMetaData readFileMetaData(InputStream from) throws IOException { + return read(from, new FileMetaData()); + } + + public static void writeRowGroupBlockMetadata(RowGroupBlockMetaData metadata, OutputStream to) + throws IOException { + write(metadata, to); + } + + public static RowGroupBlockMetaData readRowGroupBlockMetaData(InputStream from) + throws IOException { + return read(from, new RowGroupBlockMetaData()); + } + + public static RowGroupBlockMetaData readRowGroupBlockMetaData(ITsRandomAccessFileReader reader, + long offset, int size) throws IOException { + reader.seek(offset); + byte[] buf = new byte[size]; + reader.read(buf, 0, buf.length); + ByteArrayInputStream bais = new ByteArrayInputStream(buf); + + return readRowGroupBlockMetaData(bais); + } + + /** + * write DataPageHeader to output stream. For more information about DataPageHeader, see + * PageHeader and DataPageHeader in tsfile-format + * + * @param uncompressedSize uncompressed size in byte of one page size + * @param compressedSize compressed size in byte of one page size + * @param numValues number of value + * @param statistics statistics + * @param numRows number of row + * @param encoding encoding type + * @param to Outputstream + * @param max_timestamp max timestamp + * @param min_timestamp min timestamp + * @throws IOException cannot write data page header to OutputStream + */ + public static void writeDataPageHeader(int uncompressedSize, int compressedSize, int numValues, + Statistics statistics, int numRows, TSEncoding encoding, OutputStream to, + long max_timestamp, long min_timestamp) throws IOException { + ReadWriteThriftFormatUtils.writePageHeader(newDataPageHeader(uncompressedSize, compressedSize, + numValues, statistics, numRows, encoding, max_timestamp, min_timestamp), to); + } + + private static PageHeader newDataPageHeader(int uncompressedSize, int compressedSize, + int numValues, Statistics statistics, int numRows, TSEncoding encoding, long max_timestamp, + long min_timestamp) { + PageHeader pageHeader = new PageHeader(PageType.DATA_PAGE, uncompressedSize, compressedSize); + // TODO: pageHeader crc uncomplete + + pageHeader.setData_page_header(new DataPageHeader(numValues, numRows, + Encoding.valueOf(encoding.toString()), max_timestamp, min_timestamp)); + if (!statistics.isEmpty()) { + Digest digest = new Digest(); + Map statisticsMap = new HashMap<>(); + // TODO add your statistics + statisticsMap.put(StatisticConstant.MAX_VALUE, ByteBuffer.wrap(statistics.getMaxBytes())); + statisticsMap.put(StatisticConstant.MIN_VALUE, ByteBuffer.wrap(statistics.getMinBytes())); + statisticsMap.put(StatisticConstant.FIRST, ByteBuffer.wrap(statistics.getFirstBytes())); + statisticsMap.put(StatisticConstant.SUM, 
ByteBuffer.wrap(statistics.getSumBytes())); + statisticsMap.put(StatisticConstant.LAST, ByteBuffer.wrap(statistics.getLastBytes())); + digest.setStatistics(statisticsMap); + + pageHeader.getData_page_header().setDigest(digest); + } + return pageHeader; + } + + /** + * write page header(thrift format) to stream + * + * @param pageHeader input page header + * @param to OutputStream + * @throws IOException cannot write page header to OutputStream + */ + public static void writePageHeader(PageHeader pageHeader, OutputStream to) throws IOException { + try { + pageHeader.write(protocol(to)); + } catch (TException e) { + LOGGER.error("tsfile-file Utils: can not write {}", pageHeader, e); + throw new IOException(e); + } + } + + /** + * read one page header from stream + * + * @param from InputStream + * @return page header + * @throws IOException cannot read page header from InputStream + */ + public static PageHeader readPageHeader(InputStream from) throws IOException { + return readPageHeader(from, new PageHeader()); + } + + private static PageHeader readPageHeader(InputStream from, PageHeader header) throws IOException { + try { + header.read(protocol(from)); + return header; + } catch (TException e) { + LOGGER.error("tsfile-file Utils: can not read {}", header, e); + throw new IOException(e); + } + } + + /** + * @param tbase input class in thrift format + * @param to OutputStream + * @throws IOException exception in IO + */ + public static void write(TBase tbase, OutputStream to) throws IOException { + try { + TSerializer serializer = new TSerializer(new TCompactProtocol.Factory()); + to.write(serializer.serialize(tbase)); + } catch (TException e) { + LOGGER.error("tsfile-file Utils: can not write {}", tbase, e); + throw new IOException(e); + } + } + + /** + * @param from InputStream + * @param tbase output class in thrift format + * @param class in thrift-format + * @return Class in thrift format + * @throws IOException exception in IO + */ + public static > T read(InputStream from, T tbase) throws IOException { + try { + TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory()); + deserializer.deserialize(tbase, IOUtils.toByteArray(from)); + return tbase; + } catch (TException e) { + LOGGER.error("tsfile-file Utils: can not read {}", tbase, e); + throw new IOException(e); + } + } + + private static TProtocol protocol(OutputStream to) { + return new TCompactProtocol((new TIOStreamTransport(to))); + } + + private static TProtocol protocol(InputStream from) { + return new TCompactProtocol((new TIOStreamTransport(from))); + } + + /** + * In current version, DictionaryPageHeader is not used + * + * @param uncompressedSize uncompressed size in byte of one page size + * @param compressedSize compressed size in byte of one page size + * @param numValues number of value + * @param encoding encoding type + * @param to Outputstream + * @throws IOException cannot write dictionary page header to OutputStream + */ + @Deprecated + public void writeDictionaryPageHeader(int uncompressedSize, int compressedSize, int numValues, + TSEncoding encoding, OutputStream to) throws IOException { + PageHeader pageHeader = + new PageHeader(PageType.DICTIONARY_PAGE, uncompressedSize, compressedSize); + pageHeader.setDictionary_page_header( + new DictionaryPageHeader(numValues, Encoding.valueOf(encoding.toString()))); + ReadWriteThriftFormatUtils.writePageHeader(pageHeader, to); + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/basis/TsFile.java 
b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/basis/TsFile.java index 21f4e935..783f2d8b 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/basis/TsFile.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/basis/TsFile.java @@ -5,10 +5,8 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; - import cn.edu.tsinghua.tsfile.timeseries.read.query.OnePassQueryDataSet; import org.json.JSONObject; - import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; @@ -41,14 +39,10 @@ public class TsFile { /** * For Write * - * @param file - * a TsFile - * @param schemaJson - * the fileSchema of TsFile in type of JSON - * @throws IOException - * exception in IO - * @throws WriteProcessException - * exception in write process + * @param file a TsFile + * @param schemaJson the fileSchema of TsFile in type of JSON + * @throws IOException exception in IO + * @throws WriteProcessException exception in write process */ public TsFile(File file, JSONObject schemaJson) throws IOException, WriteProcessException { this(file, new FileSchema(schemaJson)); @@ -57,14 +51,10 @@ public TsFile(File file, JSONObject schemaJson) throws IOException, WriteProcess /** * For Write * - * @param file - * a TsFile - * @param schema - * the fileSchema of TsFile - * @throws IOException - * cannot write TsFile - * @throws WriteProcessException - * error occurs when writing + * @param file a TsFile + * @param schema the fileSchema of TsFile + * @throws IOException cannot write TsFile + * @throws WriteProcessException error occurs when writing */ public TsFile(File file, FileSchema schema) throws IOException, WriteProcessException { this(schema); @@ -74,14 +64,10 @@ public TsFile(File file, FileSchema schema) throws IOException, WriteProcessExce /** * For Write * - * @param output - * a TsFile - * @param schemaJson - * the fileSchema of TsFile in type of JSON - * @throws IOException - * exception in IO - * @throws WriteProcessException - * exception in write process + * @param output a TsFile + * @param schemaJson the fileSchema of TsFile in type of JSON + * @throws IOException exception in IO + * @throws WriteProcessException exception in write process */ public TsFile(ITsRandomAccessFileWriter output, JSONObject schemaJson) throws IOException, WriteProcessException { @@ -93,14 +79,10 @@ public TsFile(ITsRandomAccessFileWriter output, JSONObject schemaJson) /** * For Write * - * @param output - * a TsFile - * @param schema - * the fileSchema of TsFile - * @throws IOException - * cannot write TsFile - * @throws WriteProcessException - * error occurs when writing + * @param output a TsFile + * @param schema the fileSchema of TsFile + * @throws IOException cannot write TsFile + * @throws WriteProcessException error occurs when writing */ public TsFile(ITsRandomAccessFileWriter output, FileSchema schema) throws IOException, WriteProcessException { @@ -121,10 +103,8 @@ private TsFile(FileSchema schema) { /** * Notice: This constructor is only for reading TsFile. * - * @param raf - * input reader - * @throws IOException - * cannot read TsFile + * @param raf input reader + * @throws IOException cannot read TsFile */ public TsFile(ITsRandomAccessFileReader raf) throws IOException { this.status = READ; @@ -136,13 +116,10 @@ public TsFile(ITsRandomAccessFileReader raf) throws IOException { * write a line into TsFile
* the corresponding schema must be defined. * - * @param line - * a line of data - * @throws IOException - * thrown if write process meats IOException like the output stream is closed - * abnormally. - * @throws WriteProcessException - * thrown if given data is not matched to fileSchema + * @param line a line of data + * @throws IOException thrown if write process meats IOException like the output stream is closed + * abnormally. + * @throws WriteProcessException thrown if given data is not matched to fileSchema */ public void writeLine(String line) throws IOException, WriteProcessException { checkStatus(WRITE); @@ -153,10 +130,8 @@ public void writeLine(String line) throws IOException, WriteProcessException { /** * add a new property, replace old value if already exist. * - * @param key - * key of property - * @param value - * value of property + * @param key key of property + * @param value value of property */ public void addProp(String key, String value) { fileSchema.addProp(key, value); @@ -165,13 +140,10 @@ public void addProp(String key, String value) { /** * write a TSRecord into TsFile. * - * @param tsRecord - * a line of data in form of {@linkplain TSRecord} - * @throws IOException - * thrown if write process meats IOException like the output stream is closed - * abnormally. - * @throws WriteProcessException - * thrown if given data is not matched to fileSchema + * @param tsRecord a line of data in form of {@linkplain TSRecord} + * @throws IOException thrown if write process meats IOException like the output stream is closed + * abnormally. + * @throws WriteProcessException thrown if given data is not matched to fileSchema */ public void writeRecord(TSRecord tsRecord) throws IOException, WriteProcessException { checkStatus(WRITE); @@ -181,9 +153,8 @@ public void writeRecord(TSRecord tsRecord) throws IOException, WriteProcessExcep /** * end the write process normally. * - * @throws IOException - * thrown if write process meats IOException like the output stream is closed - * abnormally. + * @throws IOException thrown if write process meats IOException like the output stream is closed + * abnormally. */ public void close() throws IOException { if (this.status == WRITE) { @@ -191,7 +162,7 @@ public void close() throws IOException { } else if (this.status == READ) { queryEngine.close(); } else { - String[] msg = new String[] { "WRITE", "READ" }; + String[] msg = new String[] {"WRITE", "READ"}; throw new IOException("This method should be invoked in status " + msg[status] + ", but current status is " + msg[this.status]); } @@ -199,7 +170,7 @@ public void close() throws IOException { public OnePassQueryDataSet query(List paths, FilterExpression timeFilter, - FilterExpression valueFilter) throws IOException { + FilterExpression valueFilter) throws IOException { checkStatus(READ); if (paths.size() == 1 && valueFilter instanceof SingleSeriesFilterExpression && paths.get(0).getDeltaObjectToString() @@ -215,7 +186,7 @@ public OnePassQueryDataSet query(List paths, FilterExpression timeFilter, public OnePassQueryDataSet query(List paths, FilterExpression timeFilter, - FilterExpression valueFilter, Map params) throws IOException { + FilterExpression valueFilter, Map params) throws IOException { checkStatus(READ); return queryEngine.query(paths, timeFilter, null, valueFilter, params); } @@ -224,8 +195,7 @@ public OnePassQueryDataSet query(List paths, FilterExpression timeFilter, * Get All information of column(s) for every deltaObject. 
* * @return A set of ArrayList SeriesSchema stored in a HashMap separated by deltaObjectId - * @throws IOException - * thrown if fail to get all series schema + * @throws IOException thrown if fail to get all series schema */ public Map> getAllColumns() throws IOException { checkStatus(READ); @@ -236,8 +206,7 @@ public Map> getAllColumns() throws IOException { * Get RowGroupSize for every deltaObject * * @return HashMap - * @throws IOException - * thrown if fail to get row group count + * @throws IOException thrown if fail to get row group count */ public Map getDeltaObjectRowGroupCount() throws IOException { checkStatus(READ); @@ -246,8 +215,7 @@ public Map getDeltaObjectRowGroupCount() throws IOException { /** * @return a map contains all DeltaObjects with type each. - * @throws IOException - * thrown if fail to get delta object type + * @throws IOException thrown if fail to get delta object type */ public Map getDeltaObjectTypes() throws IOException { checkStatus(READ); @@ -257,11 +225,9 @@ public Map getDeltaObjectTypes() throws IOException { /** * Check whether given path exists in this TsFile. * - * @param path - * A path of one Series + * @param path A path of one Series * @return if the path exists - * @throws IOException - * thrown if fail to check path exists + * @throws IOException thrown if fail to check path exists */ public boolean pathExist(Path path) throws IOException { checkStatus(READ); @@ -270,8 +236,7 @@ public boolean pathExist(Path path) throws IOException { /** * @return all deltaObjects' name in current TsFile - * @throws IOException - * thrown if fail to get all delta object + * @throws IOException thrown if fail to get all delta object */ public ArrayList getAllDeltaObject() throws IOException { checkStatus(READ); @@ -280,8 +245,7 @@ public ArrayList getAllDeltaObject() throws IOException { /** * @return all series' schemas in current TsFile - * @throws IOException - * thrown if fail to all series + * @throws IOException thrown if fail to all series */ public List getAllSeries() throws IOException { checkStatus(READ); @@ -292,8 +256,7 @@ public List getAllSeries() throws IOException { * Get all RowGroups' offsets in current TsFile * * @return res.get(i) represents the End-Position for specific rowGroup i in this file. - * @throws IOException - * thrown if fail to get row group pos list + * @throws IOException thrown if fail to get row group pos list */ public ArrayList getRowGroupPosList() throws IOException { checkStatus(READ); @@ -319,8 +282,7 @@ public Map getProps() { /** * clear and set new properties. 
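A hedged read-side sketch of the TsFile API touched above; it is not part of the patch, and the TsRandomAccessLocalFileReader class, the import locations marked below, the "d1.s1" path form, and the null filters are assumptions made only for illustration:

    import java.util.ArrayList;
    import java.util.List;
    import cn.edu.tsinghua.tsfile.timeseries.basis.TsFile;
    import cn.edu.tsinghua.tsfile.timeseries.read.TsRandomAccessLocalFileReader; // assumed location
    import cn.edu.tsinghua.tsfile.timeseries.read.query.OnePassQueryDataSet;
    import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;                  // assumed location

    public class TsFileReadSketch {
      public static void main(String[] args) throws Exception {
        // The reader-based constructor opens the file in READ status.
        TsFile tsFile = new TsFile(new TsRandomAccessLocalFileReader("test.tsfile"));
        System.out.println(tsFile.getAllColumns());    // SeriesSchema lists keyed by deltaObjectId

        List<Path> paths = new ArrayList<>();
        paths.add(new Path("d1.s1"));                  // assumed "deltaObject.measurement" form
        OnePassQueryDataSet result = tsFile.query(paths, null, null); // no time/value filter, assumed allowed
        System.out.println(result);
        tsFile.close();
      }
    }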
* - * @param props - * properties in map struct + * @param props properties in map struct */ public void setProps(Map props) { fileSchema.setProps(props); @@ -332,7 +294,7 @@ public String getProp(String key) { private void checkStatus(int status) throws IOException { if (status != this.status) { - String[] msg = new String[] { "WRITE", "READ" }; + String[] msg = new String[] {"WRITE", "READ"}; throw new IOException("This method should be invoked in status " + msg[status] + ", but current status is " + msg[this.status]); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/CrossSeriesFilterExpression.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/CrossSeriesFilterExpression.java index 96470a22..48680f4a 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/CrossSeriesFilterExpression.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/CrossSeriesFilterExpression.java @@ -5,19 +5,19 @@ * @author CGF */ public abstract class CrossSeriesFilterExpression implements FilterExpression { - protected FilterExpression left; - protected FilterExpression right; + protected FilterExpression left; + protected FilterExpression right; - protected CrossSeriesFilterExpression(FilterExpression left, FilterExpression right) { - this.left = left; - this.right = right; - } + protected CrossSeriesFilterExpression(FilterExpression left, FilterExpression right) { + this.left = left; + this.right = right; + } - public FilterExpression getLeft() { - return this.left; - } + public FilterExpression getLeft() { + return this.left; + } - public FilterExpression getRight() { - return this.right; - } -} \ No newline at end of file + public FilterExpression getRight() { + return this.right; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/FilterExpression.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/FilterExpression.java index 9d9fd091..a7aa734b 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/FilterExpression.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/FilterExpression.java @@ -4,26 +4,26 @@ import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; /** - * FilterExpression is a top level filter abstraction. - * FilterExpression has two types of subclass : {@link SingleSeriesFilterExpression} and - * {@link CrossSeriesFilterExpression} - * FilterExpression is a role of interviewee in visitor pattern. + * FilterExpression is a top level filter abstraction. FilterExpression has two types of subclass : + * {@link SingleSeriesFilterExpression} and {@link CrossSeriesFilterExpression} FilterExpression is + * a role of interviewee in visitor pattern. * * @author CGF */ public interface FilterExpression { - /** - * All subclass of accept a FilterVisitor, per the visitor pattern - * @param visitor filter visitor - * @param return type - * @return corret filter - */ - T accept(FilterVisitor visitor); + /** + * All subclass of accept a FilterVisitor, per the visitor pattern + * + * @param visitor filter visitor + * @param return type + * @return corret filter + */ + T accept(FilterVisitor visitor); - /** - * FilterSeries get method. - * - * @return FilterSeries - */ - FilterSeries getFilterSeries(); + /** + * FilterSeries get method. 
+ * + * @return FilterSeries + */ + FilterSeries getFilterSeries(); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/FilterFactory.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/FilterFactory.java index 05a7a063..dbeb52d5 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/FilterFactory.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/FilterFactory.java @@ -5,243 +5,248 @@ import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; /** - * The FilterFactory is used to construct FilterSeries, SingleSeriesFilter, and - * CrossSeriesFilter. + * The FilterFactory is used to construct FilterSeries, SingleSeriesFilter, and CrossSeriesFilter. * * @author CGF */ public final class FilterFactory { - /** - * To construct Time FilterSeries - * - * @return LongFilterSeries - */ - public static LongFilterSeries timeFilterSeries() { - return new LongFilterSeries(null, null, TSDataType.INT64, FilterSeriesType.TIME_FILTER); - } - - /** - * To construct IntFilterSeries - * - * @param deltaObjectUID delta object ID - * @param measurementUID measurement ID - * @param filterType filter type - * @return IntFilterSeries - */ - public static IntFilterSeries intFilterSeries(String deltaObjectUID, String measurementUID, - FilterSeriesType filterType) { - return new IntFilterSeries(deltaObjectUID, measurementUID, TSDataType.INT32, filterType); - } - - /** - * To construct DoubleFilterSeries - * - * @param deltaObjectUID delta object ID - * @param measurementUID measurement ID - * @param filterType filter type - * @return DoubleFilterSeries - */ - public static DoubleFilterSeries doubleFilterSeries(String deltaObjectUID, String measurementUID, - FilterSeriesType filterType) { - return new DoubleFilterSeries(deltaObjectUID, measurementUID, TSDataType.DOUBLE, filterType); - } - - /** - * To construct LongFilterSeries - * - * @param deltaObjectUID delta object ID - * @param measurementUID measurement ID - * @param filterType filter type - * @return LongFilterSeries - */ - public static LongFilterSeries longFilterSeries(String deltaObjectUID, String measurementUID, - FilterSeriesType filterType) { - return new LongFilterSeries(deltaObjectUID, measurementUID, TSDataType.INT64, filterType); - } - - /** - * To construct FloatFilterSeries - * - * @param deltaObjectUID delta object ID - * @param measurementUID measurement ID - * @param filterType filter type - * @return FloatFilterSeries - */ - public static FloatFilterSeries floatFilterSeries(String deltaObjectUID, String measurementUID, - FilterSeriesType filterType) { - return new FloatFilterSeries(deltaObjectUID, measurementUID, TSDataType.FLOAT, filterType); - } - - /** - * To construct BooleanFilterSeries - * - * @param deltaObjectUID delta object ID - * @param measurementUID measurement ID - * @param filterType filter type - * @return BooleanFilterSeries - */ - public static BooleanFilterSeries booleanFilterSeries(String deltaObjectUID, String measurementUID, - FilterSeriesType filterType) { - return new BooleanFilterSeries(deltaObjectUID, measurementUID, TSDataType.BOOLEAN, filterType); - } - - /** - * To construct StringFilterSeries - * - * @param deltaObjectUID delta object ID - * @param measurementUID measurement ID - * @param filterType filter type - * @return StringFilterSeries - */ - public static StringFilterSeries stringFilterSeries(String deltaObjectUID, String measurementUID, - FilterSeriesType filterType) { - return new 
StringFilterSeries(deltaObjectUID, measurementUID, TSDataType.TEXT, filterType); - } - - /** - * To generate Eq by filterSeries - * - * @param filterSeries filter series - * @param value filter value - * @param comparable data type - * @param subclass of FilterSeries - * @return filter - */ - public static , C extends FilterSeries> Eq eq(C filterSeries, T value) { - return new Eq(filterSeries, value); - } - - /** - * To generate LtEq by filterSeries - * - * @param filterSeries filter series - * @param value filter value - * @param ifEq if equal - * @param comparable data type - * @param subclass of FilterSeries - * @return lt expression - */ - public static , C extends FilterSeries> LtEq ltEq(C filterSeries, T value, - Boolean ifEq) { - return new LtEq(filterSeries, value, ifEq); - } - - /** - * To generate GtEq by filterSeries - * - * @param filterSeries filter series - * @param value filter value - * @param ifEq if equal - * @param comparable data type - * @param subclass of FilterSeries - * @return gt expression - */ - public static , C extends FilterSeries> GtEq gtEq(C filterSeries, T value, - Boolean ifEq) { - return new GtEq(filterSeries, value, ifEq); - } - - /** - * To generate NotEq by filterSeries - * - * @param filterSeries filter series - * @param value filter value - * @param comparable data type - * @param subclass of FilterSeries - * @return not equal expression - */ - public static , C extends FilterSeries> NotEq noteq(C filterSeries, T value) { - return new NotEq(filterSeries, value); - } - - /** - * To generate Not by filterSeries - * - * @param that not expression - * @return not expression - */ - public static SingleSeriesFilterExpression not(SingleSeriesFilterExpression that) { - return new Not(that); - } - - /** - * To generate And by filterSeries - * - * @param left left expression - * @param right right expression - * @return and expression - */ - private static SingleSeriesFilterExpression ssAnd(SingleSeriesFilterExpression left, SingleSeriesFilterExpression right) { -// if (left.getFilterSeries().sameSeries(right.getFilterSeries())) - return new And(left, right); -// else -// return new CSAnd(left, right); - } - - /** - * To generate Or by filterSeries - * - * @param left left expression - * @param right right expression - * @return or expression - */ - private static SingleSeriesFilterExpression ssOr(SingleSeriesFilterExpression left, SingleSeriesFilterExpression right) { -// if (left.getFilterSeries().sameSeries(right.getFilterSeries())) - return new Or(left, right); -// else -// return new CSOr(left, right); - } - - /** - * construct CSAnd(Cross Series Filter And Operators) use FilterExpression - * left, right; - * - * @param left left expression - * @param right right expression - * @return and expression - */ - public static CSAnd csAnd(FilterExpression left, FilterExpression right) { - return new CSAnd(left, right); - } - - /** - * construct CSOr(Cross Series Filter Or Operators) use FilterExpression - * left, right; - * - * @param left left expression - * @param right right expression - * @return or expression - */ - public static CSOr csOr(FilterExpression left, FilterExpression right) { - return new CSOr(left, right); - } - - public static FilterExpression and(FilterExpression left, FilterExpression right) { - if (left.getFilterSeries().getFilterType() == FilterSeriesType.TIME_FILTER && - right.getFilterSeries().getFilterType() == FilterSeriesType.TIME_FILTER) { - return ssAnd((SingleSeriesFilterExpression) left, (SingleSeriesFilterExpression) right); - } 
- - if (left instanceof SingleSeriesFilterExpression && right instanceof SingleSeriesFilterExpression - && (((SingleSeriesFilterExpression) left).getFilterSeries().sameSeries(((SingleSeriesFilterExpression) right).getFilterSeries()))) { - return ssAnd((SingleSeriesFilterExpression) left, (SingleSeriesFilterExpression) right); - } else { - return csAnd(left, right); - } - } - - public static FilterExpression or(FilterExpression left, FilterExpression right) { - if (left.getFilterSeries().getFilterType() == FilterSeriesType.TIME_FILTER && - right.getFilterSeries().getFilterType() == FilterSeriesType.TIME_FILTER) { - return ssOr((SingleSeriesFilterExpression) left, (SingleSeriesFilterExpression) right); - } - - if (left instanceof SingleSeriesFilterExpression && right instanceof SingleSeriesFilterExpression - && (((SingleSeriesFilterExpression) left).getFilterSeries().sameSeries(((SingleSeriesFilterExpression) right).getFilterSeries()))) { - return ssOr((SingleSeriesFilterExpression) left, (SingleSeriesFilterExpression) right); - } else { - return csOr(left, right); - } - } + /** + * To construct Time FilterSeries + * + * @return LongFilterSeries + */ + public static LongFilterSeries timeFilterSeries() { + return new LongFilterSeries(null, null, TSDataType.INT64, FilterSeriesType.TIME_FILTER); + } + + /** + * To construct IntFilterSeries + * + * @param deltaObjectUID delta object ID + * @param measurementUID measurement ID + * @param filterType filter type + * @return IntFilterSeries + */ + public static IntFilterSeries intFilterSeries(String deltaObjectUID, String measurementUID, + FilterSeriesType filterType) { + return new IntFilterSeries(deltaObjectUID, measurementUID, TSDataType.INT32, filterType); + } + + /** + * To construct DoubleFilterSeries + * + * @param deltaObjectUID delta object ID + * @param measurementUID measurement ID + * @param filterType filter type + * @return DoubleFilterSeries + */ + public static DoubleFilterSeries doubleFilterSeries(String deltaObjectUID, String measurementUID, + FilterSeriesType filterType) { + return new DoubleFilterSeries(deltaObjectUID, measurementUID, TSDataType.DOUBLE, filterType); + } + + /** + * To construct LongFilterSeries + * + * @param deltaObjectUID delta object ID + * @param measurementUID measurement ID + * @param filterType filter type + * @return LongFilterSeries + */ + public static LongFilterSeries longFilterSeries(String deltaObjectUID, String measurementUID, + FilterSeriesType filterType) { + return new LongFilterSeries(deltaObjectUID, measurementUID, TSDataType.INT64, filterType); + } + + /** + * To construct FloatFilterSeries + * + * @param deltaObjectUID delta object ID + * @param measurementUID measurement ID + * @param filterType filter type + * @return FloatFilterSeries + */ + public static FloatFilterSeries floatFilterSeries(String deltaObjectUID, String measurementUID, + FilterSeriesType filterType) { + return new FloatFilterSeries(deltaObjectUID, measurementUID, TSDataType.FLOAT, filterType); + } + + /** + * To construct BooleanFilterSeries + * + * @param deltaObjectUID delta object ID + * @param measurementUID measurement ID + * @param filterType filter type + * @return BooleanFilterSeries + */ + public static BooleanFilterSeries booleanFilterSeries(String deltaObjectUID, + String measurementUID, FilterSeriesType filterType) { + return new BooleanFilterSeries(deltaObjectUID, measurementUID, TSDataType.BOOLEAN, filterType); + } + + /** + * To construct StringFilterSeries + * + * @param deltaObjectUID delta object ID + * 
@param measurementUID measurement ID + * @param filterType filter type + * @return StringFilterSeries + */ + public static StringFilterSeries stringFilterSeries(String deltaObjectUID, String measurementUID, + FilterSeriesType filterType) { + return new StringFilterSeries(deltaObjectUID, measurementUID, TSDataType.TEXT, filterType); + } + + /** + * To generate Eq by filterSeries + * + * @param filterSeries filter series + * @param value filter value + * @param comparable data type + * @param subclass of FilterSeries + * @return filter + */ + public static , C extends FilterSeries> Eq eq(C filterSeries, + T value) { + return new Eq(filterSeries, value); + } + + /** + * To generate LtEq by filterSeries + * + * @param filterSeries filter series + * @param value filter value + * @param ifEq if equal + * @param comparable data type + * @param subclass of FilterSeries + * @return lt expression + */ + public static , C extends FilterSeries> LtEq ltEq(C filterSeries, + T value, Boolean ifEq) { + return new LtEq(filterSeries, value, ifEq); + } + + /** + * To generate GtEq by filterSeries + * + * @param filterSeries filter series + * @param value filter value + * @param ifEq if equal + * @param comparable data type + * @param subclass of FilterSeries + * @return gt expression + */ + public static , C extends FilterSeries> GtEq gtEq(C filterSeries, + T value, Boolean ifEq) { + return new GtEq(filterSeries, value, ifEq); + } + + /** + * To generate NotEq by filterSeries + * + * @param filterSeries filter series + * @param value filter value + * @param comparable data type + * @param subclass of FilterSeries + * @return not equal expression + */ + public static , C extends FilterSeries> NotEq noteq(C filterSeries, + T value) { + return new NotEq(filterSeries, value); + } + + /** + * To generate Not by filterSeries + * + * @param that not expression + * @return not expression + */ + public static SingleSeriesFilterExpression not(SingleSeriesFilterExpression that) { + return new Not(that); + } + + /** + * To generate And by filterSeries + * + * @param left left expression + * @param right right expression + * @return and expression + */ + private static SingleSeriesFilterExpression ssAnd(SingleSeriesFilterExpression left, + SingleSeriesFilterExpression right) { + // if (left.getFilterSeries().sameSeries(right.getFilterSeries())) + return new And(left, right); + // else + // return new CSAnd(left, right); + } + + /** + * To generate Or by filterSeries + * + * @param left left expression + * @param right right expression + * @return or expression + */ + private static SingleSeriesFilterExpression ssOr(SingleSeriesFilterExpression left, + SingleSeriesFilterExpression right) { + // if (left.getFilterSeries().sameSeries(right.getFilterSeries())) + return new Or(left, right); + // else + // return new CSOr(left, right); + } + + /** + * construct CSAnd(Cross Series Filter And Operators) use FilterExpression left, right; + * + * @param left left expression + * @param right right expression + * @return and expression + */ + public static CSAnd csAnd(FilterExpression left, FilterExpression right) { + return new CSAnd(left, right); + } + + /** + * construct CSOr(Cross Series Filter Or Operators) use FilterExpression left, right; + * + * @param left left expression + * @param right right expression + * @return or expression + */ + public static CSOr csOr(FilterExpression left, FilterExpression right) { + return new CSOr(left, right); + } + + public static FilterExpression and(FilterExpression left, FilterExpression 
right) { + if (left.getFilterSeries().getFilterType() == FilterSeriesType.TIME_FILTER + && right.getFilterSeries().getFilterType() == FilterSeriesType.TIME_FILTER) { + return ssAnd((SingleSeriesFilterExpression) left, (SingleSeriesFilterExpression) right); + } + + if (left instanceof SingleSeriesFilterExpression + && right instanceof SingleSeriesFilterExpression + && (((SingleSeriesFilterExpression) left).getFilterSeries() + .sameSeries(((SingleSeriesFilterExpression) right).getFilterSeries()))) { + return ssAnd((SingleSeriesFilterExpression) left, (SingleSeriesFilterExpression) right); + } else { + return csAnd(left, right); + } + } + + public static FilterExpression or(FilterExpression left, FilterExpression right) { + if (left.getFilterSeries().getFilterType() == FilterSeriesType.TIME_FILTER + && right.getFilterSeries().getFilterType() == FilterSeriesType.TIME_FILTER) { + return ssOr((SingleSeriesFilterExpression) left, (SingleSeriesFilterExpression) right); + } + + if (left instanceof SingleSeriesFilterExpression + && right instanceof SingleSeriesFilterExpression + && (((SingleSeriesFilterExpression) left).getFilterSeries() + .sameSeries(((SingleSeriesFilterExpression) right).getFilterSeries()))) { + return ssOr((SingleSeriesFilterExpression) left, (SingleSeriesFilterExpression) right); + } else { + return csOr(left, right); + } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/BooleanFilterSeries.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/BooleanFilterSeries.java index 1e9854dd..2d9e88fb 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/BooleanFilterSeries.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/BooleanFilterSeries.java @@ -9,10 +9,10 @@ */ public class BooleanFilterSeries extends FilterSeries { - private static final long serialVersionUID = 454794989741185890L; + private static final long serialVersionUID = 454794989741185890L; - public BooleanFilterSeries(String deltaObjectUID, String measurementUID, TSDataType seriesDataType, - FilterSeriesType filterType) { - super(deltaObjectUID, measurementUID, TSDataType.BOOLEAN, filterType); - } + public BooleanFilterSeries(String deltaObjectUID, String measurementUID, + TSDataType seriesDataType, FilterSeriesType filterType) { + super(deltaObjectUID, measurementUID, TSDataType.BOOLEAN, filterType); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/DoubleFilterSeries.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/DoubleFilterSeries.java index 719fd92c..0903b114 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/DoubleFilterSeries.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/DoubleFilterSeries.java @@ -9,10 +9,10 @@ */ public class DoubleFilterSeries extends FilterSeries { - private static final long serialVersionUID = -5847065869887482598L; + private static final long serialVersionUID = -5847065869887482598L; - public DoubleFilterSeries(String deltaObjectUID, String measurementUID, TSDataType seriesDataType, - FilterSeriesType filterType) { - super(deltaObjectUID, measurementUID, TSDataType.DOUBLE, filterType); - } + public DoubleFilterSeries(String deltaObjectUID, String measurementUID, TSDataType seriesDataType, + FilterSeriesType filterType) { + super(deltaObjectUID, measurementUID, 
TSDataType.DOUBLE, filterType); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/FilterSeries.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/FilterSeries.java index c58286a7..6086edd0 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/FilterSeries.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/FilterSeries.java @@ -1,57 +1,57 @@ package cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; - import java.io.Serializable; /** - * Define a basic FilterSeries class, which contains deltaObjectUID, - * measurementUID, seriesDataType and filterType. + * Define a basic FilterSeries class, which contains deltaObjectUID, measurementUID, seriesDataType + * and filterType. * * @author CGF */ public abstract class FilterSeries> implements Serializable { - private static final long serialVersionUID = -8834574808000067965L; - - private final String deltaObjectUID; - private final String measurementUID; - private final TSDataType seriesDataType; - - // may be TIME_FILTER, FREQUENCY_FILTER, VALUE_FILTER - private final FilterSeriesType filterType; - - protected FilterSeries(String deltaObjectUID, String measurementUID, TSDataType seriesDataType, - FilterSeriesType filterType) { - this.deltaObjectUID = deltaObjectUID; - this.measurementUID = measurementUID; - this.seriesDataType = seriesDataType; - this.filterType = filterType; - } - - public String getDeltaObjectUID() { - return this.deltaObjectUID; - } - - public String getMeasurementUID() { - return this.measurementUID; - } - - public TSDataType getSeriesDataType() { - return this.seriesDataType; - } - - public FilterSeriesType getFilterType() { - return this.filterType; - } - - @Override - public String toString() { - return "FilterSeries (" + deltaObjectUID + "," + measurementUID + "," + seriesDataType + "," + filterType + ")"; - } - - public boolean sameSeries(FilterSeries other) { - return deltaObjectUID.equals(other.getDeltaObjectUID()) - && measurementUID.equals(other.getMeasurementUID()); - } -} \ No newline at end of file + private static final long serialVersionUID = -8834574808000067965L; + + private final String deltaObjectUID; + private final String measurementUID; + private final TSDataType seriesDataType; + + // may be TIME_FILTER, FREQUENCY_FILTER, VALUE_FILTER + private final FilterSeriesType filterType; + + protected FilterSeries(String deltaObjectUID, String measurementUID, TSDataType seriesDataType, + FilterSeriesType filterType) { + this.deltaObjectUID = deltaObjectUID; + this.measurementUID = measurementUID; + this.seriesDataType = seriesDataType; + this.filterType = filterType; + } + + public String getDeltaObjectUID() { + return this.deltaObjectUID; + } + + public String getMeasurementUID() { + return this.measurementUID; + } + + public TSDataType getSeriesDataType() { + return this.seriesDataType; + } + + public FilterSeriesType getFilterType() { + return this.filterType; + } + + @Override + public String toString() { + return "FilterSeries (" + deltaObjectUID + "," + measurementUID + "," + seriesDataType + "," + + filterType + ")"; + } + + public boolean sameSeries(FilterSeries other) { + return deltaObjectUID.equals(other.getDeltaObjectUID()) + && measurementUID.equals(other.getMeasurementUID()); + } +} diff --git 
a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/FilterSeriesType.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/FilterSeriesType.java index 2c3533f4..0e8681fb 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/FilterSeriesType.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/FilterSeriesType.java @@ -6,5 +6,5 @@ * @author CGF */ public enum FilterSeriesType { - TIME_FILTER, FREQUENCY_FILTER, VALUE_FILTER + TIME_FILTER, FREQUENCY_FILTER, VALUE_FILTER } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/FloatFilterSeries.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/FloatFilterSeries.java index a2bc4fa1..7090d4bc 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/FloatFilterSeries.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/FloatFilterSeries.java @@ -9,10 +9,10 @@ */ public class FloatFilterSeries extends FilterSeries { - private static final long serialVersionUID = -2745416005497409478L; + private static final long serialVersionUID = -2745416005497409478L; - public FloatFilterSeries(String deltaObjectUID, String measurementUID, TSDataType seriesDataType, - FilterSeriesType filterType) { - super(deltaObjectUID, measurementUID, TSDataType.FLOAT, filterType); - } + public FloatFilterSeries(String deltaObjectUID, String measurementUID, TSDataType seriesDataType, + FilterSeriesType filterType) { + super(deltaObjectUID, measurementUID, TSDataType.FLOAT, filterType); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/IntFilterSeries.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/IntFilterSeries.java index 8021606b..ae71d198 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/IntFilterSeries.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/IntFilterSeries.java @@ -9,10 +9,10 @@ */ public class IntFilterSeries extends FilterSeries { - private static final long serialVersionUID = -7268852368134017134L; + private static final long serialVersionUID = -7268852368134017134L; - public IntFilterSeries(String deltaObjectUID, String measurementUID, TSDataType seriesDataType, - FilterSeriesType filterType) { - super(deltaObjectUID, measurementUID, TSDataType.INT32, filterType); - } -} \ No newline at end of file + public IntFilterSeries(String deltaObjectUID, String measurementUID, TSDataType seriesDataType, + FilterSeriesType filterType) { + super(deltaObjectUID, measurementUID, TSDataType.INT32, filterType); + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/LongFilterSeries.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/LongFilterSeries.java index 873e3896..69edb8da 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/LongFilterSeries.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/LongFilterSeries.java @@ -9,10 +9,10 @@ */ public class LongFilterSeries extends FilterSeries { - private static final long serialVersionUID = -6805221044991568903L; + private static final long serialVersionUID = -6805221044991568903L; - public LongFilterSeries(String deltaObjectUID, 
String measurementUID, TSDataType seriesDataType, - FilterSeriesType filterType) { - super(deltaObjectUID, measurementUID, TSDataType.INT64, filterType); - } + public LongFilterSeries(String deltaObjectUID, String measurementUID, TSDataType seriesDataType, + FilterSeriesType filterType) { + super(deltaObjectUID, measurementUID, TSDataType.INT64, filterType); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/StringFilterSeries.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/StringFilterSeries.java index 9d09ea6f..bc6d97bf 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/StringFilterSeries.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/filterseries/StringFilterSeries.java @@ -10,10 +10,10 @@ */ public class StringFilterSeries extends FilterSeries { - private static final long serialVersionUID = 454794989741185890L; + private static final long serialVersionUID = 454794989741185890L; - public StringFilterSeries(String deltaObjectUID, String measurementUID, TSDataType seriesDataType, - FilterSeriesType filterType) { - super(deltaObjectUID, measurementUID, TSDataType.TEXT, filterType); - } + public StringFilterSeries(String deltaObjectUID, String measurementUID, TSDataType seriesDataType, + FilterSeriesType filterType) { + super(deltaObjectUID, measurementUID, TSDataType.TEXT, filterType); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/And.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/And.java index 9782c087..66197e0e 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/And.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/And.java @@ -11,24 +11,24 @@ */ public class And extends SingleBinaryExpression { - private static final long serialVersionUID = 6705254093824897938L; + private static final long serialVersionUID = 6705254093824897938L; - public And(SingleSeriesFilterExpression left, SingleSeriesFilterExpression right) { - super(left, right); - } + public And(SingleSeriesFilterExpression left, SingleSeriesFilterExpression right) { + super(left, right); + } - @Override - public T accept(FilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public T accept(FilterVisitor visitor) { + return visitor.visit(this); + } - @Override - public String toString() { - return "AND: ( " + left + "," + right + " )"; - } + @Override + public String toString() { + return "AND: ( " + left + "," + right + " )"; + } - @Override - public FilterSeries getFilterSeries() { - return this.left.getFilterSeries(); - } + @Override + public FilterSeries getFilterSeries() { + return this.left.getFilterSeries(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/CSAnd.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/CSAnd.java index 4f3aa21d..a10d1a4c 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/CSAnd.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/CSAnd.java @@ -6,38 +6,38 @@ import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; /** - * Both the left and right operators of CSAnd must satisfy the condition - * CSAnd represents Cross Series And operation. 
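For orientation, a small sketch of how FilterFactory.and(...) ends up producing a CSAnd when its operands refer to different series; it is not part of the patch, and the device and measurement names are illustrative:

    import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression;
    import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory;
    import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression;
    import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType;
    import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.IntFilterSeries;

    public class CrossSeriesFilterSketch {
      public static void main(String[] args) {
        IntFilterSeries s1 = FilterFactory.intFilterSeries("d1", "s1", FilterSeriesType.VALUE_FILTER);
        IntFilterSeries s2 = FilterFactory.intFilterSeries("d1", "s2", FilterSeriesType.VALUE_FILTER);
        SingleSeriesFilterExpression ge = FilterFactory.gtEq(s1, 100, true);  // d1.s1 >= 100
        SingleSeriesFilterExpression lt = FilterFactory.ltEq(s2, 50, false);  // d1.s2 < 50
        // Different measurements, so and(...) falls through to csAnd(...) and yields a CSAnd.
        FilterExpression crossAnd = FilterFactory.and(ge, lt);
        System.out.println(crossAnd);  // prints the bracketed "&" form from CSAnd.toString()
      }
    }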
+ * Both the left and right operators of CSAnd must satisfy the condition CSAnd represents Cross + * Series And operation. * * @author CGF */ public class CSAnd extends CrossSeriesFilterExpression { - public CSAnd(FilterExpression left, FilterExpression right) { - super(left, right); - } + public CSAnd(FilterExpression left, FilterExpression right) { + super(left, right); + } - public String toString() { - return "[" + super.left + "]" + " & [" + super.right + "]"; - } + public String toString() { + return "[" + super.left + "]" + " & [" + super.right + "]"; + } - /** - * Not Used - * - * @param visitor filter visitor - * @return accept filter - */ - @Override - public T accept(FilterVisitor visitor) { - return null; - } + /** + * Not Used + * + * @param visitor filter visitor + * @return accept filter + */ + @Override + public T accept(FilterVisitor visitor) { + return null; + } - /** - * Not Used - * - * @return filter - */ - @Override - public FilterSeries getFilterSeries() { - return null; - } + /** + * Not Used + * + * @return filter + */ + @Override + public FilterSeries getFilterSeries() { + return null; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/CSOr.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/CSOr.java index 9dae54a2..281ef9a8 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/CSOr.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/CSOr.java @@ -6,38 +6,38 @@ import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; /** - * Either of the left and right operators of CSOr must satisfy the condition - * CSOr represents Cross Series Or operation. + * Either of the left and right operators of CSOr must satisfy the condition CSOr represents Cross + * Series Or operation. 
* * @author CGF */ public class CSOr extends CrossSeriesFilterExpression { - public CSOr(FilterExpression left, FilterExpression right) { - super(left, right); - } + public CSOr(FilterExpression left, FilterExpression right) { + super(left, right); + } - public String toString() { - return "[" + super.left + "]" + " | [" + super.right + "]"; - } + public String toString() { + return "[" + super.left + "]" + " | [" + super.right + "]"; + } - /** - * Not Used - * - * @param visitor filter visitor - * @return accept filter - */ - @Override - public T accept(FilterVisitor visitor) { - return null; - } + /** + * Not Used + * + * @param visitor filter visitor + * @return accept filter + */ + @Override + public T accept(FilterVisitor visitor) { + return null; + } - /** - * Not Used - * - * @return filter - */ - @Override - public FilterSeries getFilterSeries() { - return null; - } -} \ No newline at end of file + /** + * Not Used + * + * @return filter + */ + @Override + public FilterSeries getFilterSeries() { + return null; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/Eq.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/Eq.java index 96bb22d5..4bb8a8e4 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/Eq.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/Eq.java @@ -11,19 +11,19 @@ */ public class Eq> extends SingleUnaryExpression { - private static final long serialVersionUID = -6668083116644568248L; + private static final long serialVersionUID = -6668083116644568248L; - public Eq(FilterSeries filterSeries, T value) { - super(filterSeries, value); - } + public Eq(FilterSeries filterSeries, T value) { + super(filterSeries, value); + } - @Override - public R accept(FilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public R accept(FilterVisitor visitor) { + return visitor.visit(this); + } - @Override - public String toString() { - return filterSeries + " = " + value; - } + @Override + public String toString() { + return filterSeries + " = " + value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/GtEq.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/GtEq.java index 4d8f8174..00450949 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/GtEq.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/GtEq.java @@ -11,28 +11,28 @@ */ public class GtEq> extends SingleUnaryExpression { - private static final long serialVersionUID = -2088181659871608986L; - public boolean ifEq = false; + private static final long serialVersionUID = -2088181659871608986L; + public boolean ifEq = false; - public GtEq(FilterSeries filterSeries, T value, Boolean ifEq) { - super(filterSeries, value); - this.ifEq = ifEq; - } + public GtEq(FilterSeries filterSeries, T value, Boolean ifEq) { + super(filterSeries, value); + this.ifEq = ifEq; + } - @Override - public R accept(FilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public R accept(FilterVisitor visitor) { + return visitor.visit(this); + } - public Boolean getIfEq() { - return this.ifEq; - } + public Boolean getIfEq() { + return this.ifEq; + } - @Override - public String toString() { - if (ifEq) - return filterSeries + " >= " + value; - else - return filterSeries + " > " + value; - } + @Override + public String toString() { + 
if (ifEq) + return filterSeries + " >= " + value; + else + return filterSeries + " > " + value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/LtEq.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/LtEq.java index d0b9f332..c8b29824 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/LtEq.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/LtEq.java @@ -11,31 +11,31 @@ */ public class LtEq> extends SingleUnaryExpression { - private static final long serialVersionUID = -6472106605198074799L; - - public Boolean ifEq = false; // To judge whether equals(true if LtEq - // operator means less than and equals), - // false by default - - public LtEq(FilterSeries filterSeries, T value, Boolean ifEq) { - super(filterSeries, value); - this.ifEq = ifEq; - } - - @Override - public R accept(FilterVisitor visitor) { - return visitor.visit(this); - } - - public Boolean getIfEq() { - return this.ifEq; - } - - @Override - public String toString() { - if (ifEq) - return filterSeries + " <= " + value; - else - return filterSeries + " < " + value; - } + private static final long serialVersionUID = -6472106605198074799L; + + public Boolean ifEq = false; // To judge whether equals(true if LtEq + // operator means less than and equals), + // false by default + + public LtEq(FilterSeries filterSeries, T value, Boolean ifEq) { + super(filterSeries, value); + this.ifEq = ifEq; + } + + @Override + public R accept(FilterVisitor visitor) { + return visitor.visit(this); + } + + public Boolean getIfEq() { + return this.ifEq; + } + + @Override + public String toString() { + if (ifEq) + return filterSeries + " <= " + value; + else + return filterSeries + " < " + value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/NoFilter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/NoFilter.java index c1c1d4bc..dd9ab9f2 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/NoFilter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/NoFilter.java @@ -7,27 +7,26 @@ /** * NoFilter means that there is no filter. 
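Worth noting against the singleton below (an observation, not part of the patch): NoFilter is an initialization-on-demand holder singleton whose accept(...) and getFilterSeries() simply return null, so code that walks a filter tree has to special-case it as the absence of a constraint rather than visit it. Obtaining it is a one-liner:

    // Always the same instance; package per the file path above.
    SingleSeriesFilterExpression noConstraint = NoFilter.getInstance();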
*/ -public class NoFilter extends SingleSeriesFilterExpression{ - private static NoFilter noFilter; +public class NoFilter extends SingleSeriesFilterExpression { + private static NoFilter noFilter; - private static class SingletonHolder { - private static final NoFilter INSTANCE = new NoFilter(); - } + private static class SingletonHolder { + private static final NoFilter INSTANCE = new NoFilter(); + } - private NoFilter() { - } + private NoFilter() {} - public static final NoFilter getInstance() { - return SingletonHolder.INSTANCE; - } + public static final NoFilter getInstance() { + return SingletonHolder.INSTANCE; + } - @Override - public T accept(FilterVisitor visitor) { - return null; - } + @Override + public T accept(FilterVisitor visitor) { + return null; + } - @Override - public FilterSeries getFilterSeries() { - return null; - } + @Override + public FilterSeries getFilterSeries() { + return null; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/Not.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/Not.java index d4a9c424..374a8dc0 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/Not.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/Not.java @@ -3,7 +3,6 @@ import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeries; import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; - import java.io.Serializable; /** @@ -13,29 +12,29 @@ */ public class Not extends SingleSeriesFilterExpression implements Serializable { - private static final long serialVersionUID = 584860326604020881L; - private SingleSeriesFilterExpression that; + private static final long serialVersionUID = 584860326604020881L; + private SingleSeriesFilterExpression that; - public Not(SingleSeriesFilterExpression that) { - this.that = that; - } + public Not(SingleSeriesFilterExpression that) { + this.that = that; + } - @Override - public T accept(FilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public T accept(FilterVisitor visitor) { + return visitor.visit(this); + } - public SingleSeriesFilterExpression getFilterExpression() { - return this.that; - } + public SingleSeriesFilterExpression getFilterExpression() { + return this.that; + } - @Override - public String toString() { - return "Not: " + that; - } + @Override + public String toString() { + return "Not: " + that; + } - @Override - public FilterSeries getFilterSeries() { - return that.getFilterSeries(); - } + @Override + public FilterSeries getFilterSeries() { + return that.getFilterSeries(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/NotEq.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/NotEq.java index a84801d0..06d46b51 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/NotEq.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/NotEq.java @@ -11,19 +11,19 @@ */ public class NotEq> extends SingleUnaryExpression { - private static final long serialVersionUID = 2574090797476500965L; + private static final long serialVersionUID = 2574090797476500965L; - public NotEq(FilterSeries filterSeries, T value) { - super(filterSeries, value); - } + public NotEq(FilterSeries filterSeries, T value) { + 
super(filterSeries, value); + } - @Override - public R accept(FilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public R accept(FilterVisitor visitor) { + return visitor.visit(this); + } - @Override - public String toString() { - return filterSeries + " != " + value; - } + @Override + public String toString() { + return filterSeries + " != " + value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/Or.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/Or.java index 2cfdd9b9..4d430af8 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/Or.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/Or.java @@ -3,7 +3,6 @@ import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeries; import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; - import java.io.Serializable; /** @@ -13,25 +12,25 @@ */ public class Or extends SingleBinaryExpression implements Serializable { - private static final long serialVersionUID = -968055896528472694L; + private static final long serialVersionUID = -968055896528472694L; - public Or(SingleSeriesFilterExpression left, SingleSeriesFilterExpression right) { - super(left, right); - } + public Or(SingleSeriesFilterExpression left, SingleSeriesFilterExpression right) { + super(left, right); + } - @Override - public T accept(FilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public T accept(FilterVisitor visitor) { + return visitor.visit(this); + } - @Override - public String toString() { - return "OR: ( " + left + "," + right + " )"; - } + @Override + public String toString() { + return "OR: ( " + left + "," + right + " )"; + } - @Override - public FilterSeries getFilterSeries() { - return left.getFilterSeries(); - } + @Override + public FilterSeries getFilterSeries() { + return left.getFilterSeries(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/SingleBinaryExpression.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/SingleBinaryExpression.java index d748c947..ea4ec143 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/SingleBinaryExpression.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/SingleBinaryExpression.java @@ -1,7 +1,6 @@ package cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators; import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; - import java.io.Serializable; /** @@ -10,29 +9,31 @@ * @author CGF */ -public abstract class SingleBinaryExpression extends SingleSeriesFilterExpression implements Serializable { +public abstract class SingleBinaryExpression extends SingleSeriesFilterExpression + implements Serializable { - private static final long serialVersionUID = 1039585564327602465L; + private static final long serialVersionUID = 1039585564327602465L; - protected final SingleSeriesFilterExpression left; - protected final SingleSeriesFilterExpression right; + protected final SingleSeriesFilterExpression left; + protected final SingleSeriesFilterExpression right; - protected SingleBinaryExpression(SingleSeriesFilterExpression left, SingleSeriesFilterExpression right) { - this.left = left; - this.right = right; - } + protected 
SingleBinaryExpression(SingleSeriesFilterExpression left, + SingleSeriesFilterExpression right) { + this.left = left; + this.right = right; + } - public SingleSeriesFilterExpression getLeft() { - return left; - } + public SingleSeriesFilterExpression getLeft() { + return left; + } - public SingleSeriesFilterExpression getRight() { - return right; - } + public SingleSeriesFilterExpression getRight() { + return right; + } - @Override - public String toString() { - return "( " + left + "," + right + " )"; - } + @Override + public String toString() { + return "( " + left + "," + right + " )"; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/SingleUnaryExpression.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/SingleUnaryExpression.java index 6ef0aeac..6dbb6c2f 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/SingleUnaryExpression.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/definition/operators/SingleUnaryExpression.java @@ -6,7 +6,6 @@ import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.Serializable; /** @@ -16,38 +15,38 @@ * @author CGF */ public class SingleUnaryExpression> extends SingleSeriesFilterExpression - implements Serializable { - private static final Logger LOG = LoggerFactory.getLogger(SingleUnaryExpression.class); - private static final long serialVersionUID = 1431606024929453556L; - protected final FilterSeries filterSeries; - protected final T value; - - protected SingleUnaryExpression(FilterSeries filterSeries, T value) { - this.filterSeries = filterSeries; - this.value = value; - } - - public FilterSeries getFilterSeries() { - return filterSeries; - } - - public T getValue() { - return value; - } - - @Override - public String toString() { - return filterSeries + " - " + value; - } - - @SuppressWarnings("hiding") - @Override - public T accept(FilterVisitor visitor) { - // Never be invoked - // This method is invoked by specific UnarySeriesFilter which is - // subclass of UnarySeriesFilter, - // such as LtEq, Eq.. - LOG.error("UnarySeriesFilter's accept method can never be invoked."); - throw new FilterInvokeException("UnarySeriesFilter's accept method can never be invoked."); - } + implements Serializable { + private static final Logger LOG = LoggerFactory.getLogger(SingleUnaryExpression.class); + private static final long serialVersionUID = 1431606024929453556L; + protected final FilterSeries filterSeries; + protected final T value; + + protected SingleUnaryExpression(FilterSeries filterSeries, T value) { + this.filterSeries = filterSeries; + this.value = value; + } + + public FilterSeries getFilterSeries() { + return filterSeries; + } + + public T getValue() { + return value; + } + + @Override + public String toString() { + return filterSeries + " - " + value; + } + + @SuppressWarnings("hiding") + @Override + public T accept(FilterVisitor visitor) { + // Never be invoked + // This method is invoked by specific UnarySeriesFilter which is + // subclass of UnarySeriesFilter, + // such as LtEq, Eq.. 
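For reference, a minimal sketch (not part of the diff) of how these operator classes are usually assembled through FilterFactory, using only calls that appear later in this change; the series names "d1" and "s1" are placeholders.

    // 100 <= time < 200, built from GtEq and LtEq and combined with And
    SingleSeriesFilterExpression timeRange = (SingleSeriesFilterExpression) FilterFactory.and(
        FilterFactory.gtEq(
            FilterFactory.longFilterSeries("d1", "s1", FilterSeriesType.TIME_FILTER), 100L, true),
        FilterFactory.ltEq(
            FilterFactory.longFilterSeries("d1", "s1", FilterSeriesType.TIME_FILTER), 200L, false));
    // Not negates a whole expression; NoFilter is the "accept everything" singleton
    SingleSeriesFilterExpression negated = new Not(timeRange);
    SingleSeriesFilterExpression acceptAll = NoFilter.getInstance();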
+ LOG.error("UnarySeriesFilter's accept method can never be invoked."); + throw new FilterInvokeException("UnarySeriesFilter's accept method can never be invoked."); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/DigestForFilter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/DigestForFilter.java index 696dfc24..911b2f38 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/DigestForFilter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/DigestForFilter.java @@ -4,96 +4,97 @@ import cn.edu.tsinghua.tsfile.common.utils.Binary; import cn.edu.tsinghua.tsfile.common.utils.BytesUtils; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; - import java.nio.ByteBuffer; /** - * @author ZJR - * class to construct digest. + * @author ZJR class to construct digest. */ public class DigestForFilter { - private ByteBuffer min = null; - private ByteBuffer max = null; - private TSDataType type; + private ByteBuffer min = null; + private ByteBuffer max = null; + private TSDataType type; - public DigestForFilter() { + public DigestForFilter() { - } + } - public DigestForFilter(ByteBuffer min, ByteBuffer max, TSDataType type) { - this.min = min; - this.max = max; - this.type = type; - } + public DigestForFilter(ByteBuffer min, ByteBuffer max, TSDataType type) { + this.min = min; + this.max = max; + this.type = type; + } - public DigestForFilter(long minv, long maxv) { - this.min = ByteBuffer.wrap(BytesUtils.longToBytes(minv)); - this.max = ByteBuffer.wrap(BytesUtils.longToBytes(maxv)); - this.type = TSDataType.INT64; - } + public DigestForFilter(long minv, long maxv) { + this.min = ByteBuffer.wrap(BytesUtils.longToBytes(minv)); + this.max = ByteBuffer.wrap(BytesUtils.longToBytes(maxv)); + this.type = TSDataType.INT64; + } - @SuppressWarnings("unchecked") - public > T getMinValue() { - switch (type) { - case INT32: - return (T) ((Integer) BytesUtils.bytesToInt(min.array())); - case INT64: - return (T) ((Long) BytesUtils.bytesToLong(min.array())); - case FLOAT: - return (T) ((Float) BytesUtils.bytesToFloat(min.array())); - case DOUBLE: - return (T) ((Double) BytesUtils.bytesToDouble(min.array())); - case TEXT: - return (T) new Binary(BytesUtils.bytesToString(min.array())); - case BOOLEAN: - return (T) (Boolean) BytesUtils.bytesToBool(min.array()); - default: - throw new UnSupportFilterDataTypeException("DigestForFilter unsupported datatype : " + type.toString()); - } + @SuppressWarnings("unchecked") + public > T getMinValue() { + switch (type) { + case INT32: + return (T) ((Integer) BytesUtils.bytesToInt(min.array())); + case INT64: + return (T) ((Long) BytesUtils.bytesToLong(min.array())); + case FLOAT: + return (T) ((Float) BytesUtils.bytesToFloat(min.array())); + case DOUBLE: + return (T) ((Double) BytesUtils.bytesToDouble(min.array())); + case TEXT: + return (T) new Binary(BytesUtils.bytesToString(min.array())); + case BOOLEAN: + return (T) (Boolean) BytesUtils.bytesToBool(min.array()); + default: + throw new UnSupportFilterDataTypeException( + "DigestForFilter unsupported datatype : " + type.toString()); } + } - @SuppressWarnings("unchecked") - public > T getMaxValue() { - switch (type) { - case INT32: - return (T) ((Integer) BytesUtils.bytesToInt(max.array())); - case INT64: - return (T) ((Long) BytesUtils.bytesToLong(max.array())); - case FLOAT: - return (T) ((Float) BytesUtils.bytesToFloat(max.array())); - case DOUBLE: - return (T) ((Double) BytesUtils.bytesToDouble(max.array())); - case TEXT: - 
return (T) new Binary(BytesUtils.bytesToString(max.array())); - case BOOLEAN: - return (T) (Boolean) BytesUtils.bytesToBool(max.array()); - default: - throw new UnSupportFilterDataTypeException("DigestForFilter unsupported datatype : " + type.toString()); - } + @SuppressWarnings("unchecked") + public > T getMaxValue() { + switch (type) { + case INT32: + return (T) ((Integer) BytesUtils.bytesToInt(max.array())); + case INT64: + return (T) ((Long) BytesUtils.bytesToLong(max.array())); + case FLOAT: + return (T) ((Float) BytesUtils.bytesToFloat(max.array())); + case DOUBLE: + return (T) ((Double) BytesUtils.bytesToDouble(max.array())); + case TEXT: + return (T) new Binary(BytesUtils.bytesToString(max.array())); + case BOOLEAN: + return (T) (Boolean) BytesUtils.bytesToBool(max.array()); + default: + throw new UnSupportFilterDataTypeException( + "DigestForFilter unsupported datatype : " + type.toString()); } + } - public Class getTypeClass() { - switch (type) { - case INT32: - return Integer.class; - case INT64: - return Long.class; - case FLOAT: - return Float.class; - case DOUBLE: - return Double.class; - case TEXT: - return String.class; - case BOOLEAN: - return Boolean.class; - default: - throw new UnSupportFilterDataTypeException("DigestForFilter unsupported datatype : " + type.toString()); - } + public Class getTypeClass() { + switch (type) { + case INT32: + return Integer.class; + case INT64: + return Long.class; + case FLOAT: + return Float.class; + case DOUBLE: + return Double.class; + case TEXT: + return String.class; + case BOOLEAN: + return Boolean.class; + default: + throw new UnSupportFilterDataTypeException( + "DigestForFilter unsupported datatype : " + type.toString()); } + } - public TSDataType getType() { - return type; - } + public TSDataType getType() { + return type; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/DoubleInterval.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/DoubleInterval.java index b44fb3c2..5a9b1232 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/DoubleInterval.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/DoubleInterval.java @@ -10,33 +10,33 @@ * @author CGF */ public class DoubleInterval extends Interval { - private static final Logger LOG = LoggerFactory.getLogger(DoubleInterval.class); + private static final Logger LOG = LoggerFactory.getLogger(DoubleInterval.class); - // double value array - public double[] v = new double[arrayMaxn]; + // double value array + public double[] v = new double[arrayMaxn]; - public void addValueFlag(double value, boolean f) { - if (count >= arrayMaxn - 2) { - LOG.error("IntInterval array length spill."); - throw new FilterInvokeException("DoubleInterval array length spill."); - } - v[count] = value; - flag[count] = f; - count++; + public void addValueFlag(double value, boolean f) { + if (count >= arrayMaxn - 2) { + LOG.error("IntInterval array length spill."); + throw new FilterInvokeException("DoubleInterval array length spill."); } + v[count] = value; + flag[count] = f; + count++; + } - public String toString() { - StringBuffer ans = new StringBuffer(); - for (int i = 0; i < count; i += 2) { - if (flag[i]) - ans.append("[" + v[i] + ","); - else - ans.append("(" + v[i] + ","); - if (flag[i + 1]) - ans.append(v[i + 1] + "]"); - else - ans.append(v[i + 1] + ")"); - } - return ans.toString(); + public String toString() { + StringBuffer ans = new StringBuffer(); + for (int i = 0; i < count; i += 2) { + if (flag[i]) + 
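A brief usage sketch for DigestForFilter (illustrative values only, assuming just the constructors and getters shown above): a digest built from raw long statistics reports INT64 and hands back boxed values.

    DigestForFilter digest = new DigestForFilter(100L, 200L);
    Long min = digest.getMinValue();            // 100
    Long max = digest.getMaxValue();            // 200
    TSDataType type = digest.getType();         // TSDataType.INT64
    Class<?> typeClass = digest.getTypeClass(); // Long.class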
ans.append("[" + v[i] + ","); + else + ans.append("(" + v[i] + ","); + if (flag[i + 1]) + ans.append(v[i + 1] + "]"); + else + ans.append(v[i + 1] + ")"); } + return ans.toString(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/FilterUtils.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/FilterUtils.java index 049987e0..e8c0eec4 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/FilterUtils.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/FilterUtils.java @@ -10,294 +10,313 @@ import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.CSOr; import cn.edu.tsinghua.tsfile.timeseries.read.RecordReader; import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.*; - import java.io.IOException; import java.util.ArrayList; import java.util.List; public class FilterUtils { - private static final char PATH_SPLITER = '.'; - - //exp-format:deltaObject,measurement,type,exp - public static SingleSeriesFilterExpression construct(String exp, RecordReader recordReader) throws IOException{ - if (exp == null || exp.equals("null")) { - return null; - } - String args[] = exp.split(","); - if (args[0].equals("0") || args[0].equals("1")) { - return construct("null", "null", args[0], args[1], recordReader); - } - String s = args[1]; - String deltaObject = s.substring(0, s.lastIndexOf(PATH_SPLITER)); - String measurement = s.substring(s.lastIndexOf(PATH_SPLITER) + 1); - return construct(deltaObject, measurement, args[0], args[2], recordReader); + private static final char PATH_SPLITER = '.'; + // exp-format:deltaObject,measurement,type,exp + public static SingleSeriesFilterExpression construct(String exp, RecordReader recordReader) + throws IOException { + if (exp == null || exp.equals("null")) { + return null; } - - public static SingleSeriesFilterExpression construct(String deltaObject, String measurement, String filterType, - String exp, RecordReader recordReader) throws IOException{ - - if (exp.equals("null")) { - return null; - } - if (exp.charAt(0) != '(') { - boolean ifEq = exp.charAt(1) == '=' ? true : false; - int type = Integer.valueOf(filterType); - int offset = ifEq ? 
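The Interval subclasses store endpoints pairwise, with flag[i] marking a closed endpoint; a short illustrative sketch (not part of the diff) of addValueFlag and toString:

    DoubleInterval interval = new DoubleInterval();
    interval.addValueFlag(1.0, true);   // closed lower endpoint
    interval.addValueFlag(5.0, false);  // open upper endpoint
    System.out.println(interval);       // prints [1.0,5.0)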
2 : 1; - if (exp.charAt(0) == '=') { - if (type == 0) { - long v = Long.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.eq(FilterFactory.longFilterSeries(deltaObject, measurement, FilterSeriesType.TIME_FILTER), v); - } else if (type == 1) { - float v = Float.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.eq(FilterFactory.floatFilterSeries(deltaObject, measurement, FilterSeriesType.FREQUENCY_FILTER), v); - } else { - if (recordReader == null) { - int v = Integer.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.eq(FilterFactory.intFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v); - } - FilterSeries col = recordReader.getColumnByMeasurementName(deltaObject, measurement); - if (col instanceof IntFilterSeries) { - int v = Integer.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.eq(FilterFactory.intFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v); - } else if (col instanceof BooleanFilterSeries) { - boolean v = Boolean.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.eq(FilterFactory.booleanFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v); - } else if (col instanceof LongFilterSeries) { - long v = Long.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.eq(FilterFactory.longFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v); - } else if (col instanceof FloatFilterSeries) { - float v = Float.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.eq(FilterFactory.floatFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v); - } else if (col instanceof DoubleFilterSeries) { - double v = Double.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.eq(FilterFactory.doubleFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v); - } else if (col instanceof StringFilterSeries) { - String v = String.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.eq(FilterFactory.stringFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), new Binary(v)); - } else { - throw new UnSupportedDataTypeException("Construct FilterSeries: " + col); - } - - } - } else if (exp.charAt(0) == '>') { - if (type == 0) { - long v = Long.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObject, measurement, FilterSeriesType.TIME_FILTER), v, ifEq); - } else if (type == 1) { - float v = Float.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObject, measurement, FilterSeriesType.FREQUENCY_FILTER), v, ifEq); - } else { - if (recordReader == null) { - int v = Integer.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v, ifEq); - } - FilterSeries col = recordReader.getColumnByMeasurementName(deltaObject, measurement); - if (col instanceof IntFilterSeries) { - int v = Integer.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v, ifEq); - } else if (col instanceof LongFilterSeries) { - long v = Long.valueOf(exp.substring(offset, exp.length()).trim()); - return 
FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v, ifEq); - } else if (col instanceof FloatFilterSeries) { - float v = Float.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v, ifEq); - } else if (col instanceof DoubleFilterSeries) { - double v = Double.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v, ifEq); - } else if (col instanceof StringFilterSeries) { - String v = String.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.gtEq(FilterFactory.stringFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), new Binary(v), ifEq); - } else { - throw new UnSupportedDataTypeException("Construct FilterSeries: " + col); - } - - } - } else if (exp.charAt(0) == '<') { - if (type == 0) { - long v = Long.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObject, measurement, FilterSeriesType.TIME_FILTER), v, ifEq); - } else if (type == 1) { - float v = Float.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObject, measurement, FilterSeriesType.FREQUENCY_FILTER), v, ifEq); - } else { - //default filter - if (recordReader == null) { - int v = Integer.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v, ifEq); - } - FilterSeries col = recordReader.getColumnByMeasurementName(deltaObject, measurement); - if (col instanceof IntFilterSeries) { - int v = Integer.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v, ifEq); - } else if (col instanceof LongFilterSeries) { - long v = Long.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v, ifEq); - } else if (col instanceof FloatFilterSeries) { - float v = Float.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v, ifEq); - } else if (col instanceof DoubleFilterSeries) { - double v = Double.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), v, ifEq); - } else if (col instanceof StringFilterSeries) { - String v = String.valueOf(exp.substring(offset, exp.length()).trim()); - return FilterFactory.ltEq(FilterFactory.stringFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER), new Binary(v), ifEq); - } else { - throw new UnSupportedDataTypeException("Construct FilterSeries: " + col); - } - } - // long v = Long.valueOf(exp.substring(offset,exp.length()).trim()); - // return FilterFactory.ltEq(FilterFactory.longColumn(deltaObject, measurement, ifTime), v, ifEq); - } - return null; - } - - - List operators = new ArrayList(); - List filters = new ArrayList<>(); - - int idx = 0; - int numbracket = 0; - boolean ltgtFlag = false; - boolean operFlag = false; - - String texp = ""; - - for (; idx < 
exp.length(); idx++) { - char c = exp.charAt(idx); - if (Character.isWhitespace(c) || c == '\0') { - continue; - } - if (c == '(') { - numbracket++; - } - if (c == ')') { - numbracket--; - } - if (c == '>' || c == '<') { - ltgtFlag = true; - } - if (numbracket == 0 && (c == '|' || c == '&')) { - operFlag = true; - } - - if (ltgtFlag && numbracket == 0 && operFlag) { - SingleSeriesFilterExpression filter = construct(deltaObject, measurement, filterType, - texp.substring(1, texp.length() - 1), recordReader); - filters.add(filter); - operators.add(c); - numbracket = 0; - ltgtFlag = false; - operFlag = false; - texp = ""; - } else { - texp += c; - } - } - if (!texp.equals("")) { - filters.add(construct(deltaObject, measurement, filterType, texp.substring(1, texp.length() - 1), recordReader)); - } - - if (filters.size() - operators.size() != 1) { - return null; - } - - SingleSeriesFilterExpression filter = filters.get(0); - for (int i = 0; i < operators.size(); i++) { - if (operators.get(i) == '|') { - filter = (SingleSeriesFilterExpression) FilterFactory.or(filter, filters.get(i + 1)); - } else if (operators.get(i) == '&') { - filter = (SingleSeriesFilterExpression) FilterFactory.and(filter, filters.get(i + 1)); - } - } - - return filter; + String args[] = exp.split(","); + if (args[0].equals("0") || args[0].equals("1")) { + return construct("null", "null", args[0], args[1], recordReader); } + String s = args[1]; + String deltaObject = s.substring(0, s.lastIndexOf(PATH_SPLITER)); + String measurement = s.substring(s.lastIndexOf(PATH_SPLITER) + 1); + return construct(deltaObject, measurement, args[0], args[2], recordReader); - public static FilterExpression constructCrossFilter(String exp, RecordReader recordReader) throws IOException { - exp = exp.trim(); + } - if (exp.equals("null")) { - return null; - } + public static SingleSeriesFilterExpression construct(String deltaObject, String measurement, + String filterType, String exp, RecordReader recordReader) throws IOException { - if (exp.charAt(0) != '[') { - return construct(exp, recordReader); - } + if (exp.equals("null")) { + return null; + } + if (exp.charAt(0) != '(') { + boolean ifEq = exp.charAt(1) == '=' ? true : false; + int type = Integer.valueOf(filterType); + int offset = ifEq ? 
2 : 1; + if (exp.charAt(0) == '=') { + if (type == 0) { + long v = Long.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.eq(FilterFactory.longFilterSeries(deltaObject, measurement, + FilterSeriesType.TIME_FILTER), v); + } else if (type == 1) { + float v = Float.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.eq(FilterFactory.floatFilterSeries(deltaObject, measurement, + FilterSeriesType.FREQUENCY_FILTER), v); + } else { + if (recordReader == null) { + int v = Integer.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.eq(FilterFactory.intFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v); + } + FilterSeries col = recordReader.getColumnByMeasurementName(deltaObject, measurement); + if (col instanceof IntFilterSeries) { + int v = Integer.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.eq(FilterFactory.intFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v); + } else if (col instanceof BooleanFilterSeries) { + boolean v = Boolean.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.eq(FilterFactory.booleanFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v); + } else if (col instanceof LongFilterSeries) { + long v = Long.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.eq(FilterFactory.longFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v); + } else if (col instanceof FloatFilterSeries) { + float v = Float.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.eq(FilterFactory.floatFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v); + } else if (col instanceof DoubleFilterSeries) { + double v = Double.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.eq(FilterFactory.doubleFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v); + } else if (col instanceof StringFilterSeries) { + String v = String.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.eq(FilterFactory.stringFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), new Binary(v)); + } else { + throw new UnSupportedDataTypeException("Construct FilterSeries: " + col); + } - int numbraket = 0; - boolean operator = false; - ArrayList filters = new ArrayList<>(); - ArrayList operators = new ArrayList<>(); - String texp = ""; - - for (int i = 0; i < exp.length(); i++) { - char c = exp.charAt(i); - - if (Character.isWhitespace(c) || c == '\0') { - continue; - } - - if (c == '[') { - numbraket++; - } - if (c == ']') { - numbraket--; - } - if (numbraket == 0 && (c == '|' || c == '&')) { - operator = true; - } - - if (numbraket == 0 && operator) { -// System.out.println(texp); -// System.out.println(texp.length()); - FilterExpression filter = constructCrossFilter(texp.substring(1, texp.length() - 1), recordReader); - filters.add(filter); - operators.add(c); - - numbraket = 0; - operator = false; - texp = ""; - } else { - texp += c; - } - } - if (!texp.equals("")) { - filters.add(constructCrossFilter(texp.substring(1, texp.length() - 1), recordReader)); } + } else if (exp.charAt(0) == '>') { + if (type == 0) { + long v = Long.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObject, measurement, + FilterSeriesType.TIME_FILTER), v, ifEq); + } else if (type == 1) { + float v = 
Float.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObject, measurement, + FilterSeriesType.FREQUENCY_FILTER), v, ifEq); + } else { + if (recordReader == null) { + int v = Integer.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v, ifEq); + } + FilterSeries col = recordReader.getColumnByMeasurementName(deltaObject, measurement); + if (col instanceof IntFilterSeries) { + int v = Integer.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v, ifEq); + } else if (col instanceof LongFilterSeries) { + long v = Long.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v, ifEq); + } else if (col instanceof FloatFilterSeries) { + float v = Float.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v, ifEq); + } else if (col instanceof DoubleFilterSeries) { + double v = Double.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v, ifEq); + } else if (col instanceof StringFilterSeries) { + String v = String.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.gtEq(FilterFactory.stringFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), new Binary(v), ifEq); + } else { + throw new UnSupportedDataTypeException("Construct FilterSeries: " + col); + } - if (operators.size() == 0) { - //Warning TODO - return new CSAnd(filters.get(0), filters.get(0)); } - - CrossSeriesFilterExpression csf; - if (operators.get(0) == '|') { - csf = new CSOr(filters.get(0), filters.get(1)); + } else if (exp.charAt(0) == '<') { + if (type == 0) { + long v = Long.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObject, measurement, + FilterSeriesType.TIME_FILTER), v, ifEq); + } else if (type == 1) { + float v = Float.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObject, measurement, + FilterSeriesType.FREQUENCY_FILTER), v, ifEq); } else { - csf = new CSAnd(filters.get(0), filters.get(1)); + // default filter + if (recordReader == null) { + int v = Integer.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v, ifEq); + } + FilterSeries col = recordReader.getColumnByMeasurementName(deltaObject, measurement); + if (col instanceof IntFilterSeries) { + int v = Integer.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v, ifEq); + } else if (col instanceof LongFilterSeries) { + long v = Long.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v, ifEq); + } else if (col instanceof FloatFilterSeries) { + float v = 
Float.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v, ifEq); + } else if (col instanceof DoubleFilterSeries) { + double v = Double.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), v, ifEq); + } else if (col instanceof StringFilterSeries) { + String v = String.valueOf(exp.substring(offset, exp.length()).trim()); + return FilterFactory.ltEq(FilterFactory.stringFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER), new Binary(v), ifEq); + } else { + throw new UnSupportedDataTypeException("Construct FilterSeries: " + col); + } } + // long v = Long.valueOf(exp.substring(offset,exp.length()).trim()); + // return FilterFactory.ltEq(FilterFactory.longColumn(deltaObject, measurement, ifTime), v, + // ifEq); + } + return null; + } - for (int i = 2; i < filters.size(); i++) { - if (operators.get(i - 1) == '|') { - csf = new CSOr(csf, filters.get(i)); - } else { - csf = new CSAnd(csf, filters.get(i)); - } - } - return csf; + + List operators = new ArrayList(); + List filters = new ArrayList<>(); + + int idx = 0; + int numbracket = 0; + boolean ltgtFlag = false; + boolean operFlag = false; + + String texp = ""; + + for (; idx < exp.length(); idx++) { + char c = exp.charAt(idx); + if (Character.isWhitespace(c) || c == '\0') { + continue; + } + if (c == '(') { + numbracket++; + } + if (c == ')') { + numbracket--; + } + if (c == '>' || c == '<') { + ltgtFlag = true; + } + if (numbracket == 0 && (c == '|' || c == '&')) { + operFlag = true; + } + + if (ltgtFlag && numbracket == 0 && operFlag) { + SingleSeriesFilterExpression filter = construct(deltaObject, measurement, filterType, + texp.substring(1, texp.length() - 1), recordReader); + filters.add(filter); + operators.add(c); + numbracket = 0; + ltgtFlag = false; + operFlag = false; + texp = ""; + } else { + texp += c; + } + } + if (!texp.equals("")) { + filters.add(construct(deltaObject, measurement, filterType, + texp.substring(1, texp.length() - 1), recordReader)); } -} + if (filters.size() - operators.size() != 1) { + return null; + } + SingleSeriesFilterExpression filter = filters.get(0); + for (int i = 0; i < operators.size(); i++) { + if (operators.get(i) == '|') { + filter = (SingleSeriesFilterExpression) FilterFactory.or(filter, filters.get(i + 1)); + } else if (operators.get(i) == '&') { + filter = (SingleSeriesFilterExpression) FilterFactory.and(filter, filters.get(i + 1)); + } + } + return filter; + } + public static FilterExpression constructCrossFilter(String exp, RecordReader recordReader) + throws IOException { + exp = exp.trim(); + if (exp.equals("null")) { + return null; + } + if (exp.charAt(0) != '[') { + return construct(exp, recordReader); + } + int numbraket = 0; + boolean operator = false; + ArrayList filters = new ArrayList<>(); + ArrayList operators = new ArrayList<>(); + String texp = ""; + + for (int i = 0; i < exp.length(); i++) { + char c = exp.charAt(i); + + if (Character.isWhitespace(c) || c == '\0') { + continue; + } + + if (c == '[') { + numbraket++; + } + if (c == ']') { + numbraket--; + } + if (numbraket == 0 && (c == '|' || c == '&')) { + operator = true; + } + + if (numbraket == 0 && operator) { + // System.out.println(texp); + // System.out.println(texp.length()); + FilterExpression filter = + constructCrossFilter(texp.substring(1, texp.length() - 1), 
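For orientation, a hedged sketch of the expression strings this construct(String, RecordReader) overload parses: the leading token selects the filter type (0 = time, 1 = frequency, anything else = a value filter on the given path), followed by a parenthesized comparison. The concrete strings below are made-up examples, and both calls declare IOException.

    // time >= 1000 (time filters need no RecordReader)
    SingleSeriesFilterExpression timeFilter = FilterUtils.construct("0,(>=1000)", null);
    // value filter s1 > 10 on device d1; with a null RecordReader the value is parsed as int
    SingleSeriesFilterExpression valueFilter = FilterUtils.construct("2,d1.s1,(>10)", null);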
recordReader); + filters.add(filter); + operators.add(c); + + numbraket = 0; + operator = false; + texp = ""; + } else { + texp += c; + } + } + if (!texp.equals("")) { + filters.add(constructCrossFilter(texp.substring(1, texp.length() - 1), recordReader)); + } + if (operators.size() == 0) { + // Warning TODO + return new CSAnd(filters.get(0), filters.get(0)); + } + CrossSeriesFilterExpression csf; + if (operators.get(0) == '|') { + csf = new CSOr(filters.get(0), filters.get(1)); + } else { + csf = new CSAnd(filters.get(0), filters.get(1)); + } + for (int i = 2; i < filters.size(); i++) { + if (operators.get(i - 1) == '|') { + csf = new CSOr(csf, filters.get(i)); + } else { + csf = new CSAnd(csf, filters.get(i)); + } + } + return csf; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/FloatInterval.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/FloatInterval.java index 9811b9c6..182ec352 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/FloatInterval.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/FloatInterval.java @@ -10,32 +10,32 @@ * @author CGF */ public class FloatInterval extends Interval { - private static final Logger LOG = LoggerFactory.getLogger(FloatInterval.class); + private static final Logger LOG = LoggerFactory.getLogger(FloatInterval.class); - public float[] v = new float[arrayMaxn]; + public float[] v = new float[arrayMaxn]; - public void addValueFlag(float value, boolean f) { - if (count >= arrayMaxn - 2) { - LOG.error("IntInterval array length spill."); - throw new FilterInvokeException("FloatInterval array length spill."); - } - v[count] = value; - flag[count] = f; - count++; + public void addValueFlag(float value, boolean f) { + if (count >= arrayMaxn - 2) { + LOG.error("IntInterval array length spill."); + throw new FilterInvokeException("FloatInterval array length spill."); } + v[count] = value; + flag[count] = f; + count++; + } - public String toString() { - StringBuffer ans = new StringBuffer(); - for (int i = 0; i < count; i += 2) { - if (flag[i]) - ans.append("[" + v[i] + ","); - else - ans.append("(" + v[i] + ","); - if (flag[i + 1]) - ans.append(v[i + 1] + "]"); - else - ans.append(v[i + 1] + ")"); - } - return ans.toString(); + public String toString() { + StringBuffer ans = new StringBuffer(); + for (int i = 0; i < count; i += 2) { + if (flag[i]) + ans.append("[" + v[i] + ","); + else + ans.append("(" + v[i] + ","); + if (flag[i + 1]) + ans.append(v[i + 1] + "]"); + else + ans.append(v[i + 1] + ")"); } + return ans.toString(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/IntInterval.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/IntInterval.java index 922595e5..7e0c2e06 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/IntInterval.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/IntInterval.java @@ -10,34 +10,34 @@ * @author CGF */ public class IntInterval extends Interval { - private static final Logger LOG = LoggerFactory.getLogger(IntInterval.class); + private static final Logger LOG = LoggerFactory.getLogger(IntInterval.class); - // int value array - public int[] v = new int[arrayMaxn]; + // int value array + public int[] v = new int[arrayMaxn]; - public void addValueFlag(int value, boolean f) { - if (count >= arrayMaxn - 2) { - LOG.error("IntInterval array length spill."); - throw new FilterInvokeException("IntInterval array length spill."); - } - v[count] 
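constructCrossFilter groups bracketed single-series expressions with '&' and '|' into CSAnd/CSOr trees. A hypothetical call in the same format (illustrative only; it also declares IOException):

    // [time >= 1000] AND [d1.s1 > 10]
    FilterExpression cross =
        FilterUtils.constructCrossFilter("[0,(>=1000)]&[2,d1.s1,(>10)]", null);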
= value; - flag[count] = f; - count++; + public void addValueFlag(int value, boolean f) { + if (count >= arrayMaxn - 2) { + LOG.error("IntInterval array length spill."); + throw new FilterInvokeException("IntInterval array length spill."); } + v[count] = value; + flag[count] = f; + count++; + } - public String toString() { - StringBuffer ans = new StringBuffer(); - for (int i = 0; i < count; i += 2) { - if (flag[i]) - ans.append("[" + v[i] + ","); - else - ans.append("(" + v[i] + ","); - if (flag[i + 1]) - ans.append(v[i + 1] + "]"); - else - ans.append(v[i + 1] + ")"); - } - return ans.toString(); + public String toString() { + StringBuffer ans = new StringBuffer(); + for (int i = 0; i < count; i += 2) { + if (flag[i]) + ans.append("[" + v[i] + ","); + else + ans.append("(" + v[i] + ","); + if (flag[i + 1]) + ans.append(v[i + 1] + "]"); + else + ans.append(v[i + 1] + ")"); } + return ans.toString(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/Interval.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/Interval.java index f5cf4779..4be08d86 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/Interval.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/Interval.java @@ -11,16 +11,16 @@ * @author CGF */ public abstract class Interval { - // value array max num - protected static final int arrayMaxn = 100; - // visit array to judge whether pos[i] and pos[i+1] could be reached - // flag[i]=true represents that v[i] could be reached - // flag[i]=false represents that v[i] could not be reached - public boolean[] flag = new boolean[arrayMaxn]; - // to identify the last position of array - public int count = 0; + // value array max num + protected static final int arrayMaxn = 100; + // visit array to judge whether pos[i] and pos[i+1] could be reached + // flag[i]=true represents that v[i] could be reached + // flag[i]=false represents that v[i] could not be reached + public boolean[] flag = new boolean[arrayMaxn]; + // to identify the last position of array + public int count = 0; - public Interval() { - this.count = 0; - } + public Interval() { + this.count = 0; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/LongInterval.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/LongInterval.java index 0f33d0e7..59de4b49 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/LongInterval.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/LongInterval.java @@ -10,33 +10,33 @@ * @author CGF */ public class LongInterval extends Interval { - private static final Logger LOG = LoggerFactory.getLogger(LongInterval.class); + private static final Logger LOG = LoggerFactory.getLogger(LongInterval.class); - // long value array - public long[] v = new long[arrayMaxn]; + // long value array + public long[] v = new long[arrayMaxn]; - public void addValueFlag(long value, boolean f) { - if (count >= arrayMaxn - 2) { - LOG.error("IntInterval array length spill."); - throw new FilterInvokeException("LongInterval array length spill."); - } - v[count] = value; - flag[count] = f; - count++; + public void addValueFlag(long value, boolean f) { + if (count >= arrayMaxn - 2) { + LOG.error("IntInterval array length spill."); + throw new FilterInvokeException("LongInterval array length spill."); } + v[count] = value; + flag[count] = f; + count++; + } - public String toString() { - StringBuffer ans = new StringBuffer(); - for (int i = 0; i < count; i 
+= 2) { - if (flag[i]) - ans.append("[" + v[i] + ","); - else - ans.append("(" + v[i] + ","); - if (flag[i + 1]) - ans.append(v[i + 1] + "]"); - else - ans.append(v[i + 1] + ")"); - } - return ans.toString(); + public String toString() { + StringBuffer ans = new StringBuffer(); + for (int i = 0; i < count; i += 2) { + if (flag[i]) + ans.append("[" + v[i] + ","); + else + ans.append("(" + v[i] + ","); + if (flag[i + 1]) + ans.append(v[i + 1] + "]"); + else + ans.append(v[i + 1] + ")"); } + return ans.toString(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/StrDigestForFilter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/StrDigestForFilter.java index 1eedb08d..79bf9868 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/StrDigestForFilter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/utils/StrDigestForFilter.java @@ -4,91 +4,92 @@ import cn.edu.tsinghua.tsfile.common.utils.Binary; import cn.edu.tsinghua.tsfile.common.utils.BytesUtils; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; - import java.nio.ByteBuffer; /** - * @author JT - * class to construct string digest. + * @author JT class to construct string digest. */ -public class StrDigestForFilter extends DigestForFilter{ - private String min = null; - private String max = null; - private TSDataType type; +public class StrDigestForFilter extends DigestForFilter { + private String min = null; + private String max = null; + private TSDataType type; - public StrDigestForFilter(String min, String max, TSDataType type) { - super(); - this.min = min; - this.max = max; - this.type = type; - } + public StrDigestForFilter(String min, String max, TSDataType type) { + super(); + this.min = min; + this.max = max; + this.type = type; + } - public StrDigestForFilter(long minv, long maxv) { - this.min = String.valueOf(minv); - this.max = String.valueOf(maxv); - this.type = TSDataType.INT64; - } + public StrDigestForFilter(long minv, long maxv) { + this.min = String.valueOf(minv); + this.max = String.valueOf(maxv); + this.type = TSDataType.INT64; + } - @SuppressWarnings("unchecked") - public > T getMinValue() { - switch (type) { - case INT32: - return (T) ((Integer) Integer.parseInt(min)); - case INT64: - return (T) ((Long) Long.parseLong(min)); - case FLOAT: - return (T) ((Float) Float.parseFloat(min)); - case DOUBLE: - return (T) ((Double) Double.parseDouble(min)); - case TEXT: - return (T) new Binary(min); - case BOOLEAN: - return (T) ((Boolean) Boolean.parseBoolean(min)); - default: - throw new UnSupportFilterDataTypeException("DigestForFilter unsupported datatype : " + type.toString()); - } + @SuppressWarnings("unchecked") + public > T getMinValue() { + switch (type) { + case INT32: + return (T) ((Integer) Integer.parseInt(min)); + case INT64: + return (T) ((Long) Long.parseLong(min)); + case FLOAT: + return (T) ((Float) Float.parseFloat(min)); + case DOUBLE: + return (T) ((Double) Double.parseDouble(min)); + case TEXT: + return (T) new Binary(min); + case BOOLEAN: + return (T) ((Boolean) Boolean.parseBoolean(min)); + default: + throw new UnSupportFilterDataTypeException( + "DigestForFilter unsupported datatype : " + type.toString()); } + } - @SuppressWarnings("unchecked") - public > T getMaxValue() { - switch (type) { - case INT32: - return (T) ((Integer) Integer.parseInt(max)); - case INT64: - return (T) ((Long) Long.parseLong(max)); - case FLOAT: - return (T) ((Float) Float.parseFloat(max)); - case DOUBLE: - return (T) ((Double) 
Double.parseDouble(max)); - case TEXT: - return (T) new Binary(max); - case BOOLEAN: - return (T) ((Boolean) Boolean.parseBoolean(max)); - default: - throw new UnSupportFilterDataTypeException("DigestForFilter unsupported datatype : " + type.toString()); - } + @SuppressWarnings("unchecked") + public > T getMaxValue() { + switch (type) { + case INT32: + return (T) ((Integer) Integer.parseInt(max)); + case INT64: + return (T) ((Long) Long.parseLong(max)); + case FLOAT: + return (T) ((Float) Float.parseFloat(max)); + case DOUBLE: + return (T) ((Double) Double.parseDouble(max)); + case TEXT: + return (T) new Binary(max); + case BOOLEAN: + return (T) ((Boolean) Boolean.parseBoolean(max)); + default: + throw new UnSupportFilterDataTypeException( + "DigestForFilter unsupported datatype : " + type.toString()); } + } - public Class getTypeClass() { - switch (type) { - case INT32: - return Integer.class; - case INT64: - return Long.class; - case FLOAT: - return Float.class; - case DOUBLE: - return Double.class; - case TEXT: - return String.class; - case BOOLEAN: - return Boolean.class; - default: - throw new UnSupportFilterDataTypeException("DigestForFilter unsupported datatype : " + type.toString()); - } + public Class getTypeClass() { + switch (type) { + case INT32: + return Integer.class; + case INT64: + return Long.class; + case FLOAT: + return Float.class; + case DOUBLE: + return Double.class; + case TEXT: + return String.class; + case BOOLEAN: + return Boolean.class; + default: + throw new UnSupportFilterDataTypeException( + "DigestForFilter unsupported datatype : " + type.toString()); } + } - public TSDataType getType() { - return type; - } + public TSDataType getType() { + return type; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/DoubleFilterVerifier.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/DoubleFilterVerifier.java index 7c6fc819..9f1869f2 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/DoubleFilterVerifier.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/DoubleFilterVerifier.java @@ -1,300 +1,301 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter.verifier; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; -import cn.edu.tsinghua.tsfile.timeseries.filter.utils.DoubleInterval; -import cn.edu.tsinghua.tsfile.timeseries.filter.utils.Interval; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.ConvertExpressionVisitor; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; - -/** - * @author CGF - */ -public class DoubleFilterVerifier extends FilterVerifier implements FilterVisitor { - private Double DOUBLE_MIN_VALUE = -Double.MAX_VALUE; - - private ConvertExpressionVisitor convertor = new ConvertExpressionVisitor(); - - @Override - public Interval getInterval(SingleSeriesFilterExpression filter) { - if (filter == null) { - DoubleInterval ans = new DoubleInterval(); - ans.addValueFlag(DOUBLE_MIN_VALUE, true); - ans.addValueFlag(Double.MAX_VALUE, true); - return ans; - } - - return filter.accept(this); - } - - @Override - public > DoubleInterval visit(Eq eq) { - DoubleInterval ans = new DoubleInterval(); - ans.v[0] = ((Double) eq.getValue()).doubleValue(); - ans.v[1] = ((Double) eq.getValue()).doubleValue(); - ans.flag[0] = true; - ans.flag[1] = true; - 
ans.count = 2; - return ans; - } - - @Override - public > DoubleInterval visit(NotEq notEq) { - DoubleInterval ans = new DoubleInterval(); - ans.v[0] = DOUBLE_MIN_VALUE; - ans.v[1] = ((Double) notEq.getValue()).doubleValue(); - ans.v[2] = ((Double) notEq.getValue()).doubleValue(); - ans.v[3] = Double.MAX_VALUE; - - if ((Double) notEq.getValue() == DOUBLE_MIN_VALUE) { - ans.flag[0] = false; - ans.flag[1] = false; - ans.flag[2] = false; - ans.flag[3] = true; - } else if ((Double) notEq.getValue() == Double.MAX_VALUE) { - ans.flag[0] = true; - ans.flag[1] = false; - ans.flag[2] = false; - ans.flag[3] = false; - } else { - ans.flag[0] = true; - ans.flag[1] = false; - ans.flag[2] = false; - ans.flag[3] = true; - } - - ans.count = 4; - return ans; - } - - @Override - public > DoubleInterval visit(LtEq ltEq) { - DoubleInterval ans = new DoubleInterval(); - if (ltEq.ifEq) { - ans.v[1] = ((Double) ltEq.getValue()).doubleValue(); - ans.flag[1] = true; - } else { - ans.v[1] = ((Double) ltEq.getValue()).doubleValue(); - ans.flag[1] = false; - } - - if (ans.v[1] == DOUBLE_MIN_VALUE && !ans.flag[1]) - ans.flag[0] = false; - else - ans.flag[0] = true; - ans.v[0] = DOUBLE_MIN_VALUE; - - ans.count = 2; - return ans; - } - - @Override - public > DoubleInterval visit(GtEq gtEq) { - DoubleInterval ans = new DoubleInterval(); - if (gtEq.ifEq) { - ans.v[0] = ((Double) gtEq.getValue()).doubleValue(); - ans.flag[0] = true; - } else { - ans.v[0] = ((Double) gtEq.getValue()).doubleValue(); - ans.flag[0] = false; - } - - ans.v[1] = Double.MAX_VALUE; - if (ans.v[0] == Double.MAX_VALUE && !ans.flag[0]) - ans.flag[1] = false; - else - ans.flag[1] = true; - - ans.count = 2; - return ans; - } - - @Override - public DoubleInterval visit(Not not) { - return visit(convertor.convert(not)); - } - - public DoubleInterval visit(FilterExpression filter) { - if (filter instanceof Eq) - return visit((Eq) filter); - else if (filter instanceof NotEq) - return visit((NotEq) filter); - else if (filter instanceof LtEq) - return visit((LtEq) filter); - else if (filter instanceof GtEq) - return visit((GtEq) filter); - else if (filter instanceof And) - return visit((And) filter); - else if (filter instanceof Or) - return visit((Or) filter); - return null; - } - - @Override - public DoubleInterval visit(And and) { - return intersection(visit(and.getLeft()), visit(and.getRight())); - } - - @Override - public DoubleInterval visit(Or or) { - return union(visit(or.getLeft()), visit(or.getRight())); - } - - @Override - public DoubleInterval visit(NoFilter noFilter) { - DoubleInterval ans = new DoubleInterval(); - ans.v[0] = DOUBLE_MIN_VALUE; - ans.flag[0] = true; - ans.v[1] = Double.MAX_VALUE; - ans.flag[1] = true; - return ans; - } - - private DoubleInterval intersection(DoubleInterval left, DoubleInterval right) { - DoubleInterval ans = new DoubleInterval(); - DoubleInterval partResult = new DoubleInterval(); - - for (int i = 0; i < left.count; i += 2) { - for (int j = 0; j < right.count; j += 2) { - if (left.v[i + 1] <= right.v[j]) { - if (left.v[i + 1] == right.v[j] && left.flag[i + 1] - && right.flag[j]) { - partResult.addValueFlag(left.v[i + 1], true); - partResult.addValueFlag(left.v[i + 1], true); - } else { - break; - } - } else if (left.v[i] >= right.v[j + 1]) { - if (left.v[i] == right.v[j + 1] && (left.flag[i] && right.flag[j + 1])) { - partResult.addValueFlag(left.v[i], true); - partResult.addValueFlag(left.v[i], true); - } - } else { - if (left.v[i] > right.v[j]) { - partResult.addValueFlag(left.v[i], left.flag[i]); - } else { - 
partResult.addValueFlag(right.v[j], right.flag[j]); - } - if (left.v[i + 1] > right.v[j + 1]) { - partResult.addValueFlag(right.v[j + 1], right.flag[j + 1]); - } else { - partResult.addValueFlag(left.v[i + 1], left.flag[i + 1]); - } - } - } - - for (int cnt = 0; cnt < partResult.count; cnt++) { - ans.addValueFlag(partResult.v[cnt], partResult.flag[cnt]); - } - partResult.count = 0; - } - - return ans; - } - - private DoubleInterval union(DoubleInterval left, DoubleInterval right) { - int l = 0, r = 0; - DoubleInterval res = new DoubleInterval(); - while (l < left.count || r < right.count) { - if (l >= left.count) { // only right has unmerged data, all right data should be added to ans - for (int i = r; i < right.count; i += 2) { - res.addValueFlag(right.v[i], right.flag[i]); - res.addValueFlag(right.v[i + 1], right.flag[i + 1]); - } - break; - } - if (r >= right.count) { // only left has unmerged data, all left data should be added to ans - for (int i = l; i < left.count; i += 2) { - res.addValueFlag(left.v[i], left.flag[i]); - res.addValueFlag(left.v[i + 1], left.flag[i + 1]); - } - break; - } - - if (left.v[l] >= right.v[r + 1]) { // right first - res.addValueFlag(right.v[r], right.flag[r]); - res.addValueFlag(right.v[r + 1], right.flag[r + 1]); - r += 2; - } else if (left.v[l] >= right.v[r] && left.v[l] <= right.v[r + 1] && left.v[l + 1] >= right.v[r + 1]) { // right first cross - if (left.v[l] == right.v[r]) { - res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); - } else { - res.addValueFlag(right.v[r], right.flag[r]); - } - if (left.v[l + 1] == right.v[r + 1]) { - res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); - l += 2; - r += 2; - } else { - res.addValueFlag(right.v[r + 1], right.flag[r + 1]); - left.v[l] = right.v[r + 1]; - left.flag[l] = !right.flag[r + 1]; - r += 2; - } - } else if (left.v[l] <= right.v[r] && left.v[l + 1] >= right.v[r + 1]) { // left covers right - res.addValueFlag(left.v[l], left.flag[l]); - if (left.v[l + 1] == right.v[r + 1]) { - res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); - l += 2; - r += 2; - } else { - res.addValueFlag(right.v[r + 1], right.flag[r + 1]); - left.v[l] = right.v[r + 1]; - left.flag[l] = !right.flag[r + 1]; - r += 2; - } - } else if (right.v[r] >= left.v[l] && right.v[r] <= left.v[l + 1] && left.v[l + 1] <= right.v[r + 1]) { // left first cross - if (left.v[l] == right.v[r]) { - res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); - } else { - res.addValueFlag(left.v[l], left.flag[l]); - } - // left covers right contains (left.v[l+1]==right.v[r+1]) - res.addValueFlag(left.v[l + 1], left.flag[l + 1]); - if (left.v[l + 1] == right.v[r]) { - right.v[r] = left.v[l + 1]; - right.flag[r] = left.flag[l + 1] | right.flag[r]; - l += 2; - } else { - right.v[r] = left.v[l + 1]; - right.flag[r] = !left.flag[l + 1]; - l += 2; - } - } else if (left.v[l + 1] <= right.v[r]) { // left first - res.addValueFlag(left.v[l], left.flag[l]); - res.addValueFlag(left.v[l + 1], left.flag[l + 1]); - l += 2; - } else { // right covers left - res.addValueFlag(right.v[r], right.flag[r]); - // right first cross contains (left.v[l+1] == right.v[r+1]) - res.addValueFlag(left.v[l + 1], left.flag[l + 1]); - right.v[r] = left.v[l + 1]; - right.flag[r] = !left.flag[l + 1]; - l += 2; - } - } - // merge same value into one - DoubleInterval ans = new DoubleInterval(); - if (res.count == 0) - return res; - ans.addValueFlag(res.v[0], res.flag[0]); - ans.addValueFlag(res.v[1], res.flag[1]); - for (int i = 2; i < res.count; 
i += 2) { - if (res.v[i] == ans.v[ans.count - 1] && (res.flag[i] || ans.flag[ans.count - 1])) { - if (res.v[i + 1] == ans.v[ans.count - 1]) { - ans.flag[ans.count - 1] = ans.flag[ans.count - 1] | res.flag[i + 1]; - } else { - ans.v[ans.count - 1] = res.v[i + 1]; - ans.flag[ans.count - 1] = res.flag[i + 1]; - } - } else { - ans.addValueFlag(res.v[i], res.flag[i]); - ans.addValueFlag(res.v[i + 1], res.flag[i + 1]); - } - } - return ans; - } -} - - +package cn.edu.tsinghua.tsfile.timeseries.filter.verifier; + +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.DoubleInterval; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.Interval; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.ConvertExpressionVisitor; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; + +/** + * @author CGF + */ +public class DoubleFilterVerifier extends FilterVerifier implements FilterVisitor { + private Double DOUBLE_MIN_VALUE = -Double.MAX_VALUE; + + private ConvertExpressionVisitor convertor = new ConvertExpressionVisitor(); + + @Override + public Interval getInterval(SingleSeriesFilterExpression filter) { + if (filter == null) { + DoubleInterval ans = new DoubleInterval(); + ans.addValueFlag(DOUBLE_MIN_VALUE, true); + ans.addValueFlag(Double.MAX_VALUE, true); + return ans; + } + + return filter.accept(this); + } + + @Override + public > DoubleInterval visit(Eq eq) { + DoubleInterval ans = new DoubleInterval(); + ans.v[0] = ((Double) eq.getValue()).doubleValue(); + ans.v[1] = ((Double) eq.getValue()).doubleValue(); + ans.flag[0] = true; + ans.flag[1] = true; + ans.count = 2; + return ans; + } + + @Override + public > DoubleInterval visit(NotEq notEq) { + DoubleInterval ans = new DoubleInterval(); + ans.v[0] = DOUBLE_MIN_VALUE; + ans.v[1] = ((Double) notEq.getValue()).doubleValue(); + ans.v[2] = ((Double) notEq.getValue()).doubleValue(); + ans.v[3] = Double.MAX_VALUE; + + if ((Double) notEq.getValue() == DOUBLE_MIN_VALUE) { + ans.flag[0] = false; + ans.flag[1] = false; + ans.flag[2] = false; + ans.flag[3] = true; + } else if ((Double) notEq.getValue() == Double.MAX_VALUE) { + ans.flag[0] = true; + ans.flag[1] = false; + ans.flag[2] = false; + ans.flag[3] = false; + } else { + ans.flag[0] = true; + ans.flag[1] = false; + ans.flag[2] = false; + ans.flag[3] = true; + } + + ans.count = 4; + return ans; + } + + @Override + public > DoubleInterval visit(LtEq ltEq) { + DoubleInterval ans = new DoubleInterval(); + if (ltEq.ifEq) { + ans.v[1] = ((Double) ltEq.getValue()).doubleValue(); + ans.flag[1] = true; + } else { + ans.v[1] = ((Double) ltEq.getValue()).doubleValue(); + ans.flag[1] = false; + } + + if (ans.v[1] == DOUBLE_MIN_VALUE && !ans.flag[1]) + ans.flag[0] = false; + else + ans.flag[0] = true; + ans.v[0] = DOUBLE_MIN_VALUE; + + ans.count = 2; + return ans; + } + + @Override + public > DoubleInterval visit(GtEq gtEq) { + DoubleInterval ans = new DoubleInterval(); + if (gtEq.ifEq) { + ans.v[0] = ((Double) gtEq.getValue()).doubleValue(); + ans.flag[0] = true; + } else { + ans.v[0] = ((Double) gtEq.getValue()).doubleValue(); + ans.flag[0] = false; + } + + ans.v[1] = Double.MAX_VALUE; + if (ans.v[0] == Double.MAX_VALUE && !ans.flag[0]) + ans.flag[1] = false; + else + ans.flag[1] = true; + + ans.count = 2; + return ans; + } + + @Override + 
public DoubleInterval visit(Not not) { + return visit(convertor.convert(not)); + } + + public DoubleInterval visit(FilterExpression filter) { + if (filter instanceof Eq) + return visit((Eq) filter); + else if (filter instanceof NotEq) + return visit((NotEq) filter); + else if (filter instanceof LtEq) + return visit((LtEq) filter); + else if (filter instanceof GtEq) + return visit((GtEq) filter); + else if (filter instanceof And) + return visit((And) filter); + else if (filter instanceof Or) + return visit((Or) filter); + return null; + } + + @Override + public DoubleInterval visit(And and) { + return intersection(visit(and.getLeft()), visit(and.getRight())); + } + + @Override + public DoubleInterval visit(Or or) { + return union(visit(or.getLeft()), visit(or.getRight())); + } + + @Override + public DoubleInterval visit(NoFilter noFilter) { + DoubleInterval ans = new DoubleInterval(); + ans.v[0] = DOUBLE_MIN_VALUE; + ans.flag[0] = true; + ans.v[1] = Double.MAX_VALUE; + ans.flag[1] = true; + return ans; + } + + private DoubleInterval intersection(DoubleInterval left, DoubleInterval right) { + DoubleInterval ans = new DoubleInterval(); + DoubleInterval partResult = new DoubleInterval(); + + for (int i = 0; i < left.count; i += 2) { + for (int j = 0; j < right.count; j += 2) { + if (left.v[i + 1] <= right.v[j]) { + if (left.v[i + 1] == right.v[j] && left.flag[i + 1] && right.flag[j]) { + partResult.addValueFlag(left.v[i + 1], true); + partResult.addValueFlag(left.v[i + 1], true); + } else { + break; + } + } else if (left.v[i] >= right.v[j + 1]) { + if (left.v[i] == right.v[j + 1] && (left.flag[i] && right.flag[j + 1])) { + partResult.addValueFlag(left.v[i], true); + partResult.addValueFlag(left.v[i], true); + } + } else { + if (left.v[i] > right.v[j]) { + partResult.addValueFlag(left.v[i], left.flag[i]); + } else { + partResult.addValueFlag(right.v[j], right.flag[j]); + } + if (left.v[i + 1] > right.v[j + 1]) { + partResult.addValueFlag(right.v[j + 1], right.flag[j + 1]); + } else { + partResult.addValueFlag(left.v[i + 1], left.flag[i + 1]); + } + } + } + + for (int cnt = 0; cnt < partResult.count; cnt++) { + ans.addValueFlag(partResult.v[cnt], partResult.flag[cnt]); + } + partResult.count = 0; + } + + return ans; + } + + private DoubleInterval union(DoubleInterval left, DoubleInterval right) { + int l = 0, r = 0; + DoubleInterval res = new DoubleInterval(); + while (l < left.count || r < right.count) { + if (l >= left.count) { // only right has unmerged data, all right data should be added to ans + for (int i = r; i < right.count; i += 2) { + res.addValueFlag(right.v[i], right.flag[i]); + res.addValueFlag(right.v[i + 1], right.flag[i + 1]); + } + break; + } + if (r >= right.count) { // only left has unmerged data, all left data should be added to ans + for (int i = l; i < left.count; i += 2) { + res.addValueFlag(left.v[i], left.flag[i]); + res.addValueFlag(left.v[i + 1], left.flag[i + 1]); + } + break; + } + + if (left.v[l] >= right.v[r + 1]) { // right first + res.addValueFlag(right.v[r], right.flag[r]); + res.addValueFlag(right.v[r + 1], right.flag[r + 1]); + r += 2; + } else if (left.v[l] >= right.v[r] && left.v[l] <= right.v[r + 1] + && left.v[l + 1] >= right.v[r + 1]) { // right first cross + if (left.v[l] == right.v[r]) { + res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); + } else { + res.addValueFlag(right.v[r], right.flag[r]); + } + if (left.v[l + 1] == right.v[r + 1]) { + res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); + l += 2; + r += 2; + } else { + 
res.addValueFlag(right.v[r + 1], right.flag[r + 1]); + left.v[l] = right.v[r + 1]; + left.flag[l] = !right.flag[r + 1]; + r += 2; + } + } else if (left.v[l] <= right.v[r] && left.v[l + 1] >= right.v[r + 1]) { // left covers right + res.addValueFlag(left.v[l], left.flag[l]); + if (left.v[l + 1] == right.v[r + 1]) { + res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); + l += 2; + r += 2; + } else { + res.addValueFlag(right.v[r + 1], right.flag[r + 1]); + left.v[l] = right.v[r + 1]; + left.flag[l] = !right.flag[r + 1]; + r += 2; + } + } else if (right.v[r] >= left.v[l] && right.v[r] <= left.v[l + 1] + && left.v[l + 1] <= right.v[r + 1]) { // left first cross + if (left.v[l] == right.v[r]) { + res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); + } else { + res.addValueFlag(left.v[l], left.flag[l]); + } + // left covers right contains (left.v[l+1]==right.v[r+1]) + res.addValueFlag(left.v[l + 1], left.flag[l + 1]); + if (left.v[l + 1] == right.v[r]) { + right.v[r] = left.v[l + 1]; + right.flag[r] = left.flag[l + 1] | right.flag[r]; + l += 2; + } else { + right.v[r] = left.v[l + 1]; + right.flag[r] = !left.flag[l + 1]; + l += 2; + } + } else if (left.v[l + 1] <= right.v[r]) { // left first + res.addValueFlag(left.v[l], left.flag[l]); + res.addValueFlag(left.v[l + 1], left.flag[l + 1]); + l += 2; + } else { // right covers left + res.addValueFlag(right.v[r], right.flag[r]); + // right first cross contains (left.v[l+1] == right.v[r+1]) + res.addValueFlag(left.v[l + 1], left.flag[l + 1]); + right.v[r] = left.v[l + 1]; + right.flag[r] = !left.flag[l + 1]; + l += 2; + } + } + // merge same value into one + DoubleInterval ans = new DoubleInterval(); + if (res.count == 0) + return res; + ans.addValueFlag(res.v[0], res.flag[0]); + ans.addValueFlag(res.v[1], res.flag[1]); + for (int i = 2; i < res.count; i += 2) { + if (res.v[i] == ans.v[ans.count - 1] && (res.flag[i] || ans.flag[ans.count - 1])) { + if (res.v[i + 1] == ans.v[ans.count - 1]) { + ans.flag[ans.count - 1] = ans.flag[ans.count - 1] | res.flag[i + 1]; + } else { + ans.v[ans.count - 1] = res.v[i + 1]; + ans.flag[ans.count - 1] = res.flag[i + 1]; + } + } else { + ans.addValueFlag(res.v[i], res.flag[i]); + ans.addValueFlag(res.v[i + 1], res.flag[i + 1]); + } + } + return ans; + } +} + + diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/FilterVerifier.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/FilterVerifier.java index 796c02b8..c60ca39c 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/FilterVerifier.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/FilterVerifier.java @@ -1,37 +1,37 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter.verifier; - -import cn.edu.tsinghua.tsfile.common.exception.filter.UnSupportFilterDataTypeException; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.utils.Interval; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * optimizing of filter, transfer SingleSensorFilter to interval comparison - * see {@link Interval} - * - * @author CGF - */ -public abstract class FilterVerifier { - - private static final Logger LOG = LoggerFactory.getLogger(FilterVerifier.class); - - public static FilterVerifier create(TSDataType dataType) { - switch (dataType) { - case INT32: - return new IntFilterVerifier(); - case INT64: - return 
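For reference, all of the verifiers touched by this patch share one encoding convention: an interval set is a flat array of endpoint values v[], read in pairs (v[2k], v[2k+1]), with a parallel flag[] array marking whether each endpoint is inclusive, and count tracking how many endpoints are filled. The standalone sketch below illustrates that convention only; SimpleInterval and its main method are stand-ins invented for this note, not the TsFile DoubleInterval class, but the Eq and NotEq results mirror what visit(Eq) and visit(NotEq) build above.

// Minimal stand-in for the project's *Interval classes, illustrating the
// (v[], flag[], count) pair-wise encoding used by the filter verifiers.
public class SimpleInterval {
    double[] v = new double[8];      // endpoint values, consumed in pairs
    boolean[] flag = new boolean[8]; // true = endpoint is inclusive
    int count = 0;                   // number of endpoints currently stored

    void addValueFlag(double value, boolean inclusive) {
        v[count] = value;
        flag[count] = inclusive;
        count++;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < count; i += 2) {
            sb.append(flag[i] ? '[' : '(').append(v[i]).append(", ")
              .append(v[i + 1]).append(flag[i + 1] ? ']' : ')').append(' ');
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        // value == 7.5  ->  the single closed point [7.5, 7.5]
        SimpleInterval eq = new SimpleInterval();
        eq.addValueFlag(7.5, true);
        eq.addValueFlag(7.5, true);
        System.out.println("Eq(7.5):    " + eq);   // [7.5, 7.5]

        // value != 7.5  ->  two pieces with 7.5 excluded on both sides
        SimpleInterval notEq = new SimpleInterval();
        notEq.addValueFlag(-Double.MAX_VALUE, true);
        notEq.addValueFlag(7.5, false);
        notEq.addValueFlag(7.5, false);
        notEq.addValueFlag(Double.MAX_VALUE, true);
        System.out.println("NotEq(7.5): " + notEq); // [-MAX_VALUE, 7.5) (7.5, MAX_VALUE]
    }
}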
new LongFilterVerifier(); - case FLOAT: - return new FloatFilterVerifier(); - case DOUBLE: - return new DoubleFilterVerifier(); - default: - LOG.error("wrong filter verifier invoke, FilterVerifier only support INT32,INT64,FLOAT and DOUBLE."); - throw new UnSupportFilterDataTypeException("wrong filter verifier invoke"); - } - } - - public abstract Interval getInterval(SingleSeriesFilterExpression filter); -} +package cn.edu.tsinghua.tsfile.timeseries.filter.verifier; + +import cn.edu.tsinghua.tsfile.common.exception.filter.UnSupportFilterDataTypeException; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.Interval; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * optimizing of filter, transfer SingleSensorFilter to interval comparison see {@link Interval} + * + * @author CGF + */ +public abstract class FilterVerifier { + + private static final Logger LOG = LoggerFactory.getLogger(FilterVerifier.class); + + public static FilterVerifier create(TSDataType dataType) { + switch (dataType) { + case INT32: + return new IntFilterVerifier(); + case INT64: + return new LongFilterVerifier(); + case FLOAT: + return new FloatFilterVerifier(); + case DOUBLE: + return new DoubleFilterVerifier(); + default: + LOG.error( + "wrong filter verifier invoke, FilterVerifier only support INT32,INT64,FLOAT and DOUBLE."); + throw new UnSupportFilterDataTypeException("wrong filter verifier invoke"); + } + } + + public abstract Interval getInterval(SingleSeriesFilterExpression filter); +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/FloatFilterVerifier.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/FloatFilterVerifier.java index 9f117d1c..7c2a809e 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/FloatFilterVerifier.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/FloatFilterVerifier.java @@ -1,308 +1,309 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter.verifier; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; -import cn.edu.tsinghua.tsfile.timeseries.filter.utils.FloatInterval; -import cn.edu.tsinghua.tsfile.timeseries.filter.utils.Interval; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.ConvertExpressionVisitor; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; - -/** - * @author CGF - */ -public class FloatFilterVerifier extends FilterVerifier implements FilterVisitor { - private Float FLOAT_MIN_VALUE = -Float.MAX_VALUE; - - private ConvertExpressionVisitor convertor = new ConvertExpressionVisitor(); - - @Override - public Interval getInterval(SingleSeriesFilterExpression filter) { - if (filter == null) { - FloatInterval ans = new FloatInterval(); - ans.addValueFlag(FLOAT_MIN_VALUE, true); - ans.addValueFlag(Float.MAX_VALUE, true); - return ans; - } - - return filter.accept(this); - } - - @Override - public > FloatInterval visit(Eq eq) { - FloatInterval ans = new FloatInterval(); - ans.v[0] = ((Float) eq.getValue()).floatValue(); - ans.v[1] = ((Float) eq.getValue()).floatValue(); - ans.flag[0] = true; - ans.flag[1] = true; - ans.count = 2; - return ans; - } - - @Override - public > FloatInterval 
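The create(TSDataType) factory shown in this hunk simply dispatches on the series data type and rejects anything that has no numeric interval form. The tiny analogue below shows the same shape in isolation so it compiles and runs on its own; the class, enum, and return values are invented for illustration and are not the TsFile API.

// Illustrative analogue of FilterVerifier.create(TSDataType): dispatch on an
// enum and fail fast for unsupported types.
public class VerifierFactoryDemo {
    enum DataType { INT32, INT64, FLOAT, DOUBLE, TEXT }

    static String create(DataType type) {
        switch (type) {
            case INT32:  return "IntFilterVerifier";
            case INT64:  return "LongFilterVerifier";
            case FLOAT:  return "FloatFilterVerifier";
            case DOUBLE: return "DoubleFilterVerifier";
            default:
                // TEXT (or any future type) cannot be mapped to a numeric interval
                throw new IllegalArgumentException("unsupported data type: " + type);
        }
    }

    public static void main(String[] args) {
        System.out.println(create(DataType.DOUBLE)); // prints DoubleFilterVerifier
    }
}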
visit(NotEq notEq) { - FloatInterval ans = new FloatInterval(); - ans.v[0] = FLOAT_MIN_VALUE; - ans.v[1] = ((Float) notEq.getValue()).floatValue(); - ans.v[2] = ((Float) notEq.getValue()).floatValue(); - ans.v[3] = Float.MAX_VALUE; - - if ((Float) notEq.getValue() == FLOAT_MIN_VALUE) { - ans.flag[0] = false; - ans.flag[1] = false; - ans.flag[2] = false; - ans.flag[3] = true; - } else if ((Float) notEq.getValue() == Float.MAX_VALUE) { - ans.flag[0] = true; - ans.flag[1] = false; - ans.flag[2] = false; - ans.flag[3] = false; - } else { - ans.flag[0] = true; - ans.flag[1] = false; - ans.flag[2] = false; - ans.flag[3] = true; - } - - ans.count = 4; - return ans; - } - - @Override - public > FloatInterval visit(LtEq ltEq) { - FloatInterval ans = new FloatInterval(); - if (ltEq.ifEq) { - ans.v[1] = ((Float) ltEq.getValue()).floatValue(); - ans.flag[1] = true; - } else { - ans.v[1] = ((Float) ltEq.getValue()).floatValue(); - ans.flag[1] = false; - } - - if (ans.v[1] == FLOAT_MIN_VALUE && ans.flag[1] == false) - ans.flag[0] = false; - else - ans.flag[0] = true; - ans.v[0] = FLOAT_MIN_VALUE; - - ans.count = 2; - return ans; - } - - @Override - public > FloatInterval visit(GtEq gtEq) { - FloatInterval ans = new FloatInterval(); - if (gtEq.ifEq) { - ans.v[0] = ((Float) gtEq.getValue()).floatValue(); - ans.flag[0] = true; - } else { - ans.v[0] = ((Float) gtEq.getValue()).floatValue(); - ans.flag[0] = false; - } - - ans.v[1] = Float.MAX_VALUE; - if (ans.v[0] == Float.MAX_VALUE && ans.flag[0] == false) - ans.flag[1] = false; - else - ans.flag[1] = true; - - ans.count = 2; - return ans; - } - - @Override - public FloatInterval visit(Not not) { - return visit(convertor.convert(not)); - } - - public FloatInterval visit(FilterExpression filter) { - if (filter instanceof Eq) - return visit((Eq) filter); - else if (filter instanceof NotEq) - return visit((NotEq) filter); - else if (filter instanceof LtEq) - return visit((LtEq) filter); - else if (filter instanceof GtEq) - return visit((GtEq) filter); - else if (filter instanceof And) - return visit((And) filter); - else if (filter instanceof Or) - return visit((Or) filter); - return null; - } - - @Override - public FloatInterval visit(And and) { - return intersection(visit(and.getLeft()), visit(and.getRight())); - } - - @Override - public FloatInterval visit(Or or) { - return union(visit(or.getLeft()), visit(or.getRight())); - } - - @Override - public FloatInterval visit(NoFilter noFilter) { - FloatInterval ans = new FloatInterval(); - ans.v[0] = FLOAT_MIN_VALUE; - ans.flag[0] = true; - ans.v[1] = Float.MAX_VALUE; - ans.flag[1] = true; - return ans; - } - - public FloatInterval intersection(FloatInterval left, FloatInterval right) { - FloatInterval ans = new FloatInterval(); - FloatInterval partResult = new FloatInterval(); - - for (int i = 0; i < left.count; i += 2) { - for (int j = 0; j < right.count; j += 2) { - if (left.v[i + 1] <= right.v[j]) { - if (left.v[i + 1] == right.v[j] && left.flag[i + 1] - && right.flag[j]) { - partResult.addValueFlag(left.v[i + 1], true); - partResult.addValueFlag(left.v[i + 1], true); - } else { - break; - } - } else if (left.v[i] >= right.v[j + 1]) { - if (left.v[i] == right.v[j + 1] && (left.flag[i] && right.flag[j + 1])) { - partResult.addValueFlag(left.v[i], true); - partResult.addValueFlag(left.v[i], true); - } else { - continue; - } - } else { - if (left.v[i] > right.v[j]) { - partResult.addValueFlag(left.v[i], left.flag[i]); - } else { - partResult.addValueFlag(right.v[j], right.flag[j]); - } - if (left.v[i + 1] > 
right.v[j + 1]) { - partResult.addValueFlag(right.v[j + 1], right.flag[j + 1]); - } else { - partResult.addValueFlag(left.v[i + 1], left.flag[i + 1]); - } - } - } - - for (int cnt = 0; cnt < partResult.count; cnt++) { - ans.addValueFlag(partResult.v[cnt], partResult.flag[cnt]); - } - partResult.count = 0; - } - return ans; - } - - public FloatInterval union(FloatInterval left, FloatInterval right) { - int l = 0, r = 0; - FloatInterval res = new FloatInterval(); - while (l < left.count || r < right.count) { - if (l >= left.count) { // only right has unmerged data, all right data should be added to ans - for (int i = r; i < right.count; i += 2) { - res.addValueFlag(right.v[i], right.flag[i]); - res.addValueFlag(right.v[i + 1], right.flag[i + 1]); - } - break; - } - if (r >= right.count) { // only left has unmerged data, all left data should be added to ans - for (int i = l; i < left.count; i += 2) { - res.addValueFlag(left.v[i], left.flag[i]); - res.addValueFlag(left.v[i + 1], left.flag[i + 1]); - } - break; - } - - if (left.v[l] >= right.v[r + 1]) { // right first - res.addValueFlag(right.v[r], right.flag[r]); - res.addValueFlag(right.v[r + 1], right.flag[r + 1]); - r += 2; - continue; - } else if (left.v[l] >= right.v[r] && left.v[l] <= right.v[r + 1] && left.v[l + 1] >= right.v[r + 1]) { // right first cross - if (left.v[l] == right.v[r]) { - res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); - } else { - res.addValueFlag(right.v[r], right.flag[r]); - } - if (left.v[l + 1] == right.v[r + 1]) { - res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); - l += 2; - r += 2; - continue; - } else { - res.addValueFlag(right.v[r + 1], right.flag[r + 1]); - left.v[l] = right.v[r + 1]; - left.flag[l] = !right.flag[r + 1]; - r += 2; - continue; - } - } else if (left.v[l] <= right.v[r] && left.v[l + 1] >= right.v[r + 1]) { // left covers right - res.addValueFlag(left.v[l], left.flag[l]); - if (left.v[l + 1] == right.v[r + 1]) { - res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); - l += 2; - r += 2; - continue; - } else { - res.addValueFlag(right.v[r + 1], right.flag[r + 1]); - left.v[l] = right.v[r + 1]; - left.flag[l] = !right.flag[r + 1]; - r += 2; - continue; - } - } else if (right.v[r] >= left.v[l] && right.v[r] <= left.v[l + 1] && left.v[l + 1] <= right.v[r + 1]) { // left first cross - if (left.v[l] == right.v[r]) { - res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); - } else { - res.addValueFlag(left.v[l], left.flag[l]); - } - // left covers right contains (left.v[l+1]==right.v[r+1]) - res.addValueFlag(left.v[l + 1], left.flag[l + 1]); - if (left.v[l + 1] == right.v[r]) { - right.v[r] = left.v[l + 1]; - right.flag[r] = left.flag[l + 1] | right.flag[r]; - l += 2; - } else { - right.v[r] = left.v[l + 1]; - right.flag[r] = !left.flag[l + 1]; - l += 2; - } - } else if (left.v[l + 1] <= right.v[r]) { // left first - res.addValueFlag(left.v[l], left.flag[l]); - res.addValueFlag(left.v[l + 1], left.flag[l + 1]); - l += 2; - continue; - } else { // right covers left - res.addValueFlag(right.v[r], right.flag[r]); - // right first cross contains (left.v[l+1] == right.v[r+1]) - res.addValueFlag(left.v[l + 1], left.flag[l + 1]); - right.v[r] = left.v[l + 1]; - right.flag[r] = !left.flag[l + 1]; - l += 2; - } - } - // merge same value into one - FloatInterval ans = new FloatInterval(); - if (res.count == 0) - return res; - ans.addValueFlag(res.v[0], res.flag[0]); - ans.addValueFlag(res.v[1], res.flag[1]); - for (int i = 2; i < res.count; i += 2) { - if 
(res.v[i] == ans.v[ans.count - 1] && (res.flag[i] || ans.flag[ans.count - 1])) { - if (res.v[i + 1] == ans.v[ans.count - 1]) { - ans.flag[ans.count - 1] = ans.flag[ans.count - 1] | res.flag[i + 1]; - } else { - ans.v[ans.count - 1] = res.v[i + 1]; - ans.flag[ans.count - 1] = res.flag[i + 1]; - } - } else { - ans.addValueFlag(res.v[i], res.flag[i]); - ans.addValueFlag(res.v[i + 1], res.flag[i + 1]); - } - } - return ans; - } - -} - - +package cn.edu.tsinghua.tsfile.timeseries.filter.verifier; + +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.FloatInterval; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.Interval; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.ConvertExpressionVisitor; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; + +/** + * @author CGF + */ +public class FloatFilterVerifier extends FilterVerifier implements FilterVisitor { + private Float FLOAT_MIN_VALUE = -Float.MAX_VALUE; + + private ConvertExpressionVisitor convertor = new ConvertExpressionVisitor(); + + @Override + public Interval getInterval(SingleSeriesFilterExpression filter) { + if (filter == null) { + FloatInterval ans = new FloatInterval(); + ans.addValueFlag(FLOAT_MIN_VALUE, true); + ans.addValueFlag(Float.MAX_VALUE, true); + return ans; + } + + return filter.accept(this); + } + + @Override + public > FloatInterval visit(Eq eq) { + FloatInterval ans = new FloatInterval(); + ans.v[0] = ((Float) eq.getValue()).floatValue(); + ans.v[1] = ((Float) eq.getValue()).floatValue(); + ans.flag[0] = true; + ans.flag[1] = true; + ans.count = 2; + return ans; + } + + @Override + public > FloatInterval visit(NotEq notEq) { + FloatInterval ans = new FloatInterval(); + ans.v[0] = FLOAT_MIN_VALUE; + ans.v[1] = ((Float) notEq.getValue()).floatValue(); + ans.v[2] = ((Float) notEq.getValue()).floatValue(); + ans.v[3] = Float.MAX_VALUE; + + if ((Float) notEq.getValue() == FLOAT_MIN_VALUE) { + ans.flag[0] = false; + ans.flag[1] = false; + ans.flag[2] = false; + ans.flag[3] = true; + } else if ((Float) notEq.getValue() == Float.MAX_VALUE) { + ans.flag[0] = true; + ans.flag[1] = false; + ans.flag[2] = false; + ans.flag[3] = false; + } else { + ans.flag[0] = true; + ans.flag[1] = false; + ans.flag[2] = false; + ans.flag[3] = true; + } + + ans.count = 4; + return ans; + } + + @Override + public > FloatInterval visit(LtEq ltEq) { + FloatInterval ans = new FloatInterval(); + if (ltEq.ifEq) { + ans.v[1] = ((Float) ltEq.getValue()).floatValue(); + ans.flag[1] = true; + } else { + ans.v[1] = ((Float) ltEq.getValue()).floatValue(); + ans.flag[1] = false; + } + + if (ans.v[1] == FLOAT_MIN_VALUE && ans.flag[1] == false) + ans.flag[0] = false; + else + ans.flag[0] = true; + ans.v[0] = FLOAT_MIN_VALUE; + + ans.count = 2; + return ans; + } + + @Override + public > FloatInterval visit(GtEq gtEq) { + FloatInterval ans = new FloatInterval(); + if (gtEq.ifEq) { + ans.v[0] = ((Float) gtEq.getValue()).floatValue(); + ans.flag[0] = true; + } else { + ans.v[0] = ((Float) gtEq.getValue()).floatValue(); + ans.flag[0] = false; + } + + ans.v[1] = Float.MAX_VALUE; + if (ans.v[0] == Float.MAX_VALUE && ans.flag[0] == false) + ans.flag[1] = false; + else + ans.flag[1] = true; + + ans.count = 2; + return ans; + } + + @Override + public FloatInterval visit(Not not) { + 
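One detail worth noting in the float and double verifiers: Java's Float.MIN_VALUE and Double.MIN_VALUE are the smallest positive representable values, not the most negative ones, which is presumably why these classes define their own FLOAT_MIN_VALUE and DOUBLE_MIN_VALUE as -Float.MAX_VALUE and -Double.MAX_VALUE for the unbounded lower endpoint. The snippet below only demonstrates that language-level distinction; it is a standalone illustration, not project code.

// Double.MIN_VALUE is the smallest *positive* double, so the "minus infinity"
// endpoint used by the verifiers has to be -Double.MAX_VALUE instead.
public class MinValueDemo {
    public static void main(String[] args) {
        System.out.println(Double.MIN_VALUE);           // 4.9E-324 (positive!)
        System.out.println(Double.MIN_VALUE > 0);       // true
        System.out.println(-Double.MAX_VALUE);          // -1.7976931348623157E308
        System.out.println(-Double.MAX_VALUE < -1e300); // true: usable as the lower bound
    }
}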
return visit(convertor.convert(not)); + } + + public FloatInterval visit(FilterExpression filter) { + if (filter instanceof Eq) + return visit((Eq) filter); + else if (filter instanceof NotEq) + return visit((NotEq) filter); + else if (filter instanceof LtEq) + return visit((LtEq) filter); + else if (filter instanceof GtEq) + return visit((GtEq) filter); + else if (filter instanceof And) + return visit((And) filter); + else if (filter instanceof Or) + return visit((Or) filter); + return null; + } + + @Override + public FloatInterval visit(And and) { + return intersection(visit(and.getLeft()), visit(and.getRight())); + } + + @Override + public FloatInterval visit(Or or) { + return union(visit(or.getLeft()), visit(or.getRight())); + } + + @Override + public FloatInterval visit(NoFilter noFilter) { + FloatInterval ans = new FloatInterval(); + ans.v[0] = FLOAT_MIN_VALUE; + ans.flag[0] = true; + ans.v[1] = Float.MAX_VALUE; + ans.flag[1] = true; + return ans; + } + + public FloatInterval intersection(FloatInterval left, FloatInterval right) { + FloatInterval ans = new FloatInterval(); + FloatInterval partResult = new FloatInterval(); + + for (int i = 0; i < left.count; i += 2) { + for (int j = 0; j < right.count; j += 2) { + if (left.v[i + 1] <= right.v[j]) { + if (left.v[i + 1] == right.v[j] && left.flag[i + 1] && right.flag[j]) { + partResult.addValueFlag(left.v[i + 1], true); + partResult.addValueFlag(left.v[i + 1], true); + } else { + break; + } + } else if (left.v[i] >= right.v[j + 1]) { + if (left.v[i] == right.v[j + 1] && (left.flag[i] && right.flag[j + 1])) { + partResult.addValueFlag(left.v[i], true); + partResult.addValueFlag(left.v[i], true); + } else { + continue; + } + } else { + if (left.v[i] > right.v[j]) { + partResult.addValueFlag(left.v[i], left.flag[i]); + } else { + partResult.addValueFlag(right.v[j], right.flag[j]); + } + if (left.v[i + 1] > right.v[j + 1]) { + partResult.addValueFlag(right.v[j + 1], right.flag[j + 1]); + } else { + partResult.addValueFlag(left.v[i + 1], left.flag[i + 1]); + } + } + } + + for (int cnt = 0; cnt < partResult.count; cnt++) { + ans.addValueFlag(partResult.v[cnt], partResult.flag[cnt]); + } + partResult.count = 0; + } + return ans; + } + + public FloatInterval union(FloatInterval left, FloatInterval right) { + int l = 0, r = 0; + FloatInterval res = new FloatInterval(); + while (l < left.count || r < right.count) { + if (l >= left.count) { // only right has unmerged data, all right data should be added to ans + for (int i = r; i < right.count; i += 2) { + res.addValueFlag(right.v[i], right.flag[i]); + res.addValueFlag(right.v[i + 1], right.flag[i + 1]); + } + break; + } + if (r >= right.count) { // only left has unmerged data, all left data should be added to ans + for (int i = l; i < left.count; i += 2) { + res.addValueFlag(left.v[i], left.flag[i]); + res.addValueFlag(left.v[i + 1], left.flag[i + 1]); + } + break; + } + + if (left.v[l] >= right.v[r + 1]) { // right first + res.addValueFlag(right.v[r], right.flag[r]); + res.addValueFlag(right.v[r + 1], right.flag[r + 1]); + r += 2; + continue; + } else if (left.v[l] >= right.v[r] && left.v[l] <= right.v[r + 1] + && left.v[l + 1] >= right.v[r + 1]) { // right first cross + if (left.v[l] == right.v[r]) { + res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); + } else { + res.addValueFlag(right.v[r], right.flag[r]); + } + if (left.v[l + 1] == right.v[r + 1]) { + res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); + l += 2; + r += 2; + continue; + } else { + 
res.addValueFlag(right.v[r + 1], right.flag[r + 1]); + left.v[l] = right.v[r + 1]; + left.flag[l] = !right.flag[r + 1]; + r += 2; + continue; + } + } else if (left.v[l] <= right.v[r] && left.v[l + 1] >= right.v[r + 1]) { // left covers right + res.addValueFlag(left.v[l], left.flag[l]); + if (left.v[l + 1] == right.v[r + 1]) { + res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); + l += 2; + r += 2; + continue; + } else { + res.addValueFlag(right.v[r + 1], right.flag[r + 1]); + left.v[l] = right.v[r + 1]; + left.flag[l] = !right.flag[r + 1]; + r += 2; + continue; + } + } else if (right.v[r] >= left.v[l] && right.v[r] <= left.v[l + 1] + && left.v[l + 1] <= right.v[r + 1]) { // left first cross + if (left.v[l] == right.v[r]) { + res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); + } else { + res.addValueFlag(left.v[l], left.flag[l]); + } + // left covers right contains (left.v[l+1]==right.v[r+1]) + res.addValueFlag(left.v[l + 1], left.flag[l + 1]); + if (left.v[l + 1] == right.v[r]) { + right.v[r] = left.v[l + 1]; + right.flag[r] = left.flag[l + 1] | right.flag[r]; + l += 2; + } else { + right.v[r] = left.v[l + 1]; + right.flag[r] = !left.flag[l + 1]; + l += 2; + } + } else if (left.v[l + 1] <= right.v[r]) { // left first + res.addValueFlag(left.v[l], left.flag[l]); + res.addValueFlag(left.v[l + 1], left.flag[l + 1]); + l += 2; + continue; + } else { // right covers left + res.addValueFlag(right.v[r], right.flag[r]); + // right first cross contains (left.v[l+1] == right.v[r+1]) + res.addValueFlag(left.v[l + 1], left.flag[l + 1]); + right.v[r] = left.v[l + 1]; + right.flag[r] = !left.flag[l + 1]; + l += 2; + } + } + // merge same value into one + FloatInterval ans = new FloatInterval(); + if (res.count == 0) + return res; + ans.addValueFlag(res.v[0], res.flag[0]); + ans.addValueFlag(res.v[1], res.flag[1]); + for (int i = 2; i < res.count; i += 2) { + if (res.v[i] == ans.v[ans.count - 1] && (res.flag[i] || ans.flag[ans.count - 1])) { + if (res.v[i + 1] == ans.v[ans.count - 1]) { + ans.flag[ans.count - 1] = ans.flag[ans.count - 1] | res.flag[i + 1]; + } else { + ans.v[ans.count - 1] = res.v[i + 1]; + ans.flag[ans.count - 1] = res.flag[i + 1]; + } + } else { + ans.addValueFlag(res.v[i], res.flag[i]); + ans.addValueFlag(res.v[i + 1], res.flag[i + 1]); + } + } + return ans; + } + +} + + diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/IntFilterVerifier.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/IntFilterVerifier.java index 87a1005f..88a81291 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/IntFilterVerifier.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/IntFilterVerifier.java @@ -1,298 +1,299 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter.verifier; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.utils.IntInterval; -import cn.edu.tsinghua.tsfile.timeseries.filter.utils.Interval; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.ConvertExpressionVisitor; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; - -/** - * FilterVerifier for Integer type. 
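The union() method that backs visit(Or) in these verifiers merges two sorted interval lists in one pass: the result covers a value if either operand does, and two result pieces that touch at the same value are folded into a single interval as long as at least one of them includes that value, so [0, 5) united with [5, 10] collapses to [0, 10]. Rather than reproduce the merge loop, the sketch below spells out that behaviour by modelling intervals as membership predicates; the interval helper is invented for this note and is not the TsFile representation.

import java.util.function.DoublePredicate;

// Semantics of union(): membership in either operand, with touching pieces merged.
public class UnionSemanticsDemo {
    static DoublePredicate interval(double lo, boolean loIn, double hi, boolean hiIn) {
        return x -> (loIn ? x >= lo : x > lo) && (hiIn ? x <= hi : x < hi);
    }

    public static void main(String[] args) {
        DoublePredicate a = interval(0, true, 5, false);  // [0, 5)
        DoublePredicate b = interval(5, true, 10, true);  // [5, 10]
        DoublePredicate union = a.or(b);

        // 5 is excluded from a but included in b, so it belongs to the union,
        // and the merged result behaves like the single interval [0, 10].
        System.out.println(union.test(5.0));  // true
        System.out.println(union.test(-0.1)); // false
        System.out.println(union.test(10.0)); // true
    }
}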
- * - * @author CGF - */ -public class IntFilterVerifier extends FilterVerifier implements FilterVisitor { - private ConvertExpressionVisitor convertor = new ConvertExpressionVisitor(); - - @Override - public Interval getInterval(SingleSeriesFilterExpression filter) { - if (filter == null) { - IntInterval ans = new IntInterval(); - ans.addValueFlag(Integer.MIN_VALUE, true); - ans.addValueFlag(Integer.MAX_VALUE, true); - return ans; - } - - return filter.accept(this); - } - - @Override - public > IntInterval visit(Eq eq) { - IntInterval ans = new IntInterval(); - ans.v[0] = ((Integer) eq.getValue()).intValue(); - ans.v[1] = ((Integer) eq.getValue()).intValue(); - ans.flag[0] = true; - ans.flag[1] = true; - ans.count = 2; - return ans; - } - - @Override - public > IntInterval visit(NotEq notEq) { - IntInterval ans = new IntInterval(); - ans.v[0] = Integer.MIN_VALUE; - ans.v[1] = ((Integer) notEq.getValue()).intValue(); - ans.v[2] = ((Integer) notEq.getValue()).intValue(); - ans.v[3] = Integer.MAX_VALUE; - - if ((Integer) notEq.getValue() == Integer.MIN_VALUE) { - ans.flag[0] = false; - ans.flag[1] = false; - ans.flag[2] = false; - ans.flag[3] = true; - } else if ((Integer) notEq.getValue() == Integer.MAX_VALUE) { - ans.flag[0] = true; - ans.flag[1] = false; - ans.flag[2] = false; - ans.flag[3] = false; - } else { - ans.flag[0] = true; - ans.flag[1] = false; - ans.flag[2] = false; - ans.flag[3] = true; - } - - ans.count = 4; - return ans; - } - - @Override - public > IntInterval visit(LtEq ltEq) { - IntInterval ans = new IntInterval(); - if (ltEq.ifEq) { - ans.v[1] = ((Integer) ltEq.getValue()).intValue(); - ans.flag[1] = true; - } else { - ans.v[1] = ((Integer) ltEq.getValue()).intValue(); - ans.flag[1] = false; - } - - if (ans.v[1] == Integer.MIN_VALUE && !ans.flag[1]) - ans.flag[0] = false; - else - ans.flag[0] = true; - ans.v[0] = Integer.MIN_VALUE; - - ans.count = 2; - return ans; - } - - @Override - public > IntInterval visit(GtEq gtEq) { - IntInterval ans = new IntInterval(); - if (gtEq.ifEq) { - ans.v[0] = ((Integer) gtEq.getValue()).intValue(); - ans.flag[0] = true; - } else { - ans.v[0] = ((Integer) gtEq.getValue()).intValue(); - ans.flag[0] = false; - } - - ans.v[1] = Integer.MAX_VALUE; - if (ans.v[0] == Integer.MAX_VALUE && !ans.flag[0]) - ans.flag[1] = false; - else - ans.flag[1] = true; - - ans.count = 2; - return ans; - } - - @Override - public IntInterval visit(Not not) { - return visit(convertor.convert(not)); - } - - public IntInterval visit(FilterExpression filter) { - if (filter instanceof Eq) - return visit((Eq) filter); - else if (filter instanceof NotEq) - return visit((NotEq) filter); - else if (filter instanceof LtEq) - return visit((LtEq) filter); - else if (filter instanceof GtEq) - return visit((GtEq) filter); - else if (filter instanceof And) - return visit((And) filter); - else if (filter instanceof Or) - return visit((Or) filter); - return null; - } - - @Override - public IntInterval visit(And and) { - return intersection(visit(and.getLeft()), visit(and.getRight())); - } - - @Override - public IntInterval visit(Or or) { - return union(visit(or.getLeft()), visit(or.getRight())); - } - - @Override - public IntInterval visit(NoFilter noFilter) { - IntInterval ans = new IntInterval(); - ans.v[0] = Integer.MIN_VALUE; - ans.flag[0] = true; - ans.v[1] = Integer.MAX_VALUE; - ans.flag[1] = true; - return ans; - } - - private IntInterval intersection(IntInterval left, IntInterval right) { - IntInterval ans = new IntInterval(); - IntInterval partResult = new IntInterval(); 
- - for (int i = 0; i < left.count; i += 2) { - for (int j = 0; j < right.count; j += 2) { - if (left.v[i + 1] <= right.v[j]) { - if (left.v[i + 1] == right.v[j] && left.flag[i + 1] - && right.flag[j]) { - partResult.addValueFlag(left.v[i + 1], true); - partResult.addValueFlag(left.v[i + 1], true); - } else { - break; - } - } else if (left.v[i] >= right.v[j + 1]) { - if (left.v[i] == right.v[j + 1] && (left.flag[i] && right.flag[j + 1])) { - partResult.addValueFlag(left.v[i], true); - partResult.addValueFlag(left.v[i], true); - } - } else { - if (left.v[i] > right.v[j]) { - partResult.addValueFlag(left.v[i], left.flag[i]); - } else { - partResult.addValueFlag(right.v[j], right.flag[j]); - } - if (left.v[i + 1] > right.v[j + 1]) { - partResult.addValueFlag(right.v[j + 1], right.flag[j + 1]); - } else { - partResult.addValueFlag(left.v[i + 1], left.flag[i + 1]); - } - } - } - - for (int cnt = 0; cnt < partResult.count; cnt++) { - ans.addValueFlag(partResult.v[cnt], partResult.flag[cnt]); - } - partResult.count = 0; - } - - return ans; - } - - private IntInterval union(IntInterval left, IntInterval right) { - int l = 0, r = 0; - IntInterval res = new IntInterval(); - while (l < left.count || r < right.count) { - if (l >= left.count) { // only right has unmerged data, all right data should be added to ans - for (int i = r; i < right.count; i += 2) { - res.addValueFlag(right.v[i], right.flag[i]); - res.addValueFlag(right.v[i + 1], right.flag[i + 1]); - } - break; - } - if (r >= right.count) { // only left has unmerged data, all left data should be added to ans - for (int i = l; i < left.count; i += 2) { - res.addValueFlag(left.v[i], left.flag[i]); - res.addValueFlag(left.v[i + 1], left.flag[i + 1]); - } - break; - } - - if (left.v[l] >= right.v[r + 1]) { // right first - res.addValueFlag(right.v[r], right.flag[r]); - res.addValueFlag(right.v[r + 1], right.flag[r + 1]); - r += 2; - } else if (left.v[l] >= right.v[r] && left.v[l] <= right.v[r + 1] && left.v[l + 1] >= right.v[r + 1]) { // right first cross - if (left.v[l] == right.v[r]) { - res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); - } else { - res.addValueFlag(right.v[r], right.flag[r]); - } - if (left.v[l + 1] == right.v[r + 1]) { - res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); - l += 2; - r += 2; - } else { - res.addValueFlag(right.v[r + 1], right.flag[r + 1]); - left.v[l] = right.v[r + 1]; - left.flag[l] = !right.flag[r + 1]; - r += 2; - } - } else if (left.v[l] <= right.v[r] && left.v[l + 1] >= right.v[r + 1]) { // left covers right - res.addValueFlag(left.v[l], left.flag[l]); - if (left.v[l + 1] == right.v[r + 1]) { - res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); - l += 2; - r += 2; - } else { - res.addValueFlag(right.v[r + 1], right.flag[r + 1]); - left.v[l] = right.v[r + 1]; - left.flag[l] = !right.flag[r + 1]; - r += 2; - } - } else if (right.v[r] >= left.v[l] && right.v[r] <= left.v[l + 1] && left.v[l + 1] <= right.v[r + 1]) { // left first cross - if (left.v[l] == right.v[r]) { - res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); - } else { - res.addValueFlag(left.v[l], left.flag[l]); - } - // left covers right contains (left.v[l+1]==right.v[r+1]) - res.addValueFlag(left.v[l + 1], left.flag[l + 1]); - if (left.v[l + 1] == right.v[r]) { - right.v[r] = left.v[l + 1]; - right.flag[r] = left.flag[l + 1] | right.flag[r]; - l += 2; - } else { - right.v[r] = left.v[l + 1]; - right.flag[r] = !left.flag[l + 1]; - l += 2; - } - } else if (left.v[l + 1] <= right.v[r]) { // 
left first - res.addValueFlag(left.v[l], left.flag[l]); - res.addValueFlag(left.v[l + 1], left.flag[l + 1]); - l += 2; - } else { // right covers left - res.addValueFlag(right.v[r], right.flag[r]); - // right first cross contains (left.v[l+1] == right.v[r+1]) - res.addValueFlag(left.v[l + 1], left.flag[l + 1]); - right.v[r] = left.v[l + 1]; - right.flag[r] = !left.flag[l + 1]; - l += 2; - } - } - // merge same value into one - IntInterval ans = new IntInterval(); - if (res.count == 0) - return res; - ans.addValueFlag(res.v[0], res.flag[0]); - ans.addValueFlag(res.v[1], res.flag[1]); - for (int i = 2; i < res.count; i += 2) { - if (res.v[i] == ans.v[ans.count - 1] && (res.flag[i] || ans.flag[ans.count - 1])) { - if (res.v[i + 1] == ans.v[ans.count - 1]) { - ans.flag[ans.count - 1] = ans.flag[ans.count - 1] | res.flag[i + 1]; - } else { - ans.v[ans.count - 1] = res.v[i + 1]; - ans.flag[ans.count - 1] = res.flag[i + 1]; - } - } else { - ans.addValueFlag(res.v[i], res.flag[i]); - ans.addValueFlag(res.v[i + 1], res.flag[i + 1]); - } - } - return ans; - } -} +package cn.edu.tsinghua.tsfile.timeseries.filter.verifier; + +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.IntInterval; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.Interval; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.ConvertExpressionVisitor; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; + +/** + * FilterVerifier for Integer type. + * + * @author CGF + */ +public class IntFilterVerifier extends FilterVerifier implements FilterVisitor { + private ConvertExpressionVisitor convertor = new ConvertExpressionVisitor(); + + @Override + public Interval getInterval(SingleSeriesFilterExpression filter) { + if (filter == null) { + IntInterval ans = new IntInterval(); + ans.addValueFlag(Integer.MIN_VALUE, true); + ans.addValueFlag(Integer.MAX_VALUE, true); + return ans; + } + + return filter.accept(this); + } + + @Override + public > IntInterval visit(Eq eq) { + IntInterval ans = new IntInterval(); + ans.v[0] = ((Integer) eq.getValue()).intValue(); + ans.v[1] = ((Integer) eq.getValue()).intValue(); + ans.flag[0] = true; + ans.flag[1] = true; + ans.count = 2; + return ans; + } + + @Override + public > IntInterval visit(NotEq notEq) { + IntInterval ans = new IntInterval(); + ans.v[0] = Integer.MIN_VALUE; + ans.v[1] = ((Integer) notEq.getValue()).intValue(); + ans.v[2] = ((Integer) notEq.getValue()).intValue(); + ans.v[3] = Integer.MAX_VALUE; + + if ((Integer) notEq.getValue() == Integer.MIN_VALUE) { + ans.flag[0] = false; + ans.flag[1] = false; + ans.flag[2] = false; + ans.flag[3] = true; + } else if ((Integer) notEq.getValue() == Integer.MAX_VALUE) { + ans.flag[0] = true; + ans.flag[1] = false; + ans.flag[2] = false; + ans.flag[3] = false; + } else { + ans.flag[0] = true; + ans.flag[1] = false; + ans.flag[2] = false; + ans.flag[3] = true; + } + + ans.count = 4; + return ans; + } + + @Override + public > IntInterval visit(LtEq ltEq) { + IntInterval ans = new IntInterval(); + if (ltEq.ifEq) { + ans.v[1] = ((Integer) ltEq.getValue()).intValue(); + ans.flag[1] = true; + } else { + ans.v[1] = ((Integer) ltEq.getValue()).intValue(); + ans.flag[1] = false; + } + + if (ans.v[1] == Integer.MIN_VALUE && !ans.flag[1]) + ans.flag[0] = false; + else + ans.flag[0] 
= true; + ans.v[0] = Integer.MIN_VALUE; + + ans.count = 2; + return ans; + } + + @Override + public > IntInterval visit(GtEq gtEq) { + IntInterval ans = new IntInterval(); + if (gtEq.ifEq) { + ans.v[0] = ((Integer) gtEq.getValue()).intValue(); + ans.flag[0] = true; + } else { + ans.v[0] = ((Integer) gtEq.getValue()).intValue(); + ans.flag[0] = false; + } + + ans.v[1] = Integer.MAX_VALUE; + if (ans.v[0] == Integer.MAX_VALUE && !ans.flag[0]) + ans.flag[1] = false; + else + ans.flag[1] = true; + + ans.count = 2; + return ans; + } + + @Override + public IntInterval visit(Not not) { + return visit(convertor.convert(not)); + } + + public IntInterval visit(FilterExpression filter) { + if (filter instanceof Eq) + return visit((Eq) filter); + else if (filter instanceof NotEq) + return visit((NotEq) filter); + else if (filter instanceof LtEq) + return visit((LtEq) filter); + else if (filter instanceof GtEq) + return visit((GtEq) filter); + else if (filter instanceof And) + return visit((And) filter); + else if (filter instanceof Or) + return visit((Or) filter); + return null; + } + + @Override + public IntInterval visit(And and) { + return intersection(visit(and.getLeft()), visit(and.getRight())); + } + + @Override + public IntInterval visit(Or or) { + return union(visit(or.getLeft()), visit(or.getRight())); + } + + @Override + public IntInterval visit(NoFilter noFilter) { + IntInterval ans = new IntInterval(); + ans.v[0] = Integer.MIN_VALUE; + ans.flag[0] = true; + ans.v[1] = Integer.MAX_VALUE; + ans.flag[1] = true; + return ans; + } + + private IntInterval intersection(IntInterval left, IntInterval right) { + IntInterval ans = new IntInterval(); + IntInterval partResult = new IntInterval(); + + for (int i = 0; i < left.count; i += 2) { + for (int j = 0; j < right.count; j += 2) { + if (left.v[i + 1] <= right.v[j]) { + if (left.v[i + 1] == right.v[j] && left.flag[i + 1] && right.flag[j]) { + partResult.addValueFlag(left.v[i + 1], true); + partResult.addValueFlag(left.v[i + 1], true); + } else { + break; + } + } else if (left.v[i] >= right.v[j + 1]) { + if (left.v[i] == right.v[j + 1] && (left.flag[i] && right.flag[j + 1])) { + partResult.addValueFlag(left.v[i], true); + partResult.addValueFlag(left.v[i], true); + } + } else { + if (left.v[i] > right.v[j]) { + partResult.addValueFlag(left.v[i], left.flag[i]); + } else { + partResult.addValueFlag(right.v[j], right.flag[j]); + } + if (left.v[i + 1] > right.v[j + 1]) { + partResult.addValueFlag(right.v[j + 1], right.flag[j + 1]); + } else { + partResult.addValueFlag(left.v[i + 1], left.flag[i + 1]); + } + } + } + + for (int cnt = 0; cnt < partResult.count; cnt++) { + ans.addValueFlag(partResult.v[cnt], partResult.flag[cnt]); + } + partResult.count = 0; + } + + return ans; + } + + private IntInterval union(IntInterval left, IntInterval right) { + int l = 0, r = 0; + IntInterval res = new IntInterval(); + while (l < left.count || r < right.count) { + if (l >= left.count) { // only right has unmerged data, all right data should be added to ans + for (int i = r; i < right.count; i += 2) { + res.addValueFlag(right.v[i], right.flag[i]); + res.addValueFlag(right.v[i + 1], right.flag[i + 1]); + } + break; + } + if (r >= right.count) { // only left has unmerged data, all left data should be added to ans + for (int i = l; i < left.count; i += 2) { + res.addValueFlag(left.v[i], left.flag[i]); + res.addValueFlag(left.v[i + 1], left.flag[i + 1]); + } + break; + } + + if (left.v[l] >= right.v[r + 1]) { // right first + res.addValueFlag(right.v[r], right.flag[r]); + 
res.addValueFlag(right.v[r + 1], right.flag[r + 1]); + r += 2; + } else if (left.v[l] >= right.v[r] && left.v[l] <= right.v[r + 1] + && left.v[l + 1] >= right.v[r + 1]) { // right first cross + if (left.v[l] == right.v[r]) { + res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); + } else { + res.addValueFlag(right.v[r], right.flag[r]); + } + if (left.v[l + 1] == right.v[r + 1]) { + res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); + l += 2; + r += 2; + } else { + res.addValueFlag(right.v[r + 1], right.flag[r + 1]); + left.v[l] = right.v[r + 1]; + left.flag[l] = !right.flag[r + 1]; + r += 2; + } + } else if (left.v[l] <= right.v[r] && left.v[l + 1] >= right.v[r + 1]) { // left covers right + res.addValueFlag(left.v[l], left.flag[l]); + if (left.v[l + 1] == right.v[r + 1]) { + res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); + l += 2; + r += 2; + } else { + res.addValueFlag(right.v[r + 1], right.flag[r + 1]); + left.v[l] = right.v[r + 1]; + left.flag[l] = !right.flag[r + 1]; + r += 2; + } + } else if (right.v[r] >= left.v[l] && right.v[r] <= left.v[l + 1] + && left.v[l + 1] <= right.v[r + 1]) { // left first cross + if (left.v[l] == right.v[r]) { + res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); + } else { + res.addValueFlag(left.v[l], left.flag[l]); + } + // left covers right contains (left.v[l+1]==right.v[r+1]) + res.addValueFlag(left.v[l + 1], left.flag[l + 1]); + if (left.v[l + 1] == right.v[r]) { + right.v[r] = left.v[l + 1]; + right.flag[r] = left.flag[l + 1] | right.flag[r]; + l += 2; + } else { + right.v[r] = left.v[l + 1]; + right.flag[r] = !left.flag[l + 1]; + l += 2; + } + } else if (left.v[l + 1] <= right.v[r]) { // left first + res.addValueFlag(left.v[l], left.flag[l]); + res.addValueFlag(left.v[l + 1], left.flag[l + 1]); + l += 2; + } else { // right covers left + res.addValueFlag(right.v[r], right.flag[r]); + // right first cross contains (left.v[l+1] == right.v[r+1]) + res.addValueFlag(left.v[l + 1], left.flag[l + 1]); + right.v[r] = left.v[l + 1]; + right.flag[r] = !left.flag[l + 1]; + l += 2; + } + } + // merge same value into one + IntInterval ans = new IntInterval(); + if (res.count == 0) + return res; + ans.addValueFlag(res.v[0], res.flag[0]); + ans.addValueFlag(res.v[1], res.flag[1]); + for (int i = 2; i < res.count; i += 2) { + if (res.v[i] == ans.v[ans.count - 1] && (res.flag[i] || ans.flag[ans.count - 1])) { + if (res.v[i + 1] == ans.v[ans.count - 1]) { + ans.flag[ans.count - 1] = ans.flag[ans.count - 1] | res.flag[i + 1]; + } else { + ans.v[ans.count - 1] = res.v[i + 1]; + ans.flag[ans.count - 1] = res.flag[i + 1]; + } + } else { + ans.addValueFlag(res.v[i], res.flag[i]); + ans.addValueFlag(res.v[i + 1], res.flag[i + 1]); + } + } + return ans; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/LongFilterVerifier.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/LongFilterVerifier.java index bd4df08e..cc13b9af 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/LongFilterVerifier.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/verifier/LongFilterVerifier.java @@ -1,297 +1,298 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter.verifier; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; -import 
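The intersection() method, which backs visit(And), keeps the tighter endpoint on each side of every overlapping pair, and two ranges that only share a single inclusive endpoint intersect in the degenerate point interval [x, x] (that is what the double addValueFlag of the same value expresses above). The predicate-based sketch below checks those expected results by membership testing; the interval helper is an invented aid for this note, not the TsFile interval representation.

import java.util.function.DoublePredicate;

// Semantics of intersection(): membership in both operands.
public class IntersectionSemanticsDemo {
    static DoublePredicate interval(double lo, boolean loIn, double hi, boolean hiIn) {
        return x -> (loIn ? x >= lo : x > lo) && (hiIn ? x <= hi : x < hi);
    }

    public static void main(String[] args) {
        // (value > 5) AND (value <= 10)  ->  (5, 10]
        DoublePredicate gt5 = interval(5, false, Double.MAX_VALUE, true);
        DoublePredicate le10 = interval(-Double.MAX_VALUE, true, 10, true);
        DoublePredicate and = gt5.and(le10);
        System.out.println(and.test(5.0));  // false: 5 is excluded by the open bound
        System.out.println(and.test(7.0));  // true
        System.out.println(and.test(10.0)); // true

        // (value <= 10) AND (value >= 10)  ->  the single point [10, 10]
        DoublePredicate ge10 = interval(10, true, Double.MAX_VALUE, true);
        DoublePredicate point = le10.and(ge10);
        System.out.println(point.test(10.0)); // true
        System.out.println(point.test(9.99)); // false
    }
}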
cn.edu.tsinghua.tsfile.timeseries.filter.utils.LongInterval; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.ConvertExpressionVisitor; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; - -/** - * @author CGF - */ -public class LongFilterVerifier extends FilterVerifier implements FilterVisitor { - private ConvertExpressionVisitor convertor = new ConvertExpressionVisitor(); - - @Override - public LongInterval getInterval(SingleSeriesFilterExpression filter) { - if (filter == null) { - LongInterval ans = new LongInterval(); - ans.addValueFlag(Long.MIN_VALUE, true); - ans.addValueFlag(Long.MAX_VALUE, true); - return ans; - } - - return filter.accept(this); - } - - @Override - public > LongInterval visit(Eq eq) { - LongInterval ans = new LongInterval(); - ans.v[0] = ((Long) eq.getValue()).longValue(); - ans.v[1] = ((Long) eq.getValue()).longValue(); - ans.flag[0] = true; - ans.flag[1] = true; - ans.count = 2; - return ans; - } - - @Override - public > LongInterval visit(NotEq notEq) { - LongInterval ans = new LongInterval(); - ans.v[0] = Long.MIN_VALUE; - ans.v[1] = ((Long) notEq.getValue()).longValue(); - ans.v[2] = ((Long) notEq.getValue()).longValue(); - ans.v[3] = Long.MAX_VALUE; - - if ((Long) notEq.getValue() == Long.MIN_VALUE) { - ans.flag[0] = false; - ans.flag[1] = false; - ans.flag[2] = false; - ans.flag[3] = true; - } else if ((Long) notEq.getValue() == Long.MAX_VALUE) { - ans.flag[0] = true; - ans.flag[1] = false; - ans.flag[2] = false; - ans.flag[3] = false; - } else { - ans.flag[0] = true; - ans.flag[1] = false; - ans.flag[2] = false; - ans.flag[3] = true; - } - - ans.count = 4; - return ans; - } - - @Override - public > LongInterval visit(LtEq ltEq) { - LongInterval ans = new LongInterval(); - if (ltEq.ifEq) { - ans.v[1] = ((Long) ltEq.getValue()).longValue(); - ans.flag[1] = true; - } else { - ans.v[1] = ((Long) ltEq.getValue()).longValue(); - ans.flag[1] = false; - } - - if (ans.v[1] == Long.MIN_VALUE && !ans.flag[1]) - ans.flag[0] = false; - else - ans.flag[0] = true; - ans.v[0] = Long.MIN_VALUE; - - ans.count = 2; - return ans; - } - - @Override - public > LongInterval visit(GtEq gtEq) { - LongInterval ans = new LongInterval(); - if (gtEq.ifEq) { - ans.v[0] = ((Long) gtEq.getValue()).longValue(); - ans.flag[0] = true; - } else { - ans.v[0] = ((Long) gtEq.getValue()).longValue(); - ans.flag[0] = false; - } - - ans.v[1] = Long.MAX_VALUE; - if (ans.v[0] == Long.MAX_VALUE && !ans.flag[0]) - ans.flag[1] = false; - else - ans.flag[1] = true; - - ans.count = 2; - return ans; - } - - @Override - public LongInterval visit(Not not) { - return visit(convertor.convert(not)); - } - - public LongInterval visit(FilterExpression filter) { - if (filter instanceof Eq) - return visit((Eq) filter); - else if (filter instanceof NotEq) - return visit((NotEq) filter); - else if (filter instanceof LtEq) - return visit((LtEq) filter); - else if (filter instanceof GtEq) - return visit((GtEq) filter); - else if (filter instanceof And) - return visit((And) filter); - else if (filter instanceof Or) - return visit((Or) filter); - return null; - } - - @Override - public LongInterval visit(And and) { - return intersection(visit(and.getLeft()), visit(and.getRight())); - } - - @Override - public LongInterval visit(Or or) { - return union(visit(or.getLeft()), visit(or.getRight())); - } - - @Override - public LongInterval visit(NoFilter noFilter) { - LongInterval ans = new LongInterval(); - ans.v[0] = Long.MIN_VALUE; - ans.flag[0] = true; - ans.v[1] = Long.MAX_VALUE; - 
ans.flag[1] = true; - return ans; - } - - private LongInterval intersection(LongInterval left, LongInterval right) { - LongInterval ans = new LongInterval(); - LongInterval partResult = new LongInterval(); - - for (int i = 0; i < left.count; i += 2) { - for (int j = 0; j < right.count; j += 2) { - if (left.v[i + 1] <= right.v[j]) { - if (left.v[i + 1] == right.v[j] && left.flag[i + 1] - && right.flag[j]) { - partResult.addValueFlag(left.v[i + 1], true); - partResult.addValueFlag(left.v[i + 1], true); - } else { - break; - } - } else if (left.v[i] >= right.v[j + 1]) { - if (left.v[i] == right.v[j + 1] && (left.flag[i] && right.flag[j + 1])) { - partResult.addValueFlag(left.v[i], true); - partResult.addValueFlag(left.v[i], true); - } - } else { - if (left.v[i] > right.v[j]) { - partResult.addValueFlag(left.v[i], left.flag[i]); - } else { - partResult.addValueFlag(right.v[j], right.flag[j]); - } - if (left.v[i + 1] > right.v[j + 1]) { - partResult.addValueFlag(right.v[j + 1], right.flag[j + 1]); - } else { - partResult.addValueFlag(left.v[i + 1], left.flag[i + 1]); - } - } - } - - for (int cnt = 0; cnt < partResult.count; cnt++) { - ans.addValueFlag(partResult.v[cnt], partResult.flag[cnt]); - } - partResult.count = 0; - } - - return ans; - } - - private LongInterval union(LongInterval left, LongInterval right) { - int l = 0, r = 0; - LongInterval res = new LongInterval(); - while (l < left.count || r < right.count) { - if (l >= left.count) { // only right has unmerged data, all right data should be added to ans - for (int i = r; i < right.count; i += 2) { - res.addValueFlag(right.v[i], right.flag[i]); - res.addValueFlag(right.v[i + 1], right.flag[i + 1]); - } - break; - } - if (r >= right.count) { // only left has unmerged data, all left data should be added to ans - for (int i = l; i < left.count; i += 2) { - res.addValueFlag(left.v[i], left.flag[i]); - res.addValueFlag(left.v[i + 1], left.flag[i + 1]); - } - break; - } - - if (left.v[l] >= right.v[r + 1]) { // right first - res.addValueFlag(right.v[r], right.flag[r]); - res.addValueFlag(right.v[r + 1], right.flag[r + 1]); - r += 2; - } else if (left.v[l] >= right.v[r] && left.v[l] <= right.v[r + 1] && left.v[l + 1] >= right.v[r + 1]) { // right first cross - if (left.v[l] == right.v[r]) { - res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); - } else { - res.addValueFlag(right.v[r], right.flag[r]); - } - if (left.v[l + 1] == right.v[r + 1]) { - res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); - l += 2; - r += 2; - } else { - res.addValueFlag(right.v[r + 1], right.flag[r + 1]); - left.v[l] = right.v[r + 1]; - left.flag[l] = !right.flag[r + 1]; - r += 2; - } - } else if (left.v[l] <= right.v[r] && left.v[l + 1] >= right.v[r + 1]) { // left covers right - res.addValueFlag(left.v[l], left.flag[l]); - if (left.v[l + 1] == right.v[r + 1]) { - res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); - l += 2; - r += 2; - } else { - res.addValueFlag(right.v[r + 1], right.flag[r + 1]); - left.v[l] = right.v[r + 1]; - left.flag[l] = !right.flag[r + 1]; - r += 2; - } - } else if (right.v[r] >= left.v[l] && right.v[r] <= left.v[l + 1] && left.v[l + 1] <= right.v[r + 1]) { // left first cross - if (left.v[l] == right.v[r]) { - res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); - } else { - res.addValueFlag(left.v[l], left.flag[l]); - } - // left covers right contains (left.v[l+1]==right.v[r+1]) - res.addValueFlag(left.v[l + 1], left.flag[l + 1]); - - if (left.v[l + 1] == right.v[r]) { - right.v[r] = 
left.v[l + 1]; - right.flag[r] = left.flag[l + 1] | right.flag[r]; - l += 2; - } else { - right.v[r] = left.v[l + 1]; - right.flag[r] = !left.flag[l + 1]; - l += 2; - } - } else if (left.v[l + 1] <= right.v[r]) { // left first - res.addValueFlag(left.v[l], left.flag[l]); - res.addValueFlag(left.v[l + 1], left.flag[l + 1]); - l += 2; - } else { // right covers left - res.addValueFlag(right.v[r], right.flag[r]); - // right first cross contains (left.v[l+1] == right.v[r+1]) - res.addValueFlag(left.v[l + 1], left.flag[l + 1]); - right.v[r] = left.v[l + 1]; - right.flag[r] = !left.flag[l + 1]; - l += 2; - } - } - // merge same value into one - LongInterval ans = new LongInterval(); - if (res.count == 0) - return res; - ans.addValueFlag(res.v[0], res.flag[0]); - ans.addValueFlag(res.v[1], res.flag[1]); - for (int i = 2; i < res.count; i += 2) { - if (res.v[i] == ans.v[ans.count - 1] && (res.flag[i] || ans.flag[ans.count - 1])) { - if (res.v[i + 1] == ans.v[ans.count - 1]) { - ans.flag[ans.count - 1] = ans.flag[ans.count - 1] | res.flag[i + 1]; - } else { - ans.v[ans.count - 1] = res.v[i + 1]; - ans.flag[ans.count - 1] = res.flag[i + 1]; - } - } else { - ans.addValueFlag(res.v[i], res.flag[i]); - ans.addValueFlag(res.v[i + 1], res.flag[i + 1]); - } - } - return ans; - } - -} +package cn.edu.tsinghua.tsfile.timeseries.filter.verifier; + +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.LongInterval; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.ConvertExpressionVisitor; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.FilterVisitor; + +/** + * @author CGF + */ +public class LongFilterVerifier extends FilterVerifier implements FilterVisitor { + private ConvertExpressionVisitor convertor = new ConvertExpressionVisitor(); + + @Override + public LongInterval getInterval(SingleSeriesFilterExpression filter) { + if (filter == null) { + LongInterval ans = new LongInterval(); + ans.addValueFlag(Long.MIN_VALUE, true); + ans.addValueFlag(Long.MAX_VALUE, true); + return ans; + } + + return filter.accept(this); + } + + @Override + public > LongInterval visit(Eq eq) { + LongInterval ans = new LongInterval(); + ans.v[0] = ((Long) eq.getValue()).longValue(); + ans.v[1] = ((Long) eq.getValue()).longValue(); + ans.flag[0] = true; + ans.flag[1] = true; + ans.count = 2; + return ans; + } + + @Override + public > LongInterval visit(NotEq notEq) { + LongInterval ans = new LongInterval(); + ans.v[0] = Long.MIN_VALUE; + ans.v[1] = ((Long) notEq.getValue()).longValue(); + ans.v[2] = ((Long) notEq.getValue()).longValue(); + ans.v[3] = Long.MAX_VALUE; + + if ((Long) notEq.getValue() == Long.MIN_VALUE) { + ans.flag[0] = false; + ans.flag[1] = false; + ans.flag[2] = false; + ans.flag[3] = true; + } else if ((Long) notEq.getValue() == Long.MAX_VALUE) { + ans.flag[0] = true; + ans.flag[1] = false; + ans.flag[2] = false; + ans.flag[3] = false; + } else { + ans.flag[0] = true; + ans.flag[1] = false; + ans.flag[2] = false; + ans.flag[3] = true; + } + + ans.count = 4; + return ans; + } + + @Override + public > LongInterval visit(LtEq ltEq) { + LongInterval ans = new LongInterval(); + if (ltEq.ifEq) { + ans.v[1] = ((Long) ltEq.getValue()).longValue(); + ans.flag[1] = true; + } else { + ans.v[1] = ((Long) ltEq.getValue()).longValue(); + ans.flag[1] = false; + } + + 
if (ans.v[1] == Long.MIN_VALUE && !ans.flag[1]) + ans.flag[0] = false; + else + ans.flag[0] = true; + ans.v[0] = Long.MIN_VALUE; + + ans.count = 2; + return ans; + } + + @Override + public > LongInterval visit(GtEq gtEq) { + LongInterval ans = new LongInterval(); + if (gtEq.ifEq) { + ans.v[0] = ((Long) gtEq.getValue()).longValue(); + ans.flag[0] = true; + } else { + ans.v[0] = ((Long) gtEq.getValue()).longValue(); + ans.flag[0] = false; + } + + ans.v[1] = Long.MAX_VALUE; + if (ans.v[0] == Long.MAX_VALUE && !ans.flag[0]) + ans.flag[1] = false; + else + ans.flag[1] = true; + + ans.count = 2; + return ans; + } + + @Override + public LongInterval visit(Not not) { + return visit(convertor.convert(not)); + } + + public LongInterval visit(FilterExpression filter) { + if (filter instanceof Eq) + return visit((Eq) filter); + else if (filter instanceof NotEq) + return visit((NotEq) filter); + else if (filter instanceof LtEq) + return visit((LtEq) filter); + else if (filter instanceof GtEq) + return visit((GtEq) filter); + else if (filter instanceof And) + return visit((And) filter); + else if (filter instanceof Or) + return visit((Or) filter); + return null; + } + + @Override + public LongInterval visit(And and) { + return intersection(visit(and.getLeft()), visit(and.getRight())); + } + + @Override + public LongInterval visit(Or or) { + return union(visit(or.getLeft()), visit(or.getRight())); + } + + @Override + public LongInterval visit(NoFilter noFilter) { + LongInterval ans = new LongInterval(); + ans.v[0] = Long.MIN_VALUE; + ans.flag[0] = true; + ans.v[1] = Long.MAX_VALUE; + ans.flag[1] = true; + return ans; + } + + private LongInterval intersection(LongInterval left, LongInterval right) { + LongInterval ans = new LongInterval(); + LongInterval partResult = new LongInterval(); + + for (int i = 0; i < left.count; i += 2) { + for (int j = 0; j < right.count; j += 2) { + if (left.v[i + 1] <= right.v[j]) { + if (left.v[i + 1] == right.v[j] && left.flag[i + 1] && right.flag[j]) { + partResult.addValueFlag(left.v[i + 1], true); + partResult.addValueFlag(left.v[i + 1], true); + } else { + break; + } + } else if (left.v[i] >= right.v[j + 1]) { + if (left.v[i] == right.v[j + 1] && (left.flag[i] && right.flag[j + 1])) { + partResult.addValueFlag(left.v[i], true); + partResult.addValueFlag(left.v[i], true); + } + } else { + if (left.v[i] > right.v[j]) { + partResult.addValueFlag(left.v[i], left.flag[i]); + } else { + partResult.addValueFlag(right.v[j], right.flag[j]); + } + if (left.v[i + 1] > right.v[j + 1]) { + partResult.addValueFlag(right.v[j + 1], right.flag[j + 1]); + } else { + partResult.addValueFlag(left.v[i + 1], left.flag[i + 1]); + } + } + } + + for (int cnt = 0; cnt < partResult.count; cnt++) { + ans.addValueFlag(partResult.v[cnt], partResult.flag[cnt]); + } + partResult.count = 0; + } + + return ans; + } + + private LongInterval union(LongInterval left, LongInterval right) { + int l = 0, r = 0; + LongInterval res = new LongInterval(); + while (l < left.count || r < right.count) { + if (l >= left.count) { // only right has unmerged data, all right data should be added to ans + for (int i = r; i < right.count; i += 2) { + res.addValueFlag(right.v[i], right.flag[i]); + res.addValueFlag(right.v[i + 1], right.flag[i + 1]); + } + break; + } + if (r >= right.count) { // only left has unmerged data, all left data should be added to ans + for (int i = l; i < left.count; i += 2) { + res.addValueFlag(left.v[i], left.flag[i]); + res.addValueFlag(left.v[i + 1], left.flag[i + 1]); + } + break; + } + + if 
(left.v[l] >= right.v[r + 1]) { // right first + res.addValueFlag(right.v[r], right.flag[r]); + res.addValueFlag(right.v[r + 1], right.flag[r + 1]); + r += 2; + } else if (left.v[l] >= right.v[r] && left.v[l] <= right.v[r + 1] + && left.v[l + 1] >= right.v[r + 1]) { // right first cross + if (left.v[l] == right.v[r]) { + res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); + } else { + res.addValueFlag(right.v[r], right.flag[r]); + } + if (left.v[l + 1] == right.v[r + 1]) { + res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); + l += 2; + r += 2; + } else { + res.addValueFlag(right.v[r + 1], right.flag[r + 1]); + left.v[l] = right.v[r + 1]; + left.flag[l] = !right.flag[r + 1]; + r += 2; + } + } else if (left.v[l] <= right.v[r] && left.v[l + 1] >= right.v[r + 1]) { // left covers right + res.addValueFlag(left.v[l], left.flag[l]); + if (left.v[l + 1] == right.v[r + 1]) { + res.addValueFlag(left.v[l + 1], left.flag[l + 1] | right.flag[r + 1]); + l += 2; + r += 2; + } else { + res.addValueFlag(right.v[r + 1], right.flag[r + 1]); + left.v[l] = right.v[r + 1]; + left.flag[l] = !right.flag[r + 1]; + r += 2; + } + } else if (right.v[r] >= left.v[l] && right.v[r] <= left.v[l + 1] + && left.v[l + 1] <= right.v[r + 1]) { // left first cross + if (left.v[l] == right.v[r]) { + res.addValueFlag(left.v[l], left.flag[l] | right.flag[r]); + } else { + res.addValueFlag(left.v[l], left.flag[l]); + } + // left covers right contains (left.v[l+1]==right.v[r+1]) + res.addValueFlag(left.v[l + 1], left.flag[l + 1]); + + if (left.v[l + 1] == right.v[r]) { + right.v[r] = left.v[l + 1]; + right.flag[r] = left.flag[l + 1] | right.flag[r]; + l += 2; + } else { + right.v[r] = left.v[l + 1]; + right.flag[r] = !left.flag[l + 1]; + l += 2; + } + } else if (left.v[l + 1] <= right.v[r]) { // left first + res.addValueFlag(left.v[l], left.flag[l]); + res.addValueFlag(left.v[l + 1], left.flag[l + 1]); + l += 2; + } else { // right covers left + res.addValueFlag(right.v[r], right.flag[r]); + // right first cross contains (left.v[l+1] == right.v[r+1]) + res.addValueFlag(left.v[l + 1], left.flag[l + 1]); + right.v[r] = left.v[l + 1]; + right.flag[r] = !left.flag[l + 1]; + l += 2; + } + } + // merge same value into one + LongInterval ans = new LongInterval(); + if (res.count == 0) + return res; + ans.addValueFlag(res.v[0], res.flag[0]); + ans.addValueFlag(res.v[1], res.flag[1]); + for (int i = 2; i < res.count; i += 2) { + if (res.v[i] == ans.v[ans.count - 1] && (res.flag[i] || ans.flag[ans.count - 1])) { + if (res.v[i + 1] == ans.v[ans.count - 1]) { + ans.flag[ans.count - 1] = ans.flag[ans.count - 1] | res.flag[i + 1]; + } else { + ans.v[ans.count - 1] = res.v[i + 1]; + ans.flag[ans.count - 1] = res.flag[i + 1]; + } + } else { + ans.addValueFlag(res.v[i], res.flag[i]); + ans.addValueFlag(res.v[i + 1], res.flag[i + 1]); + } + } + return ans; + } + +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/ConvertExpressionVisitor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/ConvertExpressionVisitor.java index 883f1e6b..485f3654 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/ConvertExpressionVisitor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/ConvertExpressionVisitor.java @@ -1,62 +1,62 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; -import 
cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; - -/** - * To remove not operators, convert all operators recursively. - * {@code Not(and(eq(), not(eq(y))) -> Or(notEq(), eq(y))} - * - * @author CGF - */ -public class ConvertExpressionVisitor implements FilterVisitor { - - private InvertExpressionVisitor invertor = new InvertExpressionVisitor(); - - public FilterExpression convert(FilterExpression exp) { - return exp.accept(this); - } - - @Override - public > FilterExpression visit(Eq eq) { - return eq; - } - - @Override - public > FilterExpression visit(NotEq notEq) { - return notEq; - } - - @Override - public > FilterExpression visit(LtEq ltEq) { - return ltEq; - } - - @Override - public > FilterExpression visit(GtEq gtEq) { - return gtEq; - } - - - @Override - public FilterExpression visit(And and) { - return FilterFactory.and((and.getLeft()), convert(and.getRight())); - } - - @Override - public FilterExpression visit(Or or) { - return FilterFactory.or(convert(or.getLeft()), convert(or.getRight())); - } - - @Override - public FilterExpression visit(NoFilter noFilter) { - return noFilter; - } - - @Override - public FilterExpression visit(Not not) { - return invertor.invert(not.getFilterExpression()); - } -} - +package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; + +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; + +/** + * To remove not operators, convert all operators recursively. + * {@code Not(and(eq(), not(eq(y))) -> Or(notEq(), eq(y))} + * + * @author CGF + */ +public class ConvertExpressionVisitor implements FilterVisitor { + + private InvertExpressionVisitor invertor = new InvertExpressionVisitor(); + + public FilterExpression convert(FilterExpression exp) { + return exp.accept(this); + } + + @Override + public > FilterExpression visit(Eq eq) { + return eq; + } + + @Override + public > FilterExpression visit(NotEq notEq) { + return notEq; + } + + @Override + public > FilterExpression visit(LtEq ltEq) { + return ltEq; + } + + @Override + public > FilterExpression visit(GtEq gtEq) { + return gtEq; + } + + + @Override + public FilterExpression visit(And and) { + return FilterFactory.and((and.getLeft()), convert(and.getRight())); + } + + @Override + public FilterExpression visit(Or or) { + return FilterFactory.or(convert(or.getLeft()), convert(or.getRight())); + } + + @Override + public FilterExpression visit(NoFilter noFilter) { + return noFilter; + } + + @Override + public FilterExpression visit(Not not) { + return invertor.invert(not.getFilterExpression()); + } +} + diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/DigestVisitor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/DigestVisitor.java index c3da7e76..a198ff72 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/DigestVisitor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/DigestVisitor.java @@ -1,131 +1,131 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; - -import cn.edu.tsinghua.tsfile.common.exception.FilterDataTypeException; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.utils.DigestForFilter; -import 
cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * To judge whether a series, page could be skipped when reading process - * Implemented using visitor pattern. - * - * @author CGF - */ -public class DigestVisitor implements FilterVisitor { - private static final Logger LOG = LoggerFactory.getLogger(DigestVisitor.class); - - private DigestForFilter digest; - - public Boolean satisfy(DigestForFilter digest, SingleSeriesFilterExpression expression) { - if (expression == null) - return true; - - this.digest = digest; - return expression.accept(this); - } - - @SuppressWarnings("unchecked") - @Override - public > Boolean visit(Eq eq) { - if (!digest.getType().equals(eq.getFilterSeries().getSeriesDataType())) { - LOG.error("Generic Not Consistent! {} does't match {}", digest.getTypeClass(), - eq.getFilterSeries().getSeriesDataType()); - throw new FilterDataTypeException("Generic Not Consistent! " + digest.getTypeClass() + " does't match " + - eq.getFilterSeries().getSeriesDataType()); - } - try { - return eq.getValue().compareTo((T) digest.getMinValue()) >= 0 - && eq.getValue().compareTo((T) digest.getMaxValue()) <= 0; - } catch (NullPointerException e) { - LOG.error("The value of SingleSensorFilter {} is null", eq); - return false; - } - } - - @SuppressWarnings("unchecked") - @Override - public > Boolean visit(NotEq notEq) { - if (!digest.getType().equals(notEq.getFilterSeries().getSeriesDataType())) { - LOG.error("Generic Not Consistent! {} does't match {}", digest.getTypeClass(), - notEq.getFilterSeries().getSeriesDataType()); - throw new FilterDataTypeException("Generic Not Consistent! " + digest.getTypeClass() + " does't match " + - notEq.getFilterSeries().getSeriesDataType()); - } - - try { - return notEq.getValue().compareTo((T) digest.getMinValue()) != 0 - && notEq.getValue().compareTo((T) digest.getMaxValue()) != 0; - } catch (NullPointerException e) { - LOG.error("The value of SingleSensorFilter {} is null", notEq); - return false; - } - } - - @SuppressWarnings("unchecked") - @Override - public > Boolean visit(LtEq ltEq) { - if (!digest.getType().equals(ltEq.getFilterSeries().getSeriesDataType())) { - LOG.error("Generic Not Consistent! {} does't match {}", digest.getTypeClass(), - ltEq.getFilterSeries().getSeriesDataType()); - throw new FilterDataTypeException("Generic Not Consistent! " + digest.getTypeClass() + " does't match " + - ltEq.getFilterSeries().getSeriesDataType()); - } - - try { - if (ltEq.getIfEq()) { - return ltEq.getValue().compareTo((T) digest.getMinValue()) >= 0; - } else { - return ltEq.getValue().compareTo((T) digest.getMinValue()) > 0; - } - } catch (NullPointerException e) { - LOG.error("The value of SingleSensorFilter {} is null", ltEq); - return false; - } - - } - - @SuppressWarnings("unchecked") - @Override - public > Boolean visit(GtEq gtEq) { - if (!digest.getType().equals(gtEq.getFilterSeries().getSeriesDataType())) { - LOG.error("Generic Not Consistent! {} does't match {}", digest.getTypeClass(), - gtEq.getFilterSeries().getSeriesDataType()); - throw new FilterDataTypeException("Generic Not Consistent! 
" + digest.getTypeClass() + " does't match " + - gtEq.getFilterSeries().getSeriesDataType()); - } - - try { - if (gtEq.getIfEq()) { - return gtEq.getValue().compareTo((T) digest.getMaxValue()) <= 0; - } else { - return gtEq.getValue().compareTo((T) digest.getMaxValue()) < 0; - } - } catch (NullPointerException e) { - LOG.error("The value of SingleSensorFilter {} is null", gtEq); - return false; - } - } - - @Override - public Boolean visit(Not not) { - return !satisfy(digest, not.getFilterExpression()); - } - - @Override - public Boolean visit(And and) { - return satisfy(digest, and.getLeft()) && satisfy(digest, and.getRight()); - } - - @Override - public Boolean visit(Or or) { - return satisfy(digest, or.getLeft()) || satisfy(digest, or.getRight()); - } - - @Override - public Boolean visit(NoFilter noFilter) { - return true; - } - -} +package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; + +import cn.edu.tsinghua.tsfile.common.exception.FilterDataTypeException; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.DigestForFilter; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * To judge whether a series, page could be skipped when reading process Implemented using visitor + * pattern. + * + * @author CGF + */ +public class DigestVisitor implements FilterVisitor { + private static final Logger LOG = LoggerFactory.getLogger(DigestVisitor.class); + + private DigestForFilter digest; + + public Boolean satisfy(DigestForFilter digest, SingleSeriesFilterExpression expression) { + if (expression == null) + return true; + + this.digest = digest; + return expression.accept(this); + } + + @SuppressWarnings("unchecked") + @Override + public > Boolean visit(Eq eq) { + if (!digest.getType().equals(eq.getFilterSeries().getSeriesDataType())) { + LOG.error("Generic Not Consistent! {} does't match {}", digest.getTypeClass(), + eq.getFilterSeries().getSeriesDataType()); + throw new FilterDataTypeException("Generic Not Consistent! " + digest.getTypeClass() + + " does't match " + eq.getFilterSeries().getSeriesDataType()); + } + try { + return eq.getValue().compareTo((T) digest.getMinValue()) >= 0 + && eq.getValue().compareTo((T) digest.getMaxValue()) <= 0; + } catch (NullPointerException e) { + LOG.error("The value of SingleSensorFilter {} is null", eq); + return false; + } + } + + @SuppressWarnings("unchecked") + @Override + public > Boolean visit(NotEq notEq) { + if (!digest.getType().equals(notEq.getFilterSeries().getSeriesDataType())) { + LOG.error("Generic Not Consistent! {} does't match {}", digest.getTypeClass(), + notEq.getFilterSeries().getSeriesDataType()); + throw new FilterDataTypeException("Generic Not Consistent! " + digest.getTypeClass() + + " does't match " + notEq.getFilterSeries().getSeriesDataType()); + } + + try { + return notEq.getValue().compareTo((T) digest.getMinValue()) != 0 + && notEq.getValue().compareTo((T) digest.getMaxValue()) != 0; + } catch (NullPointerException e) { + LOG.error("The value of SingleSensorFilter {} is null", notEq); + return false; + } + } + + @SuppressWarnings("unchecked") + @Override + public > Boolean visit(LtEq ltEq) { + if (!digest.getType().equals(ltEq.getFilterSeries().getSeriesDataType())) { + LOG.error("Generic Not Consistent! 
{} does't match {}", digest.getTypeClass(), + ltEq.getFilterSeries().getSeriesDataType()); + throw new FilterDataTypeException("Generic Not Consistent! " + digest.getTypeClass() + + " does't match " + ltEq.getFilterSeries().getSeriesDataType()); + } + + try { + if (ltEq.getIfEq()) { + return ltEq.getValue().compareTo((T) digest.getMinValue()) >= 0; + } else { + return ltEq.getValue().compareTo((T) digest.getMinValue()) > 0; + } + } catch (NullPointerException e) { + LOG.error("The value of SingleSensorFilter {} is null", ltEq); + return false; + } + + } + + @SuppressWarnings("unchecked") + @Override + public > Boolean visit(GtEq gtEq) { + if (!digest.getType().equals(gtEq.getFilterSeries().getSeriesDataType())) { + LOG.error("Generic Not Consistent! {} does't match {}", digest.getTypeClass(), + gtEq.getFilterSeries().getSeriesDataType()); + throw new FilterDataTypeException("Generic Not Consistent! " + digest.getTypeClass() + + " does't match " + gtEq.getFilterSeries().getSeriesDataType()); + } + + try { + if (gtEq.getIfEq()) { + return gtEq.getValue().compareTo((T) digest.getMaxValue()) <= 0; + } else { + return gtEq.getValue().compareTo((T) digest.getMaxValue()) < 0; + } + } catch (NullPointerException e) { + LOG.error("The value of SingleSensorFilter {} is null", gtEq); + return false; + } + } + + @Override + public Boolean visit(Not not) { + return !satisfy(digest, not.getFilterExpression()); + } + + @Override + public Boolean visit(And and) { + return satisfy(digest, and.getLeft()) && satisfy(digest, and.getRight()); + } + + @Override + public Boolean visit(Or or) { + return satisfy(digest, or.getLeft()) || satisfy(digest, or.getRight()); + } + + @Override + public Boolean visit(NoFilter noFilter) { + return true; + } + +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/FilterVisitor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/FilterVisitor.java index f368b64c..7e8eaa0e 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/FilterVisitor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/FilterVisitor.java @@ -1,31 +1,30 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; - -/** - * FilterVisitor is implemented by visitor pattern. - * Implemented using visitor pattern. - * - * A FilterVistor must visit all these methods below, per visitor design pattern. - * And a FilterExpression just need implements an accept() method. - * - * @author CGF - */ -public interface FilterVisitor { - - > R visit(Eq eq); - - > R visit(NotEq notEq); - - > R visit(LtEq ltEq); - - > R visit(GtEq gtEq); - - R visit(Not not); - - R visit(And and); - - R visit(Or or); - - R visit(NoFilter noFilter); -} +package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; + +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; + +/** + * FilterVisitor is implemented by visitor pattern. Implemented using visitor pattern. + * + * A FilterVistor must visit all these methods below, per visitor design pattern. And a + * FilterExpression just need implements an accept() method. 
+ * + * @author CGF + */ +public interface FilterVisitor { + + > R visit(Eq eq); + + > R visit(NotEq notEq); + + > R visit(LtEq ltEq); + + > R visit(GtEq gtEq); + + R visit(Not not); + + R visit(And and); + + R visit(Or or); + + R visit(NoFilter noFilter); +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/IntervalTimeVisitor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/IntervalTimeVisitor.java index 81c12b93..562f0aa5 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/IntervalTimeVisitor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/IntervalTimeVisitor.java @@ -1,78 +1,78 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; - -/** - * To judge whether an overflow time interval satisfy the filter. - * Implemented using visitor pattern. - * - * @author CGF - */ -public class IntervalTimeVisitor implements FilterVisitor { - - private Long startTime, endTime; - - public boolean satisfy(SingleSeriesFilterExpression timeFilter, Long s, Long e) { - if (timeFilter == null) { - return true; - } - - this.startTime = s; - this.endTime = e; - return timeFilter.accept(this); - } - - @Override - public > Boolean visit(Eq eq) { - return (Long) eq.getValue() >= startTime && (Long) eq.getValue() <= endTime; - - } - - @Override - public > Boolean visit(NotEq notEq) { - if (startTime.equals(endTime) && (notEq.getValue()).equals(startTime)) { - return false; - } - return true; - } - - @Override - public > Boolean visit(LtEq ltEq) { - if (ltEq.getIfEq()) { - return (Long) ltEq.getValue() >= startTime; - } else { - return (Long) ltEq.getValue() > startTime; - } - } - - @Override - public > Boolean visit(GtEq gtEq) { - if (gtEq.getIfEq()) { - return (Long) gtEq.getValue() <= endTime; - } else { - return (Long) gtEq.getValue() < endTime; - } - } - - @Override - public Boolean visit(Not not) { - return !satisfy(not.getFilterExpression(), startTime, endTime); - } - - @Override - public Boolean visit(And and) { - return satisfy(and.getLeft(), startTime, endTime) && satisfy(and.getRight(), startTime, endTime); - } - - @Override - public Boolean visit(Or or) { - return satisfy(or.getLeft(), startTime, endTime) || satisfy(or.getRight(), startTime, endTime); - } - - @Override - public Boolean visit(NoFilter noFilter) { - return true; - } - -} +package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; + +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; + +/** + * To judge whether an overflow time interval satisfy the filter. Implemented using visitor pattern. 
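+ *
+ * Illustrative example (values chosen only for this note): with startTime = 5 and
+ * endTime = 20, a GtEq filter on time 10 is satisfied because 10 does not exceed endTime,
+ * while an Eq filter on time 30 is not, since 30 lies outside the interval [5, 20].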
+ * + * @author CGF + */ +public class IntervalTimeVisitor implements FilterVisitor { + + private Long startTime, endTime; + + public boolean satisfy(SingleSeriesFilterExpression timeFilter, Long s, Long e) { + if (timeFilter == null) { + return true; + } + + this.startTime = s; + this.endTime = e; + return timeFilter.accept(this); + } + + @Override + public > Boolean visit(Eq eq) { + return (Long) eq.getValue() >= startTime && (Long) eq.getValue() <= endTime; + + } + + @Override + public > Boolean visit(NotEq notEq) { + if (startTime.equals(endTime) && (notEq.getValue()).equals(startTime)) { + return false; + } + return true; + } + + @Override + public > Boolean visit(LtEq ltEq) { + if (ltEq.getIfEq()) { + return (Long) ltEq.getValue() >= startTime; + } else { + return (Long) ltEq.getValue() > startTime; + } + } + + @Override + public > Boolean visit(GtEq gtEq) { + if (gtEq.getIfEq()) { + return (Long) gtEq.getValue() <= endTime; + } else { + return (Long) gtEq.getValue() < endTime; + } + } + + @Override + public Boolean visit(Not not) { + return !satisfy(not.getFilterExpression(), startTime, endTime); + } + + @Override + public Boolean visit(And and) { + return satisfy(and.getLeft(), startTime, endTime) + && satisfy(and.getRight(), startTime, endTime); + } + + @Override + public Boolean visit(Or or) { + return satisfy(or.getLeft(), startTime, endTime) || satisfy(or.getRight(), startTime, endTime); + } + + @Override + public Boolean visit(NoFilter noFilter) { + return true; + } + +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/InvertExpressionVisitor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/InvertExpressionVisitor.java index 015d3868..700cbc71 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/InvertExpressionVisitor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/InvertExpressionVisitor.java @@ -1,69 +1,68 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; - -/** - * Invert(FilterExpression) = Not(FilterExpression) - * Implemented using visitor pattern. 
- * - * @author CGF - */ -public class InvertExpressionVisitor implements FilterVisitor { - - // to invert the expression recursively - public FilterExpression invert(FilterExpression fe) { - return fe.accept(this); - } - - @Override - public > FilterExpression visit(Eq eq) { - return new NotEq(eq.getFilterSeries(), eq.getValue()); - } - - @Override - public > FilterExpression visit(NotEq notEq) { - return new Eq(notEq.getFilterSeries(), notEq.getValue()); - } - - @Override - public > FilterExpression visit(LtEq ltEq) { - if (ltEq.getIfEq()) { - return FilterFactory.gtEq(ltEq.getFilterSeries(), ltEq.getValue(), false); - } - - return FilterFactory.gtEq(ltEq.getFilterSeries(), ltEq.getValue(), true); - } - - @Override - public > FilterExpression visit(GtEq gtEq) { - if (gtEq.getIfEq()) { - return FilterFactory.ltEq(gtEq.getFilterSeries(), gtEq.getValue(), false); - } - - return FilterFactory.ltEq(gtEq.getFilterSeries(), gtEq.getValue(), true); - } - - @Override - public FilterExpression visit(And and) { - return FilterFactory.or(invert(and.getLeft()), invert(and.getRight())); - } - - @Override - public FilterExpression visit(Or or) { - return FilterFactory.and(invert(or.getLeft()), invert(or.getRight())); - } - - @Override - public FilterExpression visit(NoFilter noFilter) { - return null; - } - - @Override - public SingleSeriesFilterExpression visit(Not not) { - return not.getFilterExpression(); - } - -} +package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; + +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; + +/** + * Invert(FilterExpression) = Not(FilterExpression) Implemented using visitor pattern. 
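+ *
+ * Illustrative inversions, mirroring the visit methods below: Eq(x) becomes NotEq(x),
+ * LtEq(x) with ifEq = true becomes GtEq(x) with ifEq = false, and And/Or are swapped
+ * per De Morgan's laws while each operand is inverted recursively.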
+ * + * @author CGF + */ +public class InvertExpressionVisitor implements FilterVisitor { + + // to invert the expression recursively + public FilterExpression invert(FilterExpression fe) { + return fe.accept(this); + } + + @Override + public > FilterExpression visit(Eq eq) { + return new NotEq(eq.getFilterSeries(), eq.getValue()); + } + + @Override + public > FilterExpression visit(NotEq notEq) { + return new Eq(notEq.getFilterSeries(), notEq.getValue()); + } + + @Override + public > FilterExpression visit(LtEq ltEq) { + if (ltEq.getIfEq()) { + return FilterFactory.gtEq(ltEq.getFilterSeries(), ltEq.getValue(), false); + } + + return FilterFactory.gtEq(ltEq.getFilterSeries(), ltEq.getValue(), true); + } + + @Override + public > FilterExpression visit(GtEq gtEq) { + if (gtEq.getIfEq()) { + return FilterFactory.ltEq(gtEq.getFilterSeries(), gtEq.getValue(), false); + } + + return FilterFactory.ltEq(gtEq.getFilterSeries(), gtEq.getValue(), true); + } + + @Override + public FilterExpression visit(And and) { + return FilterFactory.or(invert(and.getLeft()), invert(and.getRight())); + } + + @Override + public FilterExpression visit(Or or) { + return FilterFactory.and(invert(or.getLeft()), invert(or.getRight())); + } + + @Override + public FilterExpression visit(NoFilter noFilter) { + return null; + } + + @Override + public SingleSeriesFilterExpression visit(Not not) { + return not.getFilterExpression(); + } + +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/SingleValueVisitor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/SingleValueVisitor.java index 6f8399fa..0544ef22 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/SingleValueVisitor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/SingleValueVisitor.java @@ -1,174 +1,172 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; -import cn.edu.tsinghua.tsfile.timeseries.filter.utils.*; -import cn.edu.tsinghua.tsfile.timeseries.filter.verifier.FilterVerifier; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * To judge whether a single value satisfy the filter. - * Implemented per visitor pattern. - * - * @param data type for filter - * @author CGF - */ -public class SingleValueVisitor> implements FilterVisitor { - - private static final Logger LOG = LoggerFactory.getLogger(SingleValueVisitor.class); - private V value; - private FilterVerifier verifier; - private SingleSeriesFilterExpression singleSeriesFilter; - private Interval interval; - - public SingleValueVisitor() { - } - - /** - * This method is only used for INT32,INT64,FLOAT,DOUBLE data type. - * - * @param filter - */ - public SingleValueVisitor(SingleSeriesFilterExpression filter) { - verifier = FilterVerifier.create(filter.getFilterSeries().getSeriesDataType()); - this.singleSeriesFilter = filter; - interval = verifier.getInterval(singleSeriesFilter); - } - - /** - * This method exits a problem, the data type of value must accord with filter. 
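+ *
+ * Added note: the cast of value to V below is unchecked, so a value whose runtime type
+ * does not match the filter's generic type is only caught later, when compareTo is
+ * invoked, or may simply make an equality filter evaluate to false.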
- * - * @param value value to filter - * @param filter filter - * @return is satisfied - */ - public Boolean satisfyObject(Object value, SingleSeriesFilterExpression filter) { - if (filter == null) - return true; - - // The value type and filter type may not be consistent - return this.satisfy((V) value, filter); - } - - private Boolean satisfy(V value, SingleSeriesFilterExpression filter) { - this.value = value; - return filter.accept(this); - } - - /** - * optimization of filter, filter is value interval - * - * @param value value to filter - * @return is satisfied - */ - public boolean verify(int value) { - IntInterval val = (IntInterval) interval; - for (int i = 0; i < val.count; i += 2) { - if (val.v[i] < value && value < val.v[i + 1]) - return true; - if (val.v[i] == value && val.flag[i]) - return true; - if (val.v[i + 1] == value && val.flag[i + 1]) - return true; - } - return false; - } - - public boolean verify(long value) { - LongInterval val = (LongInterval) interval; - for (int i = 0; i < val.count; i += 2) { - if (val.v[i] < value && value < val.v[i + 1]) - return true; - if (val.v[i] == value && val.flag[i]) - return true; - if (val.v[i + 1] == value && val.flag[i + 1]) - return true; - } - return false; - } - - public boolean verify(float value) { - FloatInterval val = (FloatInterval) interval; - for (int i = 0; i < val.count; i += 2) { - if (val.v[i] < value && value < val.v[i + 1]) - return true; - if (val.v[i] == value && val.flag[i]) - return true; - if (val.v[i + 1] == value && val.flag[i + 1]) - return true; - } - return false; - } - - public boolean verify(double value) { - DoubleInterval val = (DoubleInterval) interval; - for (int i = 0; i < val.count; i += 2) { - if (val.v[i] < value && value < val.v[i + 1]) - return true; - if (val.v[i] == value && val.flag[i]) - return true; - if (val.v[i + 1] == value && val.flag[i + 1]) - return true; - } - return false; - } - - @Override - public > Boolean visit(Eq eq) { - if (eq.getValue().equals(value)) - return true; - return false; - } - - @Override - public > Boolean visit(NotEq notEq) { - if (!notEq.getValue().equals(value)) - return true; - return false; - } - - @Override - public > Boolean visit(LtEq ltEq) { - if (ltEq.getIfEq() && ltEq.getValue().compareTo((T) value) >= 0) - return true; - if (!ltEq.getIfEq() && ltEq.getValue().compareTo((T) value) > 0) - return true; - return false; - } - - @Override - public > Boolean visit(GtEq gtEq) { - if (gtEq.getIfEq() && gtEq.getValue().compareTo((T) value) <= 0) - return true; - if (!gtEq.getIfEq() && gtEq.getValue().compareTo((T) value) < 0) - return true; - return false; - } - - @Override - public Boolean visit(Not not) { - if (satisfy(value, not.getFilterExpression())) - return false; - return true; - } - - @Override - public Boolean visit(And and) { - return satisfy(value, and.getLeft()) && satisfy(value, and.getRight()); - } - - @Override - public Boolean visit(Or or) { - return satisfy(value, or.getLeft()) || satisfy(value, or.getRight()); - } - - @Override - public Boolean visit(NoFilter noFilter) { - return true; - } - - public Interval getInterval() { - return this.interval; - } -} +package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; + +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.*; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.*; +import cn.edu.tsinghua.tsfile.timeseries.filter.verifier.FilterVerifier; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; + +/** + * To judge whether a single value satisfy the filter. Implemented per visitor pattern. + * + * @param data type for filter + * @author CGF + */ +public class SingleValueVisitor> implements FilterVisitor { + + private static final Logger LOG = LoggerFactory.getLogger(SingleValueVisitor.class); + private V value; + private FilterVerifier verifier; + private SingleSeriesFilterExpression singleSeriesFilter; + private Interval interval; + + public SingleValueVisitor() {} + + /** + * This method is only used for INT32,INT64,FLOAT,DOUBLE data type. + * + * @param filter + */ + public SingleValueVisitor(SingleSeriesFilterExpression filter) { + verifier = FilterVerifier.create(filter.getFilterSeries().getSeriesDataType()); + this.singleSeriesFilter = filter; + interval = verifier.getInterval(singleSeriesFilter); + } + + /** + * This method exits a problem, the data type of value must accord with filter. + * + * @param value value to filter + * @param filter filter + * @return is satisfied + */ + public Boolean satisfyObject(Object value, SingleSeriesFilterExpression filter) { + if (filter == null) + return true; + + // The value type and filter type may not be consistent + return this.satisfy((V) value, filter); + } + + private Boolean satisfy(V value, SingleSeriesFilterExpression filter) { + this.value = value; + return filter.accept(this); + } + + /** + * optimization of filter, filter is value interval + * + * @param value value to filter + * @return is satisfied + */ + public boolean verify(int value) { + IntInterval val = (IntInterval) interval; + for (int i = 0; i < val.count; i += 2) { + if (val.v[i] < value && value < val.v[i + 1]) + return true; + if (val.v[i] == value && val.flag[i]) + return true; + if (val.v[i + 1] == value && val.flag[i + 1]) + return true; + } + return false; + } + + public boolean verify(long value) { + LongInterval val = (LongInterval) interval; + for (int i = 0; i < val.count; i += 2) { + if (val.v[i] < value && value < val.v[i + 1]) + return true; + if (val.v[i] == value && val.flag[i]) + return true; + if (val.v[i + 1] == value && val.flag[i + 1]) + return true; + } + return false; + } + + public boolean verify(float value) { + FloatInterval val = (FloatInterval) interval; + for (int i = 0; i < val.count; i += 2) { + if (val.v[i] < value && value < val.v[i + 1]) + return true; + if (val.v[i] == value && val.flag[i]) + return true; + if (val.v[i + 1] == value && val.flag[i + 1]) + return true; + } + return false; + } + + public boolean verify(double value) { + DoubleInterval val = (DoubleInterval) interval; + for (int i = 0; i < val.count; i += 2) { + if (val.v[i] < value && value < val.v[i + 1]) + return true; + if (val.v[i] == value && val.flag[i]) + return true; + if (val.v[i + 1] == value && val.flag[i + 1]) + return true; + } + return false; + } + + @Override + public > Boolean visit(Eq eq) { + if (eq.getValue().equals(value)) + return true; + return false; + } + + @Override + public > Boolean visit(NotEq notEq) { + if (!notEq.getValue().equals(value)) + return true; + return false; + } + + @Override + public > Boolean visit(LtEq ltEq) { + if (ltEq.getIfEq() && ltEq.getValue().compareTo((T) value) >= 0) + return true; + if (!ltEq.getIfEq() && ltEq.getValue().compareTo((T) value) > 0) + return true; + return false; + } + + @Override + public > Boolean visit(GtEq gtEq) { + if (gtEq.getIfEq() && gtEq.getValue().compareTo((T) value) <= 0) + return true; + if (!gtEq.getIfEq() && gtEq.getValue().compareTo((T) value) < 0) + return 
true; + return false; + } + + @Override + public Boolean visit(Not not) { + if (satisfy(value, not.getFilterExpression())) + return false; + return true; + } + + @Override + public Boolean visit(And and) { + return satisfy(value, and.getLeft()) && satisfy(value, and.getRight()); + } + + @Override + public Boolean visit(Or or) { + return satisfy(value, or.getLeft()) || satisfy(value, or.getRight()); + } + + @Override + public Boolean visit(NoFilter noFilter) { + return true; + } + + public Interval getInterval() { + return this.interval; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/SingleValueVisitorFactory.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/SingleValueVisitorFactory.java index 6f6d4893..3f6cc255 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/SingleValueVisitorFactory.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filter/visitorImpl/SingleValueVisitorFactory.java @@ -1,50 +1,51 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; - -import cn.edu.tsinghua.tsfile.common.exception.UnSupportedDataTypeException; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * To generate a SingleValueVisitor using TSDataType. - * - * @author CGF - */ -public class SingleValueVisitorFactory { - - static final Logger LOG = LoggerFactory.getLogger(SingleValueVisitorFactory.class); - - private static final SingleValueVisitor intVisitor = new SingleValueVisitor(); - private static final SingleValueVisitor longVisitor = new SingleValueVisitor(); - private static final SingleValueVisitor floatVisitor = new SingleValueVisitor(); - private static final SingleValueVisitor doubleVisitor = new SingleValueVisitor(); - private static final SingleValueVisitor booleanVisitor = new SingleValueVisitor(); - private static final SingleValueVisitor stringVisitor = new SingleValueVisitor(); - - /** - * get SingleValueVisitor using TSDataType - * - * @param type data type of TsFile - * @return single value visitor - */ - public static SingleValueVisitor getSingleValueVisitor(TSDataType type) { - switch (type) { - case INT64: - return longVisitor; - case INT32: - return intVisitor; - case FLOAT: - return floatVisitor; - case DOUBLE: - return doubleVisitor; - case BOOLEAN: - return booleanVisitor; - case TEXT: - return stringVisitor; - default: - LOG.error("Unsupported tsfile data type."); - throw new UnSupportedDataTypeException("Unsupported tsfile data type."); - } - } -} - +package cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl; + +import cn.edu.tsinghua.tsfile.common.exception.UnSupportedDataTypeException; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * To generate a SingleValueVisitor using TSDataType. 
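+ *
+ * A hedged usage sketch (the filter object is assumed to come from elsewhere):
+ * SingleValueVisitorFactory.getSingleValueVisitor(TSDataType.INT64)
+ *     .satisfyObject(12L, someLongFilter)
+ * asks the shared long visitor whether the value 12 passes that single-series filter.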
+ * + * @author CGF + */ +public class SingleValueVisitorFactory { + + static final Logger LOG = LoggerFactory.getLogger(SingleValueVisitorFactory.class); + + private static final SingleValueVisitor intVisitor = new SingleValueVisitor(); + private static final SingleValueVisitor longVisitor = new SingleValueVisitor(); + private static final SingleValueVisitor floatVisitor = new SingleValueVisitor(); + private static final SingleValueVisitor doubleVisitor = new SingleValueVisitor(); + private static final SingleValueVisitor booleanVisitor = + new SingleValueVisitor(); + private static final SingleValueVisitor stringVisitor = new SingleValueVisitor(); + + /** + * get SingleValueVisitor using TSDataType + * + * @param type data type of TsFile + * @return single value visitor + */ + public static SingleValueVisitor getSingleValueVisitor(TSDataType type) { + switch (type) { + case INT64: + return longVisitor; + case INT32: + return intVisitor; + case FLOAT: + return floatVisitor; + case DOUBLE: + return doubleVisitor; + case BOOLEAN: + return booleanVisitor; + case TEXT: + return stringVisitor; + default: + LOG.error("Unsupported tsfile data type."); + throw new UnSupportedDataTypeException("Unsupported tsfile data type."); + } + } +} + diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/TimeFilter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/TimeFilter.java index c9eb2e11..82adb94c 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/TimeFilter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/TimeFilter.java @@ -9,84 +9,84 @@ */ public class TimeFilter { - public static class TimeEq extends Eq { - private TimeEq(Long value) { - super(value, FilterType.TIME_FILTER); - } + public static class TimeEq extends Eq { + private TimeEq(Long value) { + super(value, FilterType.TIME_FILTER); } + } - public static class TimeNotEq extends NotEq { - private TimeNotEq(Long value) { - super(value, FilterType.TIME_FILTER); - } + public static class TimeNotEq extends NotEq { + private TimeNotEq(Long value) { + super(value, FilterType.TIME_FILTER); } + } - public static class TimeGt extends Gt { - private TimeGt(Long value) { - super(value, FilterType.TIME_FILTER); - } + public static class TimeGt extends Gt { + private TimeGt(Long value) { + super(value, FilterType.TIME_FILTER); } + } - public static class TimeGtEq extends GtEq { - private TimeGtEq(Long value) { - super(value, FilterType.TIME_FILTER); - } + public static class TimeGtEq extends GtEq { + private TimeGtEq(Long value) { + super(value, FilterType.TIME_FILTER); } + } - public static class TimeLt extends Lt { - private TimeLt(Long value) { - super(value, FilterType.TIME_FILTER); - } + public static class TimeLt extends Lt { + private TimeLt(Long value) { + super(value, FilterType.TIME_FILTER); } + } - public static class TimeLtEq extends LtEq { - private TimeLtEq(Long value) { - super(value, FilterType.TIME_FILTER); - } + public static class TimeLtEq extends LtEq { + private TimeLtEq(Long value) { + super(value, FilterType.TIME_FILTER); } + } - public static class TimeNoRestriction extends NoRestriction { - public String toString() { - return FilterType.TIME_FILTER + super.toString(); - } + public static class TimeNoRestriction extends NoRestriction { + public String toString() { + return FilterType.TIME_FILTER + super.toString(); } + } - public static class TimeNot extends Not { - private TimeNot(Filter filter) { - super(filter); - } + public static class TimeNot extends Not { + 
private TimeNot(Filter filter) { + super(filter); } + } - public static TimeEq eq(Long value) { - return new TimeEq(value); - } + public static TimeEq eq(Long value) { + return new TimeEq(value); + } - public static TimeGt gt(Long value) { - return new TimeGt(value); - } + public static TimeGt gt(Long value) { + return new TimeGt(value); + } - public static TimeGtEq gtEq(Long value) { - return new TimeGtEq(value); - } + public static TimeGtEq gtEq(Long value) { + return new TimeGtEq(value); + } - public static TimeLt lt(Long value) { - return new TimeLt(value); - } + public static TimeLt lt(Long value) { + return new TimeLt(value); + } - public static TimeLtEq ltEq(Long value) { - return new TimeLtEq(value); - } + public static TimeLtEq ltEq(Long value) { + return new TimeLtEq(value); + } - public static TimeNoRestriction noRestriction() { - return new TimeNoRestriction(); - } + public static TimeNoRestriction noRestriction() { + return new TimeNoRestriction(); + } - public static TimeNot not(Filter filter) { - return new TimeNot(filter); - } + public static TimeNot not(Filter filter) { + return new TimeNot(filter); + } - public static TimeNotEq notEq(Long value) { - return new TimeNotEq(value); - } + public static TimeNotEq notEq(Long value) { + return new TimeNotEq(value); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/ValueFilter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/ValueFilter.java index 8b06de2b..1da5fc90 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/ValueFilter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/ValueFilter.java @@ -9,87 +9,87 @@ */ public class ValueFilter { - public static class ValueEq> extends Eq { - private ValueEq(T value) { - super(value, FilterType.VALUE_FILTER); - } + public static class ValueEq> extends Eq { + private ValueEq(T value) { + super(value, FilterType.VALUE_FILTER); } + } - public static class ValueGt> extends Gt { - private ValueGt(T value) { - super(value, FilterType.VALUE_FILTER); - } + public static class ValueGt> extends Gt { + private ValueGt(T value) { + super(value, FilterType.VALUE_FILTER); } + } - public static class ValueGtEq> extends GtEq { - private ValueGtEq(T value) { - super(value, FilterType.VALUE_FILTER); - } + public static class ValueGtEq> extends GtEq { + private ValueGtEq(T value) { + super(value, FilterType.VALUE_FILTER); } + } - public static class ValueLt> extends Lt { - private ValueLt(T value) { - super(value, FilterType.VALUE_FILTER); - } + public static class ValueLt> extends Lt { + private ValueLt(T value) { + super(value, FilterType.VALUE_FILTER); } + } - public static class ValueLtEq> extends LtEq { - private ValueLtEq(T value) { - super(value, FilterType.VALUE_FILTER); - } + public static class ValueLtEq> extends LtEq { + private ValueLtEq(T value) { + super(value, FilterType.VALUE_FILTER); } + } - public static class ValueNoRestriction> extends NoRestriction { - public String toString() { - return FilterType.VALUE_FILTER + super.toString(); - } + public static class ValueNoRestriction> extends NoRestriction { + public String toString() { + return FilterType.VALUE_FILTER + super.toString(); } + } - public static class ValueNot> extends Not { - private ValueNot(Filter filter) { - super(filter); - } - - public String toString() { - return FilterType.VALUE_FILTER + super.toString(); - } + public static class ValueNot> extends Not { + private ValueNot(Filter filter) { + super(filter); } - public static class ValueNotEq> 
extends NotEq { - private ValueNotEq(T value) { - super(value, FilterType.VALUE_FILTER); - } + public String toString() { + return FilterType.VALUE_FILTER + super.toString(); } + } - public static > ValueEq eq(T value) { - return new ValueEq(value); + public static class ValueNotEq> extends NotEq { + private ValueNotEq(T value) { + super(value, FilterType.VALUE_FILTER); } + } - public static > ValueGt gt(T value) { - return new ValueGt(value); - } + public static > ValueEq eq(T value) { + return new ValueEq(value); + } - public static > ValueGtEq gtEq(T value) { - return new ValueGtEq(value); - } + public static > ValueGt gt(T value) { + return new ValueGt(value); + } - public static > ValueLt lt(T value) { - return new ValueLt(value); - } + public static > ValueGtEq gtEq(T value) { + return new ValueGtEq(value); + } - public static > ValueLtEq ltEq(T value) { - return new ValueLtEq(value); - } + public static > ValueLt lt(T value) { + return new ValueLt(value); + } - public static > ValueNoRestriction noRestriction() { - return new ValueNoRestriction(); - } + public static > ValueLtEq ltEq(T value) { + return new ValueLtEq(value); + } - public static > ValueNot not(Filter filter) { - return new ValueNot(filter); - } + public static > ValueNoRestriction noRestriction() { + return new ValueNoRestriction(); + } - public static > ValueNotEq notEq(T value) { - return new ValueNotEq(value); - } + public static > ValueNot not(Filter filter) { + return new ValueNot(filter); + } + + public static > ValueNotEq notEq(T value) { + return new ValueNotEq(value); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/basic/BinaryFilter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/basic/BinaryFilter.java index 569d573a..78ed7433 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/basic/BinaryFilter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/basic/BinaryFilter.java @@ -11,26 +11,26 @@ public abstract class BinaryFilter> implements Filter, Serializable { - private static final long serialVersionUID = 1039585564327602465L; + private static final long serialVersionUID = 1039585564327602465L; - protected final Filter left; - protected final Filter right; + protected final Filter left; + protected final Filter right; - protected BinaryFilter(Filter left, Filter right) { - this.left = left; - this.right = right; - } + protected BinaryFilter(Filter left, Filter right) { + this.left = left; + this.right = right; + } - public Filter getLeft() { - return left; - } + public Filter getLeft() { + return left; + } - public Filter getRight() { - return right; - } + public Filter getRight() { + return right; + } - @Override - public String toString() { - return "( " + left + "," + right + " )"; - } + @Override + public String toString() { + return "( " + left + "," + right + " )"; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/basic/Filter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/basic/Filter.java index ee47f5e3..bc33f905 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/basic/Filter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/basic/Filter.java @@ -6,17 +6,15 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TimeValuePair; /** - * Filter is a top level filter abstraction. - * Filter has two types of implementations : {@link BinaryFilter} and - * {@link UnaryFilter} - * Filter is a role of interviewee in visitor pattern. 
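+ *
+ * Added note: a concrete Filter implementation is expected to dispatch straight back to
+ * the visitor, e.g. {@code return visitor.visit(this);} inside accept, so new operations
+ * can be added as new visitor implementations without changing the filter classes.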
+ * Filter is a top level filter abstraction. Filter has two types of implementations : + * {@link BinaryFilter} and {@link UnaryFilter} Filter is a role of interviewee in visitor pattern. * * @author CGF */ public interface Filter> { - R accept(AbstractFilterVisitor visitor); + R accept(AbstractFilterVisitor visitor); - R accept(TimeValuePair timeValuePair, TimeValuePairFilterVisitor visitor); + R accept(TimeValuePair timeValuePair, TimeValuePairFilterVisitor visitor); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/basic/UnaryFilter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/basic/UnaryFilter.java index 39e9377b..da79027f 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/basic/UnaryFilter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/basic/UnaryFilter.java @@ -1,7 +1,6 @@ package cn.edu.tsinghua.tsfile.timeseries.filterV2.basic; import cn.edu.tsinghua.tsfile.timeseries.filterV2.factory.FilterType; - import java.io.Serializable; /** @@ -12,24 +11,24 @@ */ public abstract class UnaryFilter> implements Filter, Serializable { - private static final long serialVersionUID = 1431606024929453556L; - protected final T value; + private static final long serialVersionUID = 1431606024929453556L; + protected final T value; - protected FilterType filterType; + protected FilterType filterType; - protected UnaryFilter(T value, FilterType filterType) { - this.value = value; - this.filterType = filterType; - } + protected UnaryFilter(T value, FilterType filterType) { + this.value = value; + this.filterType = filterType; + } - public T getValue() { - return value; - } + public T getValue() { + return value; + } - public FilterType getFilterType() { - return filterType; - } + public FilterType getFilterType() { + return filterType; + } - @Override - public abstract String toString(); + @Override + public abstract String toString(); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/exception/QueryFilterOptimizationException.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/exception/QueryFilterOptimizationException.java index fd5042cf..d5614d9f 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/exception/QueryFilterOptimizationException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/exception/QueryFilterOptimizationException.java @@ -3,17 +3,17 @@ /** * Created by zhangjinrui on 2017/12/19. 
*/ -public class QueryFilterOptimizationException extends Exception{ +public class QueryFilterOptimizationException extends Exception { - public QueryFilterOptimizationException(String msg){ - super(msg); - } + public QueryFilterOptimizationException(String msg) { + super(msg); + } - public QueryFilterOptimizationException(Throwable cause){ - super(cause); - } + public QueryFilterOptimizationException(Throwable cause) { + super(cause); + } - public QueryFilterOptimizationException(String message, Throwable cause) { - super(message, cause); - } + public QueryFilterOptimizationException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/BinaryQueryFilter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/BinaryQueryFilter.java index 11d2a08a..a1deca0a 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/BinaryQueryFilter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/BinaryQueryFilter.java @@ -3,10 +3,10 @@ /** * @author Jinrui Zhang */ -public interface BinaryQueryFilter extends QueryFilter{ - QueryFilter getLeft(); +public interface BinaryQueryFilter extends QueryFilter { + QueryFilter getLeft(); - QueryFilter getRight(); + QueryFilter getRight(); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/QueryFilter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/QueryFilter.java index e1b36fb2..2779014c 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/QueryFilter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/QueryFilter.java @@ -4,5 +4,5 @@ * @author Jinrui Zhang */ public interface QueryFilter { - QueryFilterType getType(); + QueryFilterType getType(); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/QueryFilterType.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/QueryFilterType.java index ddfeffda..e9cce447 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/QueryFilterType.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/QueryFilterType.java @@ -4,5 +4,5 @@ * Created by zhangjinrui on 2017/12/19. 
*/ public enum QueryFilterType { - AND, OR, SERIES, GLOBAL_TIME + AND, OR, SERIES, GLOBAL_TIME } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/UnaryQueryFilter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/UnaryQueryFilter.java index e5c9ae04..7031ca3a 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/UnaryQueryFilter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/UnaryQueryFilter.java @@ -6,6 +6,6 @@ /** * @author Jinrui Zhang */ -public interface UnaryQueryFilter extends QueryFilter{ - Filter getFilter(); +public interface UnaryQueryFilter extends QueryFilter { + Filter getFilter(); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/impl/GlobalTimeFilter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/impl/GlobalTimeFilter.java index 8a0db2ff..0a74698c 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/impl/GlobalTimeFilter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/impl/GlobalTimeFilter.java @@ -8,26 +8,26 @@ * Created by zhangjinrui on 2017/12/15. */ public class GlobalTimeFilter implements UnaryQueryFilter { - private Filter filter; + private Filter filter; - public GlobalTimeFilter(Filter filter) { - this.filter = filter; - } + public GlobalTimeFilter(Filter filter) { + this.filter = filter; + } - public Filter getFilter() { - return filter; - } + public Filter getFilter() { + return filter; + } - public void setFilter(Filter filter) { - this.filter = filter; - } + public void setFilter(Filter filter) { + this.filter = filter; + } - @Override - public QueryFilterType getType() { - return QueryFilterType.GLOBAL_TIME; - } + @Override + public QueryFilterType getType() { + return QueryFilterType.GLOBAL_TIME; + } - public String toString() { - return "[" + this.filter.toString() + "]"; - } + public String toString() { + return "[" + this.filter.toString() + "]"; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/impl/QueryFilterFactory.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/impl/QueryFilterFactory.java index d05c58a8..4c3328f1 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/impl/QueryFilterFactory.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/impl/QueryFilterFactory.java @@ -9,66 +9,68 @@ */ public abstract class QueryFilterFactory implements BinaryQueryFilter { - protected static class And extends QueryFilterFactory { - public QueryFilter left; - public QueryFilter right; - - public And(QueryFilter left, QueryFilter right){ - this.left = left; - this.right = right; - } - - @Override - public QueryFilter getLeft() { - return left; - } - - @Override - public QueryFilter getRight() { - return right; - } - - @Override - public QueryFilterType getType() { - return QueryFilterType.AND; - } - - public String toString() { - return "[" + left + " && " + right + "]"; - } + protected static class And extends QueryFilterFactory { + public QueryFilter left; + public QueryFilter right; + + public And(QueryFilter left, QueryFilter right) { + this.left = left; + this.right = right; + } + + @Override + public QueryFilter getLeft() { + return left; + } + + @Override + public QueryFilter getRight() { + return right; + } + + @Override + public QueryFilterType getType() { + return QueryFilterType.AND; + } + + public String 
toString() { + return "[" + left + " && " + right + "]"; + } + } + + protected static class Or extends QueryFilterFactory { + public QueryFilter left; + public QueryFilter right; + + public Or(QueryFilter left, QueryFilter right) { + this.left = left; + this.right = right; } - protected static class Or extends QueryFilterFactory { - public QueryFilter left; - public QueryFilter right; - public Or(QueryFilter left, QueryFilter right) { - this.left = left; - this.right = right; - } - - @Override - public QueryFilter getLeft() { - return left; - } - @Override - public QueryFilter getRight() { - return right; - } - - public QueryFilterType getType() { - return QueryFilterType.OR; - } - - public String toString() { - return "[" + left + " || " + right + "]"; - } + @Override + public QueryFilter getLeft() { + return left; } - public static BinaryQueryFilter and(QueryFilter left, QueryFilter right){ - return new And(left, right); + @Override + public QueryFilter getRight() { + return right; } - public static BinaryQueryFilter or(QueryFilter left, QueryFilter right) { - return new Or(left, right); + public QueryFilterType getType() { + return QueryFilterType.OR; } + + public String toString() { + return "[" + left + " || " + right + "]"; + } + } + + public static BinaryQueryFilter and(QueryFilter left, QueryFilter right) { + return new And(left, right); + } + + public static BinaryQueryFilter or(QueryFilter left, QueryFilter right) { + return new Or(left, right); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/impl/SeriesFilter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/impl/SeriesFilter.java index 164a3d2d..6a097d87 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/impl/SeriesFilter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/impl/SeriesFilter.java @@ -12,38 +12,38 @@ * Created by zhangjinrui on 2017/12/15. 
*/ public class SeriesFilter> implements UnaryQueryFilter { - private TimeValuePairFilterVisitor timeValuePairFilterVisitor; - private Path seriesPath; - private Filter filter; - - public SeriesFilter(Path seriesDescriptor, Filter filter) { - this.seriesPath = seriesDescriptor; - this.filter = filter; - timeValuePairFilterVisitor = new TimeValuePairFilterVisitorImpl(); - } - - public boolean satisfy(TimeValuePair timeValuePair) { - return timeValuePairFilterVisitor.satisfy(timeValuePair, this.filter); - } - - @Override - public QueryFilterType getType() { - return QueryFilterType.SERIES; - } - - public Filter getFilter() { - return filter; - } - - public void setFilter(Filter filter) { - this.filter = filter; - } - - public String toString() { - return "[" + seriesPath + ":" + filter + "]"; - } - - public Path getSeriesPath() { - return this.seriesPath; - } + private TimeValuePairFilterVisitor timeValuePairFilterVisitor; + private Path seriesPath; + private Filter filter; + + public SeriesFilter(Path seriesDescriptor, Filter filter) { + this.seriesPath = seriesDescriptor; + this.filter = filter; + timeValuePairFilterVisitor = new TimeValuePairFilterVisitorImpl(); + } + + public boolean satisfy(TimeValuePair timeValuePair) { + return timeValuePairFilterVisitor.satisfy(timeValuePair, this.filter); + } + + @Override + public QueryFilterType getType() { + return QueryFilterType.SERIES; + } + + public Filter getFilter() { + return filter; + } + + public void setFilter(Filter filter) { + this.filter = filter; + } + + public String toString() { + return "[" + seriesPath + ":" + filter + "]"; + } + + public Path getSeriesPath() { + return this.seriesPath; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/util/QueryFilterOptimizer.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/util/QueryFilterOptimizer.java index 16574b43..9df6adba 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/util/QueryFilterOptimizer.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/util/QueryFilterOptimizer.java @@ -11,113 +11,129 @@ import cn.edu.tsinghua.tsfile.timeseries.filterV2.expression.impl.SeriesFilter; import cn.edu.tsinghua.tsfile.timeseries.filterV2.factory.FilterFactory; import cn.edu.tsinghua.tsfile.timeseries.read.support.Path; - import java.util.List; /** * Created by zhangjinrui on 2017/12/19. 
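Reviewer note (sketch, not part of this patch): the QueryFilterOptimizer whose reformatted body follows is only touched for whitespace, but it is worth recording what it does: it rewrites a filter tree so that GLOBAL_TIME nodes disappear. Under AND the time condition is pushed into each SeriesFilter; under OR it is expanded into a disjunction of SeriesFilters over every selected series. A minimal call sequence might look like the lines below; the explicit generic arguments, the Path(String) constructor and the listed imports are assumptions of mine, since the patch text abbreviates them.

// Hypothetical usage sketch. Assumes imports from
// cn.edu.tsinghua.tsfile.timeseries.filterV2.* , ...read.support.Path and java.util.*.
static QueryFilter buildAndOptimize() throws QueryFilterOptimizationException {
  List<Path> selectedSeries = Arrays.asList(new Path("d1.s1"), new Path("d1.s2"));

  // A value condition bound to one series plus a global time bound.
  QueryFilter valueOnSeries =
      new SeriesFilter<>(new Path("d1.s1"), new Gt<>(10L, FilterType.VALUE_FILTER));
  QueryFilter timeBound = new GlobalTimeFilter(new GtEq<>(1000L, FilterType.TIME_FILTER));

  QueryFilter expression = QueryFilterFactory.and(valueOnSeries, timeBound);
  // After conversion the GLOBAL_TIME node is gone: the time bound is ANDed into the SeriesFilter.
  QueryFilter optimized =
      QueryFilterOptimizer.getInstance().convertGlobalTimeFilter(expression, selectedSeries);
  QueryFilterPrinter.print(optimized);
  return optimized;
}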
*/ public class QueryFilterOptimizer { - - private static class QueryFilterOptimizerHelper { - private static final QueryFilterOptimizer INSTANCE = new QueryFilterOptimizer(); - } - private QueryFilterOptimizer() { + private static class QueryFilterOptimizerHelper { + private static final QueryFilterOptimizer INSTANCE = new QueryFilterOptimizer(); + } - } + private QueryFilterOptimizer() { - public QueryFilter convertGlobalTimeFilter(QueryFilter queryFilter, List selectedSeries) throws QueryFilterOptimizationException { - if (queryFilter instanceof UnaryQueryFilter) { - return queryFilter; - } else if (queryFilter instanceof BinaryQueryFilter) { - QueryFilterType relation = queryFilter.getType(); - QueryFilter left = ((BinaryQueryFilter) queryFilter).getLeft(); - QueryFilter right = ((BinaryQueryFilter) queryFilter).getRight(); - if (left.getType() == QueryFilterType.GLOBAL_TIME && right.getType() == QueryFilterType.GLOBAL_TIME) { - return combineTwoGlobalTimeFilter((GlobalTimeFilter) left, (GlobalTimeFilter) right, queryFilter.getType()); - } else if (left.getType() == QueryFilterType.GLOBAL_TIME && right.getType() != QueryFilterType.GLOBAL_TIME) { - return handleOneGlobalTimeFilter((GlobalTimeFilter) left, right, selectedSeries, relation); - } else if (left.getType() != QueryFilterType.GLOBAL_TIME && right.getType() == QueryFilterType.GLOBAL_TIME) { - return handleOneGlobalTimeFilter((GlobalTimeFilter) right, left, selectedSeries, relation); - } else if (left.getType() != QueryFilterType.GLOBAL_TIME && right.getType() != QueryFilterType.GLOBAL_TIME) { - QueryFilter regularLeft = convertGlobalTimeFilter(left, selectedSeries); - QueryFilter regularRight = convertGlobalTimeFilter(right, selectedSeries); - BinaryQueryFilter midRet = null; - if (relation == QueryFilterType.AND) { - midRet = QueryFilterFactory.and(regularLeft, regularRight); - } else if (relation == QueryFilterType.OR) { - midRet = QueryFilterFactory.or(regularLeft, regularRight); - } else { - throw new UnsupportedOperationException("unsupported queryFilter type: " + relation); - } - if (midRet.getLeft().getType() == QueryFilterType.GLOBAL_TIME || midRet.getRight().getType() == QueryFilterType.GLOBAL_TIME) { - return convertGlobalTimeFilter(midRet, selectedSeries); - } else { - return midRet; - } - - } else if (left.getType() == QueryFilterType.SERIES && right.getType() == QueryFilterType.SERIES) { - return queryFilter; - } - } - throw new UnsupportedOperationException("unknown queryFilter type: " + queryFilter.getClass().getName()); - } + } - private QueryFilter handleOneGlobalTimeFilter(GlobalTimeFilter globalTimeFilter, QueryFilter queryFilter - , List selectedSeries, QueryFilterType relation) throws QueryFilterOptimizationException { - QueryFilter regularRightQueryFilter = convertGlobalTimeFilter(queryFilter, selectedSeries); - if (regularRightQueryFilter instanceof GlobalTimeFilter) { - return combineTwoGlobalTimeFilter(globalTimeFilter, (GlobalTimeFilter) regularRightQueryFilter, relation); - } + public QueryFilter convertGlobalTimeFilter(QueryFilter queryFilter, List selectedSeries) + throws QueryFilterOptimizationException { + if (queryFilter instanceof UnaryQueryFilter) { + return queryFilter; + } else if (queryFilter instanceof BinaryQueryFilter) { + QueryFilterType relation = queryFilter.getType(); + QueryFilter left = ((BinaryQueryFilter) queryFilter).getLeft(); + QueryFilter right = ((BinaryQueryFilter) queryFilter).getRight(); + if (left.getType() == QueryFilterType.GLOBAL_TIME + && right.getType() == 
QueryFilterType.GLOBAL_TIME) { + return combineTwoGlobalTimeFilter((GlobalTimeFilter) left, (GlobalTimeFilter) right, + queryFilter.getType()); + } else if (left.getType() == QueryFilterType.GLOBAL_TIME + && right.getType() != QueryFilterType.GLOBAL_TIME) { + return handleOneGlobalTimeFilter((GlobalTimeFilter) left, right, selectedSeries, relation); + } else if (left.getType() != QueryFilterType.GLOBAL_TIME + && right.getType() == QueryFilterType.GLOBAL_TIME) { + return handleOneGlobalTimeFilter((GlobalTimeFilter) right, left, selectedSeries, relation); + } else if (left.getType() != QueryFilterType.GLOBAL_TIME + && right.getType() != QueryFilterType.GLOBAL_TIME) { + QueryFilter regularLeft = convertGlobalTimeFilter(left, selectedSeries); + QueryFilter regularRight = convertGlobalTimeFilter(right, selectedSeries); + BinaryQueryFilter midRet = null; if (relation == QueryFilterType.AND) { - addTimeFilterToQueryFilter((globalTimeFilter).getFilter(), regularRightQueryFilter); - return regularRightQueryFilter; + midRet = QueryFilterFactory.and(regularLeft, regularRight); } else if (relation == QueryFilterType.OR) { - return QueryFilterFactory.or(convertGlobalTimeFilterToQueryFilterBySeriesList(globalTimeFilter, selectedSeries), queryFilter); - } - throw new QueryFilterOptimizationException("unknown relation in queryFilter:" + relation); - } - - private QueryFilter convertGlobalTimeFilterToQueryFilterBySeriesList( - GlobalTimeFilter timeFilter, List selectedSeries) throws QueryFilterOptimizationException { - if (selectedSeries.size() == 0) { - throw new QueryFilterOptimizationException("size of selectSeries could not be 0"); + midRet = QueryFilterFactory.or(regularLeft, regularRight); + } else { + throw new UnsupportedOperationException("unsupported queryFilter type: " + relation); } - SeriesFilter firstSeriesFilter = new SeriesFilter(selectedSeries.get(0), timeFilter.getFilter()); - QueryFilter queryFilter = firstSeriesFilter; - for (int i = 1; i < selectedSeries.size(); i++) { - queryFilter = QueryFilterFactory.or(queryFilter, new SeriesFilter(selectedSeries.get(i), timeFilter.getFilter())); + if (midRet.getLeft().getType() == QueryFilterType.GLOBAL_TIME + || midRet.getRight().getType() == QueryFilterType.GLOBAL_TIME) { + return convertGlobalTimeFilter(midRet, selectedSeries); + } else { + return midRet; } + + } else if (left.getType() == QueryFilterType.SERIES + && right.getType() == QueryFilterType.SERIES) { return queryFilter; + } } + throw new UnsupportedOperationException( + "unknown queryFilter type: " + queryFilter.getClass().getName()); + } - private void addTimeFilterToQueryFilter(Filter timeFilter, QueryFilter queryFilter) { - if (queryFilter instanceof SeriesFilter) { - addTimeFilterToSeriesFilter(timeFilter, (SeriesFilter) queryFilter); - } else if (queryFilter instanceof QueryFilterFactory) { - addTimeFilterToQueryFilter(timeFilter, ((QueryFilterFactory) queryFilter).getLeft()); - addTimeFilterToQueryFilter(timeFilter, ((QueryFilterFactory) queryFilter).getRight()); - } else { - throw new UnsupportedOperationException("queryFilter should contains only SeriesFilter but other type is found:" - + queryFilter.getClass().getName()); - } + private QueryFilter handleOneGlobalTimeFilter(GlobalTimeFilter globalTimeFilter, + QueryFilter queryFilter, List selectedSeries, QueryFilterType relation) + throws QueryFilterOptimizationException { + QueryFilter regularRightQueryFilter = convertGlobalTimeFilter(queryFilter, selectedSeries); + if (regularRightQueryFilter instanceof GlobalTimeFilter) { 
+ return combineTwoGlobalTimeFilter(globalTimeFilter, + (GlobalTimeFilter) regularRightQueryFilter, relation); } + if (relation == QueryFilterType.AND) { + addTimeFilterToQueryFilter((globalTimeFilter).getFilter(), regularRightQueryFilter); + return regularRightQueryFilter; + } else if (relation == QueryFilterType.OR) { + return QueryFilterFactory.or( + convertGlobalTimeFilterToQueryFilterBySeriesList(globalTimeFilter, selectedSeries), + queryFilter); + } + throw new QueryFilterOptimizationException("unknown relation in queryFilter:" + relation); + } - private void addTimeFilterToSeriesFilter(Filter timeFilter, SeriesFilter seriesFilter) { - seriesFilter.setFilter(FilterFactory.and(seriesFilter.getFilter(), timeFilter)); + private QueryFilter convertGlobalTimeFilterToQueryFilterBySeriesList(GlobalTimeFilter timeFilter, + List selectedSeries) throws QueryFilterOptimizationException { + if (selectedSeries.size() == 0) { + throw new QueryFilterOptimizationException("size of selectSeries could not be 0"); + } + SeriesFilter firstSeriesFilter = + new SeriesFilter(selectedSeries.get(0), timeFilter.getFilter()); + QueryFilter queryFilter = firstSeriesFilter; + for (int i = 1; i < selectedSeries.size(); i++) { + queryFilter = QueryFilterFactory.or(queryFilter, + new SeriesFilter(selectedSeries.get(i), timeFilter.getFilter())); } + return queryFilter; + } - private GlobalTimeFilter combineTwoGlobalTimeFilter(GlobalTimeFilter left, GlobalTimeFilter right, QueryFilterType type) { - if (type == QueryFilterType.AND) { - return new GlobalTimeFilter(FilterFactory.and(left.getFilter(), right.getFilter())); - } else if (type == QueryFilterType.OR) { - return new GlobalTimeFilter(FilterFactory.or(left.getFilter(), right.getFilter())); - } - throw new UnsupportedOperationException("unrecognized QueryFilterOperatorType :" + type); + private void addTimeFilterToQueryFilter(Filter timeFilter, QueryFilter queryFilter) { + if (queryFilter instanceof SeriesFilter) { + addTimeFilterToSeriesFilter(timeFilter, (SeriesFilter) queryFilter); + } else if (queryFilter instanceof QueryFilterFactory) { + addTimeFilterToQueryFilter(timeFilter, ((QueryFilterFactory) queryFilter).getLeft()); + addTimeFilterToQueryFilter(timeFilter, ((QueryFilterFactory) queryFilter).getRight()); + } else { + throw new UnsupportedOperationException( + "queryFilter should contains only SeriesFilter but other type is found:" + + queryFilter.getClass().getName()); } + } - public static QueryFilterOptimizer getInstance() { - return QueryFilterOptimizerHelper.INSTANCE; + private void addTimeFilterToSeriesFilter(Filter timeFilter, SeriesFilter seriesFilter) { + seriesFilter.setFilter(FilterFactory.and(seriesFilter.getFilter(), timeFilter)); + } + + private GlobalTimeFilter combineTwoGlobalTimeFilter(GlobalTimeFilter left, GlobalTimeFilter right, + QueryFilterType type) { + if (type == QueryFilterType.AND) { + return new GlobalTimeFilter(FilterFactory.and(left.getFilter(), right.getFilter())); + } else if (type == QueryFilterType.OR) { + return new GlobalTimeFilter(FilterFactory.or(left.getFilter(), right.getFilter())); } + throw new UnsupportedOperationException("unrecognized QueryFilterOperatorType :" + type); + } + + public static QueryFilterOptimizer getInstance() { + return QueryFilterOptimizerHelper.INSTANCE; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/util/QueryFilterPrinter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/util/QueryFilterPrinter.java index fdba175e..5c25d4a5 
100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/util/QueryFilterPrinter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/expression/util/QueryFilterPrinter.java @@ -9,37 +9,37 @@ */ public class QueryFilterPrinter { - private static final int MAX_DEPTH = 100; - private static final char PREFIX_CHAR = '\t'; - private static final String[] PREFIX = new String[MAX_DEPTH]; + private static final int MAX_DEPTH = 100; + private static final char PREFIX_CHAR = '\t'; + private static final String[] PREFIX = new String[MAX_DEPTH]; - static { - StringBuilder stringBuilder = new StringBuilder(); - for (int i = 0; i < MAX_DEPTH; i++) { - PREFIX[i] = stringBuilder.toString(); - stringBuilder.append(PREFIX_CHAR); - } + static { + StringBuilder stringBuilder = new StringBuilder(); + for (int i = 0; i < MAX_DEPTH; i++) { + PREFIX[i] = stringBuilder.toString(); + stringBuilder.append(PREFIX_CHAR); } + } - public static void print(QueryFilter queryFilter) { - print(queryFilter, 0); - } + public static void print(QueryFilter queryFilter) { + print(queryFilter, 0); + } - private static void print(QueryFilter queryFilter, int level) { - if (queryFilter instanceof UnaryQueryFilter) { - System.out.println(getPrefix(level) + queryFilter); - } else { - System.out.println(getPrefix(level) + queryFilter.getType() + ":"); - print(((BinaryQueryFilter)queryFilter).getLeft(), level + 1); - print(((BinaryQueryFilter)queryFilter).getRight(), level + 1); - } + private static void print(QueryFilter queryFilter, int level) { + if (queryFilter instanceof UnaryQueryFilter) { + System.out.println(getPrefix(level) + queryFilter); + } else { + System.out.println(getPrefix(level) + queryFilter.getType() + ":"); + print(((BinaryQueryFilter) queryFilter).getLeft(), level + 1); + print(((BinaryQueryFilter) queryFilter).getRight(), level + 1); } + } - private static String getPrefix(int count) { - if (count < MAX_DEPTH) { - return PREFIX[count]; - } else { - return PREFIX[MAX_DEPTH - 1]; - } + private static String getPrefix(int count) { + if (count < MAX_DEPTH) { + return PREFIX[count]; + } else { + return PREFIX[MAX_DEPTH - 1]; } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/factory/FilterFactory.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/factory/FilterFactory.java index dc077d97..324ea1ad 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/factory/FilterFactory.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/factory/FilterFactory.java @@ -9,16 +9,16 @@ * Created by zhangjinrui on 2017/12/15. 
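Reviewer note (sketch, not part of this patch): FilterFactory, reformatted just below, is the single place where leaf filters are composed into And/Or/Not trees. For reference, a small composition together with the toString output the operator classes in this patch produce; the explicit generic arguments are an assumption of mine, since the patch text abbreviates them.

// Hypothetical composition sketch. Assumes imports of Filter, FilterFactory, FilterType
// and the operators GtEq/Lt from cn.edu.tsinghua.tsfile.timeseries.filterV2.*.
Filter<Long> lower = new GtEq<>(100L, FilterType.TIME_FILTER);  // toString(): "time >= 100"
Filter<Long> upper = new Lt<>(200L, FilterType.TIME_FILTER);    // toString(): "time < 200"

Filter<Long> inRange = FilterFactory.and(lower, upper);         // "(time >= 100 && time < 200)"
Filter<Long> outside = FilterFactory.not(inRange);              // "Not: (time >= 100 && time < 200)"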
*/ public class FilterFactory { - public static > Filter and(Filter left, Filter right){ - return new And(left, right); - } + public static > Filter and(Filter left, Filter right) { + return new And(left, right); + } - public static > Filter or(Filter left, Filter right){ - return new Or(left, right); - } + public static > Filter or(Filter left, Filter right) { + return new Or(left, right); + } - public static > Filter not(Filter filter) { - return new Not<>(filter); - } + public static > Filter not(Filter filter) { + return new Not<>(filter); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/factory/FilterType.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/factory/FilterType.java index 92d5ad15..9cc3e70e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/factory/FilterType.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/factory/FilterType.java @@ -4,15 +4,16 @@ * Created by zhangjinrui on 2017/12/15. */ public enum FilterType { - VALUE_FILTER("value"), TIME_FILTER("time"); + VALUE_FILTER("value"), TIME_FILTER("time"); - private String name; - FilterType(String name){ - this.name = name; - } + private String name; - public String toString(){ - return name; - } + FilterType(String name) { + this.name = name; + } + + public String toString() { + return name; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/And.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/And.java index 2e1a720f..c08fcd59 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/And.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/And.java @@ -13,24 +13,24 @@ */ public class And> extends BinaryFilter { - private static final long serialVersionUID = 6705254093824897938L; + private static final long serialVersionUID = 6705254093824897938L; - public And(Filter left, Filter right) { - super(left, right); - } + public And(Filter left, Filter right) { + super(left, right); + } - @Override - public R accept(AbstractFilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public R accept(AbstractFilterVisitor visitor) { + return visitor.visit(this); + } - @Override - public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { - return visitor.visit(value, this); - } + @Override + public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { + return visitor.visit(value, this); + } - @Override - public String toString() { - return "(" + left + " && " + right + ")"; - } + @Override + public String toString() { + return "(" + left + " && " + right + ")"; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Eq.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Eq.java index 9f407e23..c42759a1 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Eq.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Eq.java @@ -14,24 +14,24 @@ */ public class Eq> extends UnaryFilter { - private static final long serialVersionUID = -6668083116644568248L; + private static final long serialVersionUID = -6668083116644568248L; - public Eq(T value, FilterType filterType) { - super(value, filterType); - } + public Eq(T value, FilterType filterType) { + super(value, filterType); + } - @Override - public R accept(AbstractFilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public R 
accept(AbstractFilterVisitor visitor) { + return visitor.visit(this); + } - @Override - public String toString() { - return getFilterType() + " == " + value; - } + @Override + public String toString() { + return getFilterType() + " == " + value; + } - @Override - public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { - return visitor.visit(value, this); - } + @Override + public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { + return visitor.visit(value, this); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Gt.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Gt.java index 92cec972..a96e611e 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Gt.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Gt.java @@ -14,24 +14,24 @@ */ public class Gt> extends UnaryFilter { - private static final long serialVersionUID = -2088181659871608986L; + private static final long serialVersionUID = -2088181659871608986L; - public Gt(T value, FilterType filterType) { - super(value, filterType); - } + public Gt(T value, FilterType filterType) { + super(value, filterType); + } - @Override - public R accept(AbstractFilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public R accept(AbstractFilterVisitor visitor) { + return visitor.visit(this); + } - @Override - public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { - return visitor.visit(value, this); - } + @Override + public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { + return visitor.visit(value, this); + } - @Override - public String toString() { - return getFilterType() + " > " + value; - } + @Override + public String toString() { + return getFilterType() + " > " + value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/GtEq.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/GtEq.java index 2307b6a2..28909c5f 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/GtEq.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/GtEq.java @@ -14,24 +14,24 @@ */ public class GtEq> extends UnaryFilter { - private static final long serialVersionUID = -2088181659871608986L; + private static final long serialVersionUID = -2088181659871608986L; - public GtEq(T value, FilterType filterType) { - super(value, filterType); - } + public GtEq(T value, FilterType filterType) { + super(value, filterType); + } - @Override - public R accept(AbstractFilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public R accept(AbstractFilterVisitor visitor) { + return visitor.visit(this); + } - @Override - public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { - return visitor.visit(value, this); - } + @Override + public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { + return visitor.visit(value, this); + } - @Override - public String toString() { - return getFilterType() + " >= " + value; - } + @Override + public String toString() { + return getFilterType() + " >= " + value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Lt.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Lt.java index 6980a9de..02c77ae7 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Lt.java +++ 
b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Lt.java @@ -14,24 +14,24 @@ */ public class Lt> extends UnaryFilter { - private static final long serialVersionUID = -2088181659871608986L; + private static final long serialVersionUID = -2088181659871608986L; - public Lt(T value, FilterType filterType) { - super(value, filterType); - } + public Lt(T value, FilterType filterType) { + super(value, filterType); + } - @Override - public R accept(AbstractFilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public R accept(AbstractFilterVisitor visitor) { + return visitor.visit(this); + } - @Override - public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { - return visitor.visit(value, this); - } + @Override + public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { + return visitor.visit(value, this); + } - @Override - public String toString() { - return getFilterType() + " < " + value; - } + @Override + public String toString() { + return getFilterType() + " < " + value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/LtEq.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/LtEq.java index efa055a9..2086a802 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/LtEq.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/LtEq.java @@ -14,24 +14,24 @@ */ public class LtEq> extends UnaryFilter { - private static final long serialVersionUID = -2088181659871608986L; + private static final long serialVersionUID = -2088181659871608986L; - public LtEq(T value, FilterType filterType) { - super(value, filterType); - } + public LtEq(T value, FilterType filterType) { + super(value, filterType); + } - @Override - public R accept(AbstractFilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public R accept(AbstractFilterVisitor visitor) { + return visitor.visit(this); + } - @Override - public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { - return visitor.visit(value, this); - } + @Override + public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { + return visitor.visit(value, this); + } - @Override - public String toString() { - return getFilterType() + " <= " + value; - } + @Override + public String toString() { + return getFilterType() + " <= " + value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/NoRestriction.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/NoRestriction.java index b20a5115..5ef5d195 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/NoRestriction.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/NoRestriction.java @@ -10,19 +10,19 @@ * NoRestriction means that there is no filter. 
*/ public class NoRestriction> implements Filter { - private static final NoRestriction INSTANCE = new NoRestriction(); + private static final NoRestriction INSTANCE = new NoRestriction(); - public static final NoRestriction getInstance() { - return INSTANCE; - } + public static final NoRestriction getInstance() { + return INSTANCE; + } - @Override - public R accept(AbstractFilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public R accept(AbstractFilterVisitor visitor) { + return visitor.visit(this); + } - @Override - public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { - return visitor.visit(value, this); - } + @Override + public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { + return visitor.visit(value, this); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Not.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Not.java index dd05cee7..91991259 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Not.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Not.java @@ -4,7 +4,6 @@ import cn.edu.tsinghua.tsfile.timeseries.filterV2.visitor.AbstractFilterVisitor; import cn.edu.tsinghua.tsfile.timeseries.filterV2.visitor.TimeValuePairFilterVisitor; import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TimeValuePair; - import java.io.Serializable; /** @@ -14,30 +13,30 @@ */ public class Not> implements Filter, Serializable { - private static final long serialVersionUID = 584860326604020881L; - private Filter that; + private static final long serialVersionUID = 584860326604020881L; + private Filter that; - public Not(Filter that) { - this.that = that; - } + public Not(Filter that) { + this.that = that; + } - @Override - public R accept(AbstractFilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public R accept(AbstractFilterVisitor visitor) { + return visitor.visit(this); + } - @Override - public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { - return visitor.visit(value, this); - } + @Override + public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { + return visitor.visit(value, this); + } - public Filter getFilterExpression() { - return this.that; - } + public Filter getFilterExpression() { + return this.that; + } - @Override - public String toString() { - return "Not: " + that; - } + @Override + public String toString() { + return "Not: " + that; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/NotEq.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/NotEq.java index c5938c5f..ed108a2e 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/NotEq.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/NotEq.java @@ -15,24 +15,24 @@ */ public class NotEq> extends UnaryFilter { - private static final long serialVersionUID = 2574090797476500965L; - - public NotEq(T value, FilterType filterType) { - super(value, filterType); - } - - @Override - public R accept(AbstractFilterVisitor visitor) { - return visitor.visit(this); - } - - @Override - public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { - return visitor.visit(value, this); - } - - @Override - public String toString() { - return getFilterType() + " != " + value; - } + private static final long serialVersionUID = 2574090797476500965L; + + public NotEq(T value, FilterType filterType) { 
+ super(value, filterType); + } + + @Override + public R accept(AbstractFilterVisitor visitor) { + return visitor.visit(this); + } + + @Override + public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { + return visitor.visit(value, this); + } + + @Override + public String toString() { + return getFilterType() + " != " + value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Or.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Or.java index 75f196b2..3346f123 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Or.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/operator/Or.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.timeseries.filterV2.visitor.AbstractFilterVisitor; import cn.edu.tsinghua.tsfile.timeseries.filterV2.visitor.TimeValuePairFilterVisitor; import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TimeValuePair; - import java.io.Serializable; /** @@ -15,24 +14,24 @@ */ public class Or> extends BinaryFilter implements Serializable { - private static final long serialVersionUID = -968055896528472694L; + private static final long serialVersionUID = -968055896528472694L; - public Or(Filter left, Filter right) { - super(left, right); - } + public Or(Filter left, Filter right) { + super(left, right); + } - @Override - public String toString() { - return "(" + left + " || " + right + ")"; - } + @Override + public String toString() { + return "(" + left + " || " + right + ")"; + } - @Override - public R accept(AbstractFilterVisitor visitor) { - return visitor.visit(this); - } + @Override + public R accept(AbstractFilterVisitor visitor) { + return visitor.visit(this); + } - @Override - public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { - return visitor.visit(value, this); - } + @Override + public R accept(TimeValuePair value, TimeValuePairFilterVisitor visitor) { + return visitor.visit(value, this); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/AbstractFilterVisitor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/AbstractFilterVisitor.java index c9462442..1317940c 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/AbstractFilterVisitor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/AbstractFilterVisitor.java @@ -5,23 +5,23 @@ public interface AbstractFilterVisitor { - > R visit(Eq eq); + > R visit(Eq eq); - > R visit(NotEq notEq); + > R visit(NotEq notEq); - > R visit(LtEq ltEq); + > R visit(LtEq ltEq); - > R visit(GtEq gtEq); + > R visit(GtEq gtEq); - > R visit(Gt gt); + > R visit(Gt gt); - > R visit(Lt lt); + > R visit(Lt lt); - > R visit(Not not); + > R visit(Not not); - > R visit(And and); + > R visit(And and); - > R visit(Or or); + > R visit(Or or); - > R visit(NoRestriction noFilter); + > R visit(NoRestriction noFilter); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/TimeValuePairFilterVisitor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/TimeValuePairFilterVisitor.java index a210328b..922e1a6b 100755 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/TimeValuePairFilterVisitor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/TimeValuePairFilterVisitor.java @@ -1,31 +1,31 @@ -package cn.edu.tsinghua.tsfile.timeseries.filterV2.visitor; - - -import cn.edu.tsinghua.tsfile.timeseries.filterV2.basic.Filter; -import 
cn.edu.tsinghua.tsfile.timeseries.filterV2.operator.*; -import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TimeValuePair; - -public interface TimeValuePairFilterVisitor { - - > R satisfy(TimeValuePair value, Filter filter); - - > R visit(TimeValuePair value, Eq eq); - - > R visit(TimeValuePair value, NotEq notEq); - - > R visit(TimeValuePair value, LtEq ltEq); - - > R visit(TimeValuePair value, GtEq gtEq); - - > R visit(TimeValuePair value, Gt gt); - - > R visit(TimeValuePair value, Lt lt); - - > R visit(TimeValuePair value, Not not); - - > R visit(TimeValuePair value, And and); - - > R visit(TimeValuePair value, Or or); - - > R visit(TimeValuePair value, NoRestriction noFilter); -} +package cn.edu.tsinghua.tsfile.timeseries.filterV2.visitor; + + +import cn.edu.tsinghua.tsfile.timeseries.filterV2.basic.Filter; +import cn.edu.tsinghua.tsfile.timeseries.filterV2.operator.*; +import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TimeValuePair; + +public interface TimeValuePairFilterVisitor { + + > R satisfy(TimeValuePair value, Filter filter); + + > R visit(TimeValuePair value, Eq eq); + + > R visit(TimeValuePair value, NotEq notEq); + + > R visit(TimeValuePair value, LtEq ltEq); + + > R visit(TimeValuePair value, GtEq gtEq); + + > R visit(TimeValuePair value, Gt gt); + + > R visit(TimeValuePair value, Lt lt); + + > R visit(TimeValuePair value, Not not); + + > R visit(TimeValuePair value, And and); + + > R visit(TimeValuePair value, Or or); + + > R visit(TimeValuePair value, NoRestriction noFilter); +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/impl/DigestFilterVisitor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/impl/DigestFilterVisitor.java index 812664b3..5d9bb07e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/impl/DigestFilterVisitor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/impl/DigestFilterVisitor.java @@ -13,91 +13,92 @@ */ public class DigestFilterVisitor implements AbstractFilterVisitor { - private ThreadLocal timestampDigest; - private ThreadLocal valueDigest; - private ThreadLocal> minValue; - private ThreadLocal> maxValue; - - public DigestFilterVisitor() { - this.timestampDigest = new ThreadLocal<>(); - this.valueDigest = new ThreadLocal<>(); - this.minValue = new ThreadLocal<>(); - this.maxValue = new ThreadLocal<>(); - } - - public Boolean satisfy(DigestForFilter timestampDigest, DigestForFilter valueDigest, Filter filter) { - this.timestampDigest.set(timestampDigest); - this.valueDigest.set(valueDigest); - return filter.accept(this); - } - - private void prepareMaxAndMinValue(UnaryFilter unaryFilter) { - if (unaryFilter.getFilterType() == FilterType.TIME_FILTER) { - this.minValue.set(timestampDigest.get().getMinValue()); - this.maxValue.set(timestampDigest.get().getMaxValue()); - } else { - this.minValue.set(valueDigest.get().getMinValue()); - this.maxValue.set(valueDigest.get().getMaxValue()); - } - } - - @Override - public > Boolean visit(Eq eq) { - prepareMaxAndMinValue(eq); - return eq.getValue().compareTo((T) minValue.get()) >= 0 - && eq.getValue().compareTo((T) maxValue.get()) <= 0; - } - - @Override - public > Boolean visit(NotEq notEq) { - prepareMaxAndMinValue(notEq); - return notEq.getValue().compareTo((T) minValue.get()) == 0 - && notEq.getValue().compareTo((T) maxValue.get()) == 0; - } - - @Override - public > Boolean visit(LtEq ltEq) { - prepareMaxAndMinValue(ltEq); - return ltEq.getValue().compareTo((T) minValue.get()) >= 0; - } 
- - @Override - public > Boolean visit(GtEq gtEq) { - prepareMaxAndMinValue(gtEq); - return gtEq.getValue().compareTo((T) maxValue.get()) <= 0; - } - - @Override - public > Boolean visit(Gt gt) { - prepareMaxAndMinValue(gt); - return gt.getValue().compareTo((T) maxValue.get()) < 0; - } - - @Override - public > Boolean visit(Lt lt) { - prepareMaxAndMinValue(lt); - return lt.getValue().compareTo((T) minValue.get()) > 0; - } - - @Override - public > Boolean visit(Not not) { - return !satisfy(timestampDigest.get(), valueDigest.get(), not.getFilterExpression()); - } - - @Override - public > Boolean visit(And and) { - return satisfy(timestampDigest.get(), valueDigest.get(), and.getLeft()) - && satisfy(timestampDigest.get(), valueDigest.get(), and.getRight()); - } - - @Override - public > Boolean visit(Or or) { - return satisfy(timestampDigest.get(), valueDigest.get(), or.getLeft()) - || satisfy(timestampDigest.get(), valueDigest.get(), or.getRight()); - } - - @Override - public > Boolean visit(NoRestriction noFilter) { - return true; + private ThreadLocal timestampDigest; + private ThreadLocal valueDigest; + private ThreadLocal> minValue; + private ThreadLocal> maxValue; + + public DigestFilterVisitor() { + this.timestampDigest = new ThreadLocal<>(); + this.valueDigest = new ThreadLocal<>(); + this.minValue = new ThreadLocal<>(); + this.maxValue = new ThreadLocal<>(); + } + + public Boolean satisfy(DigestForFilter timestampDigest, DigestForFilter valueDigest, + Filter filter) { + this.timestampDigest.set(timestampDigest); + this.valueDigest.set(valueDigest); + return filter.accept(this); + } + + private void prepareMaxAndMinValue(UnaryFilter unaryFilter) { + if (unaryFilter.getFilterType() == FilterType.TIME_FILTER) { + this.minValue.set(timestampDigest.get().getMinValue()); + this.maxValue.set(timestampDigest.get().getMaxValue()); + } else { + this.minValue.set(valueDigest.get().getMinValue()); + this.maxValue.set(valueDigest.get().getMaxValue()); } + } + + @Override + public > Boolean visit(Eq eq) { + prepareMaxAndMinValue(eq); + return eq.getValue().compareTo((T) minValue.get()) >= 0 + && eq.getValue().compareTo((T) maxValue.get()) <= 0; + } + + @Override + public > Boolean visit(NotEq notEq) { + prepareMaxAndMinValue(notEq); + return notEq.getValue().compareTo((T) minValue.get()) == 0 + && notEq.getValue().compareTo((T) maxValue.get()) == 0; + } + + @Override + public > Boolean visit(LtEq ltEq) { + prepareMaxAndMinValue(ltEq); + return ltEq.getValue().compareTo((T) minValue.get()) >= 0; + } + + @Override + public > Boolean visit(GtEq gtEq) { + prepareMaxAndMinValue(gtEq); + return gtEq.getValue().compareTo((T) maxValue.get()) <= 0; + } + + @Override + public > Boolean visit(Gt gt) { + prepareMaxAndMinValue(gt); + return gt.getValue().compareTo((T) maxValue.get()) < 0; + } + + @Override + public > Boolean visit(Lt lt) { + prepareMaxAndMinValue(lt); + return lt.getValue().compareTo((T) minValue.get()) > 0; + } + + @Override + public > Boolean visit(Not not) { + return !satisfy(timestampDigest.get(), valueDigest.get(), not.getFilterExpression()); + } + + @Override + public > Boolean visit(And and) { + return satisfy(timestampDigest.get(), valueDigest.get(), and.getLeft()) + && satisfy(timestampDigest.get(), valueDigest.get(), and.getRight()); + } + + @Override + public > Boolean visit(Or or) { + return satisfy(timestampDigest.get(), valueDigest.get(), or.getLeft()) + || satisfy(timestampDigest.get(), valueDigest.get(), or.getRight()); + } + + @Override + public > Boolean visit(NoRestriction 
noFilter) { + return true; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/impl/TimeValuePairFilterVisitorImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/impl/TimeValuePairFilterVisitorImpl.java index 7deb6864..0cdd5495 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/impl/TimeValuePairFilterVisitorImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/visitor/impl/TimeValuePairFilterVisitorImpl.java @@ -10,78 +10,78 @@ * Created by zhangjinrui on 2017/12/15. */ public class TimeValuePairFilterVisitorImpl implements TimeValuePairFilterVisitor { - @Override - public > Boolean satisfy(TimeValuePair value, Filter filter) { - return filter.accept(value, this); - } + @Override + public > Boolean satisfy(TimeValuePair value, Filter filter) { + return filter.accept(value, this); + } - @Override - public > Boolean visit(TimeValuePair value, Eq eq) { - Object v = (eq instanceof TimeEq) ? value.getTimestamp() : value.getValue().getValue(); - return eq.getValue().equals(v); - } + @Override + public > Boolean visit(TimeValuePair value, Eq eq) { + Object v = (eq instanceof TimeEq) ? value.getTimestamp() : value.getValue().getValue(); + return eq.getValue().equals(v); + } - @Override - public > Boolean visit(TimeValuePair value, NotEq notEq) { - Object v = (notEq instanceof TimeNotEq) ? value.getTimestamp() : value.getValue().getValue(); - return !notEq.getValue().equals(v); - } + @Override + public > Boolean visit(TimeValuePair value, NotEq notEq) { + Object v = (notEq instanceof TimeNotEq) ? value.getTimestamp() : value.getValue().getValue(); + return !notEq.getValue().equals(v); + } - @Override - public > Boolean visit(TimeValuePair value, LtEq ltEq) { - Object v = (ltEq instanceof TimeLtEq) ? value.getTimestamp() : value.getValue().getValue(); - if (ltEq.getValue().compareTo((T) v) >= 0) { - return true; - } - return false; + @Override + public > Boolean visit(TimeValuePair value, LtEq ltEq) { + Object v = (ltEq instanceof TimeLtEq) ? value.getTimestamp() : value.getValue().getValue(); + if (ltEq.getValue().compareTo((T) v) >= 0) { + return true; } + return false; + } - @Override - public > Boolean visit(TimeValuePair value, GtEq gtEq) { - Object v = (gtEq instanceof TimeGtEq) ? value.getTimestamp() : value.getValue().getValue(); - if (gtEq.getValue().compareTo((T) v) <= 0) { - return true; - } - return false; + @Override + public > Boolean visit(TimeValuePair value, GtEq gtEq) { + Object v = (gtEq instanceof TimeGtEq) ? value.getTimestamp() : value.getValue().getValue(); + if (gtEq.getValue().compareTo((T) v) <= 0) { + return true; } + return false; + } - @Override - public > Boolean visit(TimeValuePair value, Gt gt) { - Object v = (gt instanceof TimeGt) ? value.getTimestamp() : value.getValue().getValue(); - if (gt.getValue().compareTo((T) v) < 0) { - return true; - } - return false; + @Override + public > Boolean visit(TimeValuePair value, Gt gt) { + Object v = (gt instanceof TimeGt) ? value.getTimestamp() : value.getValue().getValue(); + if (gt.getValue().compareTo((T) v) < 0) { + return true; } + return false; + } - @Override - public > Boolean visit(TimeValuePair value, Lt lt) { - Object v = (lt instanceof TimeLt) ? value.getTimestamp() : value.getValue().getValue(); - if (lt.getValue().compareTo((T) v) > 0) { - return true; - } - return false; + @Override + public > Boolean visit(TimeValuePair value, Lt lt) { + Object v = (lt instanceof TimeLt) ? 
value.getTimestamp() : value.getValue().getValue(); + if (lt.getValue().compareTo((T) v) > 0) { + return true; } + return false; + } - @Override - public > Boolean visit(TimeValuePair value, Not not) { - return !satisfy(value, not.getFilterExpression()); - } + @Override + public > Boolean visit(TimeValuePair value, Not not) { + return !satisfy(value, not.getFilterExpression()); + } - @Override - public > Boolean visit(TimeValuePair value, And and) { - return satisfy(value, and.getLeft()) && satisfy(value, and.getRight()); - } + @Override + public > Boolean visit(TimeValuePair value, And and) { + return satisfy(value, and.getLeft()) && satisfy(value, and.getRight()); + } - @Override - public > Boolean visit(TimeValuePair value, Or or) { - return satisfy(value, or.getLeft()) || satisfy(value, or.getRight()); - } + @Override + public > Boolean visit(TimeValuePair value, Or or) { + return satisfy(value, or.getLeft()) || satisfy(value, or.getRight()); + } - @Override - public > Boolean visit(TimeValuePair value, NoRestriction noFilter) { - return true; - } + @Override + public > Boolean visit(TimeValuePair value, NoRestriction noFilter) { + return true; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/FileReader.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/FileReader.java index c0376eac..6626d687 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/FileReader.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/FileReader.java @@ -11,7 +11,6 @@ import cn.edu.tsinghua.tsfile.timeseries.write.io.TsFileIOWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.*; @@ -19,321 +18,328 @@ /** - * This class is used to read TSFileMetaData and construct - * file level reader which contains the information of RowGroupReader. + * This class is used to read TSFileMetaData and construct file level reader which + * contains the information of RowGroupReader. * * @author Jinrui Zhang */ public class FileReader { - private static final Logger logger = LoggerFactory.getLogger(FileReader.class); - - private static final int FOOTER_LENGTH = 4; - private static final int MAGIC_LENGTH = TsFileIOWriter.magicStringBytes.length; - private static final int LRU_LENGTH = 1000000; // TODO: get this from a configuration - /** - * If the file has many rowgroups and series, - * the storage of fileMetaData may be large. - */ - private TsFileMetaData fileMetaData; - private ITsRandomAccessFileReader randomAccessFileReader; - - private Map> rowGroupReaderMap; - // TODO: do we need to manage RowGroupReaders across files? - private LinkedList rowGroupReaderLRUList; - - /** - * Lock when initializing RowGroupReaders so that the same deltaObj will not be initialized more than once. 
- */ - private ReentrantReadWriteLock rwLock; - - public FileReader(ITsRandomAccessFileReader raf) throws IOException { - this.randomAccessFileReader = raf; - this.rwLock = new ReentrantReadWriteLock(); - this.rowGroupReaderLRUList = new LinkedList<>(); - init(); - } - - /** - * Used for IoTDB compatibility - * - * @param reader - * @param rowGroupMetaDataList - */ - public FileReader(ITsRandomAccessFileReader reader, List rowGroupMetaDataList) throws IOException { - this.randomAccessFileReader = reader; - this.rwLock = new ReentrantReadWriteLock(); - this.rowGroupReaderLRUList = new LinkedList<>(); - initFromRowGroupMetadataList(rowGroupMetaDataList); + private static final Logger logger = LoggerFactory.getLogger(FileReader.class); + + private static final int FOOTER_LENGTH = 4; + private static final int MAGIC_LENGTH = TsFileIOWriter.magicStringBytes.length; + private static final int LRU_LENGTH = 1000000; // TODO: get this from a configuration + /** + * If the file has many rowgroups and series, the storage of fileMetaData may be + * large. + */ + private TsFileMetaData fileMetaData; + private ITsRandomAccessFileReader randomAccessFileReader; + + private Map> rowGroupReaderMap; + // TODO: do we need to manage RowGroupReaders across files? + private LinkedList rowGroupReaderLRUList; + + /** + * Lock when initializing RowGroupReaders so that the same deltaObj will not be initialized more + * than once. + */ + private ReentrantReadWriteLock rwLock; + + public FileReader(ITsRandomAccessFileReader raf) throws IOException { + this.randomAccessFileReader = raf; + this.rwLock = new ReentrantReadWriteLock(); + this.rowGroupReaderLRUList = new LinkedList<>(); + init(); + } + + /** + * Used for IoTDB compatibility + * + * @param reader + * @param rowGroupMetaDataList + */ + public FileReader(ITsRandomAccessFileReader reader, List rowGroupMetaDataList) + throws IOException { + this.randomAccessFileReader = reader; + this.rwLock = new ReentrantReadWriteLock(); + this.rowGroupReaderLRUList = new LinkedList<>(); + initFromRowGroupMetadataList(rowGroupMetaDataList); + } + + /** + * FileReader initialization, construct fileMetaData + * rowGroupReaderList, and rowGroupReaderMap. + * + * @throws IOException file read error + */ + private void init() throws IOException { + long l = randomAccessFileReader.length(); + randomAccessFileReader.seek(l - MAGIC_LENGTH - FOOTER_LENGTH); + int fileMetaDataLength = randomAccessFileReader.readInt(); + randomAccessFileReader.seek(l - MAGIC_LENGTH - FOOTER_LENGTH - fileMetaDataLength); + byte[] buf = new byte[fileMetaDataLength]; + randomAccessFileReader.read(buf, 0, buf.length);// FIXME is this a potential bug? + + ByteArrayInputStream bais = new ByteArrayInputStream(buf); + this.fileMetaData = new TsFileMetaDataConverter() + .toTsFileMetadata(ReadWriteThriftFormatUtils.readFileMetaData(bais)); + + rowGroupReaderMap = new HashMap<>(); + } + + /** + * //TODO verify rightness Used for IoTDB compatibility + * + * @param rowGroupMetadataList + */ + private void initFromRowGroupMetadataList(List rowGroupMetadataList) { + rowGroupReaderMap = new HashMap<>(); + for (RowGroupMetaData rowGroupMetaData : rowGroupMetadataList) { + String deltaObjectID = rowGroupMetaData.getDeltaObjectID(); + updateLRU(deltaObjectID); } - - /** - * FileReader initialization, construct fileMetaData - * rowGroupReaderList, and rowGroupReaderMap. 
- * - * @throws IOException file read error - */ - private void init() throws IOException { - long l = randomAccessFileReader.length(); - randomAccessFileReader.seek(l - MAGIC_LENGTH - FOOTER_LENGTH); - int fileMetaDataLength = randomAccessFileReader.readInt(); - randomAccessFileReader.seek(l - MAGIC_LENGTH - FOOTER_LENGTH - fileMetaDataLength); - byte[] buf = new byte[fileMetaDataLength]; - randomAccessFileReader.read(buf, 0, buf.length);//FIXME is this a potential bug? - - ByteArrayInputStream bais = new ByteArrayInputStream(buf); - this.fileMetaData = new TsFileMetaDataConverter().toTsFileMetadata(ReadWriteThriftFormatUtils.readFileMetaData(bais)); - - rowGroupReaderMap = new HashMap<>(); + initRowGroupReaders(rowGroupMetadataList); + } + + /** + * Do not use this method for potential risks of LRU cache overflow. + * + * @return + */ + @Deprecated + public Map> getRowGroupReaderMap() { + if (this.fileMetaData == null) { + return rowGroupReaderMap; } - /** - * //TODO verify rightness - * Used for IoTDB compatibility - * - * @param rowGroupMetadataList - */ - private void initFromRowGroupMetadataList(List rowGroupMetadataList) { - rowGroupReaderMap = new HashMap<>(); - for (RowGroupMetaData rowGroupMetaData : rowGroupMetadataList) { - String deltaObjectID = rowGroupMetaData.getDeltaObjectID(); - updateLRU(deltaObjectID); - } - initRowGroupReaders(rowGroupMetadataList); + try { + loadAllDeltaObj(); + } catch (IOException e) { + logger.error("cannot get all RowGroupReaders because {}", e.getMessage()); } - - /** - * Do not use this method for potential risks of LRU cache overflow. - * - * @return - */ - @Deprecated - public Map> getRowGroupReaderMap() { - if (this.fileMetaData == null) { - return rowGroupReaderMap; - } - - try { - loadAllDeltaObj(); - } catch (IOException e) { - logger.error("cannot get all RowGroupReaders because {}", e.getMessage()); - } - return this.rowGroupReaderMap; - } - - public Map getProps() { - return fileMetaData.getProps(); - } - - public String getProp(String key) { - return fileMetaData.getProp(key); - } - - /** - * Get all readers that access every RowGroup belonging to deltaObjectUID within this file. - * This method will try to init the readers if they are uninitialized(non-exist). - * - * @param deltaObjectUID name of the desired deltaObject - * @return A list of RowGroupReaders specified by deltaObjectUID - * or NULL if such deltaObject doesn't exist in this file - */ - public List getRowGroupReaderListByDeltaObject(String deltaObjectUID) throws IOException { - loadDeltaObj(deltaObjectUID); - return this.rowGroupReaderMap.get(deltaObjectUID); - } - - public List getRowGroupReaderListByDeltaObjectByHadoop(String deltaObjectUID) throws IOException { - return this.rowGroupReaderMap.get(deltaObjectUID); + return this.rowGroupReaderMap; + } + + public Map getProps() { + return fileMetaData.getProps(); + } + + public String getProp(String key) { + return fileMetaData.getProp(key); + } + + /** + * Get all readers that access every RowGroup belonging to deltaObjectUID within this file. This + * method will try to init the readers if they are uninitialized(non-exist). 
+ * + * @param deltaObjectUID name of the desired deltaObject + * @return A list of RowGroupReaders specified by deltaObjectUID or NULL if such deltaObject + * doesn't exist in this file + */ + public List getRowGroupReaderListByDeltaObject(String deltaObjectUID) + throws IOException { + loadDeltaObj(deltaObjectUID); + return this.rowGroupReaderMap.get(deltaObjectUID); + } + + public List getRowGroupReaderListByDeltaObjectByHadoop(String deltaObjectUID) + throws IOException { + return this.rowGroupReaderMap.get(deltaObjectUID); + } + + public TSDataType getDataTypeBySeriesName(String deltaObject, String measurement) + throws IOException { + loadDeltaObj(deltaObject); + List rgrList = getRowGroupReaderMap().get(deltaObject); + if (rgrList == null || rgrList.size() == 0) { + return null; } - - public TSDataType getDataTypeBySeriesName(String deltaObject, String measurement) throws IOException { - loadDeltaObj(deltaObject); - List rgrList = getRowGroupReaderMap().get(deltaObject); - if (rgrList == null || rgrList.size() == 0) { - return null; - } - return rgrList.get(0).getDataTypeBySeriesName(measurement); + return rgrList.get(0).getDataTypeBySeriesName(measurement); + } + + public void close() throws IOException { + this.randomAccessFileReader.close(); + } + + /* + * The below methods can be used to init RowGroupReaders of a given deltaObj in different ways, in + * case of another refactoring. Current method is based on TsDeltaObject. + */ + + /** + * This method is thread-safe. + * + * @param deltaObjUID + * @throws IOException + */ + private void initRowGroupReaders(String deltaObjUID) throws IOException { + // avoid duplicates + if (this.rowGroupReaderMap.containsKey(deltaObjUID)) + return; + this.rwLock.writeLock().lock(); + try { + TsDeltaObject deltaObj = this.fileMetaData.getDeltaObject(deltaObjUID); + initRowGroupReaders(deltaObj); + } finally { + this.rwLock.writeLock().unlock(); } - - public void close() throws IOException { - this.randomAccessFileReader.close(); + } + + /** + * This method is thread-unsafe, so the caller must ensure thread safety. + * + * @param deltaObj TSDeltaObject that contains a list of RowGroupMetaData + * @throws IOException + */ + private void initRowGroupReaders(TsDeltaObject deltaObj) throws IOException { + if (deltaObj == null) + return; + // read metadata block and use its RowGroupMetadata list to construct RowGroupReaders + TsRowGroupBlockMetaData blockMeta = new TsRowGroupBlockMetaData(); + blockMeta.convertToTSF(ReadWriteThriftFormatUtils.readRowGroupBlockMetaData( + this.randomAccessFileReader, deltaObj.offset, deltaObj.metadataBlockSize)); + initRowGroupReaders(blockMeta.getRowGroups()); + } + + /** + * Core method, construct RowGroupReader for every RowGroup in given list, thread-unsafe. The + * caller should avoid adding duplicate readers. 
+ * + * @param groupList + */ + private void initRowGroupReaders(List groupList) { + if (groupList == null) + return; + // TODO: advice: parallel the process to speed up + for (RowGroupMetaData meta : groupList) { + // the passed raf should be new rafs to realize parallelism + RowGroupReader reader = new RowGroupReader(meta, this.randomAccessFileReader); + + List readerList = this.rowGroupReaderMap.get(meta.getDeltaObjectID()); + if (readerList == null) { + readerList = new ArrayList<>(); + rowGroupReaderMap.put(meta.getDeltaObjectID(), readerList); + } + readerList.add(reader); } - - /* The below methods can be used to init RowGroupReaders of a given deltaObj - in different ways, in case of another refactoring. Current method is based on TsDeltaObject. - */ - - /** - * This method is thread-safe. - * - * @param deltaObjUID - * @throws IOException - */ - private void initRowGroupReaders(String deltaObjUID) throws IOException { - // avoid duplicates - if (this.rowGroupReaderMap.containsKey(deltaObjUID)) - return; - this.rwLock.writeLock().lock(); - try { - TsDeltaObject deltaObj = this.fileMetaData.getDeltaObject(deltaObjUID); - initRowGroupReaders(deltaObj); - } finally { - this.rwLock.writeLock().unlock(); - } + } + + /* + * Belows are methods for maintaining LRU List. Is using an interface or base class better? + */ + + /** + * Add a deltaObj by its name to the tail of the LRU list. If the deltaObj already exists, remove + * it. When adding a new item, check if the volume exceeds, if so, remove the head of list and + * responding RowGroupReaders. + * + * @param deltaObjUID + */ + private void updateLRU(String deltaObjUID) { + int idx = this.rowGroupReaderLRUList.indexOf(deltaObjUID); + if (idx != -1) { + // not a new item + this.rowGroupReaderLRUList.remove(idx); + } else { + // a new item + if (this.rowGroupReaderLRUList.size() > this.LRU_LENGTH) { + String removedDeltaObj = this.rowGroupReaderLRUList.removeFirst(); + this.rowGroupReaderMap.remove(removedDeltaObj); + } } - - /** - * This method is thread-unsafe, so the caller must ensure thread safety. - * - * @param deltaObj TSDeltaObject that contains a list of RowGroupMetaData - * @throws IOException - */ - private void initRowGroupReaders(TsDeltaObject deltaObj) throws IOException { - if (deltaObj == null) - return; - // read metadata block and use its RowGroupMetadata list to construct RowGroupReaders - TsRowGroupBlockMetaData blockMeta = new TsRowGroupBlockMetaData(); - blockMeta.convertToTSF(ReadWriteThriftFormatUtils.readRowGroupBlockMetaData(this.randomAccessFileReader, - deltaObj.offset, deltaObj.metadataBlockSize)); - initRowGroupReaders(blockMeta.getRowGroups()); + this.rowGroupReaderLRUList.addLast(deltaObjUID); + } + + @Deprecated + // only used for compatibility, such as spark + public List getRowGroupReaderList() throws IOException { + if (this.rowGroupReaderMap == null || this.rowGroupReaderMap.size() == 0) { + loadAllDeltaObj(); } - /** - * Core method, construct RowGroupReader for every RowGroup in given list, thread-unsafe. - * The caller should avoid adding duplicate readers. 
- * - * @param groupList - */ - private void initRowGroupReaders(List groupList) { - if (groupList == null) - return; - // TODO: advice: parallel the process to speed up - for (RowGroupMetaData meta : groupList) { - // the passed raf should be new rafs to realize parallelism - RowGroupReader reader = new RowGroupReader(meta, this.randomAccessFileReader); - - List readerList = this.rowGroupReaderMap.get(meta.getDeltaObjectID()); - if (readerList == null) { - readerList = new ArrayList<>(); - rowGroupReaderMap.put(meta.getDeltaObjectID(), readerList); - } - readerList.add(reader); - } + List ret = new ArrayList<>(); + for (Map.Entry> entry : this.rowGroupReaderMap.entrySet()) { + ret.addAll(entry.getValue()); } - - /* - Belows are methods for maintaining LRU List. Is using an interface or base class better? - */ - - /** - * Add a deltaObj by its name to the tail of the LRU list. If the deltaObj already exists, - * remove it. When adding a new item, check if the volume exceeds, if so, remove the head of - * list and responding RowGroupReaders. - * - * @param deltaObjUID - */ - private void updateLRU(String deltaObjUID) { - int idx = this.rowGroupReaderLRUList.indexOf(deltaObjUID); - if (idx != -1) { - // not a new item - this.rowGroupReaderLRUList.remove(idx); - } else { - // a new item - if (this.rowGroupReaderLRUList.size() > this.LRU_LENGTH) { - String removedDeltaObj = this.rowGroupReaderLRUList.removeFirst(); - this.rowGroupReaderMap.remove(removedDeltaObj); - } - } - this.rowGroupReaderLRUList.addLast(deltaObjUID); + return ret; + } + + /** + * This method prefetch metadata of a DeltaObject for methods like checkSeries, if the DeltaObject + * is not in memory. + * + * @param deltaObjUID + */ + public void loadDeltaObj(String deltaObjUID) throws IOException { + // check if this file do have this delta_obj + if (!this.fileMetaData.containsDeltaObject(deltaObjUID)) { + return; } - - @Deprecated - // only used for compatibility, such as spark - public List getRowGroupReaderList() throws IOException { - if (this.rowGroupReaderMap == null || this.rowGroupReaderMap.size() == 0) { - loadAllDeltaObj(); - } - - List ret = new ArrayList<>(); - for (Map.Entry> entry : this.rowGroupReaderMap.entrySet()) { - ret.addAll(entry.getValue()); - } - return ret; + List ret = rowGroupReaderMap.get(deltaObjUID); + if (ret == null) { + initRowGroupReaders(deltaObjUID); } + updateLRU(deltaObjUID); + } - /** - * This method prefetch metadata of a DeltaObject for methods like checkSeries, - * if the DeltaObject is not in memory. 
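The LRU bookkeeping above (a LinkedList of delta object names plus the reader map) can also be expressed with a single access-ordered LinkedHashMap, which moves an entry to the tail on every access and evicts the eldest automatically. The following is a self-contained sketch of that alternative; the capacity is a placeholder and the class is illustrative, not something this patch introduces.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import cn.edu.tsinghua.tsfile.timeseries.read.RowGroupReader;

// Sketch: an access-ordered LinkedHashMap combines the roles of
// rowGroupReaderLRUList and rowGroupReaderMap: get()/put() move an entry to the
// tail, and inserting beyond `capacity` evicts the least recently used entry
// together with its readers.
final class RowGroupReaderLruCache extends LinkedHashMap<String, List<RowGroupReader>> {
    private final int capacity;

    RowGroupReaderLruCache(int capacity) {
        super(16, 0.75f, true); // accessOrder = true
        this.capacity = capacity;
    }

    @Override
    protected boolean removeEldestEntry(Map.Entry<String, List<RowGroupReader>> eldest) {
        return size() > capacity;
    }
}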
- * - * @param deltaObjUID - */ - public void loadDeltaObj(String deltaObjUID) throws IOException { - // check if this file do have this delta_obj - if (!this.fileMetaData.containsDeltaObject(deltaObjUID)) { - return; - } - List ret = rowGroupReaderMap.get(deltaObjUID); - if (ret == null) { - initRowGroupReaders(deltaObjUID); - } - updateLRU(deltaObjUID); + private void loadAllDeltaObj() throws IOException { + Collection deltaObjects = fileMetaData.getDeltaObjectMap().keySet(); + for (String deltaObject : deltaObjects) { + initRowGroupReaders(deltaObject); } - - private void loadAllDeltaObj() throws IOException { - Collection deltaObjects = fileMetaData.getDeltaObjectMap().keySet(); - for (String deltaObject : deltaObjects) { - initRowGroupReaders(deltaObject); - } + } + + public boolean containsDeltaObj(String deltaObjUID) { + return this.fileMetaData.containsDeltaObject(deltaObjUID); + } + + public boolean containsSeries(String deltaObjUID, String measurementID) throws IOException { + if (!this.containsDeltaObj(deltaObjUID)) { + return false; + } else { + this.loadDeltaObj(deltaObjUID); + List readers = rowGroupReaderMap.get(deltaObjUID); + for (RowGroupReader reader : readers) { + if (reader.containsMeasurement(measurementID)) + return true; + } } - - public boolean containsDeltaObj(String deltaObjUID) { - return this.fileMetaData.containsDeltaObject(deltaObjUID); + return false; + } + + public TsFileMetaData getFileMetaData() { + return this.fileMetaData; + } + + // used by hadoop + public List getSortedRowGroupMetaDataList() throws IOException { + List rowGroupMetaDataList = new ArrayList<>(); + Collection deltaObjects = fileMetaData.getDeltaObjectMap().keySet(); + for (String deltaObjectID : deltaObjects) { + this.rwLock.writeLock().lock(); + try { + TsDeltaObject deltaObj = this.fileMetaData.getDeltaObject(deltaObjectID); + TsRowGroupBlockMetaData blockMeta = new TsRowGroupBlockMetaData(); + blockMeta.convertToTSF(ReadWriteThriftFormatUtils.readRowGroupBlockMetaData( + this.randomAccessFileReader, deltaObj.offset, deltaObj.metadataBlockSize)); + rowGroupMetaDataList.addAll(blockMeta.getRowGroups()); + } finally { + this.rwLock.writeLock().unlock(); + } } - public boolean containsSeries(String deltaObjUID, String measurementID) throws IOException { - if (!this.containsDeltaObj(deltaObjUID)) { - return false; - } else { - this.loadDeltaObj(deltaObjUID); - List readers = rowGroupReaderMap.get(deltaObjUID); - for (RowGroupReader reader : readers) { - if (reader.containsMeasurement(measurementID)) - return true; - } - } - return false; - } + Comparator comparator = new Comparator() { + @Override + public int compare(RowGroupMetaData o1, RowGroupMetaData o2) { - public TsFileMetaData getFileMetaData() { - return this.fileMetaData; - } + return Long.signum(o1.getMetaDatas().get(0).getProperties().getFileOffset() + - o2.getMetaDatas().get(0).getProperties().getFileOffset()); + } - //used by hadoop - public List getSortedRowGroupMetaDataList() throws IOException{ - List rowGroupMetaDataList = new ArrayList<>(); - Collection deltaObjects = fileMetaData.getDeltaObjectMap().keySet(); - for (String deltaObjectID : deltaObjects) { - this.rwLock.writeLock().lock(); - try { - TsDeltaObject deltaObj = this.fileMetaData.getDeltaObject(deltaObjectID); - TsRowGroupBlockMetaData blockMeta = new TsRowGroupBlockMetaData(); - blockMeta.convertToTSF(ReadWriteThriftFormatUtils.readRowGroupBlockMetaData(this.randomAccessFileReader, - deltaObj.offset, deltaObj.metadataBlockSize)); - 
rowGroupMetaDataList.addAll(blockMeta.getRowGroups()); - } finally { - this.rwLock.writeLock().unlock(); - } - } - - Comparator comparator = new Comparator() { - @Override - public int compare(RowGroupMetaData o1, RowGroupMetaData o2) { - - return Long.signum(o1.getMetaDatas().get(0).getProperties().getFileOffset() - o2.getMetaDatas().get(0).getProperties().getFileOffset()); - } - - }; - rowGroupMetaDataList.sort(comparator); - return rowGroupMetaDataList; - } + }; + rowGroupMetaDataList.sort(comparator); + return rowGroupMetaDataList; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/PageReader.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/PageReader.java index 7362281c..1a1b2ac5 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/PageReader.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/PageReader.java @@ -4,63 +4,61 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.CompressionTypeName; import cn.edu.tsinghua.tsfile.file.utils.ReadWriteThriftFormatUtils; import cn.edu.tsinghua.tsfile.format.PageHeader; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; /** - * @author Jinrui Zhang - * PageReader is used to read a page in a column. + * @author Jinrui Zhang PageReader is used to read a page in a column. */ public class PageReader { - private ByteArrayInputStream bis; - private PageHeader pageHeader = null; - private UnCompressor unCompressor = null; - - public PageReader(ByteArrayInputStream bis, CompressionTypeName compressionTypeName) { - this.bis = bis; - unCompressor = UnCompressor.getUnCompressor(compressionTypeName); - } + private ByteArrayInputStream bis; + private PageHeader pageHeader = null; + private UnCompressor unCompressor = null; - public boolean hasNextPage() { - if (bis.available() > 0) - return true; - return false; - } + public PageReader(ByteArrayInputStream bis, CompressionTypeName compressionTypeName) { + this.bis = bis; + unCompressor = UnCompressor.getUnCompressor(compressionTypeName); + } - public PageHeader getNextPageHeader() throws IOException { - if (pageHeader != null) { - return pageHeader; - } - if (bis.available() > 0) { - pageHeader = ReadWriteThriftFormatUtils.readPageHeader(bis); - return pageHeader; + public boolean hasNextPage() { + if (bis.available() > 0) + return true; + return false; + } - } - return null; + public PageHeader getNextPageHeader() throws IOException { + if (pageHeader != null) { + return pageHeader; } + if (bis.available() > 0) { + pageHeader = ReadWriteThriftFormatUtils.readPageHeader(bis); + return pageHeader; - public ByteArrayInputStream getNextPage() throws IOException { - if (bis.available() > 0) { - pageHeader = getNextPageHeader(); - int pageSize = pageHeader.getCompressed_page_size(); - byte[] pageContent = new byte[pageSize]; - bis.read(pageContent, 0, pageSize); - pageContent = unCompressor.uncompress(pageContent); - pageHeader = null; - return new ByteArrayInputStream(pageContent); - } - return null; } + return null; + } - public void readPage(InputStream in, byte[] buf, int pageSize) throws IOException { - in.read(buf, 0, pageSize); + public ByteArrayInputStream getNextPage() throws IOException { + if (bis.available() > 0) { + pageHeader = getNextPageHeader(); + int pageSize = pageHeader.getCompressed_page_size(); + byte[] pageContent = new byte[pageSize]; + bis.read(pageContent, 0, pageSize); + pageContent = unCompressor.uncompress(pageContent); + pageHeader = null; + return new 
ByteArrayInputStream(pageContent); } + return null; + } - public void skipCurrentPage() { - long skipSize = this.pageHeader.getCompressed_page_size(); - bis.skip(skipSize); - pageHeader = null; - } + public void readPage(InputStream in, byte[] buf, int pageSize) throws IOException { + in.read(buf, 0, pageSize); + } + + public void skipCurrentPage() { + long skipSize = this.pageHeader.getCompressed_page_size(); + bis.skip(skipSize); + pageHeader = null; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/RecordReader.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/RecordReader.java index b1069072..985b7229 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/RecordReader.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/RecordReader.java @@ -13,7 +13,6 @@ import cn.edu.tsinghua.tsfile.timeseries.read.query.DynamicOneColumnData; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.*; @@ -23,441 +22,469 @@ */ public class RecordReader { - private static final Logger logger = LoggerFactory.getLogger(RecordReader.class); - private FileReader fileReader; - private Map> seriesSchemaMap; - - public RecordReader(ITsRandomAccessFileReader raf) throws IOException { - this.fileReader = new FileReader(raf); + private static final Logger logger = LoggerFactory.getLogger(RecordReader.class); + private FileReader fileReader; + private Map> seriesSchemaMap; + + public RecordReader(ITsRandomAccessFileReader raf) throws IOException { + this.fileReader = new FileReader(raf); + } + + // for hadoop-connector + public RecordReader(ITsRandomAccessFileReader raf, List rowGroupMetaDataList) + throws IOException { + this.fileReader = new FileReader(raf, rowGroupMetaDataList); + } + + /** + * Read one path without filter. + * + * @param res the iterative result + * @param fetchSize fetch size + * @param deltaObjectUID delta object id + * @param measurementUID measurement Id + * @return the result in means of DynamicOneColumnData + * @throws IOException TsFile read error + */ + public DynamicOneColumnData getValueInOneColumn(DynamicOneColumnData res, int fetchSize, + String deltaObjectUID, String measurementUID) throws IOException { + + checkSeries(deltaObjectUID, measurementUID); + + List rowGroupReaderList = + fileReader.getRowGroupReaderListByDeltaObject(deltaObjectUID); + int i = 0; + if (res != null) { + i = res.getRowGroupIndex(); } - - //for hadoop-connector - public RecordReader(ITsRandomAccessFileReader raf, List rowGroupMetaDataList) throws IOException { - this.fileReader = new FileReader(raf, rowGroupMetaDataList); + for (; i < rowGroupReaderList.size(); i++) { + RowGroupReader rowGroupReader = rowGroupReaderList.get(i); + + if (rowGroupReader.getValueReaders().get(measurementUID) == null) { + return alignColumn(measurementUID); + } + + res = getValueInOneColumn(res, fetchSize, rowGroupReader, measurementUID); + if (res.valueLength >= fetchSize) { + res.hasReadAll = false; + break; + } } - - /** - * Read one path without filter. 
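Tying together the PageReader methods shown a little above (before the RecordReader hunk): a consumer peeks at each page header, decides whether the page is worth decompressing, and either decodes it or skips it. An illustrative driver follows; pageMightMatch and decodePage are placeholders for caller-side pruning and decoding logic, not real TsFile APIs.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import cn.edu.tsinghua.tsfile.file.metadata.enums.CompressionTypeName;
import cn.edu.tsinghua.tsfile.format.PageHeader;
import cn.edu.tsinghua.tsfile.timeseries.read.PageReader;

// Sketch: iterate the pages of one column chunk. `columnBytes` is assumed to be
// the raw bytes of the chunk and `compression` its CompressionTypeName from the
// chunk metadata.
abstract class PageScanSketch {
    void scan(byte[] columnBytes, CompressionTypeName compression) throws IOException {
        PageReader pages = new PageReader(new ByteArrayInputStream(columnBytes), compression);
        while (pages.hasNextPage()) {
            PageHeader header = pages.getNextPageHeader();
            if (!pageMightMatch(header)) {
                pages.skipCurrentPage(); // jump over the compressed bytes without decompressing
                continue;
            }
            ByteArrayInputStream pageData = pages.getNextPage(); // body is already uncompressed
            decodePage(pageData, header);
        }
    }

    abstract boolean pageMightMatch(PageHeader header);

    abstract void decodePage(ByteArrayInputStream pageData, PageHeader header) throws IOException;
}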
- * - * @param res the iterative result - * @param fetchSize fetch size - * @param deltaObjectUID delta object id - * @param measurementUID measurement Id - * @return the result in means of DynamicOneColumnData - * @throws IOException TsFile read error - */ - public DynamicOneColumnData getValueInOneColumn(DynamicOneColumnData res, int fetchSize - , String deltaObjectUID, String measurementUID) throws IOException { - - checkSeries(deltaObjectUID, measurementUID); - - List rowGroupReaderList = fileReader.getRowGroupReaderListByDeltaObject(deltaObjectUID); - int i = 0; - if (res != null) { - i = res.getRowGroupIndex(); - } - for (; i < rowGroupReaderList.size(); i++) { - RowGroupReader rowGroupReader = rowGroupReaderList.get(i); - - if(rowGroupReader.getValueReaders().get(measurementUID) == null) { - return alignColumn(measurementUID); - } - - res = getValueInOneColumn(res, fetchSize, rowGroupReader, measurementUID); - if (res.valueLength >= fetchSize) { - res.hasReadAll = false; - break; - } - } - return res; + return res; + } + + /** + * Read one path without filter and do not throw exceptino. Used by hadoop. + * + * @param res the iterative result + * @param fetchSize fetch size + * @param deltaObjectUID delta object id + * @param measurementUID measurement Id + * @return the result in means of DynamicOneColumnData + * @throws IOException TsFile read error + */ + public DynamicOneColumnData getValueInOneColumnWithoutException(DynamicOneColumnData res, + int fetchSize, String deltaObjectUID, String measurementUID) throws IOException { + try { + checkSeriesByHadoop(deltaObjectUID, measurementUID); + } catch (IOException ex) { + if (res == null) + res = new DynamicOneColumnData(); + res.dataType = fileReader.getRowGroupReaderListByDeltaObject(deltaObjectUID).get(0) + .getDataTypeBySeriesName(measurementUID); + return res; } - - /** - * Read one path without filter and do not throw exceptino. Used by hadoop. 
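As a usage sketch for the entry point above: open a local TsFile, read up to fetchSize points of one series, and close the reader. The TsRandomAccessLocalFileReader(String path) constructor, the file path, and the series names are assumptions made for the example, not taken from this patch.

import cn.edu.tsinghua.tsfile.common.utils.ITsRandomAccessFileReader;
import cn.edu.tsinghua.tsfile.timeseries.read.RecordReader;
import cn.edu.tsinghua.tsfile.timeseries.read.TsRandomAccessLocalFileReader;
import cn.edu.tsinghua.tsfile.timeseries.read.query.DynamicOneColumnData;

// Sketch: single-batch read of one series from a local file.
public class ReadOneColumnSketch {
    public static void main(String[] args) throws Exception {
        ITsRandomAccessFileReader raf = new TsRandomAccessLocalFileReader("data/example.tsfile");
        RecordReader reader = new RecordReader(raf);
        try {
            DynamicOneColumnData column =
                reader.getValueInOneColumn(null, 10000, "device_1", "sensor_1");
            System.out.println("type=" + column.dataType + ", points=" + column.valueLength);
        } finally {
            reader.close(); // closes the underlying file reader as well
        }
    }
}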
- * - * @param res the iterative result - * @param fetchSize fetch size - * @param deltaObjectUID delta object id - * @param measurementUID measurement Id - * @return the result in means of DynamicOneColumnData - * @throws IOException TsFile read error - */ - public DynamicOneColumnData getValueInOneColumnWithoutException(DynamicOneColumnData res, int fetchSize - , String deltaObjectUID, String measurementUID) throws IOException { - try { - checkSeriesByHadoop(deltaObjectUID, measurementUID); - }catch(IOException ex){ - if(res == null)res = new DynamicOneColumnData(); - res.dataType = fileReader.getRowGroupReaderListByDeltaObject(deltaObjectUID).get(0).getDataTypeBySeriesName(measurementUID); - return res; - } - List rowGroupReaderList = fileReader.getRowGroupReaderListByDeltaObjectByHadoop(deltaObjectUID); - int i = 0; - if (res != null) { - i = res.getRowGroupIndex(); - } - for (; i < rowGroupReaderList.size(); i++) { - RowGroupReader rowGroupReader = rowGroupReaderList.get(i); - res = getValueInOneColumn(res, fetchSize, rowGroupReader, measurementUID); - if (res.valueLength >= fetchSize) { - res.hasReadAll = false; - break; - } - } - return res; + List rowGroupReaderList = + fileReader.getRowGroupReaderListByDeltaObjectByHadoop(deltaObjectUID); + int i = 0; + if (res != null) { + i = res.getRowGroupIndex(); } - - private DynamicOneColumnData getValueInOneColumn(DynamicOneColumnData res, int fetchSize, - RowGroupReader rowGroupReader, String measurementId) throws IOException { - return rowGroupReader.getValueReaders().get(measurementId).readOneColumn(res, fetchSize); + for (; i < rowGroupReaderList.size(); i++) { + RowGroupReader rowGroupReader = rowGroupReaderList.get(i); + res = getValueInOneColumn(res, fetchSize, rowGroupReader, measurementUID); + if (res.valueLength >= fetchSize) { + res.hasReadAll = false; + break; + } } - - - /** - * Read one path without filter from one specific - * RowGroupReader according to the indexList。 - * @param res result - * @param fetchSize fetch size - * @param deltaObjectUID delta object id - * @param measurementId measurement Id - * @param idxes index list of the RowGroupReader - * @return DynamicOneColumnData - * @throws IOException failed to get value - */ - public DynamicOneColumnData getValueInOneColumn(DynamicOneColumnData res, int fetchSize, String deltaObjectUID, - String measurementId, ArrayList idxes) throws IOException { - checkSeries(deltaObjectUID, measurementId); - int rowGroupSkipCount = 0; - - List rowGroupReaderList = fileReader.getRowGroupReaderList(); - int i = 0; - if (res != null) { - i = res.getRowGroupIndex(); - } - for (; i < idxes.size(); i++) { - int idx = idxes.get(i); - RowGroupReader rowGroupReader = rowGroupReaderList.get(idx); - if (!deltaObjectUID.equals(rowGroupReader.getDeltaObjectUID())) { - rowGroupSkipCount++; - continue; - } - - if(rowGroupReader.getValueReaders().get(measurementId) == null) { - return alignColumn(measurementId); - } - - res = rowGroupReader.getValueReaders().get(measurementId).readOneColumn(res, fetchSize); - for (int k = 0; k < rowGroupSkipCount; k++) { - res.plusRowGroupIndexAndInitPageOffset(); - } - if (res.valueLength >= fetchSize) { - res.hasReadAll = false; - break; - } - } - return res; + return res; + } + + private DynamicOneColumnData getValueInOneColumn(DynamicOneColumnData res, int fetchSize, + RowGroupReader rowGroupReader, String measurementId) throws IOException { + return rowGroupReader.getValueReaders().get(measurementId).readOneColumn(res, fetchSize); + } + + + /** + * Read one 
path without filter from one specific RowGroupReader according to the + * indexList。 + * + * @param res result + * @param fetchSize fetch size + * @param deltaObjectUID delta object id + * @param measurementId measurement Id + * @param idxes index list of the RowGroupReader + * @return DynamicOneColumnData + * @throws IOException failed to get value + */ + public DynamicOneColumnData getValueInOneColumn(DynamicOneColumnData res, int fetchSize, + String deltaObjectUID, String measurementId, ArrayList idxes) throws IOException { + checkSeries(deltaObjectUID, measurementId); + int rowGroupSkipCount = 0; + + List rowGroupReaderList = fileReader.getRowGroupReaderList(); + int i = 0; + if (res != null) { + i = res.getRowGroupIndex(); } - - public DynamicOneColumnData getValuesUseFilter(DynamicOneColumnData res, int fetchSize - , SingleSeriesFilterExpression valueFilter) throws IOException { - String deltaObjectUID = valueFilter.getFilterSeries().getDeltaObjectUID(); - String measurementUID = valueFilter.getFilterSeries().getMeasurementUID(); - return getValuesUseFilter(res, fetchSize, deltaObjectUID, measurementUID, null, null, valueFilter); - } - - public DynamicOneColumnData getValuesUseFilter(DynamicOneColumnData res, int fetchSize, String deltaObjectUID, - String measurementId, SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, - SingleSeriesFilterExpression valueFilter) throws IOException { - checkSeries(deltaObjectUID, measurementId); - int i = 0; - if (res != null) { - i = res.getRowGroupIndex(); - } - - List rowGroupReaderList = fileReader.getRowGroupReaderListByDeltaObject(deltaObjectUID); - for (; i < rowGroupReaderList.size(); i++) { - RowGroupReader rowGroupReader = rowGroupReaderList.get(i); - - if(rowGroupReader.getValueReaders().get(measurementId) == null) { - return alignColumn(measurementId); - } - - res = getValuesUseFilter(res, fetchSize, rowGroupReader, measurementId, timeFilter, freqFilter, valueFilter); - if (res.valueLength >= fetchSize) { - res.hasReadAll = false; - break; - } - } - return res; + for (; i < idxes.size(); i++) { + int idx = idxes.get(i); + RowGroupReader rowGroupReader = rowGroupReaderList.get(idx); + if (!deltaObjectUID.equals(rowGroupReader.getDeltaObjectUID())) { + rowGroupSkipCount++; + continue; + } + + if (rowGroupReader.getValueReaders().get(measurementId) == null) { + return alignColumn(measurementId); + } + + res = rowGroupReader.getValueReaders().get(measurementId).readOneColumn(res, fetchSize); + for (int k = 0; k < rowGroupSkipCount; k++) { + res.plusRowGroupIndexAndInitPageOffset(); + } + if (res.valueLength >= fetchSize) { + res.hasReadAll = false; + break; + } } - - public DynamicOneColumnData getValuesUseFilter(DynamicOneColumnData res, int fetchSize - , SingleSeriesFilterExpression valueFilter, ArrayList idxs) throws IOException { - String deltaObjectUID = valueFilter.getFilterSeries().getDeltaObjectUID(); - String measurementUID = valueFilter.getFilterSeries().getMeasurementUID(); - return getValuesUseFilter(res, fetchSize, deltaObjectUID, measurementUID, null, null, valueFilter, idxs); + return res; + } + + public DynamicOneColumnData getValuesUseFilter(DynamicOneColumnData res, int fetchSize, + SingleSeriesFilterExpression valueFilter) throws IOException { + String deltaObjectUID = valueFilter.getFilterSeries().getDeltaObjectUID(); + String measurementUID = valueFilter.getFilterSeries().getMeasurementUID(); + return getValuesUseFilter(res, fetchSize, deltaObjectUID, measurementUID, null, null, + 
valueFilter); + } + + public DynamicOneColumnData getValuesUseFilter(DynamicOneColumnData res, int fetchSize, + String deltaObjectUID, String measurementId, SingleSeriesFilterExpression timeFilter, + SingleSeriesFilterExpression freqFilter, SingleSeriesFilterExpression valueFilter) + throws IOException { + checkSeries(deltaObjectUID, measurementId); + int i = 0; + if (res != null) { + i = res.getRowGroupIndex(); } - public DynamicOneColumnData getValuesUseFilter(DynamicOneColumnData res, int fetchSize, String deltaObjectUID, - String measurementId, SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, - SingleSeriesFilterExpression valueFilter, ArrayList idxs) throws IOException { - checkSeries(deltaObjectUID, measurementId); - int rowGroupSkipCount = 0; - - List rowGroupReaderList = fileReader.getRowGroupReaderList(); - int i = 0; - if (res != null) { - i = res.getRowGroupIndex(); - } - for (; i < idxs.size(); i++) { - logger.info("GetValuesUseFilter and timeIdxs. RowGroupIndex is :" + idxs.get(i)); - int idx = idxs.get(i); - RowGroupReader rowGroupReader = rowGroupReaderList.get(idx); - if (!deltaObjectUID.equals(rowGroupReader.getDeltaObjectUID())) { - rowGroupSkipCount++; - continue; - } - - if(rowGroupReader.getValueReaders().get(measurementId) == null) { - return alignColumn(measurementId); - } - - res = getValuesUseFilter(res, fetchSize, rowGroupReader, measurementId, timeFilter, freqFilter, valueFilter); - for (int k = 0; k < rowGroupSkipCount; k++) { - res.plusRowGroupIndexAndInitPageOffset(); - } - if (res.valueLength >= fetchSize) { - res.hasReadAll = false; - break; - } - } - return res; + List rowGroupReaderList = + fileReader.getRowGroupReaderListByDeltaObject(deltaObjectUID); + for (; i < rowGroupReaderList.size(); i++) { + RowGroupReader rowGroupReader = rowGroupReaderList.get(i); + + if (rowGroupReader.getValueReaders().get(measurementId) == null) { + return alignColumn(measurementId); + } + + res = getValuesUseFilter(res, fetchSize, rowGroupReader, measurementId, timeFilter, + freqFilter, valueFilter); + if (res.valueLength >= fetchSize) { + res.hasReadAll = false; + break; + } } - - private DynamicOneColumnData getValuesUseFilter(DynamicOneColumnData res, int fetchSize, - RowGroupReader rowGroupReader, String measurementId, SingleSeriesFilterExpression timeFilter, - SingleSeriesFilterExpression freqFilter, SingleSeriesFilterExpression valueFilter) throws IOException { - - res = rowGroupReader.getValueReaders().get(measurementId).readOneColumnUseFilter(res, fetchSize, timeFilter, - freqFilter, valueFilter); - return res; + return res; + } + + public DynamicOneColumnData getValuesUseFilter(DynamicOneColumnData res, int fetchSize, + SingleSeriesFilterExpression valueFilter, ArrayList idxs) throws IOException { + String deltaObjectUID = valueFilter.getFilterSeries().getDeltaObjectUID(); + String measurementUID = valueFilter.getFilterSeries().getMeasurementUID(); + return getValuesUseFilter(res, fetchSize, deltaObjectUID, measurementUID, null, null, + valueFilter, idxs); + } + + public DynamicOneColumnData getValuesUseFilter(DynamicOneColumnData res, int fetchSize, + String deltaObjectUID, String measurementId, SingleSeriesFilterExpression timeFilter, + SingleSeriesFilterExpression freqFilter, SingleSeriesFilterExpression valueFilter, + ArrayList idxs) throws IOException { + checkSeries(deltaObjectUID, measurementId); + int rowGroupSkipCount = 0; + + List rowGroupReaderList = fileReader.getRowGroupReaderList(); + int i = 0; + if (res != null) { + i 
= res.getRowGroupIndex(); } - - public DynamicOneColumnData getValuesUseTimestamps(String deltaObjectUID, String measurementId, long[] timestamps) - throws IOException { - checkSeries(deltaObjectUID, measurementId); - DynamicOneColumnData res = null; - List rowGroupReaderList = fileReader.getRowGroupReaderListByDeltaObject(deltaObjectUID); - for (int i = 0; i < rowGroupReaderList.size(); i++) { - RowGroupReader rowGroupReader = rowGroupReaderList.get(i); - - if(rowGroupReader.getValueReaders().get(measurementId) == null) { - return alignColumn(measurementId); - } - - if (i == 0) { - res = getValuesUseTimestamps(rowGroupReader, measurementId, timestamps); - } else { - DynamicOneColumnData tmpRes = getValuesUseTimestamps(rowGroupReader, measurementId, timestamps); - res.mergeRecord(tmpRes); - } - } - return res; + for (; i < idxs.size(); i++) { + logger.info("GetValuesUseFilter and timeIdxs. RowGroupIndex is :" + idxs.get(i)); + int idx = idxs.get(i); + RowGroupReader rowGroupReader = rowGroupReaderList.get(idx); + if (!deltaObjectUID.equals(rowGroupReader.getDeltaObjectUID())) { + rowGroupSkipCount++; + continue; + } + + if (rowGroupReader.getValueReaders().get(measurementId) == null) { + return alignColumn(measurementId); + } + + res = getValuesUseFilter(res, fetchSize, rowGroupReader, measurementId, timeFilter, + freqFilter, valueFilter); + for (int k = 0; k < rowGroupSkipCount; k++) { + res.plusRowGroupIndexAndInitPageOffset(); + } + if (res.valueLength >= fetchSize) { + res.hasReadAll = false; + break; + } } - - public DynamicOneColumnData getValuesUseTimestamps(String deltaObjectUID, String measurementId, long[] timeRet, - ArrayList idxs) throws IOException { - checkSeries(deltaObjectUID, measurementId); - DynamicOneColumnData res = null; - List rowGroupReaderList = fileReader.getRowGroupReaderList(); - - boolean init = false; - for (int i = 0; i < idxs.size(); i++) { - int idx = idxs.get(i); - RowGroupReader rowGroupReader = rowGroupReaderList.get(idx); - if (!deltaObjectUID.equals(rowGroupReader.getDeltaObjectUID())) { - continue; - } - - if(rowGroupReader.getValueReaders().get(measurementId) == null) { - return alignColumn(measurementId); - } - - if (!init) { - res = getValuesUseTimestamps(rowGroupReader, measurementId, timeRet); - init = true; - } else { - DynamicOneColumnData tmpRes = getValuesUseTimestamps(rowGroupReader, measurementId, timeRet); - res.mergeRecord(tmpRes); - } - } - return res; + return res; + } + + private DynamicOneColumnData getValuesUseFilter(DynamicOneColumnData res, int fetchSize, + RowGroupReader rowGroupReader, String measurementId, SingleSeriesFilterExpression timeFilter, + SingleSeriesFilterExpression freqFilter, SingleSeriesFilterExpression valueFilter) + throws IOException { + + res = rowGroupReader.getValueReaders().get(measurementId).readOneColumnUseFilter(res, fetchSize, + timeFilter, freqFilter, valueFilter); + return res; + } + + public DynamicOneColumnData getValuesUseTimestamps(String deltaObjectUID, String measurementId, + long[] timestamps) throws IOException { + checkSeries(deltaObjectUID, measurementId); + DynamicOneColumnData res = null; + List rowGroupReaderList = + fileReader.getRowGroupReaderListByDeltaObject(deltaObjectUID); + for (int i = 0; i < rowGroupReaderList.size(); i++) { + RowGroupReader rowGroupReader = rowGroupReaderList.get(i); + + if (rowGroupReader.getValueReaders().get(measurementId) == null) { + return alignColumn(measurementId); + } + + if (i == 0) { + res = getValuesUseTimestamps(rowGroupReader, measurementId, 
timestamps); + } else { + DynamicOneColumnData tmpRes = + getValuesUseTimestamps(rowGroupReader, measurementId, timestamps); + res.mergeRecord(tmpRes); + } } - - private DynamicOneColumnData getValuesUseTimestamps(RowGroupReader rowGroupReader, String measurementId, long[] timeRet) - throws IOException { - return rowGroupReader.getValueReaders().get(measurementId).getValuesForGivenValues(timeRet); + return res; + } + + public DynamicOneColumnData getValuesUseTimestamps(String deltaObjectUID, String measurementId, + long[] timeRet, ArrayList idxs) throws IOException { + checkSeries(deltaObjectUID, measurementId); + DynamicOneColumnData res = null; + List rowGroupReaderList = fileReader.getRowGroupReaderList(); + + boolean init = false; + for (int i = 0; i < idxs.size(); i++) { + int idx = idxs.get(i); + RowGroupReader rowGroupReader = rowGroupReaderList.get(idx); + if (!deltaObjectUID.equals(rowGroupReader.getDeltaObjectUID())) { + continue; + } + + if (rowGroupReader.getValueReaders().get(measurementId) == null) { + return alignColumn(measurementId); + } + + if (!init) { + res = getValuesUseTimestamps(rowGroupReader, measurementId, timeRet); + init = true; + } else { + DynamicOneColumnData tmpRes = + getValuesUseTimestamps(rowGroupReader, measurementId, timeRet); + res.mergeRecord(tmpRes); + } } - - public boolean isEnumsColumn(String deltaObjectUID, String sid) throws IOException { - List rowGroupReaderList = fileReader.getRowGroupReaderListByDeltaObject(deltaObjectUID); - for (RowGroupReader rowGroupReader : rowGroupReaderList) { - if (rowGroupReader.getValueReaderForSpecificMeasurement(sid) == null) { - continue; - } - if (rowGroupReader.getValueReaders().get(sid).getDataType() == TSDataType.ENUMS) { - return true; - } - } - return false; + return res; + } + + private DynamicOneColumnData getValuesUseTimestamps(RowGroupReader rowGroupReader, + String measurementId, long[] timeRet) throws IOException { + return rowGroupReader.getValueReaders().get(measurementId).getValuesForGivenValues(timeRet); + } + + public boolean isEnumsColumn(String deltaObjectUID, String sid) throws IOException { + List rowGroupReaderList = + fileReader.getRowGroupReaderListByDeltaObject(deltaObjectUID); + for (RowGroupReader rowGroupReader : rowGroupReaderList) { + if (rowGroupReader.getValueReaderForSpecificMeasurement(sid) == null) { + continue; + } + if (rowGroupReader.getValueReaders().get(sid).getDataType() == TSDataType.ENUMS) { + return true; + } } - - //For Tsfile-Spark-Connector - public List getAllSeriesSchema() throws IOException { - List tslist = this.fileReader.getFileMetaData().getTimeSeriesList(); - List seriesSchemas = new ArrayList<>(); - for(TimeSeriesMetadata ts: tslist ) { - seriesSchemas.add(new SeriesSchema(ts.getMeasurementUID(), ts.getType(), null)); - } - return seriesSchemas; + return false; + } + + // For Tsfile-Spark-Connector + public List getAllSeriesSchema() throws IOException { + List tslist = this.fileReader.getFileMetaData().getTimeSeriesList(); + List seriesSchemas = new ArrayList<>(); + for (TimeSeriesMetadata ts : tslist) { + seriesSchemas.add(new SeriesSchema(ts.getMeasurementUID(), ts.getType(), null)); } - - public ArrayList getAllDeltaObjects() throws IOException { - ArrayList res = new ArrayList<>(); - HashMap deltaObjectMap = new HashMap<>(); - List rowGroupReaders = fileReader.getRowGroupReaderList(); - for (RowGroupReader rgr : rowGroupReaders) { - String deltaObjectUID = rgr.getDeltaObjectUID(); - if (!deltaObjectMap.containsKey(deltaObjectUID)) { - 
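The timestamp-driven variant above is the cross-series primitive: given the timestamps that satisfied a condition on one series, it fetches another series' values at exactly those instants. A short sketch, reusing the open reader from the earlier example; the timestamp values are placeholders for the output of a prior filtered read.

// Sketch: values of sensor_2 at the given instants, merged across row groups.
long[] timestamps = {1500000000000L, 1500000001000L, 1500000002000L};
DynamicOneColumnData sensor2 =
    reader.getValuesUseTimestamps("device_1", "sensor_2", timestamps);
System.out.println("matched points: " + sensor2.valueLength);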
res.add(deltaObjectUID); - deltaObjectMap.put(deltaObjectUID, 1); - } - } - return res; + return seriesSchemas; + } + + public ArrayList getAllDeltaObjects() throws IOException { + ArrayList res = new ArrayList<>(); + HashMap deltaObjectMap = new HashMap<>(); + List rowGroupReaders = fileReader.getRowGroupReaderList(); + for (RowGroupReader rgr : rowGroupReaders) { + String deltaObjectUID = rgr.getDeltaObjectUID(); + if (!deltaObjectMap.containsKey(deltaObjectUID)) { + res.add(deltaObjectUID); + deltaObjectMap.put(deltaObjectUID, 1); + } } - - public Map> getAllSeriesSchemasGroupByDeltaObject() { - Map> res = new HashMap<>(); - Map> rowGroupReaders = fileReader.getRowGroupReaderMap(); - for (String deltaObjectUID : rowGroupReaders.keySet()) { - HashMap measurementMap = new HashMap<>(); - ArrayList cols = new ArrayList<>(); - for (RowGroupReader rgr : rowGroupReaders.get(deltaObjectUID)) { - for (String measurement : rgr.seriesDataTypeMap.keySet()) { - if (!measurementMap.containsKey(measurement)) { - cols.add(new SeriesSchema(measurement, rgr.seriesDataTypeMap.get(measurement), null)); - measurementMap.put(measurement, 1); - } - } - } - res.put(deltaObjectUID, cols); + return res; + } + + public Map> getAllSeriesSchemasGroupByDeltaObject() { + Map> res = new HashMap<>(); + Map> rowGroupReaders = fileReader.getRowGroupReaderMap(); + for (String deltaObjectUID : rowGroupReaders.keySet()) { + HashMap measurementMap = new HashMap<>(); + ArrayList cols = new ArrayList<>(); + for (RowGroupReader rgr : rowGroupReaders.get(deltaObjectUID)) { + for (String measurement : rgr.seriesDataTypeMap.keySet()) { + if (!measurementMap.containsKey(measurement)) { + cols.add(new SeriesSchema(measurement, rgr.seriesDataTypeMap.get(measurement), null)); + measurementMap.put(measurement, 1); + } } - return res; + } + res.put(deltaObjectUID, cols); } - - public Map getDeltaObjectRowGroupCounts() { - Map res = new HashMap<>(); - Map> rowGroupReaders = fileReader.getRowGroupReaderMap(); - for (String deltaObjectUID : rowGroupReaders.keySet()) { - res.put(deltaObjectUID, rowGroupReaders.get(deltaObjectUID).size()); - } - return res; + return res; + } + + public Map getDeltaObjectRowGroupCounts() { + Map res = new HashMap<>(); + Map> rowGroupReaders = fileReader.getRowGroupReaderMap(); + for (String deltaObjectUID : rowGroupReaders.keySet()) { + res.put(deltaObjectUID, rowGroupReaders.get(deltaObjectUID).size()); } + return res; + } - public Map getDeltaObjectTypes() { - Map res = new HashMap<>(); - Map> rowGroupReaders = fileReader.getRowGroupReaderMap(); - for (String deltaObjectUID : rowGroupReaders.keySet()) { + public Map getDeltaObjectTypes() { + Map res = new HashMap<>(); + Map> rowGroupReaders = fileReader.getRowGroupReaderMap(); + for (String deltaObjectUID : rowGroupReaders.keySet()) { - RowGroupReader rgr = rowGroupReaders.get(deltaObjectUID).get(0); - } - return res; + RowGroupReader rgr = rowGroupReaders.get(deltaObjectUID).get(0); } - - public ArrayList getRowGroupPosList() throws IOException { - ArrayList res = new ArrayList<>(); - long startPos = 0; - for (RowGroupReader rowGroupReader : fileReader.getRowGroupReaderList()) { - long currentEndPos = rowGroupReader.getTotalByteSize() + startPos; - res.add(currentEndPos); - startPos = currentEndPos; - } - return res; + return res; + } + + public ArrayList getRowGroupPosList() throws IOException { + ArrayList res = new ArrayList<>(); + long startPos = 0; + for (RowGroupReader rowGroupReader : fileReader.getRowGroupReaderList()) { + long currentEndPos = 
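The discovery methods above (getAllDeltaObjects, getAllSeriesSchemasGroupByDeltaObject, getDeltaObjectRowGroupCounts) are what the Spark and Hadoop connectors use to enumerate a file's contents; note they go through the reader map and so load metadata for every delta object. A small illustrative fragment that prints each delta object with its row group count and series names, again reusing an open reader inside a method that may throw IOException.

// Sketch: enumerate delta objects, their row group counts, and their series.
// Hoisting the two maps out of the loop would avoid rebuilding them per object.
for (String deltaObject : reader.getAllDeltaObjects()) {
    System.out.println(deltaObject + ": "
        + reader.getDeltaObjectRowGroupCounts().get(deltaObject) + " row group(s)");
    for (SeriesSchema series : reader.getAllSeriesSchemasGroupByDeltaObject().get(deltaObject)) {
        System.out.println("  series " + series.name);
    }
}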
rowGroupReader.getTotalByteSize() + startPos; + res.add(currentEndPos); + startPos = currentEndPos; } + return res; + } - public FilterSeries getColumnByMeasurementName(String deltaObject, String measurement) throws IOException { - TSDataType type = null; - - //modified for Tsfile-Spark-Connector - type = this.fileReader.getFileMetaData().getType(measurement); - - if (type == TSDataType.INT32) { - return FilterFactory.intFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER); - } else if (type == TSDataType.INT64) { - return FilterFactory.longFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER); - } else if (type == TSDataType.FLOAT) { - return FilterFactory.floatFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER); - } else if (type == TSDataType.DOUBLE) { - return FilterFactory.doubleFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER); - } else if (type == TSDataType.BOOLEAN) { - return FilterFactory.booleanFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER); - } else if (type == TSDataType.ENUMS || type == TSDataType.TEXT) { - return FilterFactory.stringFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER); - } else { - throw new UnSupportedDataTypeException(String.valueOf(type)); - } - } + public FilterSeries getColumnByMeasurementName(String deltaObject, String measurement) + throws IOException { + TSDataType type = null; // modified for Tsfile-Spark-Connector - private void checkSeries(String deltaObject, String measurement) throws IOException { - this.fileReader.loadDeltaObj(deltaObject); - if(!fileReader.containsDeltaObj(deltaObject) || !fileReader.getFileMetaData().containsMeasurement(measurement)) { - throw new IOException("Series "+ deltaObject + "#" + measurement + " does not exist in the current file."); - } + type = this.fileReader.getFileMetaData().getType(measurement); + + if (type == TSDataType.INT32) { + return FilterFactory.intFilterSeries(deltaObject, measurement, FilterSeriesType.VALUE_FILTER); + } else if (type == TSDataType.INT64) { + return FilterFactory.longFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER); + } else if (type == TSDataType.FLOAT) { + return FilterFactory.floatFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER); + } else if (type == TSDataType.DOUBLE) { + return FilterFactory.doubleFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER); + } else if (type == TSDataType.BOOLEAN) { + return FilterFactory.booleanFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER); + } else if (type == TSDataType.ENUMS || type == TSDataType.TEXT) { + return FilterFactory.stringFilterSeries(deltaObject, measurement, + FilterSeriesType.VALUE_FILTER); + } else { + throw new UnSupportedDataTypeException(String.valueOf(type)); } - - - // corresponding with the modification of method 'checkSeries' - private DynamicOneColumnData alignColumn(String measurementId) throws IOException{ - TSDataType type = fileReader.getFileMetaData().getType(measurementId); - return new DynamicOneColumnData(type); + } + + // modified for Tsfile-Spark-Connector + private void checkSeries(String deltaObject, String measurement) throws IOException { + this.fileReader.loadDeltaObj(deltaObject); + if (!fileReader.containsDeltaObj(deltaObject) + || !fileReader.getFileMetaData().containsMeasurement(measurement)) { + throw new IOException( + "Series " + deltaObject + "#" + measurement + " does not exist in the current file."); } - 
- private void checkSeriesByHadoop(String deltaObject, String measurement) throws IOException { - if (seriesSchemaMap == null) { - seriesSchemaMap = new HashMap<>(); - Map> seriesSchemaListMap = getAllSeriesSchemasGroupByDeltaObject(); - for (String key : seriesSchemaListMap.keySet()) { - HashMap tmap = new HashMap<>(); - for (SeriesSchema ss : seriesSchemaListMap.get(key)) { - tmap.put(ss.name, ss); - } - seriesSchemaMap.put(key, tmap); - } - } - if (seriesSchemaMap.containsKey(deltaObject)) { - if (seriesSchemaMap.get(deltaObject).containsKey(measurement)) { - return; - } + } + + + // corresponding with the modification of method 'checkSeries' + private DynamicOneColumnData alignColumn(String measurementId) throws IOException { + TSDataType type = fileReader.getFileMetaData().getType(measurementId); + return new DynamicOneColumnData(type); + } + + private void checkSeriesByHadoop(String deltaObject, String measurement) throws IOException { + if (seriesSchemaMap == null) { + seriesSchemaMap = new HashMap<>(); + Map> seriesSchemaListMap = + getAllSeriesSchemasGroupByDeltaObject(); + for (String key : seriesSchemaListMap.keySet()) { + HashMap tmap = new HashMap<>(); + for (SeriesSchema ss : seriesSchemaListMap.get(key)) { + tmap.put(ss.name, ss); } - throw new IOException("Series is not exist in current file: " + deltaObject + "#" + measurement); + seriesSchemaMap.put(key, tmap); + } } - - public List getAllRowGroupReaders() throws IOException { - return fileReader.getRowGroupReaderList(); + if (seriesSchemaMap.containsKey(deltaObject)) { + if (seriesSchemaMap.get(deltaObject).containsKey(measurement)) { + return; + } } + throw new IOException( + "Series is not exist in current file: " + deltaObject + "#" + measurement); + } - public Map getProps() { - return fileReader.getProps(); - } + public List getAllRowGroupReaders() throws IOException { + return fileReader.getRowGroupReaderList(); + } - public String getProp(String key) { - return fileReader.getProp(key); - } + public Map getProps() { + return fileReader.getProps(); + } - public void close() throws IOException { - fileReader.close(); - } + public String getProp(String key) { + return fileReader.getProp(key); + } + + public void close() throws IOException { + fileReader.close(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/RowGroupReader.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/RowGroupReader.java index 146ce161..9351046a 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/RowGroupReader.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/RowGroupReader.java @@ -8,7 +8,6 @@ import cn.edu.tsinghua.tsfile.timeseries.read.query.DynamicOneColumnData; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; @@ -16,129 +15,135 @@ import java.util.Map; /** - * @author Jinrui Zhang - * This class is used to read one RowGroup. + * @author Jinrui Zhang This class is used to read one RowGroup. 
*/ public class RowGroupReader { - protected static final Logger logger = LoggerFactory.getLogger(RowGroupReader.class); - public Map seriesDataTypeMap; - protected Map valueReaders = new HashMap<>(); - protected String deltaObjectUID; - - protected List measurementIds; - protected long totalByteSize; - - protected ITsRandomAccessFileReader raf; - - public RowGroupReader() { - - } - - public RowGroupReader(RowGroupMetaData rowGroupMetaData, ITsRandomAccessFileReader raf) { - logger.debug(String.format("init a new RowGroupReader, the deltaObjectId is %s", rowGroupMetaData.getDeltaObjectID())); - seriesDataTypeMap = new HashMap<>(); - deltaObjectUID = rowGroupMetaData.getDeltaObjectID(); - measurementIds = new ArrayList<>(); - this.totalByteSize = rowGroupMetaData.getTotalByteSize(); - this.raf = raf; - - initValueReaders(rowGroupMetaData); - } - - public List getTimeByRet(List timeRet, HashMap retMap) { - List timeRes = new ArrayList(); - for (Integer i : retMap.keySet()) { - timeRes.add(timeRet.get(i)); - } - return timeRes; - } - - public TSDataType getDataTypeBySeriesName(String name) { - return this.seriesDataTypeMap.get(name); - } - - public String getDeltaObjectUID() { - return this.deltaObjectUID; - } - - /** - * Read time-value pairs whose time is be included in timeRet. WARNING: this - * function is only for "time" Series - * - * @param measurementId measurement's id - * @param timeRet Array of the time. - * @return DynamicOneColumnData - * @throws IOException exception in IO - */ - public DynamicOneColumnData readValueUseTimestamps(String measurementId, long[] timeRet) throws IOException { - logger.debug("query {}.{} using common time, time length : {}", deltaObjectUID, measurementId, timeRet.length); - return valueReaders.get(measurementId).getValuesForGivenValues(timeRet); - } - - public DynamicOneColumnData readOneColumnUseFilter(String sid, DynamicOneColumnData res, int fetchSize - , SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, SingleSeriesFilterExpression valueFilter) throws IOException { - ValueReader valueReader = valueReaders.get(sid); - return valueReader.readOneColumnUseFilter(res, fetchSize, timeFilter, freqFilter, valueFilter); - } - - public DynamicOneColumnData readOneColumn(String sid, DynamicOneColumnData res, int fetchSize) throws IOException { - ValueReader valueReader = valueReaders.get(sid); - return valueReader.readOneColumn(res, fetchSize); - } - - public ValueReader getValueReaderForSpecificMeasurement(String sid) { - return getValueReaders().get(sid); - } - - public long getTotalByteSize() { - return totalByteSize; - } - - public void setTotalByteSize(long totalByteSize) { - this.totalByteSize = totalByteSize; - } - - public Map getValueReaders() { - return valueReaders; - } - - public void setValueReaders(HashMap valueReaders) { - this.valueReaders = valueReaders; - } - - public ITsRandomAccessFileReader getRaf() { - return raf; - } - - public void setRaf(ITsRandomAccessFileReader raf) { - this.raf = raf; - } - - public boolean containsMeasurement(String measurementID) { - return this.valueReaders.containsKey(measurementID); - } - - public void close() throws IOException { - this.raf.close(); - } - - public void initValueReaders(RowGroupMetaData rowGroupMetaData) { - for (TimeSeriesChunkMetaData tscMetaData : rowGroupMetaData.getTimeSeriesChunkMetaDataList()) { - if (tscMetaData.getVInTimeSeriesChunkMetaData() != null) { - measurementIds.add(tscMetaData.getProperties().getMeasurementUID()); - 
seriesDataTypeMap.put(tscMetaData.getProperties().getMeasurementUID(), - tscMetaData.getVInTimeSeriesChunkMetaData().getDataType()); - - ValueReader si = new ValueReader(tscMetaData.getProperties().getFileOffset(), - tscMetaData.getTotalByteSize(), - tscMetaData.getVInTimeSeriesChunkMetaData().getDataType(), - tscMetaData.getVInTimeSeriesChunkMetaData().getDigest(), this.raf, - tscMetaData.getVInTimeSeriesChunkMetaData().getEnumValues(), - tscMetaData.getProperties().getCompression(), tscMetaData.getNumRows(), - tscMetaData.getTInTimeSeriesChunkMetaData().getStartTime(), tscMetaData.getTInTimeSeriesChunkMetaData().getEndTime()); - valueReaders.put(tscMetaData.getProperties().getMeasurementUID(), si); - } - } - } -} \ No newline at end of file + protected static final Logger logger = LoggerFactory.getLogger(RowGroupReader.class); + public Map seriesDataTypeMap; + protected Map valueReaders = new HashMap<>(); + protected String deltaObjectUID; + + protected List measurementIds; + protected long totalByteSize; + + protected ITsRandomAccessFileReader raf; + + public RowGroupReader() { + + } + + public RowGroupReader(RowGroupMetaData rowGroupMetaData, ITsRandomAccessFileReader raf) { + logger.debug(String.format("init a new RowGroupReader, the deltaObjectId is %s", + rowGroupMetaData.getDeltaObjectID())); + seriesDataTypeMap = new HashMap<>(); + deltaObjectUID = rowGroupMetaData.getDeltaObjectID(); + measurementIds = new ArrayList<>(); + this.totalByteSize = rowGroupMetaData.getTotalByteSize(); + this.raf = raf; + + initValueReaders(rowGroupMetaData); + } + + public List getTimeByRet(List timeRet, HashMap retMap) { + List timeRes = new ArrayList(); + for (Integer i : retMap.keySet()) { + timeRes.add(timeRet.get(i)); + } + return timeRes; + } + + public TSDataType getDataTypeBySeriesName(String name) { + return this.seriesDataTypeMap.get(name); + } + + public String getDeltaObjectUID() { + return this.deltaObjectUID; + } + + /** + * Read time-value pairs whose time is be included in timeRet. WARNING: this function is only for + * "time" Series + * + * @param measurementId measurement's id + * @param timeRet Array of the time. 
+ * @return DynamicOneColumnData + * @throws IOException exception in IO + */ + public DynamicOneColumnData readValueUseTimestamps(String measurementId, long[] timeRet) + throws IOException { + logger.debug("query {}.{} using common time, time length : {}", deltaObjectUID, measurementId, + timeRet.length); + return valueReaders.get(measurementId).getValuesForGivenValues(timeRet); + } + + public DynamicOneColumnData readOneColumnUseFilter(String sid, DynamicOneColumnData res, + int fetchSize, SingleSeriesFilterExpression timeFilter, + SingleSeriesFilterExpression freqFilter, SingleSeriesFilterExpression valueFilter) + throws IOException { + ValueReader valueReader = valueReaders.get(sid); + return valueReader.readOneColumnUseFilter(res, fetchSize, timeFilter, freqFilter, valueFilter); + } + + public DynamicOneColumnData readOneColumn(String sid, DynamicOneColumnData res, int fetchSize) + throws IOException { + ValueReader valueReader = valueReaders.get(sid); + return valueReader.readOneColumn(res, fetchSize); + } + + public ValueReader getValueReaderForSpecificMeasurement(String sid) { + return getValueReaders().get(sid); + } + + public long getTotalByteSize() { + return totalByteSize; + } + + public void setTotalByteSize(long totalByteSize) { + this.totalByteSize = totalByteSize; + } + + public Map getValueReaders() { + return valueReaders; + } + + public void setValueReaders(HashMap valueReaders) { + this.valueReaders = valueReaders; + } + + public ITsRandomAccessFileReader getRaf() { + return raf; + } + + public void setRaf(ITsRandomAccessFileReader raf) { + this.raf = raf; + } + + public boolean containsMeasurement(String measurementID) { + return this.valueReaders.containsKey(measurementID); + } + + public void close() throws IOException { + this.raf.close(); + } + + public void initValueReaders(RowGroupMetaData rowGroupMetaData) { + for (TimeSeriesChunkMetaData tscMetaData : rowGroupMetaData.getTimeSeriesChunkMetaDataList()) { + if (tscMetaData.getVInTimeSeriesChunkMetaData() != null) { + measurementIds.add(tscMetaData.getProperties().getMeasurementUID()); + seriesDataTypeMap.put(tscMetaData.getProperties().getMeasurementUID(), + tscMetaData.getVInTimeSeriesChunkMetaData().getDataType()); + + ValueReader si = new ValueReader(tscMetaData.getProperties().getFileOffset(), + tscMetaData.getTotalByteSize(), + tscMetaData.getVInTimeSeriesChunkMetaData().getDataType(), + tscMetaData.getVInTimeSeriesChunkMetaData().getDigest(), this.raf, + tscMetaData.getVInTimeSeriesChunkMetaData().getEnumValues(), + tscMetaData.getProperties().getCompression(), tscMetaData.getNumRows(), + tscMetaData.getTInTimeSeriesChunkMetaData().getStartTime(), + tscMetaData.getTInTimeSeriesChunkMetaData().getEndTime()); + valueReaders.put(tscMetaData.getProperties().getMeasurementUID(), si); + } + } + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/TsRandomAccessLocalFileReader.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/TsRandomAccessLocalFileReader.java index 1a216934..eb12306d 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/TsRandomAccessLocalFileReader.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/TsRandomAccessLocalFileReader.java @@ -3,7 +3,6 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.io.RandomAccessFile; - import cn.edu.tsinghua.tsfile.common.utils.ITsRandomAccessFileReader; /** diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/ValueReader.java 
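For code that walks row groups directly, as the Hadoop and Spark paths above do, the per-group API is sufficient on its own. The fragment below is an illustrative scan of one measurement across all row groups of one delta object; the same result object is threaded through successive readers, mirroring what RecordReader.getValueInOneColumn does, and the very large fetch size simply means "no batch limit" for the sketch. Device and sensor names are placeholders.

// Sketch: scan sensor_1 of device_1 row group by row group.
DynamicOneColumnData column = null;
for (RowGroupReader rowGroup : reader.getAllRowGroupReaders()) {
    if (!"device_1".equals(rowGroup.getDeltaObjectUID())
        || !rowGroup.containsMeasurement("sensor_1")) {
        continue;
    }
    column = rowGroup.readOneColumn("sensor_1", column, Integer.MAX_VALUE);
}
System.out.println(column == null ? "series not found" : "points read: " + column.valueLength);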
b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/ValueReader.java index 8b7ea545..f4c4dead 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/ValueReader.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/ValueReader.java @@ -22,658 +22,688 @@ import cn.edu.tsinghua.tsfile.timeseries.read.query.DynamicOneColumnData; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; import java.util.List; - import static cn.edu.tsinghua.tsfile.format.Encoding.*; /** - * @author Jinrui Zhang - * This class is mainly used to read one column of data in RowGroup. - * It provides a number of different methods to read data - * in different ways. + * @author Jinrui Zhang This class is mainly used to read one column of data in RowGroup. It + * provides a number of different methods to read data in different ways. */ public class ValueReader { - private static final Logger LOG = LoggerFactory.getLogger(ValueReader.class); - - public Decoder decoder; - public Decoder timeDecoder; - public Decoder freqDecoder; - public long fileOffset = -1; - public long totalSize = -1; - public TSDataType dataType; - public TsDigest digest; - public ITsRandomAccessFileReader raf; - public List enumValues; - public CompressionTypeName compressionTypeName; - public long rowNums; - private long startTime, endTime; - - // save the mainFrequency of this page - public List mainFrequency = null; - - /** - * @param offset Offset for current column in file. - * @param totalSize Total bytes size for this column. - * @param dataType Data type of this column - * @param digest Digest for this column. - */ - public ValueReader(long offset, long totalSize, TSDataType dataType, TsDigest digest) { - Encoding timeEncoding = getEncodingByString(TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder); - this.timeDecoder = Decoder.getDecoderByType(timeEncoding, TSDataType.INT64); - // this.timeDecoder = new DeltaBinaryDecoder.LongDeltaDecoder(); - this.fileOffset = offset; - this.totalSize = totalSize; - - this.decoder = null; - this.dataType = dataType; - this.digest = digest; + private static final Logger LOG = LoggerFactory.getLogger(ValueReader.class); + + public Decoder decoder; + public Decoder timeDecoder; + public Decoder freqDecoder; + public long fileOffset = -1; + public long totalSize = -1; + public TSDataType dataType; + public TsDigest digest; + public ITsRandomAccessFileReader raf; + public List enumValues; + public CompressionTypeName compressionTypeName; + public long rowNums; + private long startTime, endTime; + + // save the mainFrequency of this page + public List mainFrequency = null; + + /** + * @param offset Offset for current column in file. + * @param totalSize Total bytes size for this column. + * @param dataType Data type of this column + * @param digest Digest for this column. 
+ */ + public ValueReader(long offset, long totalSize, TSDataType dataType, TsDigest digest) { + Encoding timeEncoding = + getEncodingByString(TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder); + this.timeDecoder = Decoder.getDecoderByType(timeEncoding, TSDataType.INT64); + // this.timeDecoder = new DeltaBinaryDecoder.LongDeltaDecoder(); + this.fileOffset = offset; + this.totalSize = totalSize; + + this.decoder = null; + this.dataType = dataType; + this.digest = digest; + } + + public ValueReader(long offset, long totalSize, TSDataType dataType, TsDigest digest, + ITsRandomAccessFileReader raf, CompressionTypeName compressionTypeName) { + this(offset, totalSize, dataType, digest); + this.compressionTypeName = compressionTypeName; + this.raf = raf; + } + + /** + * @param offset Column Offset in current file + * @param totalSize Total bytes size for this column + * @param dataType DataType for this column + * @param digest Digest for this column including time and value digests + * @param raf RandomAccessFileReader stream + * @param enumValues EnumValues if this column's dataType is ENUM + * @param compressionTypeName CompressionType used for this column + * @param rowNums Total of rows for this column + */ + public ValueReader(long offset, long totalSize, TSDataType dataType, TsDigest digest, + ITsRandomAccessFileReader raf, List enumValues, + CompressionTypeName compressionTypeName, long rowNums, long startTime, long endTime) { + this(offset, totalSize, dataType, digest, raf, compressionTypeName); + this.enumValues = enumValues; + this.rowNums = rowNums; + this.startTime = startTime; + this.endTime = endTime; + } + + /** + * Read time value from the page and return them. + * + * @param page InputStream + * @param size time size + * @param skip If skip is true, then return long[] which is null. + * @return common timestamp + * @throws IOException cannot init time value + */ + public long[] initTimeValue(InputStream page, int size, boolean skip) throws IOException { + long[] res = null; + int idx = 0; + + int length = ReadWriteStreamUtils.readUnsignedVarInt(page); + byte[] buf = new byte[length]; + int readSize = 0; + readSize = page.read(buf, 0, length); + if (readSize != length) { + throw new IOException("Expect byte size : " + totalSize + ". 
Read size : " + readSize); } - - public ValueReader(long offset, long totalSize, TSDataType dataType, TsDigest digest, ITsRandomAccessFileReader raf, - CompressionTypeName compressionTypeName) { - this(offset, totalSize, dataType, digest); - this.compressionTypeName = compressionTypeName; - this.raf = raf; + if (!skip) { + ByteArrayInputStream bis = new ByteArrayInputStream(buf); + res = new long[size]; + while (timeDecoder.hasNext(bis)) { + res[idx++] = timeDecoder.readLong(bis); + } } - /** - * @param offset Column Offset in current file - * @param totalSize Total bytes size for this column - * @param dataType DataType for this column - * @param digest Digest for this column including time and value digests - * @param raf RandomAccessFileReader stream - * @param enumValues EnumValues if this column's dataType is ENUM - * @param compressionTypeName CompressionType used for this column - * @param rowNums Total of rows for this column - */ - public ValueReader(long offset, long totalSize, TSDataType dataType, TsDigest digest, ITsRandomAccessFileReader raf, - List enumValues, CompressionTypeName compressionTypeName, long rowNums, long startTime, long endTime) { - this(offset, totalSize, dataType, digest, raf, compressionTypeName); - this.enumValues = enumValues; - this.rowNums = rowNums; - this.startTime = startTime; - this.endTime = endTime; - } + return res; + } - /** - * Read time value from the page and return them. - * - * @param page InputStream - * @param size time size - * @param skip If skip is true, then return long[] which is null. - * @return common timestamp - * @throws IOException cannot init time value - */ - public long[] initTimeValue(InputStream page, int size, boolean skip) throws IOException { - long[] res = null; - int idx = 0; - - int length = ReadWriteStreamUtils.readUnsignedVarInt(page); - byte[] buf = new byte[length]; - int readSize = 0; - readSize = page.read(buf, 0, length); - if (readSize != length) { - throw new IOException("Expect byte size : " + totalSize + ". Read size : " + readSize); - } - if (!skip) { - ByteArrayInputStream bis = new ByteArrayInputStream(buf); - res = new long[size]; - while (timeDecoder.hasNext(bis)) { - res[idx++] = timeDecoder.readLong(bis); - } - } + public ByteArrayInputStream initBAIS() throws IOException { + int length = (int) this.totalSize; + byte[] buf = new byte[length]; + int readSize = 0; - return res; + raf.seek(fileOffset); + readSize = raf.read(buf, 0, length); + if (readSize != length) { + throw new IOException("Expect byte size : " + length + ". Read size : " + readSize); } - public ByteArrayInputStream initBAIS() throws IOException { - int length = (int) this.totalSize; - byte[] buf = new byte[length]; - int readSize = 0; - - raf.seek(fileOffset); - readSize = raf.read(buf, 0, length); - if (readSize != length) { - throw new IOException("Expect byte size : " + length + ". Read size : " + readSize); - } - - ByteArrayInputStream bais = new ByteArrayInputStream(buf); - return bais; + ByteArrayInputStream bais = new ByteArrayInputStream(buf); + return bais; + } + + public ByteArrayInputStream initBAISForOnePage(long pageOffset) throws IOException { + int length = (int) (this.totalSize - (pageOffset - fileOffset)); + byte[] buf = new byte[length]; + int readSize = 0; + raf.seek(pageOffset); + readSize = raf.read(buf, 0, length); + if (readSize != length) { + throw new IOException("Expect byte size : " + length + ". 
Read size : " + readSize); } - public ByteArrayInputStream initBAISForOnePage(long pageOffset) throws IOException { - int length = (int) (this.totalSize - (pageOffset - fileOffset)); - byte[] buf = new byte[length]; - int readSize = 0; - raf.seek(pageOffset); - readSize = raf.read(buf, 0, length); - if (readSize != length) { - throw new IOException("Expect byte size : " + length + ". Read size : " + readSize); - } - - return new ByteArrayInputStream(buf); + return new ByteArrayInputStream(buf); + } + + /** + * //TODO what about timeFilters? Judge whether current column is satisfied for given filters + */ + public boolean columnSatisfied(SingleSeriesFilterExpression valueFilter, + SingleSeriesFilterExpression freqFilter, SingleSeriesFilterExpression timeFilter) { + TsDigest digest = null; + DigestForFilter valueDigest = null; + + if (valueFilter != null) { + digest = getDigest(); + if (getDataType() == TSDataType.ENUMS) { + String minString = enumValues.get( + BytesUtils.bytesToInt(digest.getStatistics().get(StatisticConstant.MIN_VALUE).array()) + - 1); + String maxString = enumValues.get( + BytesUtils.bytesToInt(digest.getStatistics().get(StatisticConstant.MAX_VALUE).array()) + - 1); + valueDigest = new DigestForFilter(ByteBuffer.wrap(BytesUtils.StringToBytes(minString)), + ByteBuffer.wrap(BytesUtils.StringToBytes(maxString)), TSDataType.TEXT); + } else { + valueDigest = new DigestForFilter(digest.getStatistics().get(StatisticConstant.MIN_VALUE), + digest.getStatistics().get(StatisticConstant.MAX_VALUE), getDataType()); + } } - /** - * //TODO what about timeFilters? - * Judge whether current column is satisfied for given filters - */ - public boolean columnSatisfied(SingleSeriesFilterExpression valueFilter, SingleSeriesFilterExpression freqFilter, - SingleSeriesFilterExpression timeFilter) { - TsDigest digest = null; - DigestForFilter valueDigest = null; - - if (valueFilter != null) { - digest = getDigest(); - if (getDataType() == TSDataType.ENUMS) { - String minString = enumValues.get(BytesUtils.bytesToInt(digest.getStatistics().get(StatisticConstant.MIN_VALUE).array()) - 1); - String maxString = enumValues.get(BytesUtils.bytesToInt(digest.getStatistics().get(StatisticConstant.MAX_VALUE).array()) - 1); - valueDigest = new DigestForFilter(ByteBuffer.wrap(BytesUtils.StringToBytes(minString)), ByteBuffer.wrap(BytesUtils.StringToBytes(maxString)), TSDataType.TEXT); - } else { - valueDigest = new DigestForFilter(digest.getStatistics().get(StatisticConstant.MIN_VALUE) - , digest.getStatistics().get(StatisticConstant.MAX_VALUE) - , getDataType()); - } - } - - DigestVisitor valueVisitor = new DigestVisitor(); - IntervalTimeVisitor timeVisitor = new IntervalTimeVisitor(); - if (valueVisitor.satisfy(valueDigest, valueFilter) && timeVisitor.satisfy(timeFilter, startTime, endTime)) { - LOG.debug(String.format("current series is satisfy the time filter and value filter, start time : %s, end time : %s", startTime, endTime)); - return true; - } - return false; + DigestVisitor valueVisitor = new DigestVisitor(); + IntervalTimeVisitor timeVisitor = new IntervalTimeVisitor(); + if (valueVisitor.satisfy(valueDigest, valueFilter) + && timeVisitor.satisfy(timeFilter, startTime, endTime)) { + LOG.debug(String.format( + "current series is satisfy the time filter and value filter, start time : %s, end time : %s", + startTime, endTime)); + return true; } - - /** - * Judge whether current page is satisfied for given filters according to - * the digests of this page - */ - public boolean 
pageSatisfied(DigestForFilter timeDigestFF, DigestForFilter valueDigestFF, - SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression valueFilter, SingleSeriesFilterExpression freqFilter) { - DigestVisitor digestVisitor = new DigestVisitor(); - if ((valueFilter == null && timeFilter == null) - || (valueFilter != null && (valueDigestFF == null || digestVisitor.satisfy(valueDigestFF, valueFilter))) - || (timeFilter != null && digestVisitor.satisfy(timeDigestFF, timeFilter))) { - return true; - } - return false; + return false; + } + + /** + * Judge whether current page is satisfied for given filters according to the digests of this page + */ + public boolean pageSatisfied(DigestForFilter timeDigestFF, DigestForFilter valueDigestFF, + SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression valueFilter, + SingleSeriesFilterExpression freqFilter) { + DigestVisitor digestVisitor = new DigestVisitor(); + if ((valueFilter == null && timeFilter == null) + || (valueFilter != null + && (valueDigestFF == null || digestVisitor.satisfy(valueDigestFF, valueFilter))) + || (timeFilter != null && digestVisitor.satisfy(timeDigestFF, timeFilter))) { + return true; } - - /** - * Read the whole column without filters. - * @param res result - * @param fetchSize size of result - * @return DynamicOneColumnData - * @throws IOException occurs error in read one column - */ - public DynamicOneColumnData readOneColumn(DynamicOneColumnData res, int fetchSize) throws IOException { - return readOneColumnUseFilter(res, fetchSize, null, null, null); + return false; + } + + /** + * Read the whole column without filters. + * + * @param res result + * @param fetchSize size of result + * @return DynamicOneColumnData + * @throws IOException occurs error in read one column + */ + public DynamicOneColumnData readOneColumn(DynamicOneColumnData res, int fetchSize) + throws IOException { + return readOneColumnUseFilter(res, fetchSize, null, null, null); + } + + public SingleValueVisitor getSingleValueVisitorByDataType(TSDataType type, + SingleSeriesFilterExpression filter) { + switch (type) { + case INT32: + return new SingleValueVisitor(filter); + case INT64: + return new SingleValueVisitor(filter); + case FLOAT: + return new SingleValueVisitor(filter); + case DOUBLE: + return new SingleValueVisitor(filter); + default: + return SingleValueVisitorFactory.getSingleValueVisitor(type); } - - public SingleValueVisitor getSingleValueVisitorByDataType(TSDataType type, SingleSeriesFilterExpression filter) { - switch (type) { - case INT32: - return new SingleValueVisitor(filter); - case INT64: - return new SingleValueVisitor(filter); - case FLOAT: - return new SingleValueVisitor(filter); - case DOUBLE: - return new SingleValueVisitor(filter); - default: - return SingleValueVisitorFactory.getSingleValueVisitor(type); - } + } + + /** + * Read one column values with specific filters. + * + * @param res result + * @param fetchSize size of result + * @param timeFilter filter for time. + * @param freqFilter filter for frequency. + * @param valueFilter filter for value. 
+ * @return answer DynamicOneColumnData + * @throws IOException occurs error in read one column using filter + */ + public DynamicOneColumnData readOneColumnUseFilter(DynamicOneColumnData res, int fetchSize, + SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, + SingleSeriesFilterExpression valueFilter) throws IOException { + + SingleValueVisitor timeVisitor = null; + if (timeFilter != null) { + timeVisitor = getSingleValueVisitorByDataType(TSDataType.INT64, timeFilter); } - - /** - * Read one column values with specific filters. - * @param res result - * @param fetchSize size of result - * @param timeFilter filter for time. - * @param freqFilter filter for frequency. - * @param valueFilter filter for value. - * @return answer DynamicOneColumnData - * @throws IOException occurs error in read one column using filter - */ - public DynamicOneColumnData readOneColumnUseFilter(DynamicOneColumnData res, int fetchSize, - SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, SingleSeriesFilterExpression valueFilter) - throws IOException { - - SingleValueVisitor timeVisitor = null; - if (timeFilter != null) { - timeVisitor = getSingleValueVisitorByDataType(TSDataType.INT64, timeFilter); - } - SingleValueVisitor valueVisitor = null; - if (valueFilter != null) { - valueVisitor = getSingleValueVisitorByDataType(getDataType(), valueFilter); - } - - if (res == null) { - res = new DynamicOneColumnData(getDataType(), true); - res.pageOffset = this.fileOffset; - res.leftSize = this.totalSize; - } - - // that res.pageOffset is -1 represents reading from the start ofcurrent column. - if (res.pageOffset == -1) { - res.pageOffset = this.fileOffset; - } - - // record the length of res before reading - int currentLength = res.valueLength; - - if (columnSatisfied(valueFilter, freqFilter, timeFilter)) { - LOG.debug("ValueFilter satisfied Or ValueFilter is null. [ValueFilter] is: " + valueFilter); - - // Initialize the bis according to the offset in last read. 
- ByteArrayInputStream bis = initBAISForOnePage(res.pageOffset); - PageReader pageReader = new PageReader(bis, compressionTypeName); - int pageCount = 0; - while ((res.pageOffset - fileOffset) < totalSize && (res.valueLength - currentLength) < fetchSize) { - int lastAvailable = bis.available(); - - pageCount++; - LOG.debug("read one page using filter, the page count is {}", pageCount); - PageHeader pageHeader = pageReader.getNextPageHeader(); - - // construct valueFilter - Digest pageDigest = pageHeader.data_page_header.getDigest(); - DigestForFilter valueDigestFF = null; - if (pageDigest != null) { - if (getDataType() == TSDataType.ENUMS) { - String minString = enumValues.get(BytesUtils.bytesToInt(pageDigest.getStatistics().get(StatisticConstant.MIN_VALUE).array()) - 1); - String maxString = enumValues.get(BytesUtils.bytesToInt(pageDigest.getStatistics().get(StatisticConstant.MAX_VALUE).array()) - 1); - valueDigestFF = new DigestForFilter(ByteBuffer.wrap(BytesUtils.StringToBytes(minString)), ByteBuffer.wrap(BytesUtils.StringToBytes(maxString)), TSDataType.TEXT); - } else { - valueDigestFF = new DigestForFilter(pageDigest.getStatistics().get(StatisticConstant.MIN_VALUE) - , pageDigest.getStatistics().get(StatisticConstant.MAX_VALUE), - getDataType()); - } - } - - // construct timeFilter - long mint = pageHeader.data_page_header.min_timestamp; - long maxt = pageHeader.data_page_header.max_timestamp; - DigestForFilter timeDigestFF = new DigestForFilter(mint, maxt); - - if (pageSatisfied(timeDigestFF, valueDigestFF, timeFilter, valueFilter, freqFilter)) { - - LOG.debug("page " + pageCount + " satisfied filter"); - - InputStream page = pageReader.getNextPage(); - - setDecoder(Decoder.getDecoderByType(pageHeader.getData_page_header().getEncoding(), getDataType())); - - // get timevalues in this page - long[] timeValues = initTimeValue(page, pageHeader.data_page_header.num_rows, false); - - try { - int timeIdx = 0; - switch (dataType) { - case BOOLEAN: - while (decoder.hasNext(page)) { - boolean v = decoder.readBoolean(page); - if ((timeFilter == null || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) && - (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) { - res.putBoolean(v); - res.putTime(timeValues[timeIdx]); - } - timeIdx++; - } - break; - case INT32: - while (decoder.hasNext(page)) { - int v = decoder.readInt(page); - if ((timeFilter == null || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) && - (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) { - res.putInt(v); - res.putTime(timeValues[timeIdx]); - } - timeIdx++; - } - break; - case INT64: - while (decoder.hasNext(page)) { - long v = decoder.readLong(page); - if ((timeFilter == null || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) && - (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) { - res.putLong(v); - res.putTime(timeValues[timeIdx]); - } - timeIdx++; - } - break; - case FLOAT: - while (decoder.hasNext(page)) { - float v = decoder.readFloat(page); - if ((timeFilter == null || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) && - (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) { - res.putFloat(v); - res.putTime(timeValues[timeIdx]); - } - timeIdx++; - } - break; - case DOUBLE: - while (decoder.hasNext(page)) { - double v = decoder.readDouble(page); - if ((timeFilter == null || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) && - (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) 
{ - res.putDouble(v); - res.putTime(timeValues[timeIdx]); - } - timeIdx++; - } - break; - case TEXT: - while (decoder.hasNext(page)) { - Binary v = decoder.readBinary(page); - if ((timeFilter == null || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) && - (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) { - res.putBinary(v); - res.putTime(timeValues[timeIdx]); - } - timeIdx++; - } - break; - case ENUMS: - while (decoder.hasNext(page)) { - int v = decoder.readInt(page) - 1; - if ((timeFilter == null || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) && - (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) { - res.putBinary(Binary.valueOf(enumValues.get(v))); - res.putTime(timeValues[timeIdx]); - } - timeIdx++; - } - break; - default: - throw new IOException("Data type not supported. " + dataType); - } - } catch (IOException e) { - e.printStackTrace(); - } - - } else { - pageReader.skipCurrentPage(); - } - res.pageOffset += (lastAvailable - bis.available()); - } - - // Represents current Column has been read all, prepare for next column in another RowGroup. - if ((res.pageOffset - fileOffset) >= totalSize) { - res.plusRowGroupIndexAndInitPageOffset(); - } - return res; - } - return res; + SingleValueVisitor valueVisitor = null; + if (valueFilter != null) { + valueVisitor = getSingleValueVisitorByDataType(getDataType(), valueFilter); } - /** - * Read time-value pairs whose time is be included in timeRet. WARNING: this - * function is only for "time" Series - * - * @param timestamps array of the time. - * @return answer DynamicOneColumnData using given timestamps - * @throws IOException occurs error in read - */ - public DynamicOneColumnData getValuesForGivenValues(long[] timestamps) throws IOException { - DynamicOneColumnData res = new DynamicOneColumnData(dataType, true); - - if (timestamps.length == 0) { - return res; - } - - // time index of timeValues - int timeIdx = 0; - - ByteArrayInputStream bis = initBAIS(); - PageReader pageReader = new PageReader(bis, compressionTypeName); - int pageCount = 0; - - while (timeIdx < timestamps.length && pageReader.hasNextPage()) { - pageCount++; - LOG.debug("read one page using common timestamps, the page count is {}", pageCount); - PageHeader pageHeader = pageReader.getNextPageHeader(); - - long timeMaxv = pageHeader.data_page_header.getMax_timestamp(); - - // If there may be some values acceptable in this page - if (timeIdx < timestamps.length && timeMaxv >= timestamps[timeIdx]) { - - InputStream page = pageReader.getNextPage(); - - setDecoder(Decoder.getDecoderByType(pageHeader.getData_page_header().getEncoding(), getDataType())); - - long[] timeValues = initTimeValue(page, pageHeader.data_page_header.num_rows, false); - - int i = 0; - switch (dataType) { - case BOOLEAN: - while (i < timeValues.length && timeIdx < timestamps.length) { - while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { - i++; - decoder.readBoolean(page); - } - if (i < timeValues.length && timeValues[i] == timestamps[timeIdx]) { - res.putBoolean(decoder.readBoolean(page)); - res.putTime(timeValues[i]); - i++; - timeIdx++; - } - while (timeIdx < timestamps.length && i < timeValues.length && timestamps[timeIdx] < timeValues[i]) { - timeIdx++; - } - } - break; - case INT32: - while (i < timeValues.length && timeIdx < timestamps.length) { - while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { - i++; - decoder.readInt(page); - } - if (i < timeValues.length && timeValues[i] == 
timestamps[timeIdx]) { - res.putInt(decoder.readInt(page)); - res.putTime(timeValues[i]); - i++; - timeIdx++; - } - while (timeIdx < timestamps.length && i < timeValues.length && timestamps[timeIdx] < timeValues[i]) { - timeIdx++; - } - } - break; - case INT64: - while (i < timeValues.length && timeIdx < timestamps.length) { - while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { - i++; - decoder.readLong(page); - } - if (i < timeValues.length && timeValues[i] == timestamps[timeIdx]) { - res.putLong(decoder.readLong(page)); - res.putTime(timeValues[i]); - i++; - timeIdx++; - } - while (timeIdx < timestamps.length && i < timeValues.length && timestamps[timeIdx] < timeValues[i]) { - timeIdx++; - } - } - break; - case FLOAT: - while (i < timeValues.length && timeIdx < timestamps.length) { - while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { - i++; - decoder.readFloat(page); - } - if (i < timeValues.length && timeValues[i] == timestamps[timeIdx]) { - res.putFloat(decoder.readFloat(page)); - res.putTime(timeValues[i]); - i++; - timeIdx++; - } - while (timeIdx < timestamps.length && i < timeValues.length && timestamps[timeIdx] < timeValues[i]) { - timeIdx++; - } - } - break; - case DOUBLE: - while (i < timeValues.length && timeIdx < timestamps.length) { - while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { - i++; - decoder.readDouble(page); - } - if (i < timeValues.length && timeValues[i] == timestamps[timeIdx]) { - res.putDouble(decoder.readDouble(page)); - res.putTime(timeValues[i]); - i++; - timeIdx++; - } - while (timeIdx < timestamps.length && i < timeValues.length && timestamps[timeIdx] < timeValues[i]) { - timeIdx++; - } - } - break; - case TEXT: - while (i < timeValues.length && timeIdx < timestamps.length) { - while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { - i++; - decoder.readBinary(page); - } - if (i < timeValues.length && timeValues[i] == timestamps[timeIdx]) { - res.putBinary(decoder.readBinary(page)); - res.putTime(timeValues[i]); - i++; - timeIdx++; - } - while (timeIdx < timestamps.length && i < timeValues.length && timestamps[timeIdx] < timeValues[i]) { - timeIdx++; - } - } - break; - case ENUMS: - while (i < timeValues.length && timeIdx < timestamps.length) { - while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { - i++; - decoder.readInt(page); - } - if (i < timeValues.length && timeValues[i] == timestamps[timeIdx]) { - res.putBinary(Binary.valueOf(enumValues.get(decoder.readInt(page) - 1))); - res.putTime(timeValues[i]); - i++; - timeIdx++; - } - while (timeIdx < timestamps.length && i < timeValues.length && timestamps[timeIdx] < timeValues[i]) { - timeIdx++; - } - } - break; - default: - throw new IOException("Data Type not support"); - } - } else { - pageReader.skipCurrentPage(); - } - } - return res; - } - - - public void setDecoder(Decoder d) { - this.decoder = d; - } - - public long getFileOffset() { - return this.fileOffset; - } - - public void setFileOffset(long offset) { - this.fileOffset = offset; + if (res == null) { + res = new DynamicOneColumnData(getDataType(), true); + res.pageOffset = this.fileOffset; + res.leftSize = this.totalSize; } - public long getTotalSize() { - return this.totalSize; + // that res.pageOffset is -1 represents reading from the start ofcurrent column. 
+ if (res.pageOffset == -1) { + res.pageOffset = this.fileOffset; } - public TsDigest getDigest() { - return this.digest; - } + // record the length of res before reading + int currentLength = res.valueLength; + + if (columnSatisfied(valueFilter, freqFilter, timeFilter)) { + LOG.debug("ValueFilter satisfied Or ValueFilter is null. [ValueFilter] is: " + valueFilter); + + // Initialize the bis according to the offset in last read. + ByteArrayInputStream bis = initBAISForOnePage(res.pageOffset); + PageReader pageReader = new PageReader(bis, compressionTypeName); + int pageCount = 0; + while ((res.pageOffset - fileOffset) < totalSize + && (res.valueLength - currentLength) < fetchSize) { + int lastAvailable = bis.available(); + + pageCount++; + LOG.debug("read one page using filter, the page count is {}", pageCount); + PageHeader pageHeader = pageReader.getNextPageHeader(); + + // construct valueFilter + Digest pageDigest = pageHeader.data_page_header.getDigest(); + DigestForFilter valueDigestFF = null; + if (pageDigest != null) { + if (getDataType() == TSDataType.ENUMS) { + String minString = enumValues.get(BytesUtils.bytesToInt( + pageDigest.getStatistics().get(StatisticConstant.MIN_VALUE).array()) - 1); + String maxString = enumValues.get(BytesUtils.bytesToInt( + pageDigest.getStatistics().get(StatisticConstant.MAX_VALUE).array()) - 1); + valueDigestFF = + new DigestForFilter(ByteBuffer.wrap(BytesUtils.StringToBytes(minString)), + ByteBuffer.wrap(BytesUtils.StringToBytes(maxString)), TSDataType.TEXT); + } else { + valueDigestFF = + new DigestForFilter(pageDigest.getStatistics().get(StatisticConstant.MIN_VALUE), + pageDigest.getStatistics().get(StatisticConstant.MAX_VALUE), getDataType()); + } + } - public TSDataType getDataType() { - return this.dataType; - } + // construct timeFilter + long mint = pageHeader.data_page_header.min_timestamp; + long maxt = pageHeader.data_page_header.max_timestamp; + DigestForFilter timeDigestFF = new DigestForFilter(mint, maxt); - public List getMainFrequency() { - return mainFrequency; - } + if (pageSatisfied(timeDigestFF, valueDigestFF, timeFilter, valueFilter, freqFilter)) { - public void setMainFrequency(List mainFrequency) { - this.mainFrequency = mainFrequency; - } + LOG.debug("page " + pageCount + " satisfied filter"); - public long getNumRows() { - return rowNums; - } + InputStream page = pageReader.getNextPage(); - public void setNumRows(long rowNums) { - this.rowNums = rowNums; - } + setDecoder(Decoder.getDecoderByType(pageHeader.getData_page_header().getEncoding(), + getDataType())); - public List getEnumValues() { - return enumValues; - } + // get timevalues in this page + long[] timeValues = initTimeValue(page, pageHeader.data_page_header.num_rows, false); - public void setEnumValues(List enumValues) { - this.enumValues = enumValues; - } + try { + int timeIdx = 0; + switch (dataType) { + case BOOLEAN: + while (decoder.hasNext(page)) { + boolean v = decoder.readBoolean(page); + if ((timeFilter == null + || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) + && (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) { + res.putBoolean(v); + res.putTime(timeValues[timeIdx]); + } + timeIdx++; + } + break; + case INT32: + while (decoder.hasNext(page)) { + int v = decoder.readInt(page); + if ((timeFilter == null + || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) + && (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) { + res.putInt(v); + res.putTime(timeValues[timeIdx]); + } + timeIdx++; + } + 
break; + case INT64: + while (decoder.hasNext(page)) { + long v = decoder.readLong(page); + if ((timeFilter == null + || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) + && (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) { + res.putLong(v); + res.putTime(timeValues[timeIdx]); + } + timeIdx++; + } + break; + case FLOAT: + while (decoder.hasNext(page)) { + float v = decoder.readFloat(page); + if ((timeFilter == null + || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) + && (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) { + res.putFloat(v); + res.putTime(timeValues[timeIdx]); + } + timeIdx++; + } + break; + case DOUBLE: + while (decoder.hasNext(page)) { + double v = decoder.readDouble(page); + if ((timeFilter == null + || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) + && (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) { + res.putDouble(v); + res.putTime(timeValues[timeIdx]); + } + timeIdx++; + } + break; + case TEXT: + while (decoder.hasNext(page)) { + Binary v = decoder.readBinary(page); + if ((timeFilter == null + || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) + && (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) { + res.putBinary(v); + res.putTime(timeValues[timeIdx]); + } + timeIdx++; + } + break; + case ENUMS: + while (decoder.hasNext(page)) { + int v = decoder.readInt(page) - 1; + if ((timeFilter == null + || timeVisitor.satisfyObject(timeValues[timeIdx], timeFilter)) + && (valueFilter == null || valueVisitor.satisfyObject(v, valueFilter))) { + res.putBinary(Binary.valueOf(enumValues.get(v))); + res.putTime(timeValues[timeIdx]); + } + timeIdx++; + } + break; + default: + throw new IOException("Data type not supported. " + dataType); + } + } catch (IOException e) { + e.printStackTrace(); + } - public long getStartTime() { - return this.startTime; + } else { + pageReader.skipCurrentPage(); + } + res.pageOffset += (lastAvailable - bis.available()); + } + + // Represents current Column has been read all, prepare for next column in another RowGroup. + if ((res.pageOffset - fileOffset) >= totalSize) { + res.plusRowGroupIndexAndInitPageOffset(); + } + return res; } - - public long getEndTime() { - return this.endTime; + return res; + } + + /** + * Read time-value pairs whose time is be included in timeRet. WARNING: this function is only for + * "time" Series + * + * @param timestamps array of the time. 
+ * @return answer DynamicOneColumnData using given timestamps + * @throws IOException occurs error in read + */ + public DynamicOneColumnData getValuesForGivenValues(long[] timestamps) throws IOException { + DynamicOneColumnData res = new DynamicOneColumnData(dataType, true); + + if (timestamps.length == 0) { + return res; } - private Encoding getEncodingByString(String encoding) { - switch (encoding) { - case "PLAIN": - return PLAIN; - case "PLAIN_DICTIONARY": - return PLAIN_DICTIONARY; - case "RLE": - return RLE; - case "DELTA_BINARY_PACKED": - return DELTA_BINARY_PACKED; - case "DELTA_LENGTH_BYTE_ARRAY": - return DELTA_LENGTH_BYTE_ARRAY; - case "DELTA_BYTE_ARRAY": - return DELTA_BYTE_ARRAY; - case "RLE_DICTIONARY": - return RLE_DICTIONARY; - case "DIFF": - return DIFF; - case "TS_2DIFF": - return TS_2DIFF; - case "BITMAP": - return BITMAP; - case "PLA": - return PLA; - case "SDT": - return SDT; - case "DFT": - return DFT; - case "GORILLA": - return GORILLA; - default: - return null; + // time index of timeValues + int timeIdx = 0; + + ByteArrayInputStream bis = initBAIS(); + PageReader pageReader = new PageReader(bis, compressionTypeName); + int pageCount = 0; + + while (timeIdx < timestamps.length && pageReader.hasNextPage()) { + pageCount++; + LOG.debug("read one page using common timestamps, the page count is {}", pageCount); + PageHeader pageHeader = pageReader.getNextPageHeader(); + + long timeMaxv = pageHeader.data_page_header.getMax_timestamp(); + + // If there may be some values acceptable in this page + if (timeIdx < timestamps.length && timeMaxv >= timestamps[timeIdx]) { + + InputStream page = pageReader.getNextPage(); + + setDecoder(Decoder.getDecoderByType(pageHeader.getData_page_header().getEncoding(), + getDataType())); + + long[] timeValues = initTimeValue(page, pageHeader.data_page_header.num_rows, false); + + int i = 0; + switch (dataType) { + case BOOLEAN: + while (i < timeValues.length && timeIdx < timestamps.length) { + while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { + i++; + decoder.readBoolean(page); + } + if (i < timeValues.length && timeValues[i] == timestamps[timeIdx]) { + res.putBoolean(decoder.readBoolean(page)); + res.putTime(timeValues[i]); + i++; + timeIdx++; + } + while (timeIdx < timestamps.length && i < timeValues.length + && timestamps[timeIdx] < timeValues[i]) { + timeIdx++; + } + } + break; + case INT32: + while (i < timeValues.length && timeIdx < timestamps.length) { + while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { + i++; + decoder.readInt(page); + } + if (i < timeValues.length && timeValues[i] == timestamps[timeIdx]) { + res.putInt(decoder.readInt(page)); + res.putTime(timeValues[i]); + i++; + timeIdx++; + } + while (timeIdx < timestamps.length && i < timeValues.length + && timestamps[timeIdx] < timeValues[i]) { + timeIdx++; + } + } + break; + case INT64: + while (i < timeValues.length && timeIdx < timestamps.length) { + while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { + i++; + decoder.readLong(page); + } + if (i < timeValues.length && timeValues[i] == timestamps[timeIdx]) { + res.putLong(decoder.readLong(page)); + res.putTime(timeValues[i]); + i++; + timeIdx++; + } + while (timeIdx < timestamps.length && i < timeValues.length + && timestamps[timeIdx] < timeValues[i]) { + timeIdx++; + } + } + break; + case FLOAT: + while (i < timeValues.length && timeIdx < timestamps.length) { + while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { + i++; + 
decoder.readFloat(page); + } + if (i < timeValues.length && timeValues[i] == timestamps[timeIdx]) { + res.putFloat(decoder.readFloat(page)); + res.putTime(timeValues[i]); + i++; + timeIdx++; + } + while (timeIdx < timestamps.length && i < timeValues.length + && timestamps[timeIdx] < timeValues[i]) { + timeIdx++; + } + } + break; + case DOUBLE: + while (i < timeValues.length && timeIdx < timestamps.length) { + while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { + i++; + decoder.readDouble(page); + } + if (i < timeValues.length && timeValues[i] == timestamps[timeIdx]) { + res.putDouble(decoder.readDouble(page)); + res.putTime(timeValues[i]); + i++; + timeIdx++; + } + while (timeIdx < timestamps.length && i < timeValues.length + && timestamps[timeIdx] < timeValues[i]) { + timeIdx++; + } + } + break; + case TEXT: + while (i < timeValues.length && timeIdx < timestamps.length) { + while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { + i++; + decoder.readBinary(page); + } + if (i < timeValues.length && timeValues[i] == timestamps[timeIdx]) { + res.putBinary(decoder.readBinary(page)); + res.putTime(timeValues[i]); + i++; + timeIdx++; + } + while (timeIdx < timestamps.length && i < timeValues.length + && timestamps[timeIdx] < timeValues[i]) { + timeIdx++; + } + } + break; + case ENUMS: + while (i < timeValues.length && timeIdx < timestamps.length) { + while (i < timeValues.length && timeValues[i] < timestamps[timeIdx]) { + i++; + decoder.readInt(page); + } + if (i < timeValues.length && timeValues[i] == timestamps[timeIdx]) { + res.putBinary(Binary.valueOf(enumValues.get(decoder.readInt(page) - 1))); + res.putTime(timeValues[i]); + i++; + timeIdx++; + } + while (timeIdx < timestamps.length && i < timeValues.length + && timestamps[timeIdx] < timeValues[i]) { + timeIdx++; + } + } + break; + default: + throw new IOException("Data Type not support"); } + } else { + pageReader.skipCurrentPage(); + } + } + return res; + } + + + public void setDecoder(Decoder d) { + this.decoder = d; + } + + public long getFileOffset() { + return this.fileOffset; + } + + public void setFileOffset(long offset) { + this.fileOffset = offset; + } + + public long getTotalSize() { + return this.totalSize; + } + + public TsDigest getDigest() { + return this.digest; + } + + public TSDataType getDataType() { + return this.dataType; + } + + public List getMainFrequency() { + return mainFrequency; + } + + public void setMainFrequency(List mainFrequency) { + this.mainFrequency = mainFrequency; + } + + public long getNumRows() { + return rowNums; + } + + public void setNumRows(long rowNums) { + this.rowNums = rowNums; + } + + public List getEnumValues() { + return enumValues; + } + + public void setEnumValues(List enumValues) { + this.enumValues = enumValues; + } + + public long getStartTime() { + return this.startTime; + } + + public long getEndTime() { + return this.endTime; + } + + private Encoding getEncodingByString(String encoding) { + switch (encoding) { + case "PLAIN": + return PLAIN; + case "PLAIN_DICTIONARY": + return PLAIN_DICTIONARY; + case "RLE": + return RLE; + case "DELTA_BINARY_PACKED": + return DELTA_BINARY_PACKED; + case "DELTA_LENGTH_BYTE_ARRAY": + return DELTA_LENGTH_BYTE_ARRAY; + case "DELTA_BYTE_ARRAY": + return DELTA_BYTE_ARRAY; + case "RLE_DICTIONARY": + return RLE_DICTIONARY; + case "DIFF": + return DIFF; + case "TS_2DIFF": + return TS_2DIFF; + case "BITMAP": + return BITMAP; + case "PLA": + return PLA; + case "SDT": + return SDT; + case "DFT": + return DFT; + case 
"GORILLA": + return GORILLA; + default: + return null; } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/management/FileStreamManager.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/management/FileStreamManager.java index 36a13284..dbaa4292 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/management/FileStreamManager.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/management/FileStreamManager.java @@ -2,46 +2,45 @@ import cn.edu.tsinghua.tsfile.common.utils.ITsRandomAccessFileReader; import cn.edu.tsinghua.tsfile.timeseries.read.TsRandomAccessLocalFileReader; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.FileNotFoundException; import java.io.IOException; /** - * This class provides some function to get one FileReader for one path. - * Maybe in the later version, every FileReader will be managed by this class. + * This class provides some function to get one FileReader for one path. Maybe in the later version, + * every FileReader will be managed by this class. * * @author Jinrui Zhang */ public class FileStreamManager { - private static final Logger logger = LoggerFactory.getLogger(FileStreamManager.class); - - private static class FileStreamManagerHolder{ - private static final FileStreamManager INSTANCE = new FileStreamManager(); - } - - private FileStreamManager() { - } + private static final Logger logger = LoggerFactory.getLogger(FileStreamManager.class); - public static final FileStreamManager getInstance() { - return FileStreamManagerHolder.INSTANCE; - } + private static class FileStreamManagerHolder { + private static final FileStreamManager INSTANCE = new FileStreamManager(); + } - public ITsRandomAccessFileReader getLocalRandomAccessFileReader(String path) throws FileNotFoundException { - return new TsRandomAccessLocalFileReader(path); - } + private FileStreamManager() {} - public void closeLocalRandomAccessFileReader(TsRandomAccessLocalFileReader localFileInput) throws IOException { - localFileInput.close(); - } + public static final FileStreamManager getInstance() { + return FileStreamManagerHolder.INSTANCE; + } + + public ITsRandomAccessFileReader getLocalRandomAccessFileReader(String path) + throws FileNotFoundException { + return new TsRandomAccessLocalFileReader(path); + } + + public void closeLocalRandomAccessFileReader(TsRandomAccessLocalFileReader localFileInput) + throws IOException { + localFileInput.close(); + } - public void close(ITsRandomAccessFileReader raf) { - try { - raf.close(); - } catch (IOException e) { - logger.error("Error when close RAF: {}", e.getMessage()); - } + public void close(ITsRandomAccessFileReader raf) { + try { + raf.close(); + } catch (IOException e) { + logger.error("Error when close RAF: {}", e.getMessage()); } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/management/SeriesSchema.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/management/SeriesSchema.java index 3ca73e31..f8bf258c 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/management/SeriesSchema.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/management/SeriesSchema.java @@ -2,42 +2,41 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; - import java.util.HashMap; /** - * This class define a schema for one time series. 
- * This schema includes three main parameters which represent the {@code name},the {@code dataType} and - * the {@code encoding} type for this time series. Some other arguments are put in {@code args} + * This class define a schema for one time series. This schema includes three main parameters which + * represent the {@code name},the {@code dataType} and the {@code encoding} type for this time + * series. Some other arguments are put in {@code args} * * @author Jinrui Zhang */ public class SeriesSchema { - public String name; - public TSDataType dataType; - public TSEncoding encoding; - private HashMap args; - - public SeriesSchema(String name, TSDataType dataType, TSEncoding encoding) { - this.name = name; - this.dataType = dataType; - this.encoding = encoding; - this.args = new HashMap<>(); - } - - public void putKeyValueToArgs(String key, String value) { - this.args.put(key, value); - } - - public Object getValueFromArgs(String key) { - return args.get(key); - } - - public HashMap getArgsMap() { - return args; - } - - public void setArgsMap(HashMap argsMap) { - this.args = argsMap; - } + public String name; + public TSDataType dataType; + public TSEncoding encoding; + private HashMap args; + + public SeriesSchema(String name, TSDataType dataType, TSEncoding encoding) { + this.name = name; + this.dataType = dataType; + this.encoding = encoding; + this.args = new HashMap<>(); + } + + public void putKeyValueToArgs(String key, String value) { + this.args.put(key, value); + } + + public Object getValueFromArgs(String key) { + return args.get(key); + } + + public HashMap getArgsMap() { + return args; + } + + public void setArgsMap(HashMap argsMap) { + this.args = argsMap; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/BatchReadRecordGenerator.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/BatchReadRecordGenerator.java index b07405a3..95441967 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/BatchReadRecordGenerator.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/BatchReadRecordGenerator.java @@ -2,7 +2,6 @@ import cn.edu.tsinghua.tsfile.common.exception.ProcessorException; import cn.edu.tsinghua.tsfile.timeseries.read.support.Path; - import java.io.IOException; import java.util.HashMap; import java.util.LinkedHashMap; @@ -16,95 +15,96 @@ * @author Jinrui Zhang */ public abstract class BatchReadRecordGenerator { - public LinkedHashMap retMap; - private LinkedHashMap hasMoreRet; - private int noRetCount; - private HashMap timeMap; - private PriorityQueue heap; - private int fetchSize; + public LinkedHashMap retMap; + private LinkedHashMap hasMoreRet; + private int noRetCount; + private HashMap timeMap; + private PriorityQueue heap; + private int fetchSize; - public BatchReadRecordGenerator(List paths, int fetchSize) throws ProcessorException, IOException { - noRetCount = 0; - retMap = new LinkedHashMap<>(); - hasMoreRet = new LinkedHashMap<>(); - timeMap = new HashMap<>(); - this.fetchSize = fetchSize; - // init for every Series - for (Path p : paths) { - DynamicOneColumnData res = getMoreRecordsForOneColumn(p, null); - retMap.put(p, res); - if (res.valueLength == 0) { - hasMoreRet.put(p, false); - noRetCount++; - } else { - hasMoreRet.put(p, true); - } - } - initHeap(); + public BatchReadRecordGenerator(List paths, int fetchSize) + throws ProcessorException, IOException { + noRetCount = 0; + retMap = new LinkedHashMap<>(); + hasMoreRet = new LinkedHashMap<>(); + timeMap = new HashMap<>(); + 
this.fetchSize = fetchSize; + // init for every Series + for (Path p : paths) { + DynamicOneColumnData res = getMoreRecordsForOneColumn(p, null); + retMap.put(p, res); + if (res.valueLength == 0) { + hasMoreRet.put(p, false); + noRetCount++; + } else { + hasMoreRet.put(p, true); + } } + initHeap(); + } - private void initHeap() { - heap = new PriorityQueue<>(); - for (Path p : retMap.keySet()) { - DynamicOneColumnData res = retMap.get(p); - if (res.curIdx < res.valueLength) { - heapPut(res.getTime(res.curIdx)); - } - } + private void initHeap() { + heap = new PriorityQueue<>(); + for (Path p : retMap.keySet()) { + DynamicOneColumnData res = retMap.get(p); + if (res.curIdx < res.valueLength) { + heapPut(res.getTime(res.curIdx)); + } } + } - private void heapPut(long t) { - if (!timeMap.containsKey(t)) { - heap.add(t); - timeMap.put(t, 1); - } + private void heapPut(long t) { + if (!timeMap.containsKey(t)) { + heap.add(t); + timeMap.put(t, 1); } + } - private Long heapGet() { - Long t = heap.poll(); - timeMap.remove(t); - return t; - } + private Long heapGet() { + Long t = heap.poll(); + timeMap.remove(t); + return t; + } - public void clearDataInLastQuery(DynamicOneColumnData res) { - res.clearData(); - } + public void clearDataInLastQuery(DynamicOneColumnData res) { + res.clearData(); + } - public abstract DynamicOneColumnData getMoreRecordsForOneColumn(Path p - , DynamicOneColumnData res) throws ProcessorException, IOException; + public abstract DynamicOneColumnData getMoreRecordsForOneColumn(Path p, DynamicOneColumnData res) + throws ProcessorException, IOException; - /** - * Calculate the fetchSize number RowRecords. - * Invoking this method will remove the top value in heap until the OldRowRecord number reach to fetchSize. - * - * @throws ProcessorException exception in read process - * @throws IOException exception in IO - */ - public void calculateRecord() throws ProcessorException, IOException { - int recordCount = 0; - while (recordCount < fetchSize && noRetCount < retMap.size()) { - Long minTime = heapGet(); - if (minTime == null) { - break; - } - for (Path path : retMap.keySet()) { - if (hasMoreRet.get(path)) { - DynamicOneColumnData res = retMap.get(path); - if (minTime.equals(res.getTime(res.curIdx))) { - res.curIdx++; - if (res.curIdx == res.valueLength) { - res = getMoreRecordsForOneColumn(path, res); - if (res.curIdx == res.valueLength) { - hasMoreRet.put(path, false); - noRetCount++; - continue; - } - } - heapPut(res.getTime(res.curIdx)); - } - } + /** + * Calculate the fetchSize number RowRecords. Invoking this method will remove the top value in + * heap until the OldRowRecord number reach to fetchSize. 
+ * + * @throws ProcessorException exception in read process + * @throws IOException exception in IO + */ + public void calculateRecord() throws ProcessorException, IOException { + int recordCount = 0; + while (recordCount < fetchSize && noRetCount < retMap.size()) { + Long minTime = heapGet(); + if (minTime == null) { + break; + } + for (Path path : retMap.keySet()) { + if (hasMoreRet.get(path)) { + DynamicOneColumnData res = retMap.get(path); + if (minTime.equals(res.getTime(res.curIdx))) { + res.curIdx++; + if (res.curIdx == res.valueLength) { + res = getMoreRecordsForOneColumn(path, res); + if (res.curIdx == res.valueLength) { + hasMoreRet.put(path, false); + noRetCount++; + continue; + } } - recordCount++; + heapPut(res.getTime(res.curIdx)); + } } + } + recordCount++; } -} \ No newline at end of file + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/CrossOnePassQueryIteratorDataSet.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/CrossOnePassQueryIteratorDataSet.java index 1082195a..428bd94b 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/CrossOnePassQueryIteratorDataSet.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/CrossOnePassQueryIteratorDataSet.java @@ -4,95 +4,95 @@ import cn.edu.tsinghua.tsfile.timeseries.read.support.OldRowRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.LinkedHashMap; /** - * This class is the subclass of {@code OnePassQueryDataSet}. It is used to store - * and fetch more records for batch query in TsFile's SingleFileQuery. + * This class is the subclass of {@code OnePassQueryDataSet}. It is used to store and fetch more + * records for batch query in TsFile's SingleFileQuery. * * @author Jinrui Zhang */ public abstract class CrossOnePassQueryIteratorDataSet extends OnePassQueryDataSet { - private static final Logger LOG = LoggerFactory.getLogger(CrossOnePassQueryIteratorDataSet.class); - //special for save time values when processing cross getIndex - private boolean hasReadAll; + private static final Logger LOG = LoggerFactory.getLogger(CrossOnePassQueryIteratorDataSet.class); + // special for save time values when processing cross getIndex + private boolean hasReadAll; - public CrossOnePassQueryIteratorDataSet(CrossQueryTimeGenerator timeGenerator) throws IOException { - this.crossQueryTimeGenerator = timeGenerator; - mapRet = new LinkedHashMap<>(); - hasReadAll = getMoreRecords(); - size = mapRet.size(); - } + public CrossOnePassQueryIteratorDataSet(CrossQueryTimeGenerator timeGenerator) + throws IOException { + this.crossQueryTimeGenerator = timeGenerator; + mapRet = new LinkedHashMap<>(); + hasReadAll = getMoreRecords(); + size = mapRet.size(); + } - /** - * @return True represents that there is no more data to be read. - * @throws IOException exception in IO - */ - public abstract boolean getMoreRecords() throws IOException; + /** + * @return True represents that there is no more data to be read. 
+ * @throws IOException exception in IO + */ + public abstract boolean getMoreRecords() throws IOException; - public boolean hasNextRecord() { - if (!ifInit) { - // hasReadAll is true represents that there is no records in this OnePassQueryDataSet - if (hasReadAll) { - return false; - } - initForRecord(); - ifInit = true; + public boolean hasNextRecord() { + if (!ifInit) { + // hasReadAll is true represents that there is no records in this OnePassQueryDataSet + if (hasReadAll) { + return false; + } + initForRecord(); + ifInit = true; + } + if (heap.peek() != null) { + return true; + } + if (!hasReadAll) { + try { + hasReadAll = getMoreRecords(); + if (hasReadAll) { + return false; } + initForRecord(); if (heap.peek() != null) { - return true; - } - if (!hasReadAll) { - try { - hasReadAll = getMoreRecords(); - if (hasReadAll) { - return false; - } - initForRecord(); - if (heap.peek() != null) { - return true; - } - } catch (IOException e) { - LOG.error("Error in get Next Record:", e); - } + return true; } - return false; + } catch (IOException e) { + LOG.error("Error in get Next Record:", e); + } } + return false; + } - public OldRowRecord getNextRecord() { - if (!hasNextRecord()) { - return null; - } + public OldRowRecord getNextRecord() { + if (!hasNextRecord()) { + return null; + } - Long minTime = heapGet(); - OldRowRecord r = new OldRowRecord(minTime, null, null); - for (int i = 0; i < size; i++) { - if (i == 0) { - r.setDeltaObjectId(deltaObjectIds[i]); - } - Field f; + Long minTime = heapGet(); + OldRowRecord r = new OldRowRecord(minTime, null, null); + for (int i = 0; i < size; i++) { + if (i == 0) { + r.setDeltaObjectId(deltaObjectIds[i]); + } + Field f; - //get more fields in columns i - if (timeIdxs[i] < cols[i].valueLength) { - //Get more fields from file... - } + // get more fields in columns i + if (timeIdxs[i] < cols[i].valueLength) { + // Get more fields from file... 
+ } - if (timeIdxs[i] < cols[i].valueLength && minTime == cols[i].getTime(timeIdxs[i])) { - f = new Field(cols[i].dataType, deltaObjectIds[i], measurementIds[i]); - f.setNull(false); - putValueToField(cols[i], timeIdxs[i], f); - timeIdxs[i]++; - if (timeIdxs[i] < cols[i].valueLength) { - heapPut(cols[i].getTime(timeIdxs[i])); - } - } else { - f = new Field(cols[i].dataType, measurementIds[i]); - f.setNull(true); - } - r.addField(f); + if (timeIdxs[i] < cols[i].valueLength && minTime == cols[i].getTime(timeIdxs[i])) { + f = new Field(cols[i].dataType, deltaObjectIds[i], measurementIds[i]); + f.setNull(false); + putValueToField(cols[i], timeIdxs[i], f); + timeIdxs[i]++; + if (timeIdxs[i] < cols[i].valueLength) { + heapPut(cols[i].getTime(timeIdxs[i])); } - return r; + } else { + f = new Field(cols[i].dataType, measurementIds[i]); + f.setNull(true); + } + r.addField(f); } -} \ No newline at end of file + return r; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/CrossQueryTimeGenerator.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/CrossQueryTimeGenerator.java index 6ee2f3eb..48e3a1f9 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/CrossQueryTimeGenerator.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/CrossQueryTimeGenerator.java @@ -6,7 +6,6 @@ import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.CSAnd; import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.CSOr; import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.SingleValueVisitor; - import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -18,192 +17,196 @@ */ public abstract class CrossQueryTimeGenerator { - public ArrayList retMap; // represent the single valueFilter and its' data - public ArrayList hasReadAllList; // represent whether the data has been read all - protected ArrayList lastValueList; // represent the value stored in CSOr relation - protected ArrayList idxCount; // represent the dfsCnt and the sum node number of its' subtree - protected int dfsCnt; // to record which single valueFilter is used - - protected SingleSeriesFilterExpression timeFilter; - protected SingleSeriesFilterExpression freqFilter; - protected FilterExpression valueFilter; - protected int fetchSize; - - public CrossQueryTimeGenerator(SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, - FilterExpression valueFilter, int fetchSize) { - retMap = new ArrayList<>(); - hasReadAllList = new ArrayList<>(); - lastValueList = new ArrayList<>(); - idxCount = new ArrayList<>(); - this.valueFilter = valueFilter; - this.timeFilter = timeFilter; - this.fetchSize = fetchSize; - dfsCnt = -1; - initRetMapAndFilterMap(valueFilter); + public ArrayList retMap; // represent the single valueFilter and its' data + public ArrayList hasReadAllList; // represent whether the data has been read all + protected ArrayList lastValueList; // represent the value stored in CSOr relation + protected ArrayList idxCount; // represent the dfsCnt and the sum node number of its' + // subtree + protected int dfsCnt; // to record which single valueFilter is used + + protected SingleSeriesFilterExpression timeFilter; + protected SingleSeriesFilterExpression freqFilter; + protected FilterExpression valueFilter; + protected int fetchSize; + + public CrossQueryTimeGenerator(SingleSeriesFilterExpression timeFilter, + SingleSeriesFilterExpression freqFilter, FilterExpression valueFilter, int fetchSize) { + retMap = 
new ArrayList<>(); + hasReadAllList = new ArrayList<>(); + lastValueList = new ArrayList<>(); + idxCount = new ArrayList<>(); + this.valueFilter = valueFilter; + this.timeFilter = timeFilter; + this.fetchSize = fetchSize; + dfsCnt = -1; + initRetMapAndFilterMap(valueFilter); + } + + private int initRetMapAndFilterMap(FilterExpression valueFilter) { + dfsCnt++; + int tmpIdx = dfsCnt; + retMap.add(null); + hasReadAllList.add(false); + lastValueList.add(-1L); + idxCount.add(-1); + + if (valueFilter instanceof SingleSeriesFilterExpression) { + idxCount.set(tmpIdx, 1); + return 1; + } else if (valueFilter instanceof CSAnd) { + FilterExpression left = ((CSAnd) valueFilter).getLeft(); + FilterExpression right = ((CSAnd) valueFilter).getRight(); + int l = initRetMapAndFilterMap(left); + int r = initRetMapAndFilterMap(right); + idxCount.set(tmpIdx, l + r + 1); + return l + r + 1; + } else { + FilterExpression left = ((CSOr) valueFilter).getLeft(); + FilterExpression right = ((CSOr) valueFilter).getRight(); + int l = initRetMapAndFilterMap(left); + int r = initRetMapAndFilterMap(right); + idxCount.set(tmpIdx, l + r + 1); + return l + r + 1; } - - private int initRetMapAndFilterMap(FilterExpression valueFilter) { - dfsCnt++; - int tmpIdx = dfsCnt; - retMap.add(null); - hasReadAllList.add(false); - lastValueList.add(-1L); - idxCount.add(-1); - - if (valueFilter instanceof SingleSeriesFilterExpression) { - idxCount.set(tmpIdx, 1); - return 1; - } else if (valueFilter instanceof CSAnd) { - FilterExpression left = ((CSAnd) valueFilter).getLeft(); - FilterExpression right = ((CSAnd) valueFilter).getRight(); - int l = initRetMapAndFilterMap(left); - int r = initRetMapAndFilterMap(right); - idxCount.set(tmpIdx, l + r + 1); - return l + r + 1; - } else { - FilterExpression left = ((CSOr) valueFilter).getLeft(); - FilterExpression right = ((CSOr) valueFilter).getRight(); - int l = initRetMapAndFilterMap(left); - int r = initRetMapAndFilterMap(right); - idxCount.set(tmpIdx, l + r + 1); - return l + r + 1; - } + } + + /** + * Calculate common time using FilterExpression. + * + * @return common time + * @throws ProcessorException exception in query processor + * @throws IOException exception in IO + */ + public long[] generateTimes() throws ProcessorException, IOException { + long[] res = new long[fetchSize]; + + int cnt = 0; + SingleValueVisitor timeFilterVisitor = new SingleValueVisitor<>(); + while (cnt < fetchSize) { + // init dfsCnt=-1 before calculateOneTime + dfsCnt = -1; + long v = calculateOneTime(valueFilter); + if (v == -1) { + break; + } + if ((timeFilter == null) + || (timeFilter != null && timeFilterVisitor.satisfyObject(v, timeFilter))) { + res[cnt] = v; + cnt++; + } } - - /** - * Calculate common time using FilterExpression. 
- * @return common time - * @throws ProcessorException exception in query processor - * @throws IOException exception in IO - */ - public long[] generateTimes() throws ProcessorException, IOException { - long[] res = new long[fetchSize]; - - int cnt = 0; - SingleValueVisitor timeFilterVisitor = new SingleValueVisitor<>(); - while (cnt < fetchSize) { - // init dfsCnt=-1 before calculateOneTime - dfsCnt = -1; - long v = calculateOneTime(valueFilter); - if (v == -1) { - break; - } - if ((timeFilter == null) || (timeFilter != null && timeFilterVisitor.satisfyObject(v, timeFilter))) { - res[cnt] = v; - cnt++; - } - } - if (cnt < fetchSize) { - return Arrays.copyOfRange(res, 0, cnt); - } - return res; + if (cnt < fetchSize) { + return Arrays.copyOfRange(res, 0, cnt); } + return res; + } + + private long calculateOneTime(FilterExpression valueFilter) + throws ProcessorException, IOException { + // first check whether the value is used in CSOr relation + dfsCnt++; + if (lastValueList.get(dfsCnt) != -1L) { + long v = lastValueList.get(dfsCnt); + lastValueList.set(dfsCnt, -1L); + // this current valueFilter is a branch of CSOr relation, and has been calculated before + // return the value directly and no need to calculate again + dfsCnt += (idxCount.get(dfsCnt) - 1); + return v; + } + if (valueFilter instanceof SingleSeriesFilterExpression) { + DynamicOneColumnData res = retMap.get(dfsCnt); - private long calculateOneTime(FilterExpression valueFilter) throws ProcessorException, IOException { - // first check whether the value is used in CSOr relation - dfsCnt++; - if (lastValueList.get(dfsCnt) != -1L) { - long v = lastValueList.get(dfsCnt); - lastValueList.set(dfsCnt, -1L); - // this current valueFilter is a branch of CSOr relation, and has been calculated before - // return the value directly and no need to calculate again - dfsCnt += (idxCount.get(dfsCnt) - 1); - return v; - } - if (valueFilter instanceof SingleSeriesFilterExpression) { - DynamicOneColumnData res = retMap.get(dfsCnt); - - // res is null or res has no data. - if ((res == null) || (res.curIdx == res.valueLength && !hasReadAllList.get(dfsCnt))) { - res = getMoreRecordForOneCol(dfsCnt, (SingleSeriesFilterExpression) valueFilter); - } - - if (res == null || res.curIdx == res.valueLength) { - //represent this col has no more value - return -1; - } - return res.getTime(res.curIdx++); - } else if (valueFilter instanceof CSAnd) { - FilterExpression left = ((CSAnd) valueFilter).getLeft(); - FilterExpression right = ((CSAnd) valueFilter).getRight(); - int leftPreIndex = dfsCnt; - long l = calculateOneTime(left); - int rightPreIndex = dfsCnt; - long r = calculateOneTime(right); - while (l != -1 && r != -1) { - while (l < r && l != -1) { - dfsCnt = leftPreIndex; - l = calculateOneTime(left); - } - if (l == r) { - break; - } - dfsCnt = rightPreIndex; - r = calculateOneTime(right); - } - if (l == -1 || r == -1) { - return -1; - } - return l; - } else if (valueFilter instanceof CSOr) { - FilterExpression left = ((CSOr) valueFilter).getLeft(); - FilterExpression right = ((CSOr) valueFilter).getRight(); - int lidx = dfsCnt + 1; - long l = calculateOneTime(left); - // dfsCnt has changed when above calculateOneTime(left) is over - int ridx = dfsCnt + 1; - long r = calculateOneTime(right); + // res is null or res has no data. 
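generateTimes hands back at most fetchSize qualifying timestamps per call and signals exhaustion by returning an array shorter than fetchSize (via Arrays.copyOfRange once calculateOneTime yields -1). A caller therefore pages through the common timestamps roughly as in the sketch below, where TimeSource is a stand-in interface rather than a TsFile type:

import java.util.Arrays;

public class PagedTimeConsumer {
  // Stand-in for CrossQueryTimeGenerator.generateTimes(): returns up to
  // fetchSize ascending timestamps, fewer (possibly zero) when exhausted.
  interface TimeSource {
    long[] nextBatch() throws Exception;
  }

  static void consumeAll(TimeSource source, int fetchSize) throws Exception {
    while (true) {
      long[] batch = source.nextBatch();
      for (long t : batch) {
        System.out.println("common time: " + t);
      }
      if (batch.length < fetchSize) {
        break;   // a short batch means there are no more common timestamps
      }
    }
  }

  public static void main(String[] args) throws Exception {
    // A toy source backed by a fixed array, paged in chunks of 4.
    long[] all = {1, 3, 5, 8, 13, 21, 34};
    int fetchSize = 4;
    int[] pos = {0};
    TimeSource source = () -> {
      int end = Math.min(pos[0] + fetchSize, all.length);
      long[] out = Arrays.copyOfRange(all, pos[0], end);
      pos[0] = end;
      return out;
    };
    consumeAll(source, fetchSize);
  }
}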
+ if ((res == null) || (res.curIdx == res.valueLength && !hasReadAllList.get(dfsCnt))) { + res = getMoreRecordForOneCol(dfsCnt, (SingleSeriesFilterExpression) valueFilter); + } - if (l == -1 && r != -1) { - return r; - } else if (l != -1 && r == -1) { - return l; - } else if (l == -1 && r == -1) { - return -1; - } else { - if (l < r) { - lastValueList.set(ridx, r); - return l; - } else if (l > r) { - lastValueList.set(lidx, l); - return r; - } else { - return l; - } - } - } + if (res == null || res.curIdx == res.valueLength) { + // represent this col has no more value return -1; - } - - public DynamicOneColumnData getMoreRecordForOneCol(int idx, SingleSeriesFilterExpression valueFilter) - throws ProcessorException, IOException { - DynamicOneColumnData res = retMap.get(idx); - if (res != null) { - // rowGroupIdx will not change - res.clearData(); + } + return res.getTime(res.curIdx++); + } else if (valueFilter instanceof CSAnd) { + FilterExpression left = ((CSAnd) valueFilter).getLeft(); + FilterExpression right = ((CSAnd) valueFilter).getRight(); + int leftPreIndex = dfsCnt; + long l = calculateOneTime(left); + int rightPreIndex = dfsCnt; + long r = calculateOneTime(right); + while (l != -1 && r != -1) { + while (l < r && l != -1) { + dfsCnt = leftPreIndex; + l = calculateOneTime(left); + } + if (l == r) { + break; } - res = getDataInNextBatch(res, fetchSize, valueFilter, idx); - retMap.set(idx, res); - if (res == null || res.valueLength == 0) { - hasReadAllList.set(idx, true); + dfsCnt = rightPreIndex; + r = calculateOneTime(right); + } + if (l == -1 || r == -1) { + return -1; + } + return l; + } else if (valueFilter instanceof CSOr) { + FilterExpression left = ((CSOr) valueFilter).getLeft(); + FilterExpression right = ((CSOr) valueFilter).getRight(); + int lidx = dfsCnt + 1; + long l = calculateOneTime(left); + // dfsCnt has changed when above calculateOneTime(left) is over + int ridx = dfsCnt + 1; + long r = calculateOneTime(right); + + if (l == -1 && r != -1) { + return r; + } else if (l != -1 && r == -1) { + return l; + } else if (l == -1 && r == -1) { + return -1; + } else { + if (l < r) { + lastValueList.set(ridx, r); + return l; + } else if (l > r) { + lastValueList.set(lidx, l); + return r; + } else { + return l; } - return res; + } } - - /** - * valueFilterNumber parameter is mainly used for IoTDB. - * Because of the exist of RecordReaderCache, - * we must know the occur position of the SingleSeriesFilter in CrossSeriesFilterExpression. 
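For CSAnd, calculateOneTime keeps advancing whichever child is behind until both report the same timestamp; for CSOr it returns the smaller of the two and parks the larger one in lastValueList so that branch is not recomputed on the next pass. The same intersection step over two ascending streams, stripped of the tree bookkeeping, is sketched below (PrimitiveIterator is only a convenient stand-in for a child's timestamp stream, and -1 is reused as the "no more data" marker):

import java.util.PrimitiveIterator;
import java.util.stream.LongStream;

public class SortedTimeMerge {
  // AND: advance whichever side is behind until the heads match,
  // or return -1 when either stream runs out.
  static long nextCommonTime(PrimitiveIterator.OfLong a, PrimitiveIterator.OfLong b) {
    if (!a.hasNext() || !b.hasNext()) {
      return -1;
    }
    long l = a.nextLong();
    long r = b.nextLong();
    while (l != r) {
      if (l < r) {
        if (!a.hasNext()) return -1;
        l = a.nextLong();
      } else {
        if (!b.hasNext()) return -1;
        r = b.nextLong();
      }
    }
    return l;
  }

  public static void main(String[] args) {
    PrimitiveIterator.OfLong a = LongStream.of(1, 4, 6, 9, 12).iterator();
    PrimitiveIterator.OfLong b = LongStream.of(2, 4, 9, 10).iterator();
    long t;
    while ((t = nextCommonTime(a, b)) != -1) {
      System.out.println("AND time: " + t);   // prints 4, then 9
    }
  }
}

The OR case is the mirror image: return the smaller of the two heads and remember the larger one, so the slower branch yields the cached value on its next pull instead of re-reading its column.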
- * - * @param res result set - * @param fetchSize fetch size of this query - * @param valueFilter filter of value - * @param valueFilterNumber the position number of SingleValueFilter in CrossValueFilter - * @return the query data of next read - * @throws ProcessorException exception in query process - * @throws IOException exception in IO - */ - public abstract DynamicOneColumnData getDataInNextBatch(DynamicOneColumnData res, int fetchSize, - SingleSeriesFilterExpression valueFilter, int valueFilterNumber) - throws ProcessorException, IOException; -} \ No newline at end of file + return -1; + } + + public DynamicOneColumnData getMoreRecordForOneCol(int idx, + SingleSeriesFilterExpression valueFilter) throws ProcessorException, IOException { + DynamicOneColumnData res = retMap.get(idx); + if (res != null) { + // rowGroupIdx will not change + res.clearData(); + } + res = getDataInNextBatch(res, fetchSize, valueFilter, idx); + retMap.set(idx, res); + if (res == null || res.valueLength == 0) { + hasReadAllList.set(idx, true); + } + return res; + } + + /** + * valueFilterNumber parameter is mainly used for IoTDB. Because of the exist of + * RecordReaderCache, we must know the occur position of the SingleSeriesFilter in + * CrossSeriesFilterExpression. + * + * @param res result set + * @param fetchSize fetch size of this query + * @param valueFilter filter of value + * @param valueFilterNumber the position number of SingleValueFilter in CrossValueFilter + * @return the query data of next read + * @throws ProcessorException exception in query process + * @throws IOException exception in IO + */ + public abstract DynamicOneColumnData getDataInNextBatch(DynamicOneColumnData res, int fetchSize, + SingleSeriesFilterExpression valueFilter, int valueFilterNumber) + throws ProcessorException, IOException; +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/DynamicOneColumnData.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/DynamicOneColumnData.java index 0a1b711f..1e2b87e9 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/DynamicOneColumnData.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/DynamicOneColumnData.java @@ -5,729 +5,735 @@ import cn.edu.tsinghua.tsfile.common.utils.Binary; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; - import java.util.ArrayList; /** - * DynamicOneColumnData is a self-defined data structure which is optimized for different type - * of values. This class can be viewed as a collection which is more efficient than ArrayList. + * DynamicOneColumnData is a self-defined data structure which is optimized for different type of + * values. This class can be viewed as a collection which is more efficient than ArrayList. 
* * @author Jinrui Zhang */ public class DynamicOneColumnData { - private int TIME_CAPACITY = 1; - private int VALUE_CAPACITY = 1; - private int EMPTY_TIME_CAPACITY = 1; - private int CAPACITY_THRESHOLD = 1024; - - public int rowGroupIndex = 0; - public long pageOffset = -1; - public long leftSize = -1; - public boolean hasReadAll = false; - public TSDataType dataType; - public int curIdx; - public int insertTrueIndex = 0; - - public int timeArrayIdx; // the number of ArrayList in timeRet - private int curTimeIdx; // the index of current ArrayList in timeRet - public int timeLength; // the insert timestamp number of timeRet - private int valueArrayIdx;// the number of ArrayList in valueRet - private int curValueIdx; // the index of current ArrayList in valueRet - public int valueLength; // the insert value number of valueRet - - public boolean hasEmptyTime; - public int emptyTimeArrayIdx; - public int curEmptyTimeIdx; - public int emptyTimeLength; - - public ArrayList timeRet; - public ArrayList emptyTimeRet; - public ArrayList booleanRet; - public ArrayList intRet; - public ArrayList longRet; - public ArrayList floatRet; - public ArrayList doubleRet; - public ArrayList binaryRet; - - // some variables that record overflow information - public DynamicOneColumnData insertTrue; - public DynamicOneColumnData updateTrue; - public DynamicOneColumnData updateFalse; - public SingleSeriesFilterExpression timeFilter; - - public DynamicOneColumnData() { - dataType = null; - } - - public DynamicOneColumnData(TSDataType type) { - dataType = type; - } - - /** - * @param type Data type to record for this DynamicOneColumnData - * @param recordTime whether to record time value for this DynamicOneColumnData - */ - public DynamicOneColumnData(TSDataType type, boolean recordTime) { - init(type, recordTime, false); - } - - public DynamicOneColumnData(TSDataType type, boolean recordTime, boolean hasEmptyTime) { - this.hasEmptyTime = hasEmptyTime; - init(type, recordTime, hasEmptyTime); - } - - public void init(TSDataType type, boolean recordTime, boolean hasEmptyTime) { - this.dataType = type; - this.valueArrayIdx = 0; - this.curValueIdx = 0; - this.valueLength = 0; - this.curIdx = 0; - CAPACITY_THRESHOLD = TSFileConfig.dynamicDataSize; - - if (recordTime) { - timeRet = new ArrayList<>(); - timeRet.add(new long[TIME_CAPACITY]); - timeArrayIdx = 0; - curTimeIdx = 0; - timeLength = 0; - } - - if (hasEmptyTime) { - emptyTimeRet = new ArrayList<>(); - emptyTimeRet.add(new long[EMPTY_TIME_CAPACITY]); - emptyTimeArrayIdx = 0; - curEmptyTimeIdx = 0; - emptyTimeLength = 0; - } - - switch (dataType) { - case BOOLEAN: - booleanRet = new ArrayList<>(); - booleanRet.add(new boolean[VALUE_CAPACITY]); - break; - case INT32: - intRet = new ArrayList<>(); - intRet.add(new int[VALUE_CAPACITY]); - break; - case INT64: - longRet = new ArrayList<>(); - longRet.add(new long[VALUE_CAPACITY]); - break; - case FLOAT: - floatRet = new ArrayList<>(); - floatRet.add(new float[VALUE_CAPACITY]); - break; - case DOUBLE: - doubleRet = new ArrayList<>(); - doubleRet.add(new double[VALUE_CAPACITY]); - break; - case TEXT: - binaryRet = new ArrayList<>(); - binaryRet.add(new Binary[VALUE_CAPACITY]); - break; - case ENUMS: - intRet = new ArrayList<>(); - intRet.add(new int[VALUE_CAPACITY]); - break; - default: - throw new UnSupportedDataTypeException(String.valueOf(dataType)); - } - } - - public void putTime(long v) { - if (curTimeIdx == TIME_CAPACITY) { - if (TIME_CAPACITY >= CAPACITY_THRESHOLD) { - this.timeRet.add(new long[TIME_CAPACITY]); - 
timeArrayIdx++; - curTimeIdx = 0; - } else { - long[] newData = new long[TIME_CAPACITY * 2]; - System.arraycopy(timeRet.get(0), 0, newData, 0, TIME_CAPACITY); - this.timeRet.set(0, newData); - TIME_CAPACITY = TIME_CAPACITY * 2; - } - } - (timeRet.get(timeArrayIdx))[curTimeIdx++] = v; - timeLength++; - } - - public void putEmptyTime(long v) { - if (curEmptyTimeIdx == EMPTY_TIME_CAPACITY) { - if (EMPTY_TIME_CAPACITY >= CAPACITY_THRESHOLD) { - this.emptyTimeRet.add(new long[EMPTY_TIME_CAPACITY]); - emptyTimeArrayIdx++; - curEmptyTimeIdx = 0; - } else { - long[] newData = new long[EMPTY_TIME_CAPACITY * 2]; - System.arraycopy(emptyTimeRet.get(0), 0, newData, 0, EMPTY_TIME_CAPACITY); - this.emptyTimeRet.set(0, newData); - EMPTY_TIME_CAPACITY = EMPTY_TIME_CAPACITY * 2; - } + private int TIME_CAPACITY = 1; + private int VALUE_CAPACITY = 1; + private int EMPTY_TIME_CAPACITY = 1; + private int CAPACITY_THRESHOLD = 1024; + + public int rowGroupIndex = 0; + public long pageOffset = -1; + public long leftSize = -1; + public boolean hasReadAll = false; + public TSDataType dataType; + public int curIdx; + public int insertTrueIndex = 0; + + public int timeArrayIdx; // the number of ArrayList in timeRet + private int curTimeIdx; // the index of current ArrayList in timeRet + public int timeLength; // the insert timestamp number of timeRet + private int valueArrayIdx;// the number of ArrayList in valueRet + private int curValueIdx; // the index of current ArrayList in valueRet + public int valueLength; // the insert value number of valueRet + + public boolean hasEmptyTime; + public int emptyTimeArrayIdx; + public int curEmptyTimeIdx; + public int emptyTimeLength; + + public ArrayList timeRet; + public ArrayList emptyTimeRet; + public ArrayList booleanRet; + public ArrayList intRet; + public ArrayList longRet; + public ArrayList floatRet; + public ArrayList doubleRet; + public ArrayList binaryRet; + + // some variables that record overflow information + public DynamicOneColumnData insertTrue; + public DynamicOneColumnData updateTrue; + public DynamicOneColumnData updateFalse; + public SingleSeriesFilterExpression timeFilter; + + public DynamicOneColumnData() { + dataType = null; + } + + public DynamicOneColumnData(TSDataType type) { + dataType = type; + } + + /** + * @param type Data type to record for this DynamicOneColumnData + * @param recordTime whether to record time value for this DynamicOneColumnData + */ + public DynamicOneColumnData(TSDataType type, boolean recordTime) { + init(type, recordTime, false); + } + + public DynamicOneColumnData(TSDataType type, boolean recordTime, boolean hasEmptyTime) { + this.hasEmptyTime = hasEmptyTime; + init(type, recordTime, hasEmptyTime); + } + + public void init(TSDataType type, boolean recordTime, boolean hasEmptyTime) { + this.dataType = type; + this.valueArrayIdx = 0; + this.curValueIdx = 0; + this.valueLength = 0; + this.curIdx = 0; + CAPACITY_THRESHOLD = TSFileConfig.dynamicDataSize; + + if (recordTime) { + timeRet = new ArrayList<>(); + timeRet.add(new long[TIME_CAPACITY]); + timeArrayIdx = 0; + curTimeIdx = 0; + timeLength = 0; + } + + if (hasEmptyTime) { + emptyTimeRet = new ArrayList<>(); + emptyTimeRet.add(new long[EMPTY_TIME_CAPACITY]); + emptyTimeArrayIdx = 0; + curEmptyTimeIdx = 0; + emptyTimeLength = 0; + } + + switch (dataType) { + case BOOLEAN: + booleanRet = new ArrayList<>(); + booleanRet.add(new boolean[VALUE_CAPACITY]); + break; + case INT32: + intRet = new ArrayList<>(); + intRet.add(new int[VALUE_CAPACITY]); + break; + case INT64: + longRet 
= new ArrayList<>(); + longRet.add(new long[VALUE_CAPACITY]); + break; + case FLOAT: + floatRet = new ArrayList<>(); + floatRet.add(new float[VALUE_CAPACITY]); + break; + case DOUBLE: + doubleRet = new ArrayList<>(); + doubleRet.add(new double[VALUE_CAPACITY]); + break; + case TEXT: + binaryRet = new ArrayList<>(); + binaryRet.add(new Binary[VALUE_CAPACITY]); + break; + case ENUMS: + intRet = new ArrayList<>(); + intRet.add(new int[VALUE_CAPACITY]); + break; + default: + throw new UnSupportedDataTypeException(String.valueOf(dataType)); + } + } + + public void putTime(long v) { + if (curTimeIdx == TIME_CAPACITY) { + if (TIME_CAPACITY >= CAPACITY_THRESHOLD) { + this.timeRet.add(new long[TIME_CAPACITY]); + timeArrayIdx++; + curTimeIdx = 0; + } else { + long[] newData = new long[TIME_CAPACITY * 2]; + System.arraycopy(timeRet.get(0), 0, newData, 0, TIME_CAPACITY); + this.timeRet.set(0, newData); + TIME_CAPACITY = TIME_CAPACITY * 2; + } + } + (timeRet.get(timeArrayIdx))[curTimeIdx++] = v; + timeLength++; + } + + public void putEmptyTime(long v) { + if (curEmptyTimeIdx == EMPTY_TIME_CAPACITY) { + if (EMPTY_TIME_CAPACITY >= CAPACITY_THRESHOLD) { + this.emptyTimeRet.add(new long[EMPTY_TIME_CAPACITY]); + emptyTimeArrayIdx++; + curEmptyTimeIdx = 0; + } else { + long[] newData = new long[EMPTY_TIME_CAPACITY * 2]; + System.arraycopy(emptyTimeRet.get(0), 0, newData, 0, EMPTY_TIME_CAPACITY); + this.emptyTimeRet.set(0, newData); + EMPTY_TIME_CAPACITY = EMPTY_TIME_CAPACITY * 2; + } + } + (emptyTimeRet.get(emptyTimeArrayIdx))[curEmptyTimeIdx++] = v; + emptyTimeLength++; + } + + /** + * add all time and value from another DynamicOneColumnData to self. + * + * @param col DynamicOneColumnData to be merged + */ + public void mergeRecord(DynamicOneColumnData col) { + for (int i = 0; i < col.timeLength; i++) { + putTime(col.getTime(i)); + } + switch (dataType) { + case BOOLEAN: + for (int i = 0; i < col.valueLength; i++) { + putBoolean(col.getBoolean(i)); } - (emptyTimeRet.get(emptyTimeArrayIdx))[curEmptyTimeIdx++] = v; - emptyTimeLength++; - } - - /** - * add all time and value from another DynamicOneColumnData to self. 
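The put* methods above grow storage in two phases: while the current chunk is below CAPACITY_THRESHOLD (taken from TSFileConfig.dynamicDataSize in init) the single backing array is doubled and copied, and once the threshold is reached new fixed-size chunks are appended, so an element at position idx is located with idx / capacity and idx % capacity. A self-contained long-only version of the same scheme, with illustrative class and field names and the threshold hard-coded:

import java.util.ArrayList;
import java.util.List;

public class ChunkedLongList {
  private static final int CAPACITY_THRESHOLD = 1024;  // from config in TsFile

  private int capacity = 1;        // size of every chunk once growth stops
  private int chunkIdx = 0;        // which chunk we are writing into
  private int posInChunk = 0;      // next free slot inside that chunk
  private int length = 0;          // total number of stored values
  private final List<long[]> chunks = new ArrayList<>();

  public ChunkedLongList() {
    chunks.add(new long[capacity]);
  }

  public void put(long v) {
    if (posInChunk == capacity) {
      if (capacity >= CAPACITY_THRESHOLD) {
        // Phase 2: stop copying, just chain another fixed-size chunk.
        chunks.add(new long[capacity]);
        chunkIdx++;
        posInChunk = 0;
      } else {
        // Phase 1: double the single chunk in place, like ArrayList growth.
        long[] bigger = new long[capacity * 2];
        System.arraycopy(chunks.get(0), 0, bigger, 0, capacity);
        chunks.set(0, bigger);
        capacity *= 2;
      }
    }
    chunks.get(chunkIdx)[posInChunk++] = v;
    length++;
  }

  public long get(int idx) {
    if (idx < 0 || idx >= length) {
      throw new IndexOutOfBoundsException("Index: " + idx + ", Length: " + length);
    }
    // Once phase 2 is active every chunk has the same capacity, so chunk
    // number and offset fall out of one division and one remainder.
    return chunks.get(idx / capacity)[idx % capacity];
  }

  public int length() {
    return length;
  }
}

Capping the doubling at a threshold bounds the cost of array copies; the price is one extra level of indirection on every read once the data spans several chunks.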
- * - * @param col DynamicOneColumnData to be merged - */ - public void mergeRecord(DynamicOneColumnData col) { - for (int i = 0; i < col.timeLength; i++) { - putTime(col.getTime(i)); + break; + case INT32: + for (int i = 0; i < col.valueLength; i++) { + putInt(col.getInt(i)); } - switch (dataType) { - case BOOLEAN: - for (int i = 0; i < col.valueLength; i++) { - putBoolean(col.getBoolean(i)); - } - break; - case INT32: - for (int i = 0; i < col.valueLength; i++) { - putInt(col.getInt(i)); - } - break; - case INT64: - for (int i = 0; i < col.valueLength; i++) { - putLong(col.getLong(i)); - } - break; - case FLOAT: - for (int i = 0; i < col.valueLength; i++) { - putFloat(col.getFloat(i)); - } - break; - case DOUBLE: - for (int i = 0; i < col.valueLength; i++) { - putDouble(col.getDouble(i)); - } - break; - case TEXT: - for (int i = 0; i < col.valueLength; i++) { - putBinary(col.getBinary(i)); - } - break; - case ENUMS: - for (int i = 0; i < col.valueLength; i++) { - putBinary(col.getBinary(i)); - } - break; - default: - throw new UnSupportedDataTypeException(String.valueOf(dataType)); + break; + case INT64: + for (int i = 0; i < col.valueLength; i++) { + putLong(col.getLong(i)); } - } - - public void putBoolean(boolean v) { - if (curValueIdx == VALUE_CAPACITY) { - if (VALUE_CAPACITY >= CAPACITY_THRESHOLD) { - if (this.booleanRet.size() <= valueArrayIdx + 1) { - this.booleanRet.add(new boolean[VALUE_CAPACITY]); - } - valueArrayIdx++; - curValueIdx = 0; - } else { - boolean[] newData = new boolean[VALUE_CAPACITY * 2]; - System.arraycopy(booleanRet.get(0), 0, newData, 0, VALUE_CAPACITY); - this.booleanRet.set(0, newData); - VALUE_CAPACITY = VALUE_CAPACITY * 2; - } - } - (this.booleanRet.get(valueArrayIdx))[curValueIdx++] = v; - valueLength++; - } - - public void putInt(int v) { - if (curValueIdx == VALUE_CAPACITY) { - if (VALUE_CAPACITY >= CAPACITY_THRESHOLD) { - if (this.intRet.size() <= valueArrayIdx + 1) { - this.intRet.add(new int[VALUE_CAPACITY]); - } - valueArrayIdx++; - curValueIdx = 0; - } else { - int[] newData = new int[VALUE_CAPACITY * 2]; - System.arraycopy(intRet.get(0), 0, newData, 0, VALUE_CAPACITY); - this.intRet.set(0, newData); - VALUE_CAPACITY = VALUE_CAPACITY * 2; - } - } - (this.intRet.get(valueArrayIdx))[curValueIdx++] = v; - valueLength++; - } - - public void putLong(long v) { - if (curValueIdx == VALUE_CAPACITY) { - if (VALUE_CAPACITY >= CAPACITY_THRESHOLD) { - if (this.longRet.size() <= valueArrayIdx + 1) { - this.longRet.add(new long[VALUE_CAPACITY]); - } - valueArrayIdx++; - curValueIdx = 0; - } else { - long[] newData = new long[VALUE_CAPACITY * 2]; - System.arraycopy(longRet.get(0), 0, newData, 0, VALUE_CAPACITY); - this.longRet.set(0, newData); - VALUE_CAPACITY = VALUE_CAPACITY * 2; - } + break; + case FLOAT: + for (int i = 0; i < col.valueLength; i++) { + putFloat(col.getFloat(i)); } - (this.longRet.get(valueArrayIdx))[curValueIdx++] = v; - valueLength++; - } - - public void putFloat(float v) { - if (curValueIdx == VALUE_CAPACITY) { - if (VALUE_CAPACITY >= CAPACITY_THRESHOLD) { - if (this.floatRet.size() <= valueArrayIdx + 1) { - this.floatRet.add(new float[VALUE_CAPACITY]); - } - valueArrayIdx++; - curValueIdx = 0; - } else { - float[] newData = new float[VALUE_CAPACITY * 2]; - System.arraycopy(floatRet.get(0), 0, newData, 0, VALUE_CAPACITY); - this.floatRet.set(0, newData); - VALUE_CAPACITY = VALUE_CAPACITY * 2; - } + break; + case DOUBLE: + for (int i = 0; i < col.valueLength; i++) { + putDouble(col.getDouble(i)); } - 
(this.floatRet.get(valueArrayIdx))[curValueIdx++] = v; - valueLength++; - } - - public void putDouble(double v) { - if (curValueIdx == VALUE_CAPACITY) { - if (VALUE_CAPACITY >= CAPACITY_THRESHOLD) { - if (this.doubleRet.size() <= valueArrayIdx + 1) { - this.doubleRet.add(new double[VALUE_CAPACITY]); - } - valueArrayIdx++; - curValueIdx = 0; - } else { - double[] newData = new double[VALUE_CAPACITY * 2]; - System.arraycopy(doubleRet.get(0), 0, newData, 0, VALUE_CAPACITY); - this.doubleRet.set(0, newData); - VALUE_CAPACITY = VALUE_CAPACITY * 2; - } + break; + case TEXT: + for (int i = 0; i < col.valueLength; i++) { + putBinary(col.getBinary(i)); } - (this.doubleRet.get(valueArrayIdx))[curValueIdx++] = v; - valueLength++; - } - - public void putBinary(Binary v) { - if (curValueIdx == VALUE_CAPACITY) { - if (VALUE_CAPACITY >= CAPACITY_THRESHOLD) { - if (this.binaryRet.size() <= valueArrayIdx + 1) { - this.binaryRet.add(new Binary[VALUE_CAPACITY]); - } - valueArrayIdx++; - curValueIdx = 0; - } else { - Binary[] newData = new Binary[VALUE_CAPACITY * 2]; - System.arraycopy(binaryRet.get(0), 0, newData, 0, VALUE_CAPACITY); - this.binaryRet.set(0, newData); - VALUE_CAPACITY = VALUE_CAPACITY * 2; - } + break; + case ENUMS: + for (int i = 0; i < col.valueLength; i++) { + putBinary(col.getBinary(i)); } - (this.binaryRet.get(valueArrayIdx))[curValueIdx++] = v; - valueLength++; + break; + default: + throw new UnSupportedDataTypeException(String.valueOf(dataType)); } + } - /** - * Checks if the given index is in range. If not, throws an appropriate - * runtime exception. - */ - private void rangeCheck(int idx) { - if (idx < 0) { - throw new IndexOutOfBoundsException("DynamicOneColumnData value range check, Index is negative: " + idx); + public void putBoolean(boolean v) { + if (curValueIdx == VALUE_CAPACITY) { + if (VALUE_CAPACITY >= CAPACITY_THRESHOLD) { + if (this.booleanRet.size() <= valueArrayIdx + 1) { + this.booleanRet.add(new boolean[VALUE_CAPACITY]); } - if (idx >= valueLength) { - throw new IndexOutOfBoundsException("DynamicOneColumnData value range check, Index : " + idx + ". Length : " + valueLength); + valueArrayIdx++; + curValueIdx = 0; + } else { + boolean[] newData = new boolean[VALUE_CAPACITY * 2]; + System.arraycopy(booleanRet.get(0), 0, newData, 0, VALUE_CAPACITY); + this.booleanRet.set(0, newData); + VALUE_CAPACITY = VALUE_CAPACITY * 2; + } + } + (this.booleanRet.get(valueArrayIdx))[curValueIdx++] = v; + valueLength++; + } + + public void putInt(int v) { + if (curValueIdx == VALUE_CAPACITY) { + if (VALUE_CAPACITY >= CAPACITY_THRESHOLD) { + if (this.intRet.size() <= valueArrayIdx + 1) { + this.intRet.add(new int[VALUE_CAPACITY]); } - } - - /** - * Checks if the given index is in range. If not, throws an appropriate - * runtime exception. 
- */ - private void rangeCheckForTime(int idx) { - if (idx < 0) { - throw new IndexOutOfBoundsException("DynamicOneColumnData time range check, Index is negative: " + idx); + valueArrayIdx++; + curValueIdx = 0; + } else { + int[] newData = new int[VALUE_CAPACITY * 2]; + System.arraycopy(intRet.get(0), 0, newData, 0, VALUE_CAPACITY); + this.intRet.set(0, newData); + VALUE_CAPACITY = VALUE_CAPACITY * 2; + } + } + (this.intRet.get(valueArrayIdx))[curValueIdx++] = v; + valueLength++; + } + + public void putLong(long v) { + if (curValueIdx == VALUE_CAPACITY) { + if (VALUE_CAPACITY >= CAPACITY_THRESHOLD) { + if (this.longRet.size() <= valueArrayIdx + 1) { + this.longRet.add(new long[VALUE_CAPACITY]); } - if (idx >= timeLength) { - throw new IndexOutOfBoundsException("DynamicOneColumnData time range check, Index : " + idx + ". Length : " + timeLength); + valueArrayIdx++; + curValueIdx = 0; + } else { + long[] newData = new long[VALUE_CAPACITY * 2]; + System.arraycopy(longRet.get(0), 0, newData, 0, VALUE_CAPACITY); + this.longRet.set(0, newData); + VALUE_CAPACITY = VALUE_CAPACITY * 2; + } + } + (this.longRet.get(valueArrayIdx))[curValueIdx++] = v; + valueLength++; + } + + public void putFloat(float v) { + if (curValueIdx == VALUE_CAPACITY) { + if (VALUE_CAPACITY >= CAPACITY_THRESHOLD) { + if (this.floatRet.size() <= valueArrayIdx + 1) { + this.floatRet.add(new float[VALUE_CAPACITY]); } - } - - private void rangeCheckForEmptyTime(int idx) { - if (idx < 0) { - throw new IndexOutOfBoundsException("DynamicOneColumnData empty time range check, Index is negative: " + idx); + valueArrayIdx++; + curValueIdx = 0; + } else { + float[] newData = new float[VALUE_CAPACITY * 2]; + System.arraycopy(floatRet.get(0), 0, newData, 0, VALUE_CAPACITY); + this.floatRet.set(0, newData); + VALUE_CAPACITY = VALUE_CAPACITY * 2; + } + } + (this.floatRet.get(valueArrayIdx))[curValueIdx++] = v; + valueLength++; + } + + public void putDouble(double v) { + if (curValueIdx == VALUE_CAPACITY) { + if (VALUE_CAPACITY >= CAPACITY_THRESHOLD) { + if (this.doubleRet.size() <= valueArrayIdx + 1) { + this.doubleRet.add(new double[VALUE_CAPACITY]); } - if (idx >= emptyTimeLength) { - throw new IndexOutOfBoundsException("DynamicOneColumnData empty time range check, Index : " + idx + ". 
Length : " + emptyTimeLength); + valueArrayIdx++; + curValueIdx = 0; + } else { + double[] newData = new double[VALUE_CAPACITY * 2]; + System.arraycopy(doubleRet.get(0), 0, newData, 0, VALUE_CAPACITY); + this.doubleRet.set(0, newData); + VALUE_CAPACITY = VALUE_CAPACITY * 2; + } + } + (this.doubleRet.get(valueArrayIdx))[curValueIdx++] = v; + valueLength++; + } + + public void putBinary(Binary v) { + if (curValueIdx == VALUE_CAPACITY) { + if (VALUE_CAPACITY >= CAPACITY_THRESHOLD) { + if (this.binaryRet.size() <= valueArrayIdx + 1) { + this.binaryRet.add(new Binary[VALUE_CAPACITY]); } - } - - public boolean getBoolean(int idx) { - rangeCheck(idx); - return this.booleanRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; - } - - public void setBoolean(int idx, boolean v) { - rangeCheck(idx); - this.booleanRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; - } - - public int getInt(int idx) { - rangeCheck(idx); - return this.intRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; - } - - public void setInt(int idx, int v) { - rangeCheck(idx); - this.intRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; - } - - public long getLong(int idx) { - rangeCheck(idx); - return this.longRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; - } - - public void setLong(int idx, long v) { - rangeCheck(idx); - this.longRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; - } - - public float getFloat(int idx) { - rangeCheck(idx); - return this.floatRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; - } - - public void setFloat(int idx, float v) { - rangeCheck(idx); - this.floatRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; - } - - public double getDouble(int idx) { - rangeCheck(idx); - return this.doubleRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; - } - - public void setDouble(int idx, double v) { - rangeCheck(idx); - this.doubleRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; - } - - public Binary getBinary(int idx) { - rangeCheck(idx); - return this.binaryRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; - } - - public void setBinary(int idx, Binary v) { - this.binaryRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; - } - - public long getTime(int idx) { - rangeCheckForTime(idx); - return this.timeRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; - } - - public void setTime(int idx, long v) { - rangeCheckForTime(idx); - this.timeRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; - } - - public long getEmptyTime(int idx) { - rangeCheckForEmptyTime(idx); - return this.emptyTimeRet.get(idx / EMPTY_TIME_CAPACITY)[idx % EMPTY_TIME_CAPACITY]; - } - - public long[] getTimeAsArray() { - long[] res = new long[timeLength]; - for (int i = 0; i < timeLength; i++) { - res[i] = timeRet.get(i / TIME_CAPACITY)[i % TIME_CAPACITY]; - } - return res; - } - - public void putAnObject(Object v) { - switch (dataType) { - case BOOLEAN: - putBoolean((boolean) v); - break; - case INT32: - putInt((int) v); - break; - case INT64: - putLong((long) v); - break; - case FLOAT: - putFloat((float) v); - break; - case DOUBLE: - putDouble((double) v); - break; - case TEXT: - putBinary((Binary) v); - break; - default: - throw new UnSupportedDataTypeException(String.valueOf(dataType)); - } - } - - public Comparable getAnObject(int idx) { + valueArrayIdx++; + curValueIdx = 0; + } else { + Binary[] newData = new Binary[VALUE_CAPACITY * 2]; + System.arraycopy(binaryRet.get(0), 0, newData, 0, VALUE_CAPACITY); + this.binaryRet.set(0, newData); + VALUE_CAPACITY = VALUE_CAPACITY * 2; + } + } + 
(this.binaryRet.get(valueArrayIdx))[curValueIdx++] = v; + valueLength++; + } + + /** + * Checks if the given index is in range. If not, throws an appropriate runtime exception. + */ + private void rangeCheck(int idx) { + if (idx < 0) { + throw new IndexOutOfBoundsException( + "DynamicOneColumnData value range check, Index is negative: " + idx); + } + if (idx >= valueLength) { + throw new IndexOutOfBoundsException( + "DynamicOneColumnData value range check, Index : " + idx + ". Length : " + valueLength); + } + } + + /** + * Checks if the given index is in range. If not, throws an appropriate runtime exception. + */ + private void rangeCheckForTime(int idx) { + if (idx < 0) { + throw new IndexOutOfBoundsException( + "DynamicOneColumnData time range check, Index is negative: " + idx); + } + if (idx >= timeLength) { + throw new IndexOutOfBoundsException( + "DynamicOneColumnData time range check, Index : " + idx + ". Length : " + timeLength); + } + } + + private void rangeCheckForEmptyTime(int idx) { + if (idx < 0) { + throw new IndexOutOfBoundsException( + "DynamicOneColumnData empty time range check, Index is negative: " + idx); + } + if (idx >= emptyTimeLength) { + throw new IndexOutOfBoundsException("DynamicOneColumnData empty time range check, Index : " + + idx + ". Length : " + emptyTimeLength); + } + } + + public boolean getBoolean(int idx) { + rangeCheck(idx); + return this.booleanRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; + } + + public void setBoolean(int idx, boolean v) { + rangeCheck(idx); + this.booleanRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; + } + + public int getInt(int idx) { + rangeCheck(idx); + return this.intRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; + } + + public void setInt(int idx, int v) { + rangeCheck(idx); + this.intRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; + } + + public long getLong(int idx) { + rangeCheck(idx); + return this.longRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; + } + + public void setLong(int idx, long v) { + rangeCheck(idx); + this.longRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; + } + + public float getFloat(int idx) { + rangeCheck(idx); + return this.floatRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; + } + + public void setFloat(int idx, float v) { + rangeCheck(idx); + this.floatRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; + } + + public double getDouble(int idx) { + rangeCheck(idx); + return this.doubleRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; + } + + public void setDouble(int idx, double v) { + rangeCheck(idx); + this.doubleRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; + } + + public Binary getBinary(int idx) { + rangeCheck(idx); + return this.binaryRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; + } + + public void setBinary(int idx, Binary v) { + this.binaryRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; + } + + public long getTime(int idx) { + rangeCheckForTime(idx); + return this.timeRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY]; + } + + public void setTime(int idx, long v) { + rangeCheckForTime(idx); + this.timeRet.get(idx / TIME_CAPACITY)[idx % TIME_CAPACITY] = v; + } + + public long getEmptyTime(int idx) { + rangeCheckForEmptyTime(idx); + return this.emptyTimeRet.get(idx / EMPTY_TIME_CAPACITY)[idx % EMPTY_TIME_CAPACITY]; + } + + public long[] getTimeAsArray() { + long[] res = new long[timeLength]; + for (int i = 0; i < timeLength; i++) { + res[i] = timeRet.get(i / TIME_CAPACITY)[i % TIME_CAPACITY]; + } + return res; + } + + public void 
putAnObject(Object v) { + switch (dataType) { + case BOOLEAN: + putBoolean((boolean) v); + break; + case INT32: + putInt((int) v); + break; + case INT64: + putLong((long) v); + break; + case FLOAT: + putFloat((float) v); + break; + case DOUBLE: + putDouble((double) v); + break; + case TEXT: + putBinary((Binary) v); + break; + default: + throw new UnSupportedDataTypeException(String.valueOf(dataType)); + } + } + + public Comparable getAnObject(int idx) { + switch (dataType) { + case BOOLEAN: + return getBoolean(idx); + case DOUBLE: + return getDouble(idx); + case TEXT: + return getBinary(idx); + case FLOAT: + return getFloat(idx); + case INT32: + return getInt(idx); + case INT64: + return getLong(idx); + default: + throw new UnSupportedDataTypeException(String.valueOf(dataType)); + } + } + + public void setAnObject(int idx, Comparable v) { + switch (dataType) { + case BOOLEAN: + setBoolean(idx, (Boolean) v); + break; + case DOUBLE: + setDouble(idx, (Double) v); + break; + case TEXT: + setBinary(idx, (Binary) v); + break; + case FLOAT: + setFloat(idx, (Float) v); + break; + case INT32: + setInt(idx, (Integer) v); + break; + case INT64: + setLong(idx, (Long) v); + break; + default: + throw new UnSupportedDataTypeException(String.valueOf(dataType)); + } + } + + public String getStringValue(int idx) { + switch (dataType) { + case BOOLEAN: + return String.valueOf(getBoolean(idx)); + case INT32: + return String.valueOf(getInt(idx)); + case INT64: + return String.valueOf(getLong(idx)); + case FLOAT: + return String.valueOf(getFloat(idx)); + case DOUBLE: + return String.valueOf(getDouble(idx)); + case TEXT: + return String.valueOf(getBinary(idx)); + case ENUMS: + return String.valueOf(getBinary(idx)); + default: + throw new UnSupportedDataTypeException(String.valueOf(dataType)); + } + } + + public String getStringTimeValuePair(int idx) { + String v; + switch (dataType) { + case BOOLEAN: + v = String.valueOf(getBoolean(idx)); + break; + case INT32: + v = String.valueOf(getInt(idx)); + break; + case INT64: + v = String.valueOf(getLong(idx)); + break; + case FLOAT: + v = String.valueOf(getFloat(idx)); + break; + case DOUBLE: + v = String.valueOf(getDouble(idx)); + break; + case TEXT: + v = String.valueOf(getBinary(idx)); + break; + case ENUMS: + v = String.valueOf(getBinary(idx)); + break; + default: + throw new UnSupportedDataTypeException(String.valueOf(dataType)); + } + String t = String.valueOf(getTime(idx)); + StringBuffer sb = new StringBuffer(); + sb.append(t); + sb.append("\t"); + sb.append(v); + return sb.toString(); + } + + public void putAValueFromDynamicOneColumnData(DynamicOneColumnData B, int idx) { + switch (dataType) { + case BOOLEAN: + putBoolean(B.getBoolean(idx)); + break; + case INT32: + putInt(B.getInt(idx)); + break; + case INT64: + putLong(B.getLong(idx)); + break; + case FLOAT: + putFloat(B.getFloat(idx)); + break; + case DOUBLE: + putDouble(B.getDouble(idx)); + break; + case TEXT: + putBinary(B.getBinary(idx)); + break; + case ENUMS: + putBinary(B.getBinary(idx)); + break; + default: + throw new UnSupportedDataTypeException(String.valueOf(dataType)); + } + } + + /** + * Remove the data whose index position is between size and valueLength. 
+ * + * @param size the data whose position is greater than size will be removed + */ + public void rollBack(int size) { + // rollback the length + valueLength -= size; + timeLength -= size; + if (size <= curValueIdx) { + curValueIdx -= size; + curTimeIdx -= size; + } else { + size -= curValueIdx; + size += TIME_CAPACITY; + while (size > TIME_CAPACITY) { switch (dataType) { - case BOOLEAN: - return getBoolean(idx); - case DOUBLE: - return getDouble(idx); - case TEXT: - return getBinary(idx); - case FLOAT: - return getFloat(idx); - case INT32: - return getInt(idx); - case INT64: - return getLong(idx); - default: - throw new UnSupportedDataTypeException(String.valueOf(dataType)); + case BOOLEAN: + booleanRet.remove(valueArrayIdx); + break; + case INT32: + intRet.remove(valueArrayIdx); + break; + case INT64: + longRet.remove(valueArrayIdx); + break; + case FLOAT: + floatRet.remove(valueArrayIdx); + break; + case DOUBLE: + doubleRet.remove(valueArrayIdx); + break; + case TEXT: + binaryRet.remove(valueArrayIdx); + break; + case ENUMS: + binaryRet.remove(valueArrayIdx); + break; + default: + throw new UnSupportedDataTypeException(String.valueOf(dataType)); } - } - - public void setAnObject(int idx, Comparable v) { - switch (dataType) { - case BOOLEAN: - setBoolean(idx, (Boolean) v); - break; - case DOUBLE: - setDouble(idx, (Double) v); - break; - case TEXT: - setBinary(idx, (Binary) v); - break; - case FLOAT: - setFloat(idx, (Float) v); - break; - case INT32: - setInt(idx, (Integer) v); - break; - case INT64: - setLong(idx, (Long) v); - break; - default: - throw new UnSupportedDataTypeException(String.valueOf(dataType)); - } - } - - public String getStringValue(int idx) { - switch (dataType) { - case BOOLEAN: - return String.valueOf(getBoolean(idx)); - case INT32: - return String.valueOf(getInt(idx)); - case INT64: - return String.valueOf(getLong(idx)); - case FLOAT: - return String.valueOf(getFloat(idx)); - case DOUBLE: - return String.valueOf(getDouble(idx)); - case TEXT: - return String.valueOf(getBinary(idx)); - case ENUMS: - return String.valueOf(getBinary(idx)); - default: - throw new UnSupportedDataTypeException(String.valueOf(dataType)); - } - } - - public String getStringTimeValuePair(int idx) { - String v; - switch (dataType) { - case BOOLEAN: - v = String.valueOf(getBoolean(idx)); - break; - case INT32: - v = String.valueOf(getInt(idx)); - break; - case INT64: - v = String.valueOf(getLong(idx)); - break; - case FLOAT: - v = String.valueOf(getFloat(idx)); - break; - case DOUBLE: - v = String.valueOf(getDouble(idx)); - break; - case TEXT: - v = String.valueOf(getBinary(idx)); - break; - case ENUMS: - v = String.valueOf(getBinary(idx)); - break; - default: - throw new UnSupportedDataTypeException(String.valueOf(dataType)); - } - String t = String.valueOf(getTime(idx)); - StringBuffer sb = new StringBuffer(); - sb.append(t); - sb.append("\t"); - sb.append(v); - return sb.toString(); - } - - public void putAValueFromDynamicOneColumnData(DynamicOneColumnData B, int idx) { - switch (dataType) { - case BOOLEAN: - putBoolean(B.getBoolean(idx)); - break; - case INT32: - putInt(B.getInt(idx)); - break; - case INT64: - putLong(B.getLong(idx)); - break; - case FLOAT: - putFloat(B.getFloat(idx)); - break; - case DOUBLE: - putDouble(B.getDouble(idx)); - break; - case TEXT: - putBinary(B.getBinary(idx)); - break; - case ENUMS: - putBinary(B.getBinary(idx)); - break; - default: - throw new UnSupportedDataTypeException(String.valueOf(dataType)); - } - } - - /** - * Remove the data whose index position 
is between size and valueLength. - * @param size the data whose position is greater than size will be removed - */ - public void rollBack(int size) { - //rollback the length - valueLength -= size; - timeLength -= size; - if (size <= curValueIdx) { - curValueIdx -= size; - curTimeIdx -= size; - } else { - size -= curValueIdx; - size += TIME_CAPACITY; - while (size > TIME_CAPACITY) { - switch (dataType) { - case BOOLEAN: - booleanRet.remove(valueArrayIdx); - break; - case INT32: - intRet.remove(valueArrayIdx); - break; - case INT64: - longRet.remove(valueArrayIdx); - break; - case FLOAT: - floatRet.remove(valueArrayIdx); - break; - case DOUBLE: - doubleRet.remove(valueArrayIdx); - break; - case TEXT: - binaryRet.remove(valueArrayIdx); - break; - case ENUMS: - binaryRet.remove(valueArrayIdx); - break; - default: - throw new UnSupportedDataTypeException(String.valueOf(dataType)); - } - valueArrayIdx--; - timeRet.remove(timeArrayIdx); - timeArrayIdx--; - - size -= TIME_CAPACITY; - } - curValueIdx = TIME_CAPACITY - size; - } - } - - /** - * Remove the last empty time. - */ - public void removeLastEmptyTime() { - emptyTimeLength -= 1; - curEmptyTimeIdx -= 1; - - // curEmptyTimeIdx will never == -1 - if (curEmptyTimeIdx == 0) { - if (emptyTimeArrayIdx == 0) { - curEmptyTimeIdx = 0; - } else { - curEmptyTimeIdx = EMPTY_TIME_CAPACITY; - emptyTimeRet.remove(emptyTimeArrayIdx); - emptyTimeArrayIdx -= 1; - } - } - } - - public void clearData() { - this.init(dataType, true, hasEmptyTime); - } - - public DynamicOneColumnData sub(int startPos) { - return sub(startPos, this.valueLength - 1); - } - - /** - * Extract the needed data between start position and end position. - * - * @param startPos start position of index - * @param endPos end position of index - * @return the new DynamicOneColumnData whose data is equals to position startPos and position endPos - */ - public DynamicOneColumnData sub(int startPos, int endPos) { - DynamicOneColumnData subRes = new DynamicOneColumnData(dataType, true); - for (int i = startPos; i <= endPos; i++) { - subRes.putTime(getTime(i)); - subRes.putAValueFromDynamicOneColumnData(this, i); - } - return subRes; - } - - public void putOverflowInfo(DynamicOneColumnData insertTrue, DynamicOneColumnData updateTrue, - DynamicOneColumnData updateFalse, SingleSeriesFilterExpression timeFilter) { - this.insertTrue = insertTrue; - this.updateTrue = updateTrue; - this.updateFalse = updateFalse; - this.timeFilter = timeFilter; - } - - public void copyFetchInfoTo(DynamicOneColumnData oneColRet) { - oneColRet.rowGroupIndex = this.rowGroupIndex; - oneColRet.pageOffset = this.pageOffset; - oneColRet.leftSize = this.leftSize; - oneColRet.hasReadAll = this.hasReadAll; - oneColRet.insertTrueIndex = this.insertTrueIndex; - oneColRet.insertTrue = this.insertTrue; - oneColRet.updateFalse = this.updateFalse; - oneColRet.updateTrue = this.updateTrue; - oneColRet.timeFilter = this.timeFilter; - } - - public void plusRowGroupIndexAndInitPageOffset() { - - this.rowGroupIndex++; - //RowGroupIndex's change means that The pageOffset should be updateTo the value in next RowGroup. - //But we don't know the value, so set the pageOffset to -1. And we calculate the accuracy value - //in the reading procedure. - this.pageOffset = -1; - } - - public int getRowGroupIndex() { - return this.rowGroupIndex; - } + valueArrayIdx--; + timeRet.remove(timeArrayIdx); + timeArrayIdx--; + + size -= TIME_CAPACITY; + } + curValueIdx = TIME_CAPACITY - size; + } + } + + /** + * Remove the last empty time. 
+ */ + public void removeLastEmptyTime() { + emptyTimeLength -= 1; + curEmptyTimeIdx -= 1; + + // curEmptyTimeIdx will never == -1 + if (curEmptyTimeIdx == 0) { + if (emptyTimeArrayIdx == 0) { + curEmptyTimeIdx = 0; + } else { + curEmptyTimeIdx = EMPTY_TIME_CAPACITY; + emptyTimeRet.remove(emptyTimeArrayIdx); + emptyTimeArrayIdx -= 1; + } + } + } + + public void clearData() { + this.init(dataType, true, hasEmptyTime); + } + + public DynamicOneColumnData sub(int startPos) { + return sub(startPos, this.valueLength - 1); + } + + /** + * Extract the needed data between start position and end position. + * + * @param startPos start position of index + * @param endPos end position of index + * @return the new DynamicOneColumnData whose data is equals to position startPos and position + * endPos + */ + public DynamicOneColumnData sub(int startPos, int endPos) { + DynamicOneColumnData subRes = new DynamicOneColumnData(dataType, true); + for (int i = startPos; i <= endPos; i++) { + subRes.putTime(getTime(i)); + subRes.putAValueFromDynamicOneColumnData(this, i); + } + return subRes; + } + + public void putOverflowInfo(DynamicOneColumnData insertTrue, DynamicOneColumnData updateTrue, + DynamicOneColumnData updateFalse, SingleSeriesFilterExpression timeFilter) { + this.insertTrue = insertTrue; + this.updateTrue = updateTrue; + this.updateFalse = updateFalse; + this.timeFilter = timeFilter; + } + + public void copyFetchInfoTo(DynamicOneColumnData oneColRet) { + oneColRet.rowGroupIndex = this.rowGroupIndex; + oneColRet.pageOffset = this.pageOffset; + oneColRet.leftSize = this.leftSize; + oneColRet.hasReadAll = this.hasReadAll; + oneColRet.insertTrueIndex = this.insertTrueIndex; + oneColRet.insertTrue = this.insertTrue; + oneColRet.updateFalse = this.updateFalse; + oneColRet.updateTrue = this.updateTrue; + oneColRet.timeFilter = this.timeFilter; + } + + public void plusRowGroupIndexAndInitPageOffset() { + + this.rowGroupIndex++; + // RowGroupIndex's change means that The pageOffset should be updateTo the value in next + // RowGroup. + // But we don't know the value, so set the pageOffset to -1. And we calculate the accuracy value + // in the reading procedure. 
+ this.pageOffset = -1; + } + + public int getRowGroupIndex() { + return this.rowGroupIndex; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/HadoopQueryEngine.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/HadoopQueryEngine.java index 7cedffdd..238a9a20 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/HadoopQueryEngine.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/HadoopQueryEngine.java @@ -8,7 +8,6 @@ import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; import cn.edu.tsinghua.tsfile.timeseries.read.support.Path; - import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; @@ -17,107 +16,122 @@ public class HadoopQueryEngine extends QueryEngine { - private static final String SEPARATOR_DEVIDE_SERIES = "."; - private List rowGroupMetaDataList; - - public HadoopQueryEngine(ITsRandomAccessFileReader raf, List rowGroupMetaDataList) throws IOException { - super(raf, rowGroupMetaDataList); - this.rowGroupMetaDataList = rowGroupMetaDataList; - } + private static final String SEPARATOR_DEVIDE_SERIES = "."; + private List rowGroupMetaDataList; - private List initDeviceIdList() { - Set deviceIdSet = new HashSet<>(); - for (RowGroupMetaData rowGroupMetaData : rowGroupMetaDataList) { - deviceIdSet.add(rowGroupMetaData.getDeltaObjectID()); - } - return new ArrayList<>(deviceIdSet); - } + public HadoopQueryEngine(ITsRandomAccessFileReader raf, + List rowGroupMetaDataList) throws IOException { + super(raf, rowGroupMetaDataList); + this.rowGroupMetaDataList = rowGroupMetaDataList; + } - private List initSensorIdList(){ - Set sensorIdSet = new HashSet<>(); - for(RowGroupMetaData rowGroupMetaData : rowGroupMetaDataList) { - for(TimeSeriesChunkMetaData timeSeriesChunkMetaData : rowGroupMetaData.getTimeSeriesChunkMetaDataList()){ - sensorIdSet.add(timeSeriesChunkMetaData.getProperties().getMeasurementUID()); - } - } - return new ArrayList<>(sensorIdSet); + private List initDeviceIdList() { + Set deviceIdSet = new HashSet<>(); + for (RowGroupMetaData rowGroupMetaData : rowGroupMetaDataList) { + deviceIdSet.add(rowGroupMetaData.getDeltaObjectID()); } - - public OnePassQueryDataSet queryWithSpecificRowGroups(List deviceIdList, List sensorIdList, FilterExpression timeFilter, FilterExpression freqFilter, FilterExpression valueFilter) throws IOException{ - if(deviceIdList == null)deviceIdList = initDeviceIdList(); - if(sensorIdList == null)sensorIdList = initSensorIdList(); - - List paths = new ArrayList<>(); - for(String deviceId : deviceIdList){ - for(String sensorId: sensorIdList){ - paths.add(new Path(deviceId + SEPARATOR_DEVIDE_SERIES + sensorId)); - } - } - - if (timeFilter == null && freqFilter == null && valueFilter == null) { - return queryWithoutFilter(paths); - } else if (valueFilter instanceof SingleSeriesFilterExpression || (timeFilter != null && valueFilter == null)) { - return readOneColumnValueUseFilter(paths, (SingleSeriesFilterExpression) timeFilter, (SingleSeriesFilterExpression) freqFilter, - (SingleSeriesFilterExpression) valueFilter); - } else if (valueFilter instanceof CrossSeriesFilterExpression) { - return crossColumnQuery(paths, (SingleSeriesFilterExpression) timeFilter, (SingleSeriesFilterExpression) freqFilter, - (CrossSeriesFilterExpression) valueFilter); - } - throw new IOException("Query Not Support Exception"); + return new ArrayList<>(deviceIdSet); + } + 
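queryWithSpecificRowGroups, in the hunk that follows, falls back to scanning the row-group metadata for device and sensor ids when the caller passes null lists, queries the Cartesian product of the two with each pair joined by '.', and then picks one of three strategies depending on the filters (no filter, single-series filter, cross-series filter), throwing an IOException otherwise. A small sketch of just the path expansion, with plain String paths standing in for the Path class:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class PathExpansion {
  // Every device id is combined with every sensor id, joined by '.'.
  static List<String> expand(List<String> deviceIds, List<String> sensorIds) {
    List<String> paths = new ArrayList<>();
    for (String device : deviceIds) {
      for (String sensor : sensorIds) {
        paths.add(device + "." + sensor);
      }
    }
    return paths;
  }

  public static void main(String[] args) {
    // Two devices x two sensors -> four series paths.
    System.out.println(expand(Arrays.asList("d1", "d2"), Arrays.asList("s1", "s2")));
    // [d1.s1, d1.s2, d2.s1, d2.s2]
  }
}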
+ private List initSensorIdList() { + Set sensorIdSet = new HashSet<>(); + for (RowGroupMetaData rowGroupMetaData : rowGroupMetaDataList) { + for (TimeSeriesChunkMetaData timeSeriesChunkMetaData : rowGroupMetaData + .getTimeSeriesChunkMetaDataList()) { + sensorIdSet.add(timeSeriesChunkMetaData.getProperties().getMeasurementUID()); + } } - - private OnePassQueryDataSet queryWithoutFilter(List paths) throws IOException { - return new IteratorOnePassQueryDataSet(paths) { - @Override - public DynamicOneColumnData getMoreRecordsForOneColumn(Path p, DynamicOneColumnData res) throws IOException { - return recordReader.getValueInOneColumnWithoutException(res, FETCH_SIZE, p.getDeltaObjectToString(), p.getMeasurementToString()); - } - }; + return new ArrayList<>(sensorIdSet); + } + + public OnePassQueryDataSet queryWithSpecificRowGroups(List deviceIdList, + List sensorIdList, FilterExpression timeFilter, FilterExpression freqFilter, + FilterExpression valueFilter) throws IOException { + if (deviceIdList == null) + deviceIdList = initDeviceIdList(); + if (sensorIdList == null) + sensorIdList = initSensorIdList(); + + List paths = new ArrayList<>(); + for (String deviceId : deviceIdList) { + for (String sensorId : sensorIdList) { + paths.add(new Path(deviceId + SEPARATOR_DEVIDE_SERIES + sensorId)); + } } - private OnePassQueryDataSet readOneColumnValueUseFilter(List paths, SingleSeriesFilterExpression timeFilter, - SingleSeriesFilterExpression freqFilter, SingleSeriesFilterExpression valueFilter) throws IOException { - logger.debug("start read one column data with filter"); - - return new IteratorOnePassQueryDataSet(paths) { - @Override - public DynamicOneColumnData getMoreRecordsForOneColumn(Path p, DynamicOneColumnData res) throws IOException { - return recordReader.getValuesUseFilter(res, FETCH_SIZE, p.getDeltaObjectToString(), p.getMeasurementToString() - , timeFilter, freqFilter, valueFilter); - } - }; + if (timeFilter == null && freqFilter == null && valueFilter == null) { + return queryWithoutFilter(paths); + } else if (valueFilter instanceof SingleSeriesFilterExpression + || (timeFilter != null && valueFilter == null)) { + return readOneColumnValueUseFilter(paths, (SingleSeriesFilterExpression) timeFilter, + (SingleSeriesFilterExpression) freqFilter, (SingleSeriesFilterExpression) valueFilter); + } else if (valueFilter instanceof CrossSeriesFilterExpression) { + return crossColumnQuery(paths, (SingleSeriesFilterExpression) timeFilter, + (SingleSeriesFilterExpression) freqFilter, (CrossSeriesFilterExpression) valueFilter); } - - private OnePassQueryDataSet crossColumnQuery(List paths, SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, - CrossSeriesFilterExpression valueFilter) throws IOException { - CrossQueryTimeGenerator timeQueryDataSet = new CrossQueryTimeGenerator(timeFilter, freqFilter, valueFilter, FETCH_SIZE) { - @Override - public DynamicOneColumnData getDataInNextBatch(DynamicOneColumnData res, int fetchSize, - SingleSeriesFilterExpression valueFilter, int valueFilterNumber) throws ProcessorException, IOException { - return recordReader.getValuesUseFilter(res, fetchSize, valueFilter); - } + throw new IOException("Query Not Support Exception"); + } + + private OnePassQueryDataSet queryWithoutFilter(List paths) throws IOException { + return new IteratorOnePassQueryDataSet(paths) { + @Override + public DynamicOneColumnData getMoreRecordsForOneColumn(Path p, DynamicOneColumnData res) + throws IOException { + return 
recordReader.getValueInOneColumnWithoutException(res, FETCH_SIZE, + p.getDeltaObjectToString(), p.getMeasurementToString()); + } + }; + } + + private OnePassQueryDataSet readOneColumnValueUseFilter(List paths, + SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, + SingleSeriesFilterExpression valueFilter) throws IOException { + logger.debug("start read one column data with filter"); + + return new IteratorOnePassQueryDataSet(paths) { + @Override + public DynamicOneColumnData getMoreRecordsForOneColumn(Path p, DynamicOneColumnData res) + throws IOException { + return recordReader.getValuesUseFilter(res, FETCH_SIZE, p.getDeltaObjectToString(), + p.getMeasurementToString(), timeFilter, freqFilter, valueFilter); + } + }; + } + + private OnePassQueryDataSet crossColumnQuery(List paths, + SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, + CrossSeriesFilterExpression valueFilter) throws IOException { + CrossQueryTimeGenerator timeQueryDataSet = + new CrossQueryTimeGenerator(timeFilter, freqFilter, valueFilter, FETCH_SIZE) { + @Override + public DynamicOneColumnData getDataInNextBatch(DynamicOneColumnData res, int fetchSize, + SingleSeriesFilterExpression valueFilter, int valueFilterNumber) + throws ProcessorException, IOException { + return recordReader.getValuesUseFilter(res, fetchSize, valueFilter); + } }; - return new CrossOnePassQueryIteratorDataSet(timeQueryDataSet) { - @Override - public boolean getMoreRecords() throws IOException { - try { - long[] timeRet = crossQueryTimeGenerator.generateTimes(); - if (timeRet.length == 0) { - return true; - } - for (Path p : paths) { - String deltaObjectUID = p.getDeltaObjectToString(); - String measurementUID = p.getMeasurementToString(); - DynamicOneColumnData oneColDataList = recordReader.getValuesUseTimestamps(deltaObjectUID, measurementUID, timeRet); - mapRet.put(p.getFullPath(), oneColDataList); - } - - } catch (ProcessorException e) { - throw new IOException(e.getMessage()); - } - return false; - } - }; - } -} \ No newline at end of file + return new CrossOnePassQueryIteratorDataSet(timeQueryDataSet) { + @Override + public boolean getMoreRecords() throws IOException { + try { + long[] timeRet = crossQueryTimeGenerator.generateTimes(); + if (timeRet.length == 0) { + return true; + } + for (Path p : paths) { + String deltaObjectUID = p.getDeltaObjectToString(); + String measurementUID = p.getMeasurementToString(); + DynamicOneColumnData oneColDataList = + recordReader.getValuesUseTimestamps(deltaObjectUID, measurementUID, timeRet); + mapRet.put(p.getFullPath(), oneColDataList); + } + + } catch (ProcessorException e) { + throw new IOException(e.getMessage()); + } + return false; + } + }; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/IteratorOnePassQueryDataSet.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/IteratorOnePassQueryDataSet.java index 5d409b01..a7d2663d 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/IteratorOnePassQueryDataSet.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/IteratorOnePassQueryDataSet.java @@ -5,112 +5,111 @@ import cn.edu.tsinghua.tsfile.timeseries.read.support.Field; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.*; public abstract class IteratorOnePassQueryDataSet extends OnePassQueryDataSet { - private static final Logger logger = LoggerFactory.getLogger(IteratorOnePassQueryDataSet.class); - public 
LinkedHashMap retMap; - private LinkedHashMap hasMoreRet; + private static final Logger logger = LoggerFactory.getLogger(IteratorOnePassQueryDataSet.class); + public LinkedHashMap retMap; + private LinkedHashMap hasMoreRet; - public IteratorOnePassQueryDataSet(List paths) throws IOException { - hasMoreRet = new LinkedHashMap<>(); - retMap = new LinkedHashMap<>(); - timeMap = new HashMap<>(); - for (Path p : paths) { - DynamicOneColumnData res = getMoreRecordsForOneColumn(p, null); + public IteratorOnePassQueryDataSet(List paths) throws IOException { + hasMoreRet = new LinkedHashMap<>(); + retMap = new LinkedHashMap<>(); + timeMap = new HashMap<>(); + for (Path p : paths) { + DynamicOneColumnData res = getMoreRecordsForOneColumn(p, null); - retMap.put(p, res); - if (res == null || res.valueLength == 0) { - hasMoreRet.put(p, false); - } else { - hasMoreRet.put(p, true); - } - } + retMap.put(p, res); + if (res == null || res.valueLength == 0) { + hasMoreRet.put(p, false); + } else { + hasMoreRet.put(p, true); + } } + } - public abstract DynamicOneColumnData getMoreRecordsForOneColumn(Path colName - , DynamicOneColumnData res) throws IOException; + public abstract DynamicOneColumnData getMoreRecordsForOneColumn(Path colName, + DynamicOneColumnData res) throws IOException; - //modified by hadoop - public void initForRecord() { - size = retMap.size(); - heap = new PriorityQueue<>(size); + // modified by hadoop + public void initForRecord() { + size = retMap.size(); + heap = new PriorityQueue<>(size); - if (size > 0) { - deltaObjectIds = new String[size]; - measurementIds = new String[size]; - } else { - LOG.error("OnePassQueryDataSet init row record occurs error! the size of ret is 0."); - } + if (size > 0) { + deltaObjectIds = new String[size]; + measurementIds = new String[size]; + } else { + LOG.error("OnePassQueryDataSet init row record occurs error! 
the size of ret is 0."); + } - int i = 0; - for (Path p : retMap.keySet()) { - deltaObjectIds[i] = p.getDeltaObjectToString(); - measurementIds[i] = p.getMeasurementToString(); + int i = 0; + for (Path p : retMap.keySet()) { + deltaObjectIds[i] = p.getDeltaObjectToString(); + measurementIds[i] = p.getMeasurementToString(); - DynamicOneColumnData res = retMap.get(p); - if (res != null && res.curIdx < res.valueLength) { - heapPut(res.getTime(res.curIdx)); - } - i++; - } + DynamicOneColumnData res = retMap.get(p); + if (res != null && res.curIdx < res.valueLength) { + heapPut(res.getTime(res.curIdx)); + } + i++; } + } - public boolean hasNextRecord() { - if (!ifInit) { - initForRecord(); - ifInit = true; - } - if (heap.peek() != null) { - return true; - } - return false; + public boolean hasNextRecord() { + if (!ifInit) { + initForRecord(); + ifInit = true; } + if (heap.peek() != null) { + return true; + } + return false; + } - //modified by hadoop - public OldRowRecord getNextRecord() { - if (!ifInit) { - initForRecord(); - ifInit = true; - } + // modified by hadoop + public OldRowRecord getNextRecord() { + if (!ifInit) { + initForRecord(); + ifInit = true; + } - if (!hasNextRecord()) { - return null; - } + if (!hasNextRecord()) { + return null; + } - Long minTime = heapGet(); - OldRowRecord r = new OldRowRecord(minTime, null, null); - for (Path p : retMap.keySet()) { - Field f; - DynamicOneColumnData res = retMap.get(p); - if (res.curIdx < res.valueLength && minTime == res.getTime(res.curIdx)) { - f = new Field(res.dataType, p.getDeltaObjectToString(), p.getMeasurementToString()); - f.setNull(false); - putValueToField(res, res.curIdx, f); - res.curIdx++; - if (hasMoreRet.get(p) && res.curIdx >= res.valueLength) { - res.clearData(); - try { - res = getMoreRecordsForOneColumn(p, res); - } catch (IOException e) { - logger.error("", e); - } - retMap.put(p, res); - if (res.valueLength == 0) { - hasMoreRet.put(p, false); - } - } - if (res.curIdx < res.valueLength) { - heapPut(res.getTime(res.curIdx)); - } - } else { - f = new Field(res.dataType, p.getDeltaObjectToString(), p.getMeasurementToString()); - f.setNull(true); - } - r.addField(f); + Long minTime = heapGet(); + OldRowRecord r = new OldRowRecord(minTime, null, null); + for (Path p : retMap.keySet()) { + Field f; + DynamicOneColumnData res = retMap.get(p); + if (res.curIdx < res.valueLength && minTime == res.getTime(res.curIdx)) { + f = new Field(res.dataType, p.getDeltaObjectToString(), p.getMeasurementToString()); + f.setNull(false); + putValueToField(res, res.curIdx, f); + res.curIdx++; + if (hasMoreRet.get(p) && res.curIdx >= res.valueLength) { + res.clearData(); + try { + res = getMoreRecordsForOneColumn(p, res); + } catch (IOException e) { + logger.error("", e); + } + retMap.put(p, res); + if (res.valueLength == 0) { + hasMoreRet.put(p, false); + } + } + if (res.curIdx < res.valueLength) { + heapPut(res.getTime(res.curIdx)); } - return r; + } else { + f = new Field(res.dataType, p.getDeltaObjectToString(), p.getMeasurementToString()); + f.setNull(true); + } + r.addField(f); } -} \ No newline at end of file + return r; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/OnePassQueryDataSet.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/OnePassQueryDataSet.java index 9d3a0a7a..3a1568a7 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/OnePassQueryDataSet.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/OnePassQueryDataSet.java @@ -9,283 +9,285 @@ 
import cn.edu.tsinghua.tsfile.timeseries.readV2.query.QueryDataSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import java.util.PriorityQueue; -public class OnePassQueryDataSet implements QueryDataSet{ - protected static final Logger LOG = LoggerFactory.getLogger(OnePassQueryDataSet.class); - protected static final char PATH_SPLITTER = '.'; - - /** - * Time Generator for Cross Query when using batching read - **/ - public CrossQueryTimeGenerator crossQueryTimeGenerator; - - /** - * mapRet.key stores the query path, mapRet.value stores the query result of mapRet.key - **/ - public LinkedHashMap mapRet; - - /** - * generator used for batch read - **/ - protected BatchReadRecordGenerator batchReadGenerator; - - /** - * special for save time values when processing cross getIndex - **/ - protected PriorityQueue heap; - - /** - * the content of cols equals to mapRet - **/ - protected DynamicOneColumnData[] cols; - - /** - * timeIdxs[i] stores the index of cols[i] - **/ - protected int[] timeIdxs; - - /** - * emptyTimeIdxs[i] stores the empty time index of cols[i] - **/ - protected int[] emptyTimeIdxs; - - protected String[] deltaObjectIds; - protected String[] measurementIds; - protected HashMap timeMap; // timestamp occurs time - protected int size; - protected boolean ifInit = false; - protected OldRowRecord currentRecord = null; - private Map deltaMap; // this variable is used for IoTDb - - public OnePassQueryDataSet() { - mapRet = new LinkedHashMap<>(); +public class OnePassQueryDataSet implements QueryDataSet { + protected static final Logger LOG = LoggerFactory.getLogger(OnePassQueryDataSet.class); + protected static final char PATH_SPLITTER = '.'; + + /** + * Time Generator for Cross Query when using batching read + **/ + public CrossQueryTimeGenerator crossQueryTimeGenerator; + + /** + * mapRet.key stores the query path, mapRet.value stores the query result of mapRet.key + **/ + public LinkedHashMap mapRet; + + /** + * generator used for batch read + **/ + protected BatchReadRecordGenerator batchReadGenerator; + + /** + * special for save time values when processing cross getIndex + **/ + protected PriorityQueue heap; + + /** + * the content of cols equals to mapRet + **/ + protected DynamicOneColumnData[] cols; + + /** + * timeIdxs[i] stores the index of cols[i] + **/ + protected int[] timeIdxs; + + /** + * emptyTimeIdxs[i] stores the empty time index of cols[i] + **/ + protected int[] emptyTimeIdxs; + + protected String[] deltaObjectIds; + protected String[] measurementIds; + protected HashMap timeMap; // timestamp occurs time + protected int size; + protected boolean ifInit = false; + protected OldRowRecord currentRecord = null; + private Map deltaMap; // this variable is used for IoTDb + + public OnePassQueryDataSet() { + mapRet = new LinkedHashMap<>(); + } + + public void initForRecord() { + size = mapRet.keySet().size(); + + if (size > 0) { + heap = new PriorityQueue<>(size); + cols = new DynamicOneColumnData[size]; + deltaObjectIds = new String[size]; + measurementIds = new String[size]; + timeIdxs = new int[size]; + emptyTimeIdxs = new int[size]; + timeMap = new HashMap<>(); + } else { + LOG.error("OnePassQueryDataSet init row record occurs error! 
the size of ret is 0."); + heap = new PriorityQueue<>(); } - public void initForRecord() { - size = mapRet.keySet().size(); - - if (size > 0) { - heap = new PriorityQueue<>(size); - cols = new DynamicOneColumnData[size]; - deltaObjectIds = new String[size]; - measurementIds = new String[size]; - timeIdxs = new int[size]; - emptyTimeIdxs = new int[size]; - timeMap = new HashMap<>(); - } else { - LOG.error("OnePassQueryDataSet init row record occurs error! the size of ret is 0."); - heap = new PriorityQueue<>(); + int i = 0; + for (String key : mapRet.keySet()) { + cols[i] = mapRet.get(key); + deltaObjectIds[i] = key.substring(0, key.lastIndexOf(PATH_SPLITTER)); + measurementIds[i] = key.substring(key.lastIndexOf(PATH_SPLITTER) + 1); + timeIdxs[i] = 0; + emptyTimeIdxs[i] = 0; + + if (cols[i] != null + && (cols[i].valueLength > 0 || cols[i].timeLength > 0 || cols[i].emptyTimeLength > 0)) { + long minTime = Long.MAX_VALUE; + if (cols[i].timeLength > 0) { + minTime = cols[i].getTime(0); } - - int i = 0; - for (String key : mapRet.keySet()) { - cols[i] = mapRet.get(key); - deltaObjectIds[i] = key.substring(0, key.lastIndexOf(PATH_SPLITTER)); - measurementIds[i] = key.substring(key.lastIndexOf(PATH_SPLITTER) + 1); - timeIdxs[i] = 0; - emptyTimeIdxs[i] = 0; - - if (cols[i] != null && (cols[i].valueLength > 0 || cols[i].timeLength > 0 || cols[i].emptyTimeLength > 0)) { - long minTime = Long.MAX_VALUE; - if (cols[i].timeLength > 0) { - minTime = cols[i].getTime(0); - } - if (cols[i].emptyTimeLength > 0) { - minTime = Math.min(minTime, cols[i].getEmptyTime(0)); - } - heapPut(minTime); - } - i++; + if (cols[i].emptyTimeLength > 0) { + minTime = Math.min(minTime, cols[i].getEmptyTime(0)); } + heapPut(minTime); + } + i++; } + } - protected void heapPut(long t) { - if (!timeMap.containsKey(t)) { - heap.add(t); - timeMap.put(t, 1); - } + protected void heapPut(long t) { + if (!timeMap.containsKey(t)) { + heap.add(t); + timeMap.put(t, 1); + } + } + + protected Long heapGet() { + Long t = heap.poll(); + timeMap.remove(t); + return t; + } + + public boolean hasNextRecord() { + if (!ifInit) { + initForRecord(); + ifInit = true; } + if (heap.peek() != null) { + return true; + } + return false; + } - protected Long heapGet() { - Long t = heap.poll(); - timeMap.remove(t); - return t; + public OldRowRecord getNextRecord() { + if (!ifInit) { + initForRecord(); + ifInit = true; } - public boolean hasNextRecord() { - if (!ifInit) { - initForRecord(); - ifInit = true; - } - if (heap.peek() != null) { - return true; - } - return false; + Long minTime = heapGet(); + if (minTime == null) { + return null; } - public OldRowRecord getNextRecord() { - if (!ifInit) { - initForRecord(); - ifInit = true; + OldRowRecord record = new OldRowRecord(minTime, null, null); + for (int i = 0; i < size; i++) { + if (i == 0) { + record.setDeltaObjectId(deltaObjectIds[i]); + } + Field field = new Field(cols[i].dataType, deltaObjectIds[i], measurementIds[i]); + if (timeIdxs[i] < cols[i].timeLength && minTime == cols[i].getTime(timeIdxs[i])) { + field.setNull(false); + putValueToField(cols[i], timeIdxs[i], field); + timeIdxs[i]++; + long nextTime = Long.MAX_VALUE; + if (timeIdxs[i] < cols[i].timeLength) { + nextTime = cols[i].getTime(timeIdxs[i]); } - - Long minTime = heapGet(); - if (minTime == null) { - return null; + if (emptyTimeIdxs[i] < cols[i].emptyTimeLength) { + nextTime = Math.min(nextTime, cols[i].getEmptyTime(emptyTimeIdxs[i])); } - - OldRowRecord record = new OldRowRecord(minTime, null, null); - for (int i = 0; i < size; i++) { 
- if (i == 0) { - record.setDeltaObjectId(deltaObjectIds[i]); - } - Field field = new Field(cols[i].dataType, deltaObjectIds[i], measurementIds[i]); - if (timeIdxs[i] < cols[i].timeLength && minTime == cols[i].getTime(timeIdxs[i])) { - field.setNull(false); - putValueToField(cols[i], timeIdxs[i], field); - timeIdxs[i]++; - long nextTime = Long.MAX_VALUE; - if (timeIdxs[i] < cols[i].timeLength) { - nextTime = cols[i].getTime(timeIdxs[i]); - } - if (emptyTimeIdxs[i] < cols[i].emptyTimeLength) { - nextTime = Math.min(nextTime, cols[i].getEmptyTime(emptyTimeIdxs[i])); - } - if (nextTime != Long.MAX_VALUE) { - heapPut(nextTime); - } - } else if (emptyTimeIdxs[i] < cols[i].emptyTimeLength && minTime == cols[i].getEmptyTime(emptyTimeIdxs[i])) { - field.setNull(true); - emptyTimeIdxs[i]++; - long nextTime = Long.MAX_VALUE; - if (emptyTimeIdxs[i] < cols[i].emptyTimeLength) { - nextTime = cols[i].getEmptyTime(emptyTimeIdxs[i]); - } - if (timeIdxs[i] < cols[i].timeLength) { - nextTime = Math.min(nextTime, cols[i].getTime(timeIdxs[i])); - } - if (nextTime != Long.MAX_VALUE) { - heapPut(nextTime); - } - } else { - field.setNull(true); - } - record.addField(field); + if (nextTime != Long.MAX_VALUE) { + heapPut(nextTime); } - return record; - } - - - @Override - public boolean hasNext() throws IOException { - return hasNextRecord(); - } - - @Override - public RowRecord next() throws IOException { - OldRowRecord oldRowRecord = getNextRecord(); - return OnePassQueryDataSet.convertToNew(oldRowRecord); - } - - public static RowRecord convertToNew(OldRowRecord oldRowRecord) { - RowRecord rowRecord = new RowRecord(oldRowRecord.timestamp); - for(Field field: oldRowRecord.fields) { - String path = field.deltaObjectId + field.measurementId; - - if(field.isNull()) { - rowRecord.putField(new Path(path), null); - } else { - TsPrimitiveType value; - switch (field.dataType) { - case TEXT: - value = new TsPrimitiveType.TsBinary(field.getBinaryV()); - break; - case FLOAT: - value = new TsPrimitiveType.TsFloat(field.getFloatV()); - break; - case INT32: - value = new TsPrimitiveType.TsInt(field.getIntV()); - break; - case INT64: - value = new TsPrimitiveType.TsLong(field.getLongV()); - break; - case DOUBLE: - value = new TsPrimitiveType.TsDouble(field.getDoubleV()); - break; - case BOOLEAN: - value = new TsPrimitiveType.TsBoolean(field.getBoolV()); - break; - default: - throw new UnSupportedDataTypeException("UnSupported datatype: " + String.valueOf(field.dataType)); - } - rowRecord.putField(new Path(path), value); - } + } else if (emptyTimeIdxs[i] < cols[i].emptyTimeLength + && minTime == cols[i].getEmptyTime(emptyTimeIdxs[i])) { + field.setNull(true); + emptyTimeIdxs[i]++; + long nextTime = Long.MAX_VALUE; + if (emptyTimeIdxs[i] < cols[i].emptyTimeLength) { + nextTime = cols[i].getEmptyTime(emptyTimeIdxs[i]); } - return rowRecord; - } - - public OldRowRecord getCurrentRecord() { - if (!ifInit) { - initForRecord(); - ifInit = true; + if (timeIdxs[i] < cols[i].timeLength) { + nextTime = Math.min(nextTime, cols[i].getTime(timeIdxs[i])); } - return currentRecord; - } - - public void putValueToField(DynamicOneColumnData col, int idx, Field f) { - switch (col.dataType) { - case BOOLEAN: - f.setBoolV(col.getBoolean(idx)); - break; - case INT32: - f.setIntV(col.getInt(idx)); - break; - case INT64: - f.setLongV(col.getLong(idx)); - break; - case FLOAT: - f.setFloatV(col.getFloat(idx)); - break; - case DOUBLE: - f.setDoubleV(col.getDouble(idx)); - break; - case TEXT: - f.setBinaryV(col.getBinary(idx)); - break; - case ENUMS: - 
f.setBinaryV(col.getBinary(idx)); - break; - default: - throw new UnSupportedDataTypeException("UnSupported" + String.valueOf(col.dataType)); + if (nextTime != Long.MAX_VALUE) { + heapPut(nextTime); } + } else { + field.setNull(true); + } + record.addField(field); } - - public void clear() { - this.ifInit = false; - for (DynamicOneColumnData col : mapRet.values()) { - col.clearData(); + return record; + } + + + @Override + public boolean hasNext() throws IOException { + return hasNextRecord(); + } + + @Override + public RowRecord next() throws IOException { + OldRowRecord oldRowRecord = getNextRecord(); + return OnePassQueryDataSet.convertToNew(oldRowRecord); + } + + public static RowRecord convertToNew(OldRowRecord oldRowRecord) { + RowRecord rowRecord = new RowRecord(oldRowRecord.timestamp); + for (Field field : oldRowRecord.fields) { + String path = field.deltaObjectId + field.measurementId; + + if (field.isNull()) { + rowRecord.putField(new Path(path), null); + } else { + TsPrimitiveType value; + switch (field.dataType) { + case TEXT: + value = new TsPrimitiveType.TsBinary(field.getBinaryV()); + break; + case FLOAT: + value = new TsPrimitiveType.TsFloat(field.getFloatV()); + break; + case INT32: + value = new TsPrimitiveType.TsInt(field.getIntV()); + break; + case INT64: + value = new TsPrimitiveType.TsLong(field.getLongV()); + break; + case DOUBLE: + value = new TsPrimitiveType.TsDouble(field.getDoubleV()); + break; + case BOOLEAN: + value = new TsPrimitiveType.TsBoolean(field.getBoolV()); + break; + default: + throw new UnSupportedDataTypeException( + "UnSupported datatype: " + String.valueOf(field.dataType)); } + rowRecord.putField(new Path(path), value); + } } + return rowRecord; + } - public BatchReadRecordGenerator getBatchReadGenerator() { - return batchReadGenerator; + public OldRowRecord getCurrentRecord() { + if (!ifInit) { + initForRecord(); + ifInit = true; } - - public void setBatchReadGenerator(BatchReadRecordGenerator batchReadGenerator) { - this.batchReadGenerator = batchReadGenerator; + return currentRecord; + } + + public void putValueToField(DynamicOneColumnData col, int idx, Field f) { + switch (col.dataType) { + case BOOLEAN: + f.setBoolV(col.getBoolean(idx)); + break; + case INT32: + f.setIntV(col.getInt(idx)); + break; + case INT64: + f.setLongV(col.getLong(idx)); + break; + case FLOAT: + f.setFloatV(col.getFloat(idx)); + break; + case DOUBLE: + f.setDoubleV(col.getDouble(idx)); + break; + case TEXT: + f.setBinaryV(col.getBinary(idx)); + break; + case ENUMS: + f.setBinaryV(col.getBinary(idx)); + break; + default: + throw new UnSupportedDataTypeException("UnSupported" + String.valueOf(col.dataType)); } + } - public Map getDeltaMap() { - return this.deltaMap; + public void clear() { + this.ifInit = false; + for (DynamicOneColumnData col : mapRet.values()) { + col.clearData(); } + } - public void setDeltaMap(Map deltaMap) { - this.deltaMap = deltaMap; - } -} \ No newline at end of file + public BatchReadRecordGenerator getBatchReadGenerator() { + return batchReadGenerator; + } + + public void setBatchReadGenerator(BatchReadRecordGenerator batchReadGenerator) { + this.batchReadGenerator = batchReadGenerator; + } + + public Map getDeltaMap() { + return this.deltaMap; + } + + public void setDeltaMap(Map deltaMap) { + this.deltaMap = deltaMap; + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/QueryConfig.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/QueryConfig.java index 94eae8e7..ac240ae7 100644 --- 
a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/QueryConfig.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/QueryConfig.java @@ -4,104 +4,103 @@ public class QueryConfig { - private String timeFilter; - private String freqFilter; - private ArrayList selectColumns; - private String valueFilter; - private QueryType queryType; - - /** - * Construct a queryConfig for QUERY_WITHOUT_FILTER - * - * @param selects selected columns - */ - public QueryConfig(String selects) { - this.selectColumns = new ArrayList<>(); - String[] cols = selects.split("\\|"); - for (String col : cols) { - selectColumns.add(col); - } - this.queryType = QueryType.QUERY_WITHOUT_FILTER; + private String timeFilter; + private String freqFilter; + private ArrayList selectColumns; + private String valueFilter; + private QueryType queryType; + + /** + * Construct a queryConfig for QUERY_WITHOUT_FILTER + * + * @param selects selected columns + */ + public QueryConfig(String selects) { + this.selectColumns = new ArrayList<>(); + String[] cols = selects.split("\\|"); + for (String col : cols) { + selectColumns.add(col); } - - /** - * Construct a queryConfig automatically according to the filters - * - * @param selects selected columns - * @param timeFilter time filter - * @param freqFilter frequency filter - * @param valueFilter value filter - */ - public QueryConfig(String selects, String timeFilter, String freqFilter, - String valueFilter) { - this.selectColumns = new ArrayList(); - String[] cols = selects.split("\\|"); - - for (String col : cols) { - selectColumns.add(col); - } - - this.setTimeFilter(timeFilter); - this.setFreqFilter(freqFilter); - this.setValueFilter(valueFilter); - - if (timeFilter.equals("null") && freqFilter.equals("null") && valueFilter.equals("null")) { - this.queryType = QueryType.QUERY_WITHOUT_FILTER; - } else if (valueFilter.startsWith("[")) { - this.queryType = QueryType.CROSS_QUERY; - } else { - this.queryType = QueryType.SELECT_ONE_COL_WITH_FILTER; - } + this.queryType = QueryType.QUERY_WITHOUT_FILTER; + } + + /** + * Construct a queryConfig automatically according to the filters + * + * @param selects selected columns + * @param timeFilter time filter + * @param freqFilter frequency filter + * @param valueFilter value filter + */ + public QueryConfig(String selects, String timeFilter, String freqFilter, String valueFilter) { + this.selectColumns = new ArrayList(); + String[] cols = selects.split("\\|"); + + for (String col : cols) { + selectColumns.add(col); } - public QueryConfig(ArrayList selectColumns, String timeFilter, String freqFilter, - String valueFilter) { - this.selectColumns = selectColumns; - this.setTimeFilter(timeFilter); - this.setFreqFilter(freqFilter); - this.setValueFilter(valueFilter); + this.setTimeFilter(timeFilter); + this.setFreqFilter(freqFilter); + this.setValueFilter(valueFilter); + + if (timeFilter.equals("null") && freqFilter.equals("null") && valueFilter.equals("null")) { + this.queryType = QueryType.QUERY_WITHOUT_FILTER; + } else if (valueFilter.startsWith("[")) { + this.queryType = QueryType.CROSS_QUERY; + } else { + this.queryType = QueryType.SELECT_ONE_COL_WITH_FILTER; } + } + public QueryConfig(ArrayList selectColumns, String timeFilter, String freqFilter, + String valueFilter) { + this.selectColumns = selectColumns; + this.setTimeFilter(timeFilter); + this.setFreqFilter(freqFilter); + this.setValueFilter(valueFilter); + } - public ArrayList getSelectColumns() { - return selectColumns; - } - public void 
setSelectColumns(ArrayList selectColumns) { - this.selectColumns = selectColumns; - } + public ArrayList getSelectColumns() { + return selectColumns; + } - public String getTimeFilter() { - return timeFilter; - } + public void setSelectColumns(ArrayList selectColumns) { + this.selectColumns = selectColumns; + } - public void setTimeFilter(String timeFilter) { - this.timeFilter = timeFilter; - } + public String getTimeFilter() { + return timeFilter; + } - public String getFreqFilter() { - return freqFilter; - } + public void setTimeFilter(String timeFilter) { + this.timeFilter = timeFilter; + } - private void setFreqFilter(String freqFilter) { - this.freqFilter = freqFilter; - } + public String getFreqFilter() { + return freqFilter; + } - public String getValueFilter() { - return valueFilter; - } + private void setFreqFilter(String freqFilter) { + this.freqFilter = freqFilter; + } - private void setValueFilter(String valueFilter) { - this.valueFilter = valueFilter; - } + public String getValueFilter() { + return valueFilter; + } - public QueryType getQueryType() { - return queryType; - } + private void setValueFilter(String valueFilter) { + this.valueFilter = valueFilter; + } - public void setQueryType(QueryType queryType) { - this.queryType = queryType; - } + public QueryType getQueryType() { + return queryType; + } + + public void setQueryType(QueryType queryType) { + this.queryType = queryType; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/QueryEngine.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/QueryEngine.java index 6560285e..7f06bddc 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/QueryEngine.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/QueryEngine.java @@ -16,332 +16,365 @@ import cn.edu.tsinghua.tsfile.timeseries.read.support.Path; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; public class QueryEngine { - protected static final Logger logger = LoggerFactory.getLogger(QueryEngine.class); - protected static int FETCH_SIZE = 20000; - protected RecordReader recordReader; - - public QueryEngine(ITsRandomAccessFileReader raf) throws IOException { - recordReader = new RecordReader(raf); + protected static final Logger logger = LoggerFactory.getLogger(QueryEngine.class); + protected static int FETCH_SIZE = 20000; + protected RecordReader recordReader; + + public QueryEngine(ITsRandomAccessFileReader raf) throws IOException { + recordReader = new RecordReader(raf); + } + + public QueryEngine(ITsRandomAccessFileReader raf, int fetchSize) throws IOException { + recordReader = new RecordReader(raf); + FETCH_SIZE = fetchSize; + } + + // for hadoop-connector + public QueryEngine(ITsRandomAccessFileReader raf, List rowGroupMetaDataList) + throws IOException { + recordReader = new RecordReader(raf, rowGroupMetaDataList); + } + + public static OnePassQueryDataSet query(QueryConfig config, String fileName) throws IOException { + TsRandomAccessLocalFileReader raf = new TsRandomAccessLocalFileReader(fileName); + QueryEngine queryEngine = new QueryEngine(raf); + OnePassQueryDataSet onePassQueryDataSet = queryEngine.query(config); + raf.close(); + return onePassQueryDataSet; + } + + public OnePassQueryDataSet query(QueryConfig config) throws IOException { + if (config.getQueryType() == QueryType.QUERY_WITHOUT_FILTER) { + return queryWithoutFilter(config); + } else if (config.getQueryType() == 
QueryType.SELECT_ONE_COL_WITH_FILTER) { + return readOneColumnValueUseFilter(config); + } else if (config.getQueryType() == QueryType.CROSS_QUERY) { + return crossColumnQuery(config); } - - public QueryEngine(ITsRandomAccessFileReader raf, int fetchSize) throws IOException { - recordReader = new RecordReader(raf); - FETCH_SIZE = fetchSize; + return null; + } + + /** + * One of the basic query methods, return OnePassQueryDataSet which contains the + * query result. + *
+ * + * @param paths query paths + * @param timeFilter filter for time + * @param freqFilter filter for frequency + * @param valueFilter filter for value + * @return query result + * @throws IOException TsFile read error + */ + public OnePassQueryDataSet query(List paths, FilterExpression timeFilter, + FilterExpression freqFilter, FilterExpression valueFilter) throws IOException { + + if (timeFilter == null && freqFilter == null && valueFilter == null) { + return queryWithoutFilter(paths); + } else if (valueFilter instanceof SingleSeriesFilterExpression + || (timeFilter != null && valueFilter == null)) { + return readOneColumnValueUseFilter(paths, (SingleSeriesFilterExpression) timeFilter, + (SingleSeriesFilterExpression) freqFilter, (SingleSeriesFilterExpression) valueFilter); + } else if (valueFilter instanceof CrossSeriesFilterExpression) { + return crossColumnQuery(paths, (SingleSeriesFilterExpression) timeFilter, + (SingleSeriesFilterExpression) freqFilter, (CrossSeriesFilterExpression) valueFilter); } - - //for hadoop-connector - public QueryEngine(ITsRandomAccessFileReader raf, List rowGroupMetaDataList) throws IOException { - recordReader = new RecordReader(raf, rowGroupMetaDataList); + return null; + } + + public OnePassQueryDataSet query(QueryConfig config, Map params) + throws IOException { + List paths = getPathsFromSelectedPaths(config.getSelectColumns()); + + SingleSeriesFilterExpression timeFilter = FilterUtils.construct(config.getTimeFilter(), null); + SingleSeriesFilterExpression freqFilter = FilterUtils.construct(config.getFreqFilter(), null); + FilterExpression valueFilter; + if (config.getQueryType() == QueryType.CROSS_QUERY) { + valueFilter = FilterUtils.constructCrossFilter(config.getValueFilter(), recordReader); + } else { + valueFilter = FilterUtils.construct(config.getValueFilter(), recordReader); } + return query(paths, timeFilter, freqFilter, valueFilter, params); + } - public static OnePassQueryDataSet query(QueryConfig config, String fileName) throws IOException { - TsRandomAccessLocalFileReader raf = new TsRandomAccessLocalFileReader(fileName); - QueryEngine queryEngine = new QueryEngine(raf); - OnePassQueryDataSet onePassQueryDataSet = queryEngine.query(config); - raf.close(); - return onePassQueryDataSet; - } + public OnePassQueryDataSet query(List paths, FilterExpression timeFilter, + FilterExpression freqFilter, FilterExpression valueFilter, Map params) + throws IOException { - public OnePassQueryDataSet query(QueryConfig config) throws IOException { - if (config.getQueryType() == QueryType.QUERY_WITHOUT_FILTER) { - return queryWithoutFilter(config); - } else if (config.getQueryType() == QueryType.SELECT_ONE_COL_WITH_FILTER) { - return readOneColumnValueUseFilter(config); - } else if (config.getQueryType() == QueryType.CROSS_QUERY) { - return crossColumnQuery(config); - } - return null; - } - - /** - * One of the basic query methods, return OnePassQueryDataSet which contains - * the query result. - *
- * - * @param paths query paths - * @param timeFilter filter for time - * @param freqFilter filter for frequency - * @param valueFilter filter for value - * @return query result - * @throws IOException TsFile read error - */ - public OnePassQueryDataSet query(List paths, FilterExpression timeFilter, FilterExpression freqFilter, - FilterExpression valueFilter) throws IOException { - - if (timeFilter == null && freqFilter == null && valueFilter == null) { - return queryWithoutFilter(paths); - } else if (valueFilter instanceof SingleSeriesFilterExpression || (timeFilter != null && valueFilter == null)) { - return readOneColumnValueUseFilter(paths, (SingleSeriesFilterExpression) timeFilter, (SingleSeriesFilterExpression) freqFilter, - (SingleSeriesFilterExpression) valueFilter); - } else if (valueFilter instanceof CrossSeriesFilterExpression) { - return crossColumnQuery(paths, (SingleSeriesFilterExpression) timeFilter, (SingleSeriesFilterExpression) freqFilter, - (CrossSeriesFilterExpression) valueFilter); - } - return null; - } + long startOffset = params.get(QueryConstant.PARTITION_START_OFFSET); + long endOffset = params.get(QueryConstant.PARTITION_END_OFFSET); - public OnePassQueryDataSet query(QueryConfig config, Map params) throws IOException { - List paths = getPathsFromSelectedPaths(config.getSelectColumns()); + ArrayList idxs = calSpecificRowGroupByPartition(startOffset, endOffset); - SingleSeriesFilterExpression timeFilter = FilterUtils.construct(config.getTimeFilter(), null); - SingleSeriesFilterExpression freqFilter = FilterUtils.construct(config.getFreqFilter(), null); - FilterExpression valueFilter; - if (config.getQueryType() == QueryType.CROSS_QUERY) { - valueFilter = FilterUtils.constructCrossFilter(config.getValueFilter(), recordReader); - } else { - valueFilter = FilterUtils.construct(config.getValueFilter(), recordReader); - } - return query(paths, timeFilter, freqFilter, valueFilter, params); + if (logger.isDebugEnabled()) { + logger.debug(startOffset + "|" + endOffset + "|" + idxs); } - - public OnePassQueryDataSet query(List paths, FilterExpression timeFilter, FilterExpression freqFilter, - FilterExpression valueFilter, Map params) throws IOException { - - long startOffset = params.get(QueryConstant.PARTITION_START_OFFSET); - long endOffset = params.get(QueryConstant.PARTITION_END_OFFSET); - - ArrayList idxs = calSpecificRowGroupByPartition(startOffset, endOffset); - - if (logger.isDebugEnabled()) { - logger.debug(startOffset + "|" + endOffset + "|" + idxs); - } - return queryWithSpecificRowGroups(paths, timeFilter, freqFilter, valueFilter, idxs); + return queryWithSpecificRowGroups(paths, timeFilter, freqFilter, valueFilter, idxs); + } + + private OnePassQueryDataSet queryWithSpecificRowGroups(List paths, + FilterExpression timeFilter, FilterExpression freqFilter, FilterExpression valueFilter, + ArrayList rowGroupIndexList) throws IOException { + if (timeFilter == null && freqFilter == null && valueFilter == null) { + return queryWithoutFilter(paths, rowGroupIndexList); + } else if (valueFilter instanceof SingleSeriesFilterExpression + || (timeFilter != null && valueFilter == null)) { + return readOneColumnValueUseFilter(paths, (SingleSeriesFilterExpression) timeFilter, + (SingleSeriesFilterExpression) freqFilter, (SingleSeriesFilterExpression) valueFilter, + rowGroupIndexList); + } else if (valueFilter instanceof CrossSeriesFilterExpression) { + return crossColumnQuery(paths, (SingleSeriesFilterExpression) timeFilter, + (SingleSeriesFilterExpression) freqFilter, 
(CrossSeriesFilterExpression) valueFilter, + rowGroupIndexList); } - - private OnePassQueryDataSet queryWithSpecificRowGroups(List paths, FilterExpression timeFilter, FilterExpression freqFilter - , FilterExpression valueFilter, ArrayList rowGroupIndexList) throws IOException { - if (timeFilter == null && freqFilter == null && valueFilter == null) { - return queryWithoutFilter(paths, rowGroupIndexList); - } else if (valueFilter instanceof SingleSeriesFilterExpression || (timeFilter != null && valueFilter == null)) { - return readOneColumnValueUseFilter(paths, (SingleSeriesFilterExpression) timeFilter, (SingleSeriesFilterExpression) freqFilter, - (SingleSeriesFilterExpression) valueFilter, rowGroupIndexList); - } else if (valueFilter instanceof CrossSeriesFilterExpression) { - return crossColumnQuery(paths, (SingleSeriesFilterExpression) timeFilter, (SingleSeriesFilterExpression) freqFilter, - (CrossSeriesFilterExpression) valueFilter, rowGroupIndexList); - } - throw new IOException("Query Not Support Exception"); - } - - private OnePassQueryDataSet queryWithoutFilter(QueryConfig config) throws IOException { - List paths = getPathsFromSelectedPaths(config.getSelectColumns()); - return queryWithoutFilter(paths); - } - - private OnePassQueryDataSet queryWithoutFilter(List paths) throws IOException { - return new IteratorOnePassQueryDataSet(paths) { - @Override - public DynamicOneColumnData getMoreRecordsForOneColumn(Path p, DynamicOneColumnData res) throws IOException { - return recordReader.getValueInOneColumn(res, FETCH_SIZE, p.getDeltaObjectToString(), p.getMeasurementToString()); - } + throw new IOException("Query Not Support Exception"); + } + + private OnePassQueryDataSet queryWithoutFilter(QueryConfig config) throws IOException { + List paths = getPathsFromSelectedPaths(config.getSelectColumns()); + return queryWithoutFilter(paths); + } + + private OnePassQueryDataSet queryWithoutFilter(List paths) throws IOException { + return new IteratorOnePassQueryDataSet(paths) { + @Override + public DynamicOneColumnData getMoreRecordsForOneColumn(Path p, DynamicOneColumnData res) + throws IOException { + return recordReader.getValueInOneColumn(res, FETCH_SIZE, p.getDeltaObjectToString(), + p.getMeasurementToString()); + } + }; + } + + private OnePassQueryDataSet queryWithoutFilter(List paths, + ArrayList RowGroupIdxList) throws IOException { + return new IteratorOnePassQueryDataSet(paths) { + @Override + public DynamicOneColumnData getMoreRecordsForOneColumn(Path p, DynamicOneColumnData res) + throws IOException { + return recordReader.getValueInOneColumn(res, FETCH_SIZE, p.getDeltaObjectToString(), + p.getMeasurementToString(), RowGroupIdxList); + } + }; + } + + private OnePassQueryDataSet readOneColumnValueUseFilter(QueryConfig config) throws IOException { + SingleSeriesFilterExpression timeFilter = FilterUtils.construct(config.getTimeFilter(), null); + SingleSeriesFilterExpression freqFilter = FilterUtils.construct(config.getFreqFilter(), null); + SingleSeriesFilterExpression valueFilter = + FilterUtils.construct(config.getValueFilter(), recordReader); + List paths = getPathsFromSelectedPaths(config.getSelectColumns()); + return readOneColumnValueUseFilter(paths, timeFilter, freqFilter, valueFilter); + } + + private OnePassQueryDataSet readOneColumnValueUseFilter(List paths, + SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, + SingleSeriesFilterExpression valueFilter) throws IOException { + logger.debug("start read one column data with filter..."); + return new 
IteratorOnePassQueryDataSet(paths) { + @Override + public DynamicOneColumnData getMoreRecordsForOneColumn(Path p, DynamicOneColumnData res) + throws IOException { + return recordReader.getValuesUseFilter(res, FETCH_SIZE, p.getDeltaObjectToString(), + p.getMeasurementToString(), timeFilter, freqFilter, valueFilter); + } + }; + } + + private OnePassQueryDataSet readOneColumnValueUseFilter(List paths, + SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, + SingleSeriesFilterExpression valueFilter, ArrayList rowGroupIndexList) + throws IOException { + logger.debug( + "start read one column data with filter according to specific RowGroup Index List {}", + rowGroupIndexList); + + return new IteratorOnePassQueryDataSet(paths) { + @Override + public DynamicOneColumnData getMoreRecordsForOneColumn(Path p, DynamicOneColumnData res) + throws IOException { + return recordReader.getValuesUseFilter(res, FETCH_SIZE, p.getDeltaObjectToString(), + p.getMeasurementToString(), timeFilter, freqFilter, valueFilter, rowGroupIndexList); + } + }; + } + + private OnePassQueryDataSet crossColumnQuery(QueryConfig config) throws IOException { + logger.info("start cross columns getIndex..."); + SingleSeriesFilterExpression timeFilter = FilterUtils.construct(config.getTimeFilter(), null); + SingleSeriesFilterExpression freqFilter = FilterUtils.construct(config.getFreqFilter(), null); + CrossSeriesFilterExpression valueFilter = (CrossSeriesFilterExpression) FilterUtils + .constructCrossFilter(config.getValueFilter(), recordReader); + List paths = getPathsFromSelectedPaths(config.getSelectColumns()); + return crossColumnQuery(paths, timeFilter, freqFilter, valueFilter); + } + + private OnePassQueryDataSet crossColumnQuery(List paths, + SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, + CrossSeriesFilterExpression valueFilter) throws IOException { + + CrossQueryTimeGenerator timeGenerator = + new CrossQueryTimeGenerator(timeFilter, freqFilter, valueFilter, FETCH_SIZE) { + @Override + public DynamicOneColumnData getDataInNextBatch(DynamicOneColumnData res, int fetchSize, + SingleSeriesFilterExpression valueFilter, int valueFilterNumber) + throws ProcessorException, IOException { + return recordReader.getValuesUseFilter(res, fetchSize, valueFilter); + } }; - } - - private OnePassQueryDataSet queryWithoutFilter(List paths, ArrayList RowGroupIdxList) throws IOException { - return new IteratorOnePassQueryDataSet(paths) { - @Override - public DynamicOneColumnData getMoreRecordsForOneColumn(Path p, DynamicOneColumnData res) throws IOException { - return recordReader.getValueInOneColumn(res, FETCH_SIZE, p.getDeltaObjectToString(), p.getMeasurementToString(), RowGroupIdxList); - } - }; - } - - private OnePassQueryDataSet readOneColumnValueUseFilter(QueryConfig config) throws IOException { - SingleSeriesFilterExpression timeFilter = FilterUtils.construct(config.getTimeFilter(), null); - SingleSeriesFilterExpression freqFilter = FilterUtils.construct(config.getFreqFilter(), null); - SingleSeriesFilterExpression valueFilter = FilterUtils.construct(config.getValueFilter(), recordReader); - List paths = getPathsFromSelectedPaths(config.getSelectColumns()); - return readOneColumnValueUseFilter(paths, timeFilter, freqFilter, valueFilter); - } - - private OnePassQueryDataSet readOneColumnValueUseFilter(List paths, SingleSeriesFilterExpression timeFilter, - SingleSeriesFilterExpression freqFilter, SingleSeriesFilterExpression valueFilter) throws IOException { - 
logger.debug("start read one column data with filter..."); - return new IteratorOnePassQueryDataSet(paths) { - @Override - public DynamicOneColumnData getMoreRecordsForOneColumn(Path p, DynamicOneColumnData res) throws IOException { - return recordReader.getValuesUseFilter(res, FETCH_SIZE, p.getDeltaObjectToString(), p.getMeasurementToString() - , timeFilter, freqFilter, valueFilter); - } - }; - } - - private OnePassQueryDataSet readOneColumnValueUseFilter(List paths, SingleSeriesFilterExpression timeFilter, - SingleSeriesFilterExpression freqFilter, SingleSeriesFilterExpression valueFilter, ArrayList rowGroupIndexList) throws IOException { - logger.debug("start read one column data with filter according to specific RowGroup Index List {}", rowGroupIndexList); - - return new IteratorOnePassQueryDataSet(paths) { - @Override - public DynamicOneColumnData getMoreRecordsForOneColumn(Path p, DynamicOneColumnData res) throws IOException { - return recordReader.getValuesUseFilter(res, FETCH_SIZE, p.getDeltaObjectToString(), p.getMeasurementToString() - , timeFilter, freqFilter, valueFilter, rowGroupIndexList); - } - }; - } - private OnePassQueryDataSet crossColumnQuery(QueryConfig config) throws IOException { - logger.info("start cross columns getIndex..."); - SingleSeriesFilterExpression timeFilter = FilterUtils.construct(config.getTimeFilter(), null); - SingleSeriesFilterExpression freqFilter = FilterUtils.construct(config.getFreqFilter(), null); - CrossSeriesFilterExpression valueFilter = (CrossSeriesFilterExpression) FilterUtils.constructCrossFilter(config.getValueFilter(), - recordReader); - List paths = getPathsFromSelectedPaths(config.getSelectColumns()); - return crossColumnQuery(paths, timeFilter, freqFilter, valueFilter); - } - - private OnePassQueryDataSet crossColumnQuery(List paths, SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, - CrossSeriesFilterExpression valueFilter) throws IOException { - - CrossQueryTimeGenerator timeGenerator = new CrossQueryTimeGenerator(timeFilter, freqFilter, valueFilter, FETCH_SIZE) { - @Override - public DynamicOneColumnData getDataInNextBatch(DynamicOneColumnData res, int fetchSize, - SingleSeriesFilterExpression valueFilter, int valueFilterNumber) throws ProcessorException, IOException { - return recordReader.getValuesUseFilter(res, fetchSize, valueFilter); - } - }; - - return new CrossOnePassQueryIteratorDataSet(timeGenerator) { - @Override - public boolean getMoreRecords() throws IOException { - try { - long[] timeRet = crossQueryTimeGenerator.generateTimes(); - if (timeRet.length == 0) { - return true; - } - for (Path p : paths) { - String deltaObjectUID = p.getDeltaObjectToString(); - String measurementUID = p.getMeasurementToString(); - DynamicOneColumnData oneColDataList = recordReader.getValuesUseTimestamps(deltaObjectUID, measurementUID, timeRet); - mapRet.put(p.getFullPath(), oneColDataList); - } - - } catch (ProcessorException e) { - throw new IOException(e.getMessage()); - } - return false; - } - }; - } - - private OnePassQueryDataSet crossColumnQuery(List paths, SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, - CrossSeriesFilterExpression valueFilter, ArrayList RowGroupIdxList) throws IOException { - CrossQueryTimeGenerator timeQueryDataSet = new CrossQueryTimeGenerator(timeFilter, freqFilter, valueFilter, FETCH_SIZE) { - @Override - public DynamicOneColumnData getDataInNextBatch(DynamicOneColumnData res, int fetchSize, - SingleSeriesFilterExpression valueFilter, int 
valueFilterNumber) throws ProcessorException, IOException { - return recordReader.getValuesUseFilter(res, fetchSize, valueFilter, RowGroupIdxList); - } - }; - - return new CrossOnePassQueryIteratorDataSet(timeQueryDataSet) { - @Override - public boolean getMoreRecords() throws IOException { - try { - long[] timeRet = crossQueryTimeGenerator.generateTimes(); - if (timeRet.length == 0) { - return true; - } - for (Path p : paths) { - String deltaObjectUID = p.getDeltaObjectToString(); - String measurementUID = p.getMeasurementToString(); - DynamicOneColumnData oneColDataList = recordReader.getValuesUseTimestamps(deltaObjectUID, measurementUID, timeRet, RowGroupIdxList); - mapRet.put(p.getFullPath(), oneColDataList); - } - - } catch (ProcessorException e) { - throw new IOException(e.getMessage()); - } - return false; - } + return new CrossOnePassQueryIteratorDataSet(timeGenerator) { + @Override + public boolean getMoreRecords() throws IOException { + try { + long[] timeRet = crossQueryTimeGenerator.generateTimes(); + if (timeRet.length == 0) { + return true; + } + for (Path p : paths) { + String deltaObjectUID = p.getDeltaObjectToString(); + String measurementUID = p.getMeasurementToString(); + DynamicOneColumnData oneColDataList = + recordReader.getValuesUseTimestamps(deltaObjectUID, measurementUID, timeRet); + mapRet.put(p.getFullPath(), oneColDataList); + } + + } catch (ProcessorException e) { + throw new IOException(e.getMessage()); + } + return false; + } + }; + } + + private OnePassQueryDataSet crossColumnQuery(List paths, + SingleSeriesFilterExpression timeFilter, SingleSeriesFilterExpression freqFilter, + CrossSeriesFilterExpression valueFilter, ArrayList RowGroupIdxList) + throws IOException { + CrossQueryTimeGenerator timeQueryDataSet = + new CrossQueryTimeGenerator(timeFilter, freqFilter, valueFilter, FETCH_SIZE) { + @Override + public DynamicOneColumnData getDataInNextBatch(DynamicOneColumnData res, int fetchSize, + SingleSeriesFilterExpression valueFilter, int valueFilterNumber) + throws ProcessorException, IOException { + return recordReader.getValuesUseFilter(res, fetchSize, valueFilter, RowGroupIdxList); + } }; - } - private List getPathsFromSelectedPaths(List selectedPaths) { - List paths = new ArrayList<>(); - for (String path : selectedPaths) { - Path p = new Path(path); - paths.add(p); + return new CrossOnePassQueryIteratorDataSet(timeQueryDataSet) { + @Override + public boolean getMoreRecords() throws IOException { + try { + long[] timeRet = crossQueryTimeGenerator.generateTimes(); + if (timeRet.length == 0) { + return true; + } + for (Path p : paths) { + String deltaObjectUID = p.getDeltaObjectToString(); + String measurementUID = p.getMeasurementToString(); + DynamicOneColumnData oneColDataList = recordReader + .getValuesUseTimestamps(deltaObjectUID, measurementUID, timeRet, RowGroupIdxList); + mapRet.put(p.getFullPath(), oneColDataList); + } + + } catch (ProcessorException e) { + throw new IOException(e.getMessage()); } - return paths; - } - - public Map> getAllSeriesSchemasGroupByDeltaObject() throws IOException { - return recordReader.getAllSeriesSchemasGroupByDeltaObject(); - } - - public Map getDeltaObjectRowGroupCount() throws IOException { - return recordReader.getDeltaObjectRowGroupCounts(); - } - - public Map getDeltaObjectTypes() throws IOException { - return recordReader.getDeltaObjectTypes(); + return false; + } + }; + } + + private List getPathsFromSelectedPaths(List selectedPaths) { + List paths = new ArrayList<>(); + for (String path : selectedPaths) { + 
Path p = new Path(path); + paths.add(p); } - - public boolean pathExist(Path path) throws IOException{ - FilterSeries col = recordReader.getColumnByMeasurementName(path.getDeltaObjectToString(), path.getMeasurementToString()); - - return col != null; + return paths; + } + + public Map> getAllSeriesSchemasGroupByDeltaObject() + throws IOException { + return recordReader.getAllSeriesSchemasGroupByDeltaObject(); + } + + public Map getDeltaObjectRowGroupCount() throws IOException { + return recordReader.getDeltaObjectRowGroupCounts(); + } + + public Map getDeltaObjectTypes() throws IOException { + return recordReader.getDeltaObjectTypes(); + } + + public boolean pathExist(Path path) throws IOException { + FilterSeries col = recordReader.getColumnByMeasurementName(path.getDeltaObjectToString(), + path.getMeasurementToString()); + + return col != null; + } + + public ArrayList getAllDeltaObject() throws IOException { + return recordReader.getAllDeltaObjects(); + } + + public List getAllSeriesSchema() throws IOException { + return recordReader.getAllSeriesSchema(); + } + + // Start - Methods for spark reading + public ArrayList getRowGroupPosList() throws IOException { + return recordReader.getRowGroupPosList(); + } + + public ArrayList calSpecificRowGroupByPartition(long start, long end) + throws IOException { + ArrayList rowGroupsPosList = getRowGroupPosList(); + ArrayList res = new ArrayList<>(); + long curStartPos = 0L; + for (int i = 0; i < rowGroupsPosList.size(); i++) { + long curEndPos = rowGroupsPosList.get(i); + long midPos = curStartPos + (curEndPos - curStartPos) / 2; + if (start < midPos && midPos <= end) { + res.add(i); + } + curStartPos = curEndPos; } - - public ArrayList getAllDeltaObject() throws IOException { - return recordReader.getAllDeltaObjects(); + return res; + } + + public ArrayList getAllDeltaObjectUIDByPartition(long start, long end) + throws IOException { + ArrayList rowGroupsPosList = getRowGroupPosList(); + List rgrs = recordReader.getAllRowGroupReaders(); + ArrayList res = new ArrayList<>(); + long curStartPos = 0L; + for (int i = 0; i < rowGroupsPosList.size(); i++) { + long curEndPos = rowGroupsPosList.get(i); + long midPos = curStartPos + (curEndPos - curStartPos) / 2; + if (start < midPos && midPos <= end) { + res.add(rgrs.get(i).getDeltaObjectUID()); + } + curStartPos = curEndPos; } + return res; + } - public List getAllSeriesSchema() throws IOException { - return recordReader.getAllSeriesSchema(); - } + public Map getProps() { + return recordReader.getProps(); + } - // Start - Methods for spark reading - public ArrayList getRowGroupPosList() throws IOException { - return recordReader.getRowGroupPosList(); - } - - public ArrayList calSpecificRowGroupByPartition(long start, long end) throws IOException { - ArrayList rowGroupsPosList = getRowGroupPosList(); - ArrayList res = new ArrayList<>(); - long curStartPos = 0L; - for (int i = 0; i < rowGroupsPosList.size(); i++) { - long curEndPos = rowGroupsPosList.get(i); - long midPos = curStartPos + (curEndPos - curStartPos) / 2; - if (start < midPos && midPos <= end) { - res.add(i); - } - curStartPos = curEndPos; - } - return res; - } + public String getProp(String key) { + return recordReader.getProp(key); + } - public ArrayList getAllDeltaObjectUIDByPartition(long start, long end) throws IOException { - ArrayList rowGroupsPosList = getRowGroupPosList(); - List rgrs = recordReader.getAllRowGroupReaders(); - ArrayList res = new ArrayList<>(); - long curStartPos = 0L; - for (int i = 0; i < rowGroupsPosList.size(); 
i++) { - long curEndPos = rowGroupsPosList.get(i); - long midPos = curStartPos + (curEndPos - curStartPos) / 2; - if (start < midPos && midPos <= end) { - res.add(rgrs.get(i).getDeltaObjectUID()); - } - curStartPos = curEndPos; - } - return res; - } - - public Map getProps() { - return recordReader.getProps(); - } - - public String getProp(String key) { - return recordReader.getProp(key); - } - - public void close() throws IOException{ - recordReader.close(); - } + public void close() throws IOException { + recordReader.close(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/QueryType.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/QueryType.java index 32c8b0ee..403f3ef1 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/QueryType.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/query/QueryType.java @@ -1,5 +1,5 @@ package cn.edu.tsinghua.tsfile.timeseries.read.query; public enum QueryType { - QUERY_WITHOUT_FILTER, SELECT_ONE_COL_WITH_FILTER, CROSS_QUERY + QUERY_WITHOUT_FILTER, SELECT_ONE_COL_WITH_FILTER, CROSS_QUERY } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/ColumnInfo.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/ColumnInfo.java index 50aefec4..5b986b4a 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/ColumnInfo.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/ColumnInfo.java @@ -3,47 +3,47 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; public class ColumnInfo { - private String name; - private TSDataType dataType; - - public ColumnInfo(String name, TSDataType dataType) { - this.setName(name); - this.setDataType(dataType); - } - - public TSDataType getDataType() { - return dataType; - } - - public void setDataType(TSDataType dataType) { - this.dataType = dataType; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String toString() { - return getName() + ":" + getDataType(); - } - - public int hashCode() { - return getName().hashCode(); - } - - public boolean equals(Object o) { - if (this == o) { - return true; - } else { - if (o instanceof ColumnInfo) { - return this.getName().equals(((ColumnInfo) o).getName()); - } else { - return false; - } - } - } + private String name; + private TSDataType dataType; + + public ColumnInfo(String name, TSDataType dataType) { + this.setName(name); + this.setDataType(dataType); + } + + public TSDataType getDataType() { + return dataType; + } + + public void setDataType(TSDataType dataType) { + this.dataType = dataType; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String toString() { + return getName() + ":" + getDataType(); + } + + public int hashCode() { + return getName().hashCode(); + } + + public boolean equals(Object o) { + if (this == o) { + return true; + } else { + if (o instanceof ColumnInfo) { + return this.getName().equals(((ColumnInfo) o).getName()); + } else { + return false; + } + } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/Field.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/Field.java index 588da23c..42c1b049 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/Field.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/Field.java @@ -5,124 +5,118 @@ import 
cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; /** - * Field is the components of one {@code OldRowRecord} which store a value in - * specific data type. + * Field is the components of one {@code OldRowRecord} which store a value in specific data type. * * @author Jinrui Zhang */ public class Field { - public TSDataType dataType; - public String deltaObjectId; - public String measurementId; - private boolean boolV; - private int intV; - private long longV; - private float floatV; - private double doubleV; - private Binary binaryV; - private boolean isNull; - - public Field(TSDataType dataType, String measurementId) { - this.dataType = dataType; - this.measurementId = measurementId; - this.deltaObjectId = "default"; - } - - public Field(TSDataType dataType, String deltaObjectId, String measurementId) { - this.dataType = dataType; - this.deltaObjectId = deltaObjectId; - this.measurementId = measurementId; - } - - public boolean getBoolV() { - return boolV; - } - - public void setBoolV(boolean boolV) { - this.boolV = boolV; - } - - public int getIntV() { - return intV; - } - - public void setIntV(int intV) { - this.intV = intV; - } - - public long getLongV() { - return longV; - } - - public void setLongV(long longV) { - this.longV = longV; - } - - public float getFloatV() { - return floatV; - } - - public void setFloatV(float floatV) { - this.floatV = floatV; - } - - public double getDoubleV() { - return doubleV; - } - - public void setDoubleV(double doubleV) { - this.doubleV = doubleV; - } - - public Binary getBinaryV() { - return binaryV; - } - - public void setBinaryV(Binary binaryV) { - this.binaryV = binaryV; - } - - public String getStringValue() { - if (isNull) { - return "null"; - } - switch (dataType) { - case BOOLEAN: - return String.valueOf(boolV); - case INT32: - return String.valueOf(intV); - case INT64: - return String.valueOf(longV); - case FLOAT: - return String.valueOf(floatV); - case DOUBLE: - return String.valueOf(doubleV); - case TEXT: - return binaryV.toString(); - case ENUMS: - return binaryV.toString(); - default: - throw new UnSupportedDataTypeException(String.valueOf(dataType)); - } - } - - public String toString() { - return getStringValue(); - } - - public boolean isNull() { - return isNull; - } - - public void setNull(boolean isNull) { - this.isNull = isNull; - } + public TSDataType dataType; + public String deltaObjectId; + public String measurementId; + private boolean boolV; + private int intV; + private long longV; + private float floatV; + private double doubleV; + private Binary binaryV; + private boolean isNull; + + public Field(TSDataType dataType, String measurementId) { + this.dataType = dataType; + this.measurementId = measurementId; + this.deltaObjectId = "default"; + } + + public Field(TSDataType dataType, String deltaObjectId, String measurementId) { + this.dataType = dataType; + this.deltaObjectId = deltaObjectId; + this.measurementId = measurementId; + } + + public boolean getBoolV() { + return boolV; + } + + public void setBoolV(boolean boolV) { + this.boolV = boolV; + } + + public int getIntV() { + return intV; + } + + public void setIntV(int intV) { + this.intV = intV; + } + + public long getLongV() { + return longV; + } + + public void setLongV(long longV) { + this.longV = longV; + } + + public float getFloatV() { + return floatV; + } + + public void setFloatV(float floatV) { + this.floatV = floatV; + } + + public double getDoubleV() { + return doubleV; + } + + public void setDoubleV(double doubleV) { + this.doubleV = doubleV; + } + + 
public Binary getBinaryV() { + return binaryV; + } + + public void setBinaryV(Binary binaryV) { + this.binaryV = binaryV; + } + + public String getStringValue() { + if (isNull) { + return "null"; + } + switch (dataType) { + case BOOLEAN: + return String.valueOf(boolV); + case INT32: + return String.valueOf(intV); + case INT64: + return String.valueOf(longV); + case FLOAT: + return String.valueOf(floatV); + case DOUBLE: + return String.valueOf(doubleV); + case TEXT: + return binaryV.toString(); + case ENUMS: + return binaryV.toString(); + default: + throw new UnSupportedDataTypeException(String.valueOf(dataType)); + } + } + + public String toString() { + return getStringValue(); + } + + public boolean isNull() { + return isNull; + } + + public void setNull(boolean isNull) { + this.isNull = isNull; + } } - - - - - diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/OldRowRecord.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/OldRowRecord.java index a18519b4..c17498fa 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/OldRowRecord.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/OldRowRecord.java @@ -6,7 +6,6 @@ import cn.edu.tsinghua.tsfile.timeseries.write.record.DataPoint; import cn.edu.tsinghua.tsfile.timeseries.write.record.TSRecord; import cn.edu.tsinghua.tsfile.timeseries.write.record.datapoint.*; - import java.util.ArrayList; import java.util.List; @@ -17,83 +16,83 @@ * @author Jinrui Zhang */ public class OldRowRecord { - public long timestamp; - public String deltaObjectId; - public List fields; + public long timestamp; + public String deltaObjectId; + public List fields; - public OldRowRecord(long timestamp, String deltaObjectId, String deltaObjectType) { - this.timestamp = timestamp; - this.deltaObjectId = deltaObjectId; - this.fields = new ArrayList(); - } + public OldRowRecord(long timestamp, String deltaObjectId, String deltaObjectType) { + this.timestamp = timestamp; + this.deltaObjectId = deltaObjectId; + this.fields = new ArrayList(); + } - public long getTime() { - return timestamp; - } + public long getTime() { + return timestamp; + } - public String getRowKey() { - return deltaObjectId; - } + public String getRowKey() { + return deltaObjectId; + } - public void setTimestamp(long timestamp) { - this.timestamp = timestamp; - } + public void setTimestamp(long timestamp) { + this.timestamp = timestamp; + } - public void setDeltaObjectId(String did) { - this.deltaObjectId = did; - } + public void setDeltaObjectId(String did) { + this.deltaObjectId = did; + } - public int addField(Field f) { - this.fields.add(f); - return fields.size(); - } + public int addField(Field f) { + this.fields.add(f); + return fields.size(); + } - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(timestamp); - for (Field f : fields) { - sb.append("\t"); - sb.append(f); - } - return sb.toString(); + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(timestamp); + for (Field f : fields) { + sb.append("\t"); + sb.append(f); } + return sb.toString(); + } - public TSRecord toTSRecord() { - TSRecord r = new TSRecord(timestamp, deltaObjectId); - for (Field f : fields) { - if (!f.isNull()) { - DataPoint d = createDataPoint(f.dataType, f.measurementId, f); - r.addTuple(d); - } - } - return r; + public TSRecord toTSRecord() { + TSRecord r = new TSRecord(timestamp, deltaObjectId); + for (Field f : fields) { + if (!f.isNull()) { + DataPoint d = 
createDataPoint(f.dataType, f.measurementId, f); + r.addTuple(d); + } } + return r; + } - private DataPoint createDataPoint(TSDataType dataType, String measurementId, Field f) { - switch (dataType) { + private DataPoint createDataPoint(TSDataType dataType, String measurementId, Field f) { + switch (dataType) { - case BOOLEAN: - return new BooleanDataPoint(measurementId, f.getBoolV()); - case DOUBLE: - return new DoubleDataPoint(measurementId, f.getDoubleV()); - case FLOAT: - return new FloatDataPoint(measurementId, f.getFloatV()); - case INT32: - return new IntDataPoint(measurementId, f.getIntV()); - case INT64: - return new LongDataPoint(measurementId, f.getLongV()); - case TEXT: - return new StringDataPoint(measurementId, Binary.valueOf(f.getStringValue())); - default: - throw new UnSupportedDataTypeException(String.valueOf(dataType)); - } + case BOOLEAN: + return new BooleanDataPoint(measurementId, f.getBoolV()); + case DOUBLE: + return new DoubleDataPoint(measurementId, f.getDoubleV()); + case FLOAT: + return new FloatDataPoint(measurementId, f.getFloatV()); + case INT32: + return new IntDataPoint(measurementId, f.getIntV()); + case INT64: + return new LongDataPoint(measurementId, f.getLongV()); + case TEXT: + return new StringDataPoint(measurementId, Binary.valueOf(f.getStringValue())); + default: + throw new UnSupportedDataTypeException(String.valueOf(dataType)); } + } - /** - * @return the fields - */ - public List getFields() { - return fields; - } + /** + * @return the fields + */ + public List getFields() { + return fields; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/Path.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/Path.java index 911a3ae5..80aea4e1 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/Path.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/read/support/Path.java @@ -4,173 +4,173 @@ import cn.edu.tsinghua.tsfile.timeseries.utils.StringContainer; /** - * This class define an Object named Path to represent a series in delta system. - * And in batch read, this definition is also used in query processing. - * Note that, Path is unmodified after a new object has been created. + * This class define an Object named Path to represent a series in delta system. And in batch read, + * this definition is also used in query processing. Note that, Path is unmodified after a new + * object has been created. 
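To make the intent of the reformatted Field/OldRowRecord pair above easier to follow, here is a minimal, illustrative sketch of converting a row into a TSRecord; the single FLOAT field and its values are invented for the example, while every call is the API shown in this diff:

    // Illustrative only: one FLOAT field carried by an OldRowRecord becomes a
    // FloatDataPoint inside the TSRecord produced by toTSRecord().
    Field field = new Field(TSDataType.FLOAT, "device_1", "sensor_1");
    field.setFloatV(1.5f);
    OldRowRecord row = new OldRowRecord(1L, "device_1", null); // third argument (deltaObjectType) is unused above
    row.addField(field);
    TSRecord record = row.toTSRecord(); // holds a FloatDataPoint("sensor_1", 1.5f)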
* * @author Kangrong */ public class Path { - private String measurement = null; - private String deltaObject = null; - private String fullPath; - - public Path(StringContainer pathSc) { - assert pathSc != null; - String[] splits = pathSc.toString().split(SystemConstant.PATH_SEPARATER_NO_REGEX); - init(splits); - } - - public Path(String pathSc) { - assert pathSc != null; - String[] splits = pathSc.split(SystemConstant.PATH_SEPARATER_NO_REGEX); - init(splits); - } - - public Path(String[] pathSc) { - assert pathSc != null; - String[] splits = - new StringContainer(pathSc, SystemConstant.PATH_SEPARATOR).toString().split( - SystemConstant.PATH_SEPARATER_NO_REGEX); - init(splits); - } - - private void init(String[] splitedPathArray) { - StringContainer sc = new StringContainer(splitedPathArray, SystemConstant.PATH_SEPARATOR); - if (sc.size() <= 1) { - deltaObject = ""; - fullPath = measurement = sc.toString(); - } else { - deltaObject = sc.getSubStringContainer(0, -2).toString(); - measurement = sc.getSubString(-1); - fullPath = sc.toString(); - } - } - - - public String getFullPath() { - return fullPath; - } - - public String getDeltaObjectToString() { - return deltaObject; - } - - public String getMeasurementToString() { - return measurement; - } - - @Override - public int hashCode() { - return fullPath.hashCode(); - } - - @Override - public boolean equals(Object obj) { - return obj != null && obj instanceof Path && this.fullPath.equals(((Path) obj).fullPath); - } - - public boolean equals(String obj) { - return obj != null && this.fullPath.equals(obj); - } - - @Override - public String toString() { - return fullPath; - } - - - @Override - public Path clone() { - return new Path(fullPath); - } - - /** - * if prefix is null, return false, else judge whether this.fullPath starts with prefix - * - * @param prefix the prefix string to be tested. - * @return True if fullPath starts with prefix - */ - public boolean startWith(String prefix) { - return prefix != null && fullPath.startsWith(prefix); - } - - /** - * if prefix is null, return false, else judge whether this.fullPath starts with prefix.fullPath - * - * @param prefix the prefix path to be tested. - * @return True if fullPath starts with prefix.fullPath - */ - public boolean startWith(Path prefix) { - return startWith(prefix.fullPath); - } - - public static Path mergePath(Path prefix, Path suffix) { - StringContainer sc = new StringContainer(SystemConstant.PATH_SEPARATOR); - sc.addTail(prefix); - sc.addTail(suffix); - return new Path(sc); - } - - /** - * add {@code prefix} as the prefix of {@code src}. - * - * @param src to be added. - * @param prefix the newly prefix - * @return if this path start with prefix - */ - public static Path addPrefixPath(Path src, String prefix) { - StringContainer sc = new StringContainer(SystemConstant.PATH_SEPARATOR); - sc.addTail(prefix); - sc.addTail(src); - return new Path(sc); - } - - /** - * add {@code prefix} as the prefix of {@code src}. - * - * @param src to be added. - * @param prefix the newly prefix - * @return Path - */ - public static Path addPrefixPath(Path src, Path prefix) { - return addPrefixPath(src, prefix.fullPath); - } - - /** - * replace prefix of descPrefix with given parameter {@code srcPrefix}. - * If the level of the path constructed by {@code srcPrefix} is larger than {@code descPrefix}, return {@code - * srcPrefix} directly. 
- * - * @param srcPrefix the prefix to replace descPrefix - * @param descPrefix to be replaced - * @return If the level of the path constructed by {@code srcPrefix} is larger than {@code descPrefix}, return - * {@code srcPrefix} directly. - */ - public static Path replace(String srcPrefix, Path descPrefix) { - if ("".equals(srcPrefix) || descPrefix.startWith(srcPrefix)) - return descPrefix; - int prefixSize = srcPrefix.split(SystemConstant.PATH_SEPARATER_NO_REGEX).length; - String[] descArray = descPrefix.fullPath.split(SystemConstant.PATH_SEPARATER_NO_REGEX); - if (descArray.length <= prefixSize) - return new Path(srcPrefix); - StringContainer sc = new StringContainer(SystemConstant.PATH_SEPARATOR); - sc.addTail(srcPrefix); - for (int i = prefixSize; i < descArray.length; i++) { - sc.addTail(descArray[i]); - } - return new Path(sc); - } - - /** - * replace prefix of {@code descPrefix} with given parameter {@code srcPrefix}. - * If the level of {@code srcPrefix} is larger than {@code descPrefix}, return {@code srcPrefix} directly. - * - * @param srcPrefix the prefix to replace descPrefix - * @param descPrefix to be replaced - * @return If the level of {@code srcPrefix} is larger than {@code descPrefix}, return {@code srcPrefix} directly. - */ - public static Path replace(Path srcPrefix, Path descPrefix) { - return replace(srcPrefix.fullPath, descPrefix); - } + private String measurement = null; + private String deltaObject = null; + private String fullPath; + + public Path(StringContainer pathSc) { + assert pathSc != null; + String[] splits = pathSc.toString().split(SystemConstant.PATH_SEPARATER_NO_REGEX); + init(splits); + } + + public Path(String pathSc) { + assert pathSc != null; + String[] splits = pathSc.split(SystemConstant.PATH_SEPARATER_NO_REGEX); + init(splits); + } + + public Path(String[] pathSc) { + assert pathSc != null; + String[] splits = new StringContainer(pathSc, SystemConstant.PATH_SEPARATOR).toString() + .split(SystemConstant.PATH_SEPARATER_NO_REGEX); + init(splits); + } + + private void init(String[] splitedPathArray) { + StringContainer sc = new StringContainer(splitedPathArray, SystemConstant.PATH_SEPARATOR); + if (sc.size() <= 1) { + deltaObject = ""; + fullPath = measurement = sc.toString(); + } else { + deltaObject = sc.getSubStringContainer(0, -2).toString(); + measurement = sc.getSubString(-1); + fullPath = sc.toString(); + } + } + + + public String getFullPath() { + return fullPath; + } + + public String getDeltaObjectToString() { + return deltaObject; + } + + public String getMeasurementToString() { + return measurement; + } + + @Override + public int hashCode() { + return fullPath.hashCode(); + } + + @Override + public boolean equals(Object obj) { + return obj != null && obj instanceof Path && this.fullPath.equals(((Path) obj).fullPath); + } + + public boolean equals(String obj) { + return obj != null && this.fullPath.equals(obj); + } + + @Override + public String toString() { + return fullPath; + } + + + @Override + public Path clone() { + return new Path(fullPath); + } + + /** + * if prefix is null, return false, else judge whether this.fullPath starts with prefix + * + * @param prefix the prefix string to be tested. + * @return True if fullPath starts with prefix + */ + public boolean startWith(String prefix) { + return prefix != null && fullPath.startsWith(prefix); + } + + /** + * if prefix is null, return false, else judge whether this.fullPath starts with prefix.fullPath + * + * @param prefix the prefix path to be tested. 
+ * @return True if fullPath starts with prefix.fullPath + */ + public boolean startWith(Path prefix) { + return startWith(prefix.fullPath); + } + + public static Path mergePath(Path prefix, Path suffix) { + StringContainer sc = new StringContainer(SystemConstant.PATH_SEPARATOR); + sc.addTail(prefix); + sc.addTail(suffix); + return new Path(sc); + } + + /** + * add {@code prefix} as the prefix of {@code src}. + * + * @param src to be added. + * @param prefix the newly prefix + * @return if this path start with prefix + */ + public static Path addPrefixPath(Path src, String prefix) { + StringContainer sc = new StringContainer(SystemConstant.PATH_SEPARATOR); + sc.addTail(prefix); + sc.addTail(src); + return new Path(sc); + } + + /** + * add {@code prefix} as the prefix of {@code src}. + * + * @param src to be added. + * @param prefix the newly prefix + * @return Path + */ + public static Path addPrefixPath(Path src, Path prefix) { + return addPrefixPath(src, prefix.fullPath); + } + + /** + * replace prefix of descPrefix with given parameter {@code srcPrefix}. If the level of the path + * constructed by {@code srcPrefix} is larger than {@code descPrefix}, return {@code + * srcPrefix} directly. + * + * @param srcPrefix the prefix to replace descPrefix + * @param descPrefix to be replaced + * @return If the level of the path constructed by {@code srcPrefix} is larger than + * {@code descPrefix}, return {@code srcPrefix} directly. + */ + public static Path replace(String srcPrefix, Path descPrefix) { + if ("".equals(srcPrefix) || descPrefix.startWith(srcPrefix)) + return descPrefix; + int prefixSize = srcPrefix.split(SystemConstant.PATH_SEPARATER_NO_REGEX).length; + String[] descArray = descPrefix.fullPath.split(SystemConstant.PATH_SEPARATER_NO_REGEX); + if (descArray.length <= prefixSize) + return new Path(srcPrefix); + StringContainer sc = new StringContainer(SystemConstant.PATH_SEPARATOR); + sc.addTail(srcPrefix); + for (int i = prefixSize; i < descArray.length; i++) { + sc.addTail(descArray[i]); + } + return new Path(sc); + } + + /** + * replace prefix of {@code descPrefix} with given parameter {@code srcPrefix}. If the level of + * {@code srcPrefix} is larger than {@code descPrefix}, return {@code srcPrefix} directly. + * + * @param srcPrefix the prefix to replace descPrefix + * @param descPrefix to be replaced + * @return If the level of {@code srcPrefix} is larger than {@code descPrefix}, return + * {@code srcPrefix} directly. 
+ */ + public static Path replace(Path srcPrefix, Path descPrefix) { + return replace(srcPrefix.fullPath, descPrefix); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/basis/ReadOnlyTsFile.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/basis/ReadOnlyTsFile.java index 217c3647..5e3a006a 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/basis/ReadOnlyTsFile.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/basis/ReadOnlyTsFile.java @@ -8,7 +8,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.query.QueryDataSet; import cn.edu.tsinghua.tsfile.timeseries.readV2.query.QueryExpression; import cn.edu.tsinghua.tsfile.timeseries.readV2.query.impl.QueryExecutorRouter; - import java.io.IOException; /** @@ -16,23 +15,23 @@ */ public class ReadOnlyTsFile { - private ITsRandomAccessFileReader randomAccessFileReader; - private MetadataQuerier metadataQuerier; - private SeriesChunkLoader seriesChunkLoader; - private QueryExecutorRouter queryExecutorRouter; + private ITsRandomAccessFileReader randomAccessFileReader; + private MetadataQuerier metadataQuerier; + private SeriesChunkLoader seriesChunkLoader; + private QueryExecutorRouter queryExecutorRouter; - public ReadOnlyTsFile(ITsRandomAccessFileReader randomAccessFileReader) throws IOException { - this.randomAccessFileReader = randomAccessFileReader; - this.metadataQuerier = new MetadataQuerierByFileImpl(randomAccessFileReader); - this.seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); - queryExecutorRouter = new QueryExecutorRouter(metadataQuerier, seriesChunkLoader); - } + public ReadOnlyTsFile(ITsRandomAccessFileReader randomAccessFileReader) throws IOException { + this.randomAccessFileReader = randomAccessFileReader; + this.metadataQuerier = new MetadataQuerierByFileImpl(randomAccessFileReader); + this.seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); + queryExecutorRouter = new QueryExecutorRouter(metadataQuerier, seriesChunkLoader); + } - public QueryDataSet query(QueryExpression queryExpression) throws IOException { - return queryExecutorRouter.execute(queryExpression); - } + public QueryDataSet query(QueryExpression queryExpression) throws IOException { + return queryExecutorRouter.execute(queryExpression); + } - public void close() throws IOException { - randomAccessFileReader.close(); - } + public void close() throws IOException { + randomAccessFileReader.close(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/EncodedSeriesChunkDescriptor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/EncodedSeriesChunkDescriptor.java index 064494e7..9b021a07 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/EncodedSeriesChunkDescriptor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/EncodedSeriesChunkDescriptor.java @@ -3,133 +3,132 @@ import cn.edu.tsinghua.tsfile.file.metadata.TsDigest; import cn.edu.tsinghua.tsfile.file.metadata.enums.CompressionTypeName; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; - import java.util.List; /** * Created by zhangjinrui on 2017/12/25. 
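For orientation while reading this purely stylistic reformat, a minimal usage sketch of the read path touched above (Path, ReadOnlyTsFile, QueryExpression, QueryDataSet, RowRecord) follows. It is illustrative only: TsRandomAccessLocalFileReader and the '.' path separator are assumptions not shown in this hunk, and imports from the packages above are omitted.

    // Illustrative sketch; the concrete ITsRandomAccessFileReader implementation is assumed.
    ITsRandomAccessFileReader reader = new TsRandomAccessLocalFileReader("test.tsfile");
    ReadOnlyTsFile readOnlyTsFile = new ReadOnlyTsFile(reader);

    // Assuming '.' is the configured separator, new Path("device_1.sensor_1") splits into
    // deltaObject "device_1" and measurement "sensor_1" (see Path.init above).
    QueryExpression expression = QueryExpression.create()
        .addSelectedPath(new Path("device_1.sensor_1"));

    QueryDataSet dataSet = readOnlyTsFile.query(expression);
    while (dataSet.hasNext()) {
      RowRecord record = dataSet.next(); // prints as "[Timestamp]:t\t[device_1.sensor_1]:value"
      System.out.println(record);
    }
    readOnlyTsFile.close();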
*/ public class EncodedSeriesChunkDescriptor implements SeriesChunkDescriptor { - public static final char UUID_SPLITER = '.'; - private String filePath; - private long offsetInFile; - private long lengthOfBytes; - private CompressionTypeName compressionTypeName; - private TSDataType dataType; - private TsDigest valueDigest; - private long minTimestamp; - private long maxTimestamp; - private long countOfPoints; - private List enumValueList; - private long maxTombstoneTime; - - public EncodedSeriesChunkDescriptor(long offsetInFile, long lengthOfBytes, CompressionTypeName compressionTypeName, - TSDataType dataType, TsDigest valueDigest, long minTimestamp, long maxTimestamp, long countOfPoints) { - this.offsetInFile = offsetInFile; - this.lengthOfBytes = lengthOfBytes; - this.compressionTypeName = compressionTypeName; - this.dataType = dataType; - this.valueDigest = valueDigest; - this.minTimestamp = minTimestamp; - this.maxTimestamp = maxTimestamp; - this.countOfPoints = countOfPoints; - } - - public EncodedSeriesChunkDescriptor(long offsetInFile, long lengthOfBytes, CompressionTypeName compressionTypeName, - TSDataType dataType, TsDigest valueDigest, long minTimestamp, long maxTimestamp, long countOfPoints, List enumValueList) { - this(offsetInFile, lengthOfBytes, compressionTypeName, dataType, valueDigest, minTimestamp, maxTimestamp, countOfPoints); - this.enumValueList = enumValueList; - } - - public EncodedSeriesChunkDescriptor(String filePath, long offsetInFile, long lengthOfBytes, CompressionTypeName compressionTypeName, - TSDataType dataType, TsDigest valueDigest, long minTimestamp, long maxTimestamp, long countOfPoints) { - this(offsetInFile, lengthOfBytes, compressionTypeName, dataType, valueDigest, minTimestamp, maxTimestamp, countOfPoints); - this.filePath = filePath; - } - - public EncodedSeriesChunkDescriptor(String filePath, long offsetInFile, long lengthOfBytes, CompressionTypeName compressionTypeName, - TSDataType dataType, TsDigest valueDigest, long minTimestamp, long maxTimestamp, long countOfPoints, List enumValueList) { - this(filePath, offsetInFile, lengthOfBytes, compressionTypeName, dataType, valueDigest, minTimestamp, maxTimestamp, countOfPoints); - this.enumValueList = enumValueList; - } - - public boolean equals(Object object) { - if (!(object instanceof EncodedSeriesChunkDescriptor)) { - return false; - } - return getUUID().equals(((EncodedSeriesChunkDescriptor) object).getUUID()); - } - - public int hashCode() { - return getUUID().hashCode(); - } - - private String getUUID() { - return new StringBuilder().append(filePath).append(UUID_SPLITER).append(offsetInFile) - .append(UUID_SPLITER).append(lengthOfBytes).toString(); - } - - public String getFilePath() { - return filePath; - } - - public long getOffsetInFile() { - return offsetInFile; - } - - public long getLengthOfBytes() { - return lengthOfBytes; - } - - public CompressionTypeName getCompressionTypeName() { - return compressionTypeName; - } - - public TSDataType getDataType() { - return dataType; - } - - public TsDigest getValueDigest() { - return valueDigest; - } - - public long getMinTimestamp() { - return minTimestamp; - } - - public long getMaxTimestamp() { - return maxTimestamp; - } - - public long getCountOfPoints() { - return countOfPoints; - } - - public List getEnumValueList() { - return enumValueList; - } - - @Override - public String toString() { - return "EncodedSeriesChunkDescriptor{" + - "filePath='" + filePath + '\'' + - ", offsetInFile=" + offsetInFile + - ", lengthOfBytes=" + lengthOfBytes + 
- ", compressionTypeName=" + compressionTypeName + - ", dataType=" + dataType + - ", valueDigest=" + valueDigest + - ", minTimestamp=" + minTimestamp + - ", maxTimestamp=" + maxTimestamp + - ", countOfPoints=" + countOfPoints + - ", enumValueList=" + enumValueList + - '}'; - } - - public long getMaxTombstoneTime() { - return maxTombstoneTime; - } - - public void setMaxTombstoneTime(long maxTombstoneTime) { - this.maxTombstoneTime = maxTombstoneTime; - } + public static final char UUID_SPLITER = '.'; + private String filePath; + private long offsetInFile; + private long lengthOfBytes; + private CompressionTypeName compressionTypeName; + private TSDataType dataType; + private TsDigest valueDigest; + private long minTimestamp; + private long maxTimestamp; + private long countOfPoints; + private List enumValueList; + private long maxTombstoneTime; + + public EncodedSeriesChunkDescriptor(long offsetInFile, long lengthOfBytes, + CompressionTypeName compressionTypeName, TSDataType dataType, TsDigest valueDigest, + long minTimestamp, long maxTimestamp, long countOfPoints) { + this.offsetInFile = offsetInFile; + this.lengthOfBytes = lengthOfBytes; + this.compressionTypeName = compressionTypeName; + this.dataType = dataType; + this.valueDigest = valueDigest; + this.minTimestamp = minTimestamp; + this.maxTimestamp = maxTimestamp; + this.countOfPoints = countOfPoints; + } + + public EncodedSeriesChunkDescriptor(long offsetInFile, long lengthOfBytes, + CompressionTypeName compressionTypeName, TSDataType dataType, TsDigest valueDigest, + long minTimestamp, long maxTimestamp, long countOfPoints, List enumValueList) { + this(offsetInFile, lengthOfBytes, compressionTypeName, dataType, valueDigest, minTimestamp, + maxTimestamp, countOfPoints); + this.enumValueList = enumValueList; + } + + public EncodedSeriesChunkDescriptor(String filePath, long offsetInFile, long lengthOfBytes, + CompressionTypeName compressionTypeName, TSDataType dataType, TsDigest valueDigest, + long minTimestamp, long maxTimestamp, long countOfPoints) { + this(offsetInFile, lengthOfBytes, compressionTypeName, dataType, valueDigest, minTimestamp, + maxTimestamp, countOfPoints); + this.filePath = filePath; + } + + public EncodedSeriesChunkDescriptor(String filePath, long offsetInFile, long lengthOfBytes, + CompressionTypeName compressionTypeName, TSDataType dataType, TsDigest valueDigest, + long minTimestamp, long maxTimestamp, long countOfPoints, List enumValueList) { + this(filePath, offsetInFile, lengthOfBytes, compressionTypeName, dataType, valueDigest, + minTimestamp, maxTimestamp, countOfPoints); + this.enumValueList = enumValueList; + } + + public boolean equals(Object object) { + if (!(object instanceof EncodedSeriesChunkDescriptor)) { + return false; + } + return getUUID().equals(((EncodedSeriesChunkDescriptor) object).getUUID()); + } + + public int hashCode() { + return getUUID().hashCode(); + } + + private String getUUID() { + return new StringBuilder().append(filePath).append(UUID_SPLITER).append(offsetInFile) + .append(UUID_SPLITER).append(lengthOfBytes).toString(); + } + + public String getFilePath() { + return filePath; + } + + public long getOffsetInFile() { + return offsetInFile; + } + + public long getLengthOfBytes() { + return lengthOfBytes; + } + + public CompressionTypeName getCompressionTypeName() { + return compressionTypeName; + } + + public TSDataType getDataType() { + return dataType; + } + + public TsDigest getValueDigest() { + return valueDigest; + } + + public long getMinTimestamp() { + return minTimestamp; + 
} + + public long getMaxTimestamp() { + return maxTimestamp; + } + + public long getCountOfPoints() { + return countOfPoints; + } + + public List getEnumValueList() { + return enumValueList; + } + + @Override + public String toString() { + return "EncodedSeriesChunkDescriptor{" + "filePath='" + filePath + '\'' + ", offsetInFile=" + + offsetInFile + ", lengthOfBytes=" + lengthOfBytes + ", compressionTypeName=" + + compressionTypeName + ", dataType=" + dataType + ", valueDigest=" + valueDigest + + ", minTimestamp=" + minTimestamp + ", maxTimestamp=" + maxTimestamp + ", countOfPoints=" + + countOfPoints + ", enumValueList=" + enumValueList + '}'; + } + + public long getMaxTombstoneTime() { + return maxTombstoneTime; + } + + public void setMaxTombstoneTime(long maxTombstoneTime) { + this.maxTombstoneTime = maxTombstoneTime; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/MemSeriesChunk.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/MemSeriesChunk.java index 590a29c7..79ac4b82 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/MemSeriesChunk.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/MemSeriesChunk.java @@ -5,20 +5,21 @@ /** * Created by zhangjinrui on 2017/12/25. */ -public class MemSeriesChunk implements SeriesChunk{ - private EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor; - private ByteArrayInputStream seriesChunkBodyStream; +public class MemSeriesChunk implements SeriesChunk { + private EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor; + private ByteArrayInputStream seriesChunkBodyStream; - public MemSeriesChunk(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor, ByteArrayInputStream seriesChunkBodyStream) { - this.encodedSeriesChunkDescriptor = encodedSeriesChunkDescriptor; - this.seriesChunkBodyStream = seriesChunkBodyStream; - } + public MemSeriesChunk(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor, + ByteArrayInputStream seriesChunkBodyStream) { + this.encodedSeriesChunkDescriptor = encodedSeriesChunkDescriptor; + this.seriesChunkBodyStream = seriesChunkBodyStream; + } - public EncodedSeriesChunkDescriptor getEncodedSeriesChunkDescriptor() { - return encodedSeriesChunkDescriptor; - } + public EncodedSeriesChunkDescriptor getEncodedSeriesChunkDescriptor() { + return encodedSeriesChunkDescriptor; + } - public ByteArrayInputStream getSeriesChunkBodyStream() { - return seriesChunkBodyStream; - } + public ByteArrayInputStream getSeriesChunkBodyStream() { + return seriesChunkBodyStream; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/SeriesChunk.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/SeriesChunk.java index 96160f53..5384456e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/SeriesChunk.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/SeriesChunk.java @@ -7,7 +7,7 @@ */ public interface SeriesChunk { - SeriesChunkDescriptor getEncodedSeriesChunkDescriptor(); + SeriesChunkDescriptor getEncodedSeriesChunkDescriptor(); - InputStream getSeriesChunkBodyStream(); + InputStream getSeriesChunkBodyStream(); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/SeriesChunkDescriptor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/SeriesChunkDescriptor.java index c501a853..99b752a4 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/SeriesChunkDescriptor.java +++ 
b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/SeriesChunkDescriptor.java @@ -8,15 +8,15 @@ * Created by zhangjinrui on 2018/1/15. */ public interface SeriesChunkDescriptor { - TSDataType getDataType(); + TSDataType getDataType(); - TsDigest getValueDigest(); + TsDigest getValueDigest(); - long getMinTimestamp(); + long getMinTimestamp(); - long getMaxTimestamp(); + long getMaxTimestamp(); - long getCountOfPoints(); + long getCountOfPoints(); - CompressionTypeName getCompressionTypeName(); + CompressionTypeName getCompressionTypeName(); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/SeriesDescriptor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/SeriesDescriptor.java index 23519da4..9140ae1e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/SeriesDescriptor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/common/SeriesDescriptor.java @@ -7,31 +7,31 @@ * @author Jinrui Zhang */ public class SeriesDescriptor { - private Path path; - private TSDataType dataType; + private Path path; + private TSDataType dataType; - public SeriesDescriptor(Path path, TSDataType dataType) { - this.path = path; - this.dataType = dataType; - } + public SeriesDescriptor(Path path, TSDataType dataType) { + this.path = path; + this.dataType = dataType; + } - public Path getPath() { - return path; - } + public Path getPath() { + return path; + } - public void setPath(Path path) { - this.path = path; - } + public void setPath(Path path) { + this.path = path; + } - public TSDataType getDataType() { - return dataType; - } + public TSDataType getDataType() { + return dataType; + } - public void setDataType(TSDataType dataType) { - this.dataType = dataType; - } + public void setDataType(TSDataType dataType) { + this.dataType = dataType; + } - public String toString() { - return this.path.toString(); - } + public String toString() { + return this.path.toString(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/MetadataQuerier.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/MetadataQuerier.java index 81ee817d..f6ca1132 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/MetadataQuerier.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/MetadataQuerier.java @@ -2,7 +2,6 @@ import cn.edu.tsinghua.tsfile.timeseries.read.support.Path; import cn.edu.tsinghua.tsfile.timeseries.readV2.common.EncodedSeriesChunkDescriptor; - import java.io.IOException; import java.util.List; @@ -11,6 +10,6 @@ */ public interface MetadataQuerier { - List getSeriesChunkDescriptorList(Path path) throws IOException; + List getSeriesChunkDescriptorList(Path path) throws IOException; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/MetadataQuerierByFileImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/MetadataQuerierByFileImpl.java index 0f856be1..dfb0f737 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/MetadataQuerierByFileImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/MetadataQuerierByFileImpl.java @@ -9,7 +9,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.common.EncodedSeriesChunkDescriptor; import cn.edu.tsinghua.tsfile.timeseries.utils.cache.LRUCache; import cn.edu.tsinghua.tsfile.timeseries.write.io.TsFileIOWriter; - import java.io.ByteArrayInputStream; import java.io.IOException; import 
java.util.ArrayList; @@ -19,103 +18,117 @@ * Created by zhangjinrui on 2017/12/25. */ public class MetadataQuerierByFileImpl implements MetadataQuerier { - private static final int FOOTER_LENGTH = 4; - private static final int MAGIC_LENGTH = TsFileIOWriter.magicStringBytes.length; - private static final int ROWGROUP_METADATA_CACHE_SIZE = 1000; //TODO: how to specify this value - private static final int SERIESCHUNK_DESCRIPTOR_CACHE_SIZE = 100000; + private static final int FOOTER_LENGTH = 4; + private static final int MAGIC_LENGTH = TsFileIOWriter.magicStringBytes.length; + private static final int ROWGROUP_METADATA_CACHE_SIZE = 1000; // TODO: how to specify this value + private static final int SERIESCHUNK_DESCRIPTOR_CACHE_SIZE = 100000; - private ITsRandomAccessFileReader randomAccessFileReader; - private TsFileMetaData fileMetaData; + private ITsRandomAccessFileReader randomAccessFileReader; + private TsFileMetaData fileMetaData; - private LRUCache> rowGroupMetadataCache; - private LRUCache> seriesChunkDescriptorCache; + private LRUCache> rowGroupMetadataCache; + private LRUCache> seriesChunkDescriptorCache; - public MetadataQuerierByFileImpl(ITsRandomAccessFileReader randomAccessFileReader) throws IOException { - this.randomAccessFileReader = randomAccessFileReader; - initFileMetadata(); - rowGroupMetadataCache = new LRUCache>(ROWGROUP_METADATA_CACHE_SIZE) { - @Override - public void beforeRemove(List object) { - return; - } + public MetadataQuerierByFileImpl(ITsRandomAccessFileReader randomAccessFileReader) + throws IOException { + this.randomAccessFileReader = randomAccessFileReader; + initFileMetadata(); + rowGroupMetadataCache = + new LRUCache>(ROWGROUP_METADATA_CACHE_SIZE) { + @Override + public void beforeRemove(List object) { + return; + } - @Override - public List loadObjectByKey(String key) throws CacheException { - try { - return loadRowGroupMetadata(key); - } catch (IOException e) { - throw new CacheException(e); - } + @Override + public List loadObjectByKey(String key) throws CacheException { + try { + return loadRowGroupMetadata(key); + } catch (IOException e) { + throw new CacheException(e); } + } }; - seriesChunkDescriptorCache = new LRUCache>(SERIESCHUNK_DESCRIPTOR_CACHE_SIZE) { - @Override - public void beforeRemove(List object) throws CacheException { - return; - } + seriesChunkDescriptorCache = + new LRUCache>(SERIESCHUNK_DESCRIPTOR_CACHE_SIZE) { + @Override + public void beforeRemove(List object) + throws CacheException { + return; + } - @Override - public List loadObjectByKey(Path key) throws CacheException { - return loadSeriesChunkDescriptor(key); - } + @Override + public List loadObjectByKey(Path key) + throws CacheException { + return loadSeriesChunkDescriptor(key); + } }; - } + } - private void initFileMetadata() throws IOException { - long l = randomAccessFileReader.length(); - randomAccessFileReader.seek(l - MAGIC_LENGTH - FOOTER_LENGTH); - int fileMetaDataLength = randomAccessFileReader.readInt(); - randomAccessFileReader.seek(l - MAGIC_LENGTH - FOOTER_LENGTH - fileMetaDataLength); - byte[] buf = new byte[fileMetaDataLength]; - randomAccessFileReader.read(buf, 0, buf.length); + private void initFileMetadata() throws IOException { + long l = randomAccessFileReader.length(); + randomAccessFileReader.seek(l - MAGIC_LENGTH - FOOTER_LENGTH); + int fileMetaDataLength = randomAccessFileReader.readInt(); + randomAccessFileReader.seek(l - MAGIC_LENGTH - FOOTER_LENGTH - fileMetaDataLength); + byte[] buf = new byte[fileMetaDataLength]; + 
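    // Illustrative note (inferred from the seek/read arithmetic above, not stated in the patch):
    // the file tail is assumed to be laid out as
    //   [file metadata: fileMetaDataLength bytes][4-byte length field][magic string bytes]
    // so (length - MAGIC_LENGTH - FOOTER_LENGTH) addresses the length field and
    // (length - MAGIC_LENGTH - FOOTER_LENGTH - fileMetaDataLength) addresses the metadata start.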
randomAccessFileReader.read(buf, 0, buf.length); - ByteArrayInputStream metadataInputStream = new ByteArrayInputStream(buf); - this.fileMetaData = new TsFileMetaDataConverter().toTsFileMetadata(ReadWriteThriftFormatUtils.readFileMetaData(metadataInputStream)); - } + ByteArrayInputStream metadataInputStream = new ByteArrayInputStream(buf); + this.fileMetaData = new TsFileMetaDataConverter() + .toTsFileMetadata(ReadWriteThriftFormatUtils.readFileMetaData(metadataInputStream)); + } - @Override - public List getSeriesChunkDescriptorList(Path path) throws IOException { - try { - return seriesChunkDescriptorCache.get(path); - } catch (CacheException e) { - throw new IOException(String.format("Get SeriesChunkDescriptorList for Path[%s] Error.", path), e); - } + @Override + public List getSeriesChunkDescriptorList(Path path) + throws IOException { + try { + return seriesChunkDescriptorCache.get(path); + } catch (CacheException e) { + throw new IOException( + String.format("Get SeriesChunkDescriptorList for Path[%s] Error.", path), e); } + } - private List loadSeriesChunkDescriptor(Path path) throws CacheException { - List rowGroupMetaDataList = rowGroupMetadataCache.get(path.getDeltaObjectToString()); - List encodedSeriesChunkDescriptorList = new ArrayList<>(); - for (RowGroupMetaData rowGroupMetaData : rowGroupMetaDataList) { - List timeSeriesChunkMetaDataListInOneRowGroup = rowGroupMetaData.getTimeSeriesChunkMetaDataList(); - for (TimeSeriesChunkMetaData timeSeriesChunkMetaData : timeSeriesChunkMetaDataListInOneRowGroup) { - if (path.getMeasurementToString().equals(timeSeriesChunkMetaData.getProperties().getMeasurementUID())) { - encodedSeriesChunkDescriptorList.add(generateSeriesChunkDescriptorByMetadata(timeSeriesChunkMetaData)); - } - } + private List loadSeriesChunkDescriptor(Path path) + throws CacheException { + List rowGroupMetaDataList = + rowGroupMetadataCache.get(path.getDeltaObjectToString()); + List encodedSeriesChunkDescriptorList = new ArrayList<>(); + for (RowGroupMetaData rowGroupMetaData : rowGroupMetaDataList) { + List timeSeriesChunkMetaDataListInOneRowGroup = + rowGroupMetaData.getTimeSeriesChunkMetaDataList(); + for (TimeSeriesChunkMetaData timeSeriesChunkMetaData : timeSeriesChunkMetaDataListInOneRowGroup) { + if (path.getMeasurementToString() + .equals(timeSeriesChunkMetaData.getProperties().getMeasurementUID())) { + encodedSeriesChunkDescriptorList + .add(generateSeriesChunkDescriptorByMetadata(timeSeriesChunkMetaData)); } - return encodedSeriesChunkDescriptorList; + } } + return encodedSeriesChunkDescriptorList; + } - private EncodedSeriesChunkDescriptor generateSeriesChunkDescriptorByMetadata(TimeSeriesChunkMetaData timeSeriesChunkMetaData) { - EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor = new EncodedSeriesChunkDescriptor( - timeSeriesChunkMetaData.getProperties().getFileOffset(), - timeSeriesChunkMetaData.getTotalByteSize(), - timeSeriesChunkMetaData.getProperties().getCompression(), - timeSeriesChunkMetaData.getVInTimeSeriesChunkMetaData().getDataType(), - timeSeriesChunkMetaData.getVInTimeSeriesChunkMetaData().getDigest(), - timeSeriesChunkMetaData.getTInTimeSeriesChunkMetaData().getStartTime(), - timeSeriesChunkMetaData.getTInTimeSeriesChunkMetaData().getEndTime(), - timeSeriesChunkMetaData.getNumRows(), - timeSeriesChunkMetaData.getVInTimeSeriesChunkMetaData().getEnumValues()); - return encodedSeriesChunkDescriptor; - } + private EncodedSeriesChunkDescriptor generateSeriesChunkDescriptorByMetadata( + TimeSeriesChunkMetaData timeSeriesChunkMetaData) { 
+ EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor = + new EncodedSeriesChunkDescriptor(timeSeriesChunkMetaData.getProperties().getFileOffset(), + timeSeriesChunkMetaData.getTotalByteSize(), + timeSeriesChunkMetaData.getProperties().getCompression(), + timeSeriesChunkMetaData.getVInTimeSeriesChunkMetaData().getDataType(), + timeSeriesChunkMetaData.getVInTimeSeriesChunkMetaData().getDigest(), + timeSeriesChunkMetaData.getTInTimeSeriesChunkMetaData().getStartTime(), + timeSeriesChunkMetaData.getTInTimeSeriesChunkMetaData().getEndTime(), + timeSeriesChunkMetaData.getNumRows(), + timeSeriesChunkMetaData.getVInTimeSeriesChunkMetaData().getEnumValues()); + return encodedSeriesChunkDescriptor; + } - private List loadRowGroupMetadata(String deltaObjectID) throws IOException { - TsDeltaObject deltaObject = fileMetaData.getDeltaObject(deltaObjectID); - TsRowGroupBlockMetaData rowGroupBlockMetaData = new TsRowGroupBlockMetaData(); - rowGroupBlockMetaData.convertToTSF(ReadWriteThriftFormatUtils.readRowGroupBlockMetaData(this.randomAccessFileReader, - deltaObject.offset, deltaObject.metadataBlockSize)); - return rowGroupBlockMetaData.getRowGroups(); - } + private List loadRowGroupMetadata(String deltaObjectID) throws IOException { + TsDeltaObject deltaObject = fileMetaData.getDeltaObject(deltaObjectID); + TsRowGroupBlockMetaData rowGroupBlockMetaData = new TsRowGroupBlockMetaData(); + rowGroupBlockMetaData.convertToTSF(ReadWriteThriftFormatUtils.readRowGroupBlockMetaData( + this.randomAccessFileReader, deltaObject.offset, deltaObject.metadataBlockSize)); + return rowGroupBlockMetaData.getRowGroups(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/SeriesChunkLoader.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/SeriesChunkLoader.java index cd93ea52..1519fabc 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/SeriesChunkLoader.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/SeriesChunkLoader.java @@ -2,12 +2,12 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.common.SeriesChunk; import cn.edu.tsinghua.tsfile.timeseries.readV2.common.EncodedSeriesChunkDescriptor; - import java.io.IOException; /** * Created by zhangjinrui on 2017/12/26. */ public interface SeriesChunkLoader { - SeriesChunk getMemSeriesChunk(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) throws IOException; + SeriesChunk getMemSeriesChunk(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) + throws IOException; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/SeriesChunkLoaderImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/SeriesChunkLoaderImpl.java index 3a975e53..5d35b9ea 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/SeriesChunkLoaderImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/SeriesChunkLoaderImpl.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.common.EncodedSeriesChunkDescriptor; import cn.edu.tsinghua.tsfile.timeseries.readV2.common.MemSeriesChunk; import cn.edu.tsinghua.tsfile.timeseries.utils.cache.LRUCache; - import java.io.ByteArrayInputStream; import java.io.IOException; @@ -13,49 +12,53 @@ * Created by zhangjinrui on 2017/12/25. 
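Both MetadataQuerierByFileImpl above and SeriesChunkLoaderImpl below lean on the same lazy-loading cache idiom: an anonymous LRUCache subclass whose get(key) invokes loadObjectByKey(key) on a miss and beforeRemove(value) before eviction. A minimal sketch of that idiom, assuming the LRUCache<K, V> generics and CacheException contract used in this diff:

    // Illustrative only: the lazy LRUCache idiom shared by the readV2 controllers in this patch.
    LRUCache<String, byte[]> bytesCache = new LRUCache<String, byte[]>(100) {
      @Override
      public void beforeRemove(byte[] value) throws CacheException {
        // nothing to release for a plain byte array
      }

      @Override
      public byte[] loadObjectByKey(String key) throws CacheException {
        // real callers read from the TsFile here; this placeholder just echoes the key
        return key.getBytes();
      }
    };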
*/ public class SeriesChunkLoaderImpl implements SeriesChunkLoader { - private static final int DEFAULT_MEMSERISCHUNK_CACHE_SIZE = 100; - private ITsRandomAccessFileReader randomAccessFileReader; - private LRUCache seriesChunkBytesCache; - - public SeriesChunkLoaderImpl(ITsRandomAccessFileReader randomAccessFileReader) { - this(randomAccessFileReader, DEFAULT_MEMSERISCHUNK_CACHE_SIZE); - } + private static final int DEFAULT_MEMSERISCHUNK_CACHE_SIZE = 100; + private ITsRandomAccessFileReader randomAccessFileReader; + private LRUCache seriesChunkBytesCache; - public SeriesChunkLoaderImpl(ITsRandomAccessFileReader randomAccessFileReader, int cacheSize) { - this.randomAccessFileReader = randomAccessFileReader; - seriesChunkBytesCache = new LRUCache(cacheSize) { - @Override - public void beforeRemove(byte[] object) throws CacheException { - return; - } + public SeriesChunkLoaderImpl(ITsRandomAccessFileReader randomAccessFileReader) { + this(randomAccessFileReader, DEFAULT_MEMSERISCHUNK_CACHE_SIZE); + } - @Override - public byte[] loadObjectByKey(EncodedSeriesChunkDescriptor key) throws CacheException { - try { - return load(key); - } catch (IOException e) { - throw new CacheException(e); - } - } - }; - } + public SeriesChunkLoaderImpl(ITsRandomAccessFileReader randomAccessFileReader, int cacheSize) { + this.randomAccessFileReader = randomAccessFileReader; + seriesChunkBytesCache = new LRUCache(cacheSize) { + @Override + public void beforeRemove(byte[] object) throws CacheException { + return; + } - public MemSeriesChunk getMemSeriesChunk(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) throws IOException { + @Override + public byte[] loadObjectByKey(EncodedSeriesChunkDescriptor key) throws CacheException { try { - return new MemSeriesChunk(encodedSeriesChunkDescriptor, new ByteArrayInputStream(seriesChunkBytesCache.get(encodedSeriesChunkDescriptor))); - } catch (CacheException e) { - throw new IOException(e); + return load(key); + } catch (IOException e) { + throw new CacheException(e); } + } + }; + } + + public MemSeriesChunk getMemSeriesChunk(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) + throws IOException { + try { + return new MemSeriesChunk(encodedSeriesChunkDescriptor, + new ByteArrayInputStream(seriesChunkBytesCache.get(encodedSeriesChunkDescriptor))); + } catch (CacheException e) { + throw new IOException(e); } + } - private byte[] load(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) throws IOException { - int seriesChunkLength = (int) encodedSeriesChunkDescriptor.getLengthOfBytes(); - byte[] buf = new byte[seriesChunkLength]; - randomAccessFileReader.seek(encodedSeriesChunkDescriptor.getOffsetInFile()); - int readLength = randomAccessFileReader.read(buf, 0, seriesChunkLength); - if (readLength != seriesChunkLength) { - throw new IOException("length of seriesChunk read from file is not right. Expected:" + seriesChunkLength + ". Actual: " + readLength); - } - return buf; + private byte[] load(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) + throws IOException { + int seriesChunkLength = (int) encodedSeriesChunkDescriptor.getLengthOfBytes(); + byte[] buf = new byte[seriesChunkLength]; + randomAccessFileReader.seek(encodedSeriesChunkDescriptor.getOffsetInFile()); + int readLength = randomAccessFileReader.read(buf, 0, seriesChunkLength); + if (readLength != seriesChunkLength) { + throw new IOException("length of seriesChunk read from file is not right. Expected:" + + seriesChunkLength + ". 
Actual: " + readLength); } + return buf; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/datatype/RowRecord.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/datatype/RowRecord.java index f8038798..d66ea8ce 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/datatype/RowRecord.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/datatype/RowRecord.java @@ -1,7 +1,6 @@ package cn.edu.tsinghua.tsfile.timeseries.readV2.datatype; import cn.edu.tsinghua.tsfile.timeseries.read.support.Path; - import java.util.LinkedHashMap; import java.util.Map; @@ -9,43 +8,43 @@ * Created by zhangjinrui on 2017/12/26. */ public class RowRecord { - private long timestamp; - private LinkedHashMap fields; + private long timestamp; + private LinkedHashMap fields; - public RowRecord() { - fields = new LinkedHashMap<>(); - } + public RowRecord() { + fields = new LinkedHashMap<>(); + } - public RowRecord(long timestamp) { - this(); - this.timestamp = timestamp; - } + public RowRecord(long timestamp) { + this(); + this.timestamp = timestamp; + } - public void putField(Path path, TsPrimitiveType tsPrimitiveType) { - fields.put(path, tsPrimitiveType); - } + public void putField(Path path, TsPrimitiveType tsPrimitiveType) { + fields.put(path, tsPrimitiveType); + } - public long getTimestamp() { - return timestamp; - } + public long getTimestamp() { + return timestamp; + } - public void setTimestamp(long timestamp) { - this.timestamp = timestamp; - } + public void setTimestamp(long timestamp) { + this.timestamp = timestamp; + } - public LinkedHashMap getFields() { - return fields; - } + public LinkedHashMap getFields() { + return fields; + } - public void setFields(LinkedHashMap fields) { - this.fields = fields; - } + public void setFields(LinkedHashMap fields) { + this.fields = fields; + } - public String toString() { - StringBuilder stringBuilder = new StringBuilder("[Timestamp]:").append(timestamp); - for (Map.Entry entry : fields.entrySet()) { - stringBuilder.append("\t[").append(entry.getKey()).append("]:").append(entry.getValue()); - } - return stringBuilder.toString(); + public String toString() { + StringBuilder stringBuilder = new StringBuilder("[Timestamp]:").append(timestamp); + for (Map.Entry entry : fields.entrySet()) { + stringBuilder.append("\t[").append(entry.getKey()).append("]:").append(entry.getValue()); } + return stringBuilder.toString(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/datatype/TimeValuePair.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/datatype/TimeValuePair.java index ac5042de..a153f406 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/datatype/TimeValuePair.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/datatype/TimeValuePair.java @@ -5,47 +5,47 @@ /** * @author Jinrui Zhang */ -public class TimeValuePair implements Serializable{ - private long timestamp; - private TsPrimitiveType value; - - public TimeValuePair(long timestamp, TsPrimitiveType value) { - this.timestamp = timestamp; - this.value = value; - } - - public long getTimestamp() { - return timestamp; - } - - public void setTimestamp(long timestamp) { - this.timestamp = timestamp; - } - - public TsPrimitiveType getValue() { - return value; - } - - public void setValue(TsPrimitiveType value) { - this.value = value; - } - - public String toString() { - StringBuilder stringBuilder = new StringBuilder(); - stringBuilder.append(timestamp).append(" : ").append(getValue()); - return 
stringBuilder.toString(); - } - - public boolean equals(Object object) { - if (object instanceof TimeValuePair) { - return ((TimeValuePair) object).getTimestamp() == timestamp - && ((TimeValuePair) object).getValue() != null - && ((TimeValuePair) object).getValue().equals(value); - } - return false; - } - - public int getSize() { - return 8 + 8 + value.getSize(); - } +public class TimeValuePair implements Serializable { + private long timestamp; + private TsPrimitiveType value; + + public TimeValuePair(long timestamp, TsPrimitiveType value) { + this.timestamp = timestamp; + this.value = value; + } + + public long getTimestamp() { + return timestamp; + } + + public void setTimestamp(long timestamp) { + this.timestamp = timestamp; + } + + public TsPrimitiveType getValue() { + return value; + } + + public void setValue(TsPrimitiveType value) { + this.value = value; + } + + public String toString() { + StringBuilder stringBuilder = new StringBuilder(); + stringBuilder.append(timestamp).append(" : ").append(getValue()); + return stringBuilder.toString(); + } + + public boolean equals(Object object) { + if (object instanceof TimeValuePair) { + return ((TimeValuePair) object).getTimestamp() == timestamp + && ((TimeValuePair) object).getValue() != null + && ((TimeValuePair) object).getValue().equals(value); + } + return false; + } + + public int getSize() { + return 8 + 8 + value.getSize(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/datatype/TsPrimitiveType.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/datatype/TsPrimitiveType.java index 688ff5af..fdcb4d8c 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/datatype/TsPrimitiveType.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/datatype/TsPrimitiveType.java @@ -3,266 +3,266 @@ import cn.edu.tsinghua.tsfile.common.exception.UnSupportedDataTypeException; import cn.edu.tsinghua.tsfile.common.utils.Binary; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; - import java.io.Serializable; /** * @author Jinrui Zhang */ public abstract class TsPrimitiveType implements Serializable { + public boolean getBoolean() { + throw new UnsupportedOperationException("getBoolean() is not supported for current sub-class"); + } + + public int getInt() { + throw new UnsupportedOperationException("getInt() is not supported for current sub-class"); + } + + public long getLong() { + throw new UnsupportedOperationException("getLong() is not supported for current sub-class"); + } + + public float getFloat() { + throw new UnsupportedOperationException("getFloat() is not supported for current sub-class"); + } + + public double getDouble() { + throw new UnsupportedOperationException("getDouble() is not supported for current sub-class"); + } + + public Binary getBinary() { + throw new UnsupportedOperationException("getBinary() is not supported for current sub-class"); + } + + /** + * @return size of one instance of current class + */ + public abstract int getSize(); + + public abstract Object getValue(); + + public abstract String getStringValue(); + + public abstract TSDataType getDataType(); + + public String toString() { + return getStringValue(); + } + + public boolean equals(Object object) { + return (object instanceof TsPrimitiveType) + && (((TsPrimitiveType) object).getValue().equals(getValue())); + } + + + public static class TsBoolean extends TsPrimitiveType { + + public boolean value; + + public TsBoolean(boolean value) { + this.value = value; + } + public boolean getBoolean() 
{ - throw new UnsupportedOperationException("getBoolean() is not supported for current sub-class"); + return value; } - public int getInt() { - throw new UnsupportedOperationException("getInt() is not supported for current sub-class"); + @Override + public int getSize() { + return 4 + 1; } - public long getLong() { - throw new UnsupportedOperationException("getLong() is not supported for current sub-class"); + @Override + public Object getValue() { + return value; } - public float getFloat() { - throw new UnsupportedOperationException("getFloat() is not supported for current sub-class"); + @Override + public String getStringValue() { + return String.valueOf(value); } - public double getDouble() { - throw new UnsupportedOperationException("getDouble() is not supported for current sub-class"); + @Override + public TSDataType getDataType() { + return TSDataType.BOOLEAN; } + } - public Binary getBinary() { - throw new UnsupportedOperationException("getBinary() is not supported for current sub-class"); + public static class TsInt extends TsPrimitiveType { + public int value; + + public TsInt(int value) { + this.value = value; } - /** - * @return size of one instance of current class - */ - public abstract int getSize(); + public int getInt() { + return value; + } - public abstract Object getValue(); + @Override + public int getSize() { + return 4 + 4; + } - public abstract String getStringValue(); + @Override + public Object getValue() { + return value; + } - public abstract TSDataType getDataType(); + @Override + public String getStringValue() { + return String.valueOf(value); + } - public String toString() { - return getStringValue(); + @Override + public TSDataType getDataType() { + return TSDataType.INT32; } + } + + public static class TsLong extends TsPrimitiveType { + public long value; - public boolean equals(Object object) { - return (object instanceof TsPrimitiveType) && (((TsPrimitiveType) object).getValue().equals(getValue())); + public TsLong(long value) { + this.value = value; } + public long getLong() { + return value; + } - public static class TsBoolean extends TsPrimitiveType { + @Override + public int getSize() { + return 4 + 8; + } - public boolean value; + @Override + public String getStringValue() { + return String.valueOf(value); + } - public TsBoolean(boolean value) { - this.value = value; - } + @Override + public TSDataType getDataType() { + return TSDataType.INT64; + } - public boolean getBoolean() { - return value; - } + @Override + public Object getValue() { + return value; + } + } - @Override - public int getSize() { - return 4 + 1; - } + public static class TsFloat extends TsPrimitiveType { + public float value; - @Override - public Object getValue() { - return value; - } + public TsFloat(float value) { + this.value = value; + } - @Override - public String getStringValue() { - return String.valueOf(value); - } + public float getFloat() { + return value; + } - @Override - public TSDataType getDataType() { - return TSDataType.BOOLEAN; - } + @Override + public int getSize() { + return 4 + 4; } - public static class TsInt extends TsPrimitiveType { - public int value; + @Override + public Object getValue() { + return value; + } - public TsInt(int value) { - this.value = value; - } + @Override + public String getStringValue() { + return String.valueOf(value); + } - public int getInt() { - return value; - } + @Override + public TSDataType getDataType() { + return TSDataType.FLOAT; + } + } - @Override - public int getSize() { - return 4 + 4; - } + public static class 
TsDouble extends TsPrimitiveType { + public double value; - @Override - public Object getValue() { - return value; - } + public TsDouble(double value) { + this.value = value; + } - @Override - public String getStringValue() { - return String.valueOf(value); - } + public double getDouble() { + return value; + } - @Override - public TSDataType getDataType() { - return TSDataType.INT32; - } + @Override + public int getSize() { + return 4 + 8; } - public static class TsLong extends TsPrimitiveType { - public long value; + @Override + public Object getValue() { + return value; + } - public TsLong(long value) { - this.value = value; - } + @Override + public String getStringValue() { + return String.valueOf(value); + } - public long getLong() { - return value; - } + @Override + public TSDataType getDataType() { + return TSDataType.DOUBLE; + } + } - @Override - public int getSize() { - return 4 + 8; - } + public static class TsBinary extends TsPrimitiveType { + public Binary value; - @Override - public String getStringValue() { - return String.valueOf(value); - } + public TsBinary(Binary value) { + this.value = value; + } - @Override - public TSDataType getDataType() { - return TSDataType.INT64; - } + public Binary getBinary() { + return value; + } + + @Override + public int getSize() { + return 4 + 4 + value.getLength(); + } - @Override - public Object getValue() { - return value; - } + @Override + public Object getValue() { + return value; } - public static class TsFloat extends TsPrimitiveType { - public float value; + @Override + public String getStringValue() { + return String.valueOf(value); + } - public TsFloat(float value) { - this.value = value; - } - - public float getFloat() { - return value; - } - - @Override - public int getSize() { - return 4 + 4; - } - - @Override - public Object getValue() { - return value; - } - - @Override - public String getStringValue() { - return String.valueOf(value); - } - - @Override - public TSDataType getDataType() { - return TSDataType.FLOAT; - } - } - - public static class TsDouble extends TsPrimitiveType { - public double value; - - public TsDouble(double value) { - this.value = value; - } - - public double getDouble() { - return value; - } - - @Override - public int getSize() { - return 4 + 8; - } - - @Override - public Object getValue() { - return value; - } - - @Override - public String getStringValue() { - return String.valueOf(value); - } - - @Override - public TSDataType getDataType() { - return TSDataType.DOUBLE; - } - } - - public static class TsBinary extends TsPrimitiveType { - public Binary value; - - public TsBinary(Binary value) { - this.value = value; - } - - public Binary getBinary() { - return value; - } - - @Override - public int getSize() { - return 4 + 4 + value.getLength(); - } - - @Override - public Object getValue() { - return value; - } - - @Override - public String getStringValue() { - return String.valueOf(value); - } - - @Override - public TSDataType getDataType() { - return TSDataType.TEXT; - } - } - - public static TsPrimitiveType getByType(TSDataType dataType, Object v) { - switch (dataType) { - case BOOLEAN: - return new TsPrimitiveType.TsBoolean((boolean) v); - case INT32: - return new TsPrimitiveType.TsInt((int) v); - case INT64: - return new TsPrimitiveType.TsLong((long) v); - case FLOAT: - return new TsPrimitiveType.TsFloat((float) v); - case DOUBLE: - return new TsPrimitiveType.TsDouble((double) v); - case TEXT: - return new TsPrimitiveType.TsBinary((Binary) v); - default: - throw new 
UnSupportedDataTypeException("Unsupported data type:" + dataType); - } + @Override + public TSDataType getDataType() { + return TSDataType.TEXT; + } + } + + public static TsPrimitiveType getByType(TSDataType dataType, Object v) { + switch (dataType) { + case BOOLEAN: + return new TsPrimitiveType.TsBoolean((boolean) v); + case INT32: + return new TsPrimitiveType.TsInt((int) v); + case INT64: + return new TsPrimitiveType.TsLong((long) v); + case FLOAT: + return new TsPrimitiveType.TsFloat((float) v); + case DOUBLE: + return new TsPrimitiveType.TsDouble((double) v); + case TEXT: + return new TsPrimitiveType.TsBinary((Binary) v); + default: + throw new UnSupportedDataTypeException("Unsupported data type:" + dataType); } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/QueryDataSet.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/QueryDataSet.java index 4e1c1ef5..7a555781 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/QueryDataSet.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/QueryDataSet.java @@ -1,7 +1,6 @@ package cn.edu.tsinghua.tsfile.timeseries.readV2.query; import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.RowRecord; - import java.io.IOException; /** @@ -9,8 +8,8 @@ */ public interface QueryDataSet { - boolean hasNext() throws IOException; + boolean hasNext() throws IOException; - RowRecord next() throws IOException; + RowRecord next() throws IOException; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/QueryExecutor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/QueryExecutor.java index 131b4cef..af692835 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/QueryExecutor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/QueryExecutor.java @@ -7,5 +7,5 @@ */ public interface QueryExecutor { - QueryDataSet execute(QueryExpression queryExpression) throws IOException; + QueryDataSet execute(QueryExpression queryExpression) throws IOException; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/QueryExpression.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/QueryExpression.java index 8f55bc22..85403633 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/QueryExpression.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/QueryExpression.java @@ -2,7 +2,6 @@ import cn.edu.tsinghua.tsfile.timeseries.filterV2.expression.QueryFilter; import cn.edu.tsinghua.tsfile.timeseries.read.support.Path; - import java.util.ArrayList; import java.util.List; @@ -10,52 +9,52 @@ * Created by zhangjinrui on 2017/12/26. 
*/ public class QueryExpression { - private List selectedSeries; - private QueryFilter queryFilter; - private boolean hasQueryFilter; - - private QueryExpression() { - selectedSeries = new ArrayList<>(); - hasQueryFilter = false; - } - - public static QueryExpression create() { - return new QueryExpression(); - } - - public QueryExpression addSelectedPath(Path path) { - this.selectedSeries.add(path); - return this; - } - - public QueryExpression setQueryFilter(QueryFilter queryFilter) { - if (queryFilter != null) { - this.queryFilter = queryFilter; - hasQueryFilter = true; - } - return this; - } - - public QueryExpression setSelectSeries(List selectedSeries) { - this.selectedSeries = selectedSeries; - return this; - } - - public QueryFilter getQueryFilter() { - return queryFilter; - } - - public List getSelectedSeries() { - return selectedSeries; - } - - public String toString() { - StringBuilder stringBuilder = new StringBuilder("\n\t[Selected Series]:").append(selectedSeries) - .append("\n\t[QueryFilter]:").append(queryFilter); - return stringBuilder.toString(); - } - - public boolean hasQueryFilter() { - return hasQueryFilter; - } + private List selectedSeries; + private QueryFilter queryFilter; + private boolean hasQueryFilter; + + private QueryExpression() { + selectedSeries = new ArrayList<>(); + hasQueryFilter = false; + } + + public static QueryExpression create() { + return new QueryExpression(); + } + + public QueryExpression addSelectedPath(Path path) { + this.selectedSeries.add(path); + return this; + } + + public QueryExpression setQueryFilter(QueryFilter queryFilter) { + if (queryFilter != null) { + this.queryFilter = queryFilter; + hasQueryFilter = true; + } + return this; + } + + public QueryExpression setSelectSeries(List selectedSeries) { + this.selectedSeries = selectedSeries; + return this; + } + + public QueryFilter getQueryFilter() { + return queryFilter; + } + + public List getSelectedSeries() { + return selectedSeries; + } + + public String toString() { + StringBuilder stringBuilder = new StringBuilder("\n\t[Selected Series]:").append(selectedSeries) + .append("\n\t[QueryFilter]:").append(queryFilter); + return stringBuilder.toString(); + } + + public boolean hasQueryFilter() { + return hasQueryFilter; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/MergeQueryDataSet.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/MergeQueryDataSet.java index 9bced68d..ef7d15b5 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/MergeQueryDataSet.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/MergeQueryDataSet.java @@ -6,7 +6,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TsPrimitiveType; import cn.edu.tsinghua.tsfile.timeseries.readV2.query.QueryDataSet; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.SeriesReader; - import java.io.IOException; import java.util.LinkedHashMap; import java.util.PriorityQueue; @@ -16,62 +15,64 @@ */ public class MergeQueryDataSet implements QueryDataSet { - private LinkedHashMap readersOfSelectedSeries; - private PriorityQueue heap; + private LinkedHashMap readersOfSelectedSeries; + private PriorityQueue heap; - public MergeQueryDataSet(LinkedHashMap readersOfSelectedSeries) throws IOException { - this.readersOfSelectedSeries = readersOfSelectedSeries; - initHeap(); - } + public MergeQueryDataSet(LinkedHashMap readersOfSelectedSeries) + throws IOException { + this.readersOfSelectedSeries = 
readersOfSelectedSeries; + initHeap(); + } - private void initHeap() throws IOException { - heap = new PriorityQueue<>(); - for (Path path : readersOfSelectedSeries.keySet()) { - SeriesReader seriesReader = readersOfSelectedSeries.get(path); - if (seriesReader.hasNext()) { - TimeValuePair timeValuePair = seriesReader.next(); - heap.add(new Point(path, timeValuePair.getTimestamp(), timeValuePair.getValue())); - } - } + private void initHeap() throws IOException { + heap = new PriorityQueue<>(); + for (Path path : readersOfSelectedSeries.keySet()) { + SeriesReader seriesReader = readersOfSelectedSeries.get(path); + if (seriesReader.hasNext()) { + TimeValuePair timeValuePair = seriesReader.next(); + heap.add(new Point(path, timeValuePair.getTimestamp(), timeValuePair.getValue())); + } } + } - @Override - public boolean hasNext() throws IOException { - return heap.size() > 0; - } + @Override + public boolean hasNext() throws IOException { + return heap.size() > 0; + } - @Override - public RowRecord next() throws IOException { - Point aimPoint = heap.peek(); - RowRecord rowRecord = new RowRecord(aimPoint.timestamp); - for (Path path : readersOfSelectedSeries.keySet()) { - rowRecord.putField(path, null); - } - while (heap.size() > 0 && heap.peek().timestamp == aimPoint.timestamp) { - Point point = heap.poll(); - rowRecord.putField(point.path, point.tsPrimitiveType); - if (readersOfSelectedSeries.get(point.path).hasNext()) { - TimeValuePair nextTimeValuePair = readersOfSelectedSeries.get(point.path).next(); - heap.add(new Point(point.path, nextTimeValuePair.getTimestamp(), nextTimeValuePair.getValue())); - } - } - return rowRecord; + @Override + public RowRecord next() throws IOException { + Point aimPoint = heap.peek(); + RowRecord rowRecord = new RowRecord(aimPoint.timestamp); + for (Path path : readersOfSelectedSeries.keySet()) { + rowRecord.putField(path, null); + } + while (heap.size() > 0 && heap.peek().timestamp == aimPoint.timestamp) { + Point point = heap.poll(); + rowRecord.putField(point.path, point.tsPrimitiveType); + if (readersOfSelectedSeries.get(point.path).hasNext()) { + TimeValuePair nextTimeValuePair = readersOfSelectedSeries.get(point.path).next(); + heap.add( + new Point(point.path, nextTimeValuePair.getTimestamp(), nextTimeValuePair.getValue())); + } } + return rowRecord; + } - private static class Point implements Comparable { - private Path path; - private long timestamp; - private TsPrimitiveType tsPrimitiveType; + private static class Point implements Comparable { + private Path path; + private long timestamp; + private TsPrimitiveType tsPrimitiveType; - private Point(Path path, long timestamp, TsPrimitiveType tsPrimitiveType) { - this.path = path; - this.timestamp = timestamp; - this.tsPrimitiveType = tsPrimitiveType; - } + private Point(Path path, long timestamp, TsPrimitiveType tsPrimitiveType) { + this.path = path; + this.timestamp = timestamp; + this.tsPrimitiveType = tsPrimitiveType; + } - @Override - public int compareTo(Point o) { - return Long.compare(timestamp, o.timestamp); - } + @Override + public int compareTo(Point o) { + return Long.compare(timestamp, o.timestamp); } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryDataSetForQueryWithQueryFilterImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryDataSetForQueryWithQueryFilterImpl.java index efd021c3..5033c77c 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryDataSetForQueryWithQueryFilterImpl.java +++ 
b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryDataSetForQueryWithQueryFilterImpl.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.query.QueryDataSet; import cn.edu.tsinghua.tsfile.timeseries.readV2.query.timegenerator.TimestampGenerator; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.impl.SeriesReaderFromSingleFileByTimestampImpl; - import java.io.IOException; import java.util.LinkedHashMap; @@ -14,27 +13,29 @@ */ public class QueryDataSetForQueryWithQueryFilterImpl implements QueryDataSet { - private TimestampGenerator timestampGenerator; - private LinkedHashMap readersOfSelectedSeries; + private TimestampGenerator timestampGenerator; + private LinkedHashMap readersOfSelectedSeries; - public QueryDataSetForQueryWithQueryFilterImpl(TimestampGenerator timestampGenerator, LinkedHashMap readersOfSelectedSeries) { - this.timestampGenerator = timestampGenerator; - this.readersOfSelectedSeries = readersOfSelectedSeries; - } + public QueryDataSetForQueryWithQueryFilterImpl(TimestampGenerator timestampGenerator, + LinkedHashMap readersOfSelectedSeries) { + this.timestampGenerator = timestampGenerator; + this.readersOfSelectedSeries = readersOfSelectedSeries; + } - @Override - public boolean hasNext() throws IOException { - return timestampGenerator.hasNext(); - } + @Override + public boolean hasNext() throws IOException { + return timestampGenerator.hasNext(); + } - @Override - public RowRecord next() throws IOException { - long timestamp = timestampGenerator.next(); - RowRecord rowRecord = new RowRecord(timestamp); - for (Path path : readersOfSelectedSeries.keySet()) { - SeriesReaderFromSingleFileByTimestampImpl seriesChunkReaderByTimestamp = readersOfSelectedSeries.get(path); - rowRecord.putField(path, seriesChunkReaderByTimestamp.getValueInTimestamp(timestamp)); - } - return rowRecord; + @Override + public RowRecord next() throws IOException { + long timestamp = timestampGenerator.next(); + RowRecord rowRecord = new RowRecord(timestamp); + for (Path path : readersOfSelectedSeries.keySet()) { + SeriesReaderFromSingleFileByTimestampImpl seriesChunkReaderByTimestamp = + readersOfSelectedSeries.get(path); + rowRecord.putField(path, seriesChunkReaderByTimestamp.getValueInTimestamp(timestamp)); } + return rowRecord; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryExecutorRouter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryExecutorRouter.java index a2230b2b..5628e60f 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryExecutorRouter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryExecutorRouter.java @@ -9,7 +9,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.query.QueryDataSet; import cn.edu.tsinghua.tsfile.timeseries.readV2.query.QueryExecutor; import cn.edu.tsinghua.tsfile.timeseries.readV2.query.QueryExpression; - import java.io.IOException; /** @@ -17,31 +16,35 @@ */ public class QueryExecutorRouter implements QueryExecutor { - private MetadataQuerier metadataQuerier; - private SeriesChunkLoader seriesChunkLoader; + private MetadataQuerier metadataQuerier; + private SeriesChunkLoader seriesChunkLoader; - public QueryExecutorRouter(MetadataQuerier metadataQuerier, SeriesChunkLoader seriesChunkLoader) { - this.metadataQuerier = metadataQuerier; - this.seriesChunkLoader = seriesChunkLoader; - } + public QueryExecutorRouter(MetadataQuerier metadataQuerier, SeriesChunkLoader seriesChunkLoader) { + 
this.metadataQuerier = metadataQuerier; + this.seriesChunkLoader = seriesChunkLoader; + } - @Override - public QueryDataSet execute(QueryExpression queryExpression) throws IOException { - if (queryExpression.hasQueryFilter()) { - try { - QueryFilter queryFilter = queryExpression.getQueryFilter(); - QueryFilter regularQueryFilter = QueryFilterOptimizer.getInstance().convertGlobalTimeFilter(queryFilter, queryExpression.getSelectedSeries()); - queryExpression.setQueryFilter(regularQueryFilter); - if (regularQueryFilter instanceof GlobalTimeFilter) { - return new QueryWithGlobalTimeFilterExecutorImpl(seriesChunkLoader, metadataQuerier).execute(queryExpression); - } else { - return new QueryWithQueryFilterExecutorImpl(seriesChunkLoader, metadataQuerier).execute(queryExpression); - } - } catch (QueryFilterOptimizationException e) { - throw new IOException(e); - } + @Override + public QueryDataSet execute(QueryExpression queryExpression) throws IOException { + if (queryExpression.hasQueryFilter()) { + try { + QueryFilter queryFilter = queryExpression.getQueryFilter(); + QueryFilter regularQueryFilter = QueryFilterOptimizer.getInstance() + .convertGlobalTimeFilter(queryFilter, queryExpression.getSelectedSeries()); + queryExpression.setQueryFilter(regularQueryFilter); + if (regularQueryFilter instanceof GlobalTimeFilter) { + return new QueryWithGlobalTimeFilterExecutorImpl(seriesChunkLoader, metadataQuerier) + .execute(queryExpression); } else { - return new QueryWithoutFilterExecutorImpl(seriesChunkLoader, metadataQuerier).execute(queryExpression); + return new QueryWithQueryFilterExecutorImpl(seriesChunkLoader, metadataQuerier) + .execute(queryExpression); } + } catch (QueryFilterOptimizationException e) { + throw new IOException(e); + } + } else { + return new QueryWithoutFilterExecutorImpl(seriesChunkLoader, metadataQuerier) + .execute(queryExpression); } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryWithGlobalTimeFilterExecutorImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryWithGlobalTimeFilterExecutorImpl.java index 09202665..95a24e16 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryWithGlobalTimeFilterExecutorImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryWithGlobalTimeFilterExecutorImpl.java @@ -11,7 +11,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.query.QueryExpression; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.SeriesReader; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.impl.SeriesReaderFromSingleFileWithFilterImpl; - import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; @@ -21,28 +20,33 @@ */ public class QueryWithGlobalTimeFilterExecutorImpl implements QueryExecutor { - private SeriesChunkLoader seriesChunkLoader; - private MetadataQuerier metadataQuerier; + private SeriesChunkLoader seriesChunkLoader; + private MetadataQuerier metadataQuerier; - public QueryWithGlobalTimeFilterExecutorImpl(SeriesChunkLoader seriesChunkLoader, MetadataQuerier metadataQuerier) { - this.seriesChunkLoader = seriesChunkLoader; - this.metadataQuerier = metadataQuerier; - } + public QueryWithGlobalTimeFilterExecutorImpl(SeriesChunkLoader seriesChunkLoader, + MetadataQuerier metadataQuerier) { + this.seriesChunkLoader = seriesChunkLoader; + this.metadataQuerier = metadataQuerier; + } - @Override - public QueryDataSet execute(QueryExpression queryExpression) throws IOException { - LinkedHashMap 
readersOfSelectedSeries = new LinkedHashMap<>(); - Filter timeFilter = ((GlobalTimeFilter) queryExpression.getQueryFilter()).getFilter(); - initReadersOfSelectedSeries(readersOfSelectedSeries, queryExpression.getSelectedSeries(), timeFilter); - return new MergeQueryDataSet(readersOfSelectedSeries); - } + @Override + public QueryDataSet execute(QueryExpression queryExpression) throws IOException { + LinkedHashMap readersOfSelectedSeries = new LinkedHashMap<>(); + Filter timeFilter = ((GlobalTimeFilter) queryExpression.getQueryFilter()).getFilter(); + initReadersOfSelectedSeries(readersOfSelectedSeries, queryExpression.getSelectedSeries(), + timeFilter); + return new MergeQueryDataSet(readersOfSelectedSeries); + } - private void initReadersOfSelectedSeries(LinkedHashMap readersOfSelectedSeries, - List selectedSeries, Filter timeFilter) throws IOException { - for (Path path : selectedSeries) { - List encodedSeriesChunkDescriptorList = metadataQuerier.getSeriesChunkDescriptorList(path); - SeriesReader seriesReader = new SeriesReaderFromSingleFileWithFilterImpl(seriesChunkLoader, encodedSeriesChunkDescriptorList, timeFilter); - readersOfSelectedSeries.put(path, seriesReader); - } + private void initReadersOfSelectedSeries( + LinkedHashMap readersOfSelectedSeries, List selectedSeries, + Filter timeFilter) throws IOException { + for (Path path : selectedSeries) { + List encodedSeriesChunkDescriptorList = + metadataQuerier.getSeriesChunkDescriptorList(path); + SeriesReader seriesReader = new SeriesReaderFromSingleFileWithFilterImpl(seriesChunkLoader, + encodedSeriesChunkDescriptorList, timeFilter); + readersOfSelectedSeries.put(path, seriesReader); } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryWithQueryFilterExecutorImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryWithQueryFilterExecutorImpl.java index fe07f4be..492cd90b 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryWithQueryFilterExecutorImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryWithQueryFilterExecutorImpl.java @@ -10,7 +10,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.query.timegenerator.TimestampGenerator; import cn.edu.tsinghua.tsfile.timeseries.readV2.query.timegenerator.TimestampGeneratorByQueryFilterImpl; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.impl.SeriesReaderFromSingleFileByTimestampImpl; - import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; @@ -21,30 +20,35 @@ */ public class QueryWithQueryFilterExecutorImpl implements QueryExecutor { - private SeriesChunkLoader seriesChunkLoader; - private MetadataQuerier metadataQuerier; - - public QueryWithQueryFilterExecutorImpl(SeriesChunkLoader seriesChunkLoader, MetadataQuerier metadataQuerier) { - this.seriesChunkLoader = seriesChunkLoader; - this.metadataQuerier = metadataQuerier; - } - - @Override - public QueryDataSet execute(QueryExpression queryExpression) throws IOException { - TimestampGenerator timestampGenerator = new TimestampGeneratorByQueryFilterImpl(queryExpression.getQueryFilter(), - seriesChunkLoader, metadataQuerier); - LinkedHashMap readersOfSelectedSeries = new LinkedHashMap<>(); - initReadersOfSelectedSeries(readersOfSelectedSeries, queryExpression.getSelectedSeries()); - return new QueryDataSetForQueryWithQueryFilterImpl(timestampGenerator, readersOfSelectedSeries); - } - - private void initReadersOfSelectedSeries(LinkedHashMap readersOfSelectedSeries, - List 
selectedSeries) throws IOException { - for (Path path : selectedSeries) { - List encodedSeriesChunkDescriptorList = metadataQuerier.getSeriesChunkDescriptorList(path); - SeriesReaderFromSingleFileByTimestampImpl seriesReader = new SeriesReaderFromSingleFileByTimestampImpl( - seriesChunkLoader, encodedSeriesChunkDescriptorList); - readersOfSelectedSeries.put(path, seriesReader); - } + private SeriesChunkLoader seriesChunkLoader; + private MetadataQuerier metadataQuerier; + + public QueryWithQueryFilterExecutorImpl(SeriesChunkLoader seriesChunkLoader, + MetadataQuerier metadataQuerier) { + this.seriesChunkLoader = seriesChunkLoader; + this.metadataQuerier = metadataQuerier; + } + + @Override + public QueryDataSet execute(QueryExpression queryExpression) throws IOException { + TimestampGenerator timestampGenerator = new TimestampGeneratorByQueryFilterImpl( + queryExpression.getQueryFilter(), seriesChunkLoader, metadataQuerier); + LinkedHashMap readersOfSelectedSeries = + new LinkedHashMap<>(); + initReadersOfSelectedSeries(readersOfSelectedSeries, queryExpression.getSelectedSeries()); + return new QueryDataSetForQueryWithQueryFilterImpl(timestampGenerator, readersOfSelectedSeries); + } + + private void initReadersOfSelectedSeries( + LinkedHashMap readersOfSelectedSeries, + List selectedSeries) throws IOException { + for (Path path : selectedSeries) { + List encodedSeriesChunkDescriptorList = + metadataQuerier.getSeriesChunkDescriptorList(path); + SeriesReaderFromSingleFileByTimestampImpl seriesReader = + new SeriesReaderFromSingleFileByTimestampImpl(seriesChunkLoader, + encodedSeriesChunkDescriptorList); + readersOfSelectedSeries.put(path, seriesReader); } -} \ No newline at end of file + } +} diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryWithoutFilterExecutorImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryWithoutFilterExecutorImpl.java index 320b2103..1b043970 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryWithoutFilterExecutorImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/impl/QueryWithoutFilterExecutorImpl.java @@ -9,7 +9,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.query.QueryExpression; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.SeriesReader; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.impl.SeriesReaderFromSingleFileWithoutFilterImpl; - import java.io.IOException; import java.util.LinkedHashMap; import java.util.List; @@ -19,27 +18,31 @@ */ public class QueryWithoutFilterExecutorImpl implements QueryExecutor { - private SeriesChunkLoader seriesChunkLoader; - private MetadataQuerier metadataQuerier; + private SeriesChunkLoader seriesChunkLoader; + private MetadataQuerier metadataQuerier; - public QueryWithoutFilterExecutorImpl(SeriesChunkLoader seriesChunkLoader, MetadataQuerier metadataQuerier) { - this.seriesChunkLoader = seriesChunkLoader; - this.metadataQuerier = metadataQuerier; - } + public QueryWithoutFilterExecutorImpl(SeriesChunkLoader seriesChunkLoader, + MetadataQuerier metadataQuerier) { + this.seriesChunkLoader = seriesChunkLoader; + this.metadataQuerier = metadataQuerier; + } - @Override - public QueryDataSet execute(QueryExpression queryExpression) throws IOException { - LinkedHashMap readersOfSelectedSeries = new LinkedHashMap<>(); - initReadersOfSelectedSeries(readersOfSelectedSeries, queryExpression.getSelectedSeries()); - return new MergeQueryDataSet(readersOfSelectedSeries); - } + @Override + public 
QueryDataSet execute(QueryExpression queryExpression) throws IOException { + LinkedHashMap readersOfSelectedSeries = new LinkedHashMap<>(); + initReadersOfSelectedSeries(readersOfSelectedSeries, queryExpression.getSelectedSeries()); + return new MergeQueryDataSet(readersOfSelectedSeries); + } - private void initReadersOfSelectedSeries(LinkedHashMap readersOfSelectedSeries, - List selectedSeries) throws IOException { - for (Path path : selectedSeries) { - List encodedSeriesChunkDescriptorList = metadataQuerier.getSeriesChunkDescriptorList(path); - SeriesReader seriesReader = new SeriesReaderFromSingleFileWithoutFilterImpl(seriesChunkLoader, encodedSeriesChunkDescriptorList); - readersOfSelectedSeries.put(path, seriesReader); - } + private void initReadersOfSelectedSeries( + LinkedHashMap readersOfSelectedSeries, List selectedSeries) + throws IOException { + for (Path path : selectedSeries) { + List encodedSeriesChunkDescriptorList = + metadataQuerier.getSeriesChunkDescriptorList(path); + SeriesReader seriesReader = new SeriesReaderFromSingleFileWithoutFilterImpl(seriesChunkLoader, + encodedSeriesChunkDescriptorList); + readersOfSelectedSeries.put(path, seriesReader); } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/NodeConstructor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/NodeConstructor.java index 7ffc67ed..0fd1a96a 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/NodeConstructor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/NodeConstructor.java @@ -10,7 +10,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.query.timegenerator.node.Node; import cn.edu.tsinghua.tsfile.timeseries.readV2.query.timegenerator.node.OrNode; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.SeriesReader; - import java.io.IOException; /** @@ -18,21 +17,23 @@ */ public abstract class NodeConstructor { - public Node construct(QueryFilter queryFilter) throws IOException { - if (queryFilter.getType() == QueryFilterType.SERIES) { - return new LeafNode(generateSeriesReader((SeriesFilter) queryFilter)); - } else if (queryFilter.getType() == QueryFilterType.OR) { - Node leftChild = construct(((BinaryQueryFilter) queryFilter).getLeft()); - Node rightChild = construct(((BinaryQueryFilter) queryFilter).getRight()); - return new OrNode(leftChild, rightChild); - } else if (queryFilter.getType() == QueryFilterType.AND) { - Node leftChild = construct(((BinaryQueryFilter) queryFilter).getLeft()); - Node rightChild = construct(((BinaryQueryFilter) queryFilter).getRight()); - return new AndNode(leftChild, rightChild); - } - throw new UnSupportedDataTypeException("Unsupported QueryFilterType when construct OperatorNode: " + queryFilter.getType()); + public Node construct(QueryFilter queryFilter) throws IOException { + if (queryFilter.getType() == QueryFilterType.SERIES) { + return new LeafNode(generateSeriesReader((SeriesFilter) queryFilter)); + } else if (queryFilter.getType() == QueryFilterType.OR) { + Node leftChild = construct(((BinaryQueryFilter) queryFilter).getLeft()); + Node rightChild = construct(((BinaryQueryFilter) queryFilter).getRight()); + return new OrNode(leftChild, rightChild); + } else if (queryFilter.getType() == QueryFilterType.AND) { + Node leftChild = construct(((BinaryQueryFilter) queryFilter).getLeft()); + Node rightChild = construct(((BinaryQueryFilter) queryFilter).getRight()); + return new AndNode(leftChild, rightChild); } + throw new 
UnSupportedDataTypeException( + "Unsupported QueryFilterType when construct OperatorNode: " + queryFilter.getType()); + } - public abstract SeriesReader generateSeriesReader(SeriesFilter seriesFilter) throws IOException; + public abstract SeriesReader generateSeriesReader(SeriesFilter seriesFilter) + throws IOException; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/NodeConstructorForSingleFileImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/NodeConstructorForSingleFileImpl.java index 1baa33f3..c3c9e71f 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/NodeConstructorForSingleFileImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/NodeConstructorForSingleFileImpl.java @@ -6,7 +6,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.controller.SeriesChunkLoader; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.SeriesReader; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.impl.SeriesReaderFromSingleFileWithFilterImpl; - import java.io.IOException; import java.util.List; @@ -14,18 +13,20 @@ * Created by zhangjinrui on 2017/12/26. */ public class NodeConstructorForSingleFileImpl extends NodeConstructor { - private MetadataQuerier metadataQuerier; - private SeriesChunkLoader seriesChunkLoader; + private MetadataQuerier metadataQuerier; + private SeriesChunkLoader seriesChunkLoader; - public NodeConstructorForSingleFileImpl(MetadataQuerier metadataQuerier, SeriesChunkLoader seriesChunkLoader) { - this.metadataQuerier = metadataQuerier; - this.seriesChunkLoader = seriesChunkLoader; - } + public NodeConstructorForSingleFileImpl(MetadataQuerier metadataQuerier, + SeriesChunkLoader seriesChunkLoader) { + this.metadataQuerier = metadataQuerier; + this.seriesChunkLoader = seriesChunkLoader; + } - @Override - public SeriesReader generateSeriesReader(SeriesFilter seriesFilter) throws IOException { - List encodedSeriesChunkDescriptorList = metadataQuerier.getSeriesChunkDescriptorList( - seriesFilter.getSeriesPath()); - return new SeriesReaderFromSingleFileWithFilterImpl(seriesChunkLoader, encodedSeriesChunkDescriptorList, seriesFilter.getFilter()); - } + @Override + public SeriesReader generateSeriesReader(SeriesFilter seriesFilter) throws IOException { + List encodedSeriesChunkDescriptorList = + metadataQuerier.getSeriesChunkDescriptorList(seriesFilter.getSeriesPath()); + return new SeriesReaderFromSingleFileWithFilterImpl(seriesChunkLoader, + encodedSeriesChunkDescriptorList, seriesFilter.getFilter()); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/TimestampGenerator.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/TimestampGenerator.java index fa1de1b6..d6438866 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/TimestampGenerator.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/TimestampGenerator.java @@ -7,8 +7,8 @@ */ public interface TimestampGenerator { - boolean hasNext() throws IOException; + boolean hasNext() throws IOException; - long next() throws IOException; + long next() throws IOException; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/TimestampGeneratorByQueryFilterImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/TimestampGeneratorByQueryFilterImpl.java index e0f36ee1..a24cef66 100644 --- 
a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/TimestampGeneratorByQueryFilterImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/TimestampGeneratorByQueryFilterImpl.java @@ -4,7 +4,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.controller.MetadataQuerier; import cn.edu.tsinghua.tsfile.timeseries.readV2.controller.SeriesChunkLoader; import cn.edu.tsinghua.tsfile.timeseries.readV2.query.timegenerator.node.Node; - import java.io.IOException; /** @@ -12,27 +11,29 @@ */ public class TimestampGeneratorByQueryFilterImpl implements TimestampGenerator { - private QueryFilter queryFilter; - private Node operatorNode; - - public TimestampGeneratorByQueryFilterImpl(QueryFilter queryFilter, SeriesChunkLoader seriesChunkLoader - , MetadataQuerier metadataQuerier) throws IOException { - this.queryFilter = queryFilter; - initNode(seriesChunkLoader, metadataQuerier); - } - - private void initNode(SeriesChunkLoader seriesChunkLoader, MetadataQuerier metadataQuerier) throws IOException { - NodeConstructorForSingleFileImpl nodeConstructorForSingleFile = new NodeConstructorForSingleFileImpl(metadataQuerier, seriesChunkLoader); - this.operatorNode = nodeConstructorForSingleFile.construct(queryFilter); - } - - @Override - public boolean hasNext() throws IOException { - return operatorNode.hasNext(); - } - - @Override - public long next() throws IOException { - return operatorNode.next(); - } + private QueryFilter queryFilter; + private Node operatorNode; + + public TimestampGeneratorByQueryFilterImpl(QueryFilter queryFilter, + SeriesChunkLoader seriesChunkLoader, MetadataQuerier metadataQuerier) throws IOException { + this.queryFilter = queryFilter; + initNode(seriesChunkLoader, metadataQuerier); + } + + private void initNode(SeriesChunkLoader seriesChunkLoader, MetadataQuerier metadataQuerier) + throws IOException { + NodeConstructorForSingleFileImpl nodeConstructorForSingleFile = + new NodeConstructorForSingleFileImpl(metadataQuerier, seriesChunkLoader); + this.operatorNode = nodeConstructorForSingleFile.construct(queryFilter); + } + + @Override + public boolean hasNext() throws IOException { + return operatorNode.hasNext(); + } + + @Override + public long next() throws IOException { + return operatorNode.next(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/AndNode.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/AndNode.java index f93d76b2..a6d98084 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/AndNode.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/AndNode.java @@ -7,66 +7,66 @@ */ public class AndNode implements Node { - private Node leftChild; - private Node rightChild; + private Node leftChild; + private Node rightChild; - private long cachedValue; - private boolean hasCachedValue; + private long cachedValue; + private boolean hasCachedValue; - public AndNode(Node leftChild, Node rightChild) { - this.leftChild = leftChild; - this.rightChild = rightChild; - this.hasCachedValue = false; - } + public AndNode(Node leftChild, Node rightChild) { + this.leftChild = leftChild; + this.rightChild = rightChild; + this.hasCachedValue = false; + } - @Override - public boolean hasNext() throws IOException { - if (hasCachedValue) { - return true; - } - if (leftChild.hasNext() && rightChild.hasNext()) { - long leftValue = leftChild.next(); - long rightValue = 
rightChild.next(); - while (true) { - if (leftValue == rightValue) { - this.hasCachedValue = true; - this.cachedValue = leftValue; - return true; - } else if (leftValue > rightValue) { - if (rightChild.hasNext()) { - rightValue = rightChild.next(); - } else { - return false; - } - } else { //leftValue < rightValue - if (leftChild.hasNext()) { - leftValue = leftChild.next(); - } else { - return false; - } - } - } - } - return false; + @Override + public boolean hasNext() throws IOException { + if (hasCachedValue) { + return true; } - - /** - * If there is no value in current Node, -1 will be returned if {@code next()} is invoked - * - * @return - * @throws IOException - */ - @Override - public long next() throws IOException { - if (hasNext()) { - hasCachedValue = false; - return cachedValue; + if (leftChild.hasNext() && rightChild.hasNext()) { + long leftValue = leftChild.next(); + long rightValue = rightChild.next(); + while (true) { + if (leftValue == rightValue) { + this.hasCachedValue = true; + this.cachedValue = leftValue; + return true; + } else if (leftValue > rightValue) { + if (rightChild.hasNext()) { + rightValue = rightChild.next(); + } else { + return false; + } + } else { // leftValue < rightValue + if (leftChild.hasNext()) { + leftValue = leftChild.next(); + } else { + return false; + } } - return -1; + } } + return false; + } - @Override - public NodeType getType() { - return NodeType.AND; + /** + * If there is no value in current Node, -1 will be returned if {@code next()} is invoked + * + * @return + * @throws IOException + */ + @Override + public long next() throws IOException { + if (hasNext()) { + hasCachedValue = false; + return cachedValue; } + return -1; + } + + @Override + public NodeType getType() { + return NodeType.AND; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/LeafNode.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/LeafNode.java index 79ceff37..f88a6a67 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/LeafNode.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/LeafNode.java @@ -1,7 +1,6 @@ package cn.edu.tsinghua.tsfile.timeseries.readV2.query.timegenerator.node; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.SeriesReader; - import java.io.IOException; /** @@ -9,24 +8,24 @@ */ public class LeafNode implements Node { - private SeriesReader seriesReader; + private SeriesReader seriesReader; - public LeafNode(SeriesReader seriesReader) { - this.seriesReader = seriesReader; - } + public LeafNode(SeriesReader seriesReader) { + this.seriesReader = seriesReader; + } - @Override - public boolean hasNext() throws IOException { - return seriesReader.hasNext(); - } + @Override + public boolean hasNext() throws IOException { + return seriesReader.hasNext(); + } - @Override - public long next() throws IOException { - return seriesReader.next().getTimestamp(); - } + @Override + public long next() throws IOException { + return seriesReader.next().getTimestamp(); + } - @Override - public NodeType getType() { - return NodeType.LEAF; - } + @Override + public NodeType getType() { + return NodeType.LEAF; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/Node.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/Node.java index af01083f..ba970fa2 100644 --- 
a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/Node.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/Node.java @@ -7,9 +7,9 @@ */ public interface Node { - boolean hasNext() throws IOException; + boolean hasNext() throws IOException; - long next() throws IOException; + long next() throws IOException; - NodeType getType(); + NodeType getType(); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/NodeType.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/NodeType.java index 76acc14b..345980a5 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/NodeType.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/NodeType.java @@ -4,5 +4,5 @@ * @author Jinrui Zhang */ public enum NodeType { - AND, OR, LEAF + AND, OR, LEAF } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/OrNode.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/OrNode.java index ac9fbd25..3255530f 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/OrNode.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/node/OrNode.java @@ -7,79 +7,79 @@ */ public class OrNode implements Node { - private Node leftChild; - private Node rightChild; + private Node leftChild; + private Node rightChild; - private boolean hasCachedLeftValue; - private long cachedLeftValue; - private boolean hasCachedRightValue; - private long cachedRightValue; + private boolean hasCachedLeftValue; + private long cachedLeftValue; + private boolean hasCachedRightValue; + private long cachedRightValue; - public OrNode(Node leftChild, Node rightChild) { - this.leftChild = leftChild; - this.rightChild = rightChild; - this.hasCachedLeftValue = false; - this.hasCachedRightValue = false; - } + public OrNode(Node leftChild, Node rightChild) { + this.leftChild = leftChild; + this.rightChild = rightChild; + this.hasCachedLeftValue = false; + this.hasCachedRightValue = false; + } - @Override - public boolean hasNext() throws IOException { - if (hasCachedLeftValue || hasCachedRightValue) { - return true; - } - return leftChild.hasNext() || rightChild.hasNext(); + @Override + public boolean hasNext() throws IOException { + if (hasCachedLeftValue || hasCachedRightValue) { + return true; } + return leftChild.hasNext() || rightChild.hasNext(); + } - private boolean hasLeftValue() throws IOException { - return hasCachedLeftValue || leftChild.hasNext(); - } + private boolean hasLeftValue() throws IOException { + return hasCachedLeftValue || leftChild.hasNext(); + } - private long getLeftValue() throws IOException { - if (hasCachedLeftValue) { - hasCachedLeftValue = false; - return cachedLeftValue; - } - return leftChild.next(); + private long getLeftValue() throws IOException { + if (hasCachedLeftValue) { + hasCachedLeftValue = false; + return cachedLeftValue; } + return leftChild.next(); + } - private boolean hasRightValue() throws IOException { - return hasCachedRightValue || rightChild.hasNext(); - } + private boolean hasRightValue() throws IOException { + return hasCachedRightValue || rightChild.hasNext(); + } - private long getRightValue() throws IOException { - if (hasCachedRightValue) { - hasCachedRightValue = false; - return cachedRightValue; - } - return rightChild.next(); + private long 
getRightValue() throws IOException { + if (hasCachedRightValue) { + hasCachedRightValue = false; + return cachedRightValue; } + return rightChild.next(); + } - @Override - public long next() throws IOException { - if (hasLeftValue() && !hasRightValue()) { - return getLeftValue(); - } else if (!hasLeftValue() && hasRightValue()) { - return getRightValue(); - } else if (hasLeftValue() && hasRightValue()) { - long leftValue = getLeftValue(); - long rightValue = getRightValue(); - if (leftValue < rightValue) { - hasCachedRightValue = true; - cachedRightValue = rightValue; - return leftValue; - } else if (leftValue > rightValue) { - hasCachedLeftValue = true; - cachedLeftValue = leftValue; - return rightValue; - } else { - return leftValue; - } - } - return -1; + @Override + public long next() throws IOException { + if (hasLeftValue() && !hasRightValue()) { + return getLeftValue(); + } else if (!hasLeftValue() && hasRightValue()) { + return getRightValue(); + } else if (hasLeftValue() && hasRightValue()) { + long leftValue = getLeftValue(); + long rightValue = getRightValue(); + if (leftValue < rightValue) { + hasCachedRightValue = true; + cachedRightValue = rightValue; + return leftValue; + } else if (leftValue > rightValue) { + hasCachedLeftValue = true; + cachedLeftValue = leftValue; + return rightValue; + } else { + return leftValue; + } } + return -1; + } - @Override - public NodeType getType() { - return NodeType.OR; - } + @Override + public NodeType getType() { + return NodeType.OR; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/SeriesReaderByTimeStamp.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/SeriesReaderByTimeStamp.java index e788935d..a474b028 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/SeriesReaderByTimeStamp.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/SeriesReaderByTimeStamp.java @@ -1,15 +1,15 @@ package cn.edu.tsinghua.tsfile.timeseries.readV2.reader; import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TsPrimitiveType; - import java.io.IOException; -public interface SeriesReaderByTimeStamp extends SeriesReader{ - /** - * @param timestamp - * @return If there is no TimeValuePair whose timestamp equals to given timestamp, then return null. - * @throws IOException - */ - TsPrimitiveType getValueInTimestamp(long timestamp) throws IOException; +public interface SeriesReaderByTimeStamp extends SeriesReader { + /** + * @param timestamp + * @return If there is no TimeValuePair whose timestamp equals to given timestamp, then return + * null. 
+ * @throws IOException + */ + TsPrimitiveType getValueInTimestamp(long timestamp) throws IOException; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/TimeValuePairReader.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/TimeValuePairReader.java index bfec601b..74ca375c 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/TimeValuePairReader.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/TimeValuePairReader.java @@ -1,20 +1,19 @@ package cn.edu.tsinghua.tsfile.timeseries.readV2.reader; import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TimeValuePair; - import java.io.IOException; /** * @author Jinrui Zhang */ -public interface TimeValuePairReader{ +public interface TimeValuePairReader { - boolean hasNext() throws IOException; + boolean hasNext() throws IOException; - TimeValuePair next() throws IOException; + TimeValuePair next() throws IOException; - void skipCurrentTimeValuePair() throws IOException; + void skipCurrentTimeValuePair() throws IOException; - void close() throws IOException; + void close() throws IOException; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/PageReader.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/PageReader.java index 8bc59327..46412b0f 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/PageReader.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/PageReader.java @@ -8,7 +8,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TsPrimitiveType; import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TsPrimitiveType.*; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.TimeValuePairReader; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -18,93 +17,94 @@ */ public class PageReader implements TimeValuePairReader { - private TSDataType dataType; - private Decoder valueDecoder; - private Decoder timeDecoder; - private InputStream timestampInputStream; - private InputStream valueInputStream; - private boolean hasOneCachedTimeValuePair; - private TimeValuePair cachedTimeValuePair; + private TSDataType dataType; + private Decoder valueDecoder; + private Decoder timeDecoder; + private InputStream timestampInputStream; + private InputStream valueInputStream; + private boolean hasOneCachedTimeValuePair; + private TimeValuePair cachedTimeValuePair; - public PageReader(InputStream pageContent, TSDataType dataType, Decoder valueDecoder, Decoder timeDecoder) throws IOException { - this.dataType = dataType; - this.valueDecoder = valueDecoder; - this.timeDecoder = timeDecoder; - hasOneCachedTimeValuePair = false; - splitInputStreamToTimeStampAndValue(pageContent); - } + public PageReader(InputStream pageContent, TSDataType dataType, Decoder valueDecoder, + Decoder timeDecoder) throws IOException { + this.dataType = dataType; + this.valueDecoder = valueDecoder; + this.timeDecoder = timeDecoder; + hasOneCachedTimeValuePair = false; + splitInputStreamToTimeStampAndValue(pageContent); + } - private void splitInputStreamToTimeStampAndValue(InputStream pageContent) throws IOException { - int timeInputStreamLength = ReadWriteStreamUtils.readUnsignedVarInt(pageContent); - byte[] buf = new byte[timeInputStreamLength]; - int readSize = pageContent.read(buf, 0, timeInputStreamLength); - if (readSize != timeInputStreamLength) { - throw new IOException("Error when read bytes of encoded timestamps. 
" + - "Expect byte size : " + timeInputStreamLength + ". Read size : " + readSize); - } - this.timestampInputStream = new ByteArrayInputStream(buf); - this.valueInputStream = pageContent; + private void splitInputStreamToTimeStampAndValue(InputStream pageContent) throws IOException { + int timeInputStreamLength = ReadWriteStreamUtils.readUnsignedVarInt(pageContent); + byte[] buf = new byte[timeInputStreamLength]; + int readSize = pageContent.read(buf, 0, timeInputStreamLength); + if (readSize != timeInputStreamLength) { + throw new IOException("Error when read bytes of encoded timestamps. " + "Expect byte size : " + + timeInputStreamLength + ". Read size : " + readSize); } + this.timestampInputStream = new ByteArrayInputStream(buf); + this.valueInputStream = pageContent; + } - @Override - public boolean hasNext() throws IOException { - if (hasOneCachedTimeValuePair) { - return true; - } - if (timeDecoder.hasNext(timestampInputStream) && valueDecoder.hasNext(valueInputStream)) { - cacheOneTimeValuePair(); - this.hasOneCachedTimeValuePair = true; - return true; - } - return false; + @Override + public boolean hasNext() throws IOException { + if (hasOneCachedTimeValuePair) { + return true; } - - @Override - public TimeValuePair next() throws IOException { - if (hasNext()) { - this.hasOneCachedTimeValuePair = false; - return cachedTimeValuePair; - } else { - throw new IOException("No more TimeValuePair in current page"); - } + if (timeDecoder.hasNext(timestampInputStream) && valueDecoder.hasNext(valueInputStream)) { + cacheOneTimeValuePair(); + this.hasOneCachedTimeValuePair = true; + return true; } + return false; + } - private void cacheOneTimeValuePair() { - long timestamp = timeDecoder.readLong(timestampInputStream); - TsPrimitiveType value = readOneValue(); - this.cachedTimeValuePair = new TimeValuePair(timestamp, value); + @Override + public TimeValuePair next() throws IOException { + if (hasNext()) { + this.hasOneCachedTimeValuePair = false; + return cachedTimeValuePair; + } else { + throw new IOException("No more TimeValuePair in current page"); } + } - @Override - public void skipCurrentTimeValuePair() throws IOException { - next(); - } + private void cacheOneTimeValuePair() { + long timestamp = timeDecoder.readLong(timestampInputStream); + TsPrimitiveType value = readOneValue(); + this.cachedTimeValuePair = new TimeValuePair(timestamp, value); + } - @Override - public void close() throws IOException { - timestampInputStream.close(); - valueInputStream.close(); - } + @Override + public void skipCurrentTimeValuePair() throws IOException { + next(); + } + + @Override + public void close() throws IOException { + timestampInputStream.close(); + valueInputStream.close(); + } - private TsPrimitiveType readOneValue() { - switch (dataType) { - case BOOLEAN: - return new TsBoolean(valueDecoder.readBoolean(valueInputStream)); - case INT32: - return new TsInt(valueDecoder.readInt(valueInputStream)); - case INT64: - return new TsLong(valueDecoder.readLong(valueInputStream)); - case FLOAT: - return new TsFloat(valueDecoder.readFloat(valueInputStream)); - case DOUBLE: - return new TsDouble(valueDecoder.readDouble(valueInputStream)); - case TEXT: - return new TsBinary(valueDecoder.readBinary(valueInputStream)); - case ENUMS: - return new TsInt(valueDecoder.readInt(valueInputStream)); - default: - break; - } - throw new UnSupportedDataTypeException("Unsupported data type :" + dataType); + private TsPrimitiveType readOneValue() { + switch (dataType) { + case BOOLEAN: + return new 
TsBoolean(valueDecoder.readBoolean(valueInputStream)); + case INT32: + return new TsInt(valueDecoder.readInt(valueInputStream)); + case INT64: + return new TsLong(valueDecoder.readLong(valueInputStream)); + case FLOAT: + return new TsFloat(valueDecoder.readFloat(valueInputStream)); + case DOUBLE: + return new TsDouble(valueDecoder.readDouble(valueInputStream)); + case TEXT: + return new TsBinary(valueDecoder.readBinary(valueInputStream)); + case ENUMS: + return new TsInt(valueDecoder.readInt(valueInputStream)); + default: + break; } + throw new UnSupportedDataTypeException("Unsupported data type :" + dataType); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReader.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReader.java index 62f5b988..8359632c 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReader.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReader.java @@ -9,7 +9,6 @@ import cn.edu.tsinghua.tsfile.format.PageHeader; import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TimeValuePair; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.TimeValuePairReader; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -19,127 +18,133 @@ */ public abstract class SeriesChunkReader implements TimeValuePairReader { - protected TSDataType dataType; - private InputStream seriesChunkInputStream; - - private boolean pageReaderInitialized; - private PageReader pageReader; - private UnCompressor unCompressor; - private Encoding defaultTimestampEncoding; - protected boolean hasCachedTimeValuePair; - protected TimeValuePair cachedTimeValuePair; - private long maxTombstoneTime; - - - public SeriesChunkReader(InputStream seriesChunkInputStream, TSDataType dataType, CompressionTypeName compressionTypeName) { - this.seriesChunkInputStream = seriesChunkInputStream; - this.dataType = dataType; - this.unCompressor = UnCompressor.getUnCompressor(compressionTypeName); - this.pageReaderInitialized = false; - defaultTimestampEncoding = Encoding.TS_2DIFF; + protected TSDataType dataType; + private InputStream seriesChunkInputStream; + + private boolean pageReaderInitialized; + private PageReader pageReader; + private UnCompressor unCompressor; + private Encoding defaultTimestampEncoding; + protected boolean hasCachedTimeValuePair; + protected TimeValuePair cachedTimeValuePair; + private long maxTombstoneTime; + + + public SeriesChunkReader(InputStream seriesChunkInputStream, TSDataType dataType, + CompressionTypeName compressionTypeName) { + this.seriesChunkInputStream = seriesChunkInputStream; + this.dataType = dataType; + this.unCompressor = UnCompressor.getUnCompressor(compressionTypeName); + this.pageReaderInitialized = false; + defaultTimestampEncoding = Encoding.TS_2DIFF; + } + + @Override + public boolean hasNext() throws IOException { + if (hasCachedTimeValuePair) { + return true; } - - @Override - public boolean hasNext() throws IOException { - if (hasCachedTimeValuePair) { - return true; + // Judge whether next satisfied page exists + while (true) { + if (!pageReaderInitialized) { + boolean hasMoreSatisfiedPage = constructPageReaderIfNextSatisfiedPageExists(); + if (!hasMoreSatisfiedPage) { + return false; } - //Judge whether next satisfied page exists - while (true) { - if (!pageReaderInitialized) { - boolean hasMoreSatisfiedPage = constructPageReaderIfNextSatisfiedPageExists(); - if 
(!hasMoreSatisfiedPage) { - return false; - } - pageReaderInitialized = true; - } - - while (pageReader.hasNext()) { - TimeValuePair timeValuePair = pageReader.next(); - if (timeValuePairSatisfied(timeValuePair) && timeValuePair.getTimestamp() > maxTombstoneTime) { - this.hasCachedTimeValuePair = true; - this.cachedTimeValuePair = timeValuePair; - return true; - } - } - pageReaderInitialized = false; + pageReaderInitialized = true; + } + + while (pageReader.hasNext()) { + TimeValuePair timeValuePair = pageReader.next(); + if (timeValuePairSatisfied(timeValuePair) + && timeValuePair.getTimestamp() > maxTombstoneTime) { + this.hasCachedTimeValuePair = true; + this.cachedTimeValuePair = timeValuePair; + return true; } + } + pageReaderInitialized = false; } + } - @Override - public TimeValuePair next() throws IOException { - if (hasNext()) { - hasCachedTimeValuePair = false; - return cachedTimeValuePair; - } - throw new IOException("No more timeValuePair in current MemSeriesChunk"); + @Override + public TimeValuePair next() throws IOException { + if (hasNext()) { + hasCachedTimeValuePair = false; + return cachedTimeValuePair; } - - private boolean constructPageReaderIfNextSatisfiedPageExists() throws IOException { - boolean gotNextPageReader = false; - while (hasNextPageInStream() && !gotNextPageReader) { - PageHeader pageHeader = getNextPageHeader(); - if (pageSatisfied(pageHeader)) { - Decoder valueDecoder = Decoder.getDecoderByType(pageHeader.getData_page_header().getEncoding(), dataType); - //TODO: How to get defaultTimeDecoder by TSConfig rather than hard code here ? - Decoder defaultTimeDecoder = Decoder.getDecoderByType(defaultTimestampEncoding, TSDataType.INT64); - pageReader = constructPageReaderForNextPage(pageHeader.getCompressed_page_size(), valueDecoder, defaultTimeDecoder); - gotNextPageReader = true; - } else { - skipBytesInStreamByLength(pageHeader.getCompressed_page_size()); - } - } - return gotNextPageReader; - + throw new IOException("No more timeValuePair in current MemSeriesChunk"); + } + + private boolean constructPageReaderIfNextSatisfiedPageExists() throws IOException { + boolean gotNextPageReader = false; + while (hasNextPageInStream() && !gotNextPageReader) { + PageHeader pageHeader = getNextPageHeader(); + if (pageSatisfied(pageHeader)) { + Decoder valueDecoder = + Decoder.getDecoderByType(pageHeader.getData_page_header().getEncoding(), dataType); + // TODO: How to get defaultTimeDecoder by TSConfig rather than hard code here ? 
+ Decoder defaultTimeDecoder = + Decoder.getDecoderByType(defaultTimestampEncoding, TSDataType.INT64); + pageReader = constructPageReaderForNextPage(pageHeader.getCompressed_page_size(), + valueDecoder, defaultTimeDecoder); + gotNextPageReader = true; + } else { + skipBytesInStreamByLength(pageHeader.getCompressed_page_size()); + } } + return gotNextPageReader; - private boolean hasNextPageInStream() throws IOException { - if (seriesChunkInputStream.available() > 0) { - return true; - } - return false; + } + + private boolean hasNextPageInStream() throws IOException { + if (seriesChunkInputStream.available() > 0) { + return true; } + return false; + } - public abstract boolean pageSatisfied(PageHeader pageHeader); + public abstract boolean pageSatisfied(PageHeader pageHeader); - public abstract boolean timeValuePairSatisfied(TimeValuePair timeValuePair); + public abstract boolean timeValuePairSatisfied(TimeValuePair timeValuePair); - private void skipBytesInStreamByLength(long length) throws IOException { - seriesChunkInputStream.skip(length); - } + private void skipBytesInStreamByLength(long length) throws IOException { + seriesChunkInputStream.skip(length); + } - private PageReader constructPageReaderForNextPage(int compressedPageBodyLength, Decoder valueDecoder, Decoder timeDecoder) - throws IOException { - byte[] compressedPageBody = new byte[compressedPageBodyLength]; - int readLength = seriesChunkInputStream.read(compressedPageBody, 0, compressedPageBodyLength); - if (readLength != compressedPageBodyLength) { - throw new IOException("unexpected byte read length when read compressedPageBody. Expected:" - + compressedPageBody + ". Actual:" + readLength); - } - PageReader pageReader = new PageReader(new ByteArrayInputStream(unCompressor.uncompress(compressedPageBody)), - dataType, valueDecoder, timeDecoder); - return pageReader; + private PageReader constructPageReaderForNextPage(int compressedPageBodyLength, + Decoder valueDecoder, Decoder timeDecoder) throws IOException { + byte[] compressedPageBody = new byte[compressedPageBodyLength]; + int readLength = seriesChunkInputStream.read(compressedPageBody, 0, compressedPageBodyLength); + if (readLength != compressedPageBodyLength) { + throw new IOException("unexpected byte read length when read compressedPageBody. Expected:" + + compressedPageBody + ". 
Actual:" + readLength); } + PageReader pageReader = + new PageReader(new ByteArrayInputStream(unCompressor.uncompress(compressedPageBody)), + dataType, valueDecoder, timeDecoder); + return pageReader; + } - private PageHeader getNextPageHeader() throws IOException { - return ReadWriteThriftFormatUtils.readPageHeader(seriesChunkInputStream); - } + private PageHeader getNextPageHeader() throws IOException { + return ReadWriteThriftFormatUtils.readPageHeader(seriesChunkInputStream); + } - @Override - public void skipCurrentTimeValuePair() { + @Override + public void skipCurrentTimeValuePair() { - } + } - @Override - public void close() throws IOException { + @Override + public void close() throws IOException { - } + } - public void setMaxTombstoneTime(long maxTombStoneTime) { - this.maxTombstoneTime = maxTombStoneTime; - } + public void setMaxTombstoneTime(long maxTombStoneTime) { + this.maxTombstoneTime = maxTombStoneTime; + } - public long getMaxTombstoneTime() { - return this.maxTombstoneTime; - } + public long getMaxTombstoneTime() { + return this.maxTombstoneTime; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReaderByTimestampImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReaderByTimestampImpl.java index 7e4e7dff..d2d05c44 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReaderByTimestampImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReaderByTimestampImpl.java @@ -6,61 +6,62 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TimeValuePair; import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TsPrimitiveType; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.SeriesReaderByTimeStamp; - import java.io.IOException; import java.io.InputStream; /** * Created by zhangjinrui on 2017/12/26. 
*/ -public class SeriesChunkReaderByTimestampImpl extends SeriesChunkReader implements SeriesReaderByTimeStamp{ +public class SeriesChunkReaderByTimestampImpl extends SeriesChunkReader + implements SeriesReaderByTimeStamp { - private long currentTimestamp; + private long currentTimestamp; - public SeriesChunkReaderByTimestampImpl(InputStream seriesChunkInputStream, TSDataType dataType, CompressionTypeName compressionTypeName) { - super(seriesChunkInputStream, dataType, compressionTypeName); - currentTimestamp = Long.MIN_VALUE; - } + public SeriesChunkReaderByTimestampImpl(InputStream seriesChunkInputStream, TSDataType dataType, + CompressionTypeName compressionTypeName) { + super(seriesChunkInputStream, dataType, compressionTypeName); + currentTimestamp = Long.MIN_VALUE; + } - @Override - public boolean pageSatisfied(PageHeader pageHeader) { - long maxTimestamp = pageHeader.data_page_header.max_timestamp; - //If minTimestamp > currentTimestamp, this page should NOT be skipped - if (maxTimestamp < currentTimestamp || maxTimestamp < getMaxTombstoneTime()) { - return false; - } - return true; + @Override + public boolean pageSatisfied(PageHeader pageHeader) { + long maxTimestamp = pageHeader.data_page_header.max_timestamp; + // If minTimestamp > currentTimestamp, this page should NOT be skipped + if (maxTimestamp < currentTimestamp || maxTimestamp < getMaxTombstoneTime()) { + return false; } + return true; + } - @Override - public boolean timeValuePairSatisfied(TimeValuePair timeValuePair) { - return timeValuePair.getTimestamp() >= currentTimestamp && timeValuePair.getTimestamp() > getMaxTombstoneTime(); - } - - public void setCurrentTimestamp(long currentTimestamp) { - this.currentTimestamp = currentTimestamp; - if(hasCachedTimeValuePair && cachedTimeValuePair.getTimestamp() < currentTimestamp){ - hasCachedTimeValuePair = false; - } + @Override + public boolean timeValuePairSatisfied(TimeValuePair timeValuePair) { + return timeValuePair.getTimestamp() >= currentTimestamp + && timeValuePair.getTimestamp() > getMaxTombstoneTime(); + } + + public void setCurrentTimestamp(long currentTimestamp) { + this.currentTimestamp = currentTimestamp; + if (hasCachedTimeValuePair && cachedTimeValuePair.getTimestamp() < currentTimestamp) { + hasCachedTimeValuePair = false; } + } - @Override - public TsPrimitiveType getValueInTimestamp(long timestamp) throws IOException { - setCurrentTimestamp(timestamp); - if(hasCachedTimeValuePair && cachedTimeValuePair.getTimestamp() == timestamp){ - hasCachedTimeValuePair = false; - return cachedTimeValuePair.getValue(); - } - while (hasNext()){ - cachedTimeValuePair = next(); - if(cachedTimeValuePair.getTimestamp() == timestamp){ - return cachedTimeValuePair.getValue(); - } - else if(cachedTimeValuePair.getTimestamp() > timestamp){ - hasCachedTimeValuePair = true; - return null; - } - } + @Override + public TsPrimitiveType getValueInTimestamp(long timestamp) throws IOException { + setCurrentTimestamp(timestamp); + if (hasCachedTimeValuePair && cachedTimeValuePair.getTimestamp() == timestamp) { + hasCachedTimeValuePair = false; + return cachedTimeValuePair.getValue(); + } + while (hasNext()) { + cachedTimeValuePair = next(); + if (cachedTimeValuePair.getTimestamp() == timestamp) { + return cachedTimeValuePair.getValue(); + } else if (cachedTimeValuePair.getTimestamp() > timestamp) { + hasCachedTimeValuePair = true; return null; + } } + return null; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReaderWithFilterImpl.java 
b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReaderWithFilterImpl.java index 73ab4bdd..99b6e513 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReaderWithFilterImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReaderWithFilterImpl.java @@ -11,7 +11,6 @@ import cn.edu.tsinghua.tsfile.timeseries.filterV2.visitor.impl.DigestFilterVisitor; import cn.edu.tsinghua.tsfile.timeseries.filterV2.visitor.impl.TimeValuePairFilterVisitorImpl; import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TimeValuePair; - import java.io.InputStream; /** @@ -19,36 +18,36 @@ */ public class SeriesChunkReaderWithFilterImpl extends SeriesChunkReader { - private Filter filter; - private DigestFilterVisitor digestFilterVisitor; - private TimeValuePairFilterVisitor timeValuePairFilterVisitor; + private Filter filter; + private DigestFilterVisitor digestFilterVisitor; + private TimeValuePairFilterVisitor timeValuePairFilterVisitor; - public SeriesChunkReaderWithFilterImpl(InputStream seriesChunkInputStream, TSDataType dataType, - CompressionTypeName compressionTypeName, Filter filter) { - super(seriesChunkInputStream, dataType, compressionTypeName); - this.filter = filter; - this.timeValuePairFilterVisitor = new TimeValuePairFilterVisitorImpl(); - this.digestFilterVisitor = new DigestFilterVisitor(); - } + public SeriesChunkReaderWithFilterImpl(InputStream seriesChunkInputStream, TSDataType dataType, + CompressionTypeName compressionTypeName, Filter filter) { + super(seriesChunkInputStream, dataType, compressionTypeName); + this.filter = filter; + this.timeValuePairFilterVisitor = new TimeValuePairFilterVisitorImpl(); + this.digestFilterVisitor = new DigestFilterVisitor(); + } - @Override - public boolean pageSatisfied(PageHeader pageHeader) { - if (pageHeader.data_page_header.max_timestamp < getMaxTombstoneTime()) - return false; - DigestForFilter timeDigest = new DigestForFilter(pageHeader.data_page_header.getMin_timestamp(), - pageHeader.data_page_header.getMax_timestamp()); - //TODO: Using ByteBuffer as min/max is best - DigestForFilter valueDigest = new DigestForFilter( - pageHeader.data_page_header.digest.getStatistics().get(StatisticConstant.MIN_VALUE), - pageHeader.data_page_header.digest.getStatistics().get(StatisticConstant.MAX_VALUE), - dataType); - return digestFilterVisitor.satisfy(timeDigest, valueDigest, filter); - } + @Override + public boolean pageSatisfied(PageHeader pageHeader) { + if (pageHeader.data_page_header.max_timestamp < getMaxTombstoneTime()) + return false; + DigestForFilter timeDigest = new DigestForFilter(pageHeader.data_page_header.getMin_timestamp(), + pageHeader.data_page_header.getMax_timestamp()); + // TODO: Using ByteBuffer as min/max is best + DigestForFilter valueDigest = new DigestForFilter( + pageHeader.data_page_header.digest.getStatistics().get(StatisticConstant.MIN_VALUE), + pageHeader.data_page_header.digest.getStatistics().get(StatisticConstant.MAX_VALUE), + dataType); + return digestFilterVisitor.satisfy(timeDigest, valueDigest, filter); + } - @Override - public boolean timeValuePairSatisfied(TimeValuePair timeValuePair) { - if (timeValuePair.getTimestamp() < getMaxTombstoneTime()) - return false; - return timeValuePairFilterVisitor.satisfy(timeValuePair, filter); - } + @Override + public boolean timeValuePairSatisfied(TimeValuePair timeValuePair) { + if (timeValuePair.getTimestamp() < getMaxTombstoneTime()) + return false; + return 
timeValuePairFilterVisitor.satisfy(timeValuePair, filter); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReaderWithoutFilterImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReaderWithoutFilterImpl.java index 6862d992..01ffe969 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReaderWithoutFilterImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesChunkReaderWithoutFilterImpl.java @@ -4,7 +4,6 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; import cn.edu.tsinghua.tsfile.format.PageHeader; import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TimeValuePair; - import java.io.InputStream; /** @@ -12,17 +11,18 @@ */ public class SeriesChunkReaderWithoutFilterImpl extends SeriesChunkReader { - public SeriesChunkReaderWithoutFilterImpl(InputStream seriesChunkInputStream, TSDataType dataType, CompressionTypeName compressionTypeName) { - super(seriesChunkInputStream, dataType, compressionTypeName); - } + public SeriesChunkReaderWithoutFilterImpl(InputStream seriesChunkInputStream, TSDataType dataType, + CompressionTypeName compressionTypeName) { + super(seriesChunkInputStream, dataType, compressionTypeName); + } - @Override - public boolean pageSatisfied(PageHeader pageHeader) { - return pageHeader.data_page_header.max_timestamp > getMaxTombstoneTime(); - } + @Override + public boolean pageSatisfied(PageHeader pageHeader) { + return pageHeader.data_page_header.max_timestamp > getMaxTombstoneTime(); + } - @Override - public boolean timeValuePairSatisfied(TimeValuePair timeValuePair) { - return timeValuePair.getTimestamp() > getMaxTombstoneTime(); - } + @Override + public boolean timeValuePairSatisfied(TimeValuePair timeValuePair) { + return timeValuePair.getTimestamp() > getMaxTombstoneTime(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFile.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFile.java index 713d9a0d..d12a73fb 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFile.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFile.java @@ -8,7 +8,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.controller.SeriesChunkLoaderImpl; import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TimeValuePair; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.SeriesReader; - import java.io.IOException; import java.util.List; @@ -17,82 +16,90 @@ */ public abstract class SeriesReaderFromSingleFile implements SeriesReader { - protected SeriesChunkLoader seriesChunkLoader; - protected List encodedSeriesChunkDescriptorList; + protected SeriesChunkLoader seriesChunkLoader; + protected List encodedSeriesChunkDescriptorList; - protected SeriesChunkReader seriesChunkReader; - protected boolean seriesChunkReaderInitialized; - protected int currentReadSeriesChunkIndex; + protected SeriesChunkReader seriesChunkReader; + protected boolean seriesChunkReaderInitialized; + protected int currentReadSeriesChunkIndex; - protected ITsRandomAccessFileReader randomAccessFileReader; + protected ITsRandomAccessFileReader randomAccessFileReader; - public SeriesReaderFromSingleFile(ITsRandomAccessFileReader randomAccessFileReader, Path path) throws IOException { - this.randomAccessFileReader = randomAccessFileReader; - 
this.seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); - this.encodedSeriesChunkDescriptorList = new MetadataQuerierByFileImpl(randomAccessFileReader).getSeriesChunkDescriptorList(path); - this.currentReadSeriesChunkIndex = -1; - this.seriesChunkReaderInitialized = false; - } + public SeriesReaderFromSingleFile(ITsRandomAccessFileReader randomAccessFileReader, Path path) + throws IOException { + this.randomAccessFileReader = randomAccessFileReader; + this.seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); + this.encodedSeriesChunkDescriptorList = + new MetadataQuerierByFileImpl(randomAccessFileReader).getSeriesChunkDescriptorList(path); + this.currentReadSeriesChunkIndex = -1; + this.seriesChunkReaderInitialized = false; + } - public SeriesReaderFromSingleFile(ITsRandomAccessFileReader randomAccessFileReader, - SeriesChunkLoader seriesChunkLoader, List encodedSeriesChunkDescriptorList) { - this(seriesChunkLoader, encodedSeriesChunkDescriptorList); - this.randomAccessFileReader = randomAccessFileReader; - } + public SeriesReaderFromSingleFile(ITsRandomAccessFileReader randomAccessFileReader, + SeriesChunkLoader seriesChunkLoader, + List encodedSeriesChunkDescriptorList) { + this(seriesChunkLoader, encodedSeriesChunkDescriptorList); + this.randomAccessFileReader = randomAccessFileReader; + } - /** - * Using this constructor cannot close corresponding FileStream - * @param seriesChunkLoader - * @param encodedSeriesChunkDescriptorList - */ - public SeriesReaderFromSingleFile(SeriesChunkLoader seriesChunkLoader, List encodedSeriesChunkDescriptorList) { - this.seriesChunkLoader = seriesChunkLoader; - this.encodedSeriesChunkDescriptorList = encodedSeriesChunkDescriptorList; - this.currentReadSeriesChunkIndex = -1; - this.seriesChunkReaderInitialized = false; - } + /** + * Using this constructor cannot close corresponding FileStream + * + * @param seriesChunkLoader + * @param encodedSeriesChunkDescriptorList + */ + public SeriesReaderFromSingleFile(SeriesChunkLoader seriesChunkLoader, + List encodedSeriesChunkDescriptorList) { + this.seriesChunkLoader = seriesChunkLoader; + this.encodedSeriesChunkDescriptorList = encodedSeriesChunkDescriptorList; + this.currentReadSeriesChunkIndex = -1; + this.seriesChunkReaderInitialized = false; + } - @Override - public boolean hasNext() throws IOException { - if (seriesChunkReaderInitialized && seriesChunkReader.hasNext()) { - return true; - } - while ((currentReadSeriesChunkIndex + 1) < encodedSeriesChunkDescriptorList.size()) { - if (!seriesChunkReaderInitialized) { - EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor = encodedSeriesChunkDescriptorList.get(++currentReadSeriesChunkIndex); - if (seriesChunkSatisfied(encodedSeriesChunkDescriptor)) { - initSeriesChunkReader(encodedSeriesChunkDescriptor); - seriesChunkReaderInitialized = true; - } else { - continue; - } - } - if (seriesChunkReader.hasNext()) { - return true; - } else { - seriesChunkReaderInitialized = false; - } + @Override + public boolean hasNext() throws IOException { + if (seriesChunkReaderInitialized && seriesChunkReader.hasNext()) { + return true; + } + while ((currentReadSeriesChunkIndex + 1) < encodedSeriesChunkDescriptorList.size()) { + if (!seriesChunkReaderInitialized) { + EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor = + encodedSeriesChunkDescriptorList.get(++currentReadSeriesChunkIndex); + if (seriesChunkSatisfied(encodedSeriesChunkDescriptor)) { + initSeriesChunkReader(encodedSeriesChunkDescriptor); + 
seriesChunkReaderInitialized = true; + } else { + continue; } - return false; + } + if (seriesChunkReader.hasNext()) { + return true; + } else { + seriesChunkReaderInitialized = false; + } } + return false; + } - @Override - public TimeValuePair next() throws IOException { - return seriesChunkReader.next(); - } + @Override + public TimeValuePair next() throws IOException { + return seriesChunkReader.next(); + } - @Override - public void skipCurrentTimeValuePair() throws IOException { - next(); - } + @Override + public void skipCurrentTimeValuePair() throws IOException { + next(); + } - protected abstract void initSeriesChunkReader(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) throws IOException; + protected abstract void initSeriesChunkReader( + EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) throws IOException; - protected abstract boolean seriesChunkSatisfied(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor); + protected abstract boolean seriesChunkSatisfied( + EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor); - public void close() throws IOException { - if (randomAccessFileReader != null) { - randomAccessFileReader.close(); - } + public void close() throws IOException { + if (randomAccessFileReader != null) { + randomAccessFileReader.close(); } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFileByTimestampImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFileByTimestampImpl.java index 474c2b0a..c8d3a19b 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFileByTimestampImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFileByTimestampImpl.java @@ -8,120 +8,128 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TimeValuePair; import cn.edu.tsinghua.tsfile.timeseries.readV2.datatype.TsPrimitiveType; import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.SeriesReaderByTimeStamp; - import java.io.IOException; import java.util.List; /** * Created by zhangjinrui on 2017/12/26. 
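// Hedged usage sketch for the by-timestamp readers in this patch: getValueInTimestamp(t)
// returns the value stored at exactly t, or null if no point has that timestamp.
// Because the reader only moves forward, timestamps should be queried in non-decreasing
// order. fileReader and path are placeholders for an ITsRandomAccessFileReader and a
// series Path constructed elsewhere.
SeriesReaderByTimeStamp byTimestampReader =
    new SeriesReaderFromSingleFileByTimestampImpl(fileReader, path);
for (long t : new long[] {100L, 200L, 300L}) {
  TsPrimitiveType value = byTimestampReader.getValueInTimestamp(t); // null if t is absent
  System.out.println(t + " " + value);
}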
*/ -public class SeriesReaderFromSingleFileByTimestampImpl extends SeriesReaderFromSingleFile implements SeriesReaderByTimeStamp { +public class SeriesReaderFromSingleFileByTimestampImpl extends SeriesReaderFromSingleFile + implements SeriesReaderByTimeStamp { - private long currentTimestamp; - private boolean hasCacheLastTimeValuePair; - private TimeValuePair cachedTimeValuePair; - private int nextSeriesChunkIndex; + private long currentTimestamp; + private boolean hasCacheLastTimeValuePair; + private TimeValuePair cachedTimeValuePair; + private int nextSeriesChunkIndex; - public SeriesReaderFromSingleFileByTimestampImpl(SeriesChunkLoader seriesChunkLoader, List encodedSeriesChunkDescriptorList) { - super(seriesChunkLoader, encodedSeriesChunkDescriptorList); - nextSeriesChunkIndex = 0; - currentTimestamp = Long.MIN_VALUE; - } + public SeriesReaderFromSingleFileByTimestampImpl(SeriesChunkLoader seriesChunkLoader, + List encodedSeriesChunkDescriptorList) { + super(seriesChunkLoader, encodedSeriesChunkDescriptorList); + nextSeriesChunkIndex = 0; + currentTimestamp = Long.MIN_VALUE; + } - public SeriesReaderFromSingleFileByTimestampImpl(ITsRandomAccessFileReader randomAccessFileReader, Path path) throws IOException { - super(randomAccessFileReader, path); - currentTimestamp = Long.MIN_VALUE; - } + public SeriesReaderFromSingleFileByTimestampImpl(ITsRandomAccessFileReader randomAccessFileReader, + Path path) throws IOException { + super(randomAccessFileReader, path); + currentTimestamp = Long.MIN_VALUE; + } - public SeriesReaderFromSingleFileByTimestampImpl(ITsRandomAccessFileReader randomAccessFileReader, - SeriesChunkLoader seriesChunkLoader, List encodedSeriesChunkDescriptorList) { - super(randomAccessFileReader, seriesChunkLoader, encodedSeriesChunkDescriptorList); - currentTimestamp = Long.MIN_VALUE; - } + public SeriesReaderFromSingleFileByTimestampImpl(ITsRandomAccessFileReader randomAccessFileReader, + SeriesChunkLoader seriesChunkLoader, + List encodedSeriesChunkDescriptorList) { + super(randomAccessFileReader, seriesChunkLoader, encodedSeriesChunkDescriptorList); + currentTimestamp = Long.MIN_VALUE; + } - @Override - public boolean hasNext() throws IOException { - if (hasCacheLastTimeValuePair && cachedTimeValuePair.getTimestamp() >= currentTimestamp) { - return true; - } - if (seriesChunkReaderInitialized) { - ((SeriesChunkReaderByTimestampImpl) seriesChunkReader).setCurrentTimestamp(currentTimestamp); - if(seriesChunkReader.hasNext()){ - return true; - } - } - while (nextSeriesChunkIndex < encodedSeriesChunkDescriptorList.size()) { - if (!seriesChunkReaderInitialized) { - EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor = encodedSeriesChunkDescriptorList.get(nextSeriesChunkIndex); - //maxTime >= currentTime - if (seriesChunkSatisfied(encodedSeriesChunkDescriptor)) { - initSeriesChunkReader(encodedSeriesChunkDescriptor); - ((SeriesChunkReaderByTimestampImpl) seriesChunkReader).setCurrentTimestamp(currentTimestamp); - seriesChunkReaderInitialized = true; - nextSeriesChunkIndex++; - } else { - //maxTime < currentTime, skip this seriesChunk - continue; - } - } - if (seriesChunkReader.hasNext()) { - return true; - } else { - seriesChunkReaderInitialized = false; - } + @Override + public boolean hasNext() throws IOException { + if (hasCacheLastTimeValuePair && cachedTimeValuePair.getTimestamp() >= currentTimestamp) { + return true; + } + if (seriesChunkReaderInitialized) { + ((SeriesChunkReaderByTimestampImpl) seriesChunkReader).setCurrentTimestamp(currentTimestamp); + if 
(seriesChunkReader.hasNext()) { + return true; + } + } + while (nextSeriesChunkIndex < encodedSeriesChunkDescriptorList.size()) { + if (!seriesChunkReaderInitialized) { + EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor = + encodedSeriesChunkDescriptorList.get(nextSeriesChunkIndex); + // maxTime >= currentTime + if (seriesChunkSatisfied(encodedSeriesChunkDescriptor)) { + initSeriesChunkReader(encodedSeriesChunkDescriptor); + ((SeriesChunkReaderByTimestampImpl) seriesChunkReader) + .setCurrentTimestamp(currentTimestamp); + seriesChunkReaderInitialized = true; + nextSeriesChunkIndex++; + } else { + // maxTime < currentTime, skip this seriesChunk + continue; } - return false; + } + if (seriesChunkReader.hasNext()) { + return true; + } else { + seriesChunkReaderInitialized = false; + } } + return false; + } - @Override - public TimeValuePair next() throws IOException { - if (hasCacheLastTimeValuePair) { - hasCacheLastTimeValuePair = false; - return cachedTimeValuePair; - } - return seriesChunkReader.next(); + @Override + public TimeValuePair next() throws IOException { + if (hasCacheLastTimeValuePair) { + hasCacheLastTimeValuePair = false; + return cachedTimeValuePair; } + return seriesChunkReader.next(); + } - /** - * @param timestamp - * @return If there is no TimeValuePair whose timestamp equals to given timestamp, then return null. - * @throws IOException - */ - @Override - public TsPrimitiveType getValueInTimestamp(long timestamp) throws IOException { - this.currentTimestamp = timestamp; - if (hasCacheLastTimeValuePair) { - if (cachedTimeValuePair.getTimestamp() == timestamp) { - hasCacheLastTimeValuePair = false; - return cachedTimeValuePair.getValue(); - } else if (cachedTimeValuePair.getTimestamp() > timestamp) { - return null; - } - } - if(hasNext()){ - cachedTimeValuePair = next(); - if (cachedTimeValuePair.getTimestamp() == timestamp) { - return cachedTimeValuePair.getValue(); - } else if (cachedTimeValuePair.getTimestamp() > timestamp) { - hasCacheLastTimeValuePair = true; - return null; - } - } + /** + * @param timestamp + * @return If there is no TimeValuePair whose timestamp equals to given timestamp, then return + * null. 
+ * @throws IOException + */ + @Override + public TsPrimitiveType getValueInTimestamp(long timestamp) throws IOException { + this.currentTimestamp = timestamp; + if (hasCacheLastTimeValuePair) { + if (cachedTimeValuePair.getTimestamp() == timestamp) { + hasCacheLastTimeValuePair = false; + return cachedTimeValuePair.getValue(); + } else if (cachedTimeValuePair.getTimestamp() > timestamp) { return null; + } } - - @Override - protected void initSeriesChunkReader(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) throws IOException { - SeriesChunk memSeriesChunk = seriesChunkLoader.getMemSeriesChunk(encodedSeriesChunkDescriptor); - this.seriesChunkReader = new SeriesChunkReaderByTimestampImpl(memSeriesChunk.getSeriesChunkBodyStream() - , encodedSeriesChunkDescriptor.getDataType(), - encodedSeriesChunkDescriptor.getCompressionTypeName()); - this.seriesChunkReader.setMaxTombstoneTime(encodedSeriesChunkDescriptor.getMaxTombstoneTime()); + if (hasNext()) { + cachedTimeValuePair = next(); + if (cachedTimeValuePair.getTimestamp() == timestamp) { + return cachedTimeValuePair.getValue(); + } else if (cachedTimeValuePair.getTimestamp() > timestamp) { + hasCacheLastTimeValuePair = true; + return null; + } } + return null; + } - @Override - protected boolean seriesChunkSatisfied(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) { - long maxTimestamp = encodedSeriesChunkDescriptor.getMaxTimestamp(); - return maxTimestamp >= currentTimestamp; - } + @Override + protected void initSeriesChunkReader(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) + throws IOException { + SeriesChunk memSeriesChunk = seriesChunkLoader.getMemSeriesChunk(encodedSeriesChunkDescriptor); + this.seriesChunkReader = new SeriesChunkReaderByTimestampImpl( + memSeriesChunk.getSeriesChunkBodyStream(), encodedSeriesChunkDescriptor.getDataType(), + encodedSeriesChunkDescriptor.getCompressionTypeName()); + this.seriesChunkReader.setMaxTombstoneTime(encodedSeriesChunkDescriptor.getMaxTombstoneTime()); + } + + @Override + protected boolean seriesChunkSatisfied( + EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) { + long maxTimestamp = encodedSeriesChunkDescriptor.getMaxTimestamp(); + return maxTimestamp >= currentTimestamp; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFileWithFilterImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFileWithFilterImpl.java index 78ed6956..64503fbf 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFileWithFilterImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFileWithFilterImpl.java @@ -9,7 +9,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.common.EncodedSeriesChunkDescriptor; import cn.edu.tsinghua.tsfile.timeseries.readV2.common.SeriesChunk; import cn.edu.tsinghua.tsfile.timeseries.readV2.controller.SeriesChunkLoader; - import java.io.IOException; import java.util.List; @@ -18,48 +17,53 @@ */ public class SeriesReaderFromSingleFileWithFilterImpl extends SeriesReaderFromSingleFile { - private Filter filter; - private DigestFilterVisitor digestFilterVisitor; + private Filter filter; + private DigestFilterVisitor digestFilterVisitor; - public SeriesReaderFromSingleFileWithFilterImpl(SeriesChunkLoader seriesChunkLoader - , List encodedSeriesChunkDescriptorList, Filter filter) { - super(seriesChunkLoader, encodedSeriesChunkDescriptorList); - this.filter 
= filter; - this.digestFilterVisitor = new DigestFilterVisitor(); - } + public SeriesReaderFromSingleFileWithFilterImpl(SeriesChunkLoader seriesChunkLoader, + List encodedSeriesChunkDescriptorList, Filter filter) { + super(seriesChunkLoader, encodedSeriesChunkDescriptorList); + this.filter = filter; + this.digestFilterVisitor = new DigestFilterVisitor(); + } - public SeriesReaderFromSingleFileWithFilterImpl(ITsRandomAccessFileReader randomAccessFileReader, SeriesChunkLoader seriesChunkLoader, - List encodedSeriesChunkDescriptorList, Filter filter) { - super(randomAccessFileReader, seriesChunkLoader, encodedSeriesChunkDescriptorList); - this.filter = filter; - this.digestFilterVisitor = new DigestFilterVisitor(); - } + public SeriesReaderFromSingleFileWithFilterImpl(ITsRandomAccessFileReader randomAccessFileReader, + SeriesChunkLoader seriesChunkLoader, + List encodedSeriesChunkDescriptorList, Filter filter) { + super(randomAccessFileReader, seriesChunkLoader, encodedSeriesChunkDescriptorList); + this.filter = filter; + this.digestFilterVisitor = new DigestFilterVisitor(); + } - public SeriesReaderFromSingleFileWithFilterImpl(ITsRandomAccessFileReader randomAccessFileReader - , Path path, Filter filter) throws IOException { - super(randomAccessFileReader, path); - this.filter = filter; - this.digestFilterVisitor = new DigestFilterVisitor(); - } + public SeriesReaderFromSingleFileWithFilterImpl(ITsRandomAccessFileReader randomAccessFileReader, + Path path, Filter filter) throws IOException { + super(randomAccessFileReader, path); + this.filter = filter; + this.digestFilterVisitor = new DigestFilterVisitor(); + } - protected void initSeriesChunkReader(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) throws IOException { - SeriesChunk memSeriesChunk = seriesChunkLoader.getMemSeriesChunk(encodedSeriesChunkDescriptor); - this.seriesChunkReader = new SeriesChunkReaderWithFilterImpl(memSeriesChunk.getSeriesChunkBodyStream(), - memSeriesChunk.getEncodedSeriesChunkDescriptor().getDataType(), - memSeriesChunk.getEncodedSeriesChunkDescriptor().getCompressionTypeName(), - filter); - this.seriesChunkReader.setMaxTombstoneTime(encodedSeriesChunkDescriptor.getMaxTombstoneTime()); - } + protected void initSeriesChunkReader(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) + throws IOException { + SeriesChunk memSeriesChunk = seriesChunkLoader.getMemSeriesChunk(encodedSeriesChunkDescriptor); + this.seriesChunkReader = + new SeriesChunkReaderWithFilterImpl(memSeriesChunk.getSeriesChunkBodyStream(), + memSeriesChunk.getEncodedSeriesChunkDescriptor().getDataType(), + memSeriesChunk.getEncodedSeriesChunkDescriptor().getCompressionTypeName(), filter); + this.seriesChunkReader.setMaxTombstoneTime(encodedSeriesChunkDescriptor.getMaxTombstoneTime()); + } - @Override - protected boolean seriesChunkSatisfied(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) { - DigestForFilter timeDigest = new DigestForFilter(encodedSeriesChunkDescriptor.getMinTimestamp(), - encodedSeriesChunkDescriptor.getMaxTimestamp()); - //TODO: Using ByteBuffer as min/max is best - DigestForFilter valueDigest = new DigestForFilter( - encodedSeriesChunkDescriptor.getValueDigest().getStatistics().get(StatisticConstant.MIN_VALUE), - encodedSeriesChunkDescriptor.getValueDigest().getStatistics().get(StatisticConstant.MAX_VALUE), - encodedSeriesChunkDescriptor.getDataType()); - return digestFilterVisitor.satisfy(timeDigest, valueDigest, filter); - } + @Override + protected boolean seriesChunkSatisfied( + 
EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) { + DigestForFilter timeDigest = new DigestForFilter(encodedSeriesChunkDescriptor.getMinTimestamp(), + encodedSeriesChunkDescriptor.getMaxTimestamp()); + // TODO: Using ByteBuffer as min/max is best + DigestForFilter valueDigest = new DigestForFilter( + encodedSeriesChunkDescriptor.getValueDigest().getStatistics() + .get(StatisticConstant.MIN_VALUE), + encodedSeriesChunkDescriptor.getValueDigest().getStatistics() + .get(StatisticConstant.MAX_VALUE), + encodedSeriesChunkDescriptor.getDataType()); + return digestFilterVisitor.satisfy(timeDigest, valueDigest, filter); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFileWithoutFilterImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFileWithoutFilterImpl.java index 0874415b..2a3fa008 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFileWithoutFilterImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/readV2/reader/impl/SeriesReaderFromSingleFileWithoutFilterImpl.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.common.EncodedSeriesChunkDescriptor; import cn.edu.tsinghua.tsfile.timeseries.readV2.common.SeriesChunk; import cn.edu.tsinghua.tsfile.timeseries.readV2.controller.SeriesChunkLoader; - import java.io.IOException; import java.util.List; @@ -14,29 +13,35 @@ */ public class SeriesReaderFromSingleFileWithoutFilterImpl extends SeriesReaderFromSingleFile { - public SeriesReaderFromSingleFileWithoutFilterImpl(SeriesChunkLoader seriesChunkLoader, List encodedSeriesChunkDescriptorList) { - super(seriesChunkLoader, encodedSeriesChunkDescriptorList); - } - - public SeriesReaderFromSingleFileWithoutFilterImpl(ITsRandomAccessFileReader randomAccessFileReader, Path path) throws IOException { - super(randomAccessFileReader, path); - } - - public SeriesReaderFromSingleFileWithoutFilterImpl(ITsRandomAccessFileReader randomAccessFileReader, - SeriesChunkLoader seriesChunkLoader, List encodedSeriesChunkDescriptorList) { - super(randomAccessFileReader, seriesChunkLoader, encodedSeriesChunkDescriptorList); - } - - protected void initSeriesChunkReader(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) throws IOException { - SeriesChunk memSeriesChunk = seriesChunkLoader.getMemSeriesChunk(encodedSeriesChunkDescriptor); - this.seriesChunkReader = new SeriesChunkReaderWithoutFilterImpl(memSeriesChunk.getSeriesChunkBodyStream(), - memSeriesChunk.getEncodedSeriesChunkDescriptor().getDataType(), - memSeriesChunk.getEncodedSeriesChunkDescriptor().getCompressionTypeName()); - this.seriesChunkReader.setMaxTombstoneTime(encodedSeriesChunkDescriptor.getMaxTombstoneTime()); - } - - @Override - protected boolean seriesChunkSatisfied(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) { - return true; - } + public SeriesReaderFromSingleFileWithoutFilterImpl(SeriesChunkLoader seriesChunkLoader, + List encodedSeriesChunkDescriptorList) { + super(seriesChunkLoader, encodedSeriesChunkDescriptorList); + } + + public SeriesReaderFromSingleFileWithoutFilterImpl( + ITsRandomAccessFileReader randomAccessFileReader, Path path) throws IOException { + super(randomAccessFileReader, path); + } + + public SeriesReaderFromSingleFileWithoutFilterImpl( + ITsRandomAccessFileReader randomAccessFileReader, SeriesChunkLoader seriesChunkLoader, + List encodedSeriesChunkDescriptorList) { + super(randomAccessFileReader, 
seriesChunkLoader, encodedSeriesChunkDescriptorList); + } + + protected void initSeriesChunkReader(EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) + throws IOException { + SeriesChunk memSeriesChunk = seriesChunkLoader.getMemSeriesChunk(encodedSeriesChunkDescriptor); + this.seriesChunkReader = + new SeriesChunkReaderWithoutFilterImpl(memSeriesChunk.getSeriesChunkBodyStream(), + memSeriesChunk.getEncodedSeriesChunkDescriptor().getDataType(), + memSeriesChunk.getEncodedSeriesChunkDescriptor().getCompressionTypeName()); + this.seriesChunkReader.setMaxTombstoneTime(encodedSeriesChunkDescriptor.getMaxTombstoneTime()); + } + + @Override + protected boolean seriesChunkSatisfied( + EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor) { + return true; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/FileUtils.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/FileUtils.java index cdce2ebe..66f9bd70 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/FileUtils.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/FileUtils.java @@ -3,58 +3,57 @@ import java.io.File; /** - * FileUtils is just used for return file attribute like file size, and contains - * some measurement conversion among B, KB, MB etc. + * FileUtils is just used for return file attribute like file size, and contains some measurement + * conversion among B, KB, MB etc. * * @author kangrong */ public class FileUtils { - public static double getLocalFileByte(String filePath, Unit unit) { - File f = new File(filePath); - return getLocalFileByte(f, unit); - } - - public static double getLocalFileByte(File file, Unit unit) { - return format(transformUnit(file.length(), unit), 2); - } - - /** - * transform the byte value number to another unit. - * - * @param value - a number represented Byte which to be transformed - * @param unit - the target unit to be transformed - * @return - value number in unit of given parameter - */ - public static double transformUnit(double value, Unit unit) { - return value / Math.pow(1024, unit.ordinal()); - } - - /** - * transform the value number from other unit to Byte unit. - * - * @param value - a number to be transformed - * @param unit - the source unit to be transformed, maybe in unit of KB, MB, - * GB - * @return - value number in unit of Byte - */ - public static double transformUnitToByte(double value, Unit unit) { - return value * Math.pow(1024, unit.ordinal()); - } - - /** - * reserves some decimal for given double value - * - * @param num - given double value - * @param round - reserved decimal number - * @return - double value in given decimal number - */ - public static double format(double num, int round) { - long a = (long) (num * Math.pow(10, round)); - return ((double) a) / Math.pow(10, round); - } - - public static enum Unit { - B, KB, MB, GB, TB, PB, EB - } + public static double getLocalFileByte(String filePath, Unit unit) { + File f = new File(filePath); + return getLocalFileByte(f, unit); + } + + public static double getLocalFileByte(File file, Unit unit) { + return format(transformUnit(file.length(), unit), 2); + } + + /** + * transform the byte value number to another unit. 
+ * + * @param value - a number represented Byte which to be transformed + * @param unit - the target unit to be transformed + * @return - value number in unit of given parameter + */ + public static double transformUnit(double value, Unit unit) { + return value / Math.pow(1024, unit.ordinal()); + } + + /** + * transform the value number from other unit to Byte unit. + * + * @param value - a number to be transformed + * @param unit - the source unit to be transformed, maybe in unit of KB, MB, GB + * @return - value number in unit of Byte + */ + public static double transformUnitToByte(double value, Unit unit) { + return value * Math.pow(1024, unit.ordinal()); + } + + /** + * reserves some decimal for given double value + * + * @param num - given double value + * @param round - reserved decimal number + * @return - double value in given decimal number + */ + public static double format(double num, int round) { + long a = (long) (num * Math.pow(10, round)); + return ((double) a) / Math.pow(10, round); + } + + public static enum Unit { + B, KB, MB, GB, TB, PB, EB + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/Loader.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/Loader.java index ce16924c..e7239e2e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/Loader.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/Loader.java @@ -7,28 +7,28 @@ import java.util.Set; public class Loader { - public static Set getResources(String resource, ClassLoader classLoader) throws IOException{ - Set urlSet = new HashSet<>(); - Enumeration urlEnum = classLoader.getResources(resource); - while(urlEnum.hasMoreElements()){ - urlSet.add(urlEnum.nextElement()); - } - return urlSet; - } - - public static URL getResource(String resource, ClassLoader classLoader){ - return classLoader.getResource(resource); - } - - public static ClassLoader getClassLoaderOfObject(Object o){ - if(o == null){ - throw new NullPointerException("Input object cannot be null"); - } - return getClassLoaderOfClass(o.getClass()); - } - - public static ClassLoader getClassLoaderOfClass(final Class clazz){ - ClassLoader classLoader = clazz.getClassLoader(); - return classLoader == null ? ClassLoader.getSystemClassLoader() : classLoader; - } + public static Set getResources(String resource, ClassLoader classLoader) throws IOException { + Set urlSet = new HashSet<>(); + Enumeration urlEnum = classLoader.getResources(resource); + while (urlEnum.hasMoreElements()) { + urlSet.add(urlEnum.nextElement()); + } + return urlSet; + } + + public static URL getResource(String resource, ClassLoader classLoader) { + return classLoader.getResource(resource); + } + + public static ClassLoader getClassLoaderOfObject(Object o) { + if (o == null) { + throw new NullPointerException("Input object cannot be null"); + } + return getClassLoaderOfClass(o.getClass()); + } + + public static ClassLoader getClassLoaderOfClass(final Class clazz) { + ClassLoader classLoader = clazz.getClassLoader(); + return classLoader == null ? 
ClassLoader.getSystemClassLoader() : classLoader; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/RecordUtils.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/RecordUtils.java index e69a9702..1cc95d4e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/RecordUtils.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/RecordUtils.java @@ -15,83 +15,77 @@ * @author kangrong */ public class RecordUtils { - private static final Logger LOG = LoggerFactory.getLogger(RecordUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(RecordUtils.class); - /** - * support input format: {@code ,,[,,]}.CSV line - * is separated by "," - * - * @param str - input string - * @param schema - constructed file schema - * @return TSRecord constructed from str - */ - public static TSRecord parseSimpleTupleRecord(String str, FileSchema schema) { - String[] items = str.split(JsonFormatConstant.TSRECORD_SEPARATOR); - String deltaObjectId = items[0].trim(); - long timestamp; + /** + * support input format: {@code ,,[,,]}.CSV line + * is separated by "," + * + * @param str - input string + * @param schema - constructed file schema + * @return TSRecord constructed from str + */ + public static TSRecord parseSimpleTupleRecord(String str, FileSchema schema) { + String[] items = str.split(JsonFormatConstant.TSRECORD_SEPARATOR); + String deltaObjectId = items[0].trim(); + long timestamp; + try { + timestamp = Long.valueOf(items[1].trim()); + } catch (NumberFormatException e) { + LOG.warn("given timestamp is illegal:{}", str); + // return a TSRecord without any data points + return new TSRecord(-1, deltaObjectId); + } + TSRecord ret = new TSRecord(timestamp, deltaObjectId); + String measurementId; + TSDataType type; + for (int i = 2; i < items.length - 1; i += 2) { + measurementId = items[i].trim(); + type = schema.getMeasurementDataTypes(measurementId); + if (type == null) { + LOG.warn("measurementId:{},type not found, pass", measurementId); + continue; + } + String value = items[i + 1].trim(); + if (!"".equals(value)) { try { - timestamp = Long.valueOf(items[1].trim()); + switch (type) { + case INT32: + ret.addTuple(new IntDataPoint(measurementId, Integer.valueOf(value))); + break; + case INT64: + ret.addTuple(new LongDataPoint(measurementId, Long.valueOf(value))); + break; + case FLOAT: + ret.addTuple(new FloatDataPoint(measurementId, Float.valueOf(value))); + break; + case DOUBLE: + ret.addTuple(new DoubleDataPoint(measurementId, Double.valueOf(value))); + break; + case ENUMS: + ret.addTuple(new EnumDataPoint(measurementId, + (schema.getMeasurementDescriptor(measurementId)).parseEnumValue(value))); + break; + case BOOLEAN: + ret.addTuple(new BooleanDataPoint(measurementId, Boolean.valueOf(value))); + break; + case TEXT: + ret.addTuple(new StringDataPoint(measurementId, Binary.valueOf(items[i + 1]))); + break; + // BIGDECIMAL is annotated because no encoder supports this type. 
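// Hedged sketch for the FileUtils helpers shown a little earlier in this patch;
// the file path is a placeholder. Unit ordinals drive the power-of-1024 conversion.
double sizeInMb = FileUtils.getLocalFileByte("/tmp/example.tsfile", FileUtils.Unit.MB);
double bytes = FileUtils.transformUnitToByte(1.5, FileUtils.Unit.KB); // 1.5 KB = 1536.0 bytes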
+ // case BIGDECIMAL: + // ret.addTuple(new BigDecimalDataPoint(measurementId, new BigDecimal( + // items[i + 1]))); + // break; + default: + LOG.warn("unsupported data type:{}", type); + break; + } } catch (NumberFormatException e) { - LOG.warn("given timestamp is illegal:{}", str); - // return a TSRecord without any data points - return new TSRecord(-1, deltaObjectId); - } - TSRecord ret = new TSRecord(timestamp, deltaObjectId); - String measurementId; - TSDataType type; - for (int i = 2; i < items.length - 1; i += 2) { - measurementId = items[i].trim(); - type = schema.getMeasurementDataTypes(measurementId); - if (type == null) { - LOG.warn("measurementId:{},type not found, pass", measurementId); - continue; - } - String value = items[i + 1].trim(); - if (!"".equals(value)) { - try { - switch (type) { - case INT32: - ret.addTuple(new IntDataPoint(measurementId, Integer - .valueOf(value))); - break; - case INT64: - ret.addTuple(new LongDataPoint(measurementId, Long - .valueOf(value))); - break; - case FLOAT: - ret.addTuple(new FloatDataPoint(measurementId, Float - .valueOf(value))); - break; - case DOUBLE: - ret.addTuple(new DoubleDataPoint(measurementId, Double - .valueOf(value))); - break; - case ENUMS: - ret.addTuple(new EnumDataPoint(measurementId, (schema - .getMeasurementDescriptor(measurementId)).parseEnumValue(value))); - break; - case BOOLEAN: - ret.addTuple(new BooleanDataPoint(measurementId, Boolean - .valueOf(value))); - break; - case TEXT: - ret.addTuple(new StringDataPoint(measurementId, Binary - .valueOf(items[i + 1]))); - break; - // BIGDECIMAL is annotated because no encoder supports this type. - // case BIGDECIMAL: - // ret.addTuple(new BigDecimalDataPoint(measurementId, new BigDecimal( - // items[i + 1]))); - // break; - default: - LOG.warn("unsupported data type:{}", type); - break; - } - } catch (NumberFormatException e) { - LOG.warn("parsing measurement meets error, omit it", e.getMessage()); - } - } + LOG.warn("parsing measurement meets error, omit it", e.getMessage()); } - return ret; + } } + return ret; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/StringContainer.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/StringContainer.java index 403854e6..bd331f7f 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/StringContainer.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/StringContainer.java @@ -3,340 +3,335 @@ import java.util.ArrayList; /** - * this class is used to contact String effectively.It contains a StringBuider - * and initialize it until {@code toString} is called. - * Note:it's not thread safety + * this class is used to contact String effectively.It contains a StringBuider and initialize it + * until {@code toString} is called. 
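// Hedged sketch for RecordUtils.parseSimpleTupleRecord shown above: one CSV tuple in the
// documented "deltaObject,timestamp,measurement,value" layout becomes a TSRecord.
// The FileSchema is assumed to be built elsewhere and to declare measurement "s1" as INT32.
TSRecord parseExample(FileSchema schema) {
  return RecordUtils.parseSimpleTupleRecord("device_1,1500000000000,s1,42", schema);
}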
Note:it's not thread safety * * @author kangrong */ public class StringContainer { - // while call toString, all substrings are jointed with joinSeparator - private final String joinSeparator; - private StringBuilder stringBuilder; - private ArrayList sequenceList; - private ArrayList reverseList; - /** - * the summation length of all string segments - */ - private int totalLength = 0; - /** - * the count of string segments - */ - private int count = 0; - private boolean isUpdated = true; - private String cache; + // while call toString, all substrings are jointed with joinSeparator + private final String joinSeparator; + private StringBuilder stringBuilder; + private ArrayList sequenceList; + private ArrayList reverseList; + /** + * the summation length of all string segments + */ + private int totalLength = 0; + /** + * the count of string segments + */ + private int count = 0; + private boolean isUpdated = true; + private String cache; - public StringContainer() { - sequenceList = new ArrayList<>(); - reverseList = new ArrayList<>(); - joinSeparator = null; - } + public StringContainer() { + sequenceList = new ArrayList<>(); + reverseList = new ArrayList<>(); + joinSeparator = null; + } - public StringContainer(String joinSeparator) { - sequenceList = new ArrayList<>(); - reverseList = new ArrayList<>(); - this.joinSeparator = joinSeparator; - } + public StringContainer(String joinSeparator) { + sequenceList = new ArrayList<>(); + reverseList = new ArrayList<>(); + this.joinSeparator = joinSeparator; + } - public StringContainer(String[] strings) { - this(); - addTail(strings); - } + public StringContainer(String[] strings) { + this(); + addTail(strings); + } - public StringContainer(String[] strings, String joinSeparator) { - this(joinSeparator); - addTail(strings); - } + public StringContainer(String[] strings, String joinSeparator) { + this(joinSeparator); + addTail(strings); + } - public int size() { - return count; - } + public int size() { + return count; + } - public int length() { - return totalLength; - } + public int length() { + return totalLength; + } - public ArrayList getSequenceList() { - return sequenceList; - } + public ArrayList getSequenceList() { + return sequenceList; + } - public ArrayList getReverseList() { - return reverseList; - } + public ArrayList getReverseList() { + return reverseList; + } - public StringContainer addTail(Object... objs) { - isUpdated = true; - count += objs.length; - for (int i = 0; i < objs.length; i++) { - String str = objs[i].toString(); - totalLength += str.length(); - sequenceList.add(str); - } - return this; + public StringContainer addTail(Object... objs) { + isUpdated = true; + count += objs.length; + for (int i = 0; i < objs.length; i++) { + String str = objs[i].toString(); + totalLength += str.length(); + sequenceList.add(str); } + return this; + } - /** - * add a Strings array at this container's tail.
- * strings:"a","b","c",
- * StringContainer this:["d","e","f"],
- * result:this:["d","e","f","a","b","c"],
- * - * @param strings - to be added - * @return - this object - */ - public StringContainer addTail(String... strings) { - isUpdated = true; - count += strings.length; - for (int i = 0; i < strings.length; i++) { - totalLength += strings[i].length(); - sequenceList.add(strings[i]); - } - return this; + /** + * add a Strings array at this container's tail.
+ * strings:"a","b","c",
+ * StringContainer this:["d","e","f"],
+ * result:this:["d","e","f","a","b","c"],
+ * + * @param strings - to be added + * @return - this object + */ + public StringContainer addTail(String... strings) { + isUpdated = true; + count += strings.length; + for (int i = 0; i < strings.length; i++) { + totalLength += strings[i].length(); + sequenceList.add(strings[i]); } + return this; + } - /** - * add a StringContainer at this container's tail.
- * param StringContainer:["a","b","c"],
- * this StringContainer :["d","e","f"],
- * result:this:["d","e","f","a","b","c"],
- * - * @param mContainer - to be added - * @return - this object - */ - public StringContainer addTail(StringContainer mContainer) { - isUpdated = true; - ArrayList mSeqList = mContainer.getSequenceList(); - ArrayList mRevList = mContainer.getReverseList(); - count += mRevList.size() + mSeqList.size(); - String temp; - for (int i = mRevList.size() - 1; i >= 0; i--) { - temp = mRevList.get(i); - sequenceList.add(temp); - totalLength += temp.length(); - } - for (int i = 0; i < mSeqList.size(); i++) { - temp = mSeqList.get(i); - sequenceList.add(temp); - totalLength += temp.length(); - } - return this; + /** + * add a StringContainer at this container's tail.
+ * param StringContainer:["a","b","c"],
+ * this StringContainer :["d","e","f"],
+ * result:this:["d","e","f","a","b","c"],
+ * + * @param mContainer - to be added + * @return - this object + */ + public StringContainer addTail(StringContainer mContainer) { + isUpdated = true; + ArrayList mSeqList = mContainer.getSequenceList(); + ArrayList mRevList = mContainer.getReverseList(); + count += mRevList.size() + mSeqList.size(); + String temp; + for (int i = mRevList.size() - 1; i >= 0; i--) { + temp = mRevList.get(i); + sequenceList.add(temp); + totalLength += temp.length(); } - - /** - * add a Strings array from this container's header.
- * strings:"a","b","c",
- * StringContainer this:["d","e","f"],
- * result:this:["a","b","c","d","e","f"],
- * - * @param strings - to be added - * @return - this object - */ - public StringContainer addHead(String... strings) { - isUpdated = true; - count += strings.length; - for (int i = strings.length - 1; i >= 0; i--) { - totalLength += strings[i].length(); - reverseList.add(strings[i]); - } - return this; + for (int i = 0; i < mSeqList.size(); i++) { + temp = mSeqList.get(i); + sequenceList.add(temp); + totalLength += temp.length(); } + return this; + } - /** - * add a StringContainer from this container's header.
- * StringContainer m:["a","b","c"],
- * StringContainer this:["d","e","f"],
- * result:this:["a","b","c","d","e","f"],
- * - * @param mContainer - given StringContainer to be add in head - * @return - this object - */ - public StringContainer addHead(StringContainer mContainer) { - isUpdated = true; - ArrayList mSeqList = mContainer.getSequenceList(); - ArrayList mRevList = mContainer.getReverseList(); - count += mRevList.size() + mSeqList.size(); - String temp; - for (int i = mSeqList.size() - 1; i >= 0; i--) { - temp = mSeqList.get(i); - reverseList.add(temp); - totalLength += temp.length(); - } - for (int i = 0; i < mRevList.size(); i++) { - temp = mRevList.get(i); - reverseList.add(temp); - totalLength += temp.length(); - } - return this; + /** + * add a Strings array from this container's header.
+ * strings:"a","b","c",
+ * StringContainer this:["d","e","f"],
+ * result:this:["a","b","c","d","e","f"],
+ * + * @param strings - to be added + * @return - this object + */ + public StringContainer addHead(String... strings) { + isUpdated = true; + count += strings.length; + for (int i = strings.length - 1; i >= 0; i--) { + totalLength += strings[i].length(); + reverseList.add(strings[i]); } + return this; + } - @Override - public String toString() { - if (!isUpdated) - return cache; - if (totalLength <= 0) - return ""; - if (joinSeparator == null) { - stringBuilder = new StringBuilder(totalLength); - for (int i = reverseList.size() - 1; i >= 0; i--) { - stringBuilder.append(reverseList.get(i)); - } - for (int i = 0; i < sequenceList.size(); i++) { - stringBuilder.append(sequenceList.get(i)); - } - cache = stringBuilder.toString(); - } else { - cache = join(joinSeparator); - } - isUpdated = false; - return cache; + /** + * add a StringContainer from this container's header.
+ * StringContainer m:["a","b","c"],
+ * StringContainer this:["d","e","f"],
+ * result:this:["a","b","c","d","e","f"],
+ * + * @param mContainer - given StringContainer to be add in head + * @return - this object + */ + public StringContainer addHead(StringContainer mContainer) { + isUpdated = true; + ArrayList mSeqList = mContainer.getSequenceList(); + ArrayList mRevList = mContainer.getReverseList(); + count += mRevList.size() + mSeqList.size(); + String temp; + for (int i = mSeqList.size() - 1; i >= 0; i--) { + temp = mSeqList.get(i); + reverseList.add(temp); + totalLength += temp.length(); } + for (int i = 0; i < mRevList.size(); i++) { + temp = mRevList.get(i); + reverseList.add(temp); + totalLength += temp.length(); + } + return this; + } - /** - * for all string in rev and seq, concat them with separator and return - * String - * - * @param separator separator of string - * @return - result joined in type of String with parameter - */ - public String join(String separator) { - if (totalLength <= 0) - return ""; - stringBuilder = new StringBuilder(totalLength + (count - 1) - * separator.length()); - for (int i = reverseList.size() - 1; i >= 1; i--) { - stringBuilder.append(reverseList.get(i)); - stringBuilder.append(separator); - } - if (!reverseList.isEmpty()) { - stringBuilder.append(reverseList.get(0)); - if (!sequenceList.isEmpty()) - stringBuilder.append(separator); - } - int i; - for (i = 0; i < sequenceList.size() - 1; i++) { - stringBuilder.append(sequenceList.get(i)); - stringBuilder.append(separator); - } - if (!sequenceList.isEmpty()) - stringBuilder.append(sequenceList.get(i)); - return stringBuilder.toString(); + @Override + public String toString() { + if (!isUpdated) + return cache; + if (totalLength <= 0) + return ""; + if (joinSeparator == null) { + stringBuilder = new StringBuilder(totalLength); + for (int i = reverseList.size() - 1; i >= 0; i--) { + stringBuilder.append(reverseList.get(i)); + } + for (int i = 0; i < sequenceList.size(); i++) { + stringBuilder.append(sequenceList.get(i)); + } + cache = stringBuilder.toString(); + } else { + cache = join(joinSeparator); } + isUpdated = false; + return cache; + } - /** - * return a sub-string in this container.
- * e.g. this container is ["aa","bbb","cc","d","ee"]; this.getSubString(0) = - * "a";this.getSubString(2) = "c";this.getSubString(-1) = "ee"; - * - * @param index - the index of wanted sub-string - * @return - substring result - */ - public String getSubString(int index) { - int realIndex = index >= 0 ? index : count + index; - if (realIndex < 0 || realIndex >= count) - throw new IndexOutOfBoundsException("Index: " + index - + ", Real Index: " + realIndex + ", Size: " + count); - if (realIndex < reverseList.size()) { - return reverseList.get(reverseList.size() - 1 - realIndex); - } else { - return sequenceList.get(realIndex - reverseList.size()); - } + /** + * for all string in rev and seq, concat them with separator and return String + * + * @param separator separator of string + * @return - result joined in type of String with parameter + */ + public String join(String separator) { + if (totalLength <= 0) + return ""; + stringBuilder = new StringBuilder(totalLength + (count - 1) * separator.length()); + for (int i = reverseList.size() - 1; i >= 1; i--) { + stringBuilder.append(reverseList.get(i)); + stringBuilder.append(separator); + } + if (!reverseList.isEmpty()) { + stringBuilder.append(reverseList.get(0)); + if (!sequenceList.isEmpty()) + stringBuilder.append(separator); } + int i; + for (i = 0; i < sequenceList.size() - 1; i++) { + stringBuilder.append(sequenceList.get(i)); + stringBuilder.append(separator); + } + if (!sequenceList.isEmpty()) + stringBuilder.append(sequenceList.get(i)); + return stringBuilder.toString(); + } - /** - * /** return a sub-container consist of several continuous strings in this - * {@code container.If start <= end, return a empty container} - * e.g. this container is ["aa","bbb","cc","d","ee"]; - * this.getSubString(0,0) = ["aa"]
-   * this.getSubString(1,3) = ["bbb","cc","d"]
-   * this.getSubString(1,-1) = ["bbb","cc","d", "ee"]
- * - * @param start - the start index of wanted sub-string - * @param end - the end index of wanted sub-string - * @return - substring result - */ - public StringContainer getSubStringContainer(int start, int end) { - int realStartIndex = start >= 0 ? start : count + start; - int realEndIndex = end >= 0 ? end : count + end; - if (realStartIndex < 0 || realStartIndex >= count) - throw new IndexOutOfBoundsException("start Index: " + start - + ", Real start Index: " + realStartIndex + ", Size: " - + count); - if (realEndIndex < 0 || realEndIndex >= count) - throw new IndexOutOfBoundsException("end Index: " + end - + ", Real end Index: " + realEndIndex + ", Size: " + count); - StringContainer ret = new StringContainer(joinSeparator); - if (realStartIndex < reverseList.size()) { - for (int i = reverseList.size() - 1 - realStartIndex; i >= Math - .max(0, reverseList.size() - 1 - realEndIndex); i--) { - ret.addTail(this.reverseList.get(i)); - } - } - if (realEndIndex >= reverseList.size()) { - for (int i = Math.max(0, realStartIndex - reverseList.size()); i <= realEndIndex - - reverseList.size(); i++) { - ret.addTail(this.sequenceList.get(i)); - } - } - return ret; + /** + * return a sub-string in this container.
+ * e.g. this container is ["aa","bbb","cc","d","ee"]; this.getSubString(0) = + * "a";this.getSubString(2) = "c";this.getSubString(-1) = "ee"; + * + * @param index - the index of wanted sub-string + * @return - substring result + */ + public String getSubString(int index) { + int realIndex = index >= 0 ? index : count + index; + if (realIndex < 0 || realIndex >= count) + throw new IndexOutOfBoundsException( + "Index: " + index + ", Real Index: " + realIndex + ", Size: " + count); + if (realIndex < reverseList.size()) { + return reverseList.get(reverseList.size() - 1 - realIndex); + } else { + return sequenceList.get(realIndex - reverseList.size()); } + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - if (joinSeparator != null) - result = prime * result + joinSeparator.hashCode(); - for (String string : reverseList) { - result = prime * result + ((string == null) ? 0 : string.hashCode()); - } - for (String string : sequenceList) { - result = prime * result + ((string == null) ? 0 : string.hashCode()); - } - return result; + /** + * /** return a sub-container consist of several continuous strings in this + * {@code container.If start <= end, return a empty container} e.g. this container is + * ["aa","bbb","cc","d","ee"]; this.getSubString(0,0) = ["aa"]
+   * this.getSubString(1,3) = ["bbb","cc","d"]
+   * this.getSubString(1,-1) = ["bbb","cc","d", "ee"]
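// A short sketch of the indexing rules spelled out above: an index addresses a whole element
// and a negative index counts back from the end. The container mirrors the
// ["aa","bbb","cc","d","ee"] example; note that getSubString(0) yields the full element "aa",
// so the "a" in the javadoc above looks like a typo. Demo names are illustrative only.
import cn.edu.tsinghua.tsfile.timeseries.utils.StringContainer;

class StringContainerIndexSketch {
  static void demo() {
    StringContainer sc = new StringContainer(",");
    sc.addTail("aa", "bbb", "cc", "d", "ee");
    sc.getSubString(0);                        // -> "aa"
    sc.getSubString(-1);                       // -> "ee"
    sc.getSubStringContainer(1, 3).join(",");  // -> "bbb,cc,d"
    sc.getSubStringContainer(1, -1).join(","); // -> "bbb,cc,d,ee"
  }
}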
+ * + * @param start - the start index of wanted sub-string + * @param end - the end index of wanted sub-string + * @return - substring result + */ + public StringContainer getSubStringContainer(int start, int end) { + int realStartIndex = start >= 0 ? start : count + start; + int realEndIndex = end >= 0 ? end : count + end; + if (realStartIndex < 0 || realStartIndex >= count) + throw new IndexOutOfBoundsException( + "start Index: " + start + ", Real start Index: " + realStartIndex + ", Size: " + count); + if (realEndIndex < 0 || realEndIndex >= count) + throw new IndexOutOfBoundsException( + "end Index: " + end + ", Real end Index: " + realEndIndex + ", Size: " + count); + StringContainer ret = new StringContainer(joinSeparator); + if (realStartIndex < reverseList.size()) { + for (int i = reverseList.size() - 1 - realStartIndex; i >= Math.max(0, + reverseList.size() - 1 - realEndIndex); i--) { + ret.addTail(this.reverseList.get(i)); + } + } + if (realEndIndex >= reverseList.size()) { + for (int i = Math.max(0, realStartIndex - reverseList.size()); i <= realEndIndex + - reverseList.size(); i++) { + ret.addTail(this.sequenceList.get(i)); + } } + return ret; + } - @Override - public boolean equals(Object sc) { - return this.equals((StringContainer) sc); + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + if (joinSeparator != null) + result = prime * result + joinSeparator.hashCode(); + for (String string : reverseList) { + result = prime * result + ((string == null) ? 0 : string.hashCode()); } + for (String string : sequenceList) { + result = prime * result + ((string == null) ? 0 : string.hashCode()); + } + return result; + } + + @Override + public boolean equals(Object sc) { + return this.equals((StringContainer) sc); + } - public boolean equals(StringContainer sc) { - if (sc == this) - return true; - if (count != sc.count) - return false; - if (totalLength != sc.totalLength) - return false; - if (!joinSeparator.equals(sc.joinSeparator)) - return false; - if (sequenceList.size() != sc.sequenceList.size()) - return false; - for (int i = 0; i < sequenceList.size(); i++) { - if (!sequenceList.get(i).equals(sc.sequenceList.get(i))) - return false; - } - if (reverseList.size() != sc.reverseList.size()) - return false; - for (int i = 0; i < reverseList.size(); i++) { - if (!reverseList.get(i).equals(sc.reverseList.get(i))) - return false; - } - return true; + public boolean equals(StringContainer sc) { + if (sc == this) + return true; + if (count != sc.count) + return false; + if (totalLength != sc.totalLength) + return false; + if (!joinSeparator.equals(sc.joinSeparator)) + return false; + if (sequenceList.size() != sc.sequenceList.size()) + return false; + for (int i = 0; i < sequenceList.size(); i++) { + if (!sequenceList.get(i).equals(sc.sequenceList.get(i))) + return false; } + if (reverseList.size() != sc.reverseList.size()) + return false; + for (int i = 0; i < reverseList.size(); i++) { + if (!reverseList.get(i).equals(sc.reverseList.get(i))) + return false; + } + return true; + } - @Override - public StringContainer clone() { - StringContainer ret = new StringContainer(joinSeparator); - for (String s : sequenceList) { - ret.sequenceList.add(s); - } - for (String s : reverseList) { - ret.reverseList.add(s); - } - ret.totalLength = totalLength; - ret.count = count; - ret.isUpdated = isUpdated; - ret.cache = cache; - return ret; + @Override + public StringContainer clone() { + StringContainer ret = new StringContainer(joinSeparator); + for (String s : 
sequenceList) { + ret.sequenceList.add(s); + } + for (String s : reverseList) { + ret.reverseList.add(s); } + ret.totalLength = totalLength; + ret.count = count; + ret.isUpdated = isUpdated; + ret.cache = cache; + return ret; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/TSFileEnum.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/TSFileEnum.java index 363ba18b..0f0f23b3 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/TSFileEnum.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/TSFileEnum.java @@ -12,34 +12,34 @@ * @author kangrong */ public class TSFileEnum { - private int index = 1; - private Map enumMap = new LinkedHashMap<>(); + private int index = 1; + private Map enumMap = new LinkedHashMap<>(); - public void addTSFileEnum(String value) { - enumMap.put(value, index++); - } + public void addTSFileEnum(String value) { + enumMap.put(value, index++); + } - /** - * just like java enum's ordinal - * - * @param value a string appearing in enum. - * @return the responding index in TSFileEnum - */ - public int enumOrdinal(String value) { - return enumMap.getOrDefault(value, -1); - } + /** + * just like java enum's ordinal + * + * @param value a string appearing in enum. + * @return the responding index in TSFileEnum + */ + public int enumOrdinal(String value) { + return enumMap.getOrDefault(value, -1); + } - /** - * just like java enum's values() - * - * @return all values in TSFileEnum in form of List{@code} - */ - public List getEnumDataValues() { - return new ArrayList<>(enumMap.keySet()); - } + /** + * just like java enum's values() + * + * @return all values in TSFileEnum in form of List{@code} + */ + public List getEnumDataValues() { + return new ArrayList<>(enumMap.keySet()); + } - @Override - public String toString() { - return enumMap.keySet().toString(); - } + @Override + public String toString() { + return enumMap.keySet().toString(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/cache/Cache.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/cache/Cache.java index 17e1e11e..ef28eba5 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/cache/Cache.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/cache/Cache.java @@ -6,7 +6,7 @@ * Created by zhangjinrui on 2017/12/25. 
*/ public interface Cache { - T get(K key) throws CacheException; + T get(K key) throws CacheException; - void clear(); + void clear(); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/cache/LRUCache.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/cache/LRUCache.java index cc577ea9..cdfc75e4 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/cache/LRUCache.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/utils/cache/LRUCache.java @@ -1,7 +1,6 @@ package cn.edu.tsinghua.tsfile.timeseries.utils.cache; import cn.edu.tsinghua.tsfile.common.exception.cache.CacheException; - import java.util.LinkedHashMap; import java.util.Map; @@ -10,56 +9,56 @@ */ public abstract class LRUCache implements Cache { - private int cacheSize; - private Map cache; + private int cacheSize; + private Map cache; - public LRUCache(int cacheSize) { - this.cacheSize = cacheSize; - this.cache = new LinkedHashMap<>(); - } + public LRUCache(int cacheSize) { + this.cacheSize = cacheSize; + this.cache = new LinkedHashMap<>(); + } - @Override - public T get(K key) throws CacheException { - if (cache.containsKey(key)) { - moveObjectToTail(key); - } else { - removeFirstObjectIfCacheIsFull(); - cache.put(key, loadObjectByKey(key)); - } - return cache.get(key); + @Override + public T get(K key) throws CacheException { + if (cache.containsKey(key)) { + moveObjectToTail(key); + } else { + removeFirstObjectIfCacheIsFull(); + cache.put(key, loadObjectByKey(key)); } + return cache.get(key); + } - public void clear() { - this.cache.clear(); - } + public void clear() { + this.cache.clear(); + } - private void moveObjectToTail(K key) { - T value = cache.get(key); - cache.remove(key); - cache.put(key, value); - } + private void moveObjectToTail(K key) { + T value = cache.get(key); + cache.remove(key); + cache.put(key, value); + } - private void removeFirstObjectIfCacheIsFull() throws CacheException { - if (cache.size() == this.cacheSize) { - removeFirstObject(); - } + private void removeFirstObjectIfCacheIsFull() throws CacheException { + if (cache.size() == this.cacheSize) { + removeFirstObject(); } + } - private void removeFirstObject() throws CacheException { - if (cache.size() == 0) { - return; - } - K key = cache.keySet().iterator().next(); - beforeRemove(cache.get(key)); - cache.remove(key); + private void removeFirstObject() throws CacheException { + if (cache.size() == 0) { + return; } + K key = cache.keySet().iterator().next(); + beforeRemove(cache.get(key)); + cache.remove(key); + } - /** - * Do something before remove object from cache. - * - * @param object - */ - public abstract void beforeRemove(T object) throws CacheException; + /** + * Do something before remove object from cache. 
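// A sketch of how this abstract LRU cache is meant to be subclassed: get() moves a hit to the
// tail of the LinkedHashMap and, once the cache is full, evicts the first (least recently used)
// entry before loading a missing value via loadObjectByKey. The generic parameters and the
// FileLengthCache name are assumed for illustration; angle brackets are stripped in this view.
import cn.edu.tsinghua.tsfile.common.exception.cache.CacheException;
import cn.edu.tsinghua.tsfile.timeseries.utils.cache.LRUCache;

class FileLengthCache extends LRUCache<String, Long> {
  FileLengthCache(int cacheSize) {
    super(cacheSize);
  }

  @Override
  public void beforeRemove(Long value) throws CacheException {
    // nothing to release for a plain Long value
  }

  @Override
  public Long loadObjectByKey(String path) throws CacheException {
    return (long) path.length();  // stand-in for a real load, e.g. a file length lookup
  }
}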
+ * + * @param object + */ + public abstract void beforeRemove(T object) throws CacheException; - public abstract T loadObjectByKey(K key) throws CacheException; + public abstract T loadObjectByKey(K key) throws CacheException; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/TsFileWriter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/TsFileWriter.java index 0c92a371..b3e9f564 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/TsFileWriter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/TsFileWriter.java @@ -16,7 +16,6 @@ import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.File; import java.io.IOException; import java.util.HashMap; @@ -113,15 +112,16 @@ public void addMeasurement(MeasurementDescriptor measurementDescriptor) /** * add a new measurement according to json string. - * @param measurement - * example: - *

+   * 
+   * @param measurement example:
+   * 
+   *        
      {
             "measurement_id": "sensor_cpu_50",
             "data_type": "INT32",
             "encoding": "RLE"
         }
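// A sketch of building the JSON shape shown above with org.json (already imported by
// TsFileWriter in this patch); the demo class and the writer parameter are illustrative only.
import org.json.JSONObject;
import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter;

class AddMeasurementSketch {
  static void register(TsFileWriter writer) throws Exception {
    JSONObject measurement = new JSONObject()
        .put("measurement_id", "sensor_cpu_50")
        .put("data_type", "INT32")
        .put("encoding", "RLE");
    writer.addMeasurementByJson(measurement);  // throws WriteProcessException for illegal or duplicate measurements
  }
}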
-   *          
+ *
* * @throws WriteProcessException if the json is illegal or the measurement exists */ @@ -132,41 +132,36 @@ public void addMeasurementByJson(JSONObject measurement) throws WriteProcessExce /** * Confirm whether the record is legal. If legal, add it into this RecordWriter. * - * @param record - * - a record responding a line + * @param record - a record responding a line * @return - whether the record has been added into RecordWriter legally * @throws WriteProcessException exception */ protected boolean checkIsTimeSeriesExist(TSRecord record) throws WriteProcessException { - IRowGroupWriter groupWriter; - if (!groupWriters.containsKey(record.deltaObjectId)) { - groupWriter = new RowGroupWriterImpl(record.deltaObjectId); - groupWriters.put(record.deltaObjectId, groupWriter); - } else{ - groupWriter = groupWriters.get(record.deltaObjectId); - } - Map schemaDescriptorMap = schema.getDescriptor(); - for (DataPoint dp : record.dataPointList) { - String measurementId = dp.getMeasurementId(); - if (schemaDescriptorMap.containsKey(measurementId)) - groupWriter.addSeriesWriter(schemaDescriptorMap.get(measurementId), pageSize); - else - throw new NoMeasurementException("input measurement is invalid: " + measurementId); - } - return true; + IRowGroupWriter groupWriter; + if (!groupWriters.containsKey(record.deltaObjectId)) { + groupWriter = new RowGroupWriterImpl(record.deltaObjectId); + groupWriters.put(record.deltaObjectId, groupWriter); + } else { + groupWriter = groupWriters.get(record.deltaObjectId); + } + Map schemaDescriptorMap = schema.getDescriptor(); + for (DataPoint dp : record.dataPointList) { + String measurementId = dp.getMeasurementId(); + if (schemaDescriptorMap.containsKey(measurementId)) + groupWriter.addSeriesWriter(schemaDescriptorMap.get(measurementId), pageSize); + else + throw new NoMeasurementException("input measurement is invalid: " + measurementId); + } + return true; } /** * write a record in type of T. * - * @param record - * - record responding a data line - * @throws IOException - * exception in IO - * @throws WriteProcessException - * exception in write process - * @return true -size of tsfile or metadata reaches the threshold. - * false - otherwise + * @param record - record responding a data line + * @throws IOException exception in IO + * @throws WriteProcessException exception in write process + * @return true -size of tsfile or metadata reaches the threshold. false - otherwise */ public boolean write(TSRecord record) throws IOException, WriteProcessException { if (checkIsTimeSeriesExist(record)) { @@ -174,7 +169,7 @@ public boolean write(TSRecord record) throws IOException, WriteProcessException ++recordCount; return checkMemorySize(); } - return false; + return false; } /** @@ -194,10 +189,8 @@ public long calculateMemSizeForAllGroup() { * check occupied memory size, if it exceeds the rowGroupSize threshold, flush them to given * OutputStream. * - * @throws IOException - * exception in IO - * @return true - size of tsfile or metadata reaches the threshold. - * false - otherwise + * @throws IOException exception in IO + * @return true - size of tsfile or metadata reaches the threshold. 
false - otherwise */ protected boolean checkMemorySize() throws IOException { if (recordCount >= recordCountForNextMemCheck) { @@ -207,8 +200,8 @@ protected boolean checkMemorySize() throws IOException { recordCountForNextMemCheck = rowGroupSizeThreshold / oneRowMaxSize; return flushRowGroup(false); } else { - recordCountForNextMemCheck = recordCount - + (rowGroupSizeThreshold - memSize) / oneRowMaxSize; + recordCountForNextMemCheck = + recordCount + (rowGroupSizeThreshold - memSize) / oneRowMaxSize; return false; } } @@ -218,12 +211,10 @@ protected boolean checkMemorySize() throws IOException { /** * flush the data in all series writers and their page writers to outputStream. * - * @param isFillRowGroup - * whether to fill RowGroup - * @throws IOException - * exception in IO - * @return true - size of tsfile or metadata reaches the threshold. - * false - otherwise. But this function just return false, the Override of IoTDB may return true. + * @param isFillRowGroup whether to fill RowGroup + * @throws IOException exception in IO + * @return true - size of tsfile or metadata reaches the threshold. false - otherwise. But this + * function just return false, the Override of IoTDB may return true. */ protected boolean flushRowGroup(boolean isFillRowGroup) throws IOException { // at the present stage, just flush one block @@ -265,8 +256,7 @@ private void reset() { * calling this method to write the last data remaining in memory and close the normal and error * OutputStream. * - * @throws IOException - * exception in IO + * @throws IOException exception in IO */ public void close() throws IOException { LOG.info("start close file"); diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/desc/MeasurementDescriptor.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/desc/MeasurementDescriptor.java index a9ed77f7..4dec5560 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/desc/MeasurementDescriptor.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/desc/MeasurementDescriptor.java @@ -49,7 +49,7 @@ public MeasurementDescriptor(String measurementId, TSDataType type, TSEncoding e this.type = type; this.measurementId = measurementId; this.encoding = encoding; - this.props = props == null? Collections.emptyMap(): props; + this.props = props == null ? Collections.emptyMap() : props; this.conf = TSFileDescriptor.getInstance().getConfig(); // initialize TSDataType. e.g. set data values for enum type if (type == TSDataType.ENUMS) { @@ -62,8 +62,8 @@ public MeasurementDescriptor(String measurementId, TSDataType type, TSEncoding e if (props != null && props.containsKey(JsonFormatConstant.COMPRESS_TYPE)) { this.compressor = Compressor.getCompressor(props.get(JsonFormatConstant.COMPRESS_TYPE)); } else { - this.compressor = Compressor - .getCompressor(TSFileDescriptor.getInstance().getConfig().compressor); + this.compressor = + Compressor.getCompressor(TSFileDescriptor.getInstance().getConfig().compressor); } } @@ -71,7 +71,7 @@ public String getMeasurementId() { return measurementId; } - public Map getProps(){ + public Map getProps() { return props; } @@ -143,8 +143,7 @@ public Compressor getCompressor() { * data type calling this method
* e.g. enum:[MAN(0),WOMAN(1)],calls parseEnumValue("WOMAN"),return 1 * - * @param string - * - enum value in type of string + * @param string - enum value in type of string * @return - ordinal integer in enum field */ public int parseEnumValue(String string) { diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/InvalidJsonSchemaException.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/InvalidJsonSchemaException.java index 8e360fa0..d6231c0c 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/InvalidJsonSchemaException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/InvalidJsonSchemaException.java @@ -1,14 +1,15 @@ package cn.edu.tsinghua.tsfile.timeseries.write.exception; /** - * This exception is throw if the json of schema in writing process is invalid, like missing necessary fields. + * This exception is throw if the json of schema in writing process is invalid, like missing + * necessary fields. * * @author kangrong */ public class InvalidJsonSchemaException extends WriteProcessException { - private static final long serialVersionUID = -4469810656988557000L; + private static final long serialVersionUID = -4469810656988557000L; - public InvalidJsonSchemaException(String msg) { - super(msg); - } + public InvalidJsonSchemaException(String msg) { + super(msg); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/NoMeasurementException.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/NoMeasurementException.java index f10a7220..9923a66a 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/NoMeasurementException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/NoMeasurementException.java @@ -7,9 +7,9 @@ */ public class NoMeasurementException extends WriteProcessException { - private static final long serialVersionUID = -5599767368831572747L; + private static final long serialVersionUID = -5599767368831572747L; - public NoMeasurementException(String msg) { - super(msg); - } + public NoMeasurementException(String msg) { + super(msg); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/PageException.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/PageException.java index 52020804..67179080 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/PageException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/PageException.java @@ -7,9 +7,9 @@ */ public class PageException extends WriteProcessException { - private static final long serialVersionUID = 7385627296529388683L; + private static final long serialVersionUID = 7385627296529388683L; - public PageException(String msg) { - super(msg); - } + public PageException(String msg) { + super(msg); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/WriteProcessException.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/WriteProcessException.java index ce6f6a40..c4eac717 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/WriteProcessException.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/exception/WriteProcessException.java @@ -6,16 +6,16 @@ * @author kangrong */ public class WriteProcessException extends Exception { - private static final long serialVersionUID = -2664638061585302767L; - protected String errMsg; + private static final long serialVersionUID = 
-2664638061585302767L; + protected String errMsg; - public WriteProcessException(String msg) { - super(msg); - this.errMsg = msg; - } + public WriteProcessException(String msg) { + super(msg); + this.errMsg = msg; + } - @Override - public String getMessage() { - return errMsg; - } + @Override + public String getMessage() { + return errMsg; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/io/TsFileIOWriter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/io/TsFileIOWriter.java index 6633bc59..d826c10e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/io/TsFileIOWriter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/io/TsFileIOWriter.java @@ -9,10 +9,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; import cn.edu.tsinghua.tsfile.common.constant.StatisticConstant; import cn.edu.tsinghua.tsfile.common.utils.BytesUtils; @@ -38,304 +36,280 @@ import cn.edu.tsinghua.tsfile.timeseries.write.schema.FileSchema; /** - * TSFileIOWriter is used to construct metadata and write data stored in memory - * to output stream. + * TSFileIOWriter is used to construct metadata and write data stored in memory to output stream. * * @author kangrong */ public class TsFileIOWriter { - public static final byte[] magicStringBytes; - public static final TsFileMetaDataConverter metadataConverter = new TsFileMetaDataConverter(); - private static final Logger LOG = LoggerFactory.getLogger(TsFileIOWriter.class); - - static { - magicStringBytes = BytesUtils.StringToBytes(TSFileConfig.MAGIC_STRING); - } - - private ITsRandomAccessFileWriter out; - protected List rowGroupMetaDatas = new ArrayList<>(); - private RowGroupMetaData currentRowGroupMetaData; - private TimeSeriesChunkMetaData currentChunkMetaData; - - - public TsFileIOWriter(){ - - } - - public void setIOWriter(ITsRandomAccessFileWriter out){ - this.out = out; - } - - /** - * for writing a new tsfile. - * - * @param file - * be used to output written data - * @throws IOException - * if I/O error occurs - */ - public TsFileIOWriter(File file) throws IOException { - this.out = new TsRandomAccessFileWriter(file); - startFile(); - } - - /** - * for writing a new tsfile. - * - * @param output - * be used to output written data - * @throws IOException - * if I/O error occurs - */ - public TsFileIOWriter(ITsRandomAccessFileWriter output) throws IOException { - this.out = output; - startFile(); - } - - /** - * This is just used to restore one TSFile from List of RowGroupMetaData and - * the offset. - * - * @param output - * be used to output written data - * @param offset - * offset to restore - * @param rowGroups - * given a constructed row group list for fault recovery - * @throws IOException - * if I/O error occurs - */ - public TsFileIOWriter(ITsRandomAccessFileWriter output, long offset, List rowGroups) - throws IOException { - this.out = output; - out.seek(offset); - this.rowGroupMetaDatas = rowGroups; - } - - /** - * Writes given ListByteArrayOutputStream to output stream. - * This method is called when total memory size exceeds the row group size - * threshold. - * - * @param bytes - * - data of several pages which has been packed - * @throws IOException - * if an I/O error occurs. 
- */ - public void writeBytesToStream(ListByteArrayOutputStream bytes) throws IOException { - bytes.writeAllTo(out.getOutputStream()); - } - - private void startFile() throws IOException { - out.write(magicStringBytes); - } - - /** - * start a {@linkplain RowGroupMetaData RowGroupMetaData}. - * - * @param recordCount - * - the record count of this time series input in this stage - * @param deltaObjectId - * - delta object id - */ - public void startRowGroup(long recordCount, String deltaObjectId) { - LOG.debug("start row group:{}", deltaObjectId); - currentRowGroupMetaData = new RowGroupMetaData(deltaObjectId, recordCount, 0, new ArrayList<>(), ""); - - - } - - public void startRowGroup(String deltaObjectId) { - LOG.debug("start row group:{}", deltaObjectId); - currentRowGroupMetaData = new RowGroupMetaData(deltaObjectId, 0, 0, new ArrayList<>(), ""); - } - - /** - * start a {@linkplain TimeSeriesChunkMetaData TimeSeriesChunkMetaData}. - * - * @param descriptor - * - measurement of this time series - * @param compressionCodecName - * - compression name of this time series - * @param tsDataType - * - data type - * @param statistics - * - statistic of the whole series - * @param maxTime - * - maximum timestamp of the whole series in this stage - * @param minTime - * - minimum timestamp of the whole series in this stage - * @throws IOException - * if I/O error occurs - */ - public void startSeries(MeasurementDescriptor descriptor, CompressionTypeName compressionCodecName, - TSDataType tsDataType, Statistics statistics, long maxTime, long minTime) throws IOException { - LOG.debug("start series:{}", descriptor); - currentChunkMetaData = new TimeSeriesChunkMetaData(descriptor.getMeasurementId(), TSChunkType.VALUE, - out.getPos(), compressionCodecName); - TInTimeSeriesChunkMetaData t = new TInTimeSeriesChunkMetaData(tsDataType, minTime, maxTime); - currentChunkMetaData.setTInTimeSeriesChunkMetaData(t); - - VInTimeSeriesChunkMetaData v = new VInTimeSeriesChunkMetaData(tsDataType); - TsDigest tsDigest = new TsDigest(); - Map statisticsMap = new HashMap<>(); - // TODO add your statistics - statisticsMap.put(StatisticConstant.MAX_VALUE,ByteBuffer.wrap(statistics.getMaxBytes())); - statisticsMap.put(StatisticConstant.MIN_VALUE,ByteBuffer.wrap(statistics.getMinBytes())); - statisticsMap.put(StatisticConstant.FIRST,ByteBuffer.wrap(statistics.getFirstBytes())); - statisticsMap.put(StatisticConstant.SUM,ByteBuffer.wrap(statistics.getSumBytes())); - statisticsMap.put(StatisticConstant.LAST,ByteBuffer.wrap(statistics.getLastBytes())); - tsDigest.setStatistics(statisticsMap); - - v.setDigest(tsDigest); - descriptor.setDataValues(v); - currentChunkMetaData.setVInTimeSeriesChunkMetaData(v); - } - - public void endSeries(long size, long totalValueCount) { - LOG.debug("end series:{},totalvalue:{}", currentChunkMetaData, totalValueCount); - currentChunkMetaData.setTotalByteSize(size); - currentChunkMetaData.setNumRows(totalValueCount); - currentRowGroupMetaData.addTimeSeriesChunkMetaData(currentChunkMetaData); - currentChunkMetaData = null; - } - - public void endRowGroup(long memSize) { - currentRowGroupMetaData.setTotalByteSize(memSize); - rowGroupMetaDatas.add(currentRowGroupMetaData); - LOG.debug("end row group:{}", currentRowGroupMetaData); - currentRowGroupMetaData = null; - } - - public void endRowGroup(long memSize,long recordCount) { - currentRowGroupMetaData.setTotalByteSize(memSize); - currentRowGroupMetaData.setNumOfRows(recordCount); - rowGroupMetaDatas.add(currentRowGroupMetaData); - LOG.debug("end 
row group:{}", currentRowGroupMetaData); - currentRowGroupMetaData = null; - } - - /** - * write {@linkplain TsFileMetaData TSFileMetaData} to output stream and - * close it. - * - * @param schema - * FileSchema - * @throws IOException - * if I/O error occurs - */ - public void endFile(FileSchema schema) throws IOException { - List timeSeriesList = schema.getTimeSeriesMetadatas(); - LOG.debug("get time series list:{}", timeSeriesList); - // clustering rowGroupMetadata and build the range - - Map tsDeltaObjectMap = new HashMap<>(); - String currentDeltaObject; - TsRowGroupBlockMetaData currentTsRowGroupBlockMetaData; - - LinkedHashMap tsRowGroupBlockMetaDataMap = new LinkedHashMap<>(); - for (RowGroupMetaData rowGroupMetaData : rowGroupMetaDatas) { - currentDeltaObject = rowGroupMetaData.getDeltaObjectID(); - if (!tsRowGroupBlockMetaDataMap.containsKey(currentDeltaObject)) { - TsRowGroupBlockMetaData tsRowGroupBlockMetaData = new TsRowGroupBlockMetaData(); - tsRowGroupBlockMetaData.setDeltaObjectID(currentDeltaObject); - tsRowGroupBlockMetaDataMap.put(currentDeltaObject, tsRowGroupBlockMetaData); - } - tsRowGroupBlockMetaDataMap.get(currentDeltaObject).addRowGroupMetaData(rowGroupMetaData); - } - Iterator> iterator = tsRowGroupBlockMetaDataMap.entrySet() - .iterator(); - long offset; - long offsetIndex; - /** size of RowGroupMetadataBlock in byte **/ - int metadataBlockSize; - - /** start time for a delta object **/ - long startTime; - - /** end time for a delta object **/ - long endTime; - - while (iterator.hasNext()) { - startTime = Long.MAX_VALUE; - endTime = Long.MIN_VALUE; - - Map.Entry entry = iterator.next(); - currentDeltaObject = entry.getKey(); - currentTsRowGroupBlockMetaData = entry.getValue(); - - for (RowGroupMetaData rowGroupMetaData : currentTsRowGroupBlockMetaData.getRowGroups()) { - for (TimeSeriesChunkMetaData timeSeriesChunkMetaData : rowGroupMetaData - .getTimeSeriesChunkMetaDataList()) { - startTime = Long.min(startTime, - timeSeriesChunkMetaData.getTInTimeSeriesChunkMetaData().getStartTime()); - endTime = Long.max(endTime, timeSeriesChunkMetaData.getTInTimeSeriesChunkMetaData().getEndTime()); - } - } - offsetIndex = out.getPos(); - // flush tsRowGroupBlockMetaDatas in order - ReadWriteThriftFormatUtils.writeRowGroupBlockMetadata(currentTsRowGroupBlockMetaData.convertToThrift(), - out.getOutputStream()); - offset = out.getPos(); - TsDeltaObject tsDeltaObject = new TsDeltaObject(offsetIndex, (int) (offset - offsetIndex), startTime, - endTime); - tsDeltaObjectMap.put(currentDeltaObject, tsDeltaObject); - } - - TsFileMetaData tsFileMetaData = new TsFileMetaData(tsDeltaObjectMap, timeSeriesList, - TSFileConfig.currentVersion); - Map props = schema.getProps(); - tsFileMetaData.setProps(props); - serializeTsFileMetadata(tsFileMetaData); - out.close(); - LOG.info("output stream is closed"); - } - - /** - * get the length of normal OutputStream. 
- * - * @return - length of normal OutputStream - * @throws IOException - * if I/O error occurs - */ - public long getPos() throws IOException { - return out.getPos(); - } - - private void serializeTsFileMetadata(TsFileMetaData footer) throws IOException { - long footerIndex = out.getPos(); - LOG.debug("serialize the footer,file pos:{}", footerIndex); - TsFileMetaDataConverter metadataConverter = new TsFileMetaDataConverter(); - ReadWriteThriftFormatUtils.writeFileMetaData(metadataConverter.toThriftFileMetadata(footer), - out.getOutputStream()); - LOG.debug("serialize the footer finished, file pos:{}", out.getPos()); - out.write(BytesUtils.intToBytes((int) (out.getPos() - footerIndex))); - out.write(magicStringBytes); - } - - /** - * fill in output stream to complete row group threshold. - * - * @param diff - * how many bytes that will be filled. - * @throws IOException - * if diff is greater than Integer.max_value - */ - public void fillInRowGroup(long diff) throws IOException { - if (diff <= Integer.MAX_VALUE) { - out.write(new byte[(int) diff]); - } else { - throw new IOException("write too much blank byte array!array size:" + diff); - } - } - - /** - * Get the list of RowGroupMetaData in memory. - * - * @return - current list of RowGroupMetaData - */ - public List getRowGroups() { - return rowGroupMetaDatas; - } + public static final byte[] magicStringBytes; + public static final TsFileMetaDataConverter metadataConverter = new TsFileMetaDataConverter(); + private static final Logger LOG = LoggerFactory.getLogger(TsFileIOWriter.class); + + static { + magicStringBytes = BytesUtils.StringToBytes(TSFileConfig.MAGIC_STRING); + } + + private ITsRandomAccessFileWriter out; + protected List rowGroupMetaDatas = new ArrayList<>(); + private RowGroupMetaData currentRowGroupMetaData; + private TimeSeriesChunkMetaData currentChunkMetaData; + + + public TsFileIOWriter() { + + } + + public void setIOWriter(ITsRandomAccessFileWriter out) { + this.out = out; + } + + /** + * for writing a new tsfile. + * + * @param file be used to output written data + * @throws IOException if I/O error occurs + */ + public TsFileIOWriter(File file) throws IOException { + this.out = new TsRandomAccessFileWriter(file); + startFile(); + } + + /** + * for writing a new tsfile. + * + * @param output be used to output written data + * @throws IOException if I/O error occurs + */ + public TsFileIOWriter(ITsRandomAccessFileWriter output) throws IOException { + this.out = output; + startFile(); + } + + /** + * This is just used to restore one TSFile from List of RowGroupMetaData and the offset. + * + * @param output be used to output written data + * @param offset offset to restore + * @param rowGroups given a constructed row group list for fault recovery + * @throws IOException if I/O error occurs + */ + public TsFileIOWriter(ITsRandomAccessFileWriter output, long offset, + List rowGroups) throws IOException { + this.out = output; + out.seek(offset); + this.rowGroupMetaDatas = rowGroups; + } + + /** + * Writes given ListByteArrayOutputStream to output stream. This method is called + * when total memory size exceeds the row group size threshold. + * + * @param bytes - data of several pages which has been packed + * @throws IOException if an I/O error occurs. 
+ */ + public void writeBytesToStream(ListByteArrayOutputStream bytes) throws IOException { + bytes.writeAllTo(out.getOutputStream()); + } + + private void startFile() throws IOException { + out.write(magicStringBytes); + } + + /** + * start a {@linkplain RowGroupMetaData RowGroupMetaData}. + * + * @param recordCount - the record count of this time series input in this stage + * @param deltaObjectId - delta object id + */ + public void startRowGroup(long recordCount, String deltaObjectId) { + LOG.debug("start row group:{}", deltaObjectId); + currentRowGroupMetaData = + new RowGroupMetaData(deltaObjectId, recordCount, 0, new ArrayList<>(), ""); + + + } + + public void startRowGroup(String deltaObjectId) { + LOG.debug("start row group:{}", deltaObjectId); + currentRowGroupMetaData = new RowGroupMetaData(deltaObjectId, 0, 0, new ArrayList<>(), ""); + } + + /** + * start a {@linkplain TimeSeriesChunkMetaData TimeSeriesChunkMetaData}. + * + * @param descriptor - measurement of this time series + * @param compressionCodecName - compression name of this time series + * @param tsDataType - data type + * @param statistics - statistic of the whole series + * @param maxTime - maximum timestamp of the whole series in this stage + * @param minTime - minimum timestamp of the whole series in this stage + * @throws IOException if I/O error occurs + */ + public void startSeries(MeasurementDescriptor descriptor, + CompressionTypeName compressionCodecName, TSDataType tsDataType, Statistics statistics, + long maxTime, long minTime) throws IOException { + LOG.debug("start series:{}", descriptor); + currentChunkMetaData = new TimeSeriesChunkMetaData(descriptor.getMeasurementId(), + TSChunkType.VALUE, out.getPos(), compressionCodecName); + TInTimeSeriesChunkMetaData t = new TInTimeSeriesChunkMetaData(tsDataType, minTime, maxTime); + currentChunkMetaData.setTInTimeSeriesChunkMetaData(t); + + VInTimeSeriesChunkMetaData v = new VInTimeSeriesChunkMetaData(tsDataType); + TsDigest tsDigest = new TsDigest(); + Map statisticsMap = new HashMap<>(); + // TODO add your statistics + statisticsMap.put(StatisticConstant.MAX_VALUE, ByteBuffer.wrap(statistics.getMaxBytes())); + statisticsMap.put(StatisticConstant.MIN_VALUE, ByteBuffer.wrap(statistics.getMinBytes())); + statisticsMap.put(StatisticConstant.FIRST, ByteBuffer.wrap(statistics.getFirstBytes())); + statisticsMap.put(StatisticConstant.SUM, ByteBuffer.wrap(statistics.getSumBytes())); + statisticsMap.put(StatisticConstant.LAST, ByteBuffer.wrap(statistics.getLastBytes())); + tsDigest.setStatistics(statisticsMap); + + v.setDigest(tsDigest); + descriptor.setDataValues(v); + currentChunkMetaData.setVInTimeSeriesChunkMetaData(v); + } + + public void endSeries(long size, long totalValueCount) { + LOG.debug("end series:{},totalvalue:{}", currentChunkMetaData, totalValueCount); + currentChunkMetaData.setTotalByteSize(size); + currentChunkMetaData.setNumRows(totalValueCount); + currentRowGroupMetaData.addTimeSeriesChunkMetaData(currentChunkMetaData); + currentChunkMetaData = null; + } + + public void endRowGroup(long memSize) { + currentRowGroupMetaData.setTotalByteSize(memSize); + rowGroupMetaDatas.add(currentRowGroupMetaData); + LOG.debug("end row group:{}", currentRowGroupMetaData); + currentRowGroupMetaData = null; + } + + public void endRowGroup(long memSize, long recordCount) { + currentRowGroupMetaData.setTotalByteSize(memSize); + currentRowGroupMetaData.setNumOfRows(recordCount); + rowGroupMetaDatas.add(currentRowGroupMetaData); + LOG.debug("end row group:{}", 
currentRowGroupMetaData); + currentRowGroupMetaData = null; + } + + /** + * write {@linkplain TsFileMetaData TSFileMetaData} to output stream and close it. + * + * @param schema FileSchema + * @throws IOException if I/O error occurs + */ + public void endFile(FileSchema schema) throws IOException { + List timeSeriesList = schema.getTimeSeriesMetadatas(); + LOG.debug("get time series list:{}", timeSeriesList); + // clustering rowGroupMetadata and build the range + + Map tsDeltaObjectMap = new HashMap<>(); + String currentDeltaObject; + TsRowGroupBlockMetaData currentTsRowGroupBlockMetaData; + + LinkedHashMap tsRowGroupBlockMetaDataMap = + new LinkedHashMap<>(); + for (RowGroupMetaData rowGroupMetaData : rowGroupMetaDatas) { + currentDeltaObject = rowGroupMetaData.getDeltaObjectID(); + if (!tsRowGroupBlockMetaDataMap.containsKey(currentDeltaObject)) { + TsRowGroupBlockMetaData tsRowGroupBlockMetaData = new TsRowGroupBlockMetaData(); + tsRowGroupBlockMetaData.setDeltaObjectID(currentDeltaObject); + tsRowGroupBlockMetaDataMap.put(currentDeltaObject, tsRowGroupBlockMetaData); + } + tsRowGroupBlockMetaDataMap.get(currentDeltaObject).addRowGroupMetaData(rowGroupMetaData); + } + Iterator> iterator = + tsRowGroupBlockMetaDataMap.entrySet().iterator(); + long offset; + long offsetIndex; + /** size of RowGroupMetadataBlock in byte **/ + int metadataBlockSize; + + /** start time for a delta object **/ + long startTime; + + /** end time for a delta object **/ + long endTime; + + while (iterator.hasNext()) { + startTime = Long.MAX_VALUE; + endTime = Long.MIN_VALUE; + + Map.Entry entry = iterator.next(); + currentDeltaObject = entry.getKey(); + currentTsRowGroupBlockMetaData = entry.getValue(); + + for (RowGroupMetaData rowGroupMetaData : currentTsRowGroupBlockMetaData.getRowGroups()) { + for (TimeSeriesChunkMetaData timeSeriesChunkMetaData : rowGroupMetaData + .getTimeSeriesChunkMetaDataList()) { + startTime = Long.min(startTime, + timeSeriesChunkMetaData.getTInTimeSeriesChunkMetaData().getStartTime()); + endTime = Long.max(endTime, + timeSeriesChunkMetaData.getTInTimeSeriesChunkMetaData().getEndTime()); + } + } + offsetIndex = out.getPos(); + // flush tsRowGroupBlockMetaDatas in order + ReadWriteThriftFormatUtils.writeRowGroupBlockMetadata( + currentTsRowGroupBlockMetaData.convertToThrift(), out.getOutputStream()); + offset = out.getPos(); + TsDeltaObject tsDeltaObject = + new TsDeltaObject(offsetIndex, (int) (offset - offsetIndex), startTime, endTime); + tsDeltaObjectMap.put(currentDeltaObject, tsDeltaObject); + } + + TsFileMetaData tsFileMetaData = + new TsFileMetaData(tsDeltaObjectMap, timeSeriesList, TSFileConfig.currentVersion); + Map props = schema.getProps(); + tsFileMetaData.setProps(props); + serializeTsFileMetadata(tsFileMetaData); + out.close(); + LOG.info("output stream is closed"); + } + + /** + * get the length of normal OutputStream. 
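// A sketch of the call order these methods imply for flushing one row group; the helper name
// and arguments are illustrative, and imports are elided because the metadata/statistics types
// are declared outside this hunk. endFile(schema) is invoked once, when the writer is closed.
static void flushOneRowGroup(TsFileIOWriter writer, String deltaObjectId,
    MeasurementDescriptor desc, CompressionTypeName codec, TSDataType dataType,
    Statistics statistics, long maxTime, long minTime,
    ListByteArrayOutputStream packedPages, long valueCount, long memSize, long recordCount)
    throws IOException {
  writer.startRowGroup(deltaObjectId);
  writer.startSeries(desc, codec, dataType, statistics, maxTime, minTime);
  long seriesStart = writer.getPos();
  writer.writeBytesToStream(packedPages);                       // pages packed by the page writer
  writer.endSeries(writer.getPos() - seriesStart, valueCount);
  writer.endRowGroup(memSize, recordCount);
}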
+ * + * @return - length of normal OutputStream + * @throws IOException if I/O error occurs + */ + public long getPos() throws IOException { + return out.getPos(); + } + + private void serializeTsFileMetadata(TsFileMetaData footer) throws IOException { + long footerIndex = out.getPos(); + LOG.debug("serialize the footer,file pos:{}", footerIndex); + TsFileMetaDataConverter metadataConverter = new TsFileMetaDataConverter(); + ReadWriteThriftFormatUtils.writeFileMetaData(metadataConverter.toThriftFileMetadata(footer), + out.getOutputStream()); + LOG.debug("serialize the footer finished, file pos:{}", out.getPos()); + out.write(BytesUtils.intToBytes((int) (out.getPos() - footerIndex))); + out.write(magicStringBytes); + } + + /** + * fill in output stream to complete row group threshold. + * + * @param diff how many bytes that will be filled. + * @throws IOException if diff is greater than Integer.max_value + */ + public void fillInRowGroup(long diff) throws IOException { + if (diff <= Integer.MAX_VALUE) { + out.write(new byte[(int) diff]); + } else { + throw new IOException("write too much blank byte array!array size:" + diff); + } + } + + /** + * Get the list of RowGroupMetaData in memory. + * + * @return - current list of RowGroupMetaData + */ + public List getRowGroups() { + return rowGroupMetaDatas; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/page/IPageWriter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/page/IPageWriter.java index 7dd6f682..8e4dc7d7 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/page/IPageWriter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/page/IPageWriter.java @@ -7,7 +7,6 @@ import cn.edu.tsinghua.tsfile.timeseries.write.exception.PageException; import cn.edu.tsinghua.tsfile.timeseries.write.io.TsFileIOWriter; import cn.edu.tsinghua.tsfile.timeseries.write.series.ISeriesWriter; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.List; @@ -20,35 +19,35 @@ * @see ISeriesWriter */ public interface IPageWriter { - /** - * store a page to this pageWriter. - * - * @param listByteArray - the data to be stored to pageWriter - * @param valueCount - the amount of values in that page - * @param statistics - the statistics for that page - * @param maxTimestamp - timestamp maximum in given data - * @param minTimestamp - timestamp minimum in given data - * @throws PageException - if an PageException occurs. - */ - void writePage(ListByteArrayOutputStream listByteArray, int valueCount, Statistics statistics, - long maxTimestamp, long minTimestamp) throws PageException; + /** + * store a page to this pageWriter. + * + * @param listByteArray - the data to be stored to pageWriter + * @param valueCount - the amount of values in that page + * @param statistics - the statistics for that page + * @param maxTimestamp - timestamp maximum in given data + * @param minTimestamp - timestamp minimum in given data + * @throws PageException - if an PageException occurs. 
+ */ + void writePage(ListByteArrayOutputStream listByteArray, int valueCount, Statistics statistics, + long maxTimestamp, long minTimestamp) throws PageException; - /** - * write the page to specified IOWriter - * - * @param writer the specified IOWriter - * @param statistics the statistic information provided by series writer - * @throws IOException exception in IO - */ - void writeToFileWriter(TsFileIOWriter writer, Statistics statistics) throws IOException; + /** + * write the page to specified IOWriter + * + * @param writer the specified IOWriter + * @param statistics the statistic information provided by series writer + * @throws IOException exception in IO + */ + void writeToFileWriter(TsFileIOWriter writer, Statistics statistics) throws IOException; - /** - * reset exist data in page for next stage - */ - void reset(); + /** + * reset exist data in page for next stage + */ + void reset(); - /** - * @return the max possible allocated size - */ - long estimateMaxPageMemSize(); + /** + * @return the max possible allocated size + */ + long estimateMaxPageMemSize(); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/page/PageWriterImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/page/PageWriterImpl.java index bf159f2d..bf95fff8 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/page/PageWriterImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/page/PageWriterImpl.java @@ -12,7 +12,6 @@ import cn.edu.tsinghua.tsfile.timeseries.write.io.TsFileIOWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.List; @@ -24,96 +23,100 @@ * @see IPageWriter IPageWriter */ public class PageWriterImpl implements IPageWriter { - private static Logger LOG = LoggerFactory.getLogger(PageWriterImpl.class); - private final Compressor compressor; - private final MeasurementDescriptor desc; - private ListByteArrayOutputStream buf; - private long totalValueCount; - private long maxTimestamp; - private long minTimestamp = -1; + private static Logger LOG = LoggerFactory.getLogger(PageWriterImpl.class); + private final Compressor compressor; + private final MeasurementDescriptor desc; + private ListByteArrayOutputStream buf; + private long totalValueCount; + private long maxTimestamp; + private long minTimestamp = -1; - public PageWriterImpl(MeasurementDescriptor desc) { - this.desc = desc; - this.compressor = desc.getCompressor(); - this.buf = new ListByteArrayOutputStream(); - } + public PageWriterImpl(MeasurementDescriptor desc) { + this.desc = desc; + this.compressor = desc.getCompressor(); + this.buf = new ListByteArrayOutputStream(); + } - @Override - public void writePage(ListByteArrayOutputStream listByteArray, int valueCount, Statistics statistics, - long maxTimestamp, long minTimestamp) throws PageException { - // compress the input data - if (this.minTimestamp == -1) - this.minTimestamp = minTimestamp; - if(this.minTimestamp==-1){ - LOG.error("Write page error, {}, minTime:{}, maxTime:{}",desc,minTimestamp,maxTimestamp); - } - this.maxTimestamp = maxTimestamp; - int uncompressedSize = listByteArray.size(); - ListByteArrayOutputStream compressedBytes = compressor.compress(listByteArray); - int compressedSize = compressedBytes.size(); - PublicBAOS tempOutputStream = new PublicBAOS(estimateMaxPageHeaderSize() + compressedSize); - // write the page header to IOWriter - try { - ReadWriteThriftFormatUtils.writeDataPageHeader(uncompressedSize, 
compressedSize, valueCount, statistics, - valueCount, desc.getEncodingType(), tempOutputStream, maxTimestamp, minTimestamp); - } catch (IOException e) { - resetTimeStamp(); - throw new PageException( - "meet IO Exception in writeDataPageHeader,ignore this page,error message:" + e.getMessage()); - } - this.totalValueCount += valueCount; - try { - compressedBytes.writeAllTo(tempOutputStream); - } catch (IOException e) { - /* - * In our method, this line is to flush listByteArray to buf, both - * of them are in class of ListByteArrayOutputStream which contain - * several ByteArrayOutputStream. In general, they won't throw - * exception. The IOException is just for interface requirement of - * OutputStream. - */ - throw new PageException("meet IO Exception in buffer append,but we cannot understand it:" + e.getMessage()); - } - buf.append(tempOutputStream); - LOG.debug("page {}:write page from seriesWriter, valueCount:{}, stats:{},size:{}", desc, valueCount, statistics, - estimateMaxPageMemSize()); + @Override + public void writePage(ListByteArrayOutputStream listByteArray, int valueCount, + Statistics statistics, long maxTimestamp, long minTimestamp) throws PageException { + // compress the input data + if (this.minTimestamp == -1) + this.minTimestamp = minTimestamp; + if (this.minTimestamp == -1) { + LOG.error("Write page error, {}, minTime:{}, maxTime:{}", desc, minTimestamp, maxTimestamp); } - - private void resetTimeStamp() { - if (totalValueCount == 0) - minTimestamp = -1; + this.maxTimestamp = maxTimestamp; + int uncompressedSize = listByteArray.size(); + ListByteArrayOutputStream compressedBytes = compressor.compress(listByteArray); + int compressedSize = compressedBytes.size(); + PublicBAOS tempOutputStream = new PublicBAOS(estimateMaxPageHeaderSize() + compressedSize); + // write the page header to IOWriter + try { + ReadWriteThriftFormatUtils.writeDataPageHeader(uncompressedSize, compressedSize, valueCount, + statistics, valueCount, desc.getEncodingType(), tempOutputStream, maxTimestamp, + minTimestamp); + } catch (IOException e) { + resetTimeStamp(); + throw new PageException( + "meet IO Exception in writeDataPageHeader,ignore this page,error message:" + + e.getMessage()); } - - @Override - public void writeToFileWriter(TsFileIOWriter writer, Statistics statistics) throws IOException { - if(minTimestamp==-1){ - LOG.error("Write page error, {}, minTime:{}, maxTime:{}",desc,minTimestamp,maxTimestamp); - } - writer.startSeries(desc, compressor.getCodecName(), desc.getType(), statistics, maxTimestamp, minTimestamp); - long totalByteSize = writer.getPos(); - writer.writeBytesToStream(buf); - LOG.debug("write series to file finished:{}", desc); - long size = writer.getPos() - totalByteSize; - writer.endSeries(size, totalValueCount); - LOG.debug("page {}:write page to fileWriter,type:{},maxTime:{},minTime:{},nowPos:{},stats:{}", - desc.getMeasurementId(), desc.getType(), maxTimestamp, minTimestamp, writer.getPos(), statistics); + this.totalValueCount += valueCount; + try { + compressedBytes.writeAllTo(tempOutputStream); + } catch (IOException e) { + /* + * In our method, this line is to flush listByteArray to buf, both of them are in class of + * ListByteArrayOutputStream which contain several ByteArrayOutputStream. In general, they + * won't throw exception. The IOException is just for interface requirement of OutputStream. 
+ */ + throw new PageException( + "meet IO Exception in buffer append,but we cannot understand it:" + e.getMessage()); } + buf.append(tempOutputStream); + LOG.debug("page {}:write page from seriesWriter, valueCount:{}, stats:{},size:{}", desc, + valueCount, statistics, estimateMaxPageMemSize()); + } - @Override - public void reset() { - minTimestamp = -1; - buf.reset(); - totalValueCount = 0; - } + private void resetTimeStamp() { + if (totalValueCount == 0) + minTimestamp = -1; + } - @Override - public long estimateMaxPageMemSize() { - // return size of buffer + page max size; - return buf.size() + estimateMaxPageHeaderSize(); + @Override + public void writeToFileWriter(TsFileIOWriter writer, Statistics statistics) + throws IOException { + if (minTimestamp == -1) { + LOG.error("Write page error, {}, minTime:{}, maxTime:{}", desc, minTimestamp, maxTimestamp); } + writer.startSeries(desc, compressor.getCodecName(), desc.getType(), statistics, maxTimestamp, + minTimestamp); + long totalByteSize = writer.getPos(); + writer.writeBytesToStream(buf); + LOG.debug("write series to file finished:{}", desc); + long size = writer.getPos() - totalByteSize; + writer.endSeries(size, totalValueCount); + LOG.debug("page {}:write page to fileWriter,type:{},maxTime:{},minTime:{},nowPos:{},stats:{}", + desc.getMeasurementId(), desc.getType(), maxTimestamp, minTimestamp, writer.getPos(), + statistics); + } - private int estimateMaxPageHeaderSize() { - int digestSize = (totalValueCount == 0) ? 0 : desc.getTypeLength() * 2; - return TsFileIOWriter.metadataConverter.calculatePageHeaderSize(digestSize); - } + @Override + public void reset() { + minTimestamp = -1; + buf.reset(); + totalValueCount = 0; + } + + @Override + public long estimateMaxPageMemSize() { + // return size of buffer + page max size; + return buf.size() + estimateMaxPageHeaderSize(); + } + + private int estimateMaxPageHeaderSize() { + int digestSize = (totalValueCount == 0) ? 0 : desc.getTypeLength() * 2; + return TsFileIOWriter.metadataConverter.calculatePageHeaderSize(digestSize); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/DataPoint.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/DataPoint.java index e42ce45f..369309e0 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/DataPoint.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/DataPoint.java @@ -6,119 +6,118 @@ import cn.edu.tsinghua.tsfile.timeseries.utils.StringContainer; import cn.edu.tsinghua.tsfile.timeseries.write.series.ISeriesWriter; import cn.edu.tsinghua.tsfile.timeseries.write.record.datapoint.*; - import java.io.IOException; import java.math.BigDecimal; /** - * This is a abstract class representing a data point. DataPoint consists of a - * measurement id and a data type. subclass of DataPoint need override method - * {@code write(long time, ISeriesWriter writer)} .Every subclass has its data - * type and overrides a setting method for its data type. + * This is a abstract class representing a data point. DataPoint consists of a measurement id and a + * data type. subclass of DataPoint need override method + * {@code write(long time, ISeriesWriter writer)} .Every subclass has its data type and overrides a + * setting method for its data type. 
* * @author kangrong */ public abstract class DataPoint { - protected final TSDataType type; - protected final String measurementId; - - public DataPoint(TSDataType type, String measurementId) { - this.type = type; - this.measurementId = measurementId; - } - - /** - * Construct one data point with data type and value - * - * @param dataType data type - * @param measurementId measurement id - * @param value value in string format - * @return data point class according to data type - */ - public static DataPoint getDataPoint(TSDataType dataType, String measurementId, String value) { - DataPoint dataPoint = null; - switch (dataType) { - case INT32: - dataPoint = new IntDataPoint(measurementId, Integer.valueOf(value)); - break; - case INT64: - dataPoint = new LongDataPoint(measurementId, Long.valueOf(value)); - break; - case FLOAT: - dataPoint = new FloatDataPoint(measurementId, Float.valueOf(value)); - break; - case DOUBLE: - dataPoint = new DoubleDataPoint(measurementId, Double.valueOf(value)); - break; - case BOOLEAN: - dataPoint = new BooleanDataPoint(measurementId, Boolean.valueOf(value)); - break; - case TEXT: - dataPoint = new StringDataPoint(measurementId, new Binary(value)); - break; - case BIGDECIMAL: - dataPoint = new BigDecimalDataPoint(measurementId, new BigDecimal(value)); - break; - case ENUMS: - dataPoint = new EnumDataPoint(measurementId, Integer.valueOf(value)); - break; - default: - throw new UnSupportedDataTypeException("This data type is not supoort -" + dataType); - } - return dataPoint; - } - - /** - * write to seriesWriter and return the series name - * - * @param time timestamp - * @param writer writer - * @throws IOException exception in IO - */ - public abstract void write(long time, ISeriesWriter writer) throws IOException; - - public String getMeasurementId() { - return measurementId; - } - - public abstract Object getValue(); - - public TSDataType getType() { - return type; - } - - @Override - public String toString() { - StringContainer sc = new StringContainer(" "); - sc.addTail("{measurement id:", measurementId, "type:", type, "value:", getValue(), "}"); - return sc.toString(); - } - - public void setInteger(int value) { - throw new UnsupportedOperationException("set Integer not support in DataPoint"); - } - - public void setLong(long value) { - throw new UnsupportedOperationException("set Long not support in DataPoint"); - } - - public void setBoolean(boolean value) { - throw new UnsupportedOperationException("set Boolean not support in DataPoint"); - } - - public void setFloat(float value) { - throw new UnsupportedOperationException("set Float not support in DataPoint"); - } - - public void setDouble(double value) { - throw new UnsupportedOperationException("set Double not support in DataPoint"); - } - - public void setString(Binary value) { - throw new UnsupportedOperationException("set String not support in DataPoint"); - } - - public void setBigDecimal(BigDecimal value) { - throw new UnsupportedOperationException("set BigDecimal not support in DataPoint"); + protected final TSDataType type; + protected final String measurementId; + + public DataPoint(TSDataType type, String measurementId) { + this.type = type; + this.measurementId = measurementId; + } + + /** + * Construct one data point with data type and value + * + * @param dataType data type + * @param measurementId measurement id + * @param value value in string format + * @return data point class according to data type + */ + public static DataPoint getDataPoint(TSDataType dataType, String 
measurementId, String value) { + DataPoint dataPoint = null; + switch (dataType) { + case INT32: + dataPoint = new IntDataPoint(measurementId, Integer.valueOf(value)); + break; + case INT64: + dataPoint = new LongDataPoint(measurementId, Long.valueOf(value)); + break; + case FLOAT: + dataPoint = new FloatDataPoint(measurementId, Float.valueOf(value)); + break; + case DOUBLE: + dataPoint = new DoubleDataPoint(measurementId, Double.valueOf(value)); + break; + case BOOLEAN: + dataPoint = new BooleanDataPoint(measurementId, Boolean.valueOf(value)); + break; + case TEXT: + dataPoint = new StringDataPoint(measurementId, new Binary(value)); + break; + case BIGDECIMAL: + dataPoint = new BigDecimalDataPoint(measurementId, new BigDecimal(value)); + break; + case ENUMS: + dataPoint = new EnumDataPoint(measurementId, Integer.valueOf(value)); + break; + default: + throw new UnSupportedDataTypeException("This data type is not supoort -" + dataType); } + return dataPoint; + } + + /** + * write to seriesWriter and return the series name + * + * @param time timestamp + * @param writer writer + * @throws IOException exception in IO + */ + public abstract void write(long time, ISeriesWriter writer) throws IOException; + + public String getMeasurementId() { + return measurementId; + } + + public abstract Object getValue(); + + public TSDataType getType() { + return type; + } + + @Override + public String toString() { + StringContainer sc = new StringContainer(" "); + sc.addTail("{measurement id:", measurementId, "type:", type, "value:", getValue(), "}"); + return sc.toString(); + } + + public void setInteger(int value) { + throw new UnsupportedOperationException("set Integer not support in DataPoint"); + } + + public void setLong(long value) { + throw new UnsupportedOperationException("set Long not support in DataPoint"); + } + + public void setBoolean(boolean value) { + throw new UnsupportedOperationException("set Boolean not support in DataPoint"); + } + + public void setFloat(float value) { + throw new UnsupportedOperationException("set Float not support in DataPoint"); + } + + public void setDouble(double value) { + throw new UnsupportedOperationException("set Double not support in DataPoint"); + } + + public void setString(Binary value) { + throw new UnsupportedOperationException("set String not support in DataPoint"); + } + + public void setBigDecimal(BigDecimal value) { + throw new UnsupportedOperationException("set BigDecimal not support in DataPoint"); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/TSRecord.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/TSRecord.java index 18dd01c8..e5362191 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/TSRecord.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/TSRecord.java @@ -1,7 +1,6 @@ package cn.edu.tsinghua.tsfile.timeseries.write.record; import cn.edu.tsinghua.tsfile.timeseries.utils.StringContainer; - import java.util.ArrayList; import java.util.List; @@ -12,30 +11,30 @@ * @author kangrong */ public class TSRecord { - public long time; - public String deltaObjectId; - public List dataPointList = new ArrayList<>(); + public long time; + public String deltaObjectId; + public List dataPointList = new ArrayList<>(); - public TSRecord(long timestamp, String deltaObjectId) { - this.time = timestamp; - this.deltaObjectId = deltaObjectId; - } + public TSRecord(long timestamp, String deltaObjectId) { + this.time = timestamp; + this.deltaObjectId = deltaObjectId; + 
} - public void setTime(long timestamp) { - this.time = timestamp; - } + public void setTime(long timestamp) { + this.time = timestamp; + } - public void addTuple(DataPoint tuple) { - this.dataPointList.add(tuple); - } + public void addTuple(DataPoint tuple) { + this.dataPointList.add(tuple); + } - public String toString() { - StringContainer sc = new StringContainer(" "); - sc.addTail("{delta object id:", deltaObjectId, "time:", time, ",data:["); - for (DataPoint tuple : dataPointList) { - sc.addTail(tuple); - } - sc.addTail("]}"); - return sc.toString(); + public String toString() { + StringContainer sc = new StringContainer(" "); + sc.addTail("{delta object id:", deltaObjectId, "time:", time, ",data:["); + for (DataPoint tuple : dataPointList) { + sc.addTail(tuple); } + sc.addTail("]}"); + return sc.toString(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/BigDecimalDataPoint.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/BigDecimalDataPoint.java index 2f930252..edc5accc 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/BigDecimalDataPoint.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/BigDecimalDataPoint.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.timeseries.write.series.ISeriesWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; import java.math.BigDecimal; @@ -16,30 +15,30 @@ * @see DataPoint DataPoint */ public class BigDecimalDataPoint extends DataPoint { - private static final Logger LOG = LoggerFactory.getLogger(BigDecimalDataPoint.class); - private BigDecimal value; + private static final Logger LOG = LoggerFactory.getLogger(BigDecimalDataPoint.class); + private BigDecimal value; - public BigDecimalDataPoint(String measurementId, BigDecimal v) { - super(TSDataType.BIGDECIMAL, measurementId); - this.value = v; - } + public BigDecimalDataPoint(String measurementId, BigDecimal v) { + super(TSDataType.BIGDECIMAL, measurementId); + this.value = v; + } - @Override - public void write(long time, ISeriesWriter writer) throws IOException { - if (writer == null) { - LOG.warn("given ISeriesWriter is null, do nothing and return"); - return; - } - writer.write(time, value); + @Override + public void write(long time, ISeriesWriter writer) throws IOException { + if (writer == null) { + LOG.warn("given ISeriesWriter is null, do nothing and return"); + return; } + writer.write(time, value); + } - @Override - public Object getValue() { - return value; - } + @Override + public Object getValue() { + return value; + } - @Override - public void setBigDecimal(BigDecimal value) { - this.value = value; - } + @Override + public void setBigDecimal(BigDecimal value) { + this.value = value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/BooleanDataPoint.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/BooleanDataPoint.java index c9dbc056..fe49f6b4 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/BooleanDataPoint.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/BooleanDataPoint.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.timeseries.write.series.ISeriesWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; /** @@ -15,30 +14,30 @@ * @see DataPoint DataPoint */ public class BooleanDataPoint extends DataPoint { - private static final 
Logger LOG = LoggerFactory.getLogger(BooleanDataPoint.class); - private boolean value; + private static final Logger LOG = LoggerFactory.getLogger(BooleanDataPoint.class); + private boolean value; - public BooleanDataPoint(String measurementId, boolean v) { - super(TSDataType.BOOLEAN, measurementId); - this.value = v; - } + public BooleanDataPoint(String measurementId, boolean v) { + super(TSDataType.BOOLEAN, measurementId); + this.value = v; + } - @Override - public void write(long time, ISeriesWriter writer) throws IOException { - if (writer == null) { - LOG.warn("given ISeriesWriter is null, do nothing and return"); - return; - } - writer.write(time, value); + @Override + public void write(long time, ISeriesWriter writer) throws IOException { + if (writer == null) { + LOG.warn("given ISeriesWriter is null, do nothing and return"); + return; } + writer.write(time, value); + } - @Override - public Object getValue() { - return value; - } + @Override + public Object getValue() { + return value; + } - @Override - public void setBoolean(boolean value) { - this.value = value; - } + @Override + public void setBoolean(boolean value) { + this.value = value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/DoubleDataPoint.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/DoubleDataPoint.java index dcaa6c39..a10675b9 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/DoubleDataPoint.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/DoubleDataPoint.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.timeseries.write.series.ISeriesWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; /** @@ -15,30 +14,30 @@ * @see DataPoint DataPoint */ public class DoubleDataPoint extends DataPoint { - private static final Logger LOG = LoggerFactory.getLogger(DoubleDataPoint.class); - private double value; + private static final Logger LOG = LoggerFactory.getLogger(DoubleDataPoint.class); + private double value; - public DoubleDataPoint(String measurementId, double v) { - super(TSDataType.DOUBLE, measurementId); - this.value = v; - } + public DoubleDataPoint(String measurementId, double v) { + super(TSDataType.DOUBLE, measurementId); + this.value = v; + } - @Override - public void write(long time, ISeriesWriter writer) throws IOException { - if (writer == null) { - LOG.warn("given ISeriesWriter is null, do nothing and return"); - return; - } - writer.write(time, value); + @Override + public void write(long time, ISeriesWriter writer) throws IOException { + if (writer == null) { + LOG.warn("given ISeriesWriter is null, do nothing and return"); + return; } + writer.write(time, value); + } - @Override - public Object getValue() { - return value; - } + @Override + public Object getValue() { + return value; + } - @Override - public void setDouble(double value) { - this.value = value; - } + @Override + public void setDouble(double value) { + this.value = value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/EnumDataPoint.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/EnumDataPoint.java index 3c35cc13..5e4b3634 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/EnumDataPoint.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/EnumDataPoint.java @@ -5,7 +5,6 @@ import 
cn.edu.tsinghua.tsfile.timeseries.write.series.ISeriesWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; /** @@ -15,31 +14,31 @@ * @see DataPoint DataPoint */ public class EnumDataPoint extends DataPoint { - private static final Logger LOG = LoggerFactory.getLogger(EnumDataPoint.class); - private int value; - - public EnumDataPoint(String measurementId, int v) { - super(TSDataType.ENUMS, measurementId); - this.value = v; + private static final Logger LOG = LoggerFactory.getLogger(EnumDataPoint.class); + private int value; + + public EnumDataPoint(String measurementId, int v) { + super(TSDataType.ENUMS, measurementId); + this.value = v; + } + + @Override + public void write(long time, ISeriesWriter writer) throws IOException { + if (writer == null) { + LOG.warn("given ISeriesWriter is null, do nothing and return"); + return; } + writer.write(time, value); - @Override - public void write(long time, ISeriesWriter writer) throws IOException { - if (writer == null) { - LOG.warn("given ISeriesWriter is null, do nothing and return"); - return; - } - writer.write(time, value); - - } + } - @Override - public Object getValue() { - return value; - } + @Override + public Object getValue() { + return value; + } - @Override - public void setInteger(int value) { - this.value = value; - } + @Override + public void setInteger(int value) { + this.value = value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/FloatDataPoint.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/FloatDataPoint.java index 4d05197c..86e2be4a 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/FloatDataPoint.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/FloatDataPoint.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.timeseries.write.series.ISeriesWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; /** @@ -15,31 +14,31 @@ * @see DataPoint DataPoint */ public class FloatDataPoint extends DataPoint { - private static final Logger LOG = LoggerFactory.getLogger(FloatDataPoint.class); - private float value; - - public FloatDataPoint(String measurementId, float v) { - super(TSDataType.FLOAT, measurementId); - this.value = v; + private static final Logger LOG = LoggerFactory.getLogger(FloatDataPoint.class); + private float value; + + public FloatDataPoint(String measurementId, float v) { + super(TSDataType.FLOAT, measurementId); + this.value = v; + } + + @Override + public void write(long time, ISeriesWriter writer) throws IOException { + if (writer == null) { + LOG.warn("given ISeriesWriter is null, do nothing and return"); + return; } + writer.write(time, value); - @Override - public void write(long time, ISeriesWriter writer) throws IOException { - if (writer == null) { - LOG.warn("given ISeriesWriter is null, do nothing and return"); - return; - } - writer.write(time, value); - - } + } - @Override - public Object getValue() { - return value; - } + @Override + public Object getValue() { + return value; + } - @Override - public void setFloat(float value) { - this.value = value; - } + @Override + public void setFloat(float value) { + this.value = value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/IntDataPoint.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/IntDataPoint.java index 0dc9ff36..fdbdb0d8 100644 --- 
a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/IntDataPoint.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/IntDataPoint.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.timeseries.write.series.ISeriesWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; /** @@ -15,31 +14,31 @@ * @see DataPoint DataPoint */ public class IntDataPoint extends DataPoint { - private static final Logger LOG = LoggerFactory.getLogger(IntDataPoint.class); - private int value; - - public IntDataPoint(String measurementId, int v) { - super(TSDataType.INT32, measurementId); - this.value = v; + private static final Logger LOG = LoggerFactory.getLogger(IntDataPoint.class); + private int value; + + public IntDataPoint(String measurementId, int v) { + super(TSDataType.INT32, measurementId); + this.value = v; + } + + @Override + public void write(long time, ISeriesWriter writer) throws IOException { + if (writer == null) { + LOG.warn("given ISeriesWriter is null, do nothing and return"); + return; } + writer.write(time, value); - @Override - public void write(long time, ISeriesWriter writer) throws IOException { - if (writer == null) { - LOG.warn("given ISeriesWriter is null, do nothing and return"); - return; - } - writer.write(time, value); - - } + } - @Override - public Object getValue() { - return value; - } + @Override + public Object getValue() { + return value; + } - @Override - public void setInteger(int value) { - this.value = value; - } + @Override + public void setInteger(int value) { + this.value = value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/LongDataPoint.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/LongDataPoint.java index ae35032a..803238dd 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/LongDataPoint.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/LongDataPoint.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.timeseries.write.series.ISeriesWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; /** @@ -15,31 +14,31 @@ * @see DataPoint DataPoint */ public class LongDataPoint extends DataPoint { - private static final Logger LOG = LoggerFactory.getLogger(LongDataPoint.class); - private long value; - - public LongDataPoint(String measurementId, long v) { - super(TSDataType.INT64, measurementId); - this.value = v; + private static final Logger LOG = LoggerFactory.getLogger(LongDataPoint.class); + private long value; + + public LongDataPoint(String measurementId, long v) { + super(TSDataType.INT64, measurementId); + this.value = v; + } + + @Override + public void write(long time, ISeriesWriter writer) throws IOException { + if (writer == null) { + LOG.warn("given ISeriesWriter is null, do nothing and return"); + return; } + writer.write(time, value); - @Override - public void write(long time, ISeriesWriter writer) throws IOException { - if (writer == null) { - LOG.warn("given ISeriesWriter is null, do nothing and return"); - return; - } - writer.write(time, value); - - } + } - @Override - public Object getValue() { - return value; - } + @Override + public Object getValue() { + return value; + } - @Override - public void setLong(long value) { - this.value = value; - } + @Override + public void setLong(long value) { + this.value = value; + } } diff --git 
a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/StringDataPoint.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/StringDataPoint.java index 80de74f0..d04a1b96 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/StringDataPoint.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/record/datapoint/StringDataPoint.java @@ -6,7 +6,6 @@ import cn.edu.tsinghua.tsfile.timeseries.write.series.ISeriesWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.IOException; /** @@ -16,31 +15,31 @@ * @see DataPoint DataPoint */ public class StringDataPoint extends DataPoint { - private static final Logger LOG = LoggerFactory.getLogger(StringDataPoint.class); - private Binary value; - - public StringDataPoint(String measurementId, Binary v) { - super(TSDataType.TEXT, measurementId); - this.value = v; + private static final Logger LOG = LoggerFactory.getLogger(StringDataPoint.class); + private Binary value; + + public StringDataPoint(String measurementId, Binary v) { + super(TSDataType.TEXT, measurementId); + this.value = v; + } + + @Override + public void write(long time, ISeriesWriter writer) throws IOException { + if (writer == null) { + LOG.warn("given ISeriesWriter is null, do nothing and return"); + return; } + writer.write(time, value); - @Override - public void write(long time, ISeriesWriter writer) throws IOException { - if (writer == null) { - LOG.warn("given ISeriesWriter is null, do nothing and return"); - return; - } - writer.write(time, value); - - } + } - @Override - public Object getValue() { - return value; - } + @Override + public Object getValue() { + return value; + } - @Override - public void setString(Binary value) { - this.value = value; - } + @Override + public void setString(Binary value) { + this.value = value; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/FileSchema.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/FileSchema.java index 69ebe020..50626d0b 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/FileSchema.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/FileSchema.java @@ -8,7 +8,6 @@ import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -59,10 +58,8 @@ public FileSchema(Map measurements, * Add a property to {@code props}.
* If the key exists, this method will update the value of the key. * - * @param key - * key of property - * @param value - * value of property + * @param key key of property + * @param value value of property */ public void addProp(String key, String value) { additionalProperties.put(key, value); @@ -119,10 +116,8 @@ public Map getDescriptor() { /** * add a TimeSeriesMetadata into this fileSchema * - * @param measurementId - * - the measurement id of this TimeSeriesMetadata - * @param type - * - the data type of this TimeSeriesMetadata + * @param measurementId - the measurement id of this TimeSeriesMetadata + * @param type - the data type of this TimeSeriesMetadata */ private void addTimeSeriesMetadata(String measurementId, TSDataType type) { TimeSeriesMetadata ts = new TimeSeriesMetadata(measurementId, type); diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/SchemaBuilder.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/SchemaBuilder.java index 5e483671..f0faa85e 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/SchemaBuilder.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/SchemaBuilder.java @@ -4,7 +4,6 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import cn.edu.tsinghua.tsfile.timeseries.write.desc.MeasurementDescriptor; - import java.util.Map; /** @@ -13,73 +12,74 @@ * @author qiaojialin */ public class SchemaBuilder { - private FileSchema fileSchema; + private FileSchema fileSchema; - public SchemaBuilder() { - fileSchema = new FileSchema(); - } + public SchemaBuilder() { + fileSchema = new FileSchema(); + } - /** - * add one series to tsfile schema - * - * @param measurementId (not null) id of the series - * @param dataType (not null) series data type - * @param tsEncoding (not null) encoding method you specified - * @param props information in encoding method - * @return this - */ - public SchemaBuilder addSeries(String measurementId, TSDataType dataType, TSEncoding tsEncoding, - Map props) { - MeasurementDescriptor md = new MeasurementDescriptor(measurementId, dataType, tsEncoding, props); - fileSchema.registerMeasurement(md); - return this; - } + /** + * add one series to tsfile schema + * + * @param measurementId (not null) id of the series + * @param dataType (not null) series data type + * @param tsEncoding (not null) encoding method you specified + * @param props information in encoding method + * @return this + */ + public SchemaBuilder addSeries(String measurementId, TSDataType dataType, TSEncoding tsEncoding, + Map props) { + MeasurementDescriptor md = + new MeasurementDescriptor(measurementId, dataType, tsEncoding, props); + fileSchema.registerMeasurement(md); + return this; + } - /** - * MeasurementDescriptor is the schema of one series - * - * @param descriptor series schema - * @return schema builder - */ - public SchemaBuilder addSeries(MeasurementDescriptor descriptor) { - fileSchema.registerMeasurement(descriptor); - return this; - } + /** + * MeasurementDescriptor is the schema of one series + * + * @param descriptor series schema + * @return schema builder + */ + public SchemaBuilder addSeries(MeasurementDescriptor descriptor) { + fileSchema.registerMeasurement(descriptor); + return this; + } - /** - * add one series to tsfile schema - * - * @param measurementId (not null) id of the series - * @param dataType (not null) series data type - * @param encoding (not null) encoding method you specified 
- * @param props information in encoding method - * @return this - */ - public SchemaBuilder addSeries(String measurementId, TSDataType dataType, String encoding, - Map props) { - TSEncoding tsEncoding = TSEncoding.valueOf(encoding); - addSeries(measurementId, dataType, tsEncoding, props); - return this; - } + /** + * add one series to tsfile schema + * + * @param measurementId (not null) id of the series + * @param dataType (not null) series data type + * @param encoding (not null) encoding method you specified + * @param props information in encoding method + * @return this + */ + public SchemaBuilder addSeries(String measurementId, TSDataType dataType, String encoding, + Map props) { + TSEncoding tsEncoding = TSEncoding.valueOf(encoding); + addSeries(measurementId, dataType, tsEncoding, props); + return this; + } - public SchemaBuilder addProp(String key, String value) { - fileSchema.addProp(key, value); - return this; - } + public SchemaBuilder addProp(String key, String value) { + fileSchema.addProp(key, value); + return this; + } - public SchemaBuilder setProps(Map props) { - fileSchema.setProps(props); - return this; - } + public SchemaBuilder setProps(Map props) { + fileSchema.setProps(props); + return this; + } - /** - * get file schema after adding all series and properties - * - * @return constructed file schema - */ - public FileSchema build() { - return this.fileSchema; - } + /** + * get file schema after adding all series and properties + * + * @return constructed file schema + */ + public FileSchema build() { + return this.fileSchema; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/JsonConverter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/JsonConverter.java index 76caaac6..9e731f13 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/JsonConverter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/JsonConverter.java @@ -2,12 +2,10 @@ import java.util.HashMap; import java.util.Map; - import org.json.JSONArray; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; @@ -50,10 +48,8 @@ public class JsonConverter { /** * input a FileSchema and a jsonObject to be converted, * - * @param jsonSchema - * the whole schema in type of JSONObject - * @throws InvalidJsonSchemaException - * throw exception when json schema is not valid + * @param jsonSchema the whole schema in type of JSONObject + * @throws InvalidJsonSchemaException throw exception when json schema is not valid * @return converted measurement descriptors */ @@ -64,8 +60,8 @@ public static Map converterJsonToMeasurementDescr throw new InvalidJsonSchemaException("missing fields:" + JsonFormatConstant.JSON_SCHEMA); JSONArray schemaArray = jsonSchema.getJSONArray(JsonFormatConstant.JSON_SCHEMA); for (int i = 0; i < schemaArray.length(); i++) { - MeasurementDescriptor mDescriptor = convertJsonToMeasureMentDescriptor( - schemaArray.getJSONObject(i)); + MeasurementDescriptor mDescriptor = + convertJsonToMeasureMentDescriptor(schemaArray.getJSONObject(i)); result.put(mDescriptor.getMeasurementId(), mDescriptor); } return result; @@ -95,8 +91,8 @@ public static MeasurementDescriptor convertJsonToMeasureMentDescriptor( String measurementId = 
measurementObj.getString(JsonFormatConstant.MEASUREMENT_UID); TSDataType type = TSDataType.valueOf(measurementObj.getString(JsonFormatConstant.DATA_TYPE)); // encoding information - TSEncoding encoding = TSEncoding - .valueOf(measurementObj.getString(JsonFormatConstant.MEASUREMENT_ENCODING)); + TSEncoding encoding = + TSEncoding.valueOf(measurementObj.getString(JsonFormatConstant.MEASUREMENT_ENCODING)); // all information of one series Map props = new HashMap<>(); for (Object key : measurementObj.keySet()) { @@ -109,13 +105,11 @@ public static MeasurementDescriptor convertJsonToMeasureMentDescriptor( /** * given a FileSchema and convert it into a JSONObject * - * @param fileSchema - * the given schema in type of {@linkplain FileSchema FileSchema} + * @param fileSchema the given schema in type of {@linkplain FileSchema FileSchema} * @return converted File Schema in type of JSONObject */ - public static JSONObject converterFileSchemaToJson( - FileSchema fileSchema) { + public static JSONObject converterFileSchemaToJson(FileSchema fileSchema) { JSONObject ret = new JSONObject(); JSONArray jsonSchema = new JSONArray(); JSONObject jsonProperties = new JSONObject(); @@ -130,11 +124,13 @@ public static JSONObject converterFileSchemaToJson( } private static JSONObject convertMeasurementDescriptorToJson( - MeasurementDescriptor measurementDescriptor) { + MeasurementDescriptor measurementDescriptor) { JSONObject measurementObj = new JSONObject(); - measurementObj.put(JsonFormatConstant.MEASUREMENT_UID, measurementDescriptor.getMeasurementId()); + measurementObj.put(JsonFormatConstant.MEASUREMENT_UID, + measurementDescriptor.getMeasurementId()); measurementObj.put(JsonFormatConstant.DATA_TYPE, measurementDescriptor.getType().toString()); - measurementObj.put(JsonFormatConstant.MEASUREMENT_ENCODING, measurementDescriptor.getEncodingType().toString()); + measurementObj.put(JsonFormatConstant.MEASUREMENT_ENCODING, + measurementDescriptor.getEncodingType().toString()); measurementDescriptor.getProps().forEach(measurementObj::put); return measurementObj; } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSDataTypeConverter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSDataTypeConverter.java index 1559ff62..6a2c3047 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSDataTypeConverter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSDataTypeConverter.java @@ -7,14 +7,12 @@ import cn.edu.tsinghua.tsfile.timeseries.utils.TSFileEnum; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.util.List; import java.util.Map; /** - * Each subclass of TSDataTypeConverter responds a enumerate value in - * {@linkplain TSDataType TSDataType}, which stores several - * configuration related to responding encoding type.
+ * Each subclass of TSDataTypeConverter corresponds to an enumerated value in {@linkplain TSDataType + * TSDataType} and stores configuration related to the corresponding encoding type.

* Each TSDataType has a responding TSDataTypeConverter. The design referring to visit pattern * provides same outer interface for different TSDataTypes and gets rid of the duplicate switch-case * code. @@ -22,134 +20,132 @@ * @author kangrong */ public abstract class TSDataTypeConverter { - private static final Logger LOG = LoggerFactory.getLogger(TSDataTypeConverter.class); + private static final Logger LOG = LoggerFactory.getLogger(TSDataTypeConverter.class); - /** - * A static method to check the input parameter. If it's legal, return this parameter in its - * appropriate class type. - * - * @param type - data type - * @param pmKey - argument key in JSON object key-value pair - * @param value - argument value in JSON object key-value pair in type of String - * @return - argument value in JSON object key-value pair in its suitable type - * @throws MetadataArgsErrorException throw exception when metadata has wrong args - */ - public static Object checkParameter(TSDataType type, String pmKey, String value) - throws MetadataArgsErrorException { - switch (type) { - case ENUMS: - return (new ENUMS()).checkParameter(pmKey, value); - default: - throw new MetadataArgsErrorException("don't need args:{}" + pmKey); - } + /** + * A static method to check the input parameter. If it's legal, return this parameter in its + * appropriate class type. + * + * @param type - data type + * @param pmKey - argument key in JSON object key-value pair + * @param value - argument value in JSON object key-value pair in type of String + * @return - argument value in JSON object key-value pair in its suitable type + * @throws MetadataArgsErrorException throw exception when metadata has wrong args + */ + public static Object checkParameter(TSDataType type, String pmKey, String value) + throws MetadataArgsErrorException { + switch (type) { + case ENUMS: + return (new ENUMS()).checkParameter(pmKey, value); + default: + throw new MetadataArgsErrorException("don't need args:{}" + pmKey); } + } - /** - * Up to now, TSDataTypeConverter has only Enum converter - * - * @param type data type of TsFile - * @return Converter to convert data type - * @since version 0.1.0 - */ - public static TSDataTypeConverter getConverter(TSDataType type) { - switch (type) { - case ENUMS: - return new ENUMS(); - default: - LOG.error("UnsupportedDataTypeException:{}", type); - throw new UnsupportedOperationException(); - } + /** + * Up to now, TSDataTypeConverter has only Enum converter + * + * @param type data type of TsFile + * @return Converter to convert data type + * @since version 0.1.0 + */ + public static TSDataTypeConverter getConverter(TSDataType type) { + switch (type) { + case ENUMS: + return new ENUMS(); + default: + LOG.error("UnsupportedDataTypeException:{}", type); + throw new UnsupportedOperationException(); } + } + + /** + * for ENUMS, JSON is a method of the initialization. Each ENUMS in json-format schema should have + * data value parameters. initFromProps gets values from JSON object which would be used latter. + * If this type has extra parameter to construct, override it. + * + * @param props - properties which contains information DataTypeConverter needs + */ + public void initFromProps(Map props) {} + + /** + * based on visit pattern to provide unified parameter type in interface. write data values to + * VseriesMetaData + * + * @param v - VInTimeSeriesChunkMetaData to be set data + */ + public void setDataValues(VInTimeSeriesChunkMetaData v) {} + + /** + * For a kind of datatypeConverter, check the input parameter. 
If it's legal, return this + * parameter in its appropriate class type. It needs subclass extending. + * + * @param pmKey - argument key in JSON object key-value pair + * @param value - argument value in JSON object key-value pair in type of String + * @return - default return is null which means this data type needn't the parameter + * @throws MetadataArgsErrorException throw exception when metadata has wrong args + */ + public Object checkParameter(String pmKey, String value) throws MetadataArgsErrorException { + throw new MetadataArgsErrorException("don't need args:{}" + pmKey); + } + + public static class ENUMS extends TSDataTypeConverter { + private TSFileEnum tsfileEnum = null; /** - * for ENUMS, JSON is a method of the initialization. Each ENUMS in json-format schema should - * have data value parameters. initFromProps gets values from JSON object which would be - * used latter. If this type has extra parameter to construct, override it. + * input a enum string value, return it ordinal integer * - * @param props - properties which contains information DataTypeConverter needs + * @param v - enum string + * @return - ordinal integer */ + public int parseValue(String v) { + if (v == null || "".equals(v)) { + LOG.warn("write enum null, String:{}", v); + return -1; + } + if (tsfileEnum == null) { + LOG.warn("TSDataTypeConverter is not initialized"); + return -1; + } + return tsfileEnum.enumOrdinal(v); + } + + @Override public void initFromProps(Map props) { + if (props == null || !props.containsKey(JsonFormatConstant.ENUM_VALUES)) { + LOG.warn("ENUMS has no data values."); + return; + } + String valueStr = props.get(JsonFormatConstant.ENUM_VALUES).replaceAll("\"", ""); + valueStr = valueStr.substring(1, valueStr.length() - 1); + String[] values = valueStr.split(","); + tsfileEnum = new TSFileEnum(); + for (String value : values) { + tsfileEnum.addTSFileEnum(value); + } } - /** - * based on visit pattern to provide unified parameter type in interface. write data values to - * VseriesMetaData - * - * @param v - VInTimeSeriesChunkMetaData to be set data - */ + @Override public void setDataValues(VInTimeSeriesChunkMetaData v) { + if (tsfileEnum != null) { + List dataValues = tsfileEnum.getEnumDataValues(); + v.setEnumValues(dataValues); + } } - /** - * For a kind of datatypeConverter, check the input parameter. If it's legal, return this - * parameter in its appropriate class type. It needs subclass extending. 
- * - * @param pmKey - argument key in JSON object key-value pair - * @param value - argument value in JSON object key-value pair in type of String - * @return - default return is null which means this data type needn't the parameter - * @throws MetadataArgsErrorException throw exception when metadata has wrong args - */ + @Override public Object checkParameter(String pmKey, String value) throws MetadataArgsErrorException { - throw new MetadataArgsErrorException("don't need args:{}" + pmKey); + if (JsonFormatConstant.ENUM_VALUES.equals(pmKey)) { + return value.split(JsonFormatConstant.ENUM_VALUES_SEPARATOR); + } else { + throw new MetadataArgsErrorException("don't need args:{}" + JsonFormatConstant.ENUM_VALUES); + } } - public static class ENUMS extends TSDataTypeConverter { - private TSFileEnum tsfileEnum = null; - - /** - * input a enum string value, return it ordinal integer - * - * @param v - enum string - * @return - ordinal integer - */ - public int parseValue(String v) { - if (v == null || "".equals(v)) { - LOG.warn("write enum null, String:{}", v); - return -1; - } - if (tsfileEnum == null) { - LOG.warn("TSDataTypeConverter is not initialized"); - return -1; - } - return tsfileEnum.enumOrdinal(v); - } - - @Override - public void initFromProps(Map props) { - if (props == null || !props.containsKey(JsonFormatConstant.ENUM_VALUES)) { - LOG.warn("ENUMS has no data values."); - return; - } - String valueStr = props.get(JsonFormatConstant.ENUM_VALUES).replaceAll("\"", ""); - valueStr = valueStr.substring(1, valueStr.length() - 1); - String[] values = valueStr.split(","); - tsfileEnum = new TSFileEnum(); - for (String value : values) { - tsfileEnum.addTSFileEnum(value); - } - } - - @Override - public void setDataValues(VInTimeSeriesChunkMetaData v) { - if (tsfileEnum != null) { - List dataValues = tsfileEnum.getEnumDataValues(); - v.setEnumValues(dataValues); - } - } - - @Override - public Object checkParameter(String pmKey, String value) throws MetadataArgsErrorException { - if (JsonFormatConstant.ENUM_VALUES.equals(pmKey)) { - return value.split(JsonFormatConstant.ENUM_VALUES_SEPARATOR); - } else { - throw new MetadataArgsErrorException("don't need args:{}" + JsonFormatConstant.ENUM_VALUES); - } - } - - @Override - public String toString() { - return tsfileEnum.toString(); - } + @Override + public String toString() { + return tsfileEnum.toString(); } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSEncodingConverter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSEncodingConverter.java index f9f6d4e2..b6f84fe3 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSEncodingConverter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSEncodingConverter.java @@ -12,13 +12,11 @@ import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.util.Map; /** - * Each subclass of TSEncodingConverter responds a enumerate value in - * {@linkplain TSEncoding TSEncoding}, which stores several - * configuration related to responding encoding type to generate + * Each subclass of TSEncodingConverter responds a enumerate value in {@linkplain TSEncoding + * TSEncoding}, which stores several configuration related to responding encoding type to generate * {@linkplain Encoder Encoder} instance.
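 * <p>A minimal usage sketch (an assumption for illustration, not part of this change):
 * <pre>{@code
 * TSEncodingConverter converter = TSEncodingConverter.getConverter(TSEncoding.RLE);
 * converter.initFromProps("s1", null);             // null props fall back to TSFileConfig defaults
 * Encoder encoder = converter.getEncoder("s1", TSDataType.INT32);  // yields an IntRleEncoder
 * }</pre>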
* Each TSEncoding has a responding TSEncodingConverter. The design referring to visit pattern * provides same outer interface for different TSEncodings and gets rid of the duplicate switch-case @@ -27,268 +25,264 @@ * @author kangrong */ public abstract class TSEncodingConverter { - private static final Logger LOG = LoggerFactory.getLogger(TSEncodingConverter.class); - protected final TSFileConfig conf; + private static final Logger LOG = LoggerFactory.getLogger(TSEncodingConverter.class); + protected final TSFileConfig conf; + + public TSEncodingConverter() { + this.conf = TSFileDescriptor.getInstance().getConfig(); + } - public TSEncodingConverter() { - this.conf = TSFileDescriptor.getInstance().getConfig(); + /** + * return responding TSEncodingConverter from a TSEncoding + * + * @param type - given encoding type + * @return - responding TSEncodingConverter + */ + public static TSEncodingConverter getConverter(TSEncoding type) { + switch (type) { + case PLAIN: + return new PLAIN(); + case RLE: + return new RLE(); + case TS_2DIFF: + return new TS_2DIFF(); + case BITMAP: + return new BITMAP(); + case GORILLA: + return new GORILLA(); + default: + throw new UnsupportedOperationException(type.toString()); } + } - /** - * return responding TSEncodingConverter from a TSEncoding - * - * @param type - given encoding type - * @return - responding TSEncodingConverter - */ - public static TSEncodingConverter getConverter(TSEncoding type) { - switch (type) { - case PLAIN: - return new PLAIN(); - case RLE: - return new RLE(); - case TS_2DIFF: - return new TS_2DIFF(); - case BITMAP: - return new BITMAP(); - case GORILLA: - return new GORILLA(); - default: - throw new UnsupportedOperationException(type.toString()); - } + /** + * check the validity of input parameter. If it's valid, return this parameter in its appropriate + * type. + * + * @param encoding - encoding type + * @param pmKey - argument key in JSON object key-value pair + * @param value - argument value in JSON object key-value pair in type of String + * @return - argument value in JSON object key-value pair in its suitable type + * @throws MetadataArgsErrorException throw exception when metadata has wrong args + */ + public static Object checkParameter(TSEncoding encoding, String pmKey, String value) + throws MetadataArgsErrorException { + return getConverter(encoding).checkParameter(pmKey, value); + } + + /** + * return a series's encoder with different types and parameters according to its measurement id + * and data type + * + * @param measurementId - given measurement id + * @param type - given data type + * @return - return a {@linkplain Encoder Encoder} + */ + public abstract Encoder getEncoder(String measurementId, TSDataType type); + + /** + * for TSEncoding, JSON is a kind of type for initialization. {@code InitFromJsonObject} gets + * values from JSON object which will be used latter.
+ * if this type has extra parameters to construct, override it. + * + * @param measurementId - measurement id to be added. + * @param props - properties of encoding + */ + public void initFromProps(String measurementId, Map props) {} + + /** + * For a TSEncodingConverter, check the input parameter. If it's valid, return this parameter in + * its appropriate type. This method needs to be extended. + * + * @param pmKey - argument key in JSON object key-value pair + * @param value - argument value in JSON object key-value pair in type of String + * @return - default return is null which means this data type needn't the parameter + * @throws MetadataArgsErrorException throw exception when metadata has wrong args + */ + public Object checkParameter(String pmKey, String value) throws MetadataArgsErrorException { + throw new MetadataArgsErrorException("don't need args:{}" + pmKey); + } + + @Override + public String toString() { + return ""; + } + + public static class PLAIN extends TSEncodingConverter { + private int maxStringLength; + + @Override + public Encoder getEncoder(String measurementId, TSDataType type) { + return new PlainEncoder(EndianType.LITTLE_ENDIAN, type, maxStringLength); } - /** - * check the validity of input parameter. If it's valid, return this parameter in its - * appropriate type. - * - * @param encoding - encoding type - * @param pmKey - argument key in JSON object key-value pair - * @param value - argument value in JSON object key-value pair in type of String - * @return - argument value in JSON object key-value pair in its suitable type - * @throws MetadataArgsErrorException throw exception when metadata has wrong args - */ - public static Object checkParameter(TSEncoding encoding, String pmKey, String value) - throws MetadataArgsErrorException { - return getConverter(encoding).checkParameter(pmKey, value); + @Override + public void initFromProps(String measurementId, Map props) { + // set max error from initialized map or default value if not set + if (props == null || !props.containsKey(JsonFormatConstant.MAX_STRING_LENGTH)) { + maxStringLength = conf.maxStringLength; + } else { + maxStringLength = Integer.valueOf(props.get(JsonFormatConstant.MAX_STRING_LENGTH)); + if (maxStringLength < 0) { + maxStringLength = conf.maxStringLength; + LOG.warn("cannot set max string length to negative value, replaced with default value:{}", + maxStringLength); + } + } } + } - /** - * return a series's encoder with different types and parameters according to its measurement id - * and data type - * - * @param measurementId - given measurement id - * @param type - given data type - * @return - return a {@linkplain Encoder Encoder} - */ - public abstract Encoder getEncoder(String measurementId, TSDataType type); + public static class RLE extends TSEncodingConverter { + private int maxPointNumber = 0; + + @Override + public Encoder getEncoder(String measurementId, TSDataType type) { + switch (type) { + case ENUMS: + case INT32: + case BOOLEAN: + return new IntRleEncoder(EndianType.LITTLE_ENDIAN); + case INT64: + return new LongRleEncoder(EndianType.LITTLE_ENDIAN); + case FLOAT: + case DOUBLE: + // case BIGDECIMAL: + return new FloatEncoder(TSEncoding.RLE, type, maxPointNumber); + default: + throw new UnSupportedDataTypeException("RLE doesn't support data type: " + type); + } + } /** - * for TSEncoding, JSON is a kind of type for initialization. {@code InitFromJsonObject} gets - * values from JSON object which will be used latter.
- * if this type has extra parameters to construct, override it. - * - * @param measurementId - measurement id to be added. - * @param props - properties of encoding + * RLE could specify max_point_number in given JSON Object, which means the maximum + * decimal digits for float or double data. */ + @Override public void initFromProps(String measurementId, Map props) { + // set max error from initialized map or default value if not set + if (props == null || !props.containsKey(JsonFormatConstant.MAX_POINT_NUMBER)) { + maxPointNumber = conf.floatPrecision; + } else { + maxPointNumber = Integer.valueOf(props.get(JsonFormatConstant.MAX_POINT_NUMBER)); + if (maxPointNumber < 0) { + maxPointNumber = conf.floatPrecision; + LOG.warn("cannot set max point number to negative value, replaced with default value:{}", + maxPointNumber); + } + } } + @Override /** - * For a TSEncodingConverter, check the input parameter. If it's valid, return this parameter in - * its appropriate type. This method needs to be extended. - * - * @param pmKey - argument key in JSON object key-value pair - * @param value - argument value in JSON object key-value pair in type of String - * @return - default return is null which means this data type needn't the parameter - * @throws MetadataArgsErrorException throw exception when metadata has wrong args + * RLE could specify max_point_number as parameter, which means the maximum decimal + * digits for float or double data. */ public Object checkParameter(String pmKey, String value) throws MetadataArgsErrorException { + if (JsonFormatConstant.MAX_POINT_NUMBER.equals(pmKey)) { + try { + return Integer.valueOf(value); + } catch (NumberFormatException e) { + throw new MetadataArgsErrorException( + "paramter " + pmKey + " meets error integer format :" + value); + } + } else throw new MetadataArgsErrorException("don't need args:{}" + pmKey); } @Override public String toString() { - return ""; + return JsonFormatConstant.MAX_POINT_NUMBER + ":" + maxPointNumber; } + } - public static class PLAIN extends TSEncodingConverter { - private int maxStringLength; + public static class TS_2DIFF extends TSEncodingConverter { + private int maxPointNumber = 0; - @Override - public Encoder getEncoder(String measurementId, TSDataType type) { - return new PlainEncoder(EndianType.LITTLE_ENDIAN, type, maxStringLength); - } - - @Override - public void initFromProps(String measurementId, Map props) { - // set max error from initialized map or default value if not set - if (props == null || !props.containsKey(JsonFormatConstant.MAX_STRING_LENGTH)) { - maxStringLength = conf.maxStringLength; - } else { - maxStringLength = Integer.valueOf(props.get(JsonFormatConstant.MAX_STRING_LENGTH)); - if (maxStringLength < 0) { - maxStringLength = conf.maxStringLength; - LOG.warn( - "cannot set max string length to negative value, replaced with default value:{}", - maxStringLength); - } - } - } + @Override + public Encoder getEncoder(String measurementId, TSDataType type) { + switch (type) { + case INT32: + return new DeltaBinaryEncoder.IntDeltaEncoder(); + case INT64: + return new DeltaBinaryEncoder.LongDeltaEncoder(); + case FLOAT: + case DOUBLE: + // case BIGDECIMAL: + return new FloatEncoder(TSEncoding.TS_2DIFF, type, maxPointNumber); + default: + throw new UnSupportedDataTypeException("TS_2DIFF doesn't support data type: " + type); + } } - public static class RLE extends TSEncodingConverter { - private int maxPointNumber = 0; - - @Override - public Encoder getEncoder(String measurementId, TSDataType type) { - 
switch (type) { - case ENUMS: - case INT32: - case BOOLEAN: - return new IntRleEncoder(EndianType.LITTLE_ENDIAN); - case INT64: - return new LongRleEncoder(EndianType.LITTLE_ENDIAN); - case FLOAT: - case DOUBLE: -// case BIGDECIMAL: - return new FloatEncoder(TSEncoding.RLE, type, maxPointNumber); - default: - throw new UnSupportedDataTypeException("RLE doesn't support data type: " + type); - } - } - - /** - * RLE could specify max_point_number in given JSON Object, which means the maximum - * decimal digits for float or double data. - */ - @Override - public void initFromProps(String measurementId, Map props) { - // set max error from initialized map or default value if not set - if (props == null || !props.containsKey(JsonFormatConstant.MAX_POINT_NUMBER)) { - maxPointNumber = conf.floatPrecision; - } else { - maxPointNumber = Integer.valueOf(props.get(JsonFormatConstant.MAX_POINT_NUMBER)); - if (maxPointNumber < 0) { - maxPointNumber = conf.floatPrecision; - LOG.warn( - "cannot set max point number to negative value, replaced with default value:{}", - maxPointNumber); - } - } - } - - @Override - /** - * RLE could specify max_point_number as parameter, which means the maximum - * decimal digits for float or double data. - */ - public Object checkParameter(String pmKey, String value) throws MetadataArgsErrorException { - if (JsonFormatConstant.MAX_POINT_NUMBER.equals(pmKey)) { - try { - return Integer.valueOf(value); - } catch (NumberFormatException e) { - throw new MetadataArgsErrorException("paramter " + pmKey - + " meets error integer format :" + value); - } - } else - throw new MetadataArgsErrorException("don't need args:{}" + pmKey); - } - - @Override - public String toString() { - return JsonFormatConstant.MAX_POINT_NUMBER + ":" + maxPointNumber; + @Override + /** + * TS_2DIFF could specify max_point_number in given JSON Object, which means the maximum + * decimal digits for float or double data. + */ + public void initFromProps(String measurementId, Map props) { + // set max error from initialized map or default value if not set + TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); + if (props == null || !props.containsKey(JsonFormatConstant.MAX_POINT_NUMBER)) { + maxPointNumber = conf.floatPrecision; + } else { + maxPointNumber = Integer.valueOf(props.get(JsonFormatConstant.MAX_POINT_NUMBER)); + if (maxPointNumber < 0) { + maxPointNumber = conf.floatPrecision; + LOG.warn("cannot set max point number to negative value, replaced with default value:{}", + maxPointNumber); } + } } - public static class TS_2DIFF extends TSEncodingConverter { - private int maxPointNumber = 0; - - @Override - public Encoder getEncoder(String measurementId, TSDataType type) { - switch (type) { - case INT32: - return new DeltaBinaryEncoder.IntDeltaEncoder(); - case INT64: - return new DeltaBinaryEncoder.LongDeltaEncoder(); - case FLOAT: - case DOUBLE: -// case BIGDECIMAL: - return new FloatEncoder(TSEncoding.TS_2DIFF, type, maxPointNumber); - default: - throw new UnSupportedDataTypeException("TS_2DIFF doesn't support data type: " + type); - } - } - - @Override - /** - * TS_2DIFF could specify max_point_number in given JSON Object, which means the maximum - * decimal digits for float or double data. 
- */ - public void initFromProps(String measurementId, Map props) { - // set max error from initialized map or default value if not set - TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); - if (props == null || !props.containsKey(JsonFormatConstant.MAX_POINT_NUMBER)) { - maxPointNumber = conf.floatPrecision; - } else { - maxPointNumber = Integer.valueOf(props.get(JsonFormatConstant.MAX_POINT_NUMBER)); - if (maxPointNumber < 0) { - maxPointNumber = conf.floatPrecision; - LOG.warn( - "cannot set max point number to negative value, replaced with default value:{}", - maxPointNumber); - } - } + @Override + /** + * TS_2DIFF could specify max_point_number as parameter, which means the maximum decimal + * digits for float or double data. + */ + public Object checkParameter(String pmKey, String value) throws MetadataArgsErrorException { + if (JsonFormatConstant.MAX_POINT_NUMBER.equals(pmKey)) { + try { + return Integer.valueOf(value); + } catch (NumberFormatException e) { + throw new MetadataArgsErrorException( + "paramter " + pmKey + " meets error integer format :" + value); } + } else + throw new MetadataArgsErrorException("don't need args:{}" + pmKey); + } - @Override - /** - * TS_2DIFF could specify max_point_number as parameter, which means the maximum - * decimal digits for float or double data. - */ - public Object checkParameter(String pmKey, String value) throws MetadataArgsErrorException { - if (JsonFormatConstant.MAX_POINT_NUMBER.equals(pmKey)) { - try { - return Integer.valueOf(value); - } catch (NumberFormatException e) { - throw new MetadataArgsErrorException("paramter " + pmKey - + " meets error integer format :" + value); - } - } else - throw new MetadataArgsErrorException("don't need args:{}" + pmKey); - } + @Override + public String toString() { + return JsonFormatConstant.MAX_POINT_NUMBER + ":" + maxPointNumber; + } - @Override - public String toString() { - return JsonFormatConstant.MAX_POINT_NUMBER + ":" + maxPointNumber; - } + } + public static class BITMAP extends TSEncodingConverter { + @Override + public Encoder getEncoder(String measurementId, TSDataType type) { + switch (type) { + case ENUMS: + return new BitmapEncoder(EndianType.LITTLE_ENDIAN); + default: + throw new UnSupportedDataTypeException("BITMAP doesn't support data type: " + type); + } } + } - public static class BITMAP extends TSEncodingConverter { - @Override - public Encoder getEncoder(String measurementId, TSDataType type) { - switch (type) { - case ENUMS: - return new BitmapEncoder(EndianType.LITTLE_ENDIAN); - default: - throw new UnSupportedDataTypeException("BITMAP doesn't support data type: " + type); - } - } - } - - public static class GORILLA extends TSEncodingConverter { + public static class GORILLA extends TSEncodingConverter { - @Override - public Encoder getEncoder(String measurementId, TSDataType type) { - switch (type) { - case FLOAT: - return new SinglePrecisionEncoder(); - case DOUBLE: - return new DoublePrecisionEncoder(); - default: - throw new UnSupportedDataTypeException("GORILLA doesn't support data type: " + type); - } - } + @Override + public Encoder getEncoder(String measurementId, TSDataType type) { + switch (type) { + case FLOAT: + return new SinglePrecisionEncoder(); + case DOUBLE: + return new DoublePrecisionEncoder(); + default: + throw new UnSupportedDataTypeException("GORILLA doesn't support data type: " + type); + } + } - } + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/IRowGroupWriter.java 
b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/IRowGroupWriter.java index a794c269..d327a3c0 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/IRowGroupWriter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/IRowGroupWriter.java @@ -4,51 +4,47 @@ import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; import cn.edu.tsinghua.tsfile.timeseries.write.io.TsFileIOWriter; import cn.edu.tsinghua.tsfile.timeseries.write.record.DataPoint; - import java.io.IOException; import java.util.List; /** - * a row group in TSFile contains a list of value series. TimeSeriesGroupWriter - * should implement write method which inputs a time stamp(in TimeValue class) - * and a list of data points. It also should provide flushing method for - * outputting to OS file system or HDFS. + * a row group in TSFile contains a list of value series. TimeSeriesGroupWriter should implement + * write method which inputs a time stamp(in TimeValue class) and a list of data points. It also + * should provide flushing method for outputting to OS file system or HDFS. * * @author kangrong */ public interface IRowGroupWriter { - /** - * receive a timestamp and a list of data points, write them to themselves - * series writers. - * - * @param time - all data points have unify time stamp. - * @param data - data point list to input - * @throws WriteProcessException exception in write process - * @throws IOException exception in IO - */ - void write(long time, List data) throws WriteProcessException, IOException; + /** + * receive a timestamp and a list of data points, write them to themselves series writers. + * + * @param time - all data points have unify time stamp. + * @param data - data point list to input + * @throws WriteProcessException exception in write process + * @throws IOException exception in IO + */ + void write(long time, List data) throws WriteProcessException, IOException; - /** - * flushing method for outputting to OS file system or HDFS. - * - * @param tsfileWriter - TSFileIOWriter - * @throws IOException exception in IO - */ - void flushToFileWriter(TsFileIOWriter tsfileWriter) throws IOException; + /** + * flushing method for outputting to OS file system or HDFS. + * + * @param tsfileWriter - TSFileIOWriter + * @throws IOException exception in IO + */ + void flushToFileWriter(TsFileIOWriter tsfileWriter) throws IOException; - /** - * Note that, this method should be called after running - * {@code long calcAllocatedSize()} - * - * @return - allocated memory size. - */ - long updateMaxGroupMemSize(); + /** + * Note that, this method should be called after running {@code long calcAllocatedSize()} + * + * @return - allocated memory size. 
+ */ + long updateMaxGroupMemSize(); - /** - * given a measurement descriptor, create a corresponding writer and put into this RowGroupWriter - * - * @param measurementDescriptor a measurement descriptor containing the message of the series - * @param pageSize the specified page size - */ - void addSeriesWriter(MeasurementDescriptor measurementDescriptor, int pageSize); + /** + * given a measurement descriptor, create a corresponding writer and put into this RowGroupWriter + * + * @param measurementDescriptor a measurement descriptor containing the message of the series + * @param pageSize the specified page size + */ + void addSeriesWriter(MeasurementDescriptor measurementDescriptor, int pageSize); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/ISeriesWriter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/ISeriesWriter.java index b84fb032..1a342e9d 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/ISeriesWriter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/ISeriesWriter.java @@ -2,7 +2,6 @@ import java.io.IOException; import java.math.BigDecimal; - import cn.edu.tsinghua.tsfile.common.utils.Binary; import cn.edu.tsinghua.tsfile.timeseries.write.io.TsFileIOWriter; @@ -12,21 +11,21 @@ * @author kangrong */ public interface ISeriesWriter { - void write(long time, int value) throws IOException; + void write(long time, int value) throws IOException; - void write(long time, long value) throws IOException; + void write(long time, long value) throws IOException; - void write(long time, boolean value) throws IOException; + void write(long time, boolean value) throws IOException; - void write(long time, float value) throws IOException; + void write(long time, float value) throws IOException; - void write(long time, double value) throws IOException; + void write(long time, double value) throws IOException; - void write(long time, BigDecimal value) throws IOException; + void write(long time, BigDecimal value) throws IOException; - void write(long time, Binary value) throws IOException; + void write(long time, Binary value) throws IOException; - void writeToFileWriter(TsFileIOWriter tsfileWriter) throws IOException; + void writeToFileWriter(TsFileIOWriter tsfileWriter) throws IOException; - long estimateMaxSeriesMemSize(); + long estimateMaxSeriesMemSize(); } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/RowGroupWriterImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/RowGroupWriterImpl.java index 09f3a8e5..fed04846 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/RowGroupWriterImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/RowGroupWriterImpl.java @@ -4,10 +4,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import cn.edu.tsinghua.tsfile.timeseries.write.desc.MeasurementDescriptor; import cn.edu.tsinghua.tsfile.timeseries.write.exception.NoMeasurementException; import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; @@ -23,47 +21,49 @@ * @see IRowGroupWriter IRowGroupWriter */ public class RowGroupWriterImpl implements IRowGroupWriter { - private static Logger LOG = LoggerFactory.getLogger(RowGroupWriterImpl.class); - private final String deltaObjectId; - private Map dataSeriesWriters = new HashMap(); + private static Logger LOG = LoggerFactory.getLogger(RowGroupWriterImpl.class); + private 
final String deltaObjectId; + private Map dataSeriesWriters = new HashMap(); - public RowGroupWriterImpl(String deltaObjectId) { - this.deltaObjectId = deltaObjectId; - } + public RowGroupWriterImpl(String deltaObjectId) { + this.deltaObjectId = deltaObjectId; + } - @Override - public void addSeriesWriter(MeasurementDescriptor desc, int pageSizeThreshold) { - if(!dataSeriesWriters.containsKey(desc.getMeasurementId())) { - IPageWriter pageWriter = new PageWriterImpl(desc); - ISeriesWriter seriesWriter = new SeriesWriterImpl(deltaObjectId, desc, pageWriter, pageSizeThreshold); - this.dataSeriesWriters.put(desc.getMeasurementId(), seriesWriter); - } + @Override + public void addSeriesWriter(MeasurementDescriptor desc, int pageSizeThreshold) { + if (!dataSeriesWriters.containsKey(desc.getMeasurementId())) { + IPageWriter pageWriter = new PageWriterImpl(desc); + ISeriesWriter seriesWriter = + new SeriesWriterImpl(deltaObjectId, desc, pageWriter, pageSizeThreshold); + this.dataSeriesWriters.put(desc.getMeasurementId(), seriesWriter); } + } - @Override - public void write(long time, List data) throws WriteProcessException, IOException { - for (DataPoint point : data) { - String measurementId = point.getMeasurementId(); - if (!dataSeriesWriters.containsKey(measurementId)) - throw new NoMeasurementException("time " + time + ", measurement id " + measurementId + " not found!"); - point.write(time, dataSeriesWriters.get(measurementId)); + @Override + public void write(long time, List data) throws WriteProcessException, IOException { + for (DataPoint point : data) { + String measurementId = point.getMeasurementId(); + if (!dataSeriesWriters.containsKey(measurementId)) + throw new NoMeasurementException( + "time " + time + ", measurement id " + measurementId + " not found!"); + point.write(time, dataSeriesWriters.get(measurementId)); - } } + } - @Override - public void flushToFileWriter(TsFileIOWriter deltaFileWriter) throws IOException { - LOG.debug("start flush delta object id:{}", deltaObjectId); - for (ISeriesWriter seriesWriter : dataSeriesWriters.values()) { - seriesWriter.writeToFileWriter(deltaFileWriter); - } + @Override + public void flushToFileWriter(TsFileIOWriter deltaFileWriter) throws IOException { + LOG.debug("start flush delta object id:{}", deltaObjectId); + for (ISeriesWriter seriesWriter : dataSeriesWriters.values()) { + seriesWriter.writeToFileWriter(deltaFileWriter); } + } - @Override - public long updateMaxGroupMemSize() { - long bufferSize = 0; - for (ISeriesWriter seriesWriter : dataSeriesWriters.values()) - bufferSize += seriesWriter.estimateMaxSeriesMemSize(); - return bufferSize; - } + @Override + public long updateMaxGroupMemSize() { + long bufferSize = 0; + for (ISeriesWriter seriesWriter : dataSeriesWriters.values()) + bufferSize += seriesWriter.estimateMaxSeriesMemSize(); + return bufferSize; + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/SeriesWriterImpl.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/SeriesWriterImpl.java index 8bd3a06b..bce87d18 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/SeriesWriterImpl.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/SeriesWriterImpl.java @@ -2,10 +2,8 @@ import java.io.IOException; import java.math.BigDecimal; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; import cn.edu.tsinghua.tsfile.common.utils.Binary; import 
cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; @@ -16,205 +14,205 @@ import cn.edu.tsinghua.tsfile.timeseries.write.page.IPageWriter; /** - * A implementation of {@code ISeriesWriter}. {@code SeriesWriterImpl} consists - * of a {@code PageWriter}, a {@code ValueWriter}, and two {@code Statistics}. + * A implementation of {@code ISeriesWriter}. {@code SeriesWriterImpl} consists of a + * {@code PageWriter}, a {@code ValueWriter}, and two {@code Statistics}. * * @author kangrong * @see ISeriesWriter ISeriesWriter */ public class SeriesWriterImpl implements ISeriesWriter { - private static final Logger LOG = LoggerFactory.getLogger(SeriesWriterImpl.class); - private static final int MINIMUM_RECORD_COUNT_FOR_CHECK = 1; - - private final TSDataType dataType; - private final IPageWriter pageWriter; - /** - * page size threshold - */ - private final long psThres; - private final int pageCountUpperBound; - /** - * value writer to encode data - */ - private ValueWriter dataValueWriter; - - /** - * value count on of a page. It will be reset after calling - * {@code writePage()} - */ - private int valueCount; - private int valueCountForNextSizeCheck; - /** - * statistic on a page. It will be reset after calling {@code writePage()} - */ - private Statistics pageStatistics; - /** - * statistic on a stage. It will be reset after calling - * {@code writeToFileWriter()} - */ - private Statistics seriesStatistics; - private long time; - private long minTimestamp = -1; - private String deltaObjectId; - private MeasurementDescriptor desc; - - public SeriesWriterImpl(String deltaObjectId, MeasurementDescriptor desc, IPageWriter pageWriter, - int pageSizeThreshold) { - this.deltaObjectId = deltaObjectId; - this.desc = desc; - this.dataType = desc.getType(); - this.pageWriter = pageWriter; - this.psThres = pageSizeThreshold; - // initial check of memory usage. 
So that we have enough data to make an - // initial prediction - this.valueCountForNextSizeCheck = MINIMUM_RECORD_COUNT_FOR_CHECK; - this.seriesStatistics = Statistics.getStatsByType(desc.getType()); - resetPageStatistics(); - this.dataValueWriter = new ValueWriter(); - this.pageCountUpperBound = TSFileDescriptor.getInstance().getConfig().maxNumberOfPointsInPage; - - this.dataValueWriter.setTimeEncoder(desc.getTimeEncoder()); - this.dataValueWriter.setValueEncoder(desc.getValueEncoder()); - } - - private void resetPageStatistics() { - this.pageStatistics = Statistics.getStatsByType(dataType); - } - - @Override - public void write(long time, long value) throws IOException { - this.time = time; - ++valueCount; - dataValueWriter.write(time, value); - pageStatistics.updateStats(value); - if (minTimestamp == -1) - minTimestamp = time; - checkPageSize(); - } - - @Override - public void write(long time, int value) throws IOException { - this.time = time; - ++valueCount; - dataValueWriter.write(time, value); - pageStatistics.updateStats(value); - if (minTimestamp == -1) - minTimestamp = time; - checkPageSize(); - } - - @Override - public void write(long time, boolean value) throws IOException { - this.time = time; - ++valueCount; - dataValueWriter.write(time, value); - pageStatistics.updateStats(value); - if (minTimestamp == -1) - minTimestamp = time; - checkPageSize(); - } - - @Override - public void write(long time, float value) throws IOException { - this.time = time; - ++valueCount; - dataValueWriter.write(time, value); - pageStatistics.updateStats(value); - if (minTimestamp == -1) - minTimestamp = time; - checkPageSize(); + private static final Logger LOG = LoggerFactory.getLogger(SeriesWriterImpl.class); + private static final int MINIMUM_RECORD_COUNT_FOR_CHECK = 1; + + private final TSDataType dataType; + private final IPageWriter pageWriter; + /** + * page size threshold + */ + private final long psThres; + private final int pageCountUpperBound; + /** + * value writer to encode data + */ + private ValueWriter dataValueWriter; + + /** + * value count on of a page. It will be reset after calling {@code writePage()} + */ + private int valueCount; + private int valueCountForNextSizeCheck; + /** + * statistic on a page. It will be reset after calling {@code writePage()} + */ + private Statistics pageStatistics; + /** + * statistic on a stage. It will be reset after calling {@code writeToFileWriter()} + */ + private Statistics seriesStatistics; + private long time; + private long minTimestamp = -1; + private String deltaObjectId; + private MeasurementDescriptor desc; + + public SeriesWriterImpl(String deltaObjectId, MeasurementDescriptor desc, IPageWriter pageWriter, + int pageSizeThreshold) { + this.deltaObjectId = deltaObjectId; + this.desc = desc; + this.dataType = desc.getType(); + this.pageWriter = pageWriter; + this.psThres = pageSizeThreshold; + // initial check of memory usage. 
So that we have enough data to make an + // initial prediction + this.valueCountForNextSizeCheck = MINIMUM_RECORD_COUNT_FOR_CHECK; + this.seriesStatistics = Statistics.getStatsByType(desc.getType()); + resetPageStatistics(); + this.dataValueWriter = new ValueWriter(); + this.pageCountUpperBound = TSFileDescriptor.getInstance().getConfig().maxNumberOfPointsInPage; + + this.dataValueWriter.setTimeEncoder(desc.getTimeEncoder()); + this.dataValueWriter.setValueEncoder(desc.getValueEncoder()); + } + + private void resetPageStatistics() { + this.pageStatistics = Statistics.getStatsByType(dataType); + } + + @Override + public void write(long time, long value) throws IOException { + this.time = time; + ++valueCount; + dataValueWriter.write(time, value); + pageStatistics.updateStats(value); + if (minTimestamp == -1) + minTimestamp = time; + checkPageSize(); + } + + @Override + public void write(long time, int value) throws IOException { + this.time = time; + ++valueCount; + dataValueWriter.write(time, value); + pageStatistics.updateStats(value); + if (minTimestamp == -1) + minTimestamp = time; + checkPageSize(); + } + + @Override + public void write(long time, boolean value) throws IOException { + this.time = time; + ++valueCount; + dataValueWriter.write(time, value); + pageStatistics.updateStats(value); + if (minTimestamp == -1) + minTimestamp = time; + checkPageSize(); + } + + @Override + public void write(long time, float value) throws IOException { + this.time = time; + ++valueCount; + dataValueWriter.write(time, value); + pageStatistics.updateStats(value); + if (minTimestamp == -1) + minTimestamp = time; + checkPageSize(); + } + + @Override + public void write(long time, double value) throws IOException { + this.time = time; + ++valueCount; + dataValueWriter.write(time, value); + pageStatistics.updateStats(value); + if (minTimestamp == -1) + minTimestamp = time; + checkPageSize(); + } + + @Override + public void write(long time, BigDecimal value) throws IOException { + this.time = time; + ++valueCount; + dataValueWriter.write(time, value); + pageStatistics.updateStats(value); + if (minTimestamp == -1) + minTimestamp = time; + checkPageSize(); + } + + @Override + public void write(long time, Binary value) throws IOException { + this.time = time; + ++valueCount; + dataValueWriter.write(time, value); + pageStatistics.updateStats(value); + if (minTimestamp == -1) + minTimestamp = time; + checkPageSize(); + } + + /** + * check occupied memory size, if it exceeds the PageSize threshold, flush them to given + * OutputStream. 
+ */ + private void checkPageSize() { + if (valueCount == pageCountUpperBound) { + LOG.debug("current line count reaches the upper bound, write page {}", desc); + writePage(); + } else if (valueCount >= valueCountForNextSizeCheck) { + // not checking the memory used for every value + long currentColumnSize = dataValueWriter.estimateMaxMemSize(); + if (currentColumnSize > psThres) { + // we will write the current page + LOG.debug("enough size, write page {}", desc); + writePage(); + } else { + LOG.debug("{}:{} not enough size, now: {}, change to {}", deltaObjectId, desc, valueCount, + valueCountForNextSizeCheck); + } + // reset the valueCountForNextSizeCheck for the next page + valueCountForNextSizeCheck = (int) (((float) psThres / currentColumnSize) * valueCount); } - - @Override - public void write(long time, double value) throws IOException { - this.time = time; - ++valueCount; - dataValueWriter.write(time, value); - pageStatistics.updateStats(value); - if (minTimestamp == -1) - minTimestamp = time; - checkPageSize(); - } - - @Override - public void write(long time, BigDecimal value) throws IOException { - this.time = time; - ++valueCount; - dataValueWriter.write(time, value); - pageStatistics.updateStats(value); - if (minTimestamp == -1) - minTimestamp = time; - checkPageSize(); - } - - @Override - public void write(long time, Binary value) throws IOException { - this.time = time; - ++valueCount; - dataValueWriter.write(time, value); - pageStatistics.updateStats(value); - if (minTimestamp == -1) - minTimestamp = time; - checkPageSize(); - } - - /** - * check occupied memory size, if it exceeds the PageSize threshold, flush - * them to given OutputStream. - */ - private void checkPageSize() { - if (valueCount == pageCountUpperBound) { - LOG.debug("current line count reaches the upper bound, write page {}", desc); - writePage(); - } else if (valueCount >= valueCountForNextSizeCheck) { - // not checking the memory used for every value - long currentColumnSize = dataValueWriter.estimateMaxMemSize(); - if (currentColumnSize > psThres) { - // we will write the current page - LOG.debug("enough size, write page {}", desc); - writePage(); - } else { - LOG.debug("{}:{} not enough size, now: {}, change to {}", deltaObjectId, desc, valueCount, - valueCountForNextSizeCheck); - } - // reset the valueCountForNextSizeCheck for the next page - valueCountForNextSizeCheck = (int) (((float) psThres / currentColumnSize) * valueCount); - } - } - - /** - * pack data into {@code IPageWriter} - */ - private void writePage() { - try { - pageWriter.writePage(dataValueWriter.getBytes(), valueCount, pageStatistics, time, minTimestamp); - this.seriesStatistics.mergeStatistics(this.pageStatistics); - } catch (IOException e) { - LOG.error("meet error in dataValueWriter.getBytes(),ignore this page, {}", e.getMessage()); - } catch (PageException e) { - LOG.error("meet error in pageWriter.writePage,ignore this page, error message:{}", e.getMessage()); - } finally { - // clear start time stamp for next initializing - minTimestamp = -1; - valueCount = 0; - dataValueWriter.reset(); - resetPageStatistics(); - } - } - - @Override - public void writeToFileWriter(TsFileIOWriter tsfileWriter) throws IOException { - if (valueCount > 0) { - writePage(); - } - pageWriter.writeToFileWriter(tsfileWriter, seriesStatistics); - pageWriter.reset(); - // reset series_statistics - this.seriesStatistics = Statistics.getStatsByType(dataType); + } + + /** + * pack data into {@code IPageWriter} + */ + private void writePage() { + try { + 
pageWriter.writePage(dataValueWriter.getBytes(), valueCount, pageStatistics, time, + minTimestamp); + this.seriesStatistics.mergeStatistics(this.pageStatistics); + } catch (IOException e) { + LOG.error("meet error in dataValueWriter.getBytes(),ignore this page, {}", e.getMessage()); + } catch (PageException e) { + LOG.error("meet error in pageWriter.writePage,ignore this page, error message:{}", + e.getMessage()); + } finally { + // clear start time stamp for next initializing + minTimestamp = -1; + valueCount = 0; + dataValueWriter.reset(); + resetPageStatistics(); } + } - @Override - public long estimateMaxSeriesMemSize() { - return dataValueWriter.estimateMaxMemSize() + pageWriter.estimateMaxPageMemSize(); + @Override + public void writeToFileWriter(TsFileIOWriter tsfileWriter) throws IOException { + if (valueCount > 0) { + writePage(); } + pageWriter.writeToFileWriter(tsfileWriter, seriesStatistics); + pageWriter.reset(); + // reset series_statistics + this.seriesStatistics = Statistics.getStatsByType(dataType); + } + + @Override + public long estimateMaxSeriesMemSize() { + return dataValueWriter.estimateMaxMemSize() + pageWriter.estimateMaxPageMemSize(); + } } diff --git a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/ValueWriter.java b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/ValueWriter.java index 4a0ca8f3..f9e248f6 100644 --- a/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/ValueWriter.java +++ b/src/main/java/cn/edu/tsinghua/tsfile/timeseries/write/series/ValueWriter.java @@ -5,7 +5,6 @@ import cn.edu.tsinghua.tsfile.common.utils.PublicBAOS; import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; import cn.edu.tsinghua.tsfile.encoding.encoder.Encoder; - import java.io.IOException; import java.math.BigDecimal; @@ -16,109 +15,112 @@ * @author kangrong */ public class ValueWriter { - // time - private Encoder timeEncoder; - private PublicBAOS timeOut; - // value - private Encoder valueEncoder; - private PublicBAOS valueOut; - - private PublicBAOS timeSizeOut; - - public ValueWriter() { - this.timeOut = new PublicBAOS(); - this.valueOut = new PublicBAOS(); - this.timeSizeOut = new PublicBAOS(); - } - - public void write(long time, boolean value) throws IOException { - timeEncoder.encode(time, timeOut); - valueEncoder.encode(value, valueOut); - } - - public void write(long time, short value) throws IOException { - timeEncoder.encode(time, timeOut); - valueEncoder.encode(value, valueOut); - } - - public void write(long time, int value) throws IOException { - timeEncoder.encode(time, timeOut); - valueEncoder.encode(value, valueOut); - } - - public void write(long time, long value) throws IOException { - timeEncoder.encode(time, timeOut); - valueEncoder.encode(value, valueOut); - } - - public void write(long time, float value) throws IOException { - timeEncoder.encode(time, timeOut); - valueEncoder.encode(value, valueOut); - } - - public void write(long time, double value) throws IOException { - timeEncoder.encode(time, timeOut); - valueEncoder.encode(value, valueOut); - } - - public void write(long time, BigDecimal value) throws IOException { - timeEncoder.encode(time, timeOut); - valueEncoder.encode(value, valueOut); - } - - public void write(long time, Binary value) throws IOException { - timeEncoder.encode(time, timeOut); - valueEncoder.encode(value, valueOut); - } - - /** - * flush all data remained in encoders. 
- * - * @throws IOException - */ - private void prepareEndWriteOnePage() throws IOException { - timeEncoder.flush(timeOut); - valueEncoder.flush(valueOut); - timeOut.flush(); - valueOut.flush(); - } - - /** - * getBytes return data what it has been written in form of ListByteArrayOutputStream. - * - * @return - list byte array output stream containing time size, time stream and value stream. - * @throws IOException exception in IO - */ - public ListByteArrayOutputStream getBytes() throws IOException { - prepareEndWriteOnePage(); - ReadWriteStreamUtils.writeUnsignedVarInt(timeOut.size(), timeSizeOut); - return new ListByteArrayOutputStream(timeSizeOut, timeOut, valueOut); - } - - /** - * calculate max possible memory size it occupies, including time outputStream and value outputStream - * - * @return allocated size in time, value and outputStream - */ - public long estimateMaxMemSize() { - return timeOut.size() + valueOut.size() + timeEncoder.getMaxByteSize() + valueEncoder.getMaxByteSize(); - } - - /** - * reset data in ByteArrayOutputStream - */ - public void reset() { - timeOut.reset(); - valueOut.reset(); - timeSizeOut.reset(); - } - - public void setTimeEncoder(Encoder encoder) { - this.timeEncoder = encoder; - } - - public void setValueEncoder(Encoder encoder) { - this.valueEncoder = encoder; - } + // time + private Encoder timeEncoder; + private PublicBAOS timeOut; + // value + private Encoder valueEncoder; + private PublicBAOS valueOut; + + private PublicBAOS timeSizeOut; + + public ValueWriter() { + this.timeOut = new PublicBAOS(); + this.valueOut = new PublicBAOS(); + this.timeSizeOut = new PublicBAOS(); + } + + public void write(long time, boolean value) throws IOException { + timeEncoder.encode(time, timeOut); + valueEncoder.encode(value, valueOut); + } + + public void write(long time, short value) throws IOException { + timeEncoder.encode(time, timeOut); + valueEncoder.encode(value, valueOut); + } + + public void write(long time, int value) throws IOException { + timeEncoder.encode(time, timeOut); + valueEncoder.encode(value, valueOut); + } + + public void write(long time, long value) throws IOException { + timeEncoder.encode(time, timeOut); + valueEncoder.encode(value, valueOut); + } + + public void write(long time, float value) throws IOException { + timeEncoder.encode(time, timeOut); + valueEncoder.encode(value, valueOut); + } + + public void write(long time, double value) throws IOException { + timeEncoder.encode(time, timeOut); + valueEncoder.encode(value, valueOut); + } + + public void write(long time, BigDecimal value) throws IOException { + timeEncoder.encode(time, timeOut); + valueEncoder.encode(value, valueOut); + } + + public void write(long time, Binary value) throws IOException { + timeEncoder.encode(time, timeOut); + valueEncoder.encode(value, valueOut); + } + + /** + * flush all data remained in encoders. + * + * @throws IOException + */ + private void prepareEndWriteOnePage() throws IOException { + timeEncoder.flush(timeOut); + valueEncoder.flush(valueOut); + timeOut.flush(); + valueOut.flush(); + } + + /** + * getBytes return data what it has been written in form of + * ListByteArrayOutputStream. + * + * @return - list byte array output stream containing time size, time stream and value stream. 
+ * @throws IOException exception in IO + */ + public ListByteArrayOutputStream getBytes() throws IOException { + prepareEndWriteOnePage(); + ReadWriteStreamUtils.writeUnsignedVarInt(timeOut.size(), timeSizeOut); + return new ListByteArrayOutputStream(timeSizeOut, timeOut, valueOut); + } + + /** + * calculate max possible memory size it occupies, including time outputStream and value + * outputStream + * + * @return allocated size in time, value and outputStream + */ + public long estimateMaxMemSize() { + return timeOut.size() + valueOut.size() + timeEncoder.getMaxByteSize() + + valueEncoder.getMaxByteSize(); + } + + /** + * reset data in ByteArrayOutputStream + */ + public void reset() { + timeOut.reset(); + valueOut.reset(); + timeSizeOut.reset(); + } + + public void setTimeEncoder(Encoder encoder) { + this.timeEncoder = encoder; + } + + public void setValueEncoder(Encoder encoder) { + this.valueEncoder = encoder; + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/common/utils/BytesUtilsTest.java b/src/test/java/cn/edu/tsinghua/tsfile/common/utils/BytesUtilsTest.java index 54ca1885..3821c99e 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/common/utils/BytesUtilsTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/common/utils/BytesUtilsTest.java @@ -2,14 +2,12 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Random; - import org.junit.Test; public class BytesUtilsTest { diff --git a/src/test/java/cn/edu/tsinghua/tsfile/common/utils/ListByteArrayOutputStreamTest.java b/src/test/java/cn/edu/tsinghua/tsfile/common/utils/ListByteArrayOutputStreamTest.java index 08a4bacc..4bf9c5b8 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/common/utils/ListByteArrayOutputStreamTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/common/utils/ListByteArrayOutputStreamTest.java @@ -2,98 +2,97 @@ import org.junit.Before; import org.junit.Test; - import java.io.ByteArrayOutputStream; import java.io.IOException; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; /** * This class is used for testing functions of ListByteOutputStream. 
+ * * @author kangrong */ public class ListByteArrayOutputStreamTest { - private byte[] b1 = new byte[]{0, 1, 2}; - private byte[] b2 = new byte[]{3}; - private byte[] b3 = new byte[]{4, 5, 6, 7}; - private byte[] b4 = new byte[]{8, 9, 10}; + private byte[] b1 = new byte[] {0, 1, 2}; + private byte[] b2 = new byte[] {3}; + private byte[] b3 = new byte[] {4, 5, 6, 7}; + private byte[] b4 = new byte[] {8, 9, 10}; - private PublicBAOS s1; - private PublicBAOS s2; - private PublicBAOS s3; - private PublicBAOS s4; - private PublicBAOS total; + private PublicBAOS s1; + private PublicBAOS s2; + private PublicBAOS s3; + private PublicBAOS s4; + private PublicBAOS total; - @Before - public void before() throws IOException { - s1 = new PublicBAOS(); - s1.write(b1); - s2 = new PublicBAOS(); - s2.write(b2); - s3 = new PublicBAOS(); - s3.write(b3); - s4 = new PublicBAOS(); - s4.write(b4); - total = new PublicBAOS(); - total.write(b1); - total.write(b2); - total.write(b3); - total.write(b4); - } + @Before + public void before() throws IOException { + s1 = new PublicBAOS(); + s1.write(b1); + s2 = new PublicBAOS(); + s2.write(b2); + s3 = new PublicBAOS(); + s3.write(b3); + s4 = new PublicBAOS(); + s4.write(b4); + total = new PublicBAOS(); + total.write(b1); + total.write(b2); + total.write(b3); + total.write(b4); + } - @Test - public void testAppend() { - try { - ListByteArrayOutputStream listStream = new ListByteArrayOutputStream(); - listStream.append(s1); - listStream.append(s2); - listStream.append(s3); - listStream.append(s4); - assertEquals(11, listStream.size()); - byte[] ret = listStream.toByteArray(); - for (int i = 0; i < ret.length; i++) { - assertEquals(i, ret[i]); - } - } catch (IOException e) { - fail(); - } + @Test + public void testAppend() { + try { + ListByteArrayOutputStream listStream = new ListByteArrayOutputStream(); + listStream.append(s1); + listStream.append(s2); + listStream.append(s3); + listStream.append(s4); + assertEquals(11, listStream.size()); + byte[] ret = listStream.toByteArray(); + for (int i = 0; i < ret.length; i++) { + assertEquals(i, ret[i]); + } + } catch (IOException e) { + fail(); } + } - @Test - public void testFrom() { - try { - ListByteArrayOutputStream listStream = ListByteArrayOutputStream.from(total); - assertEquals(11, listStream.size()); - byte[] ret = listStream.toByteArray(); - for (int i = 0; i < ret.length; i++) { - assertEquals(i, ret[i]); - } - listStream.reset(); - assertEquals(0, listStream.size()); - } catch (IOException e) { - fail(); - } + @Test + public void testFrom() { + try { + ListByteArrayOutputStream listStream = ListByteArrayOutputStream.from(total); + assertEquals(11, listStream.size()); + byte[] ret = listStream.toByteArray(); + for (int i = 0; i < ret.length; i++) { + assertEquals(i, ret[i]); + } + listStream.reset(); + assertEquals(0, listStream.size()); + } catch (IOException e) { + fail(); } + } - @Test - public void testToArray() { - try { - ListByteArrayOutputStream listStream = new ListByteArrayOutputStream(s1, s2, s3, s4); - assertEquals(11, listStream.size()); - ByteArrayOutputStream out = new ByteArrayOutputStream(); - listStream.writeAllTo(out); - byte[] ret = out.toByteArray(); - for (int i = 0; i < ret.length; i++) { - assertEquals(i, ret[i]); - } - assertEquals(11, listStream.size()); - listStream.reset(); - assertEquals(0, listStream.size()); - } catch (IOException e) { - fail(); - } + @Test + public void testToArray() { + try { + ListByteArrayOutputStream listStream = new ListByteArrayOutputStream(s1, s2, s3, s4); + 
assertEquals(11, listStream.size()); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + listStream.writeAllTo(out); + byte[] ret = out.toByteArray(); + for (int i = 0; i < ret.length; i++) { + assertEquals(i, ret[i]); + } + assertEquals(11, listStream.size()); + listStream.reset(); + assertEquals(0, listStream.size()); + } catch (IOException e) { + fail(); } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/common/utils/PairTest.java b/src/test/java/cn/edu/tsinghua/tsfile/common/utils/PairTest.java index 15e08d38..3449fbda 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/common/utils/PairTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/common/utils/PairTest.java @@ -1,10 +1,8 @@ package cn.edu.tsinghua.tsfile.common.utils; import static org.junit.Assert.*; - import java.util.HashMap; import java.util.Map; - import org.junit.Test; public class PairTest { diff --git a/src/test/java/cn/edu/tsinghua/tsfile/common/utils/RandomAccessOutputStreamTest.java b/src/test/java/cn/edu/tsinghua/tsfile/common/utils/RandomAccessOutputStreamTest.java index 252bbc74..b616b24a 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/common/utils/RandomAccessOutputStreamTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/common/utils/RandomAccessOutputStreamTest.java @@ -1,12 +1,10 @@ package cn.edu.tsinghua.tsfile.common.utils; import static org.junit.Assert.assertEquals; - import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.io.RandomAccessFile; - import org.junit.Test; public class RandomAccessOutputStreamTest { diff --git a/src/test/java/cn/edu/tsinghua/tsfile/common/utils/ReadWriteStreamUtilsTest.java b/src/test/java/cn/edu/tsinghua/tsfile/common/utils/ReadWriteStreamUtilsTest.java index 59205628..507d894e 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/common/utils/ReadWriteStreamUtilsTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/common/utils/ReadWriteStreamUtilsTest.java @@ -1,13 +1,11 @@ package cn.edu.tsinghua.tsfile.common.utils; import static org.junit.Assert.*; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.List; - import org.junit.After; import org.junit.Before; import org.junit.Test; diff --git a/src/test/java/cn/edu/tsinghua/tsfile/compress/CompressTest.java b/src/test/java/cn/edu/tsinghua/tsfile/compress/CompressTest.java index dc154f1b..1a34540f 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/compress/CompressTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/compress/CompressTest.java @@ -8,9 +8,7 @@ import org.junit.Before; import org.junit.Test; import org.xerial.snappy.Snappy; - import java.io.IOException; - import static org.junit.Assert.assertEquals; /** @@ -19,48 +17,46 @@ * */ public class CompressTest { - private final String inputString = "Hello snappy-java! 
Snappy-java is a JNI-based wrapper of " - + "Snappy, a fast compressor/decompressor."; - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void noCompressorTest() throws IOException { - PublicBAOS out = new PublicBAOS(); - out.write(inputString.getBytes("UTF-8")); - Compressor.NoCompressor compressor = new Compressor.NoCompressor(); - NoUnCompressor unCompressor = new NoUnCompressor(); - ListByteArrayOutputStream compressed = compressor.compress(ListByteArrayOutputStream.from(out)); - byte[] uncompressed = unCompressor.uncompress(compressed.toByteArray()); - String result = new String(uncompressed, "UTF-8"); - assertEquals(inputString, result); - } - - @Test - public void snappyCompressorTest() throws IOException { - PublicBAOS out = new PublicBAOS(); - out.write(inputString.getBytes("UTF-8")); - Compressor.SnappyCompressor compressor = new Compressor.SnappyCompressor(); - SnappyUnCompressor unCompressor = new SnappyUnCompressor(); - ListByteArrayOutputStream compressed = compressor.compress(ListByteArrayOutputStream.from(out)); - byte[] uncompressed = unCompressor.uncompress(compressed.toByteArray()); - String result = new String(uncompressed, "UTF-8"); - assertEquals(inputString, result); - } - - @Test - public void snappyTest() throws IOException { - byte[] compressed = Snappy.compress(inputString.getBytes("UTF-8")); - byte[] uncompressed = Snappy.uncompress(compressed); - - String result = new String(uncompressed, "UTF-8"); - assertEquals(inputString, result); - } + private final String inputString = "Hello snappy-java! Snappy-java is a JNI-based wrapper of " + + "Snappy, a fast compressor/decompressor."; + + @Before + public void setUp() throws Exception {} + + @After + public void tearDown() throws Exception {} + + @Test + public void noCompressorTest() throws IOException { + PublicBAOS out = new PublicBAOS(); + out.write(inputString.getBytes("UTF-8")); + Compressor.NoCompressor compressor = new Compressor.NoCompressor(); + NoUnCompressor unCompressor = new NoUnCompressor(); + ListByteArrayOutputStream compressed = compressor.compress(ListByteArrayOutputStream.from(out)); + byte[] uncompressed = unCompressor.uncompress(compressed.toByteArray()); + String result = new String(uncompressed, "UTF-8"); + assertEquals(inputString, result); + } + + @Test + public void snappyCompressorTest() throws IOException { + PublicBAOS out = new PublicBAOS(); + out.write(inputString.getBytes("UTF-8")); + Compressor.SnappyCompressor compressor = new Compressor.SnappyCompressor(); + SnappyUnCompressor unCompressor = new SnappyUnCompressor(); + ListByteArrayOutputStream compressed = compressor.compress(ListByteArrayOutputStream.from(out)); + byte[] uncompressed = unCompressor.uncompress(compressed.toByteArray()); + String result = new String(uncompressed, "UTF-8"); + assertEquals(inputString, result); + } + + @Test + public void snappyTest() throws IOException { + byte[] compressed = Snappy.compress(inputString.getBytes("UTF-8")); + byte[] uncompressed = Snappy.uncompress(compressed); + + String result = new String(uncompressed, "UTF-8"); + assertEquals(inputString, result); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/compress/SnappyTest.java b/src/test/java/cn/edu/tsinghua/tsfile/compress/SnappyTest.java index 641bead2..ca5aac1d 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/compress/SnappyTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/compress/SnappyTest.java @@ -1,10 +1,8 @@ package 
cn.edu.tsinghua.tsfile.compress; import static org.junit.Assert.*; - import java.io.IOException; import java.io.UnsupportedEncodingException; - import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -17,22 +15,21 @@ */ public class SnappyTest { - @Before - public void setUp() throws Exception {} + @Before + public void setUp() throws Exception {} - @After - public void tearDown() throws Exception {} + @After + public void tearDown() throws Exception {} - @Test - public void test() throws UnsupportedEncodingException, IOException { - String input = - "Hello snappy-java! Snappy-java is a JNI-based wrapper of " - + "Snappy, a fast compresser/decompresser."; - byte[] compressed = Snappy.compress(input.getBytes("UTF-8")); - byte[] uncompressed = Snappy.uncompress(compressed); + @Test + public void test() throws UnsupportedEncodingException, IOException { + String input = "Hello snappy-java! Snappy-java is a JNI-based wrapper of " + + "Snappy, a fast compresser/decompresser."; + byte[] compressed = Snappy.compress(input.getBytes("UTF-8")); + byte[] uncompressed = Snappy.uncompress(compressed); - String result = new String(uncompressed, "UTF-8"); - assertEquals(input, result); - } + String result = new String(uncompressed, "UTF-8"); + assertEquals(input, result); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/IntPackerTest.java b/src/test/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/IntPackerTest.java index 955f5ac3..436f6d06 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/IntPackerTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/IntPackerTest.java @@ -1,10 +1,8 @@ package cn.edu.tsinghua.tsfile.encoding.bitpacking; import static org.junit.Assert.assertEquals; - import java.util.ArrayList; import java.util.Random; - import org.junit.Test; /** @@ -14,53 +12,53 @@ */ public class IntPackerTest { - @Test - public void test() { - Random rand = new Random(); - int width = 31; + @Test + public void test() { + Random rand = new Random(); + int width = 31; + + int count = 100000; + ArrayList preValues = new ArrayList(); + IntPacker packer = new IntPacker(width); + byte[] bb = new byte[count * width]; + int idx = 0; + for (int i = 0; i < count; i++) { + int[] vs = new int[8]; + for (int j = 0; j < 8; j++) { + vs[j] = rand.nextInt(Integer.MAX_VALUE); + preValues.add(vs[j]); + } + byte[] tb = new byte[width]; + packer.pack8Values(vs, 0, tb); + for (int j = 0; j < tb.length; j++) { + bb[idx++] = tb[j]; + } + } + int res[] = new int[count * 8]; + packer.unpackAllValues(bb, 0, bb.length, res); - int count = 100000; - ArrayList preValues = new ArrayList(); - IntPacker packer = new IntPacker(width); - byte[] bb = new byte[count * width]; - int idx = 0; - for (int i = 0; i < count; i++) { - int[] vs = new int[8]; - for(int j = 0 ; j < 8 ; j++){ - vs[j] = rand.nextInt(Integer.MAX_VALUE); - preValues.add(vs[j]); - } - byte[] tb = new byte[width]; - packer.pack8Values(vs, 0, tb); - for (int j = 0; j < tb.length; j++) { - bb[idx++] = tb[j]; - } - } - int res[] = new int[count * 8]; - packer.unpackAllValues(bb, 0, bb.length, res); - - for(int i = 0 ; i < count * 8 ; i ++){ - int v = preValues.get(i); - assertEquals(res[i], v); - } - } + for (int i = 0; i < count * 8; i++) { + int v = preValues.get(i); + assertEquals(res[i], v); + } + } - @Test - public void test2(){ - for(int width = 4;width < 32; width++){ - int[] arr = new int[8]; - int[] res = new int[8]; - for(int i = 0; i < 8; i++){ - arr[i] = i; - } - IntPacker 
packer = new IntPacker(width); - byte[] buf = new byte[width]; - packer.pack8Values(arr, 0, buf); - packer.unpack8Values(buf, 0, res); - for(int i = 0; i < 8; i++){ - assertEquals(arr[i], res[i]); - } - } - } + @Test + public void test2() { + for (int width = 4; width < 32; width++) { + int[] arr = new int[8]; + int[] res = new int[8]; + for (int i = 0; i < 8; i++) { + arr[i] = i; + } + IntPacker packer = new IntPacker(width); + byte[] buf = new byte[width]; + packer.pack8Values(arr, 0, buf); + packer.unpack8Values(buf, 0, res); + for (int i = 0; i < 8; i++) { + assertEquals(arr[i], res[i]); + } + } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/LongPackerTest.java b/src/test/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/LongPackerTest.java index 7eec2af8..e99b4ab1 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/LongPackerTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/encoding/bitpacking/LongPackerTest.java @@ -1,7 +1,6 @@ package cn.edu.tsinghua.tsfile.encoding.bitpacking; import static org.junit.Assert.*; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; @@ -9,7 +8,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Random; - import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; import org.junit.Test; @@ -20,97 +18,97 @@ */ public class LongPackerTest { - @Test - public void test() { - Random rand = new Random(); - int byteCount = 63; + @Test + public void test() { + Random rand = new Random(); + int byteCount = 63; + + LongPacker packer = new LongPacker(byteCount); + ArrayList preValues = new ArrayList<>(); + int count = 1; + byte[] bb = new byte[count * byteCount]; + int idx = 0; + for (int i = 0; i < count; i++) { + long[] vs = new long[8]; + for (int j = 0; j < 8; j++) { + long v = rand.nextLong(); + vs[j] = v < 0 ? -v : v; + preValues.add(vs[j]); + } + + byte[] tb = new byte[byteCount]; + packer.pack8Values(vs, 0, tb); + for (int j = 0; j < tb.length; j++) { + bb[idx++] = tb[j]; + } + } + long tres[] = new long[count * 8]; + packer.unpackAllValues(bb, 0, bb.length, tres); - LongPacker packer = new LongPacker(byteCount); - ArrayList preValues = new ArrayList<>(); - int count = 1; - byte[] bb = new byte[count * byteCount]; - int idx = 0; - for (int i = 0; i < count; i++) { - long[] vs = new long[8]; - for(int j = 0 ; j < 8; j ++){ - long v = rand.nextLong(); - vs[j] = v < 0 ? 
-v : v; - preValues.add(vs[j]); - } - - byte[] tb = new byte[byteCount]; - packer.pack8Values(vs, 0, tb); - for (int j = 0; j < tb.length; j++) { - bb[idx++] = tb[j]; - } - } - long tres[] = new long[count * 8]; - packer.unpackAllValues(bb, 0, bb.length, tres); - - for(int i = 0 ; i < count * 8 ; i++){ - long v = preValues.get(i); - assertEquals(tres[i], v); - } - } + for (int i = 0; i < count * 8; i++) { + long v = preValues.get(i); + assertEquals(tres[i], v); + } + } - @Test - public void testPackAll() throws IOException { - List bpList = new ArrayList(); - int bpCount = 15; - long bpStart = 11; - for (int i = 0; i < bpCount; i++) { - bpList.add(bpStart); - bpStart *= 3; - } - bpList.add(0L); - int bpBitWidth = ReadWriteStreamUtils.getLongMaxBitWidth(bpList); + @Test + public void testPackAll() throws IOException { + List bpList = new ArrayList(); + int bpCount = 15; + long bpStart = 11; + for (int i = 0; i < bpCount; i++) { + bpList.add(bpStart); + bpStart *= 3; + } + bpList.add(0L); + int bpBitWidth = ReadWriteStreamUtils.getLongMaxBitWidth(bpList); - LongPacker packer = new LongPacker(bpBitWidth); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); + LongPacker packer = new LongPacker(bpBitWidth); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); - long[] value1 = new long[8]; - long[] value2 = new long[8]; - for (int i = 0; i < 8; i++) { - value1[i] = bpList.get(i); - value2[i] = bpList.get(i + 8); - } - byte[] bytes1 = new byte[bpBitWidth]; - byte[] bytes2 = new byte[bpBitWidth]; - packer.pack8Values(value1, 0, bytes1); - baos.write(bytes1); - packer.pack8Values(value2, 0, bytes2); - baos.write(bytes2); + long[] value1 = new long[8]; + long[] value2 = new long[8]; + for (int i = 0; i < 8; i++) { + value1[i] = bpList.get(i); + value2[i] = bpList.get(i + 8); + } + byte[] bytes1 = new byte[bpBitWidth]; + byte[] bytes2 = new byte[bpBitWidth]; + packer.pack8Values(value1, 0, bytes1); + baos.write(bytes1); + packer.pack8Values(value2, 0, bytes2); + baos.write(bytes2); - long[] readArray = new long[16]; - byte[] bytes = new byte[2 * bpBitWidth]; - ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); - int bytesToRead = 2 * bpBitWidth; - bytesToRead = Math.min(bytesToRead, bais.available()); - new DataInputStream(bais).readFully(bytes, 0, bytesToRead); + long[] readArray = new long[16]; + byte[] bytes = new byte[2 * bpBitWidth]; + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + int bytesToRead = 2 * bpBitWidth; + bytesToRead = Math.min(bytesToRead, bais.available()); + new DataInputStream(bais).readFully(bytes, 0, bytesToRead); - // save all long values in currentBuffer - packer.unpackAllValues(bytes, 0, bytesToRead, readArray); - for(int i = 0 ; i < 16 ; i++){ - long v = bpList.get(i); - assertEquals(readArray[i], v); - } - } + // save all long values in currentBuffer + packer.unpackAllValues(bytes, 0, bytesToRead, readArray); + for (int i = 0; i < 16; i++) { + long v = bpList.get(i); + assertEquals(readArray[i], v); + } + } - @Test - public void test2(){ - for(int width = 4;width < 63; width++){ - long[] arr = new long[8]; - long[] res = new long[8]; - for(int i = 0; i < 8; i++){ - arr[i] = i; - } - LongPacker packer = new LongPacker(width); - byte[] buf = new byte[width]; - packer.pack8Values(arr, 0, buf); - packer.unpack8Values(buf, 0, res); - for(int i = 0; i < 8; i++){ - assertEquals(arr[i], res[i]); - } - } - } + @Test + public void test2() { + for (int width = 4; width < 63; width++) { + long[] arr = new long[8]; + 
long[] res = new long[8]; + for (int i = 0; i < 8; i++) { + arr[i] = i; + } + LongPacker packer = new LongPacker(width); + byte[] buf = new byte[width]; + packer.pack8Values(arr, 0, buf); + packer.unpack8Values(buf, 0, res); + for (int i = 0; i < 8; i++) { + assertEquals(arr[i], res[i]); + } + } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/BitmapDecoderTest.java b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/BitmapDecoderTest.java index 88772bb8..6cfd2b92 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/BitmapDecoderTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/BitmapDecoderTest.java @@ -1,12 +1,10 @@ package cn.edu.tsinghua.tsfile.encoding.decoder; import static org.junit.Assert.*; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.util.ArrayList; import java.util.List; - import cn.edu.tsinghua.tsfile.encoding.common.EndianType; import cn.edu.tsinghua.tsfile.encoding.encoder.BitmapEncoder; import cn.edu.tsinghua.tsfile.encoding.encoder.Encoder; @@ -23,7 +21,7 @@ */ public class BitmapDecoderTest { private static final Logger LOGGER = LoggerFactory.getLogger(BitmapDecoderTest.class); - + private List intList; private List booleanList; diff --git a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/FloatDecoderTest.java b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/FloatDecoderTest.java index c34a1a1c..cd0d2ede 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/FloatDecoderTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/FloatDecoderTest.java @@ -1,12 +1,10 @@ package cn.edu.tsinghua.tsfile.encoding.decoder; import static org.junit.Assert.*; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.util.ArrayList; import java.util.List; - import cn.edu.tsinghua.tsfile.encoding.encoder.Encoder; import cn.edu.tsinghua.tsfile.encoding.encoder.FloatEncoder; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; @@ -97,13 +95,13 @@ public void testDIFFDouble() throws Exception { } } -// @Test -// public void testBigDecimal() throws Exception { -// for (int i = 1; i <= 5; i++) { -// testDecimalLenght(TSEncoding.TS_2DIFF, doubleList, doubleMaxPointNumber, false, i); -// testDecimalLenght(TSEncoding.RLE, doubleList, doubleMaxPointNumber, false, i); -// } -// } + // @Test + // public void testBigDecimal() throws Exception { + // for (int i = 1; i <= 5; i++) { + // testDecimalLenght(TSEncoding.TS_2DIFF, doubleList, doubleMaxPointNumber, false, i); + // testDecimalLenght(TSEncoding.RLE, doubleList, doubleMaxPointNumber, false, i); + // } + // } @Test public void test() throws Exception { @@ -121,9 +119,9 @@ public void test() throws Exception { float value1_ = decoder1.readFloat(bais); float value2_ = decoder2.readFloat(bais); assertEquals(value, value1_, delta); - assertEquals(value+2, value2_, delta); + assertEquals(value + 2, value2_, delta); LOGGER.debug("{} // {}", value, value1_); - LOGGER.debug("{} // {}", value+2, value2_); + LOGGER.debug("{} // {}", value + 2, value2_); } private void testFloatLength(TSEncoding encoding, List valueList, int maxPointValue, @@ -176,29 +174,29 @@ private void testDoubleLength(TSEncoding encoding, List valueList, int m } } -// private void testDecimalLenght(TSEncoding encoding, List valueList, int maxPointValue, -// boolean isDebug, int repeatCount) throws Exception { -// Encoder encoder = new FloatEncoder(encoding, TSDataType.BIGDECIMAL, maxPointValue); -// 
ByteArrayOutputStream baos = new ByteArrayOutputStream(); -// for (int i = 0; i < repeatCount; i++) { -// for (double value : valueList) { -// encoder.encode(new BigDecimal(value), baos); -// } -// encoder.flush(baos); -// } -// LOGGER.debug("Repeated {} encoding done ", repeatCount); -// ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); -// -// for (int i = 0; i < repeatCount; i++) { -// Decoder decoder = new FloatDecoder(encoding, TSDataType.BIGDECIMAL); -// for (double value : valueList) { -// double value_ = decoder.readBigDecimal(bais).doubleValue(); -// if (isDebug) { -// LOGGER.debug("{} // {}", value_, value); -// } -// assertEquals(value, value_, delta); -// } -// LOGGER.debug("Repeated {} turn ", repeatCount, i); -// } -// } + // private void testDecimalLenght(TSEncoding encoding, List valueList, int maxPointValue, + // boolean isDebug, int repeatCount) throws Exception { + // Encoder encoder = new FloatEncoder(encoding, TSDataType.BIGDECIMAL, maxPointValue); + // ByteArrayOutputStream baos = new ByteArrayOutputStream(); + // for (int i = 0; i < repeatCount; i++) { + // for (double value : valueList) { + // encoder.encode(new BigDecimal(value), baos); + // } + // encoder.flush(baos); + // } + // LOGGER.debug("Repeated {} encoding done ", repeatCount); + // ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + // + // for (int i = 0; i < repeatCount; i++) { + // Decoder decoder = new FloatDecoder(encoding, TSDataType.BIGDECIMAL); + // for (double value : valueList) { + // double value_ = decoder.readBigDecimal(bais).doubleValue(); + // if (isDebug) { + // LOGGER.debug("{} // {}", value_, value); + // } + // assertEquals(value, value_, delta); + // } + // LOGGER.debug("Repeated {} turn ", repeatCount, i); + // } + // } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/GorillaDecoderTest.java b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/GorillaDecoderTest.java index 52401a63..40e5c977 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/GorillaDecoderTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/GorillaDecoderTest.java @@ -1,237 +1,235 @@ package cn.edu.tsinghua.tsfile.encoding.decoder; import static org.junit.Assert.*; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.List; - import org.junit.After; import org.junit.Before; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import cn.edu.tsinghua.tsfile.encoding.encoder.DoublePrecisionEncoder; import cn.edu.tsinghua.tsfile.encoding.encoder.Encoder; import cn.edu.tsinghua.tsfile.encoding.encoder.SinglePrecisionEncoder; public class GorillaDecoderTest { - private static final Logger LOGGER = LoggerFactory.getLogger(GorillaDecoderTest.class); - private List floatList; - private List doubleList; - private final double delta = 0.0000001; - private final int floatMaxPointValue = 10000; - private final long doubleMaxPointValue = 1000000000000000L; - - @Before - public void setUp() throws Exception { - floatList = new ArrayList(); - int hybridCount = 11; - int hybridNum = 50; - int hybridStart = 2000; - for (int i = 0; i < hybridNum; i++) { - for (int j = 0; j < hybridCount; j++) { - floatList.add((float) hybridStart / floatMaxPointValue); - } - for (int j = 0; j < hybridCount; j++) { - floatList.add((float) hybridStart / floatMaxPointValue); - hybridStart += 3; - } - - hybridCount += 2; - } - - 
doubleList = new ArrayList(); - int hybridCountDouble = 11; - int hybridNumDouble = 50; - long hybridStartDouble = 2000; - - for (int i = 0; i < hybridNumDouble; i++) { - for (int j = 0; j < hybridCountDouble; j++) { - doubleList.add((double) hybridStartDouble / doubleMaxPointValue); - } - for (int j = 0; j < hybridCountDouble; j++) { - doubleList.add((double) hybridStartDouble / doubleMaxPointValue); - hybridStart += 3; - } - - hybridCountDouble += 2; - } - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testNegativeNumber() throws IOException { - Encoder encoder = new SinglePrecisionEncoder(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - float value = -7.101f; - encoder.encode(value, baos); - encoder.encode(value - 2, baos); - encoder.encode(value - 4, baos); - encoder.flush(baos); - encoder.encode(value, baos); - encoder.encode(value - 2, baos); - encoder.encode(value - 4, baos); - encoder.flush(baos); - ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); - for(int i = 0; i < 2;i++){ - Decoder decoder = new SinglePrecisionDecoder(); - if(decoder.hasNext(bais)){ - assertEquals(value, decoder.readFloat(bais), delta); - } - if(decoder.hasNext(bais)){ - assertEquals(value-2, decoder.readFloat(bais), delta); - } - if(decoder.hasNext(bais)){ - assertEquals(value-4, decoder.readFloat(bais), delta); - } - } - } - - @Test - public void testZeroNumber() throws IOException{ - Encoder encoder = new DoublePrecisionEncoder(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - double value = 0f; - encoder.encode(value, baos); - encoder.encode(value, baos); - encoder.encode(value, baos); - encoder.flush(baos); - encoder.encode(value, baos); - encoder.encode(value, baos); - encoder.encode(value, baos); - encoder.flush(baos); - ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); - for(int i = 0; i < 2;i++){ - Decoder decoder = new DoublePrecisionDecoder(); - if(decoder.hasNext(bais)){ - assertEquals(value, decoder.readDouble(bais), delta); - } - if(decoder.hasNext(bais)){ - assertEquals(value, decoder.readDouble(bais), delta); - } - if(decoder.hasNext(bais)){ - assertEquals(value, decoder.readDouble(bais), delta); - } - } - } - - @Test - public void testFloatRepeat() throws Exception { - for (int i = 1; i <= 10; i++) { - testFloatLength(floatList, false, i); - } - } - - @Test - public void testDoubleRepeat() throws Exception { - for (int i = 1; i <= 10; i++) { - testDoubleLength(doubleList, false, i); - } - } - - @Test - public void testFloat() throws IOException { - Encoder encoder = new SinglePrecisionEncoder(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - float value = 7.101f; - int num = 10000; - for(int i = 0; i < num;i++){ - encoder.encode(value + 2 * i, baos); - } - encoder.flush(baos); - ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); - Decoder decoder = new SinglePrecisionDecoder(); - for(int i = 0; i < num;i++){ - if(decoder.hasNext(bais)){ - assertEquals(value + 2 * i, decoder.readFloat(bais), delta); - continue; - } - fail(); - } - } - - @Test - public void testDouble() throws IOException { - Encoder encoder = new DoublePrecisionEncoder(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - double value = 7.101f; - int num = 1000; - for(int i = 0; i < num;i++){ - encoder.encode(value + 2 * i, baos); - } - encoder.flush(baos); - ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); - Decoder decoder = new 
DoublePrecisionDecoder(); - for(int i = 0; i < num;i++){ - if(decoder.hasNext(bais)){ -// System.out.println("turn "+i); - assertEquals(value + 2 * i, decoder.readDouble(bais), delta); - continue; - } - fail(); - } - } - - private void testFloatLength(List valueList, boolean isDebug, int repeatCount) throws Exception { - Encoder encoder = new SinglePrecisionEncoder(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - for (int i = 0; i < repeatCount; i++) { - for (float value : valueList) { - encoder.encode(value, baos); - } - encoder.flush(baos); - } - ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); - for (int i = 0; i < repeatCount; i++) { - - Decoder decoder = new SinglePrecisionDecoder(); - for (float value : valueList) { -// System.out.println("Repeat: "+i+" value: "+value); - if(decoder.hasNext(bais)){ - float value_ = decoder.readFloat(bais); - if (isDebug) { - LOGGER.debug("{} // {}", value_, value); - } - assertEquals(value, value_, delta); - continue; - } - fail(); - } - } - } - - private void testDoubleLength(List valueList, boolean isDebug, int repeatCount) throws Exception { - Encoder encoder = new DoublePrecisionEncoder(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - for (int i = 0; i < repeatCount; i++) { - for (double value : valueList) { - encoder.encode(value, baos); - } - encoder.flush(baos); - } - - ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); - - for (int i = 0; i < repeatCount; i++) { - Decoder decoder = new DoublePrecisionDecoder(); - for (double value : valueList) { - if(decoder.hasNext(bais)){ - double value_ = decoder.readDouble(bais); - if (isDebug) { - LOGGER.debug("{} // {}", value_, value); - } - assertEquals(value, value_, delta); - continue; - } - fail(); - } - } - } + private static final Logger LOGGER = LoggerFactory.getLogger(GorillaDecoderTest.class); + private List floatList; + private List doubleList; + private final double delta = 0.0000001; + private final int floatMaxPointValue = 10000; + private final long doubleMaxPointValue = 1000000000000000L; + + @Before + public void setUp() throws Exception { + floatList = new ArrayList(); + int hybridCount = 11; + int hybridNum = 50; + int hybridStart = 2000; + for (int i = 0; i < hybridNum; i++) { + for (int j = 0; j < hybridCount; j++) { + floatList.add((float) hybridStart / floatMaxPointValue); + } + for (int j = 0; j < hybridCount; j++) { + floatList.add((float) hybridStart / floatMaxPointValue); + hybridStart += 3; + } + + hybridCount += 2; + } + + doubleList = new ArrayList(); + int hybridCountDouble = 11; + int hybridNumDouble = 50; + long hybridStartDouble = 2000; + + for (int i = 0; i < hybridNumDouble; i++) { + for (int j = 0; j < hybridCountDouble; j++) { + doubleList.add((double) hybridStartDouble / doubleMaxPointValue); + } + for (int j = 0; j < hybridCountDouble; j++) { + doubleList.add((double) hybridStartDouble / doubleMaxPointValue); + hybridStart += 3; + } + + hybridCountDouble += 2; + } + } + + @After + public void tearDown() throws Exception {} + + @Test + public void testNegativeNumber() throws IOException { + Encoder encoder = new SinglePrecisionEncoder(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + float value = -7.101f; + encoder.encode(value, baos); + encoder.encode(value - 2, baos); + encoder.encode(value - 4, baos); + encoder.flush(baos); + encoder.encode(value, baos); + encoder.encode(value - 2, baos); + encoder.encode(value - 4, baos); + encoder.flush(baos); + ByteArrayInputStream 
bais = new ByteArrayInputStream(baos.toByteArray()); + for (int i = 0; i < 2; i++) { + Decoder decoder = new SinglePrecisionDecoder(); + if (decoder.hasNext(bais)) { + assertEquals(value, decoder.readFloat(bais), delta); + } + if (decoder.hasNext(bais)) { + assertEquals(value - 2, decoder.readFloat(bais), delta); + } + if (decoder.hasNext(bais)) { + assertEquals(value - 4, decoder.readFloat(bais), delta); + } + } + } + + @Test + public void testZeroNumber() throws IOException { + Encoder encoder = new DoublePrecisionEncoder(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + double value = 0f; + encoder.encode(value, baos); + encoder.encode(value, baos); + encoder.encode(value, baos); + encoder.flush(baos); + encoder.encode(value, baos); + encoder.encode(value, baos); + encoder.encode(value, baos); + encoder.flush(baos); + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + for (int i = 0; i < 2; i++) { + Decoder decoder = new DoublePrecisionDecoder(); + if (decoder.hasNext(bais)) { + assertEquals(value, decoder.readDouble(bais), delta); + } + if (decoder.hasNext(bais)) { + assertEquals(value, decoder.readDouble(bais), delta); + } + if (decoder.hasNext(bais)) { + assertEquals(value, decoder.readDouble(bais), delta); + } + } + } + + @Test + public void testFloatRepeat() throws Exception { + for (int i = 1; i <= 10; i++) { + testFloatLength(floatList, false, i); + } + } + + @Test + public void testDoubleRepeat() throws Exception { + for (int i = 1; i <= 10; i++) { + testDoubleLength(doubleList, false, i); + } + } + + @Test + public void testFloat() throws IOException { + Encoder encoder = new SinglePrecisionEncoder(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + float value = 7.101f; + int num = 10000; + for (int i = 0; i < num; i++) { + encoder.encode(value + 2 * i, baos); + } + encoder.flush(baos); + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + Decoder decoder = new SinglePrecisionDecoder(); + for (int i = 0; i < num; i++) { + if (decoder.hasNext(bais)) { + assertEquals(value + 2 * i, decoder.readFloat(bais), delta); + continue; + } + fail(); + } + } + + @Test + public void testDouble() throws IOException { + Encoder encoder = new DoublePrecisionEncoder(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + double value = 7.101f; + int num = 1000; + for (int i = 0; i < num; i++) { + encoder.encode(value + 2 * i, baos); + } + encoder.flush(baos); + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + Decoder decoder = new DoublePrecisionDecoder(); + for (int i = 0; i < num; i++) { + if (decoder.hasNext(bais)) { + // System.out.println("turn "+i); + assertEquals(value + 2 * i, decoder.readDouble(bais), delta); + continue; + } + fail(); + } + } + + private void testFloatLength(List valueList, boolean isDebug, int repeatCount) + throws Exception { + Encoder encoder = new SinglePrecisionEncoder(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + for (int i = 0; i < repeatCount; i++) { + for (float value : valueList) { + encoder.encode(value, baos); + } + encoder.flush(baos); + } + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + for (int i = 0; i < repeatCount; i++) { + + Decoder decoder = new SinglePrecisionDecoder(); + for (float value : valueList) { + // System.out.println("Repeat: "+i+" value: "+value); + if (decoder.hasNext(bais)) { + float value_ = decoder.readFloat(bais); + if (isDebug) { + LOGGER.debug("{} // {}", value_, value); + 
} + assertEquals(value, value_, delta); + continue; + } + fail(); + } + } + } + + private void testDoubleLength(List valueList, boolean isDebug, int repeatCount) + throws Exception { + Encoder encoder = new DoublePrecisionEncoder(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + for (int i = 0; i < repeatCount; i++) { + for (double value : valueList) { + encoder.encode(value, baos); + } + encoder.flush(baos); + } + + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + + for (int i = 0; i < repeatCount; i++) { + Decoder decoder = new DoublePrecisionDecoder(); + for (double value : valueList) { + if (decoder.hasNext(bais)) { + double value_ = decoder.readDouble(bais); + if (isDebug) { + LOGGER.debug("{} // {}", value_, value); + } + assertEquals(value, value_, delta); + continue; + } + fail(); + } + } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/IntRleDecoderTest.java b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/IntRleDecoderTest.java index 3de3c25c..749e6994 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/IntRleDecoderTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/IntRleDecoderTest.java @@ -1,240 +1,239 @@ -package cn.edu.tsinghua.tsfile.encoding.decoder; - -import static org.junit.Assert.*; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; -import cn.edu.tsinghua.tsfile.encoding.common.EndianType; -import cn.edu.tsinghua.tsfile.encoding.encoder.IntRleEncoder; -import cn.edu.tsinghua.tsfile.encoding.encoder.RleEncoder; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class IntRleDecoderTest { - private List rleList; - private List bpList; - private List hybridList; - private int rleBitWidth; - private int bpBitWidth; - private int hybridWidth; - - @Before - public void setUp() throws Exception { - rleList = new ArrayList(); - int rleCount = 11; - int rleNum = 18; - int rleStart = 11; - for(int i = 0; i < rleNum;i++){ - for(int j = 0;j < rleCount;j++){ - rleList.add(rleStart); - } - for(int j = 0;j < rleCount;j++){ - rleList.add(rleStart-1); - } - rleCount += 2; - rleStart *= -3; - } - rleBitWidth = ReadWriteStreamUtils.getIntMaxBitWidth(rleList); - - bpList = new ArrayList(); - int bpCount = 100000; - int bpStart = 11; - for(int i = 0; i < bpCount;i++){ - bpStart += 3; - if(i % 2 == 1){ - bpList.add(bpStart*-1); - }else{ - bpList.add(bpStart); - } - } - bpBitWidth = ReadWriteStreamUtils.getIntMaxBitWidth(bpList); - - hybridList = new ArrayList(); - int hybridCount = 11; - int hybridNum = 1000; - int hybridStart = 20; - - for(int i = 0;i < hybridNum;i++){ - for(int j = 0;j < hybridCount;j++){ - hybridStart += 3; - if(j % 2 == 1){ - hybridList.add(hybridStart*-1); - }else{ - hybridList.add(hybridStart); - } - } - for(int j = 0;j < hybridCount;j++){ - if(i % 2 == 1){ - hybridList.add(hybridStart*-1); - }else{ - hybridList.add(hybridStart); - } - } - hybridCount += 2; - } - hybridWidth = ReadWriteStreamUtils.getIntMaxBitWidth(hybridList); - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testRleReadBigInt() throws IOException{ - List list = new ArrayList<>(); - for(int i = 7000000; i < 10000000;i++){ - list.add(i); - } - int width = ReadWriteStreamUtils.getIntMaxBitWidth(list); - testLength(list,width,false,1); - for(int i = 1;i 
< 10;i++){ - testLength(list,width,false,i); - } - } - - @Test - public void testRleReadInt() throws IOException{ - for(int i = 1;i < 10;i++){ - testLength(rleList,rleBitWidth,false,i); - } - } - - @Test - public void testMaxRLERepeatNUM() throws IOException{ - List repeatList = new ArrayList<>(); - int rleCount = 17; - int rleNum = 5; - int rleStart = 11; - for(int i = 0; i < rleNum;i++){ - for(int j = 0;j < rleCount;j++){ - repeatList.add(rleStart); - } - for(int j = 0;j < rleCount;j++){ - repeatList.add(rleStart / 3); - } - rleCount *= 7; - rleStart *= -3; - } - int bitWidth = ReadWriteStreamUtils.getIntMaxBitWidth(repeatList); - for(int i = 1;i < 10;i++){ - testLength(repeatList,bitWidth,false,i); - } - } - - @Test - public void testBitPackingReadInt() throws IOException{ - for(int i = 1;i < 10;i++){ - testLength(bpList,bpBitWidth,false,i); - } - } - - @Test - public void testHybridReadInt() throws IOException{ - for(int i = 1;i < 3;i++){ - testLength(hybridList,hybridWidth,false,i); - } - } - - @Test - public void testHybridReadBoolean() throws IOException{ - for(int i = 1;i < 10;i++){ - testLength(hybridList,hybridWidth,false,i); - } - } - - @Test - public void testBitPackingReadHeader() throws IOException{ - for(int i = 1;i < 505;i++){ - testBitPackedReadHeader(i); - } - } - - public void testBooleanLength(List list,int bitWidth,boolean isDebug,int repeatCount) throws IOException{ - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - RleEncoder encoder = new IntRleEncoder(EndianType.LITTLE_ENDIAN); - for(int i = 0;i < repeatCount;i++){ - for(int value : list){ - if(value % 2 == 0){ - encoder.encode(false, baos); - } else { - encoder.encode(true, baos); - } - - } - encoder.flush(baos); - } - - ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); - RleDecoder decoder = new IntRleDecoder(EndianType.LITTLE_ENDIAN); - for(int i = 0;i < repeatCount;i++){ - for(int value : list){ - boolean value_ = decoder.readBoolean(bais); - if(isDebug){ - System.out.println(value_+"/"+value); - } - if(value % 2 == 0){ - assertEquals(false, value_); - } else{ - assertEquals(true, value_); - } - - } - } - } - - public void testLength(List list,int bitWidth,boolean isDebug,int repeatCount) throws IOException{ - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - RleEncoder encoder = new IntRleEncoder(EndianType.LITTLE_ENDIAN); - for(int i = 0;i < repeatCount;i++){ - for(int value : list){ - encoder.encode(value, baos); - } - encoder.flush(baos); - } - - ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); - RleDecoder decoder = new IntRleDecoder(EndianType.LITTLE_ENDIAN); - for(int i = 0;i < repeatCount;i++){ - for(int value : list){ - int value_ = decoder.readInt(bais); - if(isDebug){ - System.out.println(value_+"/"+value); - } - assertEquals(value, value_); - } - } - } - - private void testBitPackedReadHeader(int num) throws IOException{ - List list = new ArrayList(); - - for(int i = 0; i < num;i++){ - list.add(i); - } - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - int bitWidth = ReadWriteStreamUtils.getIntMaxBitWidth(list); - RleEncoder encoder = new IntRleEncoder(EndianType.LITTLE_ENDIAN); - for(int value : list){ - encoder.encode(value, baos); - } - encoder.flush(baos); - ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); - ReadWriteStreamUtils.readUnsignedVarInt(bais); - assertEquals(bitWidth, bais.read()); - int header = ReadWriteStreamUtils.readUnsignedVarInt(bais); - int group = header >> 1; - 
assertEquals(group, (num+7)/8); - int lastBitPackedNum = bais.read(); - if(num % 8 == 0){ - assertEquals(lastBitPackedNum,8); - } else{ - assertEquals(lastBitPackedNum, num % 8); - } - } -} +package cn.edu.tsinghua.tsfile.encoding.decoder; + +import static org.junit.Assert.*; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; +import cn.edu.tsinghua.tsfile.encoding.common.EndianType; +import cn.edu.tsinghua.tsfile.encoding.encoder.IntRleEncoder; +import cn.edu.tsinghua.tsfile.encoding.encoder.RleEncoder; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class IntRleDecoderTest { + private List rleList; + private List bpList; + private List hybridList; + private int rleBitWidth; + private int bpBitWidth; + private int hybridWidth; + + @Before + public void setUp() throws Exception { + rleList = new ArrayList(); + int rleCount = 11; + int rleNum = 18; + int rleStart = 11; + for (int i = 0; i < rleNum; i++) { + for (int j = 0; j < rleCount; j++) { + rleList.add(rleStart); + } + for (int j = 0; j < rleCount; j++) { + rleList.add(rleStart - 1); + } + rleCount += 2; + rleStart *= -3; + } + rleBitWidth = ReadWriteStreamUtils.getIntMaxBitWidth(rleList); + + bpList = new ArrayList(); + int bpCount = 100000; + int bpStart = 11; + for (int i = 0; i < bpCount; i++) { + bpStart += 3; + if (i % 2 == 1) { + bpList.add(bpStart * -1); + } else { + bpList.add(bpStart); + } + } + bpBitWidth = ReadWriteStreamUtils.getIntMaxBitWidth(bpList); + + hybridList = new ArrayList(); + int hybridCount = 11; + int hybridNum = 1000; + int hybridStart = 20; + + for (int i = 0; i < hybridNum; i++) { + for (int j = 0; j < hybridCount; j++) { + hybridStart += 3; + if (j % 2 == 1) { + hybridList.add(hybridStart * -1); + } else { + hybridList.add(hybridStart); + } + } + for (int j = 0; j < hybridCount; j++) { + if (i % 2 == 1) { + hybridList.add(hybridStart * -1); + } else { + hybridList.add(hybridStart); + } + } + hybridCount += 2; + } + hybridWidth = ReadWriteStreamUtils.getIntMaxBitWidth(hybridList); + } + + @After + public void tearDown() throws Exception {} + + @Test + public void testRleReadBigInt() throws IOException { + List list = new ArrayList<>(); + for (int i = 7000000; i < 10000000; i++) { + list.add(i); + } + int width = ReadWriteStreamUtils.getIntMaxBitWidth(list); + testLength(list, width, false, 1); + for (int i = 1; i < 10; i++) { + testLength(list, width, false, i); + } + } + + @Test + public void testRleReadInt() throws IOException { + for (int i = 1; i < 10; i++) { + testLength(rleList, rleBitWidth, false, i); + } + } + + @Test + public void testMaxRLERepeatNUM() throws IOException { + List repeatList = new ArrayList<>(); + int rleCount = 17; + int rleNum = 5; + int rleStart = 11; + for (int i = 0; i < rleNum; i++) { + for (int j = 0; j < rleCount; j++) { + repeatList.add(rleStart); + } + for (int j = 0; j < rleCount; j++) { + repeatList.add(rleStart / 3); + } + rleCount *= 7; + rleStart *= -3; + } + int bitWidth = ReadWriteStreamUtils.getIntMaxBitWidth(repeatList); + for (int i = 1; i < 10; i++) { + testLength(repeatList, bitWidth, false, i); + } + } + + @Test + public void testBitPackingReadInt() throws IOException { + for (int i = 1; i < 10; i++) { + testLength(bpList, bpBitWidth, false, i); + } + } + + @Test + public void testHybridReadInt() throws IOException { + for (int i = 1; i < 3; i++) { + 
testLength(hybridList, hybridWidth, false, i); + } + } + + @Test + public void testHybridReadBoolean() throws IOException { + for (int i = 1; i < 10; i++) { + testLength(hybridList, hybridWidth, false, i); + } + } + + @Test + public void testBitPackingReadHeader() throws IOException { + for (int i = 1; i < 505; i++) { + testBitPackedReadHeader(i); + } + } + + public void testBooleanLength(List list, int bitWidth, boolean isDebug, int repeatCount) + throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + RleEncoder encoder = new IntRleEncoder(EndianType.LITTLE_ENDIAN); + for (int i = 0; i < repeatCount; i++) { + for (int value : list) { + if (value % 2 == 0) { + encoder.encode(false, baos); + } else { + encoder.encode(true, baos); + } + + } + encoder.flush(baos); + } + + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + RleDecoder decoder = new IntRleDecoder(EndianType.LITTLE_ENDIAN); + for (int i = 0; i < repeatCount; i++) { + for (int value : list) { + boolean value_ = decoder.readBoolean(bais); + if (isDebug) { + System.out.println(value_ + "/" + value); + } + if (value % 2 == 0) { + assertEquals(false, value_); + } else { + assertEquals(true, value_); + } + + } + } + } + + public void testLength(List list, int bitWidth, boolean isDebug, int repeatCount) + throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + RleEncoder encoder = new IntRleEncoder(EndianType.LITTLE_ENDIAN); + for (int i = 0; i < repeatCount; i++) { + for (int value : list) { + encoder.encode(value, baos); + } + encoder.flush(baos); + } + + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + RleDecoder decoder = new IntRleDecoder(EndianType.LITTLE_ENDIAN); + for (int i = 0; i < repeatCount; i++) { + for (int value : list) { + int value_ = decoder.readInt(bais); + if (isDebug) { + System.out.println(value_ + "/" + value); + } + assertEquals(value, value_); + } + } + } + + private void testBitPackedReadHeader(int num) throws IOException { + List list = new ArrayList(); + + for (int i = 0; i < num; i++) { + list.add(i); + } + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + int bitWidth = ReadWriteStreamUtils.getIntMaxBitWidth(list); + RleEncoder encoder = new IntRleEncoder(EndianType.LITTLE_ENDIAN); + for (int value : list) { + encoder.encode(value, baos); + } + encoder.flush(baos); + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + ReadWriteStreamUtils.readUnsignedVarInt(bais); + assertEquals(bitWidth, bais.read()); + int header = ReadWriteStreamUtils.readUnsignedVarInt(bais); + int group = header >> 1; + assertEquals(group, (num + 7) / 8); + int lastBitPackedNum = bais.read(); + if (num % 8 == 0) { + assertEquals(lastBitPackedNum, 8); + } else { + assertEquals(lastBitPackedNum, num % 8); + } + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/LongRleDecoderTest.java b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/LongRleDecoderTest.java index ea4378c7..79435a4b 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/LongRleDecoderTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/LongRleDecoderTest.java @@ -1,206 +1,203 @@ -package cn.edu.tsinghua.tsfile.encoding.decoder; - -import static org.junit.Assert.*; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import 
cn.edu.tsinghua.tsfile.encoding.common.EndianType; -import cn.edu.tsinghua.tsfile.encoding.encoder.RleEncoder; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; -import cn.edu.tsinghua.tsfile.encoding.encoder.LongRleEncoder; - -public class LongRleDecoderTest { - private List rleList; - private List bpList; - private List hybridList; - private int rleBitWidth; - private int bpBitWidth; - private int hybridWidth; - - @Before - public void setUp() throws Exception { - rleList = new ArrayList(); - int rleCount = 11; - int rleNum = 38; - long rleStart = 11; - for(int i = 0; i < rleNum;i++){ - for(int j = 0;j < rleCount;j++){ - rleList.add(rleStart); - } - for(int j = 0;j < rleCount;j++){ - rleList.add(rleStart-1); - } - rleCount += 2; - rleStart *= -3; - } - rleBitWidth = ReadWriteStreamUtils.getLongMaxBitWidth(rleList); - - bpList = new ArrayList(); - int bpCount = 15; - long bpStart = 11; - for(int i = 0; i < bpCount;i++){ - bpStart *= 3; - if(i % 2 == 1){ - bpList.add(bpStart*-1); - }else{ - bpList.add(bpStart); - } - } - bpBitWidth = ReadWriteStreamUtils.getLongMaxBitWidth(bpList); - - hybridList = new ArrayList(); - int hybridCount = 11; - int hybridNum = 1000; - long hybridStart = 20; - - for(int i = 0;i < hybridNum;i++){ - for(int j = 0;j < hybridCount;j++){ - hybridStart += 3; - if(j % 2 == 1){ - hybridList.add(hybridStart*-1); - }else{ - hybridList.add(hybridStart); - } - } - for(int j = 0;j < hybridCount;j++){ - if(i % 2 == 1){ - hybridList.add(hybridStart*-1); - }else{ - hybridList.add(hybridStart); - } - } - hybridCount += 2; - } - - hybridWidth = ReadWriteStreamUtils.getLongMaxBitWidth(hybridList); - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testRleReadBigLong() throws IOException{ - List list = new ArrayList<>(); - for(long i = 8000000; i < 8400000;i++){ - list.add(i); - } - int width = ReadWriteStreamUtils.getLongMaxBitWidth(list); - testLength(list,width,false,1); - for(int i = 1;i < 10;i++){ - testLength(list,width,false,i); - } - } - - @Test - public void testRleReadLong() throws IOException{ - for(int i = 1;i < 2;i++){ - testLength(rleList,rleBitWidth,false,i); - } - } - - @Test - public void testMaxRLERepeatNUM() throws IOException{ - List repeatList = new ArrayList<>(); - int rleCount = 17; - int rleNum = 5; - long rleStart = 11; - for(int i = 0; i < rleNum;i++){ - for(int j = 0;j < rleCount;j++){ - repeatList.add(rleStart); - } - for(int j = 0;j < rleCount;j++){ - repeatList.add(rleStart / 3); - } - rleCount *= 7; - rleStart *= -3; - } - int bitWidth = ReadWriteStreamUtils.getLongMaxBitWidth(repeatList); - for(int i = 1;i < 10;i++){ - testLength(repeatList,bitWidth,false,i); - } - } - - @Test - public void testBitPackingReadLong() throws IOException{ - for(int i = 1;i < 10;i++){ - testLength(bpList,bpBitWidth,false,i); - } - } - - @Test - public void testHybridReadLong() throws IOException{ - for(int i = 1;i < 10;i++){ - long start = System.currentTimeMillis(); - testLength(hybridList,hybridWidth,false,i); - long end = System.currentTimeMillis(); - System.out.println(String.format("Turn %d use time %d ms",i, end-start)); - } - } - - @Test - public void testBitPackingReadHeader() throws IOException{ - for(int i = 1;i < 505;i++){ - testBitPackedReadHeader(i); - } - } - - private void testBitPackedReadHeader(int num) throws IOException{ - List list = new ArrayList(); - - for(long i = 0; i < num;i++){ - list.add(i); - } - - 
ByteArrayOutputStream baos = new ByteArrayOutputStream(); - int bitWidth = ReadWriteStreamUtils.getLongMaxBitWidth(list); - RleEncoder encoder = new LongRleEncoder(EndianType.LITTLE_ENDIAN); - for(long value : list){ - encoder.encode(value, baos); - } - encoder.flush(baos); - ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); - ReadWriteStreamUtils.readUnsignedVarInt(bais); - assertEquals(bitWidth, bais.read()); - int header = ReadWriteStreamUtils.readUnsignedVarInt(bais); - int group = header >> 1; - assertEquals(group, (num+7)/8); - int lastBitPackedNum = bais.read(); - if(num % 8 == 0){ - assertEquals(lastBitPackedNum,8); - } else{ - assertEquals(lastBitPackedNum, num % 8); - } - } - - public void testLength(List list,int bitWidth,boolean isDebug,int repeatCount) throws IOException{ - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - RleEncoder encoder = new LongRleEncoder(EndianType.LITTLE_ENDIAN); - for(int i = 0;i < repeatCount;i++){ - for(long value : list){ - encoder.encode(value, baos); - } - encoder.flush(baos); - } - - ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); - RleDecoder decoder = new LongRleDecoder(EndianType.LITTLE_ENDIAN); - for(int i = 0;i < repeatCount;i++){ - for(long value : list){ - long value_ = decoder.readLong(bais); - if(isDebug){ - System.out.println(value_+"/"+value); - } - assertEquals(value, value_); - } - } - } -} +package cn.edu.tsinghua.tsfile.encoding.decoder; + +import static org.junit.Assert.*; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import cn.edu.tsinghua.tsfile.encoding.common.EndianType; +import cn.edu.tsinghua.tsfile.encoding.encoder.RleEncoder; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import cn.edu.tsinghua.tsfile.common.utils.ReadWriteStreamUtils; +import cn.edu.tsinghua.tsfile.encoding.encoder.LongRleEncoder; + +public class LongRleDecoderTest { + private List rleList; + private List bpList; + private List hybridList; + private int rleBitWidth; + private int bpBitWidth; + private int hybridWidth; + + @Before + public void setUp() throws Exception { + rleList = new ArrayList(); + int rleCount = 11; + int rleNum = 38; + long rleStart = 11; + for (int i = 0; i < rleNum; i++) { + for (int j = 0; j < rleCount; j++) { + rleList.add(rleStart); + } + for (int j = 0; j < rleCount; j++) { + rleList.add(rleStart - 1); + } + rleCount += 2; + rleStart *= -3; + } + rleBitWidth = ReadWriteStreamUtils.getLongMaxBitWidth(rleList); + + bpList = new ArrayList(); + int bpCount = 15; + long bpStart = 11; + for (int i = 0; i < bpCount; i++) { + bpStart *= 3; + if (i % 2 == 1) { + bpList.add(bpStart * -1); + } else { + bpList.add(bpStart); + } + } + bpBitWidth = ReadWriteStreamUtils.getLongMaxBitWidth(bpList); + + hybridList = new ArrayList(); + int hybridCount = 11; + int hybridNum = 1000; + long hybridStart = 20; + + for (int i = 0; i < hybridNum; i++) { + for (int j = 0; j < hybridCount; j++) { + hybridStart += 3; + if (j % 2 == 1) { + hybridList.add(hybridStart * -1); + } else { + hybridList.add(hybridStart); + } + } + for (int j = 0; j < hybridCount; j++) { + if (i % 2 == 1) { + hybridList.add(hybridStart * -1); + } else { + hybridList.add(hybridStart); + } + } + hybridCount += 2; + } + + hybridWidth = ReadWriteStreamUtils.getLongMaxBitWidth(hybridList); + } + + @After + public void tearDown() throws Exception {} + + @Test + public void 
testRleReadBigLong() throws IOException { + List list = new ArrayList<>(); + for (long i = 8000000; i < 8400000; i++) { + list.add(i); + } + int width = ReadWriteStreamUtils.getLongMaxBitWidth(list); + testLength(list, width, false, 1); + for (int i = 1; i < 10; i++) { + testLength(list, width, false, i); + } + } + + @Test + public void testRleReadLong() throws IOException { + for (int i = 1; i < 2; i++) { + testLength(rleList, rleBitWidth, false, i); + } + } + + @Test + public void testMaxRLERepeatNUM() throws IOException { + List repeatList = new ArrayList<>(); + int rleCount = 17; + int rleNum = 5; + long rleStart = 11; + for (int i = 0; i < rleNum; i++) { + for (int j = 0; j < rleCount; j++) { + repeatList.add(rleStart); + } + for (int j = 0; j < rleCount; j++) { + repeatList.add(rleStart / 3); + } + rleCount *= 7; + rleStart *= -3; + } + int bitWidth = ReadWriteStreamUtils.getLongMaxBitWidth(repeatList); + for (int i = 1; i < 10; i++) { + testLength(repeatList, bitWidth, false, i); + } + } + + @Test + public void testBitPackingReadLong() throws IOException { + for (int i = 1; i < 10; i++) { + testLength(bpList, bpBitWidth, false, i); + } + } + + @Test + public void testHybridReadLong() throws IOException { + for (int i = 1; i < 10; i++) { + long start = System.currentTimeMillis(); + testLength(hybridList, hybridWidth, false, i); + long end = System.currentTimeMillis(); + System.out.println(String.format("Turn %d use time %d ms", i, end - start)); + } + } + + @Test + public void testBitPackingReadHeader() throws IOException { + for (int i = 1; i < 505; i++) { + testBitPackedReadHeader(i); + } + } + + private void testBitPackedReadHeader(int num) throws IOException { + List list = new ArrayList(); + + for (long i = 0; i < num; i++) { + list.add(i); + } + + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + int bitWidth = ReadWriteStreamUtils.getLongMaxBitWidth(list); + RleEncoder encoder = new LongRleEncoder(EndianType.LITTLE_ENDIAN); + for (long value : list) { + encoder.encode(value, baos); + } + encoder.flush(baos); + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + ReadWriteStreamUtils.readUnsignedVarInt(bais); + assertEquals(bitWidth, bais.read()); + int header = ReadWriteStreamUtils.readUnsignedVarInt(bais); + int group = header >> 1; + assertEquals(group, (num + 7) / 8); + int lastBitPackedNum = bais.read(); + if (num % 8 == 0) { + assertEquals(lastBitPackedNum, 8); + } else { + assertEquals(lastBitPackedNum, num % 8); + } + } + + public void testLength(List list, int bitWidth, boolean isDebug, int repeatCount) + throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + RleEncoder encoder = new LongRleEncoder(EndianType.LITTLE_ENDIAN); + for (int i = 0; i < repeatCount; i++) { + for (long value : list) { + encoder.encode(value, baos); + } + encoder.flush(baos); + } + + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + RleDecoder decoder = new LongRleDecoder(EndianType.LITTLE_ENDIAN); + for (int i = 0; i < repeatCount; i++) { + for (long value : list) { + long value_ = decoder.readLong(bais); + if (isDebug) { + System.out.println(value_ + "/" + value); + } + assertEquals(value, value_); + } + } + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/delta/DeltaBinaryEncoderIntegerTest.java b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/delta/DeltaBinaryEncoderIntegerTest.java index 972a30ab..1d05f84f 100644 --- 
a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/delta/DeltaBinaryEncoderIntegerTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/delta/DeltaBinaryEncoderIntegerTest.java @@ -1,12 +1,10 @@ package cn.edu.tsinghua.tsfile.encoding.decoder.delta; import static org.junit.Assert.assertEquals; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Random; - import cn.edu.tsinghua.tsfile.encoding.decoder.DeltaBinaryDecoder; import cn.edu.tsinghua.tsfile.encoding.encoder.DeltaBinaryEncoder; import org.junit.Before; diff --git a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/delta/DeltaBinaryEncoderLongTest.java b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/delta/DeltaBinaryEncoderLongTest.java index 91bb9875..01645679 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/delta/DeltaBinaryEncoderLongTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/encoding/decoder/delta/DeltaBinaryEncoderLongTest.java @@ -1,12 +1,10 @@ package cn.edu.tsinghua.tsfile.encoding.decoder.delta; import static org.junit.Assert.assertEquals; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Random; - import cn.edu.tsinghua.tsfile.encoding.decoder.DeltaBinaryDecoder; import cn.edu.tsinghua.tsfile.encoding.encoder.DeltaBinaryEncoder; import org.junit.Before; diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupBlockMetaDataTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupBlockMetaDataTest.java index 2b9995be..d35eef3b 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupBlockMetaDataTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupBlockMetaDataTest.java @@ -6,11 +6,9 @@ import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.ArrayList; - import org.junit.After; import org.junit.Before; import org.junit.Test; - import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; import cn.edu.tsinghua.tsfile.file.metadata.utils.TestHelper; import cn.edu.tsinghua.tsfile.file.metadata.utils.Utils; @@ -18,75 +16,77 @@ import cn.edu.tsinghua.tsfile.format.RowGroupBlockMetaData; public class RowGroupBlockMetaDataTest { - public static final String DELTA_OBJECT_UID = "delta-3312"; - final String PATH = "target/outputRowGroupBlock.ksn"; - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - File file = new File(PATH); - if (file.exists()) - file.delete(); - } - - @Test - public void testWriteIntoFile() throws IOException { - TsRowGroupBlockMetaData metaData = new TsRowGroupBlockMetaData(); - metaData.addRowGroupMetaData(TestHelper.createSimpleRowGroupMetaDataInTSF()); - metaData.addRowGroupMetaData(TestHelper.createSimpleRowGroupMetaDataInTSF()); - metaData.setDeltaObjectID(DELTA_OBJECT_UID); - File file = new File(PATH); - if (file.exists()) - file.delete(); - FileOutputStream fos = new FileOutputStream(file); - TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); - ReadWriteThriftFormatUtils.write(metaData.convertToThrift(), out.getOutputStream()); - - out.close(); - fos.close(); - - FileInputStream fis = new FileInputStream(new File(PATH)); - Utils.isRowGroupBlockMetadataEqual(metaData, metaData.convertToThrift()); - - Utils.isRowGroupBlockMetadataEqual(metaData,ReadWriteThriftFormatUtils.read(fis, new RowGroupBlockMetaData())); - } - - 
@Test - public void testConvertToThrift() throws UnsupportedEncodingException { - TsRowGroupBlockMetaData metaData = new TsRowGroupBlockMetaData(null); - metaData.setDeltaObjectID(DELTA_OBJECT_UID); - Utils.isRowGroupBlockMetadataEqual(metaData, metaData.convertToThrift()); - metaData.setRowGroups(new ArrayList<>()); - Utils.isRowGroupBlockMetadataEqual(metaData, metaData.convertToThrift()); - metaData.addRowGroupMetaData(TestHelper.createSimpleRowGroupMetaDataInTSF()); - Utils.isRowGroupBlockMetadataEqual(metaData, metaData.convertToThrift()); - metaData.addRowGroupMetaData(TestHelper.createSimpleRowGroupMetaDataInTSF()); - Utils.isRowGroupBlockMetadataEqual(metaData, metaData.convertToThrift()); - } - - @Test - public void testConvertToTSF() throws UnsupportedEncodingException { - RowGroupBlockMetaData metaDataInThrift = new RowGroupBlockMetaData(new ArrayList<>()); - metaDataInThrift.setDelta_object_id(DELTA_OBJECT_UID); - TsRowGroupBlockMetaData metaDataInTSF = new TsRowGroupBlockMetaData(); - metaDataInTSF.convertToTSF(metaDataInThrift); - Utils.isRowGroupBlockMetadataEqual(metaDataInTSF, metaDataInTSF.convertToThrift()); - - -// metaDataInThrift.setRow_groups_metadata(new ArrayList<>()); -// metaDataInTSF.convertToTSF(metaDataInThrift); -// Utils.isRowGroupBlockMetadataEqual(metaDataInTSF, metaDataInTSF.convertToThrift()); - - metaDataInThrift.getRow_groups_metadata().add(TestHelper.createSimpleRowGroupMetaDataInThrift()); - metaDataInTSF.convertToTSF(metaDataInThrift); - Utils.isRowGroupBlockMetadataEqual(metaDataInTSF, metaDataInTSF.convertToThrift()); - - metaDataInThrift.getRow_groups_metadata().add(TestHelper.createSimpleRowGroupMetaDataInThrift()); - metaDataInTSF.convertToTSF(metaDataInThrift); - Utils.isRowGroupBlockMetadataEqual(metaDataInTSF, metaDataInTSF.convertToThrift()); - } + public static final String DELTA_OBJECT_UID = "delta-3312"; + final String PATH = "target/outputRowGroupBlock.ksn"; + + @Before + public void setUp() throws Exception {} + + @After + public void tearDown() throws Exception { + File file = new File(PATH); + if (file.exists()) + file.delete(); + } + + @Test + public void testWriteIntoFile() throws IOException { + TsRowGroupBlockMetaData metaData = new TsRowGroupBlockMetaData(); + metaData.addRowGroupMetaData(TestHelper.createSimpleRowGroupMetaDataInTSF()); + metaData.addRowGroupMetaData(TestHelper.createSimpleRowGroupMetaDataInTSF()); + metaData.setDeltaObjectID(DELTA_OBJECT_UID); + File file = new File(PATH); + if (file.exists()) + file.delete(); + FileOutputStream fos = new FileOutputStream(file); + TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); + ReadWriteThriftFormatUtils.write(metaData.convertToThrift(), out.getOutputStream()); + + out.close(); + fos.close(); + + FileInputStream fis = new FileInputStream(new File(PATH)); + Utils.isRowGroupBlockMetadataEqual(metaData, metaData.convertToThrift()); + + Utils.isRowGroupBlockMetadataEqual(metaData, + ReadWriteThriftFormatUtils.read(fis, new RowGroupBlockMetaData())); + } + + @Test + public void testConvertToThrift() throws UnsupportedEncodingException { + TsRowGroupBlockMetaData metaData = new TsRowGroupBlockMetaData(null); + metaData.setDeltaObjectID(DELTA_OBJECT_UID); + Utils.isRowGroupBlockMetadataEqual(metaData, metaData.convertToThrift()); + metaData.setRowGroups(new ArrayList<>()); + Utils.isRowGroupBlockMetadataEqual(metaData, metaData.convertToThrift()); + metaData.addRowGroupMetaData(TestHelper.createSimpleRowGroupMetaDataInTSF()); + 
Utils.isRowGroupBlockMetadataEqual(metaData, metaData.convertToThrift()); + metaData.addRowGroupMetaData(TestHelper.createSimpleRowGroupMetaDataInTSF()); + Utils.isRowGroupBlockMetadataEqual(metaData, metaData.convertToThrift()); + } + + @Test + public void testConvertToTSF() throws UnsupportedEncodingException { + RowGroupBlockMetaData metaDataInThrift = new RowGroupBlockMetaData(new ArrayList<>()); + metaDataInThrift.setDelta_object_id(DELTA_OBJECT_UID); + TsRowGroupBlockMetaData metaDataInTSF = new TsRowGroupBlockMetaData(); + metaDataInTSF.convertToTSF(metaDataInThrift); + Utils.isRowGroupBlockMetadataEqual(metaDataInTSF, metaDataInTSF.convertToThrift()); + + + // metaDataInThrift.setRow_groups_metadata(new ArrayList<>()); + // metaDataInTSF.convertToTSF(metaDataInThrift); + // Utils.isRowGroupBlockMetadataEqual(metaDataInTSF, metaDataInTSF.convertToThrift()); + + metaDataInThrift.getRow_groups_metadata() + .add(TestHelper.createSimpleRowGroupMetaDataInThrift()); + metaDataInTSF.convertToTSF(metaDataInThrift); + Utils.isRowGroupBlockMetadataEqual(metaDataInTSF, metaDataInTSF.convertToThrift()); + + metaDataInThrift.getRow_groups_metadata() + .add(TestHelper.createSimpleRowGroupMetaDataInThrift()); + metaDataInTSF.convertToTSF(metaDataInThrift); + Utils.isRowGroupBlockMetadataEqual(metaDataInTSF, metaDataInTSF.convertToThrift()); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupBlockMetadataBigTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupBlockMetadataBigTest.java index 0a39f3ba..f40bca54 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupBlockMetadataBigTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupBlockMetadataBigTest.java @@ -6,99 +6,98 @@ import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.List; - import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; import cn.edu.tsinghua.tsfile.file.metadata.utils.TestHelper; import cn.edu.tsinghua.tsfile.file.metadata.utils.Utils; import cn.edu.tsinghua.tsfile.file.utils.ReadWriteThriftFormatUtils; import cn.edu.tsinghua.tsfile.format.RowGroupBlockMetaData; - import org.junit.After; import org.junit.Before; import org.junit.Test; public class RowGroupBlockMetadataBigTest { - private static int deviceNum = 100; - private static int sensorNum = 1000; - private static String PATH = "target/test-big.ksn"; - public static final String DELTA_OBJECT_UID = "delta-3312"; + private static int deviceNum = 100; + private static int sensorNum = 1000; + private static String PATH = "target/test-big.ksn"; + public static final String DELTA_OBJECT_UID = "delta-3312"; + + @Before + public void setUp() throws Exception {} + + @After + public void tearDown() throws Exception { + File file = new File(PATH); + if (file.exists()) + file.delete(); + } - @Before - public void setUp() throws Exception { - } + private static RowGroupMetaData createSimpleRowGroupMetaDataInTSF() + throws UnsupportedEncodingException { + RowGroupMetaData metaData = new RowGroupMetaData(RowGroupMetaDataTest.DELTA_OBJECT_UID, + RowGroupMetaDataTest.MAX_NUM_ROWS, RowGroupMetaDataTest.TOTAL_BYTE_SIZE, new ArrayList<>(), + RowGroupMetaDataTest.DELTA_OBJECT_TYPE); + metaData.setPath(RowGroupMetaDataTest.FILE_PATH); + for (int i = 0; i < sensorNum; i++) { + metaData.addTimeSeriesChunkMetaData(TestHelper.createSimpleTimeSeriesChunkMetaDataInTSF()); + } + return metaData; + } - @After - public void tearDown() throws Exception { - File file = new 
File(PATH); - if (file.exists()) - file.delete(); - } + @Test + public void test() throws IOException { + System.out.println("-------------Start Metadata big data test------------"); + long startTime = System.currentTimeMillis(); + List rowGroupMetaDatas = new ArrayList<>(); + for (int i = 0; i < deviceNum; i++) { + rowGroupMetaDatas.add(createSimpleRowGroupMetaDataInTSF()); + } + TsRowGroupBlockMetaData metaData = new TsRowGroupBlockMetaData(rowGroupMetaDatas); + metaData.setDeltaObjectID(DELTA_OBJECT_UID); + System.out.println("1: create Metadata " + (System.currentTimeMillis() - startTime) + "ms"); - private static RowGroupMetaData createSimpleRowGroupMetaDataInTSF() throws UnsupportedEncodingException { - RowGroupMetaData metaData = new RowGroupMetaData(RowGroupMetaDataTest.DELTA_OBJECT_UID, - RowGroupMetaDataTest.MAX_NUM_ROWS, RowGroupMetaDataTest.TOTAL_BYTE_SIZE, new ArrayList<>(), - RowGroupMetaDataTest.DELTA_OBJECT_TYPE); - metaData.setPath(RowGroupMetaDataTest.FILE_PATH); - for (int i = 0; i < sensorNum; i++) { - metaData.addTimeSeriesChunkMetaData(TestHelper.createSimpleTimeSeriesChunkMetaDataInTSF()); - } - return metaData; - } + startTime = System.currentTimeMillis(); + RowGroupBlockMetaData metaDataInThrift = metaData.convertToThrift(); + System.out.println("2: covernet to Thrift " + (System.currentTimeMillis() - startTime) + "ms"); - @Test - public void test() throws IOException { - System.out.println("-------------Start Metadata big data test------------"); - long startTime = System.currentTimeMillis(); - List rowGroupMetaDatas = new ArrayList<>(); - for (int i = 0; i < deviceNum; i++) { - rowGroupMetaDatas.add(createSimpleRowGroupMetaDataInTSF()); - } - TsRowGroupBlockMetaData metaData = new TsRowGroupBlockMetaData(rowGroupMetaDatas); - metaData.setDeltaObjectID(DELTA_OBJECT_UID); - System.out.println("1: create Metadata " + (System.currentTimeMillis() - startTime)+"ms"); + Utils.isRowGroupBlockMetadataEqual(metaData, metaDataInThrift); - startTime = System.currentTimeMillis(); - RowGroupBlockMetaData metaDataInThrift = metaData.convertToThrift(); - System.out.println("2: covernet to Thrift " + (System.currentTimeMillis() - startTime)+"ms"); + startTime = System.currentTimeMillis(); + File file = new File(PATH); + if (file.exists()) + file.delete(); + TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); + ReadWriteThriftFormatUtils.writeRowGroupBlockMetadata(metaDataInThrift, out.getOutputStream()); + out.close(); + System.out.println("3: write to File" + (System.currentTimeMillis() - startTime) + "ms"); - Utils.isRowGroupBlockMetadataEqual(metaData, metaDataInThrift); - - startTime = System.currentTimeMillis(); - File file = new File(PATH); - if (file.exists()) - file.delete(); - TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); - ReadWriteThriftFormatUtils.writeRowGroupBlockMetadata(metaDataInThrift, out.getOutputStream()); - out.close(); - System.out.println("3: write to File" + (System.currentTimeMillis() - startTime)+"ms"); + FileInputStream fis = new FileInputStream(file); + System.out.println("file size: " + fis.available()); + fis.close(); - FileInputStream fis = new FileInputStream(file); - System.out.println("file size: " + fis.available()); - fis.close(); - - FileInputStream fis2 = new FileInputStream(new File(PATH)); - startTime = System.currentTimeMillis(); - RowGroupBlockMetaData metaDataInThrift2 = ReadWriteThriftFormatUtils.readRowGroupBlockMetaData(fis2); - System.out.println("4: read from File" + 
(System.currentTimeMillis() - startTime)+"ms"); - Utils.isRowGroupBlockMetadataEqual(metaData, metaDataInThrift2); - System.out.println("-------------End Metadata big data test------------"); - } + FileInputStream fis2 = new FileInputStream(new File(PATH)); + startTime = System.currentTimeMillis(); + RowGroupBlockMetaData metaDataInThrift2 = + ReadWriteThriftFormatUtils.readRowGroupBlockMetaData(fis2); + System.out.println("4: read from File" + (System.currentTimeMillis() - startTime) + "ms"); + Utils.isRowGroupBlockMetadataEqual(metaData, metaDataInThrift2); + System.out.println("-------------End Metadata big data test------------"); + } -// public static void main(String[] args) throws IOException { - // long startTime = System.currentTimeMillis(); - // File file = new File(PATH); - // RandomAccessOutputStream outputStream = new - // RandomAccessOutputStream(file, "rw"); - // byte[] b = new byte[20*1024*1024]; - // outputStream.write(b); - // outputStream.close(); - // System.out.println("3: "+(System.currentTimeMillis()-startTime)); - // FileInputStream fis = new FileInputStream(file); - // System.out.println("file size: "+fis.available()); - // fis.close(); - // - // if (file.exists()) - // file.delete(); -// } + // public static void main(String[] args) throws IOException { + // long startTime = System.currentTimeMillis(); + // File file = new File(PATH); + // RandomAccessOutputStream outputStream = new + // RandomAccessOutputStream(file, "rw"); + // byte[] b = new byte[20*1024*1024]; + // outputStream.write(b); + // outputStream.close(); + // System.out.println("3: "+(System.currentTimeMillis()-startTime)); + // FileInputStream fis = new FileInputStream(file); + // System.out.println("file size: "+fis.available()); + // fis.close(); + // + // if (file.exists()) + // file.delete(); + // } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupBlockMetadtaTimeTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupBlockMetadtaTimeTest.java index 4379e8f5..6cd38749 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupBlockMetadtaTimeTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupBlockMetadtaTimeTest.java @@ -4,7 +4,6 @@ import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.*; - import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; import cn.edu.tsinghua.tsfile.file.metadata.utils.TestHelper; import cn.edu.tsinghua.tsfile.file.utils.ReadWriteThriftFormatUtils; @@ -14,138 +13,148 @@ import org.junit.Test; public class RowGroupBlockMetadtaTimeTest { - private static int deviceNum = 20; - private static int sensorNum = 50; - private static String PATH = "target/test-big.ksn"; - public static final String DELTA_OBJECT_UID = "delta-3312"; + private static int deviceNum = 20; + private static int sensorNum = 50; + private static String PATH = "target/test-big.ksn"; + public static final String DELTA_OBJECT_UID = "delta-3312"; - @Before - public void setUp() throws Exception { - } + @Before + public void setUp() throws Exception {} - @After - public void tearDown() throws Exception { - File file = new File(PATH); - if (file.exists()) - file.delete(); - } + @After + public void tearDown() throws Exception { + File file = new File(PATH); + if (file.exists()) + file.delete(); + } - private static RowGroupMetaData createSimpleRowGroupMetaDataInTSF(String delta_object_uid) throws UnsupportedEncodingException { - RowGroupMetaData metaData = new 
RowGroupMetaData(delta_object_uid, - RowGroupMetaDataTest.MAX_NUM_ROWS, RowGroupMetaDataTest.TOTAL_BYTE_SIZE, new ArrayList<>(), - RowGroupMetaDataTest.DELTA_OBJECT_TYPE); - metaData.setPath(RowGroupMetaDataTest.FILE_PATH); - for (int i = 0; i < sensorNum; i++) { - metaData.addTimeSeriesChunkMetaData(TestHelper.createSimpleTimeSeriesChunkMetaDataInTSF()); - } - return metaData; + private static RowGroupMetaData createSimpleRowGroupMetaDataInTSF(String delta_object_uid) + throws UnsupportedEncodingException { + RowGroupMetaData metaData = new RowGroupMetaData(delta_object_uid, + RowGroupMetaDataTest.MAX_NUM_ROWS, RowGroupMetaDataTest.TOTAL_BYTE_SIZE, new ArrayList<>(), + RowGroupMetaDataTest.DELTA_OBJECT_TYPE); + metaData.setPath(RowGroupMetaDataTest.FILE_PATH); + for (int i = 0; i < sensorNum; i++) { + metaData.addTimeSeriesChunkMetaData(TestHelper.createSimpleTimeSeriesChunkMetaDataInTSF()); } + return metaData; + } - //@Test - public void test1() throws IOException { - System.out.println(); - test_one_io(10); - test_one_io(50); - test_one_io(100); - test_one_io(10000); - } + // @Test + public void test1() throws IOException { + System.out.println(); + test_one_io(10); + test_one_io(50); + test_one_io(100); + test_one_io(10000); + } - //@Test - public void test2() throws IOException { - System.out.println(); - test_multi_io(10); - test_multi_io(50); - test_multi_io(100); - test_multi_io(10000); - } + // @Test + public void test2() throws IOException { + System.out.println(); + test_multi_io(10); + test_multi_io(50); + test_multi_io(100); + test_multi_io(10000); + } - public void test_multi_io(int delta_object_num) throws IOException { - System.out.println("-------------Start Metadata multi_io test_object" + delta_object_num + "------------"); - long startTime = System.currentTimeMillis(); - Map tsRowGroupBlockMetaDataMap = new HashMap<>(); - List delta_object_list = new ArrayList<>(); - for (int i = 0; i < delta_object_num; i++) { - delta_object_list.add("delta-" + i); - } - List rowGroupMetaDatas; - for (int i = 0; i < delta_object_num; i++) { - String delta_object_id = delta_object_list.get(i); - rowGroupMetaDatas = new ArrayList<>(); - for (int j = 0; j < deviceNum; j++) { - rowGroupMetaDatas.add(createSimpleRowGroupMetaDataInTSF(delta_object_id)); - } - TsRowGroupBlockMetaData tsRowGroupBlockMetaData = new TsRowGroupBlockMetaData(rowGroupMetaDatas); - tsRowGroupBlockMetaData.setDeltaObjectID(delta_object_id); - tsRowGroupBlockMetaDataMap.put(delta_object_id, tsRowGroupBlockMetaData); - } + public void test_multi_io(int delta_object_num) throws IOException { + System.out.println( + "-------------Start Metadata multi_io test_object" + delta_object_num + "------------"); + long startTime = System.currentTimeMillis(); + Map tsRowGroupBlockMetaDataMap = new HashMap<>(); + List delta_object_list = new ArrayList<>(); + for (int i = 0; i < delta_object_num; i++) { + delta_object_list.add("delta-" + i); + } + List rowGroupMetaDatas; + for (int i = 0; i < delta_object_num; i++) { + String delta_object_id = delta_object_list.get(i); + rowGroupMetaDatas = new ArrayList<>(); + for (int j = 0; j < deviceNum; j++) { + rowGroupMetaDatas.add(createSimpleRowGroupMetaDataInTSF(delta_object_id)); + } + TsRowGroupBlockMetaData tsRowGroupBlockMetaData = + new TsRowGroupBlockMetaData(rowGroupMetaDatas); + tsRowGroupBlockMetaData.setDeltaObjectID(delta_object_id); + tsRowGroupBlockMetaDataMap.put(delta_object_id, tsRowGroupBlockMetaData); + } - System.out.println("1: create Metadata " + 
(System.currentTimeMillis() - startTime)+"ms"); + System.out.println("1: create Metadata " + (System.currentTimeMillis() - startTime) + "ms"); - File file = new File(PATH); - if (file.exists()) - file.delete(); - startTime = System.currentTimeMillis(); - TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); - long offset; - long offset_index; - int metadataBlockSize; - long start; - long end; - String current_deltaobject; - TsRowGroupBlockMetaData current_tsRowGroupBlockMetaData; - Long start_index = out.getPos(); - Iterator> iterator = tsRowGroupBlockMetaDataMap.entrySet().iterator(); - while (iterator.hasNext()) { - start = Long.MAX_VALUE; - end = Long.MIN_VALUE; - Map.Entry entry = iterator.next(); - current_deltaobject = entry.getKey(); - current_tsRowGroupBlockMetaData = entry.getValue(); - for (RowGroupMetaData rowGroupMetaData : current_tsRowGroupBlockMetaData.getRowGroups()) { - for (TimeSeriesChunkMetaData timeSeriesChunkMetaData : rowGroupMetaData.getTimeSeriesChunkMetaDataList()) { - start = Long.min(start, timeSeriesChunkMetaData.getTInTimeSeriesChunkMetaData().getStartTime()); - end = Long.max(end, timeSeriesChunkMetaData.getTInTimeSeriesChunkMetaData().getEndTime()); - } - } - //flush tsRowGroupBlockMetaDatas in order - RowGroupBlockMetaData rowGroupBlockMetaData = current_tsRowGroupBlockMetaData.convertToThrift(); - ReadWriteThriftFormatUtils.writeRowGroupBlockMetadata(rowGroupBlockMetaData, out.getOutputStream()); + File file = new File(PATH); + if (file.exists()) + file.delete(); + startTime = System.currentTimeMillis(); + TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); + long offset; + long offset_index; + int metadataBlockSize; + long start; + long end; + String current_deltaobject; + TsRowGroupBlockMetaData current_tsRowGroupBlockMetaData; + Long start_index = out.getPos(); + Iterator> iterator = + tsRowGroupBlockMetaDataMap.entrySet().iterator(); + while (iterator.hasNext()) { + start = Long.MAX_VALUE; + end = Long.MIN_VALUE; + Map.Entry entry = iterator.next(); + current_deltaobject = entry.getKey(); + current_tsRowGroupBlockMetaData = entry.getValue(); + for (RowGroupMetaData rowGroupMetaData : current_tsRowGroupBlockMetaData.getRowGroups()) { + for (TimeSeriesChunkMetaData timeSeriesChunkMetaData : rowGroupMetaData + .getTimeSeriesChunkMetaDataList()) { + start = Long.min(start, + timeSeriesChunkMetaData.getTInTimeSeriesChunkMetaData().getStartTime()); + end = Long.max(end, timeSeriesChunkMetaData.getTInTimeSeriesChunkMetaData().getEndTime()); } - Long end_index = out.getPos(); - out.close(); - System.out.println("2:" + (end_index - start_index) + " bytes write to File: " + (System.currentTimeMillis() - startTime)+"ms"); - System.out.println("-------------End Metadata multi_io test------------"); - System.out.println(); + } + // flush tsRowGroupBlockMetaDatas in order + RowGroupBlockMetaData rowGroupBlockMetaData = + current_tsRowGroupBlockMetaData.convertToThrift(); + ReadWriteThriftFormatUtils.writeRowGroupBlockMetadata(rowGroupBlockMetaData, + out.getOutputStream()); } + Long end_index = out.getPos(); + out.close(); + System.out.println("2:" + (end_index - start_index) + " bytes write to File: " + + (System.currentTimeMillis() - startTime) + "ms"); + System.out.println("-------------End Metadata multi_io test------------"); + System.out.println(); + } - public void test_one_io(int delta_object_num) throws IOException { - System.out.println("-------------Start Metadata one_io test delta_object" + delta_object_num + 
"------------"); - long startTime = System.currentTimeMillis(); - String delta_object_id = "test"; - List rowGroupMetaDatas = new ArrayList<>(); - for (int i = 0; i < delta_object_num; i++) { - for (int j = 0; j < deviceNum; j++) { - rowGroupMetaDatas.add(createSimpleRowGroupMetaDataInTSF(delta_object_id)); - } - } - TsRowGroupBlockMetaData tsRowGroupBlockMetaData = new TsRowGroupBlockMetaData(rowGroupMetaDatas); - tsRowGroupBlockMetaData.setDeltaObjectID(delta_object_id); - System.out.println("1: create Metadata " + (System.currentTimeMillis() - startTime)+"ms"); - File file = new File(PATH); - if (file.exists()) - file.delete(); - startTime = System.currentTimeMillis(); - TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); - String current_deltaobject; - TsRowGroupBlockMetaData current_tsRowGroupBlockMetaData; - Long start_index = out.getPos(); - ReadWriteThriftFormatUtils.writeRowGroupBlockMetadata( - tsRowGroupBlockMetaData.convertToThrift(), out.getOutputStream() - ); - Long end_index = out.getPos(); - out.close(); - System.out.println("2:" + (end_index - start_index) + " bytes write to File: " + (System.currentTimeMillis() - startTime)+"ms"); - System.out.println("-------------End Metadata one_io test------------"); - System.out.println(); + public void test_one_io(int delta_object_num) throws IOException { + System.out.println( + "-------------Start Metadata one_io test delta_object" + delta_object_num + "------------"); + long startTime = System.currentTimeMillis(); + String delta_object_id = "test"; + List rowGroupMetaDatas = new ArrayList<>(); + for (int i = 0; i < delta_object_num; i++) { + for (int j = 0; j < deviceNum; j++) { + rowGroupMetaDatas.add(createSimpleRowGroupMetaDataInTSF(delta_object_id)); + } } + TsRowGroupBlockMetaData tsRowGroupBlockMetaData = + new TsRowGroupBlockMetaData(rowGroupMetaDatas); + tsRowGroupBlockMetaData.setDeltaObjectID(delta_object_id); + System.out.println("1: create Metadata " + (System.currentTimeMillis() - startTime) + "ms"); + File file = new File(PATH); + if (file.exists()) + file.delete(); + startTime = System.currentTimeMillis(); + TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); + String current_deltaobject; + TsRowGroupBlockMetaData current_tsRowGroupBlockMetaData; + Long start_index = out.getPos(); + ReadWriteThriftFormatUtils.writeRowGroupBlockMetadata(tsRowGroupBlockMetaData.convertToThrift(), + out.getOutputStream()); + Long end_index = out.getPos(); + out.close(); + System.out.println("2:" + (end_index - start_index) + " bytes write to File: " + + (System.currentTimeMillis() - startTime) + "ms"); + System.out.println("-------------End Metadata one_io test------------"); + System.out.println(); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupMetaDataTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupMetaDataTest.java index f889995d..f11543d8 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupMetaDataTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/RowGroupMetaDataTest.java @@ -1,101 +1,99 @@ -package cn.edu.tsinghua.tsfile.file.metadata; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.util.ArrayList; - -import cn.edu.tsinghua.tsfile.file.metadata.utils.TestHelper; -import cn.edu.tsinghua.tsfile.file.metadata.utils.Utils; -import 
cn.edu.tsinghua.tsfile.file.utils.ReadWriteThriftFormatUtils; - -import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class RowGroupMetaDataTest { - - public static final String DELTA_OBJECT_UID = "delta-3312"; - public static final long MAX_NUM_ROWS = 34432432432L; - public static final long TOTAL_BYTE_SIZE = 434235463L; - public static final String FILE_PATH = "/home/user/dev"; - public static final String DELTA_OBJECT_TYPE = "device_type_good"; - final String PATH = "target/outputRowGroup.ksn"; - - @Before - public void setUp() throws Exception {} - - @After - public void tearDown() throws Exception { - File file = new File(PATH); - if (file.exists()) - file.delete(); - } - - @Test - public void testWriteIntoFile() throws IOException { - RowGroupMetaData metaData = TestHelper.createSimpleRowGroupMetaDataInTSF(); - File file = new File(PATH); - if (file.exists()) - file.delete(); - FileOutputStream fos = new FileOutputStream(file); - TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); - ReadWriteThriftFormatUtils.write(metaData.convertToThrift(), out.getOutputStream()); - - out.close(); - fos.close(); - - FileInputStream fis = new FileInputStream(new File(PATH)); - Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); - - Utils.isRowGroupMetaDataEqual(metaData, - ReadWriteThriftFormatUtils.read(fis, new cn.edu.tsinghua.tsfile.format.RowGroupMetaData())); - } - - @Test - public void testConvertToThrift() throws UnsupportedEncodingException { - RowGroupMetaData metaData = new RowGroupMetaData(DELTA_OBJECT_UID, MAX_NUM_ROWS, - TOTAL_BYTE_SIZE, null, DELTA_OBJECT_TYPE); - Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); - metaData.setPath(FILE_PATH); - Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); - metaData.setTimeSeriesChunkMetaDataList(new ArrayList<>()); - Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); - metaData.addTimeSeriesChunkMetaData(TestHelper.createSimpleTimeSeriesChunkMetaDataInTSF()); - Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); - metaData.addTimeSeriesChunkMetaData(TestHelper.createSimpleTimeSeriesChunkMetaDataInTSF()); - Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); - } - - @Test - public void testConvertToTSF() throws UnsupportedEncodingException { - cn.edu.tsinghua.tsfile.format.RowGroupMetaData rowGroupMetaData = - new cn.edu.tsinghua.tsfile.format.RowGroupMetaData(null, DELTA_OBJECT_UID, TOTAL_BYTE_SIZE, - MAX_NUM_ROWS, DELTA_OBJECT_TYPE, System.currentTimeMillis()); - RowGroupMetaData metaData = new RowGroupMetaData(); - metaData.convertToTSF(rowGroupMetaData); - Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); - - rowGroupMetaData.setFile_path(FILE_PATH); - metaData.convertToTSF(rowGroupMetaData); - Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); - - rowGroupMetaData.setTsc_metadata(new ArrayList<>()); - metaData.convertToTSF(rowGroupMetaData); - Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); - - rowGroupMetaData.getTsc_metadata() - .add(TestHelper.createSimpleTimeSeriesChunkMetaDataInThrift()); - metaData.convertToTSF(rowGroupMetaData); - Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); - - rowGroupMetaData.getTsc_metadata() - .add(TestHelper.createSimpleTimeSeriesChunkMetaDataInThrift()); - metaData.convertToTSF(rowGroupMetaData); - 
Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); - } - -} +package cn.edu.tsinghua.tsfile.file.metadata; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.util.ArrayList; +import cn.edu.tsinghua.tsfile.file.metadata.utils.TestHelper; +import cn.edu.tsinghua.tsfile.file.metadata.utils.Utils; +import cn.edu.tsinghua.tsfile.file.utils.ReadWriteThriftFormatUtils; +import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class RowGroupMetaDataTest { + + public static final String DELTA_OBJECT_UID = "delta-3312"; + public static final long MAX_NUM_ROWS = 34432432432L; + public static final long TOTAL_BYTE_SIZE = 434235463L; + public static final String FILE_PATH = "/home/user/dev"; + public static final String DELTA_OBJECT_TYPE = "device_type_good"; + final String PATH = "target/outputRowGroup.ksn"; + + @Before + public void setUp() throws Exception {} + + @After + public void tearDown() throws Exception { + File file = new File(PATH); + if (file.exists()) + file.delete(); + } + + @Test + public void testWriteIntoFile() throws IOException { + RowGroupMetaData metaData = TestHelper.createSimpleRowGroupMetaDataInTSF(); + File file = new File(PATH); + if (file.exists()) + file.delete(); + FileOutputStream fos = new FileOutputStream(file); + TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); + ReadWriteThriftFormatUtils.write(metaData.convertToThrift(), out.getOutputStream()); + + out.close(); + fos.close(); + + FileInputStream fis = new FileInputStream(new File(PATH)); + Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); + + Utils.isRowGroupMetaDataEqual(metaData, + ReadWriteThriftFormatUtils.read(fis, new cn.edu.tsinghua.tsfile.format.RowGroupMetaData())); + } + + @Test + public void testConvertToThrift() throws UnsupportedEncodingException { + RowGroupMetaData metaData = new RowGroupMetaData(DELTA_OBJECT_UID, MAX_NUM_ROWS, + TOTAL_BYTE_SIZE, null, DELTA_OBJECT_TYPE); + Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); + metaData.setPath(FILE_PATH); + Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); + metaData.setTimeSeriesChunkMetaDataList(new ArrayList<>()); + Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); + metaData.addTimeSeriesChunkMetaData(TestHelper.createSimpleTimeSeriesChunkMetaDataInTSF()); + Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); + metaData.addTimeSeriesChunkMetaData(TestHelper.createSimpleTimeSeriesChunkMetaDataInTSF()); + Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); + } + + @Test + public void testConvertToTSF() throws UnsupportedEncodingException { + cn.edu.tsinghua.tsfile.format.RowGroupMetaData rowGroupMetaData = + new cn.edu.tsinghua.tsfile.format.RowGroupMetaData(null, DELTA_OBJECT_UID, TOTAL_BYTE_SIZE, + MAX_NUM_ROWS, DELTA_OBJECT_TYPE, System.currentTimeMillis()); + RowGroupMetaData metaData = new RowGroupMetaData(); + metaData.convertToTSF(rowGroupMetaData); + Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); + + rowGroupMetaData.setFile_path(FILE_PATH); + metaData.convertToTSF(rowGroupMetaData); + Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); + + rowGroupMetaData.setTsc_metadata(new ArrayList<>()); + metaData.convertToTSF(rowGroupMetaData); 
+ Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); + + rowGroupMetaData.getTsc_metadata() + .add(TestHelper.createSimpleTimeSeriesChunkMetaDataInThrift()); + metaData.convertToTSF(rowGroupMetaData); + Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); + + rowGroupMetaData.getTsc_metadata() + .add(TestHelper.createSimpleTimeSeriesChunkMetaDataInThrift()); + metaData.convertToTSF(rowGroupMetaData); + Utils.isRowGroupMetaDataEqual(metaData, metaData.convertToThrift()); + } + +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TInTimeSeriesChunkMetaDataTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TInTimeSeriesChunkMetaDataTest.java index 09bbf271..9e7369e6 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TInTimeSeriesChunkMetaDataTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TInTimeSeriesChunkMetaDataTest.java @@ -1,132 +1,130 @@ -package cn.edu.tsinghua.tsfile.file.metadata; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import cn.edu.tsinghua.tsfile.format.DataType; -import cn.edu.tsinghua.tsfile.format.TimeInTimeSeriesChunkMetaData; -import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; -import cn.edu.tsinghua.tsfile.file.metadata.utils.TestHelper; -import cn.edu.tsinghua.tsfile.file.metadata.utils.Utils; -import cn.edu.tsinghua.tsfile.file.utils.ReadWriteThriftFormatUtils; -import cn.edu.tsinghua.tsfile.format.FreqType; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSFreqType; - -public class TInTimeSeriesChunkMetaDataTest { - private TInTimeSeriesChunkMetaData metaData; - public static List frequencies1; - public static List frequencies2; - public static final long startTime = 523372036854775806L;; - public static final long endTime = 523372036854775806L;; - final String PATH = "target/outputT.ksn"; - - @Before - public void setUp() throws Exception { - metaData = new TInTimeSeriesChunkMetaData(); - frequencies1 = new ArrayList(); - - frequencies2 = new ArrayList(); - frequencies2.add(132); - frequencies2.add(432); - frequencies2.add(35435); - } - - @After - public void tearDown() throws Exception { - File file = new File(PATH); - if (file.exists()) - file.delete(); - } - - @Test - public void testWriteIntoFile() throws IOException { - TInTimeSeriesChunkMetaData metaData = TestHelper.createT2inTSF(TSDataType.TEXT, - TSFreqType.IRREGULAR_FREQ, frequencies2, startTime, endTime); - File file = new File(PATH); - if (file.exists()) - file.delete(); - FileOutputStream fos = new FileOutputStream(file); - TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); - ReadWriteThriftFormatUtils.write(metaData.convertToThrift(), out.getOutputStream()); - - out.close(); - fos.close(); - - FileInputStream fis = new FileInputStream(new File(PATH)); - Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); - Utils.isTSeriesChunkMetadataEqual(metaData, - ReadWriteThriftFormatUtils.read(fis, new TimeInTimeSeriesChunkMetaData())); - } - - @Test - public void testConvertToThrift() { - for (TSDataType dataType : TSDataType.values()) { - TInTimeSeriesChunkMetaData metaData = - new TInTimeSeriesChunkMetaData(dataType, startTime, endTime); - Utils.isTSeriesChunkMetadataEqual(metaData, 
metaData.convertToThrift()); - for (TSFreqType freqType : TSFreqType.values()) { - metaData.setFreqType(freqType); - Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); - - List frequencies = new ArrayList(); - metaData.setFrequencies(frequencies); - Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); - - frequencies.add(132); - frequencies.add(432); - frequencies.add(35435); - metaData.setFrequencies(frequencies); - Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); - - frequencies.clear(); - metaData.setFrequencies(frequencies); - Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); - } - } - } - - @Test - public void testConvertToTSF() { - for (DataType dataType : DataType.values()) { - TimeInTimeSeriesChunkMetaData timeInTimeSeriesChunkMetaData = - new TimeInTimeSeriesChunkMetaData(dataType, startTime, endTime); - metaData.convertToTSF(timeInTimeSeriesChunkMetaData); - Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData); - for (FreqType freqType : FreqType.values()) { - timeInTimeSeriesChunkMetaData.setFreq_type(freqType); - - metaData.convertToTSF(timeInTimeSeriesChunkMetaData); - Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData); - - metaData.convertToTSF(timeInTimeSeriesChunkMetaData); - Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData); - - List frequencies = new ArrayList(); - timeInTimeSeriesChunkMetaData.setFrequencies(frequencies); - metaData.convertToTSF(timeInTimeSeriesChunkMetaData); - Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData); - - frequencies.add(132); - frequencies.add(432); - frequencies.add(35435); - timeInTimeSeriesChunkMetaData.setFrequencies(frequencies); - metaData.convertToTSF(timeInTimeSeriesChunkMetaData); - Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData); - - frequencies.clear(); - timeInTimeSeriesChunkMetaData.setFrequencies(frequencies); - metaData.convertToTSF(timeInTimeSeriesChunkMetaData); - Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData); - } - } - } -} +package cn.edu.tsinghua.tsfile.file.metadata; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.format.DataType; +import cn.edu.tsinghua.tsfile.format.TimeInTimeSeriesChunkMetaData; +import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; +import cn.edu.tsinghua.tsfile.file.metadata.utils.TestHelper; +import cn.edu.tsinghua.tsfile.file.metadata.utils.Utils; +import cn.edu.tsinghua.tsfile.file.utils.ReadWriteThriftFormatUtils; +import cn.edu.tsinghua.tsfile.format.FreqType; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSFreqType; + +public class TInTimeSeriesChunkMetaDataTest { + private TInTimeSeriesChunkMetaData metaData; + public static List frequencies1; + public static List frequencies2; + public static final long startTime = 523372036854775806L;; + public static final long endTime = 523372036854775806L;; + final String PATH = "target/outputT.ksn"; + + @Before + public void setUp() throws Exception { + metaData = new TInTimeSeriesChunkMetaData(); + frequencies1 = new ArrayList(); + + frequencies2 = new ArrayList(); + frequencies2.add(132); + 
frequencies2.add(432); + frequencies2.add(35435); + } + + @After + public void tearDown() throws Exception { + File file = new File(PATH); + if (file.exists()) + file.delete(); + } + + @Test + public void testWriteIntoFile() throws IOException { + TInTimeSeriesChunkMetaData metaData = TestHelper.createT2inTSF(TSDataType.TEXT, + TSFreqType.IRREGULAR_FREQ, frequencies2, startTime, endTime); + File file = new File(PATH); + if (file.exists()) + file.delete(); + FileOutputStream fos = new FileOutputStream(file); + TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); + ReadWriteThriftFormatUtils.write(metaData.convertToThrift(), out.getOutputStream()); + + out.close(); + fos.close(); + + FileInputStream fis = new FileInputStream(new File(PATH)); + Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); + Utils.isTSeriesChunkMetadataEqual(metaData, + ReadWriteThriftFormatUtils.read(fis, new TimeInTimeSeriesChunkMetaData())); + } + + @Test + public void testConvertToThrift() { + for (TSDataType dataType : TSDataType.values()) { + TInTimeSeriesChunkMetaData metaData = + new TInTimeSeriesChunkMetaData(dataType, startTime, endTime); + Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); + for (TSFreqType freqType : TSFreqType.values()) { + metaData.setFreqType(freqType); + Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); + + List frequencies = new ArrayList(); + metaData.setFrequencies(frequencies); + Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); + + frequencies.add(132); + frequencies.add(432); + frequencies.add(35435); + metaData.setFrequencies(frequencies); + Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); + + frequencies.clear(); + metaData.setFrequencies(frequencies); + Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); + } + } + } + + @Test + public void testConvertToTSF() { + for (DataType dataType : DataType.values()) { + TimeInTimeSeriesChunkMetaData timeInTimeSeriesChunkMetaData = + new TimeInTimeSeriesChunkMetaData(dataType, startTime, endTime); + metaData.convertToTSF(timeInTimeSeriesChunkMetaData); + Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData); + for (FreqType freqType : FreqType.values()) { + timeInTimeSeriesChunkMetaData.setFreq_type(freqType); + + metaData.convertToTSF(timeInTimeSeriesChunkMetaData); + Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData); + + metaData.convertToTSF(timeInTimeSeriesChunkMetaData); + Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData); + + List frequencies = new ArrayList(); + timeInTimeSeriesChunkMetaData.setFrequencies(frequencies); + metaData.convertToTSF(timeInTimeSeriesChunkMetaData); + Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData); + + frequencies.add(132); + frequencies.add(432); + frequencies.add(35435); + timeInTimeSeriesChunkMetaData.setFrequencies(frequencies); + metaData.convertToTSF(timeInTimeSeriesChunkMetaData); + Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData); + + frequencies.clear(); + timeInTimeSeriesChunkMetaData.setFrequencies(frequencies); + metaData.convertToTSF(timeInTimeSeriesChunkMetaData); + Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData); + } + } + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesChunkMetaDataTest.java 
b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesChunkMetaDataTest.java index d8e488f9..9894d940 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesChunkMetaDataTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesChunkMetaDataTest.java @@ -1,132 +1,130 @@ -package cn.edu.tsinghua.tsfile.file.metadata; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.UnsupportedEncodingException; - -import cn.edu.tsinghua.tsfile.file.metadata.enums.CompressionTypeName; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSChunkType; -import cn.edu.tsinghua.tsfile.file.metadata.utils.TestHelper; -import cn.edu.tsinghua.tsfile.file.metadata.utils.Utils; -import cn.edu.tsinghua.tsfile.file.utils.ReadWriteThriftFormatUtils; -import cn.edu.tsinghua.tsfile.format.CompressionType; -import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import cn.edu.tsinghua.tsfile.format.TimeInTimeSeriesChunkMetaData; -import cn.edu.tsinghua.tsfile.format.TimeSeriesChunkType; -import cn.edu.tsinghua.tsfile.format.ValueInTimeSeriesChunkMetaData; - -public class TimeSeriesChunkMetaDataTest { - - public static final String MEASUREMENT_UID = "sensor231"; - public static final long FILE_OFFSET = 2313424242L; - public static final long MAX_NUM_ROWS = 423432425L; - public static final long TOTAL_BYTE_SIZE = 432453453L; - public static final long DATA_PAGE_OFFSET = 42354334L; - public static final long DICTIONARY_PAGE_OFFSET = 23434543L; - public static final long INDEX_PAGE_OFFSET = 34243453L; - final String PATH = "target/outputTimeSeriesChunk.ksn"; - - @Before - public void setUp() throws Exception {} - - @After - public void tearDown() throws Exception { - File file = new File(PATH); - if (file.exists()) - file.delete(); - } - - @Test - public void testWriteIntoFile() throws IOException { - TimeSeriesChunkMetaData metaData = TestHelper.createSimpleTimeSeriesChunkMetaDataInTSF(); - File file = new File(PATH); - if (file.exists()) - file.delete(); - FileOutputStream fos = new FileOutputStream(file); - TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); - ReadWriteThriftFormatUtils.write(metaData.convertToThrift(), out.getOutputStream()); - - out.close(); - fos.close(); - - FileInputStream fis = new FileInputStream(new File(PATH)); - Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift()); - Utils.isTimeSeriesChunkMetaDataEqual(metaData, - ReadWriteThriftFormatUtils.read(fis, new cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData())); - } - - @Test - public void testConvertToThrift() throws UnsupportedEncodingException { - for (CompressionTypeName compressionTypeName : CompressionTypeName.values()) { - for (TSChunkType chunkType : TSChunkType.values()) { - TimeSeriesChunkMetaData metaData = new TimeSeriesChunkMetaData(MEASUREMENT_UID, chunkType, - FILE_OFFSET, compressionTypeName); - Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift()); - - metaData.setNumRows(MAX_NUM_ROWS); - metaData.setTotalByteSize(TOTAL_BYTE_SIZE); - - metaData.setJsonMetaData(TestHelper.getJSONArray()); - - metaData.setDataPageOffset(DATA_PAGE_OFFSET); - metaData.setDictionaryPageOffset(DICTIONARY_PAGE_OFFSET); - metaData.setIndexPageOffset(INDEX_PAGE_OFFSET); - Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift()); - for 
(TInTimeSeriesChunkMetaData tSeriesMetaData : TestHelper - .generateTSeriesChunkMetaDataListInTSF()) { - metaData.setTInTimeSeriesChunkMetaData(tSeriesMetaData); - Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift()); - for (VInTimeSeriesChunkMetaData vSeriesMetaData : TestHelper - .generateVSeriesChunkMetaDataListInTSF()) { - metaData.setVInTimeSeriesChunkMetaData(vSeriesMetaData); - Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift()); - } - } - } - } - } - - @Test - public void testConvertToTSF() throws UnsupportedEncodingException { - for (CompressionType compressionType : CompressionType.values()) { - for (TimeSeriesChunkType chunkType : TimeSeriesChunkType.values()) { - TimeSeriesChunkMetaData metaData = new TimeSeriesChunkMetaData(); - cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData timeSeriesChunkMetaData = - new cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData(MEASUREMENT_UID, chunkType, - FILE_OFFSET, compressionType); - metaData.convertToTSF(timeSeriesChunkMetaData); - Utils.isTimeSeriesChunkMetaDataEqual(metaData, timeSeriesChunkMetaData); - - timeSeriesChunkMetaData.setNum_rows(MAX_NUM_ROWS); - timeSeriesChunkMetaData.setTotal_byte_size(TOTAL_BYTE_SIZE); - - timeSeriesChunkMetaData.setJson_metadata(TestHelper.getJSONArray()); - timeSeriesChunkMetaData.setData_page_offset(DATA_PAGE_OFFSET); - timeSeriesChunkMetaData.setDictionary_page_offset(DICTIONARY_PAGE_OFFSET); - timeSeriesChunkMetaData.setIndex_page_offset(INDEX_PAGE_OFFSET); - - metaData.convertToTSF(timeSeriesChunkMetaData); - Utils.isTimeSeriesChunkMetaDataEqual(metaData, timeSeriesChunkMetaData); - - for (TimeInTimeSeriesChunkMetaData tSeriesChunkMetaData : TestHelper - .generateTimeInTimeSeriesChunkMetaDataInThrift()) { - timeSeriesChunkMetaData.setTime_tsc(tSeriesChunkMetaData); - metaData.convertToTSF(timeSeriesChunkMetaData); - Utils.isTimeSeriesChunkMetaDataEqual(metaData, timeSeriesChunkMetaData); - for (ValueInTimeSeriesChunkMetaData vSeriesChunkMetaData : TestHelper - .generateValueInTimeSeriesChunkMetaDataInThrift()) { - timeSeriesChunkMetaData.setValue_tsc(vSeriesChunkMetaData); - metaData.convertToTSF(timeSeriesChunkMetaData); - Utils.isTimeSeriesChunkMetaDataEqual(metaData, timeSeriesChunkMetaData); - } - } - } - } - } -} +package cn.edu.tsinghua.tsfile.file.metadata; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import cn.edu.tsinghua.tsfile.file.metadata.enums.CompressionTypeName; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSChunkType; +import cn.edu.tsinghua.tsfile.file.metadata.utils.TestHelper; +import cn.edu.tsinghua.tsfile.file.metadata.utils.Utils; +import cn.edu.tsinghua.tsfile.file.utils.ReadWriteThriftFormatUtils; +import cn.edu.tsinghua.tsfile.format.CompressionType; +import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import cn.edu.tsinghua.tsfile.format.TimeInTimeSeriesChunkMetaData; +import cn.edu.tsinghua.tsfile.format.TimeSeriesChunkType; +import cn.edu.tsinghua.tsfile.format.ValueInTimeSeriesChunkMetaData; + +public class TimeSeriesChunkMetaDataTest { + + public static final String MEASUREMENT_UID = "sensor231"; + public static final long FILE_OFFSET = 2313424242L; + public static final long MAX_NUM_ROWS = 423432425L; + public static final long TOTAL_BYTE_SIZE = 432453453L; + public static final long DATA_PAGE_OFFSET 
= 42354334L; + public static final long DICTIONARY_PAGE_OFFSET = 23434543L; + public static final long INDEX_PAGE_OFFSET = 34243453L; + final String PATH = "target/outputTimeSeriesChunk.ksn"; + + @Before + public void setUp() throws Exception {} + + @After + public void tearDown() throws Exception { + File file = new File(PATH); + if (file.exists()) + file.delete(); + } + + @Test + public void testWriteIntoFile() throws IOException { + TimeSeriesChunkMetaData metaData = TestHelper.createSimpleTimeSeriesChunkMetaDataInTSF(); + File file = new File(PATH); + if (file.exists()) + file.delete(); + FileOutputStream fos = new FileOutputStream(file); + TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); + ReadWriteThriftFormatUtils.write(metaData.convertToThrift(), out.getOutputStream()); + + out.close(); + fos.close(); + + FileInputStream fis = new FileInputStream(new File(PATH)); + Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift()); + Utils.isTimeSeriesChunkMetaDataEqual(metaData, ReadWriteThriftFormatUtils.read(fis, + new cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData())); + } + + @Test + public void testConvertToThrift() throws UnsupportedEncodingException { + for (CompressionTypeName compressionTypeName : CompressionTypeName.values()) { + for (TSChunkType chunkType : TSChunkType.values()) { + TimeSeriesChunkMetaData metaData = new TimeSeriesChunkMetaData(MEASUREMENT_UID, chunkType, + FILE_OFFSET, compressionTypeName); + Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift()); + + metaData.setNumRows(MAX_NUM_ROWS); + metaData.setTotalByteSize(TOTAL_BYTE_SIZE); + + metaData.setJsonMetaData(TestHelper.getJSONArray()); + + metaData.setDataPageOffset(DATA_PAGE_OFFSET); + metaData.setDictionaryPageOffset(DICTIONARY_PAGE_OFFSET); + metaData.setIndexPageOffset(INDEX_PAGE_OFFSET); + Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift()); + for (TInTimeSeriesChunkMetaData tSeriesMetaData : TestHelper + .generateTSeriesChunkMetaDataListInTSF()) { + metaData.setTInTimeSeriesChunkMetaData(tSeriesMetaData); + Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift()); + for (VInTimeSeriesChunkMetaData vSeriesMetaData : TestHelper + .generateVSeriesChunkMetaDataListInTSF()) { + metaData.setVInTimeSeriesChunkMetaData(vSeriesMetaData); + Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift()); + } + } + } + } + } + + @Test + public void testConvertToTSF() throws UnsupportedEncodingException { + for (CompressionType compressionType : CompressionType.values()) { + for (TimeSeriesChunkType chunkType : TimeSeriesChunkType.values()) { + TimeSeriesChunkMetaData metaData = new TimeSeriesChunkMetaData(); + cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData timeSeriesChunkMetaData = + new cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData(MEASUREMENT_UID, chunkType, + FILE_OFFSET, compressionType); + metaData.convertToTSF(timeSeriesChunkMetaData); + Utils.isTimeSeriesChunkMetaDataEqual(metaData, timeSeriesChunkMetaData); + + timeSeriesChunkMetaData.setNum_rows(MAX_NUM_ROWS); + timeSeriesChunkMetaData.setTotal_byte_size(TOTAL_BYTE_SIZE); + + timeSeriesChunkMetaData.setJson_metadata(TestHelper.getJSONArray()); + timeSeriesChunkMetaData.setData_page_offset(DATA_PAGE_OFFSET); + timeSeriesChunkMetaData.setDictionary_page_offset(DICTIONARY_PAGE_OFFSET); + timeSeriesChunkMetaData.setIndex_page_offset(INDEX_PAGE_OFFSET); + + metaData.convertToTSF(timeSeriesChunkMetaData); + 
Utils.isTimeSeriesChunkMetaDataEqual(metaData, timeSeriesChunkMetaData); + + for (TimeInTimeSeriesChunkMetaData tSeriesChunkMetaData : TestHelper + .generateTimeInTimeSeriesChunkMetaDataInThrift()) { + timeSeriesChunkMetaData.setTime_tsc(tSeriesChunkMetaData); + metaData.convertToTSF(timeSeriesChunkMetaData); + Utils.isTimeSeriesChunkMetaDataEqual(metaData, timeSeriesChunkMetaData); + for (ValueInTimeSeriesChunkMetaData vSeriesChunkMetaData : TestHelper + .generateValueInTimeSeriesChunkMetaDataInThrift()) { + timeSeriesChunkMetaData.setValue_tsc(vSeriesChunkMetaData); + metaData.convertToTSF(timeSeriesChunkMetaData); + Utils.isTimeSeriesChunkMetaDataEqual(metaData, timeSeriesChunkMetaData); + } + } + } + } + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesMetadataTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesMetadataTest.java index d226a3a8..0ff3102c 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesMetadataTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TimeSeriesMetadataTest.java @@ -2,9 +2,7 @@ import java.util.ArrayList; import java.util.List; - import org.junit.Test; - import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSFreqType; import cn.edu.tsinghua.tsfile.file.metadata.utils.Utils; @@ -19,8 +17,7 @@ public class TimeSeriesMetadataTest { @Test public void testConvertToThrift() { for (TSDataType dataType : TSDataType.values()) { - TimeSeriesMetadata timeSeries = - new TimeSeriesMetadata(measurementUID, dataType); + TimeSeriesMetadata timeSeries = new TimeSeriesMetadata(measurementUID, dataType); Utils.isTimeSeriesEqual(timeSeries, timeSeries.convertToThrift()); for (TSFreqType freqType : TSFreqType.values()) { diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TsFileMetaDataTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TsFileMetaDataTest.java index 1c4c78b0..6c25872f 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TsFileMetaDataTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/TsFileMetaDataTest.java @@ -1,150 +1,148 @@ -package cn.edu.tsinghua.tsfile.file.metadata; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import cn.edu.tsinghua.tsfile.file.metadata.converter.TsFileMetaDataConverter; -import cn.edu.tsinghua.tsfile.file.metadata.utils.Utils; -import cn.edu.tsinghua.tsfile.format.TimeSeries; -import cn.edu.tsinghua.tsfile.file.metadata.utils.TestHelper; -import cn.edu.tsinghua.tsfile.file.utils.ReadWriteThriftFormatUtils; -import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; -import cn.edu.tsinghua.tsfile.format.DeltaObject; -import cn.edu.tsinghua.tsfile.format.FileMetaData; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class TsFileMetaDataTest { - private TsFileMetaDataConverter converter = new TsFileMetaDataConverter(); - final String PATH = "target/output1.ksn"; - final int VERSION = 123; - - public static Map properties = new HashMap<>(); - public static Map tsDeltaObjectMap = new HashMap<>(); - public static Map deltaObjectMap = new HashMap<>(); - static { - properties.put("s1", "sensor1"); - properties.put("s2", "sensor2"); - properties.put("s3", "sensor3"); - } - - static { 
- tsDeltaObjectMap.put("d1", new TsDeltaObject(123, 456, 789, 901)); - tsDeltaObjectMap.put("d2", new TsDeltaObject(123, 456, 789, 901)); - tsDeltaObjectMap.put("d3", new TsDeltaObject(123, 456, 789, 901)); - } - - static { - deltaObjectMap.put("d1", new DeltaObject(123, 456, 789, 901)); - deltaObjectMap.put("d2", new DeltaObject(123, 456, 789, 901)); - deltaObjectMap.put("d3", new DeltaObject(123, 456, 789, 901)); - } - - @Before - public void setUp() throws Exception { - converter = new TsFileMetaDataConverter(); - } - - @After - public void tearDown() throws Exception { - File file = new File(PATH); - if (file.exists()) - file.delete(); - } - - @Test - public void testWriteFileMetaData() throws IOException { - TsFileMetaData tsfMetaData = new TsFileMetaData(tsDeltaObjectMap, null, VERSION); - tsfMetaData.addTimeSeriesMetaData(TestHelper.createSimpleTimeSeriesInTSF()); - tsfMetaData.addTimeSeriesMetaData(TestHelper.createSimpleTimeSeriesInTSF()); - tsfMetaData.setCreatedBy("tsf"); - List jsonMetaData = new ArrayList(); - jsonMetaData.add("fsdfsfsd"); - jsonMetaData.add("424fd"); - tsfMetaData.setJsonMetaData(jsonMetaData); - - tsfMetaData.setProps(properties); - tsfMetaData.addProp("key1", "value1"); - - File file = new File(PATH); - if (file.exists()) - file.delete(); - FileOutputStream fos = new FileOutputStream(file); - TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); - ReadWriteThriftFormatUtils.writeFileMetaData(converter.toThriftFileMetadata(tsfMetaData), - out.getOutputStream()); - - out.close(); - fos.close(); - - FileInputStream fis = new FileInputStream(new File(PATH)); - - FileMetaData fileMetaData2 = - ReadWriteThriftFormatUtils.readFileMetaData(fis); - Utils.isFileMetaDataEqual(tsfMetaData, fileMetaData2); - } - - @Test - public void testCreateFileMetaDataInThrift() throws UnsupportedEncodingException { - TsFileMetaData tsfMetaData = new TsFileMetaData(tsDeltaObjectMap, null, VERSION); - Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); - - tsfMetaData.setCreatedBy("tsf"); - Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); - - List jsonMetaData = new ArrayList(); - tsfMetaData.setJsonMetaData(jsonMetaData); - Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); - jsonMetaData.add("fsdfsfsd"); - jsonMetaData.add("424fd"); - Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); - - tsfMetaData.setProps(properties); - Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); - - tsfMetaData.setTimeSeriesList(new ArrayList()); - Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); - - tsfMetaData.addTimeSeriesMetaData(TestHelper.createSimpleTimeSeriesInTSF()); - Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); - tsfMetaData.addTimeSeriesMetaData(TestHelper.createSimpleTimeSeriesInTSF()); - Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); - } - - @Test - public void testCreateTSFMetadata() throws UnsupportedEncodingException { - FileMetaData fileMetaData = new FileMetaData(VERSION, deltaObjectMap, null); - Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); - - fileMetaData.setCreated_by("tsf"); - Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); - - List jsonMetaData = new ArrayList(); - fileMetaData.setJson_metadata(jsonMetaData); 
- Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); - jsonMetaData.add("fsdfsfsd"); - jsonMetaData.add("424fd"); - fileMetaData.setJson_metadata(jsonMetaData); - Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); - - fileMetaData.setProperties(properties); - Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); - - fileMetaData.setTimeseries_list(new ArrayList()); - Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); - fileMetaData.getTimeseries_list().add(TestHelper.createSimpleTimeSeriesInThrift()); - Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); - fileMetaData.getTimeseries_list().add(TestHelper.createSimpleTimeSeriesInThrift()); - Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); - - } - -} +package cn.edu.tsinghua.tsfile.file.metadata; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import cn.edu.tsinghua.tsfile.file.metadata.converter.TsFileMetaDataConverter; +import cn.edu.tsinghua.tsfile.file.metadata.utils.Utils; +import cn.edu.tsinghua.tsfile.format.TimeSeries; +import cn.edu.tsinghua.tsfile.file.metadata.utils.TestHelper; +import cn.edu.tsinghua.tsfile.file.utils.ReadWriteThriftFormatUtils; +import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; +import cn.edu.tsinghua.tsfile.format.DeltaObject; +import cn.edu.tsinghua.tsfile.format.FileMetaData; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class TsFileMetaDataTest { + private TsFileMetaDataConverter converter = new TsFileMetaDataConverter(); + final String PATH = "target/output1.ksn"; + final int VERSION = 123; + + public static Map properties = new HashMap<>(); + public static Map tsDeltaObjectMap = new HashMap<>(); + public static Map deltaObjectMap = new HashMap<>(); + static { + properties.put("s1", "sensor1"); + properties.put("s2", "sensor2"); + properties.put("s3", "sensor3"); + } + + static { + tsDeltaObjectMap.put("d1", new TsDeltaObject(123, 456, 789, 901)); + tsDeltaObjectMap.put("d2", new TsDeltaObject(123, 456, 789, 901)); + tsDeltaObjectMap.put("d3", new TsDeltaObject(123, 456, 789, 901)); + } + + static { + deltaObjectMap.put("d1", new DeltaObject(123, 456, 789, 901)); + deltaObjectMap.put("d2", new DeltaObject(123, 456, 789, 901)); + deltaObjectMap.put("d3", new DeltaObject(123, 456, 789, 901)); + } + + @Before + public void setUp() throws Exception { + converter = new TsFileMetaDataConverter(); + } + + @After + public void tearDown() throws Exception { + File file = new File(PATH); + if (file.exists()) + file.delete(); + } + + @Test + public void testWriteFileMetaData() throws IOException { + TsFileMetaData tsfMetaData = new TsFileMetaData(tsDeltaObjectMap, null, VERSION); + tsfMetaData.addTimeSeriesMetaData(TestHelper.createSimpleTimeSeriesInTSF()); + tsfMetaData.addTimeSeriesMetaData(TestHelper.createSimpleTimeSeriesInTSF()); + tsfMetaData.setCreatedBy("tsf"); + List jsonMetaData = new ArrayList(); + jsonMetaData.add("fsdfsfsd"); + jsonMetaData.add("424fd"); + tsfMetaData.setJsonMetaData(jsonMetaData); + + tsfMetaData.setProps(properties); + tsfMetaData.addProp("key1", "value1"); + + File file = new File(PATH); + if (file.exists()) + 
file.delete(); + FileOutputStream fos = new FileOutputStream(file); + TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); + ReadWriteThriftFormatUtils.writeFileMetaData(converter.toThriftFileMetadata(tsfMetaData), + out.getOutputStream()); + + out.close(); + fos.close(); + + FileInputStream fis = new FileInputStream(new File(PATH)); + + FileMetaData fileMetaData2 = ReadWriteThriftFormatUtils.readFileMetaData(fis); + Utils.isFileMetaDataEqual(tsfMetaData, fileMetaData2); + } + + @Test + public void testCreateFileMetaDataInThrift() throws UnsupportedEncodingException { + TsFileMetaData tsfMetaData = new TsFileMetaData(tsDeltaObjectMap, null, VERSION); + Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); + + tsfMetaData.setCreatedBy("tsf"); + Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); + + List jsonMetaData = new ArrayList(); + tsfMetaData.setJsonMetaData(jsonMetaData); + Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); + jsonMetaData.add("fsdfsfsd"); + jsonMetaData.add("424fd"); + Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); + + tsfMetaData.setProps(properties); + Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); + + tsfMetaData.setTimeSeriesList(new ArrayList()); + Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); + + tsfMetaData.addTimeSeriesMetaData(TestHelper.createSimpleTimeSeriesInTSF()); + Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); + tsfMetaData.addTimeSeriesMetaData(TestHelper.createSimpleTimeSeriesInTSF()); + Utils.isFileMetaDataEqual(tsfMetaData, converter.toThriftFileMetadata(tsfMetaData)); + } + + @Test + public void testCreateTSFMetadata() throws UnsupportedEncodingException { + FileMetaData fileMetaData = new FileMetaData(VERSION, deltaObjectMap, null); + Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); + + fileMetaData.setCreated_by("tsf"); + Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); + + List jsonMetaData = new ArrayList(); + fileMetaData.setJson_metadata(jsonMetaData); + Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); + jsonMetaData.add("fsdfsfsd"); + jsonMetaData.add("424fd"); + fileMetaData.setJson_metadata(jsonMetaData); + Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); + + fileMetaData.setProperties(properties); + Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); + + fileMetaData.setTimeseries_list(new ArrayList()); + Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); + fileMetaData.getTimeseries_list().add(TestHelper.createSimpleTimeSeriesInThrift()); + Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); + fileMetaData.getTimeseries_list().add(TestHelper.createSimpleTimeSeriesInThrift()); + Utils.isFileMetaDataEqual(converter.toTsFileMetadata(fileMetaData), fileMetaData); + + } + +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/VInTimeSeriesChunkMetaDataTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/VInTimeSeriesChunkMetaDataTest.java index 38dfc354..0ee00e62 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/VInTimeSeriesChunkMetaDataTest.java +++ 
b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/VInTimeSeriesChunkMetaDataTest.java @@ -1,104 +1,105 @@ -package cn.edu.tsinghua.tsfile.file.metadata; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.UnsupportedEncodingException; - -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import cn.edu.tsinghua.tsfile.file.metadata.utils.TestHelper; -import cn.edu.tsinghua.tsfile.format.DataType; -import cn.edu.tsinghua.tsfile.format.Digest; -import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; -import cn.edu.tsinghua.tsfile.file.metadata.utils.Utils; -import cn.edu.tsinghua.tsfile.file.utils.ReadWriteThriftFormatUtils; -import cn.edu.tsinghua.tsfile.format.ValueInTimeSeriesChunkMetaData; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class VInTimeSeriesChunkMetaDataTest { - private VInTimeSeriesChunkMetaData metaData; - public static final int MAX_ERROR = 1232; -// public static final String maxString = "3244324"; -// public static final String minString = "fddsfsfgd"; - final String PATH = "target/outputV.ksn"; - @Before - public void setUp() throws Exception { - metaData = new VInTimeSeriesChunkMetaData(); - } - - @After - public void tearDown() throws Exception { - File file = new File(PATH); - if (file.exists()) - file.delete(); - } - - @Test - public void testWriteIntoFile() throws IOException { - VInTimeSeriesChunkMetaData metaData = TestHelper.createSimpleV2InTSF(TSDataType.TEXT, new TsDigest()); - - File file = new File(PATH); - if (file.exists()) - file.delete(); - FileOutputStream fos = new FileOutputStream(file); - TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); - ReadWriteThriftFormatUtils.write(metaData.convertToThrift(), out.getOutputStream()); - - out.close(); - fos.close(); - - FileInputStream fis = new FileInputStream(new File(PATH)); - Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); - Utils.isVSeriesChunkMetadataEqual(metaData, - ReadWriteThriftFormatUtils.read(fis, new ValueInTimeSeriesChunkMetaData())); - } - - @Test - public void testConvertToThrift() throws UnsupportedEncodingException { - for (TSDataType dataType : TSDataType.values()) { - VInTimeSeriesChunkMetaData metaData = new VInTimeSeriesChunkMetaData(dataType); - Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); - - metaData.setMaxError(3123); - Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); - metaData.setMaxError(-11); - Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); - - TsDigest digest = new TsDigest(); - metaData.setDigest(digest); - Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); - - metaData.setDigest(TestHelper.createSimpleTsDigest()); - Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); - } - } - - @Test - public void testConvertToTSF() throws UnsupportedEncodingException { - for (DataType dataType : DataType.values()) { - ValueInTimeSeriesChunkMetaData valueInTimeSeriesChunkMetaData = - new ValueInTimeSeriesChunkMetaData(dataType); - metaData.convertToTSF(valueInTimeSeriesChunkMetaData); - Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData); - - valueInTimeSeriesChunkMetaData.setMax_error(3123); - metaData.convertToTSF(valueInTimeSeriesChunkMetaData); - Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData); - - 
valueInTimeSeriesChunkMetaData.setMax_error(-231); - metaData.convertToTSF(valueInTimeSeriesChunkMetaData); - Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData); - - valueInTimeSeriesChunkMetaData.setDigest(new Digest()); - metaData.convertToTSF(valueInTimeSeriesChunkMetaData); - Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData); - - valueInTimeSeriesChunkMetaData.setDigest(TestHelper.createSimpleDigest()); - metaData.convertToTSF(valueInTimeSeriesChunkMetaData); - Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData); - } - } -} +package cn.edu.tsinghua.tsfile.file.metadata; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.file.metadata.utils.TestHelper; +import cn.edu.tsinghua.tsfile.format.DataType; +import cn.edu.tsinghua.tsfile.format.Digest; +import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; +import cn.edu.tsinghua.tsfile.file.metadata.utils.Utils; +import cn.edu.tsinghua.tsfile.file.utils.ReadWriteThriftFormatUtils; +import cn.edu.tsinghua.tsfile.format.ValueInTimeSeriesChunkMetaData; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class VInTimeSeriesChunkMetaDataTest { + private VInTimeSeriesChunkMetaData metaData; + public static final int MAX_ERROR = 1232; + // public static final String maxString = "3244324"; + // public static final String minString = "fddsfsfgd"; + final String PATH = "target/outputV.ksn"; + + @Before + public void setUp() throws Exception { + metaData = new VInTimeSeriesChunkMetaData(); + } + + @After + public void tearDown() throws Exception { + File file = new File(PATH); + if (file.exists()) + file.delete(); + } + + @Test + public void testWriteIntoFile() throws IOException { + VInTimeSeriesChunkMetaData metaData = + TestHelper.createSimpleV2InTSF(TSDataType.TEXT, new TsDigest()); + + File file = new File(PATH); + if (file.exists()) + file.delete(); + FileOutputStream fos = new FileOutputStream(file); + TsRandomAccessFileWriter out = new TsRandomAccessFileWriter(file, "rw"); + ReadWriteThriftFormatUtils.write(metaData.convertToThrift(), out.getOutputStream()); + + out.close(); + fos.close(); + + FileInputStream fis = new FileInputStream(new File(PATH)); + Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); + Utils.isVSeriesChunkMetadataEqual(metaData, + ReadWriteThriftFormatUtils.read(fis, new ValueInTimeSeriesChunkMetaData())); + } + + @Test + public void testConvertToThrift() throws UnsupportedEncodingException { + for (TSDataType dataType : TSDataType.values()) { + VInTimeSeriesChunkMetaData metaData = new VInTimeSeriesChunkMetaData(dataType); + Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); + + metaData.setMaxError(3123); + Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); + metaData.setMaxError(-11); + Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); + + TsDigest digest = new TsDigest(); + metaData.setDigest(digest); + Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); + + metaData.setDigest(TestHelper.createSimpleTsDigest()); + Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift()); + } + } + + @Test + public void testConvertToTSF() throws UnsupportedEncodingException { + for (DataType 
dataType : DataType.values()) { + ValueInTimeSeriesChunkMetaData valueInTimeSeriesChunkMetaData = + new ValueInTimeSeriesChunkMetaData(dataType); + metaData.convertToTSF(valueInTimeSeriesChunkMetaData); + Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData); + + valueInTimeSeriesChunkMetaData.setMax_error(3123); + metaData.convertToTSF(valueInTimeSeriesChunkMetaData); + Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData); + + valueInTimeSeriesChunkMetaData.setMax_error(-231); + metaData.convertToTSF(valueInTimeSeriesChunkMetaData); + Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData); + + valueInTimeSeriesChunkMetaData.setDigest(new Digest()); + metaData.convertToTSF(valueInTimeSeriesChunkMetaData); + Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData); + + valueInTimeSeriesChunkMetaData.setDigest(TestHelper.createSimpleDigest()); + metaData.convertToTSF(valueInTimeSeriesChunkMetaData); + Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData); + } + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BigDecimalStatisticsTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BigDecimalStatisticsTest.java index fd949d04..d0e3a242 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BigDecimalStatisticsTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BigDecimalStatisticsTest.java @@ -1,58 +1,56 @@ package cn.edu.tsinghua.tsfile.file.metadata.statistics; import static org.junit.Assert.assertEquals; - import java.math.BigDecimal; - import org.junit.Test; public class BigDecimalStatisticsTest { - private static final double maxError = 0.0001d; - - @Test - public void testUpdate() { - Statistics bigStats = new BigDecimalStatistics(); - BigDecimal up1 = new BigDecimal("1.232"); - BigDecimal up2 = new BigDecimal("2.232"); - bigStats.updateStats(up1); - assertEquals(false, bigStats.isEmpty()); - bigStats.updateStats(up2); - assertEquals(false, bigStats.isEmpty()); - assertEquals(up2, (BigDecimal) bigStats.getMax()); - assertEquals(up1, (BigDecimal) bigStats.getMin()); - assertEquals(up2.add(up1).doubleValue(), bigStats.getSum(),maxError); - assertEquals(up1, (BigDecimal) bigStats.getFirst()); - assertEquals(up2, (BigDecimal) bigStats.getLast()); - } - - @Test - public void testMerge() { - Statistics bigStats1 = new BigDecimalStatistics(); - Statistics bigStats2 = new BigDecimalStatistics(); - - BigDecimal down1 = new BigDecimal("1.232"); - BigDecimal up1 = new BigDecimal("2.232"); - bigStats1.updateStats(down1); - bigStats1.updateStats(up1); - BigDecimal up2 = new BigDecimal("200.232"); - bigStats2.updateStats(up2); - - Statistics bigStats3 = new BigDecimalStatistics(); - bigStats3.mergeStatistics(bigStats1); - assertEquals(false, bigStats3.isEmpty()); - assertEquals(up1, (BigDecimal) bigStats3.getMax()); - assertEquals(down1, (BigDecimal) bigStats3.getMin()); - assertEquals(up1.add(down1).doubleValue(), bigStats3.getSum(),maxError); - assertEquals(down1, (BigDecimal) bigStats3.getFirst()); - assertEquals(up1, (BigDecimal) bigStats3.getLast()); - - bigStats3.mergeStatistics(bigStats2); - assertEquals(up2, (BigDecimal) bigStats3.getMax()); - assertEquals(down1, (BigDecimal) bigStats3.getMin()); - assertEquals(up1.add(down1).add(up2).doubleValue(), bigStats3.getSum(),maxError); - assertEquals(down1, (BigDecimal) bigStats3.getFirst()); - assertEquals(up2, (BigDecimal) bigStats3.getLast()); - 
- } + private static final double maxError = 0.0001d; + + @Test + public void testUpdate() { + Statistics bigStats = new BigDecimalStatistics(); + BigDecimal up1 = new BigDecimal("1.232"); + BigDecimal up2 = new BigDecimal("2.232"); + bigStats.updateStats(up1); + assertEquals(false, bigStats.isEmpty()); + bigStats.updateStats(up2); + assertEquals(false, bigStats.isEmpty()); + assertEquals(up2, (BigDecimal) bigStats.getMax()); + assertEquals(up1, (BigDecimal) bigStats.getMin()); + assertEquals(up2.add(up1).doubleValue(), bigStats.getSum(), maxError); + assertEquals(up1, (BigDecimal) bigStats.getFirst()); + assertEquals(up2, (BigDecimal) bigStats.getLast()); + } + + @Test + public void testMerge() { + Statistics bigStats1 = new BigDecimalStatistics(); + Statistics bigStats2 = new BigDecimalStatistics(); + + BigDecimal down1 = new BigDecimal("1.232"); + BigDecimal up1 = new BigDecimal("2.232"); + bigStats1.updateStats(down1); + bigStats1.updateStats(up1); + BigDecimal up2 = new BigDecimal("200.232"); + bigStats2.updateStats(up2); + + Statistics bigStats3 = new BigDecimalStatistics(); + bigStats3.mergeStatistics(bigStats1); + assertEquals(false, bigStats3.isEmpty()); + assertEquals(up1, (BigDecimal) bigStats3.getMax()); + assertEquals(down1, (BigDecimal) bigStats3.getMin()); + assertEquals(up1.add(down1).doubleValue(), bigStats3.getSum(), maxError); + assertEquals(down1, (BigDecimal) bigStats3.getFirst()); + assertEquals(up1, (BigDecimal) bigStats3.getLast()); + + bigStats3.mergeStatistics(bigStats2); + assertEquals(up2, (BigDecimal) bigStats3.getMax()); + assertEquals(down1, (BigDecimal) bigStats3.getMin()); + assertEquals(up1.add(down1).add(up2).doubleValue(), bigStats3.getSum(), maxError); + assertEquals(down1, (BigDecimal) bigStats3.getFirst()); + assertEquals(up2, (BigDecimal) bigStats3.getLast()); + + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BooleanStatisticsTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BooleanStatisticsTest.java index d23c223f..e4db55d3 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BooleanStatisticsTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/BooleanStatisticsTest.java @@ -1,53 +1,52 @@ package cn.edu.tsinghua.tsfile.file.metadata.statistics; import org.junit.Test; - import static org.junit.Assert.assertEquals; /** * @author CGF */ public class BooleanStatisticsTest { - private static final double maxError = 0.0001d; - - @Test - public void testUpdate() { - Statistics booleanStatistics = new BooleanStatistics(); - booleanStatistics.updateStats(true); - assertEquals(false, booleanStatistics.isEmpty()); - booleanStatistics.updateStats(false); - assertEquals(false, booleanStatistics.isEmpty()); - assertEquals(true, (boolean) booleanStatistics.getMax()); - assertEquals(false, (boolean) booleanStatistics.getMin()); - assertEquals(0, (double) booleanStatistics.getSum(), maxError); - assertEquals(true, (boolean) booleanStatistics.getFirst()); - assertEquals(false, (boolean) booleanStatistics.getLast()); - } - - @Test - public void testMerge() { - Statistics booleanStats1 = new BooleanStatistics(); - Statistics booleanStats2 = new BooleanStatistics(); - - booleanStats1.updateStats(false); - booleanStats1.updateStats(false); - - booleanStats2.updateStats(true); - - Statistics booleanStats3 = new BooleanStatistics(); - booleanStats3.mergeStatistics(booleanStats1); - assertEquals(false, booleanStats3.isEmpty()); - assertEquals(false, (boolean) 
booleanStats3.getMax()); - assertEquals(false, (boolean) booleanStats3.getMin()); - assertEquals(0, (double) booleanStats3.getSum(), maxError); - assertEquals(false, (boolean) booleanStats3.getFirst()); - assertEquals(false, (boolean) booleanStats3.getLast()); - - booleanStats3.mergeStatistics(booleanStats2); - assertEquals(true, (boolean) booleanStats3.getMax()); - assertEquals(false, (boolean) booleanStats3.getMin()); - assertEquals(0, (double) booleanStats3.getSum(), maxError); - assertEquals(false, (boolean) booleanStats3.getFirst()); - assertEquals(true, (boolean) booleanStats3.getLast()); - } + private static final double maxError = 0.0001d; + + @Test + public void testUpdate() { + Statistics booleanStatistics = new BooleanStatistics(); + booleanStatistics.updateStats(true); + assertEquals(false, booleanStatistics.isEmpty()); + booleanStatistics.updateStats(false); + assertEquals(false, booleanStatistics.isEmpty()); + assertEquals(true, (boolean) booleanStatistics.getMax()); + assertEquals(false, (boolean) booleanStatistics.getMin()); + assertEquals(0, (double) booleanStatistics.getSum(), maxError); + assertEquals(true, (boolean) booleanStatistics.getFirst()); + assertEquals(false, (boolean) booleanStatistics.getLast()); + } + + @Test + public void testMerge() { + Statistics booleanStats1 = new BooleanStatistics(); + Statistics booleanStats2 = new BooleanStatistics(); + + booleanStats1.updateStats(false); + booleanStats1.updateStats(false); + + booleanStats2.updateStats(true); + + Statistics booleanStats3 = new BooleanStatistics(); + booleanStats3.mergeStatistics(booleanStats1); + assertEquals(false, booleanStats3.isEmpty()); + assertEquals(false, (boolean) booleanStats3.getMax()); + assertEquals(false, (boolean) booleanStats3.getMin()); + assertEquals(0, (double) booleanStats3.getSum(), maxError); + assertEquals(false, (boolean) booleanStats3.getFirst()); + assertEquals(false, (boolean) booleanStats3.getLast()); + + booleanStats3.mergeStatistics(booleanStats2); + assertEquals(true, (boolean) booleanStats3.getMax()); + assertEquals(false, (boolean) booleanStats3.getMin()); + assertEquals(0, (double) booleanStats3.getSum(), maxError); + assertEquals(false, (boolean) booleanStats3.getFirst()); + assertEquals(true, (boolean) booleanStats3.getLast()); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/DoubleStatisticsTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/DoubleStatisticsTest.java index 1ae02958..1586b7a4 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/DoubleStatisticsTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/DoubleStatisticsTest.java @@ -1,51 +1,50 @@ package cn.edu.tsinghua.tsfile.file.metadata.statistics; import static org.junit.Assert.assertEquals; - import org.junit.Test; public class DoubleStatisticsTest { - private static final double maxError = 0.0001d; - - @Test - public void testUpdate() { - Statistics doubleStats = new DoubleStatistics(); - doubleStats.updateStats(1.34d); - assertEquals(false, doubleStats.isEmpty()); - doubleStats.updateStats(2.32d); - assertEquals(false, doubleStats.isEmpty()); - assertEquals(2.32d, (double) doubleStats.getMax(), maxError); - assertEquals(1.34d, (double) doubleStats.getMin(), maxError); - assertEquals(2.32d + 1.34d, (double) doubleStats.getSum(), maxError); - assertEquals(1.34d, (double) doubleStats.getFirst(), maxError); - assertEquals(2.32d, (double) doubleStats.getLast(), maxError); - } - - @Test - public void 
testMerge() { - Statistics doubleStats1 = new DoubleStatistics(); - Statistics doubleStats2 = new DoubleStatistics(); - - doubleStats1.updateStats(1.34d); - doubleStats1.updateStats(100.13453d); - - doubleStats2.updateStats(200.435d); - - Statistics doubleStats3 = new DoubleStatistics(); - doubleStats3.mergeStatistics(doubleStats1); - assertEquals(false, doubleStats3.isEmpty()); - assertEquals(100.13453d, (double) doubleStats3.getMax(), maxError); - assertEquals(1.34d, (double) doubleStats3.getMin(), maxError); - assertEquals(100.13453d + 1.34d, (double) doubleStats3.getSum(), maxError); - assertEquals(1.34d, (double) doubleStats3.getFirst(), maxError); - assertEquals(100.13453d, (double) doubleStats3.getLast(), maxError); - - doubleStats3.mergeStatistics(doubleStats2); - assertEquals(200.435d, (double) doubleStats3.getMax(), maxError); - assertEquals(1.34d, (double) doubleStats3.getMin(), maxError); - assertEquals(100.13453d + 1.34d + 200.435d, (double) doubleStats3.getSum(), maxError); - assertEquals(1.34d, (double) doubleStats3.getFirst(), maxError); - assertEquals(200.435d, (double) doubleStats3.getLast(), maxError); - } + private static final double maxError = 0.0001d; + + @Test + public void testUpdate() { + Statistics doubleStats = new DoubleStatistics(); + doubleStats.updateStats(1.34d); + assertEquals(false, doubleStats.isEmpty()); + doubleStats.updateStats(2.32d); + assertEquals(false, doubleStats.isEmpty()); + assertEquals(2.32d, (double) doubleStats.getMax(), maxError); + assertEquals(1.34d, (double) doubleStats.getMin(), maxError); + assertEquals(2.32d + 1.34d, (double) doubleStats.getSum(), maxError); + assertEquals(1.34d, (double) doubleStats.getFirst(), maxError); + assertEquals(2.32d, (double) doubleStats.getLast(), maxError); + } + + @Test + public void testMerge() { + Statistics doubleStats1 = new DoubleStatistics(); + Statistics doubleStats2 = new DoubleStatistics(); + + doubleStats1.updateStats(1.34d); + doubleStats1.updateStats(100.13453d); + + doubleStats2.updateStats(200.435d); + + Statistics doubleStats3 = new DoubleStatistics(); + doubleStats3.mergeStatistics(doubleStats1); + assertEquals(false, doubleStats3.isEmpty()); + assertEquals(100.13453d, (double) doubleStats3.getMax(), maxError); + assertEquals(1.34d, (double) doubleStats3.getMin(), maxError); + assertEquals(100.13453d + 1.34d, (double) doubleStats3.getSum(), maxError); + assertEquals(1.34d, (double) doubleStats3.getFirst(), maxError); + assertEquals(100.13453d, (double) doubleStats3.getLast(), maxError); + + doubleStats3.mergeStatistics(doubleStats2); + assertEquals(200.435d, (double) doubleStats3.getMax(), maxError); + assertEquals(1.34d, (double) doubleStats3.getMin(), maxError); + assertEquals(100.13453d + 1.34d + 200.435d, (double) doubleStats3.getSum(), maxError); + assertEquals(1.34d, (double) doubleStats3.getFirst(), maxError); + assertEquals(200.435d, (double) doubleStats3.getLast(), maxError); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/FloatStatisticsTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/FloatStatisticsTest.java index 0ad8814a..236250c3 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/FloatStatisticsTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/FloatStatisticsTest.java @@ -1,52 +1,51 @@ package cn.edu.tsinghua.tsfile.file.metadata.statistics; import static org.junit.Assert.assertEquals; - import org.junit.Test; public class FloatStatisticsTest { - private static 
final float maxError = 0.0001f; - - @Test - public void testUpdate() { - Statistics floatStats = new FloatStatistics(); - floatStats.updateStats(1.34f); - assertEquals(false, floatStats.isEmpty()); - floatStats.updateStats(2.32f); - assertEquals(false, floatStats.isEmpty()); - assertEquals(2.32f, (double) floatStats.getMax(), maxError); - assertEquals(1.34f, (double) floatStats.getMin(), maxError); - assertEquals(2.32f + 1.34f, (double) floatStats.getSum(), maxError); - assertEquals(1.34f, (double) floatStats.getFirst(), maxError); - assertEquals(2.32f, (double) floatStats.getLast(), maxError); - } - - @Test - public void testMerge() { - Statistics floatStats1 = new FloatStatistics(); - Statistics floatStats2 = new FloatStatistics(); - - floatStats1.updateStats(1.34f); - floatStats1.updateStats(100.13453f); - - floatStats2.updateStats(200.435f); - - Statistics floatStats3 = new FloatStatistics(); - floatStats3.mergeStatistics(floatStats1); - assertEquals(false, floatStats3.isEmpty()); - assertEquals(100.13453f, (float) floatStats3.getMax(), maxError); - assertEquals(1.34f, (float) floatStats3.getMin(), maxError); - assertEquals(100.13453f + 1.34f, (float) floatStats3.getSum(), maxError); - assertEquals(1.34f, (float) floatStats3.getFirst(), maxError); - assertEquals(100.13453f, (float) floatStats3.getLast(), maxError); - - floatStats3.mergeStatistics(floatStats2); - assertEquals(200.435d, (float) floatStats3.getMax(), maxError); - assertEquals(1.34d, (float) floatStats3.getMin(), maxError); - assertEquals(100.13453f + 1.34f + 200.435d, (float) floatStats3.getSum(), maxError); - assertEquals(1.34f, (float) floatStats3.getFirst(), maxError); - assertEquals(200.435f, (float) floatStats3.getLast(), maxError); - - } + private static final float maxError = 0.0001f; + + @Test + public void testUpdate() { + Statistics floatStats = new FloatStatistics(); + floatStats.updateStats(1.34f); + assertEquals(false, floatStats.isEmpty()); + floatStats.updateStats(2.32f); + assertEquals(false, floatStats.isEmpty()); + assertEquals(2.32f, (double) floatStats.getMax(), maxError); + assertEquals(1.34f, (double) floatStats.getMin(), maxError); + assertEquals(2.32f + 1.34f, (double) floatStats.getSum(), maxError); + assertEquals(1.34f, (double) floatStats.getFirst(), maxError); + assertEquals(2.32f, (double) floatStats.getLast(), maxError); + } + + @Test + public void testMerge() { + Statistics floatStats1 = new FloatStatistics(); + Statistics floatStats2 = new FloatStatistics(); + + floatStats1.updateStats(1.34f); + floatStats1.updateStats(100.13453f); + + floatStats2.updateStats(200.435f); + + Statistics floatStats3 = new FloatStatistics(); + floatStats3.mergeStatistics(floatStats1); + assertEquals(false, floatStats3.isEmpty()); + assertEquals(100.13453f, (float) floatStats3.getMax(), maxError); + assertEquals(1.34f, (float) floatStats3.getMin(), maxError); + assertEquals(100.13453f + 1.34f, (float) floatStats3.getSum(), maxError); + assertEquals(1.34f, (float) floatStats3.getFirst(), maxError); + assertEquals(100.13453f, (float) floatStats3.getLast(), maxError); + + floatStats3.mergeStatistics(floatStats2); + assertEquals(200.435d, (float) floatStats3.getMax(), maxError); + assertEquals(1.34d, (float) floatStats3.getMin(), maxError); + assertEquals(100.13453f + 1.34f + 200.435d, (float) floatStats3.getSum(), maxError); + assertEquals(1.34f, (float) floatStats3.getFirst(), maxError); + assertEquals(200.435f, (float) floatStats3.getLast(), maxError); + + } } diff --git 
a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/IntegerStatisticsTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/IntegerStatisticsTest.java index 1d09e495..d35d9ca8 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/IntegerStatisticsTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/IntegerStatisticsTest.java @@ -1,7 +1,6 @@ package cn.edu.tsinghua.tsfile.file.metadata.statistics; import static org.junit.Assert.assertEquals; - import org.junit.Test; public class IntegerStatisticsTest { @@ -15,9 +14,9 @@ public void testUpdate() { assertEquals(false, intStats.isEmpty()); assertEquals(2, (int) intStats.getMax()); assertEquals(1, (int) intStats.getMin()); - assertEquals(1, (int)intStats.getFirst()); - assertEquals(3, (int)intStats.getSum()); - assertEquals(2, (int)intStats.getLast()); + assertEquals(1, (int) intStats.getFirst()); + assertEquals(3, (int) intStats.getSum()); + assertEquals(2, (int) intStats.getLast()); } @Test @@ -35,15 +34,15 @@ public void testMerge() { assertEquals(false, intStats3.isEmpty()); assertEquals(100, (int) intStats3.getMax()); assertEquals(1, (int) intStats3.getMin()); - assertEquals(1, (int)intStats3.getFirst()); - assertEquals(1+100, (int)intStats3.getSum()); - assertEquals(100, (int)intStats3.getLast()); - + assertEquals(1, (int) intStats3.getFirst()); + assertEquals(1 + 100, (int) intStats3.getSum()); + assertEquals(100, (int) intStats3.getLast()); + intStats3.mergeStatistics(intStats2); assertEquals(200, (int) intStats3.getMax()); assertEquals(1, (int) intStats3.getMin()); - assertEquals(1, (int)intStats3.getFirst()); - assertEquals(101+200, (int)intStats3.getSum()); - assertEquals(200, (int)intStats3.getLast()); + assertEquals(1, (int) intStats3.getFirst()); + assertEquals(101 + 200, (int) intStats3.getSum()); + assertEquals(200, (int) intStats3.getLast()); } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/LongStatisticsTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/LongStatisticsTest.java index 5eee5e2f..f4d4a14a 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/LongStatisticsTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/LongStatisticsTest.java @@ -5,69 +5,69 @@ public class LongStatisticsTest { - @Test - public void testUpdate() { - Statistics longStats = new LongStatistics(); - assertEquals(true, longStats.isEmpty()); - long firstValue = -120985402913209l; - long secondValue = 1251465332132513l; - longStats.updateStats(firstValue); - assertEquals(false, longStats.isEmpty()); - longStats.updateStats(secondValue); - assertEquals(false, longStats.isEmpty()); - assertEquals(secondValue, (long) longStats.getMax()); - assertEquals(firstValue, (long) longStats.getMin()); - assertEquals(firstValue, (long) longStats.getFirst()); - assertEquals(firstValue + secondValue, (long) longStats.getSum()); - assertEquals(secondValue, (long) longStats.getLast()); - } + @Test + public void testUpdate() { + Statistics longStats = new LongStatistics(); + assertEquals(true, longStats.isEmpty()); + long firstValue = -120985402913209l; + long secondValue = 1251465332132513l; + longStats.updateStats(firstValue); + assertEquals(false, longStats.isEmpty()); + longStats.updateStats(secondValue); + assertEquals(false, longStats.isEmpty()); + assertEquals(secondValue, (long) longStats.getMax()); + assertEquals(firstValue, (long) longStats.getMin()); + assertEquals(firstValue, (long) 
longStats.getFirst()); + assertEquals(firstValue + secondValue, (long) longStats.getSum()); + assertEquals(secondValue, (long) longStats.getLast()); + } - @Test - public void testMerge() { - Statistics longStats1 = new LongStatistics(); - Statistics longStats2 = new LongStatistics(); - assertEquals(true, longStats1.isEmpty()); - assertEquals(true, longStats2.isEmpty()); - long max1 = 100000000000l; - long max2 = 200000000000l; - longStats1.updateStats(1l); - longStats1.updateStats(max1); - longStats2.updateStats(max2); + @Test + public void testMerge() { + Statistics longStats1 = new LongStatistics(); + Statistics longStats2 = new LongStatistics(); + assertEquals(true, longStats1.isEmpty()); + assertEquals(true, longStats2.isEmpty()); + long max1 = 100000000000l; + long max2 = 200000000000l; + longStats1.updateStats(1l); + longStats1.updateStats(max1); + longStats2.updateStats(max2); - Statistics longStats3 = new LongStatistics(); - longStats3.mergeStatistics(longStats1); - assertEquals(false, longStats3.isEmpty()); - assertEquals(max1, (long) longStats3.getMax()); - assertEquals(1, (long) longStats3.getMin()); - assertEquals(max1 + 1, (long) longStats3.getSum()); - assertEquals(1, (long) longStats3.getFirst()); - assertEquals(max1, (long) longStats3.getLast()); + Statistics longStats3 = new LongStatistics(); + longStats3.mergeStatistics(longStats1); + assertEquals(false, longStats3.isEmpty()); + assertEquals(max1, (long) longStats3.getMax()); + assertEquals(1, (long) longStats3.getMin()); + assertEquals(max1 + 1, (long) longStats3.getSum()); + assertEquals(1, (long) longStats3.getFirst()); + assertEquals(max1, (long) longStats3.getLast()); - longStats3.mergeStatistics(longStats2); - assertEquals(max2, (long) longStats3.getMax()); - assertEquals(1, (long) longStats3.getMin()); - assertEquals(max2 + max1 + 1, (long) longStats3.getSum()); - assertEquals(1, (long) longStats3.getFirst()); - assertEquals(max2, (long) longStats3.getLast()); + longStats3.mergeStatistics(longStats2); + assertEquals(max2, (long) longStats3.getMax()); + assertEquals(1, (long) longStats3.getMin()); + assertEquals(max2 + max1 + 1, (long) longStats3.getSum()); + assertEquals(1, (long) longStats3.getFirst()); + assertEquals(max2, (long) longStats3.getLast()); - // Test mismatch - IntegerStatistics intStats5 = new IntegerStatistics(); - intStats5.updateStats(-10000); - try { - longStats3.mergeStatistics(intStats5); - } catch (StatisticsClassException e) { - // that's true route - } catch (Exception e) { - fail(); - } + // Test mismatch + IntegerStatistics intStats5 = new IntegerStatistics(); + intStats5.updateStats(-10000); + try { + longStats3.mergeStatistics(intStats5); + } catch (StatisticsClassException e) { + // that's true route + } catch (Exception e) { + fail(); + } - assertEquals(max2, (long) longStats3.getMax()); - // if not merge, the min value will not be changed by smaller value in - // intStats5 - assertEquals(1, (long) longStats3.getMin()); - assertEquals(max2 + max1 + 1, (long) longStats3.getSum()); - assertEquals(1, (long) longStats3.getFirst()); - assertEquals(max2, (long) longStats3.getLast()); - } + assertEquals(max2, (long) longStats3.getMax()); + // if not merge, the min value will not be changed by smaller value in + // intStats5 + assertEquals(1, (long) longStats3.getMin()); + assertEquals(max2 + max1 + 1, (long) longStats3.getSum()); + assertEquals(1, (long) longStats3.getFirst()); + assertEquals(max2, (long) longStats3.getLast()); + } } diff --git 
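// ----------------------------------------------------------------------------
// Illustrative sketch (not part of this formatting patch): the update/merge
// pattern that the statistics tests above exercise. It assumes only the API
// already visible in this diff -- updateStats, mergeStatistics, isEmpty and the
// min/max/sum/first/last getters -- and that StatisticsClassException is thrown
// on a data-type mismatch, as LongStatisticsTest#testMerge checks. Class and
// variable names below are illustrative.
// ----------------------------------------------------------------------------
package cn.edu.tsinghua.tsfile.file.metadata.statistics;

public class StatisticsUsageSketch {
  public static void main(String[] args) {
    // collect per-page statistics point by point
    Statistics pageStats = new LongStatistics();
    pageStats.updateStats(42L); // first point initializes min/max/first/last
    pageStats.updateStats(7L);  // later points refine min/max/sum/last

    // fold the page digest into a chunk-level digest
    Statistics chunkStats = new LongStatistics();
    chunkStats.mergeStatistics(pageStats);
    System.out.println("min=" + (long) chunkStats.getMin()
        + " max=" + (long) chunkStats.getMax()
        + " sum=" + (long) chunkStats.getSum());

    // merging statistics of a different data type is rejected; the values
    // accumulated so far are left untouched
    try {
      chunkStats.mergeStatistics(new IntegerStatistics());
    } catch (StatisticsClassException e) {
      // expected path for mismatched types
    }
  }
}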
a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/StringStatisticsTest.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/StringStatisticsTest.java index 747f816e..b8533ca0 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/StringStatisticsTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/statistics/StringStatisticsTest.java @@ -2,50 +2,49 @@ import cn.edu.tsinghua.tsfile.common.utils.Binary; import org.junit.Test; - import static org.junit.Assert.assertEquals; public class StringStatisticsTest { - private static final double maxError = 0.0001d; - - @Test - public void testUpdate() { - Statistics binaryStats = new BinaryStatistics(); - binaryStats.updateStats(new Binary("aaa")); - assertEquals(false, binaryStats.isEmpty()); - binaryStats.updateStats(new Binary("bbb")); - assertEquals(false, binaryStats.isEmpty()); - assertEquals("bbb", binaryStats.getMax().getStringValue()); - assertEquals("aaa", binaryStats.getMin().getStringValue()); - assertEquals(0, binaryStats.getSum(), maxError); - assertEquals("aaa", binaryStats.getFirst().getStringValue()); - assertEquals("bbb", binaryStats.getLast().getStringValue()); - } - - @Test - public void testMerge() { - Statistics stringStats1 = new BinaryStatistics(); - Statistics stringStats2 = new BinaryStatistics(); - - stringStats1.updateStats(new Binary("aaa")); - stringStats1.updateStats(new Binary("ccc")); - - stringStats2.updateStats(new Binary("ddd")); - - Statistics stringStats3 = new BinaryStatistics(); - stringStats3.mergeStatistics(stringStats1); - assertEquals(false, stringStats3.isEmpty()); - assertEquals("ccc", (String) stringStats3.getMax().getStringValue()); - assertEquals("aaa", (String) stringStats3.getMin().getStringValue()); - assertEquals(0, stringStats3.getSum(), maxError); - assertEquals("aaa", (String) stringStats3.getFirst().getStringValue()); - assertEquals("ccc", stringStats3.getLast().getStringValue()); - - stringStats3.mergeStatistics(stringStats2); - assertEquals("ddd", (String) stringStats3.getMax().getStringValue()); - assertEquals("aaa", (String) stringStats3.getMin().getStringValue()); - assertEquals(0, stringStats3.getSum(), maxError); - assertEquals("aaa", (String) stringStats3.getFirst().getStringValue()); - assertEquals("ddd", stringStats3.getLast().getStringValue()); - } + private static final double maxError = 0.0001d; + + @Test + public void testUpdate() { + Statistics binaryStats = new BinaryStatistics(); + binaryStats.updateStats(new Binary("aaa")); + assertEquals(false, binaryStats.isEmpty()); + binaryStats.updateStats(new Binary("bbb")); + assertEquals(false, binaryStats.isEmpty()); + assertEquals("bbb", binaryStats.getMax().getStringValue()); + assertEquals("aaa", binaryStats.getMin().getStringValue()); + assertEquals(0, binaryStats.getSum(), maxError); + assertEquals("aaa", binaryStats.getFirst().getStringValue()); + assertEquals("bbb", binaryStats.getLast().getStringValue()); + } + + @Test + public void testMerge() { + Statistics stringStats1 = new BinaryStatistics(); + Statistics stringStats2 = new BinaryStatistics(); + + stringStats1.updateStats(new Binary("aaa")); + stringStats1.updateStats(new Binary("ccc")); + + stringStats2.updateStats(new Binary("ddd")); + + Statistics stringStats3 = new BinaryStatistics(); + stringStats3.mergeStatistics(stringStats1); + assertEquals(false, stringStats3.isEmpty()); + assertEquals("ccc", (String) stringStats3.getMax().getStringValue()); + assertEquals("aaa", (String) 
stringStats3.getMin().getStringValue()); + assertEquals(0, stringStats3.getSum(), maxError); + assertEquals("aaa", (String) stringStats3.getFirst().getStringValue()); + assertEquals("ccc", stringStats3.getLast().getStringValue()); + + stringStats3.mergeStatistics(stringStats2); + assertEquals("ddd", (String) stringStats3.getMax().getStringValue()); + assertEquals("aaa", (String) stringStats3.getMin().getStringValue()); + assertEquals(0, stringStats3.getSum(), maxError); + assertEquals("aaa", (String) stringStats3.getFirst().getStringValue()); + assertEquals("ddd", stringStats3.getLast().getStringValue()); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/utils/TestHelper.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/utils/TestHelper.java index 5c2b9b11..eba03bc5 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/utils/TestHelper.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/utils/TestHelper.java @@ -6,7 +6,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - import cn.edu.tsinghua.tsfile.file.metadata.enums.TSChunkType; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSFreqType; import cn.edu.tsinghua.tsfile.file.metadata.enums.CompressionTypeName; @@ -33,11 +32,11 @@ import cn.edu.tsinghua.tsfile.format.ValueInTimeSeriesChunkMetaData; public class TestHelper { - private static final String MAX_VALUE = "321"; - private static final String MIN_VALUE = "123"; - private static final String SUM_VALUE = "321123"; - private static final String FIRST_VALUE = "1"; - private static final String LAST_VALUE = "222"; + private static final String MAX_VALUE = "321"; + private static final String MIN_VALUE = "123"; + private static final String SUM_VALUE = "321123"; + private static final String FIRST_VALUE = "1"; + private static final String LAST_VALUE = "222"; public static RowGroupMetaData createSimpleRowGroupMetaDataInTSF() throws UnsupportedEncodingException { @@ -55,7 +54,8 @@ public static cn.edu.tsinghua.tsfile.format.RowGroupMetaData createSimpleRowGrou cn.edu.tsinghua.tsfile.format.RowGroupMetaData rowGroupMetaData = new cn.edu.tsinghua.tsfile.format.RowGroupMetaData(new ArrayList<>(), RowGroupMetaDataTest.DELTA_OBJECT_UID, RowGroupMetaDataTest.TOTAL_BYTE_SIZE, - RowGroupMetaDataTest.MAX_NUM_ROWS, RowGroupMetaDataTest.DELTA_OBJECT_TYPE, System.currentTimeMillis()); + RowGroupMetaDataTest.MAX_NUM_ROWS, RowGroupMetaDataTest.DELTA_OBJECT_TYPE, + System.currentTimeMillis()); rowGroupMetaData.setFile_path(RowGroupMetaDataTest.FILE_PATH); rowGroupMetaData.setTsc_metadata(new ArrayList<>()); rowGroupMetaData.getTsc_metadata() @@ -77,9 +77,11 @@ public static TimeSeriesChunkMetaData createSimpleTimeSeriesChunkMetaDataInTSF() metaData.setDataPageOffset(TimeSeriesChunkMetaDataTest.DATA_PAGE_OFFSET); metaData.setDictionaryPageOffset(TimeSeriesChunkMetaDataTest.DICTIONARY_PAGE_OFFSET); metaData.setIndexPageOffset(TimeSeriesChunkMetaDataTest.INDEX_PAGE_OFFSET); - metaData.setTInTimeSeriesChunkMetaData(TestHelper.createT2inTSF(TSDataType.BOOLEAN, - TSFreqType.IRREGULAR_FREQ, null, TInTimeSeriesChunkMetaDataTest.startTime, TInTimeSeriesChunkMetaDataTest.endTime)); - metaData.setVInTimeSeriesChunkMetaData(TestHelper.createSimpleV2InTSF(TSDataType.BOOLEAN, new TsDigest())); + metaData.setTInTimeSeriesChunkMetaData( + TestHelper.createT2inTSF(TSDataType.BOOLEAN, TSFreqType.IRREGULAR_FREQ, null, + TInTimeSeriesChunkMetaDataTest.startTime, TInTimeSeriesChunkMetaDataTest.endTime)); + metaData.setVInTimeSeriesChunkMetaData( + 
TestHelper.createSimpleV2InTSF(TSDataType.BOOLEAN, new TsDigest())); return metaData; } @@ -97,7 +99,8 @@ public static cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData createSimple metaData.setIndex_page_offset(TimeSeriesChunkMetaDataTest.INDEX_PAGE_OFFSET); metaData.setTime_tsc(TestHelper.createT2inThrift(DataType.BOOLEAN, FreqType.IRREGULAR_FREQ, null, TInTimeSeriesChunkMetaDataTest.startTime, TInTimeSeriesChunkMetaDataTest.endTime)); - metaData.setValue_tsc(TestHelper.createSimpleV2InThrift(DataType.BOOLEAN, createSimpleDigest())); + metaData + .setValue_tsc(TestHelper.createSimpleV2InThrift(DataType.BOOLEAN, createSimpleDigest())); return metaData; } @@ -121,8 +124,8 @@ public static TimeSeriesMetadata createSimpleTimeSeriesInTSF() { } public static TimeSeries createSimpleTimeSeriesInThrift() { - TimeSeries timeSeries = new TimeSeries(TimeSeriesMetadataTest.measurementUID, - DataType.TEXT, ""); + TimeSeries timeSeries = + new TimeSeries(TimeSeriesMetadataTest.measurementUID, DataType.TEXT, ""); timeSeries.setFreq_type(FreqType.MULTI_FREQ); timeSeries.setType_length(TimeSeriesMetadataTest.typeLength); List frequencies = new ArrayList(); @@ -140,7 +143,7 @@ public static TimeSeries createSimpleTimeSeriesInThrift() { } public static TInTimeSeriesChunkMetaData createT1inTSF(TSDataType dataType, long startTime, - long endTime) { + long endTime) { TInTimeSeriesChunkMetaData metaData = new TInTimeSeriesChunkMetaData(dataType, startTime, endTime); return metaData; @@ -279,7 +282,8 @@ public static ValueInTimeSeriesChunkMetaData createSimpleV1InThrift(DataType dat return metaData; } - public static VInTimeSeriesChunkMetaData createSimpleV2InTSF(TSDataType dataType, TsDigest digest) throws UnsupportedEncodingException { + public static VInTimeSeriesChunkMetaData createSimpleV2InTSF(TSDataType dataType, TsDigest digest) + throws UnsupportedEncodingException { VInTimeSeriesChunkMetaData metaData = new VInTimeSeriesChunkMetaData(dataType); metaData.setMaxError(VInTimeSeriesChunkMetaDataTest.MAX_ERROR); metaData.setDigest(digest); @@ -300,27 +304,27 @@ public static VInTimeSeriesChunkMetaData createSimpleV1InTSF(TSDataType dataType metaData.setDigest(digest); return metaData; } - + public static TsDigest createSimpleTsDigest() { - TsDigest digest = new TsDigest(); - digest.addStatistics("max", ByteBuffer.wrap(BytesUtils.StringToBytes(MAX_VALUE))); - digest.addStatistics("min", ByteBuffer.wrap(BytesUtils.StringToBytes(MIN_VALUE))); - digest.addStatistics("sum", ByteBuffer.wrap(BytesUtils.StringToBytes(SUM_VALUE))); - digest.addStatistics("first", ByteBuffer.wrap(BytesUtils.StringToBytes(FIRST_VALUE))); - digest.addStatistics("last", ByteBuffer.wrap(BytesUtils.StringToBytes(LAST_VALUE))); - return digest; + TsDigest digest = new TsDigest(); + digest.addStatistics("max", ByteBuffer.wrap(BytesUtils.StringToBytes(MAX_VALUE))); + digest.addStatistics("min", ByteBuffer.wrap(BytesUtils.StringToBytes(MIN_VALUE))); + digest.addStatistics("sum", ByteBuffer.wrap(BytesUtils.StringToBytes(SUM_VALUE))); + digest.addStatistics("first", ByteBuffer.wrap(BytesUtils.StringToBytes(FIRST_VALUE))); + digest.addStatistics("last", ByteBuffer.wrap(BytesUtils.StringToBytes(LAST_VALUE))); + return digest; } - + public static Digest createSimpleDigest() { - Digest digest = new Digest(); - Map statistics = new HashMap<>(); - digest.setStatistics(statistics); - digest.getStatistics().put("max", ByteBuffer.wrap(BytesUtils.StringToBytes(MAX_VALUE))); - digest.getStatistics().put("min", 
ByteBuffer.wrap(BytesUtils.StringToBytes(MIN_VALUE))); - digest.getStatistics().put("sum", ByteBuffer.wrap(BytesUtils.StringToBytes(SUM_VALUE))); - digest.getStatistics().put("first", ByteBuffer.wrap(BytesUtils.StringToBytes(FIRST_VALUE))); - digest.getStatistics().put("last", ByteBuffer.wrap(BytesUtils.StringToBytes(LAST_VALUE))); - return digest; + Digest digest = new Digest(); + Map statistics = new HashMap<>(); + digest.setStatistics(statistics); + digest.getStatistics().put("max", ByteBuffer.wrap(BytesUtils.StringToBytes(MAX_VALUE))); + digest.getStatistics().put("min", ByteBuffer.wrap(BytesUtils.StringToBytes(MIN_VALUE))); + digest.getStatistics().put("sum", ByteBuffer.wrap(BytesUtils.StringToBytes(SUM_VALUE))); + digest.getStatistics().put("first", ByteBuffer.wrap(BytesUtils.StringToBytes(FIRST_VALUE))); + digest.getStatistics().put("last", ByteBuffer.wrap(BytesUtils.StringToBytes(LAST_VALUE))); + return digest; } public static List getJSONArray() { diff --git a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/utils/Utils.java b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/utils/Utils.java index 2aaeeb90..f7dd5641 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/utils/Utils.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/file/metadata/utils/Utils.java @@ -1,287 +1,312 @@ -package cn.edu.tsinghua.tsfile.file.metadata.utils; - -import static org.junit.Assert.*; - -import java.nio.ByteBuffer; -import java.util.List; -import java.util.Map; - -import cn.edu.tsinghua.tsfile.file.metadata.TsFileMetaData; -import cn.edu.tsinghua.tsfile.file.metadata.TsRowGroupBlockMetaData; -import cn.edu.tsinghua.tsfile.file.metadata.TimeSeriesChunkMetaData; -import cn.edu.tsinghua.tsfile.file.metadata.VInTimeSeriesChunkMetaData; -import cn.edu.tsinghua.tsfile.file.metadata.TInTimeSeriesChunkMetaData; -import cn.edu.tsinghua.tsfile.format.TimeInTimeSeriesChunkMetaData; -import cn.edu.tsinghua.tsfile.format.TimeSeries; -import cn.edu.tsinghua.tsfile.format.ValueInTimeSeriesChunkMetaData; -import cn.edu.tsinghua.tsfile.file.metadata.RowGroupMetaData; -import cn.edu.tsinghua.tsfile.file.metadata.TimeSeriesMetadata; -import cn.edu.tsinghua.tsfile.file.metadata.TsDeltaObject; -import cn.edu.tsinghua.tsfile.format.DeltaObject; -import cn.edu.tsinghua.tsfile.format.FileMetaData; -import cn.edu.tsinghua.tsfile.format.RowGroupBlockMetaData; - -public class Utils { - public static void isListEqual(List listA, List listB, String name) { - if ((listA == null) ^ (listB == null)) { - System.out.println("error"); - fail(String.format("one of %s is null", name)); - } - if ((listA != null) && (listB != null)) { - if (listA.size() != listB.size()) { - fail(String.format("%s size is different", name)); - } - for (int i = 0; i < listA.size(); i++) { - assertTrue(listA.get(i).equals(listB.get(i))); - } - } - } - - public static void isMapStringEqual(Map mapA, Map mapB, String name) { - if ((mapA == null) ^ (mapB == null)) { - System.out.println("error"); - fail(String.format("one of %s is null", name)); - } - if ((mapA != null) && (mapB != null)) { - if (mapA.size() != mapB.size()) { - fail(String.format("%s size is different", name)); - } - for (String key : mapA.keySet()) { - assertTrue(mapA.get(key).equals(mapB.get(key))); - } - } - } - - public static void isMapBufferEqual(Map mapA, Map mapB, String name) { - if ((mapA == null) ^ (mapB == null)) { - System.out.println("error"); - fail(String.format("one of %s is null", name)); - } - if ((mapA != null) && (mapB != null)) { - if (mapA.size() != 
mapB.size()) { - fail(String.format("%s size is different", name)); - } - for (String key : mapB.keySet()) { - ByteBuffer b = mapB.get(key); - ByteBuffer a = mapA.get(key); - assertTrue(b.equals(a)); - } - } -} - - - /** - * when one of A and B is Null, A != B, so test case fails. - * - * @param objectA - * @param objectB - * @param name - * @return false - A and B both are NULL, so we do not need to check whether - * their members are equal - * @return true - A and B both are not NULL, so we need to check their members - */ - public static boolean isTwoObjectsNotNULL(Object objectA, Object objectB, String name) { - if ((objectA == null) && (objectB == null)) - return false; - if ((objectA == null) ^ (objectB == null)) - fail(String.format("one of %s is null", name)); - return true; - } - - public static void isStringSame(Object str1, Object str2, String name) { - if ((str1 == null) && (str2 == null)) - return; - if ((str1 == null) ^ (str2 == null)) - fail(String.format("one of %s string is null", name)); - assertTrue(str1.toString().equals(str2.toString())); - } - - public static void isTimeSeriesEqual(TimeSeriesMetadata timeSeriesInTSF, TimeSeries timeSeriesInThrift) { - if (Utils.isTwoObjectsNotNULL(timeSeriesInTSF.getMeasurementUID(), timeSeriesInThrift.getMeasurement_uid(), - "sensorUID")) { - assertTrue(timeSeriesInTSF.getMeasurementUID().equals(timeSeriesInThrift.getMeasurement_uid())); - } - assertTrue(timeSeriesInTSF.getTypeLength() == timeSeriesInThrift.getType_length()); - if (Utils.isTwoObjectsNotNULL(timeSeriesInTSF.getType(), timeSeriesInThrift.getType(), "data type")) { - assertTrue(timeSeriesInTSF.getType().toString() == timeSeriesInThrift.getType().toString()); - } - if (Utils.isTwoObjectsNotNULL(timeSeriesInTSF.getFreqType(), timeSeriesInThrift.getFreq_type(), "freq type")) { - assertTrue(timeSeriesInTSF.getFreqType().toString() == timeSeriesInThrift.getFreq_type().toString()); - } - - Utils.isListEqual(timeSeriesInTSF.getFrequencies(), timeSeriesInThrift.getFrequencies(), "frequencies"); - Utils.isListEqual(timeSeriesInTSF.getEnumValues(), timeSeriesInThrift.getEnum_values(), "data values"); - } - - public static void isTimeSeriesListEqual(List timeSeriesInTSF, - List timeSeriesInThrift) { - if (timeSeriesInTSF == null && timeSeriesInThrift == null) - return; - - if (timeSeriesInTSF == null && timeSeriesInThrift == null) - return; - if ((timeSeriesInTSF == null) ^ (timeSeriesInThrift == null)) - fail("one list is null"); - if (timeSeriesInThrift.size() != timeSeriesInTSF.size()) - fail("list size is different"); - for (int i = 0; i < timeSeriesInThrift.size(); i++) { - isTimeSeriesEqual(timeSeriesInTSF.get(i), timeSeriesInThrift.get(i)); - } - } - - public static void isTSeriesChunkMetadataEqual(TInTimeSeriesChunkMetaData tSeriesMetaData, - TimeInTimeSeriesChunkMetaData timeInTimeSeriesChunkMetaData) { - if (Utils.isTwoObjectsNotNULL(tSeriesMetaData, timeInTimeSeriesChunkMetaData, - "TimeInTimeSeriesChunkMetaData")) { - Utils.isStringSame(tSeriesMetaData.getDataType(), timeInTimeSeriesChunkMetaData.getData_type(), - "data type"); - Utils.isStringSame(tSeriesMetaData.getFreqType(), timeInTimeSeriesChunkMetaData.getFreq_type(), - "freq type"); - assertTrue(tSeriesMetaData.getStartTime() == timeInTimeSeriesChunkMetaData.getStartime()); - assertTrue(tSeriesMetaData.getEndTime() == timeInTimeSeriesChunkMetaData.getEndtime()); - Utils.isListEqual(tSeriesMetaData.getFrequencies(), timeInTimeSeriesChunkMetaData.getFrequencies(), - "frequencies"); - 
Utils.isListEqual(tSeriesMetaData.getEnumValues(), timeInTimeSeriesChunkMetaData.getEnum_values(), - "data values"); - } - } - - public static void isDeltaObjectEqual(TsDeltaObject deltaObjectInTSF, DeltaObject deltaObjectInTHrift) { - if (Utils.isTwoObjectsNotNULL(deltaObjectInTSF, deltaObjectInTHrift, "Delta object")) { - assertTrue(deltaObjectInTSF.offset == deltaObjectInTHrift.getOffset()); - assertTrue(deltaObjectInTSF.metadataBlockSize == deltaObjectInTHrift.getMetadata_block_size()); - assertTrue(deltaObjectInTSF.startTime == deltaObjectInTHrift.getStart_time()); - assertTrue(deltaObjectInTSF.endTime == deltaObjectInTHrift.getEnd_time()); - } - } - - public static void isVSeriesChunkMetadataEqual(VInTimeSeriesChunkMetaData vSeriesMetaData, - ValueInTimeSeriesChunkMetaData valueInTimeSeriesChunkMetaData) { - if (Utils.isTwoObjectsNotNULL(vSeriesMetaData, valueInTimeSeriesChunkMetaData, - "ValueInTimeSeriesChunkMetaData")) { - assertTrue(vSeriesMetaData.getMaxError() == valueInTimeSeriesChunkMetaData.getMax_error()); - assertTrue(vSeriesMetaData.getDataType().toString() - .equals(valueInTimeSeriesChunkMetaData.getData_type().toString())); - if (Utils.isTwoObjectsNotNULL(vSeriesMetaData.getDigest(), valueInTimeSeriesChunkMetaData.getDigest(), - "Digest")) { - Utils.isMapBufferEqual(vSeriesMetaData.getDigest().getStatistics(), - valueInTimeSeriesChunkMetaData.getDigest().getStatistics(), - "Diges statistics map"); - } - Utils.isListEqual(vSeriesMetaData.getEnumValues(), valueInTimeSeriesChunkMetaData.getEnum_values(), - "data values"); - } - } - - public static void isTimeSeriesChunkMetaDataEqual(TimeSeriesChunkMetaData timeSeriesChunkMetaDataInTSF, - cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData timeSeriesChunkMetaDataInThrift) { - if (Utils.isTwoObjectsNotNULL(timeSeriesChunkMetaDataInTSF, timeSeriesChunkMetaDataInThrift, - "TimeSeriesChunkMetaData")) { - assertTrue(timeSeriesChunkMetaDataInTSF.getProperties().getMeasurementUID() - .equals(timeSeriesChunkMetaDataInThrift.getMeasurement_uid())); - assertTrue(timeSeriesChunkMetaDataInTSF.getProperties().getTsChunkType().toString() - .equals(timeSeriesChunkMetaDataInThrift.getTimeseries_chunk_type().toString())); - assertTrue(timeSeriesChunkMetaDataInTSF.getProperties().getFileOffset() == timeSeriesChunkMetaDataInThrift - .getFile_offset()); - assertTrue(timeSeriesChunkMetaDataInTSF.getProperties().getCompression().toString() - .equals(timeSeriesChunkMetaDataInThrift.getCompression_type().toString())); - - assertTrue(timeSeriesChunkMetaDataInTSF.getNumRows() == timeSeriesChunkMetaDataInThrift.getNum_rows()); - assertTrue(timeSeriesChunkMetaDataInTSF.getTotalByteSize() == timeSeriesChunkMetaDataInThrift - .getTotal_byte_size()); - assertTrue(timeSeriesChunkMetaDataInTSF.getDataPageOffset() == timeSeriesChunkMetaDataInThrift - .getData_page_offset()); - assertTrue(timeSeriesChunkMetaDataInTSF.getDictionaryPageOffset() == timeSeriesChunkMetaDataInThrift - .getDictionary_page_offset()); - assertTrue(timeSeriesChunkMetaDataInTSF.getIndexPageOffset() == timeSeriesChunkMetaDataInThrift - .getIndex_page_offset()); - Utils.isListEqual(timeSeriesChunkMetaDataInTSF.getJsonMetaData(), - timeSeriesChunkMetaDataInThrift.getJson_metadata(), "json metadata"); - - Utils.isTSeriesChunkMetadataEqual(timeSeriesChunkMetaDataInTSF.getTInTimeSeriesChunkMetaData(), - timeSeriesChunkMetaDataInThrift.getTime_tsc()); - Utils.isVSeriesChunkMetadataEqual(timeSeriesChunkMetaDataInTSF.getVInTimeSeriesChunkMetaData(), - 
timeSeriesChunkMetaDataInThrift.getValue_tsc()); - } - } - - public static void isRowGroupMetaDataEqual(RowGroupMetaData rowGroupMetaDataInTSF, - cn.edu.tsinghua.tsfile.format.RowGroupMetaData rowGroupMetaDataInThrift) { - if (Utils.isTwoObjectsNotNULL(rowGroupMetaDataInTSF, rowGroupMetaDataInThrift, "RowGroupMetaData")) { - assertTrue(rowGroupMetaDataInTSF.getDeltaObjectID().equals(rowGroupMetaDataInThrift.getDelta_object_id())); - assertTrue( - rowGroupMetaDataInTSF.getDeltaObjectType().equals(rowGroupMetaDataInThrift.getDelta_object_type())); - assertTrue(rowGroupMetaDataInTSF.getTotalByteSize() == rowGroupMetaDataInThrift.getTotal_byte_size()); - assertTrue(rowGroupMetaDataInTSF.getNumOfRows() == rowGroupMetaDataInThrift.getMax_num_rows()); - - if (Utils.isTwoObjectsNotNULL(rowGroupMetaDataInTSF.getPath(), rowGroupMetaDataInThrift.getFile_path(), - "Row group metadata file path")) { - assertTrue(rowGroupMetaDataInTSF.getPath().equals(rowGroupMetaDataInThrift.getFile_path())); - } - - if (Utils.isTwoObjectsNotNULL(rowGroupMetaDataInTSF.getMetaDatas(), - rowGroupMetaDataInThrift.getTsc_metadata(), "TimeSeriesChunkMetaData List")) { - List listTSF = rowGroupMetaDataInTSF.getMetaDatas(); - List listThrift = rowGroupMetaDataInThrift - .getTsc_metadata(); - - if (listTSF.size() != listThrift.size()) { - fail("TimeSeriesGroupMetaData List size is different"); - } - - for (int i = 0; i < listTSF.size(); i++) { - Utils.isTimeSeriesChunkMetaDataEqual(listTSF.get(i), listThrift.get(i)); - } - } - } - } - - public static void isRowGroupBlockMetadataEqual(TsRowGroupBlockMetaData rowGroupBlockMetaDataInTSF, - RowGroupBlockMetaData rowGroupBlockMetaDataInThrift) { - if (Utils.isTwoObjectsNotNULL(rowGroupBlockMetaDataInTSF, rowGroupBlockMetaDataInThrift, - "RowGroupBlockMetaData")) { - if (Utils.isTwoObjectsNotNULL(rowGroupBlockMetaDataInTSF.getRowGroups(), - rowGroupBlockMetaDataInThrift.getRow_groups_metadata(), "Row Group List")) { - List listTSF = rowGroupBlockMetaDataInTSF.getRowGroups(); - List listThrift = rowGroupBlockMetaDataInThrift - .getRow_groups_metadata(); - if (listTSF.size() != listThrift.size()) { - fail("TimeSeriesGroupMetaData List size is different"); - } - // long maxNumRows = 0; - for (int i = 0; i < listTSF.size(); i++) { - Utils.isRowGroupMetaDataEqual(listTSF.get(i), listThrift.get(i)); - // maxNumRows += listTSF.get(i).getNumOfRows(); - } - Utils.isStringSame(rowGroupBlockMetaDataInTSF.getDeltaObjectID(), rowGroupBlockMetaDataInThrift.getDelta_object_id(), "delta object id"); - } - } - } - - public static void isFileMetaDataEqual(TsFileMetaData fileMetaDataInTSF, FileMetaData fileMetaDataInThrift) { - if (Utils.isTwoObjectsNotNULL(fileMetaDataInTSF, fileMetaDataInThrift, "File MetaData")) { - assertEquals(fileMetaDataInThrift.version, fileMetaDataInTSF.getCurrentVersion()); - assertEquals(fileMetaDataInThrift.getCreated_by(), fileMetaDataInTSF.getCreatedBy()); - Utils.isTimeSeriesListEqual(fileMetaDataInTSF.getTimeSeriesList(), fileMetaDataInThrift.getTimeseries_list()); - Utils.isListEqual(fileMetaDataInTSF.getJsonMetaData(), fileMetaDataInThrift.getJson_metadata(), "json metadata"); - if (Utils.isTwoObjectsNotNULL(fileMetaDataInTSF.getProps(), fileMetaDataInThrift.getProperties(), "user specified properties")) { - Utils.isMapStringEqual(fileMetaDataInTSF.getProps(), fileMetaDataInThrift.getProperties(), "Filemetadata properties"); - } - if(Utils.isTwoObjectsNotNULL(fileMetaDataInTSF.getDeltaObjectMap(), fileMetaDataInThrift.getDelta_object_map(), "delta object map")) { - 
Map mapInTSF = fileMetaDataInTSF.getDeltaObjectMap(); - Map mapInThrift = fileMetaDataInThrift.getDelta_object_map(); - if(mapInThrift.size() == mapInTSF.size()) { - for(String key: mapInTSF.keySet()) { - if(mapInThrift.containsKey(key)) { - isDeltaObjectEqual(mapInTSF.get(key), mapInThrift.get(key)); - } else { - fail(String.format("delta object map in thrift does not contain key %s", key)); - } - } - } else { - fail(String.format("%s size is different", "delta object map")); - } - } - } - } -} +package cn.edu.tsinghua.tsfile.file.metadata.utils; + +import static org.junit.Assert.*; +import java.nio.ByteBuffer; +import java.util.List; +import java.util.Map; +import cn.edu.tsinghua.tsfile.file.metadata.TsFileMetaData; +import cn.edu.tsinghua.tsfile.file.metadata.TsRowGroupBlockMetaData; +import cn.edu.tsinghua.tsfile.file.metadata.TimeSeriesChunkMetaData; +import cn.edu.tsinghua.tsfile.file.metadata.VInTimeSeriesChunkMetaData; +import cn.edu.tsinghua.tsfile.file.metadata.TInTimeSeriesChunkMetaData; +import cn.edu.tsinghua.tsfile.format.TimeInTimeSeriesChunkMetaData; +import cn.edu.tsinghua.tsfile.format.TimeSeries; +import cn.edu.tsinghua.tsfile.format.ValueInTimeSeriesChunkMetaData; +import cn.edu.tsinghua.tsfile.file.metadata.RowGroupMetaData; +import cn.edu.tsinghua.tsfile.file.metadata.TimeSeriesMetadata; +import cn.edu.tsinghua.tsfile.file.metadata.TsDeltaObject; +import cn.edu.tsinghua.tsfile.format.DeltaObject; +import cn.edu.tsinghua.tsfile.format.FileMetaData; +import cn.edu.tsinghua.tsfile.format.RowGroupBlockMetaData; + +public class Utils { + public static void isListEqual(List listA, List listB, String name) { + if ((listA == null) ^ (listB == null)) { + System.out.println("error"); + fail(String.format("one of %s is null", name)); + } + if ((listA != null) && (listB != null)) { + if (listA.size() != listB.size()) { + fail(String.format("%s size is different", name)); + } + for (int i = 0; i < listA.size(); i++) { + assertTrue(listA.get(i).equals(listB.get(i))); + } + } + } + + public static void isMapStringEqual(Map mapA, Map mapB, + String name) { + if ((mapA == null) ^ (mapB == null)) { + System.out.println("error"); + fail(String.format("one of %s is null", name)); + } + if ((mapA != null) && (mapB != null)) { + if (mapA.size() != mapB.size()) { + fail(String.format("%s size is different", name)); + } + for (String key : mapA.keySet()) { + assertTrue(mapA.get(key).equals(mapB.get(key))); + } + } + } + + public static void isMapBufferEqual(Map mapA, Map mapB, + String name) { + if ((mapA == null) ^ (mapB == null)) { + System.out.println("error"); + fail(String.format("one of %s is null", name)); + } + if ((mapA != null) && (mapB != null)) { + if (mapA.size() != mapB.size()) { + fail(String.format("%s size is different", name)); + } + for (String key : mapB.keySet()) { + ByteBuffer b = mapB.get(key); + ByteBuffer a = mapA.get(key); + assertTrue(b.equals(a)); + } + } + } + + + /** + * when one of A and B is Null, A != B, so test case fails. 
+ * + * @param objectA + * @param objectB + * @param name + * @return false - A and B both are NULL, so we do not need to check whether their members are + * equal + * @return true - A and B both are not NULL, so we need to check their members + */ + public static boolean isTwoObjectsNotNULL(Object objectA, Object objectB, String name) { + if ((objectA == null) && (objectB == null)) + return false; + if ((objectA == null) ^ (objectB == null)) + fail(String.format("one of %s is null", name)); + return true; + } + + public static void isStringSame(Object str1, Object str2, String name) { + if ((str1 == null) && (str2 == null)) + return; + if ((str1 == null) ^ (str2 == null)) + fail(String.format("one of %s string is null", name)); + assertTrue(str1.toString().equals(str2.toString())); + } + + public static void isTimeSeriesEqual(TimeSeriesMetadata timeSeriesInTSF, + TimeSeries timeSeriesInThrift) { + if (Utils.isTwoObjectsNotNULL(timeSeriesInTSF.getMeasurementUID(), + timeSeriesInThrift.getMeasurement_uid(), "sensorUID")) { + assertTrue( + timeSeriesInTSF.getMeasurementUID().equals(timeSeriesInThrift.getMeasurement_uid())); + } + assertTrue(timeSeriesInTSF.getTypeLength() == timeSeriesInThrift.getType_length()); + if (Utils.isTwoObjectsNotNULL(timeSeriesInTSF.getType(), timeSeriesInThrift.getType(), + "data type")) { + assertTrue(timeSeriesInTSF.getType().toString() == timeSeriesInThrift.getType().toString()); + } + if (Utils.isTwoObjectsNotNULL(timeSeriesInTSF.getFreqType(), timeSeriesInThrift.getFreq_type(), + "freq type")) { + assertTrue( + timeSeriesInTSF.getFreqType().toString() == timeSeriesInThrift.getFreq_type().toString()); + } + + Utils.isListEqual(timeSeriesInTSF.getFrequencies(), timeSeriesInThrift.getFrequencies(), + "frequencies"); + Utils.isListEqual(timeSeriesInTSF.getEnumValues(), timeSeriesInThrift.getEnum_values(), + "data values"); + } + + public static void isTimeSeriesListEqual(List timeSeriesInTSF, + List timeSeriesInThrift) { + if (timeSeriesInTSF == null && timeSeriesInThrift == null) + return; + + if (timeSeriesInTSF == null && timeSeriesInThrift == null) + return; + if ((timeSeriesInTSF == null) ^ (timeSeriesInThrift == null)) + fail("one list is null"); + if (timeSeriesInThrift.size() != timeSeriesInTSF.size()) + fail("list size is different"); + for (int i = 0; i < timeSeriesInThrift.size(); i++) { + isTimeSeriesEqual(timeSeriesInTSF.get(i), timeSeriesInThrift.get(i)); + } + } + + public static void isTSeriesChunkMetadataEqual(TInTimeSeriesChunkMetaData tSeriesMetaData, + TimeInTimeSeriesChunkMetaData timeInTimeSeriesChunkMetaData) { + if (Utils.isTwoObjectsNotNULL(tSeriesMetaData, timeInTimeSeriesChunkMetaData, + "TimeInTimeSeriesChunkMetaData")) { + Utils.isStringSame(tSeriesMetaData.getDataType(), + timeInTimeSeriesChunkMetaData.getData_type(), "data type"); + Utils.isStringSame(tSeriesMetaData.getFreqType(), + timeInTimeSeriesChunkMetaData.getFreq_type(), "freq type"); + assertTrue(tSeriesMetaData.getStartTime() == timeInTimeSeriesChunkMetaData.getStartime()); + assertTrue(tSeriesMetaData.getEndTime() == timeInTimeSeriesChunkMetaData.getEndtime()); + Utils.isListEqual(tSeriesMetaData.getFrequencies(), + timeInTimeSeriesChunkMetaData.getFrequencies(), "frequencies"); + Utils.isListEqual(tSeriesMetaData.getEnumValues(), + timeInTimeSeriesChunkMetaData.getEnum_values(), "data values"); + } + } + + public static void isDeltaObjectEqual(TsDeltaObject deltaObjectInTSF, + DeltaObject deltaObjectInTHrift) { + if (Utils.isTwoObjectsNotNULL(deltaObjectInTSF, 
deltaObjectInTHrift, "Delta object")) { + assertTrue(deltaObjectInTSF.offset == deltaObjectInTHrift.getOffset()); + assertTrue( + deltaObjectInTSF.metadataBlockSize == deltaObjectInTHrift.getMetadata_block_size()); + assertTrue(deltaObjectInTSF.startTime == deltaObjectInTHrift.getStart_time()); + assertTrue(deltaObjectInTSF.endTime == deltaObjectInTHrift.getEnd_time()); + } + } + + public static void isVSeriesChunkMetadataEqual(VInTimeSeriesChunkMetaData vSeriesMetaData, + ValueInTimeSeriesChunkMetaData valueInTimeSeriesChunkMetaData) { + if (Utils.isTwoObjectsNotNULL(vSeriesMetaData, valueInTimeSeriesChunkMetaData, + "ValueInTimeSeriesChunkMetaData")) { + assertTrue(vSeriesMetaData.getMaxError() == valueInTimeSeriesChunkMetaData.getMax_error()); + assertTrue(vSeriesMetaData.getDataType().toString() + .equals(valueInTimeSeriesChunkMetaData.getData_type().toString())); + if (Utils.isTwoObjectsNotNULL(vSeriesMetaData.getDigest(), + valueInTimeSeriesChunkMetaData.getDigest(), "Digest")) { + Utils.isMapBufferEqual(vSeriesMetaData.getDigest().getStatistics(), + valueInTimeSeriesChunkMetaData.getDigest().getStatistics(), "Diges statistics map"); + } + Utils.isListEqual(vSeriesMetaData.getEnumValues(), + valueInTimeSeriesChunkMetaData.getEnum_values(), "data values"); + } + } + + public static void isTimeSeriesChunkMetaDataEqual( + TimeSeriesChunkMetaData timeSeriesChunkMetaDataInTSF, + cn.edu.tsinghua.tsfile.format.TimeSeriesChunkMetaData timeSeriesChunkMetaDataInThrift) { + if (Utils.isTwoObjectsNotNULL(timeSeriesChunkMetaDataInTSF, timeSeriesChunkMetaDataInThrift, + "TimeSeriesChunkMetaData")) { + assertTrue(timeSeriesChunkMetaDataInTSF.getProperties().getMeasurementUID() + .equals(timeSeriesChunkMetaDataInThrift.getMeasurement_uid())); + assertTrue(timeSeriesChunkMetaDataInTSF.getProperties().getTsChunkType().toString() + .equals(timeSeriesChunkMetaDataInThrift.getTimeseries_chunk_type().toString())); + assertTrue(timeSeriesChunkMetaDataInTSF.getProperties() + .getFileOffset() == timeSeriesChunkMetaDataInThrift.getFile_offset()); + assertTrue(timeSeriesChunkMetaDataInTSF.getProperties().getCompression().toString() + .equals(timeSeriesChunkMetaDataInThrift.getCompression_type().toString())); + + assertTrue(timeSeriesChunkMetaDataInTSF.getNumRows() == timeSeriesChunkMetaDataInThrift + .getNum_rows()); + assertTrue(timeSeriesChunkMetaDataInTSF.getTotalByteSize() == timeSeriesChunkMetaDataInThrift + .getTotal_byte_size()); + assertTrue(timeSeriesChunkMetaDataInTSF.getDataPageOffset() == timeSeriesChunkMetaDataInThrift + .getData_page_offset()); + assertTrue( + timeSeriesChunkMetaDataInTSF.getDictionaryPageOffset() == timeSeriesChunkMetaDataInThrift + .getDictionary_page_offset()); + assertTrue(timeSeriesChunkMetaDataInTSF + .getIndexPageOffset() == timeSeriesChunkMetaDataInThrift.getIndex_page_offset()); + Utils.isListEqual(timeSeriesChunkMetaDataInTSF.getJsonMetaData(), + timeSeriesChunkMetaDataInThrift.getJson_metadata(), "json metadata"); + + Utils.isTSeriesChunkMetadataEqual( + timeSeriesChunkMetaDataInTSF.getTInTimeSeriesChunkMetaData(), + timeSeriesChunkMetaDataInThrift.getTime_tsc()); + Utils.isVSeriesChunkMetadataEqual( + timeSeriesChunkMetaDataInTSF.getVInTimeSeriesChunkMetaData(), + timeSeriesChunkMetaDataInThrift.getValue_tsc()); + } + } + + public static void isRowGroupMetaDataEqual(RowGroupMetaData rowGroupMetaDataInTSF, + cn.edu.tsinghua.tsfile.format.RowGroupMetaData rowGroupMetaDataInThrift) { + if (Utils.isTwoObjectsNotNULL(rowGroupMetaDataInTSF, rowGroupMetaDataInThrift, + 
"RowGroupMetaData")) { + assertTrue(rowGroupMetaDataInTSF.getDeltaObjectID() + .equals(rowGroupMetaDataInThrift.getDelta_object_id())); + assertTrue(rowGroupMetaDataInTSF.getDeltaObjectType() + .equals(rowGroupMetaDataInThrift.getDelta_object_type())); + assertTrue(rowGroupMetaDataInTSF.getTotalByteSize() == rowGroupMetaDataInThrift + .getTotal_byte_size()); + assertTrue( + rowGroupMetaDataInTSF.getNumOfRows() == rowGroupMetaDataInThrift.getMax_num_rows()); + + if (Utils.isTwoObjectsNotNULL(rowGroupMetaDataInTSF.getPath(), + rowGroupMetaDataInThrift.getFile_path(), "Row group metadata file path")) { + assertTrue(rowGroupMetaDataInTSF.getPath().equals(rowGroupMetaDataInThrift.getFile_path())); + } + + if (Utils.isTwoObjectsNotNULL(rowGroupMetaDataInTSF.getMetaDatas(), + rowGroupMetaDataInThrift.getTsc_metadata(), "TimeSeriesChunkMetaData List")) { + List listTSF = rowGroupMetaDataInTSF.getMetaDatas(); + List listThrift = + rowGroupMetaDataInThrift.getTsc_metadata(); + + if (listTSF.size() != listThrift.size()) { + fail("TimeSeriesGroupMetaData List size is different"); + } + + for (int i = 0; i < listTSF.size(); i++) { + Utils.isTimeSeriesChunkMetaDataEqual(listTSF.get(i), listThrift.get(i)); + } + } + } + } + + public static void isRowGroupBlockMetadataEqual( + TsRowGroupBlockMetaData rowGroupBlockMetaDataInTSF, + RowGroupBlockMetaData rowGroupBlockMetaDataInThrift) { + if (Utils.isTwoObjectsNotNULL(rowGroupBlockMetaDataInTSF, rowGroupBlockMetaDataInThrift, + "RowGroupBlockMetaData")) { + if (Utils.isTwoObjectsNotNULL(rowGroupBlockMetaDataInTSF.getRowGroups(), + rowGroupBlockMetaDataInThrift.getRow_groups_metadata(), "Row Group List")) { + List listTSF = rowGroupBlockMetaDataInTSF.getRowGroups(); + List listThrift = + rowGroupBlockMetaDataInThrift.getRow_groups_metadata(); + if (listTSF.size() != listThrift.size()) { + fail("TimeSeriesGroupMetaData List size is different"); + } + // long maxNumRows = 0; + for (int i = 0; i < listTSF.size(); i++) { + Utils.isRowGroupMetaDataEqual(listTSF.get(i), listThrift.get(i)); + // maxNumRows += listTSF.get(i).getNumOfRows(); + } + Utils.isStringSame(rowGroupBlockMetaDataInTSF.getDeltaObjectID(), + rowGroupBlockMetaDataInThrift.getDelta_object_id(), "delta object id"); + } + } + } + + public static void isFileMetaDataEqual(TsFileMetaData fileMetaDataInTSF, + FileMetaData fileMetaDataInThrift) { + if (Utils.isTwoObjectsNotNULL(fileMetaDataInTSF, fileMetaDataInThrift, "File MetaData")) { + assertEquals(fileMetaDataInThrift.version, fileMetaDataInTSF.getCurrentVersion()); + assertEquals(fileMetaDataInThrift.getCreated_by(), fileMetaDataInTSF.getCreatedBy()); + Utils.isTimeSeriesListEqual(fileMetaDataInTSF.getTimeSeriesList(), + fileMetaDataInThrift.getTimeseries_list()); + Utils.isListEqual(fileMetaDataInTSF.getJsonMetaData(), + fileMetaDataInThrift.getJson_metadata(), "json metadata"); + if (Utils.isTwoObjectsNotNULL(fileMetaDataInTSF.getProps(), + fileMetaDataInThrift.getProperties(), "user specified properties")) { + Utils.isMapStringEqual(fileMetaDataInTSF.getProps(), fileMetaDataInThrift.getProperties(), + "Filemetadata properties"); + } + if (Utils.isTwoObjectsNotNULL(fileMetaDataInTSF.getDeltaObjectMap(), + fileMetaDataInThrift.getDelta_object_map(), "delta object map")) { + Map mapInTSF = fileMetaDataInTSF.getDeltaObjectMap(); + Map mapInThrift = fileMetaDataInThrift.getDelta_object_map(); + if (mapInThrift.size() == mapInTSF.size()) { + for (String key : mapInTSF.keySet()) { + if (mapInThrift.containsKey(key)) { + 
isDeltaObjectEqual(mapInTSF.get(key), mapInThrift.get(key)); + } else { + fail(String.format("delta object map in thrift does not contain key %s", key)); + } + } + } else { + fail(String.format("%s size is different", "delta object map")); + } + } + } + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/TsFileReadTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/TsFileReadTest.java index 79780fb4..f7764c23 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/TsFileReadTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/TsFileReadTest.java @@ -8,70 +8,75 @@ import cn.edu.tsinghua.tsfile.timeseries.read.support.Path; import cn.edu.tsinghua.tsfile.timeseries.read.query.OnePassQueryDataSet; import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; - import java.io.IOException; import java.util.ArrayList; public class TsFileReadTest { - public static void main(String args[]) throws IOException, WriteProcessException { - String path = "src/test/resources/test.ts"; + public static void main(String args[]) throws IOException, WriteProcessException { + String path = "src/test/resources/test.ts"; - // read example : no filter - TsRandomAccessLocalFileReader input = new TsRandomAccessLocalFileReader(path); - TsFile readTsFile = new TsFile(input); - ArrayList paths = new ArrayList<>(); - paths.add(new Path("device_1.sensor_1")); - paths.add(new Path("device_1.sensor_2")); - paths.add(new Path("device_1.sensor_3")); - OnePassQueryDataSet onePassQueryDataSet = readTsFile.query(paths, null, null); - while (onePassQueryDataSet.hasNextRecord()) { - System.out.println(onePassQueryDataSet.getNextRecord()); - } - System.out.println("------------"); + // read example : no filter + TsRandomAccessLocalFileReader input = new TsRandomAccessLocalFileReader(path); + TsFile readTsFile = new TsFile(input); + ArrayList paths = new ArrayList<>(); + paths.add(new Path("device_1.sensor_1")); + paths.add(new Path("device_1.sensor_2")); + paths.add(new Path("device_1.sensor_3")); + OnePassQueryDataSet onePassQueryDataSet = readTsFile.query(paths, null, null); + while (onePassQueryDataSet.hasNextRecord()) { + System.out.println(onePassQueryDataSet.getNextRecord()); + } + System.out.println("------------"); - // time filter : 4 <= time < 10 - FilterExpression timeFilter = FilterFactory.and(FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 4L, true) - , FilterFactory.ltEq(FilterFactory.timeFilterSeries(), 10L, false)); - input = new TsRandomAccessLocalFileReader(path); - readTsFile = new TsFile(input); - paths = new ArrayList<>(); - paths.add(new Path("device_1.sensor_1")); - paths.add(new Path("device_1.sensor_2")); - paths.add(new Path("device_1.sensor_3")); - onePassQueryDataSet = readTsFile.query(paths, timeFilter, null); - while (onePassQueryDataSet.hasNextRecord()) { - System.out.println(onePassQueryDataSet.getNextRecord()); - } - System.out.println("------------"); + // time filter : 4 <= time < 10 + FilterExpression timeFilter = + FilterFactory.and(FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 4L, true), + FilterFactory.ltEq(FilterFactory.timeFilterSeries(), 10L, false)); + input = new TsRandomAccessLocalFileReader(path); + readTsFile = new TsFile(input); + paths = new ArrayList<>(); + paths.add(new Path("device_1.sensor_1")); + paths.add(new Path("device_1.sensor_2")); + paths.add(new Path("device_1.sensor_3")); + onePassQueryDataSet = readTsFile.query(paths, timeFilter, null); + while (onePassQueryDataSet.hasNextRecord()) { + 
System.out.println(onePassQueryDataSet.getNextRecord()); + } + System.out.println("------------"); - // value filter : device_1.sensor_2 < 20 - FilterExpression valueFilter = FilterFactory.ltEq(FilterFactory.intFilterSeries("device_1", "sensor_2", FilterSeriesType.VALUE_FILTER), 20, false); - input = new TsRandomAccessLocalFileReader(path); - readTsFile = new TsFile(input); - paths = new ArrayList<>(); - paths.add(new Path("device_1.sensor_1")); - paths.add(new Path("device_1.sensor_2")); - paths.add(new Path("device_1.sensor_3")); - onePassQueryDataSet = readTsFile.query(paths, null, valueFilter); - while (onePassQueryDataSet.hasNextRecord()) { - System.out.println(onePassQueryDataSet.getNextRecord()); - } - System.out.println("------------"); + // value filter : device_1.sensor_2 < 20 + FilterExpression valueFilter = FilterFactory.ltEq( + FilterFactory.intFilterSeries("device_1", "sensor_2", FilterSeriesType.VALUE_FILTER), 20, + false); + input = new TsRandomAccessLocalFileReader(path); + readTsFile = new TsFile(input); + paths = new ArrayList<>(); + paths.add(new Path("device_1.sensor_1")); + paths.add(new Path("device_1.sensor_2")); + paths.add(new Path("device_1.sensor_3")); + onePassQueryDataSet = readTsFile.query(paths, null, valueFilter); + while (onePassQueryDataSet.hasNextRecord()) { + System.out.println(onePassQueryDataSet.getNextRecord()); + } + System.out.println("------------"); - // time filter : 4 <= time < 10, value filter : device_1.sensor_2 > 20 - timeFilter = FilterFactory.and(FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 4L, true), FilterFactory.ltEq(FilterFactory.timeFilterSeries(), 10L, false)); - valueFilter = FilterFactory.gtEq(FilterFactory.intFilterSeries("device_1", "sensor_3", FilterSeriesType.VALUE_FILTER), 21, true); - input = new TsRandomAccessLocalFileReader(path); - readTsFile = new TsFile(input); - paths = new ArrayList<>(); - paths.add(new Path("device_1.sensor_1")); - paths.add(new Path("device_1.sensor_2")); - paths.add(new Path("device_1.sensor_3")); - onePassQueryDataSet = readTsFile.query(paths, timeFilter, valueFilter); - while (onePassQueryDataSet.hasNextRecord()) { - System.out.println(onePassQueryDataSet.getNextRecord()); - } - readTsFile.close(); + // time filter : 4 <= time < 10, value filter : device_1.sensor_2 > 20 + timeFilter = FilterFactory.and(FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 4L, true), + FilterFactory.ltEq(FilterFactory.timeFilterSeries(), 10L, false)); + valueFilter = FilterFactory.gtEq( + FilterFactory.intFilterSeries("device_1", "sensor_3", FilterSeriesType.VALUE_FILTER), 21, + true); + input = new TsRandomAccessLocalFileReader(path); + readTsFile = new TsFile(input); + paths = new ArrayList<>(); + paths.add(new Path("device_1.sensor_1")); + paths.add(new Path("device_1.sensor_2")); + paths.add(new Path("device_1.sensor_3")); + onePassQueryDataSet = readTsFile.query(paths, timeFilter, valueFilter); + while (onePassQueryDataSet.hasNextRecord()) { + System.out.println(onePassQueryDataSet.getNextRecord()); } + readTsFile.close(); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/TsFileTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/TsFileTest.java index b43aa36c..b702898e 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/TsFileTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/TsFileTest.java @@ -4,12 +4,10 @@ import java.io.File; import java.io.FileReader; import java.io.IOException; - import org.json.JSONObject; import org.json.JSONTokener; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; - import cn.edu.tsinghua.tsfile.timeseries.basis.TsFile; import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils; import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; @@ -18,54 +16,53 @@ * Created by kangrong on 17/3/27. */ public class TsFileTest { - private static final Logger LOG = LoggerFactory.getLogger(TsFileTest.class); + private static final Logger LOG = LoggerFactory.getLogger(TsFileTest.class); - /** - * TO be deleted - * @param args - * @throws IOException - */ - public static void main(String[] args) throws IOException, WriteProcessException { - args = new String[]{ - "/Volumes/KINGSTON/2_nc.csv", - "/Users/kangrong/out.tsfile", - "/Volumes/KINGSTON/4_new_nc.json" - }; - String inputCSV = args[0]; - String outputFilePath = args[1]; - String schemaJsonPath = args[2]; - JSONObject schemaObj = new JSONObject(new JSONTokener(new FileReader(new File(schemaJsonPath)))); - File outputFile = new File(outputFilePath); - if(outputFile.exists()) - outputFile.delete(); - TsFile tsfile = new TsFile(outputFile, schemaObj); - //write - BufferedReader br = new BufferedReader(new FileReader(inputCSV)); - long lineCount = 0; - long startTime = System.currentTimeMillis(); - long endTime; - String line; - while ((line = br.readLine()) != null) { - if(lineCount > 100000) - break; - if (lineCount % 1000000 == 0) { - endTime = System.currentTimeMillis(); - LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - } - try { - tsfile.writeLine(line); - } catch (Exception e) { - e.printStackTrace(); - } - lineCount++; - } + /** + * TO be deleted + * + * @param args + * @throws IOException + */ + public static void main(String[] args) throws IOException, WriteProcessException { + args = new String[] {"/Volumes/KINGSTON/2_nc.csv", "/Users/kangrong/out.tsfile", + "/Volumes/KINGSTON/4_new_nc.json"}; + String inputCSV = args[0]; + String outputFilePath = args[1]; + String schemaJsonPath = args[2]; + JSONObject schemaObj = + new JSONObject(new JSONTokener(new FileReader(new File(schemaJsonPath)))); + File outputFile = new File(outputFilePath); + if (outputFile.exists()) + outputFile.delete(); + TsFile tsfile = new TsFile(outputFile, schemaObj); + // write + BufferedReader br = new BufferedReader(new FileReader(inputCSV)); + long lineCount = 0; + long startTime = System.currentTimeMillis(); + long endTime; + String line; + while ((line = br.readLine()) != null) { + if (lineCount > 100000) + break; + if (lineCount % 1000000 == 0) { endTime = System.currentTimeMillis(); LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - tsfile.close(); - endTime = System.currentTimeMillis(); - LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - LOG.info("src file size:{}GB", FileUtils.getLocalFileByte(inputCSV, FileUtils.Unit.GB)); - LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputFilePath, FileUtils.Unit.MB)); - br.close(); + } + try { + tsfile.writeLine(line); + } catch (Exception e) { + e.printStackTrace(); + } + lineCount++; } -} \ No newline at end of file + endTime = System.currentTimeMillis(); + LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + tsfile.close(); + endTime = System.currentTimeMillis(); + LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + LOG.info("src file size:{}GB", FileUtils.getLocalFileByte(inputCSV, FileUtils.Unit.GB)); + LOG.info("src file size:{}MB", 
FileUtils.getLocalFileByte(outputFilePath, FileUtils.Unit.MB)); + br.close(); + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/TsFileWriteTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/TsFileWriteTest.java index a5b089da..cfc8d13a 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/TsFileWriteTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/TsFileWriteTest.java @@ -3,9 +3,7 @@ import java.io.File; import java.io.IOException; import java.util.ArrayList; - import org.json.JSONObject; - import cn.edu.tsinghua.tsfile.timeseries.basis.TsFile; import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; import cn.edu.tsinghua.tsfile.timeseries.write.record.DataPoint; @@ -15,64 +13,55 @@ public class TsFileWriteTest { - public static void main(String args[]) throws IOException, WriteProcessException { - String path = "src/test/resources/test.ts"; - String s = "{\n" + - " \"schema\": [\n" + - " {\n" + - " \"measurement_id\": \"sensor_1\",\n" + - " \"data_type\": \"FLOAT\",\n" + - " \"encoding\": \"RLE\"\n" + - " },\n" + - " {\n" + - " \"measurement_id\": \"sensor_2\",\n" + - " \"data_type\": \"INT32\",\n" + - " \"encoding\": \"TS_2DIFF\"\n" + - " },\n" + - " {\n" + - " \"measurement_id\": \"sensor_3\",\n" + - " \"data_type\": \"INT32\",\n" + - " \"encoding\": \"TS_2DIFF\"\n" + - " }\n" + - " ],\n" + - " \"properties\": \n" + - " {\n" + - " \"key1\": \"value1\",\n"+ - " \"key2\": \"value2\"\n"+ - " },\n" + - " \"row_group_size\": 134217728\n" + - "}"; - JSONObject schemaObject = new JSONObject(s); + public static void main(String args[]) throws IOException, WriteProcessException { + String path = "src/test/resources/test.ts"; + String s = "{\n" + " \"schema\": [\n" + " {\n" + + " \"measurement_id\": \"sensor_1\",\n" + + " \"data_type\": \"FLOAT\",\n" + " \"encoding\": \"RLE\"\n" + + " },\n" + " {\n" + " \"measurement_id\": \"sensor_2\",\n" + + " \"data_type\": \"INT32\",\n" + " \"encoding\": \"TS_2DIFF\"\n" + + " },\n" + " {\n" + " \"measurement_id\": \"sensor_3\",\n" + + " \"data_type\": \"INT32\",\n" + " \"encoding\": \"TS_2DIFF\"\n" + + " }\n" + " ],\n" + " \"properties\": \n" + " {\n" + + " \"key1\": \"value1\",\n" + " \"key2\": \"value2\"\n" + + " },\n" + " \"row_group_size\": 134217728\n" + "}"; + JSONObject schemaObject = new JSONObject(s); - TsFile tsFile = new TsFile(new File(path), schemaObject); + TsFile tsFile = new TsFile(new File(path), schemaObject); - tsFile.writeLine("device_1,1, sensor_1, 1.2, sensor_2, 20, sensor_3,"); - tsFile.writeLine("device_1,2, sensor_1, , sensor_2, 20, sensor_3, 50"); - tsFile.writeLine("device_1,3, sensor_1, 1.4, sensor_2, 21, sensor_3,"); - tsFile.writeLine("device_1,4, sensor_1, 1.2, sensor_2, 20, sensor_3, 51"); + tsFile.writeLine("device_1,1, sensor_1, 1.2, sensor_2, 20, sensor_3,"); + tsFile.writeLine("device_1,2, sensor_1, , sensor_2, 20, sensor_3, 50"); + tsFile.writeLine("device_1,3, sensor_1, 1.4, sensor_2, 21, sensor_3,"); + tsFile.writeLine("device_1,4, sensor_1, 1.2, sensor_2, 20, sensor_3, 51"); - TSRecord tsRecord1 = new TSRecord(6, "device_1"); - tsRecord1.dataPointList = new ArrayList() {{ - add(new FloatDataPoint("sensor_1", 7.2f)); - add(new IntDataPoint("sensor_2", 10)); - add(new IntDataPoint("sensor_3", 11)); - }}; - TSRecord tsRecord2 = new TSRecord(7, "device_1"); - tsRecord2.dataPointList = new ArrayList() {{ - add(new FloatDataPoint("sensor_1", 6.2f)); - add(new IntDataPoint("sensor_2", 20)); - add(new IntDataPoint("sensor_3", 21)); - }}; - TSRecord 
tsRecord3 = new TSRecord(8, "device_1"); - tsRecord3.dataPointList = new ArrayList() {{ - add(new FloatDataPoint("sensor_1", 9.2f)); - add(new IntDataPoint("sensor_2", 30)); - add(new IntDataPoint("sensor_3", 31)); - }}; - tsFile.writeRecord(tsRecord1); - tsFile.writeRecord(tsRecord2); - tsFile.writeRecord(tsRecord3); - tsFile.close(); - } + TSRecord tsRecord1 = new TSRecord(6, "device_1"); + tsRecord1.dataPointList = new ArrayList() { + { + add(new FloatDataPoint("sensor_1", 7.2f)); + add(new IntDataPoint("sensor_2", 10)); + add(new IntDataPoint("sensor_3", 11)); + } + }; + TSRecord tsRecord2 = new TSRecord(7, "device_1"); + tsRecord2.dataPointList = new ArrayList() { + { + add(new FloatDataPoint("sensor_1", 6.2f)); + add(new IntDataPoint("sensor_2", 20)); + add(new IntDataPoint("sensor_3", 21)); + } + }; + TSRecord tsRecord3 = new TSRecord(8, "device_1"); + tsRecord3.dataPointList = new ArrayList() { + { + add(new FloatDataPoint("sensor_1", 9.2f)); + add(new IntDataPoint("sensor_2", 30)); + add(new IntDataPoint("sensor_3", 31)); + } + }; + tsFile.writeRecord(tsRecord1); + tsFile.writeRecord(tsRecord2); + tsFile.writeRecord(tsRecord3); + tsFile.close(); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/conf/TSFileDescriptorTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/conf/TSFileDescriptorTest.java index 6d4e8cae..d2dcc5f1 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/conf/TSFileDescriptorTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/conf/TSFileDescriptorTest.java @@ -1,53 +1,54 @@ package cn.edu.tsinghua.tsfile.timeseries.conf; import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; - import static org.junit.Assert.assertEquals; + /** * Note that this test case should run separately. 
+ * * @author XuYi */ public class TSFileDescriptorTest { - public int groupSizeInBytePre = 128 * 1024 * 1024; - public int pageSizeInBytePre = 1024 * 1024; - public int maxNumberOfPointsInPagePre = 1024 * 1024; - public String timeSeriesDataTypePre = "INT64"; - public int maxStringLengthPre = 128; - public int floatPrecisionPre = 2; - public String timeSeriesEncoderPre = "TS_2DIFF"; - public String valueEncoderPre = "PALIN"; - public String compressorPre = "UNCOMPRESSED"; - public TSFileConfig config; - -// @Before -// public void before() { -// config = TSFileDescriptor.getInstance().getConfig(); -// } -// -// @After -// public void after() { -// config.groupSizeInByte = groupSizeInBytePre; -// config.pageSizeInByte = pageSizeInBytePre; -// config.maxNumberOfPointsInPage = maxNumberOfPointsInPagePre; -// config.timeSeriesDataType = timeSeriesDataTypePre; -// config.maxStringLength = maxStringLengthPre; -// config.floatPrecision = floatPrecisionPre; -// config.timeSeriesEncoder = timeSeriesEncoderPre; -// config.valueEncoder = valueEncoderPre; -// config.compressor = compressorPre; -// } -// -// @Test -// public void testLoadProp() { -// assertEquals(config.groupSizeInByte, 123456789); -// assertEquals(config.pageSizeInByte, 123456); -// assertEquals(config.maxNumberOfPointsInPage, 12345); -// assertEquals(config.timeSeriesDataType, "INT32"); -// assertEquals(config.maxStringLength, 64); -// assertEquals(config.floatPrecision, 5); -// assertEquals(config.timeSeriesEncoder, "RLE"); -// assertEquals(config.valueEncoder, "RLE"); -// assertEquals(config.compressor, "SNAPPY"); -// } + public int groupSizeInBytePre = 128 * 1024 * 1024; + public int pageSizeInBytePre = 1024 * 1024; + public int maxNumberOfPointsInPagePre = 1024 * 1024; + public String timeSeriesDataTypePre = "INT64"; + public int maxStringLengthPre = 128; + public int floatPrecisionPre = 2; + public String timeSeriesEncoderPre = "TS_2DIFF"; + public String valueEncoderPre = "PALIN"; + public String compressorPre = "UNCOMPRESSED"; + public TSFileConfig config; + + // @Before + // public void before() { + // config = TSFileDescriptor.getInstance().getConfig(); + // } + // + // @After + // public void after() { + // config.groupSizeInByte = groupSizeInBytePre; + // config.pageSizeInByte = pageSizeInBytePre; + // config.maxNumberOfPointsInPage = maxNumberOfPointsInPagePre; + // config.timeSeriesDataType = timeSeriesDataTypePre; + // config.maxStringLength = maxStringLengthPre; + // config.floatPrecision = floatPrecisionPre; + // config.timeSeriesEncoder = timeSeriesEncoderPre; + // config.valueEncoder = valueEncoderPre; + // config.compressor = compressorPre; + // } + // + // @Test + // public void testLoadProp() { + // assertEquals(config.groupSizeInByte, 123456789); + // assertEquals(config.pageSizeInByte, 123456); + // assertEquals(config.maxNumberOfPointsInPage, 12345); + // assertEquals(config.timeSeriesDataType, "INT32"); + // assertEquals(config.maxStringLength, 64); + // assertEquals(config.floatPrecision, 5); + // assertEquals(config.timeSeriesEncoder, "RLE"); + // assertEquals(config.valueEncoder, "RLE"); + // assertEquals(config.compressor, "SNAPPY"); + // } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/constant/TimeseriesTestConstant.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/constant/TimeseriesTestConstant.java index 0d382cd2..402f49a1 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/constant/TimeseriesTestConstant.java +++ 
b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/constant/TimeseriesTestConstant.java @@ -9,7 +9,7 @@ * */ public class TimeseriesTestConstant { - public static final float float_min_delta = 0.00001f; - public static final double double_min_delta = 0.00001d; - public static final Random random = new Random(System.currentTimeMillis()); + public static final float float_min_delta = 0.00001f; + public static final double double_min_delta = 0.00001d; + public static final Random random = new Random(System.currentTimeMillis()); } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileRead.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileRead.java index 8a4c3182..e742f9a6 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileRead.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileRead.java @@ -1,85 +1,87 @@ package cn.edu.tsinghua.tsfile.timeseries.demo; /** - * The class is to show how to read TsFile file named "test.ts". - * The TsFile file "test.ts" is generated from class TsFileWrite1 or class TsFileWrite2, - * they generate the same TsFile file by two different ways + * The class is to show how to read TsFile file named "test.ts". The TsFile file "test.ts" is + * generated from class TsFileWrite1 or class TsFileWrite2, they generate the same TsFile file by + * two different ways */ - import cn.edu.tsinghua.tsfile.timeseries.basis.TsFile; - import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; - import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; - import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; - import cn.edu.tsinghua.tsfile.timeseries.read.TsRandomAccessLocalFileReader; - import cn.edu.tsinghua.tsfile.timeseries.read.query.OnePassQueryDataSet; - import cn.edu.tsinghua.tsfile.timeseries.read.support.Path; - - import java.io.IOException; - import java.util.ArrayList; +import cn.edu.tsinghua.tsfile.timeseries.basis.TsFile; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; +import cn.edu.tsinghua.tsfile.timeseries.read.TsRandomAccessLocalFileReader; +import cn.edu.tsinghua.tsfile.timeseries.read.query.OnePassQueryDataSet; +import cn.edu.tsinghua.tsfile.timeseries.read.support.Path; +import java.io.IOException; +import java.util.ArrayList; public class TsFileRead { - public static void main(String[] args) throws IOException { + public static void main(String[] args) throws IOException { - String path = "test.ts"; + String path = "test.ts"; - // read example : no filter - TsRandomAccessLocalFileReader input = new TsRandomAccessLocalFileReader(path); - TsFile readTsFile = new TsFile(input); - ArrayList paths = new ArrayList<>(); - paths.add(new Path("device_1.sensor_1")); - paths.add(new Path("device_1.sensor_2")); - paths.add(new Path("device_1.sensor_3")); - OnePassQueryDataSet onePassQueryDataSet = readTsFile.query(paths, null, null); - while (onePassQueryDataSet.hasNextRecord()) { - System.out.println(onePassQueryDataSet.getNextRecord()); - } - System.out.println("------------"); + // read example : no filter + TsRandomAccessLocalFileReader input = new TsRandomAccessLocalFileReader(path); + TsFile readTsFile = new TsFile(input); + ArrayList paths = new ArrayList<>(); + paths.add(new Path("device_1.sensor_1")); + paths.add(new 
Path("device_1.sensor_2")); + paths.add(new Path("device_1.sensor_3")); + OnePassQueryDataSet onePassQueryDataSet = readTsFile.query(paths, null, null); + while (onePassQueryDataSet.hasNextRecord()) { + System.out.println(onePassQueryDataSet.getNextRecord()); + } + System.out.println("------------"); - // time filter : 4 <= time < 10 - FilterExpression timeFilter = FilterFactory.and(FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 4L, true), - FilterFactory.ltEq(FilterFactory.timeFilterSeries(), 10L, false)); - input = new TsRandomAccessLocalFileReader(path); - readTsFile = new TsFile(input); - paths = new ArrayList<>(); - paths.add(new Path("device_1.sensor_1")); - paths.add(new Path("device_1.sensor_2")); - paths.add(new Path("device_1.sensor_3")); - onePassQueryDataSet = readTsFile.query(paths, timeFilter, null); - while (onePassQueryDataSet.hasNextRecord()) { - System.out.println(onePassQueryDataSet.getNextRecord()); - } - System.out.println("------------"); + // time filter : 4 <= time < 10 + FilterExpression timeFilter = + FilterFactory.and(FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 4L, true), + FilterFactory.ltEq(FilterFactory.timeFilterSeries(), 10L, false)); + input = new TsRandomAccessLocalFileReader(path); + readTsFile = new TsFile(input); + paths = new ArrayList<>(); + paths.add(new Path("device_1.sensor_1")); + paths.add(new Path("device_1.sensor_2")); + paths.add(new Path("device_1.sensor_3")); + onePassQueryDataSet = readTsFile.query(paths, timeFilter, null); + while (onePassQueryDataSet.hasNextRecord()) { + System.out.println(onePassQueryDataSet.getNextRecord()); + } + System.out.println("------------"); - // value filter : device_1.sensor_2 < 20 - FilterExpression valueFilter = FilterFactory - .ltEq(FilterFactory.intFilterSeries("device_1", "sensor_2", FilterSeriesType.VALUE_FILTER), 20, false); - input = new TsRandomAccessLocalFileReader(path); - readTsFile = new TsFile(input); - paths = new ArrayList<>(); - paths.add(new Path("device_1.sensor_1")); - paths.add(new Path("device_1.sensor_2")); - paths.add(new Path("device_1.sensor_3")); - onePassQueryDataSet = readTsFile.query(paths, null, valueFilter); - while (onePassQueryDataSet.hasNextRecord()) { - System.out.println(onePassQueryDataSet.getNextRecord()); - } - System.out.println("------------"); + // value filter : device_1.sensor_2 < 20 + FilterExpression valueFilter = FilterFactory.ltEq( + FilterFactory.intFilterSeries("device_1", "sensor_2", FilterSeriesType.VALUE_FILTER), 20, + false); + input = new TsRandomAccessLocalFileReader(path); + readTsFile = new TsFile(input); + paths = new ArrayList<>(); + paths.add(new Path("device_1.sensor_1")); + paths.add(new Path("device_1.sensor_2")); + paths.add(new Path("device_1.sensor_3")); + onePassQueryDataSet = readTsFile.query(paths, null, valueFilter); + while (onePassQueryDataSet.hasNextRecord()) { + System.out.println(onePassQueryDataSet.getNextRecord()); + } + System.out.println("------------"); - // time filter : 4 <= time < 10, value filter : device_1.sensor_3 > 20 - timeFilter = FilterFactory.and(FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 4L, true), - FilterFactory.ltEq(FilterFactory.timeFilterSeries(), 10L, false)); - valueFilter = FilterFactory - .gtEq(FilterFactory.intFilterSeries("device_1", "sensor_3", FilterSeriesType.VALUE_FILTER), 20, false); - input = new TsRandomAccessLocalFileReader(path); - readTsFile = new TsFile(input); - paths = new ArrayList<>(); - paths.add(new Path("device_1.sensor_1")); - paths.add(new 
Path("device_1.sensor_2")); - paths.add(new Path("device_1.sensor_3")); - onePassQueryDataSet = readTsFile.query(paths, timeFilter, valueFilter); - while (onePassQueryDataSet.hasNextRecord()) { - System.out.println(onePassQueryDataSet.getNextRecord()); - } + // time filter : 4 <= time < 10, value filter : device_1.sensor_3 > 20 + timeFilter = FilterFactory.and(FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 4L, true), + FilterFactory.ltEq(FilterFactory.timeFilterSeries(), 10L, false)); + valueFilter = FilterFactory.gtEq( + FilterFactory.intFilterSeries("device_1", "sensor_3", FilterSeriesType.VALUE_FILTER), 20, + false); + input = new TsRandomAccessLocalFileReader(path); + readTsFile = new TsFile(input); + paths = new ArrayList<>(); + paths.add(new Path("device_1.sensor_1")); + paths.add(new Path("device_1.sensor_2")); + paths.add(new Path("device_1.sensor_3")); + onePassQueryDataSet = readTsFile.query(paths, timeFilter, valueFilter); + while (onePassQueryDataSet.hasNextRecord()) { + System.out.println(onePassQueryDataSet.getNextRecord()); } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileReadTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileReadTest.java index 37c71131..08afed32 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileReadTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileReadTest.java @@ -7,74 +7,76 @@ import cn.edu.tsinghua.tsfile.timeseries.read.TsRandomAccessLocalFileReader; import cn.edu.tsinghua.tsfile.timeseries.read.query.OnePassQueryDataSet; import cn.edu.tsinghua.tsfile.timeseries.read.support.Path; - import java.io.IOException; import java.util.ArrayList; public class TsFileReadTest { - public static void main(String[] args) throws IOException { - // TODO Auto-generated method stub - String path = "test.ts"; + public static void main(String[] args) throws IOException { + // TODO Auto-generated method stub + String path = "test.ts"; - // read example : no filter - TsRandomAccessLocalFileReader input = new TsRandomAccessLocalFileReader(path); - TsFile readTsFile = new TsFile(input); - ArrayList paths = new ArrayList<>(); - paths.add(new Path("device_1.sensor_1")); - paths.add(new Path("device_1.sensor_2")); - paths.add(new Path("device_1.sensor_3")); - OnePassQueryDataSet onePassQueryDataSet = readTsFile.query(paths, null, null); - while (onePassQueryDataSet.hasNextRecord()) { - System.out.println(onePassQueryDataSet.getNextRecord()); - } - System.out.println("------------"); + // read example : no filter + TsRandomAccessLocalFileReader input = new TsRandomAccessLocalFileReader(path); + TsFile readTsFile = new TsFile(input); + ArrayList paths = new ArrayList<>(); + paths.add(new Path("device_1.sensor_1")); + paths.add(new Path("device_1.sensor_2")); + paths.add(new Path("device_1.sensor_3")); + OnePassQueryDataSet onePassQueryDataSet = readTsFile.query(paths, null, null); + while (onePassQueryDataSet.hasNextRecord()) { + System.out.println(onePassQueryDataSet.getNextRecord()); + } + System.out.println("------------"); - // time filter : 4 <= time < 10 - FilterExpression timeFilter = FilterFactory.and(FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 4L, true), - FilterFactory.ltEq(FilterFactory.timeFilterSeries(), 10L, false)); - input = new TsRandomAccessLocalFileReader(path); - readTsFile = new TsFile(input); - paths = new ArrayList<>(); - paths.add(new Path("device_1.sensor_1")); - paths.add(new Path("device_1.sensor_2")); - paths.add(new 
Path("device_1.sensor_3")); - onePassQueryDataSet = readTsFile.query(paths, timeFilter, null); - while (onePassQueryDataSet.hasNextRecord()) { - System.out.println(onePassQueryDataSet.getNextRecord()); - } - System.out.println("------------"); + // time filter : 4 <= time < 10 + FilterExpression timeFilter = + FilterFactory.and(FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 4L, true), + FilterFactory.ltEq(FilterFactory.timeFilterSeries(), 10L, false)); + input = new TsRandomAccessLocalFileReader(path); + readTsFile = new TsFile(input); + paths = new ArrayList<>(); + paths.add(new Path("device_1.sensor_1")); + paths.add(new Path("device_1.sensor_2")); + paths.add(new Path("device_1.sensor_3")); + onePassQueryDataSet = readTsFile.query(paths, timeFilter, null); + while (onePassQueryDataSet.hasNextRecord()) { + System.out.println(onePassQueryDataSet.getNextRecord()); + } + System.out.println("------------"); - // value filter : device_1.sensor_2 < 20 - FilterExpression valueFilter = FilterFactory - .ltEq(FilterFactory.intFilterSeries("device_1", "sensor_2", FilterSeriesType.VALUE_FILTER), 20, false); - input = new TsRandomAccessLocalFileReader(path); - readTsFile = new TsFile(input); - paths = new ArrayList<>(); - paths.add(new Path("device_1.sensor_1")); - paths.add(new Path("device_1.sensor_2")); - paths.add(new Path("device_1.sensor_3")); - onePassQueryDataSet = readTsFile.query(paths, null, valueFilter); - while (onePassQueryDataSet.hasNextRecord()) { - System.out.println(onePassQueryDataSet.getNextRecord()); - } - System.out.println("------------"); + // value filter : device_1.sensor_2 < 20 + FilterExpression valueFilter = FilterFactory.ltEq( + FilterFactory.intFilterSeries("device_1", "sensor_2", FilterSeriesType.VALUE_FILTER), 20, + false); + input = new TsRandomAccessLocalFileReader(path); + readTsFile = new TsFile(input); + paths = new ArrayList<>(); + paths.add(new Path("device_1.sensor_1")); + paths.add(new Path("device_1.sensor_2")); + paths.add(new Path("device_1.sensor_3")); + onePassQueryDataSet = readTsFile.query(paths, null, valueFilter); + while (onePassQueryDataSet.hasNextRecord()) { + System.out.println(onePassQueryDataSet.getNextRecord()); + } + System.out.println("------------"); - // time filter : 4 <= time < 10, value filter : device_1.sensor_3 > 20 - timeFilter = FilterFactory.and(FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 4L, true), - FilterFactory.ltEq(FilterFactory.timeFilterSeries(), 10L, false)); - valueFilter = FilterFactory - .gtEq(FilterFactory.intFilterSeries("device_1", "sensor_3", FilterSeriesType.VALUE_FILTER), 20, false); - input = new TsRandomAccessLocalFileReader(path); - readTsFile = new TsFile(input); - paths = new ArrayList<>(); - paths.add(new Path("device_1.sensor_1")); - paths.add(new Path("device_1.sensor_2")); - paths.add(new Path("device_1.sensor_3")); - onePassQueryDataSet = readTsFile.query(paths, timeFilter, valueFilter); - while (onePassQueryDataSet.hasNextRecord()) { - System.out.println(onePassQueryDataSet.getNextRecord()); - } - } + // time filter : 4 <= time < 10, value filter : device_1.sensor_3 > 20 + timeFilter = FilterFactory.and(FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 4L, true), + FilterFactory.ltEq(FilterFactory.timeFilterSeries(), 10L, false)); + valueFilter = FilterFactory.gtEq( + FilterFactory.intFilterSeries("device_1", "sensor_3", FilterSeriesType.VALUE_FILTER), 20, + false); + input = new TsRandomAccessLocalFileReader(path); + readTsFile = new TsFile(input); + paths = new ArrayList<>(); + 
paths.add(new Path("device_1.sensor_1")); + paths.add(new Path("device_1.sensor_2")); + paths.add(new Path("device_1.sensor_3")); + onePassQueryDataSet = readTsFile.query(paths, timeFilter, valueFilter); + while (onePassQueryDataSet.hasNextRecord()) { + System.out.println(onePassQueryDataSet.getNextRecord()); + } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWrite1.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWrite1.java index 2f7b3004..ed4d37c1 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWrite1.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWrite1.java @@ -2,9 +2,7 @@ import java.io.File; import java.util.ArrayList; - import org.json.JSONObject; - import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; import cn.edu.tsinghua.tsfile.timeseries.basis.TsFile; import cn.edu.tsinghua.tsfile.timeseries.write.record.DataPoint; @@ -14,71 +12,59 @@ public class TsFileWrite1 { - public static void main(String args[]) { - try { - String path = "test.ts"; - String s = "{\n" + - " \"schema\": [\n" + - " {\n" + - " \"measurement_id\": \"sensor_1\",\n" + - " \"data_type\": \"FLOAT\",\n" + - " \"encoding\": \"RLE\"\n" + - " },\n" + - " {\n" + - " \"measurement_id\": \"sensor_2\",\n" + - " \"data_type\": \"INT32\",\n" + - " \"encoding\": \"TS_2DIFF\"\n" + - " },\n" + - " {\n" + - " \"measurement_id\": \"sensor_3\",\n" + - " \"data_type\": \"INT32\",\n" + - " \"encoding\": \"TS_2DIFF\"\n" + - " }\n" + - " ],\n" + - " \"row_group_size\": 134217728\n" + - "}"; - JSONObject schemaObject = new JSONObject(s); - - TsRandomAccessFileWriter output = new TsRandomAccessFileWriter(new File(path)); - TsFile tsFile = new TsFile(output, schemaObject); + public static void main(String args[]) { + try { + String path = "test.ts"; + String s = "{\n" + " \"schema\": [\n" + " {\n" + + " \"measurement_id\": \"sensor_1\",\n" + + " \"data_type\": \"FLOAT\",\n" + " \"encoding\": \"RLE\"\n" + + " },\n" + " {\n" + " \"measurement_id\": \"sensor_2\",\n" + + " \"data_type\": \"INT32\",\n" + " \"encoding\": \"TS_2DIFF\"\n" + + " },\n" + " {\n" + " \"measurement_id\": \"sensor_3\",\n" + + " \"data_type\": \"INT32\",\n" + " \"encoding\": \"TS_2DIFF\"\n" + + " }\n" + " ],\n" + " \"row_group_size\": 134217728\n" + "}"; + JSONObject schemaObject = new JSONObject(s); - tsFile.writeLine("device_1,1, sensor_1, 1.2, sensor_2, 20, sensor_3,"); - tsFile.writeLine("device_1,2, sensor_1, , sensor_2, 20, sensor_3, 50"); - tsFile.writeLine("device_1,3, sensor_1, 1.4, sensor_2, 21, sensor_3,"); - tsFile.writeLine("device_1,4, sensor_1, 1.2, sensor_2, 20, sensor_3, 51"); + TsRandomAccessFileWriter output = new TsRandomAccessFileWriter(new File(path)); + TsFile tsFile = new TsFile(output, schemaObject); - TSRecord tsRecord1 = new TSRecord(6, "device_1"); - tsRecord1.dataPointList = new ArrayList() { - { - add(new FloatDataPoint("sensor_1", 7.2f)); - add(new IntDataPoint("sensor_2", 10)); - add(new IntDataPoint("sensor_3", 11)); - } - }; - TSRecord tsRecord2 = new TSRecord(7, "device_1"); - tsRecord2.dataPointList = new ArrayList() { - { - add(new FloatDataPoint("sensor_1", 6.2f)); - add(new IntDataPoint("sensor_2", 20)); - add(new IntDataPoint("sensor_3", 21)); - } - }; - TSRecord tsRecord3 = new TSRecord(8, "device_1"); - tsRecord3.dataPointList = new ArrayList() { - { - add(new FloatDataPoint("sensor_1", 9.2f)); - add(new IntDataPoint("sensor_2", 30)); - add(new IntDataPoint("sensor_3", 31)); - } - }; - tsFile.writeRecord(tsRecord1); 
- tsFile.writeRecord(tsRecord2); - tsFile.writeRecord(tsRecord3); + tsFile.writeLine("device_1,1, sensor_1, 1.2, sensor_2, 20, sensor_3,"); + tsFile.writeLine("device_1,2, sensor_1, , sensor_2, 20, sensor_3, 50"); + tsFile.writeLine("device_1,3, sensor_1, 1.4, sensor_2, 21, sensor_3,"); + tsFile.writeLine("device_1,4, sensor_1, 1.2, sensor_2, 20, sensor_3, 51"); - tsFile.close(); - } catch (Throwable e) { - e.printStackTrace(); - System.out.println(e.getMessage()); + TSRecord tsRecord1 = new TSRecord(6, "device_1"); + tsRecord1.dataPointList = new ArrayList() { + { + add(new FloatDataPoint("sensor_1", 7.2f)); + add(new IntDataPoint("sensor_2", 10)); + add(new IntDataPoint("sensor_3", 11)); } + }; + TSRecord tsRecord2 = new TSRecord(7, "device_1"); + tsRecord2.dataPointList = new ArrayList() { + { + add(new FloatDataPoint("sensor_1", 6.2f)); + add(new IntDataPoint("sensor_2", 20)); + add(new IntDataPoint("sensor_3", 21)); + } + }; + TSRecord tsRecord3 = new TSRecord(8, "device_1"); + tsRecord3.dataPointList = new ArrayList() { + { + add(new FloatDataPoint("sensor_1", 9.2f)); + add(new IntDataPoint("sensor_2", 30)); + add(new IntDataPoint("sensor_3", 31)); + } + }; + tsFile.writeRecord(tsRecord1); + tsFile.writeRecord(tsRecord2); + tsFile.writeRecord(tsRecord3); + + tsFile.close(); + } catch (Throwable e) { + e.printStackTrace(); + System.out.println(e.getMessage()); } -} \ No newline at end of file + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWrite2.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWrite2.java index d429dc39..9581fe9f 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWrite2.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWrite2.java @@ -4,7 +4,6 @@ * Created by beyyes on 17/12/5. 
*/ import java.io.File; - import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter; @@ -15,81 +14,84 @@ public class TsFileWrite2 { - public static void main(String args[]) { - try { - TsFileWriter tsFileWriter = new TsFileWriter(new File("test.ts")); + public static void main(String args[]) { + try { + TsFileWriter tsFileWriter = new TsFileWriter(new File("test.ts")); - // add measurements - tsFileWriter.addMeasurement(new MeasurementDescriptor("sensor_1", TSDataType.FLOAT, TSEncoding.RLE)); - tsFileWriter.addMeasurement(new MeasurementDescriptor("sensor_2", TSDataType.INT32, TSEncoding.TS_2DIFF)); - tsFileWriter.addMeasurement(new MeasurementDescriptor("sensor_3", TSDataType.INT32, TSEncoding.TS_2DIFF)); + // add measurements + tsFileWriter + .addMeasurement(new MeasurementDescriptor("sensor_1", TSDataType.FLOAT, TSEncoding.RLE)); + tsFileWriter.addMeasurement( + new MeasurementDescriptor("sensor_2", TSDataType.INT32, TSEncoding.TS_2DIFF)); + tsFileWriter.addMeasurement( + new MeasurementDescriptor("sensor_3", TSDataType.INT32, TSEncoding.TS_2DIFF)); - // construct TSRecord - TSRecord tsRecord = new TSRecord(1, "device_1"); - DataPoint dPoint1 = new FloatDataPoint("sensor_1", 1.2f); - DataPoint dPoint2 = new IntDataPoint("sensor_2", 20); - DataPoint dPoint3; - tsRecord.addTuple(dPoint1); - tsRecord.addTuple(dPoint2); - tsFileWriter.write(tsRecord); + // construct TSRecord + TSRecord tsRecord = new TSRecord(1, "device_1"); + DataPoint dPoint1 = new FloatDataPoint("sensor_1", 1.2f); + DataPoint dPoint2 = new IntDataPoint("sensor_2", 20); + DataPoint dPoint3; + tsRecord.addTuple(dPoint1); + tsRecord.addTuple(dPoint2); + tsFileWriter.write(tsRecord); - tsRecord = new TSRecord(2, "device_1"); - dPoint2 = new IntDataPoint("sensor_2", 20); - dPoint3 = new IntDataPoint("sensor_3", 50); - tsRecord.addTuple(dPoint2); - tsRecord.addTuple(dPoint3); - tsFileWriter.write(tsRecord); + tsRecord = new TSRecord(2, "device_1"); + dPoint2 = new IntDataPoint("sensor_2", 20); + dPoint3 = new IntDataPoint("sensor_3", 50); + tsRecord.addTuple(dPoint2); + tsRecord.addTuple(dPoint3); + tsFileWriter.write(tsRecord); - tsRecord = new TSRecord(3, "device_1"); - dPoint1 = new FloatDataPoint("sensor_1", 1.4f); - dPoint2 = new IntDataPoint("sensor_2", 21); - tsRecord.addTuple(dPoint1); - tsRecord.addTuple(dPoint2); - tsFileWriter.write(tsRecord); + tsRecord = new TSRecord(3, "device_1"); + dPoint1 = new FloatDataPoint("sensor_1", 1.4f); + dPoint2 = new IntDataPoint("sensor_2", 21); + tsRecord.addTuple(dPoint1); + tsRecord.addTuple(dPoint2); + tsFileWriter.write(tsRecord); - tsRecord = new TSRecord(4, "device_1"); - dPoint1 = new FloatDataPoint("sensor_1", 1.2f); - dPoint2 = new IntDataPoint("sensor_2", 20); - dPoint3 = new IntDataPoint("sensor_3", 51); - tsRecord.addTuple(dPoint1); - tsRecord.addTuple(dPoint2); - tsRecord.addTuple(dPoint3); - tsFileWriter.write(tsRecord); + tsRecord = new TSRecord(4, "device_1"); + dPoint1 = new FloatDataPoint("sensor_1", 1.2f); + dPoint2 = new IntDataPoint("sensor_2", 20); + dPoint3 = new IntDataPoint("sensor_3", 51); + tsRecord.addTuple(dPoint1); + tsRecord.addTuple(dPoint2); + tsRecord.addTuple(dPoint3); + tsFileWriter.write(tsRecord); - tsRecord = new TSRecord(6, "device_1"); - dPoint1 = new FloatDataPoint("sensor_1", 7.2f); - dPoint2 = new IntDataPoint("sensor_2", 10); - dPoint3 = new IntDataPoint("sensor_3", 11); - tsRecord.addTuple(dPoint1); - 
tsRecord.addTuple(dPoint2); - tsRecord.addTuple(dPoint3); - tsFileWriter.write(tsRecord); + tsRecord = new TSRecord(6, "device_1"); + dPoint1 = new FloatDataPoint("sensor_1", 7.2f); + dPoint2 = new IntDataPoint("sensor_2", 10); + dPoint3 = new IntDataPoint("sensor_3", 11); + tsRecord.addTuple(dPoint1); + tsRecord.addTuple(dPoint2); + tsRecord.addTuple(dPoint3); + tsFileWriter.write(tsRecord); - tsRecord = new TSRecord(7, "device_1"); - dPoint1 = new FloatDataPoint("sensor_1", 6.2f); - dPoint2 = new IntDataPoint("sensor_2", 20); - dPoint3 = new IntDataPoint("sensor_3", 21); - tsRecord.addTuple(dPoint1); - tsRecord.addTuple(dPoint2); - tsRecord.addTuple(dPoint3); - tsFileWriter.write(tsRecord); + tsRecord = new TSRecord(7, "device_1"); + dPoint1 = new FloatDataPoint("sensor_1", 6.2f); + dPoint2 = new IntDataPoint("sensor_2", 20); + dPoint3 = new IntDataPoint("sensor_3", 21); + tsRecord.addTuple(dPoint1); + tsRecord.addTuple(dPoint2); + tsRecord.addTuple(dPoint3); + tsFileWriter.write(tsRecord); - tsRecord = new TSRecord(8, "device_1"); - dPoint1 = new FloatDataPoint("sensor_1", 9.2f); - dPoint2 = new IntDataPoint("sensor_2", 30); - dPoint3 = new IntDataPoint("sensor_3", 31); - tsRecord.addTuple(dPoint1); - tsRecord.addTuple(dPoint2); - tsRecord.addTuple(dPoint3); - tsFileWriter.write(tsRecord); + tsRecord = new TSRecord(8, "device_1"); + dPoint1 = new FloatDataPoint("sensor_1", 9.2f); + dPoint2 = new IntDataPoint("sensor_2", 30); + dPoint3 = new IntDataPoint("sensor_3", 31); + tsRecord.addTuple(dPoint1); + tsRecord.addTuple(dPoint2); + tsRecord.addTuple(dPoint3); + tsFileWriter.write(tsRecord); - // close TsFile - tsFileWriter.close(); - } catch (Throwable e) { - e.printStackTrace(); - System.out.println(e.getMessage()); - } + // close TsFile + tsFileWriter.close(); + } catch (Throwable e) { + e.printStackTrace(); + System.out.println(e.getMessage()); } + } -} \ No newline at end of file +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWriteTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWriteTest.java index ba1b9f9f..04212389 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWriteTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWriteTest.java @@ -4,7 +4,6 @@ import java.io.File; import java.util.ArrayList; import org.json.JSONObject; - import cn.edu.tsinghua.tsfile.common.utils.TsRandomAccessFileWriter; import cn.edu.tsinghua.tsfile.timeseries.basis.TsFile; import cn.edu.tsinghua.tsfile.timeseries.write.record.DataPoint; @@ -14,71 +13,59 @@ public class TsFileWriteTest { - public static void main(String args[]) { - try { - String path = "test.ts"; - String s = "{\n" + - " \"schema\": [\n" + - " {\n" + - " \"measurement_id\": \"sensor_1\",\n" + - " \"data_type\": \"FLOAT\",\n" + - " \"encoding\": \"RLE\"\n" + - " },\n" + - " {\n" + - " \"measurement_id\": \"sensor_2\",\n" + - " \"data_type\": \"INT32\",\n" + - " \"encoding\": \"TS_2DIFF\"\n" + - " },\n" + - " {\n" + - " \"measurement_id\": \"sensor_3\",\n" + - " \"data_type\": \"INT32\",\n" + - " \"encoding\": \"TS_2DIFF\"\n" + - " }\n" + - " ],\n" + - " \"row_group_size\": 134217728\n" + - "}"; - JSONObject schemaObject = new JSONObject(s); + public static void main(String args[]) { + try { + String path = "test.ts"; + String s = "{\n" + " \"schema\": [\n" + " {\n" + + " \"measurement_id\": \"sensor_1\",\n" + + " \"data_type\": \"FLOAT\",\n" + " \"encoding\": \"RLE\"\n" + + " },\n" + " {\n" + " \"measurement_id\": \"sensor_2\",\n" + + " 
\"data_type\": \"INT32\",\n" + " \"encoding\": \"TS_2DIFF\"\n" + + " },\n" + " {\n" + " \"measurement_id\": \"sensor_3\",\n" + + " \"data_type\": \"INT32\",\n" + " \"encoding\": \"TS_2DIFF\"\n" + + " }\n" + " ],\n" + " \"row_group_size\": 134217728\n" + "}"; + JSONObject schemaObject = new JSONObject(s); - TsRandomAccessFileWriter output = new TsRandomAccessFileWriter(new File(path)); - TsFile tsFile = new TsFile(output, schemaObject); + TsRandomAccessFileWriter output = new TsRandomAccessFileWriter(new File(path)); + TsFile tsFile = new TsFile(output, schemaObject); - tsFile.writeLine("device_1,1, sensor_1, 1.2, sensor_2, 20, sensor_3,"); - tsFile.writeLine("device_1,2, sensor_1, , sensor_2, 20, sensor_3, 50"); - tsFile.writeLine("device_1,3, sensor_1, 1.4, sensor_2, 21, sensor_3,"); - tsFile.writeLine("device_1,4, sensor_1, 1.2, sensor_2, 20, sensor_3, 51"); + tsFile.writeLine("device_1,1, sensor_1, 1.2, sensor_2, 20, sensor_3,"); + tsFile.writeLine("device_1,2, sensor_1, , sensor_2, 20, sensor_3, 50"); + tsFile.writeLine("device_1,3, sensor_1, 1.4, sensor_2, 21, sensor_3,"); + tsFile.writeLine("device_1,4, sensor_1, 1.2, sensor_2, 20, sensor_3, 51"); - TSRecord tsRecord1 = new TSRecord(6, "device_1"); - tsRecord1.dataPointList = new ArrayList() { - { - add(new FloatDataPoint("sensor_1", 7.2f)); - add(new IntDataPoint("sensor_2", 10)); - add(new IntDataPoint("sensor_3", 11)); - } - }; - TSRecord tsRecord2 = new TSRecord(7, "device_1"); - tsRecord2.dataPointList = new ArrayList() { - { - add(new FloatDataPoint("sensor_1", 6.2f)); - add(new IntDataPoint("sensor_2", 20)); - add(new IntDataPoint("sensor_3", 21)); - } - }; - TSRecord tsRecord3 = new TSRecord(8, "device_1"); - tsRecord3.dataPointList = new ArrayList() { - { - add(new FloatDataPoint("sensor_1", 9.2f)); - add(new IntDataPoint("sensor_2", 30)); - add(new IntDataPoint("sensor_3", 31)); - } - }; - tsFile.writeRecord(tsRecord1); - tsFile.writeRecord(tsRecord2); - tsFile.writeRecord(tsRecord3); + TSRecord tsRecord1 = new TSRecord(6, "device_1"); + tsRecord1.dataPointList = new ArrayList() { + { + add(new FloatDataPoint("sensor_1", 7.2f)); + add(new IntDataPoint("sensor_2", 10)); + add(new IntDataPoint("sensor_3", 11)); + } + }; + TSRecord tsRecord2 = new TSRecord(7, "device_1"); + tsRecord2.dataPointList = new ArrayList() { + { + add(new FloatDataPoint("sensor_1", 6.2f)); + add(new IntDataPoint("sensor_2", 20)); + add(new IntDataPoint("sensor_3", 21)); + } + }; + TSRecord tsRecord3 = new TSRecord(8, "device_1"); + tsRecord3.dataPointList = new ArrayList() { + { + add(new FloatDataPoint("sensor_1", 9.2f)); + add(new IntDataPoint("sensor_2", 30)); + add(new IntDataPoint("sensor_3", 31)); + } + }; + tsFile.writeRecord(tsRecord1); + tsFile.writeRecord(tsRecord2); + tsFile.writeRecord(tsRecord3); - tsFile.close(); - } catch (Throwable e) { - e.printStackTrace(); - System.out.println(e.getMessage()); - } - } + tsFile.close(); + } catch (Throwable e) { + e.printStackTrace(); + System.out.println(e.getMessage()); + } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWriteTest2.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWriteTest2.java index 7be9a68d..67fc3833 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWriteTest2.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/TsFileWriteTest2.java @@ -1,7 +1,6 @@ package cn.edu.tsinghua.tsfile.timeseries.demo; import java.io.File; - import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; import 
cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter; @@ -12,30 +11,32 @@ public class TsFileWriteTest2 { - public static void main(String args[]) { - try { - TsFileWriter tsFileWriter = new TsFileWriter(new File("test1.ts")); - - // add measurements - tsFileWriter.addMeasurement(new MeasurementDescriptor("cpu_utility", TSDataType.FLOAT, TSEncoding.TS_2DIFF)); - tsFileWriter.addMeasurement(new MeasurementDescriptor("memory_utility", TSDataType.FLOAT, TSEncoding.TS_2DIFF)); - - // construct TSRecord - TSRecord tsRecord = new TSRecord(1000, "hxd"); - DataPoint dPoint1 = new FloatDataPoint("cpu_utility", 90.0f); - DataPoint dPoint2 = new FloatDataPoint("memory_utility", 80.0f); - tsRecord.addTuple(dPoint1); - tsRecord.addTuple(dPoint2); - - // write TSRecord to TsFile - tsFileWriter.write(tsRecord); - - // close TsFile - tsFileWriter.close(); - } catch (Throwable e) { - e.printStackTrace(); - System.out.println(e.getMessage()); - } - } + public static void main(String args[]) { + try { + TsFileWriter tsFileWriter = new TsFileWriter(new File("test1.ts")); + + // add measurements + tsFileWriter.addMeasurement( + new MeasurementDescriptor("cpu_utility", TSDataType.FLOAT, TSEncoding.TS_2DIFF)); + tsFileWriter.addMeasurement( + new MeasurementDescriptor("memory_utility", TSDataType.FLOAT, TSEncoding.TS_2DIFF)); + + // construct TSRecord + TSRecord tsRecord = new TSRecord(1000, "hxd"); + DataPoint dPoint1 = new FloatDataPoint("cpu_utility", 90.0f); + DataPoint dPoint2 = new FloatDataPoint("memory_utility", 80.0f); + tsRecord.addTuple(dPoint1); + tsRecord.addTuple(dPoint2); + + // write TSRecord to TsFile + tsFileWriter.write(tsRecord); + + // close TsFile + tsFileWriter.close(); + } catch (Throwable e) { + e.printStackTrace(); + System.out.println(e.getMessage()); + } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/WriteDemo.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/WriteDemo.java index d7d95d86..04649201 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/WriteDemo.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/WriteDemo.java @@ -8,7 +8,6 @@ import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils; import cn.edu.tsinghua.tsfile.timeseries.utils.RecordUtils; import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter; - import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; import cn.edu.tsinghua.tsfile.timeseries.write.io.TsFileIOWriter; import cn.edu.tsinghua.tsfile.timeseries.write.record.TSRecord; @@ -18,95 +17,96 @@ import org.json.JSONTokener; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; /** - * Write Demo provides a JAVA application that receives CSV file and writes to TSfile format. - * This application requires four parameters (inputDataFilePath,outputDataFilePath, errorFile - * and schemaFile) and one optional parameter (confFile). - * Four parameters are needed: inputDataFilePath, outputDataFilePath, errorFile and schemaFile. + * Write Demo provides a JAVA application that receives CSV file and writes to TSfile format. This + * application requires four parameters (inputDataFilePath,outputDataFilePath, errorFile and + * schemaFile) and one optional parameter (confFile). Four parameters are needed: inputDataFilePath, + * outputDataFilePath, errorFile and schemaFile. 
* * @author kangrong */ public class WriteDemo { - static final Logger LOG = LoggerFactory.getLogger(WriteDemo.class); - public static TsFileWriter tsFileWriter; - public static String inputDataFile; - public static String outputDataFile; - public static String errorOutputDataFile; - public static JSONObject jsonSchema; + static final Logger LOG = LoggerFactory.getLogger(WriteDemo.class); + public static TsFileWriter tsFileWriter; + public static String inputDataFile; + public static String outputDataFile; + public static String errorOutputDataFile; + public static JSONObject jsonSchema; - private static void write() throws IOException, InterruptedException, WriteProcessException { - File file = new File(outputDataFile); - File errorFile = new File(errorOutputDataFile); - if (file.exists()) - file.delete(); - if (errorFile.exists()) - errorFile.delete(); - FileSchema schema = new FileSchema(jsonSchema); - TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); - tsFileWriter = new TsFileWriter(file, schema, conf); + private static void write() throws IOException, InterruptedException, WriteProcessException { + File file = new File(outputDataFile); + File errorFile = new File(errorOutputDataFile); + if (file.exists()) + file.delete(); + if (errorFile.exists()) + errorFile.delete(); + FileSchema schema = new FileSchema(jsonSchema); + TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); + tsFileWriter = new TsFileWriter(file, schema, conf); - // write to file - try { - writeToFile(schema); - } catch (WriteProcessException e) { - e.printStackTrace(); - } - LOG.info("write to file successfully!!"); + // write to file + try { + writeToFile(schema); + } catch (WriteProcessException e) { + e.printStackTrace(); } + LOG.info("write to file successfully!!"); + } - private static void writeToFile(FileSchema schema) throws InterruptedException, IOException, WriteProcessException { - BufferedReader br = new BufferedReader(new FileReader(inputDataFile)); - long lineCount = 0; - long startTime = System.currentTimeMillis(); - long endTime; - String line; - while ((line = br.readLine()) != null) { - if (lineCount % 1000000 == 0) { - endTime = System.currentTimeMillis(); - LOG.info("write line:{},inner space consumer:{},use time:{}", lineCount, - tsFileWriter.calculateMemSizeForAllGroup(), endTime); - LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - } - // String str = in.nextLine(); - TSRecord record = RecordUtils.parseSimpleTupleRecord(line, schema); - tsFileWriter.write(record); - lineCount++; - } + private static void writeToFile(FileSchema schema) + throws InterruptedException, IOException, WriteProcessException { + BufferedReader br = new BufferedReader(new FileReader(inputDataFile)); + long lineCount = 0; + long startTime = System.currentTimeMillis(); + long endTime; + String line; + while ((line = br.readLine()) != null) { + if (lineCount % 1000000 == 0) { endTime = System.currentTimeMillis(); + LOG.info("write line:{},inner space consumer:{},use time:{}", lineCount, + tsFileWriter.calculateMemSizeForAllGroup(), endTime); LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - tsFileWriter.close(); - endTime = System.currentTimeMillis(); - LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - LOG.info("src file size:{}GB", FileUtils.getLocalFileByte(inputDataFile, FileUtils.Unit.GB)); - LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, FileUtils.Unit.MB)); - 
br.close(); + } + // String str = in.nextLine(); + TSRecord record = RecordUtils.parseSimpleTupleRecord(line, schema); + tsFileWriter.write(record); + lineCount++; } + endTime = System.currentTimeMillis(); + LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + tsFileWriter.close(); + endTime = System.currentTimeMillis(); + LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + LOG.info("src file size:{}GB", FileUtils.getLocalFileByte(inputDataFile, FileUtils.Unit.GB)); + LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, FileUtils.Unit.MB)); + br.close(); + } - public static void main(String[] args) throws JSONException, IOException, InterruptedException, WriteProcessException { - if (args.length < 4) { - LOG.error("\n\ninput args format error, you should run as: " + - " \n"); - return; - } - inputDataFile = args[0]; - outputDataFile = args[1]; - errorOutputDataFile = args[2]; - System.out.println(args[3]); - String path = args[3]; - JSONObject obj = new JSONObject(new JSONTokener(new FileReader(new File(path)))); - System.out.println(obj); - if (!obj.has(JsonFormatConstant.JSON_SCHEMA)) { - LOG.error("input schema format error"); - return; - } - jsonSchema = obj; - System.out.println(args.length); - write(); + public static void main(String[] args) + throws JSONException, IOException, InterruptedException, WriteProcessException { + if (args.length < 4) { + LOG.error("\n\ninput args format error, you should run as: " + + " \n"); + return; + } + inputDataFile = args[0]; + outputDataFile = args[1]; + errorOutputDataFile = args[2]; + System.out.println(args[3]); + String path = args[3]; + JSONObject obj = new JSONObject(new JSONTokener(new FileReader(new File(path)))); + System.out.println(obj); + if (!obj.has(JsonFormatConstant.JSON_SCHEMA)) { + LOG.error("input schema format error"); + return; } + jsonSchema = obj; + System.out.println(args.length); + write(); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/WriteTsFileDemo.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/WriteTsFileDemo.java index a7a46597..9bb1935f 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/WriteTsFileDemo.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/demo/WriteTsFileDemo.java @@ -2,7 +2,6 @@ import java.io.File; import java.io.IOException; - import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter; @@ -14,17 +13,19 @@ public class WriteTsFileDemo { - public static void main(String[] args) throws WriteProcessException, IOException { - TsFileWriter tsFileWriter=new TsFileWriter(new File("test.ts")); - tsFileWriter.addMeasurement(new MeasurementDescriptor("cpu_utility", TSDataType.FLOAT, TSEncoding.TS_2DIFF)); - tsFileWriter.addMeasurement(new MeasurementDescriptor("memory_utility", TSDataType.FLOAT, TSEncoding.TS_2DIFF)); - TSRecord tsRecord=new TSRecord(1000, "user1.thinkpad.T200"); - DataPoint dPoint1=new FloatDataPoint("cpu_utility", 90.0f); - DataPoint dPoint2=new FloatDataPoint("memory_utility", 80.0f); - tsRecord.addTuple(dPoint1); - tsRecord.addTuple(dPoint2); - tsFileWriter.write(tsRecord); - tsFileWriter.close(); - } + public static void main(String[] args) throws WriteProcessException, IOException { + TsFileWriter tsFileWriter = new TsFileWriter(new File("test.ts")); + tsFileWriter.addMeasurement( + new MeasurementDescriptor("cpu_utility", 
TSDataType.FLOAT, TSEncoding.TS_2DIFF)); + tsFileWriter.addMeasurement( + new MeasurementDescriptor("memory_utility", TSDataType.FLOAT, TSEncoding.TS_2DIFF)); + TSRecord tsRecord = new TSRecord(1000, "user1.thinkpad.T200"); + DataPoint dPoint1 = new FloatDataPoint("cpu_utility", 90.0f); + DataPoint dPoint2 = new FloatDataPoint("memory_utility", 80.0f); + tsRecord.addTuple(dPoint1); + tsRecord.addTuple(dPoint2); + tsFileWriter.write(tsRecord); + tsFileWriter.close(); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/CrossSeriesFilterTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/CrossSeriesFilterTest.java index 8e85f436..e9a6e47e 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/CrossSeriesFilterTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/CrossSeriesFilterTest.java @@ -1,55 +1,53 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter; - -import static org.junit.Assert.assertEquals; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.IntFilterSeries; -import org.junit.Test; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.DoubleFilterSeries; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.CSAnd; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.CSOr; - -/** - * - * @author CGF - * - */ -public class CrossSeriesFilterTest { - private static String deltaObjectINT = FilterTestConstant.deltaObjectINT; - private static String measurementINT = FilterTestConstant.measurementINT; - private static String deltaObjectDOUBLE = FilterTestConstant.deltaObjectDOUBLE; - private static String measurementDOUBLE = FilterTestConstant.measurementDOUBLE; - - private static final IntFilterSeries intFilterSeries = FilterFactory.intFilterSeries(deltaObjectINT, measurementINT, - FilterSeriesType.VALUE_FILTER); - - private static final DoubleFilterSeries doubleFilterSeries = FilterFactory.doubleFilterSeries(deltaObjectDOUBLE, - measurementDOUBLE, FilterSeriesType.VALUE_FILTER); - - @Test - public void testCrossSeriesFilterApi() { - SingleSeriesFilterExpression left = FilterFactory.ltEq(intFilterSeries, 60, true); - - SingleSeriesFilterExpression right = FilterFactory.ltEq(doubleFilterSeries, 60.0, true); - - CSAnd csand = (CSAnd) FilterFactory.and(left, right); - assertEquals(csand.getLeft(), left); - assertEquals(csand.getRight(), right); - assertEquals(csand.toString(), - "[FilterSeries (deltaObjectINT,measurementINT,INT32,VALUE_FILTER) <= 60] & [FilterSeries (deltaObjectDOUBLE,measurementDOUBLE,DOUBLE,VALUE_FILTER) <= 60.0]"); - - CSOr csor = (CSOr) FilterFactory.or(left, right); - assertEquals(csor.getLeft(), left); - assertEquals(csor.getRight(), right); - assertEquals(csor.toString(), - "[FilterSeries (deltaObjectINT,measurementINT,INT32,VALUE_FILTER) <= 60] | [FilterSeries (deltaObjectDOUBLE,measurementDOUBLE,DOUBLE,VALUE_FILTER) <= 60.0]"); - } - - @Test - public void XX() { - - } -} +package cn.edu.tsinghua.tsfile.timeseries.filter; + +import static org.junit.Assert.assertEquals; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.IntFilterSeries; +import org.junit.Test; +import 
cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.DoubleFilterSeries; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.CSAnd; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.CSOr; + +/** + * + * @author CGF + * + */ +public class CrossSeriesFilterTest { + private static String deltaObjectINT = FilterTestConstant.deltaObjectINT; + private static String measurementINT = FilterTestConstant.measurementINT; + private static String deltaObjectDOUBLE = FilterTestConstant.deltaObjectDOUBLE; + private static String measurementDOUBLE = FilterTestConstant.measurementDOUBLE; + + private static final IntFilterSeries intFilterSeries = + FilterFactory.intFilterSeries(deltaObjectINT, measurementINT, FilterSeriesType.VALUE_FILTER); + + private static final DoubleFilterSeries doubleFilterSeries = FilterFactory + .doubleFilterSeries(deltaObjectDOUBLE, measurementDOUBLE, FilterSeriesType.VALUE_FILTER); + + @Test + public void testCrossSeriesFilterApi() { + SingleSeriesFilterExpression left = FilterFactory.ltEq(intFilterSeries, 60, true); + + SingleSeriesFilterExpression right = FilterFactory.ltEq(doubleFilterSeries, 60.0, true); + + CSAnd csand = (CSAnd) FilterFactory.and(left, right); + assertEquals(csand.getLeft(), left); + assertEquals(csand.getRight(), right); + assertEquals(csand.toString(), + "[FilterSeries (deltaObjectINT,measurementINT,INT32,VALUE_FILTER) <= 60] & [FilterSeries (deltaObjectDOUBLE,measurementDOUBLE,DOUBLE,VALUE_FILTER) <= 60.0]"); + + CSOr csor = (CSOr) FilterFactory.or(left, right); + assertEquals(csor.getLeft(), left); + assertEquals(csor.getRight(), right); + assertEquals(csor.toString(), + "[FilterSeries (deltaObjectINT,measurementINT,INT32,VALUE_FILTER) <= 60] | [FilterSeries (deltaObjectDOUBLE,measurementDOUBLE,DOUBLE,VALUE_FILTER) <= 60.0]"); + } + + @Test + public void XX() { + + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/DigestVistiorTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/DigestVistiorTest.java index 93199b97..eba30e76 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/DigestVistiorTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/DigestVistiorTest.java @@ -1,152 +1,175 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.nio.ByteBuffer; - -import cn.edu.tsinghua.tsfile.common.utils.BytesUtils; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.DigestVisitor; -import org.junit.Test; - -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Not; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; -import 
cn.edu.tsinghua.tsfile.timeseries.filter.utils.DigestForFilter; - -/** - * - * @author CGF - * - */ -public class DigestVistiorTest { - private static String deltaObjectUID = FilterTestConstant.deltaObjectUID; - private static String measurementUID = FilterTestConstant.measurementUID; - - private DigestVisitor digestVistor = new DigestVisitor(); - - private ByteBuffer b1 = ByteBuffer.wrap(BytesUtils.intToBytes(45)); - private ByteBuffer b2 = ByteBuffer.wrap(BytesUtils.intToBytes(78)); - private DigestForFilter digest1 = new DigestForFilter(b1, b2, TSDataType.INT32); // (45, 78, INT32) - - @Test - public void testIntegerEq() { - Eq eq = FilterFactory.eq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45); - assertTrue(digestVistor.satisfy(digest1, eq)); - - Eq eqNot = FilterFactory.eq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 90); - assertFalse(digestVistor.satisfy(digest1, eqNot)); - } - - @Test - public void testIntegerNotEq() { - NotEq notEq = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 60); - assertTrue(digestVistor.satisfy(digest1, notEq)); - - NotEq eqNot = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45); - assertFalse(digestVistor.satisfy(digest1, eqNot)); - } - - @Test - public void testIntegerLtEq() { - LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 990, true); - assertTrue(digestVistor.satisfy(digest1, ltEq1)); - - LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45, true); - assertTrue(digestVistor.satisfy(digest1, ltEq2)); - - LtEq ltEq3 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 60, true); - assertTrue(digestVistor.satisfy(digest1, ltEq3)); - - LtEq ltEq4 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 44, true); - assertFalse(digestVistor.satisfy(digest1, ltEq4)); - - LtEq ltEq5 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45, false); - assertFalse(digestVistor.satisfy(digest1, ltEq5)); - } - - @Test - public void testIntegerGtEq() { - GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 990, true); - assertFalse(digestVistor.satisfy(digest1, gtEq1)); - - GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45, true); - assertTrue(digestVistor.satisfy(digest1, gtEq2)); - - GtEq gtEq3 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 60, true); - assertTrue(digestVistor.satisfy(digest1, gtEq3)); - - GtEq gtEq4 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 44, true); - assertTrue(digestVistor.satisfy(digest1, gtEq4)); - - GtEq gtEq5 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45, false); - assertTrue(digestVistor.satisfy(digest1, gtEq5)); - - GtEq gtEq6 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, 
FilterSeriesType.VALUE_FILTER), 78, false); - assertFalse(digestVistor.satisfy(digest1, gtEq6)); - } - - @Test - public void testNot() { - Not not = (Not) FilterFactory.not(FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 990, true)); - assertTrue(digestVistor.satisfy(digest1, not)); - } - - @Test - public void testAnd() { - LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 46, true); - GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 77, true); - And and = (And) FilterFactory.and(ltEq1, gtEq1); - assertTrue(digestVistor.satisfy(digest1, and)); - - LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 46, true); - GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 79, true); - And and2 = (And) FilterFactory.and(ltEq2, gtEq2); - assertFalse(digestVistor.satisfy(digest1, and2)); - } - - @Test - public void testOr() { - LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 46, true); - GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 77, true); - Or or = (Or) FilterFactory.or(ltEq1, gtEq1); - assertTrue(digestVistor.satisfy(digest1, or)); - - LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 33, true); - GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 79, true); - Or or2 = (Or) FilterFactory.or(ltEq2, gtEq2); - assertFalse(digestVistor.satisfy(digest1, or2)); - } - - @Test - public void testNullValue() { - // Value is null - LtEq ltEq = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, - FilterSeriesType.VALUE_FILTER), null, true); - assertFalse(digestVistor.satisfy(digest1, ltEq)); - - GtEq gtEq = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, - FilterSeriesType.VALUE_FILTER), null, true); - assertFalse(digestVistor.satisfy(digest1, gtEq)); - - Eq eq = FilterFactory.eq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, - FilterSeriesType.VALUE_FILTER), null); - assertFalse(digestVistor.satisfy(digest1, eq)); - - NotEq notEq = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, - FilterSeriesType.VALUE_FILTER), null); - assertFalse(digestVistor.satisfy(digest1, notEq)); - - // FilterSeriesType is null - LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, - null), 3, true); - assertFalse(digestVistor.satisfy(digest1, ltEq2)); - } -} +package cn.edu.tsinghua.tsfile.timeseries.filter; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import java.nio.ByteBuffer; +import cn.edu.tsinghua.tsfile.common.utils.BytesUtils; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; +import 
cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.DigestVisitor; +import org.junit.Test; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Not; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.DigestForFilter; + +/** + * + * @author CGF + * + */ +public class DigestVistiorTest { + private static String deltaObjectUID = FilterTestConstant.deltaObjectUID; + private static String measurementUID = FilterTestConstant.measurementUID; + + private DigestVisitor digestVistor = new DigestVisitor(); + + private ByteBuffer b1 = ByteBuffer.wrap(BytesUtils.intToBytes(45)); + private ByteBuffer b2 = ByteBuffer.wrap(BytesUtils.intToBytes(78)); + private DigestForFilter digest1 = new DigestForFilter(b1, b2, TSDataType.INT32); // (45, 78, + // INT32) + + @Test + public void testIntegerEq() { + Eq eq = FilterFactory.eq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 45); + assertTrue(digestVistor.satisfy(digest1, eq)); + + Eq eqNot = FilterFactory.eq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 90); + assertFalse(digestVistor.satisfy(digest1, eqNot)); + } + + @Test + public void testIntegerNotEq() { + NotEq notEq = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 60); + assertTrue(digestVistor.satisfy(digest1, notEq)); + + NotEq eqNot = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45); + assertFalse(digestVistor.satisfy(digest1, eqNot)); + } + + @Test + public void testIntegerLtEq() { + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 990, true); + assertTrue(digestVistor.satisfy(digest1, ltEq1)); + + LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45, true); + assertTrue(digestVistor.satisfy(digest1, ltEq2)); + + LtEq ltEq3 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 60, true); + assertTrue(digestVistor.satisfy(digest1, ltEq3)); + + LtEq ltEq4 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 44, true); + assertFalse(digestVistor.satisfy(digest1, ltEq4)); + + LtEq ltEq5 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45, false); + assertFalse(digestVistor.satisfy(digest1, ltEq5)); + } + + @Test + public void testIntegerGtEq() { + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 990, true); + assertFalse(digestVistor.satisfy(digest1, gtEq1)); + + GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45, true); + assertTrue(digestVistor.satisfy(digest1, gtEq2)); + + GtEq gtEq3 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, 
FilterSeriesType.VALUE_FILTER), 60, true); + assertTrue(digestVistor.satisfy(digest1, gtEq3)); + + GtEq gtEq4 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 44, true); + assertTrue(digestVistor.satisfy(digest1, gtEq4)); + + GtEq gtEq5 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45, false); + assertTrue(digestVistor.satisfy(digest1, gtEq5)); + + GtEq gtEq6 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 78, false); + assertFalse(digestVistor.satisfy(digest1, gtEq6)); + } + + @Test + public void testNot() { + Not not = + (Not) FilterFactory.not(FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 990, true)); + assertTrue(digestVistor.satisfy(digest1, not)); + } + + @Test + public void testAnd() { + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 46, true); + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 77, true); + And and = (And) FilterFactory.and(ltEq1, gtEq1); + assertTrue(digestVistor.satisfy(digest1, and)); + + LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 46, true); + GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 79, true); + And and2 = (And) FilterFactory.and(ltEq2, gtEq2); + assertFalse(digestVistor.satisfy(digest1, and2)); + } + + @Test + public void testOr() { + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 46, true); + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 77, true); + Or or = (Or) FilterFactory.or(ltEq1, gtEq1); + assertTrue(digestVistor.satisfy(digest1, or)); + + LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 33, true); + GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 79, true); + Or or2 = (Or) FilterFactory.or(ltEq2, gtEq2); + assertFalse(digestVistor.satisfy(digest1, or2)); + } + + @Test + public void testNullValue() { + // Value is null + LtEq ltEq = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), null, true); + assertFalse(digestVistor.satisfy(digest1, ltEq)); + + GtEq gtEq = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), null, true); + assertFalse(digestVistor.satisfy(digest1, gtEq)); + + Eq eq = FilterFactory.eq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), null); + assertFalse(digestVistor.satisfy(digest1, eq)); + + NotEq notEq = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), null); + assertFalse(digestVistor.satisfy(digest1, notEq)); + + // FilterSeriesType is null + LtEq ltEq2 = FilterFactory + .ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, null), 3, true); + 
assertFalse(digestVistor.satisfy(digest1, ltEq2)); + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterApiMethodsTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterApiMethodsTest.java index d5709c16..8d1d13c7 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterApiMethodsTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterApiMethodsTest.java @@ -1,129 +1,129 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.DoubleFilterSeries; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.IntFilterSeries; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.BooleanFilterSeries; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FloatFilterSeries; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.LongFilterSeries; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Not; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; - -/** - * - * @author CGF - * - */ -public class FilterApiMethodsTest { - private static final Logger LOG = LoggerFactory.getLogger(FilterApiMethodsTest.class); - - private static String deltaObjectINT = FilterTestConstant.deltaObjectINT; - private static String measurementINT = FilterTestConstant.measurementINT; - private static String deltaObjectLONG = FilterTestConstant.deltaObjectLONG; - private static String measurementLONG = FilterTestConstant.measurementLONG; - private static String deltaObjectFLOAT = FilterTestConstant.deltaObjectFLOAT; - private static String measurementFLOAT = FilterTestConstant.measurementFLOAT; - private static String deltaObjectDOUBLE = FilterTestConstant.deltaObjectDOUBLE; - private static String measurementDOUBLE = FilterTestConstant.measurementDOUBLE; - private static String deltaObjectBOOLEAN = FilterTestConstant.deltaObjectBOOLEAN; - private static String measurementBOOLEAN = FilterTestConstant.measurementBOOLEAN; - - private static final IntFilterSeries intFilterSeries = FilterFactory.intFilterSeries(deltaObjectINT, measurementINT, FilterSeriesType.VALUE_FILTER); - private static final LongFilterSeries longFilterSeries = FilterFactory.longFilterSeries(deltaObjectLONG, measurementLONG, FilterSeriesType.VALUE_FILTER); - private static final FloatFilterSeries floatFilterSeries = - FilterFactory.floatFilterSeries(deltaObjectFLOAT, measurementFLOAT, FilterSeriesType.VALUE_FILTER); - private static final BooleanFilterSeries 
booleanFilterSeries = - FilterFactory.booleanFilterSeries(deltaObjectBOOLEAN, measurementBOOLEAN, FilterSeriesType.VALUE_FILTER); - private static final DoubleFilterSeries doubleFilterSeries = - FilterFactory.doubleFilterSeries(deltaObjectDOUBLE, measurementDOUBLE, FilterSeriesType.VALUE_FILTER); - - @Test - public void testFilterSeriesCreation() { - assertEquals(intFilterSeries.getDeltaObjectUID(), deltaObjectINT); - assertEquals(intFilterSeries.getMeasurementUID(), measurementINT); - assertEquals(intFilterSeries.getSeriesDataType(), TSDataType.INT32); - - assertEquals(longFilterSeries.getDeltaObjectUID(), deltaObjectLONG); - assertEquals(longFilterSeries.getMeasurementUID(), measurementLONG); - assertEquals(longFilterSeries.getSeriesDataType(), TSDataType.INT64); - - assertEquals(floatFilterSeries.getDeltaObjectUID(), deltaObjectFLOAT); - assertEquals(floatFilterSeries.getMeasurementUID(), measurementFLOAT); - assertEquals(floatFilterSeries.getSeriesDataType(), TSDataType.FLOAT); - - assertEquals(booleanFilterSeries.getDeltaObjectUID(), deltaObjectBOOLEAN); - assertEquals(booleanFilterSeries.getMeasurementUID(), measurementBOOLEAN); - assertEquals(booleanFilterSeries.getSeriesDataType(), TSDataType.BOOLEAN); - - assertEquals(doubleFilterSeries.getDeltaObjectUID(), deltaObjectDOUBLE); - assertEquals(doubleFilterSeries.getMeasurementUID(), measurementDOUBLE); - assertEquals(doubleFilterSeries.getSeriesDataType(), TSDataType.DOUBLE); - - assertFalse(intFilterSeries.equals(longFilterSeries)); - - } - - @Test - public void testUnaryOperators() { - SingleSeriesFilterExpression fe = FilterFactory.eq(intFilterSeries, 15); - assertTrue(fe instanceof Eq); - assertEquals(((Eq) fe).getValue(), 15); - - SingleSeriesFilterExpression lteq = FilterFactory.ltEq(intFilterSeries, 11, true); - assertTrue(lteq instanceof LtEq); - assertEquals(((LtEq) lteq).getValue(), 11); - - SingleSeriesFilterExpression gteq = FilterFactory.gtEq(intFilterSeries, 22, true); - assertTrue(gteq instanceof GtEq); - assertEquals(((GtEq) gteq).getValue(), 22); - - SingleSeriesFilterExpression noteq = FilterFactory.noteq(intFilterSeries, 11); - assertTrue(noteq instanceof NotEq); - assertEquals(((NotEq) noteq).getValue(), 11); - - SingleSeriesFilterExpression not = FilterFactory.not(noteq); - assertTrue(not instanceof Not); - assertEquals(((NotEq) noteq).getValue(), 11); - } - - @Test - public void testBinaryOperators() { - SingleSeriesFilterExpression ltEq = FilterFactory.ltEq(intFilterSeries, 60, true); - SingleSeriesFilterExpression gtEq = FilterFactory.gtEq(intFilterSeries, 15, true); - SingleSeriesFilterExpression and = (SingleSeriesFilterExpression) FilterFactory.and(ltEq, gtEq); - - assertEquals(((And)and).getLeft(), ltEq); - assertEquals(((And)and).getRight(), gtEq); - LOG.info(and.toString()); - - SingleSeriesFilterExpression or = (SingleSeriesFilterExpression) FilterFactory.or(ltEq, gtEq); - Assert.assertEquals(((Or)or).getLeft(), ltEq); - Assert.assertEquals(((Or)or).getRight(), gtEq); - LOG.info(or.toString()); - } - - @Test - public void testFilterCreation() { - SingleSeriesFilterExpression fe = FilterFactory.eq(intFilterSeries, 15); - assertTrue(fe instanceof Eq); - assertEquals(((Eq) fe).getValue(), 15); - } - -} +package cn.edu.tsinghua.tsfile.timeseries.filter; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; +import 
cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.DoubleFilterSeries; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.IntFilterSeries; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.BooleanFilterSeries; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FloatFilterSeries; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.LongFilterSeries; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Not; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; + +/** + * + * @author CGF + * + */ +public class FilterApiMethodsTest { + private static final Logger LOG = LoggerFactory.getLogger(FilterApiMethodsTest.class); + + private static String deltaObjectINT = FilterTestConstant.deltaObjectINT; + private static String measurementINT = FilterTestConstant.measurementINT; + private static String deltaObjectLONG = FilterTestConstant.deltaObjectLONG; + private static String measurementLONG = FilterTestConstant.measurementLONG; + private static String deltaObjectFLOAT = FilterTestConstant.deltaObjectFLOAT; + private static String measurementFLOAT = FilterTestConstant.measurementFLOAT; + private static String deltaObjectDOUBLE = FilterTestConstant.deltaObjectDOUBLE; + private static String measurementDOUBLE = FilterTestConstant.measurementDOUBLE; + private static String deltaObjectBOOLEAN = FilterTestConstant.deltaObjectBOOLEAN; + private static String measurementBOOLEAN = FilterTestConstant.measurementBOOLEAN; + + private static final IntFilterSeries intFilterSeries = + FilterFactory.intFilterSeries(deltaObjectINT, measurementINT, FilterSeriesType.VALUE_FILTER); + private static final LongFilterSeries longFilterSeries = FilterFactory + .longFilterSeries(deltaObjectLONG, measurementLONG, FilterSeriesType.VALUE_FILTER); + private static final FloatFilterSeries floatFilterSeries = FilterFactory + .floatFilterSeries(deltaObjectFLOAT, measurementFLOAT, FilterSeriesType.VALUE_FILTER); + private static final BooleanFilterSeries booleanFilterSeries = FilterFactory + .booleanFilterSeries(deltaObjectBOOLEAN, measurementBOOLEAN, FilterSeriesType.VALUE_FILTER); + private static final DoubleFilterSeries doubleFilterSeries = FilterFactory + .doubleFilterSeries(deltaObjectDOUBLE, measurementDOUBLE, FilterSeriesType.VALUE_FILTER); + + @Test + public void testFilterSeriesCreation() { + assertEquals(intFilterSeries.getDeltaObjectUID(), deltaObjectINT); + assertEquals(intFilterSeries.getMeasurementUID(), measurementINT); + assertEquals(intFilterSeries.getSeriesDataType(), TSDataType.INT32); + + assertEquals(longFilterSeries.getDeltaObjectUID(), deltaObjectLONG); + assertEquals(longFilterSeries.getMeasurementUID(), measurementLONG); + 
assertEquals(longFilterSeries.getSeriesDataType(), TSDataType.INT64); + + assertEquals(floatFilterSeries.getDeltaObjectUID(), deltaObjectFLOAT); + assertEquals(floatFilterSeries.getMeasurementUID(), measurementFLOAT); + assertEquals(floatFilterSeries.getSeriesDataType(), TSDataType.FLOAT); + + assertEquals(booleanFilterSeries.getDeltaObjectUID(), deltaObjectBOOLEAN); + assertEquals(booleanFilterSeries.getMeasurementUID(), measurementBOOLEAN); + assertEquals(booleanFilterSeries.getSeriesDataType(), TSDataType.BOOLEAN); + + assertEquals(doubleFilterSeries.getDeltaObjectUID(), deltaObjectDOUBLE); + assertEquals(doubleFilterSeries.getMeasurementUID(), measurementDOUBLE); + assertEquals(doubleFilterSeries.getSeriesDataType(), TSDataType.DOUBLE); + + assertFalse(intFilterSeries.equals(longFilterSeries)); + + } + + @Test + public void testUnaryOperators() { + SingleSeriesFilterExpression fe = FilterFactory.eq(intFilterSeries, 15); + assertTrue(fe instanceof Eq); + assertEquals(((Eq) fe).getValue(), 15); + + SingleSeriesFilterExpression lteq = FilterFactory.ltEq(intFilterSeries, 11, true); + assertTrue(lteq instanceof LtEq); + assertEquals(((LtEq) lteq).getValue(), 11); + + SingleSeriesFilterExpression gteq = FilterFactory.gtEq(intFilterSeries, 22, true); + assertTrue(gteq instanceof GtEq); + assertEquals(((GtEq) gteq).getValue(), 22); + + SingleSeriesFilterExpression noteq = FilterFactory.noteq(intFilterSeries, 11); + assertTrue(noteq instanceof NotEq); + assertEquals(((NotEq) noteq).getValue(), 11); + + SingleSeriesFilterExpression not = FilterFactory.not(noteq); + assertTrue(not instanceof Not); + assertEquals(((NotEq) noteq).getValue(), 11); + } + + @Test + public void testBinaryOperators() { + SingleSeriesFilterExpression ltEq = FilterFactory.ltEq(intFilterSeries, 60, true); + SingleSeriesFilterExpression gtEq = FilterFactory.gtEq(intFilterSeries, 15, true); + SingleSeriesFilterExpression and = (SingleSeriesFilterExpression) FilterFactory.and(ltEq, gtEq); + + assertEquals(((And) and).getLeft(), ltEq); + assertEquals(((And) and).getRight(), gtEq); + LOG.info(and.toString()); + + SingleSeriesFilterExpression or = (SingleSeriesFilterExpression) FilterFactory.or(ltEq, gtEq); + Assert.assertEquals(((Or) or).getLeft(), ltEq); + Assert.assertEquals(((Or) or).getRight(), gtEq); + LOG.info(or.toString()); + } + + @Test + public void testFilterCreation() { + SingleSeriesFilterExpression fe = FilterFactory.eq(intFilterSeries, 15); + assertTrue(fe instanceof Eq); + assertEquals(((Eq) fe).getValue(), 15); + } + +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterTestConstant.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterTestConstant.java index 361bd805..d76bc1b6 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterTestConstant.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterTestConstant.java @@ -7,21 +7,21 @@ * */ public class FilterTestConstant { - public static final String JSON_SCHEMA = "schema"; - - // name rule : deltaObject + DataType - public static final String deltaObjectUID = "deltaObjectUID"; - public static final String deltaObjectINT = "deltaObjectINT"; - public static final String deltaObjectLONG = "deltaObjectLONG"; - public static final String deltaObjectFLOAT = "deltaObjectFLOAT"; - public static final String deltaObjectDOUBLE = "deltaObjectDOUBLE"; - public static final String deltaObjectBOOLEAN = "deltaObjectBOOLEAN"; - - // name rule : measurementObject + DataType - public static final String 
measurementUID = "measurementUID"; - public static final String measurementINT = "measurementINT"; - public static final String measurementLONG = "measurementLONG"; - public static final String measurementDOUBLE = "measurementDOUBLE"; - public static final String measurementFLOAT = "measurementFLOAT"; - public static final String measurementBOOLEAN = "measurementBOOLEAN"; + public static final String JSON_SCHEMA = "schema"; + + // name rule : deltaObject + DataType + public static final String deltaObjectUID = "deltaObjectUID"; + public static final String deltaObjectINT = "deltaObjectINT"; + public static final String deltaObjectLONG = "deltaObjectLONG"; + public static final String deltaObjectFLOAT = "deltaObjectFLOAT"; + public static final String deltaObjectDOUBLE = "deltaObjectDOUBLE"; + public static final String deltaObjectBOOLEAN = "deltaObjectBOOLEAN"; + + // name rule : measurementObject + DataType + public static final String measurementUID = "measurementUID"; + public static final String measurementINT = "measurementINT"; + public static final String measurementLONG = "measurementLONG"; + public static final String measurementDOUBLE = "measurementDOUBLE"; + public static final String measurementFLOAT = "measurementFLOAT"; + public static final String measurementBOOLEAN = "measurementBOOLEAN"; } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierDoubleTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierDoubleTest.java index 217af482..1c9fb794 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierDoubleTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierDoubleTest.java @@ -1,420 +1,447 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.SingleValueVisitor; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; -import cn.edu.tsinghua.tsfile.timeseries.filter.utils.DoubleInterval; -import cn.edu.tsinghua.tsfile.timeseries.filter.verifier.DoubleFilterVerifier; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; - -/** - * - * @author CGF - * - */ -public class FilterVerifierDoubleTest { - - private static final Logger LOG = LoggerFactory.getLogger(FilterVerifierDoubleTest.class); - - private static final double double_min_delta = 0.00001f; - private static String deltaObjectUID = FilterTestConstant.deltaObjectUID; - private static String measurementUID = FilterTestConstant.measurementUID; - - @Test - public void eqTest() { - Eq eq = FilterFactory - .eq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45.0); - DoubleInterval x = (DoubleInterval) new DoubleFilterVerifier().getInterval(eq); - 
assertEquals(x.count, 2); - assertEquals(x.v[0], 45, double_min_delta); - assertEquals(x.v[1], 45, double_min_delta); - } - - @Test - public void ltEqTest() { - LtEq ltEq = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45.0, true); - DoubleInterval x = (DoubleInterval) new DoubleFilterVerifier().getInterval(ltEq); - assertEquals(x.count, 2); - assertEquals(x.v[0], -Double.MAX_VALUE, double_min_delta); - assertEquals(x.v[1], 45.0f, double_min_delta); - - ltEq = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), -45.0, true); - SingleValueVisitor visitor = new SingleValueVisitor<>(ltEq); - Assert.assertTrue(visitor.verify(-46.0)); - Assert.assertFalse(visitor.verify(-40.0)); - Assert.assertFalse(visitor.verify(70.0)); - } - - @Test - public void andOrTest() { - // [470,1200) & (500,800]|[1000,2000) - - GtEq gtEq1 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 470.0, true); - LtEq ltEq1 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0, false); - And and1 = (And) FilterFactory.and(gtEq1, ltEq1); - - GtEq gtEq2 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500.0, false); - LtEq ltEq2 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, true); - And and2 = (And) FilterFactory.and(gtEq2, ltEq2); - - GtEq gtEq3 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0, true); - LtEq ltEq3 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0, false); - And and3 = (And) FilterFactory.and(gtEq3, ltEq3); - Or or1 = (Or) FilterFactory.or(and2, and3); - - And andCombine1 = (And) FilterFactory.and(and1, or1); - DoubleInterval ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(andCombine1); - - LOG.info("and+Or Test"); - assertEquals(ans.count, 4); - assertEquals(ans.v[0], 500.0f, double_min_delta); - assertEquals(ans.flag[0], false); - assertEquals(ans.v[1], 800.0f, double_min_delta); - assertEquals(ans.flag[1], true); - assertEquals(ans.v[2], 1000.0f, double_min_delta); - assertEquals(ans.flag[2], true); - assertEquals(ans.v[3], 1200.0f, double_min_delta); - assertEquals(ans.flag[3], false); - - - // for filter test coverage - // [400, 500) (600, 800] - GtEq gtEq4 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 400.0, true); - LtEq ltEq4 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500.0, false); - And and4 = (And) FilterFactory.and(gtEq4, ltEq4); - - GtEq gtEq5 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600.0, false); - LtEq ltEq5 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, true); - And and5 = (And) FilterFactory.and(gtEq5, ltEq5); - - And andNew = (And) FilterFactory.and(and4, and5); - DoubleInterval ansNew = (DoubleInterval) new DoubleFilterVerifier().getInterval(andNew); - assertEquals(ansNew.count, 0); - - 
// for filter test coverage2 - // [600, 800] [400, 500] - GtEq gtEq6 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600.0, true); - LtEq ltEq6 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, false); - And and6 = (And) FilterFactory.and(gtEq6, ltEq6); - - GtEq gtEq7 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 400.0, false); - LtEq ltEq8 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500.0, true); - And and7 = (And) FilterFactory.and(gtEq7, ltEq8); - - And andCombine3 = (And) FilterFactory.and(and6, and7); - DoubleInterval intervalAns = - (DoubleInterval) new DoubleFilterVerifier().getInterval(andCombine3); - assertEquals(intervalAns.count, 0); - } - - @Test - public void notEqTest() { - NotEq notEq = FilterFactory - .noteq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0); - DoubleInterval ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(notEq); - - assertEquals(ans.count, 4); - assertEquals(ans.v[0], -Double.MAX_VALUE, double_min_delta); - assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], 1000.0f, double_min_delta); - assertEquals(ans.flag[1], false); - assertEquals(ans.v[2], 1000.0f, double_min_delta); - assertEquals(ans.flag[2], false); - assertEquals(ans.v[3], Double.MAX_VALUE, double_min_delta); - assertEquals(ans.flag[3], true); - } - - @Test - public void orTest() { - // [470,1200) | (500,800] | [1000,2000) | [100,200] - - GtEq gtEq_11 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 470.0, true); - LtEq ltEq_11 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0, false); - And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); - - GtEq gtEq_12 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500.0, false); - LtEq ltEq_12 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, true); - And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); - - GtEq gtEq_13 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0, true); - LtEq ltEq_l3 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0, false); - And and3 = (And) FilterFactory.and(gtEq_13, ltEq_l3); - - GtEq gtEq_14 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 100.0, true); - LtEq ltEq_14 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 200.0, true); - And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); - - Or or1 = (Or) FilterFactory.or(and2, and3); - Or or2 = (Or) FilterFactory.or(or1, and4); - - Or orCombine = (Or) FilterFactory.or(and1, or2); - //DoubleInterval ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(orCombine); - LOG.info("all : or Test output"); - - // answer may have overlap, but is right - SingleValueVisitor vistor = new SingleValueVisitor<>(orCombine); - 
assertTrue(vistor.verify(500.0)); - assertTrue(vistor.verify(600.0)); - assertTrue(vistor.verify(1199.0)); - assertTrue(vistor.verify(1999.0)); - assertFalse(vistor.verify(5.0)); - assertFalse(vistor.verify(2000.0)); - assertFalse(vistor.verify(469.0)); - assertFalse(vistor.verify(99.0)); - assertTrue(vistor.verify(100.0)); - assertTrue(vistor.verify(200.0)); - assertFalse(vistor.verify(201.0)); - - } - - @Test - public void orborderTest() { - // [470,1200] | [1200, 1500] - - GtEq gtEq_11 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 470.0, true); - LtEq ltEq_11 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0, true); - And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); - - GtEq gtEq_12 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0, true); - LtEq ltEq_12 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1500.0, true); - And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); - - GtEq gtEq_13 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0, false); - LtEq ltEq_l3 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0, false); - And and3 = (And) FilterFactory.and(gtEq_13, ltEq_l3); - - GtEq gtEq_14 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, true); - LtEq ltEq_14 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0, true); - And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); - - Or or1 = (Or) FilterFactory.or(and1, and2); - SingleValueVisitor vistor = new SingleValueVisitor<>(or1); - assertTrue(vistor.verify(1200.0)); - - Or or2 = (Or) FilterFactory.or(and3, and4); - SingleValueVisitor vistor2 = new SingleValueVisitor<>(or2); - // DoubleInterval ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(or2); - assertTrue(vistor2.verify(1000.0)); - - GtEq gtEq_16 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0, false); - LtEq ltEq_l6 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0, false); - And and6 = (And) FilterFactory.and(gtEq_16, ltEq_l6); - GtEq gtEq_17 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, true); - LtEq ltEq_17 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0, true); - And and7 = (And) FilterFactory.and(gtEq_17, ltEq_17); - Or or7 = (Or) FilterFactory.or(and6, and7); - DoubleInterval ans7 = (DoubleInterval) new DoubleFilterVerifier().getInterval(or7); - assertEquals(ans7.v[0], 800.0f, double_min_delta); - - GtEq gtEq_18 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1800.0, false); - LtEq ltEq_l8 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0, false); - And and8 = (And) FilterFactory.and(gtEq_18, 
ltEq_l8); - GtEq gtEq_19 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, true); - LtEq ltEq_19 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 3000.0, true); - And and9 = (And) FilterFactory.and(gtEq_19, ltEq_19); - Or or9 = (Or) FilterFactory.or(and8, and9); - DoubleInterval ans9 = (DoubleInterval) new DoubleFilterVerifier().getInterval(or9); - assertEquals(ans9.v[0], 800.0, double_min_delta); - assertEquals(ans9.v[1], 3000.0, double_min_delta); - - } - - @Test - public void bugTest1() { - // [470,1200] | [1200, 1500] - - GtEq gtEq_11 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 470.0, true); - LtEq ltEq_11 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0, true); - And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); - - GtEq gtEq_12 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0, true); - LtEq ltEq_12 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1500.0, true); - And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); - - And and = (And) FilterFactory.and(and1, and2); - DoubleInterval interval = (DoubleInterval) new DoubleFilterVerifier().getInterval(and); - assertEquals(interval.v[0], 1200.0, double_min_delta); - assertEquals(interval.v[1], 1200.0, double_min_delta); - - } - - @Test - public void bugTest2() { - // [470,1200] | [1200, 1500] - - GtEq gtEq_11 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 100.0, true); - LtEq ltEq_11 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 200.0, true); - And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); - - GtEq gtEq_12 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 20.0, true); - LtEq ltEq_12 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 100.0, true); - And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); - - And and = (And) FilterFactory.and(and1, and2); - DoubleInterval interval = (DoubleInterval) new DoubleFilterVerifier().getInterval(and); - assertEquals(interval.v[0], 100.0, double_min_delta); - - } - - @Test - public void bugTest3() { - // [470,1200] | [1200, 1500] - - GtEq gtEq_11 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 100.0, true); - LtEq ltEq_11 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 200.0, true); - And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); - - GtEq gtEq_12 = FilterFactory.gtEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 100.0, true); - LtEq ltEq_12 = FilterFactory.ltEq( - FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 200.0, true); - And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); - - Or or = (Or) FilterFactory.or(and1, and2); - DoubleInterval interval = (DoubleInterval) new 
DoubleFilterVerifier().getInterval(or); - assertEquals(interval.v[0], 100.0, double_min_delta); - assertEquals(interval.v[1], 200.0, double_min_delta); - - } - - @Test - public void andOrBorderTest() { - double theta = 0.0001; - - // And Operator - GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, false); - LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, false); - And and1 = (And) FilterFactory.and(gtEq1, ltEq1); - DoubleInterval ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - and1 = (And) FilterFactory.and(ltEq1, gtEq1); - ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - - gtEq1 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, true); - ltEq1 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, false); - and1 = (And) FilterFactory.and(gtEq1, ltEq1); - ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - and1 = (And) FilterFactory.and(ltEq1, gtEq1); - ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - - gtEq1 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, false); - ltEq1 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, true); - and1 = (And) FilterFactory.and(gtEq1, ltEq1); - ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - and1 = (And) FilterFactory.and(ltEq1, gtEq1); - ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - - // Or Operator - gtEq1 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, false); - ltEq1 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, false); - Or or1 = (Or) FilterFactory.or(gtEq1, ltEq1); - ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(or1); - assertEquals(ans.count, 4); - assertEquals(ans.v[0], -Double.MAX_VALUE, theta); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], 2L, theta); assertEquals(ans.flag[1], false); - assertEquals(ans.v[2], 2L, theta); assertEquals(ans.flag[2], false); - assertEquals(ans.v[3], Double.MAX_VALUE, theta); assertEquals(ans.flag[3], true); - or1 = (Or) FilterFactory.or(ltEq1, gtEq1); - ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(or1); - assertEquals(ans.count, 4); - assertEquals(ans.v[0], -Double.MAX_VALUE, theta); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], 2L, theta); assertEquals(ans.flag[1], false); - assertEquals(ans.v[2], 2L, theta); assertEquals(ans.flag[2], false); - assertEquals(ans.v[3], Double.MAX_VALUE, theta); assertEquals(ans.flag[3], true); - - gtEq1 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, true); - ltEq1 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, false); - or1 = (Or) FilterFactory.or(gtEq1, ltEq1); - ans = 
(DoubleInterval) new DoubleFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], -Double.MAX_VALUE, theta); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Double.MAX_VALUE, theta); assertEquals(ans.flag[1], true); - or1 = (Or) FilterFactory.or(ltEq1, gtEq1); - ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], -Double.MAX_VALUE, theta); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Double.MAX_VALUE, theta); assertEquals(ans.flag[1], true); - - gtEq1 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, false); - ltEq1 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, true); - or1 = (Or) FilterFactory.or(gtEq1, ltEq1); - ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], -Double.MAX_VALUE, theta); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Double.MAX_VALUE, theta); assertEquals(ans.flag[1], true); - or1 = (Or) FilterFactory.or(ltEq1, gtEq1); - ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], -Double.MAX_VALUE, theta); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Double.MAX_VALUE, theta); assertEquals(ans.flag[1], true); - } - -} +package cn.edu.tsinghua.tsfile.timeseries.filter; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.SingleValueVisitor; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.DoubleInterval; +import cn.edu.tsinghua.tsfile.timeseries.filter.verifier.DoubleFilterVerifier; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; + +/** + * + * @author CGF + * + */ +public class FilterVerifierDoubleTest { + + private static final Logger LOG = LoggerFactory.getLogger(FilterVerifierDoubleTest.class); + + private static final double double_min_delta = 0.00001f; + private static String deltaObjectUID = FilterTestConstant.deltaObjectUID; + private static String measurementUID = FilterTestConstant.measurementUID; + + @Test + public void eqTest() { + Eq eq = FilterFactory.eq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45.0); + DoubleInterval x = (DoubleInterval) new DoubleFilterVerifier().getInterval(eq); + assertEquals(x.count, 2); + assertEquals(x.v[0], 45, double_min_delta); + assertEquals(x.v[1], 45, double_min_delta); + } + + @Test + public void ltEqTest() { + LtEq ltEq = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, 
FilterSeriesType.VALUE_FILTER), 45.0, true); + DoubleInterval x = (DoubleInterval) new DoubleFilterVerifier().getInterval(ltEq); + assertEquals(x.count, 2); + assertEquals(x.v[0], -Double.MAX_VALUE, double_min_delta); + assertEquals(x.v[1], 45.0f, double_min_delta); + + ltEq = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), -45.0, true); + SingleValueVisitor visitor = new SingleValueVisitor<>(ltEq); + Assert.assertTrue(visitor.verify(-46.0)); + Assert.assertFalse(visitor.verify(-40.0)); + Assert.assertFalse(visitor.verify(70.0)); + } + + @Test + public void andOrTest() { + // [470,1200) & (500,800]|[1000,2000) + + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 470.0, true); + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0, false); + And and1 = (And) FilterFactory.and(gtEq1, ltEq1); + + GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500.0, false); + LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, true); + And and2 = (And) FilterFactory.and(gtEq2, ltEq2); + + GtEq gtEq3 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0, true); + LtEq ltEq3 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0, false); + And and3 = (And) FilterFactory.and(gtEq3, ltEq3); + Or or1 = (Or) FilterFactory.or(and2, and3); + + And andCombine1 = (And) FilterFactory.and(and1, or1); + DoubleInterval ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(andCombine1); + + LOG.info("and+Or Test"); + assertEquals(ans.count, 4); + assertEquals(ans.v[0], 500.0f, double_min_delta); + assertEquals(ans.flag[0], false); + assertEquals(ans.v[1], 800.0f, double_min_delta); + assertEquals(ans.flag[1], true); + assertEquals(ans.v[2], 1000.0f, double_min_delta); + assertEquals(ans.flag[2], true); + assertEquals(ans.v[3], 1200.0f, double_min_delta); + assertEquals(ans.flag[3], false); + + + // for filter test coverage + // [400, 500) (600, 800] + GtEq gtEq4 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 400.0, true); + LtEq ltEq4 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500.0, false); + And and4 = (And) FilterFactory.and(gtEq4, ltEq4); + + GtEq gtEq5 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600.0, false); + LtEq ltEq5 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, true); + And and5 = (And) FilterFactory.and(gtEq5, ltEq5); + + And andNew = (And) FilterFactory.and(and4, and5); + DoubleInterval ansNew = (DoubleInterval) new DoubleFilterVerifier().getInterval(andNew); + assertEquals(ansNew.count, 0); + + // for filter test coverage2 + // [600, 800] [400, 500] + GtEq gtEq6 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600.0, true); + LtEq ltEq6 = 
FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, false); + And and6 = (And) FilterFactory.and(gtEq6, ltEq6); + + GtEq gtEq7 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 400.0, false); + LtEq ltEq8 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500.0, true); + And and7 = (And) FilterFactory.and(gtEq7, ltEq8); + + And andCombine3 = (And) FilterFactory.and(and6, and7); + DoubleInterval intervalAns = + (DoubleInterval) new DoubleFilterVerifier().getInterval(andCombine3); + assertEquals(intervalAns.count, 0); + } + + @Test + public void notEqTest() { + NotEq notEq = FilterFactory.noteq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0); + DoubleInterval ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(notEq); + + assertEquals(ans.count, 4); + assertEquals(ans.v[0], -Double.MAX_VALUE, double_min_delta); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], 1000.0f, double_min_delta); + assertEquals(ans.flag[1], false); + assertEquals(ans.v[2], 1000.0f, double_min_delta); + assertEquals(ans.flag[2], false); + assertEquals(ans.v[3], Double.MAX_VALUE, double_min_delta); + assertEquals(ans.flag[3], true); + } + + @Test + public void orTest() { + // [470,1200) | (500,800] | [1000,2000) | [100,200] + + GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 470.0, true); + LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0, false); + And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); + + GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500.0, false); + LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, true); + And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); + + GtEq gtEq_13 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0, true); + LtEq ltEq_l3 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0, false); + And and3 = (And) FilterFactory.and(gtEq_13, ltEq_l3); + + GtEq gtEq_14 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 100.0, true); + LtEq ltEq_14 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 200.0, true); + And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); + + Or or1 = (Or) FilterFactory.or(and2, and3); + Or or2 = (Or) FilterFactory.or(or1, and4); + + Or orCombine = (Or) FilterFactory.or(and1, or2); + // DoubleInterval ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(orCombine); + LOG.info("all : or Test output"); + + // answer may have overlap, but is right + SingleValueVisitor vistor = new SingleValueVisitor<>(orCombine); + assertTrue(vistor.verify(500.0)); + assertTrue(vistor.verify(600.0)); + assertTrue(vistor.verify(1199.0)); + assertTrue(vistor.verify(1999.0)); + assertFalse(vistor.verify(5.0)); + assertFalse(vistor.verify(2000.0)); + 
assertFalse(vistor.verify(469.0)); + assertFalse(vistor.verify(99.0)); + assertTrue(vistor.verify(100.0)); + assertTrue(vistor.verify(200.0)); + assertFalse(vistor.verify(201.0)); + + } + + @Test + public void orborderTest() { + // [470,1200] | [1200, 1500] + + GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 470.0, true); + LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0, true); + And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); + + GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0, true); + LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1500.0, true); + And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); + + GtEq gtEq_13 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0, false); + LtEq ltEq_l3 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0, false); + And and3 = (And) FilterFactory.and(gtEq_13, ltEq_l3); + + GtEq gtEq_14 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, true); + LtEq ltEq_14 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0, true); + And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); + + Or or1 = (Or) FilterFactory.or(and1, and2); + SingleValueVisitor vistor = new SingleValueVisitor<>(or1); + assertTrue(vistor.verify(1200.0)); + + Or or2 = (Or) FilterFactory.or(and3, and4); + SingleValueVisitor vistor2 = new SingleValueVisitor<>(or2); + // DoubleInterval ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(or2); + assertTrue(vistor2.verify(1000.0)); + + GtEq gtEq_16 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0, false); + LtEq ltEq_l6 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0, false); + And and6 = (And) FilterFactory.and(gtEq_16, ltEq_l6); + GtEq gtEq_17 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, true); + LtEq ltEq_17 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0, true); + And and7 = (And) FilterFactory.and(gtEq_17, ltEq_17); + Or or7 = (Or) FilterFactory.or(and6, and7); + DoubleInterval ans7 = (DoubleInterval) new DoubleFilterVerifier().getInterval(or7); + assertEquals(ans7.v[0], 800.0f, double_min_delta); + + GtEq gtEq_18 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1800.0, false); + LtEq ltEq_l8 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0, false); + And and8 = (And) FilterFactory.and(gtEq_18, ltEq_l8); + GtEq gtEq_19 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0, true); + LtEq ltEq_19 = 
FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 3000.0, true); + And and9 = (And) FilterFactory.and(gtEq_19, ltEq_19); + Or or9 = (Or) FilterFactory.or(and8, and9); + DoubleInterval ans9 = (DoubleInterval) new DoubleFilterVerifier().getInterval(or9); + assertEquals(ans9.v[0], 800.0, double_min_delta); + assertEquals(ans9.v[1], 3000.0, double_min_delta); + + } + + @Test + public void bugTest1() { + // [470,1200] & [1200,1500] ==> [1200,1200] + + GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 470.0, true); + LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0, true); + And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); + + GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0, true); + LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1500.0, true); + And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); + + And and = (And) FilterFactory.and(and1, and2); + DoubleInterval interval = (DoubleInterval) new DoubleFilterVerifier().getInterval(and); + assertEquals(interval.v[0], 1200.0, double_min_delta); + assertEquals(interval.v[1], 1200.0, double_min_delta); + + } + + @Test + public void bugTest2() { + // [100,200] & [20,100] ==> [100,100] + + GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 100.0, true); + LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 200.0, true); + And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); + + GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 20.0, true); + LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 100.0, true); + And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); + + And and = (And) FilterFactory.and(and1, and2); + DoubleInterval interval = (DoubleInterval) new DoubleFilterVerifier().getInterval(and); + assertEquals(interval.v[0], 100.0, double_min_delta); + + } + + @Test + public void bugTest3() { + // [100,200] | [100,200] ==> [100,200] + + GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 100.0, true); + LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 200.0, true); + And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); + + GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 100.0, true); + LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 200.0, true); + And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); + + Or or = (Or) FilterFactory.or(and1, and2); + DoubleInterval interval = (DoubleInterval) new DoubleFilterVerifier().getInterval(or); + assertEquals(interval.v[0], 100.0, double_min_delta); + assertEquals(interval.v[1], 200.0, double_min_delta); + + } + + @Test + public void andOrBorderTest() { + double theta = 
0.0001; + + // And Operator + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, false); + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2.0, false); + And and1 = (And) FilterFactory.and(gtEq1, ltEq1); + DoubleInterval ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + and1 = (And) FilterFactory.and(ltEq1, gtEq1); + ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + + gtEq1 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0, true); + ltEq1 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0, false); + and1 = (And) FilterFactory.and(gtEq1, ltEq1); + ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + and1 = (And) FilterFactory.and(ltEq1, gtEq1); + ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + + gtEq1 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0, false); + ltEq1 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0, true); + and1 = (And) FilterFactory.and(gtEq1, ltEq1); + ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + and1 = (And) FilterFactory.and(ltEq1, gtEq1); + ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + + // Or Operator + gtEq1 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0, false); + ltEq1 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0, false); + Or or1 = (Or) FilterFactory.or(gtEq1, ltEq1); + ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(or1); + assertEquals(ans.count, 4); + assertEquals(ans.v[0], -Double.MAX_VALUE, theta); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], 2L, theta); + assertEquals(ans.flag[1], false); + assertEquals(ans.v[2], 2L, theta); + assertEquals(ans.flag[2], false); + assertEquals(ans.v[3], Double.MAX_VALUE, theta); + assertEquals(ans.flag[3], true); + or1 = (Or) FilterFactory.or(ltEq1, gtEq1); + ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(or1); + assertEquals(ans.count, 4); + assertEquals(ans.v[0], -Double.MAX_VALUE, theta); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], 2L, theta); + assertEquals(ans.flag[1], false); + assertEquals(ans.v[2], 2L, theta); + assertEquals(ans.flag[2], false); + assertEquals(ans.v[3], Double.MAX_VALUE, theta); + assertEquals(ans.flag[3], true); + + gtEq1 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0, true); + ltEq1 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0, false); + or1 = (Or) FilterFactory.or(gtEq1, ltEq1); + ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], -Double.MAX_VALUE, theta); + assertEquals(ans.flag[0], true); + 
assertEquals(ans.v[1], Double.MAX_VALUE, theta); + assertEquals(ans.flag[1], true); + or1 = (Or) FilterFactory.or(ltEq1, gtEq1); + ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], -Double.MAX_VALUE, theta); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Double.MAX_VALUE, theta); + assertEquals(ans.flag[1], true); + + gtEq1 = FilterFactory.gtEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0, false); + ltEq1 = FilterFactory.ltEq(FilterFactory.doubleFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0, true); + or1 = (Or) FilterFactory.or(gtEq1, ltEq1); + ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], -Double.MAX_VALUE, theta); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Double.MAX_VALUE, theta); + assertEquals(ans.flag[1], true); + or1 = (Or) FilterFactory.or(ltEq1, gtEq1); + ans = (DoubleInterval) new DoubleFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], -Double.MAX_VALUE, theta); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Double.MAX_VALUE, theta); + assertEquals(ans.flag[1], true); + } + +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierFloatTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierFloatTest.java index a782dd9a..20d8640d 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierFloatTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierFloatTest.java @@ -1,297 +1,373 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.utils.FloatInterval; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.SingleValueVisitor; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; -import cn.edu.tsinghua.tsfile.timeseries.filter.verifier.FloatFilterVerifier; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; - -public class FilterVerifierFloatTest { - - private static final Logger LOG = LoggerFactory.getLogger(FilterVerifierFloatTest.class); - - private static final float float_min_delta = 0.00001f; - private static String deltaObjectUID = FilterTestConstant.deltaObjectUID; - private static String measurementUID = FilterTestConstant.measurementUID; - - @Test - public void eqTest() { - Eq eq = FilterFactory.eq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45.0f); - FloatInterval x = (FloatInterval) new FloatFilterVerifier().getInterval(eq); - assertEquals(x.count, 2); - assertEquals(x.v[0], 45, 
float_min_delta); - assertEquals(x.v[1], 45, float_min_delta); - } - - @Test - public void ltEqTest() { - LtEq ltEq = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45.0f, true); - FloatInterval x = (FloatInterval) new FloatFilterVerifier().getInterval(ltEq); - assertEquals(x.count, 2); - assertEquals(x.v[0], -Float.MAX_VALUE, float_min_delta); - assertEquals(x.v[1], 45.0f, float_min_delta); - - ltEq = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), -45.0f, true); - SingleValueVisitor visitor = new SingleValueVisitor<>(ltEq); - Assert.assertTrue(visitor.verify(-46.0f)); - Assert.assertFalse(visitor.verify(-40.0f)); - Assert.assertFalse(visitor.verify(70.0f)); - } - - @Test - public void andOrTest() { - // [470,1200) & (500,800]|[1000,2000) - - GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 470.0f, true); - LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0f, false); - And and1 = (And) FilterFactory.and(gtEq1, ltEq1); - - GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500.0f, false); - LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800.0f, true); - And and2 = (And) FilterFactory.and(gtEq2, ltEq2); - - GtEq gtEq3 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f, true); - LtEq ltEq3 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0f, false); - And and3 = (And) FilterFactory.and(gtEq3, ltEq3); - Or or1 = (Or) FilterFactory.or(and2, and3); - - And andCombine1 = (And) FilterFactory.and(and1, or1); - FloatInterval ans = (FloatInterval) new FloatFilterVerifier().getInterval(andCombine1); - - LOG.info("and+Or Test"); - assertEquals(ans.count, 4); - assertEquals(ans.v[0], 500.0f, float_min_delta); assertEquals(ans.flag[0], false); - assertEquals(ans.v[1], 800.0f, float_min_delta); assertEquals(ans.flag[1], true); - assertEquals(ans.v[2], 1000.0f, float_min_delta); assertEquals(ans.flag[2], true); - assertEquals(ans.v[3], 1200.0f, float_min_delta); assertEquals(ans.flag[3], false); - - - // for filter test coverage - // [400, 500) (600, 800] - GtEq gtEq4 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 400.0f, true); - LtEq ltEq4 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500.0f, false); - And and4 = (And) FilterFactory.and(gtEq4, ltEq4); - - GtEq gtEq5 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600.0f, false); - LtEq ltEq5 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800.0f, true); - And and5 = (And) FilterFactory.and(gtEq5, ltEq5); - - And andNew = (And) FilterFactory.and(and4, and5); - FloatInterval ansNew = (FloatInterval) new FloatFilterVerifier().getInterval(andNew); - assertEquals(ansNew.count, 0); - - // for filter test coverage2 - // [600, 800] [400, 500] - GtEq gtEq6 = 
FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600.0f, true); - LtEq ltEq6 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800.0f, false); - And and6 = (And) FilterFactory.and(gtEq6, ltEq6); - - GtEq gtEq7 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 400.0f, false); - LtEq ltEq8 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500.0f, true); - And and7 = (And) FilterFactory.and(gtEq7, ltEq8); - - And andCombine3 = (And) FilterFactory.and(and6, and7); - FloatInterval intervalAns = (FloatInterval) new FloatFilterVerifier().getInterval(andCombine3); - assertEquals(intervalAns.count, 0); - } - - @Test - public void notEqTest() { - NotEq notEq = FilterFactory.noteq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f); - FloatInterval ans = (FloatInterval) new FloatFilterVerifier().getInterval(notEq); - - assertEquals(ans.count, 4); - assertEquals(ans.v[0], -Float.MAX_VALUE, float_min_delta); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], 1000.0f, float_min_delta); assertEquals(ans.flag[1], false); - assertEquals(ans.v[2], 1000.0f, float_min_delta); assertEquals(ans.flag[2], false); - assertEquals(ans.v[3], Float.MAX_VALUE, float_min_delta); assertEquals(ans.flag[3], true); - } - - @Test - public void orTest() { - // [470,1200) | (500,800] | [1000,2000) | [100,200] - - GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 470.0f, true); - LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0f, false); - And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); - - GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500.0f, false); - LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800.0f, true); - And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); - - GtEq gtEq_13 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f, true); - LtEq ltEq_l3 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0f, false); - And and3 = (And) FilterFactory.and(gtEq_13, ltEq_l3); - - GtEq gtEq_14 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 100.0f, true); - LtEq ltEq_14 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 200.0f, true); - And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); - - Or or1 = (Or) FilterFactory.or(and2, and3); - Or or2 = (Or) FilterFactory.or(or1, and4); - - Or orCombine = (Or) FilterFactory.or(and1, or2); - FloatInterval ans = (FloatInterval) new FloatFilterVerifier().getInterval(orCombine); - // System.out.println(ans); - // LOG.info("or Test output"); - - // answer may have overlap, but is right - SingleValueVisitor vistor = new SingleValueVisitor<>(orCombine); - assertTrue(vistor.verify(500.0f)); - assertTrue(vistor.verify(600.0f)); - assertTrue(vistor.verify(1199.0f)); 
- assertTrue(vistor.verify(1999.0f)); - assertFalse(vistor.verify(5.0f)); - assertFalse(vistor.verify(2000.0f)); - assertFalse(vistor.verify(469.0f)); - assertFalse(vistor.verify(99.0f)); - assertTrue(vistor.verify(100.0f)); - assertTrue(vistor.verify(200.0f)); - assertFalse(vistor.verify(201.0f)); - - } - - @Test - public void orborderTest() { - // [470,1200] | [1200, 1500] - - GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 470.0f, true); - LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0f, true); - And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); - - GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0f, true); - LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1500.0f, true); - And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); - - GtEq gtEq_13 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f, false); - LtEq ltEq_l3 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0f, false); - And and3 = (And) FilterFactory.and(gtEq_13, ltEq_l3); - - GtEq gtEq_14 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800.0f, true); - LtEq ltEq_14 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f, true); - And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); - - Or or1 = (Or) FilterFactory.or(and1, and2); - SingleValueVisitor vistor = new SingleValueVisitor<>(or1); - assertTrue(vistor.verify(1200.0f)); - - Or or2 = (Or) FilterFactory.or(and3, and4); - SingleValueVisitor vistor2 = new SingleValueVisitor<>(or2); - // FloatInterval ans = (FloatInterval) new FloatFilterVerifier().getInterval(or2); - assertTrue(vistor2.verify(1000.0f)); - - GtEq gtEq_16 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f, false); - LtEq ltEq_l6 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0f, false); - And and6 = (And) FilterFactory.and(gtEq_16, ltEq_l6); - GtEq gtEq_17 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800.0f, true); - LtEq ltEq_17 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f, true); - And and7 = (And) FilterFactory.and(gtEq_17, ltEq_17); - Or or7 = (Or) FilterFactory.or(and6, and7); - FloatInterval ans7 = (FloatInterval) new FloatFilterVerifier().getInterval(or7); - assertEquals(ans7.v[0], 800.0f, float_min_delta); - - GtEq gtEq_18 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1800.0f, false); - LtEq ltEq_l8 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0f, false); - And and8 = (And) FilterFactory.and(gtEq_18, ltEq_l8); - GtEq gtEq_19 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 
800.0f, true); - LtEq ltEq_19 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 3000.0f, true); - And and9 = (And) FilterFactory.and(gtEq_19, ltEq_19); - Or or9 = (Or) FilterFactory.or(and8, and9); - FloatInterval ans9 = (FloatInterval) new FloatFilterVerifier().getInterval(or9); - assertEquals(ans9.v[0], 800.0f, float_min_delta); - assertEquals(ans9.v[1], 3000.0f, float_min_delta); - } - - @Test - public void andOrBorderTest() { - double theta = 0.0001; - - // And Operator - GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, false); - LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, false); - And and1 = (And) FilterFactory.and(gtEq1, ltEq1); - FloatInterval ans = (FloatInterval) new FloatFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - and1 = (And) FilterFactory.and(ltEq1, gtEq1); - ans = (FloatInterval) new FloatFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - - gtEq1 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, true); - ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, false); - and1 = (And) FilterFactory.and(gtEq1, ltEq1); - ans = (FloatInterval) new FloatFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - and1 = (And) FilterFactory.and(ltEq1, gtEq1); - ans = (FloatInterval) new FloatFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - - gtEq1 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, false); - ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, true); - and1 = (And) FilterFactory.and(gtEq1, ltEq1); - ans = (FloatInterval) new FloatFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - and1 = (And) FilterFactory.and(ltEq1, gtEq1); - ans = (FloatInterval) new FloatFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - - // Or Operator - gtEq1 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, false); - ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, false); - Or or1 = (Or) FilterFactory.or(gtEq1, ltEq1); - ans = (FloatInterval) new FloatFilterVerifier().getInterval(or1); - assertEquals(ans.count, 4); - assertEquals(ans.v[0], -Float.MAX_VALUE, theta); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], 2L, theta); assertEquals(ans.flag[1], false); - assertEquals(ans.v[2], 2L, theta); assertEquals(ans.flag[2], false); - assertEquals(ans.v[3], Float.MAX_VALUE, theta); assertEquals(ans.flag[3], true); - or1 = (Or) FilterFactory.or(ltEq1, gtEq1); - ans = (FloatInterval) new FloatFilterVerifier().getInterval(or1); - assertEquals(ans.count, 4); - assertEquals(ans.v[0], -Float.MAX_VALUE, theta); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], 2L, theta); assertEquals(ans.flag[1], false); - assertEquals(ans.v[2], 2L, theta); assertEquals(ans.flag[2], false); - assertEquals(ans.v[3], Float.MAX_VALUE, theta); assertEquals(ans.flag[3], true); - - gtEq1 = 
FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, true); - ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, false); - or1 = (Or) FilterFactory.or(gtEq1, ltEq1); - ans = (FloatInterval) new FloatFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], -Float.MAX_VALUE, theta); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Float.MAX_VALUE, theta); assertEquals(ans.flag[1], true); - or1 = (Or) FilterFactory.or(ltEq1, gtEq1); - ans = (FloatInterval) new FloatFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], -Float.MAX_VALUE, theta); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Float.MAX_VALUE, theta); assertEquals(ans.flag[1], true); - - gtEq1 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, false); - ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, true); - or1 = (Or) FilterFactory.or(gtEq1, ltEq1); - ans = (FloatInterval) new FloatFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], -Float.MAX_VALUE, theta); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Float.MAX_VALUE, theta); assertEquals(ans.flag[1], true); - or1 = (Or) FilterFactory.or(ltEq1, gtEq1); - ans = (FloatInterval) new FloatFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], -Float.MAX_VALUE, theta); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Float.MAX_VALUE, theta); assertEquals(ans.flag[1], true); - } - -} +package cn.edu.tsinghua.tsfile.timeseries.filter; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.FloatInterval; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.SingleValueVisitor; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; +import cn.edu.tsinghua.tsfile.timeseries.filter.verifier.FloatFilterVerifier; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; + +public class FilterVerifierFloatTest { + + private static final Logger LOG = LoggerFactory.getLogger(FilterVerifierFloatTest.class); + + private static final float float_min_delta = 0.00001f; + private static String deltaObjectUID = FilterTestConstant.deltaObjectUID; + private static String measurementUID = FilterTestConstant.measurementUID; + + @Test + public void eqTest() { + Eq eq = FilterFactory.eq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 45.0f); + FloatInterval x = (FloatInterval) new FloatFilterVerifier().getInterval(eq); + 
assertEquals(x.count, 2); + assertEquals(x.v[0], 45, float_min_delta); + assertEquals(x.v[1], 45, float_min_delta); + } + + @Test + public void ltEqTest() { + LtEq ltEq = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45.0f, true); + FloatInterval x = (FloatInterval) new FloatFilterVerifier().getInterval(ltEq); + assertEquals(x.count, 2); + assertEquals(x.v[0], -Float.MAX_VALUE, float_min_delta); + assertEquals(x.v[1], 45.0f, float_min_delta); + + ltEq = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), -45.0f, true); + SingleValueVisitor visitor = new SingleValueVisitor<>(ltEq); + Assert.assertTrue(visitor.verify(-46.0f)); + Assert.assertFalse(visitor.verify(-40.0f)); + Assert.assertFalse(visitor.verify(70.0f)); + } + + @Test + public void andOrTest() { + // [470,1200) & (500,800]|[1000,2000) + + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 470.0f, true); + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0f, false); + And and1 = (And) FilterFactory.and(gtEq1, ltEq1); + + GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500.0f, false); + LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0f, true); + And and2 = (And) FilterFactory.and(gtEq2, ltEq2); + + GtEq gtEq3 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f, true); + LtEq ltEq3 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0f, false); + And and3 = (And) FilterFactory.and(gtEq3, ltEq3); + Or or1 = (Or) FilterFactory.or(and2, and3); + + And andCombine1 = (And) FilterFactory.and(and1, or1); + FloatInterval ans = (FloatInterval) new FloatFilterVerifier().getInterval(andCombine1); + + LOG.info("and+Or Test"); + assertEquals(ans.count, 4); + assertEquals(ans.v[0], 500.0f, float_min_delta); + assertEquals(ans.flag[0], false); + assertEquals(ans.v[1], 800.0f, float_min_delta); + assertEquals(ans.flag[1], true); + assertEquals(ans.v[2], 1000.0f, float_min_delta); + assertEquals(ans.flag[2], true); + assertEquals(ans.v[3], 1200.0f, float_min_delta); + assertEquals(ans.flag[3], false); + + + // for filter test coverage + // [400, 500) (600, 800] + GtEq gtEq4 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 400.0f, true); + LtEq ltEq4 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500.0f, false); + And and4 = (And) FilterFactory.and(gtEq4, ltEq4); + + GtEq gtEq5 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600.0f, false); + LtEq ltEq5 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0f, true); + And and5 = (And) FilterFactory.and(gtEq5, ltEq5); + + And andNew = (And) FilterFactory.and(and4, and5); + FloatInterval ansNew = (FloatInterval) new FloatFilterVerifier().getInterval(andNew); + assertEquals(ansNew.count, 0); + + // for filter test 
coverage2 + // [600, 800] [400, 500] + GtEq gtEq6 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600.0f, true); + LtEq ltEq6 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0f, false); + And and6 = (And) FilterFactory.and(gtEq6, ltEq6); + + GtEq gtEq7 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 400.0f, false); + LtEq ltEq8 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500.0f, true); + And and7 = (And) FilterFactory.and(gtEq7, ltEq8); + + And andCombine3 = (And) FilterFactory.and(and6, and7); + FloatInterval intervalAns = (FloatInterval) new FloatFilterVerifier().getInterval(andCombine3); + assertEquals(intervalAns.count, 0); + } + + @Test + public void notEqTest() { + NotEq notEq = FilterFactory.noteq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f); + FloatInterval ans = (FloatInterval) new FloatFilterVerifier().getInterval(notEq); + + assertEquals(ans.count, 4); + assertEquals(ans.v[0], -Float.MAX_VALUE, float_min_delta); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], 1000.0f, float_min_delta); + assertEquals(ans.flag[1], false); + assertEquals(ans.v[2], 1000.0f, float_min_delta); + assertEquals(ans.flag[2], false); + assertEquals(ans.v[3], Float.MAX_VALUE, float_min_delta); + assertEquals(ans.flag[3], true); + } + + @Test + public void orTest() { + // [470,1200) | (500,800] | [1000,2000) | [100,200] + + GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 470.0f, true); + LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0f, false); + And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); + + GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500.0f, false); + LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0f, true); + And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); + + GtEq gtEq_13 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f, true); + LtEq ltEq_l3 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0f, false); + And and3 = (And) FilterFactory.and(gtEq_13, ltEq_l3); + + GtEq gtEq_14 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 100.0f, true); + LtEq ltEq_14 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 200.0f, true); + And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); + + Or or1 = (Or) FilterFactory.or(and2, and3); + Or or2 = (Or) FilterFactory.or(or1, and4); + + Or orCombine = (Or) FilterFactory.or(and1, or2); + FloatInterval ans = (FloatInterval) new FloatFilterVerifier().getInterval(orCombine); + // System.out.println(ans); + // LOG.info("or Test output"); + + // answer may have overlap, but is right + SingleValueVisitor vistor = new SingleValueVisitor<>(orCombine); + 
assertTrue(vistor.verify(500.0f)); + assertTrue(vistor.verify(600.0f)); + assertTrue(vistor.verify(1199.0f)); + assertTrue(vistor.verify(1999.0f)); + assertFalse(vistor.verify(5.0f)); + assertFalse(vistor.verify(2000.0f)); + assertFalse(vistor.verify(469.0f)); + assertFalse(vistor.verify(99.0f)); + assertTrue(vistor.verify(100.0f)); + assertTrue(vistor.verify(200.0f)); + assertFalse(vistor.verify(201.0f)); + + } + + @Test + public void orborderTest() { + // [470,1200] | [1200, 1500] + + GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 470.0f, true); + LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0f, true); + And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); + + GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200.0f, true); + LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1500.0f, true); + And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); + + GtEq gtEq_13 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f, false); + LtEq ltEq_l3 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0f, false); + And and3 = (And) FilterFactory.and(gtEq_13, ltEq_l3); + + GtEq gtEq_14 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0f, true); + LtEq ltEq_14 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f, true); + And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); + + Or or1 = (Or) FilterFactory.or(and1, and2); + SingleValueVisitor vistor = new SingleValueVisitor<>(or1); + assertTrue(vistor.verify(1200.0f)); + + Or or2 = (Or) FilterFactory.or(and3, and4); + SingleValueVisitor vistor2 = new SingleValueVisitor<>(or2); + // FloatInterval ans = (FloatInterval) new FloatFilterVerifier().getInterval(or2); + assertTrue(vistor2.verify(1000.0f)); + + GtEq gtEq_16 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f, false); + LtEq ltEq_l6 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0f, false); + And and6 = (And) FilterFactory.and(gtEq_16, ltEq_l6); + GtEq gtEq_17 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0f, true); + LtEq ltEq_17 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000.0f, true); + And and7 = (And) FilterFactory.and(gtEq_17, ltEq_17); + Or or7 = (Or) FilterFactory.or(and6, and7); + FloatInterval ans7 = (FloatInterval) new FloatFilterVerifier().getInterval(or7); + assertEquals(ans7.v[0], 800.0f, float_min_delta); + + GtEq gtEq_18 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1800.0f, false); + LtEq ltEq_l8 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000.0f, false); + And and8 = (And) FilterFactory.and(gtEq_18, 
ltEq_l8); + GtEq gtEq_19 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800.0f, true); + LtEq ltEq_19 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 3000.0f, true); + And and9 = (And) FilterFactory.and(gtEq_19, ltEq_19); + Or or9 = (Or) FilterFactory.or(and8, and9); + FloatInterval ans9 = (FloatInterval) new FloatFilterVerifier().getInterval(or9); + assertEquals(ans9.v[0], 800.0f, float_min_delta); + assertEquals(ans9.v[1], 3000.0f, float_min_delta); + } + + @Test + public void andOrBorderTest() { + double theta = 0.0001; + + // And Operator + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, false); + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2.0f, false); + And and1 = (And) FilterFactory.and(gtEq1, ltEq1); + FloatInterval ans = (FloatInterval) new FloatFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + and1 = (And) FilterFactory.and(ltEq1, gtEq1); + ans = (FloatInterval) new FloatFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + + gtEq1 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0f, true); + ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0f, false); + and1 = (And) FilterFactory.and(gtEq1, ltEq1); + ans = (FloatInterval) new FloatFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + and1 = (And) FilterFactory.and(ltEq1, gtEq1); + ans = (FloatInterval) new FloatFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + + gtEq1 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0f, false); + ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0f, true); + and1 = (And) FilterFactory.and(gtEq1, ltEq1); + ans = (FloatInterval) new FloatFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + and1 = (And) FilterFactory.and(ltEq1, gtEq1); + ans = (FloatInterval) new FloatFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + + // Or Operator + gtEq1 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0f, false); + ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0f, false); + Or or1 = (Or) FilterFactory.or(gtEq1, ltEq1); + ans = (FloatInterval) new FloatFilterVerifier().getInterval(or1); + assertEquals(ans.count, 4); + assertEquals(ans.v[0], -Float.MAX_VALUE, theta); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], 2L, theta); + assertEquals(ans.flag[1], false); + assertEquals(ans.v[2], 2L, theta); + assertEquals(ans.flag[2], false); + assertEquals(ans.v[3], Float.MAX_VALUE, theta); + assertEquals(ans.flag[3], true); + or1 = (Or) FilterFactory.or(ltEq1, gtEq1); + ans = (FloatInterval) new FloatFilterVerifier().getInterval(or1); + assertEquals(ans.count, 4); + assertEquals(ans.v[0], -Float.MAX_VALUE, theta); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], 2L, theta); + assertEquals(ans.flag[1], false); + assertEquals(ans.v[2], 2L, theta); + 
assertEquals(ans.flag[2], false); + assertEquals(ans.v[3], Float.MAX_VALUE, theta); + assertEquals(ans.flag[3], true); + + gtEq1 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0f, true); + ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0f, false); + or1 = (Or) FilterFactory.or(gtEq1, ltEq1); + ans = (FloatInterval) new FloatFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], -Float.MAX_VALUE, theta); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Float.MAX_VALUE, theta); + assertEquals(ans.flag[1], true); + or1 = (Or) FilterFactory.or(ltEq1, gtEq1); + ans = (FloatInterval) new FloatFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], -Float.MAX_VALUE, theta); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Float.MAX_VALUE, theta); + assertEquals(ans.flag[1], true); + + gtEq1 = FilterFactory.gtEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0f, false); + ltEq1 = FilterFactory.ltEq(FilterFactory.floatFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2.0f, true); + or1 = (Or) FilterFactory.or(gtEq1, ltEq1); + ans = (FloatInterval) new FloatFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], -Float.MAX_VALUE, theta); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Float.MAX_VALUE, theta); + assertEquals(ans.flag[1], true); + or1 = (Or) FilterFactory.or(ltEq1, gtEq1); + ans = (FloatInterval) new FloatFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], -Float.MAX_VALUE, theta); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Float.MAX_VALUE, theta); + assertEquals(ans.flag[1], true); + } + +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierIntTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierIntTest.java index 614f67da..a1befeac 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierIntTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierIntTest.java @@ -1,484 +1,593 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.SingleValueVisitor; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Not; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; -import cn.edu.tsinghua.tsfile.timeseries.filter.utils.IntInterval; -import org.junit.Test; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; -import cn.edu.tsinghua.tsfile.timeseries.filter.verifier.IntFilterVerifier; - -/** - * - * @author CGF - * - */ -public class 
FilterVerifierIntTest { - - private static String deltaObjectUID = "d"; - private static String measurementUID = "s"; - - @Test - public void eqTest() { - Eq eq = FilterFactory.eq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45); - IntInterval x = (IntInterval) new IntFilterVerifier().getInterval(eq); - assertEquals(x.count, 2); - assertEquals(x.v[0], 45); - assertEquals(x.v[1], 45); - } - - @Test - public void ltEqTest() { - LtEq ltEq = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45, true); - IntInterval x= (IntInterval) new IntFilterVerifier().getInterval(ltEq); - assertEquals(x.count, 2); - assertEquals(x.v[0], Integer.MIN_VALUE); - assertEquals(x.v[1], 45); - } - - @Test - public void andOrTest() { - // [470,1200) & (500,800]|[1000,2000) ans:(500,800], [1000,1200) - - GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 470, true); - LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200, false); - And and1 = (And) FilterFactory.and(gtEq1, ltEq1); - - GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500, false); - LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); - And and2 = (And) FilterFactory.and(gtEq2, ltEq2); - - GtEq gtEq3 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000, true); - LtEq ltEq3 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000, false); - And and3 = (And) FilterFactory.and(gtEq3, ltEq3); - Or or1 = (Or) FilterFactory.or(and2, and3); - - And andCombine1 = (And) FilterFactory.and(and1, or1); - IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(andCombine1); - System.out.println(ans); - // ans.output(); - assertEquals(ans.count, 4); - assertEquals(ans.v[0], 500); - assertEquals(ans.flag[0], false); - assertEquals(ans.v[1], 800); - assertEquals(ans.flag[1], true); - assertEquals(ans.v[2], 1000); - assertEquals(ans.flag[2], true); - assertEquals(ans.v[3], 1200); - assertEquals(ans.flag[3], false); - - // for filter test coverage - // [400, 500) (600, 800] - GtEq gtEq4 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 400, true); - LtEq ltEq4 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500, false); - And and4 = (And) FilterFactory.and(gtEq4, ltEq4); - - GtEq gtEq5 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600, false); - LtEq ltEq5 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); - And and5 = (And) FilterFactory.and(gtEq5, ltEq5); - - And andNew = (And) FilterFactory.and(and4, and5); - IntInterval ansNew = (IntInterval) new IntFilterVerifier().getInterval(andNew); - - assertEquals(ansNew.count, 0); - - // for filter test coverage2 - // [600, 800] [400, 500] - GtEq gtEq6 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600, true); - LtEq ltEq6 = 
FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); - And and6 = (And) FilterFactory.and(gtEq6, ltEq6); - - GtEq gtEq7 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 400, false); - LtEq ltEq8 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500, true); - And and7 = (And) FilterFactory.and(gtEq7, ltEq8); - - And andCombine3 = (And) FilterFactory.and(and6, and7); - - IntInterval intervalAns = (IntInterval) new IntFilterVerifier().getInterval(andCombine3); - - assertEquals(intervalAns.count, 0); - } - - @Test - public void notEqTest() { - NotEq notEq = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000); - IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(notEq); - - assertEquals(ans.count, 4); - assertEquals(ans.v[0], Integer.MIN_VALUE); - assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], 1000); - assertEquals(ans.flag[1], false); - assertEquals(ans.v[2], 1000); - assertEquals(ans.flag[2], false); - assertEquals(ans.v[3], Integer.MAX_VALUE); - assertEquals(ans.flag[3], true); - } - - @Test - public void orTest() { - // [470,1200) | (500,800] | [1000,2000) | [100,200] ===> ans:[100,200], [470,2000) - - GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 470, true); - LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200, false); - And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); - - GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500, false); - LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); - And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); - - GtEq gtEq_13 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000, true); - LtEq ltEq_l3 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000, false); - And and3 = (And) FilterFactory.and(gtEq_13, ltEq_l3); - - GtEq gtEq_14 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 100, true); - LtEq ltEq_14 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 200, true); - And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); - - Or o1 = (Or) FilterFactory.or(and2, and3); - Or o2 = (Or) FilterFactory.or(o1, and4); - - Or or = (Or) FilterFactory.or(and1, o2); - IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(o2); - System.out.println(ans); - - SingleValueVisitor vistor = new SingleValueVisitor<>(or); - assertTrue(vistor.verify(500)); - assertTrue(vistor.verify(600)); - assertTrue(vistor.verify(1199)); - assertTrue(vistor.verify(1999)); - assertFalse(vistor.verify(5)); - assertFalse(vistor.verify(2000)); - assertFalse(vistor.verify(469)); - assertFalse(vistor.verify(99)); - assertTrue(vistor.verify(100)); - assertTrue(vistor.verify(200)); - assertFalse(vistor.verify(201)); - - } - - @Test - public void additionTest() { - - GtEq gtEq_11 = 
FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 4000, true); - LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 4500, false); - And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); - - GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500, false); - LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); - And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); - - And and = (And) FilterFactory.and(and1, and2); - IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(and); - assertEquals(ans.count, 0); - } - - @Test - public void notOperatorTest() { - - // [30,600) | (500,800] | [1000,2000) | [100,200] - - GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 30, true); - LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600, false); - //GtEq gtEq_112 = FilterApi.gtEq(FilterApi.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600, true); - //LtEq ltEq_112 = FilterApi.ltEq(FilterApi.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 30, false); - And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); - //Or or1 = (Or) FilterApi.or(gtEq_112, ltEq_112); - - GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500, false); - LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); - //GtEq gtEq_122 = FilterApi.gtEq(FilterApi.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); - //LtEq ltEq_122 = FilterApi.ltEq(FilterApi.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500, true); - And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); - //Or or2 = (Or) FilterApi.or(gtEq_122, ltEq_122); - - GtEq gtEq_13 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000, true); - LtEq ltEq_l3 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000, false); - And and3 = (And) FilterFactory.and(gtEq_13, ltEq_l3); - - GtEq gtEq_14 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 100, true); - LtEq ltEq_14 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 200, true); - And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); - - - Or or_12 = (Or) FilterFactory.or(and1, and2); - Or or_34 = (Or) FilterFactory.or(and3, and4); - Or or = (Or) FilterFactory.or(or_12, or_34); - Not notAll = (Not) FilterFactory.not(or); - //IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(notAll); - - SingleValueVisitor vistor = new SingleValueVisitor<>(notAll); - assertFalse(vistor.verify(500)); - assertFalse(vistor.verify(600)); - assertFalse(vistor.verify(1199)); - assertFalse(vistor.verify(1999)); - assertTrue(vistor.verify(5)); - assertTrue(vistor.verify(2000)); - assertFalse(vistor.verify(469)); - 
assertFalse(vistor.verify(99)); - assertFalse(vistor.verify(100)); - assertFalse(vistor.verify(200)); - assertFalse(vistor.verify(201)); - } - - @Test - public void minMaxValueTest() { - - GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), - Integer.MAX_VALUE, true); - IntInterval i = (IntInterval) new IntFilterVerifier().getInterval(gtEq1); - assertEquals(i.v[0], Integer.MAX_VALUE); - assertEquals(i.v[1], Integer.MAX_VALUE); - assertEquals(i.flag[0], true); - assertEquals(i.flag[1], true); - - GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), - Integer.MAX_VALUE, false); - IntInterval i2 = (IntInterval) new IntFilterVerifier().getInterval(gtEq2); - assertEquals(i2.v[0], Integer.MAX_VALUE); - assertEquals(i2.v[1], Integer.MAX_VALUE); - assertEquals(i2.flag[0], false); - assertEquals(i2.flag[1], false); - - LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), - Integer.MIN_VALUE, true); - IntInterval i3 = (IntInterval) new IntFilterVerifier().getInterval(ltEq1); - assertEquals(i3.v[0], Integer.MIN_VALUE); - assertEquals(i3.v[1], Integer.MIN_VALUE); - assertEquals(i3.flag[0], true); - assertEquals(i3.flag[1], true); - - LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), - Integer.MIN_VALUE, false); - IntInterval i4 = (IntInterval) new IntFilterVerifier().getInterval(ltEq2); - assertEquals(i4.v[0], Integer.MIN_VALUE); - assertEquals(i4.v[1], Integer.MIN_VALUE); - assertEquals(i4.flag[0], false); - assertEquals(i4.flag[1], false); - - NotEq noteq = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), - Integer.MIN_VALUE); - IntInterval i5 = (IntInterval) new IntFilterVerifier().getInterval(noteq); - assertEquals(i5.v[0], Integer.MIN_VALUE); - assertEquals(i5.v[1], Integer.MIN_VALUE); - assertEquals(i5.v[2], Integer.MIN_VALUE); - assertEquals(i5.v[3], Integer.MAX_VALUE); - assertEquals(i5.flag[0], false); - assertEquals(i5.flag[1], false); - assertEquals(i5.flag[2], false); - assertEquals(i5.flag[3], true); - - NotEq noteq2 = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), - Integer.MAX_VALUE); - IntInterval i6 = (IntInterval) new IntFilterVerifier().getInterval(noteq2); - assertEquals(i6.v[0], Integer.MIN_VALUE); - assertEquals(i6.v[1], Integer.MAX_VALUE); - assertEquals(i6.v[2], Integer.MAX_VALUE); - assertEquals(i6.v[3], Integer.MAX_VALUE); - assertEquals(i6.flag[0], true); - assertEquals(i6.flag[1], false); - assertEquals(i6.flag[2], false); - assertEquals(i6.flag[3], false); - } - - @Test - public void unionBugTest() { - - GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500, true); - LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); - And and1 = (And) FilterFactory.and(gtEq1, ltEq1); - - GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 200, false); - LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 300, true); - And and2 = (And) FilterFactory.and(gtEq2, 
ltEq2); - - GtEq gtEq3 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 350, false); - LtEq ltEq3 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 400, true); - And and3 = (And) FilterFactory.and(gtEq3, ltEq3); - Or o1 = (Or) FilterFactory.or(and2, and3); - - Or or = (Or) FilterFactory.or(and1, o1); - IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(or); - System.out.println(ans); - } - - @Test - public void unionCoverageTest() { - - // right first cross - GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500, true); - LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); - And and1 = (And) FilterFactory.and(gtEq1, ltEq1); - - GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 200, false); - LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600, true); - And and2 = (And) FilterFactory.and(gtEq2, ltEq2); - - Or o1 = (Or) FilterFactory.or(and1, and2); - IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(o1); - assertEquals(ans.v[0], 200); assertEquals(ans.flag[0], false); - assertEquals(ans.v[1], 800); assertEquals(ans.flag[1], false); - - // right covers left - GtEq gtEq3 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500, true); - LtEq ltEq3 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); - And and3 = (And) FilterFactory.and(gtEq3, ltEq3); - - GtEq gtEq4 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 200, false); - LtEq ltEq4 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1600, true); - And and4 = (And) FilterFactory.and(gtEq4, ltEq4); - - Or o2 = (Or) FilterFactory.or(and3, and4); - IntInterval ans2 = (IntInterval) new IntFilterVerifier().getInterval(o2); - // System.out.println(ans2); - assertEquals(ans2.v[0], 200); assertEquals(ans2.flag[0], false); - assertEquals(ans2.v[1], 1600); assertEquals(ans2.flag[1], true); - - // right first cross (2) - GtEq gtEq5 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 700, true); - LtEq ltEq5 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); - And and5 = (And) FilterFactory.and(gtEq5, ltEq5); - - GtEq gtEq6 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 200, false); - LtEq ltEq6 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); - And and6 = (And) FilterFactory.and(gtEq6, ltEq6); - - Or o3 = (Or) FilterFactory.or(and5, and6); - IntInterval ans3 = (IntInterval) new IntFilterVerifier().getInterval(o3); - //System.out.println(ans3); - assertEquals(ans3.v[0], 200); assertEquals(ans3.flag[0], false); - assertEquals(ans3.v[1], 800); assertEquals(ans3.flag[1], true); - - // left first cross (2) - GtEq gtEq7 = 
FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 200, true); - LtEq ltEq7 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); - And and7 = (And) FilterFactory.and(gtEq7, ltEq7); - - GtEq gtEq8 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 700, false); - LtEq ltEq8 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); - And and8 = (And) FilterFactory.and(gtEq8, ltEq8); - - Or o4 = (Or) FilterFactory.or(and7, and8); - IntInterval ans4 = (IntInterval) new IntFilterVerifier().getInterval(o4); - System.out.println(ans4); - assertEquals(ans4.v[0], 200); assertEquals(ans4.flag[0], true); - assertEquals(ans4.v[1], 800); assertEquals(ans4.flag[1], true); - - // right first - GtEq gtEq9 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600, true); - LtEq ltEq9 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); - And and9 = (And) FilterFactory.and(gtEq9, ltEq9); - - GtEq gtEq10 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600, false); - LtEq ltEq10 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 700, true); - And and10 = (And) FilterFactory.and(gtEq10, ltEq10); - - Or o5 = (Or) FilterFactory.or(and9, and10); - IntInterval ans5 = (IntInterval) new IntFilterVerifier().getInterval(o5); - System.out.println(ans4); - assertEquals(ans5.v[0], 600); assertEquals(ans5.flag[0], true); - assertEquals(ans5.v[1], 800); assertEquals(ans5.flag[1], true); - - // left first cross (1) - GtEq gtEq11 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600, true); - LtEq ltEq11 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 700, true); - And and11 = (And) FilterFactory.and(gtEq11, ltEq11); - - GtEq gtEq12 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600, false); - LtEq ltEq12 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); - And and12 = (And) FilterFactory.and(gtEq12, ltEq12); - - Or o6 = (Or) FilterFactory.or(and11, and12); - IntInterval ans6 = (IntInterval) new IntFilterVerifier().getInterval(o6); - System.out.println(ans4); - assertEquals(ans6.v[0], 600); assertEquals(ans6.flag[0], true); - assertEquals(ans6.v[1], 800); assertEquals(ans6.flag[1], true); - } - - @Test - public void andOrBorderTest() { - - // And Operator - GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2, false); - LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2, false); - And and1 = (And) FilterFactory.and(gtEq1, ltEq1); - IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - and1 = (And) FilterFactory.and(ltEq1, gtEq1); - ans = (IntInterval) new IntFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - - 
gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2, true); - ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2, false); - and1 = (And) FilterFactory.and(gtEq1, ltEq1); - ans = (IntInterval) new IntFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - and1 = (And) FilterFactory.and(ltEq1, gtEq1); - ans = (IntInterval) new IntFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - - gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2, false); - ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2, true); - and1 = (And) FilterFactory.and(gtEq1, ltEq1); - ans = (IntInterval) new IntFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - and1 = (And) FilterFactory.and(ltEq1, gtEq1); - ans = (IntInterval) new IntFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - - // Or Operator - gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2, false); - ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2, false); - Or or1 = (Or) FilterFactory.or(gtEq1, ltEq1); - ans = (IntInterval) new IntFilterVerifier().getInterval(or1); - assertEquals(ans.count, 4); - assertEquals(ans.v[0], Integer.MIN_VALUE); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], 2L); assertEquals(ans.flag[1], false); - assertEquals(ans.v[2], 2L); assertEquals(ans.flag[2], false); - assertEquals(ans.v[3], Integer.MAX_VALUE); assertEquals(ans.flag[3], true); - or1 = (Or) FilterFactory.or(ltEq1, gtEq1); - ans = (IntInterval) new IntFilterVerifier().getInterval(or1); - assertEquals(ans.count, 4); - assertEquals(ans.v[0], Integer.MIN_VALUE); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], 2L); assertEquals(ans.flag[1], false); - assertEquals(ans.v[2], 2L); assertEquals(ans.flag[2], false); - assertEquals(ans.v[3], Integer.MAX_VALUE); assertEquals(ans.flag[3], true); - - gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2, true); - ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2, false); - or1 = (Or) FilterFactory.or(gtEq1, ltEq1); - ans = (IntInterval) new IntFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], Integer.MIN_VALUE); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Integer.MAX_VALUE); assertEquals(ans.flag[1], true); - or1 = (Or) FilterFactory.or(ltEq1, gtEq1); - ans = (IntInterval) new IntFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], Integer.MIN_VALUE); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Integer.MAX_VALUE); assertEquals(ans.flag[1], true); - - gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2, false); - ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2, true); - or1 = (Or) FilterFactory.or(gtEq1, ltEq1); - ans = (IntInterval) new IntFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], Integer.MIN_VALUE); 
assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Integer.MAX_VALUE); assertEquals(ans.flag[1], true); - or1 = (Or) FilterFactory.or(ltEq1, gtEq1); - ans = (IntInterval) new IntFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], Integer.MIN_VALUE); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Integer.MAX_VALUE); assertEquals(ans.flag[1], true); - } -} +package cn.edu.tsinghua.tsfile.timeseries.filter; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.SingleValueVisitor; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Not; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.IntInterval; +import org.junit.Test; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; +import cn.edu.tsinghua.tsfile.timeseries.filter.verifier.IntFilterVerifier; + +/** + * + * @author CGF + * + */ +public class FilterVerifierIntTest { + + private static String deltaObjectUID = "d"; + private static String measurementUID = "s"; + + @Test + public void eqTest() { + Eq eq = FilterFactory.eq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 45); + IntInterval x = (IntInterval) new IntFilterVerifier().getInterval(eq); + assertEquals(x.count, 2); + assertEquals(x.v[0], 45); + assertEquals(x.v[1], 45); + } + + @Test + public void ltEqTest() { + LtEq ltEq = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45, true); + IntInterval x = (IntInterval) new IntFilterVerifier().getInterval(ltEq); + assertEquals(x.count, 2); + assertEquals(x.v[0], Integer.MIN_VALUE); + assertEquals(x.v[1], 45); + } + + @Test + public void andOrTest() { + // [470,1200) & (500,800]|[1000,2000) ans:(500,800], [1000,1200) + + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 470, true); + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200, false); + And and1 = (And) FilterFactory.and(gtEq1, ltEq1); + + GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500, false); + LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); + And and2 = (And) FilterFactory.and(gtEq2, ltEq2); + + GtEq gtEq3 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000, true); + LtEq ltEq3 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000, false); + And and3 = (And) FilterFactory.and(gtEq3, ltEq3); + Or or1 = (Or) FilterFactory.or(and2, 
and3); + + And andCombine1 = (And) FilterFactory.and(and1, or1); + IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(andCombine1); + System.out.println(ans); + // ans.output(); + assertEquals(ans.count, 4); + assertEquals(ans.v[0], 500); + assertEquals(ans.flag[0], false); + assertEquals(ans.v[1], 800); + assertEquals(ans.flag[1], true); + assertEquals(ans.v[2], 1000); + assertEquals(ans.flag[2], true); + assertEquals(ans.v[3], 1200); + assertEquals(ans.flag[3], false); + + // for filter test coverage + // [400, 500) (600, 800] + GtEq gtEq4 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 400, true); + LtEq ltEq4 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500, false); + And and4 = (And) FilterFactory.and(gtEq4, ltEq4); + + GtEq gtEq5 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600, false); + LtEq ltEq5 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); + And and5 = (And) FilterFactory.and(gtEq5, ltEq5); + + And andNew = (And) FilterFactory.and(and4, and5); + IntInterval ansNew = (IntInterval) new IntFilterVerifier().getInterval(andNew); + + assertEquals(ansNew.count, 0); + + // for filter test coverage2 + // [600, 800] [400, 500] + GtEq gtEq6 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600, true); + LtEq ltEq6 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); + And and6 = (And) FilterFactory.and(gtEq6, ltEq6); + + GtEq gtEq7 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 400, false); + LtEq ltEq8 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500, true); + And and7 = (And) FilterFactory.and(gtEq7, ltEq8); + + And andCombine3 = (And) FilterFactory.and(and6, and7); + + IntInterval intervalAns = (IntInterval) new IntFilterVerifier().getInterval(andCombine3); + + assertEquals(intervalAns.count, 0); + } + + @Test + public void notEqTest() { + NotEq notEq = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000); + IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(notEq); + + assertEquals(ans.count, 4); + assertEquals(ans.v[0], Integer.MIN_VALUE); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], 1000); + assertEquals(ans.flag[1], false); + assertEquals(ans.v[2], 1000); + assertEquals(ans.flag[2], false); + assertEquals(ans.v[3], Integer.MAX_VALUE); + assertEquals(ans.flag[3], true); + } + + @Test + public void orTest() { + // [470,1200) | (500,800] | [1000,2000) | [100,200] ===> ans:[100,200], [470,2000) + + GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 470, true); + LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200, false); + And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); + + GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500, false); + LtEq ltEq_12 = 
FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); + And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); + + GtEq gtEq_13 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000, true); + LtEq ltEq_l3 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000, false); + And and3 = (And) FilterFactory.and(gtEq_13, ltEq_l3); + + GtEq gtEq_14 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 100, true); + LtEq ltEq_14 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 200, true); + And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); + + Or o1 = (Or) FilterFactory.or(and2, and3); + Or o2 = (Or) FilterFactory.or(o1, and4); + + Or or = (Or) FilterFactory.or(and1, o2); + IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(o2); + System.out.println(ans); + + SingleValueVisitor vistor = new SingleValueVisitor<>(or); + assertTrue(vistor.verify(500)); + assertTrue(vistor.verify(600)); + assertTrue(vistor.verify(1199)); + assertTrue(vistor.verify(1999)); + assertFalse(vistor.verify(5)); + assertFalse(vistor.verify(2000)); + assertFalse(vistor.verify(469)); + assertFalse(vistor.verify(99)); + assertTrue(vistor.verify(100)); + assertTrue(vistor.verify(200)); + assertFalse(vistor.verify(201)); + + } + + @Test + public void additionTest() { + + GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 4000, true); + LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 4500, false); + And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); + + GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500, false); + LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); + And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); + + And and = (And) FilterFactory.and(and1, and2); + IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(and); + assertEquals(ans.count, 0); + } + + @Test + public void notOperatorTest() { + + // [30,600) | (500,800] | [1000,2000) | [100,200] + + GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 30, true); + LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600, false); + // GtEq gtEq_112 = FilterApi.gtEq(FilterApi.intFilterSeries(deltaObjectUID, + // measurementUID, FilterSeriesType.VALUE_FILTER), 600, true); + // LtEq ltEq_112 = FilterApi.ltEq(FilterApi.intFilterSeries(deltaObjectUID, + // measurementUID, FilterSeriesType.VALUE_FILTER), 30, false); + And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); + // Or or1 = (Or) FilterApi.or(gtEq_112, ltEq_112); + + GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500, false); + LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); + // GtEq gtEq_122 = 
FilterApi.gtEq(FilterApi.intFilterSeries(deltaObjectUID, + // measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); + // LtEq ltEq_122 = FilterApi.ltEq(FilterApi.intFilterSeries(deltaObjectUID, + // measurementUID, FilterSeriesType.VALUE_FILTER), 500, true); + And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); + // Or or2 = (Or) FilterApi.or(gtEq_122, ltEq_122); + + GtEq gtEq_13 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000, true); + LtEq ltEq_l3 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000, false); + And and3 = (And) FilterFactory.and(gtEq_13, ltEq_l3); + + GtEq gtEq_14 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 100, true); + LtEq ltEq_14 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 200, true); + And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); + + + Or or_12 = (Or) FilterFactory.or(and1, and2); + Or or_34 = (Or) FilterFactory.or(and3, and4); + Or or = (Or) FilterFactory.or(or_12, or_34); + Not notAll = (Not) FilterFactory.not(or); + // IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(notAll); + + SingleValueVisitor vistor = new SingleValueVisitor<>(notAll); + assertFalse(vistor.verify(500)); + assertFalse(vistor.verify(600)); + assertFalse(vistor.verify(1199)); + assertFalse(vistor.verify(1999)); + assertTrue(vistor.verify(5)); + assertTrue(vistor.verify(2000)); + assertFalse(vistor.verify(469)); + assertFalse(vistor.verify(99)); + assertFalse(vistor.verify(100)); + assertFalse(vistor.verify(200)); + assertFalse(vistor.verify(201)); + } + + @Test + public void minMaxValueTest() { + + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), Integer.MAX_VALUE, true); + IntInterval i = (IntInterval) new IntFilterVerifier().getInterval(gtEq1); + assertEquals(i.v[0], Integer.MAX_VALUE); + assertEquals(i.v[1], Integer.MAX_VALUE); + assertEquals(i.flag[0], true); + assertEquals(i.flag[1], true); + + GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), Integer.MAX_VALUE, false); + IntInterval i2 = (IntInterval) new IntFilterVerifier().getInterval(gtEq2); + assertEquals(i2.v[0], Integer.MAX_VALUE); + assertEquals(i2.v[1], Integer.MAX_VALUE); + assertEquals(i2.flag[0], false); + assertEquals(i2.flag[1], false); + + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), Integer.MIN_VALUE, true); + IntInterval i3 = (IntInterval) new IntFilterVerifier().getInterval(ltEq1); + assertEquals(i3.v[0], Integer.MIN_VALUE); + assertEquals(i3.v[1], Integer.MIN_VALUE); + assertEquals(i3.flag[0], true); + assertEquals(i3.flag[1], true); + + LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), Integer.MIN_VALUE, false); + IntInterval i4 = (IntInterval) new IntFilterVerifier().getInterval(ltEq2); + assertEquals(i4.v[0], Integer.MIN_VALUE); + assertEquals(i4.v[1], Integer.MIN_VALUE); + assertEquals(i4.flag[0], false); + assertEquals(i4.flag[1], false); + + NotEq noteq = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 
Integer.MIN_VALUE); + IntInterval i5 = (IntInterval) new IntFilterVerifier().getInterval(noteq); + assertEquals(i5.v[0], Integer.MIN_VALUE); + assertEquals(i5.v[1], Integer.MIN_VALUE); + assertEquals(i5.v[2], Integer.MIN_VALUE); + assertEquals(i5.v[3], Integer.MAX_VALUE); + assertEquals(i5.flag[0], false); + assertEquals(i5.flag[1], false); + assertEquals(i5.flag[2], false); + assertEquals(i5.flag[3], true); + + NotEq noteq2 = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), Integer.MAX_VALUE); + IntInterval i6 = (IntInterval) new IntFilterVerifier().getInterval(noteq2); + assertEquals(i6.v[0], Integer.MIN_VALUE); + assertEquals(i6.v[1], Integer.MAX_VALUE); + assertEquals(i6.v[2], Integer.MAX_VALUE); + assertEquals(i6.v[3], Integer.MAX_VALUE); + assertEquals(i6.flag[0], true); + assertEquals(i6.flag[1], false); + assertEquals(i6.flag[2], false); + assertEquals(i6.flag[3], false); + } + + @Test + public void unionBugTest() { + + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500, true); + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); + And and1 = (And) FilterFactory.and(gtEq1, ltEq1); + + GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 200, false); + LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 300, true); + And and2 = (And) FilterFactory.and(gtEq2, ltEq2); + + GtEq gtEq3 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 350, false); + LtEq ltEq3 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 400, true); + And and3 = (And) FilterFactory.and(gtEq3, ltEq3); + Or o1 = (Or) FilterFactory.or(and2, and3); + + Or or = (Or) FilterFactory.or(and1, o1); + IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(or); + System.out.println(ans); + } + + @Test + public void unionCoverageTest() { + + // right first cross + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500, true); + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); + And and1 = (And) FilterFactory.and(gtEq1, ltEq1); + + GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 200, false); + LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600, true); + And and2 = (And) FilterFactory.and(gtEq2, ltEq2); + + Or o1 = (Or) FilterFactory.or(and1, and2); + IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(o1); + assertEquals(ans.v[0], 200); + assertEquals(ans.flag[0], false); + assertEquals(ans.v[1], 800); + assertEquals(ans.flag[1], false); + + // right covers left + GtEq gtEq3 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500, true); + LtEq ltEq3 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); + And and3 = (And) 
FilterFactory.and(gtEq3, ltEq3); + + GtEq gtEq4 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 200, false); + LtEq ltEq4 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1600, true); + And and4 = (And) FilterFactory.and(gtEq4, ltEq4); + + Or o2 = (Or) FilterFactory.or(and3, and4); + IntInterval ans2 = (IntInterval) new IntFilterVerifier().getInterval(o2); + // System.out.println(ans2); + assertEquals(ans2.v[0], 200); + assertEquals(ans2.flag[0], false); + assertEquals(ans2.v[1], 1600); + assertEquals(ans2.flag[1], true); + + // right first cross (2) + GtEq gtEq5 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 700, true); + LtEq ltEq5 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); + And and5 = (And) FilterFactory.and(gtEq5, ltEq5); + + GtEq gtEq6 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 200, false); + LtEq ltEq6 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); + And and6 = (And) FilterFactory.and(gtEq6, ltEq6); + + Or o3 = (Or) FilterFactory.or(and5, and6); + IntInterval ans3 = (IntInterval) new IntFilterVerifier().getInterval(o3); + // System.out.println(ans3); + assertEquals(ans3.v[0], 200); + assertEquals(ans3.flag[0], false); + assertEquals(ans3.v[1], 800); + assertEquals(ans3.flag[1], true); + + // left first cross (2) + GtEq gtEq7 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 200, true); + LtEq ltEq7 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, false); + And and7 = (And) FilterFactory.and(gtEq7, ltEq7); + + GtEq gtEq8 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 700, false); + LtEq ltEq8 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); + And and8 = (And) FilterFactory.and(gtEq8, ltEq8); + + Or o4 = (Or) FilterFactory.or(and7, and8); + IntInterval ans4 = (IntInterval) new IntFilterVerifier().getInterval(o4); + System.out.println(ans4); + assertEquals(ans4.v[0], 200); + assertEquals(ans4.flag[0], true); + assertEquals(ans4.v[1], 800); + assertEquals(ans4.flag[1], true); + + // right first + GtEq gtEq9 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600, true); + LtEq ltEq9 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); + And and9 = (And) FilterFactory.and(gtEq9, ltEq9); + + GtEq gtEq10 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600, false); + LtEq ltEq10 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 700, true); + And and10 = (And) FilterFactory.and(gtEq10, ltEq10); + + Or o5 = (Or) FilterFactory.or(and9, and10); + IntInterval ans5 = (IntInterval) new IntFilterVerifier().getInterval(o5); + System.out.println(ans4); + assertEquals(ans5.v[0], 600); + 
assertEquals(ans5.flag[0], true); + assertEquals(ans5.v[1], 800); + assertEquals(ans5.flag[1], true); + + // left first cross (1) + GtEq gtEq11 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600, true); + LtEq ltEq11 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 700, true); + And and11 = (And) FilterFactory.and(gtEq11, ltEq11); + + GtEq gtEq12 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600, false); + LtEq ltEq12 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800, true); + And and12 = (And) FilterFactory.and(gtEq12, ltEq12); + + Or o6 = (Or) FilterFactory.or(and11, and12); + IntInterval ans6 = (IntInterval) new IntFilterVerifier().getInterval(o6); + System.out.println(ans4); + assertEquals(ans6.v[0], 600); + assertEquals(ans6.flag[0], true); + assertEquals(ans6.v[1], 800); + assertEquals(ans6.flag[1], true); + } + + @Test + public void andOrBorderTest() { + + // And Operator + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2, false); + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2, false); + And and1 = (And) FilterFactory.and(gtEq1, ltEq1); + IntInterval ans = (IntInterval) new IntFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + and1 = (And) FilterFactory.and(ltEq1, gtEq1); + ans = (IntInterval) new IntFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + + gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2, true); + ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2, false); + and1 = (And) FilterFactory.and(gtEq1, ltEq1); + ans = (IntInterval) new IntFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + and1 = (And) FilterFactory.and(ltEq1, gtEq1); + ans = (IntInterval) new IntFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + + gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2, false); + ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2, true); + and1 = (And) FilterFactory.and(gtEq1, ltEq1); + ans = (IntInterval) new IntFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + and1 = (And) FilterFactory.and(ltEq1, gtEq1); + ans = (IntInterval) new IntFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + + // Or Operator + gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2, false); + ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2, false); + Or or1 = (Or) FilterFactory.or(gtEq1, ltEq1); + ans = (IntInterval) new IntFilterVerifier().getInterval(or1); + assertEquals(ans.count, 4); + assertEquals(ans.v[0], Integer.MIN_VALUE); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], 2L); + assertEquals(ans.flag[1], false); + assertEquals(ans.v[2], 2L); + assertEquals(ans.flag[2], false); + assertEquals(ans.v[3], 
Integer.MAX_VALUE); + assertEquals(ans.flag[3], true); + or1 = (Or) FilterFactory.or(ltEq1, gtEq1); + ans = (IntInterval) new IntFilterVerifier().getInterval(or1); + assertEquals(ans.count, 4); + assertEquals(ans.v[0], Integer.MIN_VALUE); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], 2L); + assertEquals(ans.flag[1], false); + assertEquals(ans.v[2], 2L); + assertEquals(ans.flag[2], false); + assertEquals(ans.v[3], Integer.MAX_VALUE); + assertEquals(ans.flag[3], true); + + gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2, true); + ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2, false); + or1 = (Or) FilterFactory.or(gtEq1, ltEq1); + ans = (IntInterval) new IntFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], Integer.MIN_VALUE); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Integer.MAX_VALUE); + assertEquals(ans.flag[1], true); + or1 = (Or) FilterFactory.or(ltEq1, gtEq1); + ans = (IntInterval) new IntFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], Integer.MIN_VALUE); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Integer.MAX_VALUE); + assertEquals(ans.flag[1], true); + + gtEq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2, false); + ltEq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2, true); + or1 = (Or) FilterFactory.or(gtEq1, ltEq1); + ans = (IntInterval) new IntFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], Integer.MIN_VALUE); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Integer.MAX_VALUE); + assertEquals(ans.flag[1], true); + or1 = (Or) FilterFactory.or(ltEq1, gtEq1); + ans = (IntInterval) new IntFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], Integer.MIN_VALUE); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Integer.MAX_VALUE); + assertEquals(ans.flag[1], true); + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierLongTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierLongTest.java index beee84df..d6264795 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierLongTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/FilterVerifierLongTest.java @@ -1,243 +1,294 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.SingleValueVisitor; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.utils.LongInterval; -import org.junit.Test; - -import 
cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; -import cn.edu.tsinghua.tsfile.timeseries.filter.verifier.LongFilterVerifier; - -/** - * - * @author CGF - * - */ -public class FilterVerifierLongTest { - - private static String deltaObjectUID = FilterTestConstant.deltaObjectUID; - private static String measurementUID = FilterTestConstant.measurementUID; - - @Test - public void eqTest() { - Eq eq = FilterFactory.eq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45L); - LongInterval x = new LongFilterVerifier().getInterval(eq); - assertEquals(x.count, 2); - assertEquals(x.v[0], 45L); - assertEquals(x.v[1], 45L); - } - - @Test - public void ltEqTest() { - LtEq ltEq = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45L, true); - LongInterval x= new LongFilterVerifier().getInterval(ltEq); - assertEquals(x.count, 2); - assertEquals(x.v[0], Long.MIN_VALUE); - assertEquals(x.v[1], 45L); - } - - @Test - public void andOrTest() { - // [470,1200) & (500,800]|[1000,2000) - - GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 470L, true); - LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200L, false); - And and1 = (And) FilterFactory.and(gtEq1, ltEq1); - - GtEq gtEq2 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500L, false); - LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800L, true); - And and2 = (And) FilterFactory.and(gtEq2, ltEq2); - - GtEq gtEq3 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000L, true); - LtEq ltEq3 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000L, false); - And and3 = (And) FilterFactory.and(gtEq3, ltEq3); - Or or1 = (Or) FilterFactory.or(and2, and3); - - And andCombine1 = (And) FilterFactory.and(and1, or1); - LongInterval ans = new LongFilterVerifier().getInterval(andCombine1); - // ans.output(); - assertEquals(ans.count, 4); - assertEquals(ans.v[0], 500L); - assertEquals(ans.flag[0], false); - assertEquals(ans.v[1], 800L); - assertEquals(ans.flag[1], true); - assertEquals(ans.v[2], 1000L); - assertEquals(ans.flag[2], true); - assertEquals(ans.v[3], 1200L); - assertEquals(ans.flag[3], false); - - // for filter test coverage - // [400, 500) (600, 800] - GtEq gtEq4 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 400L, true); - LtEq ltEq4 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500L, false); - And and4 = (And) FilterFactory.and(gtEq4, ltEq4); - - GtEq gtEq5 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600L, false); - LtEq ltEq5 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800L, true); - And and5 = (And) FilterFactory.and(gtEq5, ltEq5); - - And andNew = (And) FilterFactory.and(and4, and5); - LongInterval ansNew = new LongFilterVerifier().getInterval(andNew); 
- assertEquals(ansNew.count, 0); - - // for filter test coverage2 - // [600, 800] [400, 500] - GtEq gtEq6 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 600L, true); - LtEq ltEq6 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800L, false); - And and6 = (And) FilterFactory.and(gtEq6, ltEq6); - - GtEq gtEq7 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 400L, false); - LtEq ltEq8 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500L, true); - And and7 = (And) FilterFactory.and(gtEq7, ltEq8); - - And andCombine3 = (And) FilterFactory.and(and6, and7); - LongInterval intervalAns = new LongFilterVerifier().getInterval(andCombine3); - assertEquals(intervalAns.count, 0); - } - - @Test - public void andOrBorderTest() { - - // And Operator - GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2L, false); - LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2L, false); - And and1 = (And) FilterFactory.and(gtEq1, ltEq1); - LongInterval ans = new LongFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - and1 = (And) FilterFactory.and(ltEq1, gtEq1); - ans = new LongFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - - gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2L, true); - ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2L, false); - and1 = (And) FilterFactory.and(gtEq1, ltEq1); - ans = new LongFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - and1 = (And) FilterFactory.and(ltEq1, gtEq1); - ans = new LongFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - - gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2L, false); - ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2L, true); - and1 = (And) FilterFactory.and(gtEq1, ltEq1); - ans = new LongFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - and1 = (And) FilterFactory.and(ltEq1, gtEq1); - ans = new LongFilterVerifier().getInterval(and1); - assertEquals(ans.count, 0); - - // Or Operator - gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2L, false); - ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2L, false); - Or or1 = (Or) FilterFactory.or(gtEq1, ltEq1); - ans = new LongFilterVerifier().getInterval(or1); - assertEquals(ans.count, 4); - assertEquals(ans.v[0], Long.MIN_VALUE); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], 2L); assertEquals(ans.flag[1], false); - assertEquals(ans.v[2], 2L); assertEquals(ans.flag[2], false); - assertEquals(ans.v[3], Long.MAX_VALUE); assertEquals(ans.flag[3], true); - or1 = (Or) FilterFactory.or(ltEq1, gtEq1); - ans = new LongFilterVerifier().getInterval(or1); - assertEquals(ans.count, 4); - assertEquals(ans.v[0], Long.MIN_VALUE); assertEquals(ans.flag[0], true); - 
assertEquals(ans.v[1], 2L); assertEquals(ans.flag[1], false); - assertEquals(ans.v[2], 2L); assertEquals(ans.flag[2], false); - assertEquals(ans.v[3], Long.MAX_VALUE); assertEquals(ans.flag[3], true); - - gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2L, true); - ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2L, false); - or1 = (Or) FilterFactory.or(gtEq1, ltEq1); - ans = new LongFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], Long.MIN_VALUE); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Long.MAX_VALUE); assertEquals(ans.flag[1], true); - or1 = (Or) FilterFactory.or(ltEq1, gtEq1); - ans = new LongFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], Long.MIN_VALUE); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Long.MAX_VALUE); assertEquals(ans.flag[1], true); - - gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2L, false); - ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2L, true); - or1 = (Or) FilterFactory.or(gtEq1, ltEq1); - ans = new LongFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], Long.MIN_VALUE); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Long.MAX_VALUE); assertEquals(ans.flag[1], true); - or1 = (Or) FilterFactory.or(ltEq1, gtEq1); - ans = new LongFilterVerifier().getInterval(or1); - assertEquals(ans.count, 2); - assertEquals(ans.v[0], Long.MIN_VALUE); assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], Long.MAX_VALUE); assertEquals(ans.flag[1], true); - } - - @Test - public void notEqTest() { - NotEq notEq = FilterFactory.noteq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000L); - LongInterval ans = new LongFilterVerifier().getInterval(notEq); - - assertEquals(ans.count, 4); - assertEquals(ans.v[0], Long.MIN_VALUE); - assertEquals(ans.flag[0], true); - assertEquals(ans.v[1], 1000L); - assertEquals(ans.flag[1], false); - assertEquals(ans.v[2], 1000L); - assertEquals(ans.flag[2], false); - assertEquals(ans.v[3], Long.MAX_VALUE); - assertEquals(ans.flag[3], true); - } - - @Test - public void orTest() { - // [470,1200) | (500,800] | [1000,2000) | [100,200] - - GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 470L, true); - LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1200L, false); - And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); - - GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 500L, false); - LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 800L, true); - And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); - - GtEq gtEq_13 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 1000L, true); - LtEq ltEq_l3 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 2000L, false); - And and3 = (And) FilterFactory.and(gtEq_13, 
ltEq_l3); - - GtEq gtEq_14 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 100L, true); - LtEq ltEq_14 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 200L, true); - And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); - - Or o1 = (Or) FilterFactory.or(and2, and3); - Or o2 = (Or) FilterFactory.or(o1, and4); - - Or or = (Or) FilterFactory.or(and1, o2); - // LongInterval ans = (LongInterval) new LongFilterVerifier().getInterval(or); - // System.out.println(ans); - - // answer may have overlap, but is right - SingleValueVisitor vistor = new SingleValueVisitor<>(or); - assertTrue(vistor.verify(500L)); - assertTrue(vistor.verify(600L)); - assertTrue(vistor.verify(1199L)); - assertTrue(vistor.verify(1999L)); - assertFalse(vistor.verify(5L)); - assertFalse(vistor.verify(2000L)); - assertFalse(vistor.verify(469L)); - assertFalse(vistor.verify(99L)); - assertTrue(vistor.verify(100L)); - assertTrue(vistor.verify(200L)); - assertFalse(vistor.verify(201L)); - - } - -} +package cn.edu.tsinghua.tsfile.timeseries.filter; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.SingleValueVisitor; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.utils.LongInterval; +import org.junit.Test; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; +import cn.edu.tsinghua.tsfile.timeseries.filter.verifier.LongFilterVerifier; + +/** + * + * @author CGF + * + */ +public class FilterVerifierLongTest { + + private static String deltaObjectUID = FilterTestConstant.deltaObjectUID; + private static String measurementUID = FilterTestConstant.measurementUID; + + @Test + public void eqTest() { + Eq eq = FilterFactory.eq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 45L); + LongInterval x = new LongFilterVerifier().getInterval(eq); + assertEquals(x.count, 2); + assertEquals(x.v[0], 45L); + assertEquals(x.v[1], 45L); + } + + @Test + public void ltEqTest() { + LtEq ltEq = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45L, true); + LongInterval x = new LongFilterVerifier().getInterval(ltEq); + assertEquals(x.count, 2); + assertEquals(x.v[0], Long.MIN_VALUE); + assertEquals(x.v[1], 45L); + } + + @Test + public void andOrTest() { + // [470,1200) & (500,800]|[1000,2000) + + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 470L, true); + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200L, false); + And and1 = (And) FilterFactory.and(gtEq1, ltEq1); + + GtEq gtEq2 = 
FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500L, false); + LtEq ltEq2 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800L, true); + And and2 = (And) FilterFactory.and(gtEq2, ltEq2); + + GtEq gtEq3 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000L, true); + LtEq ltEq3 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000L, false); + And and3 = (And) FilterFactory.and(gtEq3, ltEq3); + Or or1 = (Or) FilterFactory.or(and2, and3); + + And andCombine1 = (And) FilterFactory.and(and1, or1); + LongInterval ans = new LongFilterVerifier().getInterval(andCombine1); + // ans.output(); + assertEquals(ans.count, 4); + assertEquals(ans.v[0], 500L); + assertEquals(ans.flag[0], false); + assertEquals(ans.v[1], 800L); + assertEquals(ans.flag[1], true); + assertEquals(ans.v[2], 1000L); + assertEquals(ans.flag[2], true); + assertEquals(ans.v[3], 1200L); + assertEquals(ans.flag[3], false); + + // for filter test coverage + // [400, 500) (600, 800] + GtEq gtEq4 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 400L, true); + LtEq ltEq4 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500L, false); + And and4 = (And) FilterFactory.and(gtEq4, ltEq4); + + GtEq gtEq5 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600L, false); + LtEq ltEq5 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800L, true); + And and5 = (And) FilterFactory.and(gtEq5, ltEq5); + + And andNew = (And) FilterFactory.and(and4, and5); + LongInterval ansNew = new LongFilterVerifier().getInterval(andNew); + assertEquals(ansNew.count, 0); + + // for filter test coverage2 + // [600, 800] [400, 500] + GtEq gtEq6 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 600L, true); + LtEq ltEq6 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800L, false); + And and6 = (And) FilterFactory.and(gtEq6, ltEq6); + + GtEq gtEq7 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 400L, false); + LtEq ltEq8 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500L, true); + And and7 = (And) FilterFactory.and(gtEq7, ltEq8); + + And andCombine3 = (And) FilterFactory.and(and6, and7); + LongInterval intervalAns = new LongFilterVerifier().getInterval(andCombine3); + assertEquals(intervalAns.count, 0); + } + + @Test + public void andOrBorderTest() { + + // And Operator + GtEq gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2L, false); + LtEq ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2L, false); + And and1 = (And) FilterFactory.and(gtEq1, ltEq1); + LongInterval ans = new LongFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + and1 = (And) FilterFactory.and(ltEq1, gtEq1); + ans = new 
LongFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + + gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2L, true); + ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2L, false); + and1 = (And) FilterFactory.and(gtEq1, ltEq1); + ans = new LongFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + and1 = (And) FilterFactory.and(ltEq1, gtEq1); + ans = new LongFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + + gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2L, false); + ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2L, true); + and1 = (And) FilterFactory.and(gtEq1, ltEq1); + ans = new LongFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + and1 = (And) FilterFactory.and(ltEq1, gtEq1); + ans = new LongFilterVerifier().getInterval(and1); + assertEquals(ans.count, 0); + + // Or Operator + gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2L, false); + ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2L, false); + Or or1 = (Or) FilterFactory.or(gtEq1, ltEq1); + ans = new LongFilterVerifier().getInterval(or1); + assertEquals(ans.count, 4); + assertEquals(ans.v[0], Long.MIN_VALUE); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], 2L); + assertEquals(ans.flag[1], false); + assertEquals(ans.v[2], 2L); + assertEquals(ans.flag[2], false); + assertEquals(ans.v[3], Long.MAX_VALUE); + assertEquals(ans.flag[3], true); + or1 = (Or) FilterFactory.or(ltEq1, gtEq1); + ans = new LongFilterVerifier().getInterval(or1); + assertEquals(ans.count, 4); + assertEquals(ans.v[0], Long.MIN_VALUE); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], 2L); + assertEquals(ans.flag[1], false); + assertEquals(ans.v[2], 2L); + assertEquals(ans.flag[2], false); + assertEquals(ans.v[3], Long.MAX_VALUE); + assertEquals(ans.flag[3], true); + + gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2L, true); + ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2L, false); + or1 = (Or) FilterFactory.or(gtEq1, ltEq1); + ans = new LongFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], Long.MIN_VALUE); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Long.MAX_VALUE); + assertEquals(ans.flag[1], true); + or1 = (Or) FilterFactory.or(ltEq1, gtEq1); + ans = new LongFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], Long.MIN_VALUE); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Long.MAX_VALUE); + assertEquals(ans.flag[1], true); + + gtEq1 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2L, false); + ltEq1 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 2L, true); + or1 = (Or) FilterFactory.or(gtEq1, ltEq1); + ans = new LongFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], Long.MIN_VALUE); + 
assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Long.MAX_VALUE); + assertEquals(ans.flag[1], true); + or1 = (Or) FilterFactory.or(ltEq1, gtEq1); + ans = new LongFilterVerifier().getInterval(or1); + assertEquals(ans.count, 2); + assertEquals(ans.v[0], Long.MIN_VALUE); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], Long.MAX_VALUE); + assertEquals(ans.flag[1], true); + } + + @Test + public void notEqTest() { + NotEq notEq = FilterFactory.noteq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000L); + LongInterval ans = new LongFilterVerifier().getInterval(notEq); + + assertEquals(ans.count, 4); + assertEquals(ans.v[0], Long.MIN_VALUE); + assertEquals(ans.flag[0], true); + assertEquals(ans.v[1], 1000L); + assertEquals(ans.flag[1], false); + assertEquals(ans.v[2], 1000L); + assertEquals(ans.flag[2], false); + assertEquals(ans.v[3], Long.MAX_VALUE); + assertEquals(ans.flag[3], true); + } + + @Test + public void orTest() { + // [470,1200) | (500,800] | [1000,2000) | [100,200] + + GtEq gtEq_11 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 470L, true); + LtEq ltEq_11 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1200L, false); + And and1 = (And) FilterFactory.and(gtEq_11, ltEq_11); + + GtEq gtEq_12 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 500L, false); + LtEq ltEq_12 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 800L, true); + And and2 = (And) FilterFactory.and(gtEq_12, ltEq_12); + + GtEq gtEq_13 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 1000L, true); + LtEq ltEq_l3 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 2000L, false); + And and3 = (And) FilterFactory.and(gtEq_13, ltEq_l3); + + GtEq gtEq_14 = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 100L, true); + LtEq ltEq_14 = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 200L, true); + And and4 = (And) FilterFactory.and(gtEq_14, ltEq_14); + + Or o1 = (Or) FilterFactory.or(and2, and3); + Or o2 = (Or) FilterFactory.or(o1, and4); + + Or or = (Or) FilterFactory.or(and1, o2); + // LongInterval ans = (LongInterval) new LongFilterVerifier().getInterval(or); + // System.out.println(ans); + + // the answer may contain overlapping intervals, but it is correct + SingleValueVisitor vistor = new SingleValueVisitor<>(or); + assertTrue(vistor.verify(500L)); + assertTrue(vistor.verify(600L)); + assertTrue(vistor.verify(1199L)); + assertTrue(vistor.verify(1999L)); + assertFalse(vistor.verify(5L)); + assertFalse(vistor.verify(2000L)); + assertFalse(vistor.verify(469L)); + assertFalse(vistor.verify(99L)); + assertTrue(vistor.verify(100L)); + assertTrue(vistor.verify(200L)); + assertFalse(vistor.verify(201L)); + + } + +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/IntervalTimeVisitorTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/IntervalTimeVisitorTest.java index d0422c13..a7327db1 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/IntervalTimeVisitorTest.java +++
b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/IntervalTimeVisitorTest.java @@ -1,70 +1,68 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.IntervalTimeVisitor; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Not; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; -import org.junit.Test; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; - -/** - * - * @author CGF - * - */ -public class IntervalTimeVisitorTest { - - private static final IntervalTimeVisitor filter = new IntervalTimeVisitor(); - private static String deltaObjectUID = FilterTestConstant.deltaObjectUID; - private static String measurementUID = FilterTestConstant.measurementUID; - - @Test - public void test() { - - Eq eq = FilterFactory.eq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, - FilterSeriesType.VALUE_FILTER), 45L); - assertTrue(filter.satisfy(eq, 10L, 50L)); - - NotEq noteq = FilterFactory.noteq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, - FilterSeriesType.VALUE_FILTER), 45L); - assertTrue(filter.satisfy(noteq, 10L, 30L)); - assertFalse(filter.satisfy(noteq, 45L, 45L)); - assertTrue(filter.satisfy(noteq, 45L, 46L)); - assertTrue(filter.satisfy(noteq, 20L, 46L)); - - LtEq lteq = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, - FilterSeriesType.VALUE_FILTER), 45L, true); - assertTrue(filter.satisfy(lteq, 10L, 50L)); - - GtEq gteq = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, - FilterSeriesType.VALUE_FILTER), 45L, true); - assertTrue(filter.satisfy(gteq, 10L, 50L)); - - LtEq left = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, - FilterSeriesType.VALUE_FILTER), 55L, true); - - - GtEq right = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, - FilterSeriesType.VALUE_FILTER), 35L, true); - - And andLeftRightNotEquals = (And) FilterFactory.and(left, right); - Or or = (Or) FilterFactory.or(left, right); - assertTrue(filter.satisfy(or, 10L, 50L)); - - Not not = (Not) FilterFactory.not(andLeftRightNotEquals); - assertTrue(filter.satisfy(andLeftRightNotEquals, 10L, 50L)); - assertFalse(filter.satisfy(not, 10L, 50L)); - - And andLeftRightEquals = (And) FilterFactory.and(left, left); - assertTrue(filter.satisfy(andLeftRightEquals, 55L, 55L)); - } -} +package cn.edu.tsinghua.tsfile.timeseries.filter; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; +import 
cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.IntervalTimeVisitor; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Not; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; +import org.junit.Test; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; + +/** + * + * @author CGF + * + */ +public class IntervalTimeVisitorTest { + + private static final IntervalTimeVisitor filter = new IntervalTimeVisitor(); + private static String deltaObjectUID = FilterTestConstant.deltaObjectUID; + private static String measurementUID = FilterTestConstant.measurementUID; + + @Test + public void test() { + + Eq eq = FilterFactory.eq(FilterFactory.longFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 45L); + assertTrue(filter.satisfy(eq, 10L, 50L)); + + NotEq noteq = FilterFactory.noteq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45L); + assertTrue(filter.satisfy(noteq, 10L, 30L)); + assertFalse(filter.satisfy(noteq, 45L, 45L)); + assertTrue(filter.satisfy(noteq, 45L, 46L)); + assertTrue(filter.satisfy(noteq, 20L, 46L)); + + LtEq lteq = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45L, true); + assertTrue(filter.satisfy(lteq, 10L, 50L)); + + GtEq gteq = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45L, true); + assertTrue(filter.satisfy(gteq, 10L, 50L)); + + LtEq left = FilterFactory.ltEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 55L, true); + + + GtEq right = FilterFactory.gtEq(FilterFactory.longFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 35L, true); + + And andLeftRightNotEquals = (And) FilterFactory.and(left, right); + Or or = (Or) FilterFactory.or(left, right); + assertTrue(filter.satisfy(or, 10L, 50L)); + + Not not = (Not) FilterFactory.not(andLeftRightNotEquals); + assertTrue(filter.satisfy(andLeftRightNotEquals, 10L, 50L)); + assertFalse(filter.satisfy(not, 10L, 50L)); + + And andLeftRightEquals = (And) FilterFactory.and(left, left); + assertTrue(filter.satisfy(andLeftRightEquals, 55L, 55L)); + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/InvertExpressionVisitorTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/InvertExpressionVisitorTest.java index 36f6089b..4a7a27ab 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/InvertExpressionVisitorTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/InvertExpressionVisitorTest.java @@ -1,97 +1,99 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.IntFilterSeries; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.SingleUnaryExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.InvertExpressionVisitor; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; -import 
cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * - * @author CGF - * - */ -public class InvertExpressionVisitorTest { - private static final Logger LOG = LoggerFactory.getLogger(InvertExpressionVisitorTest.class); - private static String deltaObjectINT = FilterTestConstant.deltaObjectINT; - private static String measurementINT = FilterTestConstant.measurementINT; - - private static final IntFilterSeries intFilterSeries = - FilterFactory.intFilterSeries(deltaObjectINT, measurementINT, FilterSeriesType.VALUE_FILTER); - - private static final InvertExpressionVisitor invertor = new InvertExpressionVisitor(); - - SingleSeriesFilterExpression ltEq = FilterFactory.ltEq(intFilterSeries, 60, true); - SingleSeriesFilterExpression gEq = FilterFactory.gtEq(intFilterSeries, 30, false); - SingleSeriesFilterExpression eq = FilterFactory.eq(intFilterSeries, 60); - - - @Test - public void testInvertUnaryOperator() { - - // ltEq(60, true) -> gEq(60, false); - FilterExpression notLtEq = invertor.invert(ltEq); - assertEquals(notLtEq.toString(), - "FilterSeries (" + deltaObjectINT + "," + measurementINT + ",INT32,VALUE_FILTER) > 60"); - - - // gEq(30, false) -> ltEq(30, true); - FilterExpression notGEq = invertor.invert(gEq); - // LOG.info(notGEq.toString()); - assertEquals(notGEq.toString(), - "FilterSeries (" + deltaObjectINT + "," + measurementINT + ",INT32,VALUE_FILTER) <= 30"); - - - // Eq(60) -> notEq(60); - FilterExpression noteq = invertor.invert(eq); - // LOG.info(noteq.toString()); - assertEquals(noteq.toString(), - "FilterSeries (" + deltaObjectINT + "," + measurementINT + ",INT32,VALUE_FILTER) != 60"); - - } - - @Test - public void testInvertBinaryOperator() { - - // AND(ltEq(60, true), gEq(30, false)) -> OR[gtEq(60, false), ltEq(30, true)] - FilterExpression and = FilterFactory.and(ltEq, gEq); - FilterExpression andInvert = invertor.invert(and); - assertEquals(andInvert.toString(), - "OR: ( FilterSeries (" + deltaObjectINT + "," + measurementINT + ",INT32,VALUE_FILTER) > 60," - + "FilterSeries (" + deltaObjectINT + "," + measurementINT +",INT32,VALUE_FILTER) <= 30 )"); - - - // OR(eq(60), not(ltEq(60,true)) -> AND(notEq(60), ltEq(60,true)); - FilterExpression or = FilterFactory.or(eq, FilterFactory.not(ltEq)); - FilterExpression orInvert = invertor.invert(or); - assertEquals(orInvert.toString(), - "AND: ( FilterSeries (" + deltaObjectINT + "," + measurementINT + ",INT32,VALUE_FILTER) != 60," - + "FilterSeries (" + deltaObjectINT +"," + measurementINT + ",INT32,VALUE_FILTER) <= 60 )"); - - - // { 20, [300,500), (500, 6000) } -> - SingleSeriesFilterExpression eqQ = FilterFactory.eq(intFilterSeries, 20); - SingleSeriesFilterExpression ltEqQ = FilterFactory.ltEq(intFilterSeries, 6000, false); - SingleSeriesFilterExpression gtEqQ = FilterFactory.gtEq(intFilterSeries, 300, true); - SingleSeriesFilterExpression notEqQ = FilterFactory.noteq(intFilterSeries, 500); - FilterExpression complex = FilterFactory.and(eqQ, FilterFactory.and(ltEqQ, FilterFactory.and(gtEqQ, notEqQ))); - FilterExpression complexInvert = invertor.invert(complex); - LOG.info(complexInvert.toString()); - assertTrue(complexInvert instanceof Or); - assertTrue(((Or)complexInvert).getRight() instanceof Or); - SingleUnaryExpression e = (SingleUnaryExpression) ((Or)complexInvert).getLeft(); - assertTrue((Integer)e.getValue() == 20); - - } - - -} 
+package cn.edu.tsinghua.tsfile.timeseries.filter; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.IntFilterSeries; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.SingleUnaryExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.InvertExpressionVisitor; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * + * @author CGF + * + */ +public class InvertExpressionVisitorTest { + private static final Logger LOG = LoggerFactory.getLogger(InvertExpressionVisitorTest.class); + private static String deltaObjectINT = FilterTestConstant.deltaObjectINT; + private static String measurementINT = FilterTestConstant.measurementINT; + + private static final IntFilterSeries intFilterSeries = + FilterFactory.intFilterSeries(deltaObjectINT, measurementINT, FilterSeriesType.VALUE_FILTER); + + private static final InvertExpressionVisitor invertor = new InvertExpressionVisitor(); + + SingleSeriesFilterExpression ltEq = FilterFactory.ltEq(intFilterSeries, 60, true); + SingleSeriesFilterExpression gEq = FilterFactory.gtEq(intFilterSeries, 30, false); + SingleSeriesFilterExpression eq = FilterFactory.eq(intFilterSeries, 60); + + + @Test + public void testInvertUnaryOperator() { + + // ltEq(60, true) -> gEq(60, false); + FilterExpression notLtEq = invertor.invert(ltEq); + assertEquals(notLtEq.toString(), + "FilterSeries (" + deltaObjectINT + "," + measurementINT + ",INT32,VALUE_FILTER) > 60"); + + + // gEq(30, false) -> ltEq(30, true); + FilterExpression notGEq = invertor.invert(gEq); + // LOG.info(notGEq.toString()); + assertEquals(notGEq.toString(), + "FilterSeries (" + deltaObjectINT + "," + measurementINT + ",INT32,VALUE_FILTER) <= 30"); + + + // Eq(60) -> notEq(60); + FilterExpression noteq = invertor.invert(eq); + // LOG.info(noteq.toString()); + assertEquals(noteq.toString(), + "FilterSeries (" + deltaObjectINT + "," + measurementINT + ",INT32,VALUE_FILTER) != 60"); + + } + + @Test + public void testInvertBinaryOperator() { + + // AND(ltEq(60, true), gEq(30, false)) -> OR[gtEq(60, false), ltEq(30, true)] + FilterExpression and = FilterFactory.and(ltEq, gEq); + FilterExpression andInvert = invertor.invert(and); + assertEquals(andInvert.toString(), + "OR: ( FilterSeries (" + deltaObjectINT + "," + measurementINT + + ",INT32,VALUE_FILTER) > 60," + "FilterSeries (" + deltaObjectINT + "," + + measurementINT + ",INT32,VALUE_FILTER) <= 30 )"); + + + // OR(eq(60), not(ltEq(60,true)) -> AND(notEq(60), ltEq(60,true)); + FilterExpression or = FilterFactory.or(eq, FilterFactory.not(ltEq)); + FilterExpression orInvert = invertor.invert(or); + assertEquals(orInvert.toString(), + "AND: ( FilterSeries (" + deltaObjectINT + "," + measurementINT + + ",INT32,VALUE_FILTER) != 60," + "FilterSeries (" + deltaObjectINT + "," + + measurementINT + ",INT32,VALUE_FILTER) <= 60 )"); + + + // { 20, [300,500), (500, 6000) } -> + SingleSeriesFilterExpression eqQ = FilterFactory.eq(intFilterSeries, 20); + SingleSeriesFilterExpression ltEqQ = 
FilterFactory.ltEq(intFilterSeries, 6000, false); + SingleSeriesFilterExpression gtEqQ = FilterFactory.gtEq(intFilterSeries, 300, true); + SingleSeriesFilterExpression notEqQ = FilterFactory.noteq(intFilterSeries, 500); + FilterExpression complex = + FilterFactory.and(eqQ, FilterFactory.and(ltEqQ, FilterFactory.and(gtEqQ, notEqQ))); + FilterExpression complexInvert = invertor.invert(complex); + LOG.info(complexInvert.toString()); + assertTrue(complexInvert instanceof Or); + assertTrue(((Or) complexInvert).getRight() instanceof Or); + SingleUnaryExpression e = (SingleUnaryExpression) ((Or) complexInvert).getLeft(); + assertTrue((Integer) e.getValue() == 20); + + } + + +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/NoFilterTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/NoFilterTest.java index c220d6a2..ca9734ae 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/NoFilterTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/NoFilterTest.java @@ -5,8 +5,8 @@ public class NoFilterTest { - @Test - public void noFilterTest() { + @Test + public void noFilterTest() { - } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/SingleValueVisitorTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/SingleValueVisitorTest.java index 6a068be6..b5d83623 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/SingleValueVisitorTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filter/SingleValueVisitorTest.java @@ -1,126 +1,138 @@ -package cn.edu.tsinghua.tsfile.timeseries.filter; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.SingleValueVisitor; -import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.SingleValueVisitorFactory; -import org.junit.Test; - -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Not; -import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; - -/** - * - * @author CGF - * - */ -public class SingleValueVisitorTest { - - private static final SingleValueVisitor int32Vistor = SingleValueVisitorFactory.getSingleValueVisitor(TSDataType.INT32); - private static String deltaObjectUID = FilterTestConstant.deltaObjectUID; - private static String measurementUID = FilterTestConstant.measurementUID; - - @Test - public void genericErrorTest() { - Eq eq = FilterFactory.eq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45); - SingleValueVisitor vistor = SingleValueVisitorFactory.getSingleValueVisitor(TSDataType.INT32); - //System.out.println(vistor.satisfyObject(10.0, eq)); - assertFalse(vistor.satisfyObject(45L, eq)); - } - - @Test - public void egTest() { - Eq eq = FilterFactory.eq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45); 
- assertFalse(int32Vistor.satisfyObject(10, eq)); - assertTrue(int32Vistor.satisfyObject(45, eq)); - } - - @Test - public void noteqTest() { - NotEq noteq = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45); - assertTrue(int32Vistor.satisfyObject(10, noteq)); - assertFalse(int32Vistor.satisfyObject(45, noteq)); - } - - @Test - public void ltTest() { - LtEq lteq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45, true); - assertTrue(int32Vistor.satisfyObject(45, lteq1)); - assertTrue(int32Vistor.satisfyObject(44, lteq1)); - assertTrue(int32Vistor.satisfyObject(0, lteq1)); - assertFalse(int32Vistor.satisfyObject(46, lteq1)); - assertFalse(int32Vistor.satisfyObject(146, lteq1)); - - LtEq lteq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45, false); - assertTrue(int32Vistor.satisfyObject(44, lteq2)); - assertTrue(int32Vistor.satisfyObject(0, lteq2)); - assertFalse(int32Vistor.satisfyObject(45, lteq2)); - assertFalse(int32Vistor.satisfyObject(46, lteq2)); - assertFalse(int32Vistor.satisfyObject(146, lteq2)); - } - - @Test - public void gtTest() { - GtEq gteq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45, true); - assertTrue(int32Vistor.satisfyObject(45, gteq1)); - assertTrue(int32Vistor.satisfyObject(46, gteq1)); - assertTrue(int32Vistor.satisfyObject(146, gteq1)); - assertFalse(int32Vistor.satisfyObject(44, gteq1)); - assertFalse(int32Vistor.satisfyObject(0, gteq1)); - - GtEq gteq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45, false); - assertTrue(int32Vistor.satisfyObject(46, gteq2)); - assertTrue(int32Vistor.satisfyObject(146, gteq2)); - assertFalse(int32Vistor.satisfyObject(44, gteq2)); - assertFalse(int32Vistor.satisfyObject(0, gteq2)); - assertFalse(int32Vistor.satisfyObject(45, gteq2)); - } - - @Test - public void notTest() { - GtEq gteq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45, true); - Not not = (Not) FilterFactory.not(gteq1); // < 45 - assertTrue(int32Vistor.satisfyObject(44, not)); - assertTrue(int32Vistor.satisfyObject(0, not)); - assertFalse(int32Vistor.satisfyObject(45, not)); - assertFalse(int32Vistor.satisfyObject(46, not)); - assertFalse(int32Vistor.satisfyObject(146, not)); - } - - @Test - public void andTest() { - GtEq gteq = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 5, true); - LtEq lteq = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 45, false); - And and = (And) FilterFactory.and(gteq, lteq); - assertTrue(int32Vistor.satisfyObject(5, and)); - assertTrue(int32Vistor.satisfyObject(44, and)); - assertTrue(int32Vistor.satisfyObject(40, and)); - assertFalse(int32Vistor.satisfyObject(45, and)); - assertFalse(int32Vistor.satisfyObject(46, and)); - assertFalse(int32Vistor.satisfyObject(11115, and)); - } - - @Test - public void orTest() { - GtEq gteq = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 550, false); - LtEq lteq = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, FilterSeriesType.VALUE_FILTER), 52, true); - Or or = 
(Or) FilterFactory.or(gteq, lteq); - - assertTrue(int32Vistor.satisfyObject(551, or)); - assertTrue(int32Vistor.satisfyObject(5500, or)); - assertTrue(int32Vistor.satisfyObject(52, or)); - assertTrue(int32Vistor.satisfyObject(51, or)); - assertTrue(int32Vistor.satisfyObject(5, or)); - assertFalse(int32Vistor.satisfyObject(550, or)); - assertFalse(int32Vistor.satisfyObject(53, or)); - } -} +package cn.edu.tsinghua.tsfile.timeseries.filter; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.NotEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.SingleValueVisitor; +import cn.edu.tsinghua.tsfile.timeseries.filter.visitorImpl.SingleValueVisitorFactory; +import org.junit.Test; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeriesType; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Eq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Not; +import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.Or; + +/** + * + * @author CGF + * + */ +public class SingleValueVisitorTest { + + private static final SingleValueVisitor int32Vistor = + SingleValueVisitorFactory.getSingleValueVisitor(TSDataType.INT32); + private static String deltaObjectUID = FilterTestConstant.deltaObjectUID; + private static String measurementUID = FilterTestConstant.measurementUID; + + @Test + public void genericErrorTest() { + Eq eq = FilterFactory.eq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 45); + SingleValueVisitor vistor = + SingleValueVisitorFactory.getSingleValueVisitor(TSDataType.INT32); + // System.out.println(vistor.satisfyObject(10.0, eq)); + assertFalse(vistor.satisfyObject(45L, eq)); + } + + @Test + public void egTest() { + Eq eq = FilterFactory.eq(FilterFactory.intFilterSeries(deltaObjectUID, measurementUID, + FilterSeriesType.VALUE_FILTER), 45); + assertFalse(int32Vistor.satisfyObject(10, eq)); + assertTrue(int32Vistor.satisfyObject(45, eq)); + } + + @Test + public void noteqTest() { + NotEq noteq = FilterFactory.noteq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45); + assertTrue(int32Vistor.satisfyObject(10, noteq)); + assertFalse(int32Vistor.satisfyObject(45, noteq)); + } + + @Test + public void ltTest() { + LtEq lteq1 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45, true); + assertTrue(int32Vistor.satisfyObject(45, lteq1)); + assertTrue(int32Vistor.satisfyObject(44, lteq1)); + assertTrue(int32Vistor.satisfyObject(0, lteq1)); + assertFalse(int32Vistor.satisfyObject(46, lteq1)); + assertFalse(int32Vistor.satisfyObject(146, lteq1)); + + LtEq lteq2 = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45, false); + assertTrue(int32Vistor.satisfyObject(44, lteq2)); + assertTrue(int32Vistor.satisfyObject(0, lteq2)); + assertFalse(int32Vistor.satisfyObject(45, lteq2)); + assertFalse(int32Vistor.satisfyObject(46, 
lteq2)); + assertFalse(int32Vistor.satisfyObject(146, lteq2)); + } + + @Test + public void gtTest() { + GtEq gteq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45, true); + assertTrue(int32Vistor.satisfyObject(45, gteq1)); + assertTrue(int32Vistor.satisfyObject(46, gteq1)); + assertTrue(int32Vistor.satisfyObject(146, gteq1)); + assertFalse(int32Vistor.satisfyObject(44, gteq1)); + assertFalse(int32Vistor.satisfyObject(0, gteq1)); + + GtEq gteq2 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45, false); + assertTrue(int32Vistor.satisfyObject(46, gteq2)); + assertTrue(int32Vistor.satisfyObject(146, gteq2)); + assertFalse(int32Vistor.satisfyObject(44, gteq2)); + assertFalse(int32Vistor.satisfyObject(0, gteq2)); + assertFalse(int32Vistor.satisfyObject(45, gteq2)); + } + + @Test + public void notTest() { + GtEq gteq1 = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45, true); + Not not = (Not) FilterFactory.not(gteq1); // < 45 + assertTrue(int32Vistor.satisfyObject(44, not)); + assertTrue(int32Vistor.satisfyObject(0, not)); + assertFalse(int32Vistor.satisfyObject(45, not)); + assertFalse(int32Vistor.satisfyObject(46, not)); + assertFalse(int32Vistor.satisfyObject(146, not)); + } + + @Test + public void andTest() { + GtEq gteq = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 5, true); + LtEq lteq = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 45, false); + And and = (And) FilterFactory.and(gteq, lteq); + assertTrue(int32Vistor.satisfyObject(5, and)); + assertTrue(int32Vistor.satisfyObject(44, and)); + assertTrue(int32Vistor.satisfyObject(40, and)); + assertFalse(int32Vistor.satisfyObject(45, and)); + assertFalse(int32Vistor.satisfyObject(46, and)); + assertFalse(int32Vistor.satisfyObject(11115, and)); + } + + @Test + public void orTest() { + GtEq gteq = FilterFactory.gtEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 550, false); + LtEq lteq = FilterFactory.ltEq(FilterFactory.intFilterSeries(deltaObjectUID, + measurementUID, FilterSeriesType.VALUE_FILTER), 52, true); + Or or = (Or) FilterFactory.or(gteq, lteq); + + assertTrue(int32Vistor.satisfyObject(551, or)); + assertTrue(int32Vistor.satisfyObject(5500, or)); + assertTrue(int32Vistor.satisfyObject(52, or)); + assertTrue(int32Vistor.satisfyObject(51, or)); + assertTrue(int32Vistor.satisfyObject(5, or)); + assertFalse(int32Vistor.satisfyObject(550, or)); + assertFalse(int32Vistor.satisfyObject(53, or)); + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/OperatorTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/OperatorTest.java index f3e89bb1..d500728a 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/OperatorTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/OperatorTest.java @@ -13,209 +13,220 @@ * Created by zhangjinrui on 2017/12/18. 
*/ public class OperatorTest { - private static final long EFFICIENCY_TEST_COUNT = 10000000; - private static final long TESTED_TIMESTAMP = 1513585371L; - private TimeValuePairFilterVisitorImpl timeValuePairFilterVisitor = new TimeValuePairFilterVisitorImpl(); - - @Test - public void testEq() { - Filter timeEq = TimeFilter.eq(100L); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(100, new TsPrimitiveType.TsInt(100)), timeEq)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(101, new TsPrimitiveType.TsInt(100)), timeEq)); - - Filter filter2 = FilterFactory.and(TimeFilter.eq(100L), ValueFilter.eq(50)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(100, new TsPrimitiveType.TsInt(50)), filter2)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(100, new TsPrimitiveType.TsInt(51)), filter2)); - - Filter filter3 = ValueFilter.eq(true); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(100, new TsPrimitiveType.TsBoolean(true)), filter3)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(100, new TsPrimitiveType.TsBoolean(false)), filter3)); - } - - @Test - public void testGt() { - Filter timeGt = TimeFilter.gt(TESTED_TIMESTAMP); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP + 1, new TsPrimitiveType.TsInt(100)), timeGt)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsInt(100)), timeGt)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsInt(100)), timeGt)); - - Filter valueGt = ValueFilter.gt(0.01f); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsFloat(0.02f)), valueGt)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsFloat(0.01f)), valueGt)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsFloat(-0.01f)), valueGt)); - - Filter binaryFilter = ValueFilter.gt(new Binary("test1")); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsBinary(new Binary("test2"))), binaryFilter)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsBinary(new Binary("test0"))), binaryFilter)); - } - - @Test - public void testGtEq() { - Filter timeGtEq = TimeFilter.gtEq(TESTED_TIMESTAMP); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP + 1, new TsPrimitiveType.TsInt(100)), timeGtEq)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsInt(100)), timeGtEq)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsInt(100)), timeGtEq)); - - Filter valueGtEq = ValueFilter.gtEq(0.01); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsDouble(0.02)), valueGtEq)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new 
TsPrimitiveType.TsDouble(0.01)), valueGtEq)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsDouble(-0.01)), valueGtEq)); - } - - @Test - public void testLt() { - Filter timeLt = TimeFilter.lt(TESTED_TIMESTAMP); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsInt(100)), timeLt)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsInt(100)), timeLt)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP + 1, new TsPrimitiveType.TsInt(100)), timeLt)); - - Filter valueLt = ValueFilter.lt(100L); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(99L)), valueLt)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(100L)), valueLt)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(101L)), valueLt)); - } - - @Test - public void testLtEq() { - Filter timeLtEq = TimeFilter.ltEq(TESTED_TIMESTAMP); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsInt(100)), timeLtEq)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsInt(100)), timeLtEq)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP + 1, new TsPrimitiveType.TsInt(100)), timeLtEq)); - - Filter valueLtEq = ValueFilter.ltEq(100L); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(99L)), valueLtEq)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(100L)), valueLtEq)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(101L)), valueLtEq)); - } - - @Test - public void testNoRestriction() { - Filter timeNoRestriction = TimeFilter.noRestriction(); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsInt(100)), timeNoRestriction)); + private static final long EFFICIENCY_TEST_COUNT = 10000000; + private static final long TESTED_TIMESTAMP = 1513585371L; + private TimeValuePairFilterVisitorImpl timeValuePairFilterVisitor = + new TimeValuePairFilterVisitorImpl(); + + @Test + public void testEq() { + Filter timeEq = TimeFilter.eq(100L); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(100, new TsPrimitiveType.TsInt(100)), timeEq)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(101, new TsPrimitiveType.TsInt(100)), timeEq)); + + Filter filter2 = FilterFactory.and(TimeFilter.eq(100L), ValueFilter.eq(50)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(100, new TsPrimitiveType.TsInt(50)), filter2)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(100, new TsPrimitiveType.TsInt(51)), filter2)); + + Filter filter3 = ValueFilter.eq(true); + Assert.assertEquals(true, timeValuePairFilterVisitor + 
.satisfy(new TimeValuePair(100, new TsPrimitiveType.TsBoolean(true)), filter3)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(100, new TsPrimitiveType.TsBoolean(false)), filter3)); + } + + @Test + public void testGt() { + Filter timeGt = TimeFilter.gt(TESTED_TIMESTAMP); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP + 1, new TsPrimitiveType.TsInt(100)), timeGt)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsInt(100)), timeGt)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsInt(100)), timeGt)); + + Filter valueGt = ValueFilter.gt(0.01f); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsFloat(0.02f)), valueGt)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsFloat(0.01f)), valueGt)); + Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( + new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsFloat(-0.01f)), valueGt)); + + Filter binaryFilter = ValueFilter.gt(new Binary("test1")); + Assert.assertEquals(true, + timeValuePairFilterVisitor.satisfy( + new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsBinary(new Binary("test2"))), + binaryFilter)); + Assert.assertEquals(false, + timeValuePairFilterVisitor.satisfy( + new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsBinary(new Binary("test0"))), + binaryFilter)); + } + + @Test + public void testGtEq() { + Filter timeGtEq = TimeFilter.gtEq(TESTED_TIMESTAMP); + Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( + new TimeValuePair(TESTED_TIMESTAMP + 1, new TsPrimitiveType.TsInt(100)), timeGtEq)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsInt(100)), timeGtEq)); + Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( + new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsInt(100)), timeGtEq)); + + Filter valueGtEq = ValueFilter.gtEq(0.01); + Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( + new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsDouble(0.02)), valueGtEq)); + Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( + new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsDouble(0.01)), valueGtEq)); + Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( + new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsDouble(-0.01)), valueGtEq)); + } + + @Test + public void testLt() { + Filter timeLt = TimeFilter.lt(TESTED_TIMESTAMP); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsInt(100)), timeLt)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsInt(100)), timeLt)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP + 1, new TsPrimitiveType.TsInt(100)), timeLt)); + + Filter valueLt = ValueFilter.lt(100L); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(99L)), valueLt)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new 
TsPrimitiveType.TsLong(100L)), valueLt)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(101L)), valueLt)); + } + + @Test + public void testLtEq() { + Filter timeLtEq = TimeFilter.ltEq(TESTED_TIMESTAMP); + Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( + new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsInt(100)), timeLtEq)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsInt(100)), timeLtEq)); + Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( + new TimeValuePair(TESTED_TIMESTAMP + 1, new TsPrimitiveType.TsInt(100)), timeLtEq)); + + Filter valueLtEq = ValueFilter.ltEq(100L); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(99L)), valueLtEq)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(100L)), valueLtEq)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(101L)), valueLtEq)); + } + + @Test + public void testNoRestriction() { + Filter timeNoRestriction = TimeFilter.noRestriction(); + Assert.assertEquals(true, + timeValuePairFilterVisitor.satisfy( + new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsInt(100)), + timeNoRestriction)); + + Filter valueNoRestriction = ValueFilter.noRestriction(); + Assert.assertEquals(true, + timeValuePairFilterVisitor.satisfy( + new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsLong(100L)), + valueNoRestriction)); + } + + @Test + public void testNot() { + Filter timeLt = TimeFilter.not(TimeFilter.lt(TESTED_TIMESTAMP)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsInt(100)), timeLt)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsInt(100)), timeLt)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP + 1, new TsPrimitiveType.TsInt(100)), timeLt)); + + Filter valueLt = ValueFilter.not(ValueFilter.lt(100L)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(99L)), valueLt)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(100L)), valueLt)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(101L)), valueLt)); + } + + @Test + public void testNotEq() { + Filter timeNotEq = TimeFilter.notEq(100L); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(100, new TsPrimitiveType.TsInt(100)), timeNotEq)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(101, new TsPrimitiveType.TsInt(100)), timeNotEq)); + + Filter valueNotEq = ValueFilter.notEq(50); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(100, new TsPrimitiveType.TsInt(50)), valueNotEq)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(100, new TsPrimitiveType.TsInt(51)), valueNotEq)); + } + + @Test + public void testAndOr() { + Filter andFilter = 
FilterFactory.and(TimeFilter.gt(100L), ValueFilter.lt(50.9)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(101L, new TsPrimitiveType.TsDouble(50)), andFilter)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(101L, new TsPrimitiveType.TsDouble(60)), andFilter)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(99L, new TsPrimitiveType.TsDouble(50)), andFilter)); + + Filter orFilter = FilterFactory.or(andFilter, TimeFilter.eq(1000L)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(101L, new TsPrimitiveType.TsDouble(50)), orFilter)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(101L, new TsPrimitiveType.TsDouble(60)), orFilter)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(1000L, new TsPrimitiveType.TsDouble(50)), orFilter)); + + Filter andFilter2 = FilterFactory.and(orFilter, ValueFilter.notEq(50.0)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(101L, new TsPrimitiveType.TsDouble(50)), andFilter2)); + Assert.assertEquals(false, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(101L, new TsPrimitiveType.TsDouble(60)), andFilter2)); + Assert.assertEquals(true, timeValuePairFilterVisitor + .satisfy(new TimeValuePair(1000L, new TsPrimitiveType.TsDouble(51)), andFilter2)); + } + + @Test + public void testWrongUsage() { + Filter andFilter = FilterFactory.and(TimeFilter.gt(100L), ValueFilter.lt(true)); + TimeValuePair timeValuePair = new TimeValuePair(101L, new TsPrimitiveType.TsLong(50)); + try { + timeValuePairFilterVisitor.satisfy(timeValuePair, andFilter); + Assert.fail(); + } catch (ClassCastException e) { - Filter valueNoRestriction = ValueFilter.noRestriction(); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsLong(100L)), valueNoRestriction)); } - - @Test - public void testNot() { - Filter timeLt = TimeFilter.not(TimeFilter.lt(TESTED_TIMESTAMP)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP - 1, new TsPrimitiveType.TsInt(100)), timeLt)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsInt(100)), timeLt)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP + 1, new TsPrimitiveType.TsInt(100)), timeLt)); - - Filter valueLt = ValueFilter.not(ValueFilter.lt(100L)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(99L)), valueLt)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(100L)), valueLt)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(TESTED_TIMESTAMP, new TsPrimitiveType.TsLong(101L)), valueLt)); - } - - @Test - public void testNotEq() { - Filter timeNotEq = TimeFilter.notEq(100L); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(100, new TsPrimitiveType.TsInt(100)), timeNotEq)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(101, new TsPrimitiveType.TsInt(100)), timeNotEq)); - - Filter valueNotEq = ValueFilter.notEq(50); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new 
TimeValuePair(100, new TsPrimitiveType.TsInt(50)), valueNotEq)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(100, new TsPrimitiveType.TsInt(51)), valueNotEq)); - } - - @Test - public void testAndOr() { - Filter andFilter = FilterFactory.and(TimeFilter.gt(100L), ValueFilter.lt(50.9)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(101L, new TsPrimitiveType.TsDouble(50)), andFilter)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(101L, new TsPrimitiveType.TsDouble(60)), andFilter)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(99L, new TsPrimitiveType.TsDouble(50)), andFilter)); - - Filter orFilter = FilterFactory.or(andFilter, TimeFilter.eq(1000L)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(101L, new TsPrimitiveType.TsDouble(50)), orFilter)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(101L, new TsPrimitiveType.TsDouble(60)), orFilter)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(1000L, new TsPrimitiveType.TsDouble(50)), orFilter)); - - Filter andFilter2 = FilterFactory.and(orFilter, ValueFilter.notEq(50.0)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(101L, new TsPrimitiveType.TsDouble(50)), andFilter2)); - Assert.assertEquals(false, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(101L, new TsPrimitiveType.TsDouble(60)), andFilter2)); - Assert.assertEquals(true, timeValuePairFilterVisitor.satisfy( - new TimeValuePair(1000L, new TsPrimitiveType.TsDouble(51)), andFilter2)); - } - - @Test - public void testWrongUsage() { - Filter andFilter = FilterFactory.and(TimeFilter.gt(100L), ValueFilter.lt(true)); - TimeValuePair timeValuePair = new TimeValuePair(101L, new TsPrimitiveType.TsLong(50)); - try { - timeValuePairFilterVisitor.satisfy(timeValuePair, andFilter); - Assert.fail(); - }catch (ClassCastException e){ - - } - } - - @Test - public void efficiencyTest() { - Filter andFilter = FilterFactory.and(TimeFilter.gt(100L), ValueFilter.lt(50.9)); - Filter orFilter = FilterFactory.or(andFilter, TimeFilter.eq(1000L)); - - long startTime = System.currentTimeMillis(); - for (long i = 0; i < EFFICIENCY_TEST_COUNT; i++) { - TimeValuePair tvPair = new TimeValuePair(Long.valueOf(i), new TsPrimitiveType.TsDouble(i + 0.1)); - timeValuePairFilterVisitor.satisfy(tvPair, orFilter); - } - long endTime = System.currentTimeMillis(); - System.out.println("EfficiencyTest for Filter: \n\tFilter Expression = " + orFilter + "\n\tCOUNT = " + EFFICIENCY_TEST_COUNT + - "\n\tTotal Time = " + (endTime - startTime) + "ms."); + } + + @Test + public void efficiencyTest() { + Filter andFilter = FilterFactory.and(TimeFilter.gt(100L), ValueFilter.lt(50.9)); + Filter orFilter = FilterFactory.or(andFilter, TimeFilter.eq(1000L)); + + long startTime = System.currentTimeMillis(); + for (long i = 0; i < EFFICIENCY_TEST_COUNT; i++) { + TimeValuePair tvPair = + new TimeValuePair(Long.valueOf(i), new TsPrimitiveType.TsDouble(i + 0.1)); + timeValuePairFilterVisitor.satisfy(tvPair, orFilter); } + long endTime = System.currentTimeMillis(); + System.out + .println("EfficiencyTest for Filter: \n\tFilter Expression = " + orFilter + "\n\tCOUNT = " + + EFFICIENCY_TEST_COUNT + "\n\tTotal Time = " + (endTime - startTime) + "ms."); + } } diff --git 
a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/QueryFilterOptimizerTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/QueryFilterOptimizerTest.java index 51c461d8..41fb56e3 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/QueryFilterOptimizerTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/filterV2/QueryFilterOptimizerTest.java @@ -14,7 +14,6 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; - import java.util.ArrayList; import java.util.List; @@ -23,165 +22,179 @@ */ public class QueryFilterOptimizerTest { - private QueryFilterOptimizer queryFilterOptimizer = QueryFilterOptimizer.getInstance(); - private List selectedSeries; - - @Before - public void before() { - selectedSeries = new ArrayList<>(); - selectedSeries.add(new Path("d1.s1")); - selectedSeries.add(new Path("d2.s1")); - selectedSeries.add(new Path("d1.s2")); - selectedSeries.add(new Path("d1.s2")); - } - - @After - public void after() { - selectedSeries.clear(); + private QueryFilterOptimizer queryFilterOptimizer = QueryFilterOptimizer.getInstance(); + private List selectedSeries; + + @Before + public void before() { + selectedSeries = new ArrayList<>(); + selectedSeries.add(new Path("d1.s1")); + selectedSeries.add(new Path("d2.s1")); + selectedSeries.add(new Path("d1.s2")); + selectedSeries.add(new Path("d1.s2")); + } + + @After + public void after() { + selectedSeries.clear(); + } + + @Test + public void testTimeOnly() { + try { + Filter timeFilter = TimeFilter.lt(100L); + QueryFilter queryFilter = new GlobalTimeFilter(timeFilter); + System.out.println(queryFilterOptimizer.convertGlobalTimeFilter(queryFilter, selectedSeries)); + + QueryFilter queryFilter2 = + QueryFilterFactory.or( + QueryFilterFactory.and(new GlobalTimeFilter(TimeFilter.lt(50L)), + new GlobalTimeFilter(TimeFilter.gt(10L))), + new GlobalTimeFilter(TimeFilter.gt(200L))); + QueryFilterPrinter + .print(queryFilterOptimizer.convertGlobalTimeFilter(queryFilter2, selectedSeries)); + + } catch (QueryFilterOptimizationException e) { + e.printStackTrace(); } - @Test - public void testTimeOnly() { - try { - Filter timeFilter = TimeFilter.lt(100L); - QueryFilter queryFilter = new GlobalTimeFilter(timeFilter); - System.out.println(queryFilterOptimizer.convertGlobalTimeFilter(queryFilter, selectedSeries)); - - QueryFilter queryFilter2 = QueryFilterFactory.or( - QueryFilterFactory.and(new GlobalTimeFilter(TimeFilter.lt(50L)), new GlobalTimeFilter(TimeFilter.gt(10L))), - new GlobalTimeFilter(TimeFilter.gt(200L))); - QueryFilterPrinter.print(queryFilterOptimizer.convertGlobalTimeFilter(queryFilter2, selectedSeries)); - } catch (QueryFilterOptimizationException e) { - e.printStackTrace(); - } + } + @Test + public void testSeriesOnly() { + try { + Filter filter1 = FilterFactory + .and(FilterFactory.or(ValueFilter.gt(100L), ValueFilter.lt(50L)), TimeFilter.gt(1400L)); + SeriesFilter seriesFilter1 = new SeriesFilter<>(new Path("d2.s1"), filter1); - } + Filter filter2 = FilterFactory.and( + FilterFactory.or(ValueFilter.gt(100.5f), ValueFilter.lt(50.6f)), TimeFilter.gt(1400L)); + SeriesFilter seriesFilter2 = new SeriesFilter<>(new Path("d1.s2"), filter2); - @Test - public void testSeriesOnly() { - try { - Filter filter1 = FilterFactory.and(FilterFactory.or( - ValueFilter.gt(100L), ValueFilter.lt(50L)), TimeFilter.gt(1400L)); - SeriesFilter seriesFilter1 = new SeriesFilter<>(new Path("d2.s1"), filter1); + Filter filter3 = FilterFactory + .or(FilterFactory.or(ValueFilter.gt(100.5), 
ValueFilter.lt(50.6)), TimeFilter.gt(1400L)); + SeriesFilter seriesFilter3 = new SeriesFilter<>(new Path("d2.s2"), filter3); - Filter filter2 = FilterFactory.and(FilterFactory.or( - ValueFilter.gt(100.5f), ValueFilter.lt(50.6f)), TimeFilter.gt(1400L)); - SeriesFilter seriesFilter2 = new SeriesFilter<>(new Path("d1.s2"), filter2); + QueryFilter queryFilter = QueryFilterFactory + .and(QueryFilterFactory.or(seriesFilter1, seriesFilter2), seriesFilter3); + Assert.assertEquals(true, queryFilter.toString().equals( + queryFilterOptimizer.convertGlobalTimeFilter(queryFilter, selectedSeries).toString())); - Filter filter3 = FilterFactory.or(FilterFactory.or( - ValueFilter.gt(100.5), ValueFilter.lt(50.6)), TimeFilter.gt(1400L)); - SeriesFilter seriesFilter3 = new SeriesFilter<>(new Path("d2.s2"), filter3); - - QueryFilter queryFilter = QueryFilterFactory.and(QueryFilterFactory.or(seriesFilter1, seriesFilter2), seriesFilter3); - Assert.assertEquals(true, queryFilter.toString().equals( - queryFilterOptimizer.convertGlobalTimeFilter(queryFilter, selectedSeries).toString())); - - } catch (QueryFilterOptimizationException e) { - e.printStackTrace(); - } + } catch (QueryFilterOptimizationException e) { + e.printStackTrace(); } - - @Test - public void testOneTimeAndSeries() { - Filter filter1 = FilterFactory.or(ValueFilter.gt(100L), ValueFilter.lt(50L)); - SeriesFilter seriesFilter1 = new SeriesFilter<>(new Path("d2.s1"), filter1); - - Filter filter2 = FilterFactory.or(ValueFilter.gt(100.5f), ValueFilter.lt(50.6f)); - SeriesFilter seriesFilter2 = new SeriesFilter<>(new Path("d1.s2"), filter2); - - Filter filter3 = FilterFactory.or(ValueFilter.gt(100.5), ValueFilter.lt(50.6)); - SeriesFilter seriesFilter3 = new SeriesFilter<>(new Path("d2.s2"), filter3); - - Filter timeFilter = TimeFilter.lt(14001234L); - QueryFilter globalTimeFilter = new GlobalTimeFilter(timeFilter); - QueryFilter queryFilter = QueryFilterFactory.and(QueryFilterFactory.or(seriesFilter1, seriesFilter2), globalTimeFilter); - QueryFilterPrinter.print(queryFilter); - try { - String rightRet = "[[d2.s1:((value > 100 || value < 50) && time < 14001234)] || [d1.s2:((value > 100.5 || value < 50.6) && time < 14001234)]]"; - QueryFilter regularFilter = queryFilterOptimizer.convertGlobalTimeFilter(queryFilter, selectedSeries); - Assert.assertEquals(true, rightRet.equals(regularFilter.toString())); - QueryFilterPrinter.print(regularFilter); - } catch (QueryFilterOptimizationException e) { - Assert.fail(); - } + } + + @Test + public void testOneTimeAndSeries() { + Filter filter1 = FilterFactory.or(ValueFilter.gt(100L), ValueFilter.lt(50L)); + SeriesFilter seriesFilter1 = new SeriesFilter<>(new Path("d2.s1"), filter1); + + Filter filter2 = FilterFactory.or(ValueFilter.gt(100.5f), ValueFilter.lt(50.6f)); + SeriesFilter seriesFilter2 = new SeriesFilter<>(new Path("d1.s2"), filter2); + + Filter filter3 = FilterFactory.or(ValueFilter.gt(100.5), ValueFilter.lt(50.6)); + SeriesFilter seriesFilter3 = new SeriesFilter<>(new Path("d2.s2"), filter3); + + Filter timeFilter = TimeFilter.lt(14001234L); + QueryFilter globalTimeFilter = new GlobalTimeFilter(timeFilter); + QueryFilter queryFilter = QueryFilterFactory + .and(QueryFilterFactory.or(seriesFilter1, seriesFilter2), globalTimeFilter); + QueryFilterPrinter.print(queryFilter); + try { + String rightRet = + "[[d2.s1:((value > 100 || value < 50) && time < 14001234)] || [d1.s2:((value > 100.5 || value < 50.6) && time < 14001234)]]"; + QueryFilter regularFilter = + 
queryFilterOptimizer.convertGlobalTimeFilter(queryFilter, selectedSeries); + Assert.assertEquals(true, rightRet.equals(regularFilter.toString())); + QueryFilterPrinter.print(regularFilter); + } catch (QueryFilterOptimizationException e) { + Assert.fail(); + } + } + + @Test + public void testOneTimeOrSeries() { + Filter filter1 = FilterFactory.or(ValueFilter.gt(100L), ValueFilter.lt(50L)); + SeriesFilter seriesFilter1 = new SeriesFilter<>(new Path("d2.s1"), filter1); + + Filter filter2 = FilterFactory.or(ValueFilter.gt(100.5f), ValueFilter.lt(50.6f)); + SeriesFilter seriesFilter2 = new SeriesFilter<>(new Path("d1.s2"), filter2); + + Filter filter3 = FilterFactory.or(ValueFilter.gt(100.5), ValueFilter.lt(50.6)); + SeriesFilter seriesFilter3 = new SeriesFilter<>(new Path("d2.s2"), filter3); + Filter timeFilter = TimeFilter.lt(14001234L); + QueryFilter globalTimeFilter = new GlobalTimeFilter(timeFilter); + QueryFilter queryFilter = QueryFilterFactory + .or(QueryFilterFactory.or(seriesFilter1, seriesFilter2), globalTimeFilter); + QueryFilterPrinter.print(queryFilter); + + try { + String rightRet = + "[[[[[d1.s1:time < 14001234] || [d2.s1:time < 14001234]] || [d1.s2:time < 14001234]] || " + + "[d1.s2:time < 14001234]] || [[d2.s1:(value > 100 || value < 50)] || [d1.s2:(value > 100.5 || value < 50.6)]]]"; + QueryFilter regularFilter = + queryFilterOptimizer.convertGlobalTimeFilter(queryFilter, selectedSeries); + Assert.assertEquals(true, rightRet.equals(regularFilter.toString())); + QueryFilterPrinter.print(regularFilter); + } catch (QueryFilterOptimizationException e) { + Assert.fail(); + } + } + + @Test + public void testTwoTimeCombine() { + Filter filter1 = FilterFactory.or(ValueFilter.gt(100L), ValueFilter.lt(50L)); + SeriesFilter seriesFilter1 = new SeriesFilter<>(new Path("d2.s1"), filter1); + + Filter filter2 = FilterFactory.or(ValueFilter.gt(100.5f), ValueFilter.lt(50.6f)); + SeriesFilter seriesFilter2 = new SeriesFilter<>(new Path("d1.s2"), filter2); + + Filter filter3 = FilterFactory.or(ValueFilter.gt(100.5), ValueFilter.lt(50.6)); + SeriesFilter seriesFilter3 = new SeriesFilter<>(new Path("d2.s2"), filter3); + + QueryFilter globalTimeFilter1 = new GlobalTimeFilter(TimeFilter.lt(14001234L)); + QueryFilter globalTimeFilter2 = new GlobalTimeFilter(TimeFilter.gt(14001000L)); + QueryFilter queryFilter = + QueryFilterFactory.or(QueryFilterFactory.or(seriesFilter1, seriesFilter2), + QueryFilterFactory.and(globalTimeFilter1, globalTimeFilter2)); + + try { + String rightRet = + "[[[[[d1.s1:(time < 14001234 && time > 14001000)] || [d2.s1:(time < 14001234 && time > 14001000)]] " + + "|| [d1.s2:(time < 14001234 && time > 14001000)]] || [d1.s2:(time < 14001234 && time > 14001000)]] " + + "|| [[d2.s1:(value > 100 || value < 50)] || [d1.s2:(value > 100.5 || value < 50.6)]]]"; + QueryFilter regularFilter = + queryFilterOptimizer.convertGlobalTimeFilter(queryFilter, selectedSeries); + Assert.assertEquals(true, rightRet.equals(regularFilter.toString())); + } catch (QueryFilterOptimizationException e) { + Assert.fail(); } - @Test - public void testOneTimeOrSeries() { - Filter filter1 = FilterFactory.or(ValueFilter.gt(100L), ValueFilter.lt(50L)); - SeriesFilter seriesFilter1 = new SeriesFilter<>( - new Path("d2.s1"), filter1); - - Filter filter2 = FilterFactory.or(ValueFilter.gt(100.5f), ValueFilter.lt(50.6f)); - SeriesFilter seriesFilter2 = new SeriesFilter<>( - new Path("d1.s2"), filter2); - - Filter filter3 = FilterFactory.or(ValueFilter.gt(100.5), ValueFilter.lt(50.6)); - SeriesFilter seriesFilter3 = 
new SeriesFilter<>( - new Path("d2.s2"), filter3); - Filter timeFilter = TimeFilter.lt(14001234L); - QueryFilter globalTimeFilter = new GlobalTimeFilter(timeFilter); - QueryFilter queryFilter = QueryFilterFactory.or(QueryFilterFactory.or(seriesFilter1, seriesFilter2), globalTimeFilter); - QueryFilterPrinter.print(queryFilter); - - try { - String rightRet = "[[[[[d1.s1:time < 14001234] || [d2.s1:time < 14001234]] || [d1.s2:time < 14001234]] || " + - "[d1.s2:time < 14001234]] || [[d2.s1:(value > 100 || value < 50)] || [d1.s2:(value > 100.5 || value < 50.6)]]]"; - QueryFilter regularFilter = queryFilterOptimizer.convertGlobalTimeFilter(queryFilter, selectedSeries); - Assert.assertEquals(true, rightRet.equals(regularFilter.toString())); - QueryFilterPrinter.print(regularFilter); - } catch (QueryFilterOptimizationException e) { - Assert.fail(); - } + QueryFilter queryFilter2 = + QueryFilterFactory.and(QueryFilterFactory.or(seriesFilter1, seriesFilter2), + QueryFilterFactory.and(globalTimeFilter1, globalTimeFilter2)); + + try { + String rightRet2 = + "[[d2.s1:((value > 100 || value < 50) && (time < 14001234 && time > 14001000))] || " + + "[d1.s2:((value > 100.5 || value < 50.6) && (time < 14001234 && time > 14001000))]]"; + QueryFilter regularFilter2 = + queryFilterOptimizer.convertGlobalTimeFilter(queryFilter2, selectedSeries); + Assert.assertEquals(true, rightRet2.equals(regularFilter2.toString())); + } catch (QueryFilterOptimizationException e) { + Assert.fail(); } - @Test - public void testTwoTimeCombine() { - Filter filter1 = FilterFactory.or(ValueFilter.gt(100L), ValueFilter.lt(50L)); - SeriesFilter seriesFilter1 = new SeriesFilter<>(new Path("d2.s1"), filter1); - - Filter filter2 = FilterFactory.or(ValueFilter.gt(100.5f), ValueFilter.lt(50.6f)); - SeriesFilter seriesFilter2 = new SeriesFilter<>(new Path("d1.s2"), filter2); - - Filter filter3 = FilterFactory.or(ValueFilter.gt(100.5), ValueFilter.lt(50.6)); - SeriesFilter seriesFilter3 = new SeriesFilter<>(new Path("d2.s2"), filter3); - - QueryFilter globalTimeFilter1 = new GlobalTimeFilter(TimeFilter.lt(14001234L)); - QueryFilter globalTimeFilter2 = new GlobalTimeFilter(TimeFilter.gt(14001000L)); - QueryFilter queryFilter = QueryFilterFactory.or(QueryFilterFactory.or(seriesFilter1, seriesFilter2), - QueryFilterFactory.and(globalTimeFilter1, globalTimeFilter2)); - - try { - String rightRet = "[[[[[d1.s1:(time < 14001234 && time > 14001000)] || [d2.s1:(time < 14001234 && time > 14001000)]] " + - "|| [d1.s2:(time < 14001234 && time > 14001000)]] || [d1.s2:(time < 14001234 && time > 14001000)]] " + - "|| [[d2.s1:(value > 100 || value < 50)] || [d1.s2:(value > 100.5 || value < 50.6)]]]"; - QueryFilter regularFilter = queryFilterOptimizer.convertGlobalTimeFilter(queryFilter, selectedSeries); - Assert.assertEquals(true, rightRet.equals(regularFilter.toString())); - } catch (QueryFilterOptimizationException e) { - Assert.fail(); - } - - QueryFilter queryFilter2 = QueryFilterFactory.and(QueryFilterFactory.or(seriesFilter1, seriesFilter2), - QueryFilterFactory.and(globalTimeFilter1, globalTimeFilter2)); - - try { - String rightRet2 = "[[d2.s1:((value > 100 || value < 50) && (time < 14001234 && time > 14001000))] || " + - "[d1.s2:((value > 100.5 || value < 50.6) && (time < 14001234 && time > 14001000))]]"; - QueryFilter regularFilter2 = queryFilterOptimizer.convertGlobalTimeFilter(queryFilter2, selectedSeries); - Assert.assertEquals(true, rightRet2.equals(regularFilter2.toString())); - } catch (QueryFilterOptimizationException e) { - Assert.fail(); - 
} - - QueryFilter queryFilter3 = QueryFilterFactory.or(queryFilter2, queryFilter); - QueryFilterPrinter.print(queryFilter3); - try { - QueryFilter regularFilter3 = queryFilterOptimizer.convertGlobalTimeFilter(queryFilter3, selectedSeries); - QueryFilterPrinter.print(regularFilter3); - } catch (QueryFilterOptimizationException e) { - Assert.fail(); - } + QueryFilter queryFilter3 = QueryFilterFactory.or(queryFilter2, queryFilter); + QueryFilterPrinter.print(queryFilter3); + try { + QueryFilter regularFilter3 = + queryFilterOptimizer.convertGlobalTimeFilter(queryFilter3, selectedSeries); + QueryFilterPrinter.print(regularFilter3); + } catch (QueryFilterOptimizationException e) { + Assert.fail(); } -} \ No newline at end of file + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/generator/GenerateBigDataCSV.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/generator/GenerateBigDataCSV.java index 15e26aad..2cbbcb45 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/generator/GenerateBigDataCSV.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/generator/GenerateBigDataCSV.java @@ -10,7 +10,6 @@ import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import java.io.File; import java.io.FileWriter; import java.io.IOException; @@ -20,213 +19,206 @@ import java.util.Set; public class GenerateBigDataCSV { - private static final Logger LOG = LoggerFactory.getLogger(GenerateBigDataCSV.class); - private static String inputDataFile; + private static final Logger LOG = LoggerFactory.getLogger(GenerateBigDataCSV.class); + private static String inputDataFile; - // private String[] deviceList; - // To be configure - private static int deviceCount = 3; - // s0:broken line - // s1:line - // s2:square wave with frequency noise - // s3:long for sin with glitch - // s4:log - private static int[][] brokenLineConfigs = { {1000, 1, 100}, {200000, 5, 200000}, - {10000, 2, 50000}}; - private static long[][] lineConfigs = { {1L << 32, 1}, {1L << 22, 4}, {10000, 2}}; - private static float[] squareAmplitude = {12.5f, 1273.143f, 1823767.4f}; - private static float[] squareBaseLine = {25f, 2273.143f, 2823767.4f}; - private static int[] squareLength = {300, 5000, 20000}; - private static double[][] sinAbnormalConfigs = { {0.28, 20}, {0.3, 100}, {0.35, 50}}; - // y = A*sin(wt), sinConfigs:w,A - private static double[][] sinConfigs = { {0.05, 2000}, {0.03, 1000}, {0.001, 200}}; - private static int[][] maxMinVertical = { {5000, 4000}, {1000, 800}, {100, 70}}; - private static int[][] maxMinHorizontal = { {5, 2}, {20, 10}, {50, 30}}; - private static double[] glitchProbability = {0.008, 0.01, 0.005}; + // private String[] deviceList; + // To be configure + private static int deviceCount = 3; + // s0:broken line + // s1:line + // s2:square wave with frequency noise + // s3:long for sin with glitch + // s4:log + private static int[][] brokenLineConfigs = + {{1000, 1, 100}, {200000, 5, 200000}, {10000, 2, 50000}}; + private static long[][] lineConfigs = {{1L << 32, 1}, {1L << 22, 4}, {10000, 2}}; + private static float[] squareAmplitude = {12.5f, 1273.143f, 1823767.4f}; + private static float[] squareBaseLine = {25f, 2273.143f, 2823767.4f}; + private static int[] squareLength = {300, 5000, 20000}; + private static double[][] sinAbnormalConfigs = {{0.28, 20}, {0.3, 100}, {0.35, 50}}; + // y = A*sin(wt), sinConfigs:w,A + private static double[][] sinConfigs = {{0.05, 2000}, {0.03, 1000}, {0.001, 200}}; + private static int[][] maxMinVertical = {{5000, 4000}, 
{1000, 800}, {100, 70}}; + private static int[][] maxMinHorizontal = {{5, 2}, {20, 10}, {50, 30}}; + private static double[] glitchProbability = {0.008, 0.01, 0.005}; - private static String deltaObjectType = "root.laptop"; + private static String deltaObjectType = "root.laptop"; - private static float freqWave[] = {0, 0, 0}; + private static float freqWave[] = {0, 0, 0}; - private static void getNextRecordToFile(long timestamp, long index, FileWriter fw) throws IOException { - for (int i = 0; i < deviceCount; i++) { - StringContainer sc = new StringContainer(","); - sc.addTail("d" + i, timestamp+index, deltaObjectType); - if (sensorSet.contains("s0")) { - // s0:broken line, int - if ((index % brokenLineConfigs[i][2]) == 0) - brokenLineConfigs[i][1] = -brokenLineConfigs[i][1]; - brokenLineConfigs[i][0] += brokenLineConfigs[i][1]; - if (brokenLineConfigs[i][0] < 0) { - brokenLineConfigs[i][0] = -brokenLineConfigs[i][0]; - brokenLineConfigs[i][1] = -brokenLineConfigs[i][1]; - } - sc.addTail("s0", brokenLineConfigs[i][0]); - } - if (sensorSet.contains("s1")) { - // s1:line, long - lineConfigs[i][0] += lineConfigs[i][1]; - if (lineConfigs[i][0] < 0) - lineConfigs[i][0] = 0; - sc.addTail("s1", lineConfigs[i][0]); - } - if (sensorSet.contains("s2")) {// s2:square wave, float - if ((index % squareLength[i]) == 0) { - squareAmplitude[i] = -squareAmplitude[i]; - if (hasWrittenFreq[i] == 0) { - if ((double) index == squareLength[i]) { - System.out.println("d"+i+":time:"+index+",sin sin"); - hasWrittenFreq[i] = 1; - } - } else if (hasWrittenFreq[i] == 1) { - hasWrittenFreq[i] = 2; - } - } - freqWave[i] = - (hasWrittenFreq[i] == 1) ? (float) (squareAmplitude[i] / 2 * Math - .sin(sinAbnormalConfigs[i][0] * 2 * Math.PI * index)) : 0; - sc.addTail("s2", freqWave[i] + squareBaseLine[i] + squareAmplitude[i]); - } - if (sensorSet.contains("s3")) { - // s3:sin, long - sc.addTail("s3", generateSinGlitch(timestamp+index, i)); + private static void getNextRecordToFile(long timestamp, long index, FileWriter fw) + throws IOException { + for (int i = 0; i < deviceCount; i++) { + StringContainer sc = new StringContainer(","); + sc.addTail("d" + i, timestamp + index, deltaObjectType); + if (sensorSet.contains("s0")) { + // s0:broken line, int + if ((index % brokenLineConfigs[i][2]) == 0) + brokenLineConfigs[i][1] = -brokenLineConfigs[i][1]; + brokenLineConfigs[i][0] += brokenLineConfigs[i][1]; + if (brokenLineConfigs[i][0] < 0) { + brokenLineConfigs[i][0] = -brokenLineConfigs[i][0]; + brokenLineConfigs[i][1] = -brokenLineConfigs[i][1]; + } + sc.addTail("s0", brokenLineConfigs[i][0]); + } + if (sensorSet.contains("s1")) { + // s1:line, long + lineConfigs[i][0] += lineConfigs[i][1]; + if (lineConfigs[i][0] < 0) + lineConfigs[i][0] = 0; + sc.addTail("s1", lineConfigs[i][0]); + } + if (sensorSet.contains("s2")) {// s2:square wave, float + if ((index % squareLength[i]) == 0) { + squareAmplitude[i] = -squareAmplitude[i]; + if (hasWrittenFreq[i] == 0) { + if ((double) index == squareLength[i]) { + System.out.println("d" + i + ":time:" + index + ",sin sin"); + hasWrittenFreq[i] = 1; } - fw.write(sc.toString() + "\r\n"); + } else if (hasWrittenFreq[i] == 1) { + hasWrittenFreq[i] = 2; + } } + freqWave[i] = + (hasWrittenFreq[i] == 1) + ? 
(float) (squareAmplitude[i] / 2 + * Math.sin(sinAbnormalConfigs[i][0] * 2 * Math.PI * index)) + : 0; + sc.addTail("s2", freqWave[i] + squareBaseLine[i] + squareAmplitude[i]); + } + if (sensorSet.contains("s3")) { + // s3:sin, long + sc.addTail("s3", generateSinGlitch(timestamp + index, i)); + } + fw.write(sc.toString() + "\r\n"); } + } - private static Random r = new Random(); - private static int[] width = {-1, -1, -1}; - private static int[] mid = {0, 0, 0}; - private static long[] upPeek = {0, 0, 0}; - private static long[] downPeek = {0, 0, 0}; - private static long[] base = {0, 0, 0}; - private static long[] startAbTime = {0, 0, 0}; + private static Random r = new Random(); + private static int[] width = {-1, -1, -1}; + private static int[] mid = {0, 0, 0}; + private static long[] upPeek = {0, 0, 0}; + private static long[] downPeek = {0, 0, 0}; + private static long[] base = {0, 0, 0}; + private static long[] startAbTime = {0, 0, 0}; - private static long generateSinGlitch(long t, int i) { - if (r.nextDouble() < glitchProbability[i] && width[i] == -1) { - startAbTime[i] = t; - base[i] = - (long) (maxMinVertical[i][0] + sinConfigs[i][1] + sinConfigs[i][1] - * Math.sin(sinConfigs[i][0] * t)); - width[i] = - r.nextInt(maxMinHorizontal[i][0] - maxMinHorizontal[i][1]) - + maxMinHorizontal[i][1]; + private static long generateSinGlitch(long t, int i) { + if (r.nextDouble() < glitchProbability[i] && width[i] == -1) { + startAbTime[i] = t; + base[i] = (long) (maxMinVertical[i][0] + sinConfigs[i][1] + + sinConfigs[i][1] * Math.sin(sinConfigs[i][0] * t)); + width[i] = + r.nextInt(maxMinHorizontal[i][0] - maxMinHorizontal[i][1]) + maxMinHorizontal[i][1]; - if (width[i] < 2) - width[i] = 2; - mid[i] = r.nextInt(width[i] - 1) + 1; - upPeek[i] = - maxMinVertical[i][1] + r.nextInt(maxMinVertical[i][0] - maxMinVertical[i][1]); - downPeek[i] = - maxMinVertical[i][1] + r.nextInt(maxMinVertical[i][0] - maxMinVertical[i][1]); - return base[i]; + if (width[i] < 2) + width[i] = 2; + mid[i] = r.nextInt(width[i] - 1) + 1; + upPeek[i] = maxMinVertical[i][1] + r.nextInt(maxMinVertical[i][0] - maxMinVertical[i][1]); + downPeek[i] = maxMinVertical[i][1] + r.nextInt(maxMinVertical[i][0] - maxMinVertical[i][1]); + return base[i]; + } else { + if (width[i] != -1) { + long value; + // up + if (t - startAbTime[i] <= mid[i]) { + value = (long) (base[i] + ((double) t - startAbTime[i]) / mid[i] * upPeek[i]); } else { - if (width[i] != -1) { - long value; - // up - if (t - startAbTime[i] <= mid[i]) { - value = (long) (base[i] + ((double) t - startAbTime[i]) / mid[i] * upPeek[i]); - } else { - value = - (long) (base[i] + upPeek[i] - ((double) t - mid[i] - startAbTime[i]) - / (width[i] - mid[i]) * downPeek[i]); - } - if (t - startAbTime[i] == width[i]) - width[i] = -1; - // down - return value; - } else { - return (long) (maxMinVertical[i][0] + sinConfigs[i][1] + sinConfigs[i][1] - * Math.sin(sinConfigs[i][0] * t)); - } + value = (long) (base[i] + upPeek[i] + - ((double) t - mid[i] - startAbTime[i]) / (width[i] - mid[i]) * downPeek[i]); } + if (t - startAbTime[i] == width[i]) + width[i] = -1; + // down + return value; + } else { + return (long) (maxMinVertical[i][0] + sinConfigs[i][1] + + sinConfigs[i][1] * Math.sin(sinConfigs[i][0] * t)); + } } + } - private static JSONObject generateTestDataSchema(String schemaOutputFilePath) throws IOException { - TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); - JSONObject s0 = new JSONObject(); - s0.put(JsonFormatConstant.MEASUREMENT_UID, "s0"); - 
s0.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); - s0.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s1 = new JSONObject(); - s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); - s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s2 = new JSONObject(); - s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); - s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); - s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s3 = new JSONObject(); - s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); - s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); - s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s4 = new JSONObject(); - s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); - s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); - s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.PLAIN.toString()); - JSONArray measureGroup1 = new JSONArray(); - measureGroup1.put(s0); - measureGroup1.put(s1); - measureGroup1.put(s2); - measureGroup1.put(s3); - measureGroup1.put(s4); + private static JSONObject generateTestDataSchema(String schemaOutputFilePath) throws IOException { + TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); + JSONObject s0 = new JSONObject(); + s0.put(JsonFormatConstant.MEASUREMENT_UID, "s0"); + s0.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); + s0.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s1 = new JSONObject(); + s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); + s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s2 = new JSONObject(); + s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); + s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); + s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s3 = new JSONObject(); + s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); + s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); + s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s4 = new JSONObject(); + s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); + s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); + s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.PLAIN.toString()); + JSONArray measureGroup1 = new JSONArray(); + measureGroup1.put(s0); + measureGroup1.put(s1); + measureGroup1.put(s2); + measureGroup1.put(s3); + measureGroup1.put(s4); - JSONObject jsonSchema = new JSONObject(); - jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); - jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1); - File file = new File(schemaOutputFilePath); - FileWriter fw = new FileWriter(file); - fw.write(jsonSchema.toString()); - fw.close(); - return jsonSchema; - } + JSONObject jsonSchema = new JSONObject(); + jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); + jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1); + File file = new File(schemaOutputFilePath); + FileWriter fw = new FileWriter(file); + fw.write(jsonSchema.toString()); + fw.close(); + return jsonSchema; + } - private static Set sensorSet = new HashSet<>(); - private static long lineCount; - private static double writeFreqFraction[] = {0, 0, 0}; - // 0:not 
write->1:writing->2:written - private static int hasWrittenFreq[] = {0, 0, 0}; + private static Set sensorSet = new HashSet<>(); + private static long lineCount; + private static double writeFreqFraction[] = {0, 0, 0}; + // 0:not write->1:writing->2:written + private static int hasWrittenFreq[] = {0, 0, 0}; - public static void main(String[] args) throws IOException, InterruptedException { - if (args.length < 4) { - System.err.println("sensorName:s0:int,s1:long,s2:float,s3:long sin"); - System.err - .println("input format: [...]"); - return; - } - LOG.info("write start!"); - inputDataFile = args[0]; - String schemaOutputFilePath = args[1]; - lineCount = Long.valueOf(args[2]); - sensorSet.addAll(Arrays.asList(args).subList(3, args.length)); - generateTestDataSchema(schemaOutputFilePath); - int i = 0; - File file = new File(inputDataFile); - if (file.exists()) - file.delete(); - FileWriter fw = new FileWriter(file); - deviceCount = 3; - for (int j = 0; j < deviceCount; j++) { - writeFreqFraction[i] = r.nextDouble(); - } + public static void main(String[] args) throws IOException, InterruptedException { + if (args.length < 4) { + System.err.println("sensorName:s0:int,s1:long,s2:float,s3:long sin"); + System.err.println( + "input format: [...]"); + return; + } + LOG.info("write start!"); + inputDataFile = args[0]; + String schemaOutputFilePath = args[1]; + lineCount = Long.valueOf(args[2]); + sensorSet.addAll(Arrays.asList(args).subList(3, args.length)); + generateTestDataSchema(schemaOutputFilePath); + int i = 0; + File file = new File(inputDataFile); + if (file.exists()) + file.delete(); + FileWriter fw = new FileWriter(file); + deviceCount = 3; + for (int j = 0; j < deviceCount; j++) { + writeFreqFraction[i] = r.nextDouble(); + } - long start = System.currentTimeMillis(); - // TODO to be changed - // start = 0; - while (i < lineCount) { - if (i % 1000000 == 0) { - LOG.info("generate line count:{}", i); - } - getNextRecordToFile(start,i, fw); - i++; - } - fw.close(); - LOG.info("write finished!"); + long start = System.currentTimeMillis(); + // TODO to be changed + // start = 0; + while (i < lineCount) { + if (i % 1000000 == 0) { + LOG.info("generate line count:{}", i); + } + getNextRecordToFile(start, i, fw); + i++; } + fw.close(); + LOG.info("write finished!"); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/generator/GenerateBigTSFile.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/generator/GenerateBigTSFile.java index 87f4afdd..084f390f 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/generator/GenerateBigTSFile.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/generator/GenerateBigTSFile.java @@ -5,12 +5,10 @@ import java.io.IOException; import java.util.HashSet; import java.util.Set; - import org.json.JSONArray; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; @@ -25,176 +23,165 @@ import cn.edu.tsinghua.tsfile.timeseries.write.schema.FileSchema; public class GenerateBigTSFile { - private static final Logger LOG = LoggerFactory.getLogger(GenerateBigTSFile.class); - private static TsFileWriter writer; - private static String outputDataFile; - private static TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); - - private static int setRowGroupSize = conf.groupSizeInByte; - // To be configure - private static int 
deviceCount = 3; - // s0:broken line - // s1:line - // s2:sin - // s3:square wave - // s4:log - private static int[][] brokenLineConfigs = {{100, 1, 100}, {0, -1, 200000}, {10000, 2, 50000}}; - private static long[][] lineConfigs = {{1L << 32, 1}, {0, -1}, {10000, 2}}; - private static float[] squareAmplitude = {12.5f, 1273.143f, 1823767.4f}; - private static float[] squareBaseLine = {25f, 1273.143f, 1823767.4f}; - private static int[] squareLength = {150, 5000, 20000}; - // y = A*sin(wt), sinConfigs:w,A - private static double[][] sinConfigs = {{0.05, 10}, {0.3, 100}, {2, 50}}; - private static double[][] sinAbnormalConfigs = {{0.8, 20}, {0.3, 100}, {2, 50}}; - private static String deltaObjectType = "root.laptop"; - - - private static void getNextRecord(long timestamp, long index) throws IOException { - for (int i = 0; i < deviceCount; i++) { - StringContainer sc = new StringContainer(","); - sc.addTail("d" + i, timestamp, deltaObjectType); - if (sensorSet.contains("s0")) { - // s0:broken line, int - if ((timestamp % brokenLineConfigs[i][2]) == 0) - brokenLineConfigs[i][1] = -brokenLineConfigs[i][1]; - brokenLineConfigs[i][0] += brokenLineConfigs[i][1]; - sc.addTail("s0", brokenLineConfigs[i][0]); - } - if (sensorSet.contains("s1")) { - // s1:line, long - lineConfigs[i][0] += lineConfigs[i][1]; - if (lineConfigs[i][0] < 0) - lineConfigs[i][0] = 0; - sc.addTail("s1", lineConfigs[i][0]); - } - if (sensorSet.contains("s2")) { - // s2:square wave, float - if ((timestamp % squareLength[i]) == 0) - squareAmplitude[i] = -squareAmplitude[i]; - sc.addTail("s2", squareBaseLine[i] + squareAmplitude[i]); - } - if (sensorSet.contains("s3")) { - // s3:sin, double - if (index > 5000 && index < 8000) - sc.addTail( - "s3", - sinAbnormalConfigs[i][1] + sinAbnormalConfigs[i][1] - * Math.sin(sinAbnormalConfigs[i][0] * timestamp)); - else - sc.addTail( - "s3", - sinConfigs[i][1] + sinConfigs[i][1] - * Math.sin(sinConfigs[i][0] * timestamp)); - } - strLines[i] = sc.toString(); - } + private static final Logger LOG = LoggerFactory.getLogger(GenerateBigTSFile.class); + private static TsFileWriter writer; + private static String outputDataFile; + private static TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); + + private static int setRowGroupSize = conf.groupSizeInByte; + // To be configure + private static int deviceCount = 3; + // s0:broken line + // s1:line + // s2:sin + // s3:square wave + // s4:log + private static int[][] brokenLineConfigs = {{100, 1, 100}, {0, -1, 200000}, {10000, 2, 50000}}; + private static long[][] lineConfigs = {{1L << 32, 1}, {0, -1}, {10000, 2}}; + private static float[] squareAmplitude = {12.5f, 1273.143f, 1823767.4f}; + private static float[] squareBaseLine = {25f, 1273.143f, 1823767.4f}; + private static int[] squareLength = {150, 5000, 20000}; + // y = A*sin(wt), sinConfigs:w,A + private static double[][] sinConfigs = {{0.05, 10}, {0.3, 100}, {2, 50}}; + private static double[][] sinAbnormalConfigs = {{0.8, 20}, {0.3, 100}, {2, 50}}; + private static String deltaObjectType = "root.laptop"; + + + private static void getNextRecord(long timestamp, long index) throws IOException { + for (int i = 0; i < deviceCount; i++) { + StringContainer sc = new StringContainer(","); + sc.addTail("d" + i, timestamp, deltaObjectType); + if (sensorSet.contains("s0")) { + // s0:broken line, int + if ((timestamp % brokenLineConfigs[i][2]) == 0) + brokenLineConfigs[i][1] = -brokenLineConfigs[i][1]; + brokenLineConfigs[i][0] += brokenLineConfigs[i][1]; + sc.addTail("s0", 
brokenLineConfigs[i][0]); + } + if (sensorSet.contains("s1")) { + // s1:line, long + lineConfigs[i][0] += lineConfigs[i][1]; + if (lineConfigs[i][0] < 0) + lineConfigs[i][0] = 0; + sc.addTail("s1", lineConfigs[i][0]); + } + if (sensorSet.contains("s2")) { + // s2:square wave, float + if ((timestamp % squareLength[i]) == 0) + squareAmplitude[i] = -squareAmplitude[i]; + sc.addTail("s2", squareBaseLine[i] + squareAmplitude[i]); + } + if (sensorSet.contains("s3")) { + // s3:sin, double + if (index > 5000 && index < 8000) + sc.addTail("s3", sinAbnormalConfigs[i][1] + + sinAbnormalConfigs[i][1] * Math.sin(sinAbnormalConfigs[i][0] * timestamp)); + else + sc.addTail("s3", + sinConfigs[i][1] + sinConfigs[i][1] * Math.sin(sinConfigs[i][0] * timestamp)); + } + strLines[i] = sc.toString(); } + } - private static String[] strLines; - - private static Set sensorSet = new HashSet<>(); - - private static void writeToFile(long spaceLimit) throws InterruptedException, IOException { - long lineCount = 0; - long startTime = System.currentTimeMillis(); - long endTime; - long currentSpace = 0; - long startTimestamp = System.currentTimeMillis(); - while (currentSpace < spaceLimit) { - if (lineCount % 1000000 == 0) { - endTime = System.currentTimeMillis(); - currentSpace = - (long) FileUtils.getLocalFileByte(outputDataFile, FileUtils.Unit.B) - + writer.calculateMemSizeForAllGroup(); - LOG.info("write line:{},use time:{}s, space:{}", lineCount, - (endTime - startTime) / 1000, - FileUtils.transformUnit(currentSpace, FileUtils.Unit.MB)); + private static String[] strLines; - } - getNextRecord(startTimestamp + lineCount, lineCount); - try { + private static Set sensorSet = new HashSet<>(); - for (String str : strLines) { - TSRecord ts = RecordUtils.parseSimpleTupleRecord(str, fileSchema); - writer.write(ts); - } - } catch (WriteProcessException e) { - e.printStackTrace(); - } - lineCount++; - } - writer.close(); + private static void writeToFile(long spaceLimit) throws InterruptedException, IOException { + long lineCount = 0; + long startTime = System.currentTimeMillis(); + long endTime; + long currentSpace = 0; + long startTimestamp = System.currentTimeMillis(); + while (currentSpace < spaceLimit) { + if (lineCount % 1000000 == 0) { endTime = System.currentTimeMillis(); - LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, FileUtils.Unit.MB)); - } - - private static JSONObject generateTestSchema() { - conf = TSFileDescriptor.getInstance().getConfig(); - JSONObject s0 = new JSONObject(); - s0.put(JsonFormatConstant.MEASUREMENT_UID, "s0"); - s0.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); - s0.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s1 = new JSONObject(); - s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); - s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s2 = new JSONObject(); - s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); - s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); - s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s3 = new JSONObject(); - s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); - s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); - s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s4 = new JSONObject(); - 
s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); - s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); - s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.PLAIN.toString()); - JSONArray measureGroup1 = new JSONArray(); - measureGroup1.put(s0); - measureGroup1.put(s1); - measureGroup1.put(s2); - measureGroup1.put(s3); - measureGroup1.put(s4); - - JSONObject jsonSchema = new JSONObject(); - jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); - jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1); - return jsonSchema; - } - - private static FileSchema fileSchema; - - public static void main(String[] args) throws IOException, InterruptedException, WriteProcessException { - if (args.length < 3) { - System.err.println("input format: [rowGroupSize(MB)]"); - return; + currentSpace = (long) FileUtils.getLocalFileByte(outputDataFile, FileUtils.Unit.B) + + writer.calculateMemSizeForAllGroup(); + LOG.info("write line:{},use time:{}s, space:{}", lineCount, (endTime - startTime) / 1000, + FileUtils.transformUnit(currentSpace, FileUtils.Unit.MB)); + + } + getNextRecord(startTimestamp + lineCount, lineCount); + try { + + for (String str : strLines) { + TSRecord ts = RecordUtils.parseSimpleTupleRecord(str, fileSchema); + writer.write(ts); } - outputDataFile = args[0]; - if (new File(outputDataFile).exists()) - new File(outputDataFile).delete(); - fileSchema = new FileSchema(generateTestSchema()); - long size = - (long) FileUtils - .transformUnitToByte(Double.valueOf(args[1]), FileUtils.Unit.valueOf(args[2])); - if (args.length >= 4) - setRowGroupSize = - (int) FileUtils.transformUnitToByte(Integer.valueOf(args[3]), FileUtils.Unit.MB); - conf.groupSizeInByte = setRowGroupSize; - deviceCount = 1; - strLines = new String[deviceCount]; - sensorSet.add("s0"); - sensorSet.add("s1"); - sensorSet.add("s2"); - // write file - writer = new TsFileWriter(new File(outputDataFile), fileSchema, conf); - System.out.println("setRowGroupSize: " + setRowGroupSize + ",total target:" + size); - writeToFile(size); + } catch (WriteProcessException e) { + e.printStackTrace(); + } + lineCount++; + } + writer.close(); + endTime = System.currentTimeMillis(); + LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, FileUtils.Unit.MB)); + } + + private static JSONObject generateTestSchema() { + conf = TSFileDescriptor.getInstance().getConfig(); + JSONObject s0 = new JSONObject(); + s0.put(JsonFormatConstant.MEASUREMENT_UID, "s0"); + s0.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); + s0.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s1 = new JSONObject(); + s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); + s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s2 = new JSONObject(); + s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); + s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); + s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s3 = new JSONObject(); + s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); + s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); + s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s4 = new JSONObject(); + s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); + s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); + 
s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.PLAIN.toString()); + JSONArray measureGroup1 = new JSONArray(); + measureGroup1.put(s0); + measureGroup1.put(s1); + measureGroup1.put(s2); + measureGroup1.put(s3); + measureGroup1.put(s4); + + JSONObject jsonSchema = new JSONObject(); + jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); + jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1); + return jsonSchema; + } + + private static FileSchema fileSchema; + + public static void main(String[] args) + throws IOException, InterruptedException, WriteProcessException { + if (args.length < 3) { + System.err.println("input format: [rowGroupSize(MB)]"); + return; } + outputDataFile = args[0]; + if (new File(outputDataFile).exists()) + new File(outputDataFile).delete(); + fileSchema = new FileSchema(generateTestSchema()); + long size = (long) FileUtils.transformUnitToByte(Double.valueOf(args[1]), + FileUtils.Unit.valueOf(args[2])); + if (args.length >= 4) + setRowGroupSize = + (int) FileUtils.transformUnitToByte(Integer.valueOf(args[3]), FileUtils.Unit.MB); + conf.groupSizeInByte = setRowGroupSize; + deviceCount = 1; + strLines = new String[deviceCount]; + sensorSet.add("s0"); + sensorSet.add("s1"); + sensorSet.add("s2"); + // write file + writer = new TsFileWriter(new File(outputDataFile), fileSchema, conf); + System.out.println("setRowGroupSize: " + setRowGroupSize + ",total target:" + size); + writeToFile(size); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/CrossQueryTimeGeneratorTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/CrossQueryTimeGeneratorTest.java index 07fc4416..5018f100 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/CrossQueryTimeGeneratorTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/CrossQueryTimeGeneratorTest.java @@ -11,44 +11,50 @@ import cn.edu.tsinghua.tsfile.timeseries.read.query.DynamicOneColumnData; import org.junit.Assert; import org.junit.Test; - import java.io.IOException; public class CrossQueryTimeGeneratorTest { - private String d1 = "d1"; - private String s1 = "s1"; - private String s2 = "s2"; - private String s3 = "s3"; - private String s4 = "s4"; - - - @Test - public void singleValueFilterSplitTest() throws IOException, ProcessorException { - SingleSeriesFilterExpression timeFilter = FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 60L, true); - SingleSeriesFilterExpression s1Filter = FilterFactory.ltEq(FilterFactory.intFilterSeries(d1, s1, FilterSeriesType.VALUE_FILTER), 100, true); - SingleSeriesFilterExpression s2Filter = FilterFactory.ltEq(FilterFactory.intFilterSeries(d1, s2, FilterSeriesType.VALUE_FILTER), 200, true); - SingleSeriesFilterExpression s3Filter = FilterFactory.ltEq(FilterFactory.intFilterSeries(d1, s3, FilterSeriesType.VALUE_FILTER), 200, true); - SingleSeriesFilterExpression s4Filter = FilterFactory.ltEq(FilterFactory.intFilterSeries(d1, s4, FilterSeriesType.VALUE_FILTER), 200, true); - - // [ (s1&s2) | s3 ] & s4 - CrossSeriesFilterExpression crossFilter = FilterFactory.csAnd(s1Filter, s2Filter); - CrossSeriesFilterExpression crossFilter2 = FilterFactory.csOr(crossFilter, s3Filter); - CrossSeriesFilterExpression crossFilter3 = FilterFactory.csAnd(crossFilter2, s4Filter); - - CrossQueryTimeGenerator generator = new CrossQueryTimeGenerator(timeFilter, null, crossFilter3, 1000) { - @Override - public DynamicOneColumnData getDataInNextBatch(DynamicOneColumnData res, int fetchSize, SingleSeriesFilterExpression valueFilter, - int 
valueFilterNumber) throws ProcessorException, IOException { - DynamicOneColumnData data = new DynamicOneColumnData(TSDataType.INT32, true); - //data.putTime(10L); - //data.putInt(6); - return data; - } + private String d1 = "d1"; + private String s1 = "s1"; + private String s2 = "s2"; + private String s3 = "s3"; + private String s4 = "s4"; + + + @Test + public void singleValueFilterSplitTest() throws IOException, ProcessorException { + SingleSeriesFilterExpression timeFilter = + FilterFactory.gtEq(FilterFactory.timeFilterSeries(), 60L, true); + SingleSeriesFilterExpression s1Filter = FilterFactory + .ltEq(FilterFactory.intFilterSeries(d1, s1, FilterSeriesType.VALUE_FILTER), 100, true); + SingleSeriesFilterExpression s2Filter = FilterFactory + .ltEq(FilterFactory.intFilterSeries(d1, s2, FilterSeriesType.VALUE_FILTER), 200, true); + SingleSeriesFilterExpression s3Filter = FilterFactory + .ltEq(FilterFactory.intFilterSeries(d1, s3, FilterSeriesType.VALUE_FILTER), 200, true); + SingleSeriesFilterExpression s4Filter = FilterFactory + .ltEq(FilterFactory.intFilterSeries(d1, s4, FilterSeriesType.VALUE_FILTER), 200, true); + + // [ (s1&s2) | s3 ] & s4 + CrossSeriesFilterExpression crossFilter = FilterFactory.csAnd(s1Filter, s2Filter); + CrossSeriesFilterExpression crossFilter2 = FilterFactory.csOr(crossFilter, s3Filter); + CrossSeriesFilterExpression crossFilter3 = FilterFactory.csAnd(crossFilter2, s4Filter); + + CrossQueryTimeGenerator generator = + new CrossQueryTimeGenerator(timeFilter, null, crossFilter3, 1000) { + @Override + public DynamicOneColumnData getDataInNextBatch(DynamicOneColumnData res, int fetchSize, + SingleSeriesFilterExpression valueFilter, int valueFilterNumber) + throws ProcessorException, IOException { + DynamicOneColumnData data = new DynamicOneColumnData(TSDataType.INT32, true); + // data.putTime(10L); + // data.putInt(6); + return data; + } }; - long[] time = generator.generateTimes(); - Assert.assertEquals(time.length, 0); - // System.out.println(time.length); - } + long[] time = generator.generateTimes(); + Assert.assertEquals(time.length, 0); + // System.out.println(time.length); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/DynamicOneColumnDataTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/DynamicOneColumnDataTest.java index 376c78d9..a9992525 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/DynamicOneColumnDataTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/DynamicOneColumnDataTest.java @@ -10,90 +10,90 @@ */ public class DynamicOneColumnDataTest { - private static final int MAXN = 100; + private static final int MAXN = 100; - @Test - public void testPutGetMethod() { - DynamicOneColumnData data = new DynamicOneColumnData(TSDataType.INT32, true); - for (int i = 0; i < MAXN; i++) { - data.putTime(i + 10); - } - // Assert.assertEquals(data.timeArrayIdx, 100); + @Test + public void testPutGetMethod() { + DynamicOneColumnData data = new DynamicOneColumnData(TSDataType.INT32, true); + for (int i = 0; i < MAXN; i++) { + data.putTime(i + 10); + } + // Assert.assertEquals(data.timeArrayIdx, 100); - for (int i = 0; i < MAXN; i++) { - Assert.assertEquals(data.getTime(i), i + 10); - } + for (int i = 0; i < MAXN; i++) { + Assert.assertEquals(data.getTime(i), i + 10); } + } - @Test - public void emptyTimeTest() { - DynamicOneColumnData data1 = new DynamicOneColumnData(TSDataType.INT32, true, true); - for (int i = 1; i <= 10; i++) { - if (i % 2 == 0) { - data1.putTime(i); - data1.putInt(i); - } else { 
- data1.putEmptyTime(i); - } - } + @Test + public void emptyTimeTest() { + DynamicOneColumnData data1 = new DynamicOneColumnData(TSDataType.INT32, true, true); + for (int i = 1; i <= 10; i++) { + if (i % 2 == 0) { + data1.putTime(i); + data1.putInt(i); + } else { + data1.putEmptyTime(i); + } + } - for (int i = 0; i < data1.valueLength; i++) { - Assert.assertEquals((i + 1) * 2, data1.getTime(i)); - Assert.assertEquals((i + 1) * 2, data1.getInt(i)); - } + for (int i = 0; i < data1.valueLength; i++) { + Assert.assertEquals((i + 1) * 2, data1.getTime(i)); + Assert.assertEquals((i + 1) * 2, data1.getInt(i)); + } - for (int i = 0; i < data1.emptyTimeLength; i++) { - Assert.assertEquals((i + 1) * 2, data1.getTime(i)); - } + for (int i = 0; i < data1.emptyTimeLength; i++) { + Assert.assertEquals((i + 1) * 2, data1.getTime(i)); + } - DynamicOneColumnData data2 = new DynamicOneColumnData(TSDataType.INT32, true, false); - for (int i = 5; i <= 20; i++) { - data2.putTime(i); - data1.putInt(i); - } + DynamicOneColumnData data2 = new DynamicOneColumnData(TSDataType.INT32, true, false); + for (int i = 5; i <= 20; i++) { + data2.putTime(i); + data1.putInt(i); } + } - @Test - public void removeLastEmptyTimeTest() { - DynamicOneColumnData data = new DynamicOneColumnData(TSDataType.INT32, true, true); - data.putEmptyTime(10); - Assert.assertEquals(1, data.emptyTimeLength); - Assert.assertEquals(10, data.getEmptyTime(0)); + @Test + public void removeLastEmptyTimeTest() { + DynamicOneColumnData data = new DynamicOneColumnData(TSDataType.INT32, true, true); + data.putEmptyTime(10); + Assert.assertEquals(1, data.emptyTimeLength); + Assert.assertEquals(10, data.getEmptyTime(0)); - data.removeLastEmptyTime(); - Assert.assertEquals(0, data.emptyTimeLength); - for (int i = 1; i <= 10; i++) { - data.putEmptyTime(i * 10); - } - Assert.assertEquals(10, data.emptyTimeLength); - for (int i = 0; i < data.emptyTimeLength; i++) { - Assert.assertEquals((i + 1) * 10, data.getEmptyTime(i)); - } + data.removeLastEmptyTime(); + Assert.assertEquals(0, data.emptyTimeLength); + for (int i = 1; i <= 10; i++) { + data.putEmptyTime(i * 10); + } + Assert.assertEquals(10, data.emptyTimeLength); + for (int i = 0; i < data.emptyTimeLength; i++) { + Assert.assertEquals((i + 1) * 10, data.getEmptyTime(i)); + } - data.clearData(); - for (int i = 1; i <= 10001; i++) { - data.putEmptyTime(i); - } - for (int i = 0; i < data.emptyTimeLength; i++) { - Assert.assertEquals(i + 1, data.getEmptyTime(i)); - } - Assert.assertEquals(10001, data.emptyTimeLength); - Assert.assertEquals(9, data.emptyTimeArrayIdx); - Assert.assertEquals(785, data.curEmptyTimeIdx); + data.clearData(); + for (int i = 1; i <= 10001; i++) { + data.putEmptyTime(i); + } + for (int i = 0; i < data.emptyTimeLength; i++) { + Assert.assertEquals(i + 1, data.getEmptyTime(i)); + } + Assert.assertEquals(10001, data.emptyTimeLength); + Assert.assertEquals(9, data.emptyTimeArrayIdx); + Assert.assertEquals(785, data.curEmptyTimeIdx); - data.removeLastEmptyTime(); - data.removeLastEmptyTime(); - for (int i = 0; i < data.emptyTimeLength; i++) { - // System.out.println(i+1 + " " + data.getEmptyTime(i)); - Assert.assertEquals(i + 1, data.getEmptyTime(i)); - } - data.putEmptyTime(20000); - Assert.assertEquals(9, data.emptyTimeArrayIdx); - Assert.assertEquals(784, data.curEmptyTimeIdx); - // System.out.println(data.emptyTimeArrayIdx + " " + data.curEmptyTimeIdx); - for (int i = 0; i < data.emptyTimeLength - 1; i++) { - Assert.assertEquals(i + 1, data.getEmptyTime(i)); - } - 
Assert.assertEquals(20000, data.getEmptyTime(data.emptyTimeLength - 1)); + data.removeLastEmptyTime(); + data.removeLastEmptyTime(); + for (int i = 0; i < data.emptyTimeLength; i++) { + // System.out.println(i+1 + " " + data.getEmptyTime(i)); + Assert.assertEquals(i + 1, data.getEmptyTime(i)); + } + data.putEmptyTime(20000); + Assert.assertEquals(9, data.emptyTimeArrayIdx); + Assert.assertEquals(784, data.curEmptyTimeIdx); + // System.out.println(data.emptyTimeArrayIdx + " " + data.curEmptyTimeIdx); + for (int i = 0; i < data.emptyTimeLength - 1; i++) { + Assert.assertEquals(i + 1, data.getEmptyTime(i)); } + Assert.assertEquals(20000, data.getEmptyTime(data.emptyTimeLength - 1)); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/OnePassQueryDataSetTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/OnePassQueryDataSetTest.java index f92ded48..a5b2da39 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/OnePassQueryDataSetTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/OnePassQueryDataSetTest.java @@ -9,55 +9,38 @@ public class OnePassQueryDataSetTest { - @Test - public void emptyQueryDataTest() { - String[] ret = new String[]{ - "1 null null", - "2 2 null", - "3 null null", - "4 4 null", - "5 null 5.0", - "6 6 6.0", - "7 null 7.0", - "8 8 8.0", - "9 null 9.0", - "10 10 10.0", - "11 null 11.0", - "12 null 12.0", - "13 null 13.0", - "14 null 14.0", - "15 null 15.0", - "16 null 16.0", - "17 null 17.0", - "18 null 18.0", - "19 null 19.0", - "20 null 20.0" - }; - DynamicOneColumnData data1 = new DynamicOneColumnData(TSDataType.INT32, true, true); - for (int i = 1; i <= 10; i++) { - if (i % 2 == 0) { - data1.putTime(i); - data1.putInt(i); - } else { - data1.putEmptyTime(i); - } - } + @Test + public void emptyQueryDataTest() { + String[] ret = new String[] {"1 null null", "2 2 null", "3 null null", + "4 4 null", "5 null 5.0", "6 6 6.0", "7 null 7.0", "8 8 8.0", + "9 null 9.0", "10 10 10.0", "11 null 11.0", "12 null 12.0", + "13 null 13.0", "14 null 14.0", "15 null 15.0", "16 null 16.0", + "17 null 17.0", "18 null 18.0", "19 null 19.0", "20 null 20.0"}; + DynamicOneColumnData data1 = new DynamicOneColumnData(TSDataType.INT32, true, true); + for (int i = 1; i <= 10; i++) { + if (i % 2 == 0) { + data1.putTime(i); + data1.putInt(i); + } else { + data1.putEmptyTime(i); + } + } - DynamicOneColumnData data2 = new DynamicOneColumnData(TSDataType.FLOAT, true, false); - for (int i = 5; i <= 20; i++) { - data2.putTime(i); - data2.putFloat(i); - } + DynamicOneColumnData data2 = new DynamicOneColumnData(TSDataType.FLOAT, true, false); + for (int i = 5; i <= 20; i++) { + data2.putTime(i); + data2.putFloat(i); + } - OnePassQueryDataSet onePassQueryDataSet = new OnePassQueryDataSet(); - onePassQueryDataSet.mapRet.put("d1.s1", data1); - onePassQueryDataSet.mapRet.put("d1.s2", data2); + OnePassQueryDataSet onePassQueryDataSet = new OnePassQueryDataSet(); + onePassQueryDataSet.mapRet.put("d1.s1", data1); + onePassQueryDataSet.mapRet.put("d1.s2", data2); - int cnt = 0; - while (onePassQueryDataSet.hasNextRecord()) { - Assert.assertEquals(ret[cnt], onePassQueryDataSet.getNextRecord().toString()); - cnt ++; - // System.out.println(onePassQueryDataSet.getNextRecord()); - } + int cnt = 0; + while (onePassQueryDataSet.hasNextRecord()) { + Assert.assertEquals(ret[cnt], onePassQueryDataSet.getNextRecord().toString()); + cnt++; + // System.out.println(onePassQueryDataSet.getNextRecord()); } + } } diff --git 
a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/QueryEnginePerf.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/QueryEnginePerf.java index b2ada02e..84e4f2b2 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/QueryEnginePerf.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/QueryEnginePerf.java @@ -1,208 +1,218 @@ -package cn.edu.tsinghua.tsfile.timeseries.read; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileWriter; -import java.io.IOException; -import java.util.Scanner; - -import org.json.JSONArray; -import org.json.JSONObject; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; -import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; -import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils; -import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils.Unit; -import cn.edu.tsinghua.tsfile.timeseries.utils.RecordUtils; -import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter; -import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; -import cn.edu.tsinghua.tsfile.timeseries.write.record.TSRecord; -import cn.edu.tsinghua.tsfile.timeseries.write.schema.FileSchema; - -public class QueryEnginePerf { - private static final Logger LOG = LoggerFactory.getLogger(QueryEnginePerf.class); - public static final int ROW_COUNT = 199; - public static TsFileWriter innerWriter; - static public String inputDataFile; - static public String outputDataFile; - static public String errorOutputDataFile; - static public JSONObject jsonSchema; - - public static void generateFile() throws IOException, InterruptedException, WriteProcessException { - prepare(); - write(); - } - - public static void prepare() throws IOException { - inputDataFile = "src/test/resources/perTestInputData"; - outputDataFile = "src/test/resources/perTestOutputData.ksn"; - errorOutputDataFile = "src/test/resources/perTestErrorOutputData.ksn"; - jsonSchema = generateTestData(); - generateSampleInputDataFile(); - } - - public static void after() { - File file = new File(inputDataFile); - if (file.exists()) - file.delete(); - file = new File(outputDataFile); - if (file.exists()) - file.delete(); - file = new File(errorOutputDataFile); - if (file.exists()) - file.delete(); - } - - static private void generateSampleInputDataFile() throws IOException { - File file = new File(inputDataFile); - if (file.exists()) - file.delete(); - file.getParentFile().mkdirs(); - FileWriter fw = new FileWriter(file); - - long startTime = 1L; - int i; - for (i = 0; i < 169; i++) { - String d1 = "root.vehicle.d1," + (startTime + i) + ",s1,,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + ((i + 1) * 100 + 3); - fw.write(d1 + "\r\n"); - } - - for (; i < 170; i++) { - String d1 = "root.vehicle.d1,170,s1,1000001,s2,17002.0,s3,17003"; - fw.write(d1 + "\r\n"); - } - for (; i < 179; i++) { - String d1 = "root.vehicle.d1," + (startTime + i) + ",s1,,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + ((i + 1) * 100 + 3); - fw.write(d1 + "\r\n"); - } - for (; i < 189; i++) { - String d1 = "root.vehicle.d1," + (startTime + i) + ",s1," + ((i + 1) * 100 + 1) + ",s2," + (1.0 * (i + 1) * 100 + 2) - + ",s3," + ((i + 1) * 100 + 3); - fw.write(d1 + "\r\n"); - } - String d1 = "root.vehicle.d1," + (startTime + i) + ",s1,1,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + ((i + 1) * 100 + 3); - 
fw.write(d1 + "\r\n"); - i++; - d1 = "root.vehicle.d1," + (startTime + i) + ",s1,1,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + ((i + 1) * 100 + 3); - fw.write(d1 + "\r\n"); - i++; - d1 = "root.vehicle.d1," + (startTime + i) + ",s1,2,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + ((i + 1) * 100 + 3); - fw.write(d1 + "\r\n"); - i++; - d1 = "root.vehicle.d1," + (startTime + i) + ",s1,2,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + ((i + 1) * 100 + 3); - fw.write(d1 + "\r\n"); - i++; - d1 = "root.vehicle.d1," + (startTime + i) + ",s1,3,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + ((i + 1) * 100 + 3); - fw.write(d1 + "\r\n"); - i++; - d1 = "root.vehicle.d1," + (startTime + i) + ",s1,3,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + ((i + 1) * 100 + 3); - fw.write(d1 + "\r\n"); - i++; - d1 = "root.vehicle.d1," + (startTime + i) + ",s1,3,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + ((i + 1) * 100 + 3); - fw.write(d1 + "\r\n"); - i++; - d1 = "root.vehicle.d1," + (startTime + i) + ",s1,3,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + ((i + 1) * 100 + 3); - fw.write(d1 + "\r\n"); - i++; - for (; i < 199; i++) { - d1 = "root.vehicle.d1," + (startTime + i) + ",s1," + ((i + 1) * 100 + 1) + ",s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," - + ((i + 1) * 100 + 3); - fw.write(d1 + "\r\n"); - } - fw.close(); - } - - static public void write() throws IOException, InterruptedException, WriteProcessException { - File file = new File(outputDataFile); - File errorFile = new File(errorOutputDataFile); - if (file.exists()) - file.delete(); - if (errorFile.exists()) - errorFile.delete(); - - // LOG.info(jsonSchema.toString()); - FileSchema schema = new FileSchema(jsonSchema); - - // TSFileDescriptor.conf.rowGroupSize = 2000; - // TSFileDescriptor.conf.pageSize = 100; - innerWriter = new TsFileWriter(file, schema, TSFileDescriptor.getInstance().getConfig()); - - // write - try { - writeToFile(schema); - } catch (WriteProcessException e) { - e.printStackTrace(); - } - LOG.info("write to file successfully!!"); - } - - private static JSONObject generateTestData() { - TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); - JSONObject s1 = new JSONObject(); - s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); - s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); - s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); - JSONObject s2 = new JSONObject(); - s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); - s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); - s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); - JSONObject s3 = new JSONObject(); - s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); - s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); - - JSONArray measureGroup1 = new JSONArray(); - measureGroup1.put(s1); - measureGroup1.put(s2); - measureGroup1.put(s3); - - JSONObject jsonSchema = new JSONObject(); - jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "vehicle"); - jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1); - // System.out.println(jsonSchema); - return jsonSchema; - } - - static public void writeToFile(FileSchema schema) throws InterruptedException, IOException, WriteProcessException { - Scanner in = getDataFile(inputDataFile); - long lineCount = 0; - long startTime = System.currentTimeMillis(); - long endTime = System.currentTimeMillis(); - assert in != null; - while (in.hasNextLine()) { - if (lineCount % 1000000 == 0) { - endTime = 
System.currentTimeMillis(); - // logger.info("write line:{},inner space consumer:{},use - // time:{}",lineCount,innerWriter.calculateMemSizeForEachGroup(),endTime); - LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - } - String str = in.nextLine(); - TSRecord record = RecordUtils.parseSimpleTupleRecord(str, schema); - innerWriter.write(record); - lineCount++; - } - endTime = System.currentTimeMillis(); - LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - innerWriter.close(); - in.close(); - endTime = System.currentTimeMillis(); - LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - LOG.info("src file size:{}GB", FileUtils.getLocalFileByte(inputDataFile, Unit.GB)); - LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, Unit.MB)); - } - - static private Scanner getDataFile(String path) { - File file = new File(path); - try { - Scanner in = new Scanner(file); - return in; - } catch (FileNotFoundException e) { - e.printStackTrace(); - return null; - } - } -} +package cn.edu.tsinghua.tsfile.timeseries.read; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Scanner; +import org.json.JSONArray; +import org.json.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; +import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; +import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils; +import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils.Unit; +import cn.edu.tsinghua.tsfile.timeseries.utils.RecordUtils; +import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter; +import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; +import cn.edu.tsinghua.tsfile.timeseries.write.record.TSRecord; +import cn.edu.tsinghua.tsfile.timeseries.write.schema.FileSchema; + +public class QueryEnginePerf { + private static final Logger LOG = LoggerFactory.getLogger(QueryEnginePerf.class); + public static final int ROW_COUNT = 199; + public static TsFileWriter innerWriter; + static public String inputDataFile; + static public String outputDataFile; + static public String errorOutputDataFile; + static public JSONObject jsonSchema; + + public static void generateFile() + throws IOException, InterruptedException, WriteProcessException { + prepare(); + write(); + } + + public static void prepare() throws IOException { + inputDataFile = "src/test/resources/perTestInputData"; + outputDataFile = "src/test/resources/perTestOutputData.ksn"; + errorOutputDataFile = "src/test/resources/perTestErrorOutputData.ksn"; + jsonSchema = generateTestData(); + generateSampleInputDataFile(); + } + + public static void after() { + File file = new File(inputDataFile); + if (file.exists()) + file.delete(); + file = new File(outputDataFile); + if (file.exists()) + file.delete(); + file = new File(errorOutputDataFile); + if (file.exists()) + file.delete(); + } + + static private void generateSampleInputDataFile() throws IOException { + File file = new File(inputDataFile); + if (file.exists()) + file.delete(); + file.getParentFile().mkdirs(); + FileWriter fw = new FileWriter(file); + + long startTime = 1L; + int i; + for (i = 0; i < 169; i++) { + String d1 = "root.vehicle.d1," + (startTime + i) + ",s1,,s2," + (1.0 * (i + 
1) * 100 + 2) + + ",s3," + ((i + 1) * 100 + 3); + fw.write(d1 + "\r\n"); + } + + for (; i < 170; i++) { + String d1 = "root.vehicle.d1,170,s1,1000001,s2,17002.0,s3,17003"; + fw.write(d1 + "\r\n"); + } + for (; i < 179; i++) { + String d1 = "root.vehicle.d1," + (startTime + i) + ",s1,,s2," + (1.0 * (i + 1) * 100 + 2) + + ",s3," + ((i + 1) * 100 + 3); + fw.write(d1 + "\r\n"); + } + for (; i < 189; i++) { + String d1 = "root.vehicle.d1," + (startTime + i) + ",s1," + ((i + 1) * 100 + 1) + ",s2," + + (1.0 * (i + 1) * 100 + 2) + ",s3," + ((i + 1) * 100 + 3); + fw.write(d1 + "\r\n"); + } + String d1 = "root.vehicle.d1," + (startTime + i) + ",s1,1,s2," + (1.0 * (i + 1) * 100 + 2) + + ",s3," + ((i + 1) * 100 + 3); + fw.write(d1 + "\r\n"); + i++; + d1 = "root.vehicle.d1," + (startTime + i) + ",s1,1,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + + ((i + 1) * 100 + 3); + fw.write(d1 + "\r\n"); + i++; + d1 = "root.vehicle.d1," + (startTime + i) + ",s1,2,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + + ((i + 1) * 100 + 3); + fw.write(d1 + "\r\n"); + i++; + d1 = "root.vehicle.d1," + (startTime + i) + ",s1,2,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + + ((i + 1) * 100 + 3); + fw.write(d1 + "\r\n"); + i++; + d1 = "root.vehicle.d1," + (startTime + i) + ",s1,3,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + + ((i + 1) * 100 + 3); + fw.write(d1 + "\r\n"); + i++; + d1 = "root.vehicle.d1," + (startTime + i) + ",s1,3,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + + ((i + 1) * 100 + 3); + fw.write(d1 + "\r\n"); + i++; + d1 = "root.vehicle.d1," + (startTime + i) + ",s1,3,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + + ((i + 1) * 100 + 3); + fw.write(d1 + "\r\n"); + i++; + d1 = "root.vehicle.d1," + (startTime + i) + ",s1,3,s2," + (1.0 * (i + 1) * 100 + 2) + ",s3," + + ((i + 1) * 100 + 3); + fw.write(d1 + "\r\n"); + i++; + for (; i < 199; i++) { + d1 = "root.vehicle.d1," + (startTime + i) + ",s1," + ((i + 1) * 100 + 1) + ",s2," + + (1.0 * (i + 1) * 100 + 2) + ",s3," + ((i + 1) * 100 + 3); + fw.write(d1 + "\r\n"); + } + fw.close(); + } + + static public void write() throws IOException, InterruptedException, WriteProcessException { + File file = new File(outputDataFile); + File errorFile = new File(errorOutputDataFile); + if (file.exists()) + file.delete(); + if (errorFile.exists()) + errorFile.delete(); + + // LOG.info(jsonSchema.toString()); + FileSchema schema = new FileSchema(jsonSchema); + + // TSFileDescriptor.conf.rowGroupSize = 2000; + // TSFileDescriptor.conf.pageSize = 100; + innerWriter = new TsFileWriter(file, schema, TSFileDescriptor.getInstance().getConfig()); + + // write + try { + writeToFile(schema); + } catch (WriteProcessException e) { + e.printStackTrace(); + } + LOG.info("write to file successfully!!"); + } + + private static JSONObject generateTestData() { + TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); + JSONObject s1 = new JSONObject(); + s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); + s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); + s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s2 = new JSONObject(); + s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); + s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); + s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s3 = new JSONObject(); + s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); + s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + + JSONArray 
measureGroup1 = new JSONArray(); + measureGroup1.put(s1); + measureGroup1.put(s2); + measureGroup1.put(s3); + + JSONObject jsonSchema = new JSONObject(); + jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "vehicle"); + jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1); + // System.out.println(jsonSchema); + return jsonSchema; + } + + static public void writeToFile(FileSchema schema) + throws InterruptedException, IOException, WriteProcessException { + Scanner in = getDataFile(inputDataFile); + long lineCount = 0; + long startTime = System.currentTimeMillis(); + long endTime = System.currentTimeMillis(); + assert in != null; + while (in.hasNextLine()) { + if (lineCount % 1000000 == 0) { + endTime = System.currentTimeMillis(); + // logger.info("write line:{},inner space consumer:{},use + // time:{}",lineCount,innerWriter.calculateMemSizeForEachGroup(),endTime); + LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + } + String str = in.nextLine(); + TSRecord record = RecordUtils.parseSimpleTupleRecord(str, schema); + innerWriter.write(record); + lineCount++; + } + endTime = System.currentTimeMillis(); + LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + innerWriter.close(); + in.close(); + endTime = System.currentTimeMillis(); + LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + LOG.info("src file size:{}GB", FileUtils.getLocalFileByte(inputDataFile, Unit.GB)); + LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, Unit.MB)); + } + + static private Scanner getDataFile(String path) { + File file = new File(path); + try { + Scanner in = new Scanner(file); + return in; + } catch (FileNotFoundException e) { + e.printStackTrace(); + return null; + } + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/QueryEngineTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/QueryEngineTest.java index c3cf263e..0602ed4f 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/QueryEngineTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/QueryEngineTest.java @@ -1,12 +1,9 @@ package cn.edu.tsinghua.tsfile.timeseries.read; import static org.junit.Assert.*; - import java.io.IOException; import java.util.ArrayList; import java.util.List; - - import cn.edu.tsinghua.tsfile.common.utils.ITsRandomAccessFileReader; import cn.edu.tsinghua.tsfile.timeseries.read.query.OnePassQueryDataSet; import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryEngine; @@ -18,200 +15,210 @@ public class QueryEngineTest { - private QueryEngine engine; - ITsRandomAccessFileReader raf; - - @BeforeClass - public static void setUpBeforeClass() throws InterruptedException, WriteProcessException, IOException { - QueryEnginePerf.generateFile(); - } - - @Before - public void prepare() throws IOException, InterruptedException, WriteProcessException { - String fileName = "src/test/resources/perTestOutputData.ksn"; - raf = new TsRandomAccessLocalFileReader(fileName); - engine = new QueryEngine(raf, 10); - - - List paths = new ArrayList(); - paths.add(new Path("device_1.sensor_1")); - paths.add(new Path("device_1.sensor_2")); - } - - @After - public void after() throws IOException { - raf.close(); - } - - @AfterClass - public static void tearDownAfterClass() throws InterruptedException, WriteProcessException, IOException { - QueryEnginePerf.after(); - } - - @Test - public void readAllInOneColumn() { - QueryConfig config = new 
QueryConfig("root.vehicle.d1.s1|root.vehicle.d1.s2|root.vehicle.d1.s3"); - try { - OnePassQueryDataSet res = engine.query(config); - int count = output(res, true); - assertEquals(199, count); - } catch (IOException e) { - fail(); - e.printStackTrace(); - } - } - - @Test - public void readOneColumnWithTimeFilter() { - QueryConfig config = new QueryConfig("root.vehicle.d1.s1|root.vehicle.d1.s2", "0,(>178)&(<=198)", "null", "null"); - try { - OnePassQueryDataSet res = engine.query(config); - int count = output(res, true); - assertEquals(20, count); - } catch (IOException e) { - e.printStackTrace(); - fail(); - } - } - - @Test - public void readOneColumnWithValueFilter() { - QueryConfig config = new QueryConfig("root.vehicle.d1.s1", "null", "null", "2,root.vehicle.d1.s1,(>=18901)"); - try { - OnePassQueryDataSet res = engine.query(config); - int count = output(res, true); - assertEquals(4, count); - } catch (IOException e) { - fail(); - e.printStackTrace(); - } - } - - @Test - public void readOneColumnWithTimeAndValueFilter1() { - QueryConfig config = new QueryConfig("root.vehicle.d1.s1", "0,(>278)&(<=298)", "null", "2,root.vehicle.d1.s1,(>10294)"); - try { - OnePassQueryDataSet res = engine.query(config); - int count = output(res, true); - assertEquals(0, count); - } catch (IOException e) { - fail(); - e.printStackTrace(); - } - } - - @Test - public void readOneColumnWithTimeAndValueFilter2() { - QueryConfig config = new QueryConfig("root.vehicle.d1.s1", "0,(>=186)", "null", "2,root.vehicle.d1.s1,(>10211)"); - try { - OnePassQueryDataSet res = engine.query(config); - int count = output(res, true); - assertEquals(6, count); - } catch (IOException e) { - fail(); - e.printStackTrace(); - } - } - - @Test - public void crossRead1() { - QueryConfig config = new QueryConfig("root.vehicle.d1.s1|root.vehicle.d1.s2", "0,(<=197)", "null", "[2,root.vehicle.d1.s1,(<10)]"); - try { - OnePassQueryDataSet res = engine.query(config); - int count = output(res, true); - assertEquals(8, count); - } catch (IOException e) { - fail(); - e.printStackTrace(); - } - } - - @Test - public void crossRead2() { - QueryConfig config = new QueryConfig("root.vehicle.d1.s1|root.vehicle.d1.s2", "0,(>=0)", "null", "[2,root.vehicle.d1.s2,(>17802)]"); - try { - OnePassQueryDataSet res = engine.query(config); - int count = output(res, true); - assertEquals(21, count); - } catch (IOException e) { - fail(); - e.printStackTrace(); - } - } - - @Test - public void crossRead3() { - QueryConfig config = new QueryConfig("root.vehicle.d1.s1|root.vehicle.d1.s2|root.vehicle.d1.s3" - , "0,(<=190)", "null", "[2,root.vehicle.d1.s2,(>17802)]&[2,root.vehicle.d1.s3,(>18703)&(<18903)]"); - try { - OnePassQueryDataSet res = engine.query(config); - int count = output(res, true); - assertEquals(1, count); - } catch (IOException e) { - fail(); - e.printStackTrace(); - } - } - - private static int output(OnePassQueryDataSet res, boolean printToConsole) { - int cnt = 0; - - //Output Labels - if (printToConsole) { - System.out.printf("+---------------+"); - for (int i = 0; i < res.mapRet.keySet().size(); i++) { - System.out.printf("---------------+"); - } - System.out.printf("\n"); - - System.out.printf("|%15s|", "Timestamp"); - for (String name : res.mapRet.keySet()) { - System.out.printf("%15s|", name); - } - System.out.printf("\n"); - - System.out.printf("+---------------+"); - for (int i = 0; i < res.mapRet.keySet().size(); i++) { - System.out.printf("---------------+"); - } - System.out.printf("\n"); - } - // output values - OldRowRecord r; - - while 
((r = res.getNextRecord()) != null) { - StringBuilder line = new StringBuilder(); - line.append(String.valueOf(r.timestamp)); - - if (printToConsole) { - System.out.printf("|%15s|", String.valueOf(r.timestamp)); - } - - for (int i = 0; i < r.fields.size(); i++) { - line.append("\t" + r.fields.get(i).getStringValue()); - if (printToConsole) { - System.out.printf("%15s|", String.valueOf(r.fields.get(i).getStringValue())); - } - } - - if (printToConsole) { - System.out.printf("\n"); - } - -// bw.write(line.toString()); -// bw.newLine(); - cnt++; - } - - if (printToConsole) { - System.out.printf("+---------------+"); - for (int i = 0; i < res.mapRet.keySet().size(); i++) { - System.out.printf("---------------+"); - } - System.out.printf("\n"); - } -// bw.close(); - System.out.println("Result size : " + cnt); - return cnt; - } + private QueryEngine engine; + ITsRandomAccessFileReader raf; + + @BeforeClass + public static void setUpBeforeClass() + throws InterruptedException, WriteProcessException, IOException { + QueryEnginePerf.generateFile(); + } + + @Before + public void prepare() throws IOException, InterruptedException, WriteProcessException { + String fileName = "src/test/resources/perTestOutputData.ksn"; + raf = new TsRandomAccessLocalFileReader(fileName); + engine = new QueryEngine(raf, 10); + + + List paths = new ArrayList(); + paths.add(new Path("device_1.sensor_1")); + paths.add(new Path("device_1.sensor_2")); + } + + @After + public void after() throws IOException { + raf.close(); + } + + @AfterClass + public static void tearDownAfterClass() + throws InterruptedException, WriteProcessException, IOException { + QueryEnginePerf.after(); + } + + @Test + public void readAllInOneColumn() { + QueryConfig config = + new QueryConfig("root.vehicle.d1.s1|root.vehicle.d1.s2|root.vehicle.d1.s3"); + try { + OnePassQueryDataSet res = engine.query(config); + int count = output(res, true); + assertEquals(199, count); + } catch (IOException e) { + fail(); + e.printStackTrace(); + } + } + + @Test + public void readOneColumnWithTimeFilter() { + QueryConfig config = new QueryConfig("root.vehicle.d1.s1|root.vehicle.d1.s2", + "0,(>178)&(<=198)", "null", "null"); + try { + OnePassQueryDataSet res = engine.query(config); + int count = output(res, true); + assertEquals(20, count); + } catch (IOException e) { + e.printStackTrace(); + fail(); + } + } + + @Test + public void readOneColumnWithValueFilter() { + QueryConfig config = + new QueryConfig("root.vehicle.d1.s1", "null", "null", "2,root.vehicle.d1.s1,(>=18901)"); + try { + OnePassQueryDataSet res = engine.query(config); + int count = output(res, true); + assertEquals(4, count); + } catch (IOException e) { + fail(); + e.printStackTrace(); + } + } + + @Test + public void readOneColumnWithTimeAndValueFilter1() { + QueryConfig config = new QueryConfig("root.vehicle.d1.s1", "0,(>278)&(<=298)", "null", + "2,root.vehicle.d1.s1,(>10294)"); + try { + OnePassQueryDataSet res = engine.query(config); + int count = output(res, true); + assertEquals(0, count); + } catch (IOException e) { + fail(); + e.printStackTrace(); + } + } + + @Test + public void readOneColumnWithTimeAndValueFilter2() { + QueryConfig config = + new QueryConfig("root.vehicle.d1.s1", "0,(>=186)", "null", "2,root.vehicle.d1.s1,(>10211)"); + try { + OnePassQueryDataSet res = engine.query(config); + int count = output(res, true); + assertEquals(6, count); + } catch (IOException e) { + fail(); + e.printStackTrace(); + } + } + + @Test + public void crossRead1() { + QueryConfig config = new 
QueryConfig("root.vehicle.d1.s1|root.vehicle.d1.s2", "0,(<=197)", + "null", "[2,root.vehicle.d1.s1,(<10)]"); + try { + OnePassQueryDataSet res = engine.query(config); + int count = output(res, true); + assertEquals(8, count); + } catch (IOException e) { + fail(); + e.printStackTrace(); + } + } + + @Test + public void crossRead2() { + QueryConfig config = new QueryConfig("root.vehicle.d1.s1|root.vehicle.d1.s2", "0,(>=0)", "null", + "[2,root.vehicle.d1.s2,(>17802)]"); + try { + OnePassQueryDataSet res = engine.query(config); + int count = output(res, true); + assertEquals(21, count); + } catch (IOException e) { + fail(); + e.printStackTrace(); + } + } + + @Test + public void crossRead3() { + QueryConfig config = + new QueryConfig("root.vehicle.d1.s1|root.vehicle.d1.s2|root.vehicle.d1.s3", "0,(<=190)", + "null", "[2,root.vehicle.d1.s2,(>17802)]&[2,root.vehicle.d1.s3,(>18703)&(<18903)]"); + try { + OnePassQueryDataSet res = engine.query(config); + int count = output(res, true); + assertEquals(1, count); + } catch (IOException e) { + fail(); + e.printStackTrace(); + } + } + + private static int output(OnePassQueryDataSet res, boolean printToConsole) { + int cnt = 0; + + // Output Labels + if (printToConsole) { + System.out.printf("+---------------+"); + for (int i = 0; i < res.mapRet.keySet().size(); i++) { + System.out.printf("---------------+"); + } + System.out.printf("\n"); + + System.out.printf("|%15s|", "Timestamp"); + for (String name : res.mapRet.keySet()) { + System.out.printf("%15s|", name); + } + System.out.printf("\n"); + + System.out.printf("+---------------+"); + for (int i = 0; i < res.mapRet.keySet().size(); i++) { + System.out.printf("---------------+"); + } + System.out.printf("\n"); + } + // output values + OldRowRecord r; + + while ((r = res.getNextRecord()) != null) { + StringBuilder line = new StringBuilder(); + line.append(String.valueOf(r.timestamp)); + + if (printToConsole) { + System.out.printf("|%15s|", String.valueOf(r.timestamp)); + } + + for (int i = 0; i < r.fields.size(); i++) { + line.append("\t" + r.fields.get(i).getStringValue()); + if (printToConsole) { + System.out.printf("%15s|", String.valueOf(r.fields.get(i).getStringValue())); + } + } + + if (printToConsole) { + System.out.printf("\n"); + } + + // bw.write(line.toString()); + // bw.newLine(); + cnt++; + } + + if (printToConsole) { + System.out.printf("+---------------+"); + for (int i = 0; i < res.mapRet.keySet().size(); i++) { + System.out.printf("---------------+"); + } + System.out.printf("\n"); + } + // bw.close(); + System.out.println("Result size : " + cnt); + return cnt; + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/ReadPerf.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/ReadPerf.java index 1fe44f3b..1ea0e126 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/ReadPerf.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/ReadPerf.java @@ -1,226 +1,218 @@ -package cn.edu.tsinghua.tsfile.timeseries.read; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileWriter; -import java.io.IOException; -import java.util.Scanner; - -import org.json.JSONArray; -import org.json.JSONObject; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; -import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; -import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import 
cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; -import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils; -import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils.Unit; -import cn.edu.tsinghua.tsfile.timeseries.utils.RecordUtils; -import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter; -import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; -import cn.edu.tsinghua.tsfile.timeseries.write.record.TSRecord; -import cn.edu.tsinghua.tsfile.timeseries.write.schema.FileSchema; - -public class ReadPerf { - private static final Logger LOG = LoggerFactory.getLogger(ReadPerf.class); - public static final int ROW_COUNT = 1000; - public static TsFileWriter innerWriter; - public static String inputDataFile; - public static String outputDataFile = "src/test/resources/perTestOutputData.ksn"; - public static String errorOutputDataFile; - public static JSONObject jsonSchema; - - public static void generateFile() throws IOException, InterruptedException, WriteProcessException { - prepare(); - write(); - } - - public static void prepare() throws IOException { - inputDataFile = "src/test/resources/perTestInputData"; - errorOutputDataFile = "src/test/resources/perTestErrorOutputData.ksn"; - jsonSchema = generateTestData(); - generateSampleInputDataFile(); - } - - public static void after() { - File file = new File(inputDataFile); - if (file.exists()) - file.delete(); - file = new File(outputDataFile); - if (file.exists()) - file.delete(); - file = new File(errorOutputDataFile); - if (file.exists()) - file.delete(); - } - - static private void generateSampleInputDataFile() throws IOException { - File file = new File(inputDataFile); - if (file.exists()) - file.delete(); - file.getParentFile().mkdirs(); - FileWriter fw = new FileWriter(file); - - long startTime = 1480562618000L; - startTime = startTime - startTime % 1000; - for (int i = 0; i < ROW_COUNT; i++) { - // write d1 - String d1 = "d1," + (startTime + i) + ",s1," + (i * 10 + 1) + ",s2," + (i * 10 + 2); - if (i % 20 < 10) { - // LOG.info("write null to d1:" + (startTime + i)); - d1 = "d1," + (startTime + i) + ",s1,,s2," + (i * 10 + 2); - } - if (i % 5 == 0) - d1 += ",s3," + (i * 10 + 3); - if (i % 8 == 0) - d1 += ",s4," + "dog" + i; - if (i % 9 == 0) - d1 += ",s5," + "false"; - if (i % 10 == 0) - d1 += ",s6," + ((int)(i/9.0)*100)/100.0; - if (i % 11 == 0) - d1 += ",s7," + ((int)(i/10.0)*100)/100.0; - fw.write(d1 + "\r\n"); - - // write d2 - String d2 = "d2," + (startTime + i) + ",s2," + (i * 10 + 2) + ",s3," + (i * 10 + 3); - if (i % 20 < 5) { - // LOG.info("write null to d2:" + (startTime + i)); - d2 = "d2," + (startTime + i) + ",s2,,s3," + (i * 10 + 3); - } - if (i % 5 == 0) - d2 += ",s1," + (i * 10 + 1); - if (i % 8 == 0) - d2 += ",s4," + "dog" + i%4; - fw.write(d2 + "\r\n"); - } - // write error - String d = - "d2,3," + (startTime + ROW_COUNT) + ",s2," + (ROW_COUNT * 10 + 2) + ",s3," - + (ROW_COUNT * 10 + 3); - fw.write(d + "\r\n"); - d = "d2," + (startTime + ROW_COUNT + 1) + ",2,s-1," + (ROW_COUNT * 10 + 2); - fw.write(d + "\r\n"); - fw.close(); - } - - static public void write() throws IOException, InterruptedException, WriteProcessException { - File file = new File(outputDataFile); - File errorFile = new File(errorOutputDataFile); - if (file.exists()) - file.delete(); - if (errorFile.exists()) - errorFile.delete(); - - //LOG.info(jsonSchema.toString()); - FileSchema schema = new FileSchema(jsonSchema); - - // TSFileDescriptor.conf.rowGroupSize = 2000; - // TSFileDescriptor.conf.pageSize = 100; - innerWriter = 
new TsFileWriter(file, schema, TSFileDescriptor.getInstance().getConfig()); - - // write - try { - writeToFile(schema); - } catch (WriteProcessException e) { - e.printStackTrace(); - } - LOG.info("write to file successfully!!"); - } - - private static JSONObject generateTestData() { - TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); - JSONObject s1 = new JSONObject(); - s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); - s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); - s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s2 = new JSONObject(); - s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); - s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s3 = new JSONObject(); - s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); - s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s4 = new JSONObject(); - s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); - s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); - s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.PLAIN.toString()); - JSONObject s5 = new JSONObject(); - s5.put(JsonFormatConstant.MEASUREMENT_UID, "s5"); - s5.put(JsonFormatConstant.DATA_TYPE, TSDataType.BOOLEAN.toString()); - s5.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.PLAIN.toString()); - JSONObject s6 = new JSONObject(); - s6.put(JsonFormatConstant.MEASUREMENT_UID, "s6"); - s6.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); - s6.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.RLE.toString()); - JSONObject s7 = new JSONObject(); - s7.put(JsonFormatConstant.MEASUREMENT_UID, "s7"); - s7.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); - s7.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.RLE.toString()); - - JSONArray measureGroup1 = new JSONArray(); - measureGroup1.put(s1); - measureGroup1.put(s2); - measureGroup1.put(s3); - measureGroup1.put(s4); - measureGroup1.put(s5); - measureGroup1.put(s6); - measureGroup1.put(s7); - - JSONObject jsonSchema = new JSONObject(); - jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); - jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1); - //System.out.println(jsonSchema); - return jsonSchema; - } - - static public void writeToFile(FileSchema schema) throws InterruptedException, IOException, WriteProcessException { - Scanner in = getDataFile(inputDataFile); - long lineCount = 0; - long startTime = System.currentTimeMillis(); - long endTime = System.currentTimeMillis(); - assert in != null; - while (in.hasNextLine()) { - if (lineCount % 1000000 == 0) { - endTime = System.currentTimeMillis(); - // logger.info("write line:{},inner space consumer:{},use - // time:{}",lineCount,innerWriter.calculateMemSizeForEachGroup(),endTime); - LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - } - String str = in.nextLine(); - TSRecord record = RecordUtils.parseSimpleTupleRecord(str, schema); - innerWriter.write(record); - lineCount++; - } - endTime = System.currentTimeMillis(); - LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - innerWriter.close(); - in.close(); - endTime = System.currentTimeMillis(); - LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - LOG.info("src file size:{}GB", 
FileUtils.getLocalFileByte(inputDataFile, Unit.GB)); - LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, Unit.MB)); - } - - static private Scanner getDataFile(String path) { - File file = new File(path); - try { - Scanner in = new Scanner(file); - return in; - } catch (FileNotFoundException e) { - e.printStackTrace(); - return null; - } - } -} +package cn.edu.tsinghua.tsfile.timeseries.read; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Scanner; +import org.json.JSONArray; +import org.json.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; +import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; +import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; +import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils; +import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils.Unit; +import cn.edu.tsinghua.tsfile.timeseries.utils.RecordUtils; +import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter; +import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; +import cn.edu.tsinghua.tsfile.timeseries.write.record.TSRecord; +import cn.edu.tsinghua.tsfile.timeseries.write.schema.FileSchema; + +public class ReadPerf { + private static final Logger LOG = LoggerFactory.getLogger(ReadPerf.class); + public static final int ROW_COUNT = 1000; + public static TsFileWriter innerWriter; + public static String inputDataFile; + public static String outputDataFile = "src/test/resources/perTestOutputData.ksn"; + public static String errorOutputDataFile; + public static JSONObject jsonSchema; + + public static void generateFile() + throws IOException, InterruptedException, WriteProcessException { + prepare(); + write(); + } + + public static void prepare() throws IOException { + inputDataFile = "src/test/resources/perTestInputData"; + errorOutputDataFile = "src/test/resources/perTestErrorOutputData.ksn"; + jsonSchema = generateTestData(); + generateSampleInputDataFile(); + } + + public static void after() { + File file = new File(inputDataFile); + if (file.exists()) + file.delete(); + file = new File(outputDataFile); + if (file.exists()) + file.delete(); + file = new File(errorOutputDataFile); + if (file.exists()) + file.delete(); + } + + static private void generateSampleInputDataFile() throws IOException { + File file = new File(inputDataFile); + if (file.exists()) + file.delete(); + file.getParentFile().mkdirs(); + FileWriter fw = new FileWriter(file); + + long startTime = 1480562618000L; + startTime = startTime - startTime % 1000; + for (int i = 0; i < ROW_COUNT; i++) { + // write d1 + String d1 = "d1," + (startTime + i) + ",s1," + (i * 10 + 1) + ",s2," + (i * 10 + 2); + if (i % 20 < 10) { + // LOG.info("write null to d1:" + (startTime + i)); + d1 = "d1," + (startTime + i) + ",s1,,s2," + (i * 10 + 2); + } + if (i % 5 == 0) + d1 += ",s3," + (i * 10 + 3); + if (i % 8 == 0) + d1 += ",s4," + "dog" + i; + if (i % 9 == 0) + d1 += ",s5," + "false"; + if (i % 10 == 0) + d1 += ",s6," + ((int) (i / 9.0) * 100) / 100.0; + if (i % 11 == 0) + d1 += ",s7," + ((int) (i / 10.0) * 100) / 100.0; + fw.write(d1 + "\r\n"); + + // write d2 + String d2 = "d2," + (startTime + i) + ",s2," + (i * 10 + 2) + ",s3," + (i * 10 + 3); + if (i % 20 < 5) { + // LOG.info("write null to d2:" + 
(startTime + i)); + d2 = "d2," + (startTime + i) + ",s2,,s3," + (i * 10 + 3); + } + if (i % 5 == 0) + d2 += ",s1," + (i * 10 + 1); + if (i % 8 == 0) + d2 += ",s4," + "dog" + i % 4; + fw.write(d2 + "\r\n"); + } + // write error + String d = "d2,3," + (startTime + ROW_COUNT) + ",s2," + (ROW_COUNT * 10 + 2) + ",s3," + + (ROW_COUNT * 10 + 3); + fw.write(d + "\r\n"); + d = "d2," + (startTime + ROW_COUNT + 1) + ",2,s-1," + (ROW_COUNT * 10 + 2); + fw.write(d + "\r\n"); + fw.close(); + } + + static public void write() throws IOException, InterruptedException, WriteProcessException { + File file = new File(outputDataFile); + File errorFile = new File(errorOutputDataFile); + if (file.exists()) + file.delete(); + if (errorFile.exists()) + errorFile.delete(); + + // LOG.info(jsonSchema.toString()); + FileSchema schema = new FileSchema(jsonSchema); + + // TSFileDescriptor.conf.rowGroupSize = 2000; + // TSFileDescriptor.conf.pageSize = 100; + innerWriter = new TsFileWriter(file, schema, TSFileDescriptor.getInstance().getConfig()); + + // write + try { + writeToFile(schema); + } catch (WriteProcessException e) { + e.printStackTrace(); + } + LOG.info("write to file successfully!!"); + } + + private static JSONObject generateTestData() { + TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); + JSONObject s1 = new JSONObject(); + s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); + s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); + s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s2 = new JSONObject(); + s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); + s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s3 = new JSONObject(); + s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); + s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s4 = new JSONObject(); + s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); + s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); + s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.PLAIN.toString()); + JSONObject s5 = new JSONObject(); + s5.put(JsonFormatConstant.MEASUREMENT_UID, "s5"); + s5.put(JsonFormatConstant.DATA_TYPE, TSDataType.BOOLEAN.toString()); + s5.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.PLAIN.toString()); + JSONObject s6 = new JSONObject(); + s6.put(JsonFormatConstant.MEASUREMENT_UID, "s6"); + s6.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); + s6.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.RLE.toString()); + JSONObject s7 = new JSONObject(); + s7.put(JsonFormatConstant.MEASUREMENT_UID, "s7"); + s7.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); + s7.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.RLE.toString()); + + JSONArray measureGroup1 = new JSONArray(); + measureGroup1.put(s1); + measureGroup1.put(s2); + measureGroup1.put(s3); + measureGroup1.put(s4); + measureGroup1.put(s5); + measureGroup1.put(s6); + measureGroup1.put(s7); + + JSONObject jsonSchema = new JSONObject(); + jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); + jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1); + // System.out.println(jsonSchema); + return jsonSchema; + } + + static public void writeToFile(FileSchema schema) + throws InterruptedException, IOException, WriteProcessException { + Scanner in = 
getDataFile(inputDataFile); + long lineCount = 0; + long startTime = System.currentTimeMillis(); + long endTime = System.currentTimeMillis(); + assert in != null; + while (in.hasNextLine()) { + if (lineCount % 1000000 == 0) { + endTime = System.currentTimeMillis(); + // logger.info("write line:{},inner space consumer:{},use + // time:{}",lineCount,innerWriter.calculateMemSizeForEachGroup(),endTime); + LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + } + String str = in.nextLine(); + TSRecord record = RecordUtils.parseSimpleTupleRecord(str, schema); + innerWriter.write(record); + lineCount++; + } + endTime = System.currentTimeMillis(); + LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + innerWriter.close(); + in.close(); + endTime = System.currentTimeMillis(); + LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + LOG.info("src file size:{}GB", FileUtils.getLocalFileByte(inputDataFile, Unit.GB)); + LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, Unit.MB)); + } + + static private Scanner getDataFile(String path) { + File file = new File(path); + try { + Scanner in = new Scanner(file); + return in; + } catch (FileNotFoundException e) { + e.printStackTrace(); + return null; + } + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/ReadTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/ReadTest.java index b0cdaa40..851580a2 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/ReadTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/ReadTest.java @@ -1,302 +1,298 @@ -package cn.edu.tsinghua.tsfile.timeseries.read; - -import static org.junit.Assert.assertEquals; - -import java.io.IOException; - -import cn.edu.tsinghua.tsfile.common.utils.ITsRandomAccessFileReader; -import cn.edu.tsinghua.tsfile.timeseries.read.query.OnePassQueryDataSet; -import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryEngine; -import cn.edu.tsinghua.tsfile.timeseries.read.support.Field; -import cn.edu.tsinghua.tsfile.timeseries.read.support.OldRowRecord; -import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryConfig; - -public class ReadTest { - - private static String fileName = "src/test/resources/perTestOutputData.ksn"; - private static TsRandomAccessLocalFileReader inputFile; - private static QueryEngine engine = null; - static ITsRandomAccessFileReader raf; - private static QueryConfig configOneSeriesWithNoFilter = new QueryConfig("d1.s1"); - - private static QueryConfig configTwoSeriesWithNoFilter = new QueryConfig("d1.s1|d2.s2"); - - private static QueryConfig configWithTwoSeriesTimeValueNoCrossFilter = new QueryConfig("d2.s1|d2.s4", - "0,(>=1480562618970)&(<1480562618977)", "null", "2,d2.s2,(>9722)"); - private static QueryConfig configWithTwoSeriesTimeValueNoCrossFilter2 = new QueryConfig("d2.s2", - "0,(>=1480562618970)&(<1480562618977)", "null", "2,d2.s2,(!=9722)"); - - private static QueryConfig configWithTwoSeriesTimeValueCrossFilter = new QueryConfig("d1.s1|d2.s2", - "0,(>=1480562618970)&(<1480562618977)", "null", "[2,d2.s2,(>9722)]"); - - private static QueryConfig configWithCrossSeriesTimeValueFilter = new QueryConfig("d1.s1|d2.s2", - "0,(>=1480562618950)&(<=1480562618960)", "null", "[2,d2.s3,(>9541)|(<=9511)]&[2,d1.s1,(<=9562)]"); - - 
private static QueryConfig configWithCrossSeriesTimeValueFilterOrOpe = new QueryConfig("d1.s1|d2.s2", - "0,((>=1480562618906)&(<=1480562618915))|((>=1480562618928)&(<=1480562618933))", "null", - "[2,d1.s1,(<=9321)]|[2,d2.s2,(>9312)]"); - - private static QueryConfig booleanConfig = new QueryConfig("d1.s5", - "0,(>=1480562618970)&(<=1480562618981)", "null", "2,d1.s5,(=false)"); - - private static QueryConfig greatStringConfig = new QueryConfig("d1.s4", - "0,(>=1480562618970)&(<=1480562618981)", "null", "2,d1.s4,(>dog97)"); - - private static QueryConfig lessStringConfig = new QueryConfig("d1.s4", - "0,(>=1480562618970)&(<=1480562618981)", "null", "2,d1.s4,(<dog97)"); - - private static QueryConfig floatConfig = new QueryConfig("d1.s6", - "0,(>=1480562618970)&(<=1480562618981)", "null", "2,d1.s6,(>103.0)"); - - private static QueryConfig doubleConfig = new QueryConfig("d1.s7", - "0,(>=1480562618021)&(<=1480562618033)", "null", "2,d1.s7,(<=7.0)"); - - private static QueryConfig floatDoubleConfigFilter = new QueryConfig("d1.s6", - "0,(>=1480562618005)&(<1480562618977)", "null", "2,d1.s6,(>=103.0)"); - - @Before - public void prepare() throws IOException, InterruptedException, WriteProcessException { - ReadPerf.generateFile(); - } - - @After - public void after() { - ReadPerf.after(); - } - - @Test - public void queryOneMeasurementWithoutFilterTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(configOneSeriesWithNoFilter, fileName); - - int count = 0; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (count == 0) { - assertEquals(r.timestamp, 1480562618010L); - } - if (count == 499) { - assertEquals(r.timestamp, 1480562618999L); - } - count++; - } - assertEquals(count, 500); - } - - @Test - public void queryTwoMeasurementsWithoutFilterTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(configTwoSeriesWithNoFilter, fileName); - int count = 0; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (count == 0) { - if (count == 0) { - assertEquals(1480562618005L, r.timestamp); - } - } - count++; - } - assertEquals(count, 750); -// // verify d1.s1 -// DynamicOneColumnData d1s1Data = onePassQueryDataSet.mapRet.get("d1.s1"); -// assertEquals(d1s1Data.length, 500); -// assertEquals(d1s1Data.getTime(0), 1480562618010L); -// assertEquals(d1s1Data.getInt(0), 101); -// assertEquals(d1s1Data.getTime(d1s1Data.length - 1), 1480562618999L); -// assertEquals(d1s1Data.getInt(d1s1Data.length - 1), 9991); -// -// // verify d2.s2 -// DynamicOneColumnData d2s2Data = onePassQueryDataSet.mapRet.get("d2.s2"); -// assertEquals(d2s2Data.length, 750); -// assertEquals(d2s2Data.getTime(500), 1480562618670L); -// assertEquals(d2s2Data.getLong(500), 6702L); -// assertEquals(d2s2Data.getTime(d2s2Data.length - 1), 1480562618999L); -// assertEquals(d2s2Data.getLong(d2s2Data.length - 1), 9992L); - } - - @Test - public void queryTwoMeasurementsWithSingleFilterTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(configWithTwoSeriesTimeValueNoCrossFilter2, fileName); - - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - System.out.println(r); - } - - } - - @Test - public void queryWithTwoSeriesTimeValueFilterCrossTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(configWithTwoSeriesTimeValueCrossFilter, fileName); - - // time filter & value filter - // verify d1.s1, d2.s1 - int cnt = 1; - while
(onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 1) { - assertEquals(r.timestamp, 1480562618973L); - } else if (cnt == 2) { - assertEquals(r.timestamp, 1480562618974L); - } else if (cnt == 3) { - assertEquals(r.timestamp, 1480562618975L); - } - //System.out.println(r); - cnt++; - } - assertEquals(cnt, 5); - } - - @Test - public void queryWithCrossSeriesTimeValueFilterTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(configWithCrossSeriesTimeValueFilter, fileName); - // time filter & value filter - // verify d1.s1, d2.s1 - /** - 1480562618950 9501 9502 - 1480562618954 9541 9542 - 1480562618955 9551 9552 - 1480562618956 9561 9562 - */ - int cnt = 1; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 1) { - assertEquals(r.timestamp, 1480562618950L); - } else if (cnt == 2) { - assertEquals(r.timestamp, 1480562618954L); - } else if (cnt == 3) { - assertEquals(r.timestamp, 1480562618955L); - } else if (cnt == 4) { - assertEquals(r.timestamp, 1480562618956L); - } - //System.out.println(r); - cnt++; - } - assertEquals(cnt, 5); - - OnePassQueryDataSet onePassQueryDataSetOrOpe = QueryEngine.query(configWithCrossSeriesTimeValueFilterOrOpe, fileName); - // time filter & value filter - // verify d1.s1, d2.s1 - /** - 1480562618910 9101 9102 - 1480562618911 9111 9112 - 1480562618912 9121 9122 - 1480562618913 9131 9132 - 1480562618914 9141 9142 - 1480562618915 9151 9152 - 1480562618930 9301 9302 - 1480562618931 9311 9312 - 1480562618932 9321 9322 - 1480562618933 9331 9332 - */ - cnt = 1; - while (onePassQueryDataSetOrOpe.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSetOrOpe.getNextRecord(); - //System.out.println(r); - if (cnt == 4) { - assertEquals(r.timestamp, 1480562618913L); - } else if (cnt == 7) { - assertEquals(r.timestamp, 1480562618930L); - } - cnt++; - } - assertEquals(cnt, 11); - } - - // @Test - public void queryBooleanTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(booleanConfig, fileName); - int cnt = 1; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 1) { - assertEquals(r.getTime(), 1480562618972L); - Field f1 = r.getFields().get(0); - assertEquals(f1.getBoolV(), false); - } - if (cnt == 2) { - assertEquals(r.getTime(), 1480562618981L); - Field f2 = r.getFields().get(0); - assertEquals(f2.getBoolV(), false); - } - cnt++; - } - } - - @Test - public void queryStringTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(lessStringConfig, fileName); - int cnt = 0; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 1) { - assertEquals(r.getTime(), 1480562618976L); - Field f1 = r.getFields().get(0); - assertEquals(f1.getStringValue(), "dog976"); - } - // System.out.println(r); - cnt++; - } - Assert.assertEquals(cnt, 0); - - onePassQueryDataSet = QueryEngine.query(greatStringConfig, fileName); - cnt = 0; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 0) { - assertEquals(r.getTime(), 1480562618976L); - Field f1 = r.getFields().get(0); - assertEquals(f1.getStringValue(), "dog976"); - } - // System.out.println(r); - cnt++; - } - Assert.assertEquals(cnt, 1); - } - - @Test - public void queryFloatTest() throws IOException { - OnePassQueryDataSet 
onePassQueryDataSet = QueryEngine.query(floatConfig, fileName); - int cnt = 0; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 1) { - assertEquals(r.getTime(), 1480562618980L); - Field f1 = r.getFields().get(0); - assertEquals(f1.getFloatV(), 108.0, 0.0); - } - if (cnt == 2) { - assertEquals(r.getTime(), 1480562618990L); - Field f2 = r.getFields().get(0); - assertEquals(f2.getFloatV(), 110.0, 0.0); - } - cnt++; - } - } - - @Test - public void queryDoubleTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(doubleConfig, fileName); - int cnt = 1; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 1) { - assertEquals(r.getTime(), 1480562618022L); - Field f1 = r.getFields().get(0); - assertEquals(f1.getDoubleV(), 2.0, 0.0); - } - if (cnt == 2) { - assertEquals(r.getTime(), 1480562618033L); - Field f1 = r.getFields().get(0); - assertEquals(f1.getDoubleV(), 3.0, 0.0); - } - cnt++; - } - } -} +package cn.edu.tsinghua.tsfile.timeseries.read; + +import static org.junit.Assert.assertEquals; +import java.io.IOException; +import cn.edu.tsinghua.tsfile.common.utils.ITsRandomAccessFileReader; +import cn.edu.tsinghua.tsfile.timeseries.read.query.OnePassQueryDataSet; +import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryEngine; +import cn.edu.tsinghua.tsfile.timeseries.read.support.Field; +import cn.edu.tsinghua.tsfile.timeseries.read.support.OldRowRecord; +import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryConfig; + +public class ReadTest { + + private static String fileName = "src/test/resources/perTestOutputData.ksn"; + private static TsRandomAccessLocalFileReader inputFile; + private static QueryEngine engine = null; + static ITsRandomAccessFileReader raf; + private static QueryConfig configOneSeriesWithNoFilter = new QueryConfig("d1.s1"); + + private static QueryConfig configTwoSeriesWithNoFilter = new QueryConfig("d1.s1|d2.s2"); + + private static QueryConfig configWithTwoSeriesTimeValueNoCrossFilter = new QueryConfig( + "d2.s1|d2.s4", "0,(>=1480562618970)&(<1480562618977)", "null", "2,d2.s2,(>9722)"); + private static QueryConfig configWithTwoSeriesTimeValueNoCrossFilter2 = + new QueryConfig("d2.s2", "0,(>=1480562618970)&(<1480562618977)", "null", "2,d2.s2,(!=9722)"); + + private static QueryConfig configWithTwoSeriesTimeValueCrossFilter = new QueryConfig( + "d1.s1|d2.s2", "0,(>=1480562618970)&(<1480562618977)", "null", "[2,d2.s2,(>9722)]"); + + private static QueryConfig configWithCrossSeriesTimeValueFilter = + new QueryConfig("d1.s1|d2.s2", "0,(>=1480562618950)&(<=1480562618960)", "null", + "[2,d2.s3,(>9541)|(<=9511)]&[2,d1.s1,(<=9562)]"); + + private static QueryConfig configWithCrossSeriesTimeValueFilterOrOpe = + new QueryConfig("d1.s1|d2.s2", + "0,((>=1480562618906)&(<=1480562618915))|((>=1480562618928)&(<=1480562618933))", "null", + "[2,d1.s1,(<=9321)]|[2,d2.s2,(>9312)]"); + + private static QueryConfig booleanConfig = + new QueryConfig("d1.s5", "0,(>=1480562618970)&(<=1480562618981)", "null", "2,d1.s5,(=false)"); + + private static QueryConfig greatStringConfig = + new QueryConfig("d1.s4", "0,(>=1480562618970)&(<=1480562618981)", "null", "2,d1.s4,(>dog97)"); + + private static QueryConfig lessStringConfig = + new QueryConfig("d1.s4", 
"0,(>=1480562618970)&(<=1480562618981)", "null", "2,d1.s4,(=1480562618970)&(<=1480562618981)", "null", "2,d1.s6,(>103.0)"); + + private static QueryConfig doubleConfig = + new QueryConfig("d1.s7", "0,(>=1480562618021)&(<=1480562618033)", "null", "2,d1.s7,(<=7.0)"); + + private static QueryConfig floatDoubleConfigFilter = + new QueryConfig("d1.s6", "0,(>=1480562618005)&(<1480562618977)", "null", "2,d1.s6,(>=103.0)"); + + @Before + public void prepare() throws IOException, InterruptedException, WriteProcessException { + ReadPerf.generateFile(); + } + + @After + public void after() { + ReadPerf.after(); + } + + @Test + public void queryOneMeasurementWithoutFilterTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = + QueryEngine.query(configOneSeriesWithNoFilter, fileName); + + int count = 0; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (count == 0) { + assertEquals(r.timestamp, 1480562618010L); + } + if (count == 499) { + assertEquals(r.timestamp, 1480562618999L); + } + count++; + } + assertEquals(count, 500); + } + + @Test + public void queryTwoMeasurementsWithoutFilterTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = + QueryEngine.query(configTwoSeriesWithNoFilter, fileName); + int count = 0; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (count == 0) { + if (count == 0) { + assertEquals(1480562618005L, r.timestamp); + } + } + count++; + } + assertEquals(count, 750); + // // verify d1.s1 + // DynamicOneColumnData d1s1Data = onePassQueryDataSet.mapRet.get("d1.s1"); + // assertEquals(d1s1Data.length, 500); + // assertEquals(d1s1Data.getTime(0), 1480562618010L); + // assertEquals(d1s1Data.getInt(0), 101); + // assertEquals(d1s1Data.getTime(d1s1Data.length - 1), 1480562618999L); + // assertEquals(d1s1Data.getInt(d1s1Data.length - 1), 9991); + // + // // verify d2.s2 + // DynamicOneColumnData d2s2Data = onePassQueryDataSet.mapRet.get("d2.s2"); + // assertEquals(d2s2Data.length, 750); + // assertEquals(d2s2Data.getTime(500), 1480562618670L); + // assertEquals(d2s2Data.getLong(500), 6702L); + // assertEquals(d2s2Data.getTime(d2s2Data.length - 1), 1480562618999L); + // assertEquals(d2s2Data.getLong(d2s2Data.length - 1), 9992L); + } + + @Test + public void queryTwoMeasurementsWithSingleFilterTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = + QueryEngine.query(configWithTwoSeriesTimeValueNoCrossFilter2, fileName); + + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + System.out.println(r); + } + + } + + @Test + public void queryWithTwoSeriesTimeValueFilterCrossTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = + QueryEngine.query(configWithTwoSeriesTimeValueCrossFilter, fileName); + + // time filter & value filter + // verify d1.s1, d2.s1 + int cnt = 1; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 1) { + assertEquals(r.timestamp, 1480562618973L); + } else if (cnt == 2) { + assertEquals(r.timestamp, 1480562618974L); + } else if (cnt == 3) { + assertEquals(r.timestamp, 1480562618975L); + } + // System.out.println(r); + cnt++; + } + assertEquals(cnt, 5); + } + + @Test + public void queryWithCrossSeriesTimeValueFilterTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = + QueryEngine.query(configWithCrossSeriesTimeValueFilter, fileName); + // time filter & 
value filter + // verify d1.s1, d2.s1 + /** + * 1480562618950 9501 9502 1480562618954 9541 9542 1480562618955 9551 9552 1480562618956 9561 + * 9562 + */ + int cnt = 1; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 1) { + assertEquals(r.timestamp, 1480562618950L); + } else if (cnt == 2) { + assertEquals(r.timestamp, 1480562618954L); + } else if (cnt == 3) { + assertEquals(r.timestamp, 1480562618955L); + } else if (cnt == 4) { + assertEquals(r.timestamp, 1480562618956L); + } + // System.out.println(r); + cnt++; + } + assertEquals(cnt, 5); + + OnePassQueryDataSet onePassQueryDataSetOrOpe = + QueryEngine.query(configWithCrossSeriesTimeValueFilterOrOpe, fileName); + // time filter & value filter + // verify d1.s1, d2.s1 + /** + * 1480562618910 9101 9102 1480562618911 9111 9112 1480562618912 9121 9122 1480562618913 9131 + * 9132 1480562618914 9141 9142 1480562618915 9151 9152 1480562618930 9301 9302 1480562618931 + * 9311 9312 1480562618932 9321 9322 1480562618933 9331 9332 + */ + cnt = 1; + while (onePassQueryDataSetOrOpe.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSetOrOpe.getNextRecord(); + // System.out.println(r); + if (cnt == 4) { + assertEquals(r.timestamp, 1480562618913L); + } else if (cnt == 7) { + assertEquals(r.timestamp, 1480562618930L); + } + cnt++; + } + assertEquals(cnt, 11); + } + + // @Test + public void queryBooleanTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(booleanConfig, fileName); + int cnt = 1; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 1) { + assertEquals(r.getTime(), 1480562618972L); + Field f1 = r.getFields().get(0); + assertEquals(f1.getBoolV(), false); + } + if (cnt == 2) { + assertEquals(r.getTime(), 1480562618981L); + Field f2 = r.getFields().get(0); + assertEquals(f2.getBoolV(), false); + } + cnt++; + } + } + + @Test + public void queryStringTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(lessStringConfig, fileName); + int cnt = 0; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 1) { + assertEquals(r.getTime(), 1480562618976L); + Field f1 = r.getFields().get(0); + assertEquals(f1.getStringValue(), "dog976"); + } + // System.out.println(r); + cnt++; + } + Assert.assertEquals(cnt, 0); + + onePassQueryDataSet = QueryEngine.query(greatStringConfig, fileName); + cnt = 0; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 0) { + assertEquals(r.getTime(), 1480562618976L); + Field f1 = r.getFields().get(0); + assertEquals(f1.getStringValue(), "dog976"); + } + // System.out.println(r); + cnt++; + } + Assert.assertEquals(cnt, 1); + } + + @Test + public void queryFloatTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(floatConfig, fileName); + int cnt = 0; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 1) { + assertEquals(r.getTime(), 1480562618980L); + Field f1 = r.getFields().get(0); + assertEquals(f1.getFloatV(), 108.0, 0.0); + } + if (cnt == 2) { + assertEquals(r.getTime(), 1480562618990L); + Field f2 = r.getFields().get(0); + assertEquals(f2.getFloatV(), 110.0, 0.0); + } + cnt++; + } + } + + @Test + public void queryDoubleTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = 
QueryEngine.query(doubleConfig, fileName); + int cnt = 1; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 1) { + assertEquals(r.getTime(), 1480562618022L); + Field f1 = r.getFields().get(0); + assertEquals(f1.getDoubleV(), 2.0, 0.0); + } + if (cnt == 2) { + assertEquals(r.getTime(), 1480562618033L); + Field f1 = r.getFields().get(0); + assertEquals(f1.getDoubleV(), 3.0, 0.0); + } + cnt++; + } + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/TimePlainEncodeReadTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/TimePlainEncodeReadTest.java index 7570c213..af37591c 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/TimePlainEncodeReadTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/TimePlainEncodeReadTest.java @@ -1,303 +1,300 @@ -package cn.edu.tsinghua.tsfile.timeseries.read; - -import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; -import cn.edu.tsinghua.tsfile.common.utils.ITsRandomAccessFileReader; -import cn.edu.tsinghua.tsfile.timeseries.read.query.OnePassQueryDataSet; -import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryConfig; -import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryEngine; -import cn.edu.tsinghua.tsfile.timeseries.read.support.Field; -import cn.edu.tsinghua.tsfile.timeseries.read.support.OldRowRecord; -import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; - -import static org.junit.Assert.assertEquals; - -public class TimePlainEncodeReadTest { - - private static String fileName = "src/test/resources/perTestOutputData.ksn"; - private static TsRandomAccessLocalFileReader inputFile; - private static QueryEngine engine = null; - static ITsRandomAccessFileReader raf; - private static QueryConfig configOneSeriesWithNoFilter = new QueryConfig("d1.s1"); - - private static QueryConfig configTwoSeriesWithNoFilter = new QueryConfig("d1.s1|d2.s2"); - - private static QueryConfig configWithTwoSeriesTimeValueNoCrossFilter = new QueryConfig("d2.s1|d2.s4", - "0,(>=1480562618970)&(<1480562618977)", "null", "2,d2.s2,(>9722)"); - private static QueryConfig configWithTwoSeriesTimeValueNoCrossFilter2 = new QueryConfig("d2.s2", - "0,(>=1480562618970)&(<1480562618977)", "null", "2,d2.s2,(!=9722)"); - - private static QueryConfig configWithTwoSeriesTimeValueCrossFilter = new QueryConfig("d1.s1|d2.s2", - "0,(>=1480562618970)&(<1480562618977)", "null", "[2,d2.s2,(>9722)]"); - - private static QueryConfig configWithCrossSeriesTimeValueFilter = new QueryConfig("d1.s1|d2.s2", - "0,(>=1480562618950)&(<=1480562618960)", "null", "[2,d2.s3,(>9541)|(<=9511)]&[2,d1.s1,(<=9562)]"); - - private static QueryConfig configWithCrossSeriesTimeValueFilterOrOpe = new QueryConfig("d1.s1|d2.s2", - "0,((>=1480562618906)&(<=1480562618915))|((>=1480562618928)&(<=1480562618933))", "null", - "[2,d1.s1,(<=9321)]|[2,d2.s2,(>9312)]"); - - private static QueryConfig booleanConfig = new QueryConfig("d1.s5", - "0,(>=1480562618970)&(<=1480562618981)", "null", "2,d1.s5,(=false)"); - - private static QueryConfig greatStringConfig = new QueryConfig("d1.s4", - "0,(>=1480562618970)&(<=1480562618981)", "null", "2,d1.s4,(>dog97)"); - - private static QueryConfig lessStringConfig = new QueryConfig("d1.s4", - "0,(>=1480562618970)&(<=1480562618981)", "null", "2,d1.s4,(<dog97)"); - - private static QueryConfig floatConfig = new QueryConfig("d1.s6", - "0,(>=1480562618970)&(<=1480562618981)", "null", 
"2,d1.s6,(>103.0)"); - - private static QueryConfig doubleConfig = new QueryConfig("d1.s7", - "0,(>=1480562618021)&(<=1480562618033)", "null", "2,d1.s7,(<=7.0)"); - - private static QueryConfig floatDoubleConfigFilter = new QueryConfig("d1.s6", - "0,(>=1480562618005)&(<1480562618977)", "null", "2,d1.s6,(>=103.0)"); - - @Before - public void prepare() throws IOException, InterruptedException, WriteProcessException { - TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder = "PLAIN"; - ReadPerf.generateFile(); - } - - @After - public void after() { - ReadPerf.after(); - } - - @Test - public void queryOneMeasurementWithoutFilterTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(configOneSeriesWithNoFilter, fileName); - - int count = 0; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (count == 0) { - assertEquals(r.timestamp, 1480562618010L); - } - if (count == 499) { - assertEquals(r.timestamp, 1480562618999L); - } - count++; - } - assertEquals(count, 500); - } - - @Test - public void queryTwoMeasurementsWithoutFilterTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(configTwoSeriesWithNoFilter, fileName); - int count = 0; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (count == 0) { - if (count == 0) { - assertEquals(1480562618005L, r.timestamp); - } - } - count++; - } - assertEquals(count, 750); -// // verify d1.s1 -// DynamicOneColumnData d1s1Data = onePassQueryDataSet.mapRet.get("d1.s1"); -// assertEquals(d1s1Data.length, 500); -// assertEquals(d1s1Data.getTime(0), 1480562618010L); -// assertEquals(d1s1Data.getInt(0), 101); -// assertEquals(d1s1Data.getTime(d1s1Data.length - 1), 1480562618999L); -// assertEquals(d1s1Data.getInt(d1s1Data.length - 1), 9991); -// -// // verify d2.s2 -// DynamicOneColumnData d2s2Data = onePassQueryDataSet.mapRet.get("d2.s2"); -// assertEquals(d2s2Data.length, 750); -// assertEquals(d2s2Data.getTime(500), 1480562618670L); -// assertEquals(d2s2Data.getLong(500), 6702L); -// assertEquals(d2s2Data.getTime(d2s2Data.length - 1), 1480562618999L); -// assertEquals(d2s2Data.getLong(d2s2Data.length - 1), 9992L); - } - - @Test - public void queryTwoMeasurementsWithSingleFilterTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(configWithTwoSeriesTimeValueNoCrossFilter2, fileName); - - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - System.out.println(r); - } - - } - - @Test - public void queryWithTwoSeriesTimeValueFilterCrossTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(configWithTwoSeriesTimeValueCrossFilter, fileName); - - // time filter & value filter - // verify d1.s1, d2.s1 - int cnt = 1; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 1) { - assertEquals(r.timestamp, 1480562618973L); - } else if (cnt == 2) { - assertEquals(r.timestamp, 1480562618974L); - } else if (cnt == 3) { - assertEquals(r.timestamp, 1480562618975L); - } - //System.out.println(r); - cnt++; - } - assertEquals(cnt, 5); - } - - @Test - public void queryWithCrossSeriesTimeValueFilterTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(configWithCrossSeriesTimeValueFilter, fileName); - // time filter & value filter - // verify d1.s1, d2.s1 - /** - 
1480562618950 9501 9502 - 1480562618954 9541 9542 - 1480562618955 9551 9552 - 1480562618956 9561 9562 - */ - int cnt = 1; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 1) { - assertEquals(r.timestamp, 1480562618950L); - } else if (cnt == 2) { - assertEquals(r.timestamp, 1480562618954L); - } else if (cnt == 3) { - assertEquals(r.timestamp, 1480562618955L); - } else if (cnt == 4) { - assertEquals(r.timestamp, 1480562618956L); - } - //System.out.println(r); - cnt++; - } - assertEquals(cnt, 5); - - OnePassQueryDataSet onePassQueryDataSetOrOpe = QueryEngine.query(configWithCrossSeriesTimeValueFilterOrOpe, fileName); - // time filter & value filter - // verify d1.s1, d2.s1 - /** - 1480562618910 9101 9102 - 1480562618911 9111 9112 - 1480562618912 9121 9122 - 1480562618913 9131 9132 - 1480562618914 9141 9142 - 1480562618915 9151 9152 - 1480562618930 9301 9302 - 1480562618931 9311 9312 - 1480562618932 9321 9322 - 1480562618933 9331 9332 - */ - cnt = 1; - while (onePassQueryDataSetOrOpe.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSetOrOpe.getNextRecord(); - //System.out.println(r); - if (cnt == 4) { - assertEquals(r.timestamp, 1480562618913L); - } else if (cnt == 7) { - assertEquals(r.timestamp, 1480562618930L); - } - cnt++; - } - assertEquals(cnt, 11); - } - - // @Test - public void queryBooleanTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(booleanConfig, fileName); - int cnt = 1; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 1) { - assertEquals(r.getTime(), 1480562618972L); - Field f1 = r.getFields().get(0); - assertEquals(f1.getBoolV(), false); - } - if (cnt == 2) { - assertEquals(r.getTime(), 1480562618981L); - Field f2 = r.getFields().get(0); - assertEquals(f2.getBoolV(), false); - } - cnt++; - } - } - - @Test - public void queryStringTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(lessStringConfig, fileName); - int cnt = 0; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 1) { - assertEquals(r.getTime(), 1480562618976L); - Field f1 = r.getFields().get(0); - assertEquals(f1.getStringValue(), "dog976"); - } - // System.out.println(r); - cnt++; - } - Assert.assertEquals(cnt, 0); - - onePassQueryDataSet = QueryEngine.query(greatStringConfig, fileName); - cnt = 0; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 0) { - assertEquals(r.getTime(), 1480562618976L); - Field f1 = r.getFields().get(0); - assertEquals(f1.getStringValue(), "dog976"); - } - // System.out.println(r); - cnt++; - } - Assert.assertEquals(cnt, 1); - } - - @Test - public void queryFloatTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(floatConfig, fileName); - int cnt = 0; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 1) { - assertEquals(r.getTime(), 1480562618980L); - Field f1 = r.getFields().get(0); - assertEquals(f1.getFloatV(), 108.0, 0.0); - } - if (cnt == 2) { - assertEquals(r.getTime(), 1480562618990L); - Field f2 = r.getFields().get(0); - assertEquals(f2.getFloatV(), 110.0, 0.0); - } - cnt++; - } - } - - @Test - public void queryDoubleTest() throws IOException { - OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(doubleConfig, fileName); 
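For reference, the read pattern these assertions exercise is: build a QueryConfig from a series-selection string plus optional time-filter, frequency-filter ("null" means none) and value-filter strings, pass it to the static QueryEngine.query(config, fileName), then iterate the returned OnePassQueryDataSet record by record. A minimal sketch, assuming a TsFile already produced by ReadPerf.generateFile() and using only the calls that appear in these tests; the output-file path below is a hypothetical placeholder, not the real test resource:

    // Sketch only, not part of this patch. Same imports as TimePlainEncodeReadTest;
    // "some-output.ksn" is a placeholder path.
    QueryConfig config = new QueryConfig("d1.s7",
        "0,(>=1480562618021)&(<=1480562618033)",   // time filter
        "null",                                    // no frequency filter
        "2,d1.s7,(<=7.0)");                        // value filter on d1.s7
    OnePassQueryDataSet dataSet = QueryEngine.query(config, "some-output.ksn");
    while (dataSet.hasNextRecord()) {
      OldRowRecord record = dataSet.getNextRecord();
      Field first = record.getFields().get(0);
      System.out.println(record.getTime() + " -> " + first.getDoubleV());
    }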
- int cnt = 1; - while (onePassQueryDataSet.hasNextRecord()) { - OldRowRecord r = onePassQueryDataSet.getNextRecord(); - if (cnt == 1) { - assertEquals(r.getTime(), 1480562618022L); - Field f1 = r.getFields().get(0); - assertEquals(f1.getDoubleV(), 2.0, 0.0); - } - if (cnt == 2) { - assertEquals(r.getTime(), 1480562618033L); - Field f1 = r.getFields().get(0); - assertEquals(f1.getDoubleV(), 3.0, 0.0); - } - cnt++; - } - } -} +package cn.edu.tsinghua.tsfile.timeseries.read; + +import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; +import cn.edu.tsinghua.tsfile.common.utils.ITsRandomAccessFileReader; +import cn.edu.tsinghua.tsfile.timeseries.read.query.OnePassQueryDataSet; +import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryConfig; +import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryEngine; +import cn.edu.tsinghua.tsfile.timeseries.read.support.Field; +import cn.edu.tsinghua.tsfile.timeseries.read.support.OldRowRecord; +import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import java.io.IOException; +import static org.junit.Assert.assertEquals; + +public class TimePlainEncodeReadTest { + + private static String fileName = "src/test/resources/perTestOutputData.ksn"; + private static TsRandomAccessLocalFileReader inputFile; + private static QueryEngine engine = null; + static ITsRandomAccessFileReader raf; + private static QueryConfig configOneSeriesWithNoFilter = new QueryConfig("d1.s1"); + + private static QueryConfig configTwoSeriesWithNoFilter = new QueryConfig("d1.s1|d2.s2"); + + private static QueryConfig configWithTwoSeriesTimeValueNoCrossFilter = new QueryConfig( + "d2.s1|d2.s4", "0,(>=1480562618970)&(<1480562618977)", "null", "2,d2.s2,(>9722)"); + private static QueryConfig configWithTwoSeriesTimeValueNoCrossFilter2 = + new QueryConfig("d2.s2", "0,(>=1480562618970)&(<1480562618977)", "null", "2,d2.s2,(!=9722)"); + + private static QueryConfig configWithTwoSeriesTimeValueCrossFilter = new QueryConfig( + "d1.s1|d2.s2", "0,(>=1480562618970)&(<1480562618977)", "null", "[2,d2.s2,(>9722)]"); + + private static QueryConfig configWithCrossSeriesTimeValueFilter = + new QueryConfig("d1.s1|d2.s2", "0,(>=1480562618950)&(<=1480562618960)", "null", + "[2,d2.s3,(>9541)|(<=9511)]&[2,d1.s1,(<=9562)]"); + + private static QueryConfig configWithCrossSeriesTimeValueFilterOrOpe = + new QueryConfig("d1.s1|d2.s2", + "0,((>=1480562618906)&(<=1480562618915))|((>=1480562618928)&(<=1480562618933))", "null", + "[2,d1.s1,(<=9321)]|[2,d2.s2,(>9312)]"); + + private static QueryConfig booleanConfig = + new QueryConfig("d1.s5", "0,(>=1480562618970)&(<=1480562618981)", "null", "2,d1.s5,(=false)"); + + private static QueryConfig greatStringConfig = + new QueryConfig("d1.s4", "0,(>=1480562618970)&(<=1480562618981)", "null", "2,d1.s4,(>dog97)"); + + private static QueryConfig lessStringConfig = + new QueryConfig("d1.s4", "0,(>=1480562618970)&(<=1480562618981)", "null", "2,d1.s4,(=1480562618970)&(<=1480562618981)", "null", "2,d1.s6,(>103.0)"); + + private static QueryConfig doubleConfig = + new QueryConfig("d1.s7", "0,(>=1480562618021)&(<=1480562618033)", "null", "2,d1.s7,(<=7.0)"); + + private static QueryConfig floatDoubleConfigFilter = + new QueryConfig("d1.s6", "0,(>=1480562618005)&(<1480562618977)", "null", "2,d1.s6,(>=103.0)"); + + @Before + public void prepare() throws IOException, InterruptedException, WriteProcessException { + 
TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder = "PLAIN"; + ReadPerf.generateFile(); + } + + @After + public void after() { + ReadPerf.after(); + } + + @Test + public void queryOneMeasurementWithoutFilterTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = + QueryEngine.query(configOneSeriesWithNoFilter, fileName); + + int count = 0; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (count == 0) { + assertEquals(r.timestamp, 1480562618010L); + } + if (count == 499) { + assertEquals(r.timestamp, 1480562618999L); + } + count++; + } + assertEquals(count, 500); + } + + @Test + public void queryTwoMeasurementsWithoutFilterTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = + QueryEngine.query(configTwoSeriesWithNoFilter, fileName); + int count = 0; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (count == 0) { + if (count == 0) { + assertEquals(1480562618005L, r.timestamp); + } + } + count++; + } + assertEquals(count, 750); + // // verify d1.s1 + // DynamicOneColumnData d1s1Data = onePassQueryDataSet.mapRet.get("d1.s1"); + // assertEquals(d1s1Data.length, 500); + // assertEquals(d1s1Data.getTime(0), 1480562618010L); + // assertEquals(d1s1Data.getInt(0), 101); + // assertEquals(d1s1Data.getTime(d1s1Data.length - 1), 1480562618999L); + // assertEquals(d1s1Data.getInt(d1s1Data.length - 1), 9991); + // + // // verify d2.s2 + // DynamicOneColumnData d2s2Data = onePassQueryDataSet.mapRet.get("d2.s2"); + // assertEquals(d2s2Data.length, 750); + // assertEquals(d2s2Data.getTime(500), 1480562618670L); + // assertEquals(d2s2Data.getLong(500), 6702L); + // assertEquals(d2s2Data.getTime(d2s2Data.length - 1), 1480562618999L); + // assertEquals(d2s2Data.getLong(d2s2Data.length - 1), 9992L); + } + + @Test + public void queryTwoMeasurementsWithSingleFilterTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = + QueryEngine.query(configWithTwoSeriesTimeValueNoCrossFilter2, fileName); + + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + System.out.println(r); + } + + } + + @Test + public void queryWithTwoSeriesTimeValueFilterCrossTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = + QueryEngine.query(configWithTwoSeriesTimeValueCrossFilter, fileName); + + // time filter & value filter + // verify d1.s1, d2.s1 + int cnt = 1; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 1) { + assertEquals(r.timestamp, 1480562618973L); + } else if (cnt == 2) { + assertEquals(r.timestamp, 1480562618974L); + } else if (cnt == 3) { + assertEquals(r.timestamp, 1480562618975L); + } + // System.out.println(r); + cnt++; + } + assertEquals(cnt, 5); + } + + @Test + public void queryWithCrossSeriesTimeValueFilterTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = + QueryEngine.query(configWithCrossSeriesTimeValueFilter, fileName); + // time filter & value filter + // verify d1.s1, d2.s1 + /** + * 1480562618950 9501 9502 1480562618954 9541 9542 1480562618955 9551 9552 1480562618956 9561 + * 9562 + */ + int cnt = 1; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 1) { + assertEquals(r.timestamp, 1480562618950L); + } else if (cnt == 2) { + assertEquals(r.timestamp, 1480562618954L); + } else if (cnt == 3) { + 
assertEquals(r.timestamp, 1480562618955L); + } else if (cnt == 4) { + assertEquals(r.timestamp, 1480562618956L); + } + // System.out.println(r); + cnt++; + } + assertEquals(cnt, 5); + + OnePassQueryDataSet onePassQueryDataSetOrOpe = + QueryEngine.query(configWithCrossSeriesTimeValueFilterOrOpe, fileName); + // time filter & value filter + // verify d1.s1, d2.s1 + /** + * 1480562618910 9101 9102 1480562618911 9111 9112 1480562618912 9121 9122 1480562618913 9131 + * 9132 1480562618914 9141 9142 1480562618915 9151 9152 1480562618930 9301 9302 1480562618931 + * 9311 9312 1480562618932 9321 9322 1480562618933 9331 9332 + */ + cnt = 1; + while (onePassQueryDataSetOrOpe.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSetOrOpe.getNextRecord(); + // System.out.println(r); + if (cnt == 4) { + assertEquals(r.timestamp, 1480562618913L); + } else if (cnt == 7) { + assertEquals(r.timestamp, 1480562618930L); + } + cnt++; + } + assertEquals(cnt, 11); + } + + // @Test + public void queryBooleanTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(booleanConfig, fileName); + int cnt = 1; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 1) { + assertEquals(r.getTime(), 1480562618972L); + Field f1 = r.getFields().get(0); + assertEquals(f1.getBoolV(), false); + } + if (cnt == 2) { + assertEquals(r.getTime(), 1480562618981L); + Field f2 = r.getFields().get(0); + assertEquals(f2.getBoolV(), false); + } + cnt++; + } + } + + @Test + public void queryStringTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(lessStringConfig, fileName); + int cnt = 0; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 1) { + assertEquals(r.getTime(), 1480562618976L); + Field f1 = r.getFields().get(0); + assertEquals(f1.getStringValue(), "dog976"); + } + // System.out.println(r); + cnt++; + } + Assert.assertEquals(cnt, 0); + + onePassQueryDataSet = QueryEngine.query(greatStringConfig, fileName); + cnt = 0; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 0) { + assertEquals(r.getTime(), 1480562618976L); + Field f1 = r.getFields().get(0); + assertEquals(f1.getStringValue(), "dog976"); + } + // System.out.println(r); + cnt++; + } + Assert.assertEquals(cnt, 1); + } + + @Test + public void queryFloatTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(floatConfig, fileName); + int cnt = 0; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 1) { + assertEquals(r.getTime(), 1480562618980L); + Field f1 = r.getFields().get(0); + assertEquals(f1.getFloatV(), 108.0, 0.0); + } + if (cnt == 2) { + assertEquals(r.getTime(), 1480562618990L); + Field f2 = r.getFields().get(0); + assertEquals(f2.getFloatV(), 110.0, 0.0); + } + cnt++; + } + } + + @Test + public void queryDoubleTest() throws IOException { + OnePassQueryDataSet onePassQueryDataSet = QueryEngine.query(doubleConfig, fileName); + int cnt = 1; + while (onePassQueryDataSet.hasNextRecord()) { + OldRowRecord r = onePassQueryDataSet.getNextRecord(); + if (cnt == 1) { + assertEquals(r.getTime(), 1480562618022L); + Field f1 = r.getFields().get(0); + assertEquals(f1.getDoubleV(), 2.0, 0.0); + } + if (cnt == 2) { + assertEquals(r.getTime(), 1480562618033L); + Field f1 = r.getFields().get(0); + 
assertEquals(f1.getDoubleV(), 3.0, 0.0); + } + cnt++; + } + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/qp/PathTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/qp/PathTest.java index 2ef314ae..d85b4ad1 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/qp/PathTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/read/qp/PathTest.java @@ -2,63 +2,62 @@ import cn.edu.tsinghua.tsfile.timeseries.read.support.Path; import org.junit.Test; - import static org.junit.Assert.*; public class PathTest { - private void testPath(Path path, String device, String measurement, String full) { - assertEquals(device, path.getDeltaObjectToString()); - assertEquals(measurement, path.getMeasurementToString()); - assertEquals(full, path.getFullPath()); - } - - @Test - public void construct() throws Exception { - Path path = new Path("a.b.c"); - testPath(path, "a.b", "c", "a.b.c"); - path = new Path("c"); - testPath(path, "", "c", "c"); - path = new Path(""); - testPath(path, "", "", ""); - } - - @Test - public void startWith() throws Exception { - Path path = new Path("a.b.c"); - assertTrue(path.startWith(new Path(""))); - assertTrue(path.startWith(new Path("a"))); - assertTrue(path.startWith(new Path("a.b.c"))); - } - - @Test - public void mergePath() throws Exception { - Path prefix = new Path("a.b.c"); - Path suffix = new Path("d.e"); - Path suffix1 = new Path(""); - testPath(Path.mergePath(prefix, suffix), "a.b.c.d", "e", "a.b.c.d.e"); - testPath(Path.mergePath(prefix, suffix1), "a.b", "c", "a.b.c"); - } - - @Test - public void addHeadPath() throws Exception { - Path desc = new Path("a.b.c"); - Path head = new Path("d.e"); - Path head1 = new Path(""); - testPath(Path.addPrefixPath(desc, head), "d.e.a.b", "c", "d.e.a.b.c"); - testPath(Path.mergePath(desc, head1), "a.b", "c", "a.b.c"); - } - - @Test - public void replace() throws Exception { - Path src = new Path("a.b.c"); - Path rep1 = new Path(""); - Path rep2 = new Path("d"); - Path rep3 = new Path("d.e.f"); - Path rep4 = new Path("d.e.f.g"); - testPath(Path.replace(rep1,src), "a.b", "c", "a.b.c"); - testPath(Path.replace(rep2,src), "d.b", "c", "d.b.c"); - testPath(Path.replace(rep3,src), "d.e", "f", "d.e.f"); - testPath(Path.replace(rep4,src), "d.e.f", "g", "d.e.f.g"); - } - -} \ No newline at end of file + private void testPath(Path path, String device, String measurement, String full) { + assertEquals(device, path.getDeltaObjectToString()); + assertEquals(measurement, path.getMeasurementToString()); + assertEquals(full, path.getFullPath()); + } + + @Test + public void construct() throws Exception { + Path path = new Path("a.b.c"); + testPath(path, "a.b", "c", "a.b.c"); + path = new Path("c"); + testPath(path, "", "c", "c"); + path = new Path(""); + testPath(path, "", "", ""); + } + + @Test + public void startWith() throws Exception { + Path path = new Path("a.b.c"); + assertTrue(path.startWith(new Path(""))); + assertTrue(path.startWith(new Path("a"))); + assertTrue(path.startWith(new Path("a.b.c"))); + } + + @Test + public void mergePath() throws Exception { + Path prefix = new Path("a.b.c"); + Path suffix = new Path("d.e"); + Path suffix1 = new Path(""); + testPath(Path.mergePath(prefix, suffix), "a.b.c.d", "e", "a.b.c.d.e"); + testPath(Path.mergePath(prefix, suffix1), "a.b", "c", "a.b.c"); + } + + @Test + public void addHeadPath() throws Exception { + Path desc = new Path("a.b.c"); + Path head = new Path("d.e"); + Path head1 = new Path(""); + testPath(Path.addPrefixPath(desc, head), 
"d.e.a.b", "c", "d.e.a.b.c"); + testPath(Path.mergePath(desc, head1), "a.b", "c", "a.b.c"); + } + + @Test + public void replace() throws Exception { + Path src = new Path("a.b.c"); + Path rep1 = new Path(""); + Path rep2 = new Path("d"); + Path rep3 = new Path("d.e.f"); + Path rep4 = new Path("d.e.f.g"); + testPath(Path.replace(rep1, src), "a.b", "c", "a.b.c"); + testPath(Path.replace(rep2, src), "d.b", "c", "d.b.c"); + testPath(Path.replace(rep3, src), "d.e", "f", "d.e.f"); + testPath(Path.replace(rep4, src), "d.e.f", "g", "d.e.f.g"); + } + +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/PageReaderTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/PageReaderTest.java index bdc1899b..acf5faa3 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/PageReaderTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/PageReaderTest.java @@ -10,7 +10,6 @@ import cn.edu.tsinghua.tsfile.timeseries.write.series.ValueWriter; import org.junit.Assert; import org.junit.Test; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -20,177 +19,180 @@ */ public class PageReaderTest { - private static final int POINTS_COUNT_IN_ONE_PAGE = 1000000; - - @Test - public void testLong() { - - LoopWriteReadTest test = new LoopWriteReadTest("Test INT64", new LongRleEncoder(EndianType.BIG_ENDIAN), - new LongRleDecoder(EndianType.BIG_ENDIAN), TSDataType.INT64, POINTS_COUNT_IN_ONE_PAGE) { - @Override - public Object generateValueByIndex(int i) { - return Long.valueOf(Long.MAX_VALUE - i); - } - }; - test.test(); - } - - @Test - public void testBoolean() { - LoopWriteReadTest test = new LoopWriteReadTest("Test Boolean", new IntRleEncoder(EndianType.BIG_ENDIAN), - new IntRleDecoder(EndianType.BIG_ENDIAN), TSDataType.BOOLEAN, POINTS_COUNT_IN_ONE_PAGE) { - @Override - public Object generateValueByIndex(int i) { - return i % 3 == 0 ? 
true : false; - } - }; - test.test(); - } - - @Test - public void testInt() { - LoopWriteReadTest test = new LoopWriteReadTest("Test INT32", new IntRleEncoder(EndianType.BIG_ENDIAN), - new IntRleDecoder(EndianType.BIG_ENDIAN), TSDataType.INT32, POINTS_COUNT_IN_ONE_PAGE) { - @Override - public Object generateValueByIndex(int i) { - return Integer.valueOf(i); - } - }; - test.test(); - } + private static final int POINTS_COUNT_IN_ONE_PAGE = 1000000; - @Test - public void testFloat() { - LoopWriteReadTest test = new LoopWriteReadTest("Test FLOAT", new SinglePrecisionEncoder(), - new SinglePrecisionDecoder(), TSDataType.FLOAT, POINTS_COUNT_IN_ONE_PAGE) { - @Override - public Object generateValueByIndex(int i) { - return Float.valueOf(i) / 10 - Float.valueOf(i) / 100; - } - }; - test.test(); - - LoopWriteReadTest test2 = new LoopWriteReadTest("Test FLOAT", new SinglePrecisionEncoder(), - new SinglePrecisionDecoder(), TSDataType.FLOAT, POINTS_COUNT_IN_ONE_PAGE) { - @Override - public Object generateValueByIndex(int i) { - return Float.valueOf(i) / 100 - Float.valueOf(i) / 10; - } - }; - test2.test(); - } + @Test + public void testLong() { - @Test - public void testDouble() { - LoopWriteReadTest test = new LoopWriteReadTest("Test Double", new DoublePrecisionEncoder(), - new DoublePrecisionDecoder(), TSDataType.DOUBLE, POINTS_COUNT_IN_ONE_PAGE) { - @Override - public Object generateValueByIndex(int i) { - return Double.valueOf(i) / 10 - Double.valueOf(i) / 100; - } + LoopWriteReadTest test = + new LoopWriteReadTest("Test INT64", new LongRleEncoder(EndianType.BIG_ENDIAN), + new LongRleDecoder(EndianType.BIG_ENDIAN), TSDataType.INT64, POINTS_COUNT_IN_ONE_PAGE) { + @Override + public Object generateValueByIndex(int i) { + return Long.valueOf(Long.MAX_VALUE - i); + } }; - test.test(); - - LoopWriteReadTest test2 = new LoopWriteReadTest("Test Double", new DoublePrecisionEncoder(), - new DoublePrecisionDecoder(), TSDataType.DOUBLE, POINTS_COUNT_IN_ONE_PAGE) { - @Override - public Object generateValueByIndex(int i) { - return Double.valueOf(i) / 1000 - Double.valueOf(i) / 100; - } - }; - test2.test(); - } - - @Test - public void testBinary() { - LoopWriteReadTest test = new LoopWriteReadTest("Test Double", - new PlainEncoder(EndianType.LITTLE_ENDIAN, TSDataType.TEXT, 1000), - new PlainDecoder(EndianType.LITTLE_ENDIAN), - TSDataType.TEXT, - POINTS_COUNT_IN_ONE_PAGE) { - @Override - public Object generateValueByIndex(int i) { - return new Binary(new StringBuilder("TEST TEXT").append(i).toString()); - } + test.test(); + } + + @Test + public void testBoolean() { + LoopWriteReadTest test = new LoopWriteReadTest("Test Boolean", + new IntRleEncoder(EndianType.BIG_ENDIAN), new IntRleDecoder(EndianType.BIG_ENDIAN), + TSDataType.BOOLEAN, POINTS_COUNT_IN_ONE_PAGE) { + @Override + public Object generateValueByIndex(int i) { + return i % 3 == 0 ? 
true : false; + } + }; + test.test(); + } + + @Test + public void testInt() { + LoopWriteReadTest test = + new LoopWriteReadTest("Test INT32", new IntRleEncoder(EndianType.BIG_ENDIAN), + new IntRleDecoder(EndianType.BIG_ENDIAN), TSDataType.INT32, POINTS_COUNT_IN_ONE_PAGE) { + @Override + public Object generateValueByIndex(int i) { + return Integer.valueOf(i); + } }; - test.test(); + test.test(); + } + + @Test + public void testFloat() { + LoopWriteReadTest test = new LoopWriteReadTest("Test FLOAT", new SinglePrecisionEncoder(), + new SinglePrecisionDecoder(), TSDataType.FLOAT, POINTS_COUNT_IN_ONE_PAGE) { + @Override + public Object generateValueByIndex(int i) { + return Float.valueOf(i) / 10 - Float.valueOf(i) / 100; + } + }; + test.test(); + + LoopWriteReadTest test2 = new LoopWriteReadTest("Test FLOAT", new SinglePrecisionEncoder(), + new SinglePrecisionDecoder(), TSDataType.FLOAT, POINTS_COUNT_IN_ONE_PAGE) { + @Override + public Object generateValueByIndex(int i) { + return Float.valueOf(i) / 100 - Float.valueOf(i) / 10; + } + }; + test2.test(); + } + + @Test + public void testDouble() { + LoopWriteReadTest test = new LoopWriteReadTest("Test Double", new DoublePrecisionEncoder(), + new DoublePrecisionDecoder(), TSDataType.DOUBLE, POINTS_COUNT_IN_ONE_PAGE) { + @Override + public Object generateValueByIndex(int i) { + return Double.valueOf(i) / 10 - Double.valueOf(i) / 100; + } + }; + test.test(); + + LoopWriteReadTest test2 = new LoopWriteReadTest("Test Double", new DoublePrecisionEncoder(), + new DoublePrecisionDecoder(), TSDataType.DOUBLE, POINTS_COUNT_IN_ONE_PAGE) { + @Override + public Object generateValueByIndex(int i) { + return Double.valueOf(i) / 1000 - Double.valueOf(i) / 100; + } + }; + test2.test(); + } + + @Test + public void testBinary() { + LoopWriteReadTest test = new LoopWriteReadTest("Test Double", + new PlainEncoder(EndianType.LITTLE_ENDIAN, TSDataType.TEXT, 1000), + new PlainDecoder(EndianType.LITTLE_ENDIAN), TSDataType.TEXT, POINTS_COUNT_IN_ONE_PAGE) { + @Override + public Object generateValueByIndex(int i) { + return new Binary(new StringBuilder("TEST TEXT").append(i).toString()); + } + }; + test.test(); + } + + private abstract static class LoopWriteReadTest { + private Encoder encoder; + private Decoder decoder; + private TSDataType dataType; + private ValueWriter valueWriter; + private String name; + private int count; + + public LoopWriteReadTest(String name, Encoder encoder, Decoder decoder, TSDataType dataType, + int count) { + this.name = name; + this.encoder = encoder; + this.decoder = decoder; + this.dataType = dataType; + this.count = count; } - private abstract static class LoopWriteReadTest { - private Encoder encoder; - private Decoder decoder; - private TSDataType dataType; - private ValueWriter valueWriter; - private String name; - private int count; - - public LoopWriteReadTest(String name, Encoder encoder, Decoder decoder, TSDataType dataType, int count) { - this.name = name; - this.encoder = encoder; - this.decoder = decoder; - this.dataType = dataType; - this.count = count; - } - - public void test() { - try { - valueWriter = new ValueWriter(); - valueWriter.setTimeEncoder(new DeltaBinaryEncoder.LongDeltaEncoder()); - valueWriter.setValueEncoder(this.encoder); - writeData(); - - InputStream page = new ByteArrayInputStream(valueWriter.getBytes().toByteArray()); - PageReader pageReader = new PageReader(page, dataType, decoder, new DeltaBinaryDecoder.LongDeltaDecoder()); - - int index = 0; - long startTimestamp = System.currentTimeMillis(); - while 
(pageReader.hasNext()) { - TimeValuePair timeValuePair = pageReader.next(); - Assert.assertEquals(Long.valueOf(index), (Long) timeValuePair.getTimestamp()); - Assert.assertEquals(generateValueByIndex(index), timeValuePair.getValue().getValue()); - index++; - } - long endTimestamp = System.currentTimeMillis(); - System.out.println("TestName: [" + name + "]\n\tTSDataType: " + dataType + - "\tRead-Count:" + count + "\tTime-used:" + (endTimestamp - startTimestamp) + "ms"); - Assert.assertEquals(count, index); - } catch (IOException e) { - e.printStackTrace(); - Assert.fail("Fail when executing test: [" + name + "]"); - } + public void test() { + try { + valueWriter = new ValueWriter(); + valueWriter.setTimeEncoder(new DeltaBinaryEncoder.LongDeltaEncoder()); + valueWriter.setValueEncoder(this.encoder); + writeData(); + + InputStream page = new ByteArrayInputStream(valueWriter.getBytes().toByteArray()); + PageReader pageReader = + new PageReader(page, dataType, decoder, new DeltaBinaryDecoder.LongDeltaDecoder()); + + int index = 0; + long startTimestamp = System.currentTimeMillis(); + while (pageReader.hasNext()) { + TimeValuePair timeValuePair = pageReader.next(); + Assert.assertEquals(Long.valueOf(index), (Long) timeValuePair.getTimestamp()); + Assert.assertEquals(generateValueByIndex(index), timeValuePair.getValue().getValue()); + index++; } + long endTimestamp = System.currentTimeMillis(); + System.out.println("TestName: [" + name + "]\n\tTSDataType: " + dataType + "\tRead-Count:" + + count + "\tTime-used:" + (endTimestamp - startTimestamp) + "ms"); + Assert.assertEquals(count, index); + } catch (IOException e) { + e.printStackTrace(); + Assert.fail("Fail when executing test: [" + name + "]"); + } + } - private void writeData() throws IOException { - for (int i = 0; i < count; i++) { - switch (dataType) { - case BOOLEAN: - valueWriter.write(Long.valueOf(i), (Boolean) generateValueByIndex(i)); - break; - case INT32: - valueWriter.write(Long.valueOf(i), (Integer) generateValueByIndex(i)); - break; - case INT64: - valueWriter.write(Long.valueOf(i), (Long) generateValueByIndex(i)); - break; - case FLOAT: - valueWriter.write(Long.valueOf(i), (Float) generateValueByIndex(i)); - break; - case DOUBLE: - valueWriter.write(Long.valueOf(i), (Double) generateValueByIndex(i)); - break; - case TEXT: - valueWriter.write(Long.valueOf(i), (Binary) generateValueByIndex(i)); - break; - case ENUMS: - case INT96: - case FIXED_LEN_BYTE_ARRAY: - case BIGDECIMAL: - break; - } - } + private void writeData() throws IOException { + for (int i = 0; i < count; i++) { + switch (dataType) { + case BOOLEAN: + valueWriter.write(Long.valueOf(i), (Boolean) generateValueByIndex(i)); + break; + case INT32: + valueWriter.write(Long.valueOf(i), (Integer) generateValueByIndex(i)); + break; + case INT64: + valueWriter.write(Long.valueOf(i), (Long) generateValueByIndex(i)); + break; + case FLOAT: + valueWriter.write(Long.valueOf(i), (Float) generateValueByIndex(i)); + break; + case DOUBLE: + valueWriter.write(Long.valueOf(i), (Double) generateValueByIndex(i)); + break; + case TEXT: + valueWriter.write(Long.valueOf(i), (Binary) generateValueByIndex(i)); + break; + case ENUMS: + case INT96: + case FIXED_LEN_BYTE_ARRAY: + case BIGDECIMAL: + break; } - - public abstract Object generateValueByIndex(int i); + } } + public abstract Object generateValueByIndex(int i); + } + } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/ReadOnlyTsFileTest.java 
b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/ReadOnlyTsFileTest.java index 58060222..4d288b27 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/ReadOnlyTsFileTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/ReadOnlyTsFileTest.java @@ -22,7 +22,6 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; - import java.io.IOException; /** @@ -30,78 +29,71 @@ */ public class ReadOnlyTsFileTest { - private static final String FILE_PATH = TsFileGeneratorForTest.outputDataFile; - private ITsRandomAccessFileReader randomAccessFileReader; - private int rowCount = 1000; - private ReadOnlyTsFile tsFile; - - @Before - public void before() throws InterruptedException, WriteProcessException, IOException { - TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder = "TS_2DIFF"; - TsFileGeneratorForTest.generateFile(rowCount, 16 * 1024 * 1024, 10000); - randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); - tsFile = new ReadOnlyTsFile(randomAccessFileReader); - } + private static final String FILE_PATH = TsFileGeneratorForTest.outputDataFile; + private ITsRandomAccessFileReader randomAccessFileReader; + private int rowCount = 1000; + private ReadOnlyTsFile tsFile; - @After - public void after() throws IOException { - tsFile.close(); - TsFileGeneratorForTest.after(); - } + @Before + public void before() throws InterruptedException, WriteProcessException, IOException { + TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder = "TS_2DIFF"; + TsFileGeneratorForTest.generateFile(rowCount, 16 * 1024 * 1024, 10000); + randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); + tsFile = new ReadOnlyTsFile(randomAccessFileReader); + } - @Test - public void queryTest() throws IOException { - Filter filter = TimeFilter.lt(1480562618100L); - Filter filter2 = ValueFilter.gt(new Binary("dog")); - Filter filter3 = FilterFactory.and(TimeFilter.gtEq(1480562618000L), TimeFilter.ltEq(1480562618100L)); + @After + public void after() throws IOException { + tsFile.close(); + TsFileGeneratorForTest.after(); + } - QueryFilter queryFilter = QueryFilterFactory.or( - QueryFilterFactory.and( - new SeriesFilter<>(new Path("d1.s1"), filter), - new SeriesFilter<>(new Path("d1.s4"), filter2)), - new GlobalTimeFilter(filter3) - ); + @Test + public void queryTest() throws IOException { + Filter filter = TimeFilter.lt(1480562618100L); + Filter filter2 = ValueFilter.gt(new Binary("dog")); + Filter filter3 = + FilterFactory.and(TimeFilter.gtEq(1480562618000L), TimeFilter.ltEq(1480562618100L)); - QueryExpression queryExpression = QueryExpression.create() - .addSelectedPath(new Path("d1.s1")) - .addSelectedPath(new Path("d1.s4")) - .setQueryFilter(queryFilter); - QueryDataSet queryDataSet = tsFile.query(queryExpression); - long aimedTimestamp = 1480562618000L; - while (queryDataSet.hasNext()) { - RowRecord rowRecord = queryDataSet.next(); - Assert.assertEquals(aimedTimestamp, rowRecord.getTimestamp()); - aimedTimestamp++; - } + QueryFilter queryFilter = + QueryFilterFactory.or(QueryFilterFactory.and(new SeriesFilter<>(new Path("d1.s1"), filter), + new SeriesFilter<>(new Path("d1.s4"), filter2)), new GlobalTimeFilter(filter3)); - queryExpression = QueryExpression.create() - .addSelectedPath(new Path("d1.s1")) - .addSelectedPath(new Path("d1.s4")); - queryDataSet = tsFile.query(queryExpression); - aimedTimestamp = 1480562618000L; - int count = 0; - while (queryDataSet.hasNext()) { - RowRecord rowRecord = queryDataSet.next(); - 
Assert.assertEquals(aimedTimestamp, rowRecord.getTimestamp()); - aimedTimestamp++; - count++; - } - Assert.assertEquals(rowCount, count); + QueryExpression queryExpression = QueryExpression.create().addSelectedPath(new Path("d1.s1")) + .addSelectedPath(new Path("d1.s4")).setQueryFilter(queryFilter); + QueryDataSet queryDataSet = tsFile.query(queryExpression); + long aimedTimestamp = 1480562618000L; + while (queryDataSet.hasNext()) { + RowRecord rowRecord = queryDataSet.next(); + Assert.assertEquals(aimedTimestamp, rowRecord.getTimestamp()); + aimedTimestamp++; + } - queryExpression = QueryExpression.create() - .addSelectedPath(new Path("d1.s1")) - .addSelectedPath(new Path("d1.s4")) - .setQueryFilter(new GlobalTimeFilter(filter3)); - queryDataSet = tsFile.query(queryExpression); - aimedTimestamp = 1480562618000L; - count = 0; - while (queryDataSet.hasNext()) { - RowRecord rowRecord = queryDataSet.next(); - Assert.assertEquals(aimedTimestamp, rowRecord.getTimestamp()); - aimedTimestamp++; - count++; - } - Assert.assertEquals(101, count); + queryExpression = QueryExpression.create().addSelectedPath(new Path("d1.s1")) + .addSelectedPath(new Path("d1.s4")); + queryDataSet = tsFile.query(queryExpression); + aimedTimestamp = 1480562618000L; + int count = 0; + while (queryDataSet.hasNext()) { + RowRecord rowRecord = queryDataSet.next(); + Assert.assertEquals(aimedTimestamp, rowRecord.getTimestamp()); + aimedTimestamp++; + count++; + } + Assert.assertEquals(rowCount, count); + queryExpression = QueryExpression.create().addSelectedPath(new Path("d1.s1")) + .addSelectedPath(new Path("d1.s4")).setQueryFilter(new GlobalTimeFilter(filter3)); + queryDataSet = tsFile.query(queryExpression); + aimedTimestamp = 1480562618000L; + count = 0; + while (queryDataSet.hasNext()) { + RowRecord rowRecord = queryDataSet.next(); + Assert.assertEquals(aimedTimestamp, rowRecord.getTimestamp()); + aimedTimestamp++; + count++; } + Assert.assertEquals(101, count); + + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/SeriesReaderByTimestampTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/SeriesReaderByTimestampTest.java index c449614c..45757361 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/SeriesReaderByTimestampTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/SeriesReaderByTimestampTest.java @@ -17,7 +17,6 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; - import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -27,54 +26,59 @@ */ public class SeriesReaderByTimestampTest { - private static final String FILE_PATH = TsFileGeneratorForSeriesReaderByTimestamp.outputDataFile; - private ITsRandomAccessFileReader randomAccessFileReader; - private MetadataQuerierByFileImpl metadataQuerierByFile; - private int rowCount = 1000000; + private static final String FILE_PATH = TsFileGeneratorForSeriesReaderByTimestamp.outputDataFile; + private ITsRandomAccessFileReader randomAccessFileReader; + private MetadataQuerierByFileImpl metadataQuerierByFile; + private int rowCount = 1000000; - @Before - public void before() throws InterruptedException, WriteProcessException, IOException { - TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder = "TS_2DIFF"; - TsFileGeneratorForSeriesReaderByTimestamp.generateFile(rowCount, 10 * 1024 * 1024, 10000); - randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); - metadataQuerierByFile = new MetadataQuerierByFileImpl(randomAccessFileReader); - 
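The timestamp-lookup path tested here works in three steps: fetch the chunk descriptors for a series through MetadataQuerierByFileImpl, combine them with a SeriesChunkLoaderImpl in a SeriesReaderFromSingleFileByTimestampImpl, then probe single timestamps with getValueInTimestamp (which, as the assertions imply, yields null when no point exists at that time). A minimal sketch, assuming a generated TsFile and a surrounding method that declares throws IOException; the file path is a hypothetical placeholder:

    // Sketch only, not part of this patch. Same imports as SeriesReaderByTimestampTest;
    // the raw List matches the surrounding test code.
    ITsRandomAccessFileReader reader = new TsRandomAccessLocalFileReader("some-test-file.ts"); // placeholder
    MetadataQuerierByFileImpl metadataQuerier = new MetadataQuerierByFileImpl(reader);
    SeriesChunkLoaderImpl chunkLoader = new SeriesChunkLoaderImpl(reader);
    List descriptors = metadataQuerier.getSeriesChunkDescriptorList(new Path("d1.s1"));
    SeriesReaderFromSingleFileByTimestampImpl byTimestamp =
        new SeriesReaderFromSingleFileByTimestampImpl(chunkLoader, descriptors);
    TsPrimitiveType value = byTimestamp.getValueInTimestamp(1480562618000L); // START_TIMESTAMP
    System.out.println(value);
    reader.close();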
} + @Before + public void before() throws InterruptedException, WriteProcessException, IOException { + TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder = "TS_2DIFF"; + TsFileGeneratorForSeriesReaderByTimestamp.generateFile(rowCount, 10 * 1024 * 1024, 10000); + randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); + metadataQuerierByFile = new MetadataQuerierByFileImpl(randomAccessFileReader); + } - @After - public void after() throws IOException { - randomAccessFileReader.close(); - TsFileGeneratorForSeriesReaderByTimestamp.after(); - } + @After + public void after() throws IOException { + randomAccessFileReader.close(); + TsFileGeneratorForSeriesReaderByTimestamp.after(); + } - @Test - public void readByTimestamp() throws IOException { - SeriesChunkLoaderImpl seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); - List encodedSeriesChunkDescriptorList = metadataQuerierByFile.getSeriesChunkDescriptorList(new Path("d1.s1")); - SeriesReader seriesReader = new SeriesReaderFromSingleFileWithoutFilterImpl(seriesChunkLoader, encodedSeriesChunkDescriptorList); + @Test + public void readByTimestamp() throws IOException { + SeriesChunkLoaderImpl seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); + List encodedSeriesChunkDescriptorList = + metadataQuerierByFile.getSeriesChunkDescriptorList(new Path("d1.s1")); + SeriesReader seriesReader = new SeriesReaderFromSingleFileWithoutFilterImpl(seriesChunkLoader, + encodedSeriesChunkDescriptorList); - List timeValuePairList = new ArrayList<>(); - int count = 0; - while (seriesReader.hasNext()) { - TimeValuePair timeValuePair = seriesReader.next(); - if (count % 100 == 0) { - timeValuePairList.add(new TimeValuePair(timeValuePair.getTimestamp() - 1, null)); - timeValuePairList.add(timeValuePair); - } - count++; - } + List timeValuePairList = new ArrayList<>(); + int count = 0; + while (seriesReader.hasNext()) { + TimeValuePair timeValuePair = seriesReader.next(); + if (count % 100 == 0) { + timeValuePairList.add(new TimeValuePair(timeValuePair.getTimestamp() - 1, null)); + timeValuePairList.add(timeValuePair); + } + count++; + } - long startTimestamp = System.currentTimeMillis(); - count = 0; + long startTimestamp = System.currentTimeMillis(); + count = 0; - SeriesReaderFromSingleFileByTimestampImpl seriesReaderFromSingleFileByTimestamp = new SeriesReaderFromSingleFileByTimestampImpl(seriesChunkLoader, encodedSeriesChunkDescriptorList); + SeriesReaderFromSingleFileByTimestampImpl seriesReaderFromSingleFileByTimestamp = + new SeriesReaderFromSingleFileByTimestampImpl(seriesChunkLoader, + encodedSeriesChunkDescriptorList); - for (TimeValuePair timeValuePair : timeValuePairList) { - TsPrimitiveType value = seriesReaderFromSingleFileByTimestamp.getValueInTimestamp(timeValuePair.getTimestamp()); - Assert.assertEquals(timeValuePair.getValue(), value); - count ++; - } - long endTimestamp = System.currentTimeMillis(); - System.out.println("SeriesReadWithFilterTest. [Time used]: " + (endTimestamp - startTimestamp) + - " ms. [Read Count]: " + count); + for (TimeValuePair timeValuePair : timeValuePairList) { + TsPrimitiveType value = + seriesReaderFromSingleFileByTimestamp.getValueInTimestamp(timeValuePair.getTimestamp()); + Assert.assertEquals(timeValuePair.getValue(), value); + count++; } + long endTimestamp = System.currentTimeMillis(); + System.out.println("SeriesReadWithFilterTest. [Time used]: " + (endTimestamp - startTimestamp) + + " ms. 
[Read Count]: " + count); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/SeriesReaderTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/SeriesReaderTest.java index 1a790efd..cbdec192 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/SeriesReaderTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/SeriesReaderTest.java @@ -21,7 +21,6 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; - import java.io.IOException; import java.util.List; @@ -30,81 +29,87 @@ */ public class SeriesReaderTest { - private static final String FILE_PATH = TsFileGeneratorForTest.outputDataFile; - private ITsRandomAccessFileReader randomAccessFileReader; - private MetadataQuerierByFileImpl metadataQuerierByFile; - private int rowCount = 1000000; + private static final String FILE_PATH = TsFileGeneratorForTest.outputDataFile; + private ITsRandomAccessFileReader randomAccessFileReader; + private MetadataQuerierByFileImpl metadataQuerierByFile; + private int rowCount = 1000000; - @Before - public void before() throws InterruptedException, WriteProcessException, IOException { - TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder = "TS_2DIFF"; - TsFileGeneratorForTest.generateFile(rowCount, 10 * 1024 * 1024, 10000); - randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); - metadataQuerierByFile = new MetadataQuerierByFileImpl(randomAccessFileReader); - } + @Before + public void before() throws InterruptedException, WriteProcessException, IOException { + TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder = "TS_2DIFF"; + TsFileGeneratorForTest.generateFile(rowCount, 10 * 1024 * 1024, 10000); + randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); + metadataQuerierByFile = new MetadataQuerierByFileImpl(randomAccessFileReader); + } - @After - public void after() throws IOException { - randomAccessFileReader.close(); - TsFileGeneratorForTest.after(); - } + @After + public void after() throws IOException { + randomAccessFileReader.close(); + TsFileGeneratorForTest.after(); + } - @Test - public void readTest() throws IOException { - int count = 0; - SeriesChunkLoaderImpl seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); - List encodedSeriesChunkDescriptorList = metadataQuerierByFile.getSeriesChunkDescriptorList(new Path("d1.s1")); + @Test + public void readTest() throws IOException { + int count = 0; + SeriesChunkLoaderImpl seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); + List encodedSeriesChunkDescriptorList = + metadataQuerierByFile.getSeriesChunkDescriptorList(new Path("d1.s1")); - SeriesReader seriesReader = new SeriesReaderFromSingleFileWithoutFilterImpl(seriesChunkLoader, encodedSeriesChunkDescriptorList); - long startTime = TsFileGeneratorForTest.START_TIMESTAMP; - long startTimestamp = System.currentTimeMillis(); - while (seriesReader.hasNext()) { - TimeValuePair timeValuePair = seriesReader.next(); - Assert.assertEquals(startTime, timeValuePair.getTimestamp()); - startTime++; - count++; - } - long endTimestamp = System.currentTimeMillis(); - Assert.assertEquals(rowCount, count); - System.out.println("SeriesReadTest. [Time used]: " + (endTimestamp - startTimestamp) + - " ms. 
[Read Count]: " + count); + SeriesReader seriesReader = new SeriesReaderFromSingleFileWithoutFilterImpl(seriesChunkLoader, + encodedSeriesChunkDescriptorList); + long startTime = TsFileGeneratorForTest.START_TIMESTAMP; + long startTimestamp = System.currentTimeMillis(); + while (seriesReader.hasNext()) { + TimeValuePair timeValuePair = seriesReader.next(); + Assert.assertEquals(startTime, timeValuePair.getTimestamp()); + startTime++; + count++; + } + long endTimestamp = System.currentTimeMillis(); + Assert.assertEquals(rowCount, count); + System.out.println("SeriesReadTest. [Time used]: " + (endTimestamp - startTimestamp) + + " ms. [Read Count]: " + count); - encodedSeriesChunkDescriptorList = metadataQuerierByFile.getSeriesChunkDescriptorList(new Path("d1.s4")); - seriesReader = new SeriesReaderFromSingleFileWithoutFilterImpl(seriesChunkLoader, encodedSeriesChunkDescriptorList); - count = 0; - startTimestamp = System.currentTimeMillis(); - while (seriesReader.hasNext()) { - TimeValuePair timeValuePair = seriesReader.next(); - startTime++; - count++; - } - endTimestamp = System.currentTimeMillis(); - System.out.println("SeriesReadTest. [Time used]: " + (endTimestamp - startTimestamp) + - " ms. [Read Count]: " + count); + encodedSeriesChunkDescriptorList = + metadataQuerierByFile.getSeriesChunkDescriptorList(new Path("d1.s4")); + seriesReader = new SeriesReaderFromSingleFileWithoutFilterImpl(seriesChunkLoader, + encodedSeriesChunkDescriptorList); + count = 0; + startTimestamp = System.currentTimeMillis(); + while (seriesReader.hasNext()) { + TimeValuePair timeValuePair = seriesReader.next(); + startTime++; + count++; } + endTimestamp = System.currentTimeMillis(); + System.out.println("SeriesReadTest. [Time used]: " + (endTimestamp - startTimestamp) + + " ms. 
[Read Count]: " + count); + } - @Test - public void readWithFilterTest() throws IOException { - SeriesChunkLoaderImpl seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); - List encodedSeriesChunkDescriptorList = metadataQuerierByFile.getSeriesChunkDescriptorList(new Path("d1.s1")); + @Test + public void readWithFilterTest() throws IOException { + SeriesChunkLoaderImpl seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); + List encodedSeriesChunkDescriptorList = + metadataQuerierByFile.getSeriesChunkDescriptorList(new Path("d1.s1")); - Filter filter = new FilterFactory().or( - FilterFactory.and(TimeFilter.gt(1480563570029L), TimeFilter.lt(1480563570033L)), - FilterFactory.and(ValueFilter.gtEq(9520331), ValueFilter.ltEq(9520361))); - SeriesFilter seriesFilter = new SeriesFilter<>(new Path("d1.s1"), filter); - SeriesReader seriesReader = new SeriesReaderFromSingleFileWithFilterImpl(seriesChunkLoader, encodedSeriesChunkDescriptorList, seriesFilter.getFilter()); + Filter filter = new FilterFactory().or( + FilterFactory.and(TimeFilter.gt(1480563570029L), TimeFilter.lt(1480563570033L)), + FilterFactory.and(ValueFilter.gtEq(9520331), ValueFilter.ltEq(9520361))); + SeriesFilter seriesFilter = new SeriesFilter<>(new Path("d1.s1"), filter); + SeriesReader seriesReader = new SeriesReaderFromSingleFileWithFilterImpl(seriesChunkLoader, + encodedSeriesChunkDescriptorList, seriesFilter.getFilter()); - long startTimestamp = System.currentTimeMillis(); - int count = 0; - long aimedTimestamp = 1480563570030L; - while (seriesReader.hasNext()) { - TimeValuePair timeValuePair = seriesReader.next(); - count++; - Assert.assertEquals(aimedTimestamp++, timeValuePair.getTimestamp()); - } - long endTimestamp = System.currentTimeMillis(); - System.out.println("SeriesReadWithFilterTest. [Time used]: " + (endTimestamp - startTimestamp) + - " ms. [Read Count]: " + count); + long startTimestamp = System.currentTimeMillis(); + int count = 0; + long aimedTimestamp = 1480563570030L; + while (seriesReader.hasNext()) { + TimeValuePair timeValuePair = seriesReader.next(); + count++; + Assert.assertEquals(aimedTimestamp++, timeValuePair.getTimestamp()); } + long endTimestamp = System.currentTimeMillis(); + System.out.println("SeriesReadWithFilterTest. [Time used]: " + (endTimestamp - startTimestamp) + + " ms. 
[Read Count]: " + count); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/TsFileGeneratorForSeriesReaderByTimestamp.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/TsFileGeneratorForSeriesReaderByTimestamp.java index 01b3a1ea..71699912 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/TsFileGeneratorForSeriesReaderByTimestamp.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/TsFileGeneratorForSeriesReaderByTimestamp.java @@ -1,237 +1,231 @@ -package cn.edu.tsinghua.tsfile.timeseries.readV2; - -import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; -import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; -import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; -import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils; -import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils.Unit; -import cn.edu.tsinghua.tsfile.timeseries.utils.RecordUtils; -import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter; -import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; -import cn.edu.tsinghua.tsfile.timeseries.write.record.TSRecord; -import cn.edu.tsinghua.tsfile.timeseries.write.schema.FileSchema; -import org.json.JSONArray; -import org.json.JSONObject; -import org.junit.Ignore; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileWriter; -import java.io.IOException; -import java.util.Scanner; - -@Ignore -public class TsFileGeneratorForSeriesReaderByTimestamp { - - private static int rowCount; - private static int rowGroupSize; - private static int pageSize; - - private static final Logger LOG = LoggerFactory.getLogger(TsFileGeneratorForSeriesReaderByTimestamp.class); - public static TsFileWriter innerWriter; - public static String inputDataFile; - public static String outputDataFile = "src/test/resources/testTsFile.ts"; - public static String errorOutputDataFile; - public static JSONObject jsonSchema; - - public static final long START_TIMESTAMP = 1480562618000L; - - private static int preRowGroupSize; - private static int prePageSize; - - public static void generateFile(int rc, int rs, int ps) throws IOException, InterruptedException, WriteProcessException { - rowCount = rc; - rowGroupSize = rs; - pageSize = ps; - prepare(); - write(); - } - - public static void prepare() throws IOException { - inputDataFile = "src/test/resources/perTestInputData"; - errorOutputDataFile = "src/test/resources/perTestErrorOutputData.ksn"; - jsonSchema = generateTestData(); - generateSampleInputDataFile(); - } - - public static void after() { - TSFileDescriptor.getInstance().getConfig().groupSizeInByte = preRowGroupSize; - TSFileDescriptor.getInstance().getConfig().maxNumberOfPointsInPage = prePageSize; - File file = new File(inputDataFile); - if (file.exists()) - file.delete(); - file = new File(outputDataFile); - if (file.exists()) - file.delete(); - file = new File(errorOutputDataFile); - if (file.exists()) - file.delete(); - } - - static private void generateSampleInputDataFile() throws IOException { - File file = new File(inputDataFile); - if (file.exists()) - file.delete(); - file.getParentFile().mkdirs(); - FileWriter fw = new FileWriter(file); - - long startTime = START_TIMESTAMP; - for (int i = 0; i < rowCount; i += 2) { - // write d1 - String d1 = "d1," + (startTime + i) + ",s1," + (i * 
10 + 1) + ",s2," + (i * 10 + 2); - if (i % 5 == 0) - d1 += ",s3," + (i * 10 + 3); - if (i % 8 == 0) - d1 += ",s4," + "dog" + i; - if (i % 9 == 0) - d1 += ",s5," + "false"; - if (i % 10 == 0) - d1 += ",s6," + ((int) (i / 9.0) * 100) / 100.0; - if (i % 11 == 0) - d1 += ",s7," + ((int) (i / 10.0) * 100) / 100.0; - fw.write(d1 + "\r\n"); - - // write d2 - String d2 = "d2," + (startTime + i) + ",s2," + (i * 10 + 2) + ",s3," + (i * 10 + 3); - if (i % 20 < 5) { - // LOG.info("write null to d2:" + (startTime + i)); - d2 = "d2," + (startTime + i) + ",s2,,s3," + (i * 10 + 3); - } - if (i % 5 == 0) - d2 += ",s1," + (i * 10 + 1); - if (i % 8 == 0) - d2 += ",s4," + "dog" + i % 4; - fw.write(d2 + "\r\n"); - } - // write error - String d = - "d2,3," + (startTime + rowCount) + ",s2," + (rowCount * 10 + 2) + ",s3," - + (rowCount * 10 + 3); - fw.write(d + "\r\n"); - d = "d2," + (startTime + rowCount + 1) + ",2,s-1," + (rowCount * 10 + 2); - fw.write(d + "\r\n"); - fw.close(); - } - - static public void write() throws IOException, InterruptedException, WriteProcessException { - File file = new File(outputDataFile); - File errorFile = new File(errorOutputDataFile); - if (file.exists()) - file.delete(); - if (errorFile.exists()) - errorFile.delete(); - - //LOG.info(jsonSchema.toString()); - FileSchema schema = new FileSchema(jsonSchema); - preRowGroupSize = TSFileDescriptor.getInstance().getConfig().groupSizeInByte; - prePageSize = TSFileDescriptor.getInstance().getConfig().maxNumberOfPointsInPage; - TSFileDescriptor.getInstance().getConfig().groupSizeInByte = rowGroupSize; - TSFileDescriptor.getInstance().getConfig().maxNumberOfPointsInPage = pageSize; - innerWriter = new TsFileWriter(file, schema, TSFileDescriptor.getInstance().getConfig()); - - // write - try { - writeToFile(schema); - } catch (WriteProcessException e) { - e.printStackTrace(); - } - LOG.info("write to file successfully!!"); - } - - private static JSONObject generateTestData() { - TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); - JSONObject s1 = new JSONObject(); - s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); - s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); - s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s2 = new JSONObject(); - s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); - s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s3 = new JSONObject(); - s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); - s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s4 = new JSONObject(); - s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); - s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); - s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.PLAIN.toString()); - JSONObject s5 = new JSONObject(); - s5.put(JsonFormatConstant.MEASUREMENT_UID, "s5"); - s5.put(JsonFormatConstant.DATA_TYPE, TSDataType.BOOLEAN.toString()); - s5.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.PLAIN.toString()); - JSONObject s6 = new JSONObject(); - s6.put(JsonFormatConstant.MEASUREMENT_UID, "s6"); - s6.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); - s6.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.RLE.toString()); - JSONObject s7 = new JSONObject(); - s7.put(JsonFormatConstant.MEASUREMENT_UID, "s7"); - 
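On the write side, this generator describes each measurement as a JSONObject (measurement id, data type, encoding), collects them in a JSONArray under JSON_SCHEMA, and feeds line-parsed TSRecords to a TsFileWriter. A compressed sketch of that flow, assuming a method that declares throws IOException and WriteProcessException; the output file name is a hypothetical placeholder:

    // Sketch only, not part of this patch. Same imports as TsFileGeneratorForSeriesReaderByTimestamp.
    JSONObject s1 = new JSONObject();
    s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1");
    s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString());
    s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.RLE.toString());
    JSONArray measurements = new JSONArray();
    measurements.put(s1);
    JSONObject schemaJson = new JSONObject();
    schemaJson.put(JsonFormatConstant.DELTA_TYPE, "test_type");
    schemaJson.put(JsonFormatConstant.JSON_SCHEMA, measurements);

    FileSchema schema = new FileSchema(schemaJson);
    TsFileWriter writer = new TsFileWriter(new File("sketch-output.ts"), schema, // placeholder file
        TSFileDescriptor.getInstance().getConfig());
    TSRecord record = RecordUtils.parseSimpleTupleRecord("d1,1480562618000,s1,1", schema);
    writer.write(record);
    writer.close();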
s7.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); - s7.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.RLE.toString()); - - JSONArray measureGroup1 = new JSONArray(); - measureGroup1.put(s1); - measureGroup1.put(s2); - measureGroup1.put(s3); - measureGroup1.put(s4); - measureGroup1.put(s5); - measureGroup1.put(s6); - measureGroup1.put(s7); - - JSONObject jsonSchema = new JSONObject(); - jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); - jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1); - //System.out.println(jsonSchema); - return jsonSchema; - } - - static public void writeToFile(FileSchema schema) throws InterruptedException, IOException, WriteProcessException { - Scanner in = getDataFile(inputDataFile); - long lineCount = 0; - long startTime = System.currentTimeMillis(); - long endTime = System.currentTimeMillis(); - assert in != null; - while (in.hasNextLine()) { - if (lineCount % 1000000 == 0) { - endTime = System.currentTimeMillis(); - // logger.info("write line:{},inner space consumer:{},use - // time:{}",lineCount,innerWriter.calculateMemSizeForEachGroup(),endTime); - LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - } - String str = in.nextLine(); - TSRecord record = RecordUtils.parseSimpleTupleRecord(str, schema); - innerWriter.write(record); - lineCount++; - } - endTime = System.currentTimeMillis(); - LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - innerWriter.close(); - in.close(); - endTime = System.currentTimeMillis(); - LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - LOG.info("src file size:{}GB", FileUtils.getLocalFileByte(inputDataFile, Unit.GB)); - LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, Unit.MB)); - } - - static private Scanner getDataFile(String path) { - File file = new File(path); - try { - Scanner in = new Scanner(file); - return in; - } catch (FileNotFoundException e) { - e.printStackTrace(); - return null; - } - } -} +package cn.edu.tsinghua.tsfile.timeseries.readV2; + +import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; +import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; +import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; +import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils; +import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils.Unit; +import cn.edu.tsinghua.tsfile.timeseries.utils.RecordUtils; +import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter; +import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; +import cn.edu.tsinghua.tsfile.timeseries.write.record.TSRecord; +import cn.edu.tsinghua.tsfile.timeseries.write.schema.FileSchema; +import org.json.JSONArray; +import org.json.JSONObject; +import org.junit.Ignore; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Scanner; + +@Ignore +public class TsFileGeneratorForSeriesReaderByTimestamp { + + private static int rowCount; + private static int rowGroupSize; + private static int pageSize; + + private static final Logger LOG = + LoggerFactory.getLogger(TsFileGeneratorForSeriesReaderByTimestamp.class); + public static TsFileWriter innerWriter; + public static String inputDataFile; + public static 
String outputDataFile = "src/test/resources/testTsFile.ts"; + public static String errorOutputDataFile; + public static JSONObject jsonSchema; + + public static final long START_TIMESTAMP = 1480562618000L; + + private static int preRowGroupSize; + private static int prePageSize; + + public static void generateFile(int rc, int rs, int ps) + throws IOException, InterruptedException, WriteProcessException { + rowCount = rc; + rowGroupSize = rs; + pageSize = ps; + prepare(); + write(); + } + + public static void prepare() throws IOException { + inputDataFile = "src/test/resources/perTestInputData"; + errorOutputDataFile = "src/test/resources/perTestErrorOutputData.ksn"; + jsonSchema = generateTestData(); + generateSampleInputDataFile(); + } + + public static void after() { + TSFileDescriptor.getInstance().getConfig().groupSizeInByte = preRowGroupSize; + TSFileDescriptor.getInstance().getConfig().maxNumberOfPointsInPage = prePageSize; + File file = new File(inputDataFile); + if (file.exists()) + file.delete(); + file = new File(outputDataFile); + if (file.exists()) + file.delete(); + file = new File(errorOutputDataFile); + if (file.exists()) + file.delete(); + } + + static private void generateSampleInputDataFile() throws IOException { + File file = new File(inputDataFile); + if (file.exists()) + file.delete(); + file.getParentFile().mkdirs(); + FileWriter fw = new FileWriter(file); + + long startTime = START_TIMESTAMP; + for (int i = 0; i < rowCount; i += 2) { + // write d1 + String d1 = "d1," + (startTime + i) + ",s1," + (i * 10 + 1) + ",s2," + (i * 10 + 2); + if (i % 5 == 0) + d1 += ",s3," + (i * 10 + 3); + if (i % 8 == 0) + d1 += ",s4," + "dog" + i; + if (i % 9 == 0) + d1 += ",s5," + "false"; + if (i % 10 == 0) + d1 += ",s6," + ((int) (i / 9.0) * 100) / 100.0; + if (i % 11 == 0) + d1 += ",s7," + ((int) (i / 10.0) * 100) / 100.0; + fw.write(d1 + "\r\n"); + + // write d2 + String d2 = "d2," + (startTime + i) + ",s2," + (i * 10 + 2) + ",s3," + (i * 10 + 3); + if (i % 20 < 5) { + // LOG.info("write null to d2:" + (startTime + i)); + d2 = "d2," + (startTime + i) + ",s2,,s3," + (i * 10 + 3); + } + if (i % 5 == 0) + d2 += ",s1," + (i * 10 + 1); + if (i % 8 == 0) + d2 += ",s4," + "dog" + i % 4; + fw.write(d2 + "\r\n"); + } + // write error + String d = "d2,3," + (startTime + rowCount) + ",s2," + (rowCount * 10 + 2) + ",s3," + + (rowCount * 10 + 3); + fw.write(d + "\r\n"); + d = "d2," + (startTime + rowCount + 1) + ",2,s-1," + (rowCount * 10 + 2); + fw.write(d + "\r\n"); + fw.close(); + } + + static public void write() throws IOException, InterruptedException, WriteProcessException { + File file = new File(outputDataFile); + File errorFile = new File(errorOutputDataFile); + if (file.exists()) + file.delete(); + if (errorFile.exists()) + errorFile.delete(); + + // LOG.info(jsonSchema.toString()); + FileSchema schema = new FileSchema(jsonSchema); + preRowGroupSize = TSFileDescriptor.getInstance().getConfig().groupSizeInByte; + prePageSize = TSFileDescriptor.getInstance().getConfig().maxNumberOfPointsInPage; + TSFileDescriptor.getInstance().getConfig().groupSizeInByte = rowGroupSize; + TSFileDescriptor.getInstance().getConfig().maxNumberOfPointsInPage = pageSize; + innerWriter = new TsFileWriter(file, schema, TSFileDescriptor.getInstance().getConfig()); + + // write + try { + writeToFile(schema); + } catch (WriteProcessException e) { + e.printStackTrace(); + } + LOG.info("write to file successfully!!"); + } + + private static JSONObject generateTestData() { + TSFileConfig conf = 
TSFileDescriptor.getInstance().getConfig(); + JSONObject s1 = new JSONObject(); + s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); + s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); + s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s2 = new JSONObject(); + s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); + s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s3 = new JSONObject(); + s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); + s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s4 = new JSONObject(); + s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); + s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); + s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.PLAIN.toString()); + JSONObject s5 = new JSONObject(); + s5.put(JsonFormatConstant.MEASUREMENT_UID, "s5"); + s5.put(JsonFormatConstant.DATA_TYPE, TSDataType.BOOLEAN.toString()); + s5.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.PLAIN.toString()); + JSONObject s6 = new JSONObject(); + s6.put(JsonFormatConstant.MEASUREMENT_UID, "s6"); + s6.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); + s6.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.RLE.toString()); + JSONObject s7 = new JSONObject(); + s7.put(JsonFormatConstant.MEASUREMENT_UID, "s7"); + s7.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); + s7.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.RLE.toString()); + + JSONArray measureGroup1 = new JSONArray(); + measureGroup1.put(s1); + measureGroup1.put(s2); + measureGroup1.put(s3); + measureGroup1.put(s4); + measureGroup1.put(s5); + measureGroup1.put(s6); + measureGroup1.put(s7); + + JSONObject jsonSchema = new JSONObject(); + jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); + jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1); + // System.out.println(jsonSchema); + return jsonSchema; + } + + static public void writeToFile(FileSchema schema) + throws InterruptedException, IOException, WriteProcessException { + Scanner in = getDataFile(inputDataFile); + long lineCount = 0; + long startTime = System.currentTimeMillis(); + long endTime = System.currentTimeMillis(); + assert in != null; + while (in.hasNextLine()) { + if (lineCount % 1000000 == 0) { + endTime = System.currentTimeMillis(); + // logger.info("write line:{},inner space consumer:{},use + // time:{}",lineCount,innerWriter.calculateMemSizeForEachGroup(),endTime); + LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + } + String str = in.nextLine(); + TSRecord record = RecordUtils.parseSimpleTupleRecord(str, schema); + innerWriter.write(record); + lineCount++; + } + endTime = System.currentTimeMillis(); + LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + innerWriter.close(); + in.close(); + endTime = System.currentTimeMillis(); + LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + LOG.info("src file size:{}GB", FileUtils.getLocalFileByte(inputDataFile, Unit.GB)); + LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, Unit.MB)); + } + + static private Scanner getDataFile(String path) { + File file = new File(path); + try { + Scanner in = new Scanner(file); + return in; + } catch (FileNotFoundException e) { + 
e.printStackTrace(); + return null; + } + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/TsFileGeneratorForTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/TsFileGeneratorForTest.java index 60a1825f..1c6ae9b7 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/TsFileGeneratorForTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/TsFileGeneratorForTest.java @@ -1,230 +1,223 @@ -package cn.edu.tsinghua.tsfile.timeseries.readV2; - -import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; -import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; -import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; -import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; -import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils; -import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils.Unit; -import cn.edu.tsinghua.tsfile.timeseries.utils.RecordUtils; -import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter; -import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; -import cn.edu.tsinghua.tsfile.timeseries.write.record.TSRecord; -import cn.edu.tsinghua.tsfile.timeseries.write.schema.FileSchema; -import org.json.JSONArray; -import org.json.JSONObject; -import org.junit.Ignore; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileWriter; -import java.io.IOException; -import java.util.Scanner; - -@Ignore -public class TsFileGeneratorForTest { - - private static int rowCount; - private static int rowGroupSize; - private static int pageSize; - - private static final Logger LOG = LoggerFactory.getLogger(TsFileGeneratorForTest.class); - public static TsFileWriter innerWriter; - public static String inputDataFile; - public static String outputDataFile = "src/test/resources/testTsFile.ts"; - public static String errorOutputDataFile; - public static JSONObject jsonSchema; - - public static final long START_TIMESTAMP = 1480562618000L; - - public static void generateFile(int rc, int rs, int ps) throws IOException, InterruptedException, WriteProcessException { - rowCount = rc; - rowGroupSize = rs; - pageSize = ps; - prepare(); - write(); - } - - public static void prepare() throws IOException { - inputDataFile = "src/test/resources/perTestInputData"; - errorOutputDataFile = "src/test/resources/perTestErrorOutputData.ksn"; - jsonSchema = generateTestData(); - generateSampleInputDataFile(); - } - - public static void after() { - File file = new File(inputDataFile); - if (file.exists()) - file.delete(); - file = new File(outputDataFile); - if (file.exists()) - file.delete(); - file = new File(errorOutputDataFile); - if (file.exists()) - file.delete(); - } - - static private void generateSampleInputDataFile() throws IOException { - File file = new File(inputDataFile); - if (file.exists()) - file.delete(); - file.getParentFile().mkdirs(); - FileWriter fw = new FileWriter(file); - - long startTime = START_TIMESTAMP; - for (int i = 0; i < rowCount; i++) { - // write d1 - String d1 = "d1," + (startTime + i) + ",s1," + (i * 10 + 1) + ",s2," + (i * 10 + 2); - if (i % 5 == 0) - d1 += ",s3," + (i * 10 + 3); - if (i % 8 == 0) - d1 += ",s4," + "dog" + i; - if (i % 9 == 0) - d1 += ",s5," + "false"; - if (i % 10 == 0) - d1 += ",s6," + ((int) (i / 9.0) * 100) / 100.0; - if (i % 11 == 0) - d1 += ",s7," + ((int) (i / 10.0) * 100) / 100.0; - fw.write(d1 + "\r\n"); - - 
// write d2 - String d2 = "d2," + (startTime + i) + ",s2," + (i * 10 + 2) + ",s3," + (i * 10 + 3); - if (i % 20 < 5) { - // LOG.info("write null to d2:" + (startTime + i)); - d2 = "d2," + (startTime + i) + ",s2,,s3," + (i * 10 + 3); - } - if (i % 5 == 0) - d2 += ",s1," + (i * 10 + 1); - if (i % 8 == 0) - d2 += ",s4," + "dog" + i % 4; - fw.write(d2 + "\r\n"); - } - // write error - String d = - "d2,3," + (startTime + rowCount) + ",s2," + (rowCount * 10 + 2) + ",s3," - + (rowCount * 10 + 3); - fw.write(d + "\r\n"); - d = "d2," + (startTime + rowCount + 1) + ",2,s-1," + (rowCount * 10 + 2); - fw.write(d + "\r\n"); - fw.close(); - } - - static public void write() throws IOException, InterruptedException, WriteProcessException { - File file = new File(outputDataFile); - File errorFile = new File(errorOutputDataFile); - if (file.exists()) - file.delete(); - if (errorFile.exists()) - errorFile.delete(); - - //LOG.info(jsonSchema.toString()); - FileSchema schema = new FileSchema(jsonSchema); - TSFileDescriptor.getInstance().getConfig().groupSizeInByte = rowGroupSize; - TSFileDescriptor.getInstance().getConfig().maxNumberOfPointsInPage = pageSize; - innerWriter = new TsFileWriter(file, schema, TSFileDescriptor.getInstance().getConfig()); - - // write - try { - writeToFile(schema); - } catch (WriteProcessException e) { - e.printStackTrace(); - } - LOG.info("write to file successfully!!"); - } - - private static JSONObject generateTestData() { - TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); - JSONObject s1 = new JSONObject(); - s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); - s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); - s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s2 = new JSONObject(); - s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); - s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s3 = new JSONObject(); - s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); - s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s4 = new JSONObject(); - s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); - s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); - s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.PLAIN.toString()); - JSONObject s5 = new JSONObject(); - s5.put(JsonFormatConstant.MEASUREMENT_UID, "s5"); - s5.put(JsonFormatConstant.DATA_TYPE, TSDataType.BOOLEAN.toString()); - s5.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.PLAIN.toString()); - JSONObject s6 = new JSONObject(); - s6.put(JsonFormatConstant.MEASUREMENT_UID, "s6"); - s6.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); - s6.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.RLE.toString()); - JSONObject s7 = new JSONObject(); - s7.put(JsonFormatConstant.MEASUREMENT_UID, "s7"); - s7.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); - s7.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.RLE.toString()); - - JSONArray measureGroup1 = new JSONArray(); - measureGroup1.put(s1); - measureGroup1.put(s2); - measureGroup1.put(s3); - measureGroup1.put(s4); - measureGroup1.put(s5); - measureGroup1.put(s6); - measureGroup1.put(s7); - - JSONObject jsonSchema = new JSONObject(); - jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); - jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, 
measureGroup1); - //System.out.println(jsonSchema); - return jsonSchema; - } - - static public void writeToFile(FileSchema schema) throws InterruptedException, IOException, WriteProcessException { - Scanner in = getDataFile(inputDataFile); - long lineCount = 0; - long startTime = System.currentTimeMillis(); - long endTime = System.currentTimeMillis(); - assert in != null; - while (in.hasNextLine()) { - if (lineCount % 1000000 == 0) { - endTime = System.currentTimeMillis(); - // logger.info("write line:{},inner space consumer:{},use - // time:{}",lineCount,innerWriter.calculateMemSizeForEachGroup(),endTime); - LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - } - String str = in.nextLine(); - TSRecord record = RecordUtils.parseSimpleTupleRecord(str, schema); - innerWriter.write(record); - lineCount++; - } - endTime = System.currentTimeMillis(); - LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - innerWriter.close(); - in.close(); - endTime = System.currentTimeMillis(); - LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - LOG.info("src file size:{}GB", FileUtils.getLocalFileByte(inputDataFile, Unit.GB)); - LOG.info("out file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, Unit.MB)); - } - - static private Scanner getDataFile(String path) { - File file = new File(path); - try { - Scanner in = new Scanner(file); - return in; - } catch (FileNotFoundException e) { - e.printStackTrace(); - return null; - } - } -} +package cn.edu.tsinghua.tsfile.timeseries.readV2; + +import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; +import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; +import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; +import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; +import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils; +import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils.Unit; +import cn.edu.tsinghua.tsfile.timeseries.utils.RecordUtils; +import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter; +import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; +import cn.edu.tsinghua.tsfile.timeseries.write.record.TSRecord; +import cn.edu.tsinghua.tsfile.timeseries.write.schema.FileSchema; +import org.json.JSONArray; +import org.json.JSONObject; +import org.junit.Ignore; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Scanner; + +@Ignore +public class TsFileGeneratorForTest { + + private static int rowCount; + private static int rowGroupSize; + private static int pageSize; + + private static final Logger LOG = LoggerFactory.getLogger(TsFileGeneratorForTest.class); + public static TsFileWriter innerWriter; + public static String inputDataFile; + public static String outputDataFile = "src/test/resources/testTsFile.ts"; + public static String errorOutputDataFile; + public static JSONObject jsonSchema; + + public static final long START_TIMESTAMP = 1480562618000L; + + public static void generateFile(int rc, int rs, int ps) + throws IOException, InterruptedException, WriteProcessException { + rowCount = rc; + rowGroupSize = rs; + pageSize = ps; + prepare(); + write(); + } + + public static void prepare() throws IOException { + inputDataFile = "src/test/resources/perTestInputData"; + errorOutputDataFile = 
"src/test/resources/perTestErrorOutputData.ksn"; + jsonSchema = generateTestData(); + generateSampleInputDataFile(); + } + + public static void after() { + File file = new File(inputDataFile); + if (file.exists()) + file.delete(); + file = new File(outputDataFile); + if (file.exists()) + file.delete(); + file = new File(errorOutputDataFile); + if (file.exists()) + file.delete(); + } + + static private void generateSampleInputDataFile() throws IOException { + File file = new File(inputDataFile); + if (file.exists()) + file.delete(); + file.getParentFile().mkdirs(); + FileWriter fw = new FileWriter(file); + + long startTime = START_TIMESTAMP; + for (int i = 0; i < rowCount; i++) { + // write d1 + String d1 = "d1," + (startTime + i) + ",s1," + (i * 10 + 1) + ",s2," + (i * 10 + 2); + if (i % 5 == 0) + d1 += ",s3," + (i * 10 + 3); + if (i % 8 == 0) + d1 += ",s4," + "dog" + i; + if (i % 9 == 0) + d1 += ",s5," + "false"; + if (i % 10 == 0) + d1 += ",s6," + ((int) (i / 9.0) * 100) / 100.0; + if (i % 11 == 0) + d1 += ",s7," + ((int) (i / 10.0) * 100) / 100.0; + fw.write(d1 + "\r\n"); + + // write d2 + String d2 = "d2," + (startTime + i) + ",s2," + (i * 10 + 2) + ",s3," + (i * 10 + 3); + if (i % 20 < 5) { + // LOG.info("write null to d2:" + (startTime + i)); + d2 = "d2," + (startTime + i) + ",s2,,s3," + (i * 10 + 3); + } + if (i % 5 == 0) + d2 += ",s1," + (i * 10 + 1); + if (i % 8 == 0) + d2 += ",s4," + "dog" + i % 4; + fw.write(d2 + "\r\n"); + } + // write error + String d = "d2,3," + (startTime + rowCount) + ",s2," + (rowCount * 10 + 2) + ",s3," + + (rowCount * 10 + 3); + fw.write(d + "\r\n"); + d = "d2," + (startTime + rowCount + 1) + ",2,s-1," + (rowCount * 10 + 2); + fw.write(d + "\r\n"); + fw.close(); + } + + static public void write() throws IOException, InterruptedException, WriteProcessException { + File file = new File(outputDataFile); + File errorFile = new File(errorOutputDataFile); + if (file.exists()) + file.delete(); + if (errorFile.exists()) + errorFile.delete(); + + // LOG.info(jsonSchema.toString()); + FileSchema schema = new FileSchema(jsonSchema); + TSFileDescriptor.getInstance().getConfig().groupSizeInByte = rowGroupSize; + TSFileDescriptor.getInstance().getConfig().maxNumberOfPointsInPage = pageSize; + innerWriter = new TsFileWriter(file, schema, TSFileDescriptor.getInstance().getConfig()); + + // write + try { + writeToFile(schema); + } catch (WriteProcessException e) { + e.printStackTrace(); + } + LOG.info("write to file successfully!!"); + } + + private static JSONObject generateTestData() { + TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); + JSONObject s1 = new JSONObject(); + s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); + s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); + s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s2 = new JSONObject(); + s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); + s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s3 = new JSONObject(); + s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); + s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s4 = new JSONObject(); + s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); + s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); + s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.PLAIN.toString()); + JSONObject s5 = 
new JSONObject(); + s5.put(JsonFormatConstant.MEASUREMENT_UID, "s5"); + s5.put(JsonFormatConstant.DATA_TYPE, TSDataType.BOOLEAN.toString()); + s5.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.PLAIN.toString()); + JSONObject s6 = new JSONObject(); + s6.put(JsonFormatConstant.MEASUREMENT_UID, "s6"); + s6.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); + s6.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.RLE.toString()); + JSONObject s7 = new JSONObject(); + s7.put(JsonFormatConstant.MEASUREMENT_UID, "s7"); + s7.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); + s7.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.RLE.toString()); + + JSONArray measureGroup1 = new JSONArray(); + measureGroup1.put(s1); + measureGroup1.put(s2); + measureGroup1.put(s3); + measureGroup1.put(s4); + measureGroup1.put(s5); + measureGroup1.put(s6); + measureGroup1.put(s7); + + JSONObject jsonSchema = new JSONObject(); + jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); + jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1); + // System.out.println(jsonSchema); + return jsonSchema; + } + + static public void writeToFile(FileSchema schema) + throws InterruptedException, IOException, WriteProcessException { + Scanner in = getDataFile(inputDataFile); + long lineCount = 0; + long startTime = System.currentTimeMillis(); + long endTime = System.currentTimeMillis(); + assert in != null; + while (in.hasNextLine()) { + if (lineCount % 1000000 == 0) { + endTime = System.currentTimeMillis(); + // logger.info("write line:{},inner space consumer:{},use + // time:{}",lineCount,innerWriter.calculateMemSizeForEachGroup(),endTime); + LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + } + String str = in.nextLine(); + TSRecord record = RecordUtils.parseSimpleTupleRecord(str, schema); + innerWriter.write(record); + lineCount++; + } + endTime = System.currentTimeMillis(); + LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + innerWriter.close(); + in.close(); + endTime = System.currentTimeMillis(); + LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + LOG.info("src file size:{}GB", FileUtils.getLocalFileByte(inputDataFile, Unit.GB)); + LOG.info("out file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, Unit.MB)); + } + + static private Scanner getDataFile(String path) { + File file = new File(path); + try { + Scanner in = new Scanner(file); + return in; + } catch (FileNotFoundException e) { + e.printStackTrace(); + return null; + } + } +} diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/MetadataQuerierByFileImplTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/MetadataQuerierByFileImplTest.java index 93879aff..1ab2f682 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/MetadataQuerierByFileImplTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/MetadataQuerierByFileImplTest.java @@ -9,7 +9,6 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; - import java.io.IOException; import java.util.List; @@ -18,24 +17,26 @@ */ public class MetadataQuerierByFileImplTest { - private static final String FILE_PATH = TsFileGeneratorForTest.outputDataFile; - private ITsRandomAccessFileReader randomAccessFileReader; + private static final String FILE_PATH = TsFileGeneratorForTest.outputDataFile; + private ITsRandomAccessFileReader 
randomAccessFileReader; - @Before - public void before() throws InterruptedException, WriteProcessException, IOException { - TsFileGeneratorForTest.generateFile(1000000, 1024 * 1024, 10000); - } + @Before + public void before() throws InterruptedException, WriteProcessException, IOException { + TsFileGeneratorForTest.generateFile(1000000, 1024 * 1024, 10000); + } - @After - public void after() throws IOException { - randomAccessFileReader.close(); - TsFileGeneratorForTest.after(); - } + @After + public void after() throws IOException { + randomAccessFileReader.close(); + TsFileGeneratorForTest.after(); + } - @Test - public void test() throws IOException { - randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); - MetadataQuerierByFileImpl metadataQuerierByFile = new MetadataQuerierByFileImpl(randomAccessFileReader); - List encodedSeriesChunkDescriptorList = metadataQuerierByFile.getSeriesChunkDescriptorList(new Path("d2.s1")); - } + @Test + public void test() throws IOException { + randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); + MetadataQuerierByFileImpl metadataQuerierByFile = + new MetadataQuerierByFileImpl(randomAccessFileReader); + List encodedSeriesChunkDescriptorList = + metadataQuerierByFile.getSeriesChunkDescriptorList(new Path("d2.s1")); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/SeriesChunkLoaderTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/SeriesChunkLoaderTest.java index 50d2360c..92dae499 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/SeriesChunkLoaderTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/controller/SeriesChunkLoaderTest.java @@ -11,7 +11,6 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; - import java.io.IOException; import java.util.List; @@ -21,30 +20,34 @@ public class SeriesChunkLoaderTest { - private static final String FILE_PATH = TsFileGeneratorForTest.outputDataFile; - private ITsRandomAccessFileReader randomAccessFileReader; - - @Before - public void before() throws InterruptedException, WriteProcessException, IOException { - TsFileGeneratorForTest.generateFile(1000000, 1024 * 1024, 10000); - } - - @After - public void after() throws IOException { - randomAccessFileReader.close(); - TsFileGeneratorForTest.after(); - } - - @Test - public void test() throws IOException { - randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); - MetadataQuerierByFileImpl metadataQuerierByFile = new MetadataQuerierByFileImpl(randomAccessFileReader); - List encodedSeriesChunkDescriptorList = metadataQuerierByFile.getSeriesChunkDescriptorList(new Path("d2.s1")); - - SeriesChunkLoaderImpl seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); - for (EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor : encodedSeriesChunkDescriptorList) { - MemSeriesChunk memSeriesChunk = seriesChunkLoader.getMemSeriesChunk(encodedSeriesChunkDescriptor); - Assert.assertEquals(encodedSeriesChunkDescriptor.getLengthOfBytes(), memSeriesChunk.getSeriesChunkBodyStream().available()); - } + private static final String FILE_PATH = TsFileGeneratorForTest.outputDataFile; + private ITsRandomAccessFileReader randomAccessFileReader; + + @Before + public void before() throws InterruptedException, WriteProcessException, IOException { + TsFileGeneratorForTest.generateFile(1000000, 1024 * 1024, 10000); + } + + @After + public void after() throws IOException { + 
randomAccessFileReader.close(); + TsFileGeneratorForTest.after(); + } + + @Test + public void test() throws IOException { + randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); + MetadataQuerierByFileImpl metadataQuerierByFile = + new MetadataQuerierByFileImpl(randomAccessFileReader); + List encodedSeriesChunkDescriptorList = + metadataQuerierByFile.getSeriesChunkDescriptorList(new Path("d2.s1")); + + SeriesChunkLoaderImpl seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); + for (EncodedSeriesChunkDescriptor encodedSeriesChunkDescriptor : encodedSeriesChunkDescriptorList) { + MemSeriesChunk memSeriesChunk = + seriesChunkLoader.getMemSeriesChunk(encodedSeriesChunkDescriptor); + Assert.assertEquals(encodedSeriesChunkDescriptor.getLengthOfBytes(), + memSeriesChunk.getSeriesChunkBodyStream().available()); } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/executor/QueryExecutorTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/executor/QueryExecutorTest.java index cb5bd67f..d09c33ad 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/executor/QueryExecutorTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/executor/QueryExecutorTest.java @@ -31,7 +31,6 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; - import java.io.IOException; /** @@ -40,113 +39,111 @@ public class QueryExecutorTest { - private static final String FILE_PATH = TsFileGeneratorForTest.outputDataFile; - private ITsRandomAccessFileReader randomAccessFileReader; - private MetadataQuerierByFileImpl metadataQuerierByFile; - private SeriesChunkLoader seriesChunkLoader; - private int rowCount = 10000; - private QueryWithQueryFilterExecutorImpl queryExecutorWithQueryFilter; - - @Before - public void before() throws InterruptedException, WriteProcessException, IOException { - TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder = "TS_2DIFF"; - TsFileGeneratorForTest.generateFile(rowCount, 16 * 1024 * 1024, 10000); - randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); - metadataQuerierByFile = new MetadataQuerierByFileImpl(randomAccessFileReader); - seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); - queryExecutorWithQueryFilter = new QueryWithQueryFilterExecutorImpl(seriesChunkLoader, metadataQuerierByFile); - } - - @After - public void after() throws IOException { - randomAccessFileReader.close(); - TsFileGeneratorForTest.after(); + private static final String FILE_PATH = TsFileGeneratorForTest.outputDataFile; + private ITsRandomAccessFileReader randomAccessFileReader; + private MetadataQuerierByFileImpl metadataQuerierByFile; + private SeriesChunkLoader seriesChunkLoader; + private int rowCount = 10000; + private QueryWithQueryFilterExecutorImpl queryExecutorWithQueryFilter; + + @Before + public void before() throws InterruptedException, WriteProcessException, IOException { + TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder = "TS_2DIFF"; + TsFileGeneratorForTest.generateFile(rowCount, 16 * 1024 * 1024, 10000); + randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); + metadataQuerierByFile = new MetadataQuerierByFileImpl(randomAccessFileReader); + seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); + queryExecutorWithQueryFilter = + new QueryWithQueryFilterExecutorImpl(seriesChunkLoader, metadataQuerierByFile); + } + + @After + public void after() throws IOException { + 
randomAccessFileReader.close(); + TsFileGeneratorForTest.after(); + } + + @Test + public void query1() throws IOException { + Filter filter = TimeFilter.lt(1480562618100L); + Filter filter2 = ValueFilter.gt(new Binary("dog")); + // Filter filter3 = FilterFactory.and(TimeFilter.gtEq(1480562618000L), + // TimeFilter.ltEq(1480562618100L)); + + QueryFilter queryFilter = QueryFilterFactory.and(new SeriesFilter<>(new Path("d1.s1"), filter), + new SeriesFilter<>(new Path("d1.s4"), filter2)); + + // QueryFilter queryFilter = new SeriesFilter<>(new SeriesDescriptor(new Path("d1.s1"), + // TSDataType.INT32), filter); + + QueryExpression queryExpression = QueryExpression.create().addSelectedPath(new Path("d1.s1")) + .addSelectedPath(new Path("d1.s2")).addSelectedPath(new Path("d1.s4")) + .addSelectedPath(new Path("d1.s5")).setQueryFilter(queryFilter); + long startTimestamp = System.currentTimeMillis(); + QueryDataSet queryDataSet = queryExecutorWithQueryFilter.execute(queryExpression); + long aimedTimestamp = 1480562618000L; + while (queryDataSet.hasNext()) { + RowRecord rowRecord = queryDataSet.next(); + Assert.assertEquals(aimedTimestamp, rowRecord.getTimestamp()); + System.out.println(rowRecord); + aimedTimestamp += 8; } - - @Test - public void query1() throws IOException { - Filter filter = TimeFilter.lt(1480562618100L); - Filter filter2 = ValueFilter.gt(new Binary("dog")); -// Filter filter3 = FilterFactory.and(TimeFilter.gtEq(1480562618000L), TimeFilter.ltEq(1480562618100L)); - - QueryFilter queryFilter = QueryFilterFactory.and( - new SeriesFilter<>(new Path("d1.s1"), filter), - new SeriesFilter<>(new Path("d1.s4"), filter2) - ); - -// QueryFilter queryFilter = new SeriesFilter<>(new SeriesDescriptor(new Path("d1.s1"), TSDataType.INT32), filter); - - QueryExpression queryExpression = QueryExpression.create() - .addSelectedPath(new Path("d1.s1")) - .addSelectedPath(new Path("d1.s2")) - .addSelectedPath(new Path("d1.s4")) - .addSelectedPath(new Path("d1.s5")) - .setQueryFilter(queryFilter); - long startTimestamp = System.currentTimeMillis(); - QueryDataSet queryDataSet = queryExecutorWithQueryFilter.execute(queryExpression); - long aimedTimestamp = 1480562618000L; - while (queryDataSet.hasNext()) { - RowRecord rowRecord = queryDataSet.next(); - Assert.assertEquals(aimedTimestamp, rowRecord.getTimestamp()); - System.out.println(rowRecord); - aimedTimestamp += 8; - } - long endTimestamp = System.currentTimeMillis(); - System.out.println("[Query]:" + queryExpression + "\n[Time]: " + (endTimestamp - startTimestamp) + "ms"); + long endTimestamp = System.currentTimeMillis(); + System.out.println( + "[Query]:" + queryExpression + "\n[Time]: " + (endTimestamp - startTimestamp) + "ms"); + } + + @Test + public void queryWithoutFilter() throws IOException { + QueryExecutor queryExecutor = + new QueryWithoutFilterExecutorImpl(seriesChunkLoader, metadataQuerierByFile); + + QueryExpression queryExpression = QueryExpression.create().addSelectedPath(new Path("d1.s1")) + .addSelectedPath(new Path("d1.s2")).addSelectedPath(new Path("d1.s2")) + .addSelectedPath(new Path("d1.s4")).addSelectedPath(new Path("d1.s5")); + + long aimedTimestamp = 1480562618000L; + int count = 0; + long startTimestamp = System.currentTimeMillis(); + QueryDataSet queryDataSet = queryExecutor.execute(queryExpression); + while (queryDataSet.hasNext()) { + RowRecord rowRecord = queryDataSet.next(); + Assert.assertEquals(aimedTimestamp, rowRecord.getTimestamp()); + aimedTimestamp++; + count++; } - - @Test - public void queryWithoutFilter() 
throws IOException { - QueryExecutor queryExecutor = new QueryWithoutFilterExecutorImpl(seriesChunkLoader, metadataQuerierByFile); - - QueryExpression queryExpression = QueryExpression.create() - .addSelectedPath(new Path("d1.s1")) - .addSelectedPath(new Path("d1.s2")) - .addSelectedPath(new Path("d1.s2")) - .addSelectedPath(new Path("d1.s4")) - .addSelectedPath(new Path("d1.s5")); - - long aimedTimestamp = 1480562618000L; - int count = 0; - long startTimestamp = System.currentTimeMillis(); - QueryDataSet queryDataSet = queryExecutor.execute(queryExpression); - while (queryDataSet.hasNext()) { - RowRecord rowRecord = queryDataSet.next(); - Assert.assertEquals(aimedTimestamp, rowRecord.getTimestamp()); - aimedTimestamp++; - count++; - } - Assert.assertEquals(rowCount, count); - long endTimestamp = System.currentTimeMillis(); - System.out.println("[Query]:" + queryExpression + "\n[Time]: " + (endTimestamp - startTimestamp) + "ms"); - } - - @Test - public void queryWithGlobalTimeFilter() throws IOException { - QueryExecutor queryExecutor = new QueryWithGlobalTimeFilterExecutorImpl(seriesChunkLoader, metadataQuerierByFile); - - QueryFilter queryFilter = new GlobalTimeFilter(FilterFactory.and(TimeFilter.gtEq(1480562618100L), TimeFilter.lt(1480562618200L))); - QueryExpression queryExpression = QueryExpression.create() - .addSelectedPath(new Path("d1.s1")) - .addSelectedPath(new Path("d1.s2")) - .addSelectedPath(new Path("d1.s2")) - .addSelectedPath(new Path("d1.s4")) - .addSelectedPath(new Path("d1.s5")) - .setQueryFilter(queryFilter); - - - long aimedTimestamp = 1480562618100L; - int count = 0; - long startTimestamp = System.currentTimeMillis(); - QueryDataSet queryDataSet = queryExecutor.execute(queryExpression); - while (queryDataSet.hasNext()) { - RowRecord rowRecord = queryDataSet.next(); - Assert.assertEquals(aimedTimestamp, rowRecord.getTimestamp()); - aimedTimestamp++; - count++; - } - Assert.assertEquals(100, count); - long endTimestamp = System.currentTimeMillis(); - System.out.println("[Query]:" + queryExpression + "\n[Time]: " + (endTimestamp - startTimestamp) + "ms"); + Assert.assertEquals(rowCount, count); + long endTimestamp = System.currentTimeMillis(); + System.out.println( + "[Query]:" + queryExpression + "\n[Time]: " + (endTimestamp - startTimestamp) + "ms"); + } + + @Test + public void queryWithGlobalTimeFilter() throws IOException { + QueryExecutor queryExecutor = + new QueryWithGlobalTimeFilterExecutorImpl(seriesChunkLoader, metadataQuerierByFile); + + QueryFilter queryFilter = new GlobalTimeFilter( + FilterFactory.and(TimeFilter.gtEq(1480562618100L), TimeFilter.lt(1480562618200L))); + QueryExpression queryExpression = QueryExpression.create().addSelectedPath(new Path("d1.s1")) + .addSelectedPath(new Path("d1.s2")).addSelectedPath(new Path("d1.s2")) + .addSelectedPath(new Path("d1.s4")).addSelectedPath(new Path("d1.s5")) + .setQueryFilter(queryFilter); + + + long aimedTimestamp = 1480562618100L; + int count = 0; + long startTimestamp = System.currentTimeMillis(); + QueryDataSet queryDataSet = queryExecutor.execute(queryExpression); + while (queryDataSet.hasNext()) { + RowRecord rowRecord = queryDataSet.next(); + Assert.assertEquals(aimedTimestamp, rowRecord.getTimestamp()); + aimedTimestamp++; + count++; } + Assert.assertEquals(100, count); + long endTimestamp = System.currentTimeMillis(); + System.out.println( + "[Query]:" + queryExpression + "\n[Time]: " + (endTimestamp - startTimestamp) + "ms"); + } } diff --git 
a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/NodeTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/NodeTest.java index 076b5860..e91930b0 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/NodeTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/NodeTest.java @@ -9,7 +9,6 @@ import cn.edu.tsinghua.tsfile.timeseries.readV2.reader.SeriesReader; import org.junit.Assert; import org.junit.Test; - import java.io.IOException; /** @@ -17,91 +16,91 @@ */ public class NodeTest { - @Test - public void testLeafNode() throws IOException { - int index = 0; - long[] timestamps = new long[]{1, 2, 3, 4, 5, 6, 7}; - SeriesReader seriesReader = new FakedSeriesReader(timestamps); - Node leafNode = new LeafNode(seriesReader); - while (leafNode.hasNext()) { - Assert.assertEquals(timestamps[index++], leafNode.next()); - } - } - - @Test - public void testOrNode() throws IOException { - long[] ret = new long[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 20}; - long[] left = new long[]{1, 3, 5, 7, 9, 10, 20}; - long[] right = new long[]{2, 3, 4, 5, 6, 7, 8}; - testOr(ret, left, right); - testOr(new long[]{}, new long[]{}, new long[]{}); - testOr(new long[]{1}, new long[]{1}, new long[]{}); - testOr(new long[]{1}, new long[]{1}, new long[]{1}); - testOr(new long[]{1, 2}, new long[]{1}, new long[]{1, 2}); - testOr(new long[]{1, 2}, new long[]{1, 2}, new long[]{1, 2}); - testOr(new long[]{1, 2, 3}, new long[]{1, 2}, new long[]{1, 2, 3}); + @Test + public void testLeafNode() throws IOException { + int index = 0; + long[] timestamps = new long[] {1, 2, 3, 4, 5, 6, 7}; + SeriesReader seriesReader = new FakedSeriesReader(timestamps); + Node leafNode = new LeafNode(seriesReader); + while (leafNode.hasNext()) { + Assert.assertEquals(timestamps[index++], leafNode.next()); } - - private void testOr(long[] ret, long[] left, long[] right) throws IOException { - int index = 0; - Node orNode = new OrNode(new LeafNode(new FakedSeriesReader(left)), - new LeafNode(new FakedSeriesReader(right))); - while (orNode.hasNext()) { - long value = orNode.next(); - Assert.assertEquals(ret[index++], value); - } - Assert.assertEquals(ret.length, index); + } + + @Test + public void testOrNode() throws IOException { + long[] ret = new long[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 20}; + long[] left = new long[] {1, 3, 5, 7, 9, 10, 20}; + long[] right = new long[] {2, 3, 4, 5, 6, 7, 8}; + testOr(ret, left, right); + testOr(new long[] {}, new long[] {}, new long[] {}); + testOr(new long[] {1}, new long[] {1}, new long[] {}); + testOr(new long[] {1}, new long[] {1}, new long[] {1}); + testOr(new long[] {1, 2}, new long[] {1}, new long[] {1, 2}); + testOr(new long[] {1, 2}, new long[] {1, 2}, new long[] {1, 2}); + testOr(new long[] {1, 2, 3}, new long[] {1, 2}, new long[] {1, 2, 3}); + } + + private void testOr(long[] ret, long[] left, long[] right) throws IOException { + int index = 0; + Node orNode = new OrNode(new LeafNode(new FakedSeriesReader(left)), + new LeafNode(new FakedSeriesReader(right))); + while (orNode.hasNext()) { + long value = orNode.next(); + Assert.assertEquals(ret[index++], value); } - - @Test - public void testAndNode() throws IOException { - testAnd(new long[]{}, new long[]{1, 2, 3, 4}, new long[]{}); - testAnd(new long[]{}, new long[]{1, 2, 3, 4, 8}, new long[]{5, 6, 7}); - testAnd(new long[]{2}, new long[]{1, 2, 3, 4}, new long[]{2, 5, 6}); - testAnd(new long[]{1, 2, 3}, new long[]{1, 2, 3, 4}, new 
long[]{1, 2, 3}); - testAnd(new long[]{1, 2, 3, 9}, new long[]{1, 2, 3, 4, 9}, new long[]{1, 2, 3, 8, 9}); + Assert.assertEquals(ret.length, index); + } + + @Test + public void testAndNode() throws IOException { + testAnd(new long[] {}, new long[] {1, 2, 3, 4}, new long[] {}); + testAnd(new long[] {}, new long[] {1, 2, 3, 4, 8}, new long[] {5, 6, 7}); + testAnd(new long[] {2}, new long[] {1, 2, 3, 4}, new long[] {2, 5, 6}); + testAnd(new long[] {1, 2, 3}, new long[] {1, 2, 3, 4}, new long[] {1, 2, 3}); + testAnd(new long[] {1, 2, 3, 9}, new long[] {1, 2, 3, 4, 9}, new long[] {1, 2, 3, 8, 9}); + } + + private void testAnd(long[] ret, long[] left, long[] right) throws IOException { + int index = 0; + Node andNode = new AndNode(new LeafNode(new FakedSeriesReader(left)), + new LeafNode(new FakedSeriesReader(right))); + while (andNode.hasNext()) { + long value = andNode.next(); + Assert.assertEquals(ret[index++], value); } + Assert.assertEquals(ret.length, index); + } - private void testAnd(long[] ret, long[] left, long[] right) throws IOException { - int index = 0; - Node andNode = new AndNode(new LeafNode(new FakedSeriesReader(left)), - new LeafNode(new FakedSeriesReader(right))); - while (andNode.hasNext()) { - long value = andNode.next(); - Assert.assertEquals(ret[index++], value); - } - Assert.assertEquals(ret.length, index); - } + private static class FakedSeriesReader implements SeriesReader { - private static class FakedSeriesReader implements SeriesReader { + private long[] timestamps; + private int index; - private long[] timestamps; - private int index; - - public FakedSeriesReader(long[] timestamps) { - this.timestamps = timestamps; - index = 0; - } + public FakedSeriesReader(long[] timestamps) { + this.timestamps = timestamps; + index = 0; + } - @Override - public boolean hasNext() throws IOException { - return index < timestamps.length; - } + @Override + public boolean hasNext() throws IOException { + return index < timestamps.length; + } - @Override - public TimeValuePair next() throws IOException { - return new TimeValuePair(timestamps[index++], new TsPrimitiveType.TsLong(1L)); - } + @Override + public TimeValuePair next() throws IOException { + return new TimeValuePair(timestamps[index++], new TsPrimitiveType.TsLong(1L)); + } - @Override - public void skipCurrentTimeValuePair() throws IOException { - next(); - } + @Override + public void skipCurrentTimeValuePair() throws IOException { + next(); + } - @Override - public void close() throws IOException { + @Override + public void close() throws IOException { - } } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/TimestampGeneratorTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/TimestampGeneratorTest.java index efb6e478..b413cd81 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/TimestampGeneratorTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/readV2/query/timegenerator/TimestampGeneratorTest.java @@ -23,7 +23,6 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; - import java.io.IOException; /** @@ -31,46 +30,47 @@ */ public class TimestampGeneratorTest { - private static final String FILE_PATH = TsFileGeneratorForTest.outputDataFile; - private ITsRandomAccessFileReader randomAccessFileReader; - private MetadataQuerierByFileImpl metadataQuerierByFile; - private SeriesChunkLoader seriesChunkLoader; + private static final String FILE_PATH = 
TsFileGeneratorForTest.outputDataFile; + private ITsRandomAccessFileReader randomAccessFileReader; + private MetadataQuerierByFileImpl metadataQuerierByFile; + private SeriesChunkLoader seriesChunkLoader; - @Before - public void before() throws InterruptedException, WriteProcessException, IOException { - TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder = "TS_2DIFF"; - TsFileGeneratorForTest.generateFile(1000, 10 * 1024 * 1024, 10000); - randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); - metadataQuerierByFile = new MetadataQuerierByFileImpl(randomAccessFileReader); - seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); - } + @Before + public void before() throws InterruptedException, WriteProcessException, IOException { + TSFileDescriptor.getInstance().getConfig().timeSeriesEncoder = "TS_2DIFF"; + TsFileGeneratorForTest.generateFile(1000, 10 * 1024 * 1024, 10000); + randomAccessFileReader = new TsRandomAccessLocalFileReader(FILE_PATH); + metadataQuerierByFile = new MetadataQuerierByFileImpl(randomAccessFileReader); + seriesChunkLoader = new SeriesChunkLoaderImpl(randomAccessFileReader); + } - @After - public void after() throws IOException { - randomAccessFileReader.close(); - TsFileGeneratorForTest.after(); - } + @After + public void after() throws IOException { + randomAccessFileReader.close(); + TsFileGeneratorForTest.after(); + } - @Test - public void testTimeGenerator() throws IOException { - long startTimestamp = 1480562618000L; - Filter filter = TimeFilter.lt(1480562618100L); - Filter filter2 = ValueFilter.gt(new Binary("dog")); - Filter filter3 = FilterFactory.and(TimeFilter.gtEq(1480562618000L), TimeFilter.ltEq(1480562618100L)); + @Test + public void testTimeGenerator() throws IOException { + long startTimestamp = 1480562618000L; + Filter filter = TimeFilter.lt(1480562618100L); + Filter filter2 = ValueFilter.gt(new Binary("dog")); + Filter filter3 = + FilterFactory.and(TimeFilter.gtEq(1480562618000L), TimeFilter.ltEq(1480562618100L)); - QueryFilter queryFilter = QueryFilterFactory.or( - QueryFilterFactory.and( - new SeriesFilter<>(new Path("d1.s1"), filter), - new SeriesFilter<>(new Path("d1.s4"), filter2) - ), - new SeriesFilter<>(new Path("d1.s1"), filter3)); + QueryFilter queryFilter = QueryFilterFactory.or( + QueryFilterFactory.and(new SeriesFilter<>(new Path("d1.s1"), filter), + new SeriesFilter<>(new Path("d1.s4"), filter2)), + new SeriesFilter<>(new Path("d1.s1"), filter3)); - TimestampGeneratorByQueryFilterImpl timestampGenerator = new TimestampGeneratorByQueryFilterImpl(queryFilter, seriesChunkLoader, metadataQuerierByFile); - while (timestampGenerator.hasNext()) { -// System.out.println(timestampGenerator.next()); - Assert.assertEquals(startTimestamp, timestampGenerator.next()); - startTimestamp += 1; - } - Assert.assertEquals(1480562618101L, startTimestamp); + TimestampGeneratorByQueryFilterImpl timestampGenerator = + new TimestampGeneratorByQueryFilterImpl(queryFilter, seriesChunkLoader, + metadataQuerierByFile); + while (timestampGenerator.hasNext()) { + // System.out.println(timestampGenerator.next()); + Assert.assertEquals(startTimestamp, timestampGenerator.next()); + startTimestamp += 1; } + Assert.assertEquals(1480562618101L, startTimestamp); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/FileUtilsTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/FileUtilsTest.java index 8441afa6..818c111e 100755 --- 
a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/FileUtilsTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/FileUtilsTest.java @@ -1,9 +1,7 @@ package cn.edu.tsinghua.tsfile.timeseries.utils; import static org.junit.Assert.assertEquals; - import org.junit.Test; - import cn.edu.tsinghua.tsfile.timeseries.constant.TimeseriesTestConstant; import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils.Unit; @@ -14,36 +12,33 @@ */ public class FileUtilsTest { - @Test - public void testConvertUnit() { - long kb = 3 * 1024; - long mb = kb * 1024; - long gb = mb * 1024; - assertEquals(3.0 * 1024, FileUtils.transformUnit(kb, Unit.B), - TimeseriesTestConstant.double_min_delta); - assertEquals(3, FileUtils.transformUnit(kb, Unit.KB), - TimeseriesTestConstant.double_min_delta); - - assertEquals(3, FileUtils.transformUnit(mb, Unit.MB), - TimeseriesTestConstant.double_min_delta); - assertEquals(3, FileUtils.transformUnit(gb, Unit.GB), - TimeseriesTestConstant.double_min_delta); - } - - @Test - public void testConvertToByte() { - assertEquals(3l, (long) FileUtils.transformUnitToByte(3, Unit.B)); - assertEquals(3l * 1024, (long) FileUtils.transformUnitToByte(3, Unit.KB)); - assertEquals(3l * 1024 * 1024, (long) FileUtils.transformUnitToByte(3, Unit.MB)); - assertEquals(3l * 1024 * 1024 * 1024, (long) FileUtils.transformUnitToByte(3, Unit.GB)); - } - -// @Deprecated -// public void testGetLocalFileByte() { -// String fileName = "src/test/resources/test_schema.json"; -// assertEquals(843.0, FileUtils.getLocalFileByte(fileName, Unit.B), -// TimeseriesTestConstant.double_min_delta); -// assertEquals(0.82, FileUtils.getLocalFileByte(fileName, Unit.KB), -// TimeseriesTestConstant.double_min_delta); -// } + @Test + public void testConvertUnit() { + long kb = 3 * 1024; + long mb = kb * 1024; + long gb = mb * 1024; + assertEquals(3.0 * 1024, FileUtils.transformUnit(kb, Unit.B), + TimeseriesTestConstant.double_min_delta); + assertEquals(3, FileUtils.transformUnit(kb, Unit.KB), TimeseriesTestConstant.double_min_delta); + + assertEquals(3, FileUtils.transformUnit(mb, Unit.MB), TimeseriesTestConstant.double_min_delta); + assertEquals(3, FileUtils.transformUnit(gb, Unit.GB), TimeseriesTestConstant.double_min_delta); + } + + @Test + public void testConvertToByte() { + assertEquals(3l, (long) FileUtils.transformUnitToByte(3, Unit.B)); + assertEquals(3l * 1024, (long) FileUtils.transformUnitToByte(3, Unit.KB)); + assertEquals(3l * 1024 * 1024, (long) FileUtils.transformUnitToByte(3, Unit.MB)); + assertEquals(3l * 1024 * 1024 * 1024, (long) FileUtils.transformUnitToByte(3, Unit.GB)); + } + + // @Deprecated + // public void testGetLocalFileByte() { + // String fileName = "src/test/resources/test_schema.json"; + // assertEquals(843.0, FileUtils.getLocalFileByte(fileName, Unit.B), + // TimeseriesTestConstant.double_min_delta); + // assertEquals(0.82, FileUtils.getLocalFileByte(fileName, Unit.KB), + // TimeseriesTestConstant.double_min_delta); + // } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/RecordUtilsTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/RecordUtilsTest.java index a5da5f92..5f302190 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/RecordUtilsTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/RecordUtilsTest.java @@ -13,9 +13,7 @@ import org.json.JSONObject; import org.junit.Before; import org.junit.Test; - import java.util.List; - import static org.junit.Assert.assertEquals; /** @@ -24,194 +22,187 @@ * */ public 
class RecordUtilsTest { - FileSchema schema; - JSONObject jsonSchema = generateTestData(); - - private static JSONObject generateTestData() { - TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); - JSONObject s1 = new JSONObject(); - s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); - s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); - s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s2 = new JSONObject(); - s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); - s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s3 = new JSONObject(); - s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); - s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); - s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s4 = new JSONObject(); - s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); - s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); - s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s5 = new JSONObject(); - s5.put(JsonFormatConstant.MEASUREMENT_UID, "s5"); - s5.put(JsonFormatConstant.DATA_TYPE, TSDataType.ENUMS.toString()); - s5.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.BITMAP.toString()); - s5.put(JsonFormatConstant.ENUM_VALUES, new JSONArray("[\"MAN\",\"WOMAN\"]")); - JSONObject s6 = new JSONObject(); - s6.put(JsonFormatConstant.MEASUREMENT_UID, "s6"); - s6.put(JsonFormatConstant.DATA_TYPE, TSDataType.BOOLEAN.toString()); - s6.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.PLAIN.toString()); - JSONObject s7 = new JSONObject(); - s7.put(JsonFormatConstant.MEASUREMENT_UID, "s7"); - s7.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); - s7.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.PLAIN.toString()); - JSONArray columnGroup1 = new JSONArray(); - columnGroup1.put(s1); - columnGroup1.put(s2); - columnGroup1.put(s3); - columnGroup1.put(s4); - columnGroup1.put(s5); - columnGroup1.put(s6); - columnGroup1.put(s7); - - JSONObject jsonSchema = new JSONObject(); - jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, columnGroup1); - jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "1"); - return jsonSchema; - } - - @Before - public void prepare() throws WriteProcessException { - schema = new FileSchema(jsonSchema); - } - - @Test - public void testParseSimpleTupleRecordInt() { - String testString = "d1,1471522347000,s1,1"; - TSRecord record = RecordUtils.parseSimpleTupleRecord(testString, schema); - assertEquals(record.time, 1471522347000l); - assertEquals(record.deltaObjectId, "d1"); - List tuples = record.dataPointList; - assertEquals(1, tuples.size()); - DataPoint tuple = tuples.get(0); - // System.err.println(tuple.getValue()); - assertEquals(tuple.getMeasurementId(), "s1"); - assertEquals(tuple.getType(), TSDataType.INT32); - assertEquals(tuple.getValue(), 1); - - testString = "d1,1471522347000,s1,1,"; - record = RecordUtils.parseSimpleTupleRecord(testString, schema); - assertEquals(record.time, 1471522347000l); - assertEquals(record.deltaObjectId, "d1"); - tuples = record.dataPointList; - assertEquals(1, tuples.size()); - tuple = tuples.get(0); - assertEquals(tuple.getMeasurementId(), "s1"); - assertEquals(tuple.getType(), TSDataType.INT32); - assertEquals(tuple.getValue(), 1); - - testString = "d1,1471522347000,s1,1,s2"; - record = RecordUtils.parseSimpleTupleRecord(testString, schema); - 
assertEquals(record.time, 1471522347000l); - assertEquals(record.deltaObjectId, "d1"); - tuples = record.dataPointList; - assertEquals(1, tuples.size()); - tuple = tuples.get(0); - assertEquals(tuple.getMeasurementId(), "s1"); - assertEquals(tuple.getType(), TSDataType.INT32); - assertEquals(tuple.getValue(), 1); - - } - - @Test - public void testParseSimpleTupleRecordNull() { - String testString = "d1,1471522347000,s1,1,s2,,s3,"; - TSRecord record = RecordUtils.parseSimpleTupleRecord(testString, schema); - assertEquals(record.time, 1471522347000l); - List tuples = record.dataPointList; - assertEquals(tuples.size(), 1); - DataPoint tuple = tuples.get(0); - // System.err.println(tuple.getValue()); - assertEquals(tuple.getMeasurementId(), "s1"); - assertEquals(tuple.getType(), TSDataType.INT32); - assertEquals(tuple.getValue(), 1); - } - - @Test - public void testParseSimpleTupleRecordAll() { - String testString = - "d1,1471522347000,s1,1,s2,134134287192587,s3,1.4,s4,1.128794817,s5,MAN,s6,true"; - TSRecord record = RecordUtils.parseSimpleTupleRecord(testString, schema); - assertEquals(record.time, 1471522347000l); - assertEquals(record.deltaObjectId, "d1"); - List tuples = record.dataPointList; - assertEquals(6, tuples.size()); - DataPoint tuple = tuples.get(0); - assertEquals(tuple.getMeasurementId(), "s1"); - assertEquals(tuple.getType(), TSDataType.INT32); - assertEquals(tuple.getValue(), 1); - tuple = tuples.get(1); - assertEquals(tuple.getMeasurementId(), "s2"); - assertEquals(tuple.getType(), TSDataType.INT64); - assertEquals(tuple.getValue(), 134134287192587l); - tuple = tuples.get(2); - assertEquals(tuple.getMeasurementId(), "s3"); - assertEquals(tuple.getType(), TSDataType.FLOAT); - assertEquals(tuple.getValue(), 1.4f); - tuple = tuples.get(3); - assertEquals(tuple.getMeasurementId(), "s4"); - assertEquals(tuple.getType(), TSDataType.DOUBLE); - assertEquals(tuple.getValue(), 1.128794817d); - tuple = tuples.get(4); - assertEquals(tuple.getMeasurementId(), "s5"); - assertEquals(tuple.getType(), TSDataType.ENUMS); - assertEquals(tuple.getValue(), 1); - tuple = tuples.get(5); - assertEquals(tuple.getMeasurementId(), "s6"); - assertEquals(tuple.getType(), TSDataType.BOOLEAN); - assertEquals(tuple.getValue(), true); - } - - @Test - public void testError() { - String testString = "d1,1471522347000,s1,1,s2,s123"; - TSRecord record = RecordUtils.parseSimpleTupleRecord(testString, schema); - assertEquals(record.time, 1471522347000l); - List tuples = record.dataPointList; - assertEquals(tuples.size(), 1); - DataPoint tuple = tuples.get(0); - // System.err.println(tuple.getValue()); - assertEquals(tuple.getMeasurementId(), "s1"); - assertEquals(tuple.getType(), TSDataType.INT32); - assertEquals(tuple.getValue(), 1); - } - - @Test - public void testErrorMeasurementAndTimeStamp() { - String testString = "d1,1471522347000,s1,1,s123,1"; - TSRecord record = RecordUtils.parseSimpleTupleRecord(testString, schema); - assertEquals(record.time, 1471522347000l); - List tuples = record.dataPointList; - assertEquals(tuples.size(), 1); - DataPoint tuple = tuples.get(0); - // System.err.println(tuple.getValue()); - assertEquals(tuple.getMeasurementId(), "s1"); - assertEquals(tuple.getType(), TSDataType.INT32); - assertEquals(tuple.getValue(), 1); - - testString = "d1,1dsjhk,s1,1,s123,1"; - record = RecordUtils.parseSimpleTupleRecord(testString, schema); - assertEquals(record.time, -1); - tuples = record.dataPointList; - assertEquals(tuples.size(), 0); - - - testString = "d1,1471522347000,s8,1"; - record = 
RecordUtils.parseSimpleTupleRecord(testString, schema); - assertEquals(record.time, 1471522347000l); - tuples = record.dataPointList; - assertEquals(tuples.size(), 0); - - } + FileSchema schema; + JSONObject jsonSchema = generateTestData(); + + private static JSONObject generateTestData() { + TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); + JSONObject s1 = new JSONObject(); + s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); + s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); + s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s2 = new JSONObject(); + s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); + s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s3 = new JSONObject(); + s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); + s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); + s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s4 = new JSONObject(); + s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); + s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); + s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s5 = new JSONObject(); + s5.put(JsonFormatConstant.MEASUREMENT_UID, "s5"); + s5.put(JsonFormatConstant.DATA_TYPE, TSDataType.ENUMS.toString()); + s5.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.BITMAP.toString()); + s5.put(JsonFormatConstant.ENUM_VALUES, new JSONArray("[\"MAN\",\"WOMAN\"]")); + JSONObject s6 = new JSONObject(); + s6.put(JsonFormatConstant.MEASUREMENT_UID, "s6"); + s6.put(JsonFormatConstant.DATA_TYPE, TSDataType.BOOLEAN.toString()); + s6.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.PLAIN.toString()); + JSONObject s7 = new JSONObject(); + s7.put(JsonFormatConstant.MEASUREMENT_UID, "s7"); + s7.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); + s7.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.PLAIN.toString()); + JSONArray columnGroup1 = new JSONArray(); + columnGroup1.put(s1); + columnGroup1.put(s2); + columnGroup1.put(s3); + columnGroup1.put(s4); + columnGroup1.put(s5); + columnGroup1.put(s6); + columnGroup1.put(s7); + + JSONObject jsonSchema = new JSONObject(); + jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, columnGroup1); + jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "1"); + return jsonSchema; + } + + @Before + public void prepare() throws WriteProcessException { + schema = new FileSchema(jsonSchema); + } + + @Test + public void testParseSimpleTupleRecordInt() { + String testString = "d1,1471522347000,s1,1"; + TSRecord record = RecordUtils.parseSimpleTupleRecord(testString, schema); + assertEquals(record.time, 1471522347000l); + assertEquals(record.deltaObjectId, "d1"); + List tuples = record.dataPointList; + assertEquals(1, tuples.size()); + DataPoint tuple = tuples.get(0); + // System.err.println(tuple.getValue()); + assertEquals(tuple.getMeasurementId(), "s1"); + assertEquals(tuple.getType(), TSDataType.INT32); + assertEquals(tuple.getValue(), 1); + + testString = "d1,1471522347000,s1,1,"; + record = RecordUtils.parseSimpleTupleRecord(testString, schema); + assertEquals(record.time, 1471522347000l); + assertEquals(record.deltaObjectId, "d1"); + tuples = record.dataPointList; + assertEquals(1, tuples.size()); + tuple = tuples.get(0); + assertEquals(tuple.getMeasurementId(), "s1"); + assertEquals(tuple.getType(), TSDataType.INT32); + assertEquals(tuple.getValue(), 1); 
+ + testString = "d1,1471522347000,s1,1,s2"; + record = RecordUtils.parseSimpleTupleRecord(testString, schema); + assertEquals(record.time, 1471522347000l); + assertEquals(record.deltaObjectId, "d1"); + tuples = record.dataPointList; + assertEquals(1, tuples.size()); + tuple = tuples.get(0); + assertEquals(tuple.getMeasurementId(), "s1"); + assertEquals(tuple.getType(), TSDataType.INT32); + assertEquals(tuple.getValue(), 1); + + } + + @Test + public void testParseSimpleTupleRecordNull() { + String testString = "d1,1471522347000,s1,1,s2,,s3,"; + TSRecord record = RecordUtils.parseSimpleTupleRecord(testString, schema); + assertEquals(record.time, 1471522347000l); + List tuples = record.dataPointList; + assertEquals(tuples.size(), 1); + DataPoint tuple = tuples.get(0); + // System.err.println(tuple.getValue()); + assertEquals(tuple.getMeasurementId(), "s1"); + assertEquals(tuple.getType(), TSDataType.INT32); + assertEquals(tuple.getValue(), 1); + } + + @Test + public void testParseSimpleTupleRecordAll() { + String testString = + "d1,1471522347000,s1,1,s2,134134287192587,s3,1.4,s4,1.128794817,s5,MAN,s6,true"; + TSRecord record = RecordUtils.parseSimpleTupleRecord(testString, schema); + assertEquals(record.time, 1471522347000l); + assertEquals(record.deltaObjectId, "d1"); + List tuples = record.dataPointList; + assertEquals(6, tuples.size()); + DataPoint tuple = tuples.get(0); + assertEquals(tuple.getMeasurementId(), "s1"); + assertEquals(tuple.getType(), TSDataType.INT32); + assertEquals(tuple.getValue(), 1); + tuple = tuples.get(1); + assertEquals(tuple.getMeasurementId(), "s2"); + assertEquals(tuple.getType(), TSDataType.INT64); + assertEquals(tuple.getValue(), 134134287192587l); + tuple = tuples.get(2); + assertEquals(tuple.getMeasurementId(), "s3"); + assertEquals(tuple.getType(), TSDataType.FLOAT); + assertEquals(tuple.getValue(), 1.4f); + tuple = tuples.get(3); + assertEquals(tuple.getMeasurementId(), "s4"); + assertEquals(tuple.getType(), TSDataType.DOUBLE); + assertEquals(tuple.getValue(), 1.128794817d); + tuple = tuples.get(4); + assertEquals(tuple.getMeasurementId(), "s5"); + assertEquals(tuple.getType(), TSDataType.ENUMS); + assertEquals(tuple.getValue(), 1); + tuple = tuples.get(5); + assertEquals(tuple.getMeasurementId(), "s6"); + assertEquals(tuple.getType(), TSDataType.BOOLEAN); + assertEquals(tuple.getValue(), true); + } + + @Test + public void testError() { + String testString = "d1,1471522347000,s1,1,s2,s123"; + TSRecord record = RecordUtils.parseSimpleTupleRecord(testString, schema); + assertEquals(record.time, 1471522347000l); + List tuples = record.dataPointList; + assertEquals(tuples.size(), 1); + DataPoint tuple = tuples.get(0); + // System.err.println(tuple.getValue()); + assertEquals(tuple.getMeasurementId(), "s1"); + assertEquals(tuple.getType(), TSDataType.INT32); + assertEquals(tuple.getValue(), 1); + } + + @Test + public void testErrorMeasurementAndTimeStamp() { + String testString = "d1,1471522347000,s1,1,s123,1"; + TSRecord record = RecordUtils.parseSimpleTupleRecord(testString, schema); + assertEquals(record.time, 1471522347000l); + List tuples = record.dataPointList; + assertEquals(tuples.size(), 1); + DataPoint tuple = tuples.get(0); + // System.err.println(tuple.getValue()); + assertEquals(tuple.getMeasurementId(), "s1"); + assertEquals(tuple.getType(), TSDataType.INT32); + assertEquals(tuple.getValue(), 1); + + testString = "d1,1dsjhk,s1,1,s123,1"; + record = RecordUtils.parseSimpleTupleRecord(testString, schema); + assertEquals(record.time, -1); + tuples = 
record.dataPointList; + assertEquals(tuples.size(), 0); + + + testString = "d1,1471522347000,s8,1"; + record = RecordUtils.parseSimpleTupleRecord(testString, schema); + assertEquals(record.time, 1471522347000l); + tuples = record.dataPointList; + assertEquals(tuples.size(), 0); + + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/StringContainerTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/StringContainerTest.java index 7f1b0267..f40c6dab 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/StringContainerTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/StringContainerTest.java @@ -1,7 +1,6 @@ package cn.edu.tsinghua.tsfile.timeseries.utils; import static org.junit.Assert.*; - import org.junit.Test; /** @@ -11,151 +10,151 @@ */ public class StringContainerTest { - @Test - public void testAddTailStringArray() { - StringContainer a = new StringContainer(); - a.addTail("a", "b", "c"); - a.addTail(); - a.addTail("a", "b", "c"); - assertEquals("abcabc", a.toString()); - } - - @Test - public void testAddTailStringContainer() { - StringContainer a = new StringContainer(); - a.addTail("a", "b", "c"); - a.addTail(); - StringContainer b = new StringContainer(); - b.addTail("a", "b", "c"); - a.addTail(b); - assertEquals("abcabc", a.toString()); - } - - @Test - public void testAddHeadStringArray() { - StringContainer a = new StringContainer(); - a.addTail("a", "b", "c"); - StringContainer b = new StringContainer(); - b.addTail("a", "b", "c"); - b.addHead("1", "2", "3"); - a.addHead("!", "@", "#"); - a.addHead(b); - a.addTail(b); - assertEquals("123abc!@#abc123abc", a.toString()); - } - - @Test - public void testAddHeadStringArrayWithSeparator() { - StringContainer c = new StringContainer(","); - c.addHead("a", "b", "c"); - assertEquals("a,b,c", c.toString()); - StringContainer a = new StringContainer(","); - a.addTail("a", "b", "c"); - assertEquals("a,b,c", a.toString()); - StringContainer b = new StringContainer(); - b.addTail("a", "b", "c"); - b.addHead("1", "2", "3"); - a.addHead("!", "@", "#"); - a.addHead(b); - a.addTail(b); - assertEquals("1,2,3,a,b,c,!,@,#,a,b,c,1,2,3,a,b,c", a.toString()); - } - - @Test - public void testGetSubString() { - StringContainer a = new StringContainer(); - try { - a.getSubString(0); - } catch (Exception e) { - assertTrue(e instanceof IndexOutOfBoundsException); - } - a.addHead("a", "bbb", "cc"); - assertEquals("a", a.getSubString(0)); - assertEquals("cc", a.getSubString(-1)); - assertEquals("bbb", a.getSubString(-2)); - try { - a.getSubString(4); - } catch (Exception e) { - assertTrue(e instanceof IndexOutOfBoundsException); - } - a.addTail("dd", "eeee"); - assertEquals("a", a.getSubString(0)); - assertEquals("cc", a.getSubString(-3)); - assertEquals("dd", a.getSubString(3)); - assertEquals("eeee", a.getSubString(-1)); - try { - a.getSubString(9); - } catch (Exception e) { - assertTrue(e instanceof IndexOutOfBoundsException); - } - } - - @Test - public void testGetSubStringContainer() { - StringContainer a = new StringContainer(); - try { - a.getSubStringContainer(0, 1); - } catch (Exception e) { - assertTrue(e instanceof IndexOutOfBoundsException); - } - a.addTail("a", "bbb", "cc"); - assertEquals("", a.getSubStringContainer(1, 0).toString()); - assertEquals("a", a.getSubStringContainer(0, 0).toString()); - assertEquals("bbbcc", a.getSubStringContainer(1, -1).toString()); - assertEquals("bbb", a.getSubStringContainer(-2, -2).toString()); - try { - a.getSubStringContainer(1, 4); - } catch 
(Exception e) { - assertTrue(e instanceof IndexOutOfBoundsException); - } - a.addHead("dd", "eeee"); - assertEquals("eeeea", a.getSubStringContainer(1, 2).toString()); - assertEquals("eeeea", a.getSubStringContainer(1, -3).toString()); - assertEquals("dd", a.getSubStringContainer(0, 0).toString()); - assertEquals("cc", a.getSubStringContainer(-1, -1).toString()); - assertEquals("ddeeeeabbbcc", a.getSubStringContainer(-5, -1).toString()); - try { - a.getSubString(9); - } catch (Exception e) { - assertTrue(e instanceof IndexOutOfBoundsException); - } - } - - @Test - public void testEqual() { - StringContainer c = new StringContainer(","); - c.addHead("a", "b", "c123"); - c.addTail("a", "12", "c"); - c.addTail("1284736", "b", "c"); - StringContainer copyC = c.clone(); - assertTrue(c.equals(copyC)); - assertFalse(c==copyC); - - } - - @Test - public void testHashCode() { - StringContainer c1 = new StringContainer(","); - c1.addHead("a", "b", "c123"); - c1.addTail("a", "12", "c"); - c1.addTail("1284736", "b", "c"); - StringContainer c2 = new StringContainer("."); - c2.addHead("a", "b", "c123"); - c2.addTail("a", "12", "c"); - c2.addTail("1284736", "b", "c"); - StringContainer copyC = c1.clone(); - assertEquals(c1.hashCode(), copyC.hashCode()); - assertNotEquals(c1.hashCode(),c2.hashCode()); - - StringContainer c3 = new StringContainer(","); - c3.addHead("a", "b", "c123"); - assertNotEquals(c1.hashCode(),c3.hashCode()); - - StringContainer c4 = new StringContainer(","); - c4.addTail("a", "b", "c123"); - assertNotEquals(c1.hashCode(),c4.hashCode()); - - - } - + @Test + public void testAddTailStringArray() { + StringContainer a = new StringContainer(); + a.addTail("a", "b", "c"); + a.addTail(); + a.addTail("a", "b", "c"); + assertEquals("abcabc", a.toString()); + } + + @Test + public void testAddTailStringContainer() { + StringContainer a = new StringContainer(); + a.addTail("a", "b", "c"); + a.addTail(); + StringContainer b = new StringContainer(); + b.addTail("a", "b", "c"); + a.addTail(b); + assertEquals("abcabc", a.toString()); + } + + @Test + public void testAddHeadStringArray() { + StringContainer a = new StringContainer(); + a.addTail("a", "b", "c"); + StringContainer b = new StringContainer(); + b.addTail("a", "b", "c"); + b.addHead("1", "2", "3"); + a.addHead("!", "@", "#"); + a.addHead(b); + a.addTail(b); + assertEquals("123abc!@#abc123abc", a.toString()); + } + + @Test + public void testAddHeadStringArrayWithSeparator() { + StringContainer c = new StringContainer(","); + c.addHead("a", "b", "c"); + assertEquals("a,b,c", c.toString()); + StringContainer a = new StringContainer(","); + a.addTail("a", "b", "c"); + assertEquals("a,b,c", a.toString()); + StringContainer b = new StringContainer(); + b.addTail("a", "b", "c"); + b.addHead("1", "2", "3"); + a.addHead("!", "@", "#"); + a.addHead(b); + a.addTail(b); + assertEquals("1,2,3,a,b,c,!,@,#,a,b,c,1,2,3,a,b,c", a.toString()); + } + + @Test + public void testGetSubString() { + StringContainer a = new StringContainer(); + try { + a.getSubString(0); + } catch (Exception e) { + assertTrue(e instanceof IndexOutOfBoundsException); + } + a.addHead("a", "bbb", "cc"); + assertEquals("a", a.getSubString(0)); + assertEquals("cc", a.getSubString(-1)); + assertEquals("bbb", a.getSubString(-2)); + try { + a.getSubString(4); + } catch (Exception e) { + assertTrue(e instanceof IndexOutOfBoundsException); + } + a.addTail("dd", "eeee"); + assertEquals("a", a.getSubString(0)); + assertEquals("cc", a.getSubString(-3)); + assertEquals("dd", a.getSubString(3)); 
+ assertEquals("eeee", a.getSubString(-1)); + try { + a.getSubString(9); + } catch (Exception e) { + assertTrue(e instanceof IndexOutOfBoundsException); + } + } + + @Test + public void testGetSubStringContainer() { + StringContainer a = new StringContainer(); + try { + a.getSubStringContainer(0, 1); + } catch (Exception e) { + assertTrue(e instanceof IndexOutOfBoundsException); + } + a.addTail("a", "bbb", "cc"); + assertEquals("", a.getSubStringContainer(1, 0).toString()); + assertEquals("a", a.getSubStringContainer(0, 0).toString()); + assertEquals("bbbcc", a.getSubStringContainer(1, -1).toString()); + assertEquals("bbb", a.getSubStringContainer(-2, -2).toString()); + try { + a.getSubStringContainer(1, 4); + } catch (Exception e) { + assertTrue(e instanceof IndexOutOfBoundsException); + } + a.addHead("dd", "eeee"); + assertEquals("eeeea", a.getSubStringContainer(1, 2).toString()); + assertEquals("eeeea", a.getSubStringContainer(1, -3).toString()); + assertEquals("dd", a.getSubStringContainer(0, 0).toString()); + assertEquals("cc", a.getSubStringContainer(-1, -1).toString()); + assertEquals("ddeeeeabbbcc", a.getSubStringContainer(-5, -1).toString()); + try { + a.getSubString(9); + } catch (Exception e) { + assertTrue(e instanceof IndexOutOfBoundsException); + } + } + + @Test + public void testEqual() { + StringContainer c = new StringContainer(","); + c.addHead("a", "b", "c123"); + c.addTail("a", "12", "c"); + c.addTail("1284736", "b", "c"); + StringContainer copyC = c.clone(); + assertTrue(c.equals(copyC)); + assertFalse(c == copyC); + + } + + @Test + public void testHashCode() { + StringContainer c1 = new StringContainer(","); + c1.addHead("a", "b", "c123"); + c1.addTail("a", "12", "c"); + c1.addTail("1284736", "b", "c"); + StringContainer c2 = new StringContainer("."); + c2.addHead("a", "b", "c123"); + c2.addTail("a", "12", "c"); + c2.addTail("1284736", "b", "c"); + StringContainer copyC = c1.clone(); + assertEquals(c1.hashCode(), copyC.hashCode()); + assertNotEquals(c1.hashCode(), c2.hashCode()); + + StringContainer c3 = new StringContainer(","); + c3.addHead("a", "b", "c123"); + assertNotEquals(c1.hashCode(), c3.hashCode()); + + StringContainer c4 = new StringContainer(","); + c4.addTail("a", "b", "c123"); + assertNotEquals(c1.hashCode(), c4.hashCode()); + + + } + } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/cache/LRUCacheTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/cache/LRUCacheTest.java index 0bd4412d..8d236411 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/cache/LRUCacheTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/utils/cache/LRUCacheTest.java @@ -9,33 +9,33 @@ */ public class LRUCacheTest { - private LRUCache cache; + private LRUCache cache; - @Test - public void test() { - try { - int testCount = 1000; - int cacheSize = 5; - cache = new LRUCache(cacheSize) { - @Override - public void beforeRemove(Integer object) { - return; - } - - @Override - public Integer loadObjectByKey(Integer key) { - return key * 10; - } - }; + @Test + public void test() { + try { + int testCount = 1000; + int cacheSize = 5; + cache = new LRUCache(cacheSize) { + @Override + public void beforeRemove(Integer object) { + return; + } - for (int i = 1; i < testCount; i++) { - Assert.assertEquals(i * 10, (int) cache.get(i)); - Assert.assertEquals((i - 1) * 10, (int) cache.get(i - 1)); - } - } catch (CacheException e) { - e.printStackTrace(); - Assert.fail(); + @Override + public Integer loadObjectByKey(Integer key) { + return 
key * 10; } + }; + + for (int i = 1; i < testCount; i++) { + Assert.assertEquals(i * 10, (int) cache.get(i)); + Assert.assertEquals((i - 1) * 10, (int) cache.get(i - 1)); + } + } catch (CacheException e) { + e.printStackTrace(); + Assert.fail(); } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/PerfTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/PerfTest.java index 873436fb..5d7289b0 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/PerfTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/PerfTest.java @@ -6,7 +6,6 @@ import java.io.IOException; import java.util.Random; import java.util.Scanner; - import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; @@ -20,7 +19,6 @@ import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import cn.edu.tsinghua.tsfile.common.utils.ITsRandomAccessFileWriter; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils; @@ -37,182 +35,177 @@ * @author kangrong */ public class PerfTest { - private static final Logger LOG = LoggerFactory.getLogger(PerfTest.class); - public static final int ROW_COUNT = 100; - public static TsFileWriter innerWriter; - static public String inputDataFile; - static public String outputDataFile; - static public String errorOutputDataFile; - static public JSONObject jsonSchema; - static public Random r = new Random(); - - @Before - public void prepare() throws IOException { - inputDataFile = "src/test/resources/perTestInputData"; - outputDataFile = "src/test/resources/perTestOutputData.ksn"; - errorOutputDataFile = "src/test/resources/perTestErrorOutputData.ksn"; - jsonSchema = generateTestData(); - generateSampleInputDataFile(); - } - - @After - public void after() { - File file = new File(inputDataFile); - if (file.exists()) - file.delete(); - file = new File(outputDataFile); - if (file.exists()) - file.delete(); - file = new File(errorOutputDataFile); - if (file.exists()) - file.delete(); + private static final Logger LOG = LoggerFactory.getLogger(PerfTest.class); + public static final int ROW_COUNT = 100; + public static TsFileWriter innerWriter; + static public String inputDataFile; + static public String outputDataFile; + static public String errorOutputDataFile; + static public JSONObject jsonSchema; + static public Random r = new Random(); + + @Before + public void prepare() throws IOException { + inputDataFile = "src/test/resources/perTestInputData"; + outputDataFile = "src/test/resources/perTestOutputData.ksn"; + errorOutputDataFile = "src/test/resources/perTestErrorOutputData.ksn"; + jsonSchema = generateTestData(); + generateSampleInputDataFile(); + } + + @After + public void after() { + File file = new File(inputDataFile); + if (file.exists()) + file.delete(); + file = new File(outputDataFile); + if (file.exists()) + file.delete(); + file = new File(errorOutputDataFile); + if (file.exists()) + file.delete(); + } + + static private void generateSampleInputDataFile() throws IOException { + File file = new File(inputDataFile); + if (file.exists()) + file.delete(); + FileWriter fw = new FileWriter(file); + + long startTime = System.currentTimeMillis(); + startTime = startTime - startTime % 1000; + Random rm = new Random(); + for (int i = 0; i < ROW_COUNT; i++) { + String string4 = ",s4," + (char) (97 + i % 26); + // write d1 + String d1 = 
"d1," + (startTime + i) + ",s1," + (i * 10 + 1) + ",s2," + (i * 10 + 2) + string4; + if (rm.nextInt(1000) < 100) { + // LOG.info("write null to d1:" + (startTime + i)); + d1 = "d1," + (startTime + i) + ",s1,,s2," + (i * 10 + 2) + string4; + } + if (i % 5 == 0) + d1 += ",s3," + (i * 10 + 3); + fw.write(d1 + "\r\n"); + + // write d2 + String d2 = "d2," + (startTime + i) + ",s2," + (i * 10 + 2) + ",s3," + (i * 10 + 3) + string4; + if (rm.nextInt(1000) < 100) { + // LOG.info("write null to d2:" + (startTime + i)); + d2 = "d2," + (startTime + i) + ",s2,,s3," + (i * 10 + 3) + string4; + } + if (i % 5 == 0) + d2 += ",s1," + (i * 10 + 1); + fw.write(d2 + "\r\n"); } - - static private void generateSampleInputDataFile() throws IOException { - File file = new File(inputDataFile); - if (file.exists()) - file.delete(); - FileWriter fw = new FileWriter(file); - - long startTime = System.currentTimeMillis(); - startTime = startTime - startTime % 1000; - Random rm = new Random(); - for (int i = 0; i < ROW_COUNT; i++) { - String string4 = ",s4," + (char) (97 + i % 26); - // write d1 - String d1 = "d1," + (startTime + i) + ",s1," + (i * 10 + 1) + ",s2," + (i * 10 + 2) + string4; - if (rm.nextInt(1000) < 100) { - // LOG.info("write null to d1:" + (startTime + i)); - d1 = "d1," + (startTime + i) + ",s1,,s2," + (i * 10 + 2) + string4; - } - if (i % 5 == 0) - d1 += ",s3," + (i * 10 + 3); - fw.write(d1 + "\r\n"); - - // write d2 - String d2 = "d2," + (startTime + i) + ",s2," + (i * 10 + 2) + ",s3," + (i * 10 + 3) + string4; - if (rm.nextInt(1000) < 100) { - // LOG.info("write null to d2:" + (startTime + i)); - d2 = "d2," + (startTime + i) + ",s2,,s3," + (i * 10 + 3) + string4; - } - if (i % 5 == 0) - d2 += ",s1," + (i * 10 + 1); - fw.write(d2 + "\r\n"); - } - // write error - String d = - "d2,3," + (startTime + ROW_COUNT) + ",s2," + (ROW_COUNT * 10 + 2) + ",s3," - + (ROW_COUNT * 10 + 3); - fw.write(d + "\r\n"); - d = "d2," + (startTime + ROW_COUNT + 1) + ",2,s-1," + (ROW_COUNT * 10 + 2); - fw.write(d + "\r\n"); - fw.close(); + // write error + String d = "d2,3," + (startTime + ROW_COUNT) + ",s2," + (ROW_COUNT * 10 + 2) + ",s3," + + (ROW_COUNT * 10 + 3); + fw.write(d + "\r\n"); + d = "d2," + (startTime + ROW_COUNT + 1) + ",2,s-1," + (ROW_COUNT * 10 + 2); + fw.write(d + "\r\n"); + fw.close(); + } + + @Test + public void writeTest() throws IOException, InterruptedException, WriteProcessException { + write(); + } + + static public void write() throws IOException, InterruptedException, WriteProcessException { + File file = new File(outputDataFile); + File errorFile = new File(errorOutputDataFile); + if (file.exists()) + file.delete(); + if (errorFile.exists()) + errorFile.delete(); + + // LOG.info(jsonSchema.toString()); + FileSchema schema = new FileSchema(jsonSchema); + + // TSFileDescriptor.conf.rowGroupSize = 2000; + // TSFileDescriptor.conf.pageSize = 100; + innerWriter = new TsFileWriter(file, schema, TSFileDescriptor.getInstance().getConfig()); + + // write + try { + writeToFile(schema); + } catch (WriteProcessException e) { + e.printStackTrace(); } - - @Test - public void writeTest() throws IOException, InterruptedException, WriteProcessException { - write(); - } - - static public void write() throws IOException, InterruptedException, WriteProcessException { - File file = new File(outputDataFile); - File errorFile = new File(errorOutputDataFile); - if (file.exists()) - file.delete(); - if (errorFile.exists()) - errorFile.delete(); - - //LOG.info(jsonSchema.toString()); - FileSchema schema = new 
FileSchema(jsonSchema); - - // TSFileDescriptor.conf.rowGroupSize = 2000; - // TSFileDescriptor.conf.pageSize = 100; - innerWriter = new TsFileWriter(file, schema, TSFileDescriptor.getInstance().getConfig()); - - // write - try { - writeToFile(schema); - } catch (WriteProcessException e) { - e.printStackTrace(); - } - LOG.info("write to file successfully!!"); + LOG.info("write to file successfully!!"); + } + + static private Scanner getDataFile(String path) { + File file = new File(path); + try { + Scanner in = new Scanner(file); + return in; + } catch (FileNotFoundException e) { + e.printStackTrace(); + return null; } - - static private Scanner getDataFile(String path) { - File file = new File(path); - try { - Scanner in = new Scanner(file); - return in; - } catch (FileNotFoundException e) { - e.printStackTrace(); - return null; - } - } - - static public void writeToFile(FileSchema schema) throws InterruptedException, IOException, WriteProcessException { - Scanner in = getDataFile(inputDataFile); - long lineCount = 0; - long startTime = System.currentTimeMillis(); - long endTime = System.currentTimeMillis(); - assert in != null; - while (in.hasNextLine()) { - if (lineCount % 1000000 == 0) { - endTime = System.currentTimeMillis(); - // logger.info("write line:{},inner space consumer:{},use - // time:{}",lineCount,innerWriter.calculateMemSizeForEachGroup(),endTime); - LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - } - String str = in.nextLine(); - TSRecord record = RecordUtils.parseSimpleTupleRecord(str, schema); - innerWriter.write(record); - lineCount++; - } + } + + static public void writeToFile(FileSchema schema) + throws InterruptedException, IOException, WriteProcessException { + Scanner in = getDataFile(inputDataFile); + long lineCount = 0; + long startTime = System.currentTimeMillis(); + long endTime = System.currentTimeMillis(); + assert in != null; + while (in.hasNextLine()) { + if (lineCount % 1000000 == 0) { endTime = System.currentTimeMillis(); + // logger.info("write line:{},inner space consumer:{},use + // time:{}",lineCount,innerWriter.calculateMemSizeForEachGroup(),endTime); LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - innerWriter.close(); - endTime = System.currentTimeMillis(); - LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); - LOG.info("src file size:{}GB", FileUtils.getLocalFileByte(inputDataFile, Unit.GB)); - LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, Unit.MB)); - } - - private static JSONObject generateTestData() { - TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); - JSONObject s1 = new JSONObject(); - s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); - s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s2 = new JSONObject(); - s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); - s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s3 = new JSONObject(); - s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); - s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, - conf.valueEncoder); - JSONObject s4 = new JSONObject(); - s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); - s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); - 
s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.PLAIN.toString()); - JSONObject s5 = new JSONObject(); - s5.put(JsonFormatConstant.MEASUREMENT_UID, "s5"); - s5.put(JsonFormatConstant.DATA_TYPE, TSDataType.ENUMS.toString()); - s5.put(JsonFormatConstant.MEASUREMENT_ENCODING, - TSEncoding.PLAIN.toString()); - JSONArray measureGroup1 = new JSONArray(); - measureGroup1.put(s1); - measureGroup1.put(s2); - measureGroup1.put(s3); - measureGroup1.put(s4); - measureGroup1.put(s5); - - JSONObject jsonSchema = new JSONObject(); - jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); - jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1); - return jsonSchema; + } + String str = in.nextLine(); + TSRecord record = RecordUtils.parseSimpleTupleRecord(str, schema); + innerWriter.write(record); + lineCount++; } + endTime = System.currentTimeMillis(); + LOG.info("write line:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + innerWriter.close(); + endTime = System.currentTimeMillis(); + LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000); + LOG.info("src file size:{}GB", FileUtils.getLocalFileByte(inputDataFile, Unit.GB)); + LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, Unit.MB)); + } + + private static JSONObject generateTestData() { + TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); + JSONObject s1 = new JSONObject(); + s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); + s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s2 = new JSONObject(); + s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); + s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s3 = new JSONObject(); + s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); + s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + JSONObject s4 = new JSONObject(); + s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); + s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString()); + s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.PLAIN.toString()); + JSONObject s5 = new JSONObject(); + s5.put(JsonFormatConstant.MEASUREMENT_UID, "s5"); + s5.put(JsonFormatConstant.DATA_TYPE, TSDataType.ENUMS.toString()); + s5.put(JsonFormatConstant.MEASUREMENT_ENCODING, TSEncoding.PLAIN.toString()); + JSONArray measureGroup1 = new JSONArray(); + measureGroup1.put(s1); + measureGroup1.put(s2); + measureGroup1.put(s3); + measureGroup1.put(s4); + measureGroup1.put(s5); + + JSONObject jsonSchema = new JSONObject(); + jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); + jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1); + return jsonSchema; + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/ReadPageInMemTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/ReadPageInMemTest.java index 35c8810f..e6f9048d 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/ReadPageInMemTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/ReadPageInMemTest.java @@ -2,18 +2,15 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; - import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.util.List; - import org.json.JSONArray; import org.json.JSONObject; 
import org.junit.After; import org.junit.Before; import org.junit.Test; - import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; @@ -28,153 +25,154 @@ public class ReadPageInMemTest { - private String filePath = "TsFileReadPageInMem"; - private File file = new File(filePath); - private TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); - private TsFileWriter innerWriter; - private FileSchema fileSchema = null; - - private int pageSize; - private int RowGroupSize; - private int pageCheckSizeThreshold; - private int defaultMaxStringLength; - @Before - public void setUp() throws Exception { - file.delete(); - pageSize = conf.pageSizeInByte; - conf.pageSizeInByte = 200; - RowGroupSize = conf.groupSizeInByte; - conf.groupSizeInByte = 100000; - pageCheckSizeThreshold = conf.pageCheckSizeThreshold; - conf.pageCheckSizeThreshold = 1; - defaultMaxStringLength = conf.maxStringLength; - conf.maxStringLength = 2; - fileSchema = new FileSchema(getJsonSchema()); - innerWriter = new TsFileWriter(new File(filePath), fileSchema, conf); - } - - @After - public void tearDown() throws Exception { - file.delete(); - conf.pageSizeInByte = pageSize; - conf.groupSizeInByte = RowGroupSize; - conf.pageCheckSizeThreshold = pageCheckSizeThreshold; - conf.maxStringLength = defaultMaxStringLength; - } - - @Test - public void OneDeltaObjectTest() { - String line = ""; - for (int i = 1; i <= 3; i++) { - line = "root.car.d1," + i + ",s1,1,s2,1,s3,0.1,s4,0.1"; - TSRecord record = RecordUtils.parseSimpleTupleRecord(line, fileSchema); - try { - innerWriter.write(record); - } catch (IOException | WriteProcessException e) { - e.printStackTrace(); - fail(e.getMessage()); - } - } - for (int i = 4; i < 100; i++) { - line = "root.car.d1," + i + ",s1,1,s2,1,s3,0.1,s4,0.1"; - TSRecord record = RecordUtils.parseSimpleTupleRecord(line, fileSchema); - try { - innerWriter.write(record); - } catch (IOException | WriteProcessException e) { - e.printStackTrace(); - fail(e.getMessage()); - } - } - try { - innerWriter.close(); - } catch (IOException e) { - e.printStackTrace(); - fail(e.getMessage()); - } - } - - @Test - public void MultiDeltaObjectTest() throws IOException { - - String line = ""; - for (int i = 1; i <= 3; i++) { - line = "root.car.d1," + i + ",s1,1,s2,1,s3,0.1,s4,0.1"; - TSRecord record = RecordUtils.parseSimpleTupleRecord(line, fileSchema); - try { - innerWriter.write(record); - } catch (IOException | WriteProcessException e) { - e.printStackTrace(); - fail(e.getMessage()); - } - } - for (int i = 1; i <= 3; i++) { - line = "root.car.d2," + i + ",s1,1,s2,1,s3,0.1,s4,0.1"; - TSRecord record = RecordUtils.parseSimpleTupleRecord(line, fileSchema); - try { - innerWriter.write(record); - } catch (IOException | WriteProcessException e) { - e.printStackTrace(); - fail(e.getMessage()); - } - } - - for (int i = 4; i < 100; i++) { - line = "root.car.d1," + i + ",s1,1,s2,1,s3,0.1,s4,0.1"; - TSRecord record = RecordUtils.parseSimpleTupleRecord(line, fileSchema); - try { - innerWriter.write(record); - } catch (IOException | WriteProcessException e) { - e.printStackTrace(); - fail(e.getMessage()); - } - } - - for (int i = 4; i < 100; i++) { - line = "root.car.d2," + i + ",s1,1,s2,1,s3,0.1,s4,0.1"; - TSRecord record = RecordUtils.parseSimpleTupleRecord(line, fileSchema); - try { - innerWriter.write(record); - } catch (IOException | WriteProcessException e) { - e.printStackTrace(); - 
fail(e.getMessage()); - } - } - - innerWriter.close(); - } - - private static JSONObject getJsonSchema() { - - TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); - JSONObject s1 = new JSONObject(); - s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); - s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); - s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); - - JSONObject s2 = new JSONObject(); - s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); - s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); - s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); - - JSONObject s3 = new JSONObject(); - s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); - s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); - s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); - - JSONObject s4 = new JSONObject(); - s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); - s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); - s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); - - JSONArray measureGroup = new JSONArray(); - measureGroup.put(s1); - measureGroup.put(s2); - measureGroup.put(s3); - measureGroup.put(s4); - - JSONObject jsonSchema = new JSONObject(); - jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); - jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup); - return jsonSchema; - } + private String filePath = "TsFileReadPageInMem"; + private File file = new File(filePath); + private TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); + private TsFileWriter innerWriter; + private FileSchema fileSchema = null; + + private int pageSize; + private int RowGroupSize; + private int pageCheckSizeThreshold; + private int defaultMaxStringLength; + + @Before + public void setUp() throws Exception { + file.delete(); + pageSize = conf.pageSizeInByte; + conf.pageSizeInByte = 200; + RowGroupSize = conf.groupSizeInByte; + conf.groupSizeInByte = 100000; + pageCheckSizeThreshold = conf.pageCheckSizeThreshold; + conf.pageCheckSizeThreshold = 1; + defaultMaxStringLength = conf.maxStringLength; + conf.maxStringLength = 2; + fileSchema = new FileSchema(getJsonSchema()); + innerWriter = new TsFileWriter(new File(filePath), fileSchema, conf); + } + + @After + public void tearDown() throws Exception { + file.delete(); + conf.pageSizeInByte = pageSize; + conf.groupSizeInByte = RowGroupSize; + conf.pageCheckSizeThreshold = pageCheckSizeThreshold; + conf.maxStringLength = defaultMaxStringLength; + } + + @Test + public void OneDeltaObjectTest() { + String line = ""; + for (int i = 1; i <= 3; i++) { + line = "root.car.d1," + i + ",s1,1,s2,1,s3,0.1,s4,0.1"; + TSRecord record = RecordUtils.parseSimpleTupleRecord(line, fileSchema); + try { + innerWriter.write(record); + } catch (IOException | WriteProcessException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } + for (int i = 4; i < 100; i++) { + line = "root.car.d1," + i + ",s1,1,s2,1,s3,0.1,s4,0.1"; + TSRecord record = RecordUtils.parseSimpleTupleRecord(line, fileSchema); + try { + innerWriter.write(record); + } catch (IOException | WriteProcessException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } + try { + innerWriter.close(); + } catch (IOException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } + + @Test + public void MultiDeltaObjectTest() throws IOException { + + String line = ""; + for (int i = 1; i <= 3; i++) { + line = "root.car.d1," + i + ",s1,1,s2,1,s3,0.1,s4,0.1"; + TSRecord record = 
RecordUtils.parseSimpleTupleRecord(line, fileSchema); + try { + innerWriter.write(record); + } catch (IOException | WriteProcessException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } + for (int i = 1; i <= 3; i++) { + line = "root.car.d2," + i + ",s1,1,s2,1,s3,0.1,s4,0.1"; + TSRecord record = RecordUtils.parseSimpleTupleRecord(line, fileSchema); + try { + innerWriter.write(record); + } catch (IOException | WriteProcessException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } + + for (int i = 4; i < 100; i++) { + line = "root.car.d1," + i + ",s1,1,s2,1,s3,0.1,s4,0.1"; + TSRecord record = RecordUtils.parseSimpleTupleRecord(line, fileSchema); + try { + innerWriter.write(record); + } catch (IOException | WriteProcessException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } + + for (int i = 4; i < 100; i++) { + line = "root.car.d2," + i + ",s1,1,s2,1,s3,0.1,s4,0.1"; + TSRecord record = RecordUtils.parseSimpleTupleRecord(line, fileSchema); + try { + innerWriter.write(record); + } catch (IOException | WriteProcessException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } + + innerWriter.close(); + } + + private static JSONObject getJsonSchema() { + + TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); + JSONObject s1 = new JSONObject(); + s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1"); + s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString()); + s1.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + + JSONObject s2 = new JSONObject(); + s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2"); + s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString()); + s2.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + + JSONObject s3 = new JSONObject(); + s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3"); + s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString()); + s3.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + + JSONObject s4 = new JSONObject(); + s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4"); + s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString()); + s4.put(JsonFormatConstant.MEASUREMENT_ENCODING, conf.valueEncoder); + + JSONArray measureGroup = new JSONArray(); + measureGroup.put(s1); + measureGroup.put(s2); + measureGroup.put(s3); + measureGroup.put(s4); + + JSONObject jsonSchema = new JSONObject(); + jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type"); + jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup); + return jsonSchema; + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/WriteTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/WriteTest.java index 5b41ef9f..b5e1ced4 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/WriteTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/WriteTest.java @@ -2,13 +2,11 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; - import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.util.Random; - import org.json.JSONArray; import org.json.JSONObject; import org.json.JSONTokener; @@ -18,7 +16,6 @@ import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig; import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor; import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; @@ -37,194 +34,190 @@ * @author kangrong */ public class WriteTest { - private static 
final Logger LOG = LoggerFactory.getLogger(WriteTest.class); - private final int ROW_COUNT = 20; - private TsFileWriter tsFileWriter; - private String inputDataFile; - private String outputDataFile; - private String errorOutputDataFile; - private String schemaFile; - private Random rm = new Random(); - private FileSchema schema; - private int stageSize = 4; - private int stageState = -1; - private int prePageSize; - private int prePageCheckThres; - private TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); - private JSONArray measurementArray; - - @Before - public void prepare() throws IOException, WriteProcessException { - inputDataFile = "src/test/resources/writeTestInputData"; - outputDataFile = "src/test/resources/writeTestOutputData.ksn"; - errorOutputDataFile = "src/test/resources/writeTestErrorOutputData.ksn"; - schemaFile = "src/test/resources/test_write_schema.json"; - // for each row, flush page forcely - prePageSize = conf.pageSizeInByte; - conf.pageSizeInByte = 0; - prePageCheckThres = conf.pageCheckSizeThreshold; - conf.pageCheckSizeThreshold = 0; - - try { - generateSampleInputDataFile(); - } catch (IOException e) { - fail(); - } - File file = new File(outputDataFile); - File errorFile = new File(errorOutputDataFile); - if (file.exists()) - file.delete(); - if (errorFile.exists()) - errorFile.delete(); - JSONObject emptySchema = new JSONObject( - "{\"delta_type\": \"test_type\",\"properties\": {\n" + - "\"key1\": \"value1\",\n" + - "\"key2\": \"value2\"\n" + - "},\"schema\": [],}"); - measurementArray = - new JSONObject(new JSONTokener(new FileReader(new File(schemaFile)))).getJSONArray(JsonFormatConstant - .JSON_SCHEMA); - schema = new FileSchema(emptySchema); - LOG.info(schema.toString()); - tsFileWriter = new TsFileWriter(file, schema, conf); + private static final Logger LOG = LoggerFactory.getLogger(WriteTest.class); + private final int ROW_COUNT = 20; + private TsFileWriter tsFileWriter; + private String inputDataFile; + private String outputDataFile; + private String errorOutputDataFile; + private String schemaFile; + private Random rm = new Random(); + private FileSchema schema; + private int stageSize = 4; + private int stageState = -1; + private int prePageSize; + private int prePageCheckThres; + private TSFileConfig conf = TSFileDescriptor.getInstance().getConfig(); + private JSONArray measurementArray; + + @Before + public void prepare() throws IOException, WriteProcessException { + inputDataFile = "src/test/resources/writeTestInputData"; + outputDataFile = "src/test/resources/writeTestOutputData.ksn"; + errorOutputDataFile = "src/test/resources/writeTestErrorOutputData.ksn"; + schemaFile = "src/test/resources/test_write_schema.json"; + // for each row, flush page forcely + prePageSize = conf.pageSizeInByte; + conf.pageSizeInByte = 0; + prePageCheckThres = conf.pageCheckSizeThreshold; + conf.pageCheckSizeThreshold = 0; + + try { + generateSampleInputDataFile(); + } catch (IOException e) { + fail(); } - - @After - public void after() { - File file = new File(inputDataFile); - if (file.exists()) - file.delete(); - file = new File(outputDataFile); - if (file.exists()) - file.delete(); - file = new File(errorOutputDataFile); - if (file.exists()) - file.delete(); + File file = new File(outputDataFile); + File errorFile = new File(errorOutputDataFile); + if (file.exists()) + file.delete(); + if (errorFile.exists()) + errorFile.delete(); + JSONObject emptySchema = new JSONObject("{\"delta_type\": \"test_type\",\"properties\": {\n" + + "\"key1\": 
\"value1\",\n" + "\"key2\": \"value2\"\n" + "},\"schema\": [],}"); + measurementArray = new JSONObject(new JSONTokener(new FileReader(new File(schemaFile)))) + .getJSONArray(JsonFormatConstant.JSON_SCHEMA); + schema = new FileSchema(emptySchema); + LOG.info(schema.toString()); + tsFileWriter = new TsFileWriter(file, schema, conf); + } + + @After + public void after() { + File file = new File(inputDataFile); + if (file.exists()) + file.delete(); + file = new File(outputDataFile); + if (file.exists()) + file.delete(); + file = new File(errorOutputDataFile); + if (file.exists()) + file.delete(); + } + + @After + public void end() { + conf.pageSizeInByte = prePageSize; + conf.pageCheckSizeThreshold = prePageCheckThres; + } + + private void generateSampleInputDataFile() throws IOException { + File file = new File(inputDataFile); + if (file.exists()) + file.delete(); + FileWriter fw = new FileWriter(file); + + long startTime = System.currentTimeMillis(); + startTime = startTime - startTime % 1000; + + // first stage:int, long, float, double, boolean, enums + for (int i = 0; i < ROW_COUNT; i++) { + // write d1 + String d1 = "d1," + (startTime + i) + ",s1," + (i * 10 + 1) + ",s2," + (i * 10 + 2); + if (rm.nextInt(1000) < 100) { + d1 = "d1," + (startTime + i) + ",s1,,s2," + (i * 10 + 2) + ",s4,HIGH"; + } + if (i % 5 == 0) + d1 += ",s3," + (i * 10 + 3); + fw.write(d1 + "\r\n"); + + // write d2 + String d2 = "d2," + (startTime + i) + ",s2," + (i * 10 + 2) + ",s3," + (i * 10 + 3); + if (rm.nextInt(1000) < 100) { + d2 = "d2," + (startTime + i) + ",s2,,s3," + (i * 10 + 3) + ",s5,MAN"; + } + if (i % 5 == 0) + d2 += ",s1," + (i * 10 + 1); + fw.write(d2 + "\r\n"); } - - @After - public void end() { - conf.pageSizeInByte = prePageSize; - conf.pageCheckSizeThreshold = prePageCheckThres; + // write error + String d = "d2,3," + (startTime + ROW_COUNT) + ",s2," + (ROW_COUNT * 10 + 2) + ",s3," + + (ROW_COUNT * 10 + 3); + fw.write(d + "\r\n"); + d = "d2," + (startTime + ROW_COUNT + 1) + ",2,s-1," + (ROW_COUNT * 10 + 2); + fw.write(d + "\r\n"); + fw.close(); + } + + @Test + public void writeTest() throws IOException, InterruptedException { + try { + write(); + } catch (WriteProcessException e) { + e.printStackTrace(); } - - private void generateSampleInputDataFile() throws IOException { - File file = new File(inputDataFile); - if (file.exists()) - file.delete(); - FileWriter fw = new FileWriter(file); - - long startTime = System.currentTimeMillis(); - startTime = startTime - startTime % 1000; - - // first stage:int, long, float, double, boolean, enums - for (int i = 0; i < ROW_COUNT; i++) { - // write d1 - String d1 = "d1," + (startTime + i) + ",s1," + (i * 10 + 1) + ",s2," + (i * 10 + 2); - if (rm.nextInt(1000) < 100) { - d1 = "d1," + (startTime + i) + ",s1,,s2," + (i * 10 + 2) + ",s4,HIGH"; - } - if (i % 5 == 0) - d1 += ",s3," + (i * 10 + 3); - fw.write(d1 + "\r\n"); - - // write d2 - String d2 = "d2," + (startTime + i) + ",s2," + (i * 10 + 2) + ",s3," + (i * 10 + 3); - if (rm.nextInt(1000) < 100) { - d2 = "d2," + (startTime + i) + ",s2,,s3," + (i * 10 + 3) + ",s5,MAN"; - } - if (i % 5 == 0) - d2 += ",s1," + (i * 10 + 1); - fw.write(d2 + "\r\n"); - } - // write error - String d = - "d2,3," + (startTime + ROW_COUNT) + ",s2," + (ROW_COUNT * 10 + 2) + ",s3," - + (ROW_COUNT * 10 + 3); - fw.write(d + "\r\n"); - d = "d2," + (startTime + ROW_COUNT + 1) + ",2,s-1," + (ROW_COUNT * 10 + 2); - fw.write(d + "\r\n"); - fw.close(); + LOG.info("write processing has finished"); + + TsRandomAccessLocalFileReader input = new 
TsRandomAccessLocalFileReader(outputDataFile); + TsFile readTsFile = new TsFile(input); + String value1 = readTsFile.getProp("key1"); + Assert.assertEquals("value1", value1); + String value2 = readTsFile.getProp("key2"); + Assert.assertEquals("value2", value2); + } + + public void write() throws IOException, WriteProcessException { + long lineCount = 0; + long startTime = System.currentTimeMillis(); + String[] strings; + // add all measurement except the last one at before writing + for (int i = 0; i < measurementArray.length() - 1; i++) { + tsFileWriter.addMeasurementByJson((JSONObject) measurementArray.get(i)); } - - @Test - public void writeTest() throws IOException, InterruptedException { - try { - write(); - } catch (WriteProcessException e) { - e.printStackTrace(); - } - LOG.info("write processing has finished"); - - TsRandomAccessLocalFileReader input = new TsRandomAccessLocalFileReader(outputDataFile); - TsFile readTsFile = new TsFile(input); - String value1 = readTsFile.getProp("key1"); - Assert.assertEquals("value1", value1); - String value2 = readTsFile.getProp("key2"); - Assert.assertEquals("value2", value2); + while (true) { + if (lineCount % stageSize == 0) { + LOG.info("write line:{},use time:{}s", lineCount, + (System.currentTimeMillis() - startTime) / 1000); + stageState++; + LOG.info("stage:" + stageState); + if (stageState == stageDeltaObjectIds.length) + break; + } + if (lineCount == ROW_COUNT / 2) + tsFileWriter + .addMeasurementByJson((JSONObject) measurementArray.get(measurementArray.length() - 1)); + strings = getNextRecord(lineCount, stageState); + for (String str : strings) { + TSRecord record = RecordUtils.parseSimpleTupleRecord(str, schema); + System.out.println(str); + tsFileWriter.write(record); + } + lineCount++; } - - public void write() throws IOException, WriteProcessException { - long lineCount = 0; - long startTime = System.currentTimeMillis(); - String[] strings; - //add all measurement except the last one at before writing - for (int i = 0; i < measurementArray.length() - 1; i++) { - tsFileWriter.addMeasurementByJson((JSONObject) measurementArray.get(i)); - } - while (true) { - if (lineCount % stageSize == 0) { - LOG.info("write line:{},use time:{}s", lineCount, - (System.currentTimeMillis() - startTime) / 1000); - stageState++; - LOG.info("stage:" + stageState); - if (stageState == stageDeltaObjectIds.length) - break; - } - if (lineCount == ROW_COUNT / 2) - tsFileWriter.addMeasurementByJson((JSONObject) measurementArray.get(measurementArray.length() - 1)); - strings = getNextRecord(lineCount, stageState); - for (String str : strings) { - TSRecord record = RecordUtils.parseSimpleTupleRecord(str, schema); - System.out.println(str); - tsFileWriter.write(record); - } - lineCount++; - } - //test duplicate measurement adding - JSONObject dupMeasure = (JSONObject) measurementArray.get(measurementArray.length() - 1); - try { - tsFileWriter.addMeasurementByJson(dupMeasure); - }catch (WriteProcessException e){ - assertEquals("given measurement has exists! 
"+ - dupMeasure.getString(JsonFormatConstant.MEASUREMENT_UID), e.getMessage()); - } - try { - tsFileWriter.close(); - } catch (IOException e) { - fail("close writer failed"); - } - LOG.info("stage size: {}, write {} group data", stageSize, lineCount); + // test duplicate measurement adding + JSONObject dupMeasure = (JSONObject) measurementArray.get(measurementArray.length() - 1); + try { + tsFileWriter.addMeasurementByJson(dupMeasure); + } catch (WriteProcessException e) { + assertEquals("given measurement has exists! " + + dupMeasure.getString(JsonFormatConstant.MEASUREMENT_UID), e.getMessage()); } - - private String[][] stageDeltaObjectIds = {{"d1", "d2", "d3"}, {"d1"}, {"d2", "d3"}}; - private String[] measurementIds = {"s0", "s1", "s2", "s3", "s4", "s5"}; - private long longBase = System.currentTimeMillis() * 1000; - private String[] enums = {"MAN", "WOMAN"}; - - private String[] getNextRecord(long lineCount, int stage) { - - String[] ret = new String[stageDeltaObjectIds[stage].length]; - for (int i = 0; i < ret.length; i++) { - StringContainer sc = new StringContainer(JsonFormatConstant.TSRECORD_SEPARATOR); - sc.addTail(stageDeltaObjectIds[stage][i], lineCount); - sc.addTail(measurementIds[0], lineCount * 10 + i, measurementIds[1], longBase - + lineCount * 20 + i, measurementIds[2], (lineCount * 30 + i) / 3.0, - measurementIds[3], (longBase + lineCount * 40 + i) / 7.0); - sc.addTail(measurementIds[4], ((lineCount + i) & 1) == 0); - sc.addTail(measurementIds[5], enums[(int) (lineCount + i) % enums.length]); - ret[i] = sc.toString(); - } - return ret; + try { + tsFileWriter.close(); + } catch (IOException e) { + fail("close writer failed"); + } + LOG.info("stage size: {}, write {} group data", stageSize, lineCount); + } + + private String[][] stageDeltaObjectIds = {{"d1", "d2", "d3"}, {"d1"}, {"d2", "d3"}}; + private String[] measurementIds = {"s0", "s1", "s2", "s3", "s4", "s5"}; + private long longBase = System.currentTimeMillis() * 1000; + private String[] enums = {"MAN", "WOMAN"}; + + private String[] getNextRecord(long lineCount, int stage) { + + String[] ret = new String[stageDeltaObjectIds[stage].length]; + for (int i = 0; i < ret.length; i++) { + StringContainer sc = new StringContainer(JsonFormatConstant.TSRECORD_SEPARATOR); + sc.addTail(stageDeltaObjectIds[stage][i], lineCount); + sc.addTail(measurementIds[0], lineCount * 10 + i, measurementIds[1], + longBase + lineCount * 20 + i, measurementIds[2], (lineCount * 30 + i) / 3.0, + measurementIds[3], (longBase + lineCount * 40 + i) / 7.0); + sc.addTail(measurementIds[4], ((lineCount + i) & 1) == 0); + sc.addTail(measurementIds[5], enums[(int) (lineCount + i) % enums.length]); + ret[i] = sc.toString(); } + return ret; + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/JsonConverterTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/JsonConverterTest.java index b8f172e1..90ea4699 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/JsonConverterTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/JsonConverterTest.java @@ -1,7 +1,6 @@ package cn.edu.tsinghua.tsfile.timeseries.write.schema.converter; import static org.junit.Assert.*; - import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; @@ -11,7 +10,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; import 
cn.edu.tsinghua.tsfile.timeseries.write.exception.InvalidJsonSchemaException; import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException; @@ -20,7 +18,6 @@ import org.json.JSONObject; import org.json.JSONTokener; import org.junit.Test; - import cn.edu.tsinghua.tsfile.file.metadata.TimeSeriesMetadata; import cn.edu.tsinghua.tsfile.timeseries.write.desc.MeasurementDescriptor; import cn.edu.tsinghua.tsfile.timeseries.write.schema.FileSchema; @@ -30,102 +27,101 @@ */ public class JsonConverterTest { - @Test - public void testJsonConverter() throws WriteProcessException { - String path = "src/test/resources/test_schema.json"; - JSONObject obj = null; - try { - obj = new JSONObject(new JSONTokener(new FileReader(new File(path)))); - } catch (JSONException | FileNotFoundException e) { - e.printStackTrace(); - fail(); - } - - FileSchema fileSchema = new FileSchema(obj); - Collection measurements = fileSchema.getDescriptor().values(); - String[] measureDesStrings = - { - "[,s3,ENUMS,BITMAP,,SNAPPY,[MAN, WOMAN],]", - "[,s4,DOUBLE,RLE,max_point_number:2,UNCOMPRESSED,]", - "[,s5,INT32,TS_2DIFF,max_point_number:2,UNCOMPRESSED,]", - "[,s1,INT32,RLE,max_point_number:2,UNCOMPRESSED,]", - "[,s2,INT64,TS_2DIFF,max_point_number:2,UNCOMPRESSED,]", - - }; - int i = 0; - for (MeasurementDescriptor desc : measurements) { - assertEquals(measureDesStrings[i++], desc.toString()); - } + @Test + public void testJsonConverter() throws WriteProcessException { + String path = "src/test/resources/test_schema.json"; + JSONObject obj = null; + try { + obj = new JSONObject(new JSONTokener(new FileReader(new File(path)))); + } catch (JSONException | FileNotFoundException e) { + e.printStackTrace(); + fail(); + } - List tsMetadataList = fileSchema.getTimeSeriesMetadatas(); - String[] tsMetadatas = - { - "TimeSeriesMetadata: measurementUID s1, type length 0, DataType INT32, FreqType null,frequencies null", - "TimeSeriesMetadata: measurementUID s2, type length 0, DataType INT64, FreqType null,frequencies null", - "TimeSeriesMetadata: measurementUID s3, type length 0, DataType ENUMS, FreqType null,frequencies null", - "TimeSeriesMetadata: measurementUID s4, type length 0, DataType DOUBLE, FreqType null,frequencies null", - "TimeSeriesMetadata: measurementUID s5, type length 0, DataType INT32, FreqType null,frequencies null", - }; - Collections.sort(tsMetadataList, (x,y)->x.getMeasurementUID().compareTo(y.getMeasurementUID())); - Arrays.sort(tsMetadatas, (x,y)->x.compareTo(y)); - for (int j = 0; j < tsMetadataList.size(); j++) { - assertEquals(tsMetadatas[j], tsMetadataList.get(j).toString()); - } + FileSchema fileSchema = new FileSchema(obj); + Collection measurements = fileSchema.getDescriptor().values(); + String[] measureDesStrings = {"[,s3,ENUMS,BITMAP,,SNAPPY,[MAN, WOMAN],]", + "[,s4,DOUBLE,RLE,max_point_number:2,UNCOMPRESSED,]", + "[,s5,INT32,TS_2DIFF,max_point_number:2,UNCOMPRESSED,]", + "[,s1,INT32,RLE,max_point_number:2,UNCOMPRESSED,]", + "[,s2,INT64,TS_2DIFF,max_point_number:2,UNCOMPRESSED,]", + }; + int i = 0; + for (MeasurementDescriptor desc : measurements) { + assertEquals(measureDesStrings[i++], desc.toString()); } - @Test - public void testConvertInJsonAndFileSchema() throws InvalidJsonSchemaException { - String path = "src/test/resources/test_schema.json"; - JSONObject srcObj = null; - try { - srcObj = new JSONObject(new JSONTokener(new FileReader(new File(path)))); - } catch (JSONException | FileNotFoundException e) { - e.printStackTrace(); - fail(); - } - FileSchema fileSchema = new 
FileSchema(srcObj); - JSONObject descObj = JsonConverter.converterFileSchemaToJson(fileSchema); - //check schema - assertTrue(descObj.has(JsonFormatConstant.JSON_SCHEMA)); - JSONArray srcSchemaArray = srcObj.getJSONArray(JsonFormatConstant.JSON_SCHEMA); - JSONArray descSchemaArray = descObj.getJSONArray(JsonFormatConstant.JSON_SCHEMA); - assertEquals(srcSchemaArray.length(), descSchemaArray.length()); - Map descSchemaMap = new HashMap<>(); - for (int i = 0; i < descSchemaArray.length(); i++) { - JSONObject descMeasureObj = descSchemaArray.getJSONObject(i); - assertTrue(descMeasureObj.has(JsonFormatConstant.MEASUREMENT_UID)); - descSchemaMap.put(descMeasureObj.getString(JsonFormatConstant.MEASUREMENT_UID), descMeasureObj); - } - for (int i = 0; i < srcSchemaArray.length(); i++) { - JSONObject srcMeasureObj = srcSchemaArray.getJSONObject(i); - assertTrue(srcMeasureObj.has(JsonFormatConstant.MEASUREMENT_UID)); - String measureUID = srcMeasureObj.getString(JsonFormatConstant.MEASUREMENT_UID); - assertTrue(descSchemaMap.containsKey(measureUID)); - checkJsonObjectEqual(srcMeasureObj, descSchemaMap.get(measureUID)); - } - //check properties - if(srcObj.has(JsonFormatConstant.PROPERTIES)){ - assertTrue(descObj.has(JsonFormatConstant.PROPERTIES)); - JSONObject srcProps = srcObj.getJSONObject(JsonFormatConstant.PROPERTIES); - JSONObject descProps = descObj.getJSONObject(JsonFormatConstant.PROPERTIES); - checkJsonObjectEqual(srcProps, descProps); - } + List tsMetadataList = fileSchema.getTimeSeriesMetadatas(); + String[] tsMetadatas = { + "TimeSeriesMetadata: measurementUID s1, type length 0, DataType INT32, FreqType null,frequencies null", + "TimeSeriesMetadata: measurementUID s2, type length 0, DataType INT64, FreqType null,frequencies null", + "TimeSeriesMetadata: measurementUID s3, type length 0, DataType ENUMS, FreqType null,frequencies null", + "TimeSeriesMetadata: measurementUID s4, type length 0, DataType DOUBLE, FreqType null,frequencies null", + "TimeSeriesMetadata: measurementUID s5, type length 0, DataType INT32, FreqType null,frequencies null",}; + Collections.sort(tsMetadataList, + (x, y) -> x.getMeasurementUID().compareTo(y.getMeasurementUID())); + Arrays.sort(tsMetadatas, (x, y) -> x.compareTo(y)); + for (int j = 0; j < tsMetadataList.size(); j++) { + assertEquals(tsMetadatas[j], tsMetadataList.get(j).toString()); } - /** - * check whether two given JSONObjects are equal. 
- * @param obj1 the first JSONObject - * @param obj2 the second JSONObject - */ - private void checkJsonObjectEqual(JSONObject obj1, JSONObject obj2){ - assertEquals(obj1.keySet().size(), obj2.keySet().size()); - obj1.keySet().forEach(k->{ - String key = (String) k; - assertTrue(obj2.has(key)); - assertTrue(obj2.has(key)); - assertEquals(obj1.get((String) k).toString(), obj2.get((String) k).toString()); - }); + } + + @Test + public void testConvertInJsonAndFileSchema() throws InvalidJsonSchemaException { + String path = "src/test/resources/test_schema.json"; + JSONObject srcObj = null; + try { + srcObj = new JSONObject(new JSONTokener(new FileReader(new File(path)))); + } catch (JSONException | FileNotFoundException e) { + e.printStackTrace(); + fail(); + } + FileSchema fileSchema = new FileSchema(srcObj); + JSONObject descObj = JsonConverter.converterFileSchemaToJson(fileSchema); + // check schema + assertTrue(descObj.has(JsonFormatConstant.JSON_SCHEMA)); + JSONArray srcSchemaArray = srcObj.getJSONArray(JsonFormatConstant.JSON_SCHEMA); + JSONArray descSchemaArray = descObj.getJSONArray(JsonFormatConstant.JSON_SCHEMA); + assertEquals(srcSchemaArray.length(), descSchemaArray.length()); + Map descSchemaMap = new HashMap<>(); + for (int i = 0; i < descSchemaArray.length(); i++) { + JSONObject descMeasureObj = descSchemaArray.getJSONObject(i); + assertTrue(descMeasureObj.has(JsonFormatConstant.MEASUREMENT_UID)); + descSchemaMap.put(descMeasureObj.getString(JsonFormatConstant.MEASUREMENT_UID), + descMeasureObj); + } + for (int i = 0; i < srcSchemaArray.length(); i++) { + JSONObject srcMeasureObj = srcSchemaArray.getJSONObject(i); + assertTrue(srcMeasureObj.has(JsonFormatConstant.MEASUREMENT_UID)); + String measureUID = srcMeasureObj.getString(JsonFormatConstant.MEASUREMENT_UID); + assertTrue(descSchemaMap.containsKey(measureUID)); + checkJsonObjectEqual(srcMeasureObj, descSchemaMap.get(measureUID)); } + // check properties + if (srcObj.has(JsonFormatConstant.PROPERTIES)) { + assertTrue(descObj.has(JsonFormatConstant.PROPERTIES)); + JSONObject srcProps = srcObj.getJSONObject(JsonFormatConstant.PROPERTIES); + JSONObject descProps = descObj.getJSONObject(JsonFormatConstant.PROPERTIES); + checkJsonObjectEqual(srcProps, descProps); + } + } + + /** + * check whether two given JSONObjects are equal. 
+ * + * @param obj1 the first JSONObject + * @param obj2 the second JSONObject + */ + private void checkJsonObjectEqual(JSONObject obj1, JSONObject obj2) { + assertEquals(obj1.keySet().size(), obj2.keySet().size()); + obj1.keySet().forEach(k -> { + String key = (String) k; + assertTrue(obj2.has(key)); + assertTrue(obj2.has(key)); + assertEquals(obj1.get((String) k).toString(), obj2.get((String) k).toString()); + }); + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/SchemaBuilderTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/SchemaBuilderTest.java index 51768415..4f0ba1da 100644 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/SchemaBuilderTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/SchemaBuilderTest.java @@ -9,70 +9,64 @@ import cn.edu.tsinghua.tsfile.timeseries.write.schema.FileSchema; import cn.edu.tsinghua.tsfile.timeseries.write.schema.SchemaBuilder; import org.junit.Test; - import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; - import static org.junit.Assert.assertEquals; /** * @author qiaojialin */ public class SchemaBuilderTest { - @Test - public void testJsonConverter() throws WriteProcessException { - - SchemaBuilder builder = new SchemaBuilder(); - Map props = new HashMap<>(); - props.put("enum_values", "[\"MAN\",\"WOMAN\"]"); - props.put("compressor", "SNAPPY"); - MeasurementDescriptor descriptor = new MeasurementDescriptor("s3", TSDataType.ENUMS, TSEncoding.BITMAP, props); - builder.addSeries(descriptor); - props.clear(); - props.put(JsonFormatConstant.MAX_POINT_NUMBER, "3"); - builder.addSeries("s4", TSDataType.DOUBLE, "RLE", props); - builder.addSeries("s5", TSDataType.INT32, TSEncoding.TS_2DIFF, null); - props.clear(); - props.put(JsonFormatConstant.MAX_POINT_NUMBER, "2"); - builder.setProps(props); - builder.addProp("key", "value"); - FileSchema fileSchema = builder.build(); + @Test + public void testJsonConverter() throws WriteProcessException { - assertEquals("value", fileSchema.getProp("key")); - assertEquals("{max_point_number=2, key=value}", fileSchema.getProps().toString()); + SchemaBuilder builder = new SchemaBuilder(); + Map props = new HashMap<>(); + props.put("enum_values", "[\"MAN\",\"WOMAN\"]"); + props.put("compressor", "SNAPPY"); + MeasurementDescriptor descriptor = + new MeasurementDescriptor("s3", TSDataType.ENUMS, TSEncoding.BITMAP, props); + builder.addSeries(descriptor); + props.clear(); + props.put(JsonFormatConstant.MAX_POINT_NUMBER, "3"); + builder.addSeries("s4", TSDataType.DOUBLE, "RLE", props); + builder.addSeries("s5", TSDataType.INT32, TSEncoding.TS_2DIFF, null); + props.clear(); + props.put(JsonFormatConstant.MAX_POINT_NUMBER, "2"); + builder.setProps(props); + builder.addProp("key", "value"); + FileSchema fileSchema = builder.build(); - Collection measurements = fileSchema.getDescriptor().values(); - String[] measureDesStrings = - { - "[,s3,ENUMS,BITMAP,,SNAPPY,[MAN, WOMAN],]", - "[,s4,DOUBLE,RLE,max_point_number:3,UNCOMPRESSED,]", - "[,s5,INT32,TS_2DIFF,max_point_number:2,UNCOMPRESSED,]" - }; - int i = 0; - for (MeasurementDescriptor desc : measurements) { - assertEquals(measureDesStrings[i++], desc.toString()); - } + assertEquals("value", fileSchema.getProp("key")); + assertEquals("{max_point_number=2, key=value}", fileSchema.getProps().toString()); - List tsMetadatas = 
fileSchema.getTimeSeriesMetadatas(); - String[] tsMetadataList = - { - "TimeSeriesMetadata: measurementUID s3, type length 0, DataType ENUMS, FreqType null,frequencies null", - "TimeSeriesMetadata: measurementUID s4, type length 0, DataType DOUBLE, FreqType null,frequencies null", - "TimeSeriesMetadata: measurementUID s5, type length 0, DataType INT32, FreqType null,frequencies null", - }; - Arrays.sort(tsMetadataList, (x,y)->x.compareTo(y)); - Collections.sort(tsMetadatas, (x,y)->x.getMeasurementUID().compareTo(y.getMeasurementUID())); - for (int j = 0; j < tsMetadatas.size(); j++) { - if(!tsMetadataList[j].equals(tsMetadatas.get(j).toString())) { - System.err.println(tsMetadatas.get(j).toString()); - System.err.println(tsMetadataList[j]); - } - assertEquals(tsMetadataList[j], tsMetadatas.get(j).toString()); - } + Collection measurements = fileSchema.getDescriptor().values(); + String[] measureDesStrings = {"[,s3,ENUMS,BITMAP,,SNAPPY,[MAN, WOMAN],]", + "[,s4,DOUBLE,RLE,max_point_number:3,UNCOMPRESSED,]", + "[,s5,INT32,TS_2DIFF,max_point_number:2,UNCOMPRESSED,]"}; + int i = 0; + for (MeasurementDescriptor desc : measurements) { + assertEquals(measureDesStrings[i++], desc.toString()); + } + List tsMetadatas = fileSchema.getTimeSeriesMetadatas(); + String[] tsMetadataList = { + "TimeSeriesMetadata: measurementUID s3, type length 0, DataType ENUMS, FreqType null,frequencies null", + "TimeSeriesMetadata: measurementUID s4, type length 0, DataType DOUBLE, FreqType null,frequencies null", + "TimeSeriesMetadata: measurementUID s5, type length 0, DataType INT32, FreqType null,frequencies null",}; + Arrays.sort(tsMetadataList, (x, y) -> x.compareTo(y)); + Collections.sort(tsMetadatas, (x, y) -> x.getMeasurementUID().compareTo(y.getMeasurementUID())); + for (int j = 0; j < tsMetadatas.size(); j++) { + if (!tsMetadataList[j].equals(tsMetadatas.get(j).toString())) { + System.err.println(tsMetadatas.get(j).toString()); + System.err.println(tsMetadataList[j]); + } + assertEquals(tsMetadataList[j], tsMetadatas.get(j).toString()); } + + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSDataTypeConverterTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSDataTypeConverterTest.java index 24b21de6..9ac328af 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSDataTypeConverterTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSDataTypeConverterTest.java @@ -2,11 +2,9 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; - import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; import cn.edu.tsinghua.tsfile.common.exception.metadata.MetadataArgsErrorException; import org.junit.Test; - import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; /** @@ -15,43 +13,41 @@ * */ public class TSDataTypeConverterTest { - private String noExists = "no_exists"; - private String errIntStr1 = "lqwk"; - private String[] enum_values = {"a", "s", "2", "d"}; - private String enum_values_tring = "a" + JsonFormatConstant.ENUM_VALUES_SEPARATOR + "s" + JsonFormatConstant.ENUM_VALUES_SEPARATOR - + "2" + JsonFormatConstant.ENUM_VALUES_SEPARATOR + "d"; + private String noExists = "no_exists"; + private String errIntStr1 = "lqwk"; + private String[] enum_values = {"a", "s", "2", "d"}; + private String enum_values_tring = "a" + JsonFormatConstant.ENUM_VALUES_SEPARATOR + "s" + + JsonFormatConstant.ENUM_VALUES_SEPARATOR + "2" + 
JsonFormatConstant.ENUM_VALUES_SEPARATOR + + "d"; - @Test - public void testCheckParameterNoParameter() { - TSDataType type = TSDataType.BIGDECIMAL; - try { - assertEquals(null, - TSDataTypeConverter.checkParameter(type, noExists, noExists)); - } catch (Exception e) { - assertTrue(e instanceof MetadataArgsErrorException); - } + @Test + public void testCheckParameterNoParameter() { + TSDataType type = TSDataType.BIGDECIMAL; + try { + assertEquals(null, TSDataTypeConverter.checkParameter(type, noExists, noExists)); + } catch (Exception e) { + assertTrue(e instanceof MetadataArgsErrorException); } + } - @Test - public void testCheckParameterRLE() { - TSDataType type = TSDataType.ENUMS; - String[] ret = null; - try { - ret = - (String[]) TSDataTypeConverter.checkParameter(type, - JsonFormatConstant.ENUM_VALUES, enum_values_tring); - } catch (MetadataArgsErrorException e1) { - assertTrue(false); - } - for (int i = 0; i < ret.length; i++) { - assertEquals(enum_values[i], ret[i]); - } + @Test + public void testCheckParameterRLE() { + TSDataType type = TSDataType.ENUMS; + String[] ret = null; + try { + ret = (String[]) TSDataTypeConverter.checkParameter(type, JsonFormatConstant.ENUM_VALUES, + enum_values_tring); + } catch (MetadataArgsErrorException e1) { + assertTrue(false); + } + for (int i = 0; i < ret.length; i++) { + assertEquals(enum_values[i], ret[i]); + } - try { - TSDataTypeConverter.checkParameter(type, JsonFormatConstant.MAX_POINT_NUMBER, - errIntStr1); - } catch (Exception e) { - assertTrue(e instanceof MetadataArgsErrorException); - } + try { + TSDataTypeConverter.checkParameter(type, JsonFormatConstant.MAX_POINT_NUMBER, errIntStr1); + } catch (Exception e) { + assertTrue(e instanceof MetadataArgsErrorException); } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSEncodingConverterTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSEncodingConverterTest.java index 7acf6ca3..18786d88 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSEncodingConverterTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/schema/converter/TSEncodingConverterTest.java @@ -3,9 +3,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; - import org.junit.Test; - import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant; import cn.edu.tsinghua.tsfile.common.exception.metadata.MetadataArgsErrorException; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding; @@ -16,53 +14,54 @@ * */ public class TSEncodingConverterTest { - private String noExists = "no_exists"; - private String intStr1 = "1"; - private Integer int1 = 1; - private String errIntStr1 = "lqwk"; - @Test - public void testCheckParameterNoParameter() { - TSEncoding encode = TSEncoding.PLAIN; - try { - assertEquals(null, TSEncodingConverter.checkParameter(encode, noExists, noExists)); - } catch (Exception e) { - assertTrue(e instanceof MetadataArgsErrorException); - } + private String noExists = "no_exists"; + private String intStr1 = "1"; + private Integer int1 = 1; + private String errIntStr1 = "lqwk"; + + @Test + public void testCheckParameterNoParameter() { + TSEncoding encode = TSEncoding.PLAIN; + try { + assertEquals(null, TSEncodingConverter.checkParameter(encode, noExists, noExists)); + } catch (Exception e) { + assertTrue(e instanceof MetadataArgsErrorException); } + } - @Test - public void testCheckParameterRLE() { - TSEncoding 
encode = TSEncoding.RLE; - try { - assertEquals(int1, - TSEncodingConverter.checkParameter(encode, JsonFormatConstant.MAX_POINT_NUMBER, intStr1)); - } catch (MetadataArgsErrorException e1) { - assertTrue(false); - } - try { - TSEncodingConverter.checkParameter(encode, JsonFormatConstant.MAX_POINT_NUMBER, errIntStr1); - fail(); - } catch (Exception e) { - assertTrue(e instanceof MetadataArgsErrorException); - assertEquals("paramter max_point_number meets error integer format :lqwk", e.getMessage()); - } + @Test + public void testCheckParameterRLE() { + TSEncoding encode = TSEncoding.RLE; + try { + assertEquals(int1, + TSEncodingConverter.checkParameter(encode, JsonFormatConstant.MAX_POINT_NUMBER, intStr1)); + } catch (MetadataArgsErrorException e1) { + assertTrue(false); } + try { + TSEncodingConverter.checkParameter(encode, JsonFormatConstant.MAX_POINT_NUMBER, errIntStr1); + fail(); + } catch (Exception e) { + assertTrue(e instanceof MetadataArgsErrorException); + assertEquals("paramter max_point_number meets error integer format :lqwk", e.getMessage()); + } + } - @Test - public void testCheckParameterTS_2DIFF() { - TSEncoding encode = TSEncoding.TS_2DIFF; - try { - assertEquals(int1, - TSEncodingConverter.checkParameter(encode, JsonFormatConstant.MAX_POINT_NUMBER, intStr1)); - } catch (MetadataArgsErrorException e1) { - assertTrue(false); - } - try { - TSEncodingConverter.checkParameter(encode, JsonFormatConstant.MAX_POINT_NUMBER, errIntStr1); - fail(); - } catch (Exception e) { - assertTrue(e instanceof MetadataArgsErrorException); - assertEquals("paramter max_point_number meets error integer format :lqwk", e.getMessage()); - } + @Test + public void testCheckParameterTS_2DIFF() { + TSEncoding encode = TSEncoding.TS_2DIFF; + try { + assertEquals(int1, + TSEncodingConverter.checkParameter(encode, JsonFormatConstant.MAX_POINT_NUMBER, intStr1)); + } catch (MetadataArgsErrorException e1) { + assertTrue(false); + } + try { + TSEncodingConverter.checkParameter(encode, JsonFormatConstant.MAX_POINT_NUMBER, errIntStr1); + fail(); + } catch (Exception e) { + assertTrue(e instanceof MetadataArgsErrorException); + assertEquals("paramter max_point_number meets error integer format :lqwk", e.getMessage()); } + } } diff --git a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/series/ValueWriterTest.java b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/series/ValueWriterTest.java index 0ae8a5df..0e6e4a13 100755 --- a/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/series/ValueWriterTest.java +++ b/src/test/java/cn/edu/tsinghua/tsfile/timeseries/write/series/ValueWriterTest.java @@ -8,13 +8,10 @@ import cn.edu.tsinghua.tsfile.encoding.encoder.PlainEncoder; import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType; import cn.edu.tsinghua.tsfile.timeseries.constant.TimeseriesTestConstant; - import org.junit.Assert; import org.junit.Test; - import java.io.ByteArrayInputStream; import java.io.IOException; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; @@ -25,52 +22,52 @@ */ public class ValueWriterTest { - @Test - public void testNoFreq() { - ValueWriter writer = new ValueWriter(); - writer.setTimeEncoder(new PlainEncoder(EndianType.LITTLE_ENDIAN, TSDataType.INT64, 0)); - writer.setValueEncoder(new PlainEncoder(EndianType.LITTLE_ENDIAN, TSDataType.INT64, 0)); - short s1 = 12; - boolean b1 = false; - int i1 = 1; - long l1 = 123142120391L; - float f1 = 2.2f; - double d1 = 1294283.4323d; - String str1 = "I have a dream"; - int timeCount = 0; - try { - 
writer.write(timeCount++, s1); - writer.write(timeCount++, b1); - writer.write(timeCount++, i1); - writer.write(timeCount++, l1); - writer.write(timeCount++, f1); - writer.write(timeCount++, d1); - writer.write(timeCount++, new Binary(str1)); - assertEquals(101, writer.estimateMaxMemSize()); - ListByteArrayOutputStream input = writer.getBytes(); - ByteArrayInputStream in = new ByteArrayInputStream(input.toByteArray()); - writer.reset(); - assertEquals(0, writer.estimateMaxMemSize()); - int timeSize = ReadWriteStreamUtils.readUnsignedVarInt(in); - byte[] timeBytes = new byte[timeSize]; - int ret = in.read(timeBytes); - if(ret != timeBytes.length) - fail(); - ByteArrayInputStream timeInputStream = new ByteArrayInputStream(timeBytes); - PlainDecoder decoder = new PlainDecoder(EndianType.LITTLE_ENDIAN); - for (int i = 0; i < timeCount; i++) { - assertEquals(i, decoder.readLong(timeInputStream)); - } - assertEquals(s1, decoder.readShort(in)); - assertEquals(b1, decoder.readBoolean(in)); - assertEquals(i1, decoder.readInt(in)); - assertEquals(l1, decoder.readLong(in)); - Assert.assertEquals(f1, decoder.readFloat(in), TimeseriesTestConstant.float_min_delta); - assertEquals(d1, decoder.readDouble(in), TimeseriesTestConstant.double_min_delta); - assertEquals(str1, decoder.readBinary(in).getStringValue()); + @Test + public void testNoFreq() { + ValueWriter writer = new ValueWriter(); + writer.setTimeEncoder(new PlainEncoder(EndianType.LITTLE_ENDIAN, TSDataType.INT64, 0)); + writer.setValueEncoder(new PlainEncoder(EndianType.LITTLE_ENDIAN, TSDataType.INT64, 0)); + short s1 = 12; + boolean b1 = false; + int i1 = 1; + long l1 = 123142120391L; + float f1 = 2.2f; + double d1 = 1294283.4323d; + String str1 = "I have a dream"; + int timeCount = 0; + try { + writer.write(timeCount++, s1); + writer.write(timeCount++, b1); + writer.write(timeCount++, i1); + writer.write(timeCount++, l1); + writer.write(timeCount++, f1); + writer.write(timeCount++, d1); + writer.write(timeCount++, new Binary(str1)); + assertEquals(101, writer.estimateMaxMemSize()); + ListByteArrayOutputStream input = writer.getBytes(); + ByteArrayInputStream in = new ByteArrayInputStream(input.toByteArray()); + writer.reset(); + assertEquals(0, writer.estimateMaxMemSize()); + int timeSize = ReadWriteStreamUtils.readUnsignedVarInt(in); + byte[] timeBytes = new byte[timeSize]; + int ret = in.read(timeBytes); + if (ret != timeBytes.length) + fail(); + ByteArrayInputStream timeInputStream = new ByteArrayInputStream(timeBytes); + PlainDecoder decoder = new PlainDecoder(EndianType.LITTLE_ENDIAN); + for (int i = 0; i < timeCount; i++) { + assertEquals(i, decoder.readLong(timeInputStream)); + } + assertEquals(s1, decoder.readShort(in)); + assertEquals(b1, decoder.readBoolean(in)); + assertEquals(i1, decoder.readInt(in)); + assertEquals(l1, decoder.readLong(in)); + Assert.assertEquals(f1, decoder.readFloat(in), TimeseriesTestConstant.float_min_delta); + assertEquals(d1, decoder.readDouble(in), TimeseriesTestConstant.double_min_delta); + assertEquals(str1, decoder.readBinary(in).getStringValue()); - } catch (IOException e) { - fail(); - } + } catch (IOException e) { + fail(); } + } }
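// Reviewer's note, not part of the patch: the reformatted ValueWriterTest above already performs a
// full PlainEncoder/PlainDecoder round trip. As a minimal, hedged sketch of the same write path,
// built only from calls that appear in this diff (the only step not taken verbatim from the test is
// skipping the time column with InputStream.skip instead of copying it into a byte array):
//
//   ValueWriter writer = new ValueWriter();
//   writer.setTimeEncoder(new PlainEncoder(EndianType.LITTLE_ENDIAN, TSDataType.INT64, 0));
//   writer.setValueEncoder(new PlainEncoder(EndianType.LITTLE_ENDIAN, TSDataType.INT64, 0));
//   writer.write(0, 123142120391L);                        // one (timestamp, long value) pair
//   ListByteArrayOutputStream bytes = writer.getBytes();   // varint time-column length, time column, value column
//   ByteArrayInputStream in = new ByteArrayInputStream(bytes.toByteArray());
//   int timeSize = ReadWriteStreamUtils.readUnsignedVarInt(in);
//   in.skip(timeSize);                                     // skip the time column, then decode values
//   PlainDecoder decoder = new PlainDecoder(EndianType.LITTLE_ENDIAN);
//   assertEquals(123142120391L, decoder.readLong(in));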