@@ -51,14 +51,7 @@ class VRLRecordReader(cobolSchema: Copybook,
5151 private var byteIndex = startingFileOffset
5252 private var recordIndex = startRecordId - 1
5353
54- final private val copyBookRecordSize = cobolSchema.getRecordSize
55- final private val (recordLengthField, lengthFieldExpr) = ReaderParametersValidator.getEitherFieldAndExpression(readerProperties.lengthFieldExpression, readerProperties.lengthFieldMap, cobolSchema)
56- final private val lengthField = recordLengthField.map(_.field)
57- final private val lengthMap = recordLengthField.map(_.valueMap).getOrElse(Map.empty)
58- final private val isLengthMapEmpty = lengthMap.isEmpty
5954 final private val segmentIdField = ReaderParametersValidator.getSegmentIdField(readerProperties.multisegment, cobolSchema)
60- final private val recordLengthAdjustment = readerProperties.rdwAdjustment
61- final private val useRdw = lengthField.isEmpty && lengthFieldExpr.isEmpty
6255 final private val minimumRecordLength = readerProperties.minimumRecordLength
6356 final private val maximumRecordLength = readerProperties.maximumRecordLength
6457
@@ -90,13 +83,7 @@ class VRLRecordReader(cobolSchema: Copybook,
9083 None
9184 }
9285 case None =>
93- if (useRdw) {
94- fetchRecordUsingRdwHeaders()
95- } else if (lengthField.nonEmpty) {
96- fetchRecordUsingRecordLengthField()
97- } else {
98- fetchRecordUsingRecordLengthFieldExpression(lengthFieldExpr.get)
99- }
86+ fetchRecordUsingRdwHeaders()
10087 }
10188
10289 binaryData match {
@@ -117,110 +104,6 @@ class VRLRecordReader(cobolSchema: Copybook,
117104
118105 def getRecordIndex: Long = recordIndex
119106
120- private def fetchRecordUsingRecordLengthField(): Option[Array[Byte]] = {
121- if (lengthField.isEmpty) {
122- throw new IllegalStateException(s"For variable length reader either RDW record headers or record length field should be provided.")
123- }
124-
125- val lengthFieldBlock = lengthField.get.binaryProperties.offset + lengthField.get.binaryProperties.actualSize
126-
127- val binaryDataStart = dataStream.next(readerProperties.startOffset + lengthFieldBlock)
128-
129- byteIndex += readerProperties.startOffset + lengthFieldBlock
130-
131- if (binaryDataStart.length < readerProperties.startOffset + lengthFieldBlock) {
132- return None
133- }
134-
135- val recordLength = lengthField match {
136- case Some(lengthAST) => getRecordLengthFromField(lengthAST, binaryDataStart)
137- case None => copyBookRecordSize
138- }
139-
140- val restOfDataLength = recordLength - lengthFieldBlock + readerProperties.endOffset
141-
142- byteIndex += restOfDataLength
143-
144- if (restOfDataLength > 0) {
145- Some(binaryDataStart ++ dataStream.next(restOfDataLength))
146- } else {
147- Some(binaryDataStart)
148- }
149- }
150-
151- final private def getRecordLengthFromField(lengthAST: Primitive, binaryDataStart: Array[Byte]): Int = {
152- val length = if (isLengthMapEmpty) {
153- cobolSchema.extractPrimitiveField(lengthAST, binaryDataStart, readerProperties.startOffset) match {
154- case i: Int => i
155- case l: Long => l.toInt
156- case s: String => s.toInt
157- case null => throw new IllegalStateException(s"Null encountered as a record length field (offset: $byteIndex, raw value: ${getBytesAsHexString(binaryDataStart)}).")
158- case _ => throw new IllegalStateException(s"Record length value of the field ${lengthAST.name} must be an integral type.")
159- }
160- } else {
161- cobolSchema.extractPrimitiveField(lengthAST, binaryDataStart, readerProperties.startOffset) match {
162- case i: Int => getRecordLengthFromMapping(i.toString)
163- case l: Long => getRecordLengthFromMapping(l.toString)
164- case s: String => getRecordLengthFromMapping(s)
165- case null => throw new IllegalStateException(s"Null encountered as a record length field (offset: $byteIndex, raw value: ${getBytesAsHexString(binaryDataStart)}).")
166- case _ => throw new IllegalStateException(s"Record length value of the field ${lengthAST.name} must be an integral type.")
167- }
168- }
169- length + recordLengthAdjustment
170- }
171-
172- final private def getRecordLengthFromMapping(v: String): Int = {
173- lengthMap.get(v) match {
174- case Some(len) => len
175- case None => throw new IllegalStateException(s"Record length value '$v' is not mapped to a record length.")
176- }
177- }
178-
179- final private def getBytesAsHexString(bytes: Array[Byte]): String = {
180- bytes.map("%02X" format _).mkString
181- }
182-
183- private def fetchRecordUsingRecordLengthFieldExpression(expr: RecordLengthExpression): Option[Array[Byte]] = {
184- val lengthFieldBlock = expr.requiredBytesToread
185- val evaluator = expr.evaluator
186-
187- val binaryDataStart = dataStream.next(readerProperties.startOffset + lengthFieldBlock)
188-
189- byteIndex += readerProperties.startOffset + lengthFieldBlock
190-
191- if (binaryDataStart.length < readerProperties.startOffset + lengthFieldBlock) {
192- return None
193- }
194-
195- expr.fields.foreach{
196- case (name, field) =>
197- val obj = cobolSchema.extractPrimitiveField(field, binaryDataStart, readerProperties.startOffset)
198- try {
199- obj match {
200- case i: Int => evaluator.setValue(name, i)
201- case l: Long => evaluator.setValue(name, l.toInt)
202- case s: String => evaluator.setValue(name, s.toInt)
203- case _ => throw new IllegalStateException(s"Record length value of the field ${field.name} must be an integral type.")
204- }
205- } catch {
206- case ex: NumberFormatException =>
207- throw new IllegalStateException(s"Encountered an invalid value of the record length field. Cannot parse '$obj' as an integer in: ${field.name} = '$obj'.", ex)
208- }
209- }
210-
211- val recordLength = evaluator.eval()
212-
213- val restOfDataLength = recordLength - lengthFieldBlock + readerProperties.endOffset
214-
215- byteIndex += restOfDataLength
216-
217- if (restOfDataLength > 0) {
218- Some(binaryDataStart ++ dataStream.next(restOfDataLength))
219- } else {
220- Some(binaryDataStart)
221- }
222- }
223-
224107 private def fetchRecordUsingRdwHeaders(): Option[Array[Byte]] = {
225108 val rdwHeaderBlock = recordHeaderParser.getHeaderLength
226109
0 commit comments