
Commit f4c3c13

Fixes #7626 - Corrects stopping logic when nrows argument is supplied
Added test for #7626
1 parent 837db72 commit f4c3c13
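
For context, a minimal usage sketch of the behaviour this commit targets (an assumed example, not part of the change itself): nrows should cap the number of rows parsed even when the input is larger than the tokenizer's internal 262144-byte chunk.

# Assumed usage example; pd.read_csv with sep/nrows is standard pandas API.
from io import StringIO
import pandas as pd

data = "a\tb\tc\n" + "1\t2\t3\n" * 200000    # well over one 262144-byte chunk
df = pd.read_csv(StringIO(data), sep="\t", nrows=1010)
assert len(df) == 1010                       # only the requested rows are parsed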


2 files changed (+15, -5 lines)


pandas/io/tests/parser/common.py

Lines changed: 12 additions & 0 deletions
@@ -427,6 +427,18 @@ def test_read_nrows(self):
         with tm.assertRaisesRegexp(ValueError, msg):
             self.read_csv(StringIO(self.data1), nrows='foo')
 
+    def test_read_nrows_large(self):
+        # GH-7626 - Read only nrows of data in for large inputs (>262144b)
+        header_narrow = '\t'.join(['COL_HEADER_'+str(i) for i in range(10)]) + '\n'
+        data_narrow = '\t'.join(['somedatasomedatasomedata1' for i in range(10)]) + '\n'
+        header_wide = '\t'.join(['COL_HEADER_'+str(i) for i in range(15)]) + '\n'
+        data_wide = '\t'.join(['somedatasomedatasomedata2' for i in range(15)]) + '\n'
+        test_input = header_narrow + data_narrow*1050 + header_wide + data_wide*2
+
+        df = self.read_table(StringIO(test_input), nrows=1010)
+
+        self.assertTrue(df.size == 1010*10)
+
     def test_read_chunksize(self):
         reader = self.read_csv(StringIO(self.data1), index_col=0, chunksize=2)
         df = self.read_csv(StringIO(self.data1), index_col=0)
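
The test input is built so that the narrow block on its own exceeds 262144 bytes, forcing the tokenizer across a chunk boundary shortly before nrows=1010 is reached; the trailing wide header and rows have a different column count, so if the parser ran past the limit the result would not satisfy df.size == 1010*10.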

pandas/src/parser/tokenizer.c

Lines changed: 3 additions & 5 deletions
@@ -726,16 +726,14 @@ int skip_this_line(parser_t *self, int64_t rownum) {
     }
 }
 
-int tokenize_bytes(parser_t *self, size_t line_limit)
+int tokenize_bytes(parser_t *self, size_t line_limit, int start_lines)
 {
-    int i, slen, start_lines;
+    int i, slen;
     long maxstreamsize;
     char c;
     char *stream;
     char *buf = self->data + self->datapos;
 
-    start_lines = self->lines;
-
     if (make_stream_space(self, self->datalen - self->datapos) < 0) {
         self->error_msg = "out of memory";
         return -1;
@@ -1384,7 +1382,7 @@ int _tokenize_helper(parser_t *self, size_t nrows, int all) {
     TRACE(("_tokenize_helper: Trying to process %d bytes, datalen=%d, datapos= %d\n",
            self->datalen - self->datapos, self->datalen, self->datapos));
 
-    status = tokenize_bytes(self, nrows);
+    status = tokenize_bytes(self, nrows, start_lines);
 
     if (status < 0) {
         // XXX
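
A rough reading of the tokenizer change: previously tokenize_bytes re-initialised start_lines from self->lines at the top of every call, so the line_limit stopping check was measured against the current buffer chunk rather than the start of the whole read; passing start_lines in from _tokenize_helper keeps the count relative to the full request, so nrows is honoured across chunk boundaries. A small Python sketch of that counting idea (hypothetical names, not the pandas C source):

# Hypothetical illustration of per-chunk vs. whole-read row counting.
def tokenize_chunk(state, rows_in_chunk, line_limit, start_lines):
    # Consume rows from one buffer chunk, stopping at the overall limit.
    for _ in range(rows_in_chunk):
        if state["lines"] - start_lines >= line_limit:
            return                      # limit is relative to the whole read
        state["lines"] += 1

state = {"lines": 0}
start_lines = state["lines"]            # baseline taken once, by the caller
for chunk_rows in (4, 4, 4):            # three buffer chunks of 4 rows each
    tokenize_chunk(state, chunk_rows, line_limit=5, start_lines=start_lines)

print(state["lines"])                   # 5; with a per-chunk baseline it would be 12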
