BUG: Correct the stopping logic in the C parser when the nrows argument is supplied (Fixes #7626)

Fixed code formatting

Added test to C Parser Only suite, added whatsnew entry
This commit is contained in:
Jeff Carey 2016-11-25 00:29:12 -08:00
parent 725453deb2
commit 6f1965aaeb
4 changed files with 38 additions and 5 deletions

View File

@ -67,6 +67,7 @@ Bug Fixes
- Bug in ``pd.read_csv()`` in which the ``dtype`` parameter was not being respected for empty data (:issue:`14712`)
- Bug in ``pd.read_csv()`` in which the ``nrows`` parameter was not being respected for large input when using the C engine for parsing (:issue:`7626`)

View File

@ -371,3 +371,20 @@ No,No,No"""
result = self.read_csv(StringIO(data), names=names)
tm.assert_frame_equal(result, expected)
def test_read_nrows_large(self):
    """gh-7626: reading with nrows must stop at nrows even when the
    input is larger than a single parser buffer (>262144 bytes)."""
    # Narrow section: 10 columns, enough rows to exceed one buffer.
    narrow_header = '\t'.join(['COL_HEADER_' + str(col)
                               for col in range(10)]) + '\n'
    narrow_row = '\t'.join(['somedatasomedatasomedata1'
                            for col in range(10)]) + '\n'
    # Wide section: 15 columns; it must never be reached when nrows
    # stops inside the narrow section.
    wide_header = '\t'.join(['COL_HEADER_' + str(col)
                             for col in range(15)]) + '\n'
    wide_row = '\t'.join(['somedatasomedatasomedata2'
                          for col in range(15)]) + '\n'
    test_input = (narrow_header + narrow_row * 1050 +
                  wide_header + wide_row * 2)

    df = self.read_csv(StringIO(test_input), sep='\t', nrows=1010)

    # Exactly nrows rows of the 10-column section were parsed.
    self.assertTrue(df.size == 1010 * 10)

View File

@ -427,6 +427,23 @@ bar,foo"""
with tm.assertRaisesRegexp(ValueError, msg):
self.read_csv(StringIO(self.data1), nrows='foo')
def test_read_nrows_large(self):
    """GH-7626: nrows must be respected for inputs larger than one
    C-parser buffer (>262144 bytes)."""
    ncols_small, ncols_big = 10, 15

    # First table: small column count, repeated past the buffer size.
    head_small = '\t'.join(['COL_HEADER_' + str(i)
                            for i in range(ncols_small)]) + '\n'
    row_small = '\t'.join(['somedatasomedatasomedata1'
                           for i in range(ncols_small)]) + '\n'
    # Second table: wider rows that should be left unread.
    head_big = '\t'.join(['COL_HEADER_' + str(i)
                          for i in range(ncols_big)]) + '\n'
    row_big = '\t'.join(['somedatasomedatasomedata2'
                         for i in range(ncols_big)]) + '\n'

    test_input = (head_small + row_small * 1050 +
                  head_big + row_big * 2)
    df = self.read_csv(StringIO(test_input), sep="\t", nrows=1010)

    # All 1010 requested rows, and only those, were read.
    self.assertTrue(df.size == 1010 * ncols_small)
def test_read_chunksize(self):
reader = self.read_csv(StringIO(self.data1), index_col=0, chunksize=2)
df = self.read_csv(StringIO(self.data1), index_col=0)

View File

@ -726,16 +726,14 @@ int skip_this_line(parser_t *self, int64_t rownum) {
}
}
int tokenize_bytes(parser_t *self, size_t line_limit)
int tokenize_bytes(parser_t *self, size_t line_limit, int start_lines)
{
int i, slen, start_lines;
int i, slen;
long maxstreamsize;
char c;
char *stream;
char *buf = self->data + self->datapos;
start_lines = self->lines;
if (make_stream_space(self, self->datalen - self->datapos) < 0) {
self->error_msg = "out of memory";
return -1;
@ -1384,7 +1382,7 @@ int _tokenize_helper(parser_t *self, size_t nrows, int all) {
TRACE(("_tokenize_helper: Trying to process %d bytes, datalen=%d, datapos= %d\n",
self->datalen - self->datapos, self->datalen, self->datapos));
status = tokenize_bytes(self, nrows);
status = tokenize_bytes(self, nrows, start_lines);
if (status < 0) {
// XXX