BUG: fix for GH14252 #14261

Closed · wants to merge 1 commit
1 change: 1 addition & 0 deletions doc/source/whatsnew/v0.19.0.txt
@@ -1576,3 +1576,4 @@ Bug Fixes
 - Bugs in ``stack``, ``get_dummies``, ``make_axis_dummies`` which don't preserve categorical dtypes in (multi)indexes (:issue:`13854`)
 - ``PeriodIndex`` can now accept ``list`` and ``array`` which contain ``pd.NaT`` (:issue:`13430`)
 - Bug in ``df.groupby`` where ``.median()`` returns arbitrary values if grouped dataframe contains empty bins (:issue:`13629`)
+- Bug in ``pandas.concat`` where the name of a user-supplied ``Index`` passed as ``keys`` was not preserved (:issue:`14252`)
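For context, a minimal sketch of the behavior this entry describes, mirroring the new test added further down in this PR (the frame contents are illustrative): when ``keys`` is a named ``Index``, its name should survive into the outer level of the concatenated result.

```python
import pandas as pd

# Illustrative data; mirrors the test added in this PR.
df = pd.DataFrame({'foo': [1, 2, 3, 4],
                   'bar': [0.1, 0.2, 0.3, 0.4]})
keys = pd.Index(['a', 'b'], name='baz')

result = pd.concat([df, df], keys=keys)

# Before the fix the 'baz' name was silently dropped; with the fix:
print(result.index.names)   # FrozenList(['baz', None])
```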
7 changes: 4 additions & 3 deletions pandas/indexes/multi.py
@@ -152,9 +152,10 @@ def _set_levels(self, levels, level=None, copy=False, validate=True,
             raise ValueError('Length of levels must match length of level.')
 
         if level is None:
-            new_levels = FrozenList(
-                _ensure_index(lev, copy=copy)._shallow_copy()
-                for lev in levels)
+            new_levels = []
+            for lev in levels:
+                new_levels.append(
+                    _ensure_index(lev, copy=copy)._shallow_copy())
         else:
             level = [self._get_level_number(l) for l in level]
             new_levels = list(self._levels)
15 changes: 9 additions & 6 deletions pandas/tools/merge.py
@@ -1369,7 +1369,7 @@ def __init__(self, objs, axis=0, join='outer', join_axes=None,
                 clean_keys.append(k)
                 clean_objs.append(v)
             objs = clean_objs
-            keys = clean_keys
+            keys = Index(clean_keys, name=keys.name)
 
         if len(objs) == 0:
             raise ValueError('All objects passed were None')
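The hunk above is the core of the fix: when ``None`` entries are filtered out of ``objs``, the matching keys were previously rebuilt as a plain list, which drops the name of a user-supplied ``Index``. Rebuilding them as an ``Index`` carrying ``keys.name`` keeps the name around for ``_make_concat_multiindex``. A rough standalone sketch of that idea (the helper name is made up, and it guards against ``keys`` being a plain list only so the example is self-contained):

```python
import pandas as pd

def _clean_keys_and_objs(objs, keys):
    # Hypothetical helper (not part of the patch): drop entries whose
    # object is None while keeping keys and objs aligned, and keep the
    # name if ``keys`` was a named Index.
    clean_keys, clean_objs = [], []
    for k, v in zip(keys, objs):
        if v is None:
            continue
        clean_keys.append(k)
        clean_objs.append(v)
    name = keys.name if isinstance(keys, pd.Index) else None
    return clean_objs, pd.Index(clean_keys, name=name)

objs, keys = _clean_keys_and_objs(
    [pd.Series([1]), None, pd.Series([2])],
    pd.Index(['a', 'b', 'c'], name='baz'))
print(keys)  # Index(['a', 'c'], dtype='object', name='baz')
```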
@@ -1685,7 +1685,6 @@ def _make_concat_multiindex(indexes, keys, levels=None, names=None):
 
         # also copies
         names = names + _get_consensus_names(indexes)
-
         return MultiIndex(levels=levels, labels=label_list, names=names,
                           verify_integrity=False)
 
@@ -1694,8 +1693,8 @@ def _make_concat_multiindex(indexes, keys, levels=None, names=None):
     kpieces = len(indexes)
 
     # also copies
-    new_names = list(names)
-    new_levels = list(levels)
+    new_names = names
+    new_levels = levels
 
     # construct labels
     new_labels = []
@@ -1723,8 +1722,12 @@ def _make_concat_multiindex(indexes, keys, levels=None, names=None):
     if len(new_names) < len(new_levels):
         new_names.extend(new_index.names)
 
-    return MultiIndex(levels=new_levels, labels=new_labels, names=new_names,
-                      verify_integrity=False)
+    if any(new_names):
+        return MultiIndex(levels=new_levels, labels=new_labels,
+                          names=new_names, verify_integrity=False)
+    else:
+        return MultiIndex(levels=new_levels, labels=new_labels,
+                          verify_integrity=False)
 
 
 def _should_fill(lname, rname):
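The last hunk in this file passes ``names`` to the ``MultiIndex`` constructor only when at least one name is non-None, and otherwise lets the constructor fall back to default (all-``None``) names. A small standalone illustration of that guard, using made-up levels/labels and the 0.19-era ``labels=`` keyword (renamed ``codes=`` in later pandas versions):

```python
import pandas as pd

new_levels = [pd.Index(['a', 'b']), pd.Index([0, 1])]
new_labels = [[0, 0, 1, 1], [0, 1, 0, 1]]
new_names = ['baz', None]

if any(new_names):
    mi = pd.MultiIndex(levels=new_levels, labels=new_labels,
                       names=new_names, verify_integrity=False)
else:
    # All names are None: let MultiIndex assign its defaults.
    mi = pd.MultiIndex(levels=new_levels, labels=new_labels,
                       verify_integrity=False)

print(mi.names)  # FrozenList(['baz', None])
```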
9 changes: 9 additions & 0 deletions pandas/tools/tests/test_merge.py
@@ -11,6 +11,7 @@
 import pandas as pd
 from pandas.compat import lrange, lzip
 from pandas.tools.merge import merge, concat, MergeError
+from pandas.core.base import FrozenList
 from pandas.util.testing import (assert_frame_equal,
                                  assert_series_equal,
                                  slow)
@@ -834,6 +835,14 @@ def test_merge_right_vs_left(self):
         merged2 = merged2.ix[:, merged1.columns]
         assert_frame_equal(merged1, merged2)
 
+    def test_concat_keys(self):
+        df = pd.DataFrame({'foo': [1, 2, 3, 4],
+                           'bar': [0.1, 0.2, 0.3, 0.4]})
+        index = pd.Index(['a', 'b'], name='baz')
+
+        concatted = pd.concat([df, df], keys=index)
+        self.assertEqual(FrozenList(['baz', None]), concatted.index.names)
+
     def test_compress_group_combinations(self):
 
         # ~ 40000000 possible unique groups