Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

DOC/CLN: Index shared docs with appending actual docstrings #20022

Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
154 changes: 55 additions & 99 deletions pandas/core/indexes/base.py

Large diffs are not rendered by default.

35 changes: 16 additions & 19 deletions pandas/core/indexes/category.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,16 +19,13 @@

from pandas.util._decorators import Appender, cache_readonly
from pandas.core.config import get_option
from pandas.core.indexes.base import Index, _index_shared_docs
from pandas.core.indexes.base import Index
from pandas.core import accessor
import pandas.core.common as com
import pandas.core.base as base
import pandas.core.missing as missing
import pandas.core.indexes.base as ibase

_index_doc_kwargs = dict(ibase._index_doc_kwargs)
_index_doc_kwargs.update(dict(target_klass='CategoricalIndex'))


class CategoricalIndex(Index, accessor.PandasDelegate):
"""
Expand Down Expand Up @@ -191,7 +188,7 @@ def _simple_new(cls, values, name=None, categories=None, ordered=None,
result._reset_identity()
return result

@Appender(_index_shared_docs['_shallow_copy'])
@Appender(Index._shallow_copy.__doc__)
def _shallow_copy(self, values=None, categories=None, ordered=None,
dtype=None, **kwargs):
# categories and ordered can't be part of attributes,
Expand Down Expand Up @@ -322,7 +319,7 @@ def ordered(self):
def _reverse_indexer(self):
    """Expose the reverse indexer of the wrapped Categorical (`self._data`)."""
    categorical = self._data
    return categorical._reverse_indexer()

@Appender(_index_shared_docs['__contains__'] % _index_doc_kwargs)
@Appender(Index.__contains__.__doc__)
def __contains__(self, key):
hash(key)

Expand All @@ -331,7 +328,7 @@ def __contains__(self, key):

return key in self.values

@Appender(_index_shared_docs['contains'] % _index_doc_kwargs)
@Appender(Index.contains.__doc__)
def contains(self, key):
hash(key)

Expand All @@ -344,7 +341,7 @@ def __array__(self, dtype=None):
""" the array interface, return my values """
return np.array(self._data, dtype=dtype)

@Appender(_index_shared_docs['astype'])
@Appender(Index.astype.__doc__)
def astype(self, dtype, copy=True):
if is_interval_dtype(dtype):
from pandas import IntervalIndex
Expand All @@ -362,7 +359,7 @@ def _isnan(self):
""" return if each value is nan"""
return self._data.codes == -1

@Appender(ibase._index_shared_docs['fillna'])
@Appender(Index.fillna.__doc__)
def fillna(self, value, downcast=None):
    # Validate `value` against the categories first; `downcast` is accepted
    # for signature compatibility but not used by this implementation.
    self._assert_can_do_op(value)
    filled = self._data.fillna(value)
    return CategoricalIndex(filled, name=self.name)
Expand All @@ -389,7 +386,7 @@ def is_monotonic_increasing(self):
def is_monotonic_decreasing(self):
    """Return True when the category codes are monotonically decreasing."""
    codes_as_index = Index(self.codes)
    return codes_as_index.is_monotonic_decreasing

@Appender(_index_shared_docs['index_unique'] % _index_doc_kwargs)
@Appender(Index.unique.__doc__)
def unique(self, level=None):
if level is not None:
self._validate_index_level(level)
Expand All @@ -399,7 +396,7 @@ def unique(self, level=None):
return self._shallow_copy(result, categories=result.categories,
ordered=result.ordered)

@Appender(base._shared_docs['duplicated'] % _index_doc_kwargs)
@Appender(Index.duplicated.__doc__)
def duplicated(self, keep='first'):
from pandas._libs.hashtable import duplicated_int64
codes = self.codes.astype('i8')
Expand Down Expand Up @@ -462,7 +459,7 @@ def _can_reindex(self, indexer):
""" always allow reindexing """
pass

@Appender(_index_shared_docs['where'])
@Appender(Index.where.__doc__)
def where(self, cond, other=None):
if other is None:
other = self._na_value
Expand Down Expand Up @@ -558,7 +555,7 @@ def _reindex_non_unique(self, target):

return new_target, indexer, new_indexer

@Appender(_index_shared_docs['get_indexer'] % _index_doc_kwargs)
@Appender(Index.get_indexer.__doc__)
def get_indexer(self, target, method=None, limit=None, tolerance=None):
from pandas.core.arrays.categorical import _recode_for_categories

Expand Down Expand Up @@ -594,7 +591,7 @@ def get_indexer(self, target, method=None, limit=None, tolerance=None):
indexer, _ = self._engine.get_indexer_non_unique(codes)
return _ensure_platform_int(indexer)

@Appender(_index_shared_docs['get_indexer_non_unique'] % _index_doc_kwargs)
@Appender(Index.get_indexer_non_unique.__doc__)
def get_indexer_non_unique(self, target):
target = ibase._ensure_index(target)

Expand All @@ -605,15 +602,15 @@ def get_indexer_non_unique(self, target):
indexer, missing = self._engine.get_indexer_non_unique(codes)
return _ensure_platform_int(indexer), missing

@Appender(_index_shared_docs['_convert_scalar_indexer'])
@Appender(Index._convert_scalar_indexer.__doc__)
def _convert_scalar_indexer(self, key, kind=None):
    # When the categories opt in (_defer_to_indexing), let them handle the
    # conversion; otherwise fall back to the base Index implementation.
    categories = self.categories
    if not categories._defer_to_indexing:
        return super(CategoricalIndex, self)._convert_scalar_indexer(
            key, kind=kind)
    return categories._convert_scalar_indexer(key, kind=kind)

@Appender(_index_shared_docs['_convert_list_indexer'])
@Appender(Index._convert_list_indexer.__doc__)
def _convert_list_indexer(self, keyarr, kind=None):
# Return our indexer or raise if all of the values are not included in
# the categories
Expand All @@ -631,7 +628,7 @@ def _convert_list_indexer(self, keyarr, kind=None):

return self.get_indexer(keyarr)

@Appender(_index_shared_docs['_convert_arr_indexer'])
@Appender(Index._convert_arr_indexer.__doc__)
def _convert_arr_indexer(self, keyarr):
keyarr = com._asarray_tuplesafe(keyarr)

Expand All @@ -640,11 +637,11 @@ def _convert_arr_indexer(self, keyarr):

return self._shallow_copy(keyarr)

@Appender(_index_shared_docs['_convert_index_indexer'])
@Appender(Index._convert_index_indexer.__doc__)
def _convert_index_indexer(self, keyarr):
    # Re-wrap the Index-based indexer via _shallow_copy so the result
    # carries this CategoricalIndex's attributes.
    return self._shallow_copy(keyarr)

@Appender(_index_shared_docs['take'] % _index_doc_kwargs)
@Appender(Index.take.__doc__)
def take(self, indices, axis=0, allow_fill=True,
fill_value=None, **kwargs):
nv.validate_take(tuple(), kwargs)
Expand Down
11 changes: 4 additions & 7 deletions pandas/core/indexes/datetimelike.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,15 +44,12 @@
from pandas.errors import NullFrequencyError, PerformanceWarning
import pandas.io.formats.printing as printing

from pandas.core.indexes.base import Index, _index_shared_docs
from pandas.core.indexes.base import Index
from pandas.util._decorators import Appender, cache_readonly
import pandas.core.dtypes.concat as _concat
import pandas.tseries.frequencies as frequencies
from pandas.tseries.offsets import Tick, DateOffset

import pandas.core.indexes.base as ibase
_index_doc_kwargs = dict(ibase._index_doc_kwargs)


class DatelikeOps(object):
""" common ops for DatetimeIndex/PeriodIndex, but not TimedeltaIndex """
Expand Down Expand Up @@ -256,7 +253,7 @@ def _box_values_as_index(self):
def _format_with_header(self, header, **kwargs):
    """Return `header` rows followed by the natively-formatted values."""
    formatted_values = list(self._format_native_types(**kwargs))
    return header + formatted_values

@Appender(_index_shared_docs['__contains__'] % _index_doc_kwargs)
@Appender(Index.__contains__.__doc__)
def __contains__(self, key):
try:
res = self.get_loc(key)
Expand Down Expand Up @@ -400,7 +397,7 @@ def sort_values(self, return_indexer=False, ascending=True):

return self._simple_new(sorted_values, **attribs)

@Appender(_index_shared_docs['take'] % _index_doc_kwargs)
@Appender(Index.take.__doc__)
def take(self, indices, axis=0, allow_fill=True,
fill_value=None, **kwargs):
nv.validate_take(tuple(), kwargs)
Expand Down Expand Up @@ -953,7 +950,7 @@ def repeat(self, repeats, *args, **kwargs):
return self._shallow_copy(self.asi8.repeat(repeats),
freq=freq)

@Appender(_index_shared_docs['where'] % _index_doc_kwargs)
@Appender(Index.where.__doc__)
def where(self, cond, other=None):
other = _ensure_datetimelike_to_i8(other)
values = _ensure_datetimelike_to_i8(self)
Expand Down
4 changes: 2 additions & 2 deletions pandas/core/indexes/datetimes.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
from pandas.errors import PerformanceWarning
from pandas.core.algorithms import checked_add_with_arr

from pandas.core.indexes.base import Index, _index_shared_docs
from pandas.core.indexes.base import Index
from pandas.core.indexes.numeric import Int64Index, Float64Index
import pandas.compat as compat
from pandas.tseries.frequencies import to_offset, get_period_alias, Resolution
Expand Down Expand Up @@ -968,7 +968,7 @@ def _format_native_types(self, na_rep='NaT', date_format=None, **kwargs):
format=format,
na_rep=na_rep)

@Appender(_index_shared_docs['astype'])
@Appender(Index.astype.__doc__)
def astype(self, dtype, copy=True):
dtype = pandas_dtype(dtype)
if (is_datetime64_ns_dtype(dtype) and
Expand Down
27 changes: 10 additions & 17 deletions pandas/core/indexes/interval.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,7 @@
is_integer,
pandas_dtype)
from pandas.core.indexes.base import (
Index, _ensure_index,
default_pprint, _index_shared_docs)
Index, _ensure_index, default_pprint)

from pandas._libs import Timestamp, Timedelta
from pandas._libs.interval import (
Expand All @@ -42,12 +41,6 @@
from pandas.tseries.frequencies import to_offset
from pandas.tseries.offsets import DateOffset

import pandas.core.indexes.base as ibase
_index_doc_kwargs = dict(ibase._index_doc_kwargs)
_index_doc_kwargs.update(
dict(klass='IntervalIndex',
target_klass='IntervalIndex or list of Intervals'))


_VALID_CLOSED = set(['left', 'right', 'both', 'neither'])

Expand Down Expand Up @@ -302,7 +295,7 @@ def _simple_new(cls, left, right, closed=None, name=None, copy=False,
result._reset_identity()
return result

@Appender(_index_shared_docs['_shallow_copy'])
@Appender(Index._shallow_copy.__doc__)
def _shallow_copy(self, left=None, right=None, **kwargs):
if left is None:

Expand Down Expand Up @@ -729,15 +722,15 @@ def __reduce__(self):
d.update(self._get_attributes_dict())
return _new_IntervalIndex, (self.__class__, d), None

@Appender(_index_shared_docs['copy'])
@Appender(Index.copy.__doc__)
def copy(self, deep=False, name=None):
    # deep=True copies the left/right endpoint indexes as well; the result
    # is always rebuilt through from_arrays with the same closed-ness.
    if deep:
        new_left = self.left.copy(deep=True)
        new_right = self.right.copy(deep=True)
    else:
        new_left = self.left
        new_right = self.right
    new_name = self.name if name is None else name
    return type(self).from_arrays(new_left, new_right,
                                  closed=self.closed, name=new_name)

@Appender(_index_shared_docs['astype'])
@Appender(Index.astype.__doc__)
def astype(self, dtype, copy=True):
dtype = pandas_dtype(dtype)
if is_interval_dtype(dtype) and dtype != self.dtype:
Expand Down Expand Up @@ -832,7 +825,7 @@ def is_non_overlapping_monotonic(self):
return bool((self.right[:-1] <= self.left[1:]).all() or
(self.left[:-1] >= self.right[1:]).all())

@Appender(_index_shared_docs['_convert_scalar_indexer'])
@Appender(Index._convert_scalar_indexer.__doc__)
def _convert_scalar_indexer(self, key, kind=None):
if kind == 'iloc':
return super(IntervalIndex, self)._convert_scalar_indexer(
Expand All @@ -842,7 +835,7 @@ def _convert_scalar_indexer(self, key, kind=None):
def _maybe_cast_slice_bound(self, label, side, kind):
    """Delegate slice-bound casting to the endpoint index named by `side`."""
    endpoint_index = getattr(self, side)
    return endpoint_index._maybe_cast_slice_bound(label, side, kind)

@Appender(_index_shared_docs['_convert_list_indexer'])
@Appender(Index._convert_list_indexer.__doc__)
def _convert_list_indexer(self, keyarr, kind=None):
"""
we are passed a list-like indexer. Return the
Expand Down Expand Up @@ -1034,7 +1027,7 @@ def get_value(self, series, key):
loc = self.get_loc(key)
return series.iloc[loc]

@Appender(_index_shared_docs['get_indexer'] % _index_doc_kwargs)
@Appender(Index.get_indexer.__doc__)
def get_indexer(self, target, method=None, limit=None, tolerance=None):

self._check_method(method)
Expand Down Expand Up @@ -1135,12 +1128,12 @@ def _get_reindexer(self, target):

return np.concatenate(indexer)

@Appender(_index_shared_docs['get_indexer_non_unique'] % _index_doc_kwargs)
@Appender(Index.get_indexer_non_unique.__doc__)
def get_indexer_non_unique(self, target):
    # Normalize `target` to an Index and cast it for interval comparison
    # before deferring to the base-class implementation.
    casted_target = self._maybe_cast_indexed(_ensure_index(target))
    return super(IntervalIndex, self).get_indexer_non_unique(casted_target)

@Appender(_index_shared_docs['where'])
@Appender(Index.where.__doc__)
def where(self, cond, other=None):
if other is None:
other = self._na_value
Expand Down Expand Up @@ -1215,7 +1208,7 @@ def _concat_same_dtype(self, to_concat, name):
raise ValueError(msg)
return super(IntervalIndex, self)._concat_same_dtype(to_concat, name)

@Appender(_index_shared_docs['take'] % _index_doc_kwargs)
@Appender(Index.take.__doc__)
def take(self, indices, axis=0, allow_fill=True,
fill_value=None, **kwargs):
nv.validate_take(tuple(), kwargs)
Expand Down
Loading