CLN: small things in pytables #29958

Merged
merged 1 commit into from Dec 2, 2019
49 changes: 21 additions & 28 deletions pandas/io/pytables.py
@@ -312,7 +312,7 @@ def read_hdf(path_or_buf, key=None, mode: str = "r", **kwargs):
key : object, optional
The group identifier in the store. Can be omitted if the HDF file
contains a single pandas object.
mode : {'r', 'r+', 'a'}, optional
mode : {'r', 'r+', 'a'}, default 'r'
Mode to use when opening the file. Ignored if path_or_buf is a
:class:`pandas.HDFStore`. Default is 'r'.
where : list, optional
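For context, a minimal `read_hdf` usage sketch matching the documented defaults (the file name and key are illustrative, not part of this diff):

```python
import pandas as pd

# key may be omitted when the file holds a single pandas object;
# mode defaults to "r" and is ignored when an open HDFStore is passed
df = pd.read_hdf("data.h5", key="df", mode="r")
```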
@@ -417,7 +417,7 @@ def read_hdf(path_or_buf, key=None, mode: str = "r", **kwargs):
raise


def _is_metadata_of(group, parent_group) -> bool:
def _is_metadata_of(group: "Node", parent_group: "Node") -> bool:
"""Check if a given group is a metadata group for a given parent_group."""
if group._v_depth <= parent_group._v_depth:
return False
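The quoted `"Node"` annotations are forward references to PyTables' `Node` class; a sketch of the usual pattern, assuming a `TYPE_CHECKING` guard (the import shown is an assumption about the module, not part of this diff):

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # imported only for static type checking, so PyTables is not a hard
    # runtime dependency of the annotated signatures
    from tables import Node
```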
@@ -932,9 +932,7 @@ def func(_start, _stop, _where):
# retrieve the objs, _where is always passed as a set of
# coordinates here
objs = [
t.read(
where=_where, columns=columns, start=_start, stop=_stop, **kwargs
)
t.read(where=_where, columns=columns, start=_start, stop=_stop)
for t in tbls
]

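This inner `func` backs `HDFStore.select_as_multiple`, which no longer forwards arbitrary kwargs to each table's `read`. A hedged usage sketch (keys and data are illustrative):

```python
import pandas as pd

df1 = pd.DataFrame({"a": range(10)})
df2 = pd.DataFrame({"b": range(10)})

with pd.HDFStore("data.h5") as store:
    # table-format frames with a shared index can be selected together;
    # the selector table supplies the rows matching the where criterion
    store.append("df1", df1)
    store.append("df2", df2)
    result = store.select_as_multiple(
        ["df1", "df2"], where="index > 5", selector="df1"
    )
```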
@@ -957,7 +955,7 @@ def func(_start, _stop, _where):

return it.get_result(coordinates=True)

def put(self, key: str, value, format=None, append=False, **kwargs):
def put(self, key: str, value: FrameOrSeries, format=None, append=False, **kwargs):
"""
Store object in HDFStore.

@@ -986,8 +984,8 @@ def put(self, key: str, value, format=None, append=False, **kwargs):
"""
if format is None:
format = get_option("io.hdf.default_format") or "fixed"
kwargs = self._validate_format(format, kwargs)
self._write_to_group(key, value, append=append, **kwargs)
format = self._validate_format(format)
self._write_to_group(key, value, format=format, append=append, **kwargs)

def remove(self, key: str, where=None, start=None, stop=None):
"""
@@ -1046,7 +1044,7 @@ def remove(self, key: str, where=None, start=None, stop=None):
def append(
self,
key: str,
value,
value: FrameOrSeries,
format=None,
append=True,
columns=None,
@@ -1096,8 +1094,10 @@ def append(
dropna = get_option("io.hdf.dropna_table")
if format is None:
format = get_option("io.hdf.default_format") or "table"
kwargs = self._validate_format(format, kwargs)
self._write_to_group(key, value, append=append, dropna=dropna, **kwargs)
format = self._validate_format(format)
self._write_to_group(
key, value, format=format, append=append, dropna=dropna, **kwargs
)

def append_to_multiple(
self,
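`append` follows the same pattern, validating `format` up front and forwarding it explicitly. A usage sketch ("table" is the default format for `append`, so rows accumulate across calls):

```python
import pandas as pd

with pd.HDFStore("data.h5") as store:
    # each call appends rows to the same table-format node
    store.append("log", pd.DataFrame({"a": [1, 2]}))
    store.append("log", pd.DataFrame({"a": [3, 4]}))
```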
@@ -1418,17 +1418,16 @@ def _check_if_open(self):
if not self.is_open:
raise ClosedFileError(f"{self._path} file is not open!")

def _validate_format(self, format: str, kwargs: Dict[str, Any]) -> Dict[str, Any]:
""" validate / deprecate formats; return the new kwargs """
kwargs = kwargs.copy()
def _validate_format(self, format: str) -> str:
""" validate / deprecate formats """

# validate
try:
kwargs["format"] = _FORMAT_MAP[format.lower()]
format = _FORMAT_MAP[format.lower()]
except KeyError:
raise TypeError(f"invalid HDFStore format specified [{format}]")

return kwargs
return format

def _create_storer(
self,
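`_validate_format` now returns the canonical format string instead of copying and mutating the caller's kwargs. The accepted spellings are unchanged; a sketch of the user-visible behavior (keys are illustrative):

```python
import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3]})
with pd.HDFStore("data.h5") as store:
    # "t" and "f" are accepted aliases for "table" and "fixed";
    # an unknown format raises TypeError("invalid HDFStore format specified ...")
    store.put("as_table", df, format="t")
    store.put("as_fixed", df, format="f")
```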
@@ -1532,7 +1531,7 @@ def error(t):
def _write_to_group(
self,
key: str,
value,
value: FrameOrSeries,
format,
axes=None,
index=True,
Expand Down Expand Up @@ -1615,10 +1614,10 @@ def _write_to_group(
if isinstance(s, Table) and index:
s.create_index(columns=index)

def _read_group(self, group: "Node", **kwargs):
def _read_group(self, group: "Node"):
s = self._create_storer(group)
s.infer_axes()
return s.read(**kwargs)
return s.read()
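`_read_group` no longer threads kwargs through to the storer's `read`; it backs plain whole-object retrieval, as in this sketch (key is illustrative):

```python
import pandas as pd

with pd.HDFStore("data.h5") as store:
    # dictionary-style access and get() return the stored object in its
    # entirety (no where/columns/start/stop)
    df = store["df"]
    same = store.get("df")
```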


class TableIterator:
@@ -2752,28 +2751,22 @@ def f(values, freq=None, tz=None):

return klass

def validate_read(self, kwargs: Dict[str, Any]) -> Dict[str, Any]:
def validate_read(self, columns, where):
"""
remove table keywords from kwargs and return
raise if any keywords are passed which are not-None
"""
kwargs = copy.copy(kwargs)

columns = kwargs.pop("columns", None)
if columns is not None:
raise TypeError(
"cannot pass a column specification when reading "
"a Fixed format store. this store must be "
"selected in its entirety"
)
where = kwargs.pop("where", None)
if where is not None:
raise TypeError(
"cannot pass a where specification when reading "
"from a Fixed format store. this store must be "
"selected in its entirety"
)
return kwargs

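`validate_read` now takes `columns` and `where` directly and raises if either is supplied for a fixed-format object. From the user's side (assuming `"fixed_df"` was written with the default fixed format):

```python
import pandas as pd

pd.DataFrame({"a": [1, 2, 3]}).to_hdf("data.h5", "fixed_df")  # fixed format by default

try:
    # a Fixed format store must be selected in its entirety
    pd.read_hdf("data.h5", "fixed_df", columns=["a"])
except TypeError as err:
    print(err)

df = pd.read_hdf("data.h5", "fixed_df")  # OK: read the whole object
```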
@property
def is_exists(self) -> bool:
@@ -3085,7 +3078,7 @@ def read(
start: Optional[int] = None,
stop: Optional[int] = None,
):
self.validate_read({"where": where, "columns": columns})
self.validate_read(columns, where)
index = self.read_index("index", start=start, stop=stop)
values = self.read_array("values", start=start, stop=stop)
return Series(values, index=index, name=self.name)
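`SeriesFixed.read` rebuilds the Series from the stored index and values arrays; a round-trip sketch (name and data are illustrative):

```python
import pandas as pd

s = pd.Series([1.0, 2.0, 3.0], name="x")
with pd.HDFStore("data.h5") as store:
    store.put("s", s)          # fixed format by default
    out = store.select("s")    # index, values and name are restored

assert out.equals(s)
```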
@@ -3142,7 +3135,7 @@ def read(
stop: Optional[int] = None,
):
# start, stop applied to rows, so 0th axis only
self.validate_read({"columns": columns, "where": where})
self.validate_read(columns, where)
select_axis = self.obj_type()._get_block_manager_axis(0)

axes = []