Allow Variables to pass through to super class
macanudo527 committed Feb 11, 2025
1 parent 376f4c3 commit f2bc30f
Showing 4 changed files with 14 additions and 4 deletions.
2 changes: 2 additions & 0 deletions src/dali/abstract_ccxt_pair_converter_plugin.py
@@ -292,6 +292,8 @@ def __init__(
self.__csv_pricing_dict: Dict[str, Any] = _CSV_PRICING_DICT
self.__default_csv_reader: ExchangeNameAndClass = ExchangeNameAndClass(_KRAKEN, _CSV_PRICING_DICT[_KRAKEN])
self.__exchange_csv_reader: Dict[str, Any] = {}
self._logger.info("Untradeable assets: %s", untradeable_assets)
self._logger.info("Kraken update file: %s", kraken_csv_update_file)
self.__kraken_csv_update_file: Optional[str] = kraken_csv_update_file

# key: name of exchange, value: AVLTree of all snapshots of the graph
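The hunk above extends the abstract converter's constructor so the optional Kraken CSV update file is logged and stored on the base class. A minimal sketch of the base-class side of the change, with a hypothetical class name and simplified logger setup (only the argument names match the diff):

import logging
from typing import Optional


class PairConverterBase:  # hypothetical stand-in for the abstract converter plugin
    def __init__(
        self,
        untradeable_assets: Optional[str] = None,
        kraken_csv_update_file: Optional[str] = None,
    ) -> None:
        self._logger = logging.getLogger(self.__class__.__name__)
        # Log the optional inputs so a misconfigured plugin is easy to spot.
        self._logger.info("Untradeable assets: %s", untradeable_assets)
        self._logger.info("Kraken update file: %s", kraken_csv_update_file)
        # Keep the path around for the Kraken CSV reader to consume later.
        self.__kraken_csv_update_file: Optional[str] = kraken_csv_update_file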
2 changes: 2 additions & 0 deletions src/dali/plugin/pair_converter/ccxt.py
@@ -58,6 +58,7 @@ def __init__(
exchange_locked: Optional[bool] = None,
untradeable_assets: Optional[str] = None,
aliases: Optional[str] = None,
kraken_csv_update_file: Optional[str] = None,
) -> None:
cache_modifier = fiat_priority if fiat_priority else ""
super().__init__(
@@ -67,6 +68,7 @@ def __init__(
untradeable_assets=untradeable_assets,
aliases=aliases,
cache_modifier=cache_modifier,
kraken_csv_update_file=kraken_csv_update_file,
)
if fiat_priority:
weight: float = STANDARD_WEIGHT
2 changes: 2 additions & 0 deletions src/dali/plugin/pair_converter/ccxt_exchangerate_host.py
@@ -62,6 +62,7 @@ def __init__(
exchange_locked: Optional[bool] = None,
untradeable_assets: Optional[str] = None,
aliases: Optional[str] = None,
kraken_csv_update_file: Optional[str] = None,
) -> None:
cache_modifier = fiat_priority if fiat_priority else ""
super().__init__(
@@ -70,6 +71,7 @@ def __init__(
untradeable_assets=untradeable_assets,
aliases=aliases,
cache_modifier=cache_modifier,
kraken_csv_update_file=kraken_csv_update_file,
)
self.__fiat_list: List[str] = []
self._fiat_priority: Dict[str, float]
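Both plugin hunks above follow the same pattern: the concrete converter gains the optional kraken_csv_update_file argument and forwards it verbatim to super().__init__(), so only the base class has to interpret it. A self-contained sketch of that pass-through, with hypothetical class names standing in for the real plugins:

from typing import Optional


class PairConverterBase:  # hypothetical stand-in for the shared base class
    def __init__(self, kraken_csv_update_file: Optional[str] = None) -> None:
        self._kraken_csv_update_file = kraken_csv_update_file


class ExchangeRatePlugin(PairConverterBase):  # hypothetical concrete plugin
    def __init__(
        self,
        fiat_priority: Optional[str] = None,
        kraken_csv_update_file: Optional[str] = None,
    ) -> None:
        # Pass the new argument straight through; the subclass adds no logic of its own.
        super().__init__(kraken_csv_update_file=kraken_csv_update_file)
        self._fiat_priority = fiat_priority


# Example (the path is illustrative only):
# plugin = ExchangeRatePlugin(kraken_csv_update_file="kraken_update.zip")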
12 changes: 8 additions & 4 deletions src/dali/plugin/pair_converter/csv/kraken.py
@@ -31,7 +31,7 @@
from multiprocessing.pool import ThreadPool
from os import makedirs, path, remove
from typing import Dict, Generator, List, NamedTuple, Optional, Set, Tuple, cast
from zipfile import BadZipFile, ZipFile, is_zipfile
from zipfile import BadZipFile, ZipFile, is_zipfile, ZIP_DEFLATED

import requests
from progressbar import ProgressBar, UnknownLength
@@ -168,6 +168,10 @@ def __init__(self, transaction_manifest: TransactionManifest, force_download: bo
self.__force_download: bool = force_download
self.__unchunked_assets: Set[str] = transaction_manifest.assets

self.__logger.info("Path to update file: %s", update_file)
if path.exists(update_file):
self.__logger.info("Update file found. Combining the unified CSV file with the update file.")

if update_file is not None and path.exists(update_file):
self.__logger.info("Combining the unified CSV file with the update file. This may take a few minutes.")
self.__logger.info("After the process is complete the update file will be deleted.")
@@ -518,7 +522,7 @@ def _unzip_and_chunk(self, base_asset: str, quote_asset: str, all_bars: bool = F
# This function is used to combine two zip files into a new zip file.
# This is sometimes necessary when quarterly updates are released by Kraken, but
# the unified CSV file is not updated yet.
def combine_zip_files(zip_file1, zip_file2, output_zip_file):
def combine_zip_files(self, zip_file1, zip_file2, output_zip_file):
csv_files = {}

# Read the first zip file
@@ -533,12 +537,12 @@ def combine_zip_files(zip_file1, zip_file2, output_zip_file):
if file_name.endswith('.csv'):
csv_data = zip_ref2.read(file_name).decode(encoding="utf-8")
if file_name in csv_files:
csv_files[file_name] += '\n' + csv_data
csv_files[file_name] += csv_data
else:
csv_files[file_name] = csv_data

# Write the combined csv files to a new zip file
with ZipFile(output_zip_file, 'w', zipfile.ZIP_DEFLATED) as zip_out:
with ZipFile(output_zip_file, 'w', ZIP_DEFLATED) as zip_out:
for file_name, data in csv_files.items():
zip_out.writestr(file_name, data)

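The combine_zip_files change above merges a quarterly Kraken update archive into the unified OHLCVT archive by concatenating same-named CSV members and writing them to a new zip. A standalone sketch of the same technique, written here as a free function with illustrative file names (not the plugin's actual method):

from typing import Dict
from zipfile import ZIP_DEFLATED, ZipFile


def merge_csv_zip_archives(zip_file1: str, zip_file2: str, output_zip_file: str) -> None:
    csv_files: Dict[str, str] = {}
    for zip_path in (zip_file1, zip_file2):
        with ZipFile(zip_path, "r") as zip_ref:
            for file_name in zip_ref.namelist():
                if file_name.endswith(".csv"):
                    csv_data = zip_ref.read(file_name).decode(encoding="utf-8")
                    # Concatenate rows when both archives contain the same pair file.
                    csv_files[file_name] = csv_files.get(file_name, "") + csv_data
    # Write every merged CSV into a new, deflate-compressed archive.
    with ZipFile(output_zip_file, "w", ZIP_DEFLATED) as zip_out:
        for file_name, data in csv_files.items():
            zip_out.writestr(file_name, data)


# Example call (file names are illustrative only):
# merge_csv_zip_archives("Kraken_OHLCVT.zip", "Kraken_OHLCVT_update.zip", "Kraken_OHLCVT_combined.zip")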
