diff --git a/src/dali/abstract_ccxt_pair_converter_plugin.py b/src/dali/abstract_ccxt_pair_converter_plugin.py index d1ac5bb1..dcc5a990 100644 --- a/src/dali/abstract_ccxt_pair_converter_plugin.py +++ b/src/dali/abstract_ccxt_pair_converter_plugin.py @@ -292,6 +292,8 @@ def __init__( self.__csv_pricing_dict: Dict[str, Any] = _CSV_PRICING_DICT self.__default_csv_reader: ExchangeNameAndClass = ExchangeNameAndClass(_KRAKEN, _CSV_PRICING_DICT[_KRAKEN]) self.__exchange_csv_reader: Dict[str, Any] = {} + self._logger.debug("Untradeable assets: %s", untradeable_assets) + self._logger.debug("Kraken update file: %s", kraken_csv_update_file) self.__kraken_csv_update_file: Optional[str] = kraken_csv_update_file # key: name of exchange, value: AVLTree of all snapshots of the graph diff --git a/src/dali/plugin/pair_converter/ccxt.py b/src/dali/plugin/pair_converter/ccxt.py index 5eb86ce6..fb27af69 100755 --- a/src/dali/plugin/pair_converter/ccxt.py +++ b/src/dali/plugin/pair_converter/ccxt.py @@ -58,6 +58,7 @@ def __init__( exchange_locked: Optional[bool] = None, untradeable_assets: Optional[str] = None, aliases: Optional[str] = None, + kraken_csv_update_file: Optional[str] = None, ) -> None: cache_modifier = fiat_priority if fiat_priority else "" super().__init__( @@ -67,6 +68,7 @@ def __init__( untradeable_assets=untradeable_assets, aliases=aliases, cache_modifier=cache_modifier, + kraken_csv_update_file=kraken_csv_update_file, ) if fiat_priority: weight: float = STANDARD_WEIGHT diff --git a/src/dali/plugin/pair_converter/ccxt_exchangerate_host.py b/src/dali/plugin/pair_converter/ccxt_exchangerate_host.py index 9c1871f7..f760971e 100644 --- a/src/dali/plugin/pair_converter/ccxt_exchangerate_host.py +++ b/src/dali/plugin/pair_converter/ccxt_exchangerate_host.py @@ -62,6 +62,7 @@ def __init__( exchange_locked: Optional[bool] = None, untradeable_assets: Optional[str] = None, aliases: Optional[str] = None, + kraken_csv_update_file: Optional[str] = None, ) -> None: cache_modifier = 
fiat_priority if fiat_priority else "" super().__init__( @@ -70,6 +71,7 @@ def __init__( untradeable_assets=untradeable_assets, aliases=aliases, cache_modifier=cache_modifier, + kraken_csv_update_file=kraken_csv_update_file, ) self.__fiat_list: List[str] = [] self._fiat_priority: Dict[str, float] diff --git a/src/dali/plugin/pair_converter/csv/kraken.py b/src/dali/plugin/pair_converter/csv/kraken.py index 811a76bb..75b022d6 100755 --- a/src/dali/plugin/pair_converter/csv/kraken.py +++ b/src/dali/plugin/pair_converter/csv/kraken.py @@ -31,7 +31,7 @@ from multiprocessing.pool import ThreadPool from os import makedirs, path, remove from typing import Dict, Generator, List, NamedTuple, Optional, Set, Tuple, cast -from zipfile import BadZipFile, ZipFile, is_zipfile +from zipfile import BadZipFile, ZIP_DEFLATED, ZipFile, is_zipfile import requests from progressbar import ProgressBar, UnknownLength @@ -168,6 +168,8 @@ def __init__(self, transaction_manifest: TransactionManifest, force_download: bo self.__force_download: bool = force_download self.__unchunked_assets: Set[str] = transaction_manifest.assets + self.__logger.debug("Path to update file: %s", update_file) + if update_file is not None and path.exists(update_file): self.__logger.info("Combining the unified CSV file with the update file. This may take a few minutes.") self.__logger.info("After the process is complete the update file will be deleted.") @@ -518,7 +522,7 @@ def _unzip_and_chunk(self, base_asset: str, quote_asset: str, all_bars: bool = F # This function is used to combine two zip files into a new zip file. # This is sometimes necessary when quarterly updates are released by Kraken, but # the unified CSV file is not updated yet. 
- def combine_zip_files(zip_file1, zip_file2, output_zip_file): + def combine_zip_files(self, zip_file1: str, zip_file2: str, output_zip_file: str) -> None: csv_files = {} # Read the first zip file @@ -533,12 +537,12 @@ def combine_zip_files(zip_file1, zip_file2, output_zip_file): if file_name.endswith('.csv'): csv_data = zip_ref2.read(file_name).decode(encoding="utf-8") if file_name in csv_files: - csv_files[file_name] += '\n' + csv_data + csv_files[file_name] = csv_files[file_name].rstrip('\n') + '\n' + csv_data else: csv_files[file_name] = csv_data # Write the combined csv files to a new zip file - with ZipFile(output_zip_file, 'w', zipfile.ZIP_DEFLATED) as zip_out: + with ZipFile(output_zip_file, 'w', ZIP_DEFLATED) as zip_out: for file_name, data in csv_files.items(): zip_out.writestr(file_name, data)