moved S3 cache methods to __init__.py
- so they can be used for sdists if needed
- removed boto imports from top of files
- inlined several helper methods so they work with the local import of boto (the deferred-import pattern is sketched below the change summary)
adamfeuer committed Nov 5, 2014 · 1 parent 231d2ab · commit 601d118
Showing 2 changed files with 53 additions and 55 deletions.
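The last bullet refers to the deferred-import pattern visible in both files below: import boto moves from module scope into the function bodies, so boto only has to be installed when the S3 cache is actually configured. A minimal sketch of that pattern; the function name (fetch_from_s3) and its parameters are illustrative, not part of the commit:

def fetch_from_s3(bucket_name, key_name, destination):
    # Deferred import: boto is only needed once S3 caching is enabled.
    try:
        import boto
    except ImportError:
        return False  # boto missing - fall back to the local cache only
    key = boto.connect_s3().get_bucket(bucket_name).get_key(key_name)
    if key is None:
        return False  # no remote copy of this file
    key.get_contents_to_filename(destination)
    return True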
50 changes: 49 additions & 1 deletion pip_accel/__init__.py
@@ -43,7 +43,7 @@
 # Modules included in our package.
 from pip_accel.bdist import get_binary_dist, install_binary_dist
 from pip_accel.config import (binary_index, download_cache, index_version_file,
-                              on_debian, source_index)
+                              on_debian, source_index, s3_cache_bucket, s3_cache_prefix)
 from pip_accel.req import Requirement
 from pip_accel.utils import run

@@ -487,5 +487,53 @@ def dependency_links(self):
     def dependency_links(self, value):
         logger.debug("Custom package finder ignoring 'dependency_links' value (%r) ..", value)
 
+
+def cache_file_exists(cache_file, binary_index):
+    """Check if S3 cache is configured; if so, check the cache to see if the file exists there.
+    If it does exist in the S3 cache, download it to the local cache so it can be used."""
+    if os.path.isfile(cache_file):
+        return True
+    if s3_cache_bucket is None:
+        return False
+    logger.debug("S3_CACHE_BUCKET is set, attempting to read file from S3 cache.")
+    try:
+        import boto
+        from boto.s3.key import Key
+        bucket = boto.connect_s3().get_bucket(s3_cache_bucket)
+        s3_key = get_s3_key_path(binary_index, cache_file)
+        logger.info("Downloading {} from S3 cache.".format(s3_key))
+        key = bucket.get_key(s3_key)
+        if key is not None:
+            key.get_contents_to_filename(cache_file)
+            return True
+    except ImportError:
+        logger.debug("boto module not found - cannot read file from S3 cache.")
+    return False
+
+
+def store_file_into_s3_cache(cache_file, binary_index):
+    """If the S3 cache is configured, store the file there."""
+    if s3_cache_bucket is None:
+        return False
+    logger.debug("S3_CACHE_BUCKET is set, attempting to store file in S3 cache.")
+    try:
+        import boto
+        from boto.s3.key import Key
+        bucket = boto.connect_s3().get_bucket(s3_cache_bucket)
+        s3_key = get_s3_key_path(binary_index, cache_file)
+        logger.info("Storing file {} into S3 cache at {}.".format(cache_file, s3_key))
+        key = Key(bucket)
+        key.key = s3_key
+        key.set_contents_from_filename(cache_file)
+        return True
+    except ImportError:
+        logger.debug("boto module not found - cannot store file into S3 cache.")
+    return False
+
+
+def get_s3_key_path(binary_index, cache_file):
+    return '/'.join([s3_cache_prefix, cache_file.replace(binary_index + '/', '')])
+
+
 if __name__ == '__main__':
     main()
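To make the key layout concrete, here is what get_s3_key_path computes. The expression is copied from the function above; the directory, prefix, and file name values are hypothetical:

binary_index = '/home/user/.pip-accel/binaries'            # hypothetical local binary index
s3_cache_prefix = 'pip-accel/binaries'                     # hypothetical S3 key prefix
cache_file = binary_index + '/requests:2.4.3:py2.7.tar.gz'
s3_key = '/'.join([s3_cache_prefix, cache_file.replace(binary_index + '/', '')])
# s3_key is now 'pip-accel/binaries/requests:2.4.3:py2.7.tar.gz'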
58 changes: 4 additions & 54 deletions pip_accel/bdist.py
@@ -28,12 +28,11 @@
 import time
 
 # External dependencies.
-import boto
-from boto.s3.key import Key
 from humanfriendly import Spinner, Timer
 
 # Modules included in our package.
-from pip_accel.config import binary_index, on_debian, s3_cache_bucket, s3_cache_prefix
+import pip_accel
+from pip_accel.config import binary_index, on_debian
 from pip_accel.deps import sanity_check_dependencies
 from pip_accel.utils import get_python_version

@@ -79,7 +78,7 @@ def get_binary_dist(package, version, directory, url=None, python='/usr/bin/pyth
         url = None
     tag = hashlib.sha1(str(version + url).encode()).hexdigest() if url else version
     cache_file = os.path.join(binary_index, '%s:%s:%s.tar.gz' % (package, tag, get_python_version()))
-    if not cache_file_exists(cache_file, binary_index):
+    if not pip_accel.cache_file_exists(cache_file, binary_index):
         logger.debug("%s (%s) hasn't been cached yet, doing so now.", package, version)
         # Build the binary distribution.
         try:
@@ -97,7 +96,7 @@
         # moving the transformed binary distribution into its final place.
         os.rename(transformed_file, cache_file)
         logger.debug("%s (%s) cached as %s.", package, version, cache_file)
-        store_file_into_s3_cache(cache_file, binary_index)
+        pip_accel.store_file_into_s3_cache(cache_file, binary_index)
     archive = tarfile.open(cache_file, 'r:gz')
     for member in archive.getmembers():
         yield member, archive.extractfile(member.name)
@@ -328,52 +327,3 @@ class NoBuildOutput(Exception):
"""


def cache_file_exists(cache_file, binary_index):
if os.path.isfile(cache_file):
return True
if s3_cache_bucket is None:
return False
logger.debug("S3_CACHE_BUCKET is set, attempting to read file from S3 cache.")
try:
import boto
bucket = get_s3_bucket()
s3_key = get_s3_key_path(binary_index, cache_file)
logger.info("Downloading {} from S3 cache.".format(s3_key))
key = bucket.get_key(s3_key)
if key is not None:
key.get_contents_to_filename(cache_file)
return True
except ImportError:
logger.debug("boto module not found - cannot read file from S3 cache.")
return False


def store_file_into_s3_cache(cache_file, binary_index):
if s3_cache_bucket is None:
return False
logger.debug("S3_CACHE_BUCKET is set, attempting to store file in S3 cache.")
try:
import boto
bucket = get_s3_bucket()
s3_key = get_s3_key_path(binary_index, cache_file)
logger.info("Storing file {} into S3 cache at {}.".format(cache_file, s3_key))
key = Key(bucket)
key.key = s3_key
key.set_contents_from_filename(cache_file)
return True
except ImportError:
logger.debug("boto module not found - cannot store file into S3 cache.")
return False


def get_s3_key_path(binary_index, cache_file):
return '/'.join([s3_cache_prefix, cache_file.replace(binary_index + '/', '')])


def get_s3_bucket():
return get_s3_connection().get_bucket(s3_cache_bucket)


def get_s3_connection():
return boto.connect_s3()
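With the helpers gone from bdist.py, the caching flow in get_binary_dist reduces to the sequence sketched below. ensure_cached and build_and_cache_archive are hypothetical stand-ins for the surrounding build logic in the hunks above; the two pip_accel calls are the ones this commit introduces:

import pip_accel

def ensure_cached(cache_file, binary_index):
    # A local hit, or an S3 hit (which downloads into the local cache), means no work.
    if not pip_accel.cache_file_exists(cache_file, binary_index):
        build_and_cache_archive(cache_file)  # hypothetical: build, transform, os.rename into place
        # Push the freshly built archive to S3 so other machines can reuse it.
        pip_accel.store_file_into_s3_cache(cache_file, binary_index)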
