diff --git a/transforms/Dockerfile.python.template b/transforms/Dockerfile.python.template
index 1bde08841..9f38097b7 100644
--- a/transforms/Dockerfile.python.template
+++ b/transforms/Dockerfile.python.template
@@ -14,13 +14,13 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=dpk:root data-processing-dist data-processing-dist
+COPY --chown=dpk:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}
 
 # END OF STEPS destined for a data-prep-kit base image
 
-COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=dpk:root requirements.txt requirements.txt
+COPY --chown=dpk:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:users requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt
 
 # Set environment
diff --git a/transforms/Dockerfile.spark.template b/transforms/Dockerfile.spark.template
index 0a183d8d9..1af783438 100644
--- a/transforms/Dockerfile.spark.template
+++ b/transforms/Dockerfile.spark.template
@@ -10,15 +10,15 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=spark:root data-processing-dist data-processing-dist
+COPY --chown=spark:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[spark]
 
 
 # Install project source
 
 ## Copy the python version of the tansform
-COPY --chown=spark:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=spark:root requirements.txt requirements.txt
+COPY --chown=spark:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=spark:users requirements.txt requirements.txt
 RUN pip install -r requirements.txt
 
 
diff --git a/transforms/universal/doc_id/Dockerfile.python b/transforms/universal/doc_id/Dockerfile.python
index 1bde08841..9f38097b7 100644
--- a/transforms/universal/doc_id/Dockerfile.python
+++ b/transforms/universal/doc_id/Dockerfile.python
@@ -14,13 +14,13 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=dpk:root data-processing-dist data-processing-dist
+COPY --chown=dpk:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}
 
 # END OF STEPS destined for a data-prep-kit base image
 
-COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=dpk:root requirements.txt requirements.txt
+COPY --chown=dpk:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:users requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt
 
 # Set environment
diff --git a/transforms/universal/doc_id/Dockerfile.spark b/transforms/universal/doc_id/Dockerfile.spark
index 0a183d8d9..1af783438 100644
--- a/transforms/universal/doc_id/Dockerfile.spark
+++ b/transforms/universal/doc_id/Dockerfile.spark
@@ -10,15 +10,15 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=spark:root data-processing-dist data-processing-dist
+COPY --chown=spark:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[spark]
 
 
 # Install project source
 
 ## Copy the python version of the tansform
-COPY --chown=spark:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=spark:root requirements.txt requirements.txt
+COPY --chown=spark:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=spark:users requirements.txt requirements.txt
 RUN pip install -r requirements.txt
 
 
diff --git a/transforms/universal/filter/Dockerfile.python b/transforms/universal/filter/Dockerfile.python
index 1bde08841..9f38097b7 100644
--- a/transforms/universal/filter/Dockerfile.python
+++ b/transforms/universal/filter/Dockerfile.python
@@ -14,13 +14,13 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=dpk:root data-processing-dist data-processing-dist
+COPY --chown=dpk:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}
 
 # END OF STEPS destined for a data-prep-kit base image
 
-COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=dpk:root requirements.txt requirements.txt
+COPY --chown=dpk:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:users requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt
 
 # Set environment
diff --git a/transforms/universal/filter/Dockerfile.spark b/transforms/universal/filter/Dockerfile.spark
index 0a183d8d9..1af783438 100644
--- a/transforms/universal/filter/Dockerfile.spark
+++ b/transforms/universal/filter/Dockerfile.spark
@@ -10,15 +10,15 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=spark:root data-processing-dist data-processing-dist
+COPY --chown=spark:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[spark]
 
 
 # Install project source
 
 ## Copy the python version of the tansform
-COPY --chown=spark:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=spark:root requirements.txt requirements.txt
+COPY --chown=spark:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=spark:users requirements.txt requirements.txt
 RUN pip install -r requirements.txt
 
 
diff --git a/transforms/universal/hap/Dockerfile.python b/transforms/universal/hap/Dockerfile.python
index 1bde08841..9f38097b7 100644
--- a/transforms/universal/hap/Dockerfile.python
+++ b/transforms/universal/hap/Dockerfile.python
@@ -14,13 +14,13 @@ ARG TRANSFORM_NAME
 
 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=dpk:root data-processing-dist data-processing-dist
+COPY --chown=dpk:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}
 
 # END OF STEPS destined for a data-prep-kit base image
 
-COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=dpk:root requirements.txt requirements.txt
+COPY --chown=dpk:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:users requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt
 
 # Set environment
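A minimal sketch for sanity-checking the new group ownership in one of the rebuilt images, assuming a hypothetical local tag (dpk-transform:dev) and that the copied files land in the image's default working directory:

    # Build one of the python-based transform images locally (tag name is illustrative only)
    docker build -t dpk-transform:dev -f Dockerfile.python .

    # Print the runtime user/groups and the numeric owner/group of a copied file;
    # the group column should now map to "users" (commonly GID 100) instead of "root"
    docker run --rm --entrypoint /bin/sh dpk-transform:dev -c 'id && ls -ln requirements.txt'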