From cef1dea5e6e8de3d1bed3a50aef77a0f79981e37 Mon Sep 17 00:00:00 2001
From: Maroun Touma
Date: Thu, 9 Jan 2025 07:27:23 -0500
Subject: [PATCH] change *:root to *:users

Signed-off-by: Maroun Touma
---
 transforms/Dockerfile.python.template         | 6 +++---
 transforms/Dockerfile.spark.template          | 6 +++---
 transforms/universal/doc_id/Dockerfile.python | 6 +++---
 transforms/universal/doc_id/Dockerfile.spark  | 6 +++---
 transforms/universal/filter/Dockerfile.python | 6 +++---
 transforms/universal/filter/Dockerfile.spark  | 6 +++---
 transforms/universal/hap/Dockerfile.python    | 6 +++---
 7 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/transforms/Dockerfile.python.template b/transforms/Dockerfile.python.template
index 1bde08841..9f38097b7 100644
--- a/transforms/Dockerfile.python.template
+++ b/transforms/Dockerfile.python.template
@@ -14,13 +14,13 @@ ARG TRANSFORM_NAME

 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=dpk:root data-processing-dist data-processing-dist
+COPY --chown=dpk:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}

 # END OF STEPS destined for a data-prep-kit base image

-COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=dpk:root requirements.txt requirements.txt
+COPY --chown=dpk:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:users requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt

 # Set environment
diff --git a/transforms/Dockerfile.spark.template b/transforms/Dockerfile.spark.template
index 0a183d8d9..1af783438 100644
--- a/transforms/Dockerfile.spark.template
+++ b/transforms/Dockerfile.spark.template
@@ -10,15 +10,15 @@ ARG TRANSFORM_NAME

 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=spark:root data-processing-dist data-processing-dist
+COPY --chown=spark:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[spark]

 # Install project source

 ## Copy the python version of the tansform
-COPY --chown=spark:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=spark:root requirements.txt requirements.txt
+COPY --chown=spark:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=spark:users requirements.txt requirements.txt
 RUN pip install -r requirements.txt


diff --git a/transforms/universal/doc_id/Dockerfile.python b/transforms/universal/doc_id/Dockerfile.python
index 1bde08841..9f38097b7 100644
--- a/transforms/universal/doc_id/Dockerfile.python
+++ b/transforms/universal/doc_id/Dockerfile.python
@@ -14,13 +14,13 @@ ARG TRANSFORM_NAME

 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=dpk:root data-processing-dist data-processing-dist
+COPY --chown=dpk:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}

 # END OF STEPS destined for a data-prep-kit base image

-COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=dpk:root requirements.txt requirements.txt
+COPY --chown=dpk:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:users requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt

 # Set environment
diff --git a/transforms/universal/doc_id/Dockerfile.spark b/transforms/universal/doc_id/Dockerfile.spark
index 0a183d8d9..1af783438 100644
--- a/transforms/universal/doc_id/Dockerfile.spark
+++ b/transforms/universal/doc_id/Dockerfile.spark
@@ -10,15 +10,15 @@ ARG TRANSFORM_NAME

 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=spark:root data-processing-dist data-processing-dist
+COPY --chown=spark:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[spark]

 # Install project source

 ## Copy the python version of the tansform
-COPY --chown=spark:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=spark:root requirements.txt requirements.txt
+COPY --chown=spark:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=spark:users requirements.txt requirements.txt
 RUN pip install -r requirements.txt


diff --git a/transforms/universal/filter/Dockerfile.python b/transforms/universal/filter/Dockerfile.python
index 1bde08841..9f38097b7 100644
--- a/transforms/universal/filter/Dockerfile.python
+++ b/transforms/universal/filter/Dockerfile.python
@@ -14,13 +14,13 @@ ARG TRANSFORM_NAME

 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=dpk:root data-processing-dist data-processing-dist
+COPY --chown=dpk:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}

 # END OF STEPS destined for a data-prep-kit base image

-COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=dpk:root requirements.txt requirements.txt
+COPY --chown=dpk:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:users requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt

 # Set environment
diff --git a/transforms/universal/filter/Dockerfile.spark b/transforms/universal/filter/Dockerfile.spark
index 0a183d8d9..1af783438 100644
--- a/transforms/universal/filter/Dockerfile.spark
+++ b/transforms/universal/filter/Dockerfile.spark
@@ -10,15 +10,15 @@ ARG TRANSFORM_NAME

 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=spark:root data-processing-dist data-processing-dist
+COPY --chown=spark:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}[spark]

 # Install project source

 ## Copy the python version of the tansform
-COPY --chown=spark:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=spark:root requirements.txt requirements.txt
+COPY --chown=spark:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=spark:users requirements.txt requirements.txt
 RUN pip install -r requirements.txt


diff --git a/transforms/universal/hap/Dockerfile.python b/transforms/universal/hap/Dockerfile.python
index 1bde08841..9f38097b7 100644
--- a/transforms/universal/hap/Dockerfile.python
+++ b/transforms/universal/hap/Dockerfile.python
@@ -14,13 +14,13 @@ ARG TRANSFORM_NAME

 # Copy and install data processing libraries
 # These are expected to be placed in the docker context before this is run (see the make image).
-COPY --chown=dpk:root data-processing-dist data-processing-dist
+COPY --chown=dpk:users data-processing-dist data-processing-dist
 RUN pip install data-processing-dist/${DPK_WHEEL_FILE_NAME}

 # END OF STEPS destined for a data-prep-kit base image

-COPY --chown=dpk:root dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
-COPY --chown=dpk:root requirements.txt requirements.txt
+COPY --chown=dpk:users dpk_${TRANSFORM_NAME}/ dpk_${TRANSFORM_NAME}/
+COPY --chown=dpk:users requirements.txt requirements.txt
 RUN pip install --no-cache-dir -r requirements.txt

 # Set environment
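
Note on the ownership flags: COPY --chown=dpk:users (and spark:users) only works if the named user already exists and the users group is present in the base image; this patch does not create either. A minimal sketch of what such a base-image setup might look like, assuming a Debian-based Python image and a hypothetical dpk user (neither is taken from this patch):

FROM python:3.11-slim

# Create the non-root user as a member of the pre-existing "users" group
# (GID 100 on most distributions) so that files copied later with
# --chown=dpk:users are owned by dpk:users.
RUN useradd --create-home --gid users dpk

USER dpk
WORKDIR /home/dpk

With a setup along these lines, the COPY lines changed above resolve dpk:users (or spark:users) at build time instead of falling back to the root group.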