diff --git a/technologies/job/spark/metadata.yaml b/technologies/job/spark/metadata.yaml
index 90b38d8da..06be5f28a 100644
--- a/technologies/job/spark/metadata.yaml
+++ b/technologies/job/spark/metadata.yaml
@@ -81,7 +81,7 @@ contexts:
             dockerInfo:
               image: "saagie/spark"
               baseTag: "2.4-py-3.7"
-              version: "2.4-py-3.7-1.170.0"
+              version: "2.4-py-3.7-1.194.0_sparkWithoutMinio"
   - id: "3.0"
     label: "3.0"
     available: true
@@ -150,7 +150,7 @@ contexts:
             dockerInfo:
               image: "saagie/spark"
               baseTag: "3.0-py-3.7"
-              version: "3.0-py-3.7-1.139.0"
+              version: "3.0-py-3.7-1.194.0_sparkWithoutMinio"
   - id: "3.1"
     label: "3.1"
     available: true
@@ -221,7 +221,7 @@ contexts:
             dockerInfo:
               image: "saagie/spark"
               baseTag: "3.1-py-3.7"
-              version: "3.1-py-3.7-1.139.0"
+              version: "3.1-py-3.7-1.194.0_sparkWithoutMinio"
           - id: "3.8"
             label: "3.8"
             available: true
@@ -230,7 +230,7 @@ contexts:
             dockerInfo:
               image: "saagie/spark"
               baseTag: "3.1-py-3.8"
-              version: "3.1-py-3.8-1.139.0"
+              version: "3.1-py-3.8-1.194.0_sparkWithoutMinio"
           - id: "3.9"
             label: "3.9"
             available: true
@@ -239,7 +239,7 @@ contexts:
             dockerInfo:
               image: "saagie/spark"
               baseTag: "3.1-py-3.9"
-              version: "3.1-py-3.9-1.139.0"
+              version: "3.1-py-3.9-1.194.0_sparkWithoutMinio"
   - id: "3.5"
     label: "3.5"
     available: true
@@ -314,7 +314,7 @@ contexts:
             dockerInfo:
               image: "saagie/spark"
               baseTag: "3.5-py-3.12"
-              version: "3.5-py-3.12-1.190.0"
+              version: "3.5-py-3.12-1.194.0_sparkWithoutMinio"
   - id: 3.1-aws
     label: 3.1 AWS
     available: true
@@ -385,7 +385,7 @@ contexts:
             dockerInfo:
               image: "saagie/spark"
               baseTag: "3.1-aws-py-3.7"
-              version: "3.1-aws-py-3.7-1.139.0"
+              version: "3.1-aws-py-3.7-1.194.0_sparkWithoutMinio"
           - id: "3.8"
             label: "3.8"
             available: true
@@ -394,7 +394,7 @@ contexts:
             dockerInfo:
               image: "saagie/spark"
               baseTag: "3.1-aws-py-3.8"
-              version: "3.1-aws-py-3.8-1.139.0"
+              version: "3.1-aws-py-3.8-1.194.0_sparkWithoutMinio"
           - id: "3.9"
             label: "3.9"
             available: true
@@ -403,4 +403,4 @@ contexts:
             dockerInfo:
               image: "saagie/spark"
               baseTag: "3.1-aws-py-3.9"
-              version: "3.1-aws-py-3.9-1.139.0"
\ No newline at end of file
+              version: "3.1-aws-py-3.9-1.194.0_sparkWithoutMinio"
\ No newline at end of file
diff --git a/technologies/job/spark/spark-2.4/innerContexts/python/entrypoint.sh b/technologies/job/spark/spark-2.4/innerContexts/python/entrypoint.sh
index c3f75c118..5cd5a58e7 100755
--- a/technologies/job/spark/spark-2.4/innerContexts/python/entrypoint.sh
+++ b/technologies/job/spark/spark-2.4/innerContexts/python/entrypoint.sh
@@ -44,40 +44,19 @@
 if [ -n "$SPARK_EXTRA_CLASSPATH" ]; then
   SPARK_CLASSPATH="$SPARK_CLASSPATH:$SPARK_EXTRA_CLASSPATH"
 fi
+# BEGIN SAAGIE SPECIFIC CODE
 cd /sandbox
-# parse content and if pyfiles extract minio url and inject it
-if [ -f main_script ] && grep -q "\--py-files" main_script;
+if [ -f *.zip ]
 then
-    PYSPARK_FILES="`grep -Po '.*--py-files=\K[^ ]+' main_script`"
-fi;
-
-if [ -n "$PYSPARK_FILES" ]; then
-    PYTHONPATH="$PYTHONPATH:$PYSPARK_FILES"
-    #Copy and unzip pyfiles
-    if [[ $PYSPARK_FILES == *[,]* ]];then
-        echo "PYSPARK_FILES contains comma"
-        pyfiles=$(echo $PYSPARK_FILES | tr "," "\n")
-
-        for file in $pyfiles
-        do
-          echo ">>> [$file]"
-          wget -nv $file
-        done
-    else
-        echo ">>> [$PYSPARK_FILES]"
-        wget -nv $PYSPARK_FILES
-    fi
-    if [ -f *.zip ]
-    then
-        unzip -q *.zip
-    fi
-    if [ -f "requirements.txt" ]
-    then
-        pip install -r requirements.txt
-    fi
-    rm -Rf /opt/spark/work-dir
-    ln -s /sandbox/ /opt/spark/work-dir
+    unzip -q *.zip
+fi
+if [ -f "requirements.txt" ]
+then
+    pip install -r requirements.txt
 fi
+rm -Rf /opt/spark/work-dir
+ln -s /sandbox/ /opt/spark/work-dir
+# END SAAGIE SPECIFIC CODE
 
 PYSPARK_ARGS=""
 if [ -n "$PYSPARK_APP_ARGS" ]; then
diff --git a/technologies/job/spark/spark-2.4/innerContexts/python/spark-2.4-python-3.7/build.me b/technologies/job/spark/spark-2.4/innerContexts/python/spark-2.4-python-3.7/build.me
index 97e80da08..7fabe2b1b 100644
--- a/technologies/job/spark/spark-2.4/innerContexts/python/spark-2.4-python-3.7/build.me
+++ b/technologies/job/spark/spark-2.4/innerContexts/python/spark-2.4-python-3.7/build.me
@@ -1 +1 @@
-12/10/2023
\ No newline at end of file
+28/03/2025
\ No newline at end of file
diff --git a/technologies/job/spark/spark-2.4/innerContexts/python/spark-2.4-python-3.7/dockerInfo.yaml b/technologies/job/spark/spark-2.4/innerContexts/python/spark-2.4-python-3.7/dockerInfo.yaml
index ac49d306d..4a0f5836f 100644
--- a/technologies/job/spark/spark-2.4/innerContexts/python/spark-2.4-python-3.7/dockerInfo.yaml
+++ b/technologies/job/spark/spark-2.4/innerContexts/python/spark-2.4-python-3.7/dockerInfo.yaml
@@ -1,4 +1,4 @@
 image: saagie/spark
 baseTag: 2.4-py-3.7
-dynamicVersion: 1.170.0_SDKTECHNO-246
-version: 2.4-py-3.7-1.170.0
+dynamicVersion: 1.194.0_sparkWithoutMinio
+version: 2.4-py-3.7-1.194.0_sparkWithoutMinio
diff --git a/technologies/job/spark/spark-2.4/innerContexts/python/spark-2.4-python-3.7/image_test.yaml b/technologies/job/spark/spark-2.4/innerContexts/python/spark-2.4-python-3.7/image_test.yaml
index cebaa6a2d..008cb825a 100644
--- a/technologies/job/spark/spark-2.4/innerContexts/python/spark-2.4-python-3.7/image_test.yaml
+++ b/technologies/job/spark/spark-2.4/innerContexts/python/spark-2.4-python-3.7/image_test.yaml
@@ -28,7 +28,6 @@ fileContentTests:
     expectedContents:
       [
         'exec /sbin/tini -s -- ',
-        'wget -nv \$PYSPARK_FILES',
         'unzip -q \*\.zip',
         'ln -s /sandbox/ /opt/spark/work-dir',
         '--py-files=/sandbox/\*'
diff --git a/technologies/job/spark/spark-3.0/innerContexts/python/entrypoint.sh b/technologies/job/spark/spark-3.0/innerContexts/python/entrypoint.sh
index 72183de3a..79e467dae 100644
--- a/technologies/job/spark/spark-3.0/innerContexts/python/entrypoint.sh
+++ b/technologies/job/spark/spark-3.0/innerContexts/python/entrypoint.sh
@@ -38,39 +38,16 @@
 fi
 
 # BEGIN SAAGIE SPECIFIC CODE
 cd /sandbox
-# parse content and if pyfiles extract minio url and inject it
-if [ -f main_script ] && grep -q "\--py-files" main_script;
+if [ -f *.zip ]
 then
-    PYSPARK_FILES="`grep -Po '.*--py-files=\K[^ ]+' main_script`"
-fi;
-
-if [ -n "$PYSPARK_FILES" ]; then
-    PYTHONPATH="$PYTHONPATH:$PYSPARK_FILES"
-    #Copy and unzip pyfiles
-    if [[ $PYSPARK_FILES == *[,]* ]];then
-        echo "PYSPARK_FILES contains comma"
-        pyfiles=$(echo $PYSPARK_FILES | tr "," "\n")
-
-        for file in $pyfiles
-        do
-          echo ">>> [$file]"
-          wget -nv $file
-        done
-    else
-        echo ">>> [$PYSPARK_FILES]"
-        wget -nv $PYSPARK_FILES
-    fi
-    if [ -f *.zip ]
-    then
-        unzip -q *.zip
-    fi
-    if [ -f "requirements.txt" ]
-    then
-        pip install -r requirements.txt
-    fi
-    rm -Rf /opt/spark/work-dir
-    ln -s /sandbox/ /opt/spark/work-dir
+    unzip -q *.zip
+fi
+if [ -f "requirements.txt" ]
+then
+    pip install -r requirements.txt
 fi
+rm -Rf /opt/spark/work-dir
+ln -s /sandbox/ /opt/spark/work-dir
 
 # END SAAGIE SPECIFIC CODE
diff --git a/technologies/job/spark/spark-3.0/innerContexts/python/spark-3.0-python-3.7/build.me b/technologies/job/spark/spark-3.0/innerContexts/python/spark-3.0-python-3.7/build.me
index 5510227f4..7fabe2b1b 100644
--- a/technologies/job/spark/spark-3.0/innerContexts/python/spark-3.0-python-3.7/build.me
+++ b/technologies/job/spark/spark-3.0/innerContexts/python/spark-3.0-python-3.7/build.me
@@ -1 +1 @@
-04/11/2021
+28/03/2025
\ No newline at end of file
diff --git a/technologies/job/spark/spark-3.0/innerContexts/python/spark-3.0-python-3.7/dockerInfo.yaml b/technologies/job/spark/spark-3.0/innerContexts/python/spark-3.0-python-3.7/dockerInfo.yaml
index 6d7766aaf..b9eab54a3 100644
--- a/technologies/job/spark/spark-3.0/innerContexts/python/spark-3.0-python-3.7/dockerInfo.yaml
+++ b/technologies/job/spark/spark-3.0/innerContexts/python/spark-3.0-python-3.7/dockerInfo.yaml
@@ -1,4 +1,4 @@
 image: saagie/spark
 baseTag: 3.0-py-3.7
-dynamicVersion: 1.139.0_SDKTECHNO-207
-version: 3.0-py-3.7-1.139.0
+dynamicVersion: 1.194.0_sparkWithoutMinio
+version: 3.0-py-3.7-1.194.0_sparkWithoutMinio
diff --git a/technologies/job/spark/spark-3.0/innerContexts/python/spark-3.0-python-3.7/image_test.yaml b/technologies/job/spark/spark-3.0/innerContexts/python/spark-3.0-python-3.7/image_test.yaml
index 5923cfdd5..0ba07f272 100644
--- a/technologies/job/spark/spark-3.0/innerContexts/python/spark-3.0-python-3.7/image_test.yaml
+++ b/technologies/job/spark/spark-3.0/innerContexts/python/spark-3.0-python-3.7/image_test.yaml
@@ -27,7 +27,6 @@ fileContentTests:
     path: "/opt/entrypoint.sh"
     expectedContents:
      [
-        'wget -nv \$PYSPARK_FILES',
        'unzip -q \*\.zip',
        'ln -s /sandbox/ /opt/spark/work-dir',
        '--py-files=/sandbox/\*',
diff --git a/technologies/job/spark/spark-3.1/innerContexts/python/entrypoint.sh b/technologies/job/spark/spark-3.1/innerContexts/python/entrypoint.sh
index 623d142cb..4f74464e5 100644
--- a/technologies/job/spark/spark-3.1/innerContexts/python/entrypoint.sh
+++ b/technologies/job/spark/spark-3.1/innerContexts/python/entrypoint.sh
@@ -38,39 +38,16 @@
 fi
 
 # BEGIN SAAGIE SPECIFIC CODE
 cd /sandbox
-# parse content and if pyfiles extract minio url and inject it
-if [ -f main_script ] && grep -q "\--py-files" main_script;
+if [ -f *.zip ]
 then
-    PYSPARK_FILES="`grep -Po '.*--py-files=\K[^ ]+' main_script`"
-fi;
-
-if [ -n "$PYSPARK_FILES" ]; then
-    PYTHONPATH="$PYTHONPATH:$PYSPARK_FILES"
-    #Copy and unzip pyfiles
-    if [[ $PYSPARK_FILES == *[,]* ]];then
-        echo "PYSPARK_FILES contains comma"
-        pyfiles=$(echo $PYSPARK_FILES | tr "," "\n")
-
-        for file in $pyfiles
-        do
-          echo ">>> [$file]"
-          wget -nv $file
-        done
-    else
-        echo ">>> [$PYSPARK_FILES]"
-        wget -nv $PYSPARK_FILES
-    fi
-    if [ -f *.zip ]
-    then
-        unzip -q *.zip
-    fi
-    if [ -f "requirements.txt" ]
-    then
-        pip install -r requirements.txt
-    fi
-    rm -Rf /opt/spark/work-dir
-    ln -s /sandbox/ /opt/spark/work-dir
+    unzip -q *.zip
+fi
+if [ -f "requirements.txt" ]
+then
+    pip install -r requirements.txt
 fi
+rm -Rf /opt/spark/work-dir
+ln -s /sandbox/ /opt/spark/work-dir
 
 # END SAAGIE SPECIFIC CODE
diff --git a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.7/build.me b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.7/build.me
index d29294db6..7fabe2b1b 100644
--- a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.7/build.me
+++ b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.7/build.me
@@ -1 +1 @@
-12/12/2022
+28/03/2025
\ No newline at end of file
diff --git a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.7/dockerInfo.yaml b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.7/dockerInfo.yaml
index af403efd8..77d609bc2 100644
--- a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.7/dockerInfo.yaml
+++ b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.7/dockerInfo.yaml
@@ -1,4 +1,4 @@
 image: saagie/spark
 baseTag: 3.1-py-3.7
-dynamicVersion: 1.139.0_SDKTECHNO-207
-version: 3.1-py-3.7-1.139.0
+dynamicVersion: 1.194.0_sparkWithoutMinio
+version: 3.1-py-3.7-1.194.0_sparkWithoutMinio
diff --git a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.7/image_test.yaml b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.7/image_test.yaml
index 5b2cc84a6..019085087 100644
--- a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.7/image_test.yaml
+++ b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.7/image_test.yaml
@@ -27,7 +27,6 @@ fileContentTests:
     path: "/opt/entrypoint.sh"
     expectedContents:
      [
-        'wget -nv \$PYSPARK_FILES',
        'unzip -q \*\.zip',
        'ln -s /sandbox/ /opt/spark/work-dir',
        '--py-files=/sandbox/\*',
diff --git a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.8/build.me b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.8/build.me
index 5510227f4..7fabe2b1b 100644
--- a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.8/build.me
+++ b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.8/build.me
@@ -1 +1 @@
-04/11/2021
+28/03/2025
\ No newline at end of file
diff --git a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.8/dockerInfo.yaml b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.8/dockerInfo.yaml
index a05508753..958e20831 100644
--- a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.8/dockerInfo.yaml
+++ b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.8/dockerInfo.yaml
@@ -1,4 +1,4 @@
 image: saagie/spark
 baseTag: 3.1-py-3.8
-dynamicVersion: 1.139.0_SDKTECHNO-207
-version: 3.1-py-3.8-1.139.0
+dynamicVersion: 1.194.0_sparkWithoutMinio
+version: 3.1-py-3.8-1.194.0_sparkWithoutMinio
diff --git a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.8/image_test.yaml b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.8/image_test.yaml
index 5b2cc84a6..019085087 100644
--- a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.8/image_test.yaml
+++ b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.8/image_test.yaml
@@ -27,7 +27,6 @@ fileContentTests:
     path: "/opt/entrypoint.sh"
     expectedContents:
      [
-        'wget -nv \$PYSPARK_FILES',
        'unzip -q \*\.zip',
        'ln -s /sandbox/ /opt/spark/work-dir',
        '--py-files=/sandbox/\*',
diff --git a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.9/build.me b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.9/build.me
index 5510227f4..7fabe2b1b 100644
--- a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.9/build.me
+++ b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.9/build.me
@@ -1 +1 @@
-04/11/2021
+28/03/2025
\ No newline at end of file
diff --git a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.9/dockerInfo.yaml b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.9/dockerInfo.yaml
index cce81cdea..88467dd20 100644
--- a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.9/dockerInfo.yaml
+++ b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.9/dockerInfo.yaml
@@ -1,4 +1,4 @@
 image: saagie/spark
 baseTag: 3.1-py-3.9
-dynamicVersion: 1.139.0_SDKTECHNO-207
-version: 3.1-py-3.9-1.139.0
+dynamicVersion: 1.194.0_sparkWithoutMinio
+version: 3.1-py-3.9-1.194.0_sparkWithoutMinio
diff --git a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.9/image_test.yaml b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.9/image_test.yaml
index 5b2cc84a6..019085087 100644
--- a/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.9/image_test.yaml
+++ b/technologies/job/spark/spark-3.1/innerContexts/python/spark-3.1-python-3.9/image_test.yaml
@@ -27,7 +27,6 @@ fileContentTests:
     path: "/opt/entrypoint.sh"
     expectedContents:
      [
-        'wget -nv \$PYSPARK_FILES',
        'unzip -q \*\.zip',
        'ln -s /sandbox/ /opt/spark/work-dir',
        '--py-files=/sandbox/\*',
diff --git a/technologies/job/spark/spark-3.5/innerContexts/python/entrypoint.sh b/technologies/job/spark/spark-3.5/innerContexts/python/entrypoint.sh
index ccfbfbfbf..15bf4d83f 100755
--- a/technologies/job/spark/spark-3.5/innerContexts/python/entrypoint.sh
+++ b/technologies/job/spark/spark-3.5/innerContexts/python/entrypoint.sh
@@ -18,6 +18,7 @@
 #
 # Prevent any errors from being silently ignored
 set -eo pipefail
+set -x
 
 attempt_setup_fake_passwd_entry() {
   # Check whether there is a passwd entry for the container UID
@@ -48,39 +49,16 @@
 fi
 
 # BEGIN SAAGIE SPECIFIC CODE
 cd /sandbox
-# parse content and if pyfiles extract minio url and inject it
-if [ -f main_script ] && grep -q "\--py-files" main_script;
+if [ -f *.zip ]
 then
-    PYSPARK_FILES="`grep -Po '.*--py-files=\K[^ ]+' main_script`"
-fi;
-
-if [ -n "$PYSPARK_FILES" ]; then
-    PYTHONPATH="$PYTHONPATH:$PYSPARK_FILES"
-    #Copy and unzip pyfiles
-    if [[ $PYSPARK_FILES == *[,]* ]];then
-        echo "PYSPARK_FILES contains comma"
-        pyfiles=$(echo $PYSPARK_FILES | tr "," "\n")
-
-        for file in $pyfiles
-        do
-          echo ">>> [$file]"
-          wget -nv $file
-        done
-    else
-        echo ">>> [$PYSPARK_FILES]"
-        wget -nv $PYSPARK_FILES
-    fi
-    if [ -f *.zip ]
-    then
-        unzip -q *.zip
-    fi
-    if [ -f "requirements.txt" ]
-    then
-        pip install -r requirements.txt
-    fi
-    rm -Rf /opt/spark/work-dir
-    ln -s /sandbox/ /opt/spark/work-dir
+    unzip -q *.zip
+fi
+if [ -f "requirements.txt" ]
+then
+    pip install -r requirements.txt
 fi
+rm -Rf /opt/spark/work-dir
+ln -s /sandbox/ /opt/spark/work-dir
 # END SAAGIE SPECIFIC CODE
 
 SPARK_CLASSPATH="$SPARK_CLASSPATH:${SPARK_HOME}/jars/*"
diff --git a/technologies/job/spark/spark-3.5/innerContexts/python/spark-3.5-python-3.12/build.me b/technologies/job/spark/spark-3.5/innerContexts/python/spark-3.5-python-3.12/build.me
index e69de29bb..f32a5804e 100644
--- a/technologies/job/spark/spark-3.5/innerContexts/python/spark-3.5-python-3.12/build.me
+++ b/technologies/job/spark/spark-3.5/innerContexts/python/spark-3.5-python-3.12/build.me
@@ -0,0 +1 @@
+true
\ No newline at end of file
diff --git a/technologies/job/spark/spark-3.5/innerContexts/python/spark-3.5-python-3.12/dockerInfo.yaml b/technologies/job/spark/spark-3.5/innerContexts/python/spark-3.5-python-3.12/dockerInfo.yaml
index 4300b238b..b59f745cb 100644
--- a/technologies/job/spark/spark-3.5/innerContexts/python/spark-3.5-python-3.12/dockerInfo.yaml
+++ b/technologies/job/spark/spark-3.5/innerContexts/python/spark-3.5-python-3.12/dockerInfo.yaml
@@ -1,4 +1,4 @@
 image: saagie/spark
 baseTag: 3.5-py-3.12
-dynamicVersion: 1.190.0_spark-3.5
-version: 3.5-py-3.12-1.190.0
+dynamicVersion: 1.194.0_sparkWithoutMinio
+version: 3.5-py-3.12-1.194.0_sparkWithoutMinio
diff --git a/technologies/job/spark/spark-aws-3.1/innerContexts/python/entrypoint.sh b/technologies/job/spark/spark-aws-3.1/innerContexts/python/entrypoint.sh
index 623d142cb..4f74464e5 100644
--- a/technologies/job/spark/spark-aws-3.1/innerContexts/python/entrypoint.sh
+++ b/technologies/job/spark/spark-aws-3.1/innerContexts/python/entrypoint.sh
@@ -38,39 +38,16 @@
 fi
 
 # BEGIN SAAGIE SPECIFIC CODE
 cd /sandbox
-# parse content and if pyfiles extract minio url and inject it
-if [ -f main_script ] && grep -q "\--py-files" main_script;
+if [ -f *.zip ]
 then
-    PYSPARK_FILES="`grep -Po '.*--py-files=\K[^ ]+' main_script`"
-fi;
-
-if [ -n "$PYSPARK_FILES" ]; then
-    PYTHONPATH="$PYTHONPATH:$PYSPARK_FILES"
-    #Copy and unzip pyfiles
-    if [[ $PYSPARK_FILES == *[,]* ]];then
-        echo "PYSPARK_FILES contains comma"
-        pyfiles=$(echo $PYSPARK_FILES | tr "," "\n")
-
-        for file in $pyfiles
-        do
-          echo ">>> [$file]"
-          wget -nv $file
-        done
-    else
-        echo ">>> [$PYSPARK_FILES]"
-        wget -nv $PYSPARK_FILES
-    fi
-    if [ -f *.zip ]
-    then
-        unzip -q *.zip
-    fi
-    if [ -f "requirements.txt" ]
-    then
-        pip install -r requirements.txt
-    fi
-    rm -Rf /opt/spark/work-dir
-    ln -s /sandbox/ /opt/spark/work-dir
+    unzip -q *.zip
+fi
+if [ -f "requirements.txt" ]
+then
+    pip install -r requirements.txt
 fi
+rm -Rf /opt/spark/work-dir
+ln -s /sandbox/ /opt/spark/work-dir
 
 # END SAAGIE SPECIFIC CODE
diff --git a/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.7/dockerInfo.yaml b/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.7/dockerInfo.yaml
index 927a8e06a..92cb1f1b1 100644
--- a/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.7/dockerInfo.yaml
+++ b/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.7/dockerInfo.yaml
@@ -1,4 +1,4 @@
 image: saagie/spark
 baseTag: 3.1-aws-py-3.7
-dynamicVersion: 1.139.0_SDKTECHNO-207
-version: 3.1-aws-py-3.7-1.139.0
+dynamicVersion: 1.194.0_sparkWithoutMinio
+version: 3.1-aws-py-3.7-1.194.0_sparkWithoutMinio
diff --git a/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.7/image_test.yaml b/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.7/image_test.yaml
index 6accbed38..ed87afeb8 100644
--- a/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.7/image_test.yaml
+++ b/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.7/image_test.yaml
@@ -22,7 +22,6 @@ fileContentTests:
     path: "/opt/entrypoint.sh"
     expectedContents:
      [
-        'wget -nv \$PYSPARK_FILES',
        'unzip -q \*\.zip',
        'ln -s /sandbox/ /opt/spark/work-dir',
        '--py-files=/sandbox/\*',
diff --git a/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.8/dockerInfo.yaml b/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.8/dockerInfo.yaml
index cace4bd68..bb6334055 100644
--- a/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.8/dockerInfo.yaml
+++ b/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.8/dockerInfo.yaml
@@ -1,4 +1,4 @@
 image: saagie/spark
 baseTag: 3.1-aws-py-3.8
-dynamicVersion: 1.139.0_SDKTECHNO-207
-version: 3.1-aws-py-3.8-1.139.0
+dynamicVersion: 1.194.0_sparkWithoutMinio
+version: 3.1-aws-py-3.8-1.194.0_sparkWithoutMinio
diff --git a/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.8/image_test.yaml b/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.8/image_test.yaml
index 6accbed38..ed87afeb8 100644
--- a/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.8/image_test.yaml
+++ b/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.8/image_test.yaml
@@ -22,7 +22,6 @@ fileContentTests:
     path: "/opt/entrypoint.sh"
     expectedContents:
      [
-        'wget -nv \$PYSPARK_FILES',
        'unzip -q \*\.zip',
        'ln -s /sandbox/ /opt/spark/work-dir',
        '--py-files=/sandbox/\*',
diff --git a/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.9/dockerInfo.yaml b/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.9/dockerInfo.yaml
index 40a096cfc..27e1c00fd 100644
--- a/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.9/dockerInfo.yaml
+++ b/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.9/dockerInfo.yaml
@@ -1,4 +1,4 @@
 image: saagie/spark
 baseTag: 3.1-aws-py-3.9
-dynamicVersion: 1.139.0_SDKTECHNO-207
-version: 3.1-aws-py-3.9-1.139.0
+dynamicVersion: 1.194.0_sparkWithoutMinio
+version: 3.1-aws-py-3.9-1.194.0_sparkWithoutMinio
diff --git a/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.9/image_test.yaml b/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.9/image_test.yaml
index 6accbed38..ed87afeb8 100644
--- a/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.9/image_test.yaml
+++ b/technologies/job/spark/spark-aws-3.1/innerContexts/python/spark-aws-3.1-python-3.9/image_test.yaml
@@ -22,7 +22,6 @@ fileContentTests:
     path: "/opt/entrypoint.sh"
     expectedContents:
      [
-        'wget -nv \$PYSPARK_FILES',
        'unzip -q \*\.zip',
        'ln -s /sandbox/ /opt/spark/work-dir',
        '--py-files=/sandbox/\*',
diff --git a/version.properties b/version.properties
index 31786417f..44de68a7a 100644
--- a/version.properties
+++ b/version.properties
@@ -1,6 +1,6 @@
-version.buildmeta=
+version.buildmeta=sparkWithoutMinio
 version.major=1
 version.minor=194
 version.patch=0
 version.prerelease=
-version.semver=1.194.0
+version.semver=1.194.0+sparkWithoutMinio
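
Note on the simplified entrypoint block: the retained '[ -f *.zip ]' test only behaves as intended when the glob matches at most one archive. If several zip files land in /sandbox, the glob expands to multiple words, the test fails with "binary operator expected", and the unzip step is skipped. A glob loop is one defensive alternative; the sketch below is illustrative only (it assumes the /sandbox layout used by the entrypoints above) and is not part of this patch:

    # Unpack every archive present instead of assuming a single one.
    for archive in /sandbox/*.zip; do
        [ -e "$archive" ] || continue   # glob matched nothing; skip
        unzip -q "$archive" -d /sandbox
    done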