Merge branch 'IBM:dev' into new-branch
Signed-off-by: Pankaj Thorat <[email protected]>
pankajskku committed Dec 3, 2024
2 parents 72b4063 + ec89271 commit 83c015c
Showing 374 changed files with 7,018 additions and 2,867 deletions.
14 changes: 11 additions & 3 deletions .make.versions
@@ -16,10 +16,10 @@ DPK_MAJOR_VERSION=0
# The minor version is incremented manually when significant features have been added that are backward compatible with the previous major.minor release.
DPK_MINOR_VERSION=2
# The minor version is incremented AUTOMATICALLY by the release.sh script when a new release is set.
-DPK_MICRO_VERSION=2
+DPK_MICRO_VERSION=3
# The suffix is generally always set in the main/development branch and only nulled out when creating release branches.
# It can be manually incremented, for example, to allow publishing a new intermediate version wheel to pypi.
-DPK_VERSION_SUFFIX=.dev2
+DPK_VERSION_SUFFIX=.dev0

DPK_VERSION=$(DPK_MAJOR_VERSION).$(DPK_MINOR_VERSION).$(DPK_MICRO_VERSION)$(DPK_VERSION_SUFFIX)

@@ -39,7 +39,7 @@ DPK_LIB_KFP_SHARED=$(DPK_VERSION)
KFP_DOCKER_VERSION=$(DOCKER_IMAGE_VERSION)
KFP_DOCKER_VERSION_v2=$(DOCKER_IMAGE_VERSION)

-DPK_CONNECTOR_VERSION=0.2.3.dev0
+DPK_CONNECTOR_VERSION=0.2.4.dev0

################## ################## ################## ################## ################## ##################
# Begin versions that the repo depends on.
@@ -59,3 +59,11 @@ else
WORKFLOW_SUPPORT_LIB=kfp_v1_workflow_support
endif

################################################################################
# This defines the transforms' package version number as would be used
# when publishing the wheel. In general, only the micro version
# number should be advanced relative to the DPK_VERSION.
#
# If you change the versions numbers, be sure to run "make set-versions" to
# update version numbers across the transform (e.g., pyproject.toml).
TRANSFORMS_PKG_VERSION=0.2.3.dev0
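
With the values above, DPK_VERSION expands to 0.2.3.dev0 and TRANSFORMS_PKG_VERSION pins the transforms wheel at the same level. A minimal sketch of the propagation step the new comment refers to, assuming the set-versions target is invoked from the top of the repository:

    # After editing .make.versions (e.g., bumping DPK_MICRO_VERSION or
    # TRANSFORMS_PKG_VERSION), push the new numbers into the per-project
    # pyproject.toml and requirements.txt files.
    make set-versions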
2 changes: 1 addition & 1 deletion data-connector-lib/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "data_prep_connector"
version = "0.2.3.dev1"
version = "0.2.4.dev0"
requires-python = ">=3.10,<3.13"
keywords = [
"data",
3 changes: 1 addition & 2 deletions data-processing-lib/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "data_prep_toolkit"
version = "0.2.2.dev2"
version = "0.2.3.dev0"
keywords = ["data", "data preprocessing", "data preparation", "llm", "generative", "ai", "fine-tuning", "llmapps" ]
requires-python = ">=3.10,<3.13"
description = "Data Preparation Toolkit Library for Ray and Python"
@@ -16,7 +16,6 @@ dynamic = ["dependencies", "optional-dependencies"]
Repository = "https://github.com/IBM/data-prep-kit"
Issues = "https://github.com/IBM/data-prep-kit/issues"
Documentation = "https://ibm.github.io/data-prep-kit/doc"
"Transform project" = "https://github.com/IBM/data-prep-kit/tree/dev/transforms/universal/noop"

[build-system]
requires = ["setuptools>=68.0.0", "wheel", "setuptools_scm[toml]>=7.1.0"]
13 changes: 9 additions & 4 deletions data-processing-lib/spark/Makefile
@@ -11,9 +11,14 @@ setup::

set-versions: .check-env
$(MAKE) TOML_VERSION=$(DPK_LIB_VERSION) .defaults.update-toml
-sed -e 's/"pyspark...*",/"pyspark>=${SPARK_VERSION}",/' \
-pyproject.toml > tt.toml
-mv tt.toml pyproject.toml
+if [ -e pyproject.toml ]; then \
+cat pyproject.toml | sed -e 's/"spark[default]==.*",/"spark[default]==$(SPARK_VERSION)",/' > tt.toml; \
+mv tt.toml pyproject.toml; \
+fi
+if [ -e requirements.txt ]; then \
+cat requirements.txt | sed -e 's/ray[default]==.*/ray[default]==$(SPARK_VERSION)/' > tt.txt; \
+mv tt.txt requirements.txt; \
+fi

build:: build-dist

@@ -26,7 +31,7 @@ publish-dist :: .check-env .defaults.publish-dist

publish-image:: .defaults.publish-image

-venv:: pyproject.toml
+venv::
$(MAKE) .defaults.spark-lib-src-venv
pip install pytest pytest-cov

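
For reference, a sketch of how the updated target would typically be driven; it assumes SPARK_VERSION and DPK_LIB_VERSION are supplied by .make.versions through the repo's shared make includes, and that the target can be invoked directly from this directory:

    # Rewrites the spark pin in pyproject.toml (and requirements.txt, if present)
    # to the configured SPARK_VERSION, then updates the library's own version.
    make -C data-processing-lib/spark set-versions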
4 changes: 2 additions & 2 deletions kfp/kfp_support_lib/kfp_v1_workflow_support/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "data_prep_toolkit_kfp_v1"
version = "0.2.2.dev2"
version = "0.2.3.dev0"
requires-python = ">=3.10,<3.13"
description = "Data Preparation Kit Library. KFP support"
license = {text = "Apache-2.0"}
@@ -13,7 +13,7 @@ authors = [
]
dependencies = [
"kfp==1.8.22",
"data-prep-toolkit-kfp-shared==0.2.2.dev2",
"data-prep-toolkit-kfp-shared==0.2.3.dev0",
]

[build-system]
4 changes: 2 additions & 2 deletions kfp/kfp_support_lib/kfp_v2_workflow_support/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "data_prep_toolkit_kfp_v2"
-version = "0.2.2.dev2"
+version = "0.2.3.dev0"
requires-python = ">=3.10,<3.13"
description = "Data Preparation Kit Library. KFP support"
license = {text = "Apache-2.0"}
@@ -14,7 +14,7 @@ authors = [
dependencies = [
"kfp==2.8.0",
"kfp-kubernetes==1.2.0",
"data-prep-toolkit-kfp-shared==0.2.2.dev2",
"data-prep-toolkit-kfp-shared==0.2.3.dev0",
]

[build-system]
4 changes: 2 additions & 2 deletions kfp/kfp_support_lib/shared_workflow_support/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "data_prep_toolkit_kfp_shared"
-version = "0.2.2.dev2"
+version = "0.2.3.dev0"
requires-python = ">=3.10,<3.13"
description = "Data Preparation Kit Library. KFP support"
license = {text = "Apache-2.0"}
@@ -14,7 +14,7 @@ authors = [
dependencies = [
"requests",
"kubernetes",
"data-prep-toolkit[ray]==0.2.2.dev2",
"data-prep-toolkit[ray]==0.2.3.dev0",
]

[build-system]
37 changes: 37 additions & 0 deletions release-notes.md
@@ -1,5 +1,42 @@
# Data Prep Kit Release notes

## Release 0.2.2 - 11/25/2024

### General
1. Update RAG example to use granite model
1. Updated transforms with Docling 2
1. Added single package for dpk with extra for \[spark\] and \[ray\]
1. Added single package for transforms with extra for \[all\] or \[individual-transform-name\]


### data-prep-toolkit libraries (python, ray, spark)

1. Fix metadata logging even when actors crash
1. Add multilock for ray workers downloads/cleanup
1. Multiple updates to spark runtime
1. Added support for python 3.12
1. refactoring of data access code


### KFP Workloads

1. Modify superpipeline params type Str/json
1. Set kuberay apiserver version
1. Add Super pipeline for code transforms


### Transforms

1. Enhance pdf2parquet with docling2 support for extracting HTML, DOCS, etc.
1. Added web2parquet transform
1. Added HAP transform

### HTTP Connector 0.2.3

1. Enhanced parameter/configuration allows the user to customize crawler settings
1. implement subdomain focus feature in data-prep-connector


## Release 0.2.2- HTTP Connector Module - 10/23/2024

### General
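
To illustrate the single-package extras called out in the 0.2.2 notes above, installation would look roughly as follows; only data-prep-toolkit appears verbatim in this diff, and the transforms package name is an assumption:

    # Toolkit with the ray (or spark) runtime extra
    pip install "data-prep-toolkit[ray]==0.2.2"
    # Transforms meta-package with the all (or an individual transform) extra; name assumed
    pip install "data-prep-toolkit-transforms[all]"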
2 changes: 1 addition & 1 deletion scripts/check-workflows.sh
@@ -17,7 +17,7 @@ if [ ! -d transforms ]; then
echo Please run this script from the top of the repository
exit 1
fi
-KFP_BLACK_LIST="doc_chunk pdf2parquet pii_redactor text_encoder license_select repo_level_ordering header_cleanser"
+KFP_BLACK_LIST="doc_chunk pdf2parquet pii_redactor text_encoder license_select repo_level_ordering header_cleanser fdedup"
while [ $# -ne 0 ]; do
case $1 in
-show-kfp-black-list) echo $KFP_BLACK_LIST; exit 0;
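
Based on the case statement shown above, the updated blacklist (now including fdedup) can be printed without running the full workflow check:

    # Run from the top of the repository; the script exits early elsewhere
    bash scripts/check-workflows.sh -show-kfp-black-list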
2 changes: 1 addition & 1 deletion transforms/code/code2parquet/python/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "dpk_code2parquet_transform_python"
-version = "0.2.2.dev2"
+version = "0.2.3.dev0"
requires-python = ">=3.10,<3.13"
description = "code2parquet Python Transform"
license = {text = "Apache-2.0"}
2 changes: 1 addition & 1 deletion transforms/code/code2parquet/python/requirements.txt
@@ -1,3 +1,3 @@
-data-prep-toolkit==0.2.2.dev2
+data-prep-toolkit==0.2.3.dev0
parameterized
pandas
6 changes: 3 additions & 3 deletions transforms/code/code2parquet/ray/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "dpk_code2parquet_transform_ray"
-version = "0.2.2.dev2"
+version = "0.2.3.dev0"
requires-python = ">=3.10,<3.13"
description = "code2parquet Ray Transform"
license = {text = "Apache-2.0"}
@@ -10,8 +10,8 @@ authors = [
{ name = "Boris Lublinsky", email = "[email protected]" },
]
dependencies = [
"data-prep-toolkit[ray]==0.2.2.dev2",
"dpk-code2parquet-transform-python==0.2.2.dev2",
"data-prep-toolkit[ray]==0.2.3.dev0",
"dpk-code2parquet-transform-python==0.2.3.dev0",
"parameterized",
"pandas",
]
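
Because this release bumps the same 0.2.2.dev2 to 0.2.3.dev0 pin across many pyproject.toml and requirements.txt files, a quick consistency check after running make set-versions might look like the following sketch (assumes GNU grep):

    # Any hit points to a file that still carries the old dev version
    grep -rn "0.2.2.dev2" --include=pyproject.toml --include=requirements.txt .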
8 changes: 8 additions & 0 deletions transforms/code/code_profiler/README.md
@@ -62,4 +62,12 @@ For each new target language, the offline phase is utilized to create determinis

In the online phase, the system dynamically generates profiling outputs for any incoming code snippets. This is achieved by extracting concepts from the snippets using the rules in the database and storing these extractions in a tabular format. The structured tabular format allows for generating additional concept columns, which are then utilized to create comprehensive profiling reports.

The following runtimes are available:
* [python](python/README.md) - provides the base python-based transformation
implementation and python runtime.
* [ray](ray/README.md) - enables the running of the base python transformation
in a Ray runtime

Please refer to the playbook at `transforms/code/code_profiler/notebook_example/code-profiler.ipynb` to run the pythonic code profiler
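
One way to open that playbook, assuming Jupyter is installed in the active virtual environment:

    pip install jupyter
    jupyter notebook transforms/code/code_profiler/notebook_example/code-profiler.ipynb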


