Merge pull request #357 from IBM/releasing
Remove requirements.txt from filter and doc_id spark transforms
daw3rd authored Jun 28, 2024
2 parents 220bdcb + 75f45f1 commit be02afc
Showing 6 changed files with 96 additions and 10 deletions.
8 changes: 5 additions & 3 deletions transforms/universal/doc_id/spark/Dockerfile
@@ -15,10 +15,12 @@ RUN cd data-processing-lib-python && pip install --no-cache-dir -e .
 COPY --chown=spark:root data-processing-lib-spark/ data-processing-lib-spark/
 RUN cd data-processing-lib-spark && pip install --no-cache-dir -e .
 
-COPY requirements.txt requirements.txt
-RUN pip install --no-cache-dir -r requirements.txt
+# Install project source
+COPY --chown=spark:root src/ src/
+COPY --chown=spark:root pyproject.toml pyproject.toml
+RUN pip install --no-cache-dir -e .
 
-# copy source data
+# copy source main
 COPY ./src/doc_id_transform_spark.py .
 COPY ./src/doc_id_local.py local/
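
For readers skimming the hunk above: the removed and added steps are two styles of the same pip install. A minimal shell sketch of the difference (commands taken from the diff; nothing here goes beyond documented pip behavior):

# Old style: install only the pinned third-party dependencies
pip install --no-cache-dir -r requirements.txt

# New style: one editable install; pip reads the dependency list from
# [project].dependencies in pyproject.toml and installs the transform's
# own src/ package alongside it
pip install --no-cache-dir -e .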
45 changes: 45 additions & 0 deletions transforms/universal/doc_id/spark/pyproject.toml
@@ -0,0 +1,45 @@
[project]
name = "dpk_doc_id_transform_spark"
version = "0.2.1.dev0"
requires-python = ">=3.10"
description = "Doc ID Spark Transform"
license = {text = "Apache-2.0"}
readme = {file = "README.md", content-type = "text/markdown"}
authors = [
    { name = "Constantin Adam", email = "[email protected]" },
    { name = "Boris Lublinsky", email = "[email protected]" },
]
dependencies = [
    "data-prep-toolkit-spark==0.2.1.dev0",
]

[build-system]
requires = ["setuptools>=68.0.0", "wheel", "setuptools_scm[toml]>=7.1.0"]
build-backend = "setuptools.build_meta"

[project.optional-dependencies]
dev = [
    "twine",
    "pytest>=7.3.2",
    "pytest-dotenv>=0.5.2",
    "pytest-env>=1.0.0",
    "pre-commit>=3.3.2",
    "pytest-cov>=4.1.0",
    "pytest-mock>=3.10.0",
    "moto==5.0.5",
    "markupsafe==2.0.1",
]

[options]
package_dir = ["src","test"]

[options.packages.find]
where = ["src/"]

[tool.pytest.ini_options]
# Currently we use low coverage since we have to run tests separately (see makefile)
#addopts = "--cov --cov-report term-missing --cov-fail-under 25"
markers = ["unit: unit tests", "integration: integration tests"]

[tool.coverage.run]
include = ["src/*"]
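
Note that the dev table is an optional extra, so the Dockerfile's plain editable install does not pull it in. A hedged sketch of a local test setup (the repo's make targets may wrap these steps differently):

# Editable install plus the [project.optional-dependencies] dev extras
# (pytest, moto, pre-commit, ...)
pip install -e ".[dev]"

# Run only the tests tagged with a marker declared under
# [tool.pytest.ini_options]
pytest -m unit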
2 changes: 0 additions & 2 deletions transforms/universal/doc_id/spark/requirements.txt

This file was deleted.

9 changes: 6 additions & 3 deletions transforms/universal/filter/spark/Dockerfile
@@ -14,10 +14,13 @@ RUN cd data-processing-lib-python && pip install --no-cache-dir -e .
 COPY --chown=spark:root data-processing-lib-spark/ data-processing-lib-spark/
 RUN cd data-processing-lib-spark && pip install --no-cache-dir -e .
-
-COPY requirements.txt requirements.txt
-RUN pip install --no-cache-dir -r requirements.txt
+# Install project source
+COPY --chown=spark:root src/ src/
+COPY --chown=spark:root pyproject.toml pyproject.toml
+RUN pip install --no-cache-dir -e .
+
+# copy source main
 
 # copy source data
 COPY ./src/filter_transform_spark.py .
 COPY ./src/filter_local.py local/
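
With requirements.txt gone, the install step of the image build needs only src/ and pyproject.toml. A hypothetical local build command (the image tag and context path are assumptions, not taken from the repository's Makefiles):

# Build the filter transform image from the repository root
docker build -t dpk-filter-transform-spark:dev transforms/universal/filter/spark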
40 changes: 40 additions & 0 deletions transforms/universal/filter/spark/pyproject.toml
@@ -0,0 +1,40 @@
[project]
name = "dpk_filter_transform_spark"
version = "0.2.1.dev0"
requires-python = ">=3.10"
description = "Filter Spark Transform"
license = {text = "Apache-2.0"}
readme = {file = "README.md", content-type = "text/markdown"}
authors = [
    { name = "Constantin Adam", email = "[email protected]" },
]
dependencies = [
    "data-prep-toolkit-spark==0.2.1.dev0",
]

[project.optional-dependencies]
dev = [
    "twine",
    "pytest>=7.3.2",
    "pytest-dotenv>=0.5.2",
    "pytest-env>=1.0.0",
    "pre-commit>=3.3.2",
    "pytest-cov>=4.1.0",
    "pytest-mock>=3.10.0",
    "moto==5.0.5",
    "markupsafe==2.0.1",
]

[options]
package_dir = ["src","test"]

[options.packages.find]
where = ["src/"]

[tool.pytest.ini_options]
# Currently we use low coverage since we have to run tests separately (see makefile)
#addopts = "--cov --cov-report term-missing --cov-fail-under 25"
markers = ["unit: unit tests", "integration: integration tests"]

[tool.coverage.run]
include = ["src/*"]
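
The commented-out addopts line records how the coverage gate would be re-enabled. Invoked by hand, the equivalent run would look roughly like this (a sketch, assuming pytest-cov from the dev extras is installed):

# Fail the test run if line coverage of src/ drops below 25%
pytest --cov --cov-report term-missing --cov-fail-under 25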
2 changes: 0 additions & 2 deletions transforms/universal/filter/spark/requirements.txt

This file was deleted.
