Commit

add unit tests
thanh-nguyen-dang committed Apr 18, 2024
1 parent e97628a commit 8e5a36d
Showing 7 changed files with 56 additions and 20 deletions.
32 changes: 18 additions & 14 deletions .github/workflows/image_build_and_test.yaml
@@ -53,10 +53,6 @@ jobs:
          python3.9 -m poetry config virtualenvs.create true
          python3.9 -m poetry install -vv --no-interaction --with dev
      - name: Run standalone tests
        # Run the tests that do not require Spark and ElasticSearch
        run: python3.9 -m poetry run pytest -v tests/standalone_tests --cov=tube --cov-report term-missing --cov-report xml

      - name: Make dir for compose-etl
        working-directory: ..
        run: git clone https://github.com/uc-cdis/compose-etl.git --branch master --single-branch
@@ -72,6 +68,24 @@ jobs:
          cp -f tests/integrated_tests/gen3/tube/etlMapping.yaml ../compose-etl/configs/etlMapping.yaml
          cp -f tests/integrated_tests/gen3/tube/user.yaml ../compose-etl/configs/user.yaml
      - name: Make XDG_DATA_HOME's tube
        run: mkdir -p $XDG_DATA_HOME/gen3/tube
        env:
          XDG_DATA_HOME: /home/runner/work/tube

      - name: Copy file to the XDG_DATA_HOME folder
        run: cp tests/integrated_tests/gen3/tube/{creds.json,etlMapping.yaml,user.yaml} $XDG_DATA_HOME/gen3/tube/
        env:
          XDG_DATA_HOME: /home/runner/work/tube

      - name: Run standalone tests
        env:
          ES_URL: localhost
          POSTGRES_PORT: 5432
          XDG_DATA_HOME: /home/runner/work/tube
        # Run the tests that do not require Spark and ElasticSearch
        run: python3.9 -m poetry run pytest -v tests/standalone_tests --cov=tube --cov-report term-missing --cov-report xml

      - name: Init postgres database
        run: psql -d postgresql://postgres:postgres@localhost/metadata_db -f tests/integrated_tests/metadata_db.sql

@@ -107,16 +121,6 @@ jobs:
      - name: Check all docker networks
        run: docker network ls

      - name: Make XDG_DATA_HOME's tube
        run: mkdir -p $XDG_DATA_HOME/gen3/tube
        env:
          XDG_DATA_HOME: /home/runner/work/tube

      - name: Copy file to the XDG_DATA_HOME folder
        run: cp tests/integrated_tests/gen3/tube/{creds.json,etlMapping.yaml,user.yaml} $XDG_DATA_HOME/gen3/tube/
        env:
          XDG_DATA_HOME: /home/runner/work/tube

      - name: Run ETL process
        working-directory: ../compose-etl
        run: docker-compose run tube bash -c "python run_config.py; python run_etl.py"
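The net effect of this change: the "Run standalone tests" step now runs after the steps that create $XDG_DATA_HOME/gen3/tube and copy creds.json, etlMapping.yaml, and user.yaml into it, and it gains the ES_URL, POSTGRES_PORT, and XDG_DATA_HOME environment variables. A minimal sketch of how configuration code typically consumes these variables; the setting names and defaults below are illustrative assumptions, not the actual contents of tube.settings:

import os

# Illustrative only: these names are assumptions, not the real tube.settings.
ES_URL = os.environ.get("ES_URL", "localhost")                # ElasticSearch host
POSTGRES_PORT = int(os.environ.get("POSTGRES_PORT", "5432"))  # database port
# creds.json, etlMapping.yaml, and user.yaml are resolved under this root
DATA_HOME = os.environ.get("XDG_DATA_HOME", os.path.expanduser("~/.local/share"))
CONFIG_DIR = os.path.join(DATA_HOME, "gen3", "tube")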
3 changes: 1 addition & 2 deletions tests/dataframe_tests/conftest.py
@@ -3,14 +3,13 @@
import tube.settings as config

from unittest.mock import patch
from pyspark import SparkContext
from tube.utils.spark import make_spark_context
from tube.utils.dd import init_dictionary
from tube.etl.indexers.aggregation.new_translator import (
    Translator as AggregationTranslator,
)
from tube.etl.indexers.injection.new_translator import Translator as InjectionTranslator
from tests.dataframe_tests.util import mock_dictionary_url, initialize_mappings
from tests.util import mock_dictionary_url, initialize_mappings

TEST_DATA_HOME = "./tests/dataframe_tests/test_data"

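The helpers mock_dictionary_url and initialize_mappings now come from the shared tests/util.py rather than the dataframe test package, so the new standalone tests can reuse them. The pattern they enable, mirroring the new standalone test added below; mock_dictionary_url("midrc") is assumed to return a locally stored schema fixture:

from unittest.mock import patch
from tests.util import mock_dictionary_url
from tube.utils.dd import init_dictionary

# Substitute the local "midrc" schema fixture for the remote schema fetch,
# so init_dictionary never touches the network.
with patch(
    "dictionaryutils.load_schemas_from_url",
    return_value=mock_dictionary_url("midrc"),
):
    dictionary, model = init_dictionary("not_used_url")  # URL is never dereferenced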
2 changes: 1 addition & 1 deletion tests/dataframe_tests/test_dataframe_aggregation.py
@@ -1,5 +1,5 @@
import pytest
from tests.dataframe_tests.util import (
from tests.util import (
    get_spark_session,
    assert_dataframe_equality,
    assert_zero,
4 changes: 2 additions & 2 deletions tests/dataframe_tests/test_dataframe_injection.py
@@ -1,11 +1,11 @@
import pytest
from tests.dataframe_tests.util import (
from tests.util import (
    get_spark_session,
    assert_dataframe_equality,
    get_dataframes_from_names,
)
from tube.utils.general import get_node_id_name
from pyspark.sql.types import StructType, StructField, StringType, LongType


@pytest.mark.schema_ibdgc
@pytest.mark.parametrize("translator", [("ibdgc", "file", "injection", [
2 changes: 1 addition & 1 deletion tests/dataframe_tests/test_parser.py
@@ -1,5 +1,5 @@
import pytest
from tests.dataframe_tests.util import (
from tests.util import (
    get_spark_session,
    assert_dataframe_equality,
    get_dataframes_from_names,
33 changes: 33 additions & 0 deletions tests/standalone_tests/test_parser.py
@@ -0,0 +1,33 @@
import pytest
import tube.settings as config

from tube.utils.dd import init_dictionary
from unittest.mock import patch
from tests.util import mock_dictionary_url, initialize_mappings
from tube.etl.indexers.injection.parser import Parser as InjectionParser

def test_create_prop_from_json():
    """
    Unit test for the injection parser: every property created from the
    JSON mapping should have a resolved type.
    """
    # Patch the schema loader so init_dictionary reads the local "midrc"
    # test dictionary instead of fetching schemas over the network.
    with patch(
        "dictionaryutils.load_schemas_from_url",
        return_value=mock_dictionary_url("midrc"),
    ):
        # The URL argument is never dereferenced because the loader is patched.
        dictionary, model = init_dictionary("not_used_url")

    # Debug output: list the edge tables discovered in the generated model.
    for e in model.Edge.get_subclasses():
        print(e.__tablename__)

    mapping = initialize_mappings("midrc", "data_file")
    # get_edges_having_data queries the database, so stub it out to keep
    # this test standalone (no Spark, no ElasticSearch, no live DB).
    with patch("tube.etl.indexers.injection.parser.Parser.get_edges_having_data"):
        parser = InjectionParser(mapping, model, dictionary)
    for prop in parser.props:
        assert prop.type is not None
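For comparison, the same patch can be applied as a decorator, in which case unittest.mock passes the mock object into the test function; a sketch using the same imports as the test above (the argument name is illustrative):

from unittest.mock import patch

@patch("dictionaryutils.load_schemas_from_url")
def test_create_prop_from_json_decorated(mock_load_schemas):
    # The decorator injects the mock; configure its return value here.
    mock_load_schemas.return_value = mock_dictionary_url("midrc")
    dictionary, model = init_dictionary("not_used_url")
    ...  # proceed as in the test above

Either form keeps the test runnable with the CI step above: python3.9 -m poetry run pytest -v tests/standalone_tests.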
tests/dataframe_tests/util.py → tests/util.py
File renamed without changes.
