From f4293e0999276393d7ce4e288dbd87c58d3adc32 Mon Sep 17 00:00:00 2001
From: Colin
Date: Mon, 8 Jan 2024 15:00:54 -0800
Subject: [PATCH] remove circle ci

---
 .circleci/config.yml | 136 -------------------------------------------
 README.md            |   3 -
 2 files changed, 139 deletions(-)
 delete mode 100644 .circleci/config.yml

diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index f2a3b6357..000000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,136 +0,0 @@
-version: 2.1
-
-jobs:
-  unit:
-    environment:
-      DBT_INVOCATION_ENV: circle
-    docker:
-      - image: fishtownanalytics/test-container:10
-    steps:
-      - checkout
-      - run: tox -e flake8,unit
-
-# Turning off for now due to flaky runs of tests will turn back on at later date.
-  integration-spark-session:
-    environment:
-      DBT_INVOCATION_ENV: circle
-    docker:
-      - image: godatadriven/pyspark:3.1
-    steps:
-      - checkout
-      - run: apt-get update
-      - run: conda install python=3.10
-      - run: python3 -m pip install --upgrade pip
-      - run: apt-get install -y git gcc g++ unixodbc-dev libsasl2-dev libxml2-dev libxslt-dev
-      - run: python3 -m pip install tox
-      - run:
-          name: Run integration tests
-          command: tox -e integration-spark-session
-          no_output_timeout: 1h
-      - store_artifacts:
-          path: ./logs
-
-  integration-spark-thrift:
-    environment:
-      DBT_INVOCATION_ENV: circle
-    docker:
-      - image: fishtownanalytics/test-container:10
-      - image: godatadriven/spark:3.1.1
-        environment:
-          WAIT_FOR: localhost:5432
-        command: >
-          --class org.apache.spark.sql.hive.thriftserver.HiveThriftServer2
-          --name Thrift JDBC/ODBC Server
-      - image: postgres:9.6.17-alpine
-        environment:
-          POSTGRES_USER: dbt
-          POSTGRES_PASSWORD: dbt
-          POSTGRES_DB: metastore
-
-    steps:
-      - checkout
-
-      - run:
-          name: Wait for Spark-Thrift
-          command: dockerize -wait tcp://localhost:10000 -timeout 15m -wait-retry-interval 5s
-
-      - run:
-          name: Run integration tests
-          command: tox -e integration-spark-thrift
-          no_output_timeout: 1h
-      - store_artifacts:
-          path: ./logs
-
-  integration-spark-databricks-http:
-    environment:
-      DBT_INVOCATION_ENV: circle
-      DBT_DATABRICKS_RETRY_ALL: True
-      DBT_TEST_USER_1: "buildbot+dbt_test_user_1@dbtlabs.com"
-      DBT_TEST_USER_2: "buildbot+dbt_test_user_2@dbtlabs.com"
-      DBT_TEST_USER_3: "buildbot+dbt_test_user_3@dbtlabs.com"
-    docker:
-      - image: fishtownanalytics/test-container:10
-    steps:
-      - checkout
-      - run:
-          name: Run integration tests
-          command: tox -e integration-spark-databricks-http
-          no_output_timeout: 1h
-      - store_artifacts:
-          path: ./logs
-
-  integration-spark-databricks-odbc-cluster: &databricks-odbc
-    environment:
-      DBT_INVOCATION_ENV: circle
-      ODBC_DRIVER: Simba # TODO: move env var to Docker image
-      DBT_TEST_USER_1: "buildbot+dbt_test_user_1@dbtlabs.com"
-      DBT_TEST_USER_2: "buildbot+dbt_test_user_2@dbtlabs.com"
-      DBT_TEST_USER_3: "buildbot+dbt_test_user_3@dbtlabs.com"
-    docker:
-      # image based on `fishtownanalytics/test-container` w/ Simba ODBC Spark driver installed
-      - image: 828731156495.dkr.ecr.us-east-1.amazonaws.com/dbt-spark-odbc-test-container:latest
-        aws_auth:
-          aws_access_key_id: $AWS_ACCESS_KEY_ID_STAGING
-          aws_secret_access_key: $AWS_SECRET_ACCESS_KEY_STAGING
-    steps:
-      - checkout
-      - run:
-          name: Run integration tests
-          command: tox -e integration-spark-databricks-odbc-cluster
-          no_output_timeout: 1h
-      - store_artifacts:
-          path: ./logs
-
-  integration-spark-databricks-odbc-endpoint:
-    <<: *databricks-odbc
-    steps:
-      - checkout
-      - run:
-          name: Run integration tests
-          command: tox -e integration-spark-databricks-odbc-sql-endpoint
-          no_output_timeout: 1h
-      - store_artifacts:
-          path: ./logs
-
-workflows:
-  version: 2
-  test-everything:
-    jobs:
-      - unit
-      - integration-spark-session:
-          requires:
-            - unit
-      - integration-spark-thrift:
-          requires:
-            - unit
-      - integration-spark-databricks-http:
-          requires:
-            - integration-spark-thrift
-      - integration-spark-databricks-odbc-cluster:
-          context: aws-credentials
-          requires:
-            - integration-spark-thrift
-      - integration-spark-databricks-odbc-endpoint:
-          context: aws-credentials
-          requires:
-            - integration-spark-thrift
diff --git a/README.md b/README.md
index 2d2586795..7e95b1fc3 100644
--- a/README.md
+++ b/README.md
@@ -5,9 +5,6 @@
   Unit Tests Badge
-
-  Integration Tests Badge
-

 **[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.